index
int64 0
10k
| blob_id
stringlengths 40
40
| step-1
stringlengths 0
305k
| step-2
stringlengths 6
1.1M
⌀ | step-3
stringlengths 15
1.23M
⌀ | step-4
stringlengths 23
1.34M
⌀ | step-5
stringlengths 55
1.2M
⌀ | step-ids
listlengths 1
5
|
---|---|---|---|---|---|---|---|
2,500 |
cee77a97503cca517d03ce7cce189974da282a03
|
<mask token>
|
<mask token>
if len(inspect_tables) == 0:
for k, t in enumerate(tickers):
ticker_data = pd.DataFrame()
try:
ticker_data = wb.DataReader(t, data_source='yahoo', start=start_1)
ticker_data.to_sql(tablenames[k], con=engine, if_exists='replace')
except:
print('New Import from {} went wrong'.format(t))
else:
print('New Import from {} is done'.format(t))
else:
for k, t in enumerate(tickers):
if tablenames[k] not in inspect_tables:
ticker_data = pd.DataFrame()
try:
ticker_data = wb.DataReader(t, data_source='yahoo', start=
start_1)
ticker_data.to_sql(tablenames[k], con=engine, if_exists=
'replace')
except:
print('New Import from {} went wrong'.format(t))
else:
print('New Import from {} is done'.format(t))
else:
ticker_data = pd.DataFrame()
check_last_value = pd.read_sql_query(Select_last_value.format(
tablenames[k]), con=engine)
check_last_value2 = pd.to_datetime(check_last_value['Date'][0],
format='%Y-%m-%d').date()
if check_last_value2 != date_today:
try:
ticker_data = wb.DataReader(t, data_source='yahoo',
start=check_last_value2 + dt.timedelta(days=1))
ticker_data.to_sql(tablenames[k], con=engine, if_exists
='append')
except:
print('Update Import from {} went wrong'.format(t))
else:
print('Update Import from {} is done from {}'.format(t,
str(check_last_value2 + dt.timedelta(days=1))))
|
<mask token>
conn = sqlite3.connect('Portfolio_dividens.db')
c = conn.cursor()
<mask token>
engine = create_engine('sqlite:///Portfolio_dividens.db')
<mask token>
path = os.getcwd()
<mask token>
date_today = dt.date.today()
start_1 = '2005-1-1'
tickers_df = pd.read_excel(path + '\\Tickers.xlsx')
tickers = list(tickers_df['Ticker'])
tablenames = list(tickers_df['tablenames'])
<mask token>
inspector = inspect(engine)
inspect_tables = inspector.get_table_names()
Select_last_value = "Select Date from '{}' order by date desc limit 1;"
Check_table_exists = 'SHOW TABLES LIKE {}'
if len(inspect_tables) == 0:
for k, t in enumerate(tickers):
ticker_data = pd.DataFrame()
try:
ticker_data = wb.DataReader(t, data_source='yahoo', start=start_1)
ticker_data.to_sql(tablenames[k], con=engine, if_exists='replace')
except:
print('New Import from {} went wrong'.format(t))
else:
print('New Import from {} is done'.format(t))
else:
for k, t in enumerate(tickers):
if tablenames[k] not in inspect_tables:
ticker_data = pd.DataFrame()
try:
ticker_data = wb.DataReader(t, data_source='yahoo', start=
start_1)
ticker_data.to_sql(tablenames[k], con=engine, if_exists=
'replace')
except:
print('New Import from {} went wrong'.format(t))
else:
print('New Import from {} is done'.format(t))
else:
ticker_data = pd.DataFrame()
check_last_value = pd.read_sql_query(Select_last_value.format(
tablenames[k]), con=engine)
check_last_value2 = pd.to_datetime(check_last_value['Date'][0],
format='%Y-%m-%d').date()
if check_last_value2 != date_today:
try:
ticker_data = wb.DataReader(t, data_source='yahoo',
start=check_last_value2 + dt.timedelta(days=1))
ticker_data.to_sql(tablenames[k], con=engine, if_exists
='append')
except:
print('Update Import from {} went wrong'.format(t))
else:
print('Update Import from {} is done from {}'.format(t,
str(check_last_value2 + dt.timedelta(days=1))))
|
<mask token>
import sqlite3
conn = sqlite3.connect('Portfolio_dividens.db')
c = conn.cursor()
from sqlalchemy import create_engine
engine = create_engine('sqlite:///Portfolio_dividens.db')
import os
path = os.getcwd()
<mask token>
import pandas as pd
from pandas_datareader import data as wb
import datetime as dt
date_today = dt.date.today()
start_1 = '2005-1-1'
tickers_df = pd.read_excel(path + '\\Tickers.xlsx')
tickers = list(tickers_df['Ticker'])
tablenames = list(tickers_df['tablenames'])
from sqlalchemy import inspect
inspector = inspect(engine)
inspect_tables = inspector.get_table_names()
Select_last_value = "Select Date from '{}' order by date desc limit 1;"
Check_table_exists = 'SHOW TABLES LIKE {}'
if len(inspect_tables) == 0:
for k, t in enumerate(tickers):
ticker_data = pd.DataFrame()
try:
ticker_data = wb.DataReader(t, data_source='yahoo', start=start_1)
ticker_data.to_sql(tablenames[k], con=engine, if_exists='replace')
except:
print('New Import from {} went wrong'.format(t))
else:
print('New Import from {} is done'.format(t))
else:
for k, t in enumerate(tickers):
if tablenames[k] not in inspect_tables:
ticker_data = pd.DataFrame()
try:
ticker_data = wb.DataReader(t, data_source='yahoo', start=
start_1)
ticker_data.to_sql(tablenames[k], con=engine, if_exists=
'replace')
except:
print('New Import from {} went wrong'.format(t))
else:
print('New Import from {} is done'.format(t))
else:
ticker_data = pd.DataFrame()
check_last_value = pd.read_sql_query(Select_last_value.format(
tablenames[k]), con=engine)
check_last_value2 = pd.to_datetime(check_last_value['Date'][0],
format='%Y-%m-%d').date()
if check_last_value2 != date_today:
try:
ticker_data = wb.DataReader(t, data_source='yahoo',
start=check_last_value2 + dt.timedelta(days=1))
ticker_data.to_sql(tablenames[k], con=engine, if_exists
='append')
except:
print('Update Import from {} went wrong'.format(t))
else:
print('Update Import from {} is done from {}'.format(t,
str(check_last_value2 + dt.timedelta(days=1))))
|
"""
Created on Wed Nov 6 13:03:42 2019
@author: antonio.blago
"""
#%% Connect to database
import sqlite3
conn = sqlite3.connect('Portfolio_dividens.db')
c = conn.cursor()
from sqlalchemy import create_engine #suport pd.dataframe to sql table
#import mysqlclient
engine = create_engine("sqlite:///Portfolio_dividens.db")
#%% Set up path
import os
# detect the current working directory and print it
path = os.getcwd()
#%%
''' Yahoo finance'''
import pandas as pd
from pandas_datareader import data as wb
import datetime as dt
date_today=dt.date.today()
start_1='2005-1-1'
tickers_df=pd.read_excel(path+r'\Tickers.xlsx')
tickers=list(tickers_df['Ticker'])
tablenames=list(tickers_df['tablenames'])
from sqlalchemy import inspect
inspector = inspect(engine)
# Get table information
inspect_tables=inspector.get_table_names()
Select_last_value= "Select Date from '{}' order by date desc limit 1;"
Check_table_exists="SHOW TABLES LIKE {}"
#pd.read_sql_query(Select_last_value,con=engine)
if len(inspect_tables)==0: #first initialize db
for k, t in enumerate(tickers):
ticker_data=pd.DataFrame()
try:
ticker_data=wb.DataReader(t, data_source='yahoo',start=start_1)
ticker_data.to_sql(tablenames[k],con=engine,if_exists="replace" )
except:
print("New Import from {} went wrong".format(t))
else:
print("New Import from {} is done".format(t))
else:
for k, t in enumerate(tickers):
if tablenames[k] not in inspect_tables: #check table is existing
ticker_data=pd.DataFrame()
try:
ticker_data=wb.DataReader(t, data_source='yahoo',start=start_1)
ticker_data.to_sql(tablenames[k],con=engine,if_exists="replace" )
except:
print("New Import from {} went wrong".format(t))
else:
print("New Import from {} is done".format(t))
else:
ticker_data=pd.DataFrame()
check_last_value=pd.read_sql_query(Select_last_value.format(tablenames[k]),con=engine)
check_last_value2=(pd.to_datetime(check_last_value['Date'][0],format="%Y-%m-%d")).date()
if check_last_value2!=date_today: #dt.datetime.strptime("2019-11-13", "%Y-%m-%d")==pd.to_datetime(check_last_value['Date'][0])
try:
ticker_data=wb.DataReader(t, data_source='yahoo',start=check_last_value2+dt.timedelta(days=1))
ticker_data.to_sql(tablenames[k],con=engine,if_exists="append")
except:
print("Update Import from {} went wrong".format(t))
else:
print("Update Import from {} is done from {}".format(t,str(check_last_value2+dt.timedelta(days=1))))
|
[
0,
1,
2,
3,
4
] |
2,501 |
e05f545ca969e0c2330779ed54a33a594d6ebb25
|
<mask token>
|
<mask token>
print(r)
|
<mask token>
a = 'Python|Java|C#|C++|Kotlin|JavaScript'
r = re.findall('Java', a)
print(r)
|
import re
a = 'Python|Java|C#|C++|Kotlin|JavaScript'
r = re.findall('Java', a)
print(r)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/7/10 14:26
# @Author : MengHe
# @File : c1.py
# @Software: PyCharm
import re
a = 'Python|Java|C#|C++|Kotlin|JavaScript'
r = re.findall('Java', a)
print(r)
# print(a.index('Python') > -1)
# print('Kotlin' in a)
|
[
0,
1,
2,
3,
4
] |
2,502 |
eed79a3895975a0475c0b192bd8a42e80def2e78
|
<mask token>
class BadArgumentException(Exception):
<mask token>
def __str__(self):
return self.msg
class TooManyArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class NotEnoughArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class UsedArchivepgsqlAsArchiveWAL(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
<mask token>
|
<mask token>
class BadArgumentException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class TooManyArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class NotEnoughArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class UsedArchivepgsqlAsArchiveWAL(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
def get_version():
version = VERSION or check_output(['git', 'describe']).strip()
return ' '.join(['%prog', version])
def create_common_parser(**kwargs):
kwargs['version'] = get_version()
parser = OptionParser(**kwargs)
parser.add_option('-c', '--config', dest='config_file', help=
'configuration file', default='/etc/bbpgsql.ini')
parser.add_option('--dry-run', dest='dry_run', help=
'test run - do not actually modify any files', action='store_true',
default=False)
return parser
def common_parse_args(args=None):
parser = create_common_parser()
options, args = parser.parse_args(args)
return parser, options, args
<mask token>
def non_destructive_minimal_parse_and_validate_args(args=None):
args = args or sys.argv[:]
parser, options, args = common_parse_args(args)
common_validate_options_and_args(options, args)
return options, args
<mask token>
def wal_file_exists(config, wal_path):
return os.path.isfile(get_wal_filename(config, wal_path))
def get_wal_filename(config, wal_path):
data_dir = get_data_dir(config)
return os.path.join(data_dir, wal_path)
def is_valid_file(config, wal_path):
return is_relative_path(wal_path) and wal_file_exists(config, wal_path)
<mask token>
def archivepgsql_parse_args(args=None):
archivepgsql_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])
parser = create_common_parser(usage=archivepgsql_usage)
options, args = parser.parse_args(args)
return parser, options, args
def archivepgsql_validate_options_and_args(options=None, args=None):
if not common_validate_options_and_args(options, args):
return False
if args:
if args[0].startswith('pg_xlog'):
raise UsedArchivepgsqlAsArchiveWAL(
'archivepgsql was called with a WAL file path as an argument. This is probably due to configuring archivepgsql as the archive_command in the PGSQL configuration instead of archivewal.'
)
raise TooManyArgumentsException(
'archivepgsql should not be called with any arguments. Are you using it as the archive_command instead of archivewal?'
)
return True
def restorewal_parse_args(args=None):
restorewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',
'<name_of_wal_file_to_restore>', '<path_to_write_restored_file>'])
parser = create_common_parser(usage=restorewal_usage)
options, args = parser.parse_args(args)
return parser, options, args
<mask token>
def storagestats_parse_args(args=None):
storagestats_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])
parser = create_common_parser(usage=storagestats_usage)
options, args = parser.parse_args(args)
return parser, options, args
<mask token>
|
<mask token>
class BadArgumentException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class TooManyArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class NotEnoughArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class UsedArchivepgsqlAsArchiveWAL(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
def get_version():
version = VERSION or check_output(['git', 'describe']).strip()
return ' '.join(['%prog', version])
def create_common_parser(**kwargs):
kwargs['version'] = get_version()
parser = OptionParser(**kwargs)
parser.add_option('-c', '--config', dest='config_file', help=
'configuration file', default='/etc/bbpgsql.ini')
parser.add_option('--dry-run', dest='dry_run', help=
'test run - do not actually modify any files', action='store_true',
default=False)
return parser
def common_parse_args(args=None):
parser = create_common_parser()
options, args = parser.parse_args(args)
return parser, options, args
def common_validate_options_and_args(options=None, args=None):
if not os.path.exists(options.config_file):
raise Exception('File %s does not exist' % options.config_file)
if not os.access(options.config_file, os.R_OK):
raise Exception('No read access for %s' % options.config_file)
config_stats = os.stat(options.config_file)
if (config_stats.st_mode & stat.S_IRWXG | config_stats.st_mode & stat.
S_IRWXO):
raise Exception('File %s has open group or other permissions' %
options.config_file)
return True
def non_destructive_minimal_parse_and_validate_args(args=None):
args = args or sys.argv[:]
parser, options, args = common_parse_args(args)
common_validate_options_and_args(options, args)
return options, args
<mask token>
def wal_file_exists(config, wal_path):
return os.path.isfile(get_wal_filename(config, wal_path))
def get_wal_filename(config, wal_path):
data_dir = get_data_dir(config)
return os.path.join(data_dir, wal_path)
def is_valid_file(config, wal_path):
return is_relative_path(wal_path) and wal_file_exists(config, wal_path)
<mask token>
def archivepgsql_parse_args(args=None):
archivepgsql_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])
parser = create_common_parser(usage=archivepgsql_usage)
options, args = parser.parse_args(args)
return parser, options, args
def archivepgsql_validate_options_and_args(options=None, args=None):
if not common_validate_options_and_args(options, args):
return False
if args:
if args[0].startswith('pg_xlog'):
raise UsedArchivepgsqlAsArchiveWAL(
'archivepgsql was called with a WAL file path as an argument. This is probably due to configuring archivepgsql as the archive_command in the PGSQL configuration instead of archivewal.'
)
raise TooManyArgumentsException(
'archivepgsql should not be called with any arguments. Are you using it as the archive_command instead of archivewal?'
)
return True
def restorewal_parse_args(args=None):
restorewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',
'<name_of_wal_file_to_restore>', '<path_to_write_restored_file>'])
parser = create_common_parser(usage=restorewal_usage)
options, args = parser.parse_args(args)
return parser, options, args
<mask token>
def storagestats_parse_args(args=None):
storagestats_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])
parser = create_common_parser(usage=storagestats_usage)
options, args = parser.parse_args(args)
return parser, options, args
<mask token>
|
<mask token>
VERSION = ''
class BadArgumentException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class TooManyArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class NotEnoughArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class UsedArchivepgsqlAsArchiveWAL(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
def get_version():
version = VERSION or check_output(['git', 'describe']).strip()
return ' '.join(['%prog', version])
def create_common_parser(**kwargs):
kwargs['version'] = get_version()
parser = OptionParser(**kwargs)
parser.add_option('-c', '--config', dest='config_file', help=
'configuration file', default='/etc/bbpgsql.ini')
parser.add_option('--dry-run', dest='dry_run', help=
'test run - do not actually modify any files', action='store_true',
default=False)
return parser
def common_parse_args(args=None):
parser = create_common_parser()
options, args = parser.parse_args(args)
return parser, options, args
def common_validate_options_and_args(options=None, args=None):
if not os.path.exists(options.config_file):
raise Exception('File %s does not exist' % options.config_file)
if not os.access(options.config_file, os.R_OK):
raise Exception('No read access for %s' % options.config_file)
config_stats = os.stat(options.config_file)
if (config_stats.st_mode & stat.S_IRWXG | config_stats.st_mode & stat.
S_IRWXO):
raise Exception('File %s has open group or other permissions' %
options.config_file)
return True
def non_destructive_minimal_parse_and_validate_args(args=None):
args = args or sys.argv[:]
parser, options, args = common_parse_args(args)
common_validate_options_and_args(options, args)
return options, args
def archivewal_parse_args(args=None):
archivewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',
'<path_to_wal_file_to_archive>'])
parser = create_common_parser(usage=archivewal_usage)
options, args = parser.parse_args(args)
return parser, options, args
def is_relative_path(wal_path):
return not os.path.isabs(wal_path)
def wal_file_exists(config, wal_path):
return os.path.isfile(get_wal_filename(config, wal_path))
def get_wal_filename(config, wal_path):
data_dir = get_data_dir(config)
return os.path.join(data_dir, wal_path)
def is_valid_file(config, wal_path):
return is_relative_path(wal_path) and wal_file_exists(config, wal_path)
def archivewal_validate_options_and_args(options=None, args=None):
args = args or []
if not common_validate_options_and_args(options, args):
return False
config = get_config_from_filename_and_set_up_logging(options.config_file)
if len(args) != 1 or not is_valid_file(config, args[0]):
raise Exception(
'A relative path to a WAL file to be archived must be provided!')
return True
def archivepgsql_parse_args(args=None):
archivepgsql_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])
parser = create_common_parser(usage=archivepgsql_usage)
options, args = parser.parse_args(args)
return parser, options, args
def archivepgsql_validate_options_and_args(options=None, args=None):
if not common_validate_options_and_args(options, args):
return False
if args:
if args[0].startswith('pg_xlog'):
raise UsedArchivepgsqlAsArchiveWAL(
'archivepgsql was called with a WAL file path as an argument. This is probably due to configuring archivepgsql as the archive_command in the PGSQL configuration instead of archivewal.'
)
raise TooManyArgumentsException(
'archivepgsql should not be called with any arguments. Are you using it as the archive_command instead of archivewal?'
)
return True
def restorewal_parse_args(args=None):
restorewal_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]',
'<name_of_wal_file_to_restore>', '<path_to_write_restored_file>'])
parser = create_common_parser(usage=restorewal_usage)
options, args = parser.parse_args(args)
return parser, options, args
def restorewal_validate_options_and_args(options=None, args=None):
args = args or []
if not common_validate_options_and_args(options, args):
return False
nargs = len(args)
if nargs != 2:
raise Exception(
'restorewal must be given the name of the WAL file to retrieve and the destination path to restore to.'
)
return True
def storagestats_parse_args(args=None):
storagestats_usage = ' '.join([os.path.basename(sys.argv[0]), '[options]'])
parser = create_common_parser(usage=storagestats_usage)
options, args = parser.parse_args(args)
return parser, options, args
def storagestats_validate_options_and_args(options=None, args=None):
if not common_validate_options_and_args(options, args):
return False
if args:
raise TooManyArgumentsException('storagestats takes no arguments')
return True
|
import os
import stat
from optparse import OptionParser
from bbpgsql.configuration import get_config_from_filename_and_set_up_logging
from bbpgsql.configuration.general import get_data_dir
from subprocess import check_output
import sys
VERSION = ''
class BadArgumentException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class TooManyArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class NotEnoughArgumentsException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class UsedArchivepgsqlAsArchiveWAL(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
def get_version():
# override "version" with a constant string for release
version = VERSION or check_output(['git', 'describe']).strip()
return ' '.join(['%prog', version])
def create_common_parser(**kwargs):
kwargs['version'] = get_version()
parser = OptionParser(**kwargs)
parser.add_option('-c', '--config', dest='config_file',
help='configuration file', default='/etc/bbpgsql.ini')
parser.add_option('--dry-run', dest='dry_run',
help='test run - do not actually modify any files',
action='store_true',
default=False)
return parser
def common_parse_args(args=None):
parser = create_common_parser()
options, args = parser.parse_args(args)
return parser, options, args
def common_validate_options_and_args(options=None, args=None):
if not os.path.exists(options.config_file):
raise Exception("File %s does not exist" % (options.config_file))
if not os.access(options.config_file, os.R_OK):
raise Exception("No read access for %s" % (options.config_file))
config_stats = os.stat(options.config_file)
if ((config_stats.st_mode & stat.S_IRWXG) |
(config_stats.st_mode & stat.S_IRWXO)):
raise Exception("File %s has open group or other permissions" %
(options.config_file))
return True
def non_destructive_minimal_parse_and_validate_args(args=None):
args = args or sys.argv[:]
parser, options, args = common_parse_args(args)
common_validate_options_and_args(options, args)
return options, args
def archivewal_parse_args(args=None):
archivewal_usage = ' '.join([
os.path.basename(sys.argv[0]),
'[options]',
'<path_to_wal_file_to_archive>'])
parser = create_common_parser(usage=archivewal_usage)
options, args = parser.parse_args(args)
return parser, options, args
def is_relative_path(wal_path):
return not os.path.isabs(wal_path)
def wal_file_exists(config, wal_path):
return os.path.isfile(get_wal_filename(config, wal_path))
def get_wal_filename(config, wal_path):
data_dir = get_data_dir(config)
return os.path.join(data_dir, wal_path)
def is_valid_file(config, wal_path):
return is_relative_path(wal_path) and wal_file_exists(config, wal_path)
def archivewal_validate_options_and_args(options=None, args=None):
args = args or []
if not common_validate_options_and_args(options, args):
return False
config = get_config_from_filename_and_set_up_logging(options.config_file)
if len(args) != 1 or not is_valid_file(config, args[0]):
raise Exception('A relative path to a WAL file to be archived' \
' must be provided!')
return True
def archivepgsql_parse_args(args=None):
archivepgsql_usage = ' '.join([
os.path.basename(sys.argv[0]),
'[options]'])
parser = create_common_parser(usage=archivepgsql_usage)
options, args = parser.parse_args(args)
return parser, options, args
def archivepgsql_validate_options_and_args(options=None, args=None):
if not common_validate_options_and_args(options, args):
return False
if args:
if args[0].startswith('pg_xlog'):
raise UsedArchivepgsqlAsArchiveWAL('archivepgsql was called with' \
' a WAL file path as an argument. This is' \
' probably due to configuring archivepgsql' \
' as the archive_command in the PGSQL' \
' configuration instead of archivewal.')
raise TooManyArgumentsException('archivepgsql should not be called' \
' with any arguments. Are you using it as the' \
' archive_command instead of archivewal?')
return True
def restorewal_parse_args(args=None):
restorewal_usage = ' '.join([
os.path.basename(sys.argv[0]),
'[options]',
'<name_of_wal_file_to_restore>',
'<path_to_write_restored_file>',
])
parser = create_common_parser(usage=restorewal_usage)
options, args = parser.parse_args(args)
return parser, options, args
def restorewal_validate_options_and_args(options=None, args=None):
args = args or []
if not common_validate_options_and_args(options, args):
return False
nargs = len(args)
if nargs != 2:
raise Exception('restorewal must be given the name of the WAL' \
' file to retrieve and the destination path to' \
' restore to.')
return True
def storagestats_parse_args(args=None):
storagestats_usage = ' '.join([
os.path.basename(sys.argv[0]),
'[options]'])
parser = create_common_parser(usage=storagestats_usage)
options, args = parser.parse_args(args)
return parser, options, args
def storagestats_validate_options_and_args(options=None, args=None):
if not common_validate_options_and_args(options, args):
return False
if args:
raise TooManyArgumentsException('storagestats takes no arguments')
return True
|
[
11,
23,
24,
30,
32
] |
2,503 |
35c4e26acbe99ca7f37b63b67f38d5c40fbf0ea4
|
<mask token>
|
club_info = {'club_url':
'https://www.futbin.com///18/leagues/Major%20League%20Soccer?page=1&club=101112'
, 'club_logo':
'https://cdn.futbin.com/content/fifa18/img/clubs/101112.png',
'club_name': 'Vancouver Whitecaps FC'}
players = {}
players['Waston'] = {'player_url':
'https://www.futbin.com//18/player/15583/Kendall Waston', 'player_name':
'Kendall Waston', 'player_rating': '80', 'player_shortname': 'Waston',
'player_position': 'CB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/72.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/216811.png?v=2'}
players['Montero'] = {'player_url':
'https://www.futbin.com//18/player/1645/Fredy Montero', 'player_name':
'Fredy Montero', 'player_rating': '76', 'player_shortname': 'Montero',
'player_position': 'ST', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/56.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/190919.png?v=2'}
players['Waston'] = {'player_url':
'https://www.futbin.com//18/player/2545/Kendall Waston', 'player_name':
'Kendall Waston', 'player_rating': '74', 'player_shortname': 'Waston',
'player_position': 'CB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/72.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/216811.png?v=2'}
players['Laba'] = {'player_url':
'https://www.futbin.com//18/player/2526/Matías Laba', 'player_name':
'Matías Laba', 'player_rating': '74', 'player_shortname': 'Laba',
'player_position': 'CDM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/52.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/214594.png?v=2'}
players['Kamara'] = {'player_url':
'https://www.futbin.com//18/player/16045/Kei Kamara', 'player_name':
'Kei Kamara', 'player_rating': '74', 'player_shortname': 'Kamara',
'player_position': 'ST', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/138.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/176048.png?v=2'}
players['Ghazal'] = {'player_url':
'https://www.futbin.com//18/player/3000/Aly Ghazal', 'player_name':
'Aly Ghazal', 'player_rating': '73', 'player_shortname': 'Ghazal',
'player_position': 'CDM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/111.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/212469.png?v=2'}
players['Ousted'] = {'player_url':
'https://www.futbin.com//18/player/2630/David Ousted', 'player_name':
'David Ousted', 'player_rating': '73', 'player_shortname': 'Ousted',
'player_position': 'GK', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/13.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/162052.png?v=2'}
players['Bolaños'] = {'player_url':
'https://www.futbin.com//18/player/3890/Christian Bolaños',
'player_name': 'Christian Bolaños', 'player_rating': '71',
'player_shortname': 'Bolaños', 'player_position': 'RM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/72.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/175753.png?v=2'}
players['Juárez'] = {'player_url':
'https://www.futbin.com//18/player/18393/Efraín Juárez', 'player_name':
'Efraín Juárez', 'player_rating': '71', 'player_shortname': 'Juárez',
'player_position': 'RB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/83.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/167519.png?v=2'}
players['Mutch'] = {'player_url':
'https://www.futbin.com//18/player/18709/Jordon Mutch', 'player_name':
'Jordon Mutch', 'player_rating': '70', 'player_shortname': 'Mutch',
'player_position': 'CM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/14.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/186117.png?v=2'}
players['Parker'] = {'player_url':
'https://www.futbin.com//18/player/5180/Tim Parker', 'player_name':
'Tim Parker', 'player_rating': '70', 'player_shortname': 'Parker',
'player_position': 'CB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/95.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/226803.png?v=2'}
players['Felipe'] = {'player_url':
'https://www.futbin.com//18/player/18710/Felipe Martins', 'player_name':
'Felipe Martins', 'player_rating': '70', 'player_shortname': 'Felipe',
'player_position': 'CM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/54.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/207465.png?v=2'}
players['Techera'] = {'player_url':
'https://www.futbin.com//18/player/5285/Cristian Techera',
'player_name': 'Cristian Techera', 'player_rating': '69',
'player_shortname': 'Techera', 'player_position': 'RM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/60.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/228404.png?v=2'}
players['Maund'] = {'player_url':
'https://www.futbin.com//18/player/5824/Aaron Maund', 'player_name':
'Aaron Maund', 'player_rating': '69', 'player_shortname': 'Maund',
'player_position': 'CB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/95.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/207860.png?v=2'}
players['Marinovic'] = {'player_url':
'https://www.futbin.com//18/player/18161/Stefan Marinovic',
'player_name': 'Stefan Marinovic', 'player_rating': '68',
'player_shortname': 'Marinovic', 'player_position': 'GK',
'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/198.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/231056.png?v=2'}
players['Edgar'] = {'player_url':
'https://www.futbin.com//18/player/6202/David Edgar', 'player_name':
'David Edgar', 'player_rating': '68', 'player_shortname': 'Edgar',
'player_position': 'CB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/70.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/172207.png?v=2'}
players['Aja'] = {'player_url':
'https://www.futbin.com//18/player/18394/José Aja', 'player_name':
'José Aja', 'player_rating': '68', 'player_shortname': 'Aja',
'player_position': 'CB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/60.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/235280.png?v=2'}
players['Davies'] = {'player_url':
'https://www.futbin.com//18/player/18543/Alphonso Davies',
'player_name': 'Alphonso Davies', 'player_rating': '67',
'player_shortname': 'Davies', 'player_position': 'LM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/70.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/234396.png?v=2'}
players['Williams'] = {'player_url':
'https://www.futbin.com//18/player/7680/Sheanon Williams',
'player_name': 'Sheanon Williams', 'player_rating': '67',
'player_shortname': 'Williams', 'player_position': 'RB',
'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/95.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/190585.png?v=2'}
players['Harvey'] = {'player_url':
'https://www.futbin.com//18/player/7581/Jordan Harvey', 'player_name':
'Jordan Harvey', 'player_rating': '67', 'player_shortname': 'Harvey',
'player_position': 'LB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/95.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/178379.png?v=2'}
players['Franklin'] = {'player_url':
'https://www.futbin.com//18/player/18395/Sean Franklin', 'player_name':
'Sean Franklin', 'player_rating': '67', 'player_shortname': 'Franklin',
'player_position': 'RB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/95.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/186645.png?v=2'}
players['Henry'] = {'player_url':
'https://www.futbin.com//18/player/18396/Doneil Henry', 'player_name':
'Doneil Henry', 'player_rating': '66', 'player_shortname': 'Henry',
'player_position': 'CB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/70.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/201208.png?v=2'}
players['Ibini'] = {'player_url':
'https://www.futbin.com//18/player/8275/Bernie Ibini', 'player_name':
'Bernie Ibini', 'player_rating': '66', 'player_shortname': 'Ibini',
'player_position': 'RM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/195.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/203092.png?v=2'}
players['de Jong'] = {'player_url':
'https://www.futbin.com//18/player/7954/Marcel de Jong', 'player_name':
'Marcel de Jong', 'player_rating': '66', 'player_shortname': 'de Jong',
'player_position': 'LB', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/70.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/164581.png?v=2'}
players['Mezquida'] = {'player_url':
'https://www.futbin.com//18/player/8267/Nicolás Mezquida',
'player_name': 'Nicolás Mezquida', 'player_rating': '66',
'player_shortname': 'Mezquida', 'player_position': 'CAM',
'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/60.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/202866.png?v=2'}
players['Reyna'] = {'player_url':
'https://www.futbin.com//18/player/8529/Yordy Reyna', 'player_name':
'Yordy Reyna', 'player_rating': '66', 'player_shortname': 'Reyna',
'player_position': 'CF', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/59.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/214000.png?v=2'}
players['Hurtado'] = {'player_url':
'https://www.futbin.com//18/player/8507/Erik Hurtado', 'player_name':
'Erik Hurtado', 'player_rating': '66', 'player_shortname': 'Hurtado',
'player_position': 'ST', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/95.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/212750.png?v=2'}
players['Tchani'] = {'player_url':
'https://www.futbin.com//18/player/8175/Tony Tchani', 'player_name':
'Tony Tchani', 'player_rating': '66', 'player_shortname': 'Tchani',
'player_position': 'CM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/103.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/197717.png?v=2'}
players['Shea'] = {'player_url':
'https://www.futbin.com//18/player/8062/Brek Shea', 'player_name':
'Brek Shea', 'player_rating': '66', 'player_shortname': 'Shea',
'player_position': 'LM', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/95.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/186536.png?v=2'}
players['Rowe'] = {'player_url':
'https://www.futbin.com//18/player/16147/Brian Rowe', 'player_name':
'Brian Rowe', 'player_rating': '66', 'player_shortname': 'Rowe',
'player_position': 'GK', 'player_nation':
'https://cdn.futbin.com/content/fifa18/img/nation/95.png',
'player_photo':
'https://cdn.futbin.com/content/fifa18/img/players/210711.png?v=2'}
|
club_info = {'club_url': 'https://www.futbin.com///18/leagues/Major%20League%20Soccer?page=1&club=101112', 'club_logo': 'https://cdn.futbin.com/content/fifa18/img/clubs/101112.png', 'club_name': 'Vancouver Whitecaps FC'}
players = {}
players['Waston'] = {'player_url': 'https://www.futbin.com//18/player/15583/Kendall Waston', 'player_name': 'Kendall Waston', 'player_rating': '80', 'player_shortname': 'Waston', 'player_position': 'CB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/72.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/216811.png?v=2'}
players['Montero'] = {'player_url': 'https://www.futbin.com//18/player/1645/Fredy Montero', 'player_name': 'Fredy Montero', 'player_rating': '76', 'player_shortname': 'Montero', 'player_position': 'ST', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/56.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/190919.png?v=2'}
players['Waston'] = {'player_url': 'https://www.futbin.com//18/player/2545/Kendall Waston', 'player_name': 'Kendall Waston', 'player_rating': '74', 'player_shortname': 'Waston', 'player_position': 'CB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/72.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/216811.png?v=2'}
players['Laba'] = {'player_url': 'https://www.futbin.com//18/player/2526/Matías Laba', 'player_name': 'Matías Laba', 'player_rating': '74', 'player_shortname': 'Laba', 'player_position': 'CDM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/52.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/214594.png?v=2'}
players['Kamara'] = {'player_url': 'https://www.futbin.com//18/player/16045/Kei Kamara', 'player_name': 'Kei Kamara', 'player_rating': '74', 'player_shortname': 'Kamara', 'player_position': 'ST', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/138.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/176048.png?v=2'}
players['Ghazal'] = {'player_url': 'https://www.futbin.com//18/player/3000/Aly Ghazal', 'player_name': 'Aly Ghazal', 'player_rating': '73', 'player_shortname': 'Ghazal', 'player_position': 'CDM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/111.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/212469.png?v=2'}
players['Ousted'] = {'player_url': 'https://www.futbin.com//18/player/2630/David Ousted', 'player_name': 'David Ousted', 'player_rating': '73', 'player_shortname': 'Ousted', 'player_position': 'GK', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/13.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/162052.png?v=2'}
players['Bolaños'] = {'player_url': 'https://www.futbin.com//18/player/3890/Christian Bolaños', 'player_name': 'Christian Bolaños', 'player_rating': '71', 'player_shortname': 'Bolaños', 'player_position': 'RM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/72.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/175753.png?v=2'}
players['Juárez'] = {'player_url': 'https://www.futbin.com//18/player/18393/Efraín Juárez', 'player_name': 'Efraín Juárez', 'player_rating': '71', 'player_shortname': 'Juárez', 'player_position': 'RB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/83.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/167519.png?v=2'}
players['Mutch'] = {'player_url': 'https://www.futbin.com//18/player/18709/Jordon Mutch', 'player_name': 'Jordon Mutch', 'player_rating': '70', 'player_shortname': 'Mutch', 'player_position': 'CM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/14.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/186117.png?v=2'}
players['Parker'] = {'player_url': 'https://www.futbin.com//18/player/5180/Tim Parker', 'player_name': 'Tim Parker', 'player_rating': '70', 'player_shortname': 'Parker', 'player_position': 'CB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/95.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/226803.png?v=2'}
players['Felipe'] = {'player_url': 'https://www.futbin.com//18/player/18710/Felipe Martins', 'player_name': 'Felipe Martins', 'player_rating': '70', 'player_shortname': 'Felipe', 'player_position': 'CM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/54.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/207465.png?v=2'}
players['Techera'] = {'player_url': 'https://www.futbin.com//18/player/5285/Cristian Techera', 'player_name': 'Cristian Techera', 'player_rating': '69', 'player_shortname': 'Techera', 'player_position': 'RM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/60.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/228404.png?v=2'}
players['Maund'] = {'player_url': 'https://www.futbin.com//18/player/5824/Aaron Maund', 'player_name': 'Aaron Maund', 'player_rating': '69', 'player_shortname': 'Maund', 'player_position': 'CB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/95.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/207860.png?v=2'}
players['Marinovic'] = {'player_url': 'https://www.futbin.com//18/player/18161/Stefan Marinovic', 'player_name': 'Stefan Marinovic', 'player_rating': '68', 'player_shortname': 'Marinovic', 'player_position': 'GK', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/198.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/231056.png?v=2'}
players['Edgar'] = {'player_url': 'https://www.futbin.com//18/player/6202/David Edgar', 'player_name': 'David Edgar', 'player_rating': '68', 'player_shortname': 'Edgar', 'player_position': 'CB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/70.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/172207.png?v=2'}
players['Aja'] = {'player_url': 'https://www.futbin.com//18/player/18394/José Aja', 'player_name': 'José Aja', 'player_rating': '68', 'player_shortname': 'Aja', 'player_position': 'CB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/60.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/235280.png?v=2'}
players['Davies'] = {'player_url': 'https://www.futbin.com//18/player/18543/Alphonso Davies', 'player_name': 'Alphonso Davies', 'player_rating': '67', 'player_shortname': 'Davies', 'player_position': 'LM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/70.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/234396.png?v=2'}
players['Williams'] = {'player_url': 'https://www.futbin.com//18/player/7680/Sheanon Williams', 'player_name': 'Sheanon Williams', 'player_rating': '67', 'player_shortname': 'Williams', 'player_position': 'RB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/95.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/190585.png?v=2'}
players['Harvey'] = {'player_url': 'https://www.futbin.com//18/player/7581/Jordan Harvey', 'player_name': 'Jordan Harvey', 'player_rating': '67', 'player_shortname': 'Harvey', 'player_position': 'LB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/95.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/178379.png?v=2'}
players['Franklin'] = {'player_url': 'https://www.futbin.com//18/player/18395/Sean Franklin', 'player_name': 'Sean Franklin', 'player_rating': '67', 'player_shortname': 'Franklin', 'player_position': 'RB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/95.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/186645.png?v=2'}
players['Henry'] = {'player_url': 'https://www.futbin.com//18/player/18396/Doneil Henry', 'player_name': 'Doneil Henry', 'player_rating': '66', 'player_shortname': 'Henry', 'player_position': 'CB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/70.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/201208.png?v=2'}
players['Ibini'] = {'player_url': 'https://www.futbin.com//18/player/8275/Bernie Ibini', 'player_name': 'Bernie Ibini', 'player_rating': '66', 'player_shortname': 'Ibini', 'player_position': 'RM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/195.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/203092.png?v=2'}
players['de Jong'] = {'player_url': 'https://www.futbin.com//18/player/7954/Marcel de Jong', 'player_name': 'Marcel de Jong', 'player_rating': '66', 'player_shortname': 'de Jong', 'player_position': 'LB', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/70.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/164581.png?v=2'}
players['Mezquida'] = {'player_url': 'https://www.futbin.com//18/player/8267/Nicolás Mezquida', 'player_name': 'Nicolás Mezquida', 'player_rating': '66', 'player_shortname': 'Mezquida', 'player_position': 'CAM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/60.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/202866.png?v=2'}
players['Reyna'] = {'player_url': 'https://www.futbin.com//18/player/8529/Yordy Reyna', 'player_name': 'Yordy Reyna', 'player_rating': '66', 'player_shortname': 'Reyna', 'player_position': 'CF', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/59.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/214000.png?v=2'}
players['Hurtado'] = {'player_url': 'https://www.futbin.com//18/player/8507/Erik Hurtado', 'player_name': 'Erik Hurtado', 'player_rating': '66', 'player_shortname': 'Hurtado', 'player_position': 'ST', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/95.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/212750.png?v=2'}
players['Tchani'] = {'player_url': 'https://www.futbin.com//18/player/8175/Tony Tchani', 'player_name': 'Tony Tchani', 'player_rating': '66', 'player_shortname': 'Tchani', 'player_position': 'CM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/103.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/197717.png?v=2'}
players['Shea'] = {'player_url': 'https://www.futbin.com//18/player/8062/Brek Shea', 'player_name': 'Brek Shea', 'player_rating': '66', 'player_shortname': 'Shea', 'player_position': 'LM', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/95.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/186536.png?v=2'}
players['Rowe'] = {'player_url': 'https://www.futbin.com//18/player/16147/Brian Rowe', 'player_name': 'Brian Rowe', 'player_rating': '66', 'player_shortname': 'Rowe', 'player_position': 'GK', 'player_nation': 'https://cdn.futbin.com/content/fifa18/img/nation/95.png', 'player_photo': 'https://cdn.futbin.com/content/fifa18/img/players/210711.png?v=2'}
| null | null |
[
0,
1,
2
] |
2,504 |
a2d23c05e1ca04d25f5f5012881c4000e6316cb9
|
left_motor = 1563872856371375
right_motor = 7567382956378165
servo = 9275392915737265
def autonomous_setup():
print("Autonomous mode has started!")
Robot.run(autonomous_actions)
def autonomous_main():
pass
async def autonomous_actions():
print("Autonomous action sequence started")
await Actions.sleep(1.0)
print("1 second has passed in autonomous mode")
def teleop_setup():
print("Tele-operated mode has started!")
def move_arm();
Robot.get_value(left_motor, serv0, 1)
time.sleep(2)
Robot.get_value(left_motor, serv0, 0)
def teleop_main():
if gamepad.get_value("r_trigger") > 0.5:
while True:
# move forward
Robot.get_value(left_motor, "duty_cycle", -1.0)
Robot.get_value(right_motor, "duty_cycle", -1.0)
else if gamepad.get_value("l_trigger") > 0.5:
while True:
# move backward
Robot.get_value(left_motor, "duty_cycle", 1.0)
Robot.get_value(right_motor, "duty_cycle", 1.0)
else if 1.0 > gamepad.get_value("joystick_left_y") > 0.75
while True:
# turn right
Robot.get_value(left_motor, "duty_cycle", 1.0)
Robot.get_value(right_motor, "duty_cycle", -1.0)
time.sleep(1)
else if 1.0 > gamepad.get_value("joystick_right_y") > 0.75)
while True:
# turn left
Robot.get_value(left_motor, "duty_cycle", -1.0)
Robot.get_value(right_motor, "duty_cycle", 1.0)
time.sleep(1)
if gamepad.get_vlue("button_a") == 1:
move_arm()
| null | null | null | null |
[
0
] |
2,505 |
94334f91b1556c05dce0ed6f23c074bb8875f185
|
<mask token>
def end_num(s):
text = re.compile('.*[0-9]$')
if text.match(s):
return 'Yes!Number is present at the end of string'
else:
return 'No!Number is not present at the end of string'
<mask token>
|
<mask token>
def end_num(s):
text = re.compile('.*[0-9]$')
if text.match(s):
return 'Yes!Number is present at the end of string'
else:
return 'No!Number is not present at the end of string'
print(end_num(s))
|
<mask token>
s = input('enter the string:')
def end_num(s):
text = re.compile('.*[0-9]$')
if text.match(s):
return 'Yes!Number is present at the end of string'
else:
return 'No!Number is not present at the end of string'
print(end_num(s))
|
import re
s = input('enter the string:')
def end_num(s):
text = re.compile('.*[0-9]$')
if text.match(s):
return 'Yes!Number is present at the end of string'
else:
return 'No!Number is not present at the end of string'
print(end_num(s))
|
import re
s=input('enter the string:')
def end_num(s):
text = re.compile(r".*[0-9]$")
if text.match(s):
return 'Yes!Number is present at the end of string'
else:
return 'No!Number is not present at the end of string'
print(end_num(s))
|
[
1,
2,
3,
4,
5
] |
2,506 |
9696e5799d46adb5b92c0900e2064b927addfd93
|
<mask token>
|
<mask token>
for sequence_file in sequences:
f_in = open(current_dir + '/sample_genomes/' + sequence_file, 'r')
f_out.write(f_in.read())
f_in.close()
data = []
fa_file = current_dir + '/sample_genomes/' + sequence_file
seqs = SeqIO.parse(fa_file, 'fasta')
for record in seqs:
data.append(record.seq.upper())
seq = data[0]
temp_fos = []
temp_glcm = []
temp_lbp = []
temp_mlbp = []
for mapping_type in range(mapping_function_size):
skewness, my_kurtosis, energy, entropy = get_features(seq, mapping_type
)
temp_fos.append([skewness, my_kurtosis, energy, entropy])
entropy, contrast, energy, correlation, homogeneity = (
get_features_glcm(seq, mapping_type))
temp_glcm.append([entropy, contrast, energy, correlation, homogeneity])
hist_lbp = get_features_lbp(seq, mapping_type)
temp_lbp.append(hist_lbp)
hist_mlbp = get_features_mlbp(seq, mapping_type)
temp_mlbp.append(hist_mlbp)
data_features_fos.append(temp_fos)
data_features_glcm.append(temp_glcm)
data_features_lbp.append(temp_lbp)
data_features_mlbp.append(temp_mlbp)
f_out.close()
<mask token>
for mapping_type in range(mapping_function_size):
DIST_fos = np.zeros((data_features_fos.shape[0], data_features_fos.
shape[0]))
for i in range(data_features_fos.shape[0]):
row = np.zeros(data_features_fos.shape[0])
for j in range(i, data_features_fos.shape[0]):
dist = np.sqrt(np.sum((data_features_fos[i][mapping_type] -
data_features_fos[j][mapping_type]) ** 2))
row[j] = dist
DIST_fos[i] = row
DIST_fos = DIST_fos + DIST_fos.T - np.diag(np.diag(DIST_fos))
DIST_fos = (DIST_fos - np.min(DIST_fos)) / (np.max(DIST_fos) - np.min(
DIST_fos))
full_distances_fos.append(DIST_fos[0, 1:DIST_fos.shape[0]])
<mask token>
print('full_distances_fos', full_distances_fos.shape)
<mask token>
for mapping_type in range(mapping_function_size):
DIST_glcm = np.zeros((data_features_glcm.shape[0], data_features_glcm.
shape[0]))
for i in range(data_features_glcm.shape[0]):
row = np.zeros(data_features_glcm.shape[0])
for j in range(i, data_features_glcm.shape[0]):
dist = np.sqrt(np.sum((data_features_glcm[i][mapping_type] -
data_features_glcm[j][mapping_type]) ** 2))
row[j] = dist
DIST_glcm[i] = row
DIST_glcm = DIST_glcm + DIST_glcm.T - np.diag(np.diag(DIST_glcm))
DIST_glcm = (DIST_glcm - np.min(DIST_glcm)) / (np.max(DIST_glcm) - np.
min(DIST_glcm))
full_distances_glcm.append(DIST_glcm[0, 1:DIST_glcm.shape[0]])
<mask token>
print('full_distances_glcm', full_distances_glcm.shape)
<mask token>
for mapping_type in range(mapping_function_size):
DIST_lbp = np.zeros((data_features_lbp.shape[0], data_features_lbp.
shape[0]))
for i in range(data_features_lbp.shape[0]):
row = np.zeros(data_features_lbp.shape[0])
for j in range(i, data_features_lbp.shape[0]):
dist = np.sqrt(np.sum((data_features_lbp[i][mapping_type] -
data_features_lbp[j][mapping_type]) ** 2))
row[j] = dist
DIST_lbp[i] = row
DIST_lbp = DIST_lbp + DIST_lbp.T - np.diag(np.diag(DIST_lbp))
DIST_lbp = (DIST_lbp - np.min(DIST_lbp)) / (np.max(DIST_lbp) - np.min(
DIST_lbp))
full_distances_lbp.append(DIST_lbp[0, 1:DIST_lbp.shape[0]])
<mask token>
print('full_distances_lbp', full_distances_lbp.shape)
<mask token>
for mapping_type in range(mapping_function_size):
DIST_mlbp = np.zeros((data_features_mlbp.shape[0], data_features_mlbp.
shape[0]))
for i in range(data_features_mlbp.shape[0]):
row = np.zeros(data_features_mlbp.shape[0])
for j in range(i, data_features_mlbp.shape[0]):
dist = np.sqrt(np.sum((data_features_mlbp[i][mapping_type] -
data_features_mlbp[j][mapping_type]) ** 2))
row[j] = dist
DIST_mlbp[i] = row
DIST_mlbp = DIST_mlbp + DIST_mlbp.T - np.diag(np.diag(DIST_mlbp))
DIST_mlbp = (DIST_mlbp - np.min(DIST_mlbp)) / (np.max(DIST_mlbp) - np.
min(DIST_mlbp))
full_distances_mlbp.append(DIST_mlbp[0, 1:DIST_mlbp.shape[0]])
<mask token>
print('full_distances_mlbp', full_distances_mlbp.shape)
<mask token>
plt.clf()
<mask token>
axs[0, 0].plot(names_temp, full_distances_fos[0], 'b--', label='FOS-MAP0')
axs[0, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 0].legend(loc='upper right', fontsize=6)
axs[0, 1].plot(names_temp, full_distances_fos[1], 'b--', label='FOS-MAP1')
axs[0, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 1].legend(loc='upper right', fontsize=6)
axs[1, 0].plot(names_temp, full_distances_fos[2], 'b--', label='FOS-MAP2')
axs[1, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 0].legend(loc='upper right', fontsize=6)
axs[1, 1].plot(names_temp, full_distances_fos[3], 'b--', label='FOS-MAP3')
axs[1, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 1].legend(loc='upper right', fontsize=6)
axs[2, 0].plot(names_temp, full_distances_fos[4], 'b--', label='FOS-MAP4')
axs[2, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[2, 0].legend(loc='upper right', fontsize=6)
axs[2, 1].plot(names_temp, full_distances_fos[5], 'b--', label='FOS-MAP5')
axs[2, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[2, 1].legend(loc='upper right', fontsize=6)
for ax in axs.flat:
ax.label_outer()
ax.yaxis.set_tick_params(labelsize=6)
plt.sca(ax)
plt.xticks(rotation=45, horizontalalignment='right', fontweight='light',
fontsize=6)
plt.xlabel('Sequences', fontsize=6)
fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical', fontsize=6)
plt.savefig(results_file + '_fos.png', dpi=200, bbox_inches='tight')
plt.clf()
<mask token>
axs[0, 0].plot(names_temp, full_distances_glcm[0], 'b--', label='GLCM-MAP0')
axs[0, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 0].legend(loc='upper right', fontsize=6)
axs[0, 1].plot(names_temp, full_distances_glcm[1], 'b--', label='GLCM-MAP1')
axs[0, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 1].legend(loc='upper right', fontsize=6)
axs[1, 0].plot(names_temp, full_distances_glcm[2], 'b--', label='GLCM-MAP2')
axs[1, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 0].legend(loc='upper right', fontsize=6)
axs[1, 1].plot(names_temp, full_distances_glcm[3], 'b--', label='GLCM-MAP3')
axs[1, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 1].legend(loc='upper right', fontsize=6)
axs[2, 0].plot(names_temp, full_distances_glcm[4], 'b--', label='GLCM-MAP4')
axs[2, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[2, 0].legend(loc='upper right', fontsize=6)
axs[2, 1].plot(names_temp, full_distances_glcm[5], 'b--', label='GLCM-MAP5')
axs[2, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[2, 1].legend(loc='upper right', fontsize=6)
for ax in axs.flat:
ax.label_outer()
ax.yaxis.set_tick_params(labelsize=6)
plt.sca(ax)
plt.xticks(rotation=45, horizontalalignment='right', fontweight='light',
fontsize=6)
plt.xlabel('Sequences', fontsize=6)
fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical', fontsize=6)
plt.savefig(results_file + '_glcm.png', dpi=200, bbox_inches='tight')
plt.clf()
<mask token>
axs[0, 0].plot(names_temp, full_distances_lbp[0], 'b--', label='LBP-MAP0')
axs[0, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 0].legend(loc='upper right', fontsize=6)
axs[0, 1].plot(names_temp, full_distances_lbp[1], 'b--', label='LBP-MAP1')
axs[0, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 1].legend(loc='upper right', fontsize=6)
axs[1, 0].plot(names_temp, full_distances_lbp[2], 'b--', label='LBP-MAP2')
axs[1, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 0].legend(loc='upper right', fontsize=6)
axs[1, 1].plot(names_temp, full_distances_lbp[3], 'b--', label='LBP-MAP3')
axs[1, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 1].legend(loc='upper right', fontsize=6)
axs[2, 0].plot(names_temp, full_distances_lbp[4], 'b--', label='LBP-MAP4')
axs[2, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[2, 0].legend(loc='upper right', fontsize=6)
axs[2, 1].plot(names_temp, full_distances_lbp[5], 'b--', label='LBP-MAP5')
axs[2, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[2, 1].legend(loc='upper right', fontsize=6)
for ax in axs.flat:
ax.label_outer()
ax.yaxis.set_tick_params(labelsize=6)
plt.sca(ax)
plt.xticks(rotation=45, horizontalalignment='right', fontweight='light',
fontsize=6)
plt.xlabel('Sequences', fontsize=6)
fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical', fontsize=6)
plt.savefig(results_file + '_lbp.png', dpi=200, bbox_inches='tight')
plt.clf()
<mask token>
axs[0, 0].plot(names_temp, full_distances_mlbp[0], 'b--', label='MLBP-MAP0')
axs[0, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 0].legend(loc='upper right', fontsize=6)
axs[0, 1].plot(names_temp, full_distances_mlbp[1], 'b--', label='MLBP-MAP1')
axs[0, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 1].legend(loc='upper right', fontsize=6)
axs[1, 0].plot(names_temp, full_distances_mlbp[2], 'b--', label='MLBP-MAP2')
axs[1, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 0].legend(loc='upper right', fontsize=6)
axs[1, 1].plot(names_temp, full_distances_mlbp[3], 'b--', label='MLBP-MAP3')
axs[1, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 1].legend(loc='upper right', fontsize=6)
axs[2, 0].plot(names_temp, full_distances_mlbp[4], 'b--', label='MLBP-MAP4')
axs[2, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[2, 0].legend(loc='upper right', fontsize=6)
axs[2, 1].plot(names_temp, full_distances_mlbp[5], 'b--', label='MLBP-MAP5')
axs[2, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[2, 1].legend(loc='upper right', fontsize=6)
for ax in axs.flat:
ax.label_outer()
ax.yaxis.set_tick_params(labelsize=6)
plt.sca(ax)
plt.xticks(rotation=45, horizontalalignment='right', fontweight='light',
fontsize=6)
plt.xlabel('Sequences', fontsize=6)
fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical', fontsize=6)
plt.savefig(results_file + '_mlbp.png', dpi=200, bbox_inches='tight')
<mask token>
for mapping_type in range(mapping_function_size):
error_fos.append(np.sum((full_distances_fos[mapping_type] -
distances_mega) ** 2) / distances_mega.shape[0])
error_glcm.append(np.sum((full_distances_glcm[mapping_type] -
distances_mega) ** 2) / distances_mega.shape[0])
error_lbp.append(np.sum((full_distances_lbp[mapping_type] -
distances_mega) ** 2) / distances_mega.shape[0])
error_mlbp.append(np.sum((full_distances_mlbp[mapping_type] -
distances_mega) ** 2) / distances_mega.shape[0])
data_csv.append(error_fos)
data_csv.append(error_glcm)
data_csv.append(error_lbp)
data_csv.append(error_mlbp)
<mask token>
print(df)
df.to_csv(results_file + '.csv', index=True)
plt.clf()
<mask token>
axs[0, 0].plot(names_temp, full_distances_fos[0], 'b--', label='FOS-MAP0')
axs[0, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 0].legend(loc='upper right', fontsize=6)
axs[0, 1].plot(names_temp, full_distances_glcm[0], 'b--', label='GLCM-MAP0')
axs[0, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[0, 1].legend(loc='upper right', fontsize=6)
axs[1, 0].plot(names_temp, full_distances_lbp[0], 'b--', label='LBP-MAP0')
axs[1, 0].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 0].legend(loc='upper right', fontsize=6)
axs[1, 1].plot(names_temp, full_distances_mlbp[0], 'b--', label='MLBP-MAP0')
axs[1, 1].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
axs[1, 1].legend(loc='upper right', fontsize=6)
for ax in axs.flat:
ax.label_outer()
ax.yaxis.set_tick_params(labelsize=6)
plt.sca(ax)
plt.xticks(rotation=45, horizontalalignment='right', fontweight='light',
fontsize=6)
plt.xlabel('Sequences', fontsize=6)
fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical', fontsize=6)
plt.savefig(results_file + '_0map.png', dpi=200, bbox_inches='tight')
plt.clf()
# Maps 1-5: one 2x2 comparison figure per mapping function.  Each panel
# plots one descriptor family (FOS / GLCM / LBP / MLBP) against the
# CLUSTALW reference distances.  This loop replaces five copy-pasted
# sections; the per-figure `plt.subplots(2, 2)` call matches the intact
# sibling sections later in this file.
for m in range(1, 6):
    fig, axs = plt.subplots(2, 2)
    panels = [
        ((0, 0), 'FOS', full_distances_fos),
        ((0, 1), 'GLCM', full_distances_glcm),
        ((1, 0), 'LBP', full_distances_lbp),
        ((1, 1), 'MLBP', full_distances_mlbp),
    ]
    for (r, c), family, dists in panels:
        axs[r, c].plot(names_temp, dists[m], 'b--',
            label=family + '-MAP' + str(m))
        axs[r, c].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
        axs[r, c].legend(loc='upper right', fontsize=6)
    for ax in axs.flat:
        ax.label_outer()
        ax.yaxis.set_tick_params(labelsize=6)
        plt.sca(ax)
        plt.xticks(rotation=45, horizontalalignment='right',
            fontweight='light', fontsize=6)
    plt.xlabel('Sequences', fontsize=6)
    fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical',
        fontsize=6)
    plt.savefig(results_file + '_' + str(m) + 'map.png', dpi=200,
        bbox_inches='tight')
    # The original code only clears the figure between sections, not after
    # the last one.
    if m < 5:
        plt.clf()
|
# Imports restored from the identical copy of this script later in the file
# (the duplicated `import math` there is collapsed to one).
from sklearn.model_selection import KFold
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from matplotlib import pyplot
import math
import os
import sys
import cv2
import numpy as np
from scipy.stats import kurtosis, skew
from Bio import SeqIO
import pandas as pd
import seaborn as sns
from descriptor import get_features
from descriptor import get_features_glcm
from descriptor import get_features_lbp
from descriptor import get_features_mlbp
from ete3 import PhyloTree, TreeStyle
from ete3 import Tree
from skbio import DistanceMatrix
from skbio.tree import nj
# All data paths below are resolved relative to this script's directory.
current_dir = os.path.dirname(os.path.abspath(__file__))
# Dataset selection: each block below configures one database, and each
# reassigns the same four variables, so only the LAST block (db3) takes
# effect.  db1 and db2 are dead code kept as a switch-by-hand convenience.
# --- db1: bacterial 16S rRNA sequences (inactive) ---
sequences = ['J01859.fna', 'NR_037066.fna', 'NR_040849.fna',
    'NR_117152.fna', 'NR_132306.fna', 'NR_134817.fna', 'NR_134818.fna',
    'NR_136784.fna', 'NR_148244.fna', 'NR_148787.fna', 'NR_152063.fna',
    'KP317497.fna', 'NR_156072.fna']
names = ['Escherichia coli', 'T.Thermophilus', 'B.Wakoensis',
    'T.Filiformis', 'T.Tengchongensis', 'S.Cameli', 'S.Tangierensis',
    'T.amyloliquefaciens', 'B.Xiamenensis', 'B.Australimaris',
    'S.Halotolerans', 'B.Maritimus', 'S.Himalayensis']
csv_mega = current_dir + '/sample_genomes/seqs_db1_distances.csv'
seq_file_full = current_dir + '/sample_genomes/seqs_db1.fasta'
results_file = current_dir + '/results/compare_features/db1'
# --- db2: primate sequences (inactive) ---
sequences = ['L00016.fna', 'M22650.fna', 'M22651.fna', 'M22653.fna',
    'M22654.fna', 'M22655.fna', 'M22656.fna', 'M22657.fna', 'V00658.fna',
    'V00659.fna', 'V00672.fna', 'V00675.fna']
names = ['Human', 'Macaca mulatta', 'Macaca fuscata', 'Macaca fascicularis',
    'Macaca sylvanus', 'Saimiri sciureus', 'Tarsius syrichta',
    'Lemur catta', 'Gorilla', 'Hylobates', 'Chimpanzee', 'Sumatran Orangutan']
csv_mega = current_dir + '/sample_genomes/seqs_db2_distances.csv'
seq_file_full = current_dir + '/sample_genomes/seqs_db2.fasta'
results_file = current_dir + '/results/compare_features/db2'
# --- db3: mammal mtDNA sequences (ACTIVE) ---
sequences = ['V00662.fna', 'D38116.fna', 'D38113.fna', 'D38114.fna',
    'D38115.fna', 'X99256.fna', 'Y18001.fna', 'X79547.fna', 'Y07726.fna',
    'X63726.fna', 'X72004.fna', 'U20753.fna', 'X61145.fna', 'X72204.fna',
    'V00654.fna', 'X14848.fna', 'V00711.fna', 'X83427.fna']
names = ['Human', 'Pygmy chimpanzee', 'Common chimpanzee', 'Gorilla',
    'Orangutan', 'Gibbon', 'Baboon', 'Horse', 'White rhinoceros',
    'Harbor seal', 'Gray seal', 'Cat', 'Fin whale', 'Blue whale', 'Cow',
    'Rat', 'Mouse', 'Platypus']
csv_mega = current_dir + '/sample_genomes/seqs_db3_distances.csv'
seq_file_full = current_dir + '/sample_genomes/seqs_db3.fasta'
results_file = current_dir + '/results/compare_features/db3'
# Feature extraction: for each input FASTA file, append its raw text to the
# combined FASTA (used downstream for the alignment-based reference) and
# compute four texture-descriptor feature sets (FOS, GLCM, LBP, MLBP) under
# each of the six DNA->numeric mapping functions.
# Fix: the original opened f_out/f_in without context managers, leaking the
# file handles if any read or descriptor call raised.
data_features_fos = []
data_features_glcm = []
data_features_lbp = []
data_features_mlbp = []
mapping_function_size = 6  # number of DNA->numeric mapping functions tried
with open(seq_file_full, 'w') as f_out:
    for sequence_file in sequences:
        fa_file = current_dir + '/sample_genomes/' + sequence_file
        # Concatenate the raw FASTA text into the combined file.
        with open(fa_file, 'r') as f_in:
            f_out.write(f_in.read())
        # Only the first record of each FASTA file is used.
        data = [record.seq.upper() for record in SeqIO.parse(fa_file, 'fasta')]
        seq = data[0]
        temp_fos = []
        temp_glcm = []
        temp_lbp = []
        temp_mlbp = []
        for mapping_type in range(mapping_function_size):
            skewness, my_kurtosis, energy, entropy = get_features(seq,
                mapping_type)
            temp_fos.append([skewness, my_kurtosis, energy, entropy])
            entropy, contrast, energy, correlation, homogeneity = (
                get_features_glcm(seq, mapping_type))
            temp_glcm.append([entropy, contrast, energy, correlation,
                homogeneity])
            temp_lbp.append(get_features_lbp(seq, mapping_type))
            temp_mlbp.append(get_features_mlbp(seq, mapping_type))
        data_features_fos.append(temp_fos)
        data_features_glcm.append(temp_glcm)
        data_features_lbp.append(temp_lbp)
        data_features_mlbp.append(temp_mlbp)
# Shape of each: (n_sequences, n_mappings, feature_dim).
data_features_fos = np.array(data_features_fos)
data_features_glcm = np.array(data_features_glcm)
data_features_lbp = np.array(data_features_lbp)
data_features_mlbp = np.array(data_features_mlbp)
def _first_row_normalized_distances(features):
    """Per-mapping normalized distances from the first sequence to the rest.

    For each mapping function, builds the pairwise Euclidean distance
    matrix over the feature vectors, symmetrizes it, min-max normalizes the
    whole matrix to [0, 1], and keeps row 0 (distances from the first,
    reference, sequence) minus the zero self-distance.

    features: array of shape (n_sequences, n_mappings, feature_dim).
    Returns: array of shape (n_mappings, n_sequences - 1).
    """
    n = features.shape[0]
    per_mapping = []
    for mapping_type in range(mapping_function_size):
        dist = np.zeros((n, n))
        for i in range(n):
            for j in range(i, n):
                dist[i, j] = np.sqrt(np.sum((features[i][mapping_type] -
                    features[j][mapping_type]) ** 2))
        # Mirror the upper triangle without doubling the diagonal.
        dist = dist + dist.T - np.diag(np.diag(dist))
        dist = (dist - np.min(dist)) / (np.max(dist) - np.min(dist))
        per_mapping.append(dist[0, 1:n])
    return np.array(per_mapping)


# The four identical copy-pasted computations collapsed into one helper.
full_distances_fos = _first_row_normalized_distances(data_features_fos)
print('full_distances_fos', full_distances_fos.shape)
full_distances_glcm = _first_row_normalized_distances(data_features_glcm)
print('full_distances_glcm', full_distances_glcm.shape)
full_distances_lbp = _first_row_normalized_distances(data_features_lbp)
print('full_distances_lbp', full_distances_lbp.shape)
full_distances_mlbp = _first_row_normalized_distances(data_features_mlbp)
print('full_distances_mlbp', full_distances_mlbp.shape)
# Reference distances: load the CLUSTALW/MEGA distance matrix (first CSV
# column is the sequence index), replace NaNs by 0, symmetrize, and keep
# the min-max normalized distances from the first sequence to all others.
mega_table = pd.read_csv(csv_mega)
mega_table = mega_table.set_index(mega_table.columns[0])
DIST_mega = mega_table.values
DIST_mega[np.isnan(DIST_mega)] = 0
DIST_mega = DIST_mega + DIST_mega.T
distances_mega = DIST_mega[0, 1:DIST_mega.shape[0]]
lo = np.min(distances_mega)
hi = np.max(distances_mega)
distances_mega = (distances_mega - lo) / (hi - lo)
# x-axis labels: every sequence except the reference (first) one.
names_temp = np.array(sequences)[1:]
plt.clf()
# One 3x2 figure per descriptor family: the six panels show that family's
# distances under mappings 0-5 against the CLUSTALW reference.  This loop
# replaces four copy-pasted sections and writes the same PNG files.
family_plots = [
    ('FOS', full_distances_fos, '_fos'),
    ('GLCM', full_distances_glcm, '_glcm'),
    ('LBP', full_distances_lbp, '_lbp'),
    ('MLBP', full_distances_mlbp, '_mlbp'),
]
for plot_idx, (family, dists, suffix) in enumerate(family_plots):
    # The original clears the figure between sections, not before the first.
    if plot_idx:
        plt.clf()
    fig, axs = plt.subplots(3, 2)
    for m in range(6):
        ax = axs[m // 2, m % 2]  # row-major panel order, as in the original
        ax.plot(names_temp, dists[m], 'b--', label=family + '-MAP' + str(m))
        ax.plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
        ax.legend(loc='upper right', fontsize=6)
    for ax in axs.flat:
        ax.label_outer()
        ax.yaxis.set_tick_params(labelsize=6)
        plt.sca(ax)
        plt.xticks(rotation=45, horizontalalignment='right',
            fontweight='light', fontsize=6)
    plt.xlabel('Sequences', fontsize=6)
    fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical',
        fontsize=6)
    plt.savefig(results_file + suffix + '.png', dpi=200, bbox_inches='tight')
# Mean-squared error of each descriptor family's distances against the
# CLUSTALW reference, one value per mapping function (rows map0..map5).
# The manual per-mapping accumulation loop is replaced by broadcasting:
# each full_distances_* is (n_mappings, n_seqs - 1) and distances_mega is
# (n_seqs - 1,), so the mean over axis 1 reproduces sum(...)/n exactly.
error_fos = np.mean((full_distances_fos - distances_mega) ** 2, axis=1)
error_glcm = np.mean((full_distances_glcm - distances_mega) ** 2, axis=1)
error_lbp = np.mean((full_distances_lbp - distances_mega) ** 2, axis=1)
error_mlbp = np.mean((full_distances_mlbp - distances_mega) ** 2, axis=1)
data_csv = np.array([error_fos, error_glcm, error_lbp, error_mlbp])
df = pd.DataFrame(data=data_csv.T, index=['map0', 'map1', 'map2', 'map3',
    'map4', 'map5'], columns=['FOS', 'GLCM', 'LBP', 'MLBP'])
print(df)
df.to_csv(results_file + '.csv', index=True)
# One 2x2 figure per mapping function m: the four panels compare the
# FOS / GLCM / LBP / MLBP distances under mapping m against the CLUSTALW
# reference.  This loop replaces six copy-pasted sections and writes the
# same PNG files (db*_0map.png .. db*_5map.png).
for m in range(6):
    plt.clf()
    fig, axs = plt.subplots(2, 2)
    map_panels = [
        ((0, 0), 'FOS', full_distances_fos),
        ((0, 1), 'GLCM', full_distances_glcm),
        ((1, 0), 'LBP', full_distances_lbp),
        ((1, 1), 'MLBP', full_distances_mlbp),
    ]
    for (r, c), family, dists in map_panels:
        axs[r, c].plot(names_temp, dists[m], 'b--',
            label=family + '-MAP' + str(m))
        axs[r, c].plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
        axs[r, c].legend(loc='upper right', fontsize=6)
    for ax in axs.flat:
        ax.label_outer()
        ax.yaxis.set_tick_params(labelsize=6)
        plt.sca(ax)
        plt.xticks(rotation=45, horizontalalignment='right',
            fontweight='light', fontsize=6)
    plt.xlabel('Sequences', fontsize=6)
    fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical',
        fontsize=6)
    plt.savefig(results_file + '_' + str(m) + 'map.png', dpi=200,
        bbox_inches='tight')
|
from sklearn.model_selection import KFold
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from matplotlib import pyplot
import math
import os
import sys
import cv2
import numpy as np
import math
from scipy.stats import kurtosis, skew
from Bio import SeqIO
import pandas as pd
import seaborn as sns
from descriptor import get_features
from descriptor import get_features_glcm
from descriptor import get_features_lbp
from descriptor import get_features_mlbp
from ete3 import PhyloTree, TreeStyle
from ete3 import Tree
from skbio import DistanceMatrix
from skbio.tree import nj
current_dir = os.path.dirname(os.path.abspath(__file__))
# ---------------------------------------------------------------------------
# Data-set selection.  Three benchmark data sets are configured below; the
# assignments run top to bottom, so only the LAST group (db3) is in effect.
# The earlier groups are kept so a data set can be chosen by commenting out
# the groups that follow it.
# ---------------------------------------------------------------------------

# db1: bacterial sequences (overridden by db2/db3 below).
sequences = ['J01859.fna', 'NR_037066.fna', 'NR_040849.fna', 'NR_117152.fna',
             'NR_132306.fna', 'NR_134817.fna', 'NR_134818.fna',
             'NR_136784.fna', 'NR_148244.fna', 'NR_148787.fna',
             'NR_152063.fna', 'KP317497.fna', 'NR_156072.fna']
names = ['Escherichia coli', 'T.Thermophilus', 'B.Wakoensis', 'T.Filiformis',
         'T.Tengchongensis', 'S.Cameli', 'S.Tangierensis',
         'T.amyloliquefaciens', 'B.Xiamenensis', 'B.Australimaris',
         'S.Halotolerans', 'B.Maritimus', 'S.Himalayensis']
csv_mega = current_dir + '/sample_genomes/seqs_db1_distances.csv'
seq_file_full = current_dir + '/sample_genomes/seqs_db1.fasta'
results_file = current_dir + '/results/compare_features/db1'

# db2: primate sequences (overridden by db3 below).
sequences = ['L00016.fna', 'M22650.fna', 'M22651.fna', 'M22653.fna',
             'M22654.fna', 'M22655.fna', 'M22656.fna', 'M22657.fna',
             'V00658.fna', 'V00659.fna', 'V00672.fna', 'V00675.fna']
names = ['Human', 'Macaca mulatta', 'Macaca fuscata', 'Macaca fascicularis',
         'Macaca sylvanus', 'Saimiri sciureus', 'Tarsius syrichta',
         'Lemur catta', 'Gorilla', 'Hylobates', 'Chimpanzee',
         'Sumatran Orangutan']
csv_mega = current_dir + '/sample_genomes/seqs_db2_distances.csv'
seq_file_full = current_dir + '/sample_genomes/seqs_db2.fasta'
results_file = current_dir + '/results/compare_features/db2'

# db3: mammal sequences (the ACTIVE data set).
sequences = ['V00662.fna', 'D38116.fna', 'D38113.fna', 'D38114.fna',
             'D38115.fna', 'X99256.fna', 'Y18001.fna', 'X79547.fna',
             'Y07726.fna', 'X63726.fna', 'X72004.fna', 'U20753.fna',
             'X61145.fna', 'X72204.fna', 'V00654.fna', 'X14848.fna',
             'V00711.fna', 'X83427.fna']
names = ['Human', 'Pygmy chimpanzee', 'Common chimpanzee', 'Gorilla',
         'Orangutan', 'Gibbon', 'Baboon', 'Horse', 'White rhinoceros',
         'Harbor seal', 'Gray seal', 'Cat', 'Fin whale', 'Blue whale',
         'Cow', 'Rat', 'Mouse', 'Platypus']
csv_mega = current_dir + '/sample_genomes/seqs_db3_distances.csv'
seq_file_full = current_dir + '/sample_genomes/seqs_db3.fasta'
results_file = current_dir + '/results/compare_features/db3'
# ---------------------------------------------------------------------------
# Feature extraction: for every sequence file, read the first FASTA record
# and compute FOS / GLCM / LBP / MLBP descriptors under each numeric
# base-to-number mapping function.  While iterating, all sequences are also
# concatenated into a single multi-FASTA file (seq_file_full).
# ---------------------------------------------------------------------------
data_features_fos = []
data_features_glcm = []
data_features_lbp = []
data_features_mlbp = []
mapping_function_size = 6  # number of base-to-number mapping functions
# Context managers guarantee the files are closed even if a descriptor
# computation raises (the original open()/close() pairs leaked on error).
with open(seq_file_full, 'w') as f_out:
    for sequence_file in sequences:
        fa_file = current_dir + '/sample_genomes/' + sequence_file
        # Append this genome to the combined multi-FASTA file.
        with open(fa_file, 'r') as f_in:
            f_out.write(f_in.read())
        # Only the first record of each file is used.
        record = next(SeqIO.parse(fa_file, 'fasta'))
        seq = record.seq.upper()
        temp_fos = []
        temp_glcm = []
        temp_lbp = []
        temp_mlbp = []
        # Evaluate every mapping function on this sequence.
        for mapping_type in range(mapping_function_size):
            skewness, my_kurtosis, energy, entropy = get_features(seq,
                mapping_type)
            temp_fos.append([skewness, my_kurtosis, energy, entropy])
            entropy, contrast, energy, correlation, homogeneity = (
                get_features_glcm(seq, mapping_type))
            temp_glcm.append([entropy, contrast, energy, correlation,
                homogeneity])
            temp_lbp.append(get_features_lbp(seq, mapping_type))
            temp_mlbp.append(get_features_mlbp(seq, mapping_type))
        data_features_fos.append(temp_fos)
        data_features_glcm.append(temp_glcm)
        data_features_lbp.append(temp_lbp)
        data_features_mlbp.append(temp_mlbp)
# Shape of each array: (n_sequences, mapping_function_size, n_features).
data_features_fos = np.array(data_features_fos)
data_features_glcm = np.array(data_features_glcm)
data_features_lbp = np.array(data_features_lbp)
data_features_mlbp = np.array(data_features_mlbp)
def _leading_row_distances(features):
    """Return per-mapping normalised distances from the first sequence.

    features has shape (n_sequences, n_mappings, ...): one feature vector per
    sequence and mapping function.  For every mapping function the full
    pairwise Euclidean distance matrix is built, min-max normalised over the
    whole matrix, and its first row (minus the zero self-distance) is kept.
    The result has shape (n_mappings, n_sequences - 1).
    """
    n_seqs = features.shape[0]
    per_mapping = []
    for mapping_type in range(features.shape[1]):
        dist = np.zeros((n_seqs, n_seqs))
        for i in range(n_seqs):
            for j in range(i, n_seqs):
                # Euclidean distance between feature vectors; symmetric.
                d = np.sqrt(np.sum((features[i][mapping_type] -
                                    features[j][mapping_type]) ** 2))
                dist[i, j] = d
                dist[j, i] = d
        # Min-max normalisation over the whole matrix (the matrix minimum is
        # the zero diagonal, matching the original row/transpose formulation).
        dist = (dist - np.min(dist)) / (np.max(dist) - np.min(dist))
        per_mapping.append(dist[0, 1:])
    return np.array(per_mapping)


# The four descriptor families share the exact same distance computation;
# the previous four copy-pasted loops are replaced by one helper.
full_distances_fos = _leading_row_distances(data_features_fos)
print('full_distances_fos', full_distances_fos.shape)
full_distances_glcm = _leading_row_distances(data_features_glcm)
print('full_distances_glcm', full_distances_glcm.shape)
full_distances_lbp = _leading_row_distances(data_features_lbp)
print('full_distances_lbp', full_distances_lbp.shape)
full_distances_mlbp = _leading_row_distances(data_features_mlbp)
print('full_distances_mlbp', full_distances_mlbp.shape)
# Reference distances from MEGA/CLUSTALW: the exported CSV holds a triangular
# matrix (missing cells read as NaN), which is mirrored into a full
# symmetric matrix.
mega_frame = pd.read_csv(csv_mega)
mega_frame = mega_frame.set_index(mega_frame.columns[0])
DIST_mega = mega_frame.values
DIST_mega[np.isnan(DIST_mega)] = 0  # empty triangle -> 0 before mirroring
DIST_mega = DIST_mega + DIST_mega.T
# Distances from the first sequence to every other one, min-max normalised.
distances_mega = DIST_mega[0, 1:DIST_mega.shape[0]]
mega_min = np.min(distances_mega)
mega_max = np.max(distances_mega)
distances_mega = (distances_mega - mega_min) / (mega_max - mega_min)
# X-axis labels: every sequence except the reference (first) one.
names_temp = np.array(sequences)[1:]
def _plot_descriptor_vs_clustalw(distances, descriptor, out_suffix):
    """Save a 3x2 grid for one descriptor family.

    Each panel shows the distances of one mapping function (0..5) as a blue
    dashed line against the CLUSTALW reference distances (red dash-dot).
    The figure is written to results_file + out_suffix.
    """
    plt.clf()
    fig, axs = plt.subplots(3, 2)
    # axs.flat iterates row-major, matching mapping functions 0..5.
    for mapping_type, ax in enumerate(axs.flat):
        ax.plot(names_temp, distances[mapping_type], 'b--',
                label='{}-MAP{}'.format(descriptor, mapping_type))
        ax.plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
        ax.legend(loc='upper right', fontsize=6)
    for ax in axs.flat:
        ax.label_outer()
        ax.yaxis.set_tick_params(labelsize=6)
        plt.sca(ax)
        plt.xticks(rotation=45, horizontalalignment='right',
                   fontweight='light', fontsize=6)
    plt.xlabel('Sequences', fontsize=6)
    fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical',
             fontsize=6)
    plt.savefig(results_file + out_suffix, dpi=200, bbox_inches='tight')


# The four figure sections below were identical except for the data array
# and labels; they are now a single documented helper called four times.
_plot_descriptor_vs_clustalw(full_distances_fos, 'FOS', '_fos.png')
_plot_descriptor_vs_clustalw(full_distances_glcm, 'GLCM', '_glcm.png')
_plot_descriptor_vs_clustalw(full_distances_lbp, 'LBP', '_lbp.png')
_plot_descriptor_vs_clustalw(full_distances_mlbp, 'MLBP', '_mlbp.png')
# ---------------------------------------------------------------------------
# Mean squared error of each descriptor/mapping combination against the
# CLUSTALW reference distances (lower is better).  The per-mapping Python
# loops are replaced by a vectorised mean over axis 1 (identical values:
# sum(e**2)/len == mean(e**2)).
# ---------------------------------------------------------------------------
error_fos = np.mean((full_distances_fos - distances_mega) ** 2, axis=1)
error_glcm = np.mean((full_distances_glcm - distances_mega) ** 2, axis=1)
error_lbp = np.mean((full_distances_lbp - distances_mega) ** 2, axis=1)
error_mlbp = np.mean((full_distances_mlbp - distances_mega) ** 2, axis=1)
data_csv = np.array([error_fos, error_glcm, error_lbp, error_mlbp])
# Rows = mapping functions, columns = descriptor families.
df = pd.DataFrame(data=data_csv.T,
                  index=['map0', 'map1', 'map2', 'map3', 'map4', 'map5'],
                  columns=['FOS', 'GLCM', 'LBP', 'MLBP'])
print(df)
df.to_csv(results_file + '.csv', index=True)
def _plot_mapping_vs_clustalw(mapping_type):
    """Save a 2x2 grid comparing all descriptors for one mapping function.

    Panels (row-major): FOS, GLCM, LBP, MLBP distances for the given mapping
    function, each against the CLUSTALW reference.  The figure is written to
    results_file + '_<mapping_type>map.png'.
    """
    plt.clf()
    fig, axs = plt.subplots(2, 2)
    panels = [(full_distances_fos, 'FOS'),
              (full_distances_glcm, 'GLCM'),
              (full_distances_lbp, 'LBP'),
              (full_distances_mlbp, 'MLBP')]
    for ax, (distances, descriptor) in zip(axs.flat, panels):
        ax.plot(names_temp, distances[mapping_type], 'b--',
                label='{}-MAP{}'.format(descriptor, mapping_type))
        ax.plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
        ax.legend(loc='upper right', fontsize=6)
    for ax in axs.flat:
        ax.label_outer()
        ax.yaxis.set_tick_params(labelsize=6)
        plt.sca(ax)
        plt.xticks(rotation=45, horizontalalignment='right',
                   fontweight='light', fontsize=6)
    plt.xlabel('Sequences', fontsize=6)
    fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical',
             fontsize=6)
    plt.savefig(results_file + '_{}map.png'.format(mapping_type),
                dpi=200, bbox_inches='tight')


# The six figure sections below were identical except for the mapping index;
# they are now a single documented helper driven by a loop.
for mapping_type in range(mapping_function_size):
    _plot_mapping_vs_clustalw(mapping_type)
|
# This script compares different base-to-number (base2number) mapping methods
from sklearn.model_selection import KFold
from sklearn.model_selection import train_test_split
#from matplotlib import pyplot as plt
#from matplotlib import cm
import matplotlib.pyplot as plt
from matplotlib import pyplot
import math
import os
import sys
import cv2
import numpy as np
import math
from scipy.stats import kurtosis, skew
from Bio import SeqIO
import pandas as pd
import seaborn as sns
from descriptor import get_features
from descriptor import get_features_glcm
from descriptor import get_features_lbp
from descriptor import get_features_mlbp
from ete3 import PhyloTree, TreeStyle
from ete3 import Tree
from skbio import DistanceMatrix
from skbio.tree import nj
current_dir = os.path.dirname(os.path.abspath(__file__))
###################################################################################################################
# Data-set selection: the three groups below run top to bottom, so only the
# LAST group (db3) takes effect; earlier groups are kept for easy switching.
###################################################################################################################
# db1: bacterial sequences (overridden by db2/db3 below).
sequences = ["J01859.fna", "NR_037066.fna", "NR_040849.fna", "NR_117152.fna",
             "NR_132306.fna", "NR_134817.fna", "NR_134818.fna",
             "NR_136784.fna", "NR_148244.fna", "NR_148787.fna",
             "NR_152063.fna", "KP317497.fna", "NR_156072.fna"]
names = ["Escherichia coli", "T.Thermophilus", "B.Wakoensis", "T.Filiformis",
         "T.Tengchongensis", "S.Cameli", "S.Tangierensis",
         "T.amyloliquefaciens", "B.Xiamenensis", "B.Australimaris",
         "S.Halotolerans", "B.Maritimus", "S.Himalayensis"]
csv_mega = current_dir + "/sample_genomes/seqs_db1_distances.csv"
seq_file_full = current_dir + "/sample_genomes/seqs_db1.fasta"
results_file = current_dir + "/results/compare_features/db1"
###################################################################################################################
# db2: primate sequences (overridden by db3 below).
sequences = ["L00016.fna", "M22650.fna", "M22651.fna", "M22653.fna",
             "M22654.fna", "M22655.fna", "M22656.fna", "M22657.fna",
             "V00658.fna", "V00659.fna", "V00672.fna", "V00675.fna"]
names = ["Human", "Macaca mulatta", "Macaca fuscata", "Macaca fascicularis",
         "Macaca sylvanus", "Saimiri sciureus", "Tarsius syrichta",
         "Lemur catta", "Gorilla", "Hylobates", "Chimpanzee",
         "Sumatran Orangutan"]
csv_mega = current_dir + "/sample_genomes/seqs_db2_distances.csv"
seq_file_full = current_dir + "/sample_genomes/seqs_db2.fasta"
results_file = current_dir + "/results/compare_features/db2"
###################################################################################################################
# db3: mammal sequences (the ACTIVE data set).
sequences = ["V00662.fna", "D38116.fna", "D38113.fna", "D38114.fna",
             "D38115.fna", "X99256.fna", "Y18001.fna", "X79547.fna",
             "Y07726.fna", "X63726.fna", "X72004.fna", "U20753.fna",
             "X61145.fna", "X72204.fna", "V00654.fna", "X14848.fna",
             "V00711.fna", "X83427.fna"]
names = ["Human", "Pygmy chimpanzee", "Common chimpanzee", "Gorilla",
         "Orangutan", "Gibbon", "Baboon", "Horse", "White rhinoceros",
         "Harbor seal", "Gray seal", "Cat", "Fin whale", "Blue whale",
         "Cow", "Rat", "Mouse", "Platypus"]
csv_mega = current_dir + "/sample_genomes/seqs_db3_distances.csv"
seq_file_full = current_dir + "/sample_genomes/seqs_db3.fasta"
results_file = current_dir + "/results/compare_features/db3"
# Feature extraction: compute FOS / GLCM / LBP / MLBP descriptors for every
# sequence under each base-to-number mapping function, and concatenate all
# genomes into one multi-FASTA file along the way.
data_features_fos = []
data_features_glcm = []
data_features_lbp = []
data_features_mlbp = []
mapping_function_size = 6  # there are 6 types of mapping functions
f_out = open(seq_file_full, "w")
for sequence_file in sequences:
    fa_file = current_dir + "/sample_genomes/" + sequence_file
    # Append this genome to the combined multi-FASTA file.
    f_in = open(fa_file, "r")
    f_out.write(f_in.read())
    f_in.close()
    data = []
    for record in SeqIO.parse(fa_file, "fasta"):
        data.append(record.seq.upper())
    seq = data[0]  # only the first record of the file is used
    temp_fos = []
    temp_glcm = []
    temp_lbp = []
    temp_mlbp = []
    # Evaluate every mapping function on this sequence.
    for mapping_type in range(mapping_function_size):
        skewness, my_kurtosis, energy, entropy = get_features(seq, mapping_type)
        temp_fos.append([skewness, my_kurtosis, energy, entropy])
        entropy, contrast, energy, correlation, homogeneity = get_features_glcm(seq, mapping_type)
        temp_glcm.append([entropy, contrast, energy, correlation, homogeneity])
        temp_lbp.append(get_features_lbp(seq, mapping_type))
        temp_mlbp.append(get_features_mlbp(seq, mapping_type))
    data_features_fos.append(temp_fos)
    data_features_glcm.append(temp_glcm)
    data_features_lbp.append(temp_lbp)
    data_features_mlbp.append(temp_mlbp)
f_out.close()
# Shape of each array: (n_sequences, mapping_function_size, n_features).
data_features_fos = np.array(data_features_fos)
data_features_glcm = np.array(data_features_glcm)
data_features_lbp = np.array(data_features_lbp)
data_features_mlbp = np.array(data_features_mlbp)
###################################################################################################################
# Pairwise Euclidean distances between feature vectors, per mapping function.
# The four original copy-pasted double loops (FOS/GLCM/LBP/MLBP) are replaced
# by one vectorized helper computing the exact same quantity.
###################################################################################################################
def _first_row_distance_profiles(features):
	"""Return the normalised distance profile of sequence 0, per mapping.

	`features` has shape (num_sequences, num_mappings, feature_len).  For each
	mapping function the full symmetric Euclidean distance matrix is built,
	min-max normalised over the whole matrix, and its first row (distances of
	sequence 0 to every other sequence, excluding itself) is kept.

	Returns an array of shape (num_mappings, num_sequences - 1).
	"""
	profiles = []
	for mapping_type in range(features.shape[1]):
		f = features[:, mapping_type, :]
		# Broadcasting builds all pairwise differences at once: (n, n, len).
		diff = f[:, None, :] - f[None, :, :]
		dist = np.sqrt((diff ** 2).sum(axis=-1))
		dist = (dist - np.min(dist)) / (np.max(dist) - np.min(dist))
		profiles.append(dist[0, 1:])
	return np.array(profiles)

full_distances_fos = _first_row_distance_profiles(data_features_fos)
print("full_distances_fos", full_distances_fos.shape)
full_distances_glcm = _first_row_distance_profiles(data_features_glcm)
print("full_distances_glcm", full_distances_glcm.shape)
full_distances_lbp = _first_row_distance_profiles(data_features_lbp)
print("full_distances_lbp", full_distances_lbp.shape)
full_distances_mlbp = _first_row_distance_profiles(data_features_mlbp)
print("full_distances_mlbp", full_distances_mlbp.shape)
###################################################################################################################
### Reference distances from the exported CSV (plotted as 'CLUSTALW') #############################################
###################################################################################################################
mega_dist_csv = pd.read_csv(csv_mega)
mega_dist_csv = mega_dist_csv.set_index(mega_dist_csv.columns[0])
DIST_mega = mega_dist_csv.values
DIST_mega[np.isnan(DIST_mega)] = 0 # fill the NaN values with zeros
DIST_mega = DIST_mega + DIST_mega.T # copy the lower triangle of the matrix into the upper one
# Distances of the first sequence to all the others, min-max normalised so
# they are comparable with the feature-based profiles computed above.
distances_mega = DIST_mega[0,1:DIST_mega.shape[0]]
distances_mega = (distances_mega - np.min(distances_mega)) / (np.max(distances_mega) - np.min(distances_mega))
###################################################################################################################
###################################################################################################################
# X-axis labels: every sequence file except the first (the reference sequence).
names_temp = np.array(sequences)
names_temp = names_temp[1:names_temp.shape[0]] # remove the first element
###################################################################################################################
# One figure per descriptor: a 3x2 grid comparing, for each of the 6 mapping
# functions, the feature-based distance profile against the reference profile.
# Replaces four copy-pasted plotting sections with a single loop; the figures
# and output file names are unchanged.
###################################################################################################################
_algorithms = [('FOS', 'fos', full_distances_fos),
               ('GLCM', 'glcm', full_distances_glcm),
               ('LBP', 'lbp', full_distances_lbp),
               ('MLBP', 'mlbp', full_distances_mlbp)]
for _label, _suffix, _distances in _algorithms:
	plt.clf()
	fig, axs = plt.subplots(3, 2)
	# axs.flat iterates row-major, so subplot k shows mapping function k,
	# matching the original fixed layout.
	for mapping_type, ax in enumerate(axs.flat):
		ax.plot(names_temp, _distances[mapping_type], 'b--', label='%s-MAP%d' % (_label, mapping_type))
		ax.plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
		ax.legend(loc='upper right', fontsize=6)
	for ax in axs.flat:
		ax.label_outer()
		ax.yaxis.set_tick_params(labelsize=6)
		plt.sca(ax)
		plt.xticks(rotation=45, horizontalalignment='right', fontweight='light', fontsize=6 )
	plt.xlabel('Sequences', fontsize=6)
	fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical', fontsize=6 )
	plt.savefig( results_file + "_" + _suffix + ".png", dpi = 200, bbox_inches='tight')
# Mean squared error between every descriptor/mapping distance profile and
# the reference profile; printed and written to <results_file>.csv with one
# row per mapping function and one column per descriptor.
error_fos = []   # per-mapping error for FOS
error_glcm = []  # per-mapping error for GLCM
error_lbp = []   # per-mapping error for LBP
error_mlbp = []  # per-mapping error for MLBP
for mapping_type in range(mapping_function_size):
	n_points = distances_mega.shape[0]
	error_fos.append(np.sum((full_distances_fos[mapping_type] - distances_mega) ** 2) / n_points)
	error_glcm.append(np.sum((full_distances_glcm[mapping_type] - distances_mega) ** 2) / n_points)
	error_lbp.append(np.sum((full_distances_lbp[mapping_type] - distances_mega) ** 2) / n_points)
	error_mlbp.append(np.sum((full_distances_mlbp[mapping_type] - distances_mega) ** 2) / n_points)
data_csv = np.array([error_fos, error_glcm, error_lbp, error_mlbp])
df = pd.DataFrame(data=data_csv.T,
                  index=["map0", "map1", "map2", "map3", "map4", "map5"],
                  columns=["FOS", "GLCM", "LBP", "MLBP"])
print(df)
df.to_csv(results_file + ".csv", index=True)
###################################################################################################################
# One figure per mapping function: a 2x2 grid comparing all four descriptors
# (FOS, GLCM, LBP, MLBP) against the reference profile.  Replaces six
# copy-pasted plotting sections with a single loop; the figures and output
# file names ("_0map.png" .. "_5map.png") are unchanged.
###################################################################################################################
for mapping_type in range(mapping_function_size):
	plt.clf()
	fig, axs = plt.subplots(2, 2)
	_panels = [('FOS', full_distances_fos),
	           ('GLCM', full_distances_glcm),
	           ('LBP', full_distances_lbp),
	           ('MLBP', full_distances_mlbp)]
	# axs.flat is row-major: FOS top-left, GLCM top-right, LBP bottom-left,
	# MLBP bottom-right -- the same layout as the original sections.
	for ax, (_label, _distances) in zip(axs.flat, _panels):
		ax.plot(names_temp, _distances[mapping_type], 'b--', label='%s-MAP%d' % (_label, mapping_type))
		ax.plot(names_temp, distances_mega, 'r-.', label='CLUSTALW')
		ax.legend(loc='upper right', fontsize=6)
	for ax in axs.flat:
		ax.label_outer()
		ax.yaxis.set_tick_params(labelsize=6)
		plt.sca(ax)
		plt.xticks(rotation=45, horizontalalignment='right', fontweight='light', fontsize=6 )
	plt.xlabel('Sequences', fontsize=6)
	fig.text(0.04, 0.5, 'Distances', va='center', rotation='vertical', fontsize=6 )
	plt.savefig( results_file + "_%dmap.png" % mapping_type, dpi = 200, bbox_inches='tight')
|
[
0,
1,
2,
3,
4
] |
2,507 |
89376b2464dfb724197a1c1e164af8277e03ad59
|
<mask token>
class SubsPcLrThumb(Opcode):
<mask token>
<mask token>
|
<mask token>
class SubsPcLrThumb(Opcode):
def __init__(self, instruction, imm32, n):
super().__init__(instruction)
self.imm32 = imm32
self.n = n
<mask token>
|
<mask token>
class SubsPcLrThumb(Opcode):
def __init__(self, instruction, imm32, n):
super().__init__(instruction)
self.imm32 = imm32
self.n = n
def execute(self, processor):
if processor.condition_passed():
if processor.registers.current_mode_is_user_or_system(
) or processor.registers.current_instr_set(
) == InstrSet.THUMB_EE:
print('unpredictable')
else:
operand2 = self.imm32
result = add_with_carry(processor.registers.get(self.n),
bit_not(operand2, 32), 1)[0]
if (processor.registers.cpsr.m == 26 and processor.
registers.cpsr.j and processor.registers.cpsr.t):
print('unpredictable')
else:
processor.branch_write_pc(result)
|
from armulator.armv6.bits_ops import add_with_carry, bit_not
from armulator.armv6.enums import InstrSet
from armulator.armv6.opcodes.opcode import Opcode
class SubsPcLrThumb(Opcode):
    """SUBS PC, LR, #imm (Thumb encoding).

    Writes ``R[n] - imm32`` to the PC via ``branch_write_pc``.  The
    UNPREDICTABLE cases (User/System mode, ThumbEE state, or mode 0b11010
    with the J and T bits set) are only reported by printing
    'unpredictable' rather than raising.
    """

    def __init__(self, instruction, imm32, n):
        """Store the decoded operands.

        :param instruction: raw encoded instruction (kept by Opcode).
        :param imm32: immediate value subtracted from the source register.
        :param n: source register index (the opcode name suggests LR).
        """
        super().__init__(instruction)
        self.imm32 = imm32
        self.n = n

    def execute(self, processor):
        """Perform the subtract-and-branch if the condition passes."""
        if processor.condition_passed():
            # UNPREDICTABLE in User/System mode or in the ThumbEE instruction set.
            if processor.registers.current_mode_is_user_or_system(
                ) or processor.registers.current_instr_set(
                ) == InstrSet.THUMB_EE:
                print('unpredictable')
            else:
                operand2 = self.imm32
                # R[n] - operand2 computed as R[n] + NOT(operand2) + 1.
                result = add_with_carry(processor.registers.get(self.n),
                    bit_not(operand2, 32), 1)[0]
                # m == 26 is 0b11010 (Hyp mode in the ARM mode encoding --
                # verify); with J and T both set the combination is
                # UNPREDICTABLE.
                if (processor.registers.cpsr.m == 26 and processor.
                    registers.cpsr.j and processor.registers.cpsr.t):
                    print('unpredictable')
                else:
                    processor.branch_write_pc(result)
|
from armulator.armv6.bits_ops import add_with_carry, bit_not
from armulator.armv6.enums import InstrSet
from armulator.armv6.opcodes.opcode import Opcode
class SubsPcLrThumb(Opcode):
    """SUBS PC, LR, #imm (Thumb encoding): branch to ``R[n] - imm32``.

    UNPREDICTABLE situations are only reported by printing 'unpredictable'.
    """

    def __init__(self, instruction, imm32, n):
        super().__init__(instruction)
        self.imm32 = imm32  # immediate subtracted from the source register
        self.n = n          # source register index

    def execute(self, processor):
        # Guard-clause style: bail out early on the condition check and on
        # the first UNPREDICTABLE case instead of nesting if/else blocks.
        if not processor.condition_passed():
            return
        regs = processor.registers
        if regs.current_mode_is_user_or_system() or regs.current_instr_set() == InstrSet.THUMB_EE:
            print('unpredictable')
            return
        # R[n] - imm32 computed as R[n] + NOT(imm32) + 1.
        result = add_with_carry(regs.get(self.n), bit_not(self.imm32, 32), 1)[0]
        # Mode 0b11010 with both the J and T bits set is UNPREDICTABLE.
        if regs.cpsr.m == 0b11010 and regs.cpsr.j and regs.cpsr.t:
            print('unpredictable')
        else:
            processor.branch_write_pc(result)
|
[
1,
2,
3,
4,
5
] |
2,508 |
6d8c32fe51fadbe6b6ee14419e1e37c65d4f57bf
|
<mask token>
|
# Named hex colour constants used for plots / UI elements.
BLUE = "#1A94D6"
GREEN = "#73AD21"
PALE_GREEN = "#BBF864"
PALE_BLUE = "#A2C4DA"
BRIGHT_BLUE = "#04BAE3"
ORANGE = "#FF8000"
DARK_ORANGE = "#E65C00"
LIGHT_ORANGE = "#FFAA3E"
PALE_ORANGE = "#F8C381"
GUAVA = "#FF4F40"
FUSCIA = "#E22EFF"
PALE_FUSCIA = "#DFA0E9"
PURPLE = "#AE37C1"
PALE_PURPLE = "#C3AACF"
# Default palette order: strong hues first, pale variants last.
COLORS = [
    BLUE,
    ORANGE,
    GREEN,
    PURPLE,
    FUSCIA,
    PALE_BLUE,
    PALE_ORANGE,
    PALE_GREEN,
]
|
BLUE = "#1A94D6"
GREEN = "#73AD21"
PALE_GREEN = "#BBF864"
PALE_BLUE = "#A2C4DA"
BRIGHT_BLUE = "#04BAE3"
ORANGE = "#FF8000"
DARK_ORANGE = "#E65C00"
LIGHT_ORANGE = "#FFAA3E"
PALE_ORANGE = "#F8C381"
GUAVA = "#FF4F40"
FUSCIA = "#E22EFF"
PALE_FUSCIA = "#DFA0E9"
PURPLE = "#AE37C1"
PALE_PURPLE = "#C3AACF"
COLORS = [BLUE, ORANGE, GREEN, PURPLE, FUSCIA, PALE_BLUE, PALE_ORANGE, PALE_GREEN]
| null | null |
[
0,
1,
2
] |
2,509 |
38f41fa87230ddc0b3a8c411b4c569f59f0ea065
|
<mask token>
class QuoteModel(db.Model):
<mask token>
<mask token>
<mask token>
<mask token>
def __init__(self, author, quote, rating=1):
self.author = author
self.quote = quote
self.rate = rating
def to_dict(self):
d = {}
for column in self.__table__.columns:
d[column.name] = str(getattr(self, column.name))
return d
<mask token>
<mask token>
|
<mask token>
class QuoteModel(db.Model):
<mask token>
<mask token>
<mask token>
<mask token>
def __init__(self, author, quote, rating=1):
self.author = author
self.quote = quote
self.rate = rating
def to_dict(self):
d = {}
for column in self.__table__.columns:
d[column.name] = str(getattr(self, column.name))
return d
def __str__(self):
return f'Quote. Author: {self.author}, q: {self.quote[:10]}...'
def __repr__(self):
return self.__str__()
|
<mask token>
class QuoteModel(db.Model):
id = db.Column(db.Integer, primary_key=True)
author_id = db.Column(db.Integer, db.ForeignKey(AuthorModel.id))
quote = db.Column(db.String(255), unique=False)
rate = db.Column(db.Integer)
def __init__(self, author, quote, rating=1):
self.author = author
self.quote = quote
self.rate = rating
def to_dict(self):
d = {}
for column in self.__table__.columns:
d[column.name] = str(getattr(self, column.name))
return d
def __str__(self):
return f'Quote. Author: {self.author}, q: {self.quote[:10]}...'
def __repr__(self):
return self.__str__()
|
from api import db
from api.models.author import AuthorModel
class QuoteModel(db.Model):
    """SQLAlchemy model for a quote belonging to an ``AuthorModel`` row."""

    id = db.Column(db.Integer, primary_key=True)
    author_id = db.Column(db.Integer, db.ForeignKey(AuthorModel.id))
    quote = db.Column(db.String(255), unique=False)
    rate = db.Column(db.Integer)

    def __init__(self, author, quote, rating=1):
        # NOTE(review): `author` is assigned to an attribute that is not a
        # column here -- presumably a relationship declared on AuthorModel
        # (e.g. via backref); confirm against that model.
        self.author = author
        self.quote = quote
        self.rate = rating

    def to_dict(self):
        """Map every column name to its value rendered as a string."""
        return {column.name: str(getattr(self, column.name))
                for column in self.__table__.columns}

    def __str__(self):
        return f'Quote. Author: {self.author}, q: {self.quote[:10]}...'

    def __repr__(self):
        return str(self)
|
from api import db
from api.models.author import AuthorModel
class QuoteModel(db.Model):
id = db.Column(db.Integer, primary_key=True)
author_id = db.Column(db.Integer, db.ForeignKey(AuthorModel.id))
quote = db.Column(db.String(255), unique=False)
rate = db.Column(db.Integer)
def __init__(self, author, quote, rating=1):
self.author = author
self.quote = quote
self.rate = rating
def to_dict(self):
d = {}
for column in self.__table__.columns:
d[column.name] = str(getattr(self, column.name))
return d
def __str__(self):
return f"Quote. Author: {self.author}, q: {self.quote[:10]}..."
def __repr__(self):
return self.__str__()
|
[
3,
5,
6,
7,
8
] |
2,510 |
91e1ac12ba99a8efd8f7f26310244d83bdd4aa52
|
<mask token>
class Partitioner:
<mask token>
def __init__(self, mesh, partitions, tmpdir):
metisMesh = tmpdir.path(METIS_MESH)
metis.MeshWriter(metisMesh, mesh.elements())
metisGraph = tmpdir.path(METIS_GRAPH)
p = subprocess.Popen(['m2gmetis', '-ncommon=3', metisMesh,
metisGraph], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, errmsg = p.communicate()
if p.returncode:
raise Exception(errmsg.strip())
p = subprocess.Popen(['gpmetis', '-ptype=rb', metisGraph, str(
partitions)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, errmsg = p.communicate()
if p.returncode:
raise Exception(errmsg.strip())
self.__partition = metis.PartitionReader(metisGraph + '.part.' +
str(partitions), partitions, len(mesh.elements()))
if self.__partition.size() != len(mesh.elements()):
raise Exception(
'Mesh size and partition size do not match: mesh size = ' +
str(len(mesh.elements())) + ' != partition size = ' + str(
self.__partition.size()))
def partition(self):
return self.__partition
|
<mask token>
class Partitioner:
"""Converts a mesh into graph and partitions it using metis"""
def __init__(self, mesh, partitions, tmpdir):
metisMesh = tmpdir.path(METIS_MESH)
metis.MeshWriter(metisMesh, mesh.elements())
metisGraph = tmpdir.path(METIS_GRAPH)
p = subprocess.Popen(['m2gmetis', '-ncommon=3', metisMesh,
metisGraph], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, errmsg = p.communicate()
if p.returncode:
raise Exception(errmsg.strip())
p = subprocess.Popen(['gpmetis', '-ptype=rb', metisGraph, str(
partitions)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, errmsg = p.communicate()
if p.returncode:
raise Exception(errmsg.strip())
self.__partition = metis.PartitionReader(metisGraph + '.part.' +
str(partitions), partitions, len(mesh.elements()))
if self.__partition.size() != len(mesh.elements()):
raise Exception(
'Mesh size and partition size do not match: mesh size = ' +
str(len(mesh.elements())) + ' != partition size = ' + str(
self.__partition.size()))
def partition(self):
return self.__partition
|
<mask token>
METIS_MESH = 'metis.mesh'
METIS_GRAPH = 'metis.graph'
class Partitioner:
"""Converts a mesh into graph and partitions it using metis"""
def __init__(self, mesh, partitions, tmpdir):
metisMesh = tmpdir.path(METIS_MESH)
metis.MeshWriter(metisMesh, mesh.elements())
metisGraph = tmpdir.path(METIS_GRAPH)
p = subprocess.Popen(['m2gmetis', '-ncommon=3', metisMesh,
metisGraph], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, errmsg = p.communicate()
if p.returncode:
raise Exception(errmsg.strip())
p = subprocess.Popen(['gpmetis', '-ptype=rb', metisGraph, str(
partitions)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, errmsg = p.communicate()
if p.returncode:
raise Exception(errmsg.strip())
self.__partition = metis.PartitionReader(metisGraph + '.part.' +
str(partitions), partitions, len(mesh.elements()))
if self.__partition.size() != len(mesh.elements()):
raise Exception(
'Mesh size and partition size do not match: mesh size = ' +
str(len(mesh.elements())) + ' != partition size = ' + str(
self.__partition.size()))
def partition(self):
return self.__partition
|
import metis
import subprocess
# File names (inside the supplied temporary directory) used to exchange data
# with the metis command line tools.
METIS_MESH = 'metis.mesh'
METIS_GRAPH = 'metis.graph'
class Partitioner:
    """Converts a mesh into graph and partitions it using metis"""

    def __init__(self, mesh, partitions, tmpdir):
        num_elements = len(mesh.elements())
        mesh_path = tmpdir.path(METIS_MESH)
        # Dump the mesh in metis format, convert it to its dual graph
        # (elements sharing >= 3 nodes become neighbours), then split the
        # graph into `partitions` parts via recursive bisection.
        metis.MeshWriter(mesh_path, mesh.elements())
        graph_path = tmpdir.path(METIS_GRAPH)
        self._run(['m2gmetis', '-ncommon=3', mesh_path, graph_path])
        self._run(['gpmetis', '-ptype=rb', graph_path, str(partitions)])
        self.__partition = metis.PartitionReader(graph_path + '.part.' +
            str(partitions), partitions, num_elements)
        if self.__partition.size() != num_elements:
            raise Exception(
                'Mesh size and partition size do not match: mesh size = ' +
                str(num_elements) + ' != partition size = ' + str(
                self.__partition.size()))

    @staticmethod
    def _run(argv):
        """Run one metis command line tool; raise with its stderr on failure."""
        proc = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        _, errmsg = proc.communicate()
        if proc.returncode:
            raise Exception(errmsg.strip())

    def partition(self):
        """Return the metis.PartitionReader describing the computed partitioning."""
        return self.__partition
|
#!/usr/bin/python
##
# @file
# This file is part of SeisSol.
#
# @author Sebastian Rettenberger (rettenbs AT in.tum.de, http://www5.in.tum.de/wiki/index.php/Sebastian_Rettenberger,_M.Sc.)
#
# @section LICENSE
# Copyright (c) 2013, SeisSol Group
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import metis
import subprocess
METIS_MESH = 'metis.mesh'
METIS_GRAPH = 'metis.graph'
class Partitioner:
"""Converts a mesh into graph and partitions it using metis"""
def __init__(self, mesh, partitions, tmpdir):
metisMesh = tmpdir.path(METIS_MESH)
# Write metis mesh
metis.MeshWriter(metisMesh, mesh.elements())
# Convert to graph
metisGraph = tmpdir.path(METIS_GRAPH)
p = subprocess.Popen(['m2gmetis', '-ncommon=3', metisMesh, metisGraph],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, errmsg = p.communicate()
if p.returncode:
raise Exception(errmsg.strip())
# Run metis
p = subprocess.Popen(['gpmetis', '-ptype=rb', metisGraph, str(partitions)],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, errmsg = p.communicate()
if p.returncode:
raise Exception(errmsg.strip())
# Read partitions
self.__partition = metis.PartitionReader(metisGraph+'.part.'+str(partitions),
partitions, len(mesh.elements()))
if self.__partition.size() != len(mesh.elements()):
raise Exception('Mesh size and partition size do not match: mesh size = '
+str(len(mesh.elements()))+' != partition size = '+str(self.__partition.size()))
def partition(self):
return self.__partition
|
[
3,
4,
5,
6,
7
] |
2,511 |
fd907dbcea01679c08aeae6bcbf6e61786f40260
|
<mask token>
def test_stringify_nums():
"""."""
from radixsort import stringify_nums
nums = [1, 2, 3, 4, 5]
stringified_nums = stringify_nums(nums)
assert stringified_nums == ['1', '2', '3', '4', '5']
def test_while_condition():
"""."""
from radixsort import while_condition
stringified_nums = ['1', '2', '3', '4', '5000']
assert while_condition(stringified_nums) == 4
<mask token>
def test_push_into_buckets():
"""."""
from radixsort import push_into_buckets
buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),
'2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(
), '7': Queue(), '8': Queue(), '9': Queue()})
nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)
for key in full_buckets_dict:
if full_buckets_dict[key].peek():
assert full_buckets_dict[key].dequeue() == key
<mask token>
|
<mask token>
def test_stringify_nums():
"""."""
from radixsort import stringify_nums
nums = [1, 2, 3, 4, 5]
stringified_nums = stringify_nums(nums)
assert stringified_nums == ['1', '2', '3', '4', '5']
def test_while_condition():
"""."""
from radixsort import while_condition
stringified_nums = ['1', '2', '3', '4', '5000']
assert while_condition(stringified_nums) == 4
<mask token>
def test_push_into_buckets():
"""."""
from radixsort import push_into_buckets
buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),
'2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(
), '7': Queue(), '8': Queue(), '9': Queue()})
nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)
for key in full_buckets_dict:
if full_buckets_dict[key].peek():
assert full_buckets_dict[key].dequeue() == key
<mask token>
def test_radix_sort_verbose():
"""Test with many lists."""
from radixsort import radixsort
for i in range(100):
list_length = random.randint(0, 100)
unsorted_list = []
for x in range(list_length):
unsorted_list.append(random.randint(0, 100))
assert radixsort(unsorted_list) == sorted(unsorted_list)
|
<mask token>
def test_stringify_nums():
"""."""
from radixsort import stringify_nums
nums = [1, 2, 3, 4, 5]
stringified_nums = stringify_nums(nums)
assert stringified_nums == ['1', '2', '3', '4', '5']
def test_while_condition():
"""."""
from radixsort import while_condition
stringified_nums = ['1', '2', '3', '4', '5000']
assert while_condition(stringified_nums) == 4
def test_unravel_buckets():
"""."""
from radixsort import unravel_buckets
buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),
'2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(
), '7': Queue(), '8': Queue(), '9': Queue()})
for bucket in buckets_dict:
buckets_dict[bucket].enqueue(bucket)
assert unravel_buckets(buckets_dict) == ['none', '0', '1', '2', '3',
'4', '5', '6', '7', '8', '9']
def test_push_into_buckets():
"""."""
from radixsort import push_into_buckets
buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),
'2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(
), '7': Queue(), '8': Queue(), '9': Queue()})
nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)
for key in full_buckets_dict:
if full_buckets_dict[key].peek():
assert full_buckets_dict[key].dequeue() == key
<mask token>
def test_radix_sort_verbose():
"""Test with many lists."""
from radixsort import radixsort
for i in range(100):
list_length = random.randint(0, 100)
unsorted_list = []
for x in range(list_length):
unsorted_list.append(random.randint(0, 100))
assert radixsort(unsorted_list) == sorted(unsorted_list)
|
<mask token>
def test_stringify_nums():
"""."""
from radixsort import stringify_nums
nums = [1, 2, 3, 4, 5]
stringified_nums = stringify_nums(nums)
assert stringified_nums == ['1', '2', '3', '4', '5']
def test_while_condition():
"""."""
from radixsort import while_condition
stringified_nums = ['1', '2', '3', '4', '5000']
assert while_condition(stringified_nums) == 4
def test_unravel_buckets():
"""."""
from radixsort import unravel_buckets
buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),
'2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(
), '7': Queue(), '8': Queue(), '9': Queue()})
for bucket in buckets_dict:
buckets_dict[bucket].enqueue(bucket)
assert unravel_buckets(buckets_dict) == ['none', '0', '1', '2', '3',
'4', '5', '6', '7', '8', '9']
def test_push_into_buckets():
"""."""
from radixsort import push_into_buckets
buckets_dict = OrderedDict({'none': Queue(), '0': Queue(), '1': Queue(),
'2': Queue(), '3': Queue(), '4': Queue(), '5': Queue(), '6': Queue(
), '7': Queue(), '8': Queue(), '9': Queue()})
nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)
for key in full_buckets_dict:
if full_buckets_dict[key].peek():
assert full_buckets_dict[key].dequeue() == key
def test_radix_sort():
"""Test with simple list."""
from radixsort import radixsort
nums = [5, 3, 2, 7, 9, 4, 0, 1]
assert radixsort(nums) == [0, 1, 2, 3, 4, 5, 7, 9]
def test_radix_sort_verbose():
"""Test with many lists."""
from radixsort import radixsort
for i in range(100):
list_length = random.randint(0, 100)
unsorted_list = []
for x in range(list_length):
unsorted_list.append(random.randint(0, 100))
assert radixsort(unsorted_list) == sorted(unsorted_list)
|
"""Test radix sort."""
import random
from collections import OrderedDict
from que_ import Queue
def test_stringify_nums():
"""."""
from radixsort import stringify_nums
nums = [1, 2, 3, 4, 5]
stringified_nums = stringify_nums(nums)
assert stringified_nums == ['1', '2', '3', '4', '5']
def test_while_condition():
"""."""
from radixsort import while_condition
stringified_nums = ['1', '2', '3', '4', '5000']
assert while_condition(stringified_nums) == 4
def test_unravel_buckets():
"""."""
from radixsort import unravel_buckets
buckets_dict = OrderedDict({
'none': Queue(),
'0': Queue(),
'1': Queue(),
'2': Queue(),
'3': Queue(),
'4': Queue(),
'5': Queue(),
'6': Queue(),
'7': Queue(),
'8': Queue(),
'9': Queue(),
})
for bucket in buckets_dict:
buckets_dict[bucket].enqueue(bucket)
assert unravel_buckets(buckets_dict) == ['none', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
def test_push_into_buckets():
"""."""
from radixsort import push_into_buckets
buckets_dict = OrderedDict({
'none': Queue(),
'0': Queue(),
'1': Queue(),
'2': Queue(),
'3': Queue(),
'4': Queue(),
'5': Queue(),
'6': Queue(),
'7': Queue(),
'8': Queue(),
'9': Queue(),
})
nums = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
full_buckets_dict = push_into_buckets(nums, 0, buckets_dict)
for key in full_buckets_dict:
if full_buckets_dict[key].peek():
assert full_buckets_dict[key].dequeue() == key
def test_radix_sort():
"""Test with simple list."""
from radixsort import radixsort
nums = [5, 3, 2, 7, 9, 4, 0, 1]
assert radixsort(nums) == [0, 1, 2, 3, 4, 5, 7, 9]
def test_radix_sort_verbose():
"""Test with many lists."""
from radixsort import radixsort
# test on 100 lists
for i in range(100):
# generate random length of list
list_length = random.randint(0, 100)
unsorted_list = []
for x in range(list_length):
# generate random numbers for random length list
unsorted_list.append(random.randint(0, 100))
# test that list is sorted
assert radixsort(unsorted_list) == sorted(unsorted_list)
|
[
3,
4,
5,
6,
8
] |
2,512 |
004a9cd0e459116bf3f88f3546ff4eded3dfb2a8
|
<mask token>
|
<mask token>
def exportVSSD(path, camName, wantTris=False, renderdata=None):
mainFileDict = {}
mainFilePath = path
mainFileStem = os.path.basename(path)[:-5]
mainFileDir = os.path.dirname(path)
resolution = pmc.ls('defaultResolution')[0]
renderWidth = resolution.width.get()
renderHeight = resolution.height.get()
if renderdata is not None:
mainFileDict['render'] = {'width': renderWidth, 'height':
renderHeight, 'spp': renderdata['spp']}
cam = pmc.ls(camName)[0].getShape()
mainFileDict['camera'] = {'focal': cam.getFocalLength(), 'gate': cam.
getVerticalFilmAperture(), 'aspect': renderWidth / renderHeight,
'eye': list(cam.getEyePoint(space='world')), 'up': list(cam.
upDirection(space='world')), 'look': list(cam.viewDirection(space=
'world'))}
bufPath = os.path.join(mainFileDir, '{}.bin'.format(mainFileStem))
geomList = pmc.ls(type='mesh', visible=True)
mainFileGeoms = []
offset = 0
with open(bufPath, 'wb') as bufFd:
for geom in geomList:
print('Processing {}...'.format(geom))
smoothLevel = pmc.displaySmoothness(geom, q=True, po=0)[0]
isSmooth = smoothLevel > 1
print('Smooth level {}'.format(smoothLevel))
faceBuf = ''
idxBuf = ''
vtxBuf = ''
nidxs = 0
for face in geom.f:
vtxidxs = face.getVertices()
nvtxidxs = len(vtxidxs)
if not isSmooth and wantTris:
if nvtxidxs > 3:
print(
'Non-triangulated face. Triangulate before exporting'
)
return
else:
faceBuf += struct.pack('<I', nvtxidxs)
nidxs += nvtxidxs
for vtxidx in vtxidxs:
idxBuf += struct.pack('<I', vtxidx)
for vertex in geom.vtx:
p = vertex.getPosition('world')
vtxBuf += struct.pack('<fff', p.x, p.y, p.z)
hasCreases = False
if isSmooth:
edges = geom.edges
creaseIdxBuf = ''
creaseValBuf = ''
creases = pmc.modeling.polyCrease(edges, q=True, v=0)
for e in range(0, len(edges)):
c = creases[e]
if c > 0:
hasCreases = True
vtxs = edges[e].connectedVertices()
creaseIdxBuf += struct.pack('<I', vtxs[0].index())
creaseIdxBuf += struct.pack('<I', vtxs[1].index())
creaseValBuf += struct.pack('<f', c)
buffers = [(idxBuf, 'indices'), (vtxBuf, 'vertices')]
if not wantTris:
buffers += [(faceBuf, 'faces')]
if hasCreases:
buffers += [(creaseIdxBuf, 'creaseindices'), (creaseValBuf,
'creasevalues')]
buffersList = []
for b in buffers:
print('Writing buffer {}'.format(b[1]))
bufFd.write(b[0])
s = len(b[0])
buffersList.append({'offset': offset, 'size': s, 'type': b[1]})
offset += s
sg = geom.connections(t='shadingEngine')[0]
mat = sg.surfaceShader.connections()[0]
albedo = mat.color.get()
emittance = mat.incandescence.get()
geomDict = {'triangles': wantTris, 'smooth': isSmooth,
'buffers': buffersList, 'material': {'albedo': list(albedo),
'emittance': list(emittance)}}
mainFileGeoms.append(geomDict)
mainFileDict['geometries'] = mainFileGeoms
mainFileJson = json.dumps(mainFileDict, indent=2)
with open(mainFilePath, 'w') as fd:
fd.write(mainFileJson)
print('Done')
|
import json
import struct
import pymel.core as pmc
import os.path
def exportVSSD(path, camName, wantTris=False, renderdata=None):
mainFileDict = {}
mainFilePath = path
mainFileStem = os.path.basename(path)[:-5]
mainFileDir = os.path.dirname(path)
resolution = pmc.ls('defaultResolution')[0]
renderWidth = resolution.width.get()
renderHeight = resolution.height.get()
if renderdata is not None:
mainFileDict['render'] = {'width': renderWidth, 'height':
renderHeight, 'spp': renderdata['spp']}
cam = pmc.ls(camName)[0].getShape()
mainFileDict['camera'] = {'focal': cam.getFocalLength(), 'gate': cam.
getVerticalFilmAperture(), 'aspect': renderWidth / renderHeight,
'eye': list(cam.getEyePoint(space='world')), 'up': list(cam.
upDirection(space='world')), 'look': list(cam.viewDirection(space=
'world'))}
bufPath = os.path.join(mainFileDir, '{}.bin'.format(mainFileStem))
geomList = pmc.ls(type='mesh', visible=True)
mainFileGeoms = []
offset = 0
with open(bufPath, 'wb') as bufFd:
for geom in geomList:
print('Processing {}...'.format(geom))
smoothLevel = pmc.displaySmoothness(geom, q=True, po=0)[0]
isSmooth = smoothLevel > 1
print('Smooth level {}'.format(smoothLevel))
faceBuf = ''
idxBuf = ''
vtxBuf = ''
nidxs = 0
for face in geom.f:
vtxidxs = face.getVertices()
nvtxidxs = len(vtxidxs)
if not isSmooth and wantTris:
if nvtxidxs > 3:
print(
'Non-triangulated face. Triangulate before exporting'
)
return
else:
faceBuf += struct.pack('<I', nvtxidxs)
nidxs += nvtxidxs
for vtxidx in vtxidxs:
idxBuf += struct.pack('<I', vtxidx)
for vertex in geom.vtx:
p = vertex.getPosition('world')
vtxBuf += struct.pack('<fff', p.x, p.y, p.z)
hasCreases = False
if isSmooth:
edges = geom.edges
creaseIdxBuf = ''
creaseValBuf = ''
creases = pmc.modeling.polyCrease(edges, q=True, v=0)
for e in range(0, len(edges)):
c = creases[e]
if c > 0:
hasCreases = True
vtxs = edges[e].connectedVertices()
creaseIdxBuf += struct.pack('<I', vtxs[0].index())
creaseIdxBuf += struct.pack('<I', vtxs[1].index())
creaseValBuf += struct.pack('<f', c)
buffers = [(idxBuf, 'indices'), (vtxBuf, 'vertices')]
if not wantTris:
buffers += [(faceBuf, 'faces')]
if hasCreases:
buffers += [(creaseIdxBuf, 'creaseindices'), (creaseValBuf,
'creasevalues')]
buffersList = []
for b in buffers:
print('Writing buffer {}'.format(b[1]))
bufFd.write(b[0])
s = len(b[0])
buffersList.append({'offset': offset, 'size': s, 'type': b[1]})
offset += s
sg = geom.connections(t='shadingEngine')[0]
mat = sg.surfaceShader.connections()[0]
albedo = mat.color.get()
emittance = mat.incandescence.get()
geomDict = {'triangles': wantTris, 'smooth': isSmooth,
'buffers': buffersList, 'material': {'albedo': list(albedo),
'emittance': list(emittance)}}
mainFileGeoms.append(geomDict)
mainFileDict['geometries'] = mainFileGeoms
mainFileJson = json.dumps(mainFileDict, indent=2)
with open(mainFilePath, 'w') as fd:
fd.write(mainFileJson)
print('Done')
|
import json
import struct
import pymel.core as pmc
import os.path
def exportVSSD(path, camName, wantTris=False, renderdata=None):
mainFileDict = {}
mainFilePath = path
mainFileStem = os.path.basename(path)[:-5]
mainFileDir = os.path.dirname(path)
resolution = pmc.ls('defaultResolution')[0]
renderWidth = resolution.width.get()
renderHeight = resolution.height.get()
if renderdata is not None:
mainFileDict['render'] = {
'width' : renderWidth,
'height': renderHeight,
'spp' : renderdata['spp']
}
cam = pmc.ls(camName)[0].getShape()
mainFileDict['camera'] = {
'focal' : cam.getFocalLength(),
'gate' : cam.getVerticalFilmAperture(),
'aspect': renderWidth / renderHeight,
'eye' : list(cam.getEyePoint(space='world')),
'up' : list(cam.upDirection(space='world')),
'look' : list(cam.viewDirection(space='world'))
}
bufPath = os.path.join(mainFileDir, '{}.bin'.format(mainFileStem))
geomList = pmc.ls(type='mesh', visible=True)
mainFileGeoms = []
offset = 0
with open(bufPath, 'wb') as bufFd:
for geom in geomList:
print('Processing {}...'.format(geom))
smoothLevel = pmc.displaySmoothness(geom, q=True, po=0)[0]
isSmooth = smoothLevel > 1
print('Smooth level {}'.format(smoothLevel))
faceBuf = ''
idxBuf = ''
vtxBuf = ''
nidxs = 0
for face in geom.f:
vtxidxs = face.getVertices()
nvtxidxs = len(vtxidxs)
if not isSmooth and wantTris:
if nvtxidxs > 3:
print('Non-triangulated face. Triangulate before exporting')
return
else:
faceBuf += struct.pack('<I', nvtxidxs)
nidxs += nvtxidxs
for vtxidx in vtxidxs:
idxBuf += struct.pack('<I', vtxidx)
for vertex in geom.vtx:
p = vertex.getPosition('world')
vtxBuf += struct.pack('<fff', p.x, p.y, p.z)
hasCreases = False
if isSmooth:
edges = geom.edges
creaseIdxBuf = ''
creaseValBuf = ''
creases = pmc.modeling.polyCrease(edges, q=True, v=0)
for e in range(0, len(edges)):
c = creases[e]
if c > 0:
hasCreases = True
vtxs = edges[e].connectedVertices()
creaseIdxBuf += struct.pack('<I', vtxs[0].index())
creaseIdxBuf += struct.pack('<I', vtxs[1].index())
creaseValBuf += struct.pack('<f', c)
buffers = [
(idxBuf, 'indices'),
(vtxBuf, 'vertices')
]
if not wantTris:
buffers += [
(faceBuf, 'faces')
]
if hasCreases:
buffers += [
(creaseIdxBuf, 'creaseindices'),
(creaseValBuf, 'creasevalues')
]
buffersList = []
for b in buffers:
print('Writing buffer {}'.format(b[1]))
bufFd.write(b[0])
s = len(b[0])
buffersList.append({
'offset': offset,
'size': s,
'type': b[1]
})
offset += s
sg = geom.connections(t='shadingEngine')[0]
mat = sg.surfaceShader.connections()[0]
albedo = mat.color.get()
emittance = mat.incandescence.get()
geomDict = {
'triangles' : wantTris,
'smooth' : isSmooth,
'buffers' : buffersList,
'material' : {
'albedo' : list(albedo),
'emittance' : list(emittance)
}
}
mainFileGeoms.append(geomDict)
mainFileDict['geometries'] = mainFileGeoms
mainFileJson = json.dumps(mainFileDict, indent=2)
with open(mainFilePath, 'w') as fd: fd.write(mainFileJson)
print('Done')
| null |
[
0,
1,
2,
3
] |
2,513 |
8a0e781f29c426161240e33b9d2adc7537b3d352
|
<mask token>
|
<mask token>
for i in range(N):
c, t = map(int, input().split())
if nm > c and T >= t:
nm = c
if nm == 1000000:
print('TLE')
else:
print(nm)
|
N, T = map(int, input().split())
nm = 1000000
for i in range(N):
c, t = map(int, input().split())
if nm > c and T >= t:
nm = c
if nm == 1000000:
print('TLE')
else:
print(nm)
|
N,T=map(int,input().split())
nm=1000000
for i in range(N):
c,t=map(int,input().split())
if nm>c and T>=t:
nm=c
if nm==1000000:
print("TLE")
else:
print(nm)
| null |
[
0,
1,
2,
3
] |
2,514 |
03cc3bf37ea8d971550a89107161005901d842de
|
<mask token>
|
<mask token>
class Solution:
<mask token>
|
<mask token>
class Solution:
def destCity(self, paths: List[List[str]]) ->str:
departCity = set()
destCity = []
for i in paths:
if i[1] not in departCity:
destCity.append(i[1])
if i[0] in destCity:
destCity.remove(i[0])
departCity.add(i[0])
return destCity[0]
|
from typing import List
class Solution:
def destCity(self, paths: List[List[str]]) ->str:
departCity = set()
destCity = []
for i in paths:
if i[1] not in departCity:
destCity.append(i[1])
if i[0] in destCity:
destCity.remove(i[0])
departCity.add(i[0])
return destCity[0]
|
from typing import List
class Solution:
def destCity(self, paths: List[List[str]]) -> str:
departCity = set()
destCity = []
for i in paths:
if i[1] not in departCity:
destCity.append(i[1])
if i[0] in destCity:
destCity.remove(i[0])
departCity.add(i[0])
return destCity[0]
|
[
0,
1,
2,
3,
4
] |
2,515 |
de2de26d0c82213393e8174d1144c3510c63b899
|
<mask token>
|
<mask token>
if __name__ == '__main__':
app.run(port=NLC.port, debug=True)
|
<mask token>
app = NLC.create_app()
if __name__ == '__main__':
app.run(port=NLC.port, debug=True)
|
import NLC
app = NLC.create_app()
if __name__ == '__main__':
app.run(port=NLC.port, debug=True)
|
import NLC
app = NLC.create_app()
if __name__ == '__main__':
app.run(port=NLC.port, debug=True)
|
[
0,
1,
2,
3,
4
] |
2,516 |
8b4bd2d267f20775ee5d41f7fe9ef6f6eeab5bb0
|
from scipy import misc
from math import exp
import tensorflow as tf
import timeit
import os
dir_path = os.path.dirname(os.path.realpath(__file__))
IMAGE_WIDTH = 30
IMAGE_HEIGHT = 30
IMAGE_DEPTH = 3
IMAGE_PIXELS = IMAGE_WIDTH * IMAGE_HEIGHT
def conv2d(x, W):
return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
def max_pool_2x2(x):
return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
def get_single_img():
file_path = dir_path+'/trunk_data_set/img_test/true_seg_cube/220.png'
img = misc.imread(file_path)
print "the inpute image shape: ", img.shape
return img
def conv_net(x, W_conv1, b_conv1, W_conv2, b_conv2, W_fc1, b_fc1, W_fc2, b_fc2):
# first convolutional leyer
x_image = tf.reshape(x, [-1,30,30,3])
h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + b_conv1)
h_pool1 = max_pool_2x2(h_conv1)
# second convolutional leyer
h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)
h_pool2 = max_pool_2x2(h_conv2)
# third leyer
h_pool2_flat = tf.reshape(h_pool2, [-1, 8*8*60])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
# drop out
h_fc1_drop = tf.nn.dropout(h_fc1, 1.0)
# rool out leyer
out = tf.add(tf.matmul(h_fc1_drop, W_fc2) , b_fc2)
return out
config = tf.ConfigProto( device_count = {'GPU': 0} )
with tf.Session(config=config) as sess1:
image_input = get_single_img()
saver = tf.train.import_meta_graph('learned_model/model.ckpt.meta')
saver.restore(sess1,"learned_model/model.ckpt")
start = timeit.default_timer()
#print("Model restored.")
#print tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
W_conv1 = [v for v in tf.trainable_variables() if v.name == "Variable:0"][0]
b_conv1 = [v for v in tf.trainable_variables() if v.name == "Variable_1:0"][0]
W_conv2 = [v for v in tf.trainable_variables() if v.name == "Variable_2:0"][0]
b_conv2 = [v for v in tf.trainable_variables() if v.name == "Variable_3:0"][0]
W_fc1 = [v for v in tf.trainable_variables() if v.name == "Variable_4:0"][0]
b_fc1 = [v for v in tf.trainable_variables() if v.name == "Variable_5:0"][0]
W_fc2 = [v for v in tf.trainable_variables() if v.name == "Variable_6:0"][0]
b_fc2 = [v for v in tf.trainable_variables() if v.name == "Variable_7:0"][0]
img2 = tf.convert_to_tensor(image_input)
img2 = tf.reshape( img2, [ IMAGE_PIXELS * IMAGE_DEPTH ] )
img2.set_shape( [ IMAGE_PIXELS * IMAGE_DEPTH ] )
image_input = tf.cast( img2, tf.float32 ) * ( 1. / 255 ) - 0.5
y = conv_net(image_input,W_conv1, b_conv1, W_conv2, b_conv2, W_fc1, b_fc1, W_fc2, b_fc2)
stop = timeit.default_timer()
print "There is no trunk with %f probablity" % (1/(1+exp(-y.eval()[0][1])))
print "There is a trunk with %f probablity" % (1/(1+exp(-y.eval()[0][0])))
print "calculation time :", stop - start
| null | null | null | null |
[
0
] |
2,517 |
9ab119b32ceac370b744658e5fa679292609373a
|
<mask token>
|
<mask token>
def merge_sort(items, temp, low, high):
if high <= low:
return None
mid = low + (high - low) // 2
merge_sort(items, temp, low, mid)
merge_sort(items, temp, mid + 1, high)
merge(items, temp, low, mid, high)
<mask token>
|
def merge(items, temp, low, mid, high):
i = low
j = mid + 1
for k in range(low, high + 1):
if i > mid:
temp[k] = items[j]
j += 1
elif j > high:
temp[k] = items[i]
i += 1
elif items[j] < items[i]:
temp[k] = items[j]
j += 1
else:
temp[k] = items[i]
i += 1
for k in range(low, high + 1):
items[k] = temp[k]
def merge_sort(items, temp, low, high):
if high <= low:
return None
mid = low + (high - low) // 2
merge_sort(items, temp, low, mid)
merge_sort(items, temp, mid + 1, high)
merge(items, temp, low, mid, high)
<mask token>
|
def merge(items, temp, low, mid, high):
i = low
j = mid + 1
for k in range(low, high + 1):
if i > mid:
temp[k] = items[j]
j += 1
elif j > high:
temp[k] = items[i]
i += 1
elif items[j] < items[i]:
temp[k] = items[j]
j += 1
else:
temp[k] = items[i]
i += 1
for k in range(low, high + 1):
items[k] = temp[k]
def merge_sort(items, temp, low, high):
if high <= low:
return None
mid = low + (high - low) // 2
merge_sort(items, temp, low, mid)
merge_sort(items, temp, mid + 1, high)
merge(items, temp, low, mid, high)
if __name__ == '__main__':
items = [5, 4, 3, 3, 5, 6, 4, 4, 3, 2]
temp = [None] * len(items)
print('정렬 전 : \t', end='')
print(items)
merge_sort(items, temp, 0, len(items) - 1)
print('정렬 전 : \t', end='')
print(items)
|
def merge(items, temp, low, mid, high):
i = low
j = mid + 1
for k in range(low, high+1):
if i > mid:
# 왼쪽 리스트의 순회를 마쳤음
# 남은 오른쪽 리스트의 원소들은 모두 왼쪽 리스트 원소보다 작음
temp[k] = items[j]
# 뒤에 나머지는 정렬되어있으니 그대로 넣기
j += 1
elif j > high:
# 오른쪽 리스트의 순회를 마쳤음
# 남은 왼쪽 리스트 원소들은 모두 오른쪽 리스트 원소보다 작음
temp[k] = items[i]
# 앞의 나머지는 정렬되어있으니 그대로 넣기
i += 1
elif items[j] < items[i]:
# 왼쪽 리스트의 원소가 더 큰 경우
# 오른쪽 리스트의 원소를 정렬리스트에 넣을거임
temp[k] = items[j]
j += 1
# 오른쪽 리스트 다음 원소를 비교해보자
else:
# 왼쪽 리스트의 원소가 더 작거나 같은 경우
# 왼쪽 리스트의 원소를 정렬리스트에 넣을거임
temp[k] = items[i]
i += 1
# 왼쪽 리스트 다음 원소를 비교해라
for k in range(low, high+1):
items[k] = temp[k]
# 이제 정렬해놓은거 원래 리스트로 복사해라
def merge_sort(items, temp, low, high):
if high <= low:
return None
# 다 정렬했으면 이제 끝내라
mid = low + (high - low)//2
# low, high, mid 는 값이 아니라 index 값임
merge_sort(items, temp, low, mid)
merge_sort(items, temp, mid+1, high)
merge(items, temp, low, mid, high)
if __name__ == '__main__':
items = [5,4,3,3,5,6,4,4,3,2]
temp = [None]*len(items)
print('정렬 전 : \t', end ='')
print(items)
merge_sort(items, temp, 0, len(items)-1)
print('정렬 전 : \t', end='')
print(items)
|
[
0,
1,
2,
3,
4
] |
2,518 |
8a2ab260f4758bcca7b1a68d1fb65b7eebab5533
|
<mask token>
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u'SpiderMiddlewares2 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares2 process_item {}'.format(item.data))
return item
|
class SpiderMiddlewares1(object):
<mask token>
<mask token>
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u'SpiderMiddlewares2 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares2 process_item {}'.format(item.data))
return item
|
class SpiderMiddlewares1(object):
<mask token>
def process_item(self, item):
print(u'SpiderMiddlewares1 process_item {}'.format(item.data))
return item
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u'SpiderMiddlewares2 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares2 process_item {}'.format(item.data))
return item
|
class SpiderMiddlewares1(object):
def process_request(self, request):
print(u'SpiderMiddlewares1 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares1 process_item {}'.format(item.data))
return item
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u'SpiderMiddlewares2 process_request {}'.format(request.url))
return request
def process_item(self, item):
print(u'SpiderMiddlewares2 process_item {}'.format(item.data))
return item
|
# coding:utf-8
class SpiderMiddlewares1(object):
def process_request(self, request):
print(u"SpiderMiddlewares1 process_request {}".format(request.url))
return request
def process_item(self, item):
print(u"SpiderMiddlewares1 process_item {}".format(item.data))
return item
class SpiderMiddlewares2(object):
def process_request(self, request):
print(u"SpiderMiddlewares2 process_request {}".format(request.url))
return request
def process_item(self, item):
print(u"SpiderMiddlewares2 process_item {}".format(item.data))
return item
|
[
3,
4,
5,
6,
7
] |
2,519 |
9c50a3abd353d5ba619eaa217dcc07ab76fb850c
|
<mask token>
class BookSerializer(serializers.ModelSerializer):
class Meta:
model = Book
fields = '__all__'
def create(self, validated_data):
formats = validated_data.pop('format', [])
book = Book.objects.create(**validated_data)
book.format.add(*formats)
return book
def to_representation(self, instance):
representation = super().to_representation(instance)
representation['genre'] = GenresSerializer(instance.genre, context=
self.context).data
representation['reviews'] = ReviewSerializer(instance.reviews.all(),
many=True).data
representation['orders_count'] = instance.orders.count()
representation['price'] = PriceSerializer(instance.books_price.all(
), many=True).data
return representation
class BookListSerializer(serializers.ModelSerializer):
details = serializers.HyperlinkedIdentityField(view_name='book-detail',
lookup_field='slug')
class Meta:
model = Book
fields = ['title', 'author', 'genre', 'cover', 'details']
class ReviewSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = Review
fields = 'user', 'book', 'text', 'rating', 'created_time'
def validate_rating(self, rating):
if rating not in range(1, 6):
raise serializers.ValidationError('Оценка от 1 до 5')
return rating
def create(self, validated_data):
request = self.context.get('request')
user = request.user
review = Review.objects.create(user=user, **validated_data)
return review
|
<mask token>
class FormatSerializer(serializers.ModelSerializer):
class Meta:
model = Format
fields = 'title',
class BookSerializer(serializers.ModelSerializer):
class Meta:
model = Book
fields = '__all__'
def create(self, validated_data):
formats = validated_data.pop('format', [])
book = Book.objects.create(**validated_data)
book.format.add(*formats)
return book
def to_representation(self, instance):
representation = super().to_representation(instance)
representation['genre'] = GenresSerializer(instance.genre, context=
self.context).data
representation['reviews'] = ReviewSerializer(instance.reviews.all(),
many=True).data
representation['orders_count'] = instance.orders.count()
representation['price'] = PriceSerializer(instance.books_price.all(
), many=True).data
return representation
class BookListSerializer(serializers.ModelSerializer):
details = serializers.HyperlinkedIdentityField(view_name='book-detail',
lookup_field='slug')
class Meta:
model = Book
fields = ['title', 'author', 'genre', 'cover', 'details']
class ReviewSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = Review
fields = 'user', 'book', 'text', 'rating', 'created_time'
def validate_rating(self, rating):
if rating not in range(1, 6):
raise serializers.ValidationError('Оценка от 1 до 5')
return rating
def create(self, validated_data):
request = self.context.get('request')
user = request.user
review = Review.objects.create(user=user, **validated_data)
return review
|
<mask token>
class GenresSerializer(serializers.ModelSerializer):
class Meta:
model = Genres
fields = 'title',
class PriceSerializer(serializers.ModelSerializer):
class Meta:
model = ExtraTableForPrice
fields = 'formats', 'price'
class FormatSerializer(serializers.ModelSerializer):
class Meta:
model = Format
fields = 'title',
class BookSerializer(serializers.ModelSerializer):
class Meta:
model = Book
fields = '__all__'
def create(self, validated_data):
formats = validated_data.pop('format', [])
book = Book.objects.create(**validated_data)
book.format.add(*formats)
return book
def to_representation(self, instance):
representation = super().to_representation(instance)
representation['genre'] = GenresSerializer(instance.genre, context=
self.context).data
representation['reviews'] = ReviewSerializer(instance.reviews.all(),
many=True).data
representation['orders_count'] = instance.orders.count()
representation['price'] = PriceSerializer(instance.books_price.all(
), many=True).data
return representation
class BookListSerializer(serializers.ModelSerializer):
details = serializers.HyperlinkedIdentityField(view_name='book-detail',
lookup_field='slug')
class Meta:
model = Book
fields = ['title', 'author', 'genre', 'cover', 'details']
class ReviewSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = Review
fields = 'user', 'book', 'text', 'rating', 'created_time'
def validate_rating(self, rating):
if rating not in range(1, 6):
raise serializers.ValidationError('Оценка от 1 до 5')
return rating
def create(self, validated_data):
request = self.context.get('request')
user = request.user
review = Review.objects.create(user=user, **validated_data)
return review
|
from rest_framework import serializers
from books.models import Genres, Format, Book, Review, ExtraTableForPrice
class GenresSerializer(serializers.ModelSerializer):
class Meta:
model = Genres
fields = 'title',
class PriceSerializer(serializers.ModelSerializer):
class Meta:
model = ExtraTableForPrice
fields = 'formats', 'price'
class FormatSerializer(serializers.ModelSerializer):
class Meta:
model = Format
fields = 'title',
class BookSerializer(serializers.ModelSerializer):
class Meta:
model = Book
fields = '__all__'
def create(self, validated_data):
formats = validated_data.pop('format', [])
book = Book.objects.create(**validated_data)
book.format.add(*formats)
return book
def to_representation(self, instance):
representation = super().to_representation(instance)
representation['genre'] = GenresSerializer(instance.genre, context=
self.context).data
representation['reviews'] = ReviewSerializer(instance.reviews.all(),
many=True).data
representation['orders_count'] = instance.orders.count()
representation['price'] = PriceSerializer(instance.books_price.all(
), many=True).data
return representation
class BookListSerializer(serializers.ModelSerializer):
details = serializers.HyperlinkedIdentityField(view_name='book-detail',
lookup_field='slug')
class Meta:
model = Book
fields = ['title', 'author', 'genre', 'cover', 'details']
class ReviewSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = Review
fields = 'user', 'book', 'text', 'rating', 'created_time'
def validate_rating(self, rating):
if rating not in range(1, 6):
raise serializers.ValidationError('Оценка от 1 до 5')
return rating
def create(self, validated_data):
request = self.context.get('request')
user = request.user
review = Review.objects.create(user=user, **validated_data)
return review
|
from rest_framework import serializers
from books.models import Genres, Format, Book, Review, ExtraTableForPrice
class GenresSerializer(serializers.ModelSerializer):
    """Minimal genre serializer exposing only the title field."""

    class Meta:
        model = Genres
        fields = ('title', )
class PriceSerializer(serializers.ModelSerializer):
    """Serializes one price row: the format it applies to and its amount."""

    class Meta:
        model = ExtraTableForPrice
        fields = ('formats', 'price', )
class FormatSerializer(serializers.ModelSerializer):
    """Minimal book-format serializer exposing only the title field."""

    class Meta:
        model = Format
        fields = ('title', )
class BookSerializer(serializers.ModelSerializer):
    """Detail serializer for Book.

    ``create`` detaches the many-to-many ``format`` ids from the payload and
    links them only after the Book row exists (M2M links need a saved primary
    key).  ``to_representation`` enriches the flat model output with a nested
    genre, nested reviews, per-format prices, and an order count.
    """

    class Meta:
        model = Book
        fields = '__all__'

    def create(self, validated_data):
        # 'format' may be absent from the payload; default to no formats.
        formats = validated_data.pop('format', [])
        book = Book.objects.create(**validated_data)
        book.format.add(*formats)
        return book

    def to_representation(self, instance):
        representation = super().to_representation(instance)
        # Replace the genre pk with its nested representation.
        representation['genre'] = GenresSerializer(instance.genre, context=self.context).data
        # ReviewSerializer is defined later in this module; the name is only
        # resolved at call time, so the forward reference is safe.
        representation['reviews'] = ReviewSerializer(instance.reviews.all(), many=True).data
        representation['orders_count'] = instance.orders.count()
        representation['price'] = PriceSerializer(instance.books_price.all(), many=True).data
        return representation
class BookListSerializer(serializers.ModelSerializer):
    """Compact list serializer with a hyperlink to the slug-based detail view."""

    # 'details' renders a URL to the 'book-detail' route, looked up by slug.
    details = serializers.HyperlinkedIdentityField(view_name='book-detail', lookup_field='slug')

    class Meta:
        model = Book
        fields = ['title', 'author', 'genre', 'cover', 'details']
class ReviewSerializer(serializers.ModelSerializer):
    """Review serializer; the author is taken from the request, not the payload."""

    # Read-only: clients cannot spoof the review author.
    user = serializers.PrimaryKeyRelatedField(read_only=True)

    class Meta:
        model = Review
        fields = ('user', 'book', 'text', 'rating', 'created_time')

    def validate_rating(self, rating):
        # Ratings are an inclusive 1..5 integer scale.
        if rating not in range(1, 6):
            raise(serializers.ValidationError('Оценка от 1 до 5'))
        return rating

    def create(self, validated_data):
        # Bind the review to the authenticated user from the request context.
        request = self.context.get('request')
        user = request.user
        review = Review.objects.create(user=user, **validated_data)
        return review
|
[
9,
10,
12,
13,
14
] |
2,520 |
4bad45f8c135463fadea9b3eed52ab045a51e8db
|
<mask token>
|
<mask token>
if __name__ == '__main__':
text = input('Введите предложение: ')
x1 = text.index('с')
x2 = text.index('т')
if x1 > x2:
print("Бурква 'с' встречается позже")
else:
print("Бурква 'т' встречается позже")
|
'''
Дано предложение, в котором имеются буквы с и т. Определить, какая из них встречается
позже (при просмотре слова слева направо). Если таких букв несколько, то должны
учитываться последние из них. Оператор цикла с условием не использовать.
'''
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
if __name__ == '__main__':
text = input("Введите предложение: ")
x1 = text.index("с")
x2 = text.index("т")
if x1 > x2:
print("Бурква 'с' встречается позже")
else:
print("Бурква 'т' встречается позже")
| null | null |
[
0,
1,
2
] |
2,521 |
3b531c5935f0be89536c95ff471f96b4249d951c
|
<mask token>
class EchoBot(ClientXMPP):
def __init__(self, jid, password):
ClientXMPP.__init__(self, jid, password)
self.add_event_handler('session_start', self.session_start)
self.register_plugin('xep_0045')
<mask token>
<mask token>
|
<mask token>
class EchoBot(ClientXMPP):
def __init__(self, jid, password):
ClientXMPP.__init__(self, jid, password)
self.add_event_handler('session_start', self.session_start)
self.register_plugin('xep_0045')
def session_start(self, event):
self.send_presence()
self.get_roster()
self['xep_0045'].joinMUC('[email protected]', 'your_name',
wait=True)
<mask token>
|
<mask token>
class EchoBot(ClientXMPP):
def __init__(self, jid, password):
ClientXMPP.__init__(self, jid, password)
self.add_event_handler('session_start', self.session_start)
self.register_plugin('xep_0045')
def session_start(self, event):
self.send_presence()
self.get_roster()
self['xep_0045'].joinMUC('[email protected]', 'your_name',
wait=True)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format=
'%(levelname)-8s %(message)s')
xmpp = EchoBot('[email protected]', 'your_password')
xmpp.connect()
xmpp.process(block=True)
|
import logging
from sleekxmpp import ClientXMPP
from sleekxmpp.exceptions import IqError, IqTimeout
class EchoBot(ClientXMPP):
def __init__(self, jid, password):
ClientXMPP.__init__(self, jid, password)
self.add_event_handler('session_start', self.session_start)
self.register_plugin('xep_0045')
def session_start(self, event):
self.send_presence()
self.get_roster()
self['xep_0045'].joinMUC('[email protected]', 'your_name',
wait=True)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format=
'%(levelname)-8s %(message)s')
xmpp = EchoBot('[email protected]', 'your_password')
xmpp.connect()
xmpp.process(block=True)
|
import logging
from sleekxmpp import ClientXMPP
from sleekxmpp.exceptions import IqError, IqTimeout
class EchoBot(ClientXMPP):
    """Minimal XMPP client that joins a multi-user chat room on session start."""

    def __init__(self, jid, password):
        ClientXMPP.__init__(self, jid, password)
        # Run self.session_start once the XMPP session is established.
        self.add_event_handler("session_start", self.session_start)
        self.register_plugin('xep_0045') # Multi-User Chat
    def session_start(self, event):
        # Standard session bootstrap: announce presence, fetch the roster,
        # then join the configured MUC room under the given nickname.
        self.send_presence()
        self.get_roster()
        self['xep_0045'].joinMUC('[email protected]', 'your_name', wait=True)
if __name__ == '__main__':
    # DEBUG level exposes the full stanza traffic — useful while developing.
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)-8s %(message)s')
    xmpp = EchoBot('[email protected]', 'your_password')
    xmpp.connect()
    xmpp.process(block=True)  # run the event loop until disconnect
|
[
2,
3,
4,
5,
6
] |
2,522 |
650f00dd9740d62546eb58724e6e5a74398b3e59
|
<mask token>
class DataGenerator(IterableDataset):
def __init__(self, memmap_directory, apikey_weighted_df):
super(DataGenerator, self).__init__()
self.data_generator = training_generator(memmap_directory,
apikey_weighted_df)
def __iter__(self):
return self.data_generator
class CrossEncoderModel(torch.nn.Module):
def __init__(self):
super(CrossEncoderModel, self).__init__()
self.bert = AutoModel.from_pretrained('distilbert-base-cased')
self.hidden = nn.Linear(768, 512)
self.out = nn.Linear(512, 1)
def forward(self, tensor_in, sep_token_id=102):
positive_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 1]], dim=1)
positive_pairs[:, 256] = sep_token_id
negative_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 2]], dim=1)
negative_pairs[:, 256] = sep_token_id
positive_labels = torch.ones(len(positive_pairs), dtype=torch.
float32, device=tensor_in.device)
negative_labels = torch.zeros_like(positive_labels)
labels = torch.cat([positive_labels, negative_labels])
inputs = torch.cat([positive_pairs, negative_pairs], dim=0)
assert len(labels) == inputs.shape[0]
out = self.bert(inputs)[0]
out = out.mean(dim=1, keepdims=False)
out = F.gelu(self.hidden(out))
out = torch.squeeze(self.out(out))
loss = F.binary_cross_entropy_with_logits(out, labels)
return loss
<mask token>
|
<mask token>
class DataGenerator(IterableDataset):
def __init__(self, memmap_directory, apikey_weighted_df):
super(DataGenerator, self).__init__()
self.data_generator = training_generator(memmap_directory,
apikey_weighted_df)
def __iter__(self):
return self.data_generator
class CrossEncoderModel(torch.nn.Module):
def __init__(self):
super(CrossEncoderModel, self).__init__()
self.bert = AutoModel.from_pretrained('distilbert-base-cased')
self.hidden = nn.Linear(768, 512)
self.out = nn.Linear(512, 1)
def forward(self, tensor_in, sep_token_id=102):
positive_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 1]], dim=1)
positive_pairs[:, 256] = sep_token_id
negative_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 2]], dim=1)
negative_pairs[:, 256] = sep_token_id
positive_labels = torch.ones(len(positive_pairs), dtype=torch.
float32, device=tensor_in.device)
negative_labels = torch.zeros_like(positive_labels)
labels = torch.cat([positive_labels, negative_labels])
inputs = torch.cat([positive_pairs, negative_pairs], dim=0)
assert len(labels) == inputs.shape[0]
out = self.bert(inputs)[0]
out = out.mean(dim=1, keepdims=False)
out = F.gelu(self.hidden(out))
out = torch.squeeze(self.out(out))
loss = F.binary_cross_entropy_with_logits(out, labels)
return loss
def main():
batch_size = 16
batches_per_epoch = 2 ** 19 // batch_size
eval_batches_per_epoch = 2 ** 18 // batch_size
save_path = Path('model.save')
train_weighted_apikeys, test_weighted_apikeys = get_train_test_apikeys(
MEMMAP_DIRECTORY)
debug_weighted_apikeys = pd.concat([train_weighted_apikeys,
test_weighted_apikeys]).query('num_posts > 1000000')
train_dataset = DataGenerator(MEMMAP_DIRECTORY, debug_weighted_apikeys)
train_loader = DataLoader(train_dataset, batch_size=batch_size,
pin_memory=True, num_workers=1)
test_dataset = DataGenerator(MEMMAP_DIRECTORY, debug_weighted_apikeys)
test_loader = DataLoader(test_dataset, batch_size=batch_size,
pin_memory=True, num_workers=1)
model = CrossEncoderModel().cuda()
model_params = model.parameters()
optimizer = torch.optim.Adam(model_params, lr=0.0001)
if save_path.is_file():
print('Loading state...')
checkpoint = torch.load(str(save_path))
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
start_epoch = checkpoint['epoch'] + 1
else:
start_epoch = 0
for epoch in range(start_epoch, 60):
with tqdm(total=batches_per_epoch, dynamic_ncols=True) as bar:
bar.set_description(f'Epoch {epoch}')
bar_loss = 0.0
model.train()
optimizer.zero_grad()
for i, batch in enumerate(train_loader):
batch = batch.cuda()
loss = model(batch)
loss.backward()
optimizer.step()
bar.update(1)
bar_loss = (bar_loss * i + float(loss.detach())) / (i + 1)
bar.set_postfix_str(f'Loss: {bar_loss:.3f}')
if i == batches_per_epoch - 1:
break
with tqdm(total=eval_batches_per_epoch, dynamic_ncols=True) as bar:
bar.set_description(f'Eval epoch {epoch}')
bar_loss = 0.0
model.eval()
with torch.no_grad():
for i, batch in enumerate(test_loader):
batch = batch.cuda()
loss = model(batch)
bar.update(1)
bar_loss = (bar_loss * i + float(loss.detach())) / (i + 1)
bar.set_postfix_str(f'Loss: {bar_loss:.3f}')
if i == eval_batches_per_epoch - 1:
break
torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(),
'optimizer_state_dict': optimizer.state_dict()}, str(save_path))
<mask token>
|
<mask token>
class DataGenerator(IterableDataset):
def __init__(self, memmap_directory, apikey_weighted_df):
super(DataGenerator, self).__init__()
self.data_generator = training_generator(memmap_directory,
apikey_weighted_df)
def __iter__(self):
return self.data_generator
class CrossEncoderModel(torch.nn.Module):
def __init__(self):
super(CrossEncoderModel, self).__init__()
self.bert = AutoModel.from_pretrained('distilbert-base-cased')
self.hidden = nn.Linear(768, 512)
self.out = nn.Linear(512, 1)
def forward(self, tensor_in, sep_token_id=102):
positive_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 1]], dim=1)
positive_pairs[:, 256] = sep_token_id
negative_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 2]], dim=1)
negative_pairs[:, 256] = sep_token_id
positive_labels = torch.ones(len(positive_pairs), dtype=torch.
float32, device=tensor_in.device)
negative_labels = torch.zeros_like(positive_labels)
labels = torch.cat([positive_labels, negative_labels])
inputs = torch.cat([positive_pairs, negative_pairs], dim=0)
assert len(labels) == inputs.shape[0]
out = self.bert(inputs)[0]
out = out.mean(dim=1, keepdims=False)
out = F.gelu(self.hidden(out))
out = torch.squeeze(self.out(out))
loss = F.binary_cross_entropy_with_logits(out, labels)
return loss
def main():
batch_size = 16
batches_per_epoch = 2 ** 19 // batch_size
eval_batches_per_epoch = 2 ** 18 // batch_size
save_path = Path('model.save')
train_weighted_apikeys, test_weighted_apikeys = get_train_test_apikeys(
MEMMAP_DIRECTORY)
debug_weighted_apikeys = pd.concat([train_weighted_apikeys,
test_weighted_apikeys]).query('num_posts > 1000000')
train_dataset = DataGenerator(MEMMAP_DIRECTORY, debug_weighted_apikeys)
train_loader = DataLoader(train_dataset, batch_size=batch_size,
pin_memory=True, num_workers=1)
test_dataset = DataGenerator(MEMMAP_DIRECTORY, debug_weighted_apikeys)
test_loader = DataLoader(test_dataset, batch_size=batch_size,
pin_memory=True, num_workers=1)
model = CrossEncoderModel().cuda()
model_params = model.parameters()
optimizer = torch.optim.Adam(model_params, lr=0.0001)
if save_path.is_file():
print('Loading state...')
checkpoint = torch.load(str(save_path))
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
start_epoch = checkpoint['epoch'] + 1
else:
start_epoch = 0
for epoch in range(start_epoch, 60):
with tqdm(total=batches_per_epoch, dynamic_ncols=True) as bar:
bar.set_description(f'Epoch {epoch}')
bar_loss = 0.0
model.train()
optimizer.zero_grad()
for i, batch in enumerate(train_loader):
batch = batch.cuda()
loss = model(batch)
loss.backward()
optimizer.step()
bar.update(1)
bar_loss = (bar_loss * i + float(loss.detach())) / (i + 1)
bar.set_postfix_str(f'Loss: {bar_loss:.3f}')
if i == batches_per_epoch - 1:
break
with tqdm(total=eval_batches_per_epoch, dynamic_ncols=True) as bar:
bar.set_description(f'Eval epoch {epoch}')
bar_loss = 0.0
model.eval()
with torch.no_grad():
for i, batch in enumerate(test_loader):
batch = batch.cuda()
loss = model(batch)
bar.update(1)
bar_loss = (bar_loss * i + float(loss.detach())) / (i + 1)
bar.set_postfix_str(f'Loss: {bar_loss:.3f}')
if i == eval_batches_per_epoch - 1:
break
torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(),
'optimizer_state_dict': optimizer.state_dict()}, str(save_path))
if __name__ == '__main__':
main()
|
from torch.utils.data import IterableDataset, DataLoader
from torch import nn
from torch.nn import functional as F
from triplet_training_generator import get_train_test_apikeys, training_generator
from pathlib import Path
from transformers import AutoModel
import torch
from tqdm import tqdm
import pandas as pd
MEMMAP_DIRECTORY = Path('/media/data/tokenized_crawl')
BATCHES_PER_EPOCH = 8192
class DataGenerator(IterableDataset):
def __init__(self, memmap_directory, apikey_weighted_df):
super(DataGenerator, self).__init__()
self.data_generator = training_generator(memmap_directory,
apikey_weighted_df)
def __iter__(self):
return self.data_generator
class CrossEncoderModel(torch.nn.Module):
def __init__(self):
super(CrossEncoderModel, self).__init__()
self.bert = AutoModel.from_pretrained('distilbert-base-cased')
self.hidden = nn.Linear(768, 512)
self.out = nn.Linear(512, 1)
def forward(self, tensor_in, sep_token_id=102):
positive_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 1]], dim=1)
positive_pairs[:, 256] = sep_token_id
negative_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 2]], dim=1)
negative_pairs[:, 256] = sep_token_id
positive_labels = torch.ones(len(positive_pairs), dtype=torch.
float32, device=tensor_in.device)
negative_labels = torch.zeros_like(positive_labels)
labels = torch.cat([positive_labels, negative_labels])
inputs = torch.cat([positive_pairs, negative_pairs], dim=0)
assert len(labels) == inputs.shape[0]
out = self.bert(inputs)[0]
out = out.mean(dim=1, keepdims=False)
out = F.gelu(self.hidden(out))
out = torch.squeeze(self.out(out))
loss = F.binary_cross_entropy_with_logits(out, labels)
return loss
def main():
batch_size = 16
batches_per_epoch = 2 ** 19 // batch_size
eval_batches_per_epoch = 2 ** 18 // batch_size
save_path = Path('model.save')
train_weighted_apikeys, test_weighted_apikeys = get_train_test_apikeys(
MEMMAP_DIRECTORY)
debug_weighted_apikeys = pd.concat([train_weighted_apikeys,
test_weighted_apikeys]).query('num_posts > 1000000')
train_dataset = DataGenerator(MEMMAP_DIRECTORY, debug_weighted_apikeys)
train_loader = DataLoader(train_dataset, batch_size=batch_size,
pin_memory=True, num_workers=1)
test_dataset = DataGenerator(MEMMAP_DIRECTORY, debug_weighted_apikeys)
test_loader = DataLoader(test_dataset, batch_size=batch_size,
pin_memory=True, num_workers=1)
model = CrossEncoderModel().cuda()
model_params = model.parameters()
optimizer = torch.optim.Adam(model_params, lr=0.0001)
if save_path.is_file():
print('Loading state...')
checkpoint = torch.load(str(save_path))
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
start_epoch = checkpoint['epoch'] + 1
else:
start_epoch = 0
for epoch in range(start_epoch, 60):
with tqdm(total=batches_per_epoch, dynamic_ncols=True) as bar:
bar.set_description(f'Epoch {epoch}')
bar_loss = 0.0
model.train()
optimizer.zero_grad()
for i, batch in enumerate(train_loader):
batch = batch.cuda()
loss = model(batch)
loss.backward()
optimizer.step()
bar.update(1)
bar_loss = (bar_loss * i + float(loss.detach())) / (i + 1)
bar.set_postfix_str(f'Loss: {bar_loss:.3f}')
if i == batches_per_epoch - 1:
break
with tqdm(total=eval_batches_per_epoch, dynamic_ncols=True) as bar:
bar.set_description(f'Eval epoch {epoch}')
bar_loss = 0.0
model.eval()
with torch.no_grad():
for i, batch in enumerate(test_loader):
batch = batch.cuda()
loss = model(batch)
bar.update(1)
bar_loss = (bar_loss * i + float(loss.detach())) / (i + 1)
bar.set_postfix_str(f'Loss: {bar_loss:.3f}')
if i == eval_batches_per_epoch - 1:
break
torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(),
'optimizer_state_dict': optimizer.state_dict()}, str(save_path))
if __name__ == '__main__':
main()
|
from torch.utils.data import IterableDataset, DataLoader
from torch import nn
from torch.nn import functional as F
from triplet_training_generator import get_train_test_apikeys, training_generator
from pathlib import Path
from transformers import AutoModel
import torch
from tqdm import tqdm
import pandas as pd
MEMMAP_DIRECTORY = Path("/media/data/tokenized_crawl")
BATCHES_PER_EPOCH = 8192
class DataGenerator(IterableDataset):
    """IterableDataset facade over the triplet training generator.

    Wraps the streaming ``training_generator`` so a torch ``DataLoader`` can
    batch it; iteration simply hands back the underlying generator.
    """

    def __init__(self, memmap_directory, apikey_weighted_df):
        super().__init__()
        self.data_generator = training_generator(
            memmap_directory, apikey_weighted_df)

    def __iter__(self):
        return self.data_generator
class CrossEncoderModel(torch.nn.Module):
    """Cross-encoder over (anchor, candidate) post pairs.

    Each input row holds three token sequences of length 256:
    [anchor, positive, negative].  Anchor+positive and anchor+negative are
    concatenated into 512-token pairs, scored jointly by the encoder, and
    trained with binary cross-entropy (positive pair -> 1, negative -> 0).
    forward() returns the scalar loss, not logits.
    """

    def __init__(self):
        super(CrossEncoderModel, self).__init__()
        # Encoder checkpoint must match the tokenizer used to build the
        # memmapped dataset.
        self.bert = AutoModel.from_pretrained('distilbert-base-cased')
        self.hidden = nn.Linear(768, 512)
        self.out = nn.Linear(512, 1)

    def forward(self, tensor_in, sep_token_id=102):
        # tensor_in: (batch, 3, 256) token ids — anchor / positive / negative
        # (the 256-token length is implied by the [:, 256] boundary write below).
        positive_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 1]], dim=1)
        # Overwrite the first token of the second segment with [SEP] (102)
        # so the encoder sees a two-segment pair.
        positive_pairs[:, 256] = sep_token_id
        negative_pairs = torch.cat([tensor_in[:, 0], tensor_in[:, 2]], dim=1)
        negative_pairs[:, 256] = sep_token_id
        positive_labels = torch.ones(len(positive_pairs), dtype=torch.float32, device=tensor_in.device)
        negative_labels = torch.zeros_like(positive_labels)
        labels = torch.cat([positive_labels, negative_labels])
        # Positive and negative pairs are scored in one doubled batch.
        inputs = torch.cat([positive_pairs, negative_pairs], dim=0)
        assert len(labels) == inputs.shape[0]
        out = self.bert(inputs)[0]
        # Mean pooling over the sequence dimension (rather than CLS pooling).
        # NOTE(review): torch's canonical kwarg is `keepdim`; `keepdims`
        # relies on the numpy-compatibility alias — confirm torch version.
        out = out.mean(dim=1, keepdims=False)
        out = F.gelu(self.hidden(out))
        out = torch.squeeze(self.out(out))
        loss = F.binary_cross_entropy_with_logits(out, labels)
        return loss
def main():
    """Train the cross-encoder on triplets streamed from the memmapped crawl.

    Restores model/optimizer state from 'model.save' when present, runs up to
    60 epochs of training + evaluation over the high-volume ("debug") apikeys,
    and checkpoints after every epoch.
    """
    batch_size = 16
    batches_per_epoch = (2 ** 19) // batch_size
    eval_batches_per_epoch = (2 ** 18) // batch_size
    save_path = Path('model.save')
    train_weighted_apikeys, test_weighted_apikeys = get_train_test_apikeys(MEMMAP_DIRECTORY)
    # NOTE(review): train and eval both draw from this combined frame, so the
    # eval loss is measured on training apikeys — the 'debug' name suggests a
    # temporary setup; confirm before trusting eval numbers.
    debug_weighted_apikeys = pd.concat([train_weighted_apikeys, test_weighted_apikeys]).query('num_posts > 1000000')
    train_dataset = DataGenerator(MEMMAP_DIRECTORY, debug_weighted_apikeys)
    train_loader = DataLoader(train_dataset, batch_size=batch_size, pin_memory=True, num_workers=1)
    test_dataset = DataGenerator(MEMMAP_DIRECTORY, debug_weighted_apikeys)
    test_loader = DataLoader(test_dataset, batch_size=batch_size, pin_memory=True, num_workers=1)
    model = CrossEncoderModel().cuda()
    # Diverges or just outputs the same vector for all samples at higher LRs
    model_params = model.parameters()
    optimizer = torch.optim.Adam(model_params, lr=1e-4)
    if save_path.is_file():
        print("Loading state...")
        checkpoint = torch.load(str(save_path))
        model.load_state_dict(checkpoint['model_state_dict'])
        optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
        start_epoch = checkpoint['epoch'] + 1
    else:
        start_epoch = 0
    for epoch in range(start_epoch, 60):
        with tqdm(total=batches_per_epoch, dynamic_ncols=True) as bar:
            bar.set_description(f"Epoch {epoch}")
            bar_loss = 0.
            model.train()
            for i, batch in enumerate(train_loader):
                batch = batch.cuda()
                # BUG FIX: gradients must be cleared every step.  The original
                # called zero_grad() once per epoch (before this loop), so
                # gradients accumulated across every batch of the epoch.
                optimizer.zero_grad()
                loss = model(batch)
                loss.backward()
                optimizer.step()
                bar.update(1)
                bar_loss = ((bar_loss * i) + float(loss.detach())) / (i + 1)  # Rolling mean loss
                bar.set_postfix_str(f"Loss: {bar_loss:.3f}")
                if i == batches_per_epoch - 1:
                    break
        with tqdm(total=eval_batches_per_epoch, dynamic_ncols=True) as bar:
            bar.set_description(f"Eval epoch {epoch}")
            bar_loss = 0.
            model.eval()
            with torch.no_grad():
                for i, batch in enumerate(test_loader):
                    batch = batch.cuda()
                    loss = model(batch)
                    bar.update(1)
                    bar_loss = ((bar_loss * i) + float(loss.detach())) / (i + 1)  # Rolling mean loss
                    bar.set_postfix_str(f"Loss: {bar_loss:.3f}")
                    if i == eval_batches_per_epoch - 1:
                        break
        # Checkpoint after every epoch so an interrupted run can resume.
        torch.save({
            'epoch': epoch,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict()
        }, str(save_path))
if __name__ == '__main__':
main()
|
[
6,
7,
8,
10,
11
] |
2,523 |
e2a50fbd277ab868fbe71f9ff113a68a30b9f893
|
<mask token>
def get_accuracy(logits, targets):
_, predictions = torch.max(logits, dim=-1)
return torch.mean(predictions.eq(targets).float())
def ModelConvMiniImagenet(out_features, hidden_size=84):
return MetaConvModel(3, out_features, hidden_size=hidden_size,
feature_size=5 * 5 * hidden_size)
<mask token>
|
<mask token>
def get_accuracy(logits, targets):
_, predictions = torch.max(logits, dim=-1)
return torch.mean(predictions.eq(targets).float())
def ModelConvMiniImagenet(out_features, hidden_size=84):
return MetaConvModel(3, out_features, hidden_size=hidden_size,
feature_size=5 * 5 * hidden_size)
if __name__ == '__main__':
classes_num = 5
model = ModelConvMiniImagenet(classes_num)
model.load_state_dict(torch.load(
'trained parameters/maml_miniimagenet_5shot_5way.th'))
model.zero_grad()
dataloader = FSDataLoader()
meta_optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
accuracy_l = list()
loss = nn.CrossEntropyLoss()
model.train()
num_of_tasks = 100
epochs = 1
with tqdm(dataloader, total=num_of_tasks) as qbar:
for idx, batch in enumerate(qbar):
model.zero_grad()
train_inputs, train_targets = batch['Train']
test_inputs, test_targets = batch['Test']
for _ in range(epochs):
for task_idx, (train_input, train_target, test_input,
test_target) in enumerate(zip(train_inputs,
train_targets, test_inputs, test_targets)):
outer_loss = torch.tensor(0.0, device='cuda')
accuracy = torch.tensor(0.0, device='cuda')
train_logit = model(train_input)
inner_loss = F.cross_entropy(train_logit, train_target)
params = gradient_update_parameters(model, inner_loss)
test_logit = model(test_input, params=params)
outer_loss += F.cross_entropy(test_logit, test_target)
with torch.no_grad():
accuracy += get_accuracy(test_logit, test_target)
outer_loss.div_(1)
accuracy.div_(1)
outer_loss.backward()
meta_optimizer.step()
accuracy_l.append(accuracy.item())
if idx > num_of_tasks - 1:
break
plt.title('MAML miniobjectnet training (100 tasks)')
plt.xlabel('Tasks (1 epoch)')
plt.ylabel('Accuracy')
plt.plot(accuracy_l)
plt.show()
print(sum(accuracy_l) / len(accuracy_l))
|
<mask token>
os.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'
<mask token>
def get_accuracy(logits, targets):
_, predictions = torch.max(logits, dim=-1)
return torch.mean(predictions.eq(targets).float())
def ModelConvMiniImagenet(out_features, hidden_size=84):
return MetaConvModel(3, out_features, hidden_size=hidden_size,
feature_size=5 * 5 * hidden_size)
if __name__ == '__main__':
classes_num = 5
model = ModelConvMiniImagenet(classes_num)
model.load_state_dict(torch.load(
'trained parameters/maml_miniimagenet_5shot_5way.th'))
model.zero_grad()
dataloader = FSDataLoader()
meta_optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
accuracy_l = list()
loss = nn.CrossEntropyLoss()
model.train()
num_of_tasks = 100
epochs = 1
with tqdm(dataloader, total=num_of_tasks) as qbar:
for idx, batch in enumerate(qbar):
model.zero_grad()
train_inputs, train_targets = batch['Train']
test_inputs, test_targets = batch['Test']
for _ in range(epochs):
for task_idx, (train_input, train_target, test_input,
test_target) in enumerate(zip(train_inputs,
train_targets, test_inputs, test_targets)):
outer_loss = torch.tensor(0.0, device='cuda')
accuracy = torch.tensor(0.0, device='cuda')
train_logit = model(train_input)
inner_loss = F.cross_entropy(train_logit, train_target)
params = gradient_update_parameters(model, inner_loss)
test_logit = model(test_input, params=params)
outer_loss += F.cross_entropy(test_logit, test_target)
with torch.no_grad():
accuracy += get_accuracy(test_logit, test_target)
outer_loss.div_(1)
accuracy.div_(1)
outer_loss.backward()
meta_optimizer.step()
accuracy_l.append(accuracy.item())
if idx > num_of_tasks - 1:
break
plt.title('MAML miniobjectnet training (100 tasks)')
plt.xlabel('Tasks (1 epoch)')
plt.ylabel('Accuracy')
plt.plot(accuracy_l)
plt.show()
print(sum(accuracy_l) / len(accuracy_l))
|
import os
os.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from tqdm import tqdm
import torch.nn.functional as F
from torchmeta.utils.gradient_based import gradient_update_parameters
from libs.models.maml_model import MetaConvModel
from libs.mini_objecta_dataLoader import FSDataLoader
def get_accuracy(logits, targets):
_, predictions = torch.max(logits, dim=-1)
return torch.mean(predictions.eq(targets).float())
def ModelConvMiniImagenet(out_features, hidden_size=84):
return MetaConvModel(3, out_features, hidden_size=hidden_size,
feature_size=5 * 5 * hidden_size)
if __name__ == '__main__':
classes_num = 5
model = ModelConvMiniImagenet(classes_num)
model.load_state_dict(torch.load(
'trained parameters/maml_miniimagenet_5shot_5way.th'))
model.zero_grad()
dataloader = FSDataLoader()
meta_optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
accuracy_l = list()
loss = nn.CrossEntropyLoss()
model.train()
num_of_tasks = 100
epochs = 1
with tqdm(dataloader, total=num_of_tasks) as qbar:
for idx, batch in enumerate(qbar):
model.zero_grad()
train_inputs, train_targets = batch['Train']
test_inputs, test_targets = batch['Test']
for _ in range(epochs):
for task_idx, (train_input, train_target, test_input,
test_target) in enumerate(zip(train_inputs,
train_targets, test_inputs, test_targets)):
outer_loss = torch.tensor(0.0, device='cuda')
accuracy = torch.tensor(0.0, device='cuda')
train_logit = model(train_input)
inner_loss = F.cross_entropy(train_logit, train_target)
params = gradient_update_parameters(model, inner_loss)
test_logit = model(test_input, params=params)
outer_loss += F.cross_entropy(test_logit, test_target)
with torch.no_grad():
accuracy += get_accuracy(test_logit, test_target)
outer_loss.div_(1)
accuracy.div_(1)
outer_loss.backward()
meta_optimizer.step()
accuracy_l.append(accuracy.item())
if idx > num_of_tasks - 1:
break
plt.title('MAML miniobjectnet training (100 tasks)')
plt.xlabel('Tasks (1 epoch)')
plt.ylabel('Accuracy')
plt.plot(accuracy_l)
plt.show()
print(sum(accuracy_l) / len(accuracy_l))
|
# from mini_imagenet_dataloader import MiniImageNetDataLoader
import os
os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from tqdm import tqdm
import torch.nn.functional as F
from torchmeta.utils.gradient_based import gradient_update_parameters
from libs.models.maml_model import MetaConvModel
from libs.mini_objecta_dataLoader import FSDataLoader
def get_accuracy(logits, targets):
    """Fraction of samples whose highest-logit class equals the target.

    Returns a 0-dim float tensor in [0, 1].
    """
    predictions = torch.max(logits, dim=-1).indices
    return (predictions == targets).float().mean()
def ModelConvMiniImagenet(out_features, hidden_size=84):
    """Build the MetaConvModel for miniImageNet: 3-channel (RGB) input,
    *out_features* output classes, and feature_size = 5 * 5 * hidden_size."""
    feature_size = 5 * 5 * hidden_size
    return MetaConvModel(3, out_features, hidden_size=hidden_size,
                         feature_size=feature_size)
if __name__ == "__main__":
classes_num = 5
model = ModelConvMiniImagenet(classes_num)
model.load_state_dict(torch.load('trained parameters/maml_miniimagenet_5shot_5way.th'))
model.zero_grad()
dataloader = FSDataLoader()
meta_optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
accuracy_l = list()
loss = nn.CrossEntropyLoss()
model.train()
num_of_tasks = 100
epochs = 1
with tqdm(dataloader, total=num_of_tasks) as qbar:
for idx, batch in enumerate(qbar):
model.zero_grad()
train_inputs, train_targets = batch['Train']
test_inputs, test_targets = batch['Test']
for _ in range(epochs):
for task_idx, (train_input, train_target, test_input,
test_target) in enumerate(zip(train_inputs, train_targets,
test_inputs, test_targets)):
outer_loss = torch.tensor(0., device='cuda')
accuracy = torch.tensor(0., device='cuda')
train_logit = model(train_input)
inner_loss = F.cross_entropy(train_logit, train_target)
params = gradient_update_parameters(model, inner_loss)
test_logit = model(test_input , params=params)
outer_loss += F.cross_entropy(test_logit, test_target)
with torch.no_grad():
accuracy += get_accuracy(test_logit, test_target)
outer_loss.div_(1)
accuracy.div_(1)
outer_loss.backward()
meta_optimizer.step()
accuracy_l.append(accuracy.item())
if idx > num_of_tasks-1:
break
plt.title('MAML miniobjectnet training (100 tasks)')
plt.xlabel('Tasks (1 epoch)')
plt.ylabel('Accuracy')
plt.plot(accuracy_l)
plt.show()
print(sum(accuracy_l) / len(accuracy_l))
|
[
2,
3,
4,
5,
6
] |
2,524 |
8c364a518ab615803ea99520e90ee1dd24d37a8c
|
<mask token>
|
<mask token>
def load(indices, category='train'):
    """Return (features, labels) at *indices* from the requested split.

    For the train split, indices out of range for either X_train or
    y_train are silently dropped; the test split indexes directly.
    Any other category falls through and returns None.
    """
    if category == 'train':
        limit = min(len(X_train), len(y_train))
        if max(indices) < limit:
            # Fast path: every index is valid for both arrays.
            return X_train[indices], y_train[indices]
        kept = np.array([idx for idx in indices if idx < limit], np.int64)
        return X_train[kept], y_train[kept]
    elif category == 'test':
        return X_test[indices], y_test[indices]
<mask token>
pwd = os.path.dirname(os.path.realpath(__file__))
train_data = np.load(os.path.join(pwd, 'purchase2_train.npy'), allow_pickle
=True)
test_data = np.load(os.path.join(pwd, 'purchase2_test.npy'), allow_pickle=True)
train_data = train_data.reshape((1,))[0]
test_data = test_data.reshape((1,))[0]
X_train = train_data['X'].astype(np.float32)
X_test = test_data['X'].astype(np.float32)
y_train = train_data['y'].astype(np.int64)
y_test = test_data['y'].astype(np.int64)
def load(indices, category='train'):
    """Return (features, labels) at *indices* from the requested split.

    Train-split indices out of range for either X_train or y_train are
    dropped; the test split indexes directly. Other categories fall
    through and return None.
    """
    if category == 'train':
        if max(indices) < len(X_train) and max(indices) < len(y_train):
            # Fast path: every index is in range for both arrays.
            return X_train[indices], y_train[indices]
        else:
            # Keep only indices valid for both features and labels.
            l = np.array([a for a in indices if a < len(X_train) and a <
                len(y_train)], np.int64)
            return X_train[l], y_train[l]
    elif category == 'test':
        return X_test[indices], y_test[indices]
|
import numpy as np
import os
pwd = os.path.dirname(os.path.realpath(__file__))
train_data = np.load(os.path.join(pwd, 'purchase2_train.npy'), allow_pickle
=True)
test_data = np.load(os.path.join(pwd, 'purchase2_test.npy'), allow_pickle=True)
train_data = train_data.reshape((1,))[0]
test_data = test_data.reshape((1,))[0]
X_train = train_data['X'].astype(np.float32)
X_test = test_data['X'].astype(np.float32)
y_train = train_data['y'].astype(np.int64)
y_test = test_data['y'].astype(np.int64)
def load(indices, category='train'):
    """Return (features, labels) at *indices* from the requested split.

    Train-split indices out of range for either X_train or y_train are
    dropped; the test split indexes directly. Other categories fall
    through and return None.
    """
    if category == 'train':
        if max(indices) < len(X_train) and max(indices) < len(y_train):
            # Fast path: every index is in range for both arrays.
            return X_train[indices], y_train[indices]
        else:
            # Keep only indices valid for both features and labels.
            l = np.array([a for a in indices if a < len(X_train) and a <
                len(y_train)], np.int64)
            return X_train[l], y_train[l]
    elif category == 'test':
        return X_test[indices], y_test[indices]
|
import numpy as np
import os
pwd = os.path.dirname(os.path.realpath(__file__))
train_data = np.load(os.path.join(pwd, 'purchase2_train.npy'), allow_pickle=True)
test_data = np.load(os.path.join(pwd, 'purchase2_test.npy'), allow_pickle=True)
train_data = train_data.reshape((1,))[0]
test_data = test_data.reshape((1,))[0]
X_train = train_data['X'].astype(np.float32)
X_test = test_data['X'].astype(np.float32)
y_train = train_data['y'].astype(np.int64)
y_test = test_data['y'].astype(np.int64)
def load(indices, category='train'):
    """Return (features, labels) at *indices* from the requested split.

    Train-split indices out of range for either X_train or y_train are
    dropped; the test split indexes directly. Other categories fall
    through and return None.
    """
    if category == 'train':
        if max(indices) < len(X_train) and max(indices) < len(y_train):
            # Fast path: every index is in range for both arrays.
            return X_train[indices], y_train[indices]
        else:
            # Keep only indices valid for both features and labels.
            l = np.array([a for a in indices if a < len(X_train) and a < len(y_train)],np.int64)
            return X_train[l], y_train[l]
    elif category == 'test':
        return X_test[indices], y_test[indices]
|
[
0,
1,
2,
3,
4
] |
2,525 |
358d4573ff386d6874d5bb5decfe71c71141bf1c
|
<mask token>
def control(q):
    """LQR feedback torque driving state q toward the upright goal [pi/2, 0, 0, 0]."""
    return -k.dot(q - np.array([pi / 2, 0, 0, 0]))
<mask token>
def reward_fn(s, a):
    """Height-style acrobot reward sin(th1) + 2*sin(th1+th2); action unused.

    Never signals termination (done is always False).
    """
    height_term = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
    return height_term, False
def do_rollout(trial_num):
    """Run one episode: random exploratory torques, handing control to the
    LQR controller whenever the gating network is confident (> 0.85).

    Returns (action_hist, state_hist, reward_hist, gate_hist, lqr_engaged).
    Uses module globals: env, net, sig, control, max_torque.
    """
    np.random.seed(trial_num)  # per-trial seed so parallel rollouts are reproducible
    act_hold = 20  # resample the random action every act_hold steps
    hold_count = 0
    obs = env.reset()
    local_lqr = False  # becomes True if the LQR branch fires at least once
    actions = np.random.randn(1) * 3
    local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])
        )
    local_reward_hist = np.zeros((env.num_steps, 1))
    local_gate_hist = np.zeros((env.num_steps, 1))
    local_action_hist = np.zeros((env.num_steps, 1))
    for i in range(env.num_steps):
        obs = np.array(obs, dtype=np.float32)
        # Gate: use LQR when sig(net(obs)) (sigmoid of the classifier
        # output) exceeds the 0.85 confidence threshold.
        if sig(net(obs)) > 0.85:
            actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque
                )
            local_lqr = True
            local_gate_hist[i] = 1
        else:
            local_gate_hist[i] = 0
        # Otherwise hold the current random action for act_hold steps.
        if hold_count == act_hold:
            actions = np.random.randn(1) * 3
            hold_count = 0
        hold_count += 1
        obs, reward, done, _ = env.step(actions)
        local_action_hist[i, :] = np.copy(actions)
        local_state_hist[i, :] = np.copy(obs)
        local_reward_hist[i, :] = np.copy(reward)
    return (local_action_hist, local_state_hist, local_reward_hist,
        local_gate_hist, local_lqr)
<mask token>
|
<mask token>
assert np.linalg.matrix_rank(Ctr) == 4
<mask token>
print(k)
def control(q):
gs = np.array([pi / 2, 0, 0, 0])
return -k.dot(q - gs)
def reward_fn(s, a):
    """Height-style acrobot reward sin(th1) + 2*sin(th1+th2); action unused.

    Signals termination as soon as the reward drops below 2.
    """
    r = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
    return r, r < 2
def do_rollout(args):
    """Roll out the LQR controller from initial state x = (th1, th2, dth1, dth2).

    Returns (reward_hist, last_step_index); the loop stops early when the
    env reports done. Uses module globals: env, control, max_torque.
    """
    x, trial_num = args
    th1, th2, dth1, dth2 = x
    np.random.seed(trial_num)
    # Pre-fill with -1 so unreached steps are distinguishable after an early stop.
    local_reward_hist = np.ones((env.num_steps, 1)) * -1
    obs = env.reset(init_vec=[th1, th2, dth1, dth2])
    for i in range(env.num_steps):
        # Saturate the LQR command at the actuator torque limit.
        actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
        obs, reward, done, _ = env.step(actions)
        local_reward_hist[i, :] = np.copy(reward)
        if done:
            break
    return local_reward_hist, i
<mask token>
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
<mask token>
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i, :] = samples[i, :]
Y[i] = sum(rews) > env.num_steps * 3 - 10
<mask token>
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
<mask token>
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2), int(num_trials))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i + int(num_trials / 2), :] = samples[i, :]
Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5
print(time.time() - start)
<mask token>
plt.close()
plt.plot(loss_hist)
plt.show()
<mask token>
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
<mask token>
print(end - start)
<mask token>
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
<mask token>
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])
<mask token>
print(end - start)
<mask token>
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
return reward, False
def do_rollout(trial_num):
np.random.seed(trial_num)
act_hold = 20
hold_count = 0
obs = env.reset()
local_lqr = False
actions = np.random.randn(1) * 3
local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])
)
local_reward_hist = np.zeros((env.num_steps, 1))
local_gate_hist = np.zeros((env.num_steps, 1))
local_action_hist = np.zeros((env.num_steps, 1))
for i in range(env.num_steps):
obs = np.array(obs, dtype=np.float32)
if sig(net(obs)) > 0.85:
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque
)
local_lqr = True
local_gate_hist[i] = 1
else:
local_gate_hist[i] = 0
if hold_count == act_hold:
actions = np.random.randn(1) * 3
hold_count = 0
hold_count += 1
obs, reward, done, _ = env.step(actions)
local_action_hist[i, :] = np.copy(actions)
local_state_hist[i, :] = np.copy(obs)
local_reward_hist[i, :] = np.copy(reward)
return (local_action_hist, local_state_hist, local_reward_hist,
local_gate_hist, local_lqr)
<mask token>
for i, res in enumerate(pool.imap(do_rollout, range(num_trials))):
acts, obs, rews, gate, lqr_on = res
action_hist[i, :, :] = acts
state_hist[i, :, :] = obs
reward_hist[i, :, :] = rews
gate_hist[i, :, :] = gate
err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0,
0, 0])) ** 2))
if lqr_on:
lqr_list.append(i)
if err_hist[i] < 2:
success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
|
<mask token>
global_start = time.time()
m1 = 1
m2 = 1
l1 = 1
l2 = 2
lc1 = 0.5
lc2 = 1
I1 = 0.083
I2 = 0.33
g = 9.8
dt = 0.01
max_torque = 25
integrator = euler
Q = np.identity(4)
Q[0, 0] = 1
Q[1, 1] = 1
Q[2, 2] = 1
Q[3, 3] = 1
R = np.identity(2) * 0.01
eval_max_t = 10
th1 = pi / 2
th2 = 0
th1d = 0
th2d = 0
TAU = np.array([[0], [1]])
m11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)
) + I1 + I2
m22 = m2 * lc2 ** 2 + I2
m12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2
M = np.array([[m11, m12], [m12, m22]])
h1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2
) * th2d * th1d
h2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2
H = np.array([[h1], [h2]])
phi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)
phi2 = m2 * lc2 * g * cos(th1 + th2)
PHI = np.array([[phi1], [phi2]])
Bl = np.linalg.inv(M) @ TAU
Blin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])
DPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-
m2 * lc2 * g, -m2 * lc2 * g]])
Al = -np.linalg.inv(M) @ DPHI
Alin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [
Al[1, 0], Al[1, 1], 0, 0]])
Ctr = ctrb(Alin, Blin)
assert np.linalg.matrix_rank(Ctr) == 4
K, S, E = lqr(Alin, Blin, Q, R)
k = np.array(K[1, :])
print(k)
def control(q):
    """LQR feedback torque for state q about the upright goal [pi/2, 0, 0, 0]."""
    gs = np.array([pi / 2, 0, 0, 0])  # goal state
    return -k.dot(q - gs)
def reward_fn(s, a):
    """Height-style reward sin(th1) + 2*sin(th1+th2) (action unused);
    terminates once the reward falls below 2."""
    reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
    done = reward < 2
    return reward, done
def do_rollout(args):
    """Roll out the LQR controller from initial state x = (th1, th2, dth1, dth2).

    Returns (reward_hist, last_step_index); stops early when the env reports
    done. Uses module globals: env, control, max_torque.
    """
    x, trial_num = args
    th1, th2, dth1, dth2 = x
    np.random.seed(trial_num)
    # Pre-fill with -1 so unreached steps are distinguishable after an early stop.
    local_reward_hist = np.ones((env.num_steps, 1)) * -1
    obs = env.reset(init_vec=[th1, th2, dth1, dth2])
    for i in range(env.num_steps):
        # Saturate the LQR command at the actuator torque limit.
        actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
        obs, reward, done, _ = env.step(actions)
        local_reward_hist[i, :] = np.copy(reward)
        if done:
            break
    return local_reward_hist, i
start = time.time()
config = {'init_state': [0, 0, 0, 0], 'max_torque': max_torque,
'init_state_weights': [0, 0, 0, 0], 'max_t': 2.5, 'dt': dt, 'm2': m2,
'm1': m1, 'l1': l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2,
'integrator': integrator, 'reward_fn': reward_fn, 'act_hold': 1}
env = gym.make('su_acrobot-v0', **config)
num_trials = 200000
reward_hist = np.zeros((num_trials, env.num_steps, 1))
X = np.zeros((num_trials, 4), dtype=np.float32)
Y = np.zeros((num_trials, 1), dtype=np.float32)
th1_min = pi / 2 - 0.5
th1_max = pi / 2 + 0.5
th2_min = -1
th2_max = 1
th1dot_min = -5
th1dot_max = 5
th2dot_min = -10
th2dot_max = 10
samples = np.random.random_sample((int(num_trials / 2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
pool = Pool()
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i, :] = samples[i, :]
Y[i] = sum(rews) > env.num_steps * 3 - 10
th1_min = 0
th1_max = 2 * pi
th2_min = -pi
th2_max = pi
th1dot_min = -10
th1dot_max = 10
th2dot_min = -30
th2dot_max = 30
samples = np.random.random_sample((int(num_trials / 2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2), int(num_trials))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i + int(num_trials / 2), :] = samples[i, :]
Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5
print(time.time() - start)
<mask token>
net = MLP(4, 1, 2, 32)
Y0 = np.ones((num_trials, 1), dtype=np.float32)
w = 0.01
class_weight = torch.tensor(Y.shape[0] / sum(Y) * w, dtype=torch.float32)
loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.
BCEWithLogitsLoss(pos_weight=class_weight))
plt.close()
plt.plot(loss_hist)
plt.show()
n_thdot = 1
n_th = 1000
th1_vals = np.linspace(0, 2 * pi, n_th)
th2_vals = np.linspace(-pi, pi, n_th)
th1dot_vals = np.linspace(-10, 10, n_th)
th2dot_vals = np.linspace(-30, 30, n_th)
sig = torch.nn.Sigmoid()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
<mask token>
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
x, y = np.meshgrid(th1_vals, th2_vals)
z = preds
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
x, y = np.meshgrid(th1dot_vals, th2dot_vals)
z = preds
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
    """Height-style reward sin(th1) + 2*sin(th1+th2) (action unused);
    never signals termination."""
    reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
    return reward, False
def do_rollout(trial_num):
    """Run one episode: random exploratory torques, handing control to the
    LQR controller whenever the gating network is confident (> 0.85).

    Returns (action_hist, state_hist, reward_hist, gate_hist, lqr_engaged).
    Uses module globals: env, net, sig, control, max_torque.
    """
    np.random.seed(trial_num)  # per-trial seed so parallel rollouts are reproducible
    act_hold = 20  # resample the random action every act_hold steps
    hold_count = 0
    obs = env.reset()
    local_lqr = False  # becomes True if the LQR branch fires at least once
    actions = np.random.randn(1) * 3
    local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])
        )
    local_reward_hist = np.zeros((env.num_steps, 1))
    local_gate_hist = np.zeros((env.num_steps, 1))
    local_action_hist = np.zeros((env.num_steps, 1))
    for i in range(env.num_steps):
        obs = np.array(obs, dtype=np.float32)
        # Gate: use LQR when the sigmoid of the classifier output exceeds 0.85.
        if sig(net(obs)) > 0.85:
            actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque
                )
            local_lqr = True
            local_gate_hist[i] = 1
        else:
            local_gate_hist[i] = 0
        # Otherwise hold the current random action for act_hold steps.
        if hold_count == act_hold:
            actions = np.random.randn(1) * 3
            hold_count = 0
        hold_count += 1
        obs, reward, done, _ = env.step(actions)
        local_action_hist[i, :] = np.copy(actions)
        local_state_hist[i, :] = np.copy(obs)
        local_reward_hist[i, :] = np.copy(reward)
    return (local_action_hist, local_state_hist, local_reward_hist,
        local_gate_hist, local_lqr)
config = {'init_state': [-pi / 2, 0, 0, 0], 'max_torque': max_torque,
'init_state_weights': [1, 1, 5, 5], 'dt': dt, 'm2': m2, 'm1': m1, 'l1':
l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2, 'integrator':
integrator, 'reward_fn': reward_fn, 'act_hold': 1, 'max_t': 10}
env = gym.make('su_acrobot-v0', **config)
num_trials = 1000
action_hist = np.zeros((num_trials, env.num_steps, 1))
state_hist = np.zeros((num_trials, env.num_steps, env.observation_space.
shape[0]))
reward_hist = np.zeros((num_trials, env.num_steps, 1))
gate_hist = np.zeros((num_trials, env.num_steps, 1))
err_hist = np.zeros((num_trials, 1))
lqr_list = []
success_list = []
act_hold = 20
hold_count = 0
obs = env.reset()
start = time.time()
pool = Pool()
for i, res in enumerate(pool.imap(do_rollout, range(num_trials))):
acts, obs, rews, gate, lqr_on = res
action_hist[i, :, :] = acts
state_hist[i, :, :] = obs
reward_hist[i, :, :] = rews
gate_hist[i, :, :] = gate
err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0,
0, 0])) ** 2))
if lqr_on:
lqr_list.append(i)
if err_hist[i] < 2:
success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
|
import numpy as np
from numpy import sin, cos, pi
import gym
import seagul.envs
from seagul.integration import rk4, euler
from control import lqr, ctrb
from torch.multiprocessing import Pool
import matplotlib.pyplot as plt
import matplotlib
import time
global_start = time.time()
m1 = 1
m2 = 1
l1 = 1
l2 = 2
lc1 = 0.5
lc2 = 1
I1 = 0.083
I2 = 0.33
g = 9.8
dt = 0.01
max_torque = 25
integrator = euler
Q = np.identity(4)
Q[0, 0] = 1
Q[1, 1] = 1
Q[2, 2] = 1
Q[3, 3] = 1
R = np.identity(2) * 0.01
eval_max_t = 10
th1 = pi / 2
th2 = 0
th1d = 0
th2d = 0
TAU = np.array([[0], [1]])
m11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)
) + I1 + I2
m22 = m2 * lc2 ** 2 + I2
m12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2
M = np.array([[m11, m12], [m12, m22]])
h1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2
) * th2d * th1d
h2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2
H = np.array([[h1], [h2]])
phi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)
phi2 = m2 * lc2 * g * cos(th1 + th2)
PHI = np.array([[phi1], [phi2]])
Bl = np.linalg.inv(M) @ TAU
Blin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])
DPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-
m2 * lc2 * g, -m2 * lc2 * g]])
Al = -np.linalg.inv(M) @ DPHI
Alin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [
Al[1, 0], Al[1, 1], 0, 0]])
Ctr = ctrb(Alin, Blin)
assert np.linalg.matrix_rank(Ctr) == 4
K, S, E = lqr(Alin, Blin, Q, R)
k = np.array(K[1, :])
print(k)
def control(q):
gs = np.array([pi / 2, 0, 0, 0])
return -k.dot(q - gs)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
done = reward < 2
return reward, done
def do_rollout(args):
x, trial_num = args
th1, th2, dth1, dth2 = x
np.random.seed(trial_num)
local_reward_hist = np.ones((env.num_steps, 1)) * -1
obs = env.reset(init_vec=[th1, th2, dth1, dth2])
for i in range(env.num_steps):
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
obs, reward, done, _ = env.step(actions)
local_reward_hist[i, :] = np.copy(reward)
if done:
break
return local_reward_hist, i
start = time.time()
config = {'init_state': [0, 0, 0, 0], 'max_torque': max_torque,
'init_state_weights': [0, 0, 0, 0], 'max_t': 2.5, 'dt': dt, 'm2': m2,
'm1': m1, 'l1': l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2,
'integrator': integrator, 'reward_fn': reward_fn, 'act_hold': 1}
env = gym.make('su_acrobot-v0', **config)
num_trials = 200000
reward_hist = np.zeros((num_trials, env.num_steps, 1))
X = np.zeros((num_trials, 4), dtype=np.float32)
Y = np.zeros((num_trials, 1), dtype=np.float32)
th1_min = pi / 2 - 0.5
th1_max = pi / 2 + 0.5
th2_min = -1
th2_max = 1
th1dot_min = -5
th1dot_max = 5
th2dot_min = -10
th2dot_max = 10
samples = np.random.random_sample((int(num_trials / 2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
pool = Pool()
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i, :] = samples[i, :]
Y[i] = sum(rews) > env.num_steps * 3 - 10
th1_min = 0
th1_max = 2 * pi
th2_min = -pi
th2_max = pi
th1dot_min = -10
th1dot_max = 10
th2dot_min = -30
th2dot_max = 30
samples = np.random.random_sample((int(num_trials / 2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min -
th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(
num_trials / 2), int(num_trials))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i + int(num_trials / 2), :] = samples[i, :]
Y[i + int(num_trials / 2)] = sum(rews) > env.num_steps * 3 - 5
print(time.time() - start)
from seagul.nn import MLP, fit_model
import torch
net = MLP(4, 1, 2, 32)
Y0 = np.ones((num_trials, 1), dtype=np.float32)
w = 0.01
class_weight = torch.tensor(Y.shape[0] / sum(Y) * w, dtype=torch.float32)
loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.
BCEWithLogitsLoss(pos_weight=class_weight))
plt.close()
plt.plot(loss_hist)
plt.show()
n_thdot = 1
n_th = 1000
th1_vals = np.linspace(0, 2 * pi, n_th)
th2_vals = np.linspace(-pi, pi, n_th)
th1dot_vals = np.linspace(-10, 10, n_th)
th2dot_vals = np.linspace(-30, 30, n_th)
sig = torch.nn.Sigmoid()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
from itertools import product
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
x, y = np.meshgrid(th1_vals, th2_vals)
z = preds
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([pi / 2, 0, th1dot_vals[i], th2dot_vals[j]])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
x, y = np.meshgrid(th1dot_vals, th2dot_vals)
z = preds
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
return reward, False
def do_rollout(trial_num):
np.random.seed(trial_num)
act_hold = 20
hold_count = 0
obs = env.reset()
local_lqr = False
actions = np.random.randn(1) * 3
local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0])
)
local_reward_hist = np.zeros((env.num_steps, 1))
local_gate_hist = np.zeros((env.num_steps, 1))
local_action_hist = np.zeros((env.num_steps, 1))
for i in range(env.num_steps):
obs = np.array(obs, dtype=np.float32)
if sig(net(obs)) > 0.85:
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque
)
local_lqr = True
local_gate_hist[i] = 1
else:
local_gate_hist[i] = 0
if hold_count == act_hold:
actions = np.random.randn(1) * 3
hold_count = 0
hold_count += 1
obs, reward, done, _ = env.step(actions)
local_action_hist[i, :] = np.copy(actions)
local_state_hist[i, :] = np.copy(obs)
local_reward_hist[i, :] = np.copy(reward)
return (local_action_hist, local_state_hist, local_reward_hist,
local_gate_hist, local_lqr)
config = {'init_state': [-pi / 2, 0, 0, 0], 'max_torque': max_torque,
'init_state_weights': [1, 1, 5, 5], 'dt': dt, 'm2': m2, 'm1': m1, 'l1':
l1, 'lc1': lc1, 'lc2': lc2, 'i1': I1, 'i2': I2, 'integrator':
integrator, 'reward_fn': reward_fn, 'act_hold': 1, 'max_t': 10}
env = gym.make('su_acrobot-v0', **config)
num_trials = 1000
action_hist = np.zeros((num_trials, env.num_steps, 1))
state_hist = np.zeros((num_trials, env.num_steps, env.observation_space.
shape[0]))
reward_hist = np.zeros((num_trials, env.num_steps, 1))
gate_hist = np.zeros((num_trials, env.num_steps, 1))
err_hist = np.zeros((num_trials, 1))
lqr_list = []
success_list = []
act_hold = 20
hold_count = 0
obs = env.reset()
start = time.time()
pool = Pool()
for i, res in enumerate(pool.imap(do_rollout, range(num_trials))):
acts, obs, rews, gate, lqr_on = res
action_hist[i, :, :] = acts
state_hist[i, :, :] = obs
reward_hist[i, :, :] = rews
gate_hist[i, :, :] = gate
err_hist[i] = np.sqrt(sum((state_hist[i, -1, :] - np.array([pi / 2, 0,
0, 0])) ** 2))
if lqr_on:
lqr_list.append(i)
if err_hist[i] < 2:
success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
|
# %%
import numpy as np
from numpy import sin, cos, pi
import gym
import seagul.envs
from seagul.integration import rk4,euler
from control import lqr, ctrb
from torch.multiprocessing import Pool
import matplotlib.pyplot as plt
import matplotlib
#matplotlib.use('Qt5Agg')
import time
global_start = time.time()
# %%
m1 = 1
m2 = 1
l1 = 1
l2 = 2
lc1 = .5
lc2 = 1
I1 = .083
I2 = .33
g = 9.8
#
# m1 = 1
# m2 = 1
# l1 = 1
# l2 = 1
# lc1 = .5
# lc2 = .5
# I1 = .2
# I2 = 1.0
# g = 9.8
dt = .01
max_torque = 25
integrator = euler
Q = np.identity(4)
Q[0, 0] = 1
Q[1, 1] = 1
Q[2, 2] = 1
Q[3, 3] = 1
#
# Q = np.array([[1000, -500, 0,0],[-500, 1000, 0, 0],[0, 0, 1000, -500],[0,0,-500,1000]])
R = np.identity(2) * .01
eval_max_t = 10
th1 = pi / 2
th2 = 0
th1d = 0
th2d = 0
TAU = np.array([[0], [1]])
m11 = m1 * lc1 ** 2 + m2 * (l1 ** 2 + lc2 ** 2 + 2 * l1 * lc2 * cos(th2)) + I1 + I2
m22 = m2 * lc2 ** 2 + I2
m12 = m2 * (lc2 ** 2 + l1 * lc2 * cos(th2)) + I2
M = np.array([[m11, m12], [m12, m22]])
h1 = -m2 * l1 * lc2 * sin(th2) * th2d ** 2 - 2 * m2 * l1 * lc2 * sin(th2) * th2d * th1d
h2 = m2 * l1 * lc2 * sin(th2) * th1d ** 2
H = np.array([[h1], [h2]])
phi1 = (m1 * lc1 + m2 * l1) * g * cos(th1) + m2 * lc2 * g * cos(th1 + th2)
phi2 = m2 * lc2 * g * cos(th1 + th2)
PHI = np.array([[phi1], [phi2]])
Bl = np.linalg.inv(M) @ TAU
Blin = np.array([[0, 0], [0, 0], [0, Bl[0].item()], [0, Bl[1].item()]])
DPHI = np.array([[-g * (m1 * lc1 + m2 * l1 + m2 * lc2), -m2 * lc2 * g], [-m2 * lc2 * g, -m2 * lc2 * g]])
Al = -np.linalg.inv(M) @ DPHI
Alin = np.array([[0, 0, 1, 0], [0, 0, 0, 1], [Al[0, 0], Al[0, 1], 0, 0], [Al[1, 0], Al[1, 1], 0, 0]])
Ctr = ctrb(Alin, Blin)
assert np.linalg.matrix_rank(Ctr) == 4
K, S, E = lqr(Alin, Blin, Q, R)
k = np.array(K[1, :])
print(k)
def control(q):
gs = np.array([pi / 2, 0, 0, 0])
return -k.dot(q - gs)
def reward_fn(s, a):
reward = np.sin(s[0]) + 2 * np.sin(s[0] + s[1])
done = reward < 2
return reward, done
def do_rollout(args):
x, trial_num = args
th1, th2, dth1, dth2 = x
np.random.seed(trial_num)
local_reward_hist = np.ones((env.num_steps, 1)) * -1
obs = env.reset(init_vec=[th1, th2, dth1, dth2])
for i in range(env.num_steps):
actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
obs, reward, done, _ = env.step(actions)
local_reward_hist[i, :] = np.copy(reward)
if done:
break
return local_reward_hist, i
# %%b
start = time.time()
config = {"init_state": [0, 0, 0, 0],
"max_torque": max_torque,
"init_state_weights": [0, 0, 0, 0],
"max_t" : 2.5,
"dt": dt,
"m2": m2,
"m1": m1,
"l1": l1,
"lc1": lc1,
"lc2": lc2,
"i1": I1,
"i2": I2,
"integrator" : integrator,
"reward_fn": reward_fn,
"act_hold": 1
}
env = gym.make('su_acrobot-v0', **config)
num_trials = 200000
reward_hist = np.zeros((num_trials, env.num_steps, 1))
X = np.zeros((num_trials, 4), dtype=np.float32)
Y = np.zeros((num_trials, 1), dtype=np.float32)
th1_min = pi / 2 - .5
th1_max = pi / 2 + .5
th2_min = -1
th2_max = 1
th1dot_min = -5
th1dot_max = 5
th2dot_min = -10
th2dot_max = 10
samples = np.random.random_sample((int(num_trials/2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
pool = Pool() # defaults to number of available CPU's
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i, :] = samples[i, :]
Y[i] = sum(rews) > env.num_steps*3 - 10
th1_min = 0
th1_max = 2*pi
th2_min = -pi
th2_max = pi
th1dot_min = -10
th1dot_max = 10
th2dot_min = -30
th2dot_max = 30
samples = np.random.random_sample((int(num_trials/2), 4))
samples *= np.array([th1_min - th1_max, th2_min - th2_max, th1dot_min - th1dot_max, th2dot_min - th2dot_max])
samples += np.array([th1_max, th2_max, th1dot_max, th2dot_max])
total_steps = 0
for i, res in enumerate(pool.imap(do_rollout, zip(samples, range(int(num_trials/2), int(num_trials))))):
rews, steps = res
reward_hist[i, :, :] = rews
total_steps += steps
X[i+int(num_trials/2), :] = samples[i, :]
Y[i+int(num_trials/2)] = sum(rews) > env.num_steps*3 - 5
print(time.time() - start)
# %%
from seagul.nn import MLP, fit_model
import torch
net = MLP(4, 1, 2, 32) # output_activation=torch.nn.Softmax)
Y0 = np.ones((num_trials, 1), dtype=np.float32)
w = 1e-2
class_weight = torch.tensor(Y.shape[0]/sum(Y)*w, dtype=torch.float32)
loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))
#loss_hist = fit_model(net, X, Y, 50, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss())
# loss_hist = fit_model(net, X, Y, 100, batch_size=2048)
# loss_hist = fit_model(net, X, Y0, 5, batch_size=2048, loss_fn=torch.nn.BCEWithLogitsLoss(pos_weight=class_weight))
plt.close()
plt.plot(loss_hist)
plt.show()
# %%
n_thdot = 1
n_th = 1000
th1_vals = np.linspace(0, 2*pi, n_th)
th2_vals = np.linspace(-pi, pi, n_th)
th1dot_vals = np.linspace(-10, 10, n_th)
th2dot_vals = np.linspace(-30, 30, n_th)
sig = torch.nn.Sigmoid()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
from itertools import product
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([th1_vals[i], th2_vals[j], 0, 0])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
# generate 2 2d grids for the x & y bounds
x, y = np.meshgrid(th1_vals, th2_vals)
z = preds
# x and y are bounds, so z should be the value *inside* those bounds.
# Therefore, remove the last value from the z array.
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('Theta')
ax.set_xlabel('Th1')
ax.set_ylabel('Th2')
# set the limits of the plot to the limits of the data
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
coords = np.zeros((n_th, n_th, 4), dtype=np.float32)
start = time.time()
for i, j in product(range(n_th), range(n_th)):
coords[j, i, :] = np.array([pi/2, 0, th1dot_vals[i], th2dot_vals[j]])
preds = sig(net(coords.reshape(-1, 4)).reshape(n_th, n_th).detach())
end = time.time()
print(end - start)
fig, ax = plt.subplots(n_thdot, n_thdot, figsize=(8, 8))
# generate 2 2d grids for the x & y bounds
x, y = np.meshgrid(th1dot_vals, th2dot_vals)
z = preds
# x and y are bounds, so z should be the value *inside* those bounds.
# Therefore, remove the last value from the z array.
z = z[:-1, :-1]
z_min, z_max = 0, np.abs(z).max()
c = ax.pcolormesh(x, y, z, cmap='RdBu', vmin=z_min, vmax=z_max)
ax.set_title('DTheta')
ax.set_xlabel('dth1')
ax.set_ylabel('dth2')
# set the limits of the plot to the limits of the data
ax.axis([x.min(), x.max(), y.min(), y.max()])
fig.colorbar(c, ax=ax)
plt.show()
# %%
torch.set_default_dtype(torch.float32)
def reward_fn(s, a):
    """Height-style reward for the acrobot state *s*.

    The action *a* is ignored, and the returned done flag is always False
    so the episode never terminates early.
    """
    th1, th2 = s[0], s[1]
    # Reward grows as both links point upward: sin(th1) + 2*sin(th1 + th2).
    return np.sin(th1) + 2.0 * np.sin(th1 + th2), False
def do_rollout(trial_num):
    """Run one episode in the global ``env``, switching between random
    exploration and a clipped control law gated by the classifier ``net``.

    Relies on module-level globals defined elsewhere in this file: ``env``,
    ``net``, ``sig`` (a torch Sigmoid), ``control`` and ``max_torque``.

    :param trial_num: trial index; also seeds NumPy's RNG so each rollout
        is reproducible across parallel workers.
    :return: ``(action_hist, state_hist, reward_hist, gate_hist, lqr_on)``
        -- four per-step arrays plus a bool that is True if the gated
        controller branch was ever taken during the episode.
    """
    # Seed per trial so pool workers get independent but repeatable noise.
    np.random.seed(trial_num)
    act_hold = 20  # steps to hold a random action before resampling
    hold_count = 0
    obs = env.reset()
    local_lqr = False
    actions = np.random.randn(1) * 3
    local_state_hist = np.zeros((env.num_steps, env.observation_space.shape[0]))
    local_reward_hist = np.zeros((env.num_steps, 1))
    local_gate_hist = np.zeros((env.num_steps, 1))
    local_action_hist = np.zeros((env.num_steps, 1))
    for i in range(env.num_steps):
        obs = np.array(obs, dtype=np.float32)
        # Gate: when the classifier is confident (> 0.85), apply the
        # controller (presumably LQR, per the ``lqr`` naming -- confirm),
        # clipped to the torque limit.
        if sig(net(obs)) > .85:
            actions = np.clip(np.asarray(control(obs)), -max_torque, max_torque)
            local_lqr = True
            local_gate_hist[i] = 1
        else:
            local_gate_hist[i] = 0
            # Otherwise explore: hold each random action for act_hold steps.
            if hold_count == act_hold:
                actions = np.random.randn(1) * 3
                hold_count = 0
            hold_count += 1
        obs, reward, done, _ = env.step(actions)
        local_action_hist[i, :] = np.copy(actions)
        local_state_hist[i, :] = np.copy(obs)
        local_reward_hist[i, :] = np.copy(reward)
    return local_action_hist, local_state_hist, local_reward_hist, local_gate_hist, local_lqr
config = {"init_state": [-pi / 2, 0, 0, 0],
"max_torque": max_torque,
"init_state_weights": [1, 1, 5, 5],
"dt": dt,
"m2": m2,
"m1": m1,
"l1": l1,
"lc1": lc1,
"lc2": lc2,
"i1": I1,
"i2": I2,
"integrator" : integrator,
"reward_fn": reward_fn,
"act_hold": 1,
"max_t" : 10
}
env = gym.make('su_acrobot-v0', **config)
num_trials = 1000
action_hist = np.zeros((num_trials, env.num_steps, 1))
state_hist = np.zeros((num_trials, env.num_steps, env.observation_space.shape[0]))
reward_hist = np.zeros((num_trials, env.num_steps, 1))
gate_hist = np.zeros((num_trials, env.num_steps, 1))
err_hist = np.zeros((num_trials, 1))
lqr_list = []
success_list = []
act_hold = 20
hold_count = 0
obs = env.reset()
start = time.time()
pool = Pool() # defaults to number of available CPU's
for i, res in enumerate(pool.imap(do_rollout,range(num_trials))):
acts, obs, rews, gate, lqr_on = res
action_hist[i, :, :] = acts
state_hist[i, :, :] = obs
reward_hist[i, :, :] = rews
gate_hist[i, :, :] = gate
err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))
if lqr_on:
lqr_list.append(i)
#print(err_hist[i])
#print(reward_hist[i,-1])
if err_hist[i] < 2:
success_list.append(i)
#
# for i in (range(num_trials)):
# res = do_rollout(i)
# acts, obs, rews, gate, lqr_on = res
# action_hist[i, :, :] = acts
# state_hist[i, :, :] = obs
# reward_hist[i, :, :] = rews
# gate_hist[i, :, :] = gate
# err_hist[i] = (np.sqrt(sum(((state_hist[i, -1, :] - np.array([pi / 2, 0, 0, 0])) ** 2))))
# if lqr_on:
# lqr_list.append(i)
# #print(err_hist[i])
# #print(reward_hist[i,-1])
# if err_hist[i] < 2:
# success_list.append(i)
print(len(lqr_list))
print(len(success_list))
print((time.time() - global_start) / 60)
|
[
3,
6,
7,
8,
9
] |
2,526 |
3086f62d4057812fc7fb4e21a18bc7d0ba786865
|
<mask token>
def predictperson():
level = ['not at all', 'in small does', 'in large does']
percenttats = float(input('percentage of time spent playing video games?'))
ffmiles = float(input('frequent flier miles earned per year?'))
icecream = float(input('liters of ice cream consumed per year?'))
data, datalabels = KNN_1.filel2matrix('datingTestSet2.txt')
normMat = KNN_3.autoNorm(data)
test_dataset = array([[percenttats, ffmiles, icecream]])
a = clf.classify0(test_dataset, data, 3, datalabels)
print(level[a[0] - 1])
<mask token>
|
<mask token>
def datingClassTest():
horatio = 0.1
data, datalabels = KNN_1.filel2matrix('datingTestSet2.txt')
normMat = KNN_3.autoNorm(data)
ml = normMat.shape[0]
numTestset = int(ml * horatio)
errorcount = 0
a = clf.classify0(normMat[0:numTestset, :], normMat[numTestset:ml, :],
3, datalabels[numTestset:ml])
for i in range(len(a)):
if a[i] != datalabels[i]:
errorcount += 1
c = errorcount / 100
return c
def predictperson():
level = ['not at all', 'in small does', 'in large does']
percenttats = float(input('percentage of time spent playing video games?'))
ffmiles = float(input('frequent flier miles earned per year?'))
icecream = float(input('liters of ice cream consumed per year?'))
data, datalabels = KNN_1.filel2matrix('datingTestSet2.txt')
normMat = KNN_3.autoNorm(data)
test_dataset = array([[percenttats, ffmiles, icecream]])
a = clf.classify0(test_dataset, data, 3, datalabels)
print(level[a[0] - 1])
<mask token>
|
<mask token>
def datingClassTest():
horatio = 0.1
data, datalabels = KNN_1.filel2matrix('datingTestSet2.txt')
normMat = KNN_3.autoNorm(data)
ml = normMat.shape[0]
numTestset = int(ml * horatio)
errorcount = 0
a = clf.classify0(normMat[0:numTestset, :], normMat[numTestset:ml, :],
3, datalabels[numTestset:ml])
for i in range(len(a)):
if a[i] != datalabels[i]:
errorcount += 1
c = errorcount / 100
return c
def predictperson():
level = ['not at all', 'in small does', 'in large does']
percenttats = float(input('percentage of time spent playing video games?'))
ffmiles = float(input('frequent flier miles earned per year?'))
icecream = float(input('liters of ice cream consumed per year?'))
data, datalabels = KNN_1.filel2matrix('datingTestSet2.txt')
normMat = KNN_3.autoNorm(data)
test_dataset = array([[percenttats, ffmiles, icecream]])
a = clf.classify0(test_dataset, data, 3, datalabels)
print(level[a[0] - 1])
predictperson()
|
from numpy import *
import KNN_1
import KNN_3
import KNN_suanfa as clf
def datingClassTest():
horatio = 0.1
data, datalabels = KNN_1.filel2matrix('datingTestSet2.txt')
normMat = KNN_3.autoNorm(data)
ml = normMat.shape[0]
numTestset = int(ml * horatio)
errorcount = 0
a = clf.classify0(normMat[0:numTestset, :], normMat[numTestset:ml, :],
3, datalabels[numTestset:ml])
for i in range(len(a)):
if a[i] != datalabels[i]:
errorcount += 1
c = errorcount / 100
return c
def predictperson():
level = ['not at all', 'in small does', 'in large does']
percenttats = float(input('percentage of time spent playing video games?'))
ffmiles = float(input('frequent flier miles earned per year?'))
icecream = float(input('liters of ice cream consumed per year?'))
data, datalabels = KNN_1.filel2matrix('datingTestSet2.txt')
normMat = KNN_3.autoNorm(data)
test_dataset = array([[percenttats, ffmiles, icecream]])
a = clf.classify0(test_dataset, data, 3, datalabels)
print(level[a[0] - 1])
predictperson()
|
from numpy import *
import KNN_1
import KNN_3
import KNN_suanfa as clf
def datingClassTest():
    """Evaluate the kNN dating classifier with a hold-out split.

    The first ``horatio`` fraction of rows is the test set and the rest is
    training data.  Returns the error rate (misclassified fraction of the
    test set) as a float.
    """
    horatio = 0.1  # fraction of samples held out for testing
    data, datalabels = KNN_1.filel2matrix("datingTestSet2.txt")
    normMat = KNN_3.autoNorm(data)
    ml = normMat.shape[0]
    numTestset = int(ml * horatio)
    errorcount = 0
    # Classify the held-out rows against the remaining (training) rows, k=3.
    a = clf.classify0(normMat[0:numTestset, :], normMat[numTestset:ml, :],
                      3, datalabels[numTestset:ml])
    for i in range(len(a)):
        if a[i] != datalabels[i]:
            errorcount += 1
    # FIX: divide by the actual test-set size instead of the hard-coded 100;
    # the two only coincide when the file holds exactly 1000 samples.
    return errorcount / numTestset
def predictperson():
    """Interactively classify one person with the kNN dating model (k=3).

    Prompts on stdin for three numeric features, runs the classifier and
    prints the predicted appeal label; returns None.
    """
    # Labels indexed by predicted class id 1..3 ("does" sic, kept verbatim).
    level = ['not at all','in small does','in large does']
    percenttats = float(input("percentage of time spent playing video games?"))
    ffmiles = float(input("frequent flier miles earned per year?"))
    icecream = float(input("liters of ice cream consumed per year?"))
    data, datalabels = KNN_1.filel2matrix("datingTestSet2.txt")
    normMat = KNN_3.autoNorm(data)
    # NOTE(review): normMat is computed but never used -- the raw input is
    # classified against the unnormalized matrix, unlike datingClassTest
    # which works in normalized space.  Confirm whether the query should be
    # scaled with the same ranges before calling classify0.
    test_dataset = array([[percenttats,ffmiles,icecream]])
    a = clf.classify0(test_dataset,data,3,datalabels)
    print(level[a[0]-1])
predictperson()
|
[
1,
2,
3,
4,
5
] |
2,527 |
f928eb34155046107c99db8ded11747d5960c767
|
<mask token>
class ProgressBar:
<mask token>
class ProgressBar1:
def __init__(self, width=50):
self.pointer = 0
self.width = width
def __call__(self, x):
self.pointer = int(self.width * (x / 100.0))
return '|' + '#' * self.pointer + '-' * (self.width - self.pointer
) + '| %d %% done' % int(x)
class ProgressBar2:
def __init__(self, width=50):
self.pointer = 0
self.width = width
def __call__(self, x):
self.pointer = x
return '|' + '#' * self.pointer + '-' * (100 - self.pointer
) + '| %d %% done' % int(x)
@staticmethod
def run():
ProgressBar.progress_test()
<mask token>
|
<mask token>
class ProgressBar:
@staticmethod
def progress_test():
bar_length = 100
for percent in range(0, 101):
hashes = '#' * int(percent / 100.0 * bar_length)
spaces = ' ' * (bar_length - len(hashes))
sys.stdout.write('\rPercent: [%s] %d%%' % (hashes + spaces,
percent))
sys.stdout.flush()
time.sleep(0.05)
class ProgressBar1:
def __init__(self, width=50):
self.pointer = 0
self.width = width
def __call__(self, x):
self.pointer = int(self.width * (x / 100.0))
return '|' + '#' * self.pointer + '-' * (self.width - self.pointer
) + '| %d %% done' % int(x)
class ProgressBar2:
def __init__(self, width=50):
self.pointer = 0
self.width = width
def __call__(self, x):
self.pointer = x
return '|' + '#' * self.pointer + '-' * (100 - self.pointer
) + '| %d %% done' % int(x)
@staticmethod
def run():
ProgressBar.progress_test()
<mask token>
|
<mask token>
class ProgressBar:
@staticmethod
def progress_test():
bar_length = 100
for percent in range(0, 101):
hashes = '#' * int(percent / 100.0 * bar_length)
spaces = ' ' * (bar_length - len(hashes))
sys.stdout.write('\rPercent: [%s] %d%%' % (hashes + spaces,
percent))
sys.stdout.flush()
time.sleep(0.05)
class ProgressBar1:
def __init__(self, width=50):
self.pointer = 0
self.width = width
def __call__(self, x):
self.pointer = int(self.width * (x / 100.0))
return '|' + '#' * self.pointer + '-' * (self.width - self.pointer
) + '| %d %% done' % int(x)
class ProgressBar2:
def __init__(self, width=50):
self.pointer = 0
self.width = width
def __call__(self, x):
self.pointer = x
return '|' + '#' * self.pointer + '-' * (100 - self.pointer
) + '| %d %% done' % int(x)
@staticmethod
def run():
ProgressBar.progress_test()
if __name__ == '__main__':
ProgressBar.run()
|
import sys
import time
class ProgressBar:
@staticmethod
def progress_test():
bar_length = 100
for percent in range(0, 101):
hashes = '#' * int(percent / 100.0 * bar_length)
spaces = ' ' * (bar_length - len(hashes))
sys.stdout.write('\rPercent: [%s] %d%%' % (hashes + spaces,
percent))
sys.stdout.flush()
time.sleep(0.05)
class ProgressBar1:
def __init__(self, width=50):
self.pointer = 0
self.width = width
def __call__(self, x):
self.pointer = int(self.width * (x / 100.0))
return '|' + '#' * self.pointer + '-' * (self.width - self.pointer
) + '| %d %% done' % int(x)
class ProgressBar2:
def __init__(self, width=50):
self.pointer = 0
self.width = width
def __call__(self, x):
self.pointer = x
return '|' + '#' * self.pointer + '-' * (100 - self.pointer
) + '| %d %% done' % int(x)
@staticmethod
def run():
ProgressBar.progress_test()
if __name__ == '__main__':
ProgressBar.run()
|
# -*- coding:utf-8 -*-
import sys
import time
class ProgressBar:
    """Demo of an in-place console progress bar driven by carriage returns."""
    @staticmethod
    def progress_test():
        """Animate a 100-char bar from 0% to 100% on stdout (~5s total)."""
        bar_length = 100
        for percent in range(0, 101):
            hashes = '#' * int(percent / 100.0 * bar_length)
            spaces = ' ' * (bar_length - len(hashes))
            # '\r' rewrites the same terminal line instead of printing a new one.
            sys.stdout.write("\rPercent: [%s] %d%%" % (hashes + spaces, percent))
            sys.stdout.flush()
            time.sleep(0.05)
class ProgressBar1:
    """Callable progress bar that scales the percentage to a fixed width."""

    def __init__(self, width=50):
        # Number of filled cells from the most recent call.
        self.pointer = 0
        self.width = width

    def __call__(self, x):
        """Render *x* percent as ``|###---| x % done`` scaled to self.width."""
        filled = int(self.width * (x / 100.0))
        self.pointer = filled
        bar = "#" * filled + "-" * (self.width - filled)
        return "|%s| %d %% done" % (bar, int(x))
class ProgressBar2:
    """Callable progress bar rendered at a fixed 100-cell width.

    Unlike ProgressBar1 the percentage is used directly as the fill count,
    so the bar is always 100 cells wide regardless of ``width``.
    """

    def __init__(self, width=50):
        self.pointer = 0
        self.width = width  # kept for interface parity; not used when rendering

    def __call__(self, x):
        self.pointer = x
        filled = "#" * self.pointer
        empty = "-" * (100 - self.pointer)
        return "|%s%s| %d %% done" % (filled, empty, int(x))

    @staticmethod
    def run():
        # Delegates to the sibling ProgressBar demo defined in this module.
        ProgressBar.progress_test()
# pb = ProgressBar.ProgressBar1()
# for i in range(101):
# # os.system('cls')
# print(pb(i))
# time.sleep(0.02)
#
# pb = ProgressBar.ProgressBar2()
# for i in range(101):
# # os.system('cls')
# print(pb(i))
# time.sleep(0.02)
if __name__ == '__main__':
ProgressBar.run()
|
[
2,
3,
4,
5,
6
] |
2,528 |
51b28650f8ae6cbda3d81695acd27744e9bfebd1
|
<mask token>
|
<mask token>
def main(db_client: DBClient):
sns.set_theme()
peer_ids = db_client.get_dangling_peer_ids()
arrivals = db_client.get_inter_arrival_time(peer_ids)
results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])
results_df = results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda
x: x / 3600))
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True)
sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df)
ax1.set_xlim(0, 48)
ax1.set_xticks(np.arange(0, 50, step=4))
ax1.set_xlabel('Time in Hours')
ax1.set_ylabel('Number of Peers in %')
ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p:
'%d' % int(x * 100)))
ax1.legend(loc='lower right', labels=[
f'dangling ({fmt_thousands(len(results_df))})'])
ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers')
labels = []
for agent in known_agents:
peer_ids = db_client.get_peer_ids_for_agent_versions([agent])
arrivals = db_client.get_inter_arrival_time(peer_ids)
data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])
data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600))
labels += [f'{agent} ({fmt_thousands(len(data))})']
sns.ecdfplot(ax=ax2, x='diff_in_h', data=data)
ax2.set_xlim(0, 48)
ax2.set_xticks(np.arange(0, 50, step=4))
ax2.set_xlabel('Time in Hours')
ax2.set_ylabel('Number of Peers in %')
ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x,
p: '%d' % int(x * 100)))
ax2.title.set_text(f'CDF of Inter Arrival Times by Agent')
ax2.legend(loc='lower right', labels=labels)
plt.tight_layout()
lib_plot.savefig('cdf-inter-arrival-dangling')
plt.show()
<mask token>
|
<mask token>
def main(db_client: DBClient):
sns.set_theme()
peer_ids = db_client.get_dangling_peer_ids()
arrivals = db_client.get_inter_arrival_time(peer_ids)
results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])
results_df = results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda
x: x / 3600))
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True)
sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df)
ax1.set_xlim(0, 48)
ax1.set_xticks(np.arange(0, 50, step=4))
ax1.set_xlabel('Time in Hours')
ax1.set_ylabel('Number of Peers in %')
ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p:
'%d' % int(x * 100)))
ax1.legend(loc='lower right', labels=[
f'dangling ({fmt_thousands(len(results_df))})'])
ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers')
labels = []
for agent in known_agents:
peer_ids = db_client.get_peer_ids_for_agent_versions([agent])
arrivals = db_client.get_inter_arrival_time(peer_ids)
data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])
data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600))
labels += [f'{agent} ({fmt_thousands(len(data))})']
sns.ecdfplot(ax=ax2, x='diff_in_h', data=data)
ax2.set_xlim(0, 48)
ax2.set_xticks(np.arange(0, 50, step=4))
ax2.set_xlabel('Time in Hours')
ax2.set_ylabel('Number of Peers in %')
ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x,
p: '%d' % int(x * 100)))
ax2.title.set_text(f'CDF of Inter Arrival Times by Agent')
ax2.legend(loc='lower right', labels=labels)
plt.tight_layout()
lib_plot.savefig('cdf-inter-arrival-dangling')
plt.show()
if __name__ == '__main__':
client = DBClient()
main(client)
|
import pandas as pd
import numpy as np
import seaborn as sns
from matplotlib import pyplot as plt, ticker
from analysis.report import lib_plot
from analysis.report.lib_agent import known_agents
from analysis.report.lib_fmt import fmt_thousands
from lib_db import DBClient
def main(db_client: DBClient):
sns.set_theme()
peer_ids = db_client.get_dangling_peer_ids()
arrivals = db_client.get_inter_arrival_time(peer_ids)
results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])
results_df = results_df.assign(diff_in_h=results_df.diff_in_s.apply(lambda
x: x / 3600))
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True)
sns.ecdfplot(ax=ax1, x='diff_in_h', data=results_df)
ax1.set_xlim(0, 48)
ax1.set_xticks(np.arange(0, 50, step=4))
ax1.set_xlabel('Time in Hours')
ax1.set_ylabel('Number of Peers in %')
ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p:
'%d' % int(x * 100)))
ax1.legend(loc='lower right', labels=[
f'dangling ({fmt_thousands(len(results_df))})'])
ax1.title.set_text(f'CDF of Inter Arrival Times of Dangling Peers')
labels = []
for agent in known_agents:
peer_ids = db_client.get_peer_ids_for_agent_versions([agent])
arrivals = db_client.get_inter_arrival_time(peer_ids)
data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])
data = data.assign(diff_in_h=data.diff_in_s.apply(lambda x: x / 3600))
labels += [f'{agent} ({fmt_thousands(len(data))})']
sns.ecdfplot(ax=ax2, x='diff_in_h', data=data)
ax2.set_xlim(0, 48)
ax2.set_xticks(np.arange(0, 50, step=4))
ax2.set_xlabel('Time in Hours')
ax2.set_ylabel('Number of Peers in %')
ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x,
p: '%d' % int(x * 100)))
ax2.title.set_text(f'CDF of Inter Arrival Times by Agent')
ax2.legend(loc='lower right', labels=labels)
plt.tight_layout()
lib_plot.savefig('cdf-inter-arrival-dangling')
plt.show()
if __name__ == '__main__':
client = DBClient()
main(client)
|
import pandas as pd
import numpy as np
import seaborn as sns
from matplotlib import pyplot as plt, ticker
from analysis.report import lib_plot
from analysis.report.lib_agent import known_agents
from analysis.report.lib_fmt import fmt_thousands
from lib_db import DBClient
def main(db_client: DBClient):
    """Plot CDFs of peer inter-arrival times and save the figure.

    Left axis: dangling peers; right axis: one curve per known agent
    version.  Saves the figure as ``cdf-inter-arrival-dangling`` via
    lib_plot and then shows it.

    :param db_client: crawl-database client used for all queries.
    """
    sns.set_theme()
    peer_ids = db_client.get_dangling_peer_ids()
    arrivals = db_client.get_inter_arrival_time(peer_ids)
    # Rows are presumably (id, peer_id, seconds-between-sessions) -- verify
    # against DBClient.get_inter_arrival_time.
    results_df = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])
    results_df = results_df.assign(
        diff_in_h=results_df.diff_in_s.apply(lambda x: x / 3600),
    )
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5), sharey=True)
    sns.ecdfplot(ax=ax1, x="diff_in_h", data=results_df)
    ax1.set_xlim(0, 48)
    ax1.set_xticks(np.arange(0, 50, step=4))
    ax1.set_xlabel("Time in Hours")
    ax1.set_ylabel("Number of Peers in %")
    # Render the [0,1] ECDF y-axis as whole percentages.
    ax1.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: "%d" % int(x * 100)))
    ax1.legend(loc='lower right', labels=[f"dangling ({fmt_thousands(len(results_df))})"])
    ax1.title.set_text(f"CDF of Inter Arrival Times of Dangling Peers")
    labels = []
    # One ECDF curve per known agent version on the shared right axis.
    for agent in known_agents:
        peer_ids = db_client.get_peer_ids_for_agent_versions([agent])
        arrivals = db_client.get_inter_arrival_time(peer_ids)
        data = pd.DataFrame(arrivals, columns=['id', 'peer_id', 'diff_in_s'])
        data = data.assign(
            diff_in_h=data.diff_in_s.apply(lambda x: x / 3600),
        )
        labels += [f"{agent} ({fmt_thousands(len(data))})"]
        sns.ecdfplot(ax=ax2, x="diff_in_h", data=data)
    ax2.set_xlim(0, 48)
    ax2.set_xticks(np.arange(0, 50, step=4))
    ax2.set_xlabel("Time in Hours")
    ax2.set_ylabel("Number of Peers in %")
    ax2.get_yaxis().set_major_formatter(ticker.FuncFormatter(lambda x, p: "%d" % int(x * 100)))
    ax2.title.set_text(f"CDF of Inter Arrival Times by Agent")
    ax2.legend(loc='lower right', labels=labels)
    plt.tight_layout()
    lib_plot.savefig("cdf-inter-arrival-dangling")
    plt.show()
if __name__ == '__main__':
client = DBClient()
main(client)
|
[
0,
1,
2,
3,
4
] |
2,529 |
bd202e18cb98efc2b62ce4670fadcf70c35a33cb
|
<mask token>
class UploaderThread(object):
<mask token>
def is_uploading_tourney(self, tourney):
return tourney in self.uploading_tourneys
<mask token>
<mask token>
def get_last_successful_upload_time(self, tourney_name):
try:
with countdowntourney.tourney_open(tourney_name, db_dir
) as tourney:
upload_time = tourney.get_last_successful_upload_time()
if (upload_time is None or upload_time < self.
tourney_upload_start_time.get(tourney_name, 0)):
return None
else:
return upload_time
except countdowntourney.TourneyException as e:
sys.stderr.write(
'Failed to get last successful upload time: %s\n' % str(e))
return None
def get_last_failed_upload(self, tourney_name):
try:
with countdowntourney.tourney_open(tourney_name, db_dir
) as tourney:
failed_upload = tourney.get_last_failed_upload()
if failed_upload is not None and failed_upload.get('ts', None
) is not None and failed_upload['ts'
] >= self.tourney_upload_start_time.get(tourney_name, 0):
return failed_upload
else:
return None
except countdowntourney.TourneyException as e:
sys.stderr.write('Failed to get last failed upload info: %s\n' %
str(e))
return None
<mask token>
def get_tourney_auth(self, tourney):
return self.tourney_auth.get(tourney)
def set_tourney_auth(self, tourney, username, password):
self.tourney_auth[tourney] = {'username': username, 'password':
password}
def get_upload_button_pressed_time(self, tourney):
if tourney not in self.uploading_tourneys:
return None
else:
return self.tourney_upload_start_time.get(tourney, None)
def write_log(self, message):
sys.stderr.write('%s: %s\r\n' % (time.strftime('%Y-%m-%d %H:%M:%S'),
message))
def body(self):
while True:
uploading_tourneys = self.uploading_tourneys.copy()
for tourney_name in uploading_tourneys:
now = time.time()
last_upload_time = self.tourney_last_upload_attempt_time.get(
tourney_name, 0)
if now >= last_upload_time + upload_interval_sec:
try:
self.tourney_last_upload_attempt_time[tourney_name
] = now
with countdowntourney.tourney_open(tourney_name, db_dir
) as tourney:
game_state = get_game_state(tourney)
tourney_unique_id = get_tourney_unique_id(tourney)
auth = self.tourney_auth.get(tourney_name, None)
if auth:
username = auth.get('username')
password = auth.get('password')
private = auth.get('private', False)
else:
username = None
password = None
private = False
req = {'username': username, 'password':
password, 'private': private, 'unique_id':
tourney_unique_id, 'tourney': tourney_name}
if (tourney_name not in self.
tourney_last_uploaded_game_state or
game_state != self.
tourney_last_uploaded_game_state[tourney_name]
):
req['state'] = game_state
rep = make_https_json_request(http_server_host,
http_server_port, http_submit_path, req)
num_viewers = None
if rep.get('success', False):
self.tourney_last_uploaded_game_state[
tourney_name] = game_state
tourney.log_successful_upload()
if 'state' in req:
self.write_log(
'Successfully uploaded state for tourney "%s"'
% tourney_name)
else:
self.write_log(
'No change since last upload of tourney "%s"'
% tourney_name)
num_viewers = rep.get('viewers', None)
if num_viewers is not None:
self.write_log(
'Server reports %d viewer%s.' % (
num_viewers, 's' if num_viewers !=
1 else ''))
else:
if rep.get('http_failure', False):
failure_type = (countdowntourney.
UPLOAD_FAIL_TYPE_HTTP)
else:
failure_type = (countdowntourney.
UPLOAD_FAIL_TYPE_REJECTED)
tourney.log_failed_upload(failure_type, rep
.get('message', '(no message)'))
self.write_log(
'Failed to upload state for tourney "%s": %s'
% (tourney_name, rep.get('message',
'(no message')))
self.tourney_num_viewers[tourney_name
] = num_viewers
except countdowntourney.TourneyException as e:
self.write_log(
"UploaderThread: couldn't open tourney %s: %s" %
(tourney_name, str(e)))
traceback.print_tb(e.__traceback__)
continue
except Exception as e:
self.write_log(
'Uploader thread threw exception: %s' % str(e))
traceback.print_tb(e.__traceback__)
continue
time.sleep(1)
class UploaderServiceHandler(BaseRequestHandler):
def get_fields_from_req(self, req, field_names):
field_values = []
for name in field_names:
value = req.get(name, None)
if value is None:
raise FieldNotFoundException()
field_values.append(value)
return tuple(field_values)
def process_request(self, req):
global uploader_thread
req_type = req.get('type', None)
if not req_type:
return make_error_response('Request has no request type')
req_body = req.get('request', None)
if req_body is None:
return make_error_response('Request has no body')
try:
if req_type == 'start_uploading':
tourney, username, password, private = (self.
get_fields_from_req(req_body, ['tourney', 'username',
'password', 'private']))
uploader_thread.add_tourney_to_upload_list(tourney,
username, password, private)
rep = make_ok_response()
elif req_type == 'stop_uploading':
tourney, = self.get_fields_from_req(req_body, ['tourney'])
uploader_thread.remove_tourney_from_upload_list(tourney)
rep = make_ok_response()
elif req_type == 'delete':
tourney, username, password = self.get_fields_from_req(req_body
, ['tourney', 'username', 'password'])
uploader_thread.remove_tourney_from_upload_list(tourney)
rep = delete_tourney_from_web(tourney, username, password)
uploader_thread.set_tourney_auth(tourney, username, password)
elif req_type == 'status':
tourney, = self.get_fields_from_req(req_body, ['tourney'])
rep = {'success': True}
auth = uploader_thread.get_tourney_auth(tourney)
rep['publishing'] = uploader_thread.is_uploading_tourney(
tourney)
rep['viewers'] = uploader_thread.get_num_viewers(tourney)
if auth:
rep['username'] = auth.get('username', None)
rep['password'] = auth.get('password', None)
rep['private'] = auth.get('private', False)
rep['last_successful_upload_time'
] = uploader_thread.get_last_successful_upload_time(tourney
)
rep['last_failed_upload'
] = uploader_thread.get_last_failed_upload(tourney)
rep['upload_button_pressed_time'
] = uploader_thread.get_upload_button_pressed_time(tourney)
rep['now'] = int(time.time())
else:
rep = make_error_response('Unrecognised request type')
except FieldNotFoundException:
return make_error_response('Request is not valid for type')
return rep
def handle(self):
line = read_line_from_socket(self.request)
if line is not None:
rep = None
try:
req = json.loads(line)
except Exception as e:
rep = make_error_response('Request is not valid JSON')
if not rep:
rep = self.process_request(req)
self.request.sendall((json.dumps(rep) + '\n').encode('utf-8'))
self.request.close()
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
def __init__(self, addr_port, service_handler):
self.allow_reuse_address = True
super().__init__(addr_port, service_handler)
class TourneyUploaderService(object):
def __init__(self, listen_port):
global uploader_thread
self.listen_port = listen_port
self.socket_server = ThreadedTCPServer(('127.0.0.1', listen_port),
UploaderServiceHandler)
self.server_thread = threading.Thread(target=self.socket_server.
serve_forever)
if not uploader_thread:
uploader_thread = UploaderThread()
self.server_thread.daemon = True
self.server_thread.start()
def shutdown(self):
self.socket_server.shutdown()
|
<mask token>
class UploaderThread(object):
def __init__(self):
self.uploading_tourneys = set()
self.tourney_upload_start_time = {}
self.tourney_last_upload_attempt_time = {}
self.tourney_last_uploaded_game_state = {}
self.tourney_num_viewers = {}
self.tourney_auth = {}
self.thread = threading.Thread(target=self.body)
self.thread.daemon = True
self.thread.start()
def is_uploading_tourney(self, tourney):
return tourney in self.uploading_tourneys
def add_tourney_to_upload_list(self, tourney, username, password, private):
self.uploading_tourneys.add(tourney)
self.tourney_auth[tourney] = {'username': username, 'password':
password, 'private': private}
self.tourney_upload_start_time[tourney] = int(time.time())
if tourney in self.tourney_last_uploaded_game_state:
del self.tourney_last_uploaded_game_state[tourney]
self.tourney_last_upload_attempt_time[tourney] = 0
def remove_tourney_from_upload_list(self, tourney):
self.uploading_tourneys.discard(tourney)
def get_last_successful_upload_time(self, tourney_name):
try:
with countdowntourney.tourney_open(tourney_name, db_dir
) as tourney:
upload_time = tourney.get_last_successful_upload_time()
if (upload_time is None or upload_time < self.
tourney_upload_start_time.get(tourney_name, 0)):
return None
else:
return upload_time
except countdowntourney.TourneyException as e:
sys.stderr.write(
'Failed to get last successful upload time: %s\n' % str(e))
return None
def get_last_failed_upload(self, tourney_name):
try:
with countdowntourney.tourney_open(tourney_name, db_dir
) as tourney:
failed_upload = tourney.get_last_failed_upload()
if failed_upload is not None and failed_upload.get('ts', None
) is not None and failed_upload['ts'
] >= self.tourney_upload_start_time.get(tourney_name, 0):
return failed_upload
else:
return None
except countdowntourney.TourneyException as e:
sys.stderr.write('Failed to get last failed upload info: %s\n' %
str(e))
return None
def get_num_viewers(self, tourney_name):
return self.tourney_num_viewers.get(tourney_name, None)
def get_tourney_auth(self, tourney):
return self.tourney_auth.get(tourney)
def set_tourney_auth(self, tourney, username, password):
self.tourney_auth[tourney] = {'username': username, 'password':
password}
def get_upload_button_pressed_time(self, tourney):
if tourney not in self.uploading_tourneys:
return None
else:
return self.tourney_upload_start_time.get(tourney, None)
def write_log(self, message):
sys.stderr.write('%s: %s\r\n' % (time.strftime('%Y-%m-%d %H:%M:%S'),
message))
def body(self):
while True:
uploading_tourneys = self.uploading_tourneys.copy()
for tourney_name in uploading_tourneys:
now = time.time()
last_upload_time = self.tourney_last_upload_attempt_time.get(
tourney_name, 0)
if now >= last_upload_time + upload_interval_sec:
try:
self.tourney_last_upload_attempt_time[tourney_name
] = now
with countdowntourney.tourney_open(tourney_name, db_dir
) as tourney:
game_state = get_game_state(tourney)
tourney_unique_id = get_tourney_unique_id(tourney)
auth = self.tourney_auth.get(tourney_name, None)
if auth:
username = auth.get('username')
password = auth.get('password')
private = auth.get('private', False)
else:
username = None
password = None
private = False
req = {'username': username, 'password':
password, 'private': private, 'unique_id':
tourney_unique_id, 'tourney': tourney_name}
if (tourney_name not in self.
tourney_last_uploaded_game_state or
game_state != self.
tourney_last_uploaded_game_state[tourney_name]
):
req['state'] = game_state
rep = make_https_json_request(http_server_host,
http_server_port, http_submit_path, req)
num_viewers = None
if rep.get('success', False):
self.tourney_last_uploaded_game_state[
tourney_name] = game_state
tourney.log_successful_upload()
if 'state' in req:
self.write_log(
'Successfully uploaded state for tourney "%s"'
% tourney_name)
else:
self.write_log(
'No change since last upload of tourney "%s"'
% tourney_name)
num_viewers = rep.get('viewers', None)
if num_viewers is not None:
self.write_log(
'Server reports %d viewer%s.' % (
num_viewers, 's' if num_viewers !=
1 else ''))
else:
if rep.get('http_failure', False):
failure_type = (countdowntourney.
UPLOAD_FAIL_TYPE_HTTP)
else:
failure_type = (countdowntourney.
UPLOAD_FAIL_TYPE_REJECTED)
tourney.log_failed_upload(failure_type, rep
.get('message', '(no message)'))
self.write_log(
'Failed to upload state for tourney "%s": %s'
% (tourney_name, rep.get('message',
'(no message')))
self.tourney_num_viewers[tourney_name
] = num_viewers
except countdowntourney.TourneyException as e:
self.write_log(
"UploaderThread: couldn't open tourney %s: %s" %
(tourney_name, str(e)))
traceback.print_tb(e.__traceback__)
continue
except Exception as e:
self.write_log(
'Uploader thread threw exception: %s' % str(e))
traceback.print_tb(e.__traceback__)
continue
time.sleep(1)
class UploaderServiceHandler(BaseRequestHandler):
    """Serves one control connection: reads a JSON request line, replies, closes."""

    def get_fields_from_req(self, req, field_names):
        """Return a tuple of the named fields from req.

        Raises FieldNotFoundException if any field is absent (or null).
        """
        values = []
        for field in field_names:
            value = req.get(field, None)
            if value is None:
                raise FieldNotFoundException()
            values.append(value)
        return tuple(values)

    def process_request(self, req):
        """Dispatch one decoded request object and return the reply object."""
        global uploader_thread
        req_type = req.get("type", None)
        if not req_type:
            return make_error_response("Request has no request type")
        req_body = req.get("request", None)
        if req_body is None:
            return make_error_response("Request has no body")
        try:
            if req_type == "start_uploading":
                (tourney, username, password, private) = self.get_fields_from_req(
                        req_body, ["tourney", "username", "password", "private"])
                uploader_thread.add_tourney_to_upload_list(tourney, username, password, private)
                return make_ok_response()
            if req_type == "stop_uploading":
                (tourney,) = self.get_fields_from_req(req_body, ["tourney"])
                uploader_thread.remove_tourney_from_upload_list(tourney)
                return make_ok_response()
            if req_type == "delete":
                (tourney, username, password) = self.get_fields_from_req(
                        req_body, ["tourney", "username", "password"])
                uploader_thread.remove_tourney_from_upload_list(tourney)
                reply = delete_tourney_from_web(tourney, username, password)
                uploader_thread.set_tourney_auth(tourney, username, password)
                return reply
            if req_type == "status":
                (tourney,) = self.get_fields_from_req(req_body, ["tourney"])
                # Key insertion order is preserved in the JSON reply, so keep
                # it identical to the historical order.
                reply = {"success": True}
                auth = uploader_thread.get_tourney_auth(tourney)
                reply["publishing"] = uploader_thread.is_uploading_tourney(tourney)
                reply["viewers"] = uploader_thread.get_num_viewers(tourney)
                if auth:
                    reply["username"] = auth.get("username", None)
                    reply["password"] = auth.get("password", None)
                    reply["private"] = auth.get("private", False)
                reply["last_successful_upload_time"] = uploader_thread.get_last_successful_upload_time(tourney)
                reply["last_failed_upload"] = uploader_thread.get_last_failed_upload(tourney)
                reply["upload_button_pressed_time"] = uploader_thread.get_upload_button_pressed_time(tourney)
                reply["now"] = int(time.time())
                return reply
            return make_error_response("Unrecognised request type")
        except FieldNotFoundException:
            return make_error_response("Request is not valid for type")

    def handle(self):
        """Read one JSON line from the socket, answer it, then close."""
        line = read_line_from_socket(self.request)
        if line is not None:
            reply = None
            try:
                parsed = json.loads(line)
            except Exception:
                reply = make_error_response("Request is not valid JSON")
            if not reply:
                reply = self.process_request(parsed)
            self.request.sendall((json.dumps(reply) + "\n").encode("utf-8"))
        self.request.close()
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """TCP server that services each accepted connection on its own thread."""

    # Allow quick restarts: rebind the listening port even if the previous
    # socket is still in TIME_WAIT.
    allow_reuse_address = True

    def __init__(self, addr_port, service_handler):
        super().__init__(addr_port, service_handler)
class TourneyUploaderService(object):
    """Front end for the uploader: owns the localhost control-socket server
    and lazily creates the single process-wide UploaderThread."""

    def __init__(self, listen_port):
        # listen_port: local TCP port on which to accept control requests.
        global uploader_thread
        self.listen_port = listen_port
        self.socket_server = ThreadedTCPServer(('127.0.0.1', listen_port),
            UploaderServiceHandler)
        self.server_thread = threading.Thread(target=self.socket_server.
            serve_forever)
        # Only ever one uploader thread per process, shared by all services.
        if not uploader_thread:
            uploader_thread = UploaderThread()
        # Daemon thread so the server loop never blocks interpreter exit.
        self.server_thread.daemon = True
        self.server_thread.start()

    def shutdown(self):
        """Stop the control-socket server's serve_forever loop."""
        self.socket_server.shutdown()
|
<mask token>
class FieldNotFoundException(Exception):
    """Raised when a required field is missing from a request body."""
    pass
def make_error_response(message):
    """Build the standard failure reply carrying a human-readable message."""
    return dict(success=False, message=message)
<mask token>
def get_tourney_unique_id(tourney):
    """Return the tourney's unique identifier (delegates to the tourney object)."""
    return tourney.get_unique_id()
def delete_tourney_from_web(tourney_name, username, password):
    """Ask the public web server to delete this tourney's published data.

    Returns the server's decoded JSON reply.
    """
    request = dict(username=username, password=password,
                   tourney=tourney_name, delete=True)
    return make_https_json_request(http_server_host, http_server_port,
                                   http_delete_path, request)
<mask token>
class UploaderThread(object):
    """Daemon thread that periodically pushes tourney state to the web server.

    Tourneys are added to / removed from the upload set by the control-socket
    handlers.  Every upload_interval_sec seconds each registered tourney's
    state is submitted; if the state is unchanged since the last successful
    upload, a null "ping" update is sent instead, which just checks the
    server and reads the viewer count.

    Fix: the fallback text in the failure log line previously read
    "(no message" (missing closing parenthesis); corrected to "(no message)".
    """

    def __init__(self):
        # Names of the tourneys currently being published.
        self.uploading_tourneys = set()
        # Per-tourney bookkeeping, all keyed by tourney name.
        self.tourney_upload_start_time = {}         # when "start uploading" was pressed
        self.tourney_last_upload_attempt_time = {}  # last attempt, successful or not
        self.tourney_last_uploaded_game_state = {}  # state at last successful upload
        self.tourney_num_viewers = {}               # viewer count last reported by server
        self.tourney_auth = {}                      # username/password/private per tourney
        self.thread = threading.Thread(target=self.body)
        self.thread.daemon = True
        self.thread.start()

    def is_uploading_tourney(self, tourney):
        """Return True if the named tourney is in the upload set."""
        return tourney in self.uploading_tourneys

    def add_tourney_to_upload_list(self, tourney, username, password, private):
        """Start uploading a tourney, recording credentials and start time."""
        self.uploading_tourneys.add(tourney)
        self.tourney_auth[tourney] = {
                "username": username,
                "password": password,
                "private": private
        }
        self.tourney_upload_start_time[tourney] = int(time.time())
        # Forget any previously-uploaded state so the first upload sends the
        # full state, and zero the attempt time so it happens immediately.
        if tourney in self.tourney_last_uploaded_game_state:
            del self.tourney_last_uploaded_game_state[tourney]
        self.tourney_last_upload_attempt_time[tourney] = 0

    def remove_tourney_from_upload_list(self, tourney):
        """Stop uploading a tourney (no-op if it was not being uploaded)."""
        self.uploading_tourneys.discard(tourney)

    def get_last_successful_upload_time(self, tourney_name):
        """Return the time of the last successful upload, or None.

        Uploads recorded before the current "start uploading" press are
        ignored.
        """
        try:
            with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                upload_time = tourney.get_last_successful_upload_time()
                # Don't return this time if it's before the user even pressed
                # the "start uploading" button.
                if upload_time is None or upload_time < self.tourney_upload_start_time.get(tourney_name, 0):
                    return None
                else:
                    return upload_time
        except countdowntourney.TourneyException as e:
            sys.stderr.write("Failed to get last successful upload time: %s\n" % (str(e)))
            return None

    def get_last_failed_upload(self, tourney_name):
        """Return details of the last failed upload in the current upload
        session, or None if there isn't one."""
        try:
            with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                failed_upload = tourney.get_last_failed_upload()
                if failed_upload is not None and failed_upload.get("ts", None) is not None and failed_upload["ts"] >= self.tourney_upload_start_time.get(tourney_name, 0):
                    return failed_upload
                else:
                    return None
        except countdowntourney.TourneyException as e:
            sys.stderr.write("Failed to get last failed upload info: %s\n" % (str(e)))
            return None

    def get_num_viewers(self, tourney_name):
        """Return the viewer count last reported by the server, or None."""
        return self.tourney_num_viewers.get(tourney_name, None)

    def get_tourney_auth(self, tourney):
        """Return the stored auth dict for a tourney, or None."""
        return self.tourney_auth.get(tourney)

    def set_tourney_auth(self, tourney, username, password):
        """Replace the stored credentials for a tourney."""
        self.tourney_auth[tourney] = {"username": username, "password": password}

    def get_upload_button_pressed_time(self, tourney):
        """Return when uploading was switched on for this tourney, or None if
        it is not currently being uploaded."""
        if tourney not in self.uploading_tourneys:
            return None
        else:
            return self.tourney_upload_start_time.get(tourney, None)

    def write_log(self, message):
        """Write a timestamped line to the uploader log (stderr)."""
        sys.stderr.write("%s: %s\r\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), message))

    def body(self):
        """Thread main loop: upload each registered tourney at most once per
        upload_interval_sec, forever."""
        while True:
            uploading_tourneys = self.uploading_tourneys.copy()
            for tourney_name in uploading_tourneys:
                now = time.time()
                last_upload_time = self.tourney_last_upload_attempt_time.get(tourney_name, 0)
                if now >= last_upload_time + upload_interval_sec:
                    # It's been at least upload_interval_sec seconds since the
                    # previous attempt, so upload this tourney now.
                    try:
                        self.tourney_last_upload_attempt_time[tourney_name] = now
                        with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                            game_state = get_game_state(tourney)
                            tourney_unique_id = get_tourney_unique_id(tourney)
                            auth = self.tourney_auth.get(tourney_name, None)
                            if auth:
                                username = auth.get("username")
                                password = auth.get("password")
                                private = auth.get("private", False)
                            else:
                                username = None
                                password = None
                                private = False
                            req = {
                                    "username": username,
                                    "password": password,
                                    "private": private,
                                    "unique_id": tourney_unique_id,
                                    "tourney": tourney_name
                            }
                            # Include the state only if it changed since the
                            # last successful upload; otherwise this is a null
                            # update that just pings the server and reads the
                            # current viewer count.
                            if tourney_name not in self.tourney_last_uploaded_game_state or game_state != self.tourney_last_uploaded_game_state[tourney_name]:
                                req["state"] = game_state
                            # Send the submission to the server & get the reply
                            rep = make_https_json_request(http_server_host, http_server_port, http_submit_path, req)
                            num_viewers = None
                            if rep.get("success", False):
                                self.tourney_last_uploaded_game_state[tourney_name] = game_state
                                tourney.log_successful_upload()
                                if "state" in req:
                                    self.write_log("Successfully uploaded state for tourney \"%s\"" % (tourney_name))
                                else:
                                    self.write_log("No change since last upload of tourney \"%s\"" % (tourney_name))
                                num_viewers = rep.get("viewers", None)
                                if num_viewers is not None:
                                    self.write_log("Server reports %d viewer%s." % (num_viewers, "s" if num_viewers != 1 else ""))
                            else:
                                if rep.get("http_failure", False):
                                    failure_type = countdowntourney.UPLOAD_FAIL_TYPE_HTTP
                                else:
                                    failure_type = countdowntourney.UPLOAD_FAIL_TYPE_REJECTED
                                tourney.log_failed_upload(failure_type, rep.get("message", "(no message)"))
                                # Bug fix: default text below used to be the
                                # unbalanced "(no message".
                                self.write_log("Failed to upload state for tourney \"%s\": %s" % (tourney_name, rep.get("message", "(no message)")))
                            self.tourney_num_viewers[tourney_name] = num_viewers
                    except countdowntourney.TourneyException as e:
                        self.write_log("UploaderThread: couldn't open tourney %s: %s" % (tourney_name, str(e)))
                        traceback.print_tb(e.__traceback__)
                        continue
                    except Exception as e:
                        self.write_log("Uploader thread threw exception: %s" % (str(e)))
                        traceback.print_tb(e.__traceback__)
                        continue
            time.sleep(1)
class UploaderServiceHandler(BaseRequestHandler):
    """Serves one control connection: reads a JSON request line, replies, closes."""

    def get_fields_from_req(self, req, field_names):
        """Return a tuple of the named fields from req.

        Raises FieldNotFoundException if any field is absent (or null).
        """
        values = []
        for field in field_names:
            value = req.get(field, None)
            if value is None:
                raise FieldNotFoundException()
            values.append(value)
        return tuple(values)

    def process_request(self, req):
        """Dispatch one decoded request object and return the reply object."""
        global uploader_thread
        req_type = req.get("type", None)
        if not req_type:
            return make_error_response("Request has no request type")
        req_body = req.get("request", None)
        if req_body is None:
            return make_error_response("Request has no body")
        try:
            if req_type == "start_uploading":
                (tourney, username, password, private) = self.get_fields_from_req(
                        req_body, ["tourney", "username", "password", "private"])
                uploader_thread.add_tourney_to_upload_list(tourney, username, password, private)
                return make_ok_response()
            if req_type == "stop_uploading":
                (tourney,) = self.get_fields_from_req(req_body, ["tourney"])
                uploader_thread.remove_tourney_from_upload_list(tourney)
                return make_ok_response()
            if req_type == "delete":
                (tourney, username, password) = self.get_fields_from_req(
                        req_body, ["tourney", "username", "password"])
                uploader_thread.remove_tourney_from_upload_list(tourney)
                reply = delete_tourney_from_web(tourney, username, password)
                uploader_thread.set_tourney_auth(tourney, username, password)
                return reply
            if req_type == "status":
                (tourney,) = self.get_fields_from_req(req_body, ["tourney"])
                # Key insertion order is preserved in the JSON reply, so keep
                # it identical to the historical order.
                reply = {"success": True}
                auth = uploader_thread.get_tourney_auth(tourney)
                reply["publishing"] = uploader_thread.is_uploading_tourney(tourney)
                reply["viewers"] = uploader_thread.get_num_viewers(tourney)
                if auth:
                    reply["username"] = auth.get("username", None)
                    reply["password"] = auth.get("password", None)
                    reply["private"] = auth.get("private", False)
                reply["last_successful_upload_time"] = uploader_thread.get_last_successful_upload_time(tourney)
                reply["last_failed_upload"] = uploader_thread.get_last_failed_upload(tourney)
                reply["upload_button_pressed_time"] = uploader_thread.get_upload_button_pressed_time(tourney)
                reply["now"] = int(time.time())
                return reply
            return make_error_response("Unrecognised request type")
        except FieldNotFoundException:
            return make_error_response("Request is not valid for type")

    def handle(self):
        """Read one JSON line from the socket, answer it, then close."""
        line = read_line_from_socket(self.request)
        if line is not None:
            reply = None
            try:
                parsed = json.loads(line)
            except Exception:
                reply = make_error_response("Request is not valid JSON")
            if not reply:
                reply = self.process_request(parsed)
            self.request.sendall((json.dumps(reply) + "\n").encode("utf-8"))
        self.request.close()
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """TCP server that services each accepted connection on its own thread."""

    # Allow quick restarts: rebind the listening port even if the previous
    # socket is still in TIME_WAIT.
    allow_reuse_address = True

    def __init__(self, addr_port, service_handler):
        super().__init__(addr_port, service_handler)
class TourneyUploaderService(object):
    """Front end for the uploader: owns the localhost control-socket server
    and lazily creates the single process-wide UploaderThread."""

    def __init__(self, listen_port):
        # listen_port: local TCP port on which to accept control requests.
        global uploader_thread
        self.listen_port = listen_port
        self.socket_server = ThreadedTCPServer(('127.0.0.1', listen_port),
            UploaderServiceHandler)
        self.server_thread = threading.Thread(target=self.socket_server.
            serve_forever)
        # Only ever one uploader thread per process, shared by all services.
        if not uploader_thread:
            uploader_thread = UploaderThread()
        # Daemon thread so the server loop never blocks interpreter exit.
        self.server_thread.daemon = True
        self.server_thread.start()

    def shutdown(self):
        """Stop the control-socket server's serve_forever loop."""
        self.socket_server.shutdown()
|
<mask token>
# Default the tourney database directory to ./tourneys if none was configured.
if not db_dir:
    db_dir = os.path.join(os.getcwd(), 'tourneys')
<mask token>
class FieldNotFoundException(Exception):
    """Raised when a required field is missing from a request body."""
    pass
def make_error_response(message):
    """Build the standard failure reply carrying a human-readable message."""
    return dict(success=False, message=message)
def make_ok_response():
    """Build the standard success reply."""
    return dict(success=True)
def get_game_state(tourney):
    """Return the tourney's current state serialised for upload
    (delegates to the tourney2json module)."""
    return tourney2json.get_state_for_upload(tourney)
def get_tourney_unique_id(tourney):
    """Return the tourney's unique identifier (delegates to the tourney object)."""
    return tourney.get_unique_id()
def delete_tourney_from_web(tourney_name, username, password):
    """Ask the public web server to delete this tourney's published data.

    Returns the server's decoded JSON reply.
    """
    request = dict(username=username, password=password,
                   tourney=tourney_name, delete=True)
    return make_https_json_request(http_server_host, http_server_port,
                                   http_delete_path, request)
def read_line_from_socket(sock):
    """Read one newline-terminated line from sock and decode it as UTF-8.

    The returned string includes the trailing newline.  Returns None if the
    peer closes the connection before a newline arrives; any partial data
    read so far is discarded.
    """
    buf = bytearray()
    while True:
        ch = sock.recv(1)
        if not ch:
            # recv() gave b'' (or None): connection closed mid-line.
            return None
        buf += ch
        if ch == b'\n':
            return buf.decode('utf-8')
def make_https_json_request(server_host, server_port, path, request):
    """POST `request` as JSON over HTTPS and return the decoded JSON reply.

    On any transport or protocol failure a synthesised reply dict is returned
    with "success": False, and "http_failure": True for connection/HTTP-level
    problems (as opposed to the server rejecting the request).
    """
    payload = json.dumps(request)
    conn = None
    # Connect first so connection problems get their own error message.
    try:
        conn = http.client.HTTPSConnection(host=server_host, port=server_port, timeout=30)
        conn.connect()
    except Exception as e:
        if conn:
            conn.close()
        sys.stderr.write("Failed to connect to %s: %s\r\n" % (server_host, str(e)))
        return {
            "success": False,
            "http_failure": True,
            "message": "Failed to connect to %s: %s. Check your internet connection." % (server_host, str(e))
        }
    try:
        # Normalise away any leading slashes before building the URL.
        while path and path[0] == '/':
            path = path[1:]
        url = "https://%s%s/%s" % (server_host, (":" + str(server_port)) if server_port else "", path)
        conn.request("POST", url, payload)
    except ConnectionError as e:
        conn.close()
        sys.stderr.write("Failed to send HTTP request to %s: %s\r\n" % (url, str(e)))
        return {
            "success": False,
            "http_failure": True,
            "message": "Failed to upload game state to server %s: %s. Check your internet connection." % (url, str(e))
        }
    except Exception as e:
        conn.close()
        sys.stderr.write("Failed to send HTTP request to %s: %s\r\n" % (url, str(e)))
        return {"success": False, "http_failure": True, "message": str(e)}
    try:
        response = conn.getresponse()
    except Exception as e:
        sys.stderr.write("Failed to read response from %s: %s\r\n" % (url, str(e)))
        conn.close()
        return {"success": False, "http_failure": True, "message": str(e)}
    if response.status != 200:
        sys.stderr.write("Failed to post data to %s: HTTP response %d: %s\r\n" % (url, response.status, response.reason))
        reply = {
            "success": False,
            "http_failure": True,
            "message": "Failed to post update to server: HTTP %d: %s" % (response.status, response.reason)
        }
    else:
        raw = None
        reply = None
        try:
            raw = response.read()
        except Exception as e:
            sys.stderr.write("Failed to read response data from HTTP: " + str(e) + "\r\n")
            reply = {"success": False, "http_failure": True, "message": str(e)}
        if raw is not None:
            try:
                reply = json.loads(raw.decode("utf-8"))
                if not reply.get("success", False):
                    sys.stderr.write("Update failed. Message: " + reply.get("message", "(none)") + "\r\n")
            except Exception as e:
                sys.stderr.write("Failed to parse server response: " + str(e) + "\r\n")
                reply = {"success": False, "message": "Server response was invalid JSON: " + str(e)}
    conn.close()
    return reply
class UploaderThread(object):
    """Daemon thread that periodically pushes tourney state to the web server.

    Tourneys are added to / removed from the upload set by the control-socket
    handlers.  Every upload_interval_sec seconds each registered tourney's
    state is submitted; if the state is unchanged since the last successful
    upload, a null "ping" update is sent instead, which just checks the
    server and reads the viewer count.

    Fix: the fallback text in the failure log line previously read
    "(no message" (missing closing parenthesis); corrected to "(no message)".
    """

    def __init__(self):
        # Names of the tourneys currently being published.
        self.uploading_tourneys = set()
        # Per-tourney bookkeeping, all keyed by tourney name.
        self.tourney_upload_start_time = {}         # when "start uploading" was pressed
        self.tourney_last_upload_attempt_time = {}  # last attempt, successful or not
        self.tourney_last_uploaded_game_state = {}  # state at last successful upload
        self.tourney_num_viewers = {}               # viewer count last reported by server
        self.tourney_auth = {}                      # username/password/private per tourney
        self.thread = threading.Thread(target=self.body)
        self.thread.daemon = True
        self.thread.start()

    def is_uploading_tourney(self, tourney):
        """Return True if the named tourney is in the upload set."""
        return tourney in self.uploading_tourneys

    def add_tourney_to_upload_list(self, tourney, username, password, private):
        """Start uploading a tourney, recording credentials and start time."""
        self.uploading_tourneys.add(tourney)
        self.tourney_auth[tourney] = {
                "username": username,
                "password": password,
                "private": private
        }
        self.tourney_upload_start_time[tourney] = int(time.time())
        # Forget any previously-uploaded state so the first upload sends the
        # full state, and zero the attempt time so it happens immediately.
        if tourney in self.tourney_last_uploaded_game_state:
            del self.tourney_last_uploaded_game_state[tourney]
        self.tourney_last_upload_attempt_time[tourney] = 0

    def remove_tourney_from_upload_list(self, tourney):
        """Stop uploading a tourney (no-op if it was not being uploaded)."""
        self.uploading_tourneys.discard(tourney)

    def get_last_successful_upload_time(self, tourney_name):
        """Return the time of the last successful upload, or None.

        Uploads recorded before the current "start uploading" press are
        ignored.
        """
        try:
            with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                upload_time = tourney.get_last_successful_upload_time()
                # Don't return this time if it's before the user even pressed
                # the "start uploading" button.
                if upload_time is None or upload_time < self.tourney_upload_start_time.get(tourney_name, 0):
                    return None
                else:
                    return upload_time
        except countdowntourney.TourneyException as e:
            sys.stderr.write("Failed to get last successful upload time: %s\n" % (str(e)))
            return None

    def get_last_failed_upload(self, tourney_name):
        """Return details of the last failed upload in the current upload
        session, or None if there isn't one."""
        try:
            with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                failed_upload = tourney.get_last_failed_upload()
                if failed_upload is not None and failed_upload.get("ts", None) is not None and failed_upload["ts"] >= self.tourney_upload_start_time.get(tourney_name, 0):
                    return failed_upload
                else:
                    return None
        except countdowntourney.TourneyException as e:
            sys.stderr.write("Failed to get last failed upload info: %s\n" % (str(e)))
            return None

    def get_num_viewers(self, tourney_name):
        """Return the viewer count last reported by the server, or None."""
        return self.tourney_num_viewers.get(tourney_name, None)

    def get_tourney_auth(self, tourney):
        """Return the stored auth dict for a tourney, or None."""
        return self.tourney_auth.get(tourney)

    def set_tourney_auth(self, tourney, username, password):
        """Replace the stored credentials for a tourney."""
        self.tourney_auth[tourney] = {"username": username, "password": password}

    def get_upload_button_pressed_time(self, tourney):
        """Return when uploading was switched on for this tourney, or None if
        it is not currently being uploaded."""
        if tourney not in self.uploading_tourneys:
            return None
        else:
            return self.tourney_upload_start_time.get(tourney, None)

    def write_log(self, message):
        """Write a timestamped line to the uploader log (stderr)."""
        sys.stderr.write("%s: %s\r\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), message))

    def body(self):
        """Thread main loop: upload each registered tourney at most once per
        upload_interval_sec, forever."""
        while True:
            uploading_tourneys = self.uploading_tourneys.copy()
            for tourney_name in uploading_tourneys:
                now = time.time()
                last_upload_time = self.tourney_last_upload_attempt_time.get(tourney_name, 0)
                if now >= last_upload_time + upload_interval_sec:
                    # It's been at least upload_interval_sec seconds since the
                    # previous attempt, so upload this tourney now.
                    try:
                        self.tourney_last_upload_attempt_time[tourney_name] = now
                        with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                            game_state = get_game_state(tourney)
                            tourney_unique_id = get_tourney_unique_id(tourney)
                            auth = self.tourney_auth.get(tourney_name, None)
                            if auth:
                                username = auth.get("username")
                                password = auth.get("password")
                                private = auth.get("private", False)
                            else:
                                username = None
                                password = None
                                private = False
                            req = {
                                    "username": username,
                                    "password": password,
                                    "private": private,
                                    "unique_id": tourney_unique_id,
                                    "tourney": tourney_name
                            }
                            # Include the state only if it changed since the
                            # last successful upload; otherwise this is a null
                            # update that just pings the server and reads the
                            # current viewer count.
                            if tourney_name not in self.tourney_last_uploaded_game_state or game_state != self.tourney_last_uploaded_game_state[tourney_name]:
                                req["state"] = game_state
                            # Send the submission to the server & get the reply
                            rep = make_https_json_request(http_server_host, http_server_port, http_submit_path, req)
                            num_viewers = None
                            if rep.get("success", False):
                                self.tourney_last_uploaded_game_state[tourney_name] = game_state
                                tourney.log_successful_upload()
                                if "state" in req:
                                    self.write_log("Successfully uploaded state for tourney \"%s\"" % (tourney_name))
                                else:
                                    self.write_log("No change since last upload of tourney \"%s\"" % (tourney_name))
                                num_viewers = rep.get("viewers", None)
                                if num_viewers is not None:
                                    self.write_log("Server reports %d viewer%s." % (num_viewers, "s" if num_viewers != 1 else ""))
                            else:
                                if rep.get("http_failure", False):
                                    failure_type = countdowntourney.UPLOAD_FAIL_TYPE_HTTP
                                else:
                                    failure_type = countdowntourney.UPLOAD_FAIL_TYPE_REJECTED
                                tourney.log_failed_upload(failure_type, rep.get("message", "(no message)"))
                                # Bug fix: default text below used to be the
                                # unbalanced "(no message".
                                self.write_log("Failed to upload state for tourney \"%s\": %s" % (tourney_name, rep.get("message", "(no message)")))
                            self.tourney_num_viewers[tourney_name] = num_viewers
                    except countdowntourney.TourneyException as e:
                        self.write_log("UploaderThread: couldn't open tourney %s: %s" % (tourney_name, str(e)))
                        traceback.print_tb(e.__traceback__)
                        continue
                    except Exception as e:
                        self.write_log("Uploader thread threw exception: %s" % (str(e)))
                        traceback.print_tb(e.__traceback__)
                        continue
            time.sleep(1)
class UploaderServiceHandler(BaseRequestHandler):
    """Serves one control connection: reads a JSON request line, replies, closes."""

    def get_fields_from_req(self, req, field_names):
        """Return a tuple of the named fields from req.

        Raises FieldNotFoundException if any field is absent (or null).
        """
        values = []
        for field in field_names:
            value = req.get(field, None)
            if value is None:
                raise FieldNotFoundException()
            values.append(value)
        return tuple(values)

    def process_request(self, req):
        """Dispatch one decoded request object and return the reply object."""
        global uploader_thread
        req_type = req.get("type", None)
        if not req_type:
            return make_error_response("Request has no request type")
        req_body = req.get("request", None)
        if req_body is None:
            return make_error_response("Request has no body")
        try:
            if req_type == "start_uploading":
                (tourney, username, password, private) = self.get_fields_from_req(
                        req_body, ["tourney", "username", "password", "private"])
                uploader_thread.add_tourney_to_upload_list(tourney, username, password, private)
                return make_ok_response()
            if req_type == "stop_uploading":
                (tourney,) = self.get_fields_from_req(req_body, ["tourney"])
                uploader_thread.remove_tourney_from_upload_list(tourney)
                return make_ok_response()
            if req_type == "delete":
                (tourney, username, password) = self.get_fields_from_req(
                        req_body, ["tourney", "username", "password"])
                uploader_thread.remove_tourney_from_upload_list(tourney)
                reply = delete_tourney_from_web(tourney, username, password)
                uploader_thread.set_tourney_auth(tourney, username, password)
                return reply
            if req_type == "status":
                (tourney,) = self.get_fields_from_req(req_body, ["tourney"])
                # Key insertion order is preserved in the JSON reply, so keep
                # it identical to the historical order.
                reply = {"success": True}
                auth = uploader_thread.get_tourney_auth(tourney)
                reply["publishing"] = uploader_thread.is_uploading_tourney(tourney)
                reply["viewers"] = uploader_thread.get_num_viewers(tourney)
                if auth:
                    reply["username"] = auth.get("username", None)
                    reply["password"] = auth.get("password", None)
                    reply["private"] = auth.get("private", False)
                reply["last_successful_upload_time"] = uploader_thread.get_last_successful_upload_time(tourney)
                reply["last_failed_upload"] = uploader_thread.get_last_failed_upload(tourney)
                reply["upload_button_pressed_time"] = uploader_thread.get_upload_button_pressed_time(tourney)
                reply["now"] = int(time.time())
                return reply
            return make_error_response("Unrecognised request type")
        except FieldNotFoundException:
            return make_error_response("Request is not valid for type")

    def handle(self):
        """Read one JSON line from the socket, answer it, then close."""
        line = read_line_from_socket(self.request)
        if line is not None:
            reply = None
            try:
                parsed = json.loads(line)
            except Exception:
                reply = make_error_response("Request is not valid JSON")
            if not reply:
                reply = self.process_request(parsed)
            self.request.sendall((json.dumps(reply) + "\n").encode("utf-8"))
        self.request.close()
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """TCP server that services each accepted connection on its own thread."""

    # Allow quick restarts: rebind the listening port even if the previous
    # socket is still in TIME_WAIT.
    allow_reuse_address = True

    def __init__(self, addr_port, service_handler):
        super().__init__(addr_port, service_handler)
class TourneyUploaderService(object):
    """Front end for the uploader: owns the localhost control-socket server
    and lazily creates the single process-wide UploaderThread."""

    def __init__(self, listen_port):
        # listen_port: local TCP port on which to accept control requests.
        global uploader_thread
        self.listen_port = listen_port
        self.socket_server = ThreadedTCPServer(('127.0.0.1', listen_port),
            UploaderServiceHandler)
        self.server_thread = threading.Thread(target=self.socket_server.
            serve_forever)
        # Only ever one uploader thread per process, shared by all services.
        if not uploader_thread:
            uploader_thread = UploaderThread()
        # Daemon thread so the server loop never blocks interpreter exit.
        self.server_thread.daemon = True
        self.server_thread.start()

    def shutdown(self):
        """Stop the control-socket server's serve_forever loop."""
        self.socket_server.shutdown()
|
#!/usr/bin/python3
# The uploader service listens for connections from localhost on port 3961.
# It expects a JSON object on a line by itself as the request. It responds
# with another JSON object on a line by itself, then closes the connection.
# Atropine CGI scripts can send requests to this service to tell it to:
# * Add a tourney to the list of tourneys we're periodically uploading to
# greem.co.uk
# * Remove a tourney from that list (i.e. stop uploading it)
# * Get the upload state of a tourney (are we uploading it, when was the
# last successful upload, was the last upload successful, and if not what
# went wrong)
#
# The service is started with atropine.py, and runs alongside the web server
# which serves the web interface used by the tournament administrator. At
# startup, no tourneys are being uploaded.
import sys
import os
import socketserver
from socketserver import BaseRequestHandler
import json
import threading
import time
import http.client
import traceback
# Where uploads are sent: host and CGI endpoints on the public server.
http_server_host = "greem.co.uk"
http_server_port = None  # None: let http.client use the default HTTPS port
http_submit_path = "/cgi-bin/colive/submit.py"
http_delete_path = "/cgi-bin/colive/submit.py"
# Minimum number of seconds between upload attempts for any one tourney.
upload_interval_sec = 10
# Tourney database directory: $TOURNEYSPATH, defaulting to ./tourneys.
db_dir = os.getenv("TOURNEYSPATH")
if not db_dir:
    db_dir = os.path.join(os.getcwd(), "tourneys")
import tourney2json
import countdowntourney
# The single global uploader thread, created lazily by TourneyUploaderService.
uploader_thread = None
class FieldNotFoundException(Exception):
    """Raised when a required field is missing from a request body."""
    pass
def make_error_response(message):
    """Build the standard failure reply carrying a human-readable message."""
    return dict(success=False, message=message)
def make_ok_response():
    """Build the standard success reply."""
    return dict(success=True)
def get_game_state(tourney):
    """Return the tourney's current state serialised for upload
    (delegates to the tourney2json module)."""
    return tourney2json.get_state_for_upload(tourney)
def get_tourney_unique_id(tourney):
    """Return the tourney's unique identifier (delegates to the tourney object)."""
    return tourney.get_unique_id()
def delete_tourney_from_web(tourney_name, username, password):
    """Ask the public web server to delete this tourney's published data.

    Returns the server's decoded JSON reply.
    """
    request = dict(username=username, password=password,
                   tourney=tourney_name, delete=True)
    return make_https_json_request(http_server_host, http_server_port,
                                   http_delete_path, request)
def read_line_from_socket(sock):
    """Read one newline-terminated line from sock and decode it as UTF-8.

    The returned string includes the trailing newline.  Returns None if the
    peer closes the connection before a newline arrives; any partial data
    read so far is discarded.
    """
    buf = bytearray()
    while True:
        ch = sock.recv(1)
        if not ch:
            # recv() gave b'' (or None): connection closed mid-line.
            return None
        buf += ch
        if ch == b'\n':
            return buf.decode('utf-8')
def make_https_json_request(server_host, server_port, path, request):
    """POST `request` as JSON over HTTPS and return the decoded JSON reply.

    On any transport or protocol failure a synthesised reply dict is returned
    with "success": False, and "http_failure": True for connection/HTTP-level
    problems (as opposed to the server rejecting the request).
    """
    payload = json.dumps(request)
    conn = None
    # Connect first so connection problems get their own error message.
    try:
        conn = http.client.HTTPSConnection(host=server_host, port=server_port, timeout=30)
        conn.connect()
    except Exception as e:
        if conn:
            conn.close()
        sys.stderr.write("Failed to connect to %s: %s\r\n" % (server_host, str(e)))
        return {
            "success": False,
            "http_failure": True,
            "message": "Failed to connect to %s: %s. Check your internet connection." % (server_host, str(e))
        }
    try:
        # Normalise away any leading slashes before building the URL.
        while path and path[0] == '/':
            path = path[1:]
        url = "https://%s%s/%s" % (server_host, (":" + str(server_port)) if server_port else "", path)
        conn.request("POST", url, payload)
    except ConnectionError as e:
        conn.close()
        sys.stderr.write("Failed to send HTTP request to %s: %s\r\n" % (url, str(e)))
        return {
            "success": False,
            "http_failure": True,
            "message": "Failed to upload game state to server %s: %s. Check your internet connection." % (url, str(e))
        }
    except Exception as e:
        conn.close()
        sys.stderr.write("Failed to send HTTP request to %s: %s\r\n" % (url, str(e)))
        return {"success": False, "http_failure": True, "message": str(e)}
    try:
        response = conn.getresponse()
    except Exception as e:
        sys.stderr.write("Failed to read response from %s: %s\r\n" % (url, str(e)))
        conn.close()
        return {"success": False, "http_failure": True, "message": str(e)}
    if response.status != 200:
        sys.stderr.write("Failed to post data to %s: HTTP response %d: %s\r\n" % (url, response.status, response.reason))
        reply = {
            "success": False,
            "http_failure": True,
            "message": "Failed to post update to server: HTTP %d: %s" % (response.status, response.reason)
        }
    else:
        raw = None
        reply = None
        try:
            raw = response.read()
        except Exception as e:
            sys.stderr.write("Failed to read response data from HTTP: " + str(e) + "\r\n")
            reply = {"success": False, "http_failure": True, "message": str(e)}
        if raw is not None:
            try:
                reply = json.loads(raw.decode("utf-8"))
                if not reply.get("success", False):
                    sys.stderr.write("Update failed. Message: " + reply.get("message", "(none)") + "\r\n")
            except Exception as e:
                sys.stderr.write("Failed to parse server response: " + str(e) + "\r\n")
                reply = {"success": False, "message": "Server response was invalid JSON: " + str(e)}
    conn.close()
    return reply
class UploaderThread(object):
    """Daemon thread that periodically pushes tourney state to the web server.

    Tourneys are added to / removed from the upload set by the control-socket
    handlers.  Every upload_interval_sec seconds each registered tourney's
    state is submitted; if the state is unchanged since the last successful
    upload, a null "ping" update is sent instead, which just checks the
    server and reads the viewer count.

    Fix: the fallback text in the failure log line previously read
    "(no message" (missing closing parenthesis); corrected to "(no message)".
    """

    def __init__(self):
        # Names of the tourneys currently being published.
        self.uploading_tourneys = set()
        # Per-tourney bookkeeping, all keyed by tourney name.
        self.tourney_upload_start_time = {}         # when "start uploading" was pressed
        self.tourney_last_upload_attempt_time = {}  # last attempt, successful or not
        self.tourney_last_uploaded_game_state = {}  # state at last successful upload
        self.tourney_num_viewers = {}               # viewer count last reported by server
        self.tourney_auth = {}                      # username/password/private per tourney
        self.thread = threading.Thread(target=self.body)
        self.thread.daemon = True
        self.thread.start()

    def is_uploading_tourney(self, tourney):
        """Return True if the named tourney is in the upload set."""
        return tourney in self.uploading_tourneys

    def add_tourney_to_upload_list(self, tourney, username, password, private):
        """Start uploading a tourney, recording credentials and start time."""
        self.uploading_tourneys.add(tourney)
        self.tourney_auth[tourney] = {
                "username": username,
                "password": password,
                "private": private
        }
        self.tourney_upload_start_time[tourney] = int(time.time())
        # Forget any previously-uploaded state so the first upload sends the
        # full state, and zero the attempt time so it happens immediately.
        if tourney in self.tourney_last_uploaded_game_state:
            del self.tourney_last_uploaded_game_state[tourney]
        self.tourney_last_upload_attempt_time[tourney] = 0

    def remove_tourney_from_upload_list(self, tourney):
        """Stop uploading a tourney (no-op if it was not being uploaded)."""
        self.uploading_tourneys.discard(tourney)

    def get_last_successful_upload_time(self, tourney_name):
        """Return the time of the last successful upload, or None.

        Uploads recorded before the current "start uploading" press are
        ignored.
        """
        try:
            with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                upload_time = tourney.get_last_successful_upload_time()
                # Don't return this time if it's before the user even pressed
                # the "start uploading" button.
                if upload_time is None or upload_time < self.tourney_upload_start_time.get(tourney_name, 0):
                    return None
                else:
                    return upload_time
        except countdowntourney.TourneyException as e:
            sys.stderr.write("Failed to get last successful upload time: %s\n" % (str(e)))
            return None

    def get_last_failed_upload(self, tourney_name):
        """Return details of the last failed upload in the current upload
        session, or None if there isn't one."""
        try:
            with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                failed_upload = tourney.get_last_failed_upload()
                if failed_upload is not None and failed_upload.get("ts", None) is not None and failed_upload["ts"] >= self.tourney_upload_start_time.get(tourney_name, 0):
                    return failed_upload
                else:
                    return None
        except countdowntourney.TourneyException as e:
            sys.stderr.write("Failed to get last failed upload info: %s\n" % (str(e)))
            return None

    def get_num_viewers(self, tourney_name):
        """Return the viewer count last reported by the server, or None."""
        return self.tourney_num_viewers.get(tourney_name, None)

    def get_tourney_auth(self, tourney):
        """Return the stored auth dict for a tourney, or None."""
        return self.tourney_auth.get(tourney)

    def set_tourney_auth(self, tourney, username, password):
        """Replace the stored credentials for a tourney."""
        self.tourney_auth[tourney] = {"username": username, "password": password}

    def get_upload_button_pressed_time(self, tourney):
        """Return when uploading was switched on for this tourney, or None if
        it is not currently being uploaded."""
        if tourney not in self.uploading_tourneys:
            return None
        else:
            return self.tourney_upload_start_time.get(tourney, None)

    def write_log(self, message):
        """Write a timestamped line to the uploader log (stderr)."""
        sys.stderr.write("%s: %s\r\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), message))

    def body(self):
        """Thread main loop: upload each registered tourney at most once per
        upload_interval_sec, forever."""
        while True:
            uploading_tourneys = self.uploading_tourneys.copy()
            for tourney_name in uploading_tourneys:
                now = time.time()
                last_upload_time = self.tourney_last_upload_attempt_time.get(tourney_name, 0)
                if now >= last_upload_time + upload_interval_sec:
                    # It's been at least upload_interval_sec seconds since the
                    # previous attempt, so upload this tourney now.
                    try:
                        self.tourney_last_upload_attempt_time[tourney_name] = now
                        with countdowntourney.tourney_open(tourney_name, db_dir) as tourney:
                            game_state = get_game_state(tourney)
                            tourney_unique_id = get_tourney_unique_id(tourney)
                            auth = self.tourney_auth.get(tourney_name, None)
                            if auth:
                                username = auth.get("username")
                                password = auth.get("password")
                                private = auth.get("private", False)
                            else:
                                username = None
                                password = None
                                private = False
                            req = {
                                    "username": username,
                                    "password": password,
                                    "private": private,
                                    "unique_id": tourney_unique_id,
                                    "tourney": tourney_name
                            }
                            # Include the state only if it changed since the
                            # last successful upload; otherwise this is a null
                            # update that just pings the server and reads the
                            # current viewer count.
                            if tourney_name not in self.tourney_last_uploaded_game_state or game_state != self.tourney_last_uploaded_game_state[tourney_name]:
                                req["state"] = game_state
                            # Send the submission to the server & get the reply
                            rep = make_https_json_request(http_server_host, http_server_port, http_submit_path, req)
                            num_viewers = None
                            if rep.get("success", False):
                                self.tourney_last_uploaded_game_state[tourney_name] = game_state
                                tourney.log_successful_upload()
                                if "state" in req:
                                    self.write_log("Successfully uploaded state for tourney \"%s\"" % (tourney_name))
                                else:
                                    self.write_log("No change since last upload of tourney \"%s\"" % (tourney_name))
                                num_viewers = rep.get("viewers", None)
                                if num_viewers is not None:
                                    self.write_log("Server reports %d viewer%s." % (num_viewers, "s" if num_viewers != 1 else ""))
                            else:
                                if rep.get("http_failure", False):
                                    failure_type = countdowntourney.UPLOAD_FAIL_TYPE_HTTP
                                else:
                                    failure_type = countdowntourney.UPLOAD_FAIL_TYPE_REJECTED
                                tourney.log_failed_upload(failure_type, rep.get("message", "(no message)"))
                                # Bug fix: default text below used to be the
                                # unbalanced "(no message".
                                self.write_log("Failed to upload state for tourney \"%s\": %s" % (tourney_name, rep.get("message", "(no message)")))
                            self.tourney_num_viewers[tourney_name] = num_viewers
                    except countdowntourney.TourneyException as e:
                        self.write_log("UploaderThread: couldn't open tourney %s: %s" % (tourney_name, str(e)))
                        traceback.print_tb(e.__traceback__)
                        continue
                    except Exception as e:
                        self.write_log("Uploader thread threw exception: %s" % (str(e)))
                        traceback.print_tb(e.__traceback__)
                        continue
            time.sleep(1)
class UploaderServiceHandler(BaseRequestHandler):
    """Socket request handler for the local uploader control service.

    Accepts one JSON request object per connection (a single line),
    dispatches on its "type" field, and writes back a single JSON reply
    line before closing the connection.
    """
    def get_fields_from_req(self, req, field_names):
        """Extract the named fields from *req* as a tuple.

        Raises FieldNotFoundException if any field is absent or null.
        """
        field_values = []
        for name in field_names:
            value = req.get(name, None)
            if value is None:
                # An explicit null is treated the same as a missing field.
                raise FieldNotFoundException()
            field_values.append(value)
        return tuple(field_values)
    def process_request(self, req):
        """Dispatch a decoded request dict; return a JSON-serialisable reply dict."""
        global uploader_thread
        req_type = req.get("type", None)
        if not req_type:
            return make_error_response("Request has no request type")
        req_body = req.get("request", None)
        if req_body is None:
            return make_error_response("Request has no body")
        try:
            if req_type == "start_uploading":
                (tourney, username, password, private) = self.get_fields_from_req(req_body, ["tourney", "username", "password", "private"])
                uploader_thread.add_tourney_to_upload_list(tourney, username, password, private)
                rep = make_ok_response()
            elif req_type == "stop_uploading":
                (tourney,) = self.get_fields_from_req(req_body, ["tourney"])
                uploader_thread.remove_tourney_from_upload_list(tourney)
                rep = make_ok_response()
            elif req_type == "delete":
                (tourney, username, password) = self.get_fields_from_req(req_body, ["tourney", "username", "password"])
                # Stop publishing locally before asking the web service to delete.
                uploader_thread.remove_tourney_from_upload_list(tourney)
                rep = delete_tourney_from_web(tourney, username, password)
                uploader_thread.set_tourney_auth(tourney, username, password)
            elif req_type == "status":
                (tourney,) = self.get_fields_from_req(req_body, ["tourney"])
                rep = { "success" : True }
                auth = uploader_thread.get_tourney_auth(tourney)
                rep["publishing"] = uploader_thread.is_uploading_tourney(tourney)
                rep["viewers"] = uploader_thread.get_num_viewers(tourney)
                if auth:
                    rep["username"] = auth.get("username", None)
                    rep["password"] = auth.get("password", None)
                    rep["private"] = auth.get("private", False)
                rep["last_successful_upload_time"] = uploader_thread.get_last_successful_upload_time(tourney)
                rep["last_failed_upload"] = uploader_thread.get_last_failed_upload(tourney)
                rep["upload_button_pressed_time"] = uploader_thread.get_upload_button_pressed_time(tourney)
                # Include the server's idea of "now" so clients can compute ages.
                rep["now"] = int(time.time())
            else:
                rep = make_error_response("Unrecognised request type")
        except FieldNotFoundException:
            return make_error_response("Request is not valid for type")
        return rep
    def handle(self):
        """Read one JSON request line from the socket and send the JSON reply."""
        # Request is expected to be a JSON object, on a line by itself
        line = read_line_from_socket(self.request)
        if line is not None:
            rep = None
            try:
                req = json.loads(line)
            except Exception as e:
                rep = make_error_response("Request is not valid JSON")
            if not rep:
                rep = self.process_request(req)
            self.request.sendall((json.dumps(rep) + "\n").encode("utf-8"))
        self.request.close()
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """TCP server that services each accepted connection on its own thread.

    SO_REUSEADDR is enabled so the listening port can be rebound
    immediately after a restart.
    """

    # Read by TCPServer.server_bind() during construction.
    allow_reuse_address = True

    def __init__(self, addr_port, service_handler):
        super().__init__(addr_port, service_handler)
class TourneyUploaderService(object):
    """Owns the localhost control socket and the shared background uploader thread."""
    def __init__(self, listen_port):
        global uploader_thread
        self.listen_port = listen_port
        # Bind to loopback only: control requests are accepted solely from this machine.
        self.socket_server = ThreadedTCPServer(("127.0.0.1", listen_port), UploaderServiceHandler)
        self.server_thread = threading.Thread(target=self.socket_server.serve_forever)
        # The uploader thread is a process-wide singleton; create it on first use.
        if not uploader_thread:
            uploader_thread = UploaderThread()
        # Daemon thread so the listener never keeps the process alive on exit.
        self.server_thread.daemon = True
        self.server_thread.start()
    def shutdown(self):
        """Stop accepting control connections (the uploader thread keeps running)."""
        self.socket_server.shutdown()
|
[
18,
22,
26,
31,
34
] |
2,530 |
2b141f12bec2006e496bf58a3fcb0167c95ab3b6
|
<mask token>
|
<mask token>
def get_new_message_list(channel_id: int):
with Connection() as cn:
token, channel = cn.s.query(SlackChannel.token, SlackChannel.channel
).filter(SlackChannel.id == channel_id).one()
user_dict = {user.user: user.other_name for user in cn.s.query(
SlackUser).all()}
with urllib.request.urlopen(
f'https://slack.com/api/channels.history?token={token}&channel={channel}'
) as res:
json_dict = json.load(res)
print(json_dict)
messages = sorted(json_dict['messages'], key=lambda x: x.get('ts', ''))
client_msg_id_list = [id_ for id_, in cn.s.query(SlackMessage.
client_msg_id).filter(SlackMessage.client_msg_id.in_([message.
get('client_msg_id') for message in messages])).all()]
message_list = []
insert_msg_id_list = []
for message in messages:
if not (message.get('user') and message.get('text') and message
.get('client_msg_id')):
continue
if message.get('client_msg_id') in client_msg_id_list:
continue
time_stamp = message.get('ts', '')
if time_stamp:
time_stamp = datetime.fromtimestamp(float(time_stamp), jst
).strftime('%m/%d %H:%M:%S')
text = message['text']
for user, name in user_dict.items():
text = text.replace(user, name)
message_list.append(user_dict[message['user']] + ':[' +
time_stamp + '] ' + text)
insert_msg_id_list.append({'client_msg_id': message[
'client_msg_id']})
cn.s.bulk_insert_mappings(SlackMessage, insert_msg_id_list)
cn.s.commit()
return message_list
|
<mask token>
jst = timezone(timedelta(hours=+9), 'JST')
def get_new_message_list(channel_id: int):
with Connection() as cn:
token, channel = cn.s.query(SlackChannel.token, SlackChannel.channel
).filter(SlackChannel.id == channel_id).one()
user_dict = {user.user: user.other_name for user in cn.s.query(
SlackUser).all()}
with urllib.request.urlopen(
f'https://slack.com/api/channels.history?token={token}&channel={channel}'
) as res:
json_dict = json.load(res)
print(json_dict)
messages = sorted(json_dict['messages'], key=lambda x: x.get('ts', ''))
client_msg_id_list = [id_ for id_, in cn.s.query(SlackMessage.
client_msg_id).filter(SlackMessage.client_msg_id.in_([message.
get('client_msg_id') for message in messages])).all()]
message_list = []
insert_msg_id_list = []
for message in messages:
if not (message.get('user') and message.get('text') and message
.get('client_msg_id')):
continue
if message.get('client_msg_id') in client_msg_id_list:
continue
time_stamp = message.get('ts', '')
if time_stamp:
time_stamp = datetime.fromtimestamp(float(time_stamp), jst
).strftime('%m/%d %H:%M:%S')
text = message['text']
for user, name in user_dict.items():
text = text.replace(user, name)
message_list.append(user_dict[message['user']] + ':[' +
time_stamp + '] ' + text)
insert_msg_id_list.append({'client_msg_id': message[
'client_msg_id']})
cn.s.bulk_insert_mappings(SlackMessage, insert_msg_id_list)
cn.s.commit()
return message_list
|
from datetime import datetime, timezone, timedelta
import json
import urllib.request
from mysql_dbcon import Connection
from model import SlackChannel, SlackUser, SlackMessage
jst = timezone(timedelta(hours=+9), 'JST')
def get_new_message_list(channel_id: int):
with Connection() as cn:
token, channel = cn.s.query(SlackChannel.token, SlackChannel.channel
).filter(SlackChannel.id == channel_id).one()
user_dict = {user.user: user.other_name for user in cn.s.query(
SlackUser).all()}
with urllib.request.urlopen(
f'https://slack.com/api/channels.history?token={token}&channel={channel}'
) as res:
json_dict = json.load(res)
print(json_dict)
messages = sorted(json_dict['messages'], key=lambda x: x.get('ts', ''))
client_msg_id_list = [id_ for id_, in cn.s.query(SlackMessage.
client_msg_id).filter(SlackMessage.client_msg_id.in_([message.
get('client_msg_id') for message in messages])).all()]
message_list = []
insert_msg_id_list = []
for message in messages:
if not (message.get('user') and message.get('text') and message
.get('client_msg_id')):
continue
if message.get('client_msg_id') in client_msg_id_list:
continue
time_stamp = message.get('ts', '')
if time_stamp:
time_stamp = datetime.fromtimestamp(float(time_stamp), jst
).strftime('%m/%d %H:%M:%S')
text = message['text']
for user, name in user_dict.items():
text = text.replace(user, name)
message_list.append(user_dict[message['user']] + ':[' +
time_stamp + '] ' + text)
insert_msg_id_list.append({'client_msg_id': message[
'client_msg_id']})
cn.s.bulk_insert_mappings(SlackMessage, insert_msg_id_list)
cn.s.commit()
return message_list
|
from datetime import datetime, timezone, timedelta
import json
import urllib.request
from mysql_dbcon import Connection
from model import SlackChannel, SlackUser, SlackMessage
# TODO set timezone at config
jst = timezone(timedelta(hours=+9), 'JST')
def get_new_message_list(channel_id: int):
    """Fetch not-yet-seen messages for a Slack channel and format them for display.

    Looks up the channel's API token, pulls its history, skips messages
    lacking user/text/client_msg_id or whose client_msg_id is already
    stored, records the new ids in SlackMessage, and returns the new
    messages as '<name>:[<mm/dd HH:MM:SS>] <text>' strings in ascending
    timestamp order.
    """
    # NOTE(review): channels.history is a deprecated Slack Web API method;
    # presumably still enabled for this workspace token — confirm.
    with Connection() as cn:
        token, channel = cn.s.query(SlackChannel.token, SlackChannel.channel).filter(
            SlackChannel.id == channel_id).one()
        # Map Slack user ids to display names, used for both authors and mentions.
        user_dict = {user.user: user.other_name for user in cn.s.query(SlackUser).all()}
        with urllib.request.urlopen(
                f'https://slack.com/api/channels.history?token={token}&channel={channel}') as res:
            json_dict = json.load(res)
            print(json_dict)
        # Oldest first, so the returned list reads chronologically.
        messages = sorted(json_dict['messages'], key=lambda x: x.get('ts', ''))
        # client_msg_ids already recorded by a previous call.
        client_msg_id_list = [
            id_ for id_, in cn.s.query(SlackMessage.client_msg_id).filter(
                SlackMessage.client_msg_id.in_([message.get('client_msg_id') for message in messages])
            ).all()]
        message_list = []
        insert_msg_id_list = []
        for message in messages:
            # Skip bot/system events that lack a user, text or client_msg_id.
            if not (message.get('user') and message.get('text') and message.get('client_msg_id')):
                continue
            if message.get('client_msg_id') in client_msg_id_list:
                continue
            time_stamp = message.get('ts', '')
            if time_stamp:
                time_stamp = datetime.fromtimestamp(float(time_stamp), jst).strftime('%m/%d %H:%M:%S')
            text = message['text']
            # Replace raw user-id mentions in the body with readable names.
            for user, name in user_dict.items():
                text = text.replace(user, name)
            # Assumes the author's id exists in SlackUser — KeyError otherwise; verify.
            message_list.append(user_dict[message['user']] + ':[' + time_stamp + '] ' + text)
            insert_msg_id_list.append({'client_msg_id': message['client_msg_id']})
        # Persist the new ids so the next call only returns newer messages.
        cn.s.bulk_insert_mappings(SlackMessage, insert_msg_id_list)
        cn.s.commit()
        return message_list
|
[
0,
1,
2,
3,
4
] |
2,531 |
fe63d9b0939bc91d2da14e4d966b33575eab5394
|
<mask token>
def v_measure(cluster_labels, true_labels):
h_score = homogeneity_score(true_labels, cluster_labels)
c_score = completeness_score(true_labels, cluster_labels)
v_score = v_measure_score(true_labels, cluster_labels)
print('Homogeneity Score: %.6f' % h_score)
print('Completeness Score: %.6f' % c_score)
print('V Measure Score: %.6f' % v_score)
return h_score, c_score, v_score
def silhouette_analysis(X, cluster_labels, n_clusters, figname):
plt.xlim([-0.1, 1])
plt.ylim([0, len(X) + (n_clusters + 1) * 10])
silhouette_avg = silhouette_score(X, cluster_labels)
print('For n_clusters =', n_clusters,
'The average silhouette_score is :', silhouette_avg)
sample_silhouette_values = silhouette_samples(X, cluster_labels)
y_lower = 10
for i in range(n_clusters):
ith_cluster_silhouette_values = sample_silhouette_values[
cluster_labels == i]
ith_cluster_silhouette_values.sort()
size_cluster_i = ith_cluster_silhouette_values.shape[0]
y_upper = y_lower + size_cluster_i
color = cm.nipy_spectral(float(i) / n_clusters)
plt.fill_betweenx(np.arange(y_lower, y_upper), 0,
ith_cluster_silhouette_values, facecolor=color, edgecolor=color,
alpha=0.7)
plt.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
y_lower = y_upper + 10
plt.title('The silhouette plot for the various clusters.')
plt.xlabel('The silhouette coefficient values')
plt.ylabel('Cluster label')
plt.axvline(x=silhouette_avg, color='red', linestyle='--')
plt.yticks([])
plt.xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
plt.savefig(figname, format='png')
plt.clf()
def visualize_cluster(X, cluster_labels, n_clusters, centers, figname):
if X.shape[1] < 2:
print('Invalid shape for X: ', X.shape)
return
colors = cm.nipy_spectral(cluster_labels.astype(float) / n_clusters)
plt.scatter(X[:, 0], X[:, 1], marker='.', s=30, lw=0, alpha=0.7, c=
colors, edgecolor='k')
if len(centers) == n_clusters:
plt.scatter(centers[:, 0], centers[:, 1], marker='o', c='white',
alpha=1, s=200, edgecolor='k')
for i, c in enumerate(centers):
plt.scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=50,
edgecolor='k')
plt.title('The visualization of the clustered data.')
plt.xlabel('Feature space for the 1st feature')
plt.ylabel('Feature space for the 2nd feature')
plt.savefig(figname, format='png')
plt.clf()
def plot_gallery(title, images, figname, n_col=3, n_row=2, shape=(28, 28),
cmap=plt.cm.gray):
plt.figure(figsize=(2.0 * n_col, 2.26 * n_row))
plt.suptitle(title, size=16)
for i, comp in enumerate(images):
plt.subplot(n_row, n_col, i + 1)
vmax = max(comp.max(), -comp.min())
plt.imshow(comp.reshape(shape), cmap=cmap, interpolation='nearest',
vmin=-vmax, vmax=vmax)
plt.xticks(())
plt.yticks(())
plt.savefig(figname, format='png')
plt.clf()
def create_path(*arg, filename=None):
path = os.getcwd()
for directory in arg:
path = os.path.join(path, directory)
if not os.path.exists(path):
print("%s doesn't exist, creating..." % path)
os.mkdir(path)
if filename:
path = os.path.join(path, filename)
return path
def load_data(data_path, split_prop=0.2, is_shuffle=False, is_split=True):
pos_X, neg_X = [], []
with open(data_path, 'r') as f:
for line in f:
instance = list(map(float, line.strip().split(',')))
if instance[-1] == 1.0:
pos_X.append(instance[:-1])
else:
neg_X.append(instance[:-1])
if not is_split:
X, y = np.array(pos_X + neg_X), np.array([1] * len(pos_X) + [0] *
len(neg_X))
if is_shuffle:
indices = list(range(X.shape[0]))
shuffle(indices)
X, y = X[indices], y[indices]
return X, y, [], []
pos_test_size, neg_test_size = int(split_prop * len(pos_X)), int(
split_prop * len(neg_X))
pos_train_size, neg_train_size = len(pos_X) - pos_test_size, len(neg_X
) - neg_test_size
X_test, y_test = pos_X[:pos_test_size] + neg_X[:neg_test_size], [1
] * pos_test_size + [0] * neg_test_size
X_train, y_train = pos_X[pos_test_size:] + neg_X[neg_test_size:], [1
] * pos_train_size + [0] * neg_train_size
assert len(X_train) == len(y_train) and len(X_test) == len(y_test
), 'Dimention of X and y must be the same.'
X_train, X_test, y_train, y_test = np.array(X_train), np.array(X_test
), np.array(y_train), np.array(y_test)
if is_shuffle:
train_indices = list(range(X_train.shape[0]))
shuffle(train_indices)
test_indices = list(range(X_test.shape[0]))
shuffle(test_indices)
X_train, X_test, y_train, y_test = X_train[train_indices], X_test[
test_indices], y_train[train_indices], y_test[test_indices]
return X_train, X_test, y_train, y_test
def dump_data():
pass
<mask token>
def plot_learning_curve(train_scores_mean, train_scores_std,
val_scores_mean, val_scores_std, train_sizes, ylim=None, title='test',
fig_path='fig', format='png'):
plt.figure()
plt.title(title)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel('Training examples')
plt.ylabel('Score')
plt.grid(True, linestyle='-.', color='0.3')
plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1, color='r')
plt.fill_between(train_sizes, val_scores_mean - val_scores_std,
val_scores_mean + val_scores_std, alpha=0.1, color='g')
plt.plot(train_sizes, train_scores_mean, 'o-', color='r', label=
'Training score')
plt.plot(train_sizes, val_scores_mean, 'o-', color='g', label=
'Cross-validation score')
plt.legend(loc='best')
plt.savefig(fig_path + '/' + title + '.' + format, format=format)
plt.clf()
def plot_and_save(x, ys, labels, title, x_axis, y_axis, axis_range='auto',
ylim=None, fig_path='fig', format='png'):
if axis_range is None:
plt.axis([x[0], x[-1], 0, 1])
elif type(axis_range) == type(list()):
plt.axis(axis_range)
elif axis_range == 'auto':
pass
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel(x_axis)
plt.ylabel(y_axis)
plt.title(title)
lines = []
for y in ys:
l, = plt.plot(x, y)
lines.append(l)
if len(labels) == len(ys):
plt.legend(lines, labels, loc='best')
plt.grid(True, linestyle='-.', color='0.3')
plt.savefig(fig_path + '.' + format, format=format)
plt.clf()
def print_score(scores, scoring, train=False):
if type(scoring) != type([]):
if train:
print('Train: %0.2f (+/- %0.2f)' % (np.mean(scores[
'train_score']), np.std(scores['train_score']) * 2))
print('Cross validation: %0.2f (+/- %0.2f)' % (np.mean(scores[
'test_score']), np.std(scores['test_score']) * 2))
return
for s_method in scoring:
if train:
print('Train: %0.2f (+/- %0.2f)' % (np.mean(scores['train_' +
s_method]), np.std(scores['train_' + s_method]) * 2))
print('Cross validation: %0.2f (+/- %0.2f)' % (np.mean(scores[
'test_' + s_method]), np.std(scores['test_' + s_method]) * 2))
|
<mask token>
def v_measure(cluster_labels, true_labels):
h_score = homogeneity_score(true_labels, cluster_labels)
c_score = completeness_score(true_labels, cluster_labels)
v_score = v_measure_score(true_labels, cluster_labels)
print('Homogeneity Score: %.6f' % h_score)
print('Completeness Score: %.6f' % c_score)
print('V Measure Score: %.6f' % v_score)
return h_score, c_score, v_score
def silhouette_analysis(X, cluster_labels, n_clusters, figname):
plt.xlim([-0.1, 1])
plt.ylim([0, len(X) + (n_clusters + 1) * 10])
silhouette_avg = silhouette_score(X, cluster_labels)
print('For n_clusters =', n_clusters,
'The average silhouette_score is :', silhouette_avg)
sample_silhouette_values = silhouette_samples(X, cluster_labels)
y_lower = 10
for i in range(n_clusters):
ith_cluster_silhouette_values = sample_silhouette_values[
cluster_labels == i]
ith_cluster_silhouette_values.sort()
size_cluster_i = ith_cluster_silhouette_values.shape[0]
y_upper = y_lower + size_cluster_i
color = cm.nipy_spectral(float(i) / n_clusters)
plt.fill_betweenx(np.arange(y_lower, y_upper), 0,
ith_cluster_silhouette_values, facecolor=color, edgecolor=color,
alpha=0.7)
plt.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
y_lower = y_upper + 10
plt.title('The silhouette plot for the various clusters.')
plt.xlabel('The silhouette coefficient values')
plt.ylabel('Cluster label')
plt.axvline(x=silhouette_avg, color='red', linestyle='--')
plt.yticks([])
plt.xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
plt.savefig(figname, format='png')
plt.clf()
def visualize_cluster(X, cluster_labels, n_clusters, centers, figname):
if X.shape[1] < 2:
print('Invalid shape for X: ', X.shape)
return
colors = cm.nipy_spectral(cluster_labels.astype(float) / n_clusters)
plt.scatter(X[:, 0], X[:, 1], marker='.', s=30, lw=0, alpha=0.7, c=
colors, edgecolor='k')
if len(centers) == n_clusters:
plt.scatter(centers[:, 0], centers[:, 1], marker='o', c='white',
alpha=1, s=200, edgecolor='k')
for i, c in enumerate(centers):
plt.scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=50,
edgecolor='k')
plt.title('The visualization of the clustered data.')
plt.xlabel('Feature space for the 1st feature')
plt.ylabel('Feature space for the 2nd feature')
plt.savefig(figname, format='png')
plt.clf()
def plot_gallery(title, images, figname, n_col=3, n_row=2, shape=(28, 28),
cmap=plt.cm.gray):
plt.figure(figsize=(2.0 * n_col, 2.26 * n_row))
plt.suptitle(title, size=16)
for i, comp in enumerate(images):
plt.subplot(n_row, n_col, i + 1)
vmax = max(comp.max(), -comp.min())
plt.imshow(comp.reshape(shape), cmap=cmap, interpolation='nearest',
vmin=-vmax, vmax=vmax)
plt.xticks(())
plt.yticks(())
plt.savefig(figname, format='png')
plt.clf()
def create_path(*arg, filename=None):
path = os.getcwd()
for directory in arg:
path = os.path.join(path, directory)
if not os.path.exists(path):
print("%s doesn't exist, creating..." % path)
os.mkdir(path)
if filename:
path = os.path.join(path, filename)
return path
def load_data(data_path, split_prop=0.2, is_shuffle=False, is_split=True):
pos_X, neg_X = [], []
with open(data_path, 'r') as f:
for line in f:
instance = list(map(float, line.strip().split(',')))
if instance[-1] == 1.0:
pos_X.append(instance[:-1])
else:
neg_X.append(instance[:-1])
if not is_split:
X, y = np.array(pos_X + neg_X), np.array([1] * len(pos_X) + [0] *
len(neg_X))
if is_shuffle:
indices = list(range(X.shape[0]))
shuffle(indices)
X, y = X[indices], y[indices]
return X, y, [], []
pos_test_size, neg_test_size = int(split_prop * len(pos_X)), int(
split_prop * len(neg_X))
pos_train_size, neg_train_size = len(pos_X) - pos_test_size, len(neg_X
) - neg_test_size
X_test, y_test = pos_X[:pos_test_size] + neg_X[:neg_test_size], [1
] * pos_test_size + [0] * neg_test_size
X_train, y_train = pos_X[pos_test_size:] + neg_X[neg_test_size:], [1
] * pos_train_size + [0] * neg_train_size
assert len(X_train) == len(y_train) and len(X_test) == len(y_test
), 'Dimention of X and y must be the same.'
X_train, X_test, y_train, y_test = np.array(X_train), np.array(X_test
), np.array(y_train), np.array(y_test)
if is_shuffle:
train_indices = list(range(X_train.shape[0]))
shuffle(train_indices)
test_indices = list(range(X_test.shape[0]))
shuffle(test_indices)
X_train, X_test, y_train, y_test = X_train[train_indices], X_test[
test_indices], y_train[train_indices], y_test[test_indices]
return X_train, X_test, y_train, y_test
def dump_data():
pass
def analyze_data(data_path, threshold=50):
data = []
with open(data_path, 'r') as f:
for line in f:
instance = list(map(float, line.strip().split(',')))
data.append(instance)
count = [0] * len(data[0])
for instance in data:
for i in range(len(instance)):
if instance[i] != 0.0:
count[i] += 1
total = 0
for c in count:
if c >= threshold:
total += 1
return count, total
def plot_learning_curve(train_scores_mean, train_scores_std,
val_scores_mean, val_scores_std, train_sizes, ylim=None, title='test',
fig_path='fig', format='png'):
plt.figure()
plt.title(title)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel('Training examples')
plt.ylabel('Score')
plt.grid(True, linestyle='-.', color='0.3')
plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1, color='r')
plt.fill_between(train_sizes, val_scores_mean - val_scores_std,
val_scores_mean + val_scores_std, alpha=0.1, color='g')
plt.plot(train_sizes, train_scores_mean, 'o-', color='r', label=
'Training score')
plt.plot(train_sizes, val_scores_mean, 'o-', color='g', label=
'Cross-validation score')
plt.legend(loc='best')
plt.savefig(fig_path + '/' + title + '.' + format, format=format)
plt.clf()
def plot_and_save(x, ys, labels, title, x_axis, y_axis, axis_range='auto',
ylim=None, fig_path='fig', format='png'):
if axis_range is None:
plt.axis([x[0], x[-1], 0, 1])
elif type(axis_range) == type(list()):
plt.axis(axis_range)
elif axis_range == 'auto':
pass
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel(x_axis)
plt.ylabel(y_axis)
plt.title(title)
lines = []
for y in ys:
l, = plt.plot(x, y)
lines.append(l)
if len(labels) == len(ys):
plt.legend(lines, labels, loc='best')
plt.grid(True, linestyle='-.', color='0.3')
plt.savefig(fig_path + '.' + format, format=format)
plt.clf()
def print_score(scores, scoring, train=False):
if type(scoring) != type([]):
if train:
print('Train: %0.2f (+/- %0.2f)' % (np.mean(scores[
'train_score']), np.std(scores['train_score']) * 2))
print('Cross validation: %0.2f (+/- %0.2f)' % (np.mean(scores[
'test_score']), np.std(scores['test_score']) * 2))
return
for s_method in scoring:
if train:
print('Train: %0.2f (+/- %0.2f)' % (np.mean(scores['train_' +
s_method]), np.std(scores['train_' + s_method]) * 2))
print('Cross validation: %0.2f (+/- %0.2f)' % (np.mean(scores[
'test_' + s_method]), np.std(scores['test_' + s_method]) * 2))
|
<mask token>
matplotlib.use('Agg')
<mask token>
warnings.simplefilter('ignore')
def v_measure(cluster_labels, true_labels):
    """Print and return the homogeneity, completeness and V-measure of a clustering.

    Returns the three scores as a (homogeneity, completeness, v_measure) tuple.
    """
    scores = (
        homogeneity_score(true_labels, cluster_labels),
        completeness_score(true_labels, cluster_labels),
        v_measure_score(true_labels, cluster_labels),
    )
    for metric, value in zip(('Homogeneity', 'Completeness', 'V Measure'), scores):
        print('%s Score: %.6f' % (metric, value))
    return scores
def silhouette_analysis(X, cluster_labels, n_clusters, figname):
    """Draw a silhouette plot for a clustering of X and save it to figname (PNG).

    One horizontal band is drawn per cluster showing the sorted silhouette
    coefficients of its samples; a red dashed line marks the average
    silhouette score over all samples.
    """
    plt.xlim([-0.1, 1])
    # Reserve a 10-unit vertical gap between consecutive cluster bands.
    plt.ylim([0, len(X) + (n_clusters + 1) * 10])
    silhouette_avg = silhouette_score(X, cluster_labels)
    print('For n_clusters =', n_clusters,
        'The average silhouette_score is :', silhouette_avg)
    sample_silhouette_values = silhouette_samples(X, cluster_labels)
    y_lower = 10
    for i in range(n_clusters):
        # Silhouette values of the samples assigned to cluster i, sorted ascending.
        ith_cluster_silhouette_values = sample_silhouette_values[
            cluster_labels == i]
        ith_cluster_silhouette_values.sort()
        size_cluster_i = ith_cluster_silhouette_values.shape[0]
        y_upper = y_lower + size_cluster_i
        color = cm.nipy_spectral(float(i) / n_clusters)
        plt.fill_betweenx(np.arange(y_lower, y_upper), 0,
            ith_cluster_silhouette_values, facecolor=color, edgecolor=color,
            alpha=0.7)
        # Label the band with its cluster number at the vertical midpoint.
        plt.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
        y_lower = y_upper + 10
    plt.title('The silhouette plot for the various clusters.')
    plt.xlabel('The silhouette coefficient values')
    plt.ylabel('Cluster label')
    # Vertical reference line at the dataset-wide average silhouette score.
    plt.axvline(x=silhouette_avg, color='red', linestyle='--')
    plt.yticks([])
    plt.xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
    plt.savefig(figname, format='png')
    plt.clf()
def visualize_cluster(X, cluster_labels, n_clusters, centers, figname):
    """Scatter-plot the first two features of X coloured by cluster; save to figname.

    If *centers* has exactly one entry per cluster, each centre is drawn as
    a white circle annotated with its cluster index. X needs >= 2 columns;
    otherwise the function prints a message and returns without plotting.
    """
    if X.shape[1] < 2:
        print('Invalid shape for X: ', X.shape)
        return
    colors = cm.nipy_spectral(cluster_labels.astype(float) / n_clusters)
    plt.scatter(X[:, 0], X[:, 1], marker='.', s=30, lw=0, alpha=0.7, c=
        colors, edgecolor='k')
    if len(centers) == n_clusters:
        # Draw cluster centres as large white dots, then number them.
        plt.scatter(centers[:, 0], centers[:, 1], marker='o', c='white',
            alpha=1, s=200, edgecolor='k')
        for i, c in enumerate(centers):
            plt.scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=50,
                edgecolor='k')
    plt.title('The visualization of the clustered data.')
    plt.xlabel('Feature space for the 1st feature')
    plt.ylabel('Feature space for the 2nd feature')
    plt.savefig(figname, format='png')
    plt.clf()
def plot_gallery(title, images, figname, n_col=3, n_row=2, shape=(28, 28),
    cmap=plt.cm.gray):
    """Render the entries of *images* in an n_row x n_col grid; save to figname (PNG).

    Each entry is a flat array reshaped to *shape*. Intensities use a
    symmetric colour scale centred on zero so positive and negative
    components compare fairly.
    """
    plt.figure(figsize=(2.0 * n_col, 2.26 * n_row))
    plt.suptitle(title, size=16)
    for i, comp in enumerate(images):
        plt.subplot(n_row, n_col, i + 1)
        # Symmetric limits: same magnitude on both ends of the colour map.
        vmax = max(comp.max(), -comp.min())
        plt.imshow(comp.reshape(shape), cmap=cmap, interpolation='nearest',
            vmin=-vmax, vmax=vmax)
        plt.xticks(())
        plt.yticks(())
    plt.savefig(figname, format='png')
    plt.clf()
def create_path(*arg, filename=None):
    """Build a path under the current working directory, creating directories as needed.

    Each positional argument adds one directory level; any level that does
    not yet exist is created (with a progress message). If *filename* is
    given it is joined onto the end but never created. Returns the
    resulting path as a string.
    """
    current = os.getcwd()
    for segment in arg:
        current = os.path.join(current, segment)
        if os.path.exists(current):
            continue
        print("%s doesn't exist, creating..." % current)
        os.mkdir(current)
    return os.path.join(current, filename) if filename else current
def load_data(data_path, split_prop=0.2, is_shuffle=False, is_split=True):
    """Load a comma-separated dataset whose last column is a binary label.

    Rows with label 1.0 are positives, all others negatives. When
    *is_split* is true, the first *split_prop* fraction of each class
    becomes the test set and the rest the training set, returned as
    numpy arrays (X_train, X_test, y_train, y_test). When false, all
    data is returned as (X, y, [], []). *is_shuffle* randomly reorders
    each returned partition.
    """
    pos_X, neg_X = [], []
    with open(data_path, 'r') as f:
        for line in f:
            row = [float(v) for v in line.strip().split(',')]
            # Last column is the label; keep only the feature columns.
            (pos_X if row[-1] == 1.0 else neg_X).append(row[:-1])
    if not is_split:
        X = np.array(pos_X + neg_X)
        y = np.array([1] * len(pos_X) + [0] * len(neg_X))
        if is_shuffle:
            order = list(range(X.shape[0]))
            shuffle(order)
            X, y = X[order], y[order]
        return X, y, [], []
    # Per-class split keeps the class balance of train and test sets.
    n_pos_test = int(split_prop * len(pos_X))
    n_neg_test = int(split_prop * len(neg_X))
    X_test = np.array(pos_X[:n_pos_test] + neg_X[:n_neg_test])
    y_test = np.array([1] * n_pos_test + [0] * n_neg_test)
    X_train = np.array(pos_X[n_pos_test:] + neg_X[n_neg_test:])
    y_train = np.array([1] * (len(pos_X) - n_pos_test) + [0] * (len(neg_X) - n_neg_test))
    assert len(X_train) == len(y_train) and len(X_test) == len(y_test), 'Dimention of X and y must be the same.'
    if is_shuffle:
        # Same RNG call order as before: train indices first, then test.
        train_order = list(range(X_train.shape[0]))
        shuffle(train_order)
        test_order = list(range(X_test.shape[0]))
        shuffle(test_order)
        X_train, y_train = X_train[train_order], y_train[train_order]
        X_test, y_test = X_test[test_order], y_test[test_order]
    return X_train, X_test, y_train, y_test
def dump_data():
    # Placeholder: persisting datasets is not implemented yet.
    pass
def analyze_data(data_path, threshold=50):
    """Count, per column, how many rows of a CSV file have a non-zero value.

    Returns (count, total) where count[i] is the number of rows whose
    i-th value is non-zero and total is the number of columns whose
    count reaches *threshold*. The column count is taken from the first
    row (all columns, including any trailing label column).
    """
    with open(data_path, 'r') as f:
        rows = [[float(v) for v in line.strip().split(',')] for line in f]
    count = [0] * len(rows[0])
    for row in rows:
        for i, value in enumerate(row):
            if value != 0.0:
                count[i] += 1
    total = sum(1 for c in count if c >= threshold)
    return count, total
def plot_learning_curve(train_scores_mean, train_scores_std,
    val_scores_mean, val_scores_std, train_sizes, ylim=None, title='test',
    fig_path='fig', format='png'):
    """Plot training vs cross-validation score curves over training-set size.

    Each curve gets a shaded band of one standard deviation around its
    mean. The figure is written to <fig_path>/<title>.<format>.
    """
    plt.figure()
    plt.title(title)
    if ylim is not None:
        plt.ylim(*ylim)
    plt.xlabel('Training examples')
    plt.ylabel('Score')
    plt.grid(True, linestyle='-.', color='0.3')
    # Shaded bands: +/- one standard deviation around each mean curve.
    plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
        train_scores_mean + train_scores_std, alpha=0.1, color='r')
    plt.fill_between(train_sizes, val_scores_mean - val_scores_std,
        val_scores_mean + val_scores_std, alpha=0.1, color='g')
    plt.plot(train_sizes, train_scores_mean, 'o-', color='r', label=
        'Training score')
    plt.plot(train_sizes, val_scores_mean, 'o-', color='g', label=
        'Cross-validation score')
    plt.legend(loc='best')
    plt.savefig(fig_path + '/' + title + '.' + format, format=format)
    plt.clf()
def plot_and_save(x, ys, labels, title, x_axis, y_axis, axis_range='auto',
    ylim=None, fig_path='fig', format='png'):
    """Plot one or more series in *ys* against *x* and save to <fig_path>.<format>.

    axis_range: None   -> x spans the data, y fixed to [0, 1];
                a list -> passed straight to plt.axis();
                'auto' -> keep matplotlib's automatic limits.
    A legend is drawn only when *labels* matches *ys* in length.
    """
    if axis_range is None:
        plt.axis([x[0], x[-1], 0, 1])
    elif isinstance(axis_range, list):  # idiomatic type check (was type(...) == type(list()))
        plt.axis(axis_range)
    elif axis_range == 'auto':
        pass
    if ylim is not None:
        plt.ylim(*ylim)
    plt.xlabel(x_axis)
    plt.ylabel(y_axis)
    plt.title(title)
    lines = []
    for y in ys:
        # plt.plot returns a list of Line2D; unpack the single line for the legend.
        l, = plt.plot(x, y)
        lines.append(l)
    if len(labels) == len(ys):
        plt.legend(lines, labels, loc='best')
    plt.grid(True, linestyle='-.', color='0.3')
    plt.savefig(fig_path + '.' + format, format=format)
    plt.clf()
def print_score(scores, scoring, train=False):
    """Print mean +/- 2*std summaries of cross_validate()-style score arrays.

    scoring: a single scorer (non-list) selects the default 'train_score'
    and 'test_score' keys; a list of scorer names selects the
    'train_<name>' / 'test_<name>' keys for each entry. When *train* is
    true the training scores are printed alongside the cross-validation
    scores.
    """
    def _report(train_key, test_key):
        # Shared "mean (+/- 2*std)" formatting for one pair of score arrays.
        if train:
            print('Train: %0.2f (+/- %0.2f)' % (np.mean(scores[train_key]),
                np.std(scores[train_key]) * 2))
        print('Cross validation: %0.2f (+/- %0.2f)' % (np.mean(scores[test_key]),
            np.std(scores[test_key]) * 2))
    # isinstance replaces the non-idiomatic type(scoring) != type([]) check.
    if not isinstance(scoring, list):
        _report('train_score', 'test_score')
        return
    for s_method in scoring:
        _report('train_' + s_method, 'test_' + s_method)
|
from sklearn.model_selection import train_test_split
from sklearn.metrics import silhouette_samples, silhouette_score
from sklearn.metrics.cluster import homogeneity_score, completeness_score, v_measure_score
from sklearn import datasets
from random import shuffle
import os
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import warnings
warnings.simplefilter('ignore')
def v_measure(cluster_labels, true_labels):
h_score = homogeneity_score(true_labels, cluster_labels)
c_score = completeness_score(true_labels, cluster_labels)
v_score = v_measure_score(true_labels, cluster_labels)
print('Homogeneity Score: %.6f' % h_score)
print('Completeness Score: %.6f' % c_score)
print('V Measure Score: %.6f' % v_score)
return h_score, c_score, v_score
def silhouette_analysis(X, cluster_labels, n_clusters, figname):
plt.xlim([-0.1, 1])
plt.ylim([0, len(X) + (n_clusters + 1) * 10])
silhouette_avg = silhouette_score(X, cluster_labels)
print('For n_clusters =', n_clusters,
'The average silhouette_score is :', silhouette_avg)
sample_silhouette_values = silhouette_samples(X, cluster_labels)
y_lower = 10
for i in range(n_clusters):
ith_cluster_silhouette_values = sample_silhouette_values[
cluster_labels == i]
ith_cluster_silhouette_values.sort()
size_cluster_i = ith_cluster_silhouette_values.shape[0]
y_upper = y_lower + size_cluster_i
color = cm.nipy_spectral(float(i) / n_clusters)
plt.fill_betweenx(np.arange(y_lower, y_upper), 0,
ith_cluster_silhouette_values, facecolor=color, edgecolor=color,
alpha=0.7)
plt.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
y_lower = y_upper + 10
plt.title('The silhouette plot for the various clusters.')
plt.xlabel('The silhouette coefficient values')
plt.ylabel('Cluster label')
plt.axvline(x=silhouette_avg, color='red', linestyle='--')
plt.yticks([])
plt.xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
plt.savefig(figname, format='png')
plt.clf()
def visualize_cluster(X, cluster_labels, n_clusters, centers, figname):
if X.shape[1] < 2:
print('Invalid shape for X: ', X.shape)
return
colors = cm.nipy_spectral(cluster_labels.astype(float) / n_clusters)
plt.scatter(X[:, 0], X[:, 1], marker='.', s=30, lw=0, alpha=0.7, c=
colors, edgecolor='k')
if len(centers) == n_clusters:
plt.scatter(centers[:, 0], centers[:, 1], marker='o', c='white',
alpha=1, s=200, edgecolor='k')
for i, c in enumerate(centers):
plt.scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=50,
edgecolor='k')
plt.title('The visualization of the clustered data.')
plt.xlabel('Feature space for the 1st feature')
plt.ylabel('Feature space for the 2nd feature')
plt.savefig(figname, format='png')
plt.clf()
def plot_gallery(title, images, figname, n_col=3, n_row=2, shape=(28, 28),
cmap=plt.cm.gray):
plt.figure(figsize=(2.0 * n_col, 2.26 * n_row))
plt.suptitle(title, size=16)
for i, comp in enumerate(images):
plt.subplot(n_row, n_col, i + 1)
vmax = max(comp.max(), -comp.min())
plt.imshow(comp.reshape(shape), cmap=cmap, interpolation='nearest',
vmin=-vmax, vmax=vmax)
plt.xticks(())
plt.yticks(())
plt.savefig(figname, format='png')
plt.clf()
def create_path(*arg, filename=None):
path = os.getcwd()
for directory in arg:
path = os.path.join(path, directory)
if not os.path.exists(path):
print("%s doesn't exist, creating..." % path)
os.mkdir(path)
if filename:
path = os.path.join(path, filename)
return path
def load_data(data_path, split_prop=0.2, is_shuffle=False, is_split=True):
pos_X, neg_X = [], []
with open(data_path, 'r') as f:
for line in f:
instance = list(map(float, line.strip().split(',')))
if instance[-1] == 1.0:
pos_X.append(instance[:-1])
else:
neg_X.append(instance[:-1])
if not is_split:
X, y = np.array(pos_X + neg_X), np.array([1] * len(pos_X) + [0] *
len(neg_X))
if is_shuffle:
indices = list(range(X.shape[0]))
shuffle(indices)
X, y = X[indices], y[indices]
return X, y, [], []
pos_test_size, neg_test_size = int(split_prop * len(pos_X)), int(
split_prop * len(neg_X))
pos_train_size, neg_train_size = len(pos_X) - pos_test_size, len(neg_X
) - neg_test_size
X_test, y_test = pos_X[:pos_test_size] + neg_X[:neg_test_size], [1
] * pos_test_size + [0] * neg_test_size
X_train, y_train = pos_X[pos_test_size:] + neg_X[neg_test_size:], [1
] * pos_train_size + [0] * neg_train_size
assert len(X_train) == len(y_train) and len(X_test) == len(y_test
), 'Dimention of X and y must be the same.'
X_train, X_test, y_train, y_test = np.array(X_train), np.array(X_test
), np.array(y_train), np.array(y_test)
if is_shuffle:
train_indices = list(range(X_train.shape[0]))
shuffle(train_indices)
test_indices = list(range(X_test.shape[0]))
shuffle(test_indices)
X_train, X_test, y_train, y_test = X_train[train_indices], X_test[
test_indices], y_train[train_indices], y_test[test_indices]
return X_train, X_test, y_train, y_test
def dump_data():
pass
def analyze_data(data_path, threshold=50):
data = []
with open(data_path, 'r') as f:
for line in f:
instance = list(map(float, line.strip().split(',')))
data.append(instance)
count = [0] * len(data[0])
for instance in data:
for i in range(len(instance)):
if instance[i] != 0.0:
count[i] += 1
total = 0
for c in count:
if c >= threshold:
total += 1
return count, total
def plot_learning_curve(train_scores_mean, train_scores_std,
val_scores_mean, val_scores_std, train_sizes, ylim=None, title='test',
fig_path='fig', format='png'):
plt.figure()
plt.title(title)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel('Training examples')
plt.ylabel('Score')
plt.grid(True, linestyle='-.', color='0.3')
plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1, color='r')
plt.fill_between(train_sizes, val_scores_mean - val_scores_std,
val_scores_mean + val_scores_std, alpha=0.1, color='g')
plt.plot(train_sizes, train_scores_mean, 'o-', color='r', label=
'Training score')
plt.plot(train_sizes, val_scores_mean, 'o-', color='g', label=
'Cross-validation score')
plt.legend(loc='best')
plt.savefig(fig_path + '/' + title + '.' + format, format=format)
plt.clf()
def plot_and_save(x, ys, labels, title, x_axis, y_axis, axis_range='auto',
ylim=None, fig_path='fig', format='png'):
if axis_range is None:
plt.axis([x[0], x[-1], 0, 1])
elif type(axis_range) == type(list()):
plt.axis(axis_range)
elif axis_range == 'auto':
pass
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel(x_axis)
plt.ylabel(y_axis)
plt.title(title)
lines = []
for y in ys:
l, = plt.plot(x, y)
lines.append(l)
if len(labels) == len(ys):
plt.legend(lines, labels, loc='best')
plt.grid(True, linestyle='-.', color='0.3')
plt.savefig(fig_path + '.' + format, format=format)
plt.clf()
def print_score(scores, scoring, train=False):
if type(scoring) != type([]):
if train:
print('Train: %0.2f (+/- %0.2f)' % (np.mean(scores[
'train_score']), np.std(scores['train_score']) * 2))
print('Cross validation: %0.2f (+/- %0.2f)' % (np.mean(scores[
'test_score']), np.std(scores['test_score']) * 2))
return
for s_method in scoring:
if train:
print('Train: %0.2f (+/- %0.2f)' % (np.mean(scores['train_' +
s_method]), np.std(scores['train_' + s_method]) * 2))
print('Cross validation: %0.2f (+/- %0.2f)' % (np.mean(scores[
'test_' + s_method]), np.std(scores['test_' + s_method]) * 2))
|
from sklearn.model_selection import train_test_split
from sklearn.metrics import silhouette_samples, silhouette_score
from sklearn.metrics.cluster import homogeneity_score, completeness_score, v_measure_score
from sklearn import datasets
from random import shuffle
import os
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import warnings
warnings.simplefilter("ignore")
def v_measure(cluster_labels, true_labels):
h_score = homogeneity_score(true_labels, cluster_labels)
c_score = completeness_score(true_labels, cluster_labels)
v_score = v_measure_score(true_labels, cluster_labels)
print("Homogeneity Score: %.6f" % h_score)
print("Completeness Score: %.6f" % c_score)
print("V Measure Score: %.6f" % v_score)
return h_score, c_score, v_score
def silhouette_analysis(X, cluster_labels, n_clusters, figname):
plt.xlim([-0.1, 1])
plt.ylim([0, len(X) + (n_clusters + 1) * 10])
silhouette_avg = silhouette_score(X, cluster_labels)
print("For n_clusters =", n_clusters,
"The average silhouette_score is :", silhouette_avg)
# Compute the silhouette scores for each sample
sample_silhouette_values = silhouette_samples(X, cluster_labels)
y_lower = 10
for i in range(n_clusters):
# Aggregate the silhouette scores for samples belonging to
# cluster i, and sort them
ith_cluster_silhouette_values = sample_silhouette_values[cluster_labels == i]
ith_cluster_silhouette_values.sort()
size_cluster_i = ith_cluster_silhouette_values.shape[0]
y_upper = y_lower + size_cluster_i
color = cm.nipy_spectral(float(i) / n_clusters)
plt.fill_betweenx(np.arange(y_lower, y_upper),
0, ith_cluster_silhouette_values,
facecolor=color, edgecolor=color, alpha=0.7)
# Label the silhouette plots with their cluster numbers at the middle
plt.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
# Compute the new y_lower for next plot
y_lower = y_upper + 10 # 10 for the 0 samples
plt.title("The silhouette plot for the various clusters.")
plt.xlabel("The silhouette coefficient values")
plt.ylabel("Cluster label")
# The vertical line for average silhouette score of all the values
plt.axvline(x=silhouette_avg, color="red", linestyle="--")
plt.yticks([]) # Clear the yaxis labels / ticks
plt.xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
plt.savefig(figname, format='png')
plt.clf()
def visualize_cluster(X, cluster_labels, n_clusters, centers, figname):
if X.shape[1] < 2:
print ("Invalid shape for X: ", X.shape)
return
colors = cm.nipy_spectral(cluster_labels.astype(float) / n_clusters)
plt.scatter(X[:, 0], X[:, 1], marker='.', s=30, lw=0, alpha=0.7,
c=colors, edgecolor='k')
# Draw white circles at cluster centers
if len(centers) == n_clusters:
plt.scatter(centers[:, 0], centers[:, 1], marker='o',
c="white", alpha=1, s=200, edgecolor='k')
for i, c in enumerate(centers):
plt.scatter(c[0], c[1], marker='$%d$' % i, alpha=1,
s=50, edgecolor='k')
plt.title("The visualization of the clustered data.")
plt.xlabel("Feature space for the 1st feature")
plt.ylabel("Feature space for the 2nd feature")
plt.savefig(figname, format='png')
plt.clf()
def plot_gallery(title, images, figname, n_col=3, n_row=2, shape=(28, 28), cmap=plt.cm.gray):
plt.figure(figsize=(2. * n_col, 2.26 * n_row))
plt.suptitle(title, size=16)
for i, comp in enumerate(images):
plt.subplot(n_row, n_col, i + 1)
vmax = max(comp.max(), -comp.min())
plt.imshow(comp.reshape(shape), cmap=cmap,
interpolation='nearest',
vmin=-vmax, vmax=vmax)
plt.xticks(())
plt.yticks(())
#plt.subplots_adjust(0.01, 0.05, 0.99, 0.93, 0.04, 0.)
plt.savefig(figname, format='png')
plt.clf()
#plt.subplots_adjust()
def create_path(*arg, filename=None):
path = os.getcwd()
for directory in arg:
path = os.path.join(path, directory)
if not os.path.exists(path):
print('%s doesn\'t exist, creating...' % path)
os.mkdir(path)
if filename:
path = os.path.join(path, filename)
return path
def load_data(data_path, split_prop=0.2, is_shuffle=False, is_split=True):
pos_X, neg_X = [], []
with open(data_path, 'r') as f:
for line in f:
instance = list(map(float, line.strip().split(',')))
if instance[-1] == 1.0:
pos_X.append(instance[:-1])
else:
neg_X.append(instance[:-1])
if not is_split:
X, y = np.array(pos_X + neg_X), np.array([1] * len(pos_X) + [0] * len(neg_X))
if is_shuffle:
indices = list(range(X.shape[0]))
shuffle(indices)
X, y = X[indices], y[indices]
return X, y, [], []
pos_test_size, neg_test_size = int(split_prop * len(pos_X)), int(split_prop * len(neg_X))
pos_train_size, neg_train_size = len(pos_X) - pos_test_size, len(neg_X) - neg_test_size
X_test, y_test = pos_X[:pos_test_size] + neg_X[:neg_test_size], [1] * pos_test_size + [0] * neg_test_size
X_train, y_train = pos_X[pos_test_size:] + neg_X[neg_test_size:], [1] * pos_train_size + [0] * neg_train_size
assert len(X_train) == len(y_train) and len(X_test) == len(y_test), "Dimention of X and y must be the same."
X_train, X_test, y_train, y_test = np.array(X_train), np.array(X_test), np.array(y_train), np.array(y_test)
if is_shuffle:
train_indices = list(range(X_train.shape[0]))
shuffle(train_indices)
test_indices = list(range(X_test.shape[0]))
shuffle(test_indices)
X_train, X_test, y_train, y_test = X_train[train_indices], X_test[test_indices], y_train[train_indices], y_test[test_indices]
return X_train, X_test, y_train, y_test
def dump_data():
#Need implement
pass
def analyze_data(data_path, threshold=50):
data = []
with open(data_path, 'r') as f:
for line in f:
instance = list(map(float, line.strip().split(',')))
data.append(instance)
count = [0] * len(data[0])
for instance in data:
for i in range(len(instance)):
if instance[i] != 0.0:
count[i] += 1
total = 0
for c in count:
if c >= threshold:
total += 1
return count, total
def plot_learning_curve(train_scores_mean,
train_scores_std,
val_scores_mean,
val_scores_std,
train_sizes,
ylim=None,
title='test',
fig_path='fig',
format='png'):
plt.figure()
plt.title(title)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel("Training examples")
plt.ylabel("Score")
plt.grid(True, linestyle = "-.", color = '0.3')
plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1,
color="r")
plt.fill_between(train_sizes, val_scores_mean - val_scores_std,
val_scores_mean + val_scores_std, alpha=0.1, color="g")
plt.plot(train_sizes, train_scores_mean, 'o-', color="r",
label="Training score")
plt.plot(train_sizes, val_scores_mean, 'o-', color="g",
label="Cross-validation score")
plt.legend(loc="best")
plt.savefig(fig_path + '/' + title + '.' + format, format=format)
plt.clf()
def plot_and_save(x, ys, labels, title, x_axis, y_axis, axis_range='auto', ylim=None, fig_path='fig', format='png'):
if axis_range is None:
plt.axis([x[0], x[-1], 0, 1])
elif type(axis_range) == type(list()):
plt.axis(axis_range)
elif axis_range == 'auto':
pass
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel(x_axis)
plt.ylabel(y_axis)
plt.title(title)
lines = []
for y in ys:
l, = plt.plot(x, y)
lines.append(l)
if len(labels) == len(ys):
plt.legend(lines, labels, loc="best")
plt.grid(True, linestyle = "-.", color = '0.3')
plt.savefig(fig_path + '.' + format, format=format)
plt.clf()
def print_score(scores, scoring, train=False):
if type(scoring) != type([]):
if train:
print("Train: %0.2f (+/- %0.2f)" % (np.mean(scores['train_score']), np.std(scores['train_score']) * 2))
print("Cross validation: %0.2f (+/- %0.2f)" % (np.mean(scores['test_score']), np.std(scores['test_score']) * 2))
return
for s_method in scoring:
if train:
print("Train: %0.2f (+/- %0.2f)" % (np.mean(scores['train_' + s_method]), np.std(scores['train_' + s_method]) * 2))
print("Cross validation: %0.2f (+/- %0.2f)" % (np.mean(scores['test_' + s_method]), np.std(scores['test_' + s_method]) * 2))
|
[
10,
11,
12,
13,
14
] |
2,532 |
a7deec1693c411988445528dceb602bf69e47d21
|
<mask token>
|
<mask token>
if __name__ == '__main__':
game = tools.Game(setup.STATE_DICT, 'menu')
game.run()
|
from source import tools, setup
if __name__ == '__main__':
game = tools.Game(setup.STATE_DICT, 'menu')
game.run()
|
from source import tools, setup
if __name__ == '__main__':
game = tools.Game(setup.STATE_DICT, 'menu')
game.run()
| null |
[
0,
1,
2,
3
] |
2,533 |
8f709af924820c77290f97731d9f96258c3db095
|
<mask token>
|
<mask token>
class TaobaoSpider(Spider):
<mask token>
<mask token>
<mask token>
<mask token>
|
<mask token>
class TaobaoSpider(Spider):
<mask token>
<mask token>
<mask token>
def parse(self, response, **kwargs):
pass
|
<mask token>
class TaobaoSpider(Spider):
name = 'taobao'
allowed_domains = ['www.taobao.com']
start_urls = ['http://www.taobao.com/']
def parse(self, response, **kwargs):
pass
|
from scrapy import Spider, Request
from urllib.parse import quote
from Product.items import ProductItem
class TaobaoSpider(Spider):
name = 'taobao'
allowed_domains = ['www.taobao.com']
start_urls = ['http://www.taobao.com/']
def parse(self, response, **kwargs):
pass
|
[
0,
1,
2,
3,
4
] |
2,534 |
bb335187dc61fae049ca4a9a55a93f856b3c7822
|
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline
import os
#os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
import keras
from keras.layers import Dense, Dropout, Input, Embedding, LSTM, Reshape, CuDNNLSTM
from keras.models import Model,Sequential
from keras.datasets import mnist
from tqdm import tqdm
from keras.layers.advanced_activations import LeakyReLU
from keras.activations import relu
from keras.optimizers import adam
import numpy as np
import tensorflow as tf
import random
import pickle as pkl
import operator
import math
from sklearn import preprocessing
from keras.models import load_model
import time
from scipy.stats import norm
from scipy.io import loadmat
from natsort import natsorted
from scipy import stats
from seaborn import heatmap
import loading_data
from loading_data import load_train_vitheta_data_1225,load_real_data, load_standardized_data,load_train_data,load_train_data_V,load_train_vitheta_data_V,load_data_with_features,load_standardized_data_with_features
#%%
#%%
# =============================================================================
# =============================================================================
# # save data with V I and theta for 1225
# =============================================================================
# =============================================================================
filename='Raw_data/1225/data'
#os.listdir(filename)
#
pkl_file = open(filename, 'rb')
selected_data = pkl.load(pkl_file)
pkl_file.close()
cosin={}
# Reacive={}
# keys={}
# pf={}
cosin['TA']=np.cos((selected_data['L1ANG']-selected_data['C1ANG'])*(np.pi/180))
cosin['TB']=np.cos((selected_data['L2ANG']-selected_data['C2ANG'])*(np.pi/180))
cosin['TC']=np.cos((selected_data['L3ANG']-selected_data['C3ANG'])*(np.pi/180))
# Reacive['A']=selected_data['L1Mag']*selected_data['C1Mag']*(np.sin((selected_data['L1Ang']-selected_data['C1Ang'])*(np.pi/180)))
# Reacive['B']=selected_data['L2Mag']*selected_data['C2Mag']*(np.sin((selected_data['L2Ang']-selected_data['C2Ang'])*(np.pi/180)))
# Reacive['C']=selected_data['L3Mag']*selected_data['C3Mag']*(np.sin((selected_data['L3Ang']-selected_data['C3Ang'])*(np.pi/180)))
#
#pf['A']=Active['A']/np.sqrt(np.square(Active['A'])+np.square(Reacive['A']))
#pf['B']=Active['B']/np.sqrt(np.square(Active['B'])+np.square(Reacive['B']))
#pf['C']=Active['C']/np.sqrt(np.square(Active['C'])+np.square(Reacive['C']))
selected_data['TA']=cosin['TA']
selected_data['TB']=cosin['TB']
selected_data['TC']=cosin['TC']
k=['L1MAG','L2MAG', 'L3MAG','C1MAG','C2MAG', 'C3MAG','TA', 'TB', 'TC']
day_data={}
for key in k:
day_data[key]=selected_data[key]
dir='Raw_data/1225/VIT.pkl'
output = open(dir, 'wb')
pkl.dump(day_data, output)
output.close()
#%%
# =============================================================================
# =============================================================================
# # train data prepreation
# =============================================================================
# =============================================================================
#start,SampleNum,N=(0,40,500000)
#filename='Raw_data/1225/VIT.pkl'
#k=['L1MAG','L2MAG', 'L3MAG','C1MAG','C2MAG', 'C3MAG','TA', 'TB', 'TC']
##%%
#dds=load_standardized_data_with_features(filename,k)
##%%
#dd=load_data_with_features(filename,k)
#%%
# =============================================================================
# =============================================================================
# # real data for 1225 VIT
# =============================================================================
# =============================================================================
filename='Raw_data/1225/VIT.pkl'
pkl_file = open(filename, 'rb')
selected_data_1225_normal = pkl.load(pkl_file)
pkl_file.close()
#%%
# =============================================================================
# =============================================================================
# # data without key
# =============================================================================
# =============================================================================
selected_data_1225=[]
for f in k:
selected_data_1225.append(selected_data_1225_normal[f])
#%%
start,SampleNum,N=(0,40,500000)
filename='Raw_data/1225/VIT.pkl'
k=['L1MAG','L2MAG', 'L3MAG','C1MAG','C2MAG', 'C3MAG','TA', 'TB', 'TC']
tt=load_train_vitheta_data_1225(start,SampleNum,N,filename,k)
#%%
X_train = tt
scores={}
probability_mean={}
anomalies={}
kkk=k[0:1]
for idx,key in enumerate(kkk):
print(key)
X_train_temp=X_train[:,idx]
#X_train.reshape(N,3*SampleNum)
X_train_temp=X_train_temp.reshape(N,SampleNum,1)
id=int(np.floor(idx/3))
mode=k[id*3]
# dis_name='dis_sep_onelearn_'+mode+'.h5'
# print(dis_name)
#
# discriminator=load_model(dis_name)
rate=1000
shift=N/rate
scores[key]=[]
for i in range(rate-1):
temp=discriminator.predict_on_batch(X_train_temp[int(i*shift):int((i+1)*shift)])
scores[key].append(temp)
# print(i)
scores[key]=np.array(scores[key])
scores[key]=scores[key].ravel()
probability_mean[key]=np.mean(scores[key])
data=scores[key]-probability_mean[key]
mu, std = norm.fit(data)
zp=3
high=mu+zp*std
low=mu-zp*std
anomalies[key]=np.union1d(np.where(data>=high)[0], np.where(data<=low)[0])
print(anomalies[key].shape)
#%%
# =============================================================================
# =============================================================================
# # plot 1225
# =============================================================================
# =============================================================================
def show_1225(events):
SampleNum=40
for anom in events:
anom=int(anom)
print(anom)
plt.subplot(221)
for i in [0,1,2]:
plt.plot(selected_data_1225[i][anom*int(SampleNum/2)-240:(anom*int(SampleNum/2)+240)])
plt.legend('A' 'B' 'C')
plt.title('V')
plt.subplot(222)
for i in [3,4,5]:
plt.plot(selected_data_1225[i][anom*int(SampleNum/2)-240:(anom*int(SampleNum/2)+240)])
plt.legend('A' 'B' 'C')
plt.title('I')
plt.subplot(223)
for i in [6,7,8]:
plt.plot(selected_data_1225[i][anom*int(SampleNum/2)-240:(anom*int(SampleNum/2)+240)])
plt.legend('A' 'B' 'C')
plt.title('T')
plt.show()
#%%
X_train = tt
#%%
def adam_optimizer():
return adam(lr=0.0002, beta_1=0.5)
#%%
def create_generator():
generator=Sequential()
generator.add(CuDNNLSTM(units=256,input_shape=(100,1),return_sequences=True))
generator.add(LeakyReLU(0.2))
generator.add(CuDNNLSTM(units=512))
generator.add(LeakyReLU(0.2))
generator.add(Dense(units=512))
generator.add(LeakyReLU(0.2))
#
# generator.add(LSTM(units=1024))
# generator.add(LeakyReLU(0.2))
generator.add(Dense(units=1*40))
generator.compile(loss='binary_crossentropy', optimizer=adam_optimizer())
return generator
g=create_generator()
g.summary()
#%%
def create_discriminator():
discriminator=Sequential()
discriminator.add(CuDNNLSTM(units=256,input_shape=(40,1),return_sequences=True))
discriminator.add(LeakyReLU(0.2))
# discriminator.add(Dropout(0.3))
discriminator.add(CuDNNLSTM(units=512))
discriminator.add(LeakyReLU(0.2))
#
discriminator.add(Dense(units=512))
discriminator.add(LeakyReLU(0.2))
# discriminator.add(Dropout(0.3))
#
# discriminator.add(LSTM(units=256))
# discriminator.add(LeakyReLU(0.2))
discriminator.add(Dense(units=1, activation='sigmoid'))
discriminator.compile(loss='binary_crossentropy', optimizer=adam_optimizer())
return discriminator
d =create_discriminator()
d.summary()
#%%
def create_gan(discriminator, generator):
discriminator.trainable=False
gan_input = Input(shape=(100,1))
x = generator(gan_input)
x = Reshape((40,1), input_shape=(1*40,1))(x)
gan_output= discriminator(x)
gan= Model(inputs=gan_input, outputs=gan_output)
gan.compile(loss='binary_crossentropy', optimizer='adam')
return gan
gan = create_gan(d,g)
gan.summary()
#%%
batch_size=5
epochnum=2
#%%
start,SampleNum,N=(0,40,500000)
#X_train = load_data(start,SampleNum,N)
#filename=
X_train = tt
batch_count = X_train.shape[0] / batch_size
##%%
#X_train=X_train.reshape(N,3*SampleNum)
#X_train=X_train.reshape(N,SampleNum,3)
#%%
rnd={}
for i in range(epochnum):
rnd[i]=np.random.randint(low=0,high=N,size=batch_size)
# show(rnd[i])
#%%
generator= create_generator()
discriminator= create_discriminator()
gan = create_gan(discriminator, generator)
#%%
all_scores=[]
def training(generator,discriminator,gan,epochs, batch_size,all_scores):
# all_scores=[]
scale=1
for e in range(1,epochs+1 ):
all_score_temp=[]
tik=time.clock()
print("Epoch %d" %e)
for _ in tqdm(range(batch_size)):
#generate random noise as an input to initialize the generator
noise= scale*np.random.normal(0,1, [batch_size, 100])
noise=noise.reshape(batch_size,100,1)
# Generate fake MNIST images from noised input
generated_images = generator.predict(noise)
generated_images = generated_images.reshape(batch_size,SampleNum,1)
# print(generated_images.shape)
# Get a random set of real images
# random.seed(0)
image_batch =X_train_temp[rnd[e-1]]
# print(image_batch.shape)
#Construct different batches of real and fake data
X= np.concatenate([image_batch, generated_images])
# Labels for generated and real data
y_dis=np.zeros(2*batch_size)
y_dis[:batch_size]=0.9
#Pre train discriminator on fake and real data before starting the gan.
discriminator.trainable=True
discriminator.train_on_batch(X, y_dis)
#Tricking the noised input of the Generator as real data
noise= scale*np.random.normal(0,1, [batch_size, 100])
noise=noise.reshape(batch_size,100,1)
y_gen = np.ones(batch_size)
# During the training of gan,
# the weights of discriminator should be fixed.
#We can enforce that by setting the trainable flag
discriminator.trainable=False
#training the GAN by alternating the training of the Discriminator
#and training the chained GAN model with Discriminator’s weights freezed.
gan.train_on_batch(noise, y_gen)
rate=1000
shift=N/rate
all_score_temp=[]
for i in range(rate-1):
temp=discriminator.predict_on_batch(X_train_temp[int(i*shift):int((i+1)*shift)])
all_score_temp.append(temp)
# print(i)
all_score_temp=np.array(all_score_temp)
all_score_temp=all_score_temp.ravel()
all_scores.append(all_score_temp)
toc = time.clock()
print(toc-tik)
#%%
kk=['L1MAG']
for idx,key in enumerate(kk):
X_train_temp=X_train[:,(idx)]
#X_train.reshape(N,3*SampleNum)
X_train_temp=X_train_temp.reshape(N,SampleNum,1)
tic = time.clock()
training(generator,discriminator,gan,epochnum,batch_size,all_scores)
toc = time.clock()
print(toc-tic)
#
# gan_name='gan_sep_onelearn_good_09_'+key+'.h5'
# gen_name='gen_sep_onelearn_good_09_'+key+'.h5'
# dis_name='dis_sep_onelearn_good_09_'+key+'.h5'
# print(dis_name)
# gan.save(gan_name)
# generator.save(gen_name)
# discriminator.save(dis_name)
| null | null | null | null |
[
0
] |
2,535 |
b6adb956aed934451fc21e51663be36d08c5b645
|
import matplotlib.pyplot as plt
import matplotlib
import matplotlib.colors as colors
import matplotlib.cm as cm
def plot_hist(data_list):
plt.hist(data_list, bins=500)
plt.show()
return
def compare_hits_plot(np_array, compare=False):
if compare:
clist = list(np_array[:,2])
minima, maxima = min(clist), max(clist)
print minima, maxima
hits=np_array[np_array[:,2]==1]
total_hits=np_array[np_array[:,2]>=1]
scatter = plt.scatter(np_array[:,3], np_array[:,1], c=clist, vmin=0, vmax=1, s=8, cmap=cm.winter)
plt.ylim(ymin=0, ymax=max(hits[:,3]))
plt.colorbar(scatter)
plt.axhline(spot_count_cutoff)
else:
scatter = plt.scatter(np_array[:,3], np_array[:,1])
def pickle_ratio_plot(np_array):
clist = list(np_array[:,5])
minima, maxima = min(clist), max(clist)
print minima, maxima
scatter = plt.scatter(np_array[:,1], np_array[:,2], c=clist, s=8, cmap=cm.winter)
plt.colorbar(scatter)
plt.axhline(spot_count_cutoff)
| null | null | null | null |
[
0
] |
2,536 |
44c4a1f4b32b45fd95eb8b0a42a718d05d967e04
|
<mask token>
|
<mask token>
while formula >= 0 and formula <= 3:
a = float(input('Enter a:'))
min_x = float(input('Enter minx:'))
max_x = float(input('Enter maxx:'))
step = int(input('Enter steps:'))
x = min_x
if formula == 1:
d = -45 * a ** 2 + 26 * a * x + 7 * x ** 2
if d != 0:
for i in range(step):
while x <= max_x:
G = -7 * (4 * a ** 2 + 15 * a * x - 4 * x ** 2) / d
result.append(G)
print('x=%.3f \tG=%.3f' % (float(x), G))
x += (max_x - min_x) / (step - 1)
break
else:
print('Err')
elif formula == 2:
for i in range(step):
while x <= max_x:
F = 2 ** (40 * a ** 2 - 107 * a * x + 63 * x ** 2)
result.append(F)
print('x=%.3f \tF=%.3f' % (float(x), F))
x += (max_x - min_x) / (step - 1)
break
elif formula == 3:
for i in range(step):
while x <= max_x:
Y = log(a ** 2 - 2 * a * x + 3 * x ** 2 + 1)
result.append(Y)
print('x=%.3f \tY=%.3f' % (float(x), Y))
x += (max_x - min_x) / (step - 1)
break
else:
print('Err')
print('Max.res. = ', max(result))
print('Min.res. = ', min(result))
|
<mask token>
result = []
formula = int(input("""For exit press 0
Choose the formula #1 #2 #3: """))
while formula >= 0 and formula <= 3:
a = float(input('Enter a:'))
min_x = float(input('Enter minx:'))
max_x = float(input('Enter maxx:'))
step = int(input('Enter steps:'))
x = min_x
if formula == 1:
d = -45 * a ** 2 + 26 * a * x + 7 * x ** 2
if d != 0:
for i in range(step):
while x <= max_x:
G = -7 * (4 * a ** 2 + 15 * a * x - 4 * x ** 2) / d
result.append(G)
print('x=%.3f \tG=%.3f' % (float(x), G))
x += (max_x - min_x) / (step - 1)
break
else:
print('Err')
elif formula == 2:
for i in range(step):
while x <= max_x:
F = 2 ** (40 * a ** 2 - 107 * a * x + 63 * x ** 2)
result.append(F)
print('x=%.3f \tF=%.3f' % (float(x), F))
x += (max_x - min_x) / (step - 1)
break
elif formula == 3:
for i in range(step):
while x <= max_x:
Y = log(a ** 2 - 2 * a * x + 3 * x ** 2 + 1)
result.append(Y)
print('x=%.3f \tY=%.3f' % (float(x), Y))
x += (max_x - min_x) / (step - 1)
break
else:
print('Err')
print('Max.res. = ', max(result))
print('Min.res. = ', min(result))
|
from math import log
result = []
formula = int(input("""For exit press 0
Choose the formula #1 #2 #3: """))
while formula >= 0 and formula <= 3:
a = float(input('Enter a:'))
min_x = float(input('Enter minx:'))
max_x = float(input('Enter maxx:'))
step = int(input('Enter steps:'))
x = min_x
if formula == 1:
d = -45 * a ** 2 + 26 * a * x + 7 * x ** 2
if d != 0:
for i in range(step):
while x <= max_x:
G = -7 * (4 * a ** 2 + 15 * a * x - 4 * x ** 2) / d
result.append(G)
print('x=%.3f \tG=%.3f' % (float(x), G))
x += (max_x - min_x) / (step - 1)
break
else:
print('Err')
elif formula == 2:
for i in range(step):
while x <= max_x:
F = 2 ** (40 * a ** 2 - 107 * a * x + 63 * x ** 2)
result.append(F)
print('x=%.3f \tF=%.3f' % (float(x), F))
x += (max_x - min_x) / (step - 1)
break
elif formula == 3:
for i in range(step):
while x <= max_x:
Y = log(a ** 2 - 2 * a * x + 3 * x ** 2 + 1)
result.append(Y)
print('x=%.3f \tY=%.3f' % (float(x), Y))
x += (max_x - min_x) / (step - 1)
break
else:
print('Err')
print('Max.res. = ', max(result))
print('Min.res. = ', min(result))
|
from math import log
result = []
formula = int(input("For exit press 0\nChoose the formula #1 #2 #3: "))
while (formula >= 0) and (formula <= 3):
a = float(input("Enter a:"))
min_x = float(input("Enter minx:"))
max_x = float(input("Enter maxx:"))
step = int(input("Enter steps:"))
x = min_x
if formula == 1:
d = (-45*a**2+26*a*x+7*x**2)
if d !=0:
for i in range(step):
while x <= max_x:
G = ((-7*(4*a**2+15*a*x-4*x**2))/d)
result.append(G)
print("x=%.3f \tG=%.3f" % (float(x), G))
x += (max_x-min_x)/(step-1)
break
else:
print("Err")
elif formula == 2:
for i in range(step):
while x <= max_x:
F = (2**(40*(a**2)-107*a*x+63*(x**2)))
result.append(F)
print("x=%.3f \tF=%.3f" % (float(x), F))
x += (max_x-min_x)/(step-1)
break
elif formula == 3:
for i in range(step):
while x <= max_x:
Y = log(a**2-2*a*x+3*x**2+1)
result.append(Y)
print("x=%.3f \tY=%.3f" % (float(x), Y))
x += (max_x-min_x)/(step-1)
break
else:
print("Err")
print("Max.res. = ", max(result))
print("Min.res. = ", min(result))
|
[
0,
1,
2,
3,
4
] |
2,537 |
661f94f5770df1026352ee344d0006466662bb3c
|
def main():
try:
contacts = open('contactsLab4.txt', 'r')
names = []
birthdates = []
name = contacts.readline()
while name != '':
names.append(name.rstrip('\n'))
date = contacts.readline()
birthdates.append(date.rstrip('\n'))
name = contacts.readline()
contacts.close()
display_contacts(names, birthdates)
except FileNotFoundError:
print('File was not found')
except Exception as err:
print('Error:', err)
def find_season(birthdates):
month = birthdates.split('/', 3)
month = int(month[0])
if month == 12 or month == 1 or month == 2:
season = 'winter'
elif month == 3 or month == 4 or month == 5:
season = 'spring'
elif month == 6 or month == 7 or month == 8:
season = 'summer'
elif month == 9 or month == 10 or month == 11:
season = 'fall'
return season
<mask token>
def get_age(date, birthdates):
today = date.split('/')
todayMonth = int(today[0])
todayDay = int(today[1])
todayYear = int(today[2])
birthyear = birthdates.split('/')
birthMonth = int(birthyear[0])
birthDay = int(birthyear[1])
birthyear = int(birthyear[2])
if todayMonth > birthMonth:
age = todayYear - birthyear - 1
else:
age = todayYear - birthyear
return age
<mask token>
|
def main():
try:
contacts = open('contactsLab4.txt', 'r')
names = []
birthdates = []
name = contacts.readline()
while name != '':
names.append(name.rstrip('\n'))
date = contacts.readline()
birthdates.append(date.rstrip('\n'))
name = contacts.readline()
contacts.close()
display_contacts(names, birthdates)
except FileNotFoundError:
print('File was not found')
except Exception as err:
print('Error:', err)
def find_season(birthdates):
month = birthdates.split('/', 3)
month = int(month[0])
if month == 12 or month == 1 or month == 2:
season = 'winter'
elif month == 3 or month == 4 or month == 5:
season = 'spring'
elif month == 6 or month == 7 or month == 8:
season = 'summer'
elif month == 9 or month == 10 or month == 11:
season = 'fall'
return season
<mask token>
def get_age(date, birthdates):
today = date.split('/')
todayMonth = int(today[0])
todayDay = int(today[1])
todayYear = int(today[2])
birthyear = birthdates.split('/')
birthMonth = int(birthyear[0])
birthDay = int(birthyear[1])
birthyear = int(birthyear[2])
if todayMonth > birthMonth:
age = todayYear - birthyear - 1
else:
age = todayYear - birthyear
return age
def display_contacts(names, birthdates):
date = input('Enter current date in format m/d/yyyy: ')
print(format('Name', '25'), format('Age', '6'), format('Season', '8'),
format('Leap Year', '10'))
print(format('----', '25'), format('---', '6'), format('------', '8'),
format('---------', '10'))
for i in birthdates:
age = get_age(date, birthdates)
print(age)
for i in birthdates:
season = find_season(i)
print(season)
for i in birthdates:
year = is_leap_year(i)
print(year)
for i in range(len(name)):
print(format(name[i], '25'), format(str(age[i]), '6'), format(
season[i], '8'), format(year[i], '10'))
<mask token>
|
def main():
try:
contacts = open('contactsLab4.txt', 'r')
names = []
birthdates = []
name = contacts.readline()
while name != '':
names.append(name.rstrip('\n'))
date = contacts.readline()
birthdates.append(date.rstrip('\n'))
name = contacts.readline()
contacts.close()
display_contacts(names, birthdates)
except FileNotFoundError:
print('File was not found')
except Exception as err:
print('Error:', err)
def find_season(birthdates):
month = birthdates.split('/', 3)
month = int(month[0])
if month == 12 or month == 1 or month == 2:
season = 'winter'
elif month == 3 or month == 4 or month == 5:
season = 'spring'
elif month == 6 or month == 7 or month == 8:
season = 'summer'
elif month == 9 or month == 10 or month == 11:
season = 'fall'
return season
def is_leap_year(birthdates):
birthyear = birthdates.split('/')
birthyear = int(birthyear[2])
if birthyear % 4 == 0 and birthyear % 100 != 0 or birthyear % 400 == 0:
year = 'Yes'
else:
year = 'No'
return year
def get_age(date, birthdates):
today = date.split('/')
todayMonth = int(today[0])
todayDay = int(today[1])
todayYear = int(today[2])
birthyear = birthdates.split('/')
birthMonth = int(birthyear[0])
birthDay = int(birthyear[1])
birthyear = int(birthyear[2])
if todayMonth > birthMonth:
age = todayYear - birthyear - 1
else:
age = todayYear - birthyear
return age
def display_contacts(names, birthdates):
date = input('Enter current date in format m/d/yyyy: ')
print(format('Name', '25'), format('Age', '6'), format('Season', '8'),
format('Leap Year', '10'))
print(format('----', '25'), format('---', '6'), format('------', '8'),
format('---------', '10'))
for i in birthdates:
age = get_age(date, birthdates)
print(age)
for i in birthdates:
season = find_season(i)
print(season)
for i in birthdates:
year = is_leap_year(i)
print(year)
for i in range(len(name)):
print(format(name[i], '25'), format(str(age[i]), '6'), format(
season[i], '8'), format(year[i], '10'))
<mask token>
|
def main():
try:
contacts = open('contactsLab4.txt', 'r')
names = []
birthdates = []
name = contacts.readline()
while name != '':
names.append(name.rstrip('\n'))
date = contacts.readline()
birthdates.append(date.rstrip('\n'))
name = contacts.readline()
contacts.close()
display_contacts(names, birthdates)
except FileNotFoundError:
print('File was not found')
except Exception as err:
print('Error:', err)
def find_season(birthdates):
month = birthdates.split('/', 3)
month = int(month[0])
if month == 12 or month == 1 or month == 2:
season = 'winter'
elif month == 3 or month == 4 or month == 5:
season = 'spring'
elif month == 6 or month == 7 or month == 8:
season = 'summer'
elif month == 9 or month == 10 or month == 11:
season = 'fall'
return season
def is_leap_year(birthdates):
birthyear = birthdates.split('/')
birthyear = int(birthyear[2])
if birthyear % 4 == 0 and birthyear % 100 != 0 or birthyear % 400 == 0:
year = 'Yes'
else:
year = 'No'
return year
def get_age(date, birthdates):
today = date.split('/')
todayMonth = int(today[0])
todayDay = int(today[1])
todayYear = int(today[2])
birthyear = birthdates.split('/')
birthMonth = int(birthyear[0])
birthDay = int(birthyear[1])
birthyear = int(birthyear[2])
if todayMonth > birthMonth:
age = todayYear - birthyear - 1
else:
age = todayYear - birthyear
return age
def display_contacts(names, birthdates):
date = input('Enter current date in format m/d/yyyy: ')
print(format('Name', '25'), format('Age', '6'), format('Season', '8'),
format('Leap Year', '10'))
print(format('----', '25'), format('---', '6'), format('------', '8'),
format('---------', '10'))
for i in birthdates:
age = get_age(date, birthdates)
print(age)
for i in birthdates:
season = find_season(i)
print(season)
for i in birthdates:
year = is_leap_year(i)
print(year)
for i in range(len(name)):
print(format(name[i], '25'), format(str(age[i]), '6'), format(
season[i], '8'), format(year[i], '10'))
main()
|
###############################################################
# Yolanda Gunter
# Lab 4
# My program uses decisions, repetition, functions, files, lists
# and exception handling that will get the input from a file to
# run program that asks User for current date, reads a contact file
# list that contains names with DOB, calculate each contact's age,
# season born in and born in a leap year or not.
# Then my program will print the calculated average age of contacts.
###############################################################
########################################################
# Function name: main
# Input: contactsLab4.txt file
# Output: table of contact, age, birth season & if born leap yr or not
# Purpose: This function reads file, makes two lists, converts strings to
# integers, calculates ages, season born & if born leap yr or not
# then lastly calculats average age of contacts in entire file.
###############################################
def main():
# start exception handling
try:
# Open a file named contactlab4.txt
contacts = open('contactsLab4.txt', 'r')
# Create empty name list
names = []
# Create empty birthday list
birthdates = []
# Read file, establish records, strip \n, append to lists,
name = contacts.readline()
while name != '':
names.append(name.rstrip('\n'))
date = contacts.readline()
birthdates.append(date.rstrip('\n'))
name = contacts.readline()
# Close the file
contacts.close()
# Call display_contacts
display_contacts(names, birthdates)
# Simple exception if file is not found
except FileNotFoundError:
print("File was not found")
except Exception as err:
print("Error:", err)
###############################################
# Function name: find_season
# Input: birthdate
# Output: a string as a season
# Purpose: Determines which season contact is born
###############################################
def find_season(birthdates):
month = birthdates.split('/', 3)
month = int(month[0])
# Assign contact birth month to a season
if month == 12 or month == 1 or month == 2:
season = "winter"
elif month == 3 or month == 4 or month == 5:
season = "spring"
elif month == 6 or month == 7 or month == 8:
season = "summer"
elif month == 9 or month == 10 or month == 11:
season = "fall"
return season
###############################################
# Function name: is_leap_year
# Input: birthdate list
# Output: value leap year (Yes) or not (No)
# Purpose: Determines if birth year is leap year or not.
###############################################
def is_leap_year(birthdates):
birthyear = birthdates.split('/')
birthyear = int(birthyear[2])
# Calculate if User's birth year is a leap year or not
if birthyear % 4 == 0 and birthyear % 100 != 0 or \
birthyear % 400 == 0:
year = "Yes"
else:
year = "No"
return year
###############################################
# Function name: get_age
# Input: current date and birthdate list
# Output: age of contact
# Purpose: Caculates age of contact
###############################################
def get_age(date, birthdates):
today = date.split('/')
todayMonth = int(today[0])
todayDay = int(today[1])
todayYear = int(today[2])
birthyear = birthdates.split('/')
birthMonth = int(birthyear[0])
birthDay = int(birthyear[1])
birthyear = int(birthyear[2])
if todayMonth > birthMonth:
age = todayYear - birthyear-1
else:
age = todayYear - birthyear
return age
###############################################
# Function name: display_contacts
# Input: name and birthdate lists
# Output: value leap year (Yes) or not (No)
# Purpose: Determines if birth year is leap year or not.
###############################################
def display_contacts(names, birthdates):
# Get current date
date = input('Enter current date in format m/d/yyyy: ')
# format display in table format with column headings
print(format("Name", '25'), format("Age", '6'),
format("Season", '8'), format("Leap Year", '10'))
print(format("----", '25'), format("---", '6'),
format("------", '8'), format("---------", '10'))
# Call functions
for i in birthdates:
age = get_age(date, birthdates)
print(age)
for i in birthdates:
season = find_season(i)
print(season)
for i in birthdates:
year = is_leap_year(i)
print(year)
for i in range(len(name)):
print(format(name[i], '25'), format(str(age[i]), '6'),
format(season[i], '8'), format(year[i], '10'))
# Call the main function
main()
|
[
3,
4,
5,
6,
7
] |
2,538 |
0016e38d39ed2a4c7a75bed103bc47a5b6fd0e8c
|
<mask token>
def get_text_data(filename):
import codecs
with open(filename, 'rb') as f:
text = f.read()
length = len(text)
print('got corpus length:', length)
return text
def model_builder():
c = ct.Can()
gru, d1, d2 = c.add(GRU(256, 256)), c.add(LastDimDense(256, 64)), c.add(
LastDimDense(64, 256))
def call(i, starting_state=None):
i = gru(i, starting_state=starting_state)
shape = tf.shape(i)
b, t, d = shape[0], shape[1], shape[2]
ending_state = i[:, t - 1, :]
i = d1(i)
i = Act('elu')(i)
i = d2(i)
i = Act('softmax')(i)
return i, ending_state
c.set_function(call)
return c
def feed_gen():
input_text = tf.placeholder(tf.uint8, shape=[None, None])
input_text_float = tf.one_hot(input_text, depth=256, dtype=tf.float32)
xhead = input_text_float[:, :-1]
gt = input_text_float[:, 1:]
y, _ = model(xhead, starting_state=None)
def cross_entropy_loss_per_char(pred, gt):
def log2(i):
return tf.log(i) * 1.442695
return -tf.reduce_sum(log2(pred + 1e-14) * gt, axis=tf.rank(pred) - 1)
loss = ct.cross_entropy_loss(y, gt)
loss_per_char = cross_entropy_loss_per_char(y, gt)
train_step = tf.train.AdamOptimizer(0.001).minimize(loss, var_list=
model.get_weights())
def feed(minibatch):
nonlocal train_step, loss, input_text
sess = ct.get_session()
res = sess.run([loss, train_step], feed_dict={input_text: minibatch})
return res[0]
starting_state = tf.placeholder(tf.float32, shape=[None, None])
stateful_y, ending_state = model(input_text_float, starting_state=
starting_state)
stateful_y_init, ending_state_init = model(input_text_float)
def stateful_predict(st, i):
sess = ct.get_session()
if st is None:
res = sess.run([stateful_y_init, ending_state_init], feed_dict=
{input_text: i})
else:
res = sess.run([stateful_y, ending_state], feed_dict={
input_text: i, starting_state: st})
return res
def loss_statistics(i):
sess = ct.get_session()
res = sess.run([loss_per_char], feed_dict={input_text: i})
return res
return feed, stateful_predict, loss_statistics
def r(ep=100):
length = len(corpus)
batch_size = 256
mbl = time_steps * batch_size
sr = length - mbl - time_steps - 2
for i in range(ep):
print('---------------------iter', i, '/', ep)
j = np.random.choice(sr)
minibatch = corpus[j:j + mbl]
minibatch.shape = [batch_size, time_steps]
loss = feed(minibatch)
print('loss:', loss)
if i % 100 == 0:
pass
class utf8statemachine:
def __init__(self):
self.state = 0
self.buffer = []
def flush(self):
char = str(bytes(self.buffer), 'utf-8')
self.buffer = []
return char
def bytein(self, b):
if self.state == 0:
if b & 128 == 0:
self.buffer.append(b)
return self.flush()
if b & 240 == 224:
self.state = 2
self.buffer.append(b)
return None
if b & 224 == 192:
self.state = 1
self.buffer.append(b)
return None
raise NameError('byte should start with 0b0 or 0b110 or 0b1110')
if self.state > 0:
if b & 192 == 128:
self.state -= 1
self.buffer.append(b)
else:
raise NameError('byte should start with 0b10')
if self.state == 0:
return self.flush()
return None
def show2(length=400):
import sys, os
asc_buf = np.fromstring('\n', dtype='uint8').reshape(1, 1)
starting_state = None
sm = utf8statemachine()
errors = 0
for i in range(length):
stateful_y, ending_state = predict(starting_state, asc_buf)
dist = stateful_y[0, 0]
code = np.random.choice(256, p=dist)
try:
result = sm.bytein(code)
if result is not None:
sys.stdout.write(result)
asc_buf[0, 0] = code
starting_state = ending_state
except NameError:
errors += 1
if i % 10 == 0:
sys.stdout.flush()
pass
sys.stdout.flush()
print('')
print('total UTF-8 decoding errors:', errors)
def bsa(text):
buf = np.fromstring(text, dtype='uint8').reshape(1, len(text))
loss, = loss_stats(buf)
simplified = text[0]
print(text[0], 'initial')
for i in range(1, len(text)):
print(text[i], loss[0, i - 1])
if loss[0, i - 1] < 1:
simplified += '-'
else:
simplified += text[i]
print('simplified:', simplified)
<mask token>
|
<mask token>
def get_text_data(filename):
import codecs
with open(filename, 'rb') as f:
text = f.read()
length = len(text)
print('got corpus length:', length)
return text
def model_builder():
c = ct.Can()
gru, d1, d2 = c.add(GRU(256, 256)), c.add(LastDimDense(256, 64)), c.add(
LastDimDense(64, 256))
def call(i, starting_state=None):
i = gru(i, starting_state=starting_state)
shape = tf.shape(i)
b, t, d = shape[0], shape[1], shape[2]
ending_state = i[:, t - 1, :]
i = d1(i)
i = Act('elu')(i)
i = d2(i)
i = Act('softmax')(i)
return i, ending_state
c.set_function(call)
return c
def feed_gen():
input_text = tf.placeholder(tf.uint8, shape=[None, None])
input_text_float = tf.one_hot(input_text, depth=256, dtype=tf.float32)
xhead = input_text_float[:, :-1]
gt = input_text_float[:, 1:]
y, _ = model(xhead, starting_state=None)
def cross_entropy_loss_per_char(pred, gt):
def log2(i):
return tf.log(i) * 1.442695
return -tf.reduce_sum(log2(pred + 1e-14) * gt, axis=tf.rank(pred) - 1)
loss = ct.cross_entropy_loss(y, gt)
loss_per_char = cross_entropy_loss_per_char(y, gt)
train_step = tf.train.AdamOptimizer(0.001).minimize(loss, var_list=
model.get_weights())
def feed(minibatch):
nonlocal train_step, loss, input_text
sess = ct.get_session()
res = sess.run([loss, train_step], feed_dict={input_text: minibatch})
return res[0]
starting_state = tf.placeholder(tf.float32, shape=[None, None])
stateful_y, ending_state = model(input_text_float, starting_state=
starting_state)
stateful_y_init, ending_state_init = model(input_text_float)
def stateful_predict(st, i):
sess = ct.get_session()
if st is None:
res = sess.run([stateful_y_init, ending_state_init], feed_dict=
{input_text: i})
else:
res = sess.run([stateful_y, ending_state], feed_dict={
input_text: i, starting_state: st})
return res
def loss_statistics(i):
sess = ct.get_session()
res = sess.run([loss_per_char], feed_dict={input_text: i})
return res
return feed, stateful_predict, loss_statistics
def r(ep=100):
length = len(corpus)
batch_size = 256
mbl = time_steps * batch_size
sr = length - mbl - time_steps - 2
for i in range(ep):
print('---------------------iter', i, '/', ep)
j = np.random.choice(sr)
minibatch = corpus[j:j + mbl]
minibatch.shape = [batch_size, time_steps]
loss = feed(minibatch)
print('loss:', loss)
if i % 100 == 0:
pass
class utf8statemachine:
def __init__(self):
self.state = 0
self.buffer = []
def flush(self):
char = str(bytes(self.buffer), 'utf-8')
self.buffer = []
return char
def bytein(self, b):
if self.state == 0:
if b & 128 == 0:
self.buffer.append(b)
return self.flush()
if b & 240 == 224:
self.state = 2
self.buffer.append(b)
return None
if b & 224 == 192:
self.state = 1
self.buffer.append(b)
return None
raise NameError('byte should start with 0b0 or 0b110 or 0b1110')
if self.state > 0:
if b & 192 == 128:
self.state -= 1
self.buffer.append(b)
else:
raise NameError('byte should start with 0b10')
if self.state == 0:
return self.flush()
return None
def show2(length=400):
import sys, os
asc_buf = np.fromstring('\n', dtype='uint8').reshape(1, 1)
starting_state = None
sm = utf8statemachine()
errors = 0
for i in range(length):
stateful_y, ending_state = predict(starting_state, asc_buf)
dist = stateful_y[0, 0]
code = np.random.choice(256, p=dist)
try:
result = sm.bytein(code)
if result is not None:
sys.stdout.write(result)
asc_buf[0, 0] = code
starting_state = ending_state
except NameError:
errors += 1
if i % 10 == 0:
sys.stdout.flush()
pass
sys.stdout.flush()
print('')
print('total UTF-8 decoding errors:', errors)
def bsa(text):
buf = np.fromstring(text, dtype='uint8').reshape(1, len(text))
loss, = loss_stats(buf)
simplified = text[0]
print(text[0], 'initial')
for i in range(1, len(text)):
print(text[i], loss[0, i - 1])
if loss[0, i - 1] < 1:
simplified += '-'
else:
simplified += text[i]
print('simplified:', simplified)
<mask token>
if len(argv) < 2:
print(
'(Error)please provide a filename as the first argument. The file should be in UTF-8 encoding, without BOM.'
)
else:
text = get_text_data(argv[1])
corpus = np.fromstring(text, dtype='uint8')
print('corpus loaded. corpus[0]:', corpus[0], 'text[0]:', text[0])
model = model_builder()
feed, predict, loss_stats = feed_gen()
sess = ct.get_session()
sess.run(tf.global_variables_initializer())
|
<mask token>
time_steps = 16
def get_text_data(filename):
import codecs
with open(filename, 'rb') as f:
text = f.read()
length = len(text)
print('got corpus length:', length)
return text
def model_builder():
c = ct.Can()
gru, d1, d2 = c.add(GRU(256, 256)), c.add(LastDimDense(256, 64)), c.add(
LastDimDense(64, 256))
def call(i, starting_state=None):
i = gru(i, starting_state=starting_state)
shape = tf.shape(i)
b, t, d = shape[0], shape[1], shape[2]
ending_state = i[:, t - 1, :]
i = d1(i)
i = Act('elu')(i)
i = d2(i)
i = Act('softmax')(i)
return i, ending_state
c.set_function(call)
return c
def feed_gen():
input_text = tf.placeholder(tf.uint8, shape=[None, None])
input_text_float = tf.one_hot(input_text, depth=256, dtype=tf.float32)
xhead = input_text_float[:, :-1]
gt = input_text_float[:, 1:]
y, _ = model(xhead, starting_state=None)
def cross_entropy_loss_per_char(pred, gt):
def log2(i):
return tf.log(i) * 1.442695
return -tf.reduce_sum(log2(pred + 1e-14) * gt, axis=tf.rank(pred) - 1)
loss = ct.cross_entropy_loss(y, gt)
loss_per_char = cross_entropy_loss_per_char(y, gt)
train_step = tf.train.AdamOptimizer(0.001).minimize(loss, var_list=
model.get_weights())
def feed(minibatch):
nonlocal train_step, loss, input_text
sess = ct.get_session()
res = sess.run([loss, train_step], feed_dict={input_text: minibatch})
return res[0]
starting_state = tf.placeholder(tf.float32, shape=[None, None])
stateful_y, ending_state = model(input_text_float, starting_state=
starting_state)
stateful_y_init, ending_state_init = model(input_text_float)
def stateful_predict(st, i):
sess = ct.get_session()
if st is None:
res = sess.run([stateful_y_init, ending_state_init], feed_dict=
{input_text: i})
else:
res = sess.run([stateful_y, ending_state], feed_dict={
input_text: i, starting_state: st})
return res
def loss_statistics(i):
sess = ct.get_session()
res = sess.run([loss_per_char], feed_dict={input_text: i})
return res
return feed, stateful_predict, loss_statistics
def r(ep=100):
length = len(corpus)
batch_size = 256
mbl = time_steps * batch_size
sr = length - mbl - time_steps - 2
for i in range(ep):
print('---------------------iter', i, '/', ep)
j = np.random.choice(sr)
minibatch = corpus[j:j + mbl]
minibatch.shape = [batch_size, time_steps]
loss = feed(minibatch)
print('loss:', loss)
if i % 100 == 0:
pass
class utf8statemachine:
def __init__(self):
self.state = 0
self.buffer = []
def flush(self):
char = str(bytes(self.buffer), 'utf-8')
self.buffer = []
return char
def bytein(self, b):
if self.state == 0:
if b & 128 == 0:
self.buffer.append(b)
return self.flush()
if b & 240 == 224:
self.state = 2
self.buffer.append(b)
return None
if b & 224 == 192:
self.state = 1
self.buffer.append(b)
return None
raise NameError('byte should start with 0b0 or 0b110 or 0b1110')
if self.state > 0:
if b & 192 == 128:
self.state -= 1
self.buffer.append(b)
else:
raise NameError('byte should start with 0b10')
if self.state == 0:
return self.flush()
return None
def show2(length=400):
import sys, os
asc_buf = np.fromstring('\n', dtype='uint8').reshape(1, 1)
starting_state = None
sm = utf8statemachine()
errors = 0
for i in range(length):
stateful_y, ending_state = predict(starting_state, asc_buf)
dist = stateful_y[0, 0]
code = np.random.choice(256, p=dist)
try:
result = sm.bytein(code)
if result is not None:
sys.stdout.write(result)
asc_buf[0, 0] = code
starting_state = ending_state
except NameError:
errors += 1
if i % 10 == 0:
sys.stdout.flush()
pass
sys.stdout.flush()
print('')
print('total UTF-8 decoding errors:', errors)
def bsa(text):
buf = np.fromstring(text, dtype='uint8').reshape(1, len(text))
loss, = loss_stats(buf)
simplified = text[0]
print(text[0], 'initial')
for i in range(1, len(text)):
print(text[i], loss[0, i - 1])
if loss[0, i - 1] < 1:
simplified += '-'
else:
simplified += text[i]
print('simplified:', simplified)
argv = sys.argv
if len(argv) < 2:
print(
'(Error)please provide a filename as the first argument. The file should be in UTF-8 encoding, without BOM.'
)
else:
text = get_text_data(argv[1])
corpus = np.fromstring(text, dtype='uint8')
print('corpus loaded. corpus[0]:', corpus[0], 'text[0]:', text[0])
model = model_builder()
feed, predict, loss_stats = feed_gen()
sess = ct.get_session()
sess.run(tf.global_variables_initializer())
|
import numpy as np
import random
import sys
import canton as ct
from canton import *
import tensorflow as tf
time_steps = 16
def get_text_data(filename):
import codecs
with open(filename, 'rb') as f:
text = f.read()
length = len(text)
print('got corpus length:', length)
return text
def model_builder():
c = ct.Can()
gru, d1, d2 = c.add(GRU(256, 256)), c.add(LastDimDense(256, 64)), c.add(
LastDimDense(64, 256))
def call(i, starting_state=None):
i = gru(i, starting_state=starting_state)
shape = tf.shape(i)
b, t, d = shape[0], shape[1], shape[2]
ending_state = i[:, t - 1, :]
i = d1(i)
i = Act('elu')(i)
i = d2(i)
i = Act('softmax')(i)
return i, ending_state
c.set_function(call)
return c
def feed_gen():
input_text = tf.placeholder(tf.uint8, shape=[None, None])
input_text_float = tf.one_hot(input_text, depth=256, dtype=tf.float32)
xhead = input_text_float[:, :-1]
gt = input_text_float[:, 1:]
y, _ = model(xhead, starting_state=None)
def cross_entropy_loss_per_char(pred, gt):
def log2(i):
return tf.log(i) * 1.442695
return -tf.reduce_sum(log2(pred + 1e-14) * gt, axis=tf.rank(pred) - 1)
loss = ct.cross_entropy_loss(y, gt)
loss_per_char = cross_entropy_loss_per_char(y, gt)
train_step = tf.train.AdamOptimizer(0.001).minimize(loss, var_list=
model.get_weights())
def feed(minibatch):
nonlocal train_step, loss, input_text
sess = ct.get_session()
res = sess.run([loss, train_step], feed_dict={input_text: minibatch})
return res[0]
starting_state = tf.placeholder(tf.float32, shape=[None, None])
stateful_y, ending_state = model(input_text_float, starting_state=
starting_state)
stateful_y_init, ending_state_init = model(input_text_float)
def stateful_predict(st, i):
sess = ct.get_session()
if st is None:
res = sess.run([stateful_y_init, ending_state_init], feed_dict=
{input_text: i})
else:
res = sess.run([stateful_y, ending_state], feed_dict={
input_text: i, starting_state: st})
return res
def loss_statistics(i):
sess = ct.get_session()
res = sess.run([loss_per_char], feed_dict={input_text: i})
return res
return feed, stateful_predict, loss_statistics
def r(ep=100):
length = len(corpus)
batch_size = 256
mbl = time_steps * batch_size
sr = length - mbl - time_steps - 2
for i in range(ep):
print('---------------------iter', i, '/', ep)
j = np.random.choice(sr)
minibatch = corpus[j:j + mbl]
minibatch.shape = [batch_size, time_steps]
loss = feed(minibatch)
print('loss:', loss)
if i % 100 == 0:
pass
class utf8statemachine:
def __init__(self):
self.state = 0
self.buffer = []
def flush(self):
char = str(bytes(self.buffer), 'utf-8')
self.buffer = []
return char
def bytein(self, b):
if self.state == 0:
if b & 128 == 0:
self.buffer.append(b)
return self.flush()
if b & 240 == 224:
self.state = 2
self.buffer.append(b)
return None
if b & 224 == 192:
self.state = 1
self.buffer.append(b)
return None
raise NameError('byte should start with 0b0 or 0b110 or 0b1110')
if self.state > 0:
if b & 192 == 128:
self.state -= 1
self.buffer.append(b)
else:
raise NameError('byte should start with 0b10')
if self.state == 0:
return self.flush()
return None
def show2(length=400):
import sys, os
asc_buf = np.fromstring('\n', dtype='uint8').reshape(1, 1)
starting_state = None
sm = utf8statemachine()
errors = 0
for i in range(length):
stateful_y, ending_state = predict(starting_state, asc_buf)
dist = stateful_y[0, 0]
code = np.random.choice(256, p=dist)
try:
result = sm.bytein(code)
if result is not None:
sys.stdout.write(result)
asc_buf[0, 0] = code
starting_state = ending_state
except NameError:
errors += 1
if i % 10 == 0:
sys.stdout.flush()
pass
sys.stdout.flush()
print('')
print('total UTF-8 decoding errors:', errors)
def bsa(text):
buf = np.fromstring(text, dtype='uint8').reshape(1, len(text))
loss, = loss_stats(buf)
simplified = text[0]
print(text[0], 'initial')
for i in range(1, len(text)):
print(text[i], loss[0, i - 1])
if loss[0, i - 1] < 1:
simplified += '-'
else:
simplified += text[i]
print('simplified:', simplified)
argv = sys.argv
if len(argv) < 2:
print(
'(Error)please provide a filename as the first argument. The file should be in UTF-8 encoding, without BOM.'
)
else:
text = get_text_data(argv[1])
corpus = np.fromstring(text, dtype='uint8')
print('corpus loaded. corpus[0]:', corpus[0], 'text[0]:', text[0])
model = model_builder()
feed, predict, loss_stats = feed_gen()
sess = ct.get_session()
sess.run(tf.global_variables_initializer())
|
import numpy as np
import random
import sys
import canton as ct
from canton import *
import tensorflow as tf
time_steps = 16
def get_text_data(filename):
import codecs
with open(filename,'rb') as f:
text = f.read()
length = len(text)
print('got corpus length:', length)
return text
def model_builder():
c = ct.Can()
gru,d1,d2 = (
c.add(GRU(256,256)),
c.add(LastDimDense(256,64)),
c.add(LastDimDense(64,256)),
)
def call(i,starting_state=None):
# i is one-hot encoded
i = gru(i,starting_state=starting_state)
# (batch, time_steps, 512)
shape = tf.shape(i)
b,t,d = shape[0],shape[1],shape[2]
ending_state = i[:,t-1,:]
i = d1(i)
i = Act('elu')(i)
i = d2(i)
i = Act('softmax')(i)
return i, ending_state
c.set_function(call)
return c
def feed_gen():
input_text = tf.placeholder(tf.uint8,
shape=[None, None]) # [batch, timesteps]
input_text_float = tf.one_hot(input_text,depth=256,dtype=tf.float32)
xhead = input_text_float[:,:-1] # [batch, 0:timesteps-1, 256]
gt = input_text_float[:,1:] # [batch, 1:timesteps, 256]
y,_ = model(xhead,starting_state=None) # [batch, 1:timesteps, 256]
def cross_entropy_loss_per_char(pred,gt): # last dim is one_hot
def log2(i):
return tf.log(i) * 1.442695
return - tf.reduce_sum(log2(pred+1e-14) * gt, axis=tf.rank(pred)-1)
loss = ct.cross_entropy_loss(y,gt)
loss_per_char = cross_entropy_loss_per_char(y,gt)
train_step = tf.train.AdamOptimizer(1e-3).minimize(
loss,var_list=model.get_weights())
def feed(minibatch):
nonlocal train_step,loss,input_text
sess = ct.get_session()
res = sess.run([loss,train_step],feed_dict={input_text:minibatch})
return res[0]
# stateful predict:
# if we have starting_state for the RNN
starting_state = tf.placeholder(tf.float32, shape=[None, None])
stateful_y, ending_state = \
model(input_text_float,starting_state=starting_state)
# if we dont have starting state for the RNN
stateful_y_init, ending_state_init = \
model(input_text_float)
def stateful_predict(st,i):
sess = ct.get_session()
if st is None: # if swe dont have starting_state for the RNN
res = sess.run([stateful_y_init,ending_state_init],
feed_dict={input_text:i})
else:
res = sess.run([stateful_y,ending_state],
feed_dict={input_text:i,starting_state:st})
return res
def loss_statistics(i):
sess = ct.get_session()
res = sess.run([loss_per_char],
feed_dict={input_text:i})
return res
return feed, stateful_predict, loss_statistics
# if you are using IPython:
# run r(1000) to train the model
# run show2(1000) to generate text
def r(ep=100):
length = len(corpus)
batch_size = 256
mbl = time_steps * batch_size
sr = length - mbl - time_steps - 2
for i in range(ep):
print('---------------------iter',i,'/',ep)
j = np.random.choice(sr)
minibatch = corpus[j:j+mbl]
minibatch.shape = [batch_size, time_steps]
loss = feed(minibatch)
print('loss:',loss)
if i%100==0 : pass#show2()
class utf8statemachine:
# byte(int)s in. character out (when ready). raises error when illegal.
def __init__(self):
self.state=0
self.buffer = []
def flush(self):
char = str(bytes(self.buffer),'utf-8')
self.buffer = []
return char
def bytein(self,b):
# assume b is uint.
if self.state==0: # normal mode
if b & 0b10000000 == 0:# if first bit is 0
self.buffer.append(b)
return self.flush()
if b & 0b11110000 == 0b11100000: # if starts with 1110
self.state=2
self.buffer.append(b)
return None
if b & 0b11100000 == 0b11000000: # if starts with 110
self.state=1
self.buffer.append(b)
return None
raise NameError('byte should start with 0b0 or 0b110 or 0b1110')
if self.state>0:
if b & 0b11000000 == 0b10000000:# if starts with 10
self.state -=1
self.buffer.append(b)
else:
raise NameError('byte should start with 0b10')
if self.state==0:
return self.flush()
return None
def show2(length=400):
import sys,os
asc_buf = np.fromstring('\n',dtype='uint8').reshape(1,1)
starting_state = None
sm = utf8statemachine()
errors = 0
# sequentially generate text out of the GRU
for i in range(length):
stateful_y, ending_state = predict(starting_state,asc_buf)
dist = stateful_y[0,0] # last dimension is the probability distribution
code = np.random.choice(256, p=dist) # sample a byte from distribution
try:
result = sm.bytein(code) # put in utf8 state machine
if result is not None: # if the state machine spit out a character
sys.stdout.write(result) # write to stdout
# accept the result if no utf-8 decoding error detected
asc_buf[0,0] = code
starting_state = ending_state
except NameError: # if decoding error
# sys.stdout.write('e')
errors += 1
# don't accept the results, try sample again next iteration
if i%10==0:
sys.stdout.flush()
pass
sys.stdout.flush()
print('')
print('total UTF-8 decoding errors:',errors)
# bullshit analyzer
def bsa(text):
buf = np.fromstring(text,dtype='uint8').reshape(1,len(text))
# what is the entropy? start from 2nd byte
loss, = loss_stats(buf)
simplified = text[0]
print(text[0],'initial')
for i in range(1,len(text)):
print(text[i],loss[0,i-1])
if loss[0,i-1] < 1: # discard words that are less than 1-bit
simplified+='-'
else:
simplified+=text[i]
print('simplified:',simplified)
argv = sys.argv
if len(argv)<2:
print('(Error)please provide a filename as the first argument. The file should be in UTF-8 encoding, without BOM.')
else:
text = get_text_data(argv[1]) # the string
corpus = np.fromstring(text,dtype='uint8') # the bytes
print('corpus loaded. corpus[0]:',corpus[0], 'text[0]:',text[0])
model = model_builder()
feed, predict, loss_stats = feed_gen()
sess = ct.get_session()
sess.run(tf.global_variables_initializer())
|
[
10,
11,
12,
13,
14
] |
2,539 |
d44c76ff7e94bea6e03324c45d139602c724c7be
|
<mask token>
|
<mask token>
def password_validation(password):
return bool(re.search(
'^(?=.*[a-z])(?=.*[A-Z])(?=.*\\d)[a-zA-Z\\d]{6,}$', password))
|
import re
def password_validation(password):
return bool(re.search(
'^(?=.*[a-z])(?=.*[A-Z])(?=.*\\d)[a-zA-Z\\d]{6,}$', password))
|
import re
def password_validation(password):
return bool(re.search("^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)[a-zA-Z\d]{6,}$", password))
| null |
[
0,
1,
2,
3
] |
2,540 |
b27e89ff799f26b87a61254e1c4a5f782fcbe605
|
class Model:
<mask token>
def derivedVariablesDependsOn(self, models):
return []
<mask token>
<mask token>
def computeSimplifiedDerivedVariables(self, args, time):
return []
def initializeState(self):
return []
<mask token>
<mask token>
|
class Model:
<mask token>
def derivedVariablesDependsOn(self, models):
return []
def initializeSimplifiedModel(self, timeHistory, stateHistory,
derivedVariablesHistory):
return False
def computeSimplifiedState(self, args, time):
return []
def computeSimplifiedDerivedVariables(self, args, time):
return []
def initializeState(self):
return []
<mask token>
<mask token>
|
class Model:
<mask token>
def derivedVariablesDependsOn(self, models):
return []
def initializeSimplifiedModel(self, timeHistory, stateHistory,
derivedVariablesHistory):
return False
def computeSimplifiedState(self, args, time):
return []
def computeSimplifiedDerivedVariables(self, args, time):
return []
def initializeState(self):
return []
def computeDerivatives(self, t, state, derived, models):
return []
<mask token>
|
class Model:
<mask token>
def derivedVariablesDependsOn(self, models):
return []
def initializeSimplifiedModel(self, timeHistory, stateHistory,
derivedVariablesHistory):
return False
def computeSimplifiedState(self, args, time):
return []
def computeSimplifiedDerivedVariables(self, args, time):
return []
def initializeState(self):
return []
def computeDerivatives(self, t, state, derived, models):
return []
def computeDerivedVariables(self, t, state):
return []
|
class Model:
def derivativesDependsOn(self, models):
return []
def derivedVariablesDependsOn(self, models):
return []
def initializeSimplifiedModel(self, timeHistory, stateHistory, derivedVariablesHistory):
return False
def computeSimplifiedState(self, args, time):
return []
def computeSimplifiedDerivedVariables(self, args, time):
return []
def initializeState(self):
return []
def computeDerivatives(self, t, state, derived, models):
return []
def computeDerivedVariables(self, t, state):
return []
|
[
4,
6,
7,
8,
10
] |
2,541 |
638e21e1eb1e2e14244628260d9c7ac179983721
|
<mask token>
|
<mask token>
while len(s) != 1:
count += 1
a = 0
for i in range(len(s)):
a += int(s[i])
s = str(a)
print(count)
|
s = input()
count = 0
while len(s) != 1:
count += 1
a = 0
for i in range(len(s)):
a += int(s[i])
s = str(a)
print(count)
|
s=input()
count=0
while(len(s)!=1):
count+=1
a=0
for i in range(len(s)):
a+=int(s[i])
s=str(a)
print(count)
| null |
[
0,
1,
2,
3
] |
2,542 |
332e2945e34c861b2132f6b42ef59416a38455a5
|
<mask token>
class TestRoomService(BaseTestCase):
<mask token>
<mask token>
<mask token>
<mask token>
class TestDeviceService(BaseTestCase):
"""Test the device service"""
def test_get_device(self):
device = create_simple_device()
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(2, data['data']['id'])
self.assertEqual('LED1', data['data']['device_name'])
self.assertEqual('simpledevices', data['data']['type'])
self.assertIn('success', data['status'])
def test_get_invalid_device(self):
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_patch_device(self):
device = create_rgb_device()
patch_data = {'red': 225}
with self.client:
response = self.client.patch('/api/devices/1', data=json.dumps(
patch_data), content_type='application/json')
response_data = json.loads(response.data.decode())
self.assertEqual(response_data['data']['device_name'], 'LED Strip1'
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response_data['data']['red'], 225)
self.assertEqual('rgbleds', response_data['data']['type'])
self.assertIn('success', response_data['status'])
def test_patch_device_invalid_attribute(self):
device = create_rgb_device()
patch_data = {'purple': 225}
with self.client:
response = self.client.patch('/api/devices/1', data=json.dumps(
patch_data), content_type='application/json')
response_data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('fail', response_data['status'])
self.assertIn('Attribute does not exist', response_data['message'])
|
<mask token>
class TestRoomService(BaseTestCase):
<mask token>
<mask token>
<mask token>
def test_get_all_rooms(self):
room1 = create_room()
room2 = create_room(room_name='Kitchen')
with self.client:
response = self.client.get('api/rooms')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data['data']['rooms']), 2)
self.assertEqual(data['data']['rooms'][0]['room_name'],
'Living Room')
self.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')
self.assertIn('success', data['status'])
class TestDeviceService(BaseTestCase):
"""Test the device service"""
def test_get_device(self):
device = create_simple_device()
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(2, data['data']['id'])
self.assertEqual('LED1', data['data']['device_name'])
self.assertEqual('simpledevices', data['data']['type'])
self.assertIn('success', data['status'])
def test_get_invalid_device(self):
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_patch_device(self):
device = create_rgb_device()
patch_data = {'red': 225}
with self.client:
response = self.client.patch('/api/devices/1', data=json.dumps(
patch_data), content_type='application/json')
response_data = json.loads(response.data.decode())
self.assertEqual(response_data['data']['device_name'], 'LED Strip1'
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response_data['data']['red'], 225)
self.assertEqual('rgbleds', response_data['data']['type'])
self.assertIn('success', response_data['status'])
def test_patch_device_invalid_attribute(self):
device = create_rgb_device()
patch_data = {'purple': 225}
with self.client:
response = self.client.patch('/api/devices/1', data=json.dumps(
patch_data), content_type='application/json')
response_data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('fail', response_data['status'])
self.assertIn('Attribute does not exist', response_data['message'])
|
<mask token>
class TestRoomService(BaseTestCase):
<mask token>
def test_get_room(self):
room = create_room()
with self.client:
response = self.client.get('/api/rooms/1')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(1, data['data']['id'])
self.assertEqual('Living Room', data['data']['room_name'])
self.assertIn('success', data['status'])
def test_invalid_room(self):
with self.client:
response = self.client.get('api/rooms/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_get_all_rooms(self):
room1 = create_room()
room2 = create_room(room_name='Kitchen')
with self.client:
response = self.client.get('api/rooms')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data['data']['rooms']), 2)
self.assertEqual(data['data']['rooms'][0]['room_name'],
'Living Room')
self.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')
self.assertIn('success', data['status'])
class TestDeviceService(BaseTestCase):
"""Test the device service"""
def test_get_device(self):
device = create_simple_device()
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(2, data['data']['id'])
self.assertEqual('LED1', data['data']['device_name'])
self.assertEqual('simpledevices', data['data']['type'])
self.assertIn('success', data['status'])
def test_get_invalid_device(self):
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_patch_device(self):
device = create_rgb_device()
patch_data = {'red': 225}
with self.client:
response = self.client.patch('/api/devices/1', data=json.dumps(
patch_data), content_type='application/json')
response_data = json.loads(response.data.decode())
self.assertEqual(response_data['data']['device_name'], 'LED Strip1'
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response_data['data']['red'], 225)
self.assertEqual('rgbleds', response_data['data']['type'])
self.assertIn('success', response_data['status'])
def test_patch_device_invalid_attribute(self):
device = create_rgb_device()
patch_data = {'purple': 225}
with self.client:
response = self.client.patch('/api/devices/1', data=json.dumps(
patch_data), content_type='application/json')
response_data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('fail', response_data['status'])
self.assertIn('Attribute does not exist', response_data['message'])
|
import unittest
import json
from app.tests.base import BaseTestCase
from app import db
from app.tests.utils import create_room, create_simple_device, create_rgb_device
class TestRoomService(BaseTestCase):
"""Test the room service"""
def test_get_room(self):
room = create_room()
with self.client:
response = self.client.get('/api/rooms/1')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(1, data['data']['id'])
self.assertEqual('Living Room', data['data']['room_name'])
self.assertIn('success', data['status'])
def test_invalid_room(self):
with self.client:
response = self.client.get('api/rooms/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_get_all_rooms(self):
room1 = create_room()
room2 = create_room(room_name='Kitchen')
with self.client:
response = self.client.get('api/rooms')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data['data']['rooms']), 2)
self.assertEqual(data['data']['rooms'][0]['room_name'],
'Living Room')
self.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')
self.assertIn('success', data['status'])
class TestDeviceService(BaseTestCase):
"""Test the device service"""
def test_get_device(self):
device = create_simple_device()
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(2, data['data']['id'])
self.assertEqual('LED1', data['data']['device_name'])
self.assertEqual('simpledevices', data['data']['type'])
self.assertIn('success', data['status'])
def test_get_invalid_device(self):
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_patch_device(self):
device = create_rgb_device()
patch_data = {'red': 225}
with self.client:
response = self.client.patch('/api/devices/1', data=json.dumps(
patch_data), content_type='application/json')
response_data = json.loads(response.data.decode())
self.assertEqual(response_data['data']['device_name'], 'LED Strip1'
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response_data['data']['red'], 225)
self.assertEqual('rgbleds', response_data['data']['type'])
self.assertIn('success', response_data['status'])
def test_patch_device_invalid_attribute(self):
device = create_rgb_device()
patch_data = {'purple': 225}
with self.client:
response = self.client.patch('/api/devices/1', data=json.dumps(
patch_data), content_type='application/json')
response_data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('fail', response_data['status'])
self.assertIn('Attribute does not exist', response_data['message'])
|
import unittest
import json
from app.tests.base import BaseTestCase
from app import db
from app.tests.utils import create_room, create_simple_device, create_rgb_device
class TestRoomService(BaseTestCase):
"""Test the room service"""
def test_get_room(self):
room = create_room()
with self.client:
response = self.client.get('/api/rooms/1')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(1, data['data']['id'])
self.assertEqual('Living Room', data['data']['room_name'])
self.assertIn('success', data['status'])
def test_invalid_room(self):
with self.client:
response = self.client.get('api/rooms/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_get_all_rooms(self):
room1 = create_room()
room2 = create_room(room_name="Kitchen")
with self.client:
response = self.client.get('api/rooms')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data['data']['rooms']), 2)
self.assertEqual(data['data']['rooms'][0]['room_name'], 'Living Room')
self.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')
self.assertIn('success', data['status'])
class TestDeviceService(BaseTestCase):
"""Test the device service"""
def test_get_device(self):
device = create_simple_device()
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(2, data['data']['id'])
self.assertEqual('LED1', data['data']['device_name'])
self.assertEqual('simpledevices', data['data']['type'])
self.assertIn('success', data['status'])
def test_get_invalid_device(self):
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_patch_device(self):
device = create_rgb_device()
patch_data = {'red': 225}
with self.client:
response = self.client.patch(
'/api/devices/1',
data=json.dumps(patch_data),
content_type='application/json'
)
response_data = json.loads(response.data.decode())
self.assertEqual(response_data['data']['device_name'], 'LED Strip1')
self.assertEqual(response.status_code, 200)
self.assertEqual(response_data['data']['red'], 225)
self.assertEqual('rgbleds', response_data['data']['type'])
self.assertIn('success', response_data['status'])
def test_patch_device_invalid_attribute(self):
device = create_rgb_device()
patch_data = {'purple': 225}
with self.client:
response = self.client.patch(
'/api/devices/1',
data=json.dumps(patch_data),
content_type='application/json'
)
response_data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('fail', response_data['status'])
self.assertIn('Attribute does not exist', response_data['message'])
|
[
7,
8,
10,
12,
13
] |
2,543 |
973fc3a973d952cb0f192221dfda63e255e4a8a0
|
<mask token>
|
<mask token>
def nextsteps(point):
for ns in nextsteps2d(point):
yield ns
if point in portals:
yield portals[point]
def should_visit(point):
return lines[point[0]][point[1]] == '.'
<mask token>
|
<mask token>
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
lines = sys.stdin.readlines()
i_max = len(lines)
j_max = len(lines[0])
deltas = [((0, 0), (0, 1), (0, 2)), ((0, 1), (0, 2), (0, 0)), ((0, 0), (1,
0), (2, 0)), ((1, 0), (2, 0), (0, 0))]
portals = {}
for i in range(i_max - 2):
for j in range(j_max - 2):
for d in deltas:
if lines[i + d[0][0]][j + d[0][1]] in LETTERS and lines[i + d[1][0]
][j + d[1][1]] in LETTERS and lines[i + d[2][0]][j + d[2][1]
] == '.':
portal = lines[i + d[0][0]][j + d[0][1]] + lines[i + d[1][0]][
j + d[1][1]]
this_end = i + d[2][0], j + d[2][1]
if portal in portals:
other_end = portals.pop(portal)
portals[this_end] = other_end
portals[other_end] = this_end
else:
portals[portal] = this_end
distance = {}
def nextsteps(point):
for ns in nextsteps2d(point):
yield ns
if point in portals:
yield portals[point]
def should_visit(point):
return lines[point[0]][point[1]] == '.'
part1 = bfs_visited(origin=portals['AA'], should_visit=should_visit,
nextsteps=nextsteps, destination=portals['ZZ'])
print(part1)
i, j = portals['AA']
origin = i, j, 0
i, j = portals['ZZ']
destination = i, j, 0
def nextsteps_with_recursion(point):
i, j, level = point
for i1, j1 in nextsteps2d((i, j)):
yield i1, j1, level
if (i, j) in portals:
if i == 2 or j == 2 or i == i_max - 3 or j == j_max - 4:
if level > 0:
i, j = portals[i, j]
yield i, j, level - 1
else:
i, j = portals[i, j]
yield i, j, level + 1
part2 = bfs_visited(origin=origin, should_visit=should_visit, nextsteps=
nextsteps_with_recursion, destination=destination)
print(part2)
|
import sys
import queue as q
from utils import *
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
lines = sys.stdin.readlines()
i_max = len(lines)
j_max = len(lines[0])
deltas = [((0, 0), (0, 1), (0, 2)), ((0, 1), (0, 2), (0, 0)), ((0, 0), (1,
0), (2, 0)), ((1, 0), (2, 0), (0, 0))]
portals = {}
for i in range(i_max - 2):
for j in range(j_max - 2):
for d in deltas:
if lines[i + d[0][0]][j + d[0][1]] in LETTERS and lines[i + d[1][0]
][j + d[1][1]] in LETTERS and lines[i + d[2][0]][j + d[2][1]
] == '.':
portal = lines[i + d[0][0]][j + d[0][1]] + lines[i + d[1][0]][
j + d[1][1]]
this_end = i + d[2][0], j + d[2][1]
if portal in portals:
other_end = portals.pop(portal)
portals[this_end] = other_end
portals[other_end] = this_end
else:
portals[portal] = this_end
distance = {}
def nextsteps(point):
for ns in nextsteps2d(point):
yield ns
if point in portals:
yield portals[point]
def should_visit(point):
return lines[point[0]][point[1]] == '.'
part1 = bfs_visited(origin=portals['AA'], should_visit=should_visit,
nextsteps=nextsteps, destination=portals['ZZ'])
print(part1)
i, j = portals['AA']
origin = i, j, 0
i, j = portals['ZZ']
destination = i, j, 0
def nextsteps_with_recursion(point):
i, j, level = point
for i1, j1 in nextsteps2d((i, j)):
yield i1, j1, level
if (i, j) in portals:
if i == 2 or j == 2 or i == i_max - 3 or j == j_max - 4:
if level > 0:
i, j = portals[i, j]
yield i, j, level - 1
else:
i, j = portals[i, j]
yield i, j, level + 1
part2 = bfs_visited(origin=origin, should_visit=should_visit, nextsteps=
nextsteps_with_recursion, destination=destination)
print(part2)
|
import sys
import queue as q
from utils import *
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
lines = sys.stdin.readlines()
i_max = len(lines)
j_max = len(lines[0])
deltas = [
((0, 0), (0, 1), (0, 2)),
((0, 1), (0, 2), (0, 0)),
((0, 0), (1, 0), (2, 0)),
((1, 0), (2, 0), (0, 0)),
]
portals = {}
for i in range(i_max - 2):
for j in range(j_max - 2):
for d in deltas:
if (lines[i+d[0][0]][j+d[0][1]] in LETTERS and
lines[i+d[1][0]][j+d[1][1]] in LETTERS and
lines[i+d[2][0]][j+d[2][1]] == '.'):
portal = lines[i+d[0][0]][j+d[0][1]] + lines[i+d[1][0]][j+d[1][1]]
this_end = (i+d[2][0], j+d[2][1])
if portal in portals:
other_end = portals.pop(portal)
portals[this_end] = other_end
portals[other_end] = this_end
else:
portals[portal] = this_end
# Part I
distance = {}
def nextsteps(point):
for ns in nextsteps2d(point):
yield ns
if point in portals:
yield portals[point]
def should_visit(point):
return lines[point[0]][point[1]] == '.'
part1 = bfs_visited(
origin=portals['AA'],
should_visit=should_visit,
nextsteps=nextsteps,
destination=portals['ZZ'])
print(part1)
# Part II
i, j = portals['AA']
origin = (i, j, 0)
i, j = portals['ZZ']
destination = (i, j, 0)
def nextsteps_with_recursion(point):
i, j, level = point
for i1, j1 in nextsteps2d((i, j)):
yield (i1, j1, level)
if (i, j) in portals:
if i == 2 or j == 2 or i == i_max - 3 or j == j_max - 4:
if level > 0:
i, j = portals[i, j]
yield (i, j, level-1)
else:
i, j = portals[i, j]
yield (i, j, level+1)
part2 = bfs_visited(
origin=origin,
should_visit=should_visit,
nextsteps=nextsteps_with_recursion,
destination=destination)
print(part2)
|
[
0,
2,
5,
6,
7
] |
2,544 |
d3382ead1d98ba2fb15fe3ea277430f1bb07131c
|
<mask token>
|
<mask token>
print(TSA, V)
|
l, w, h = map(int, input().split())
TSA = 2 * (l * w + w * h + h * l)
V = l * w * h
print(TSA, V)
|
l,w,h=map(int,input().split())
TSA = 2*(l*w + w*h + h*l)
V = l*w*h
print(TSA,V)
| null |
[
0,
1,
2,
3
] |
2,545 |
addf92a3d4060fa9464a802a4a4378cf9eeadde4
|
<mask token>
class DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos(TeaModel):
def __init__(self, value=None, code=None):
self.value = value
self.code = code
<mask token>
def to_map(self):
_map = super(DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos,
self).to_map()
if _map is not None:
return _map
result = dict()
if self.value is not None:
result['Value'] = self.value
if self.code is not None:
result['Code'] = self.code
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Value') is not None:
self.value = m.get('Value')
if m.get('Code') is not None:
self.code = m.get('Code')
return self
class DescribeInstanceSpecInfoResponseBody(TeaModel):
def __init__(self, instance_spec_infos=None, request_id=None,
instance_id=None, version=None, expire_time=None):
self.instance_spec_infos = instance_spec_infos
self.request_id = request_id
self.instance_id = instance_id
self.version = version
self.expire_time = expire_time
def validate(self):
if self.instance_spec_infos:
for k in self.instance_spec_infos:
if k:
k.validate()
def to_map(self):
_map = super(DescribeInstanceSpecInfoResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
result['InstanceSpecInfos'] = []
if self.instance_spec_infos is not None:
for k in self.instance_spec_infos:
result['InstanceSpecInfos'].append(k.to_map() if k else None)
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.version is not None:
result['Version'] = self.version
if self.expire_time is not None:
result['ExpireTime'] = self.expire_time
return result
def from_map(self, m=None):
m = m or dict()
self.instance_spec_infos = []
if m.get('InstanceSpecInfos') is not None:
for k in m.get('InstanceSpecInfos'):
temp_model = (
DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos())
self.instance_spec_infos.append(temp_model.from_map(k))
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Version') is not None:
self.version = m.get('Version')
if m.get('ExpireTime') is not None:
self.expire_time = m.get('ExpireTime')
return self
class DescribeInstanceSpecInfoResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeInstanceSpecInfoResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeInstanceSpecInfoResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeLogServiceStatusRequest(TeaModel):
def __init__(self, instance_id=None, region=None, resource_group_id=
None, page_number=None, page_size=None, domain_names=None):
self.instance_id = instance_id
self.region = region
self.resource_group_id = resource_group_id
self.page_number = page_number
self.page_size = page_size
self.domain_names = domain_names
def validate(self):
pass
def to_map(self):
_map = super(DescribeLogServiceStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.region is not None:
result['Region'] = self.region
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
if self.page_number is not None:
result['PageNumber'] = self.page_number
if self.page_size is not None:
result['PageSize'] = self.page_size
if self.domain_names is not None:
result['DomainNames'] = self.domain_names
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Region') is not None:
self.region = m.get('Region')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
if m.get('PageNumber') is not None:
self.page_number = m.get('PageNumber')
if m.get('PageSize') is not None:
self.page_size = m.get('PageSize')
if m.get('DomainNames') is not None:
self.domain_names = m.get('DomainNames')
return self
class DescribeLogServiceStatusResponseBodyDomainStatus(TeaModel):
def __init__(self, domain=None, sls_log_active=None):
self.domain = domain
self.sls_log_active = sls_log_active
def validate(self):
pass
def to_map(self):
_map = super(DescribeLogServiceStatusResponseBodyDomainStatus, self
).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.sls_log_active is not None:
result['SlsLogActive'] = self.sls_log_active
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('SlsLogActive') is not None:
self.sls_log_active = m.get('SlsLogActive')
return self
class DescribeLogServiceStatusResponseBody(TeaModel):
def __init__(self, total_count=None, request_id=None, domain_status=None):
self.total_count = total_count
self.request_id = request_id
self.domain_status = domain_status
def validate(self):
if self.domain_status:
for k in self.domain_status:
if k:
k.validate()
def to_map(self):
_map = super(DescribeLogServiceStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.total_count is not None:
result['TotalCount'] = self.total_count
if self.request_id is not None:
result['RequestId'] = self.request_id
result['DomainStatus'] = []
if self.domain_status is not None:
for k in self.domain_status:
result['DomainStatus'].append(k.to_map() if k else None)
return result
def from_map(self, m=None):
m = m or dict()
if m.get('TotalCount') is not None:
self.total_count = m.get('TotalCount')
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.domain_status = []
if m.get('DomainStatus') is not None:
for k in m.get('DomainStatus'):
temp_model = DescribeLogServiceStatusResponseBodyDomainStatus()
self.domain_status.append(temp_model.from_map(k))
return self
class DescribeLogServiceStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeLogServiceStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeLogServiceStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeProtectionModuleCodeConfigRequest(TeaModel):
def __init__(self, source_ip=None, lang=None, code_type=None,
code_value=None, instance_id=None, resource_group_id=None):
self.source_ip = source_ip
self.lang = lang
self.code_type = code_type
self.code_value = code_value
self.instance_id = instance_id
self.resource_group_id = resource_group_id
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleCodeConfigRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.source_ip is not None:
result['SourceIp'] = self.source_ip
if self.lang is not None:
result['Lang'] = self.lang
if self.code_type is not None:
result['CodeType'] = self.code_type
if self.code_value is not None:
result['CodeValue'] = self.code_value
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('SourceIp') is not None:
self.source_ip = m.get('SourceIp')
if m.get('Lang') is not None:
self.lang = m.get('Lang')
if m.get('CodeType') is not None:
self.code_type = m.get('CodeType')
if m.get('CodeValue') is not None:
self.code_value = m.get('CodeValue')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeProtectionModuleCodeConfigResponseBody(TeaModel):
def __init__(self, request_id=None, code_configs=None):
self.request_id = request_id
self.code_configs = code_configs
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleCodeConfigResponseBody, self
).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.code_configs is not None:
result['CodeConfigs'] = self.code_configs
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('CodeConfigs') is not None:
self.code_configs = m.get('CodeConfigs')
return self
class DescribeProtectionModuleCodeConfigResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeProtectionModuleCodeConfigResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeProtectionModuleCodeConfigResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeProtectionModuleModeRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, instance_id=None,
resource_group_id=None):
self.domain = domain
self.defense_type = defense_type
self.instance_id = instance_id
self.resource_group_id = resource_group_id
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleModeRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeProtectionModuleModeResponseBody(TeaModel):
    """Payload returned by DescribeProtectionModuleMode."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('learn_status', 'LearnStatus'),
        ('request_id', 'RequestId'),
        ('mode', 'Mode'),
    )

    def __init__(self, learn_status=None, request_id=None, mode=None):
        self.learn_status = learn_status
        self.request_id = request_id
        self.mode = mode

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeProtectionModuleModeResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeProtectionModuleModeResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeProtectionModuleModeResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleModeResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleRulesRequest(TeaModel):
    """Request parameters for the paged DescribeProtectionModuleRules API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('page_size', 'PageSize'),
        ('page_number', 'PageNumber'),
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('query', 'Query'),
        ('lang', 'Lang'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, page_size=None, page_number=None, domain=None,
                 defense_type=None, query=None, lang=None, instance_id=None,
                 resource_group_id=None):
        self.page_size = page_size
        self.page_number = page_number
        self.domain = domain
        self.defense_type = defense_type
        self.query = query
        self.lang = lang
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeProtectionModuleRulesRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeProtectionModuleRulesResponseBodyRules(TeaModel):
    """A single protection-module rule entry in the rules listing."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('status', 'Status'),
        ('time', 'Time'),
        ('version', 'Version'),
        ('content', 'Content'),
        ('rule_id', 'RuleId'),
    )

    def __init__(self, status=None, time=None, version=None, content=None,
                 rule_id=None):
        self.status = status
        self.time = time
        self.version = version
        self.content = content
        self.rule_id = rule_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeProtectionModuleRulesResponseBodyRules, self
                       ).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeProtectionModuleRulesResponseBody(TeaModel):
    """Paged response payload: total count plus the list of rule models."""

    def __init__(self, total_count=None, request_id=None, rules=None):
        self.total_count = total_count
        self.request_id = request_id
        self.rules = rules

    def validate(self):
        # Delegate validation to each nested rule model, skipping falsy slots.
        for rule in self.rules or []:
            if rule:
                rule.validate()

    def to_map(self):
        cached = super(DescribeProtectionModuleRulesResponseBody, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.total_count is not None:
            out['TotalCount'] = self.total_count
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        # 'Rules' is always emitted, even when empty (generator convention).
        out['Rules'] = []
        if self.rules is not None:
            out['Rules'] = [rule.to_map() if rule else None
                            for rule in self.rules]
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.rules = []
        if m.get('Rules') is not None:
            self.rules = [
                DescribeProtectionModuleRulesResponseBodyRules().from_map(item)
                for item in m.get('Rules')
            ]
        return self
class DescribeProtectionModuleRulesResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeProtectionModuleRulesResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleRulesResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleStatusRequest(TeaModel):
    """Request parameters for the DescribeProtectionModuleStatus API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeProtectionModuleStatusRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeProtectionModuleStatusResponseBody(TeaModel):
    """Payload returned by DescribeProtectionModuleStatus."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('request_id', 'RequestId'),
        ('module_status', 'ModuleStatus'),
    )

    def __init__(self, request_id=None, module_status=None):
        self.request_id = request_id
        self.module_status = module_status

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeProtectionModuleStatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeProtectionModuleStatusResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeProtectionModuleStatusResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleStatusResponseBody().from_map(m['body'])
        return self
class DescribeWafSourceIpSegmentRequest(TeaModel):
    """Request parameters for the DescribeWafSourceIpSegment API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeWafSourceIpSegmentRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeWafSourceIpSegmentResponseBody(TeaModel):
    """Payload returned by DescribeWafSourceIpSegment."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('request_id', 'RequestId'),
        ('ip_v6s', 'IpV6s'),
        ('ips', 'Ips'),
    )

    def __init__(self, request_id=None, ip_v6s=None, ips=None):
        self.request_id = request_id
        self.ip_v6s = ip_v6s
        self.ips = ips

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeWafSourceIpSegmentResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeWafSourceIpSegmentResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeWafSourceIpSegmentResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeWafSourceIpSegmentResponseBody().from_map(m['body'])
        return self
class ModifyDomainRequest(TeaModel):
    """Request parameters for the ModifyDomain API (domain access config)."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('source_ips', 'SourceIps'),
        ('load_balancing', 'LoadBalancing'),
        ('http_port', 'HttpPort'),
        ('https_port', 'HttpsPort'),
        ('http_2port', 'Http2Port'),
        ('https_redirect', 'HttpsRedirect'),
        ('http_to_user_ip', 'HttpToUserIp'),
        ('is_access_product', 'IsAccessProduct'),
        ('log_headers', 'LogHeaders'),
        ('cluster_type', 'ClusterType'),
        ('connection_time', 'ConnectionTime'),
        ('read_time', 'ReadTime'),
        ('write_time', 'WriteTime'),
        ('access_type', 'AccessType'),
        ('cloud_native_instances', 'CloudNativeInstances'),
        ('ip_follow_status', 'IpFollowStatus'),
    )

    def __init__(self, instance_id=None, domain=None, source_ips=None,
                 load_balancing=None, http_port=None, https_port=None,
                 http_2port=None, https_redirect=None, http_to_user_ip=None,
                 is_access_product=None, log_headers=None, cluster_type=None,
                 connection_time=None, read_time=None, write_time=None,
                 access_type=None, cloud_native_instances=None,
                 ip_follow_status=None):
        self.instance_id = instance_id
        self.domain = domain
        self.source_ips = source_ips
        self.load_balancing = load_balancing
        self.http_port = http_port
        self.https_port = https_port
        self.http_2port = http_2port
        self.https_redirect = https_redirect
        self.http_to_user_ip = http_to_user_ip
        self.is_access_product = is_access_product
        self.log_headers = log_headers
        self.cluster_type = cluster_type
        self.connection_time = connection_time
        self.read_time = read_time
        self.write_time = write_time
        self.access_type = access_type
        self.cloud_native_instances = cloud_native_instances
        self.ip_follow_status = ip_follow_status

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyDomainRequest, self).to_map()
        if cached is not None:
            return cached
        # Serialize only the attributes that were actually set.
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyDomainResponseBody(TeaModel):
    """Response payload carrying only the API request identifier."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyDomainResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyDomainResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(ModifyDomainResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyDomainResponseBody().from_map(m['body'])
        return self
class ModifyDomainIpv6StatusRequest(TeaModel):
    """Request parameters for the ModifyDomainIpv6Status API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyDomainIpv6StatusRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyDomainIpv6StatusResponseBody(TeaModel):
    """Response payload carrying only the API request identifier."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyDomainIpv6StatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyDomainIpv6StatusResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(ModifyDomainIpv6StatusResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyDomainIpv6StatusResponseBody().from_map(m['body'])
        return self
class ModifyLogRetrievalStatusRequest(TeaModel):
    """Request parameters for the ModifyLogRetrievalStatus API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyLogRetrievalStatusRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyLogRetrievalStatusResponseBody(TeaModel):
    """Response payload carrying only the API request identifier."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyLogRetrievalStatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyLogRetrievalStatusResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(ModifyLogRetrievalStatusResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyLogRetrievalStatusResponseBody().from_map(m['body'])
        return self
class ModifyLogServiceStatusRequest(TeaModel):
    """Request parameters for the ModifyLogServiceStatus API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyLogServiceStatusRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyLogServiceStatusResponseBody(TeaModel):
    """Response payload carrying only the API request identifier."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyLogServiceStatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyLogServiceStatusResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(ModifyLogServiceStatusResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyLogServiceStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleModeRequest(TeaModel):
    """Request parameters for the ModifyProtectionModuleMode API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('mode', 'Mode'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, mode=None,
                 instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.mode = mode
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionModuleModeRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionModuleModeResponseBody(TeaModel):
    """Response payload carrying only the API request identifier."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionModuleModeResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionModuleModeResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(ModifyProtectionModuleModeResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleModeResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleRuleRequest(TeaModel):
    """Request parameters for the ModifyProtectionModuleRule API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('rule', 'Rule'),
        ('rule_id', 'RuleId'),
        ('lock_version', 'LockVersion'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, rule=None,
                 rule_id=None, lock_version=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.rule = rule
        self.rule_id = rule_id
        self.lock_version = lock_version
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionModuleRuleRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionModuleRuleResponseBody(TeaModel):
    """Response payload carrying only the API request identifier."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionModuleRuleResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionModuleRuleResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(ModifyProtectionModuleRuleResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleRuleResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionModuleStatus API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('module_status', 'ModuleStatus'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, module_status=None,
                 instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.module_status = module_status
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionModuleStatusRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionModuleStatusResponseBody(TeaModel):
    """Response payload carrying only the API request identifier."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionModuleStatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionModuleStatusResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(ModifyProtectionModuleStatusResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionRuleCacheStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionRuleCacheStatus API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('rule_id', 'RuleId'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, rule_id=None, defense_type=None,
                 instance_id=None):
        self.domain = domain
        self.rule_id = rule_id
        self.defense_type = defense_type
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionRuleCacheStatusRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionRuleCacheStatusResponseBody(TeaModel):
    """Response payload carrying only the API request identifier."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionRuleCacheStatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionRuleCacheStatusResponse(TeaModel):
    """HTTP-level wrapper: response headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both pieces are mandatory on a returned response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(ModifyProtectionRuleCacheStatusResponse, self).to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionRuleCacheStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionRuleStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionRuleStatus API."""

    # (python attribute, wire key) pairs; order fixes the serialized key order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('rule_id', 'RuleId'),
        ('rule_status', 'RuleStatus'),
        ('lock_version', 'LockVersion'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, rule_id=None,
                 rule_status=None, lock_version=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.rule_id = rule_id
        self.rule_status = rule_status
        self.lock_version = lock_version
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(ModifyProtectionRuleStatusRequest, self).to_map()
        if cached is not None:
            return cached
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionRuleStatusResponseBody(TeaModel):
    """Response body for ModifyProtectionRuleStatus; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        # No client-side constraints on this model.
        pass

    def to_map(self):
        mapped = super(ModifyProtectionRuleStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        m = m or {}
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyProtectionRuleStatusResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed ModifyProtectionRuleStatus body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyProtectionRuleStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionRuleStatusResponseBody().from_map(m['body'])
        return self
class SetDomainRuleGroupRequest(TeaModel):
    """Request parameters for binding a rule group to one or more domains."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('domains', 'Domains'),
        ('rule_group_id', 'RuleGroupId'),
        ('waf_version', 'WafVersion'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, domains=None, rule_group_id=None, waf_version=None,
                 instance_id=None, resource_group_id=None):
        self.domains = domains
        self.rule_group_id = rule_group_id
        self.waf_version = waf_version
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(SetDomainRuleGroupRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SetDomainRuleGroupResponseBody(TeaModel):
    """Response body for SetDomainRuleGroup; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        # No client-side constraints on this model.
        pass

    def to_map(self):
        mapped = super(SetDomainRuleGroupResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        m = m or {}
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class SetDomainRuleGroupResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed SetDomainRuleGroup body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(SetDomainRuleGroupResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = SetDomainRuleGroupResponseBody().from_map(m['body'])
        return self
# NOTE(review): corrupted placeholder lines removed here; in the full module,
# DescribeInstanceInfoRequest/DescribeInstanceInfoResponseBody are defined in this gap.
class DescribeInstanceInfoResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed DescribeInstanceInfo body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeInstanceInfoResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceInfoResponseBody().from_map(m['body'])
        return self
class DescribeInstanceInfosRequest(TeaModel):
    """Request parameters for listing WAF instance information."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('instance_source', 'InstanceSource'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_source=None, instance_id=None,
                 resource_group_id=None):
        self.instance_source = instance_source
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeInstanceInfosRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfosResponseBodyInstanceInfos(TeaModel):
    """One instance-info record in the DescribeInstanceInfos response."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('status', 'Status'),
        ('end_date', 'EndDate'),
        ('remain_day', 'RemainDay'),
        ('region', 'Region'),
        ('pay_type', 'PayType'),
        ('in_debt', 'InDebt'),
        ('instance_id', 'InstanceId'),
        ('subscription_type', 'SubscriptionType'),
        ('trial', 'Trial'),
    )

    def __init__(self, status=None, end_date=None, remain_day=None, region=
                 None, pay_type=None, in_debt=None, instance_id=None,
                 subscription_type=None, trial=None):
        self.status = status
        self.end_date = end_date
        self.remain_day = remain_day
        self.region = region
        self.pay_type = pay_type
        self.in_debt = in_debt
        self.instance_id = instance_id
        self.subscription_type = subscription_type
        self.trial = trial

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeInstanceInfosResponseBodyInstanceInfos, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfosResponseBody(TeaModel):
    """Response body: request id plus the list of per-instance info records."""

    def __init__(self, request_id=None, instance_infos=None):
        self.request_id = request_id
        self.instance_infos = instance_infos

    def validate(self):
        # Deep-validate each nested record that is present.
        for info in self.instance_infos or []:
            if info:
                info.validate()

    def to_map(self):
        mapped = super(DescribeInstanceInfosResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        infos = self.instance_infos
        # 'InstanceInfos' is always emitted, defaulting to an empty list.
        result['InstanceInfos'] = ([info.to_map() if info else None for info in infos]
                                   if infos is not None else [])
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.instance_infos = [
            DescribeInstanceInfosResponseBodyInstanceInfos().from_map(item)
            for item in (m.get('InstanceInfos') or [])
        ]
        return self
class DescribeInstanceInfosResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed DescribeInstanceInfos body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeInstanceInfosResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceInfosResponseBody().from_map(m['body'])
        return self
class DescribeInstanceSpecInfoRequest(TeaModel):
    """Request parameters for querying a WAF instance's specification."""

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeInstanceSpecInfoRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.resource_group_id is not None:
            result['ResourceGroupId'] = self.resource_group_id
        return result

    def from_map(self, m=None):
        m = m or {}
        instance_id = m.get('InstanceId')
        if instance_id is not None:
            self.instance_id = instance_id
        resource_group_id = m.get('ResourceGroupId')
        if resource_group_id is not None:
            self.resource_group_id = resource_group_id
        return self
class DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos(TeaModel):
    """One spec entry (code/value pair) in the DescribeInstanceSpecInfo response."""

    def __init__(self, value=None, code=None):
        self.value = value
        self.code = code

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.value is not None:
            result['Value'] = self.value
        if self.code is not None:
            result['Code'] = self.code
        return result

    def from_map(self, m=None):
        m = m or {}
        value = m.get('Value')
        if value is not None:
            self.value = value
        code = m.get('Code')
        if code is not None:
            self.code = code
        return self
class DescribeInstanceSpecInfoResponseBody(TeaModel):
    """Response body: spec entries plus instance metadata (id, version, expiry)."""

    def __init__(self, instance_spec_infos=None, request_id=None,
                 instance_id=None, version=None, expire_time=None):
        self.instance_spec_infos = instance_spec_infos
        self.request_id = request_id
        self.instance_id = instance_id
        self.version = version
        self.expire_time = expire_time

    def validate(self):
        # Deep-validate each nested spec entry that is present.
        for info in self.instance_spec_infos or []:
            if info:
                info.validate()

    def to_map(self):
        mapped = super(DescribeInstanceSpecInfoResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        infos = self.instance_spec_infos
        # 'InstanceSpecInfos' is always emitted, defaulting to an empty list.
        result = {'InstanceSpecInfos': [i.to_map() if i else None for i in infos]
                  if infos is not None else []}
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.version is not None:
            result['Version'] = self.version
        if self.expire_time is not None:
            result['ExpireTime'] = self.expire_time
        return result

    def from_map(self, m=None):
        m = m or {}
        self.instance_spec_infos = [
            DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos().from_map(item)
            for item in (m.get('InstanceSpecInfos') or [])
        ]
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Version') is not None:
            self.version = m.get('Version')
        if m.get('ExpireTime') is not None:
            self.expire_time = m.get('ExpireTime')
        return self
class DescribeInstanceSpecInfoResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed DescribeInstanceSpecInfo body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeInstanceSpecInfoResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceSpecInfoResponseBody().from_map(m['body'])
        return self
class DescribeLogServiceStatusRequest(TeaModel):
    """Paged request parameters for querying log-service status per domain."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('instance_id', 'InstanceId'),
        ('region', 'Region'),
        ('resource_group_id', 'ResourceGroupId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('domain_names', 'DomainNames'),
    )

    def __init__(self, instance_id=None, region=None, resource_group_id=
                 None, page_number=None, page_size=None, domain_names=None):
        self.instance_id = instance_id
        self.region = region
        self.resource_group_id = resource_group_id
        self.page_number = page_number
        self.page_size = page_size
        self.domain_names = domain_names

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeLogServiceStatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeLogServiceStatusResponseBodyDomainStatus(TeaModel):
    """One per-domain log-service status record."""

    def __init__(self, domain=None, sls_log_active=None):
        self.domain = domain
        self.sls_log_active = sls_log_active

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeLogServiceStatusResponseBodyDomainStatus, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.sls_log_active is not None:
            result['SlsLogActive'] = self.sls_log_active
        return result

    def from_map(self, m=None):
        m = m or {}
        domain = m.get('Domain')
        if domain is not None:
            self.domain = domain
        sls_log_active = m.get('SlsLogActive')
        if sls_log_active is not None:
            self.sls_log_active = sls_log_active
        return self
class DescribeLogServiceStatusResponseBody(TeaModel):
    """Response body: total count, request id, and per-domain status records."""

    def __init__(self, total_count=None, request_id=None, domain_status=None):
        self.total_count = total_count
        self.request_id = request_id
        self.domain_status = domain_status

    def validate(self):
        # Deep-validate each nested record that is present.
        for status in self.domain_status or []:
            if status:
                status.validate()

    def to_map(self):
        mapped = super(DescribeLogServiceStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        statuses = self.domain_status
        # 'DomainStatus' is always emitted, defaulting to an empty list.
        result['DomainStatus'] = ([s.to_map() if s else None for s in statuses]
                                  if statuses is not None else [])
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.domain_status = [
            DescribeLogServiceStatusResponseBodyDomainStatus().from_map(item)
            for item in (m.get('DomainStatus') or [])
        ]
        return self
class DescribeLogServiceStatusResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed DescribeLogServiceStatus body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeLogServiceStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeLogServiceStatusResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleCodeConfigRequest(TeaModel):
    """Request parameters for fetching protection-module code configuration."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('source_ip', 'SourceIp'),
        ('lang', 'Lang'),
        ('code_type', 'CodeType'),
        ('code_value', 'CodeValue'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, source_ip=None, lang=None, code_type=None,
                 code_value=None, instance_id=None, resource_group_id=None):
        self.source_ip = source_ip
        self.lang = lang
        self.code_type = code_type
        self.code_value = code_value
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeProtectionModuleCodeConfigRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleCodeConfigResponseBody(TeaModel):
    """Response body: request id plus the raw code-config payload."""

    def __init__(self, request_id=None, code_configs=None):
        self.request_id = request_id
        self.code_configs = code_configs

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeProtectionModuleCodeConfigResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.code_configs is not None:
            result['CodeConfigs'] = self.code_configs
        return result

    def from_map(self, m=None):
        m = m or {}
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        code_configs = m.get('CodeConfigs')
        if code_configs is not None:
            self.code_configs = code_configs
        return self
class DescribeProtectionModuleCodeConfigResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed DescribeProtectionModuleCodeConfig body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeProtectionModuleCodeConfigResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleCodeConfigResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleModeRequest(TeaModel):
    """Request parameters for querying a protection module's mode."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, domain=None, defense_type=None, instance_id=None,
                 resource_group_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeProtectionModuleModeRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleModeResponseBody(TeaModel):
    """Response body: learn status, request id, and the module's mode."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('learn_status', 'LearnStatus'),
        ('request_id', 'RequestId'),
        ('mode', 'Mode'),
    )

    def __init__(self, learn_status=None, request_id=None, mode=None):
        self.learn_status = learn_status
        self.request_id = request_id
        self.mode = mode

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeProtectionModuleModeResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleModeResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed DescribeProtectionModuleMode body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeProtectionModuleModeResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleModeResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleRulesRequest(TeaModel):
    """Paged request parameters for listing protection-module rules."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('page_size', 'PageSize'),
        ('page_number', 'PageNumber'),
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('query', 'Query'),
        ('lang', 'Lang'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, page_size=None, page_number=None, domain=None,
                 defense_type=None, query=None, lang=None, instance_id=None,
                 resource_group_id=None):
        self.page_size = page_size
        self.page_number = page_number
        self.domain = domain
        self.defense_type = defense_type
        self.query = query
        self.lang = lang
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeProtectionModuleRulesRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleRulesResponseBodyRules(TeaModel):
    """One protection rule record in the DescribeProtectionModuleRules response."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('status', 'Status'),
        ('time', 'Time'),
        ('version', 'Version'),
        ('content', 'Content'),
        ('rule_id', 'RuleId'),
    )

    def __init__(self, status=None, time=None, version=None, content=None,
                 rule_id=None):
        self.status = status
        self.time = time
        self.version = version
        self.content = content
        self.rule_id = rule_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeProtectionModuleRulesResponseBodyRules, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleRulesResponseBody(TeaModel):
    """Response body: total count, request id, and the list of rule records."""

    def __init__(self, total_count=None, request_id=None, rules=None):
        self.total_count = total_count
        self.request_id = request_id
        self.rules = rules

    def validate(self):
        # Deep-validate each nested rule that is present.
        for rule in self.rules or []:
            if rule:
                rule.validate()

    def to_map(self):
        mapped = super(DescribeProtectionModuleRulesResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        rules = self.rules
        # 'Rules' is always emitted, defaulting to an empty list.
        result['Rules'] = ([r.to_map() if r else None for r in rules]
                           if rules is not None else [])
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.rules = [
            DescribeProtectionModuleRulesResponseBodyRules().from_map(item)
            for item in (m.get('Rules') or [])
        ]
        return self
class DescribeProtectionModuleRulesResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed DescribeProtectionModuleRules body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeProtectionModuleRulesResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleRulesResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleStatusRequest(TeaModel):
    """Request parameters for querying a protection module's on/off status."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeProtectionModuleStatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleStatusResponseBody(TeaModel):
    """Response body: request id plus the module's current status."""

    def __init__(self, request_id=None, module_status=None):
        self.request_id = request_id
        self.module_status = module_status

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeProtectionModuleStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.module_status is not None:
            result['ModuleStatus'] = self.module_status
        return result

    def from_map(self, m=None):
        m = m or {}
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        module_status = m.get('ModuleStatus')
        if module_status is not None:
            self.module_status = module_status
        return self
class DescribeProtectionModuleStatusResponse(TeaModel):
    """Pairs the raw HTTP headers with the parsed DescribeProtectionModuleStatus body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory; deep validation is delegated to the body model.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeProtectionModuleStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleStatusResponseBody().from_map(m['body'])
        return self
class DescribeWafSourceIpSegmentRequest(TeaModel):
    """Request parameters for listing WAF back-to-origin IP segments."""

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeWafSourceIpSegmentRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.resource_group_id is not None:
            result['ResourceGroupId'] = self.resource_group_id
        return result

    def from_map(self, m=None):
        m = m or {}
        instance_id = m.get('InstanceId')
        if instance_id is not None:
            self.instance_id = instance_id
        resource_group_id = m.get('ResourceGroupId')
        if resource_group_id is not None:
            self.resource_group_id = resource_group_id
        return self
class DescribeWafSourceIpSegmentResponseBody(TeaModel):
    """Response body: request id plus the IPv6 and IPv4 segment payloads."""

    # (python attribute, wire key) pairs, kept in the original wire order.
    _PAIRS = (
        ('request_id', 'RequestId'),
        ('ip_v6s', 'IpV6s'),
        ('ips', 'Ips'),
    )

    def __init__(self, request_id=None, ip_v6s=None, ips=None):
        self.request_id = request_id
        self.ip_v6s = ip_v6s
        self.ips = ips

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeWafSourceIpSegmentResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = {}
        # Serialize only the attributes that were actually set.
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWafSourceIpSegmentResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(DescribeWafSourceIpSegmentResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeWafSourceIpSegmentResponseBody().from_map(m['body'])
        return self
class ModifyDomainRequest(TeaModel):
    """Parameters for the ModifyDomain API operation (edit a protected domain's config)."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('source_ips', 'SourceIps'),
        ('load_balancing', 'LoadBalancing'),
        ('http_port', 'HttpPort'),
        ('https_port', 'HttpsPort'),
        ('http_2port', 'Http2Port'),
        ('https_redirect', 'HttpsRedirect'),
        ('http_to_user_ip', 'HttpToUserIp'),
        ('is_access_product', 'IsAccessProduct'),
        ('log_headers', 'LogHeaders'),
        ('cluster_type', 'ClusterType'),
        ('connection_time', 'ConnectionTime'),
        ('read_time', 'ReadTime'),
        ('write_time', 'WriteTime'),
        ('access_type', 'AccessType'),
        ('cloud_native_instances', 'CloudNativeInstances'),
        ('ip_follow_status', 'IpFollowStatus'),
    )

    def __init__(self, instance_id=None, domain=None, source_ips=None,
                 load_balancing=None, http_port=None, https_port=None,
                 http_2port=None, https_redirect=None, http_to_user_ip=None,
                 is_access_product=None, log_headers=None, cluster_type=None,
                 connection_time=None, read_time=None, write_time=None,
                 access_type=None, cloud_native_instances=None,
                 ip_follow_status=None):
        self.instance_id = instance_id
        self.domain = domain
        self.source_ips = source_ips
        self.load_balancing = load_balancing
        self.http_port = http_port
        self.https_port = https_port
        self.http_2port = http_2port
        self.https_redirect = https_redirect
        self.http_to_user_ip = http_to_user_ip
        self.is_access_product = is_access_product
        self.log_headers = log_headers
        self.cluster_type = cluster_type
        self.connection_time = connection_time
        self.read_time = read_time
        self.write_time = write_time
        self.access_type = access_type
        self.cloud_native_instances = cloud_native_instances
        self.ip_follow_status = ip_follow_status

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyDomainRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyDomainResponseBody(TeaModel):
    """Body of the ModifyDomain response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyDomainResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyDomainResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyDomainResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyDomainResponseBody().from_map(m['body'])
        return self
class ModifyDomainIpv6StatusRequest(TeaModel):
    """Parameters for the ModifyDomainIpv6Status API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyDomainIpv6StatusRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyDomainIpv6StatusResponseBody(TeaModel):
    """Body of the ModifyDomainIpv6Status response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyDomainIpv6StatusResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyDomainIpv6StatusResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyDomainIpv6StatusResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyDomainIpv6StatusResponseBody().from_map(m['body'])
        return self
class ModifyLogRetrievalStatusRequest(TeaModel):
    """Parameters for the ModifyLogRetrievalStatus API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyLogRetrievalStatusRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyLogRetrievalStatusResponseBody(TeaModel):
    """Body of the ModifyLogRetrievalStatus response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyLogRetrievalStatusResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyLogRetrievalStatusResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyLogRetrievalStatusResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyLogRetrievalStatusResponseBody().from_map(m['body'])
        return self
class ModifyLogServiceStatusRequest(TeaModel):
    """Parameters for the ModifyLogServiceStatus API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyLogServiceStatusRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyLogServiceStatusResponseBody(TeaModel):
    """Body of the ModifyLogServiceStatus response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyLogServiceStatusResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyLogServiceStatusResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyLogServiceStatusResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyLogServiceStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleModeRequest(TeaModel):
    """Parameters for the ModifyProtectionModuleMode API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('mode', 'Mode'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, mode=None,
                 instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.mode = mode
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionModuleModeRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyProtectionModuleModeResponseBody(TeaModel):
    """Body of the ModifyProtectionModuleMode response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionModuleModeResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionModuleModeResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyProtectionModuleModeResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleModeResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleRuleRequest(TeaModel):
    """Parameters for the ModifyProtectionModuleRule API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('rule', 'Rule'),
        ('rule_id', 'RuleId'),
        ('lock_version', 'LockVersion'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, rule=None,
                 rule_id=None, lock_version=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.rule = rule
        self.rule_id = rule_id
        self.lock_version = lock_version
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionModuleRuleRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyProtectionModuleRuleResponseBody(TeaModel):
    """Body of the ModifyProtectionModuleRule response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionModuleRuleResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionModuleRuleResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyProtectionModuleRuleResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleRuleResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleStatusRequest(TeaModel):
    """Parameters for the ModifyProtectionModuleStatus API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('module_status', 'ModuleStatus'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, module_status=None,
                 instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.module_status = module_status
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionModuleStatusRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyProtectionModuleStatusResponseBody(TeaModel):
    """Body of the ModifyProtectionModuleStatus response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionModuleStatusResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionModuleStatusResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyProtectionModuleStatusResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionRuleCacheStatusRequest(TeaModel):
    """Parameters for the ModifyProtectionRuleCacheStatus API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('rule_id', 'RuleId'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, rule_id=None, defense_type=None,
                 instance_id=None):
        self.domain = domain
        self.rule_id = rule_id
        self.defense_type = defense_type
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionRuleCacheStatusRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyProtectionRuleCacheStatusResponseBody(TeaModel):
    """Body of the ModifyProtectionRuleCacheStatus response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionRuleCacheStatusResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionRuleCacheStatusResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyProtectionRuleCacheStatusResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionRuleCacheStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionRuleStatusRequest(TeaModel):
    """Parameters for the ModifyProtectionRuleStatus API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('rule_id', 'RuleId'),
        ('rule_status', 'RuleStatus'),
        ('lock_version', 'LockVersion'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, rule_id=None,
                 rule_status=None, lock_version=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.rule_id = rule_id
        self.rule_status = rule_status
        self.lock_version = lock_version
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionRuleStatusRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyProtectionRuleStatusResponseBody(TeaModel):
    """Body of the ModifyProtectionRuleStatus response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(ModifyProtectionRuleStatusResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionRuleStatusResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(ModifyProtectionRuleStatusResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionRuleStatusResponseBody().from_map(m['body'])
        return self
class SetDomainRuleGroupRequest(TeaModel):
    """Parameters for the SetDomainRuleGroup API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('domains', 'Domains'),
        ('rule_group_id', 'RuleGroupId'),
        ('waf_version', 'WafVersion'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, domains=None, rule_group_id=None, waf_version=None,
                 instance_id=None, resource_group_id=None):
        self.domains = domains
        self.rule_group_id = rule_group_id
        self.waf_version = waf_version
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(SetDomainRuleGroupRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SetDomainRuleGroupResponseBody(TeaModel):
    """Body of the SetDomainRuleGroup response; carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(SetDomainRuleGroupResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class SetDomainRuleGroupResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(SetDomainRuleGroupResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = SetDomainRuleGroupResponseBody().from_map(m['body'])
        return self
|
<mask token>
class DescribeCertificatesResponseBody(TeaModel):
def __init__(self, request_id=None, certificates=None):
self.request_id = request_id
self.certificates = certificates
<mask token>
<mask token>
<mask token>
class DescribeCertificatesResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(DescribeCertificatesResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeCertificatesResponseBody().from_map(m['body'])
        return self
class DescribeCertMatchStatusRequest(TeaModel):
    """Parameters for the DescribeCertMatchStatus API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('certificate', 'Certificate'),
        ('private_key', 'PrivateKey'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, certificate=None, private_key=None,
                 instance_id=None):
        self.domain = domain
        self.certificate = certificate
        self.private_key = private_key
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        inherited = super(DescribeCertMatchStatusRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeCertMatchStatusResponseBody(TeaModel):
    """Body of the DescribeCertMatchStatus response."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('request_id', 'RequestId'),
        ('match_status', 'MatchStatus'),
    )

    def __init__(self, request_id=None, match_status=None):
        self.request_id = request_id
        self.match_status = match_status

    def validate(self):
        pass

    def to_map(self):
        inherited = super(DescribeCertMatchStatusResponseBody, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeCertMatchStatusResponse(TeaModel):
    """Response envelope: transport headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both envelope parts are mandatory; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        inherited = super(DescribeCertMatchStatusResponse, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeCertMatchStatusResponseBody().from_map(m['body'])
        return self
class DescribeDomainRequest(TeaModel):
    """Parameters for the DescribeDomain API operation."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
    )

    def __init__(self, instance_id=None, domain=None):
        self.instance_id = instance_id
        self.domain = domain

    def validate(self):
        pass

    def to_map(self):
        inherited = super(DescribeDomainRequest, self).to_map()
        if inherited is not None:
            return inherited
        serialized = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs(TeaModel):
    """Protocol/ports pairing for a cloud-native instance entry."""

    def __init__(self, protocol=None, ports=None):
        # Protocol name for this entry.
        self.protocol = protocol
        # Ports associated with the protocol.
        self.ports = ports

    def validate(self):
        """No constraints: both fields are optional scalars."""

    def to_map(self):
        """Serialize this model to a plain dict, skipping unset fields."""
        serialized = super(
            DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs,
            self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('protocol', 'Protocol'), ('ports', 'Ports')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this model in place from a dict and return self."""
        m = m or {}
        for attr, key in (('protocol', 'Protocol'), ('ports', 'Ports')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainResponseBodyDomainCloudNativeInstances(TeaModel):
    """A cloud-native product instance associated with the domain."""

    # Scalar (attribute, wire-key) pairs shared by to_map/from_map.
    def __init__(self, protocol_port_configs=None, redirection_type_name=None,
                 cloud_native_product_name=None, instance_id=None,
                 ipaddress_list=None):
        # Nested list of ProtocolPortConfigs models.
        self.protocol_port_configs = protocol_port_configs
        self.redirection_type_name = redirection_type_name
        self.cloud_native_product_name = cloud_native_product_name
        self.instance_id = instance_id
        self.ipaddress_list = ipaddress_list

    def validate(self):
        # Only the nested port configs need recursive validation.
        for cfg in self.protocol_port_configs or []:
            if cfg:
                cfg.validate()

    def to_map(self):
        """Serialize to a dict; 'ProtocolPortConfigs' is always emitted."""
        serialized = super(
            DescribeDomainResponseBodyDomainCloudNativeInstances, self).to_map()
        if serialized is not None:
            return serialized
        payload = {'ProtocolPortConfigs': []}
        if self.protocol_port_configs is not None:
            payload['ProtocolPortConfigs'] = [
                cfg.to_map() if cfg else None
                for cfg in self.protocol_port_configs
            ]
        for attr, key in (('redirection_type_name', 'RedirectionTypeName'),
                          ('cloud_native_product_name', 'CloudNativeProductName'),
                          ('instance_id', 'InstanceId'),
                          ('ipaddress_list', 'IPAddressList')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate from a dict, rebuilding nested port-config models."""
        m = m or {}
        # The list is always reset, even when the key is absent.
        self.protocol_port_configs = []
        if m.get('ProtocolPortConfigs') is not None:
            self.protocol_port_configs = [
                DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs().from_map(item)
                for item in m['ProtocolPortConfigs']
            ]
        for attr, key in (('redirection_type_name', 'RedirectionTypeName'),
                          ('cloud_native_product_name', 'CloudNativeProductName'),
                          ('instance_id', 'InstanceId'),
                          ('ipaddress_list', 'IPAddressList')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainResponseBodyDomainLogHeaders(TeaModel):
    """A single key/value log-header entry (wire keys 'k' and 'v')."""

    def __init__(self, k=None, v=None):
        # Header key.
        self.k = k
        # Header value.
        self.v = v

    def validate(self):
        """No constraints: both fields are optional scalars."""

    def to_map(self):
        """Serialize this entry to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainResponseBodyDomainLogHeaders, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr in ('k', 'v'):
            value = getattr(self, attr)
            if value is not None:
                payload[attr] = value
        return payload

    def from_map(self, m=None):
        """Populate this entry in place from a dict and return self."""
        m = m or {}
        for attr in ('k', 'v'):
            if m.get(attr) is not None:
                setattr(self, attr, m[attr])
        return self
class DescribeDomainResponseBodyDomain(TeaModel):
    """Full configuration record for a protected domain (DescribeDomain)."""

    def __init__(self, http_2port=None, cloud_native_instances=None,
                 http_to_user_ip=None, http_port=None, log_headers=None,
                 is_access_product=None, access_headers=None,
                 access_header_mode=None, https_redirect=None,
                 load_balancing=None, ip_follow_status=None, access_type=None,
                 version=None, cluster_type=None, read_time=None,
                 write_time=None, resource_group_id=None, cname=None,
                 source_ips=None, connection_time=None, https_port=None):
        self.http_2port = http_2port
        # Nested list of CloudNativeInstances models.
        self.cloud_native_instances = cloud_native_instances
        self.http_to_user_ip = http_to_user_ip
        self.http_port = http_port
        # Nested list of LogHeaders models.
        self.log_headers = log_headers
        self.is_access_product = is_access_product
        self.access_headers = access_headers
        self.access_header_mode = access_header_mode
        self.https_redirect = https_redirect
        self.load_balancing = load_balancing
        self.ip_follow_status = ip_follow_status
        self.access_type = access_type
        self.version = version
        self.cluster_type = cluster_type
        self.read_time = read_time
        self.write_time = write_time
        self.resource_group_id = resource_group_id
        self.cname = cname
        self.source_ips = source_ips
        self.connection_time = connection_time
        self.https_port = https_port

    # Scalar (attribute, wire-key) pairs, in the wire-serialization order
    # used after the two list fields below.
    def _scalar_fields(self):
        return (('is_access_product', 'IsAccessProduct'),
                ('access_headers', 'AccessHeaders'),
                ('access_header_mode', 'AccessHeaderMode'),
                ('https_redirect', 'HttpsRedirect'),
                ('load_balancing', 'LoadBalancing'),
                ('ip_follow_status', 'IpFollowStatus'),
                ('access_type', 'AccessType'),
                ('version', 'Version'),
                ('cluster_type', 'ClusterType'),
                ('read_time', 'ReadTime'),
                ('write_time', 'WriteTime'),
                ('resource_group_id', 'ResourceGroupId'),
                ('cname', 'Cname'),
                ('source_ips', 'SourceIps'),
                ('connection_time', 'ConnectionTime'),
                ('https_port', 'HttpsPort'))

    def validate(self):
        # Recursively validate both nested model lists.
        for inst in self.cloud_native_instances or []:
            if inst:
                inst.validate()
        for header in self.log_headers or []:
            if header:
                header.validate()

    def to_map(self):
        """Serialize to a dict; both list keys are always emitted."""
        serialized = super(DescribeDomainResponseBodyDomain, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.http_2port is not None:
            payload['Http2Port'] = self.http_2port
        payload['CloudNativeInstances'] = []
        if self.cloud_native_instances is not None:
            payload['CloudNativeInstances'] = [
                inst.to_map() if inst else None
                for inst in self.cloud_native_instances
            ]
        if self.http_to_user_ip is not None:
            payload['HttpToUserIp'] = self.http_to_user_ip
        if self.http_port is not None:
            payload['HttpPort'] = self.http_port
        payload['LogHeaders'] = []
        if self.log_headers is not None:
            payload['LogHeaders'] = [
                header.to_map() if header else None
                for header in self.log_headers
            ]
        for attr, key in self._scalar_fields():
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate from a dict, rebuilding both nested model lists."""
        m = m or {}
        if m.get('Http2Port') is not None:
            self.http_2port = m['Http2Port']
        # Both lists are always reset, even when their keys are absent.
        self.cloud_native_instances = []
        if m.get('CloudNativeInstances') is not None:
            self.cloud_native_instances = [
                DescribeDomainResponseBodyDomainCloudNativeInstances().from_map(item)
                for item in m['CloudNativeInstances']
            ]
        if m.get('HttpToUserIp') is not None:
            self.http_to_user_ip = m['HttpToUserIp']
        if m.get('HttpPort') is not None:
            self.http_port = m['HttpPort']
        self.log_headers = []
        if m.get('LogHeaders') is not None:
            self.log_headers = [
                DescribeDomainResponseBodyDomainLogHeaders().from_map(item)
                for item in m['LogHeaders']
            ]
        for attr, key in self._scalar_fields():
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainResponseBody(TeaModel):
    """Top-level DescribeDomain payload: request id plus domain details."""

    def __init__(self, request_id=None, domain=None):
        # ID of the API request, for tracing.
        self.request_id = request_id
        # Nested DescribeDomainResponseBodyDomain model.
        self.domain = domain

    def validate(self):
        if self.domain:
            self.domain.validate()

    def to_map(self):
        """Serialize to a dict, including the nested domain model."""
        serialized = super(DescribeDomainResponseBody, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        if self.domain is not None:
            payload['Domain'] = self.domain.to_map()
        return payload

    def from_map(self, m=None):
        """Populate from a dict, rebuilding the nested domain model."""
        m = m or {}
        if m.get('RequestId') is not None:
            self.request_id = m['RequestId']
        if m.get('Domain') is not None:
            self.domain = DescribeDomainResponseBodyDomain().from_map(m['Domain'])
        return self
class DescribeDomainResponse(TeaModel):
    """HTTP envelope for DescribeDomain: raw headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        # Raw HTTP response headers.
        self.headers = headers
        # Deserialized DescribeDomainResponseBody payload.
        self.body = body

    def validate(self):
        # Both parts are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope (and nested body) to a plain dict."""
        serialized = super(DescribeDomainResponse, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        """Populate the envelope from a dict, rebuilding the nested body."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m['headers']
        if m.get('body') is not None:
            self.body = DescribeDomainResponseBody().from_map(m['body'])
        return self
class DescribeDomainAdvanceConfigsRequest(TeaModel):
    """Request model for the DescribeDomainAdvanceConfigs call."""

    def __init__(self, instance_id=None, domain_list=None,
                 resource_group_id=None):
        # Identifier of the target service instance.
        self.instance_id = instance_id
        # Domains to query.
        self.domain_list = domain_list
        # Resource group scoping the query.
        self.resource_group_id = resource_group_id

    def validate(self):
        """No constraints: all fields are optional scalars."""

    def to_map(self):
        """Serialize this request to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainAdvanceConfigsRequest, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('domain_list', 'DomainList'),
                          ('resource_group_id', 'ResourceGroupId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this request in place from a dict and return self."""
        m = m or {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('domain_list', 'DomainList'),
                          ('resource_group_id', 'ResourceGroupId')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile(TeaModel):
    """Advanced per-domain configuration profile (ports, CNAME, statuses)."""

    # (attribute, wire-key) pairs in wire-serialization order.
    _PROFILE_FIELDS = None  # populated lazily via _fields() to avoid class-body logic

    def __init__(self, http_2port=None, ipv_6status=None, http_port=None,
                 gslbstatus=None, rs=None, vip_service_status=None,
                 cluster_type=None, exclusive_vip_status=None, cname=None,
                 cert_status=None, https_port=None, resolved_type=None):
        self.http_2port = http_2port
        self.ipv_6status = ipv_6status
        self.http_port = http_port
        self.gslbstatus = gslbstatus
        self.rs = rs
        self.vip_service_status = vip_service_status
        self.cluster_type = cluster_type
        self.exclusive_vip_status = exclusive_vip_status
        self.cname = cname
        self.cert_status = cert_status
        self.https_port = https_port
        self.resolved_type = resolved_type

    def _fields(self):
        # Kept as a method so the tuple lives alongside the serializers.
        return (('http_2port', 'Http2Port'),
                ('ipv_6status', 'Ipv6Status'),
                ('http_port', 'HttpPort'),
                ('gslbstatus', 'GSLBStatus'),
                ('rs', 'Rs'),
                ('vip_service_status', 'VipServiceStatus'),
                ('cluster_type', 'ClusterType'),
                ('exclusive_vip_status', 'ExclusiveVipStatus'),
                ('cname', 'Cname'),
                ('cert_status', 'CertStatus'),
                ('https_port', 'HttpsPort'),
                ('resolved_type', 'ResolvedType'))

    def validate(self):
        """No constraints: all fields are optional scalars."""

    def to_map(self):
        """Serialize this profile to a plain dict, skipping unset fields."""
        serialized = super(
            DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile,
            self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in self._fields():
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this profile in place from a dict and return self."""
        m = m or {}
        for attr, key in self._fields():
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainAdvanceConfigsResponseBodyDomainConfigs(TeaModel):
    """Pairs a domain name with its advanced configuration profile."""

    def __init__(self, profile=None, domain=None):
        # Nested Profile model.
        self.profile = profile
        # Domain name this profile applies to.
        self.domain = domain

    def validate(self):
        if self.profile:
            self.profile.validate()

    def to_map(self):
        """Serialize to a dict, including the nested profile model."""
        serialized = super(
            DescribeDomainAdvanceConfigsResponseBodyDomainConfigs,
            self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.profile is not None:
            payload['Profile'] = self.profile.to_map()
        if self.domain is not None:
            payload['Domain'] = self.domain
        return payload

    def from_map(self, m=None):
        """Populate from a dict, rebuilding the nested profile model."""
        m = m or {}
        if m.get('Profile') is not None:
            self.profile = (
                DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile()
                .from_map(m['Profile']))
        if m.get('Domain') is not None:
            self.domain = m['Domain']
        return self
class DescribeDomainAdvanceConfigsResponseBody(TeaModel):
    """Payload of DescribeDomainAdvanceConfigs: request id plus config list."""

    def __init__(self, request_id=None, domain_configs=None):
        # ID of the API request, for tracing.
        self.request_id = request_id
        # Nested list of DomainConfigs models.
        self.domain_configs = domain_configs

    def validate(self):
        # Recursively validate each nested config entry.
        for cfg in self.domain_configs or []:
            if cfg:
                cfg.validate()

    def to_map(self):
        """Serialize to a dict; 'DomainConfigs' is always emitted."""
        serialized = super(DescribeDomainAdvanceConfigsResponseBody, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        payload['DomainConfigs'] = []
        if self.domain_configs is not None:
            payload['DomainConfigs'] = [
                cfg.to_map() if cfg else None for cfg in self.domain_configs
            ]
        return payload

    def from_map(self, m=None):
        """Populate from a dict, rebuilding the nested config models."""
        m = m or {}
        if m.get('RequestId') is not None:
            self.request_id = m['RequestId']
        # The list is always reset, even when the key is absent.
        self.domain_configs = []
        if m.get('DomainConfigs') is not None:
            self.domain_configs = [
                DescribeDomainAdvanceConfigsResponseBodyDomainConfigs().from_map(item)
                for item in m['DomainConfigs']
            ]
        return self
class DescribeDomainAdvanceConfigsResponse(TeaModel):
    """HTTP envelope for DescribeDomainAdvanceConfigs: headers plus body."""

    def __init__(self, headers=None, body=None):
        # Raw HTTP response headers.
        self.headers = headers
        # Deserialized DescribeDomainAdvanceConfigsResponseBody payload.
        self.body = body

    def validate(self):
        # Both parts are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope (and nested body) to a plain dict."""
        serialized = super(DescribeDomainAdvanceConfigsResponse, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        """Populate the envelope from a dict, rebuilding the nested body."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m['headers']
        if m.get('body') is not None:
            self.body = DescribeDomainAdvanceConfigsResponseBody().from_map(m['body'])
        return self
class DescribeDomainBasicConfigsRequest(TeaModel):
    """Paged request model for the DescribeDomainBasicConfigs call."""

    def __init__(self, instance_id=None, domain_key=None, access_type=None,
                 cloud_native_product_id=None, page_number=None,
                 page_size=None, resource_group_id=None):
        self.instance_id = instance_id
        # Keyword used to filter domains.
        self.domain_key = domain_key
        self.access_type = access_type
        self.cloud_native_product_id = cloud_native_product_id
        # Pagination controls.
        self.page_number = page_number
        self.page_size = page_size
        self.resource_group_id = resource_group_id

    def validate(self):
        """No constraints: all fields are optional scalars."""

    def to_map(self):
        """Serialize this request to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainBasicConfigsRequest, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('domain_key', 'DomainKey'),
                          ('access_type', 'AccessType'),
                          ('cloud_native_product_id', 'CloudNativeProductId'),
                          ('page_number', 'PageNumber'),
                          ('page_size', 'PageSize'),
                          ('resource_group_id', 'ResourceGroupId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this request in place from a dict and return self."""
        m = m or {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('domain_key', 'DomainKey'),
                          ('access_type', 'AccessType'),
                          ('cloud_native_product_id', 'CloudNativeProductId'),
                          ('page_number', 'PageNumber'),
                          ('page_size', 'PageSize'),
                          ('resource_group_id', 'ResourceGroupId')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainBasicConfigsResponseBodyDomainConfigs(TeaModel):
    """Basic per-domain configuration snapshot (statuses and modes)."""

    def __init__(self, status=None, domain=None, owner=None, cc_mode=None,
                 cc_status=None, access_type=None, version=None,
                 acl_status=None, waf_status=None, waf_mode=None):
        self.status = status
        self.domain = domain
        self.owner = owner
        self.cc_mode = cc_mode
        self.cc_status = cc_status
        self.access_type = access_type
        self.version = version
        self.acl_status = acl_status
        self.waf_status = waf_status
        self.waf_mode = waf_mode

    def _fields(self):
        # (attribute, wire-key) pairs in wire-serialization order.
        return (('status', 'Status'),
                ('domain', 'Domain'),
                ('owner', 'Owner'),
                ('cc_mode', 'CcMode'),
                ('cc_status', 'CcStatus'),
                ('access_type', 'AccessType'),
                ('version', 'Version'),
                ('acl_status', 'AclStatus'),
                ('waf_status', 'WafStatus'),
                ('waf_mode', 'WafMode'))

    def validate(self):
        """No constraints: all fields are optional scalars."""

    def to_map(self):
        """Serialize this config to a plain dict, skipping unset fields."""
        serialized = super(
            DescribeDomainBasicConfigsResponseBodyDomainConfigs, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in self._fields():
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this config in place from a dict and return self."""
        m = m or {}
        for attr, key in self._fields():
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainBasicConfigsResponseBody(TeaModel):
    """Paged payload of DescribeDomainBasicConfigs: count, id, config list."""

    def __init__(self, total_count=None, request_id=None, domain_configs=None):
        # Total number of matching domains across all pages.
        self.total_count = total_count
        # ID of the API request, for tracing.
        self.request_id = request_id
        # Nested list of DomainConfigs models.
        self.domain_configs = domain_configs

    def validate(self):
        # Recursively validate each nested config entry.
        for cfg in self.domain_configs or []:
            if cfg:
                cfg.validate()

    def to_map(self):
        """Serialize to a dict; 'DomainConfigs' is always emitted."""
        serialized = super(DescribeDomainBasicConfigsResponseBody, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.total_count is not None:
            payload['TotalCount'] = self.total_count
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        payload['DomainConfigs'] = []
        if self.domain_configs is not None:
            payload['DomainConfigs'] = [
                cfg.to_map() if cfg else None for cfg in self.domain_configs
            ]
        return payload

    def from_map(self, m=None):
        """Populate from a dict, rebuilding the nested config models."""
        m = m or {}
        if m.get('TotalCount') is not None:
            self.total_count = m['TotalCount']
        if m.get('RequestId') is not None:
            self.request_id = m['RequestId']
        # The list is always reset, even when the key is absent.
        self.domain_configs = []
        if m.get('DomainConfigs') is not None:
            self.domain_configs = [
                DescribeDomainBasicConfigsResponseBodyDomainConfigs().from_map(item)
                for item in m['DomainConfigs']
            ]
        return self
class DescribeDomainBasicConfigsResponse(TeaModel):
    """HTTP envelope for DescribeDomainBasicConfigs: headers plus body."""

    def __init__(self, headers=None, body=None):
        # Raw HTTP response headers.
        self.headers = headers
        # Deserialized DescribeDomainBasicConfigsResponseBody payload.
        self.body = body

    def validate(self):
        # Both parts are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope (and nested body) to a plain dict."""
        serialized = super(DescribeDomainBasicConfigsResponse, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        """Populate the envelope from a dict, rebuilding the nested body."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m['headers']
        if m.get('body') is not None:
            self.body = DescribeDomainBasicConfigsResponseBody().from_map(m['body'])
        return self
class DescribeDomainListRequest(TeaModel):
    """Paged request model for the DescribeDomainList call."""

    def __init__(self, resource_group_id=None, instance_id=None,
                 domain_name=None, page_number=None, page_size=None,
                 is_sub=None, domain_names=None):
        self.resource_group_id = resource_group_id
        self.instance_id = instance_id
        # Single domain-name filter.
        self.domain_name = domain_name
        # Pagination controls.
        self.page_number = page_number
        self.page_size = page_size
        self.is_sub = is_sub
        # Multi-domain filter.
        self.domain_names = domain_names

    def validate(self):
        """No constraints: all fields are optional scalars."""

    def to_map(self):
        """Serialize this request to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainListRequest, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('resource_group_id', 'ResourceGroupId'),
                          ('instance_id', 'InstanceId'),
                          ('domain_name', 'DomainName'),
                          ('page_number', 'PageNumber'),
                          ('page_size', 'PageSize'),
                          ('is_sub', 'IsSub'),
                          ('domain_names', 'DomainNames')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this request in place from a dict and return self."""
        m = m or {}
        for attr, key in (('resource_group_id', 'ResourceGroupId'),
                          ('instance_id', 'InstanceId'),
                          ('domain_name', 'DomainName'),
                          ('page_number', 'PageNumber'),
                          ('page_size', 'PageSize'),
                          ('is_sub', 'IsSub'),
                          ('domain_names', 'DomainNames')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainListResponseBody(TeaModel):
    """Payload of DescribeDomainList: total count, request id, domain names."""

    def __init__(self, total_count=None, request_id=None, domain_names=None):
        # Total number of matching domains across all pages.
        self.total_count = total_count
        # ID of the API request, for tracing.
        self.request_id = request_id
        # Plain list of domain-name strings (not nested models).
        self.domain_names = domain_names

    def validate(self):
        """No constraints: all fields are optional scalars."""

    def to_map(self):
        """Serialize this payload to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainListResponseBody, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('total_count', 'TotalCount'),
                          ('request_id', 'RequestId'),
                          ('domain_names', 'DomainNames')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this payload in place from a dict and return self."""
        m = m or {}
        for attr, key in (('total_count', 'TotalCount'),
                          ('request_id', 'RequestId'),
                          ('domain_names', 'DomainNames')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainListResponse(TeaModel):
    """HTTP envelope for DescribeDomainList: raw headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        # Raw HTTP response headers.
        self.headers = headers
        # Deserialized DescribeDomainListResponseBody payload.
        self.body = body

    def validate(self):
        # Both parts are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope (and nested body) to a plain dict."""
        serialized = super(DescribeDomainListResponse, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        """Populate the envelope from a dict, rebuilding the nested body."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m['headers']
        if m.get('body') is not None:
            self.body = DescribeDomainListResponseBody().from_map(m['body'])
        return self
class DescribeDomainNamesRequest(TeaModel):
    """Request model for the DescribeDomainNames call."""

    def __init__(self, instance_id=None, resource_group_id=None):
        # Identifier of the target service instance.
        self.instance_id = instance_id
        # Resource group scoping the query.
        self.resource_group_id = resource_group_id

    def validate(self):
        """No constraints: both fields are optional scalars."""

    def to_map(self):
        """Serialize this request to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainNamesRequest, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('resource_group_id', 'ResourceGroupId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this request in place from a dict and return self."""
        m = m or {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('resource_group_id', 'ResourceGroupId')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainNamesResponseBody(TeaModel):
    """Payload of DescribeDomainNames: request id plus domain names."""

    def __init__(self, request_id=None, domain_names=None):
        # ID of the API request, for tracing.
        self.request_id = request_id
        # Plain list of domain-name strings (not nested models).
        self.domain_names = domain_names

    def validate(self):
        """No constraints: both fields are optional scalars."""

    def to_map(self):
        """Serialize this payload to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainNamesResponseBody, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('request_id', 'RequestId'),
                          ('domain_names', 'DomainNames')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this payload in place from a dict and return self."""
        m = m or {}
        for attr, key in (('request_id', 'RequestId'),
                          ('domain_names', 'DomainNames')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainNamesResponse(TeaModel):
    """HTTP envelope for DescribeDomainNames: raw headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        # Raw HTTP response headers.
        self.headers = headers
        # Deserialized DescribeDomainNamesResponseBody payload.
        self.body = body

    def validate(self):
        # Both parts are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope (and nested body) to a plain dict."""
        serialized = super(DescribeDomainNamesResponse, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        """Populate the envelope from a dict, rebuilding the nested body."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m['headers']
        if m.get('body') is not None:
            self.body = DescribeDomainNamesResponseBody().from_map(m['body'])
        return self
class DescribeDomainRuleGroupRequest(TeaModel):
    """Request model for the DescribeDomainRuleGroup call."""

    def __init__(self, domain=None, instance_id=None):
        # Domain whose rule group is being queried.
        self.domain = domain
        # Identifier of the target service instance.
        self.instance_id = instance_id

    def validate(self):
        """No constraints: both fields are optional scalars."""

    def to_map(self):
        """Serialize this request to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainRuleGroupRequest, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('domain', 'Domain'),
                          ('instance_id', 'InstanceId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this request in place from a dict and return self."""
        m = m or {}
        for attr, key in (('domain', 'Domain'),
                          ('instance_id', 'InstanceId')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainRuleGroupResponseBody(TeaModel):
    """Payload of DescribeDomainRuleGroup: rule-group id plus request id."""

    def __init__(self, rule_group_id=None, request_id=None):
        # Identifier of the rule group bound to the domain.
        self.rule_group_id = rule_group_id
        # ID of the API request, for tracing.
        self.request_id = request_id

    def validate(self):
        """No constraints: both fields are optional scalars."""

    def to_map(self):
        """Serialize this payload to a plain dict, skipping unset fields."""
        serialized = super(DescribeDomainRuleGroupResponseBody, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('rule_group_id', 'RuleGroupId'),
                          ('request_id', 'RequestId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this payload in place from a dict and return self."""
        m = m or {}
        for attr, key in (('rule_group_id', 'RuleGroupId'),
                          ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeDomainRuleGroupResponse(TeaModel):
    """HTTP envelope for DescribeDomainRuleGroup: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        # Raw HTTP response headers.
        self.headers = headers
        # Deserialized DescribeDomainRuleGroupResponseBody payload.
        self.body = body

    def validate(self):
        # Both parts are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope (and nested body) to a plain dict."""
        serialized = super(DescribeDomainRuleGroupResponse, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        """Populate the envelope from a dict, rebuilding the nested body."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m['headers']
        if m.get('body') is not None:
            self.body = DescribeDomainRuleGroupResponseBody().from_map(m['body'])
        return self
class DescribeInstanceInfoRequest(TeaModel):
    """Request model for the DescribeInstanceInfo call."""

    def __init__(self, instance_id=None, resource_group_id=None):
        # Identifier of the target service instance.
        self.instance_id = instance_id
        # Resource group scoping the query.
        self.resource_group_id = resource_group_id

    def validate(self):
        """No constraints: both fields are optional scalars."""

    def to_map(self):
        """Serialize this request to a plain dict, skipping unset fields."""
        serialized = super(DescribeInstanceInfoRequest, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('resource_group_id', 'ResourceGroupId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this request in place from a dict and return self."""
        m = m or {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('resource_group_id', 'ResourceGroupId')):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeInstanceInfoResponseBodyInstanceInfo(TeaModel):
    """Subscription and status details of a service instance."""

    def __init__(self, status=None, end_date=None, version=None,
                 remain_day=None, region=None, pay_type=None, in_debt=None,
                 instance_id=None, subscription_type=None, trial=None):
        self.status = status
        self.end_date = end_date
        self.version = version
        self.remain_day = remain_day
        self.region = region
        self.pay_type = pay_type
        self.in_debt = in_debt
        self.instance_id = instance_id
        self.subscription_type = subscription_type
        self.trial = trial

    def _fields(self):
        # (attribute, wire-key) pairs in wire-serialization order.
        return (('status', 'Status'),
                ('end_date', 'EndDate'),
                ('version', 'Version'),
                ('remain_day', 'RemainDay'),
                ('region', 'Region'),
                ('pay_type', 'PayType'),
                ('in_debt', 'InDebt'),
                ('instance_id', 'InstanceId'),
                ('subscription_type', 'SubscriptionType'),
                ('trial', 'Trial'))

    def validate(self):
        """No constraints: all fields are optional scalars."""

    def to_map(self):
        """Serialize this model to a plain dict, skipping unset fields."""
        serialized = super(
            DescribeInstanceInfoResponseBodyInstanceInfo, self).to_map()
        if serialized is not None:
            return serialized
        payload = {}
        for attr, key in self._fields():
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        """Populate this model in place from a dict and return self."""
        m = m or {}
        for attr, key in self._fields():
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class DescribeInstanceInfoResponseBody(TeaModel):
    """Response body: the request id plus a nested InstanceInfo sub-model."""
    def __init__(self, request_id=None, instance_info=None):
        self.request_id = request_id
        # Nested DescribeInstanceInfoResponseBodyInstanceInfo instance (or None).
        self.instance_info = instance_info
    def validate(self):
        # Delegate validation to the nested model when present.
        if self.instance_info:
            self.instance_info.validate()
    def to_map(self):
        """Serialize, recursing into the nested model via its to_map()."""
        _map = super(DescribeInstanceInfoResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.instance_info is not None:
            result['InstanceInfo'] = self.instance_info.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the nested model from its sub-dict."""
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('InstanceInfo') is not None:
            temp_model = DescribeInstanceInfoResponseBodyInstanceInfo()
            self.instance_info = temp_model.from_map(m['InstanceInfo'])
        return self
class DescribeInstanceInfoResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeInstanceInfoResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeInstanceInfoResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeInstanceInfosRequest(TeaModel):
    """Parameters of a DescribeInstanceInfos call; all fields optional."""

    def __init__(self, instance_source=None, instance_id=None,
                 resource_group_id=None):
        self.instance_source = instance_source
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('InstanceSource', 'instance_source'),
                ('InstanceId', 'instance_id'),
                ('ResourceGroupId', 'resource_group_id'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeInstanceInfosRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfosResponseBodyInstanceInfos(TeaModel):
    """One flat instance record in the DescribeInstanceInfos response list."""

    def __init__(self, status=None, end_date=None, remain_day=None,
                 region=None, pay_type=None, in_debt=None, instance_id=None,
                 subscription_type=None, trial=None):
        self.status = status
        self.end_date = end_date
        self.remain_day = remain_day
        self.region = region
        self.pay_type = pay_type
        self.in_debt = in_debt
        self.instance_id = instance_id
        self.subscription_type = subscription_type
        self.trial = trial

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('Status', 'status'), ('EndDate', 'end_date'),
                ('RemainDay', 'remain_day'), ('Region', 'region'),
                ('PayType', 'pay_type'), ('InDebt', 'in_debt'),
                ('InstanceId', 'instance_id'),
                ('SubscriptionType', 'subscription_type'), ('Trial', 'trial'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeInstanceInfosResponseBodyInstanceInfos, self
            ).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfosResponseBody(TeaModel):
    """Response body carrying a list of InstanceInfos sub-models."""
    def __init__(self, request_id=None, instance_infos=None):
        self.request_id = request_id
        # List of DescribeInstanceInfosResponseBodyInstanceInfos (or None).
        self.instance_infos = instance_infos
    def validate(self):
        # Validate each non-None element of the list.
        if self.instance_infos:
            for k in self.instance_infos:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize; the 'InstanceInfos' key is always emitted (possibly empty)."""
        _map = super(DescribeInstanceInfosResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['InstanceInfos'] = []
        if self.instance_infos is not None:
            for k in self.instance_infos:
                result['InstanceInfos'].append(k.to_map() if k else None)
        return result
    def from_map(self, m=None):
        """Deserialize; rebuilds each list element as a sub-model."""
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.instance_infos = []
        if m.get('InstanceInfos') is not None:
            for k in m.get('InstanceInfos'):
                temp_model = DescribeInstanceInfosResponseBodyInstanceInfos()
                self.instance_infos.append(temp_model.from_map(k))
        return self
class DescribeInstanceInfosResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeInstanceInfosResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeInstanceInfosResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeInstanceSpecInfoRequest(TeaModel):
    """Parameters of a DescribeInstanceSpecInfo call; all fields optional."""

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('InstanceId', 'instance_id'),
                ('ResourceGroupId', 'resource_group_id'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeInstanceSpecInfoRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos(TeaModel):
    """A single (code, value) spec entry in the spec-info response."""

    def __init__(self, value=None, code=None):
        self.value = value
        self.code = code

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('Value', 'value'), ('Code', 'code'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos,
            self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceSpecInfoResponseBody(TeaModel):
    """Response body: spec (code, value) entries plus instance metadata."""
    def __init__(self, instance_spec_infos=None, request_id=None,
        instance_id=None, version=None, expire_time=None):
        # List of DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos (or None).
        self.instance_spec_infos = instance_spec_infos
        self.request_id = request_id
        self.instance_id = instance_id
        self.version = version
        self.expire_time = expire_time
    def validate(self):
        # Validate each non-None element of the list.
        if self.instance_spec_infos:
            for k in self.instance_spec_infos:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize; 'InstanceSpecInfos' is always emitted (possibly empty)."""
        _map = super(DescribeInstanceSpecInfoResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        result['InstanceSpecInfos'] = []
        if self.instance_spec_infos is not None:
            for k in self.instance_spec_infos:
                result['InstanceSpecInfos'].append(k.to_map() if k else None)
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.version is not None:
            result['Version'] = self.version
        if self.expire_time is not None:
            result['ExpireTime'] = self.expire_time
        return result
    def from_map(self, m=None):
        """Deserialize; rebuilds each list element as a sub-model."""
        m = m or dict()
        self.instance_spec_infos = []
        if m.get('InstanceSpecInfos') is not None:
            for k in m.get('InstanceSpecInfos'):
                temp_model = (
                    DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos())
                self.instance_spec_infos.append(temp_model.from_map(k))
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Version') is not None:
            self.version = m.get('Version')
        if m.get('ExpireTime') is not None:
            self.expire_time = m.get('ExpireTime')
        return self
class DescribeInstanceSpecInfoResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeInstanceSpecInfoResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeInstanceSpecInfoResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeLogServiceStatusRequest(TeaModel):
    """Paged parameters of a DescribeLogServiceStatus call; all optional."""

    def __init__(self, instance_id=None, region=None, resource_group_id=
        None, page_number=None, page_size=None, domain_names=None):
        self.instance_id = instance_id
        self.region = region
        self.resource_group_id = resource_group_id
        self.page_number = page_number
        self.page_size = page_size
        self.domain_names = domain_names

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('InstanceId', 'instance_id'), ('Region', 'region'),
                ('ResourceGroupId', 'resource_group_id'),
                ('PageNumber', 'page_number'), ('PageSize', 'page_size'),
                ('DomainNames', 'domain_names'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeLogServiceStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeLogServiceStatusResponseBodyDomainStatus(TeaModel):
    """Per-domain log-service activation flag in the status response."""

    def __init__(self, domain=None, sls_log_active=None):
        self.domain = domain
        self.sls_log_active = sls_log_active

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('Domain', 'domain'), ('SlsLogActive', 'sls_log_active'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeLogServiceStatusResponseBodyDomainStatus, self
            ).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeLogServiceStatusResponseBody(TeaModel):
    """Response body: total count plus per-domain log-service status entries."""
    def __init__(self, total_count=None, request_id=None, domain_status=None):
        self.total_count = total_count
        self.request_id = request_id
        # List of DescribeLogServiceStatusResponseBodyDomainStatus (or None).
        self.domain_status = domain_status
    def validate(self):
        # Validate each non-None element of the list.
        if self.domain_status:
            for k in self.domain_status:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize; the 'DomainStatus' key is always emitted (possibly empty)."""
        _map = super(DescribeLogServiceStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['DomainStatus'] = []
        if self.domain_status is not None:
            for k in self.domain_status:
                result['DomainStatus'].append(k.to_map() if k else None)
        return result
    def from_map(self, m=None):
        """Deserialize; rebuilds each list element as a sub-model."""
        m = m or dict()
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.domain_status = []
        if m.get('DomainStatus') is not None:
            for k in m.get('DomainStatus'):
                temp_model = DescribeLogServiceStatusResponseBodyDomainStatus()
                self.domain_status.append(temp_model.from_map(k))
        return self
class DescribeLogServiceStatusResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeLogServiceStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeLogServiceStatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeProtectionModuleCodeConfigRequest(TeaModel):
    """Parameters of a DescribeProtectionModuleCodeConfig call; all optional."""

    def __init__(self, source_ip=None, lang=None, code_type=None,
        code_value=None, instance_id=None, resource_group_id=None):
        self.source_ip = source_ip
        self.lang = lang
        self.code_type = code_type
        self.code_value = code_value
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('SourceIp', 'source_ip'), ('Lang', 'lang'),
                ('CodeType', 'code_type'), ('CodeValue', 'code_value'),
                ('InstanceId', 'instance_id'),
                ('ResourceGroupId', 'resource_group_id'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeProtectionModuleCodeConfigRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleCodeConfigResponseBody(TeaModel):
    """Response body: request id plus the raw code-config payload."""

    def __init__(self, request_id=None, code_configs=None):
        self.request_id = request_id
        self.code_configs = code_configs

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('RequestId', 'request_id'), ('CodeConfigs', 'code_configs'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeProtectionModuleCodeConfigResponseBody, self
            ).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleCodeConfigResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeProtectionModuleCodeConfigResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeProtectionModuleCodeConfigResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeProtectionModuleModeRequest(TeaModel):
    """Parameters of a DescribeProtectionModuleMode call; all optional."""

    def __init__(self, domain=None, defense_type=None, instance_id=None,
        resource_group_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('Domain', 'domain'), ('DefenseType', 'defense_type'),
                ('InstanceId', 'instance_id'),
                ('ResourceGroupId', 'resource_group_id'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeProtectionModuleModeRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleModeResponseBody(TeaModel):
    """Response body: learn status, request id and the current mode value."""

    def __init__(self, learn_status=None, request_id=None, mode=None):
        self.learn_status = learn_status
        self.request_id = request_id
        self.mode = mode

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('LearnStatus', 'learn_status'),
                ('RequestId', 'request_id'), ('Mode', 'mode'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeProtectionModuleModeResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleModeResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeProtectionModuleModeResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeProtectionModuleModeResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeProtectionModuleRulesRequest(TeaModel):
    """Paged/filtered parameters of a DescribeProtectionModuleRules call."""

    def __init__(self, page_size=None, page_number=None, domain=None,
        defense_type=None, query=None, lang=None, instance_id=None,
        resource_group_id=None):
        self.page_size = page_size
        self.page_number = page_number
        self.domain = domain
        self.defense_type = defense_type
        self.query = query
        self.lang = lang
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('PageSize', 'page_size'), ('PageNumber', 'page_number'),
                ('Domain', 'domain'), ('DefenseType', 'defense_type'),
                ('Query', 'query'), ('Lang', 'lang'),
                ('InstanceId', 'instance_id'),
                ('ResourceGroupId', 'resource_group_id'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeProtectionModuleRulesRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleRulesResponseBodyRules(TeaModel):
    """One flat rule record in the DescribeProtectionModuleRules response."""

    def __init__(self, status=None, time=None, version=None, content=None,
        rule_id=None):
        self.status = status
        self.time = time
        self.version = version
        self.content = content
        self.rule_id = rule_id

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('Status', 'status'), ('Time', 'time'),
                ('Version', 'version'), ('Content', 'content'),
                ('RuleId', 'rule_id'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeProtectionModuleRulesResponseBodyRules, self
            ).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleRulesResponseBody(TeaModel):
    """Response body: total count plus the page of protection-module rules."""
    def __init__(self, total_count=None, request_id=None, rules=None):
        self.total_count = total_count
        self.request_id = request_id
        # List of DescribeProtectionModuleRulesResponseBodyRules (or None).
        self.rules = rules
    def validate(self):
        # Validate each non-None element of the list.
        if self.rules:
            for k in self.rules:
                if k:
                    k.validate()
    def to_map(self):
        """Serialize; the 'Rules' key is always emitted (possibly empty)."""
        _map = super(DescribeProtectionModuleRulesResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['Rules'] = []
        if self.rules is not None:
            for k in self.rules:
                result['Rules'].append(k.to_map() if k else None)
        return result
    def from_map(self, m=None):
        """Deserialize; rebuilds each list element as a sub-model."""
        m = m or dict()
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.rules = []
        if m.get('Rules') is not None:
            for k in m.get('Rules'):
                temp_model = DescribeProtectionModuleRulesResponseBodyRules()
                self.rules.append(temp_model.from_map(k))
        return self
class DescribeProtectionModuleRulesResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeProtectionModuleRulesResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeProtectionModuleRulesResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeProtectionModuleStatusRequest(TeaModel):
    """Parameters of a DescribeProtectionModuleStatus call; all optional."""

    def __init__(self, domain=None, defense_type=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.instance_id = instance_id

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('Domain', 'domain'), ('DefenseType', 'defense_type'),
                ('InstanceId', 'instance_id'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeProtectionModuleStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleStatusResponseBody(TeaModel):
    """Response body: request id plus the module status value."""

    def __init__(self, request_id=None, module_status=None):
        self.request_id = request_id
        self.module_status = module_status

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('RequestId', 'request_id'),
                ('ModuleStatus', 'module_status'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeProtectionModuleStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleStatusResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeProtectionModuleStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeProtectionModuleStatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeWafSourceIpSegmentRequest(TeaModel):
    """Parameters of a DescribeWafSourceIpSegment call; all optional."""

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('InstanceId', 'instance_id'),
                ('ResourceGroupId', 'resource_group_id'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeWafSourceIpSegmentRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWafSourceIpSegmentResponseBody(TeaModel):
    """Response body: request id plus IPv6 and IPv4 source segments."""

    def __init__(self, request_id=None, ip_v6s=None, ips=None):
        self.request_id = request_id
        self.ip_v6s = ip_v6s
        self.ips = ips

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map.
        return (('RequestId', 'request_id'), ('IpV6s', 'ip_v6s'),
                ('Ips', 'ips'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(DescribeWafSourceIpSegmentResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWafSourceIpSegmentResponse(TeaModel):
    """API response wrapper: raw HTTP ``headers`` plus the parsed ``body`` model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        # headers and body must both be present on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        """Serialize, recursing into the body model."""
        _map = super(DescribeWafSourceIpSegmentResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        """Deserialize, rebuilding the body model from its sub-dict."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeWafSourceIpSegmentResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ModifyDomainRequest(TeaModel):
    """Parameters of a ModifyDomain call (ports, redirects, timeouts, etc.)."""

    def __init__(self, instance_id=None, domain=None, source_ips=None,
        load_balancing=None, http_port=None, https_port=None, http_2port=
        None, https_redirect=None, http_to_user_ip=None, is_access_product=
        None, log_headers=None, cluster_type=None, connection_time=None,
        read_time=None, write_time=None, access_type=None,
        cloud_native_instances=None, ip_follow_status=None):
        self.instance_id = instance_id
        self.domain = domain
        self.source_ips = source_ips
        self.load_balancing = load_balancing
        self.http_port = http_port
        self.https_port = https_port
        self.http_2port = http_2port
        self.https_redirect = https_redirect
        self.http_to_user_ip = http_to_user_ip
        self.is_access_product = is_access_product
        self.log_headers = log_headers
        self.cluster_type = cluster_type
        self.connection_time = connection_time
        self.read_time = read_time
        self.write_time = write_time
        self.access_type = access_type
        self.cloud_native_instances = cloud_native_instances
        self.ip_follow_status = ip_follow_status

    def _pairs(self):
        # Wire-key / attribute-name mapping shared by to_map and from_map;
        # order matches the serialized output.
        return (('InstanceId', 'instance_id'), ('Domain', 'domain'),
                ('SourceIps', 'source_ips'),
                ('LoadBalancing', 'load_balancing'),
                ('HttpPort', 'http_port'), ('HttpsPort', 'https_port'),
                ('Http2Port', 'http_2port'),
                ('HttpsRedirect', 'https_redirect'),
                ('HttpToUserIp', 'http_to_user_ip'),
                ('IsAccessProduct', 'is_access_product'),
                ('LogHeaders', 'log_headers'),
                ('ClusterType', 'cluster_type'),
                ('ConnectionTime', 'connection_time'),
                ('ReadTime', 'read_time'), ('WriteTime', 'write_time'),
                ('AccessType', 'access_type'),
                ('CloudNativeInstances', 'cloud_native_instances'),
                ('IpFollowStatus', 'ip_follow_status'))

    def validate(self):
        # Plain data holder; nothing to check.
        pass

    def to_map(self):
        """Serialize set (non-None) attributes into a wire-format dict."""
        _map = super(ModifyDomainRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate attributes from dict *m*, skipping absent/None keys."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyDomainResponseBody(TeaModel):
    """Response payload of ModifyDomain; carries only the API request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyDomainResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyDomainResponse(TeaModel):
    """ModifyDomain response wrapper pairing transport headers with the parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyDomainResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyDomainResponseBody().from_map(body)
        return self
class ModifyDomainIpv6StatusRequest(TeaModel):
    """Request model for the ModifyDomainIpv6Status operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyDomainIpv6StatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyDomainIpv6StatusResponseBody(TeaModel):
    """Response payload of ModifyDomainIpv6Status; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyDomainIpv6StatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyDomainIpv6StatusResponse(TeaModel):
    """ModifyDomainIpv6Status response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyDomainIpv6StatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyDomainIpv6StatusResponseBody().from_map(body)
        return self
class ModifyLogRetrievalStatusRequest(TeaModel):
    """Request model for the ModifyLogRetrievalStatus operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyLogRetrievalStatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyLogRetrievalStatusResponseBody(TeaModel):
    """Response payload of ModifyLogRetrievalStatus; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyLogRetrievalStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyLogRetrievalStatusResponse(TeaModel):
    """ModifyLogRetrievalStatus response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyLogRetrievalStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyLogRetrievalStatusResponseBody().from_map(body)
        return self
class ModifyLogServiceStatusRequest(TeaModel):
    """Request model for the ModifyLogServiceStatus operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyLogServiceStatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyLogServiceStatusResponseBody(TeaModel):
    """Response payload of ModifyLogServiceStatus; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyLogServiceStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyLogServiceStatusResponse(TeaModel):
    """ModifyLogServiceStatus response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyLogServiceStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyLogServiceStatusResponseBody().from_map(body)
        return self
class ModifyProtectionModuleModeRequest(TeaModel):
    """Request model for the ModifyProtectionModuleMode operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('mode', 'Mode'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, mode=None,
                 instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.mode = mode
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionModuleModeRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionModuleModeResponseBody(TeaModel):
    """Response payload of ModifyProtectionModuleMode; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionModuleModeResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyProtectionModuleModeResponse(TeaModel):
    """ModifyProtectionModuleMode response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyProtectionModuleModeResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyProtectionModuleModeResponseBody().from_map(body)
        return self
class ModifyProtectionModuleRuleRequest(TeaModel):
    """Request model for the ModifyProtectionModuleRule operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('rule', 'Rule'),
        ('rule_id', 'RuleId'),
        ('lock_version', 'LockVersion'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, rule=None,
                 rule_id=None, lock_version=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.rule = rule
        self.rule_id = rule_id
        self.lock_version = lock_version
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionModuleRuleRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionModuleRuleResponseBody(TeaModel):
    """Response payload of ModifyProtectionModuleRule; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionModuleRuleResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyProtectionModuleRuleResponse(TeaModel):
    """ModifyProtectionModuleRule response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyProtectionModuleRuleResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyProtectionModuleRuleResponseBody().from_map(body)
        return self
class ModifyProtectionModuleStatusRequest(TeaModel):
    """Request model for the ModifyProtectionModuleStatus operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('module_status', 'ModuleStatus'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, module_status=None,
                 instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.module_status = module_status
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionModuleStatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionModuleStatusResponseBody(TeaModel):
    """Response payload of ModifyProtectionModuleStatus; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionModuleStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyProtectionModuleStatusResponse(TeaModel):
    """ModifyProtectionModuleStatus response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyProtectionModuleStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyProtectionModuleStatusResponseBody().from_map(body)
        return self
class ModifyProtectionRuleCacheStatusRequest(TeaModel):
    """Request model for the ModifyProtectionRuleCacheStatus operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('domain', 'Domain'),
        ('rule_id', 'RuleId'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, rule_id=None, defense_type=None,
                 instance_id=None):
        self.domain = domain
        self.rule_id = rule_id
        self.defense_type = defense_type
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionRuleCacheStatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionRuleCacheStatusResponseBody(TeaModel):
    """Response payload of ModifyProtectionRuleCacheStatus; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionRuleCacheStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyProtectionRuleCacheStatusResponse(TeaModel):
    """ModifyProtectionRuleCacheStatus response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyProtectionRuleCacheStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyProtectionRuleCacheStatusResponseBody().from_map(body)
        return self
class ModifyProtectionRuleStatusRequest(TeaModel):
    """Request model for the ModifyProtectionRuleStatus operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('rule_id', 'RuleId'),
        ('rule_status', 'RuleStatus'),
        ('lock_version', 'LockVersion'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, rule_id=None,
                 rule_status=None, lock_version=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.rule_id = rule_id
        self.rule_status = rule_status
        self.lock_version = lock_version
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionRuleStatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyProtectionRuleStatusResponseBody(TeaModel):
    """Response payload of ModifyProtectionRuleStatus; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(ModifyProtectionRuleStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class ModifyProtectionRuleStatusResponse(TeaModel):
    """ModifyProtectionRuleStatus response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(ModifyProtectionRuleStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = ModifyProtectionRuleStatusResponseBody().from_map(body)
        return self
class SetDomainRuleGroupRequest(TeaModel):
    """Request model for the SetDomainRuleGroup operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('domains', 'Domains'),
        ('rule_group_id', 'RuleGroupId'),
        ('waf_version', 'WafVersion'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, domains=None, rule_group_id=None, waf_version=None,
                 instance_id=None, resource_group_id=None):
        self.domains = domains
        self.rule_group_id = rule_group_id
        self.waf_version = waf_version
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(SetDomainRuleGroupRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class SetDomainRuleGroupResponseBody(TeaModel):
    """Response payload of SetDomainRuleGroup; carries only the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(SetDomainRuleGroupResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        data = m or dict()
        request_id = data.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class SetDomainRuleGroupResponse(TeaModel):
    """SetDomainRuleGroup response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(SetDomainRuleGroupResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = SetDomainRuleGroupResponseBody().from_map(body)
        return self
|
<mask token>
class DescribeCertificatesResponseBody(TeaModel):
    def __init__(self, request_id=None, certificates=None):
        # request_id: ID of the originating API request.
        # certificates: list of DescribeCertificatesResponseBodyCertificates
        # models (populated by from_map).
        self.request_id = request_id
        self.certificates = certificates
<mask token>
<mask token>
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.certificates = []
if m.get('Certificates') is not None:
for k in m.get('Certificates'):
temp_model = DescribeCertificatesResponseBodyCertificates()
self.certificates.append(temp_model.from_map(k))
return self
class DescribeCertificatesResponse(TeaModel):
    """DescribeCertificates response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeCertificatesResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = DescribeCertificatesResponseBody().from_map(body)
        return self
class DescribeCertMatchStatusRequest(TeaModel):
    """Request model for the DescribeCertMatchStatus operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('domain', 'Domain'),
        ('certificate', 'Certificate'),
        ('private_key', 'PrivateKey'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, certificate=None, private_key=None,
                 instance_id=None):
        self.domain = domain
        self.certificate = certificate
        self.private_key = private_key
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeCertMatchStatusRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeCertMatchStatusResponseBody(TeaModel):
    """Response payload of DescribeCertMatchStatus (request ID + match result)."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('request_id', 'RequestId'),
        ('match_status', 'MatchStatus'),
    )

    def __init__(self, request_id=None, match_status=None):
        self.request_id = request_id
        self.match_status = match_status

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeCertMatchStatusResponseBody, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeCertMatchStatusResponse(TeaModel):
    """DescribeCertMatchStatus response wrapper (transport headers + parsed body)."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a completed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super(DescribeCertMatchStatusResponse, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        data = m or dict()
        headers = data.get('headers')
        if headers is not None:
            self.headers = headers
        body = data.get('body')
        if body is not None:
            self.body = DescribeCertMatchStatusResponseBody().from_map(body)
        return self
class DescribeDomainRequest(TeaModel):
    """Request model for the DescribeDomain operation."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
    )

    def __init__(self, instance_id=None, domain=None):
        self.instance_id = instance_id
        self.domain = domain

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeDomainRequest, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs(
        TeaModel):
    """One protocol/ports pair of a cloud-native instance's port configuration."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('protocol', 'Protocol'),
        ('ports', 'Ports'),
    )

    def __init__(self, protocol=None, ports=None):
        self.protocol = protocol
        self.ports = ports

    def validate(self):
        pass

    def to_map(self):
        mapped = super(
            DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs,
            self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeDomainResponseBodyDomainCloudNativeInstances(TeaModel):
    """One cloud-native product instance bound to the domain, including its
    per-protocol port configuration."""

    # Scalar (attribute, wire key) pairs; ProtocolPortConfigs is handled
    # separately because it nests a list of sub-models.
    _SCALARS = (
        ('redirection_type_name', 'RedirectionTypeName'),
        ('cloud_native_product_name', 'CloudNativeProductName'),
        ('instance_id', 'InstanceId'),
        ('ipaddress_list', 'IPAddressList'),
    )

    def __init__(self, protocol_port_configs=None, redirection_type_name=None,
                 cloud_native_product_name=None, instance_id=None,
                 ipaddress_list=None):
        self.protocol_port_configs = protocol_port_configs
        self.redirection_type_name = redirection_type_name
        self.cloud_native_product_name = cloud_native_product_name
        self.instance_id = instance_id
        self.ipaddress_list = ipaddress_list

    def validate(self):
        # Recursively validate each non-empty nested port-config model.
        for config in self.protocol_port_configs or []:
            if config:
                config.validate()

    def to_map(self):
        mapped = super(DescribeDomainResponseBodyDomainCloudNativeInstances,
                       self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        # The list key is always present in the output, even when empty.
        result['ProtocolPortConfigs'] = []
        if self.protocol_port_configs is not None:
            for config in self.protocol_port_configs:
                result['ProtocolPortConfigs'].append(
                    config.to_map() if config else None)
        for attr, key in self._SCALARS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        # The list attribute is always reset, even if the key is absent.
        self.protocol_port_configs = []
        if data.get('ProtocolPortConfigs') is not None:
            for item in data.get('ProtocolPortConfigs'):
                self.protocol_port_configs.append(
                    DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs
                    ().from_map(item))
        for attr, key in self._SCALARS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeDomainResponseBodyDomainLogHeaders(TeaModel):
    """One custom log-header entry of the domain (key 'k' / value 'v')."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELDS = (
        ('k', 'k'),
        ('v', 'v'),
    )

    def __init__(self, k=None, v=None):
        self.k = k
        self.v = v

    def validate(self):
        pass

    def to_map(self):
        mapped = super(DescribeDomainResponseBodyDomainLogHeaders, self).to_map()
        if mapped is not None:
            return mapped
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        data = m or dict()
        for attr, key in self._FIELDS:
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeDomainResponseBodyDomain(TeaModel):
    """Full configuration record of a single WAF-protected domain."""

    # Scalar (attribute, wire key) pairs that follow the two list fields,
    # kept in the original serialization order.
    _TAIL_FIELDS = (
        ('is_access_product', 'IsAccessProduct'),
        ('access_headers', 'AccessHeaders'),
        ('access_header_mode', 'AccessHeaderMode'),
        ('https_redirect', 'HttpsRedirect'),
        ('load_balancing', 'LoadBalancing'),
        ('ip_follow_status', 'IpFollowStatus'),
        ('access_type', 'AccessType'),
        ('version', 'Version'),
        ('cluster_type', 'ClusterType'),
        ('read_time', 'ReadTime'),
        ('write_time', 'WriteTime'),
        ('resource_group_id', 'ResourceGroupId'),
        ('cname', 'Cname'),
        ('source_ips', 'SourceIps'),
        ('connection_time', 'ConnectionTime'),
        ('https_port', 'HttpsPort'),
    )

    def __init__(self, http_2port=None, cloud_native_instances=None,
                 http_to_user_ip=None, http_port=None, log_headers=None,
                 is_access_product=None, access_headers=None,
                 access_header_mode=None, https_redirect=None,
                 load_balancing=None, ip_follow_status=None, access_type=None,
                 version=None, cluster_type=None, read_time=None,
                 write_time=None, resource_group_id=None, cname=None,
                 source_ips=None, connection_time=None, https_port=None):
        self.http_2port = http_2port
        # List of DescribeDomainResponseBodyDomainCloudNativeInstances models.
        self.cloud_native_instances = cloud_native_instances
        self.http_to_user_ip = http_to_user_ip
        self.http_port = http_port
        # List of DescribeDomainResponseBodyDomainLogHeaders models.
        self.log_headers = log_headers
        self.is_access_product = is_access_product
        self.access_headers = access_headers
        self.access_header_mode = access_header_mode
        self.https_redirect = https_redirect
        self.load_balancing = load_balancing
        self.ip_follow_status = ip_follow_status
        self.access_type = access_type
        self.version = version
        self.cluster_type = cluster_type
        self.read_time = read_time
        self.write_time = write_time
        self.resource_group_id = resource_group_id
        self.cname = cname
        self.source_ips = source_ips
        self.connection_time = connection_time
        self.https_port = https_port

    def validate(self):
        # Cascade validation into every element of both model lists.
        for group in (self.cloud_native_instances, self.log_headers):
            if group:
                for item in group:
                    if item:
                        item.validate()

    def to_map(self):
        cached = super(DescribeDomainResponseBodyDomain, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.http_2port is not None:
            payload['Http2Port'] = self.http_2port
        # The list keys are always emitted, even when the attribute is unset.
        payload['CloudNativeInstances'] = [
            item.to_map() if item else None
            for item in (self.cloud_native_instances or [])
        ]
        if self.http_to_user_ip is not None:
            payload['HttpToUserIp'] = self.http_to_user_ip
        if self.http_port is not None:
            payload['HttpPort'] = self.http_port
        payload['LogHeaders'] = [
            item.to_map() if item else None
            for item in (self.log_headers or [])
        ]
        for attr, key in self._TAIL_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('Http2Port') is not None:
            self.http_2port = m.get('Http2Port')
        # List attributes are always reset before being refilled.
        self.cloud_native_instances = [
            DescribeDomainResponseBodyDomainCloudNativeInstances().from_map(item)
            for item in (m.get('CloudNativeInstances') or [])
        ]
        if m.get('HttpToUserIp') is not None:
            self.http_to_user_ip = m.get('HttpToUserIp')
        if m.get('HttpPort') is not None:
            self.http_port = m.get('HttpPort')
        self.log_headers = [
            DescribeDomainResponseBodyDomainLogHeaders().from_map(item)
            for item in (m.get('LogHeaders') or [])
        ]
        for attr, key in self._TAIL_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainResponseBody(TeaModel):
    """Body of the DescribeDomain response: request id plus domain details."""

    def __init__(self, request_id=None, domain=None):
        self.request_id = request_id  # server-side request identifier
        self.domain = domain          # DescribeDomainResponseBodyDomain model

    def validate(self):
        if self.domain:
            self.domain.validate()

    def to_map(self):
        cached = super(DescribeDomainResponseBody, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        if self.domain is not None:
            payload['Domain'] = self.domain.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Domain') is not None:
            self.domain = DescribeDomainResponseBodyDomain().from_map(m['Domain'])
        return self
class DescribeDomainResponse(TeaModel):
    """API response envelope: raw HTTP headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # HTTP response headers
        self.body = body        # DescribeDomainResponseBody model

    def validate(self):
        # Both parts are mandatory; cascade validation into the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeDomainResponse, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainResponseBody().from_map(m['body'])
        return self
class DescribeDomainAdvanceConfigsRequest(TeaModel):
    """Request parameters for the DescribeDomainAdvanceConfigs API."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain_list', 'DomainList'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, domain_list=None,
                 resource_group_id=None):
        self.instance_id = instance_id
        self.domain_list = domain_list
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainAdvanceConfigsRequest, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile(TeaModel):
    """Advanced profile (ports, CNAME, cluster, status flags) of one domain."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_MAP = (
        ('http_2port', 'Http2Port'),
        ('ipv_6status', 'Ipv6Status'),
        ('http_port', 'HttpPort'),
        ('gslbstatus', 'GSLBStatus'),
        ('rs', 'Rs'),
        ('vip_service_status', 'VipServiceStatus'),
        ('cluster_type', 'ClusterType'),
        ('exclusive_vip_status', 'ExclusiveVipStatus'),
        ('cname', 'Cname'),
        ('cert_status', 'CertStatus'),
        ('https_port', 'HttpsPort'),
        ('resolved_type', 'ResolvedType'),
    )

    def __init__(self, http_2port=None, ipv_6status=None, http_port=None,
                 gslbstatus=None, rs=None, vip_service_status=None,
                 cluster_type=None, exclusive_vip_status=None, cname=None,
                 cert_status=None, https_port=None, resolved_type=None):
        self.http_2port = http_2port
        self.ipv_6status = ipv_6status
        self.http_port = http_port
        self.gslbstatus = gslbstatus
        self.rs = rs
        self.vip_service_status = vip_service_status
        self.cluster_type = cluster_type
        self.exclusive_vip_status = exclusive_vip_status
        self.cname = cname
        self.cert_status = cert_status
        self.https_port = https_port
        self.resolved_type = resolved_type

    def validate(self):
        pass

    def to_map(self):
        cached = super(
            DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile,
            self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainAdvanceConfigsResponseBodyDomainConfigs(TeaModel):
    """One advanced-config entry: a domain plus its profile model."""

    def __init__(self, profile=None, domain=None):
        self.profile = profile  # ...DomainConfigsProfile model
        self.domain = domain    # the domain this entry describes

    def validate(self):
        if self.profile:
            self.profile.validate()

    def to_map(self):
        cached = super(DescribeDomainAdvanceConfigsResponseBodyDomainConfigs,
                       self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.profile is not None:
            payload['Profile'] = self.profile.to_map()
        if self.domain is not None:
            payload['Domain'] = self.domain
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('Profile') is not None:
            self.profile = (
                DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile()
                .from_map(m['Profile']))
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        return self
class DescribeDomainAdvanceConfigsResponseBody(TeaModel):
    """Body of the DescribeDomainAdvanceConfigs response."""

    def __init__(self, request_id=None, domain_configs=None):
        self.request_id = request_id        # server-side request identifier
        self.domain_configs = domain_configs  # list of ...DomainConfigs models

    def validate(self):
        if self.domain_configs:
            for item in self.domain_configs:
                if item:
                    item.validate()

    def to_map(self):
        cached = super(DescribeDomainAdvanceConfigsResponseBody, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        # The list key is always emitted, even when the attribute is unset.
        payload['DomainConfigs'] = [
            item.to_map() if item else None
            for item in (self.domain_configs or [])
        ]
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        # The list attribute is always reset before being refilled.
        self.domain_configs = [
            DescribeDomainAdvanceConfigsResponseBodyDomainConfigs().from_map(item)
            for item in (m.get('DomainConfigs') or [])
        ]
        return self
class DescribeDomainAdvanceConfigsResponse(TeaModel):
    """API response envelope: raw HTTP headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # HTTP response headers
        self.body = body        # DescribeDomainAdvanceConfigsResponseBody model

    def validate(self):
        # Both parts are mandatory; cascade validation into the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeDomainAdvanceConfigsResponse, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainAdvanceConfigsResponseBody().from_map(m['body'])
        return self
class DescribeDomainBasicConfigsRequest(TeaModel):
    """Request parameters for the DescribeDomainBasicConfigs API."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain_key', 'DomainKey'),
        ('access_type', 'AccessType'),
        ('cloud_native_product_id', 'CloudNativeProductId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, domain_key=None, access_type=None,
                 cloud_native_product_id=None, page_number=None,
                 page_size=None, resource_group_id=None):
        self.instance_id = instance_id
        self.domain_key = domain_key
        self.access_type = access_type
        self.cloud_native_product_id = cloud_native_product_id
        self.page_number = page_number
        self.page_size = page_size
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainBasicConfigsRequest, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainBasicConfigsResponseBodyDomainConfigs(TeaModel):
    """Basic configuration (status flags and modes) of one domain."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_MAP = (
        ('status', 'Status'),
        ('domain', 'Domain'),
        ('owner', 'Owner'),
        ('cc_mode', 'CcMode'),
        ('cc_status', 'CcStatus'),
        ('access_type', 'AccessType'),
        ('version', 'Version'),
        ('acl_status', 'AclStatus'),
        ('waf_status', 'WafStatus'),
        ('waf_mode', 'WafMode'),
    )

    def __init__(self, status=None, domain=None, owner=None, cc_mode=None,
                 cc_status=None, access_type=None, version=None,
                 acl_status=None, waf_status=None, waf_mode=None):
        self.status = status
        self.domain = domain
        self.owner = owner
        self.cc_mode = cc_mode
        self.cc_status = cc_status
        self.access_type = access_type
        self.version = version
        self.acl_status = acl_status
        self.waf_status = waf_status
        self.waf_mode = waf_mode

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainBasicConfigsResponseBodyDomainConfigs,
                       self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainBasicConfigsResponseBody(TeaModel):
    """Body of the DescribeDomainBasicConfigs response (paged list)."""

    def __init__(self, total_count=None, request_id=None, domain_configs=None):
        self.total_count = total_count        # total entries across all pages
        self.request_id = request_id          # server-side request identifier
        self.domain_configs = domain_configs  # list of ...DomainConfigs models

    def validate(self):
        if self.domain_configs:
            for item in self.domain_configs:
                if item:
                    item.validate()

    def to_map(self):
        cached = super(DescribeDomainBasicConfigsResponseBody, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.total_count is not None:
            payload['TotalCount'] = self.total_count
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        # The list key is always emitted, even when the attribute is unset.
        payload['DomainConfigs'] = [
            item.to_map() if item else None
            for item in (self.domain_configs or [])
        ]
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        # The list attribute is always reset before being refilled.
        self.domain_configs = [
            DescribeDomainBasicConfigsResponseBodyDomainConfigs().from_map(item)
            for item in (m.get('DomainConfigs') or [])
        ]
        return self
class DescribeDomainBasicConfigsResponse(TeaModel):
    """API response envelope: raw HTTP headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # HTTP response headers
        self.body = body        # DescribeDomainBasicConfigsResponseBody model

    def validate(self):
        # Both parts are mandatory; cascade validation into the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeDomainBasicConfigsResponse, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainBasicConfigsResponseBody().from_map(m['body'])
        return self
class DescribeDomainListRequest(TeaModel):
    """Request parameters for the DescribeDomainList API."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_MAP = (
        ('resource_group_id', 'ResourceGroupId'),
        ('instance_id', 'InstanceId'),
        ('domain_name', 'DomainName'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('is_sub', 'IsSub'),
        ('domain_names', 'DomainNames'),
    )

    def __init__(self, resource_group_id=None, instance_id=None,
                 domain_name=None, page_number=None, page_size=None,
                 is_sub=None, domain_names=None):
        self.resource_group_id = resource_group_id
        self.instance_id = instance_id
        self.domain_name = domain_name
        self.page_number = page_number
        self.page_size = page_size
        self.is_sub = is_sub
        self.domain_names = domain_names

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainListRequest, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainListResponseBody(TeaModel):
    """Body of the DescribeDomainList response."""

    def __init__(self, total_count=None, request_id=None, domain_names=None):
        self.total_count = total_count    # total matching domains
        self.request_id = request_id      # server-side request identifier
        self.domain_names = domain_names

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainListResponseBody, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in (('total_count', 'TotalCount'),
                          ('request_id', 'RequestId'),
                          ('domain_names', 'DomainNames')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in (('total_count', 'TotalCount'),
                          ('request_id', 'RequestId'),
                          ('domain_names', 'DomainNames')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainListResponse(TeaModel):
    """API response envelope: raw HTTP headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # HTTP response headers
        self.body = body        # DescribeDomainListResponseBody model

    def validate(self):
        # Both parts are mandatory; cascade validation into the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeDomainListResponse, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainListResponseBody().from_map(m['body'])
        return self
class DescribeDomainNamesRequest(TeaModel):
    """Request parameters for the DescribeDomainNames API."""

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainNamesRequest, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('resource_group_id', 'ResourceGroupId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('resource_group_id', 'ResourceGroupId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainNamesResponseBody(TeaModel):
    """Body of the DescribeDomainNames response."""

    def __init__(self, request_id=None, domain_names=None):
        self.request_id = request_id    # server-side request identifier
        self.domain_names = domain_names

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainNamesResponseBody, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in (('request_id', 'RequestId'),
                          ('domain_names', 'DomainNames')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in (('request_id', 'RequestId'),
                          ('domain_names', 'DomainNames')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainNamesResponse(TeaModel):
    """API response envelope: raw HTTP headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # HTTP response headers
        self.body = body        # DescribeDomainNamesResponseBody model

    def validate(self):
        # Both parts are mandatory; cascade validation into the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeDomainNamesResponse, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainNamesResponseBody().from_map(m['body'])
        return self
class DescribeDomainRuleGroupRequest(TeaModel):
    """Request parameters for the DescribeDomainRuleGroup API."""

    def __init__(self, domain=None, instance_id=None):
        self.domain = domain
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainRuleGroupRequest, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in (('domain', 'Domain'),
                          ('instance_id', 'InstanceId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in (('domain', 'Domain'),
                          ('instance_id', 'InstanceId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainRuleGroupResponseBody(TeaModel):
    """Body of the DescribeDomainRuleGroup response."""

    def __init__(self, rule_group_id=None, request_id=None):
        self.rule_group_id = rule_group_id  # id of the rule group bound to the domain
        self.request_id = request_id        # server-side request identifier

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeDomainRuleGroupResponseBody, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in (('rule_group_id', 'RuleGroupId'),
                          ('request_id', 'RequestId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in (('rule_group_id', 'RuleGroupId'),
                          ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainRuleGroupResponse(TeaModel):
    """API response envelope: raw HTTP headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # HTTP response headers
        self.body = body        # DescribeDomainRuleGroupResponseBody model

    def validate(self):
        # Both parts are mandatory; cascade validation into the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeDomainRuleGroupResponse, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainRuleGroupResponseBody().from_map(m['body'])
        return self
class DescribeInstanceInfoRequest(TeaModel):
    """Request parameters for the DescribeInstanceInfo API."""

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeInstanceInfoRequest, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('resource_group_id', 'ResourceGroupId')):
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in (('instance_id', 'InstanceId'),
                          ('resource_group_id', 'ResourceGroupId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfoResponseBodyInstanceInfo(TeaModel):
    """Subscription and status details of one WAF instance."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_MAP = (
        ('status', 'Status'),
        ('end_date', 'EndDate'),
        ('version', 'Version'),
        ('remain_day', 'RemainDay'),
        ('region', 'Region'),
        ('pay_type', 'PayType'),
        ('in_debt', 'InDebt'),
        ('instance_id', 'InstanceId'),
        ('subscription_type', 'SubscriptionType'),
        ('trial', 'Trial'),
    )

    def __init__(self, status=None, end_date=None, version=None,
                 remain_day=None, region=None, pay_type=None, in_debt=None,
                 instance_id=None, subscription_type=None, trial=None):
        self.status = status
        self.end_date = end_date
        self.version = version
        self.remain_day = remain_day
        self.region = region
        self.pay_type = pay_type
        self.in_debt = in_debt
        self.instance_id = instance_id
        self.subscription_type = subscription_type
        self.trial = trial

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeInstanceInfoResponseBodyInstanceInfo,
                       self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfoResponseBody(TeaModel):
    """Body of the DescribeInstanceInfo response."""

    def __init__(self, request_id=None, instance_info=None):
        self.request_id = request_id        # server-side request identifier
        self.instance_info = instance_info  # ...InstanceInfo model

    def validate(self):
        if self.instance_info:
            self.instance_info.validate()

    def to_map(self):
        cached = super(DescribeInstanceInfoResponseBody, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        if self.instance_info is not None:
            payload['InstanceInfo'] = self.instance_info.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('InstanceInfo') is not None:
            self.instance_info = (
                DescribeInstanceInfoResponseBodyInstanceInfo()
                .from_map(m['InstanceInfo']))
        return self
class DescribeInstanceInfoResponse(TeaModel):
    """API response envelope: raw HTTP headers plus the parsed body model."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # HTTP response headers
        self.body = body        # DescribeInstanceInfoResponseBody model

    def validate(self):
        # Both parts are mandatory; cascade validation into the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super(DescribeInstanceInfoResponse, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m=None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceInfoResponseBody().from_map(m['body'])
        return self
class DescribeInstanceInfosRequest(TeaModel):
    """Request parameters for the DescribeInstanceInfos API."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_MAP = (
        ('instance_source', 'InstanceSource'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_source=None, instance_id=None,
                 resource_group_id=None):
        self.instance_source = instance_source
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeInstanceInfosRequest, self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfosResponseBodyInstanceInfos(TeaModel):
    """Subscription and status details of one WAF instance (list variant)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_MAP = (
        ('status', 'Status'),
        ('end_date', 'EndDate'),
        ('remain_day', 'RemainDay'),
        ('region', 'Region'),
        ('pay_type', 'PayType'),
        ('in_debt', 'InDebt'),
        ('instance_id', 'InstanceId'),
        ('subscription_type', 'SubscriptionType'),
        ('trial', 'Trial'),
    )

    def __init__(self, status=None, end_date=None, remain_day=None,
                 region=None, pay_type=None, in_debt=None, instance_id=None,
                 subscription_type=None, trial=None):
        self.status = status
        self.end_date = end_date
        self.remain_day = remain_day
        self.region = region
        self.pay_type = pay_type
        self.in_debt = in_debt
        self.instance_id = instance_id
        self.subscription_type = subscription_type
        self.trial = trial

    def validate(self):
        pass

    def to_map(self):
        cached = super(DescribeInstanceInfosResponseBodyInstanceInfos,
                       self).to_map()
        if cached is not None:
            return cached
        payload = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m=None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfosResponseBody(TeaModel):
    """DescribeInstanceInfos response body: request id plus a list of
    per-instance info sub-models."""

    def __init__(self, request_id=None, instance_infos=None):
        self.request_id = request_id
        self.instance_infos = instance_infos

    def validate(self):
        for info in (self.instance_infos or []):
            if info:
                info.validate()

    def to_map(self):
        _map = super(DescribeInstanceInfosResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        # Always emit the key; None entries are preserved as-is.
        result['InstanceInfos'] = [
            info.to_map() if info else None
            for info in (self.instance_infos or [])
        ]
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.instance_infos = []
        for item in (m.get('InstanceInfos') or []):
            self.instance_infos.append(
                DescribeInstanceInfosResponseBodyInstanceInfos().from_map(item))
        return self
class DescribeInstanceInfosResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    DescribeInstanceInfosResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(DescribeInstanceInfosResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceInfosResponseBody().from_map(m['body'])
        return self
class DescribeInstanceSpecInfoRequest(TeaModel):
    """Request parameters for DescribeInstanceSpecInfo (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeInstanceSpecInfoRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos(TeaModel):
    """Single spec-info code/value pair in the DescribeInstanceSpecInfo
    response."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('value', 'Value'),
        ('code', 'Code'),
    )

    def __init__(self, value=None, code=None):
        self.value = value
        self.code = code

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos,
                     self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceSpecInfoResponseBody(TeaModel):
    """DescribeInstanceSpecInfo response body: a list of spec-info pairs plus
    instance identification fields."""

    def __init__(self, instance_spec_infos=None, request_id=None,
                 instance_id=None, version=None, expire_time=None):
        self.instance_spec_infos = instance_spec_infos
        self.request_id = request_id
        self.instance_id = instance_id
        self.version = version
        self.expire_time = expire_time

    def validate(self):
        for info in (self.instance_spec_infos or []):
            if info:
                info.validate()

    def to_map(self):
        _map = super(DescribeInstanceSpecInfoResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = {}
        # The list key is always present; None entries are preserved as-is.
        result['InstanceSpecInfos'] = [
            info.to_map() if info else None
            for info in (self.instance_spec_infos or [])
        ]
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.version is not None:
            result['Version'] = self.version
        if self.expire_time is not None:
            result['ExpireTime'] = self.expire_time
        return result

    def from_map(self, m=None):
        m = m or dict()
        self.instance_spec_infos = []
        for item in (m.get('InstanceSpecInfos') or []):
            self.instance_spec_infos.append(
                DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos(
                ).from_map(item))
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Version') is not None:
            self.version = m.get('Version')
        if m.get('ExpireTime') is not None:
            self.expire_time = m.get('ExpireTime')
        return self
class DescribeInstanceSpecInfoResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    DescribeInstanceSpecInfoResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(DescribeInstanceSpecInfoResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceSpecInfoResponseBody().from_map(
                m['body'])
        return self
class DescribeLogServiceStatusRequest(TeaModel):
    """Request parameters for DescribeLogServiceStatus (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('region', 'Region'),
        ('resource_group_id', 'ResourceGroupId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('domain_names', 'DomainNames'),
    )

    def __init__(self, instance_id=None, region=None, resource_group_id=
                 None, page_number=None, page_size=None, domain_names=None):
        self.instance_id = instance_id
        self.region = region
        self.resource_group_id = resource_group_id
        self.page_number = page_number
        self.page_size = page_size
        self.domain_names = domain_names

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeLogServiceStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeLogServiceStatusResponseBodyDomainStatus(TeaModel):
    """Per-domain log-service status entry in the DescribeLogServiceStatus
    response."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('sls_log_active', 'SlsLogActive'),
    )

    def __init__(self, domain=None, sls_log_active=None):
        self.domain = domain
        self.sls_log_active = sls_log_active

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeLogServiceStatusResponseBodyDomainStatus, self
                     ).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeLogServiceStatusResponseBody(TeaModel):
    """DescribeLogServiceStatus response body: paging total, request id, and
    a list of per-domain status sub-models."""

    def __init__(self, total_count=None, request_id=None, domain_status=None):
        self.total_count = total_count
        self.request_id = request_id
        self.domain_status = domain_status

    def validate(self):
        for status in (self.domain_status or []):
            if status:
                status.validate()

    def to_map(self):
        _map = super(DescribeLogServiceStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        # Always emit the key; None entries are preserved as-is.
        result['DomainStatus'] = [
            status.to_map() if status else None
            for status in (self.domain_status or [])
        ]
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.domain_status = []
        for item in (m.get('DomainStatus') or []):
            self.domain_status.append(
                DescribeLogServiceStatusResponseBodyDomainStatus().from_map(
                    item))
        return self
class DescribeLogServiceStatusResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    DescribeLogServiceStatusResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(DescribeLogServiceStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeLogServiceStatusResponseBody().from_map(
                m['body'])
        return self
class DescribeProtectionModuleCodeConfigRequest(TeaModel):
    """Request parameters for DescribeProtectionModuleCodeConfig (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('source_ip', 'SourceIp'),
        ('lang', 'Lang'),
        ('code_type', 'CodeType'),
        ('code_value', 'CodeValue'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, source_ip=None, lang=None, code_type=None,
                 code_value=None, instance_id=None, resource_group_id=None):
        self.source_ip = source_ip
        self.lang = lang
        self.code_type = code_type
        self.code_value = code_value
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeProtectionModuleCodeConfigRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleCodeConfigResponseBody(TeaModel):
    """DescribeProtectionModuleCodeConfig response body (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('request_id', 'RequestId'),
        ('code_configs', 'CodeConfigs'),
    )

    def __init__(self, request_id=None, code_configs=None):
        self.request_id = request_id
        self.code_configs = code_configs

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeProtectionModuleCodeConfigResponseBody, self
                     ).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleCodeConfigResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    DescribeProtectionModuleCodeConfigResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(DescribeProtectionModuleCodeConfigResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleCodeConfigResponseBody(
            ).from_map(m['body'])
        return self
class DescribeProtectionModuleModeRequest(TeaModel):
    """Request parameters for DescribeProtectionModuleMode (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, domain=None, defense_type=None, instance_id=None,
                 resource_group_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeProtectionModuleModeRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleModeResponseBody(TeaModel):
    """DescribeProtectionModuleMode response body (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('learn_status', 'LearnStatus'),
        ('request_id', 'RequestId'),
        ('mode', 'Mode'),
    )

    def __init__(self, learn_status=None, request_id=None, mode=None):
        self.learn_status = learn_status
        self.request_id = request_id
        self.mode = mode

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeProtectionModuleModeResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleModeResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    DescribeProtectionModuleModeResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(DescribeProtectionModuleModeResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleModeResponseBody().from_map(
                m['body'])
        return self
class DescribeProtectionModuleRulesRequest(TeaModel):
    """Request parameters for DescribeProtectionModuleRules (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('page_size', 'PageSize'),
        ('page_number', 'PageNumber'),
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('query', 'Query'),
        ('lang', 'Lang'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, page_size=None, page_number=None, domain=None,
                 defense_type=None, query=None, lang=None, instance_id=None,
                 resource_group_id=None):
        self.page_size = page_size
        self.page_number = page_number
        self.domain = domain
        self.defense_type = defense_type
        self.query = query
        self.lang = lang
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeProtectionModuleRulesRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleRulesResponseBodyRules(TeaModel):
    """Single protection rule entry in the DescribeProtectionModuleRules
    response."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('status', 'Status'),
        ('time', 'Time'),
        ('version', 'Version'),
        ('content', 'Content'),
        ('rule_id', 'RuleId'),
    )

    def __init__(self, status=None, time=None, version=None, content=None,
                 rule_id=None):
        self.status = status
        self.time = time
        self.version = version
        self.content = content
        self.rule_id = rule_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeProtectionModuleRulesResponseBodyRules, self
                     ).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleRulesResponseBody(TeaModel):
    """DescribeProtectionModuleRules response body: paging total, request id,
    and a list of rule sub-models."""

    def __init__(self, total_count=None, request_id=None, rules=None):
        self.total_count = total_count
        self.request_id = request_id
        self.rules = rules

    def validate(self):
        for rule in (self.rules or []):
            if rule:
                rule.validate()

    def to_map(self):
        _map = super(DescribeProtectionModuleRulesResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        # Always emit the key; None entries are preserved as-is.
        result['Rules'] = [
            rule.to_map() if rule else None for rule in (self.rules or [])
        ]
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.rules = []
        for item in (m.get('Rules') or []):
            self.rules.append(
                DescribeProtectionModuleRulesResponseBodyRules().from_map(
                    item))
        return self
class DescribeProtectionModuleRulesResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    DescribeProtectionModuleRulesResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(DescribeProtectionModuleRulesResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleRulesResponseBody().from_map(
                m['body'])
        return self
class DescribeProtectionModuleStatusRequest(TeaModel):
    """Request parameters for DescribeProtectionModuleStatus (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeProtectionModuleStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleStatusResponseBody(TeaModel):
    """DescribeProtectionModuleStatus response body (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('request_id', 'RequestId'),
        ('module_status', 'ModuleStatus'),
    )

    def __init__(self, request_id=None, module_status=None):
        self.request_id = request_id
        self.module_status = module_status

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeProtectionModuleStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleStatusResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    DescribeProtectionModuleStatusResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(DescribeProtectionModuleStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleStatusResponseBody().from_map(
                m['body'])
        return self
class DescribeWafSourceIpSegmentRequest(TeaModel):
    """Request parameters for DescribeWafSourceIpSegment (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeWafSourceIpSegmentRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWafSourceIpSegmentResponseBody(TeaModel):
    """DescribeWafSourceIpSegment response body (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('request_id', 'RequestId'),
        ('ip_v6s', 'IpV6s'),
        ('ips', 'Ips'),
    )

    def __init__(self, request_id=None, ip_v6s=None, ips=None):
        self.request_id = request_id
        self.ip_v6s = ip_v6s
        self.ips = ips

    def validate(self):
        pass

    def to_map(self):
        _map = super(DescribeWafSourceIpSegmentResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWafSourceIpSegmentResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    DescribeWafSourceIpSegmentResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(DescribeWafSourceIpSegmentResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeWafSourceIpSegmentResponseBody().from_map(
                m['body'])
        return self
class ModifyDomainRequest(TeaModel):
    """Request parameters for ModifyDomain (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('source_ips', 'SourceIps'),
        ('load_balancing', 'LoadBalancing'),
        ('http_port', 'HttpPort'),
        ('https_port', 'HttpsPort'),
        ('http_2port', 'Http2Port'),
        ('https_redirect', 'HttpsRedirect'),
        ('http_to_user_ip', 'HttpToUserIp'),
        ('is_access_product', 'IsAccessProduct'),
        ('log_headers', 'LogHeaders'),
        ('cluster_type', 'ClusterType'),
        ('connection_time', 'ConnectionTime'),
        ('read_time', 'ReadTime'),
        ('write_time', 'WriteTime'),
        ('access_type', 'AccessType'),
        ('cloud_native_instances', 'CloudNativeInstances'),
        ('ip_follow_status', 'IpFollowStatus'),
    )

    def __init__(self, instance_id=None, domain=None, source_ips=None,
                 load_balancing=None, http_port=None, https_port=None,
                 http_2port=None, https_redirect=None, http_to_user_ip=None,
                 is_access_product=None, log_headers=None, cluster_type=None,
                 connection_time=None, read_time=None, write_time=None,
                 access_type=None, cloud_native_instances=None,
                 ip_follow_status=None):
        self.instance_id = instance_id
        self.domain = domain
        self.source_ips = source_ips
        self.load_balancing = load_balancing
        self.http_port = http_port
        self.https_port = https_port
        self.http_2port = http_2port
        self.https_redirect = https_redirect
        self.http_to_user_ip = http_to_user_ip
        self.is_access_product = is_access_product
        self.log_headers = log_headers
        self.cluster_type = cluster_type
        self.connection_time = connection_time
        self.read_time = read_time
        self.write_time = write_time
        self.access_type = access_type
        self.cloud_native_instances = cloud_native_instances
        self.ip_follow_status = ip_follow_status

    def validate(self):
        pass

    def to_map(self):
        _map = super(ModifyDomainRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyDomainResponseBody(TeaModel):
    """ModifyDomain response body carrying only the request id (TeaModel)."""

    def __init__(self, request_id=None):
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(ModifyDomainResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyDomainResponse(TeaModel):
    """Response wrapper pairing transport headers with a parsed
    ModifyDomainResponseBody."""

    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; deep validation is delegated to the body.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super(ModifyDomainResponse, self).to_map()
        if _map is not None:
            return _map
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyDomainResponseBody().from_map(m['body'])
        return self
class ModifyDomainIpv6StatusRequest(TeaModel):
    """Request parameters for ModifyDomainIpv6Status (TeaModel)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled

    def validate(self):
        pass

    def to_map(self):
        _map = super(ModifyDomainIpv6StatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyDomainIpv6StatusResponseBody(TeaModel):
    """Body of the ModifyDomainIpv6Status response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyDomainIpv6StatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyDomainIpv6StatusResponse(TeaModel):
    """ModifyDomainIpv6Status API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(ModifyDomainIpv6StatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = ModifyDomainIpv6StatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ModifyLogRetrievalStatusRequest(TeaModel):
    """Request parameters for the ModifyLogRetrievalStatus API (toggle per-domain log retrieval)."""
    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyLogRetrievalStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.enabled is not None:
            result['Enabled'] = self.enabled
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('Enabled') is not None:
            self.enabled = m.get('Enabled')
        return self
class ModifyLogRetrievalStatusResponseBody(TeaModel):
    """Body of the ModifyLogRetrievalStatus response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyLogRetrievalStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyLogRetrievalStatusResponse(TeaModel):
    """ModifyLogRetrievalStatus API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(ModifyLogRetrievalStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = ModifyLogRetrievalStatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ModifyLogServiceStatusRequest(TeaModel):
    """Request parameters for the ModifyLogServiceStatus API (toggle per-domain log service)."""
    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id
        self.domain = domain
        self.enabled = enabled
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyLogServiceStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.enabled is not None:
            result['Enabled'] = self.enabled
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('Enabled') is not None:
            self.enabled = m.get('Enabled')
        return self
class ModifyLogServiceStatusResponseBody(TeaModel):
    """Body of the ModifyLogServiceStatus response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyLogServiceStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyLogServiceStatusResponse(TeaModel):
    """ModifyLogServiceStatus API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(ModifyLogServiceStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = ModifyLogServiceStatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ModifyProtectionModuleModeRequest(TeaModel):
    """Request parameters for the ModifyProtectionModuleMode API."""
    def __init__(self, domain=None, defense_type=None, mode=None,
                 instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.mode = mode
        self.instance_id = instance_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionModuleModeRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.defense_type is not None:
            result['DefenseType'] = self.defense_type
        if self.mode is not None:
            result['Mode'] = self.mode
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('DefenseType') is not None:
            self.defense_type = m.get('DefenseType')
        if m.get('Mode') is not None:
            self.mode = m.get('Mode')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class ModifyProtectionModuleModeResponseBody(TeaModel):
    """Body of the ModifyProtectionModuleMode response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionModuleModeResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionModuleModeResponse(TeaModel):
    """ModifyProtectionModuleMode API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(ModifyProtectionModuleModeResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = ModifyProtectionModuleModeResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ModifyProtectionModuleRuleRequest(TeaModel):
    """Request parameters for the ModifyProtectionModuleRule API."""
    def __init__(self, domain=None, defense_type=None, rule=None, rule_id=
        None, lock_version=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.rule = rule
        self.rule_id = rule_id
        self.lock_version = lock_version
        self.instance_id = instance_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionModuleRuleRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.defense_type is not None:
            result['DefenseType'] = self.defense_type
        if self.rule is not None:
            result['Rule'] = self.rule
        if self.rule_id is not None:
            result['RuleId'] = self.rule_id
        if self.lock_version is not None:
            result['LockVersion'] = self.lock_version
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('DefenseType') is not None:
            self.defense_type = m.get('DefenseType')
        if m.get('Rule') is not None:
            self.rule = m.get('Rule')
        if m.get('RuleId') is not None:
            self.rule_id = m.get('RuleId')
        if m.get('LockVersion') is not None:
            self.lock_version = m.get('LockVersion')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class ModifyProtectionModuleRuleResponseBody(TeaModel):
    """Body of the ModifyProtectionModuleRule response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionModuleRuleResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionModuleRuleResponse(TeaModel):
    """ModifyProtectionModuleRule API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(ModifyProtectionModuleRuleResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = ModifyProtectionModuleRuleResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ModifyProtectionModuleStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionModuleStatus API."""
    def __init__(self, domain=None, defense_type=None, module_status=None,
                 instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.module_status = module_status
        self.instance_id = instance_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionModuleStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.defense_type is not None:
            result['DefenseType'] = self.defense_type
        if self.module_status is not None:
            result['ModuleStatus'] = self.module_status
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('DefenseType') is not None:
            self.defense_type = m.get('DefenseType')
        if m.get('ModuleStatus') is not None:
            self.module_status = m.get('ModuleStatus')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class ModifyProtectionModuleStatusResponseBody(TeaModel):
    """Body of the ModifyProtectionModuleStatus response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionModuleStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionModuleStatusResponse(TeaModel):
    """ModifyProtectionModuleStatus API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(ModifyProtectionModuleStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = ModifyProtectionModuleStatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ModifyProtectionRuleCacheStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionRuleCacheStatus API."""
    def __init__(self, domain=None, rule_id=None, defense_type=None,
                 instance_id=None):
        self.domain = domain
        self.rule_id = rule_id
        self.defense_type = defense_type
        self.instance_id = instance_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionRuleCacheStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.rule_id is not None:
            result['RuleId'] = self.rule_id
        if self.defense_type is not None:
            result['DefenseType'] = self.defense_type
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('RuleId') is not None:
            self.rule_id = m.get('RuleId')
        if m.get('DefenseType') is not None:
            self.defense_type = m.get('DefenseType')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class ModifyProtectionRuleCacheStatusResponseBody(TeaModel):
    """Body of the ModifyProtectionRuleCacheStatus response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionRuleCacheStatusResponseBody, self).to_map(
            )
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionRuleCacheStatusResponse(TeaModel):
    """ModifyProtectionRuleCacheStatus API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(ModifyProtectionRuleCacheStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = ModifyProtectionRuleCacheStatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ModifyProtectionRuleStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionRuleStatus API."""
    def __init__(self, domain=None, defense_type=None, rule_id=None,
                 rule_status=None, lock_version=None, instance_id=None):
        self.domain = domain
        self.defense_type = defense_type
        self.rule_id = rule_id
        self.rule_status = rule_status
        self.lock_version = lock_version
        self.instance_id = instance_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionRuleStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.defense_type is not None:
            result['DefenseType'] = self.defense_type
        if self.rule_id is not None:
            result['RuleId'] = self.rule_id
        if self.rule_status is not None:
            result['RuleStatus'] = self.rule_status
        if self.lock_version is not None:
            result['LockVersion'] = self.lock_version
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('DefenseType') is not None:
            self.defense_type = m.get('DefenseType')
        if m.get('RuleId') is not None:
            self.rule_id = m.get('RuleId')
        if m.get('RuleStatus') is not None:
            self.rule_status = m.get('RuleStatus')
        if m.get('LockVersion') is not None:
            self.lock_version = m.get('LockVersion')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class ModifyProtectionRuleStatusResponseBody(TeaModel):
    """Body of the ModifyProtectionRuleStatus response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(ModifyProtectionRuleStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ModifyProtectionRuleStatusResponse(TeaModel):
    """ModifyProtectionRuleStatus API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(ModifyProtectionRuleStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = ModifyProtectionRuleStatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class SetDomainRuleGroupRequest(TeaModel):
    """Request parameters for the SetDomainRuleGroup API."""
    def __init__(self, domains=None, rule_group_id=None, waf_version=None,
                 instance_id=None, resource_group_id=None):
        self.domains = domains
        self.rule_group_id = rule_group_id
        self.waf_version = waf_version
        self.instance_id = instance_id
        self.resource_group_id = resource_group_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(SetDomainRuleGroupRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domains is not None:
            result['Domains'] = self.domains
        if self.rule_group_id is not None:
            result['RuleGroupId'] = self.rule_group_id
        if self.waf_version is not None:
            result['WafVersion'] = self.waf_version
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.resource_group_id is not None:
            result['ResourceGroupId'] = self.resource_group_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domains') is not None:
            self.domains = m.get('Domains')
        if m.get('RuleGroupId') is not None:
            self.rule_group_id = m.get('RuleGroupId')
        if m.get('WafVersion') is not None:
            self.waf_version = m.get('WafVersion')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('ResourceGroupId') is not None:
            self.resource_group_id = m.get('ResourceGroupId')
        return self
class SetDomainRuleGroupResponseBody(TeaModel):
    """Body of the SetDomainRuleGroup response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super(SetDomainRuleGroupResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class SetDomainRuleGroupResponse(TeaModel):
    """SetDomainRuleGroup API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers
        self.body = body
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(SetDomainRuleGroupResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = SetDomainRuleGroupResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
class CreateCertificateRequest(TeaModel):
    """Request parameters for the CreateCertificate API (upload cert + key for a domain)."""
    def __init__(self, domain=None, certificate=None, private_key=None, certificate_name=None, instance_id=None):
        self.domain = domain  # type: str
        self.certificate = certificate  # type: str
        self.private_key = private_key  # type: str
        self.certificate_name = certificate_name  # type: str
        self.instance_id = instance_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(CreateCertificateRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.certificate is not None:
            result['Certificate'] = self.certificate
        if self.private_key is not None:
            result['PrivateKey'] = self.private_key
        if self.certificate_name is not None:
            result['CertificateName'] = self.certificate_name
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('Certificate') is not None:
            self.certificate = m.get('Certificate')
        if m.get('PrivateKey') is not None:
            self.private_key = m.get('PrivateKey')
        if m.get('CertificateName') is not None:
            self.certificate_name = m.get('CertificateName')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class CreateCertificateResponseBody(TeaModel):
    """Body of the CreateCertificate response: request id plus the new certificate id."""
    def __init__(self, request_id=None, certificate_id=None):
        self.request_id = request_id  # type: str
        self.certificate_id = certificate_id  # type: long
    def validate(self):
        pass
    def to_map(self):
        _map = super(CreateCertificateResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.certificate_id is not None:
            result['CertificateId'] = self.certificate_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('CertificateId') is not None:
            self.certificate_id = m.get('CertificateId')
        return self
class CreateCertificateResponse(TeaModel):
    """CreateCertificate API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: CreateCertificateResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(CreateCertificateResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = CreateCertificateResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class CreateCertificateByCertificateIdRequest(TeaModel):
    """Request parameters for the CreateCertificateByCertificateId API (bind an existing cert)."""
    def __init__(self, domain=None, certificate_id=None, instance_id=None):
        self.domain = domain  # type: str
        self.certificate_id = certificate_id  # type: long
        self.instance_id = instance_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(CreateCertificateByCertificateIdRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.certificate_id is not None:
            result['CertificateId'] = self.certificate_id
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('CertificateId') is not None:
            self.certificate_id = m.get('CertificateId')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class CreateCertificateByCertificateIdResponseBody(TeaModel):
    """Body of the CreateCertificateByCertificateId response: request id plus certificate id."""
    def __init__(self, request_id=None, certificate_id=None):
        self.request_id = request_id  # type: str
        self.certificate_id = certificate_id  # type: long
    def validate(self):
        pass
    def to_map(self):
        _map = super(CreateCertificateByCertificateIdResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.certificate_id is not None:
            result['CertificateId'] = self.certificate_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('CertificateId') is not None:
            self.certificate_id = m.get('CertificateId')
        return self
class CreateCertificateByCertificateIdResponse(TeaModel):
    """CreateCertificateByCertificateId API response: HTTP headers plus the parsed body model."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: CreateCertificateByCertificateIdResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(CreateCertificateByCertificateIdResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = CreateCertificateByCertificateIdResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class CreateDomainRequest(TeaModel):
    """Request parameters for the CreateDomain API (add a domain to a WAF instance)."""
    def __init__(self, instance_id=None, domain=None, source_ips=None, is_access_product=None,
                 access_header_mode=None, access_headers=None, load_balancing=None, log_headers=None, http_port=None, https_port=None,
                 http_2port=None, http_to_user_ip=None, https_redirect=None, cluster_type=None, resource_group_id=None,
                 connection_time=None, read_time=None, write_time=None, access_type=None, cloud_native_instances=None,
                 ip_follow_status=None):
        self.instance_id = instance_id  # type: str
        self.domain = domain  # type: str
        self.source_ips = source_ips  # type: str
        self.is_access_product = is_access_product  # type: int
        self.access_header_mode = access_header_mode  # type: int
        self.access_headers = access_headers  # type: str
        self.load_balancing = load_balancing  # type: int
        self.log_headers = log_headers  # type: str
        self.http_port = http_port  # type: str
        self.https_port = https_port  # type: str
        self.http_2port = http_2port  # type: str
        self.http_to_user_ip = http_to_user_ip  # type: int
        self.https_redirect = https_redirect  # type: int
        self.cluster_type = cluster_type  # type: int
        self.resource_group_id = resource_group_id  # type: str
        self.connection_time = connection_time  # type: int
        self.read_time = read_time  # type: int
        self.write_time = write_time  # type: int
        self.access_type = access_type  # type: str
        self.cloud_native_instances = cloud_native_instances  # type: str
        self.ip_follow_status = ip_follow_status  # type: int
    def validate(self):
        pass
    def to_map(self):
        _map = super(CreateDomainRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.source_ips is not None:
            result['SourceIps'] = self.source_ips
        if self.is_access_product is not None:
            result['IsAccessProduct'] = self.is_access_product
        if self.access_header_mode is not None:
            result['AccessHeaderMode'] = self.access_header_mode
        if self.access_headers is not None:
            result['AccessHeaders'] = self.access_headers
        if self.load_balancing is not None:
            result['LoadBalancing'] = self.load_balancing
        if self.log_headers is not None:
            result['LogHeaders'] = self.log_headers
        if self.http_port is not None:
            result['HttpPort'] = self.http_port
        if self.https_port is not None:
            result['HttpsPort'] = self.https_port
        if self.http_2port is not None:
            result['Http2Port'] = self.http_2port
        if self.http_to_user_ip is not None:
            result['HttpToUserIp'] = self.http_to_user_ip
        if self.https_redirect is not None:
            result['HttpsRedirect'] = self.https_redirect
        if self.cluster_type is not None:
            result['ClusterType'] = self.cluster_type
        if self.resource_group_id is not None:
            result['ResourceGroupId'] = self.resource_group_id
        if self.connection_time is not None:
            result['ConnectionTime'] = self.connection_time
        if self.read_time is not None:
            result['ReadTime'] = self.read_time
        if self.write_time is not None:
            result['WriteTime'] = self.write_time
        if self.access_type is not None:
            result['AccessType'] = self.access_type
        if self.cloud_native_instances is not None:
            result['CloudNativeInstances'] = self.cloud_native_instances
        if self.ip_follow_status is not None:
            result['IpFollowStatus'] = self.ip_follow_status
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('SourceIps') is not None:
            self.source_ips = m.get('SourceIps')
        if m.get('IsAccessProduct') is not None:
            self.is_access_product = m.get('IsAccessProduct')
        if m.get('AccessHeaderMode') is not None:
            self.access_header_mode = m.get('AccessHeaderMode')
        if m.get('AccessHeaders') is not None:
            self.access_headers = m.get('AccessHeaders')
        if m.get('LoadBalancing') is not None:
            self.load_balancing = m.get('LoadBalancing')
        if m.get('LogHeaders') is not None:
            self.log_headers = m.get('LogHeaders')
        if m.get('HttpPort') is not None:
            self.http_port = m.get('HttpPort')
        if m.get('HttpsPort') is not None:
            self.https_port = m.get('HttpsPort')
        if m.get('Http2Port') is not None:
            self.http_2port = m.get('Http2Port')
        if m.get('HttpToUserIp') is not None:
            self.http_to_user_ip = m.get('HttpToUserIp')
        if m.get('HttpsRedirect') is not None:
            self.https_redirect = m.get('HttpsRedirect')
        if m.get('ClusterType') is not None:
            self.cluster_type = m.get('ClusterType')
        if m.get('ResourceGroupId') is not None:
            self.resource_group_id = m.get('ResourceGroupId')
        if m.get('ConnectionTime') is not None:
            self.connection_time = m.get('ConnectionTime')
        if m.get('ReadTime') is not None:
            self.read_time = m.get('ReadTime')
        if m.get('WriteTime') is not None:
            self.write_time = m.get('WriteTime')
        if m.get('AccessType') is not None:
            self.access_type = m.get('AccessType')
        if m.get('CloudNativeInstances') is not None:
            self.cloud_native_instances = m.get('CloudNativeInstances')
        if m.get('IpFollowStatus') is not None:
            self.ip_follow_status = m.get('IpFollowStatus')
        return self
class CreateDomainResponseBody(TeaModel):
    """Body of the CreateDomain response: request id plus the CNAME assigned to the domain."""
    def __init__(self, request_id=None, cname=None):
        self.request_id = request_id  # type: str
        self.cname = cname  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(CreateDomainResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.cname is not None:
            result['Cname'] = self.cname
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Cname') is not None:
            self.cname = m.get('Cname')
        return self
class CreateDomainResponse(TeaModel):
    """Wrapper for the CreateDomain API response: HTTP headers plus the parsed body."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: CreateDomainResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(CreateDomainResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = CreateDomainResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class CreateProtectionModuleRuleRequest(TeaModel):
    """Request parameters for the CreateProtectionModuleRule API operation."""
    def __init__(self, domain=None, defense_type=None, rule=None, instance_id=None):
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.rule = rule  # type: str
        self.instance_id = instance_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(CreateProtectionModuleRuleRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.defense_type is not None:
            result['DefenseType'] = self.defense_type
        if self.rule is not None:
            result['Rule'] = self.rule
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('DefenseType') is not None:
            self.defense_type = m.get('DefenseType')
        if m.get('Rule') is not None:
            self.rule = m.get('Rule')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class CreateProtectionModuleRuleResponseBody(TeaModel):
    """Body of the CreateProtectionModuleRule response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(CreateProtectionModuleRuleResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class CreateProtectionModuleRuleResponse(TeaModel):
    """Wrapper for the CreateProtectionModuleRule API response: HTTP headers plus the parsed body."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: CreateProtectionModuleRuleResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(CreateProtectionModuleRuleResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = CreateProtectionModuleRuleResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DeleteDomainRequest(TeaModel):
    """Request parameters for the DeleteDomain API operation."""

    def __init__(self, instance_id=None, domain=None):
        # instance_id: str — id of the WAF instance the domain belongs to
        self.instance_id = instance_id
        # domain: str — domain name to delete
        self.domain = domain

    def validate(self):
        pass

    def to_map(self):
        _map = super(DeleteDomainRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.instance_id, 'InstanceId'), (self.domain, 'Domain')):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        m = m or dict()
        instance_id = m.get('InstanceId')
        if instance_id is not None:
            self.instance_id = instance_id
        domain = m.get('Domain')
        if domain is not None:
            self.domain = domain
        return self
class DeleteDomainResponseBody(TeaModel):
    """Body of the DeleteDomain response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DeleteDomainResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DeleteDomainResponse(TeaModel):
    """Wrapper for the DeleteDomain API response: HTTP headers plus the parsed body."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DeleteDomainResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(DeleteDomainResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DeleteDomainResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DeleteInstanceRequest(TeaModel):
    """Request parameters for the DeleteInstance API operation."""
    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DeleteInstanceRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.resource_group_id is not None:
            result['ResourceGroupId'] = self.resource_group_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('ResourceGroupId') is not None:
            self.resource_group_id = m.get('ResourceGroupId')
        return self
class DeleteInstanceResponseBody(TeaModel):
    """Body of the DeleteInstance response; carries only the request id."""

    def __init__(self, request_id=None):
        # request_id: str — server-side id of this API call
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super(DeleteInstanceResponseBody, self).to_map()
        if _map is not None:
            return _map
        serialized = dict()
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m=None):
        m = m or dict()
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class DeleteInstanceResponse(TeaModel):
    """Wrapper for the DeleteInstance API response: HTTP headers plus the parsed body."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DeleteInstanceResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(DeleteInstanceResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DeleteInstanceResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DeleteProtectionModuleRuleRequest(TeaModel):
    """Request parameters for the DeleteProtectionModuleRule API operation."""
    def __init__(self, domain=None, defense_type=None, rule_id=None, instance_id=None):
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.rule_id = rule_id  # type: long
        self.instance_id = instance_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DeleteProtectionModuleRuleRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.defense_type is not None:
            result['DefenseType'] = self.defense_type
        if self.rule_id is not None:
            result['RuleId'] = self.rule_id
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('DefenseType') is not None:
            self.defense_type = m.get('DefenseType')
        if m.get('RuleId') is not None:
            self.rule_id = m.get('RuleId')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class DeleteProtectionModuleRuleResponseBody(TeaModel):
    """Body of the DeleteProtectionModuleRule response; carries only the request id."""
    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DeleteProtectionModuleRuleResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DeleteProtectionModuleRuleResponse(TeaModel):
    """Wrapper for the DeleteProtectionModuleRule API response: HTTP headers plus the parsed body."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DeleteProtectionModuleRuleResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(DeleteProtectionModuleRuleResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DeleteProtectionModuleRuleResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeCertificatesRequest(TeaModel):
    """Request parameters for the DescribeCertificates API operation."""
    def __init__(self, instance_id=None, domain=None):
        self.instance_id = instance_id  # type: str
        self.domain = domain  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeCertificatesRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.domain is not None:
            result['Domain'] = self.domain
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        return self
class DescribeCertificatesResponseBodyCertificates(TeaModel):
    """One `Certificates` entry in the DescribeCertificates response body."""
    def __init__(self, certificate_name=None, common_name=None, sans=None, is_using=None, certificate_id=None):
        self.certificate_name = certificate_name  # type: str
        self.common_name = common_name  # type: str
        self.sans = sans  # type: list[str]
        self.is_using = is_using  # type: bool
        self.certificate_id = certificate_id  # type: long
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeCertificatesResponseBodyCertificates, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.certificate_name is not None:
            result['CertificateName'] = self.certificate_name
        if self.common_name is not None:
            result['CommonName'] = self.common_name
        if self.sans is not None:
            result['Sans'] = self.sans
        if self.is_using is not None:
            result['IsUsing'] = self.is_using
        if self.certificate_id is not None:
            result['CertificateId'] = self.certificate_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('CertificateName') is not None:
            self.certificate_name = m.get('CertificateName')
        if m.get('CommonName') is not None:
            self.common_name = m.get('CommonName')
        if m.get('Sans') is not None:
            self.sans = m.get('Sans')
        if m.get('IsUsing') is not None:
            self.is_using = m.get('IsUsing')
        if m.get('CertificateId') is not None:
            self.certificate_id = m.get('CertificateId')
        return self
class DescribeCertificatesResponseBody(TeaModel):
    """Body of the DescribeCertificates response: request id plus a list of certificate entries."""
    def __init__(self, request_id=None, certificates=None):
        self.request_id = request_id  # type: str
        self.certificates = certificates  # type: list[DescribeCertificatesResponseBodyCertificates]
    def validate(self):
        if self.certificates:
            for k in self.certificates:
                if k:
                    k.validate()
    def to_map(self):
        _map = super(DescribeCertificatesResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['Certificates'] = []
        if self.certificates is not None:
            for k in self.certificates:
                result['Certificates'].append(k.to_map() if k else None)
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.certificates = []
        if m.get('Certificates') is not None:
            for k in m.get('Certificates'):
                temp_model = DescribeCertificatesResponseBodyCertificates()
                self.certificates.append(temp_model.from_map(k))
        return self
class DescribeCertificatesResponse(TeaModel):
    """Wrapper for the DescribeCertificates API response: HTTP headers plus the parsed body."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeCertificatesResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(DescribeCertificatesResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeCertificatesResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeCertMatchStatusRequest(TeaModel):
    """Request parameters for the DescribeCertMatchStatus API operation."""
    def __init__(self, domain=None, certificate=None, private_key=None, instance_id=None):
        self.domain = domain  # type: str
        self.certificate = certificate  # type: str
        self.private_key = private_key  # type: str
        self.instance_id = instance_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeCertMatchStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.domain is not None:
            result['Domain'] = self.domain
        if self.certificate is not None:
            result['Certificate'] = self.certificate
        if self.private_key is not None:
            result['PrivateKey'] = self.private_key
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        if m.get('Certificate') is not None:
            self.certificate = m.get('Certificate')
        if m.get('PrivateKey') is not None:
            self.private_key = m.get('PrivateKey')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        return self
class DescribeCertMatchStatusResponseBody(TeaModel):
    """Body of the DescribeCertMatchStatus response: request id and the boolean match status."""
    def __init__(self, request_id=None, match_status=None):
        self.request_id = request_id  # type: str
        self.match_status = match_status  # type: bool
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeCertMatchStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.match_status is not None:
            result['MatchStatus'] = self.match_status
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('MatchStatus') is not None:
            self.match_status = m.get('MatchStatus')
        return self
class DescribeCertMatchStatusResponse(TeaModel):
    """Wrapper for the DescribeCertMatchStatus API response: HTTP headers plus the parsed body."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeCertMatchStatusResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(DescribeCertMatchStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeCertMatchStatusResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeDomainRequest(TeaModel):
    """Request parameters for the DescribeDomain API operation."""
    def __init__(self, instance_id=None, domain=None):
        self.instance_id = instance_id  # type: str
        self.domain = domain  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeDomainRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.domain is not None:
            result['Domain'] = self.domain
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        return self
class DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs(TeaModel):
    """Protocol/port pair nested under `CloudNativeInstances` in the DescribeDomain response."""
    def __init__(self, protocol=None, ports=None):
        self.protocol = protocol  # type: str
        self.ports = ports  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.protocol is not None:
            result['Protocol'] = self.protocol
        if self.ports is not None:
            result['Ports'] = self.ports
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Protocol') is not None:
            self.protocol = m.get('Protocol')
        if m.get('Ports') is not None:
            self.ports = m.get('Ports')
        return self
class DescribeDomainResponseBodyDomainCloudNativeInstances(TeaModel):
    """One `CloudNativeInstances` entry nested in the DescribeDomain response body."""
    def __init__(self, protocol_port_configs=None, redirection_type_name=None, cloud_native_product_name=None,
                 instance_id=None, ipaddress_list=None):
        self.protocol_port_configs = protocol_port_configs  # type: list[DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs]
        self.redirection_type_name = redirection_type_name  # type: str
        self.cloud_native_product_name = cloud_native_product_name  # type: str
        self.instance_id = instance_id  # type: str
        self.ipaddress_list = ipaddress_list  # type: str
    def validate(self):
        if self.protocol_port_configs:
            for k in self.protocol_port_configs:
                if k:
                    k.validate()
    def to_map(self):
        _map = super(DescribeDomainResponseBodyDomainCloudNativeInstances, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        result['ProtocolPortConfigs'] = []
        if self.protocol_port_configs is not None:
            for k in self.protocol_port_configs:
                result['ProtocolPortConfigs'].append(k.to_map() if k else None)
        if self.redirection_type_name is not None:
            result['RedirectionTypeName'] = self.redirection_type_name
        if self.cloud_native_product_name is not None:
            result['CloudNativeProductName'] = self.cloud_native_product_name
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.ipaddress_list is not None:
            result['IPAddressList'] = self.ipaddress_list
        return result
    def from_map(self, m=None):
        m = m or dict()
        self.protocol_port_configs = []
        if m.get('ProtocolPortConfigs') is not None:
            for k in m.get('ProtocolPortConfigs'):
                temp_model = DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs()
                self.protocol_port_configs.append(temp_model.from_map(k))
        if m.get('RedirectionTypeName') is not None:
            self.redirection_type_name = m.get('RedirectionTypeName')
        if m.get('CloudNativeProductName') is not None:
            self.cloud_native_product_name = m.get('CloudNativeProductName')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('IPAddressList') is not None:
            self.ipaddress_list = m.get('IPAddressList')
        return self
class DescribeDomainResponseBodyDomainLogHeaders(TeaModel):
    """Key/value pair nested under `LogHeaders` in the DescribeDomain response."""
    def __init__(self, k=None, v=None):
        self.k = k  # type: str
        self.v = v  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeDomainResponseBodyDomainLogHeaders, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.k is not None:
            result['k'] = self.k
        if self.v is not None:
            result['v'] = self.v
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('k') is not None:
            self.k = m.get('k')
        if m.get('v') is not None:
            self.v = m.get('v')
        return self
class DescribeDomainResponseBodyDomain(TeaModel):
    """`Domain` section of the DescribeDomain response body.

    Holds the domain's port lists, back-to-origin settings, nested cloud-native
    instances and log headers, exactly as returned by the API.
    """
    def __init__(self, http_2port=None, cloud_native_instances=None, http_to_user_ip=None, http_port=None,
                 log_headers=None, is_access_product=None, access_headers=None, access_header_mode=None, https_redirect=None,
                 load_balancing=None, ip_follow_status=None, access_type=None, version=None, cluster_type=None, read_time=None,
                 write_time=None, resource_group_id=None, cname=None, source_ips=None, connection_time=None, https_port=None):
        self.http_2port = http_2port  # type: list[str]
        self.cloud_native_instances = cloud_native_instances  # type: list[DescribeDomainResponseBodyDomainCloudNativeInstances]
        self.http_to_user_ip = http_to_user_ip  # type: int
        self.http_port = http_port  # type: list[str]
        self.log_headers = log_headers  # type: list[DescribeDomainResponseBodyDomainLogHeaders]
        self.is_access_product = is_access_product  # type: int
        self.access_headers = access_headers  # type: list[str]
        self.access_header_mode = access_header_mode  # type: int
        self.https_redirect = https_redirect  # type: int
        self.load_balancing = load_balancing  # type: int
        self.ip_follow_status = ip_follow_status  # type: int
        self.access_type = access_type  # type: str
        self.version = version  # type: long
        self.cluster_type = cluster_type  # type: int
        self.read_time = read_time  # type: int
        self.write_time = write_time  # type: int
        self.resource_group_id = resource_group_id  # type: str
        self.cname = cname  # type: str
        self.source_ips = source_ips  # type: list[str]
        self.connection_time = connection_time  # type: int
        self.https_port = https_port  # type: list[str]
    def validate(self):
        if self.cloud_native_instances:
            for k in self.cloud_native_instances:
                if k:
                    k.validate()
        if self.log_headers:
            for k in self.log_headers:
                if k:
                    k.validate()
    def to_map(self):
        _map = super(DescribeDomainResponseBodyDomain, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.http_2port is not None:
            result['Http2Port'] = self.http_2port
        result['CloudNativeInstances'] = []
        if self.cloud_native_instances is not None:
            for k in self.cloud_native_instances:
                result['CloudNativeInstances'].append(k.to_map() if k else None)
        if self.http_to_user_ip is not None:
            result['HttpToUserIp'] = self.http_to_user_ip
        if self.http_port is not None:
            result['HttpPort'] = self.http_port
        result['LogHeaders'] = []
        if self.log_headers is not None:
            for k in self.log_headers:
                result['LogHeaders'].append(k.to_map() if k else None)
        if self.is_access_product is not None:
            result['IsAccessProduct'] = self.is_access_product
        if self.access_headers is not None:
            result['AccessHeaders'] = self.access_headers
        if self.access_header_mode is not None:
            result['AccessHeaderMode'] = self.access_header_mode
        if self.https_redirect is not None:
            result['HttpsRedirect'] = self.https_redirect
        if self.load_balancing is not None:
            result['LoadBalancing'] = self.load_balancing
        if self.ip_follow_status is not None:
            result['IpFollowStatus'] = self.ip_follow_status
        if self.access_type is not None:
            result['AccessType'] = self.access_type
        if self.version is not None:
            result['Version'] = self.version
        if self.cluster_type is not None:
            result['ClusterType'] = self.cluster_type
        if self.read_time is not None:
            result['ReadTime'] = self.read_time
        if self.write_time is not None:
            result['WriteTime'] = self.write_time
        if self.resource_group_id is not None:
            result['ResourceGroupId'] = self.resource_group_id
        if self.cname is not None:
            result['Cname'] = self.cname
        if self.source_ips is not None:
            result['SourceIps'] = self.source_ips
        if self.connection_time is not None:
            result['ConnectionTime'] = self.connection_time
        if self.https_port is not None:
            result['HttpsPort'] = self.https_port
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Http2Port') is not None:
            self.http_2port = m.get('Http2Port')
        self.cloud_native_instances = []
        if m.get('CloudNativeInstances') is not None:
            for k in m.get('CloudNativeInstances'):
                temp_model = DescribeDomainResponseBodyDomainCloudNativeInstances()
                self.cloud_native_instances.append(temp_model.from_map(k))
        if m.get('HttpToUserIp') is not None:
            self.http_to_user_ip = m.get('HttpToUserIp')
        if m.get('HttpPort') is not None:
            self.http_port = m.get('HttpPort')
        self.log_headers = []
        if m.get('LogHeaders') is not None:
            for k in m.get('LogHeaders'):
                temp_model = DescribeDomainResponseBodyDomainLogHeaders()
                self.log_headers.append(temp_model.from_map(k))
        if m.get('IsAccessProduct') is not None:
            self.is_access_product = m.get('IsAccessProduct')
        if m.get('AccessHeaders') is not None:
            self.access_headers = m.get('AccessHeaders')
        if m.get('AccessHeaderMode') is not None:
            self.access_header_mode = m.get('AccessHeaderMode')
        if m.get('HttpsRedirect') is not None:
            self.https_redirect = m.get('HttpsRedirect')
        if m.get('LoadBalancing') is not None:
            self.load_balancing = m.get('LoadBalancing')
        if m.get('IpFollowStatus') is not None:
            self.ip_follow_status = m.get('IpFollowStatus')
        if m.get('AccessType') is not None:
            self.access_type = m.get('AccessType')
        if m.get('Version') is not None:
            self.version = m.get('Version')
        if m.get('ClusterType') is not None:
            self.cluster_type = m.get('ClusterType')
        if m.get('ReadTime') is not None:
            self.read_time = m.get('ReadTime')
        if m.get('WriteTime') is not None:
            self.write_time = m.get('WriteTime')
        if m.get('ResourceGroupId') is not None:
            self.resource_group_id = m.get('ResourceGroupId')
        if m.get('Cname') is not None:
            self.cname = m.get('Cname')
        if m.get('SourceIps') is not None:
            self.source_ips = m.get('SourceIps')
        if m.get('ConnectionTime') is not None:
            self.connection_time = m.get('ConnectionTime')
        if m.get('HttpsPort') is not None:
            self.https_port = m.get('HttpsPort')
        return self
class DescribeDomainResponseBody(TeaModel):
    """Body of the DescribeDomain response: request id plus the nested `Domain` model."""
    def __init__(self, request_id=None, domain=None):
        self.request_id = request_id  # type: str
        self.domain = domain  # type: DescribeDomainResponseBodyDomain
    def validate(self):
        if self.domain:
            self.domain.validate()
    def to_map(self):
        _map = super(DescribeDomainResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.domain is not None:
            result['Domain'] = self.domain.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Domain') is not None:
            temp_model = DescribeDomainResponseBodyDomain()
            self.domain = temp_model.from_map(m['Domain'])
        return self
class DescribeDomainResponse(TeaModel):
    """Wrapper for the DescribeDomain API response: HTTP headers plus the parsed body."""
    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeDomainResponseBody
    def validate(self):
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super(DescribeDomainResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = DescribeDomainResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class DescribeDomainAdvanceConfigsRequest(TeaModel):
    """Request parameters for the DescribeDomainAdvanceConfigs API operation."""
    def __init__(self, instance_id=None, domain_list=None, resource_group_id=None):
        self.instance_id = instance_id  # type: str
        self.domain_list = domain_list  # type: str
        self.resource_group_id = resource_group_id  # type: str
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeDomainAdvanceConfigsRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.domain_list is not None:
            result['DomainList'] = self.domain_list
        if self.resource_group_id is not None:
            result['ResourceGroupId'] = self.resource_group_id
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('DomainList') is not None:
            self.domain_list = m.get('DomainList')
        if m.get('ResourceGroupId') is not None:
            self.resource_group_id = m.get('ResourceGroupId')
        return self
class DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile(TeaModel):
    """`Profile` section of one `DomainConfigs` entry in the DescribeDomainAdvanceConfigs response."""
    def __init__(self, http_2port=None, ipv_6status=None, http_port=None, gslbstatus=None, rs=None,
                 vip_service_status=None, cluster_type=None, exclusive_vip_status=None, cname=None, cert_status=None, https_port=None,
                 resolved_type=None):
        self.http_2port = http_2port  # type: str
        self.ipv_6status = ipv_6status  # type: int
        self.http_port = http_port  # type: str
        self.gslbstatus = gslbstatus  # type: str
        self.rs = rs  # type: str
        self.vip_service_status = vip_service_status  # type: int
        self.cluster_type = cluster_type  # type: int
        self.exclusive_vip_status = exclusive_vip_status  # type: int
        self.cname = cname  # type: str
        self.cert_status = cert_status  # type: int
        self.https_port = https_port  # type: str
        self.resolved_type = resolved_type  # type: int
    def validate(self):
        pass
    def to_map(self):
        _map = super(DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.http_2port is not None:
            result['Http2Port'] = self.http_2port
        if self.ipv_6status is not None:
            result['Ipv6Status'] = self.ipv_6status
        if self.http_port is not None:
            result['HttpPort'] = self.http_port
        if self.gslbstatus is not None:
            result['GSLBStatus'] = self.gslbstatus
        if self.rs is not None:
            result['Rs'] = self.rs
        if self.vip_service_status is not None:
            result['VipServiceStatus'] = self.vip_service_status
        if self.cluster_type is not None:
            result['ClusterType'] = self.cluster_type
        if self.exclusive_vip_status is not None:
            result['ExclusiveVipStatus'] = self.exclusive_vip_status
        if self.cname is not None:
            result['Cname'] = self.cname
        if self.cert_status is not None:
            result['CertStatus'] = self.cert_status
        if self.https_port is not None:
            result['HttpsPort'] = self.https_port
        if self.resolved_type is not None:
            result['ResolvedType'] = self.resolved_type
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Http2Port') is not None:
            self.http_2port = m.get('Http2Port')
        if m.get('Ipv6Status') is not None:
            self.ipv_6status = m.get('Ipv6Status')
        if m.get('HttpPort') is not None:
            self.http_port = m.get('HttpPort')
        if m.get('GSLBStatus') is not None:
            self.gslbstatus = m.get('GSLBStatus')
        if m.get('Rs') is not None:
            self.rs = m.get('Rs')
        if m.get('VipServiceStatus') is not None:
            self.vip_service_status = m.get('VipServiceStatus')
        if m.get('ClusterType') is not None:
            self.cluster_type = m.get('ClusterType')
        if m.get('ExclusiveVipStatus') is not None:
            self.exclusive_vip_status = m.get('ExclusiveVipStatus')
        if m.get('Cname') is not None:
            self.cname = m.get('Cname')
        if m.get('CertStatus') is not None:
            self.cert_status = m.get('CertStatus')
        if m.get('HttpsPort') is not None:
            self.https_port = m.get('HttpsPort')
        if m.get('ResolvedType') is not None:
            self.resolved_type = m.get('ResolvedType')
        return self
class DescribeDomainAdvanceConfigsResponseBodyDomainConfigs(TeaModel):
    """One `DomainConfigs` entry in the DescribeDomainAdvanceConfigs response body."""
    def __init__(self, profile=None, domain=None):
        self.profile = profile  # type: DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile
        self.domain = domain  # type: str
    def validate(self):
        if self.profile:
            self.profile.validate()
    def to_map(self):
        _map = super(DescribeDomainAdvanceConfigsResponseBodyDomainConfigs, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.profile is not None:
            result['Profile'] = self.profile.to_map()
        if self.domain is not None:
            result['Domain'] = self.domain
        return result
    def from_map(self, m=None):
        m = m or dict()
        if m.get('Profile') is not None:
            temp_model = DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile()
            self.profile = temp_model.from_map(m['Profile'])
        if m.get('Domain') is not None:
            self.domain = m.get('Domain')
        return self
class DescribeDomainAdvanceConfigsResponseBody(TeaModel):
    """Response body of DescribeDomainAdvanceConfigs."""

    def __init__(self, request_id=None, domain_configs=None):
        self.request_id = request_id  # type: str
        self.domain_configs = domain_configs  # type: list[DescribeDomainAdvanceConfigsResponseBodyDomainConfigs]

    def validate(self):
        # Cascade validation into every non-empty config entry.
        for config in self.domain_configs or []:
            if config:
                config.validate()

    def to_map(self):
        """Serialize to a plain dict; 'DomainConfigs' is always emitted as a list."""
        _map = super(DescribeDomainAdvanceConfigsResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['DomainConfigs'] = []
        if self.domain_configs is not None:
            result['DomainConfigs'] = [
                config.to_map() if config else None
                for config in self.domain_configs
            ]
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; the config list is always reset."""
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.domain_configs = []
        if m.get('DomainConfigs') is not None:
            for raw in m.get('DomainConfigs'):
                self.domain_configs.append(
                    DescribeDomainAdvanceConfigsResponseBodyDomainConfigs().from_map(raw))
        return self
class DescribeDomainAdvanceConfigsResponse(TeaModel):
    """Wrapper pairing the raw HTTP headers with the parsed response body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeDomainAdvanceConfigsResponseBody

    def validate(self):
        # Both halves of the response are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body is serialized recursively."""
        _map = super(DescribeDomainAdvanceConfigsResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainAdvanceConfigsResponseBody().from_map(m['body'])
        return self
class DescribeDomainBasicConfigsRequest(TeaModel):
    """Request parameters for the DescribeDomainBasicConfigs API."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('instance_id', 'InstanceId'),
        ('domain_key', 'DomainKey'),
        ('access_type', 'AccessType'),
        ('cloud_native_product_id', 'CloudNativeProductId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, domain_key=None, access_type=None, cloud_native_product_id=None,
                 page_number=None, page_size=None, resource_group_id=None):
        self.instance_id = instance_id  # type: str
        self.domain_key = domain_key  # type: str
        self.access_type = access_type  # type: str
        self.cloud_native_product_id = cloud_native_product_id  # type: int
        self.page_number = page_number  # type: int
        self.page_size = page_size  # type: int
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeDomainBasicConfigsRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainBasicConfigsResponseBodyDomainConfigs(TeaModel):
    """Basic configuration entry for a single protected domain."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('status', 'Status'),
        ('domain', 'Domain'),
        ('owner', 'Owner'),
        ('cc_mode', 'CcMode'),
        ('cc_status', 'CcStatus'),
        ('access_type', 'AccessType'),
        ('version', 'Version'),
        ('acl_status', 'AclStatus'),
        ('waf_status', 'WafStatus'),
        ('waf_mode', 'WafMode'),
    )

    def __init__(self, status=None, domain=None, owner=None, cc_mode=None, cc_status=None, access_type=None,
                 version=None, acl_status=None, waf_status=None, waf_mode=None):
        self.status = status  # type: int
        self.domain = domain  # type: str
        self.owner = owner  # type: str
        self.cc_mode = cc_mode  # type: int
        self.cc_status = cc_status  # type: int
        self.access_type = access_type  # type: str
        self.version = version  # type: long
        self.acl_status = acl_status  # type: int
        self.waf_status = waf_status  # type: int
        self.waf_mode = waf_mode  # type: int

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeDomainBasicConfigsResponseBodyDomainConfigs, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainBasicConfigsResponseBody(TeaModel):
    """Response body of DescribeDomainBasicConfigs (paged domain configs)."""

    def __init__(self, total_count=None, request_id=None, domain_configs=None):
        self.total_count = total_count  # type: int
        self.request_id = request_id  # type: str
        self.domain_configs = domain_configs  # type: list[DescribeDomainBasicConfigsResponseBodyDomainConfigs]

    def validate(self):
        # Cascade validation into every non-empty config entry.
        for config in self.domain_configs or []:
            if config:
                config.validate()

    def to_map(self):
        """Serialize to a plain dict; 'DomainConfigs' is always emitted as a list."""
        _map = super(DescribeDomainBasicConfigsResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['DomainConfigs'] = []
        if self.domain_configs is not None:
            result['DomainConfigs'] = [
                config.to_map() if config else None
                for config in self.domain_configs
            ]
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; the config list is always reset."""
        m = m or dict()
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.domain_configs = []
        if m.get('DomainConfigs') is not None:
            for raw in m.get('DomainConfigs'):
                self.domain_configs.append(
                    DescribeDomainBasicConfigsResponseBodyDomainConfigs().from_map(raw))
        return self
class DescribeDomainBasicConfigsResponse(TeaModel):
    """Wrapper pairing the raw HTTP headers with the parsed response body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeDomainBasicConfigsResponseBody

    def validate(self):
        # Both halves of the response are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body is serialized recursively."""
        _map = super(DescribeDomainBasicConfigsResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainBasicConfigsResponseBody().from_map(m['body'])
        return self
class DescribeDomainListRequest(TeaModel):
    """Request parameters for the DescribeDomainList API."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('resource_group_id', 'ResourceGroupId'),
        ('instance_id', 'InstanceId'),
        ('domain_name', 'DomainName'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('is_sub', 'IsSub'),
        ('domain_names', 'DomainNames'),
    )

    def __init__(self, resource_group_id=None, instance_id=None, domain_name=None, page_number=None, page_size=None,
                 is_sub=None, domain_names=None):
        self.resource_group_id = resource_group_id  # type: str
        self.instance_id = instance_id  # type: str
        self.domain_name = domain_name  # type: str
        self.page_number = page_number  # type: int
        self.page_size = page_size  # type: int
        self.is_sub = is_sub  # type: int
        self.domain_names = domain_names  # type: list[str]

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeDomainListRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainListResponseBody(TeaModel):
    """Response body of DescribeDomainList (total count plus domain names)."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('total_count', 'TotalCount'),
        ('request_id', 'RequestId'),
        ('domain_names', 'DomainNames'),
    )

    def __init__(self, total_count=None, request_id=None, domain_names=None):
        self.total_count = total_count  # type: int
        self.request_id = request_id  # type: str
        self.domain_names = domain_names  # type: list[str]

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeDomainListResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainListResponse(TeaModel):
    """Wrapper pairing the raw HTTP headers with the parsed response body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeDomainListResponseBody

    def validate(self):
        # Both halves of the response are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body is serialized recursively."""
        _map = super(DescribeDomainListResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainListResponseBody().from_map(m['body'])
        return self
class DescribeDomainNamesRequest(TeaModel):
    """Request parameters for the DescribeDomainNames API."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeDomainNamesRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainNamesResponseBody(TeaModel):
    """Response body of DescribeDomainNames."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('request_id', 'RequestId'),
        ('domain_names', 'DomainNames'),
    )

    def __init__(self, request_id=None, domain_names=None):
        self.request_id = request_id  # type: str
        self.domain_names = domain_names  # type: list[str]

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeDomainNamesResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainNamesResponse(TeaModel):
    """Wrapper pairing the raw HTTP headers with the parsed response body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeDomainNamesResponseBody

    def validate(self):
        # Both halves of the response are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body is serialized recursively."""
        _map = super(DescribeDomainNamesResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainNamesResponseBody().from_map(m['body'])
        return self
class DescribeDomainRuleGroupRequest(TeaModel):
    """Request parameters for the DescribeDomainRuleGroup API."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('domain', 'Domain'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, instance_id=None):
        self.domain = domain  # type: str
        self.instance_id = instance_id  # type: str

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeDomainRuleGroupRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainRuleGroupResponseBody(TeaModel):
    """Response body of DescribeDomainRuleGroup."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('rule_group_id', 'RuleGroupId'),
        ('request_id', 'RequestId'),
    )

    def __init__(self, rule_group_id=None, request_id=None):
        self.rule_group_id = rule_group_id  # type: long
        self.request_id = request_id  # type: str

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeDomainRuleGroupResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDomainRuleGroupResponse(TeaModel):
    """Wrapper pairing the raw HTTP headers with the parsed response body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeDomainRuleGroupResponseBody

    def validate(self):
        # Both halves of the response are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body is serialized recursively."""
        _map = super(DescribeDomainRuleGroupResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeDomainRuleGroupResponseBody().from_map(m['body'])
        return self
class DescribeInstanceInfoRequest(TeaModel):
    """Request parameters for the DescribeInstanceInfo API."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeInstanceInfoRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfoResponseBodyInstanceInfo(TeaModel):
    """Details of a single WAF instance (billing, region, lifecycle state)."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('status', 'Status'),
        ('end_date', 'EndDate'),
        ('version', 'Version'),
        ('remain_day', 'RemainDay'),
        ('region', 'Region'),
        ('pay_type', 'PayType'),
        ('in_debt', 'InDebt'),
        ('instance_id', 'InstanceId'),
        ('subscription_type', 'SubscriptionType'),
        ('trial', 'Trial'),
    )

    def __init__(self, status=None, end_date=None, version=None, remain_day=None, region=None, pay_type=None,
                 in_debt=None, instance_id=None, subscription_type=None, trial=None):
        self.status = status  # type: int
        self.end_date = end_date  # type: long
        self.version = version  # type: str
        self.remain_day = remain_day  # type: int
        self.region = region  # type: str
        self.pay_type = pay_type  # type: int
        self.in_debt = in_debt  # type: int
        self.instance_id = instance_id  # type: str
        self.subscription_type = subscription_type  # type: str
        self.trial = trial  # type: int

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeInstanceInfoResponseBodyInstanceInfo, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfoResponseBody(TeaModel):
    """Response body of DescribeInstanceInfo."""

    def __init__(self, request_id=None, instance_info=None):
        self.request_id = request_id  # type: str
        self.instance_info = instance_info  # type: DescribeInstanceInfoResponseBodyInstanceInfo

    def validate(self):
        # Cascade validation into the nested instance info when present.
        if self.instance_info:
            self.instance_info.validate()

    def to_map(self):
        """Serialize to a plain dict; the nested model is serialized recursively."""
        _map = super(DescribeInstanceInfoResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.instance_info is not None:
            result['InstanceInfo'] = self.instance_info.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('InstanceInfo') is not None:
            self.instance_info = DescribeInstanceInfoResponseBodyInstanceInfo().from_map(m['InstanceInfo'])
        return self
class DescribeInstanceInfoResponse(TeaModel):
    """Wrapper pairing the raw HTTP headers with the parsed response body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeInstanceInfoResponseBody

    def validate(self):
        # Both halves of the response are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body is serialized recursively."""
        _map = super(DescribeInstanceInfoResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceInfoResponseBody().from_map(m['body'])
        return self
class DescribeInstanceInfosRequest(TeaModel):
    """Request parameters for the DescribeInstanceInfos API."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('instance_source', 'InstanceSource'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_source=None, instance_id=None, resource_group_id=None):
        self.instance_source = instance_source  # type: str
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeInstanceInfosRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfosResponseBodyInstanceInfos(TeaModel):
    """Details of one WAF instance in a DescribeInstanceInfos listing."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('status', 'Status'),
        ('end_date', 'EndDate'),
        ('remain_day', 'RemainDay'),
        ('region', 'Region'),
        ('pay_type', 'PayType'),
        ('in_debt', 'InDebt'),
        ('instance_id', 'InstanceId'),
        ('subscription_type', 'SubscriptionType'),
        ('trial', 'Trial'),
    )

    def __init__(self, status=None, end_date=None, remain_day=None, region=None, pay_type=None, in_debt=None,
                 instance_id=None, subscription_type=None, trial=None):
        self.status = status  # type: int
        self.end_date = end_date  # type: long
        self.remain_day = remain_day  # type: int
        self.region = region  # type: str
        self.pay_type = pay_type  # type: int
        self.in_debt = in_debt  # type: int
        self.instance_id = instance_id  # type: str
        self.subscription_type = subscription_type  # type: str
        self.trial = trial  # type: int

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeInstanceInfosResponseBodyInstanceInfos, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceInfosResponseBody(TeaModel):
    """Response body of DescribeInstanceInfos."""

    def __init__(self, request_id=None, instance_infos=None):
        self.request_id = request_id  # type: str
        self.instance_infos = instance_infos  # type: list[DescribeInstanceInfosResponseBodyInstanceInfos]

    def validate(self):
        # Cascade validation into every non-empty instance entry.
        for info in self.instance_infos or []:
            if info:
                info.validate()

    def to_map(self):
        """Serialize to a plain dict; 'InstanceInfos' is always emitted as a list."""
        _map = super(DescribeInstanceInfosResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['InstanceInfos'] = []
        if self.instance_infos is not None:
            result['InstanceInfos'] = [
                info.to_map() if info else None
                for info in self.instance_infos
            ]
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; the instance list is always reset."""
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.instance_infos = []
        if m.get('InstanceInfos') is not None:
            for raw in m.get('InstanceInfos'):
                self.instance_infos.append(
                    DescribeInstanceInfosResponseBodyInstanceInfos().from_map(raw))
        return self
class DescribeInstanceInfosResponse(TeaModel):
    """Wrapper pairing the raw HTTP headers with the parsed response body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeInstanceInfosResponseBody

    def validate(self):
        # Both halves of the response are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body is serialized recursively."""
        _map = super(DescribeInstanceInfosResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceInfosResponseBody().from_map(m['body'])
        return self
class DescribeInstanceSpecInfoRequest(TeaModel):
    """Request parameters for the DescribeInstanceSpecInfo API."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeInstanceSpecInfoRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos(TeaModel):
    """A single (code, value) specification entry of a WAF instance."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('value', 'Value'),
        ('code', 'Code'),
    )

    def __init__(self, value=None, code=None):
        self.value = value  # type: str
        self.code = code  # type: str

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeInstanceSpecInfoResponseBody(TeaModel):
    """Response body of DescribeInstanceSpecInfo."""

    def __init__(self, instance_spec_infos=None, request_id=None, instance_id=None, version=None, expire_time=None):
        self.instance_spec_infos = instance_spec_infos  # type: list[DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos]
        self.request_id = request_id  # type: str
        self.instance_id = instance_id  # type: str
        self.version = version  # type: str
        self.expire_time = expire_time  # type: long

    def validate(self):
        # Cascade validation into every non-empty spec entry.
        for spec in self.instance_spec_infos or []:
            if spec:
                spec.validate()

    def to_map(self):
        """Serialize to a plain dict; 'InstanceSpecInfos' is always emitted first, as a list."""
        _map = super(DescribeInstanceSpecInfoResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        result['InstanceSpecInfos'] = []
        if self.instance_spec_infos is not None:
            result['InstanceSpecInfos'] = [
                spec.to_map() if spec else None
                for spec in self.instance_spec_infos
            ]
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.instance_id is not None:
            result['InstanceId'] = self.instance_id
        if self.version is not None:
            result['Version'] = self.version
        if self.expire_time is not None:
            result['ExpireTime'] = self.expire_time
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; the spec list is always reset."""
        m = m or dict()
        self.instance_spec_infos = []
        if m.get('InstanceSpecInfos') is not None:
            for raw in m.get('InstanceSpecInfos'):
                self.instance_spec_infos.append(
                    DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos().from_map(raw))
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('Version') is not None:
            self.version = m.get('Version')
        if m.get('ExpireTime') is not None:
            self.expire_time = m.get('ExpireTime')
        return self
class DescribeInstanceSpecInfoResponse(TeaModel):
    """Wrapper pairing the raw HTTP headers with the parsed response body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeInstanceSpecInfoResponseBody

    def validate(self):
        # Both halves of the response are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body is serialized recursively."""
        _map = super(DescribeInstanceSpecInfoResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeInstanceSpecInfoResponseBody().from_map(m['body'])
        return self
class DescribeLogServiceStatusRequest(TeaModel):
    """Request parameters for the DescribeLogServiceStatus API."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('instance_id', 'InstanceId'),
        ('region', 'Region'),
        ('resource_group_id', 'ResourceGroupId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('domain_names', 'DomainNames'),
    )

    def __init__(self, instance_id=None, region=None, resource_group_id=None, page_number=None, page_size=None,
                 domain_names=None):
        self.instance_id = instance_id  # type: str
        self.region = region  # type: str
        self.resource_group_id = resource_group_id  # type: str
        self.page_number = page_number  # type: int
        self.page_size = page_size  # type: int
        self.domain_names = domain_names  # type: list[str]

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeLogServiceStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeLogServiceStatusResponseBodyDomainStatus(TeaModel):
    """Log-service activation status for a single domain."""

    # (python attribute, wire key) pairs in the order the API emits them.
    _WIRE_FIELDS = (
        ('domain', 'Domain'),
        ('sls_log_active', 'SlsLogActive'),
    )

    def __init__(self, domain=None, sls_log_active=None):
        self.domain = domain  # type: str
        self.sls_log_active = sls_log_active  # type: int

    def validate(self):
        # No per-field constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a plain dict; fields left as None are omitted."""
        _map = super(DescribeLogServiceStatusResponseBodyDomainStatus, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._WIRE_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a plain dict; absent keys are left untouched."""
        m = m or dict()
        for attr, key in self._WIRE_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeLogServiceStatusResponseBody(TeaModel):
    """Response payload for DescribeLogServiceStatus: total count plus per-domain statuses."""

    def __init__(self, total_count=None, request_id=None, domain_status=None):
        self.total_count = total_count  # type: int
        self.request_id = request_id  # type: str
        self.domain_status = domain_status  # type: list[DescribeLogServiceStatusResponseBodyDomainStatus]

    def validate(self):
        """Recursively validate each nested domain-status entry."""
        if self.domain_status:
            for entry in self.domain_status:
                if entry:
                    entry.validate()

    def to_map(self):
        """Serialize scalars when set; DomainStatus is always emitted (possibly empty)."""
        _map = super(DescribeLogServiceStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['DomainStatus'] = []
        if self.domain_status is not None:
            result['DomainStatus'] = [entry.to_map() if entry else None for entry in self.domain_status]
        return result

    def from_map(self, m=None):
        """Rebuild from a wire-format dict, parsing each nested domain-status model."""
        m = m or dict()
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.domain_status = []
        if m.get('DomainStatus') is not None:
            self.domain_status = [
                DescribeLogServiceStatusResponseBodyDomainStatus().from_map(entry)
                for entry in m.get('DomainStatus')
            ]
        return self
class DescribeLogServiceStatusResponse(TeaModel):
    """Full DescribeLogServiceStatus response: transport headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeLogServiceStatusResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(DescribeLogServiceStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeLogServiceStatusResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleCodeConfigRequest(TeaModel):
    """Request parameters for DescribeProtectionModuleCodeConfig."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELD_MAP = (
        ('source_ip', 'SourceIp'),
        ('lang', 'Lang'),
        ('code_type', 'CodeType'),
        ('code_value', 'CodeValue'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, source_ip=None, lang=None, code_type=None, code_value=None, instance_id=None,
                 resource_group_id=None):
        self.source_ip = source_ip  # type: str
        self.lang = lang  # type: str
        self.code_type = code_type  # type: int
        self.code_value = code_value  # type: int
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeProtectionModuleCodeConfigRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleCodeConfigResponseBody(TeaModel):
    """Response payload for DescribeProtectionModuleCodeConfig."""

    _FIELD_MAP = (
        ('request_id', 'RequestId'),
        ('code_configs', 'CodeConfigs'),
    )

    def __init__(self, request_id=None, code_configs=None):
        self.request_id = request_id  # type: str
        self.code_configs = code_configs  # type: str - config payload as a string

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeProtectionModuleCodeConfigResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleCodeConfigResponse(TeaModel):
    """Full DescribeProtectionModuleCodeConfig response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeProtectionModuleCodeConfigResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(DescribeProtectionModuleCodeConfigResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleCodeConfigResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleModeRequest(TeaModel):
    """Request parameters for DescribeProtectionModuleMode."""

    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, domain=None, defense_type=None, instance_id=None, resource_group_id=None):
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeProtectionModuleModeRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleModeResponseBody(TeaModel):
    """Response payload for DescribeProtectionModuleMode."""

    _FIELD_MAP = (
        ('learn_status', 'LearnStatus'),
        ('request_id', 'RequestId'),
        ('mode', 'Mode'),
    )

    def __init__(self, learn_status=None, request_id=None, mode=None):
        self.learn_status = learn_status  # type: int
        self.request_id = request_id  # type: str
        self.mode = mode  # type: int

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeProtectionModuleModeResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleModeResponse(TeaModel):
    """Full DescribeProtectionModuleMode response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeProtectionModuleModeResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(DescribeProtectionModuleModeResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleModeResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleRulesRequest(TeaModel):
    """Request parameters for DescribeProtectionModuleRules (paged rule listing)."""

    _FIELD_MAP = (
        ('page_size', 'PageSize'),
        ('page_number', 'PageNumber'),
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('query', 'Query'),
        ('lang', 'Lang'),
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, page_size=None, page_number=None, domain=None, defense_type=None, query=None, lang=None,
                 instance_id=None, resource_group_id=None):
        self.page_size = page_size  # type: int
        self.page_number = page_number  # type: int
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.query = query  # type: str
        self.lang = lang  # type: str
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeProtectionModuleRulesRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleRulesResponseBodyRules(TeaModel):
    """Single rule entry returned by DescribeProtectionModuleRules."""

    _FIELD_MAP = (
        ('status', 'Status'),
        ('time', 'Time'),
        ('version', 'Version'),
        ('content', 'Content'),
        ('rule_id', 'RuleId'),
    )

    def __init__(self, status=None, time=None, version=None, content=None, rule_id=None):
        self.status = status  # type: long
        self.time = time  # type: long
        self.version = version  # type: long
        self.content = content  # type: dict[str, any] - raw rule definition
        self.rule_id = rule_id  # type: long

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeProtectionModuleRulesResponseBodyRules, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleRulesResponseBody(TeaModel):
    """Response payload for DescribeProtectionModuleRules: total count plus rule list."""

    def __init__(self, total_count=None, request_id=None, rules=None):
        self.total_count = total_count  # type: int
        self.request_id = request_id  # type: str
        self.rules = rules  # type: list[DescribeProtectionModuleRulesResponseBodyRules]

    def validate(self):
        """Recursively validate each nested rule entry."""
        if self.rules:
            for entry in self.rules:
                if entry:
                    entry.validate()

    def to_map(self):
        """Serialize scalars when set; Rules is always emitted (possibly empty)."""
        _map = super(DescribeProtectionModuleRulesResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['Rules'] = []
        if self.rules is not None:
            result['Rules'] = [entry.to_map() if entry else None for entry in self.rules]
        return result

    def from_map(self, m=None):
        """Rebuild from a wire-format dict, parsing each nested rule model."""
        m = m or dict()
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.rules = []
        if m.get('Rules') is not None:
            self.rules = [
                DescribeProtectionModuleRulesResponseBodyRules().from_map(entry)
                for entry in m.get('Rules')
            ]
        return self
class DescribeProtectionModuleRulesResponse(TeaModel):
    """Full DescribeProtectionModuleRules response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeProtectionModuleRulesResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(DescribeProtectionModuleRulesResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleRulesResponseBody().from_map(m['body'])
        return self
class DescribeProtectionModuleStatusRequest(TeaModel):
    """Request parameters for DescribeProtectionModuleStatus."""

    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, instance_id=None):
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.instance_id = instance_id  # type: str

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeProtectionModuleStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleStatusResponseBody(TeaModel):
    """Response payload for DescribeProtectionModuleStatus."""

    _FIELD_MAP = (
        ('request_id', 'RequestId'),
        ('module_status', 'ModuleStatus'),
    )

    def __init__(self, request_id=None, module_status=None):
        self.request_id = request_id  # type: str
        self.module_status = module_status  # type: int

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeProtectionModuleStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeProtectionModuleStatusResponse(TeaModel):
    """Full DescribeProtectionModuleStatus response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeProtectionModuleStatusResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(DescribeProtectionModuleStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeProtectionModuleStatusResponseBody().from_map(m['body'])
        return self
class DescribeWafSourceIpSegmentRequest(TeaModel):
    """Request parameters for DescribeWafSourceIpSegment."""

    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('resource_group_id', 'ResourceGroupId'),
    )

    def __init__(self, instance_id=None, resource_group_id=None):
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeWafSourceIpSegmentRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWafSourceIpSegmentResponseBody(TeaModel):
    """Response payload for DescribeWafSourceIpSegment: back-to-origin IP ranges as strings."""

    _FIELD_MAP = (
        ('request_id', 'RequestId'),
        ('ip_v6s', 'IpV6s'),
        ('ips', 'Ips'),
    )

    def __init__(self, request_id=None, ip_v6s=None, ips=None):
        self.request_id = request_id  # type: str
        self.ip_v6s = ip_v6s  # type: str
        self.ips = ips  # type: str

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(DescribeWafSourceIpSegmentResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWafSourceIpSegmentResponse(TeaModel):
    """Full DescribeWafSourceIpSegment response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: DescribeWafSourceIpSegmentResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(DescribeWafSourceIpSegmentResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = DescribeWafSourceIpSegmentResponseBody().from_map(m['body'])
        return self
class ModifyDomainRequest(TeaModel):
    """Request parameters for ModifyDomain: updates a domain's WAF access configuration."""

    # (python attribute, wire key) pairs shared by to_map/from_map.
    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('source_ips', 'SourceIps'),
        ('load_balancing', 'LoadBalancing'),
        ('http_port', 'HttpPort'),
        ('https_port', 'HttpsPort'),
        ('http_2port', 'Http2Port'),
        ('https_redirect', 'HttpsRedirect'),
        ('http_to_user_ip', 'HttpToUserIp'),
        ('is_access_product', 'IsAccessProduct'),
        ('log_headers', 'LogHeaders'),
        ('cluster_type', 'ClusterType'),
        ('connection_time', 'ConnectionTime'),
        ('read_time', 'ReadTime'),
        ('write_time', 'WriteTime'),
        ('access_type', 'AccessType'),
        ('cloud_native_instances', 'CloudNativeInstances'),
        ('ip_follow_status', 'IpFollowStatus'),
    )

    def __init__(self, instance_id=None, domain=None, source_ips=None, load_balancing=None, http_port=None,
                 https_port=None, http_2port=None, https_redirect=None, http_to_user_ip=None, is_access_product=None,
                 log_headers=None, cluster_type=None, connection_time=None, read_time=None, write_time=None,
                 access_type=None, cloud_native_instances=None, ip_follow_status=None):
        self.instance_id = instance_id  # type: str
        self.domain = domain  # type: str
        self.source_ips = source_ips  # type: str
        self.load_balancing = load_balancing  # type: int
        self.http_port = http_port  # type: str
        self.https_port = https_port  # type: str
        self.http_2port = http_2port  # type: str
        self.https_redirect = https_redirect  # type: int
        self.http_to_user_ip = http_to_user_ip  # type: int
        self.is_access_product = is_access_product  # type: int
        self.log_headers = log_headers  # type: str
        self.cluster_type = cluster_type  # type: int
        self.connection_time = connection_time  # type: int
        self.read_time = read_time  # type: int
        self.write_time = write_time  # type: int
        self.access_type = access_type  # type: str
        self.cloud_native_instances = cloud_native_instances  # type: str
        self.ip_follow_status = ip_follow_status  # type: int

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(ModifyDomainRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyDomainResponseBody(TeaModel):
    """Response payload for ModifyDomain: carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize the request id when present."""
        _map = super(ModifyDomainResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        """Read the request id back from a wire-format dict."""
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyDomainResponse(TeaModel):
    """Full ModifyDomain response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyDomainResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(ModifyDomainResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyDomainResponseBody().from_map(m['body'])
        return self
class ModifyDomainIpv6StatusRequest(TeaModel):
    """Request parameters for ModifyDomainIpv6Status."""

    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id  # type: str
        self.domain = domain  # type: str
        self.enabled = enabled  # type: str

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(ModifyDomainIpv6StatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyDomainIpv6StatusResponseBody(TeaModel):
    """Response payload for ModifyDomainIpv6Status: carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize the request id when present."""
        _map = super(ModifyDomainIpv6StatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        """Read the request id back from a wire-format dict."""
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyDomainIpv6StatusResponse(TeaModel):
    """Full ModifyDomainIpv6Status response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyDomainIpv6StatusResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(ModifyDomainIpv6StatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyDomainIpv6StatusResponseBody().from_map(m['body'])
        return self
class ModifyLogRetrievalStatusRequest(TeaModel):
    """Request parameters for ModifyLogRetrievalStatus."""

    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id  # type: str
        self.domain = domain  # type: str
        self.enabled = enabled  # type: int

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(ModifyLogRetrievalStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyLogRetrievalStatusResponseBody(TeaModel):
    """Response payload for ModifyLogRetrievalStatus: carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize the request id when present."""
        _map = super(ModifyLogRetrievalStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        """Read the request id back from a wire-format dict."""
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyLogRetrievalStatusResponse(TeaModel):
    """Full ModifyLogRetrievalStatus response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyLogRetrievalStatusResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(ModifyLogRetrievalStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyLogRetrievalStatusResponseBody().from_map(m['body'])
        return self
class ModifyLogServiceStatusRequest(TeaModel):
    """Request parameters for ModifyLogServiceStatus."""

    _FIELD_MAP = (
        ('instance_id', 'InstanceId'),
        ('domain', 'Domain'),
        ('enabled', 'Enabled'),
    )

    def __init__(self, instance_id=None, domain=None, enabled=None):
        self.instance_id = instance_id  # type: str
        self.domain = domain  # type: str
        self.enabled = enabled  # type: int

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(ModifyLogServiceStatusRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyLogServiceStatusResponseBody(TeaModel):
    """Response payload for ModifyLogServiceStatus: carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize the request id when present."""
        _map = super(ModifyLogServiceStatusResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        """Read the request id back from a wire-format dict."""
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyLogServiceStatusResponse(TeaModel):
    """Full ModifyLogServiceStatus response: headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyLogServiceStatusResponseBody

    def validate(self):
        """Both parts are mandatory; the body performs its own deep checks."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize, expanding the nested body via its own to_map."""
        _map = super(ModifyLogServiceStatusResponse, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Rebuild from a dict, parsing the nested body model."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyLogServiceStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleModeRequest(TeaModel):
    """Request parameters for ModifyProtectionModuleMode."""

    _FIELD_MAP = (
        ('domain', 'Domain'),
        ('defense_type', 'DefenseType'),
        ('mode', 'Mode'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(self, domain=None, defense_type=None, mode=None, instance_id=None):
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.mode = mode  # type: int
        self.instance_id = instance_id  # type: str

    def validate(self):
        """No constraints to enforce on this request model."""
        pass

    def to_map(self):
        """Serialize every non-None field into its wire-format key."""
        _map = super(ModifyProtectionModuleModeRequest, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; absent/None keys are skipped."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyProtectionModuleModeResponseBody(TeaModel):
    """Response payload for ModifyProtectionModuleMode: carries only the request id."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize the request id when present."""
        _map = super(ModifyProtectionModuleModeResponseBody, self).to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m=None):
        """Read the request id back from a wire-format dict."""
        m = m or dict()
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionModuleModeResponse(TeaModel):
    """Full ModifyProtectionModuleMode response: HTTP headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyProtectionModuleModeResponseBody

    def validate(self):
        # Both parts are mandatory; validate the body recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionModuleModeResponse, self).to_map()
        if cached is not None:
            return cached
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleModeResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleRuleRequest(TeaModel):
    """Request parameters for the ModifyProtectionModuleRule API call."""

    def __init__(self, domain=None, defense_type=None, rule=None, rule_id=None, lock_version=None, instance_id=None):
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.rule = rule  # type: str
        self.rule_id = rule_id  # type: long
        self.lock_version = lock_version  # type: long
        self.instance_id = instance_id  # type: str

    def validate(self):
        # No field-level constraints to enforce.
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionModuleRuleRequest, self).to_map()
        if cached is not None:
            return cached
        pairs = (
            ('Domain', self.domain),
            ('DefenseType', self.defense_type),
            ('Rule', self.rule),
            ('RuleId', self.rule_id),
            ('LockVersion', self.lock_version),
            ('InstanceId', self.instance_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('Domain', 'domain'),
            ('DefenseType', 'defense_type'),
            ('Rule', 'rule'),
            ('RuleId', 'rule_id'),
            ('LockVersion', 'lock_version'),
            ('InstanceId', 'instance_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class ModifyProtectionModuleRuleResponseBody(TeaModel):
    """Response payload for ModifyProtectionModuleRule: carries the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionModuleRuleResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionModuleRuleResponse(TeaModel):
    """Full ModifyProtectionModuleRule response: HTTP headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyProtectionModuleRuleResponseBody

    def validate(self):
        # Both parts are mandatory; validate the body recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionModuleRuleResponse, self).to_map()
        if cached is not None:
            return cached
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleRuleResponseBody().from_map(m['body'])
        return self
class ModifyProtectionModuleStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionModuleStatus API call."""

    def __init__(self, domain=None, defense_type=None, module_status=None, instance_id=None):
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.module_status = module_status  # type: int
        self.instance_id = instance_id  # type: str

    def validate(self):
        # No field-level constraints to enforce.
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionModuleStatusRequest, self).to_map()
        if cached is not None:
            return cached
        pairs = (
            ('Domain', self.domain),
            ('DefenseType', self.defense_type),
            ('ModuleStatus', self.module_status),
            ('InstanceId', self.instance_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('Domain', 'domain'),
            ('DefenseType', 'defense_type'),
            ('ModuleStatus', 'module_status'),
            ('InstanceId', 'instance_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class ModifyProtectionModuleStatusResponseBody(TeaModel):
    """Response payload for ModifyProtectionModuleStatus: carries the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionModuleStatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionModuleStatusResponse(TeaModel):
    """Full ModifyProtectionModuleStatus response: HTTP headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyProtectionModuleStatusResponseBody

    def validate(self):
        # Both parts are mandatory; validate the body recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionModuleStatusResponse, self).to_map()
        if cached is not None:
            return cached
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionModuleStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionRuleCacheStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionRuleCacheStatus API call."""

    def __init__(self, domain=None, rule_id=None, defense_type=None, instance_id=None):
        self.domain = domain  # type: str
        self.rule_id = rule_id  # type: long
        self.defense_type = defense_type  # type: str
        self.instance_id = instance_id  # type: str

    def validate(self):
        # No field-level constraints to enforce.
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionRuleCacheStatusRequest, self).to_map()
        if cached is not None:
            return cached
        pairs = (
            ('Domain', self.domain),
            ('RuleId', self.rule_id),
            ('DefenseType', self.defense_type),
            ('InstanceId', self.instance_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('Domain', 'domain'),
            ('RuleId', 'rule_id'),
            ('DefenseType', 'defense_type'),
            ('InstanceId', 'instance_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class ModifyProtectionRuleCacheStatusResponseBody(TeaModel):
    """Response payload for ModifyProtectionRuleCacheStatus: carries the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionRuleCacheStatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionRuleCacheStatusResponse(TeaModel):
    """Full ModifyProtectionRuleCacheStatus response: HTTP headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyProtectionRuleCacheStatusResponseBody

    def validate(self):
        # Both parts are mandatory; validate the body recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionRuleCacheStatusResponse, self).to_map()
        if cached is not None:
            return cached
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionRuleCacheStatusResponseBody().from_map(m['body'])
        return self
class ModifyProtectionRuleStatusRequest(TeaModel):
    """Request parameters for the ModifyProtectionRuleStatus API call."""

    def __init__(self, domain=None, defense_type=None, rule_id=None, rule_status=None, lock_version=None,
                 instance_id=None):
        self.domain = domain  # type: str
        self.defense_type = defense_type  # type: str
        self.rule_id = rule_id  # type: long
        self.rule_status = rule_status  # type: int
        self.lock_version = lock_version  # type: long
        self.instance_id = instance_id  # type: str

    def validate(self):
        # No field-level constraints to enforce.
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionRuleStatusRequest, self).to_map()
        if cached is not None:
            return cached
        pairs = (
            ('Domain', self.domain),
            ('DefenseType', self.defense_type),
            ('RuleId', self.rule_id),
            ('RuleStatus', self.rule_status),
            ('LockVersion', self.lock_version),
            ('InstanceId', self.instance_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('Domain', 'domain'),
            ('DefenseType', 'defense_type'),
            ('RuleId', 'rule_id'),
            ('RuleStatus', 'rule_status'),
            ('LockVersion', 'lock_version'),
            ('InstanceId', 'instance_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class ModifyProtectionRuleStatusResponseBody(TeaModel):
    """Response payload for ModifyProtectionRuleStatus: carries the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionRuleStatusResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class ModifyProtectionRuleStatusResponse(TeaModel):
    """Full ModifyProtectionRuleStatus response: HTTP headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: ModifyProtectionRuleStatusResponseBody

    def validate(self):
        # Both parts are mandatory; validate the body recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(ModifyProtectionRuleStatusResponse, self).to_map()
        if cached is not None:
            return cached
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ModifyProtectionRuleStatusResponseBody().from_map(m['body'])
        return self
class SetDomainRuleGroupRequest(TeaModel):
    """Request parameters for the SetDomainRuleGroup API call."""

    def __init__(self, domains=None, rule_group_id=None, waf_version=None, instance_id=None, resource_group_id=None):
        self.domains = domains  # type: str
        self.rule_group_id = rule_group_id  # type: long
        self.waf_version = waf_version  # type: long
        self.instance_id = instance_id  # type: str
        self.resource_group_id = resource_group_id  # type: str

    def validate(self):
        # No field-level constraints to enforce.
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(SetDomainRuleGroupRequest, self).to_map()
        if cached is not None:
            return cached
        pairs = (
            ('Domains', self.domains),
            ('RuleGroupId', self.rule_group_id),
            ('WafVersion', self.waf_version),
            ('InstanceId', self.instance_id),
            ('ResourceGroupId', self.resource_group_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('Domains', 'domains'),
            ('RuleGroupId', 'rule_group_id'),
            ('WafVersion', 'waf_version'),
            ('InstanceId', 'instance_id'),
            ('ResourceGroupId', 'resource_group_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m[key])
        return self
class SetDomainRuleGroupResponseBody(TeaModel):
    """Response payload for SetDomainRuleGroup: carries the request ID."""

    def __init__(self, request_id=None):
        self.request_id = request_id  # type: str

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(SetDomainRuleGroupResponseBody, self).to_map()
        if cached is not None:
            return cached
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        rid = m.get('RequestId')
        if rid is not None:
            self.request_id = rid
        return self
class SetDomainRuleGroupResponse(TeaModel):
    """Full SetDomainRuleGroup response: HTTP headers plus parsed body."""

    def __init__(self, headers=None, body=None):
        self.headers = headers  # type: dict[str, str]
        self.body = body  # type: SetDomainRuleGroupResponseBody

    def validate(self):
        # Both parts are mandatory; validate the body recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super(SetDomainRuleGroupResponse, self).to_map()
        if cached is not None:
            return cached
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m=None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = SetDomainRuleGroupResponseBody().from_map(m['body'])
        return self
|
[
264,
295,
457,
458,
577
] |
2,546 |
9976eb2dd84448b37b81629d352f4a7490ab2316
|
import argparse
import sys
def get_precision_values(input_file):
    """Parse an experiment log into parallel result lists.

    Lines tagged RESULTS_AGGREGATION yield (method, mean, median, precision);
    lines tagged RESULTS_PREC yield a full precision-at-K curve.
    Returns (methods, prec_values, means, medians, all_precs), where the
    first four lists are parallel per method.
    """
    prec_values = []
    all_precs = []
    means = []
    medians = []
    methods = []
    with open(input_file) as lines:
        for line in lines:
            if "RESULTS_AGGREGATION" in line:
                tokens = line.strip().split(',')
                methods.append(tokens[1])
                mean, median, prec = float(tokens[2]), float(tokens[3]), float(tokens[4])
                prec_values.append(prec)
                means.append(mean)
                medians.append(median)
            if "RESULTS_PREC" in line:
                tokens = line.strip().split(',')
                all_precs.append([float(t) for t in tokens[1:]])
    # Fixed: Python-2 `print x, y` statement (SyntaxError on Python 3).
    print(len(methods), len(prec_values), len(means), len(medians), len(all_precs))
    return methods, prec_values, means, medians, all_precs
def naming(methods):
    """Translate internal method identifiers into their display names.

    Unknown identifiers are silently dropped, exactly like the original
    chain of if-statements (which only appended on a recognized name).
    """
    display = {
        'jaccard': 'JACCARD',
        'cosine': 'COSINE',
        'stacking': 'COMBINATION',
        'stacking_rrf': 'STACK-RRF',
        'oneclass': 'ONECLASS',
        'bayesian_tfidf': 'BAYESIANSETS',
        'bayesian_bin': 'BS-BIN',
        'pu_learning': 'PULEARNING',
        'classifier': 'TWOCLASS',
    }
    return [display[m] for m in methods if m in display]
def _filter(methods, precs, means, medians, all_precs):
newmethods = []
newprecs = []
newmeans = []
newmedians = []
newall_precs = []
for i in xrange(len(methods)):
if (methods[i]!='STACK-RRF') and (methods[i]!='BS-BIN'):
newmethods.append(methods[i])
newprecs.append(precs[i])
newmeans.append(means[i])
newmedians.append(medians[i])
newall_precs.append(all_precs[i])
return newmethods, newprecs, newmeans, newmedians, newall_precs
def prepare_data(infile, domain, outputdir):
    """Generate the CSV inputs for plotting from a raw results log.

    Writes four files into outputdir: prec_<domain>.csv (P@K plus the delta
    against the mid-list precision), k_<domain>.csv (the COMBINATION method's
    precision sampled at ~10 values of K), mean_<domain>.csv and
    median_<domain>.csv.

    Fixed for Python 3: `xrange` -> `range` and integer `/` -> `//` where the
    result is used as a list index (true division would raise TypeError).
    """
    methods, precs, means, medians, all_precs = get_precision_values(infile)
    # Rename methods for display and drop the baselines we do not plot.
    methods = naming(methods)
    methods, precs, means, medians, all_precs = _filter(methods, precs, means, medians, all_precs)
    # P@K
    fname = outputdir + "/prec_" + domain + ".csv"
    with open(fname, 'w') as f:
        f.write(','.join(methods) + '\n')
        f.write(','.join([str(p) for p in precs]) + '\n')
        # Mid-list cut-off; floor division keeps this a valid int index.
        k = (len(all_precs[0]) - 1) // 2
        diff_precs = [all_precs[i][k] - precs[i] for i in range(len(precs))]
        f.write(','.join([str(p) for p in diff_precs]) + '\n')
    # Variable K: sample the COMBINATION curve at evenly spaced K values.
    fname = outputdir + "/k_" + domain + ".csv"
    with open(fname, 'w') as f:
        # NOTE(review): stacking_precs is only bound when a COMBINATION entry
        # exists; otherwise this raises NameError, exactly as the original did.
        for i in range(len(methods)):
            if methods[i] == 'COMBINATION':
                stacking_precs = all_precs[i]
        interval = len(stacking_precs) // 10 + 1
        k = interval
        line1 = "1"
        line2 = str(stacking_precs[0])
        while k <= len(stacking_precs):
            if k > 1:
                line1 += ',' + str(k)
                line2 += ',' + str(stacking_precs[k - 1])
            k += interval
        # Always include the final K if the loop stopped short of it.
        if (k - len(stacking_precs)) < interval:
            line1 += ',' + str(len(stacking_precs))
            line2 += ',' + str(stacking_precs[-1])
        f.write(line1 + '\n')
        f.write(line2 + '\n')
    # mean@K
    fname = outputdir + "/mean_" + domain + ".csv"
    with open(fname, 'w') as f:
        f.write(','.join(methods) + '\n')
        f.write(','.join([str(p) for p in means]) + '\n')
    # median@K
    fname = outputdir + "/median_" + domain + ".csv"
    with open(fname, 'w') as f:
        f.write(','.join(methods) + '\n')
        f.write(','.join([str(p) for p in medians]) + '\n')
def main():
    """CLI entry point: wire command-line arguments into prepare_data."""
    arg_specs = [
        (('-i', '--inputfile'), 'input file with data to plot'),
        (('-o', '--outputdir'), 'output directory'),
        (('-d', '--domain'), 'domain name'),
    ]
    parser = argparse.ArgumentParser()
    for flags, help_text in arg_specs:
        parser.add_argument(*flags, help=help_text, type=str)
    args = parser.parse_args()
    prepare_data(args.inputfile, args.domain, args.outputdir)
# Script entry point; also safe to import as a module for prepare_data().
if __name__=='__main__':
    main()
| null | null | null | null |
[
0
] |
2,547 |
895ece0b8d45cd64e43f8ddc54824f7647254185
|
<mask token>
@app.route('/')
def home_page():
year = datetime.datetime.today().year
return render_template('index.html', current_year=year)
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template('guess.html', name=person.name, gender=person.
gender, age=person.age, country=person.country)
<mask token>
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template('post.html', post=requested_post)
<mask token>
|
<mask token>
for post in all_posts:
post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])
post_objects.append(post_obj)
@app.route('/')
def home_page():
year = datetime.datetime.today().year
return render_template('index.html', current_year=year)
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template('guess.html', name=person.name, gender=person.
gender, age=person.age, country=person.country)
@app.route('/blog')
def blog():
return render_template('blog.html', posts=post_objects)
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template('post.html', post=requested_post)
if __name__ == '__main__':
app.run(debug=True)
|
<mask token>
app = Flask(__name__)
all_posts = all_posts = requests.get(
'https://api.npoint.io/5abcca6f4e39b4955965').json()
post_objects = []
for post in all_posts:
post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])
post_objects.append(post_obj)
@app.route('/')
def home_page():
year = datetime.datetime.today().year
return render_template('index.html', current_year=year)
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template('guess.html', name=person.name, gender=person.
gender, age=person.age, country=person.country)
@app.route('/blog')
def blog():
return render_template('blog.html', posts=post_objects)
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template('post.html', post=requested_post)
if __name__ == '__main__':
app.run(debug=True)
|
from flask import Flask
from flask import render_template
import datetime
from person import Person
import requests
from post import Post

app = Flask(__name__)
# Blog posts live in a static JSON document served by npoint.io.
# Fixed: removed the redundant chained assignment `all_posts = all_posts = ...`.
all_posts = requests.get('https://api.npoint.io/5abcca6f4e39b4955965').json()
# Wrap each raw dict in a Post object for template-friendly attribute access.
post_objects = []
for post in all_posts:
    post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])
    post_objects.append(post_obj)
@app.route('/')
def home_page():
    """Render the landing page, passing the current year for the footer."""
    return render_template('index.html',
                           current_year=datetime.datetime.today().year)
@app.route('/guess/<name>')
def guesser(name):
    """Render demographic guesses (gender, age, country) for a given name."""
    guessed = Person(name=name)
    context = {
        'name': guessed.name,
        'gender': guessed.gender,
        'age': guessed.age,
        'country': guessed.country,
    }
    return render_template('guess.html', **context)
@app.route('/blog')
def blog():
    """Render the blog index listing every fetched post."""
    entries = post_objects
    return render_template('blog.html', posts=entries)
@app.route('/post/<int:id>')
def blog_post(id):
    """Render a single post selected by numeric id.

    `post` is None when no id matches; with duplicate ids the LAST match
    wins, exactly like the original linear scan without a break.
    """
    matching = [p for p in post_objects if p.id == id]
    requested_post = matching[-1] if matching else None
    return render_template('post.html', post=requested_post)
if __name__ == '__main__':
app.run(debug=True)
|
from flask import Flask
from flask import render_template
import datetime
from person import Person
import requests
from post import Post
app = Flask(__name__)
all_posts = all_posts = requests.get(
"https://api.npoint.io/5abcca6f4e39b4955965").json()
post_objects = []
for post in all_posts:
post_obj = Post(post["id"], post["title"], post["subtitle"], post["body"])
post_objects.append(post_obj)
@app.route('/')
def home_page():
year = datetime.datetime.today().year
return render_template("index.html",
current_year=year)
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template("guess.html",
name=person.name,
gender=person.gender,
age=person.age,
country=person.country,
)
@app.route('/blog')
def blog():
return render_template("blog.html", posts=post_objects)
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template("post.html", post=requested_post)
if __name__ == "__main__":
app.run(debug=True)
|
[
3,
5,
6,
7,
8
] |
2,548 |
6097840cdf4b42efaca3e197f88703d927abe889
|
<mask token>
|
<mask token>
with open('./all-news.json') as f:
allNews = json.load(f)
<mask token>
with open('./recent-news.js', 'w') as f:
f.write("document.write('\\\n")
f.write('<ul>\\\n')
for value in allNews.values():
f.write('<li>\\\n')
date, content = value['date'], value['content']
date = date.replace("'", "\\'")
content = content.replace("'", "\\'")
f.write(date + ' - ' + content + '\\\n')
f.write('</li>\\\n')
recent_news_counter += 1
if recent_news_counter >= RECENT_NEWS_COUNT:
break
f.write('</ul>\\\n')
f.write("');")
with open('./all-news.js', 'w') as f:
f.write("document.write('\\\n")
f.write('<ul>\\\n')
for value in allNews.values():
f.write('<li>\\\n')
date, content = value['date'], value['content']
date = date.replace("'", "\\'")
content = content.replace("'", "\\'")
f.write(date + ' - ' + content + '\\\n')
f.write('</li>\\\n')
f.write('</ul>\\\n')
f.write("');")
|
RECENT_NEWS_COUNT = 5
<mask token>
with open('./all-news.json') as f:
allNews = json.load(f)
recent_news_counter = 0
with open('./recent-news.js', 'w') as f:
f.write("document.write('\\\n")
f.write('<ul>\\\n')
for value in allNews.values():
f.write('<li>\\\n')
date, content = value['date'], value['content']
date = date.replace("'", "\\'")
content = content.replace("'", "\\'")
f.write(date + ' - ' + content + '\\\n')
f.write('</li>\\\n')
recent_news_counter += 1
if recent_news_counter >= RECENT_NEWS_COUNT:
break
f.write('</ul>\\\n')
f.write("');")
with open('./all-news.js', 'w') as f:
f.write("document.write('\\\n")
f.write('<ul>\\\n')
for value in allNews.values():
f.write('<li>\\\n')
date, content = value['date'], value['content']
date = date.replace("'", "\\'")
content = content.replace("'", "\\'")
f.write(date + ' - ' + content + '\\\n')
f.write('</li>\\\n')
f.write('</ul>\\\n')
f.write("');")
|
RECENT_NEWS_COUNT = 5
import json
with open('./all-news.json') as f:
allNews = json.load(f)
recent_news_counter = 0
with open('./recent-news.js', 'w') as f:
f.write("document.write('\\\n")
f.write('<ul>\\\n')
for value in allNews.values():
f.write('<li>\\\n')
date, content = value['date'], value['content']
date = date.replace("'", "\\'")
content = content.replace("'", "\\'")
f.write(date + ' - ' + content + '\\\n')
f.write('</li>\\\n')
recent_news_counter += 1
if recent_news_counter >= RECENT_NEWS_COUNT:
break
f.write('</ul>\\\n')
f.write("');")
with open('./all-news.js', 'w') as f:
f.write("document.write('\\\n")
f.write('<ul>\\\n')
for value in allNews.values():
f.write('<li>\\\n')
date, content = value['date'], value['content']
date = date.replace("'", "\\'")
content = content.replace("'", "\\'")
f.write(date + ' - ' + content + '\\\n')
f.write('</li>\\\n')
f.write('</ul>\\\n')
f.write("');")
|
# written by Mohammad Shahrad @UBC
RECENT_NEWS_COUNT = 5

import json


def _emit_news_js(path, news_items, limit=None):
    """Write a document.write(...) JS snippet rendering news_items as a <ul>.

    Each item is a dict with 'date' and 'content'; `limit`, when given,
    caps the number of <li> entries emitted. Output is byte-identical to
    the two hand-duplicated loops this helper replaces.
    """
    with open(path, "w") as f:
        f.write("document.write('\\\n")
        f.write("<ul>\\\n")
        emitted = 0
        for value in news_items:
            f.write("<li>\\\n")
            date, content = value["date"], value["content"]
            # Escape single quotes so the JS string literal stays valid.
            date = date.replace("'", "\\'")
            content = content.replace("'", "\\'")
            f.write(date + " - " + content + "\\\n")
            f.write("</li>\\\n")
            emitted += 1
            if limit is not None and emitted >= limit:
                break
        f.write("</ul>\\\n")
        f.write("');")


with open("./all-news.json") as f:
    allNews = json.load(f)

_emit_news_js("./recent-news.js", allNews.values(), limit=RECENT_NEWS_COUNT)
_emit_news_js("./all-news.js", allNews.values())
|
[
0,
1,
2,
3,
4
] |
2,549 |
3bc009271c7dd34ad09bcef81214387b63dfac59
|
<mask token>
class Ensambler(object):
<mask token>
<mask token>
def first_pass(self):
for line in self.fileLines:
self.clean_line(line)
self.get_label()
self.get_operands()
if self.num_ope == 1:
if self.instruction in mne.v_jump:
if self.instruction == 'JP':
self.x = self.TS[operands[0]]
print('l')
print(self.x)
if self.operands[0] in mne.v_jump:
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.operands[0][1:-1].isnumeric():
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.num_ope == 1:
if self.instruction in mne.v_jump:
self.operands[0] = 'nn'
self.instruction = (self.instruction + ' ' + self.
operands[0])
code, size = mne.map_mnem.get(self.instruction, 'Error'
)('0000')
self.cl += size
else:
print(self.instruction)
print(self.CO)
print(self.cl)
print(self.TS)
def Second_pass(self):
for line in self.fileLines:
self.clean_line(line)
self.get_label()
self.get_operands()
if self.instruction in mne.v_jump:
if len(self.operands) == 2:
aux = self.operands[1]
else:
aux = self.operands[0]
if aux in self.TS.keys():
self.x = self.TS[aux]
self.instruction = self.instruction + ' ' + 'nn'
code, size = mne.map_mnem.get(self.instruction, 'Error')(
str(self.x))
self.CO.append(code)
else:
print('Error')
else:
if self.num_ope == 2:
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.num_ope == 1:
self.instruction = self.instruction + ' ' + self.operands[0
]
code, size = mne.map_mnem.get(self.instruction, 'Error')()
self.CO.append(code)
print(self.CO)
<mask token>
def get_label(self):
label = self.instruction.split(':')
if len(label) > 1:
if label[0] in mne.v_ops or label[0] in mne.map_mnem:
print('Error etiqueta invalida')
self.TS[label[0].strip()] = self.cl
del label[0]
self.instruction = label[0]
def get_operands(self):
line = self.instruction.split()
self.operands = [operand for operand in line]
self.instruction = self.operands[0]
del self.operands[0]
self.num_ope = len(self.operands)
<mask token>
|
<mask token>
class Ensambler(object):
def __init__(self, fileName):
self.fileName = fileName
self.fileLines = []
self.cl = 0
self.size = 0
self.code = ''
self.instruction = ''
self.num_ope = 0
self.operands = []
self.TS = {}
self.CO = []
self.x = 0
def leerArchivo(self):
file = open(self.fileName, 'r')
for line in file:
line = line.replace('\n', '')
line = line.replace('\t', '')
self.fileLines.append(line)
file.close()
def first_pass(self):
for line in self.fileLines:
self.clean_line(line)
self.get_label()
self.get_operands()
if self.num_ope == 1:
if self.instruction in mne.v_jump:
if self.instruction == 'JP':
self.x = self.TS[operands[0]]
print('l')
print(self.x)
if self.operands[0] in mne.v_jump:
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.operands[0][1:-1].isnumeric():
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.num_ope == 1:
if self.instruction in mne.v_jump:
self.operands[0] = 'nn'
self.instruction = (self.instruction + ' ' + self.
operands[0])
code, size = mne.map_mnem.get(self.instruction, 'Error'
)('0000')
self.cl += size
else:
print(self.instruction)
print(self.CO)
print(self.cl)
print(self.TS)
def Second_pass(self):
for line in self.fileLines:
self.clean_line(line)
self.get_label()
self.get_operands()
if self.instruction in mne.v_jump:
if len(self.operands) == 2:
aux = self.operands[1]
else:
aux = self.operands[0]
if aux in self.TS.keys():
self.x = self.TS[aux]
self.instruction = self.instruction + ' ' + 'nn'
code, size = mne.map_mnem.get(self.instruction, 'Error')(
str(self.x))
self.CO.append(code)
else:
print('Error')
else:
if self.num_ope == 2:
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.num_ope == 1:
self.instruction = self.instruction + ' ' + self.operands[0
]
code, size = mne.map_mnem.get(self.instruction, 'Error')()
self.CO.append(code)
print(self.CO)
def clean_line(self, line):
line = line.split(';')
self.instruction = line[0].upper().replace(',', '')
def get_label(self):
label = self.instruction.split(':')
if len(label) > 1:
if label[0] in mne.v_ops or label[0] in mne.map_mnem:
print('Error etiqueta invalida')
self.TS[label[0].strip()] = self.cl
del label[0]
self.instruction = label[0]
def get_operands(self):
line = self.instruction.split()
self.operands = [operand for operand in line]
self.instruction = self.operands[0]
del self.operands[0]
self.num_ope = len(self.operands)
<mask token>
|
<mask token>
class Ensambler(object):
    """Two-pass assembler.

    Pass 1 (first_pass) builds the symbol table TS (label -> address) and
    sizes each instruction to advance the location counter cl; pass 2
    (Second_pass) resolves jump targets through TS and emits object code
    into CO. Mnemonic tables come from the `mnemonicos` module (mne).
    """

    def __init__(self, fileName):
        self.fileName = fileName    # path of the assembly source file
        self.fileLines = []         # raw source lines, newline/tab stripped
        self.cl = 0                 # location counter
        self.size = 0               # size of the last encoded instruction
        self.code = ''              # last generated machine code
        self.instruction = ''       # mnemonic currently being processed
        self.num_ope = 0            # operand count of the current instruction
        self.operands = []          # operands of the current instruction
        self.TS = {}                # symbol table: label -> address
        self.CO = []                # generated object code
        self.x = 0                  # scratch: resolved jump-target address

    def leerArchivo(self):
        """Load the source file into fileLines, stripping newlines and tabs."""
        # Fixed: the file handle is now closed even if an exception occurs
        # (the original used open()/close() with no try/finally).
        with open(self.fileName, 'r') as source:
            for line in source:
                self.fileLines.append(line.replace('\n', '').replace('\t', ''))

    def first_pass(self):
        """Pass 1: record label addresses and advance cl by instruction size."""
        for line in self.fileLines:
            self.clean_line(line)
            self.get_label()
            self.get_operands()
            if self.num_ope == 1:
                if self.instruction in mne.v_jump:
                    if self.instruction == 'JP':
                        # Fixed: was `self.TS[operands[0]]` -- `operands` is an
                        # instance attribute, so the bare name raised NameError.
                        self.x = self.TS[self.operands[0]]
                        print('l')
                        print(self.x)
            # NOTE(review): the next two checks index operands[0]/operands[1]
            # unconditionally; a line with too few operands raises IndexError,
            # exactly as in the original. Confirm against the input grammar.
            if self.operands[0] in mne.v_jump:
                self.instruction = self.instruction + ' ' + self.operands[0] + ',' + self.operands[1]
            if self.operands[0][1:-1].isnumeric():
                self.instruction = self.instruction + ' ' + self.operands[0] + ',' + self.operands[1]
            if self.num_ope == 1:
                if self.instruction in mne.v_jump:
                    # Normalize the jump target to the placeholder 'nn'.
                    self.operands[0] = 'nn'
                    self.instruction = (self.instruction + ' ' + self.
                        operands[0])
                # Encode with a dummy address: only the size matters in pass 1.
                code, size = mne.map_mnem.get(self.instruction, 'Error')('0000')
                self.cl += size
            else:
                print(self.instruction)
        print(self.CO)
        print(self.cl)
        print(self.TS)

    def Second_pass(self):
        """Pass 2: resolve jump targets through TS and emit code into CO."""
        for line in self.fileLines:
            self.clean_line(line)
            self.get_label()
            self.get_operands()
            if self.instruction in mne.v_jump:
                # The target label is the last operand (a condition code may precede it).
                if len(self.operands) == 2:
                    aux = self.operands[1]
                else:
                    aux = self.operands[0]
                if aux in self.TS:
                    self.x = self.TS[aux]
                    self.instruction = self.instruction + ' ' + 'nn'
                    code, size = mne.map_mnem.get(self.instruction, 'Error')(
                        str(self.x))
                    self.CO.append(code)
                else:
                    print('Error')
            else:
                if self.num_ope == 2:
                    self.instruction = self.instruction + ' ' + self.operands[0] + ',' + self.operands[1]
                if self.num_ope == 1:
                    self.instruction = self.instruction + ' ' + self.operands[0]
                code, size = mne.map_mnem.get(self.instruction, 'Error')()
                self.CO.append(code)
        print(self.CO)

    def clean_line(self, line):
        """Strip comments (text after ';'), uppercase, and drop commas."""
        self.instruction = line.split(';')[0].upper().replace(',', '')

    def get_label(self):
        """If the line starts with 'LABEL:', record it in TS at address cl."""
        label = self.instruction.split(':')
        if len(label) > 1:
            # A label must not collide with an operand or mnemonic name.
            if label[0] in mne.v_ops or label[0] in mne.map_mnem:
                print('Error etiqueta invalida')
            self.TS[label[0].strip()] = self.cl
            del label[0]
        self.instruction = label[0]

    def get_operands(self):
        """Split the cleaned line into a mnemonic plus its operand list."""
        tokens = self.instruction.split()
        self.instruction = tokens[0]
        self.operands = tokens[1:]
        self.num_ope = len(self.operands)
<mask token>
aux.leerArchivo()
aux.first_pass()
aux.Second_pass()
|
import mnemonicos as mne
class Ensambler(object):
def __init__(self, fileName):
self.fileName = fileName
self.fileLines = []
self.cl = 0
self.size = 0
self.code = ''
self.instruction = ''
self.num_ope = 0
self.operands = []
self.TS = {}
self.CO = []
self.x = 0
def leerArchivo(self):
file = open(self.fileName, 'r')
for line in file:
line = line.replace('\n', '')
line = line.replace('\t', '')
self.fileLines.append(line)
file.close()
def first_pass(self):
for line in self.fileLines:
self.clean_line(line)
self.get_label()
self.get_operands()
if self.num_ope == 1:
if self.instruction in mne.v_jump:
if self.instruction == 'JP':
self.x = self.TS[operands[0]]
print('l')
print(self.x)
if self.operands[0] in mne.v_jump:
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.operands[0][1:-1].isnumeric():
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.num_ope == 1:
if self.instruction in mne.v_jump:
self.operands[0] = 'nn'
self.instruction = (self.instruction + ' ' + self.
operands[0])
code, size = mne.map_mnem.get(self.instruction, 'Error'
)('0000')
self.cl += size
else:
print(self.instruction)
print(self.CO)
print(self.cl)
print(self.TS)
def Second_pass(self):
for line in self.fileLines:
self.clean_line(line)
self.get_label()
self.get_operands()
if self.instruction in mne.v_jump:
if len(self.operands) == 2:
aux = self.operands[1]
else:
aux = self.operands[0]
if aux in self.TS.keys():
self.x = self.TS[aux]
self.instruction = self.instruction + ' ' + 'nn'
code, size = mne.map_mnem.get(self.instruction, 'Error')(
str(self.x))
self.CO.append(code)
else:
print('Error')
else:
if self.num_ope == 2:
self.instruction = self.instruction + ' ' + self.operands[0
] + ',' + self.operands[1]
if self.num_ope == 1:
self.instruction = self.instruction + ' ' + self.operands[0
]
code, size = mne.map_mnem.get(self.instruction, 'Error')()
self.CO.append(code)
print(self.CO)
def clean_line(self, line):
line = line.split(';')
self.instruction = line[0].upper().replace(',', '')
def get_label(self):
label = self.instruction.split(':')
if len(label) > 1:
if label[0] in mne.v_ops or label[0] in mne.map_mnem:
print('Error etiqueta invalida')
self.TS[label[0].strip()] = self.cl
del label[0]
self.instruction = label[0]
def get_operands(self):
line = self.instruction.split()
self.operands = [operand for operand in line]
self.instruction = self.operands[0]
del self.operands[0]
self.num_ope = len(self.operands)
aux = Ensambler('1.txt')
aux.leerArchivo()
aux.first_pass()
aux.Second_pass()
|
#from tkinter import Tk, Text, INSERT
import mnemonicos as mne
class Ensambler(object):
def __init__(self, fileName):
#Nombre del archivo
self.fileName = fileName
#Lineas del Archivo
self.fileLines = []
#Contador de Localidades
self.cl = 0
#Tamaño
self.size = 0
#Opcode
self.code = ""
#Intruccion
self.instruction = ""
#Contador de operadores
self.num_ope = 0
#Operandos
self.operands = []
# Tabla de simbolos
self.TS = {}
# Codigo Objeto
self.CO = []
#Aux
self.x = 0
#self.window = Tk()
#self.window.geometry('400x50')
def leerArchivo(self):
file = open(self.fileName, "r")
for line in file:
line = line.replace("\n", "")
line = line.replace("\t", "")
self.fileLines.append(line)
file.close()
#Primera Pasada
def first_pass(self):
for line in self.fileLines:
self.clean_line(line)
self.get_label()
self.get_operands()
if self.num_ope == 1:
if self.instruction in mne.v_jump:
if self.instruction == "JP":
self.x = self.TS[operands[0]]
print("l")
print(self.x)
if self.operands[0] in mne.v_jump:
self.instruction = self.instruction + " " + self.operands[0]+","+self.operands[1]
if self.operands[0][1:-1].isnumeric():
self.instruction = self.instruction + " " + self.operands[0]+","+self.operands[1]
if self.num_ope == 1:
if self.instruction in mne.v_jump:
self.operands[0] = "nn"
self.instruction = self.instruction + " " + self.operands[0]
code, size = mne.map_mnem.get(self.instruction,"Error")("0000")
self.cl += size
else:
#Valida si no es opcode valido
print(self.instruction)
#code, size = mne.map_mnem.get(self.instruction,"Error")()
#lst = "CL: " + str(self.cl) + " Code: " + code
#self.CO.append(code)
print(self.CO)
print(self.cl)
print(self.TS)
def Second_pass(self):
for line in self.fileLines:
self.clean_line(line)
self.get_label()
self.get_operands()
if self.instruction in mne.v_jump:
if len(self.operands) == 2:
aux = self.operands[1]
else:
aux = self.operands[0]
if aux in self.TS.keys():
self.x = self.TS[aux]
self.instruction = self.instruction + " " + "nn"
code, size = mne.map_mnem.get(self.instruction,"Error")(str(self.x))
self.CO.append(code)
else:
print("Error")
else:
if self.num_ope == 2:
self.instruction = self.instruction + " " + self.operands[0]+","+self.operands[1]
if self.num_ope == 1:
self.instruction = self.instruction + " " + self.operands[0]
code, size = mne.map_mnem.get(self.instruction,"Error")()
self.CO.append(code)
print(self.CO)
#Quitar Comentarios
def clean_line(self,line):
line = line.split(";")
self.instruction = line[0].upper().replace(",","")
# Obtener y guardar etiqueta si existe
def get_label(self):
label = self.instruction.split(":")
if len(label) > 1:
if label[0] in mne.v_ops or label[0] in mne.map_mnem:
print("Error etiqueta invalida")
#Quitar espacio al inicio
self.TS[label[0].strip()] = self.cl
del label[0]
self.instruction = label[0]
#Obtener los operandos y la instruccion
def get_operands(self):
line = self.instruction.split()
self.operands = [operand for operand in line]
self.instruction = self.operands[0]
del self.operands[0]
self.num_ope = len(self.operands)
aux = Ensambler("1.txt")
aux.leerArchivo()
aux.first_pass()
aux.Second_pass()
|
[
5,
8,
9,
11,
12
] |
2,550 |
380a28958fc6d1b403b29ede229860bf5f709572
|
<mask token>
class LoginPageTests(RegistrationBaseTestCase):
def test_can_open_whatsapp_login_page(self):
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
def test_checkbox_remember_me_is_checked_by_default(self):
self.assertTrue(self.login_page.is_remember_me_selected())
<mask token>
def test_can_uncheck_and_check_again_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
self.login_page.remember_me = True
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_manually_login_successfully(self):
base_page = BasePage(self.driver)
base_page.load()
time.sleep(8)
self.assertTrue(base_page.is_title_matches())
self.assertTrue(base_page.is_welcome_page_available())
self.assertTrue(base_page.is_nav_bar_page_available())
self.assertTrue(base_page.is_search_page_available())
self.assertTrue(base_page.is_pane_page_available())
self.assertFalse(base_page.is_chat_page_available())
class LogoutTest(RegistrationBaseTestCase):
def test_can_logout_successfully_after_login(self):
header_page = HeaderPage(self.driver)
time.sleep(8)
self.assertTrue(header_page.is_welcome_page_available())
self.assertTrue(header_page.is_nav_bar_page_available())
header_page.logout()
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
<mask token>
|
<mask token>
class RegistrationBaseTestCase(TestCase):
<mask token>
def tearDown(self):
self.driver.close()
class LoginPageTests(RegistrationBaseTestCase):
def test_can_open_whatsapp_login_page(self):
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
def test_checkbox_remember_me_is_checked_by_default(self):
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_uncheck_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
def test_can_uncheck_and_check_again_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
self.login_page.remember_me = True
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_manually_login_successfully(self):
base_page = BasePage(self.driver)
base_page.load()
time.sleep(8)
self.assertTrue(base_page.is_title_matches())
self.assertTrue(base_page.is_welcome_page_available())
self.assertTrue(base_page.is_nav_bar_page_available())
self.assertTrue(base_page.is_search_page_available())
self.assertTrue(base_page.is_pane_page_available())
self.assertFalse(base_page.is_chat_page_available())
class LogoutTest(RegistrationBaseTestCase):
def test_can_logout_successfully_after_login(self):
header_page = HeaderPage(self.driver)
time.sleep(8)
self.assertTrue(header_page.is_welcome_page_available())
self.assertTrue(header_page.is_nav_bar_page_available())
header_page.logout()
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
<mask token>
|
<mask token>
class RegistrationBaseTestCase(TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.maximize_window()
self.login_page = LoginPage(self.driver)
self.login_page.load()
def tearDown(self):
self.driver.close()
class LoginPageTests(RegistrationBaseTestCase):
def test_can_open_whatsapp_login_page(self):
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
def test_checkbox_remember_me_is_checked_by_default(self):
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_uncheck_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
def test_can_uncheck_and_check_again_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
self.login_page.remember_me = True
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_manually_login_successfully(self):
base_page = BasePage(self.driver)
base_page.load()
time.sleep(8)
self.assertTrue(base_page.is_title_matches())
self.assertTrue(base_page.is_welcome_page_available())
self.assertTrue(base_page.is_nav_bar_page_available())
self.assertTrue(base_page.is_search_page_available())
self.assertTrue(base_page.is_pane_page_available())
self.assertFalse(base_page.is_chat_page_available())
class LogoutTest(RegistrationBaseTestCase):
def test_can_logout_successfully_after_login(self):
header_page = HeaderPage(self.driver)
time.sleep(8)
self.assertTrue(header_page.is_welcome_page_available())
self.assertTrue(header_page.is_nav_bar_page_available())
header_page.logout()
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
<mask token>
|
import time
import unittest
from unittest import TestCase
from selenium import webdriver
from simon.accounts.pages import LoginPage
from simon.header.pages import HeaderPage
from simon.pages import BasePage
class RegistrationBaseTestCase(TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.maximize_window()
self.login_page = LoginPage(self.driver)
self.login_page.load()
def tearDown(self):
self.driver.close()
class LoginPageTests(RegistrationBaseTestCase):
def test_can_open_whatsapp_login_page(self):
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
def test_checkbox_remember_me_is_checked_by_default(self):
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_uncheck_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
def test_can_uncheck_and_check_again_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
self.login_page.remember_me = True
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_manually_login_successfully(self):
base_page = BasePage(self.driver)
base_page.load()
time.sleep(8)
self.assertTrue(base_page.is_title_matches())
self.assertTrue(base_page.is_welcome_page_available())
self.assertTrue(base_page.is_nav_bar_page_available())
self.assertTrue(base_page.is_search_page_available())
self.assertTrue(base_page.is_pane_page_available())
self.assertFalse(base_page.is_chat_page_available())
class LogoutTest(RegistrationBaseTestCase):
def test_can_logout_successfully_after_login(self):
header_page = HeaderPage(self.driver)
time.sleep(8)
self.assertTrue(header_page.is_welcome_page_available())
self.assertTrue(header_page.is_nav_bar_page_available())
header_page.logout()
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
if __name__ == '__main__':
unittest.main()
|
import time
import unittest
from unittest import TestCase
from selenium import webdriver
from simon.accounts.pages import LoginPage
from simon.header.pages import HeaderPage
from simon.pages import BasePage
class RegistrationBaseTestCase(TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.maximize_window()
self.login_page = LoginPage(self.driver)
self.login_page.load()
def tearDown(self):
self.driver.close()
class LoginPageTests(RegistrationBaseTestCase):
def test_can_open_whatsapp_login_page(self):
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
def test_checkbox_remember_me_is_checked_by_default(self):
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_uncheck_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
def test_can_uncheck_and_check_again_checkbox_remember_me(self):
self.login_page.remember_me = False
self.assertFalse(self.login_page.is_remember_me_selected())
self.login_page.remember_me = True
self.assertTrue(self.login_page.is_remember_me_selected())
def test_can_manually_login_successfully(self):
base_page = BasePage(self.driver)
base_page.load()
# time for you to read QR code and access whatsapp
time.sleep(8)
self.assertTrue(base_page.is_title_matches())
self.assertTrue(base_page.is_welcome_page_available())
self.assertTrue(base_page.is_nav_bar_page_available())
self.assertTrue(base_page.is_search_page_available())
self.assertTrue(base_page.is_pane_page_available())
# chat is only available after you click on a person to open the chat
self.assertFalse(base_page.is_chat_page_available())
class LogoutTest(RegistrationBaseTestCase):
def test_can_logout_successfully_after_login(self):
header_page = HeaderPage(self.driver)
# time for you to read QR code and access whatsapp
time.sleep(8)
self.assertTrue(header_page.is_welcome_page_available())
self.assertTrue(header_page.is_nav_bar_page_available())
header_page.logout()
self.assertTrue(self.login_page.is_title_matches())
self.assertTrue(self.login_page.is_instruction_title_matches())
if __name__ == "__main__":
unittest.main()
|
[
7,
10,
11,
13,
14
] |
2,551 |
486cfc4bb4b46d78715b11cba44656e8ba077c9b
|
<mask token>
class TestTransliteratePackage(unittest.TestCase):
<mask token>
def test_romanize_royin_basic(self):
for word in _BASIC_TESTS:
expect = _BASIC_TESTS[word]
self.assertEqual(romanize(word, engine='royin'), expect)
def test_romanize_royin_consistency(self):
for word, part1, part2 in _CONSISTENCY_TESTS:
self.assertEqual(romanize(word, engine='royin'), romanize(part1,
engine='royin') + romanize(part2, engine='royin'))
def test_romanize_thai2rom(self):
self.assertEqual(romanize('แมว', engine='thai2rom'), 'maeo')
self.assertEqual(romanize('บ้านไร่', engine='thai2rom'), 'banrai')
self.assertEqual(romanize('สุนัข', engine='thai2rom'), 'sunak')
self.assertEqual(romanize('นก', engine='thai2rom'), 'nok')
self.assertEqual(romanize('ความอิ่ม', engine='thai2rom'), 'khwam-im')
self.assertEqual(romanize('กานต์ ณรงค์', engine='thai2rom'),
'kan narong')
self.assertEqual(romanize('สกุนต์', engine='thai2rom'), 'sakun')
self.assertEqual(romanize('ชารินทร์', engine='thai2rom'), 'charin')
def test_thai2rom_prepare_sequence(self):
transliterater = ThaiTransliterator()
UNK_TOKEN = 1
END_TOKEN = 3
self.assertListEqual(transliterater._prepare_sequence_in('A').cpu()
.detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
self.assertListEqual(transliterater._prepare_sequence_in('♥').cpu()
.detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
self.assertNotEqual(transliterater._prepare_sequence_in('ก').cpu().
detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
<mask token>
def test_pronunciate(self):
self.assertEqual(pronunciate(''), '')
remove('thai_w2p')
self.assertIsNotNone(pronunciate('คน', engine='w2p'))
self.assertIsNotNone(pronunciate('แมว', engine='w2p'))
self.assertIsNotNone(pronunciate('มข.', engine='w2p'))
self.assertIsNotNone(pronunciate('มช.', engine='w2p'))
self.assertIsNotNone(pronunciate('jks', engine='w2p'))
<mask token>
|
<mask token>
class TestTransliteratePackage(unittest.TestCase):
<mask token>
def test_romanize_royin_basic(self):
for word in _BASIC_TESTS:
expect = _BASIC_TESTS[word]
self.assertEqual(romanize(word, engine='royin'), expect)
def test_romanize_royin_consistency(self):
for word, part1, part2 in _CONSISTENCY_TESTS:
self.assertEqual(romanize(word, engine='royin'), romanize(part1,
engine='royin') + romanize(part2, engine='royin'))
def test_romanize_thai2rom(self):
self.assertEqual(romanize('แมว', engine='thai2rom'), 'maeo')
self.assertEqual(romanize('บ้านไร่', engine='thai2rom'), 'banrai')
self.assertEqual(romanize('สุนัข', engine='thai2rom'), 'sunak')
self.assertEqual(romanize('นก', engine='thai2rom'), 'nok')
self.assertEqual(romanize('ความอิ่ม', engine='thai2rom'), 'khwam-im')
self.assertEqual(romanize('กานต์ ณรงค์', engine='thai2rom'),
'kan narong')
self.assertEqual(romanize('สกุนต์', engine='thai2rom'), 'sakun')
self.assertEqual(romanize('ชารินทร์', engine='thai2rom'), 'charin')
def test_thai2rom_prepare_sequence(self):
transliterater = ThaiTransliterator()
UNK_TOKEN = 1
END_TOKEN = 3
self.assertListEqual(transliterater._prepare_sequence_in('A').cpu()
.detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
self.assertListEqual(transliterater._prepare_sequence_in('♥').cpu()
.detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
self.assertNotEqual(transliterater._prepare_sequence_in('ก').cpu().
detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
<mask token>
def test_pronunciate(self):
self.assertEqual(pronunciate(''), '')
remove('thai_w2p')
self.assertIsNotNone(pronunciate('คน', engine='w2p'))
self.assertIsNotNone(pronunciate('แมว', engine='w2p'))
self.assertIsNotNone(pronunciate('มข.', engine='w2p'))
self.assertIsNotNone(pronunciate('มช.', engine='w2p'))
self.assertIsNotNone(pronunciate('jks', engine='w2p'))
def test_puan(self):
self.assertEqual(puan('นาริน'), 'นิน-รา')
self.assertEqual(puan('นาริน', False), 'นินรา')
self.assertEqual(puan('แสงดีนะ'), 'แสง-ดะ-นี')
self.assertEqual(puan('แสงดีนะ', False), 'แสงดะนี')
with self.assertRaises(ValueError):
self.assertEqual(puan('สวัสดีครับ'), 'สวัสดีครับ')
|
<mask token>
class TestTransliteratePackage(unittest.TestCase):
def test_romanize(self):
self.assertEqual(romanize(None), '')
self.assertEqual(romanize(''), '')
self.assertEqual(romanize('แมว'), 'maeo')
self.assertEqual(romanize('แมว', engine='tltk'), 'maeo')
def test_romanize_royin_basic(self):
for word in _BASIC_TESTS:
expect = _BASIC_TESTS[word]
self.assertEqual(romanize(word, engine='royin'), expect)
def test_romanize_royin_consistency(self):
for word, part1, part2 in _CONSISTENCY_TESTS:
self.assertEqual(romanize(word, engine='royin'), romanize(part1,
engine='royin') + romanize(part2, engine='royin'))
def test_romanize_thai2rom(self):
self.assertEqual(romanize('แมว', engine='thai2rom'), 'maeo')
self.assertEqual(romanize('บ้านไร่', engine='thai2rom'), 'banrai')
self.assertEqual(romanize('สุนัข', engine='thai2rom'), 'sunak')
self.assertEqual(romanize('นก', engine='thai2rom'), 'nok')
self.assertEqual(romanize('ความอิ่ม', engine='thai2rom'), 'khwam-im')
self.assertEqual(romanize('กานต์ ณรงค์', engine='thai2rom'),
'kan narong')
self.assertEqual(romanize('สกุนต์', engine='thai2rom'), 'sakun')
self.assertEqual(romanize('ชารินทร์', engine='thai2rom'), 'charin')
def test_thai2rom_prepare_sequence(self):
transliterater = ThaiTransliterator()
UNK_TOKEN = 1
END_TOKEN = 3
self.assertListEqual(transliterater._prepare_sequence_in('A').cpu()
.detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
self.assertListEqual(transliterater._prepare_sequence_in('♥').cpu()
.detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
self.assertNotEqual(transliterater._prepare_sequence_in('ก').cpu().
detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
def test_transliterate(self):
self.assertEqual(transliterate(''), '')
self.assertEqual(transliterate('แมว', 'pyicu'), 'mæw')
self.assertEqual(transliterate('คน', engine='ipa'), 'kʰon')
self.assertIsNotNone(transliterate('คน', engine='thaig2p'))
self.assertIsNotNone(transliterate('แมว', engine='thaig2p'))
self.assertIsNotNone(transliterate('คน', engine='tltk_g2p'))
self.assertIsNotNone(transliterate('แมว', engine='tltk_g2p'))
self.assertIsNotNone(transliterate('คน', engine='tltk_ipa'))
self.assertIsNotNone(transliterate('แมว', engine='tltk_ipa'))
self.assertIsNotNone(trans_list('คน'))
self.assertIsNotNone(xsampa_list('คน'))
def test_pronunciate(self):
self.assertEqual(pronunciate(''), '')
remove('thai_w2p')
self.assertIsNotNone(pronunciate('คน', engine='w2p'))
self.assertIsNotNone(pronunciate('แมว', engine='w2p'))
self.assertIsNotNone(pronunciate('มข.', engine='w2p'))
self.assertIsNotNone(pronunciate('มช.', engine='w2p'))
self.assertIsNotNone(pronunciate('jks', engine='w2p'))
def test_puan(self):
self.assertEqual(puan('นาริน'), 'นิน-รา')
self.assertEqual(puan('นาริน', False), 'นินรา')
self.assertEqual(puan('แสงดีนะ'), 'แสง-ดะ-นี')
self.assertEqual(puan('แสงดีนะ', False), 'แสงดะนี')
with self.assertRaises(ValueError):
self.assertEqual(puan('สวัสดีครับ'), 'สวัสดีครับ')
|
import unittest
import torch
from pythainlp.transliterate import romanize, transliterate, pronunciate, puan
from pythainlp.transliterate.ipa import trans_list, xsampa_list
from pythainlp.transliterate.thai2rom import ThaiTransliterator
from pythainlp.corpus import remove
_BASIC_TESTS = {None: '', '': '', 'abc': 'abc', 'หมอก': 'mok', 'หาย': 'hai',
'แมว': 'maeo', 'เดือน': 'duean', 'ดำ': 'dam', 'ดู': 'du', 'บัว': 'bua',
'กก': 'kok', 'พร': 'phon', 'กร': 'kon', 'กรร': 'kan', 'กรรม': 'kam',
'ฝ้าย': 'fai', 'นพพร': 'nopphon', 'อัก': 'ak'}
_CONSISTENCY_TESTS = [('ตากใบ', 'ตาก', 'ใบ')]
class TestTransliteratePackage(unittest.TestCase):
def test_romanize(self):
self.assertEqual(romanize(None), '')
self.assertEqual(romanize(''), '')
self.assertEqual(romanize('แมว'), 'maeo')
self.assertEqual(romanize('แมว', engine='tltk'), 'maeo')
def test_romanize_royin_basic(self):
for word in _BASIC_TESTS:
expect = _BASIC_TESTS[word]
self.assertEqual(romanize(word, engine='royin'), expect)
def test_romanize_royin_consistency(self):
for word, part1, part2 in _CONSISTENCY_TESTS:
self.assertEqual(romanize(word, engine='royin'), romanize(part1,
engine='royin') + romanize(part2, engine='royin'))
def test_romanize_thai2rom(self):
self.assertEqual(romanize('แมว', engine='thai2rom'), 'maeo')
self.assertEqual(romanize('บ้านไร่', engine='thai2rom'), 'banrai')
self.assertEqual(romanize('สุนัข', engine='thai2rom'), 'sunak')
self.assertEqual(romanize('นก', engine='thai2rom'), 'nok')
self.assertEqual(romanize('ความอิ่ม', engine='thai2rom'), 'khwam-im')
self.assertEqual(romanize('กานต์ ณรงค์', engine='thai2rom'),
'kan narong')
self.assertEqual(romanize('สกุนต์', engine='thai2rom'), 'sakun')
self.assertEqual(romanize('ชารินทร์', engine='thai2rom'), 'charin')
def test_thai2rom_prepare_sequence(self):
transliterater = ThaiTransliterator()
UNK_TOKEN = 1
END_TOKEN = 3
self.assertListEqual(transliterater._prepare_sequence_in('A').cpu()
.detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
self.assertListEqual(transliterater._prepare_sequence_in('♥').cpu()
.detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
self.assertNotEqual(transliterater._prepare_sequence_in('ก').cpu().
detach().numpy().tolist(), torch.tensor([UNK_TOKEN, END_TOKEN],
dtype=torch.long).cpu().detach().numpy().tolist())
def test_transliterate(self):
self.assertEqual(transliterate(''), '')
self.assertEqual(transliterate('แมว', 'pyicu'), 'mæw')
self.assertEqual(transliterate('คน', engine='ipa'), 'kʰon')
self.assertIsNotNone(transliterate('คน', engine='thaig2p'))
self.assertIsNotNone(transliterate('แมว', engine='thaig2p'))
self.assertIsNotNone(transliterate('คน', engine='tltk_g2p'))
self.assertIsNotNone(transliterate('แมว', engine='tltk_g2p'))
self.assertIsNotNone(transliterate('คน', engine='tltk_ipa'))
self.assertIsNotNone(transliterate('แมว', engine='tltk_ipa'))
self.assertIsNotNone(trans_list('คน'))
self.assertIsNotNone(xsampa_list('คน'))
def test_pronunciate(self):
self.assertEqual(pronunciate(''), '')
remove('thai_w2p')
self.assertIsNotNone(pronunciate('คน', engine='w2p'))
self.assertIsNotNone(pronunciate('แมว', engine='w2p'))
self.assertIsNotNone(pronunciate('มข.', engine='w2p'))
self.assertIsNotNone(pronunciate('มช.', engine='w2p'))
self.assertIsNotNone(pronunciate('jks', engine='w2p'))
def test_puan(self):
self.assertEqual(puan('นาริน'), 'นิน-รา')
self.assertEqual(puan('นาริน', False), 'นินรา')
self.assertEqual(puan('แสงดีนะ'), 'แสง-ดะ-นี')
self.assertEqual(puan('แสงดีนะ', False), 'แสงดะนี')
with self.assertRaises(ValueError):
self.assertEqual(puan('สวัสดีครับ'), 'สวัสดีครับ')
|
# -*- coding: utf-8 -*-
import unittest
import torch
from pythainlp.transliterate import romanize, transliterate, pronunciate, puan
from pythainlp.transliterate.ipa import trans_list, xsampa_list
from pythainlp.transliterate.thai2rom import ThaiTransliterator
from pythainlp.corpus import remove
_BASIC_TESTS = {
None: "",
"": "",
"abc": "abc",
"หมอก": "mok",
"หาย": "hai",
"แมว": "maeo",
"เดือน": "duean",
"ดำ": "dam",
"ดู": "du",
"บัว": "bua",
"กก": "kok",
"พร": "phon",
"กร": "kon",
"กรร": "kan",
"กรรม": "kam",
# "กรม": "krom", # failed
"ฝ้าย": "fai",
"นพพร": "nopphon",
"อัก": "ak",
# "ทีปกร": "thipakon", # failed
# "ธรรพ์": "than", # failed
# "ธรรม": "tham", # failed
# "มหา": "maha", # failed
# "หยาก": "yak", # failed
# "อยาก": "yak", # failed
# "ยมก": "yamok", # failed
# "กลัว": "klua", # failed
# "บ้านไร่": "banrai", # failed
# "ชารินทร์": "charin", # failed
}
# these are set of two-syllable words,
# to test if the transliteration/romanization is consistent, say
# romanize(1+2) = romanize(1) + romanize(2)
_CONSISTENCY_TESTS = [
# ("กระจก", "กระ", "จก"), # failed
# ("ระเบิด", "ระ", "เบิด"), # failed
# ("หยากไย่", "หยาก", "ไย่"), # failed
("ตากใบ", "ตาก", "ใบ"),
# ("จัดสรร", "จัด", "สรร"), # failed
]
class TestTransliteratePackage(unittest.TestCase):
def test_romanize(self):
self.assertEqual(romanize(None), "")
self.assertEqual(romanize(""), "")
self.assertEqual(romanize("แมว"), "maeo")
self.assertEqual(romanize("แมว", engine="tltk"), "maeo")
def test_romanize_royin_basic(self):
for word in _BASIC_TESTS:
expect = _BASIC_TESTS[word]
self.assertEqual(romanize(word, engine="royin"), expect)
def test_romanize_royin_consistency(self):
for word, part1, part2 in _CONSISTENCY_TESTS:
self.assertEqual(
romanize(word, engine="royin"),
(
romanize(part1, engine="royin")
+ romanize(part2, engine="royin")
),
)
def test_romanize_thai2rom(self):
self.assertEqual(romanize("แมว", engine="thai2rom"), "maeo")
self.assertEqual(romanize("บ้านไร่", engine="thai2rom"), "banrai")
self.assertEqual(romanize("สุนัข", engine="thai2rom"), "sunak")
self.assertEqual(romanize("นก", engine="thai2rom"), "nok")
self.assertEqual(romanize("ความอิ่ม", engine="thai2rom"), "khwam-im")
self.assertEqual(
romanize("กานต์ ณรงค์", engine="thai2rom"), "kan narong"
)
self.assertEqual(romanize("สกุนต์", engine="thai2rom"), "sakun")
self.assertEqual(romanize("ชารินทร์", engine="thai2rom"), "charin")
def test_thai2rom_prepare_sequence(self):
transliterater = ThaiTransliterator()
UNK_TOKEN = 1 # UNK_TOKEN or <UNK> is represented by 1
END_TOKEN = 3 # END_TOKEN or <end> is represented by 3
self.assertListEqual(
transliterater._prepare_sequence_in("A")
.cpu()
.detach()
.numpy()
.tolist(),
torch.tensor([UNK_TOKEN, END_TOKEN], dtype=torch.long)
.cpu()
.detach()
.numpy()
.tolist(),
)
self.assertListEqual(
transliterater._prepare_sequence_in("♥")
.cpu()
.detach()
.numpy()
.tolist(),
torch.tensor([UNK_TOKEN, END_TOKEN], dtype=torch.long)
.cpu()
.detach()
.numpy()
.tolist(),
)
self.assertNotEqual(
transliterater._prepare_sequence_in("ก")
.cpu()
.detach()
.numpy()
.tolist(),
torch.tensor([UNK_TOKEN, END_TOKEN], dtype=torch.long)
.cpu()
.detach()
.numpy()
.tolist(),
)
def test_transliterate(self):
self.assertEqual(transliterate(""), "")
self.assertEqual(transliterate("แมว", "pyicu"), "mæw")
self.assertEqual(transliterate("คน", engine="ipa"), "kʰon")
self.assertIsNotNone(transliterate("คน", engine="thaig2p"))
self.assertIsNotNone(transliterate("แมว", engine="thaig2p"))
self.assertIsNotNone(transliterate("คน", engine="tltk_g2p"))
self.assertIsNotNone(transliterate("แมว", engine="tltk_g2p"))
self.assertIsNotNone(transliterate("คน", engine="tltk_ipa"))
self.assertIsNotNone(transliterate("แมว", engine="tltk_ipa"))
self.assertIsNotNone(trans_list("คน"))
self.assertIsNotNone(xsampa_list("คน"))
def test_pronunciate(self):
self.assertEqual(pronunciate(""), "")
remove("thai_w2p")
self.assertIsNotNone(pronunciate("คน", engine="w2p"))
self.assertIsNotNone(pronunciate("แมว", engine="w2p"))
self.assertIsNotNone(pronunciate("มข.", engine="w2p"))
self.assertIsNotNone(pronunciate("มช.", engine="w2p"))
self.assertIsNotNone(pronunciate("jks", engine="w2p"))
def test_puan(self):
self.assertEqual(puan("นาริน"), "นิน-รา")
self.assertEqual(puan("นาริน", False), "นินรา")
self.assertEqual(puan("แสงดีนะ"), "แสง-ดะ-นี")
self.assertEqual(puan("แสงดีนะ", False), "แสงดะนี")
with self.assertRaises(ValueError):
self.assertEqual(puan("สวัสดีครับ"), "สวัสดีครับ")
|
[
6,
7,
9,
11,
12
] |
2,552 |
38c78a51a50ee9844aec8b8cdcdd42b858748518
|
<mask token>
class AttendanceDetailView(DetailView):
model = Attendance
template_name = 'attendance_detail.html'
<mask token>
class AttendanceCreateView(CreateView):
model = Attendance
template_name = 'attendance_new.html'
fields = ['group', 'disciple']
def get_context_data(self, *args, **kwargs):
groups_choices = ['ИУ1', 'ИУ2', 'ИУ3', 'ИУ4', 'ИУ5', 'ИУ6', 'ИУ7',
'ИУ8']
context = super(AttendanceCreateView, self).get_context_data(*args,
**kwargs)
context['students'] = CustomUser.objects.filter(student_group='ИУ6')
context['disciples'] = Disciple.objects.all()
context['groups'] = groups_choices
return context
def form_valid(self, form):
obj = form.save(commit=False)
obj.author = self.request.user
obj.date = timezone.now()
fname = f'Журнал-{obj.disciple.name}-{timezone.now()}.csv'
form_data = self.request.POST
print(form_data)
obj.save()
print(form, dir(form))
return super().form_valid(form)
|
<mask token>
class AttendanceListView(ListView):
<mask token>
<mask token>
<mask token>
class AttendanceDetailView(DetailView):
model = Attendance
template_name = 'attendance_detail.html'
<mask token>
class AttendanceCreateView(CreateView):
model = Attendance
template_name = 'attendance_new.html'
fields = ['group', 'disciple']
def get_context_data(self, *args, **kwargs):
groups_choices = ['ИУ1', 'ИУ2', 'ИУ3', 'ИУ4', 'ИУ5', 'ИУ6', 'ИУ7',
'ИУ8']
context = super(AttendanceCreateView, self).get_context_data(*args,
**kwargs)
context['students'] = CustomUser.objects.filter(student_group='ИУ6')
context['disciples'] = Disciple.objects.all()
context['groups'] = groups_choices
return context
def form_valid(self, form):
obj = form.save(commit=False)
obj.author = self.request.user
obj.date = timezone.now()
fname = f'Журнал-{obj.disciple.name}-{timezone.now()}.csv'
form_data = self.request.POST
print(form_data)
obj.save()
print(form, dir(form))
return super().form_valid(form)
|
<mask token>
class AttendanceListView(ListView):
<mask token>
<mask token>
def get_queryset(self):
return self.model.objects.order_by('-date')
class AttendanceDetailView(DetailView):
model = Attendance
template_name = 'attendance_detail.html'
<mask token>
class AttendanceCreateView(CreateView):
model = Attendance
template_name = 'attendance_new.html'
fields = ['group', 'disciple']
def get_context_data(self, *args, **kwargs):
groups_choices = ['ИУ1', 'ИУ2', 'ИУ3', 'ИУ4', 'ИУ5', 'ИУ6', 'ИУ7',
'ИУ8']
context = super(AttendanceCreateView, self).get_context_data(*args,
**kwargs)
context['students'] = CustomUser.objects.filter(student_group='ИУ6')
context['disciples'] = Disciple.objects.all()
context['groups'] = groups_choices
return context
def form_valid(self, form):
obj = form.save(commit=False)
obj.author = self.request.user
obj.date = timezone.now()
fname = f'Журнал-{obj.disciple.name}-{timezone.now()}.csv'
form_data = self.request.POST
print(form_data)
obj.save()
print(form, dir(form))
return super().form_valid(form)
|
<mask token>
class AttendanceListView(ListView):
model = Attendance
template_name = 'attendance_list.html'
def get_queryset(self):
return self.model.objects.order_by('-date')
class AttendanceDetailView(DetailView):
model = Attendance
template_name = 'attendance_detail.html'
<mask token>
class AttendanceCreateView(CreateView):
model = Attendance
template_name = 'attendance_new.html'
fields = ['group', 'disciple']
def get_context_data(self, *args, **kwargs):
groups_choices = ['ИУ1', 'ИУ2', 'ИУ3', 'ИУ4', 'ИУ5', 'ИУ6', 'ИУ7',
'ИУ8']
context = super(AttendanceCreateView, self).get_context_data(*args,
**kwargs)
context['students'] = CustomUser.objects.filter(student_group='ИУ6')
context['disciples'] = Disciple.objects.all()
context['groups'] = groups_choices
return context
def form_valid(self, form):
obj = form.save(commit=False)
obj.author = self.request.user
obj.date = timezone.now()
fname = f'Журнал-{obj.disciple.name}-{timezone.now()}.csv'
form_data = self.request.POST
print(form_data)
obj.save()
print(form, dir(form))
return super().form_valid(form)
|
from django.shortcuts import render
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView
from django.urls import reverse_lazy
from django.utils import timezone
from time import time
import json
from .models import Attendance, Disciple
from users.models import CustomUser
class AttendanceListView(ListView):
model = Attendance
template_name = 'attendance_list.html'
def get_queryset(self):
return self.model.objects.order_by('-date')
class AttendanceDetailView(DetailView):
model = Attendance
template_name = 'attendance_detail.html'
"""
class AttendanceCreateView(CreateView):
model = Attendance
template_name = 'attendance_new.html'
fields = ['title', 'document']
def form_valid(self, form):
obj = form.save(commit=False)
obj.author = self.request.user
obj.date = timezone.now()
obj.save()
return super().form_valid(form)
"""
class AttendanceCreateView(CreateView):
model = Attendance
template_name = 'attendance_new.html'
fields = ['group', 'disciple']
def get_context_data(self, *args, **kwargs):
groups_choices = [
'ИУ1',
'ИУ2',
'ИУ3',
'ИУ4',
'ИУ5',
'ИУ6',
'ИУ7',
'ИУ8',
]
context = super(AttendanceCreateView, self).get_context_data(*args, **kwargs)
context['students'] = CustomUser.objects.filter(student_group='ИУ6')
context['disciples'] = Disciple.objects.all()
context['groups'] = groups_choices
return context
def form_valid(self, form):
obj = form.save(commit=False)
obj.author = self.request.user
obj.date = timezone.now()
#obj.disciple =
fname = f'Журнал-{obj.disciple.name}-{timezone.now()}.csv'
#print(fname)
form_data = self.request.POST
print(form_data)
#Process form_data; mk csv_file based on it; save it to obj.document
#obj.document = doc
obj.save()
print(form, dir(form))
return super().form_valid(form)
|
[
6,
7,
8,
9,
11
] |
2,553 |
7644dcd956e1ad179f42e44870864386744c6cdf
|
<mask token>
class LoginForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'class':
'form-control'}))
|
<mask token>
class SignUpForm(forms.Form):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
class Meta:
model = AuthUser
fields = ('username', 'email', 'password1', 'password2',
'first_name', 'last_name')
class LoginForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'class':
'form-control'}))
|
<mask token>
class SignUpForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
email = forms.EmailField(widget=forms.EmailInput(attrs={'class':
'form-control'}))
password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':
'form-control'}))
password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':
'form-control'}))
first_name = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
last_name = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
class Meta:
model = AuthUser
fields = ('username', 'email', 'password1', 'password2',
'first_name', 'last_name')
class LoginForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'class':
'form-control'}))
|
from django import forms
from django.contrib.auth.forms import UserCreationForm
from .models import AuthUser
class SignUpForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
email = forms.EmailField(widget=forms.EmailInput(attrs={'class':
'form-control'}))
password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':
'form-control'}))
password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':
'form-control'}))
first_name = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
last_name = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
class Meta:
model = AuthUser
fields = ('username', 'email', 'password1', 'password2',
'first_name', 'last_name')
class LoginForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class':
'form-control'}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'class':
'form-control'}))
|
from django import forms
from django.contrib.auth.forms import UserCreationForm
from .models import AuthUser
class SignUpForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'}))
email = forms.EmailField(widget=forms.EmailInput(attrs={'class':'form-control'}))
password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'form-control'}))
password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'form-control'}))
first_name = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'}))
last_name = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'}))
class Meta:
model = AuthUser
fields = ('username','email', 'password1', 'password2', 'first_name', 'last_name' )
class LoginForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'class':'form-control'}))
|
[
2,
3,
4,
5,
6
] |
2,554 |
4d58926e812789768fdf5be59bd54f9b66850e57
|
<mask token>
|
<mask token>
from . import utilities
from . import stats
from . import signal
from . import plot
from . import docopt
|
"""
The epitome package is a set of command-line tools for analyzing MRI data, and a
set of scriptuit modules for stitching them (and others) together.
"""
from . import utilities
from . import stats
from . import signal
from . import plot
from . import docopt
| null | null |
[
0,
1,
2
] |
2,555 |
f59db28b669a41051cc6d0d4b8e14d1c7b0edd11
|
<mask token>
|
<mask token>
app_user_one.get_user_info()
app_user_one.change_status('in job market')
app_user_one.get_user_info()
<mask token>
app_user_two.get_user_info()
<mask token>
new_post.get_post_info()
|
<mask token>
app_user_one = user.User('[email protected]', 'Riks R', 'ppp1', 'student')
app_user_one.get_user_info()
app_user_one.change_status('in job market')
app_user_one.get_user_info()
app_user_two = user.User('[email protected]', 'Bobby L', 'zz1', 'student')
app_user_two.get_user_info()
new_post = Post('Going for it', app_user_two.name)
new_post.get_post_info()
|
import user
from post import Post
app_user_one = user.User('[email protected]', 'Riks R', 'ppp1', 'student')
app_user_one.get_user_info()
app_user_one.change_status('in job market')
app_user_one.get_user_info()
app_user_two = user.User('[email protected]', 'Bobby L', 'zz1', 'student')
app_user_two.get_user_info()
new_post = Post('Going for it', app_user_two.name)
new_post.get_post_info()
|
import user
# or from user import User
from post import Post
app_user_one = user.User("[email protected]", "Riks R", "ppp1", "student")
app_user_one.get_user_info()
app_user_one.change_status("in job market")
app_user_one.get_user_info()
app_user_two = user.User("[email protected]", "Bobby L", "zz1", "student")
app_user_two.get_user_info()
new_post = Post("Going for it", app_user_two.name)
new_post.get_post_info()
|
[
0,
1,
2,
3,
4
] |
2,556 |
a65ab0faf08c13f007a132fb92f358a35834fdb7
|
<mask token>
|
<mask token>
for x in xs:
dist += min(x, K - x)
print(dist * 2)
|
N = int(input())
K = int(input())
xs = list(map(int, input().split()))
dist = 0
for x in xs:
dist += min(x, K - x)
print(dist * 2)
|
N = int(input())
K = int(input())
xs = list(map(int, input().split()))
dist = 0
for x in xs:
dist += min(x, K-x)
print(dist*2)
| null |
[
0,
1,
2,
3
] |
2,557 |
9f8065dfdfe07985244e18d92b59e1c045388a72
|
<mask token>
def distance(x1, y1, x2, y2):
return ((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2)) ** 0.5
<mask token>
|
<mask token>
def distance(x1, y1, x2, y2):
return ((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2)) ** 0.5
<mask token>
turtle.penup()
turtle.goto(x1, y1)
turtle.pendown()
turtle.write('Point 1')
turtle.goto(x2, y2)
turtle.write('Point 2')
turtle.penup()
turtle.goto((x1 + x2) / 2, (y1 + y2) / 2)
turtle.write('Distance')
turtle.done()
|
<mask token>
def distance(x1, y1, x2, y2):
return ((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2)) ** 0.5
x1, y1 = eval(input('Enter x1 and y1 for point 1: '))
x2, y2 = eval(input('Enter x2 and y2 for point 2: '))
distanceBetweenPoints = distance(x1, y1, x2, y2)
turtle.penup()
turtle.goto(x1, y1)
turtle.pendown()
turtle.write('Point 1')
turtle.goto(x2, y2)
turtle.write('Point 2')
turtle.penup()
turtle.goto((x1 + x2) / 2, (y1 + y2) / 2)
turtle.write('Distance')
turtle.done()
|
import turtle
def distance(x1, y1, x2, y2):
return ((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2)) ** 0.5
x1, y1 = eval(input('Enter x1 and y1 for point 1: '))
x2, y2 = eval(input('Enter x2 and y2 for point 2: '))
distanceBetweenPoints = distance(x1, y1, x2, y2)
turtle.penup()
turtle.goto(x1, y1)
turtle.pendown()
turtle.write('Point 1')
turtle.goto(x2, y2)
turtle.write('Point 2')
turtle.penup()
turtle.goto((x1 + x2) / 2, (y1 + y2) / 2)
turtle.write('Distance')
turtle.done()
|
import turtle
def distance(x1, y1, x2, y2):
return ((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2)) ** 0.5
x1, y1 = eval(input("Enter x1 and y1 for point 1: "))
x2, y2 = eval(input("Enter x2 and y2 for point 2: "))
distanceBetweenPoints = distance(x1, y1, x2, y2)
turtle.penup()
turtle.goto(x1, y1)
turtle.pendown()
turtle.write("Point 1")
turtle.goto(x2, y2)
turtle.write("Point 2")
#Center of line
turtle.penup()
turtle.goto((x1 + x2) / 2, (y1 + y2) / 2)
turtle.write("Distance")
turtle.done()
|
[
1,
2,
3,
4,
5
] |
2,558 |
e73c4a99c421b3eca08c941ff1f83cb03faee97d
|
<mask token>
class Product(Model):
__tablename__ = 'products'
id = Column(Integer, primary_key=True)
name = Column(String(80), nullable=False)
description = Column(String(120))
img = Column(String(80), nullable=False)
category = Column(String(50), nullable=False)
price = Column(Float, nullable=False)
def __repr__(self):
return '<Product {} /n Description {}>'.format(self.name, self.
description)
class ProductUser(Model):
__tablename__ = 'productusers'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
product_id = Column(Integer, ForeignKey('products.id'), nullable=False)
user = relationship('User', backref='productuser')
product = relationship('Product', backref='productuser')
def __repr__(self):
return '<ProductUser userID: {} productID: {} >'.format(self.
user_id, self.product_id)
|
<mask token>
class User(Model):
<mask token>
<mask token>
<mask token>
<mask token>
<mask token>
def __repr__(self):
return '<User {}>'.format(self.username)
class Product(Model):
__tablename__ = 'products'
id = Column(Integer, primary_key=True)
name = Column(String(80), nullable=False)
description = Column(String(120))
img = Column(String(80), nullable=False)
category = Column(String(50), nullable=False)
price = Column(Float, nullable=False)
def __repr__(self):
return '<Product {} /n Description {}>'.format(self.name, self.
description)
class ProductUser(Model):
__tablename__ = 'productusers'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
product_id = Column(Integer, ForeignKey('products.id'), nullable=False)
user = relationship('User', backref='productuser')
product = relationship('Product', backref='productuser')
def __repr__(self):
return '<ProductUser userID: {} productID: {} >'.format(self.
user_id, self.product_id)
|
<mask token>
class User(Model):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
type = Column(String(80), unique=False, nullable=False)
email = Column(String(120), unique=True, nullable=False)
password = Column(String(80), unique=False, nullable=True)
def __repr__(self):
return '<User {}>'.format(self.username)
class Product(Model):
__tablename__ = 'products'
id = Column(Integer, primary_key=True)
name = Column(String(80), nullable=False)
description = Column(String(120))
img = Column(String(80), nullable=False)
category = Column(String(50), nullable=False)
price = Column(Float, nullable=False)
def __repr__(self):
return '<Product {} /n Description {}>'.format(self.name, self.
description)
class ProductUser(Model):
__tablename__ = 'productusers'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
product_id = Column(Integer, ForeignKey('products.id'), nullable=False)
user = relationship('User', backref='productuser')
product = relationship('Product', backref='productuser')
def __repr__(self):
return '<ProductUser userID: {} productID: {} >'.format(self.
user_id, self.product_id)
|
<mask token>
Model = declarative_base()
class User(Model):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
type = Column(String(80), unique=False, nullable=False)
email = Column(String(120), unique=True, nullable=False)
password = Column(String(80), unique=False, nullable=True)
def __repr__(self):
return '<User {}>'.format(self.username)
class Product(Model):
__tablename__ = 'products'
id = Column(Integer, primary_key=True)
name = Column(String(80), nullable=False)
description = Column(String(120))
img = Column(String(80), nullable=False)
category = Column(String(50), nullable=False)
price = Column(Float, nullable=False)
def __repr__(self):
return '<Product {} /n Description {}>'.format(self.name, self.
description)
class ProductUser(Model):
__tablename__ = 'productusers'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
product_id = Column(Integer, ForeignKey('products.id'), nullable=False)
user = relationship('User', backref='productuser')
product = relationship('Product', backref='productuser')
def __repr__(self):
return '<ProductUser userID: {} productID: {} >'.format(self.
user_id, self.product_id)
|
from sqlalchemy import Column, ForeignKey
from sqlalchemy.types import Integer, Text, String, DateTime, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
Model = declarative_base()
class User(Model):
__tablename__ = "users"
id = Column(Integer,
primary_key=True)
type = Column(String(80),
unique=False, nullable=False)
email = Column(String(120),
unique=True,
nullable=False)
password = Column(String(80),
unique=False, nullable=True)
# joined = Column(DateTime,
# unique=False,
# nullable=False)
# join_date=datetime.now()
def __repr__(self):
return '<User {}>'.format(self.username)
class Product(Model):
__tablename__ = "products"
id = Column(Integer,
primary_key=True)
name = Column(String(80),
nullable=False)
description = Column(String(120))
img = Column(String(80),
nullable=False)
category = Column(String(50),
nullable=False)
price = Column(Float,
nullable=False)
def __repr__(self):
return '<Product {} /n Description {}>'.format(self.name, self.description)
class ProductUser(Model):
__tablename__ = "productusers"
id = Column(Integer,
primary_key=True)
user_id = Column(Integer,
ForeignKey('users.id'),
nullable=False)
product_id = Column(Integer,
ForeignKey('products.id'),
nullable=False)
user = relationship("User", backref="productuser")
product = relationship("Product", backref="productuser")
def __repr__(self):
return '<ProductUser userID: {} productID: {} >'.format(self.user_id, self.product_id)
|
[
6,
8,
9,
10,
12
] |
2,559 |
a6eab1e5e7985de917d707c904fcd90f223c108c
|
#!/home/liud/anaconda3/envs/python/bin/python
# -*- coding: utf-8 -*-
'''
线性回归
公式:W = 1/(xTx) * xT * y
'''
#导入的包
import numpy as np
from numpy import linalg
from numpy import corrcoef
from sklearn import linear_model
import matplotlib.pyplot as plt
#加载数据
def loadDataSet(filename):
xList = []
yList = []
with open(filename) as fn:
for i in fn:
x = i.rstrip().split("\t")
#x = map(eval, x) 此函数eval容易造成恶意输入
x = map(eval, x)
xList.append(x[: -1])
yList.append(float(x[-1]))
return xList, yList
'''
def loadDataSet(filename):
numFeat = len(open(filename).readline().split("\t")) - 1
dataMat = []
labelMat= []
fr = open(filename)
for line in fr.readlines():
lineArr = []
curLine = line.strip().split("\t")
for i in range(numFeat):
lineArr.append(float(curLine[i]))
dataMat.append(lineArr)
labelMat.append(float(curLine[-1]))
return dataMat, labelMat
'''
#计算最佳拟合直线,得到模型参数
def standRegress(xList, yList):
xArr = np.array(xList)
yArr = np.transpose([yList]) #将yList转化成列向量
xTx = np.dot(xArr.T, xArr)
if linalg.det(xTx) == 0: #判断是否为非奇异矩阵
print "这个矩阵是奇异矩阵,行列式为0"
return
ws = np.dot(np.linalg.inv(xTx), np.dot(xArr.T, yArr))
return ws
#sklearn的写法
def sklearn_standRegress(xList, yList):
clf = linear_model.LinearRegression(fit_intercept = False) #加载线性回归模型,且让w0 = 0(w0指的是intercept)
clf.fit(xList, yList) #拟合
#print clf.intercept_
return np.transpose([clf.coef_]) #返回系数的列向量形式
#展示结果
def show(xList, yList, w):
xArr = np.array(xList)
yArr = np.transpose([yList])
fig = plt.figure() #创建一幅图
ax = fig.add_subplot(1, 1, 1)
ax.scatter(xArr[:, 1:2].flatten(), yArr.flatten())
xCopy = xArr.copy()
xCopy.sort(0)
yPredict = np.dot(xCopy, w)
#print yPredict.flatten()
ax.plot(xCopy[:, 1], yPredict.flatten())
plt.show()
#主函数
def main():
xList, yList = loadDataSet("/home/liud/PycharmProjects/Machine_Learning/Regression/data/ex0.txt")
_, n = np.shape(xList)
ws = np.zeros((n, 1))
while(1):
print '请输入你选择的方式(1.sklearn;2.regression自己实现的线性回归)'
selectStyle = raw_input()
if selectStyle == '1':
# sklearn的实现
ws = sklearn_standRegress(xList, yList)
break
elif selectStyle == '2':
# 自己按理解实现
ws = standRegress(xList, yList)
break
else:
print '错误输入,请重新输入'
print "最小二乘法得出的回归系数: \n", ws
show(xList, yList, ws)
yPredict = np.dot(xList, ws)
print "相关性:", corrcoef(yPredict.T.tolist(), yList) #corrcoef中的两个参数尽可能的类型相似,yList是list,因此yPredict是numpy.ndarray且为二维的列向量。
if __name__ == '__main__':
main()
print 'Success'
| null | null | null | null |
[
0
] |
2,560 |
0ebf5646ee9693b7d0c1de61436e05b3725b2c9f
|
<mask token>
|
<mask token>
host = 'datavis.cauuh8vzeelb.us-east-1.rds.amazonaws.com'
database = 'top5'
user = 'teamwonder'
password = 'visproject'
Gentrifying = [10002, 10003, 10009, 10026, 10027, 10029, 10030, 10031,
10032, 10033, 10034, 10035, 10037, 10039, 10040, 10454, 10455, 10456,
10457, 10458, 10459, 10460, 10474, 11102, 11103, 11105, 11106, 11206,
11211, 11212, 11213, 11216, 11220, 11221, 11222, 11225, 11232, 11233,
11237, 11249, 11370]
Non_Gentrifying = [10451, 10452, 10453, 10463, 10468, 10472, 10473, 11204,
11208, 11214, 11223, 11224, 11239]
Higher_Income = [83, 7020, 7030, 7114, 10000, 10001, 10004, 10005, 10006,
10007, 10010, 10011, 10012, 10013, 10014, 10016, 10017, 10018, 10019,
10020, 10021, 10022, 10023, 10024, 10025, 10028, 10036, 10038, 10041,
10044, 10045, 10048, 10055, 10065, 10069, 10075, 10103, 10104, 10105,
10107, 10111, 10112, 10118, 10119, 10120, 10121, 10122, 10123, 10128,
10129, 10153, 10154, 10155, 10158, 10162, 10165, 10166, 10167, 10168,
10169, 10170, 10171, 10172, 10173, 10177, 10178, 10179, 10270, 10271,
10278, 10279, 10280, 10281, 10282, 10301, 10302, 10303, 10304, 10305,
10306, 10307, 10308, 10309, 10310, 10312, 10314, 10461, 10462, 10464,
10465, 10466, 10467, 10469, 10470, 10471, 10475, 10507, 10704, 10803,
11001, 11004, 11005, 11040, 11101, 11104, 11109, 11201, 11203, 11205,
11207, 11209, 11210, 11215, 11217, 11218, 11219, 11226, 11228, 11229,
11230, 11231, 11234, 11235, 11236, 11238, 11241, 11242, 11251, 11354,
11355, 11356, 11357, 11358, 11359, 11360, 11361, 11362, 11363, 11364,
11365, 11366, 11367, 11368, 11369, 11371, 11372, 11373, 11374, 11375,
11377, 11378, 11379, 11385, 11411, 11412, 11413, 11414, 11415, 11416,
11417, 11418, 11419, 11420, 11421, 11422, 11423, 11426, 11427, 11428,
11429, 11430, 11432, 11433, 11434, 11435, 11436, 11530, 11691, 11692,
11693, 11694, 11695, 11697]
con = psycopg2.connect(host=host, database=database, user=user, password=
password)
cur = con.cursor()
|
import psycopg2
host = 'datavis.cauuh8vzeelb.us-east-1.rds.amazonaws.com'
database = 'top5'
user = 'teamwonder'
password = 'visproject'
Gentrifying = [10002, 10003, 10009, 10026, 10027, 10029, 10030, 10031,
10032, 10033, 10034, 10035, 10037, 10039, 10040, 10454, 10455, 10456,
10457, 10458, 10459, 10460, 10474, 11102, 11103, 11105, 11106, 11206,
11211, 11212, 11213, 11216, 11220, 11221, 11222, 11225, 11232, 11233,
11237, 11249, 11370]
Non_Gentrifying = [10451, 10452, 10453, 10463, 10468, 10472, 10473, 11204,
11208, 11214, 11223, 11224, 11239]
Higher_Income = [83, 7020, 7030, 7114, 10000, 10001, 10004, 10005, 10006,
10007, 10010, 10011, 10012, 10013, 10014, 10016, 10017, 10018, 10019,
10020, 10021, 10022, 10023, 10024, 10025, 10028, 10036, 10038, 10041,
10044, 10045, 10048, 10055, 10065, 10069, 10075, 10103, 10104, 10105,
10107, 10111, 10112, 10118, 10119, 10120, 10121, 10122, 10123, 10128,
10129, 10153, 10154, 10155, 10158, 10162, 10165, 10166, 10167, 10168,
10169, 10170, 10171, 10172, 10173, 10177, 10178, 10179, 10270, 10271,
10278, 10279, 10280, 10281, 10282, 10301, 10302, 10303, 10304, 10305,
10306, 10307, 10308, 10309, 10310, 10312, 10314, 10461, 10462, 10464,
10465, 10466, 10467, 10469, 10470, 10471, 10475, 10507, 10704, 10803,
11001, 11004, 11005, 11040, 11101, 11104, 11109, 11201, 11203, 11205,
11207, 11209, 11210, 11215, 11217, 11218, 11219, 11226, 11228, 11229,
11230, 11231, 11234, 11235, 11236, 11238, 11241, 11242, 11251, 11354,
11355, 11356, 11357, 11358, 11359, 11360, 11361, 11362, 11363, 11364,
11365, 11366, 11367, 11368, 11369, 11371, 11372, 11373, 11374, 11375,
11377, 11378, 11379, 11385, 11411, 11412, 11413, 11414, 11415, 11416,
11417, 11418, 11419, 11420, 11421, 11422, 11423, 11426, 11427, 11428,
11429, 11430, 11432, 11433, 11434, 11435, 11436, 11530, 11691, 11692,
11693, 11694, 11695, 11697]
con = psycopg2.connect(host=host, database=database, user=user, password=
password)
cur = con.cursor()
|
import psycopg2
host = "datavis.cauuh8vzeelb.us-east-1.rds.amazonaws.com"
database = "top5"
user = "teamwonder"
password = "visproject"
Gentrifying = [10002,10003,10009,10026,10027,10029,10030,10031,10032,10033,10034,10035,10037,10039,10040,10454,10455,10456,10457,10458,10459,10460,10474,11102,11103,11105,11106,11206,11211,11212,11213,11216,11220,11221,11222,11225,11232,11233,11237,11249,11370]
Non_Gentrifying = [10451,10452,10453,10463,10468,10472,10473,11204,11208,11214,11223,11224,11239]
Higher_Income = [83,7020,7030,7114,10000,10001,10004,10005,10006,10007,10010,10011,10012,10013,10014,10016,10017,10018,10019,10020,10021,10022,10023,10024,10025,10028,10036,10038,10041,10044,10045,10048,10055,10065,10069,10075,10103,10104,10105,10107,10111,10112,10118,10119,10120,10121,10122,10123,10128,10129,10153,10154,10155,10158,10162,10165,10166,10167,10168,10169,10170,10171,10172,10173,10177,10178,10179,10270,10271,10278,10279,10280,10281,10282,10301,10302,10303,10304,10305,10306,10307,10308,10309,10310,10312,10314,10461,10462,10464,10465,10466,10467,10469,10470,10471,10475,10507,10704,10803,11001,11004,11005,11040,11101,11104,11109,11201,11203,11205,11207,11209,11210,11215,11217,11218,11219,11226,11228,11229,11230,11231,11234,11235,11236,11238,11241,11242,11251,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365,11366,11367,11368,11369,11371,11372,11373,11374,11375,11377,11378,11379,11385,11411,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11426,11427,11428,11429,11430,11432,11433,11434,11435,11436,11530,11691,11692,11693,11694,11695,11697]
con = psycopg2.connect(host=host, database=database, user=user, password=password)
cur = con.cursor()
| null |
[
0,
1,
2,
3
] |
2,561 |
e8ba1ae98b247eaf90d83339e5fdc27287a70c73
|
<mask token>
|
<mask token>
class Container(containers.DeclarativeContainer):
<mask token>
<mask token>
<mask token>
|
<mask token>
class Container(containers.DeclarativeContainer):
config = providers.Configuration()
cache_repository = providers.Singleton(MemcachedRepository, host=config
.host, port=config.port)
fibonacci_service = providers.Factory(FibonacciService,
cache_repository=cache_repository)
|
from dependency_injector import containers, providers
from src.repositories import MemcachedRepository
from src.services import FibonacciService
class Container(containers.DeclarativeContainer):
config = providers.Configuration()
cache_repository = providers.Singleton(MemcachedRepository, host=config
.host, port=config.port)
fibonacci_service = providers.Factory(FibonacciService,
cache_repository=cache_repository)
|
from dependency_injector import containers, providers
from src.repositories import MemcachedRepository
from src.services import FibonacciService
class Container(containers.DeclarativeContainer):
config = providers.Configuration()
cache_repository = providers.Singleton(MemcachedRepository,
host=config.host,
port=config.port)
fibonacci_service = providers.Factory(
FibonacciService,
cache_repository=cache_repository,
)
|
[
0,
1,
2,
3,
4
] |
2,562 |
99c60befed32a9aa80b6e66b682d9f475e05a8d1
|
<mask token>
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)
)
for i in addr:
print('{0:x}'.format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'@\xe8\xe7\x85=\xed')):
print('device found')
elif event == _IRQ_SCAN_DONE:
print('scan complete')
pass
def onAdd(addBT):
memoryview(addBT)
def onDelete(delBT):
print('onDelete')
<mask token>
|
<mask token>
bt.active(True)
<mask token>
def byteToMac(addr):
m = memoryview(addr)
a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],
m[2], m[3], m[4], m[5])
return a
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)
)
for i in addr:
print('{0:x}'.format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'@\xe8\xe7\x85=\xed')):
print('device found')
elif event == _IRQ_SCAN_DONE:
print('scan complete')
pass
def onAdd(addBT):
memoryview(addBT)
def onDelete(delBT):
print('onDelete')
bt.irq(handler)
<mask token>
ap.active(True)
ap.config(essid='Test', password='1234', authmode=0)
<mask token>
while True:
webserver.webserver(s, onAdd, onDelete)
print('scanning soon')
if time.time() - lastscan > 10:
print('scanning now...')
bt.gap_scan(10000)
lastscan = time.time()
|
<mask token>
bt = BLE()
bt.active(True)
_IRQ_SCAN_RESULT = const(5)
_IRQ_SCAN_DONE = const(6)
def byteToMac(addr):
m = memoryview(addr)
a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],
m[2], m[3], m[4], m[5])
return a
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)
)
for i in addr:
print('{0:x}'.format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'@\xe8\xe7\x85=\xed')):
print('device found')
elif event == _IRQ_SCAN_DONE:
print('scan complete')
pass
def onAdd(addBT):
memoryview(addBT)
def onDelete(delBT):
print('onDelete')
bt.irq(handler)
ap = network.WLAN(network.AP_IF)
ap.active(True)
ap.config(essid='Test', password='1234', authmode=0)
s = webserver.webserverstart()
lastscan = 0
while True:
webserver.webserver(s, onAdd, onDelete)
print('scanning soon')
if time.time() - lastscan > 10:
print('scanning now...')
bt.gap_scan(10000)
lastscan = time.time()
|
import gc
import network
import lib.gate as gate
import time
from micropython import const
from ubluetooth import BLE
import lib.webserver as webserver
bt = BLE()
bt.active(True)
_IRQ_SCAN_RESULT = const(5)
_IRQ_SCAN_DONE = const(6)
def byteToMac(addr):
m = memoryview(addr)
a = '{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}:{:0>2X}'.format(m[0], m[1],
m[2], m[3], m[4], m[5])
return a
def handler(event, data):
if event == _IRQ_SCAN_RESULT:
addr_type, addr, adv_type, rssi, adv_data = data
print(addr_type, memoryview(addr), adv_type, rssi, memoryview(adv_data)
)
for i in addr:
print('{0:x}'.format(i))
print(byteToMac(addr))
if addr == memoryview(bytearray(b'@\xe8\xe7\x85=\xed')):
print('device found')
elif event == _IRQ_SCAN_DONE:
print('scan complete')
pass
def onAdd(addBT):
memoryview(addBT)
def onDelete(delBT):
print('onDelete')
bt.irq(handler)
ap = network.WLAN(network.AP_IF)
ap.active(True)
ap.config(essid='Test', password='1234', authmode=0)
s = webserver.webserverstart()
lastscan = 0
while True:
webserver.webserver(s, onAdd, onDelete)
print('scanning soon')
if time.time() - lastscan > 10:
print('scanning now...')
bt.gap_scan(10000)
lastscan = time.time()
|
import gc
import network
import lib.gate as gate
import time
from micropython import const
from ubluetooth import BLE
import lib.webserver as webserver
bt = BLE()
bt.active(True)
_IRQ_SCAN_RESULT = const(5)
_IRQ_SCAN_DONE = const(6)
def byteToMac(addr):
    """Format the first six bytes of *addr* as an uppercase MAC string.

    Each byte becomes two hex digits, colon separated, e.g.
    b'\\x40\\xe8...' -> '40:E8:...'. Raises IndexError for buffers
    shorter than six bytes, like the original indexing did.
    """
    octets = memoryview(addr)
    return ":".join("{:0>2X}".format(octets[i]) for i in range(6))
def handler(event, data):
    """BLE IRQ callback: dump each scan result and report the target device.

    Invoked by the BLE stack; the layout of *data* depends on *event*.
    """
    if event == _IRQ_SCAN_RESULT:
        # A single scan result.
        addr_type, addr, adv_type, rssi, adv_data = data
        print(addr_type,memoryview(addr) , adv_type, rssi,memoryview( adv_data))
        for i in addr:
            print("{0:x}".format(i))
        print(byteToMac(addr))
        # NOTE(review): comparing the address buffer to a memoryview —
        # confirm equality compares contents (not identity) on the target
        # MicroPython port before relying on this match.
        if addr == memoryview(bytearray(b'\x40\xe8\xe7\x85\x3d\xed')):
            print("device found")
    elif event == _IRQ_SCAN_DONE:
        # Scan duration finished or manually stopped.
        print("scan complete")
        pass
def onAdd(addBT):
    # Webserver callback for an "add device" action.
    # NOTE(review): the memoryview is created and immediately discarded —
    # this looks like a stub; nothing is stored here yet.
    memoryview(addBT)
def onDelete(delBT):
    # Webserver callback for a "delete device" action (stub).
    print("onDelete")
# Route BLE IRQ events to our handler.
bt.irq(handler)
# Bring up a Wi-Fi access point for the configuration web UI.
ap = network.WLAN(network.AP_IF)
ap.active(True)
# WARNING: authmode=0 is an OPEN network — the password is ignored and
# anyone in range can join. Use a WPA2 authmode for real deployments.
ap.config(essid="Test", password="1234",authmode= 0)
s = webserver.webserverstart()
lastscan = 0  # time of the last BLE scan kick-off (seconds)
# Main loop: service web requests, and start a 10 s BLE scan every ~10 s.
while True:
    webserver.webserver(s, onAdd, onDelete)
    print("scanning soon")
    if time.time() - lastscan > 10:
        print("scanning now...")
        bt.gap_scan(10000)
        lastscan = time.time()
|
[
3,
5,
6,
7,
8
] |
2,563 |
d932ab84848c9a8ca8bb23a57424b8f6190b6260
|
<mask token>
class LogPlugin(Plugin):
<mask token>
<mask token>
def search(self, message, query, *additional_queries):
chat_history = read_lines_from_file('chatlog.log')
chat_history.reverse()
found_line = None
for line in chat_history:
if query in line:
found_line = line
for additional_query in additional_queries:
if additional_query not in line:
found_line = None
break
if found_line:
break
if found_line:
self.send_message(message.From, line)
return
def replay(self, message, startTime, endTime=None):
start_time = None
end_time = None
try:
start_time = datetime.strptime(startTime, '%Y-%m-%d,%H:%M')
if endTime:
end_time = datetime.strptime(endTime, '%Y-%m-%d,%H:%M')
except Exception as e:
self.send_message(message.From,
'Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; '
+ str(e))
return
chat_history = read_lines_from_file('chatlog.log')
for line in chat_history:
line_tokens = line.split(' : ')
line_time = None
try:
line_time = datetime.strptime(line_tokens[0],
'%Y-%m-%d %H:%M:%S.%f')
except:
continue
delta = line_time - start_time
delta_seconds = (delta.microseconds + (delta.seconds + delta.
days * 24 * 3600) * 10 ** 6) / 10 ** 6
if line_time > start_time and (end_time and line_time < end_time
) or not end_time and abs(delta_seconds) < 10:
self.send_message(message.From, line)
time.sleep(1)
self.send_message(message.From, 'Done replay.')
|
<mask token>
class LogPlugin(Plugin):
def initialize(self):
self.add_trigger(on_message)
self.add_command('!chatsearch', self.search)
self.add_command('!chatreplay', self.replay)
<mask token>
def search(self, message, query, *additional_queries):
chat_history = read_lines_from_file('chatlog.log')
chat_history.reverse()
found_line = None
for line in chat_history:
if query in line:
found_line = line
for additional_query in additional_queries:
if additional_query not in line:
found_line = None
break
if found_line:
break
if found_line:
self.send_message(message.From, line)
return
def replay(self, message, startTime, endTime=None):
start_time = None
end_time = None
try:
start_time = datetime.strptime(startTime, '%Y-%m-%d,%H:%M')
if endTime:
end_time = datetime.strptime(endTime, '%Y-%m-%d,%H:%M')
except Exception as e:
self.send_message(message.From,
'Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; '
+ str(e))
return
chat_history = read_lines_from_file('chatlog.log')
for line in chat_history:
line_tokens = line.split(' : ')
line_time = None
try:
line_time = datetime.strptime(line_tokens[0],
'%Y-%m-%d %H:%M:%S.%f')
except:
continue
delta = line_time - start_time
delta_seconds = (delta.microseconds + (delta.seconds + delta.
days * 24 * 3600) * 10 ** 6) / 10 ** 6
if line_time > start_time and (end_time and line_time < end_time
) or not end_time and abs(delta_seconds) < 10:
self.send_message(message.From, line)
time.sleep(1)
self.send_message(message.From, 'Done replay.')
|
<mask token>
class LogPlugin(Plugin):
def initialize(self):
self.add_trigger(on_message)
self.add_command('!chatsearch', self.search)
self.add_command('!chatreplay', self.replay)
def run(self, message):
append_to_file(str(datetime.now()) + ' : ' + message.From + ' : ' +
message.Body + '\n', 'chatlog.log')
def search(self, message, query, *additional_queries):
chat_history = read_lines_from_file('chatlog.log')
chat_history.reverse()
found_line = None
for line in chat_history:
if query in line:
found_line = line
for additional_query in additional_queries:
if additional_query not in line:
found_line = None
break
if found_line:
break
if found_line:
self.send_message(message.From, line)
return
def replay(self, message, startTime, endTime=None):
start_time = None
end_time = None
try:
start_time = datetime.strptime(startTime, '%Y-%m-%d,%H:%M')
if endTime:
end_time = datetime.strptime(endTime, '%Y-%m-%d,%H:%M')
except Exception as e:
self.send_message(message.From,
'Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; '
+ str(e))
return
chat_history = read_lines_from_file('chatlog.log')
for line in chat_history:
line_tokens = line.split(' : ')
line_time = None
try:
line_time = datetime.strptime(line_tokens[0],
'%Y-%m-%d %H:%M:%S.%f')
except:
continue
delta = line_time - start_time
delta_seconds = (delta.microseconds + (delta.seconds + delta.
days * 24 * 3600) * 10 ** 6) / 10 ** 6
if line_time > start_time and (end_time and line_time < end_time
) or not end_time and abs(delta_seconds) < 10:
self.send_message(message.From, line)
time.sleep(1)
self.send_message(message.From, 'Done replay.')
|
from base_plugin import *
from plugin_utils import *
from datetime import datetime
import time
class LogPlugin(Plugin):
def initialize(self):
self.add_trigger(on_message)
self.add_command('!chatsearch', self.search)
self.add_command('!chatreplay', self.replay)
def run(self, message):
append_to_file(str(datetime.now()) + ' : ' + message.From + ' : ' +
message.Body + '\n', 'chatlog.log')
def search(self, message, query, *additional_queries):
chat_history = read_lines_from_file('chatlog.log')
chat_history.reverse()
found_line = None
for line in chat_history:
if query in line:
found_line = line
for additional_query in additional_queries:
if additional_query not in line:
found_line = None
break
if found_line:
break
if found_line:
self.send_message(message.From, line)
return
def replay(self, message, startTime, endTime=None):
start_time = None
end_time = None
try:
start_time = datetime.strptime(startTime, '%Y-%m-%d,%H:%M')
if endTime:
end_time = datetime.strptime(endTime, '%Y-%m-%d,%H:%M')
except Exception as e:
self.send_message(message.From,
'Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; '
+ str(e))
return
chat_history = read_lines_from_file('chatlog.log')
for line in chat_history:
line_tokens = line.split(' : ')
line_time = None
try:
line_time = datetime.strptime(line_tokens[0],
'%Y-%m-%d %H:%M:%S.%f')
except:
continue
delta = line_time - start_time
delta_seconds = (delta.microseconds + (delta.seconds + delta.
days * 24 * 3600) * 10 ** 6) / 10 ** 6
if line_time > start_time and (end_time and line_time < end_time
) or not end_time and abs(delta_seconds) < 10:
self.send_message(message.From, line)
time.sleep(1)
self.send_message(message.From, 'Done replay.')
|
from base_plugin import *
from plugin_utils import *
from datetime import datetime
import time
class LogPlugin(Plugin):
    """Chat logger: records every message and offers search/replay commands."""

    def initialize(self):
        # Log every incoming message, and register the two chat commands.
        self.add_trigger(on_message)
        self.add_command("!chatsearch", self.search)
        self.add_command("!chatreplay", self.replay)

    def run(self, message):
        """Append the incoming message to the chat log with a timestamp."""
        append_to_file(str(datetime.now()) + " : " + message.From + " : " + message.Body + '\n', "chatlog.log")

    def search(self, message, query, *additional_queries):
        """Send back the most recent log line containing every query term."""
        chat_history = read_lines_from_file("chatlog.log")
        chat_history.reverse()  # newest first
        found_line = None
        for line in chat_history:
            if query in line:
                found_line = line
                for additional_query in additional_queries:
                    if additional_query not in line:
                        found_line = None
                        break
                if found_line:
                    break
        if found_line:
            self.send_message(message.From, found_line)
        return

    def replay(self, message, startTime, endTime = None):
        """Replay log lines between startTime and endTime.

        Without endTime, lines within ~10 seconds of startTime are replayed.
        Timestamps use the format yyyy-mm-dd,hh:mm.
        """
        start_time = None
        end_time = None
        try:
            start_time = datetime.strptime(startTime, "%Y-%m-%d,%H:%M")
            if endTime:
                end_time = datetime.strptime(endTime, "%Y-%m-%d,%H:%M")
        except (ValueError, TypeError) as e:
            # Bad timestamp from the user: report the format and bail out.
            # (Was `except Exception`; narrowed to what strptime raises.)
            self.send_message(message.From, "Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; " + str(e))
            return
        chat_history = read_lines_from_file("chatlog.log")
        for line in chat_history:
            line_tokens = line.split(" : ")
            line_time = None
            try:
                line_time = datetime.strptime(line_tokens[0], "%Y-%m-%d %H:%M:%S.%f")
            except ValueError:
                # Line without a leading timestamp (e.g. wrapped text): skip.
                # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
                continue
            # 2.6 compatibility: manual total_seconds().
            delta = (line_time - start_time)
            delta_seconds = (delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6
            if ((line_time > start_time ) \
                and ( end_time and line_time < end_time )) \
                or (not end_time and abs(delta_seconds) < 10):
                self.send_message(message.From, line)
                time.sleep(1)  # throttle so the replay doesn't flood the chat
        self.send_message(message.From, "Done replay.")
|
[
3,
4,
5,
6,
7
] |
2,564 |
40b3cacf55f6c5056c3541d70d8b2c0e2cc7d01b
|
<mask token>
class NURBS:
<mask token>
<mask token>
def addKnot(self, knot: float) ->None:
self._knots.append(knot)
def pointCount(self) ->int:
return len(self._points)
<mask token>
def _N(self, i: int, n: int, u: float) ->float:
if n == 0:
if self._knots[i] <= u <= self._knots[i + 1]:
return 1
return 0
else:
Nin1u = self._N(i, n - 1, u)
Ni1n1u = self._N(i + 1, n - 1, u)
if Nin1u == 0.0:
a = 0.0
else:
a = self._F(i, n, u) * Nin1u
if Ni1n1u == 0.0:
b = 0.0
else:
b = self._G(i, n, u) * Ni1n1u
return a + b
<mask token>
def _G(self, i: int, n: int, u: float) ->float:
denom = self._knots[i + n + 1] - self._knots[i]
if denom == 0:
return 0.0
return (self._knots[i + n + 1] - u) / denom
|
<mask token>
class NURBS:
<mask token>
<mask token>
def addKnot(self, knot: float) ->None:
self._knots.append(knot)
def pointCount(self) ->int:
return len(self._points)
def calculate(self, segments: int) ->List[complex]:
while len(self._weights) < len(self._points):
self._weights.append(1.0)
ret = []
for n in range(0, segments):
u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (
segments - 1)
nku = []
for m in range(0, len(self._points)):
nku.append(self._weights[m] * self._N(m, self._degree, u))
point = complex(0, 0)
denom = sum(nku)
for m in range(0, len(self._points)):
if nku[m] != 0.0 and denom != 0.0:
r_iku = nku[m] / denom
if r_iku != 0.0:
point += self._points[m] * r_iku
ret.append(point)
return ret
def _N(self, i: int, n: int, u: float) ->float:
if n == 0:
if self._knots[i] <= u <= self._knots[i + 1]:
return 1
return 0
else:
Nin1u = self._N(i, n - 1, u)
Ni1n1u = self._N(i + 1, n - 1, u)
if Nin1u == 0.0:
a = 0.0
else:
a = self._F(i, n, u) * Nin1u
if Ni1n1u == 0.0:
b = 0.0
else:
b = self._G(i, n, u) * Ni1n1u
return a + b
def _F(self, i: int, n: int, u: float) ->float:
denom = self._knots[i + n] - self._knots[i]
if denom == 0.0:
return 0.0
return (u - self._knots[i]) / denom
def _G(self, i: int, n: int, u: float) ->float:
denom = self._knots[i + n + 1] - self._knots[i]
if denom == 0:
return 0.0
return (self._knots[i + n + 1] - u) / denom
|
<mask token>
class NURBS:
def __init__(self, degree: int) ->None:
self._degree = degree
self._points = []
self._weights = []
self._knots = []
<mask token>
def addKnot(self, knot: float) ->None:
self._knots.append(knot)
def pointCount(self) ->int:
return len(self._points)
def calculate(self, segments: int) ->List[complex]:
while len(self._weights) < len(self._points):
self._weights.append(1.0)
ret = []
for n in range(0, segments):
u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (
segments - 1)
nku = []
for m in range(0, len(self._points)):
nku.append(self._weights[m] * self._N(m, self._degree, u))
point = complex(0, 0)
denom = sum(nku)
for m in range(0, len(self._points)):
if nku[m] != 0.0 and denom != 0.0:
r_iku = nku[m] / denom
if r_iku != 0.0:
point += self._points[m] * r_iku
ret.append(point)
return ret
def _N(self, i: int, n: int, u: float) ->float:
if n == 0:
if self._knots[i] <= u <= self._knots[i + 1]:
return 1
return 0
else:
Nin1u = self._N(i, n - 1, u)
Ni1n1u = self._N(i + 1, n - 1, u)
if Nin1u == 0.0:
a = 0.0
else:
a = self._F(i, n, u) * Nin1u
if Ni1n1u == 0.0:
b = 0.0
else:
b = self._G(i, n, u) * Ni1n1u
return a + b
def _F(self, i: int, n: int, u: float) ->float:
denom = self._knots[i + n] - self._knots[i]
if denom == 0.0:
return 0.0
return (u - self._knots[i]) / denom
def _G(self, i: int, n: int, u: float) ->float:
denom = self._knots[i + n + 1] - self._knots[i]
if denom == 0:
return 0.0
return (self._knots[i + n + 1] - u) / denom
|
<mask token>
class NURBS:
def __init__(self, degree: int) ->None:
self._degree = degree
self._points = []
self._weights = []
self._knots = []
def addPoint(self, p: complex) ->None:
self._points.append(p)
def addKnot(self, knot: float) ->None:
self._knots.append(knot)
def pointCount(self) ->int:
return len(self._points)
def calculate(self, segments: int) ->List[complex]:
while len(self._weights) < len(self._points):
self._weights.append(1.0)
ret = []
for n in range(0, segments):
u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (
segments - 1)
nku = []
for m in range(0, len(self._points)):
nku.append(self._weights[m] * self._N(m, self._degree, u))
point = complex(0, 0)
denom = sum(nku)
for m in range(0, len(self._points)):
if nku[m] != 0.0 and denom != 0.0:
r_iku = nku[m] / denom
if r_iku != 0.0:
point += self._points[m] * r_iku
ret.append(point)
return ret
def _N(self, i: int, n: int, u: float) ->float:
if n == 0:
if self._knots[i] <= u <= self._knots[i + 1]:
return 1
return 0
else:
Nin1u = self._N(i, n - 1, u)
Ni1n1u = self._N(i + 1, n - 1, u)
if Nin1u == 0.0:
a = 0.0
else:
a = self._F(i, n, u) * Nin1u
if Ni1n1u == 0.0:
b = 0.0
else:
b = self._G(i, n, u) * Ni1n1u
return a + b
def _F(self, i: int, n: int, u: float) ->float:
denom = self._knots[i + n] - self._knots[i]
if denom == 0.0:
return 0.0
return (u - self._knots[i]) / denom
def _G(self, i: int, n: int, u: float) ->float:
denom = self._knots[i + n + 1] - self._knots[i]
if denom == 0:
return 0.0
return (self._knots[i + n + 1] - u) / denom
|
from typing import List
class NURBS:
    """Evaluator for a Non-Uniform Rational B-Spline curve in the complex plane.

    Control points are stored as complex numbers (x + y*j); weights default
    to 1.0 for every point that has no explicit weight.
    """

    def __init__(self, degree: int) -> None:
        self._degree = degree
        self._points = []  # type: List[complex]
        self._weights = []  # type: List[float]
        self._knots = []  # type: List[float]

    def addPoint(self, p: complex) -> None:
        """Append a control point."""
        self._points.append(p)

    def addKnot(self, knot: float) -> None:
        """Append a knot value (assumed non-decreasing; not validated here)."""
        self._knots.append(knot)

    def pointCount(self) -> int:
        """Return the number of control points added so far."""
        return len(self._points)

    def calculate(self, segments: int) -> List[complex]:
        """Sample the curve at *segments* evenly spaced parameter values.

        The parameter u runs from the first knot to the last knot inclusive.
        Missing weights are padded with 1.0 before evaluation.
        """
        while len(self._weights) < len(self._points):
            self._weights.append(1.0)
        ret = []
        for n in range(0, segments):
            # Map sample index n onto the full knot span [knots[0], knots[-1]].
            u = self._knots[0] + (self._knots[-1] - self._knots[0]) * n / (segments - 1)
            nku = []
            for m in range(0, len(self._points)):
                nku.append(self._weights[m] * self._N(m, self._degree, u))
            point = complex(0, 0)
            denom = sum(nku)
            for m in range(0, len(self._points)):
                if nku[m] != 0.0 and denom != 0.0:
                    r_iku = nku[m] / denom  # normalized rational basis weight
                    if r_iku != 0.0:
                        point += self._points[m] * r_iku
            ret.append(point)
        return ret

    def _N(self, i: int, n: int, u: float) -> float:
        """Cox-de Boor recursion for the B-spline basis function N_{i,n}(u)."""
        if n == 0:
            # Degree-0 basis: indicator of the knot span [k_i, k_{i+1}].
            if self._knots[i] <= u <= self._knots[i+1]:
                return 1
            return 0
        else:
            Nin1u = self._N(i, n - 1, u)
            Ni1n1u = self._N(i + 1, n - 1, u)
            # Skip a ramp term entirely when its basis factor is zero.
            if Nin1u == 0.0:
                a = 0.0
            else:
                a = self._F(i, n, u) * Nin1u
            if Ni1n1u == 0.0:
                b = 0.0
            else:
                b = self._G(i, n, u) * Ni1n1u
            return a + b

    def _F(self, i: int, n: int, u: float) -> float:
        # Left ramp (u - k_i) / (k_{i+n} - k_i); zero-width spans yield 0.
        denom = self._knots[i + n] - self._knots[i]
        if denom == 0.0:
            return 0.0
        return (u - self._knots[i]) / denom

    def _G(self, i: int, n: int, u: float) -> float:
        # Right ramp (k_{i+n+1} - u) over a span width.
        # NOTE(review): the textbook Cox-de Boor denominator here is
        # k_{i+n+1} - k_{i+1}, not k_{i+n+1} - k_i — confirm this offset is
        # intentional before relying on exact curve shapes.
        denom = self._knots[i + n + 1] - self._knots[i]
        if denom == 0:
            return 0.0
        return (self._knots[i + n + 1] - u) / denom
|
[
5,
7,
8,
9,
11
] |
2,565 |
426002bf900e23fd9b1d32c484350ac854228459
|
<mask token>
def test_translit_long():
assert codecs.encode(data, 'translit/long') == u'GBP :-( woof meaaw'
def test_translit_short():
assert codecs.encode(data, 'translit/short') == u'GBP :-( woof meaw'
<mask token>
def test_translit_long_ascii():
data.encode('translit/long/ascii') == b'GBP :-( woof meaaw'
def test_translit_short_ascii():
data.encode('translit/short/ascii') == b'GBP :-( woof meaw'
def test_translit_one_ascii():
try:
codecs.encode(data, 'translit/one/ascii')
assert False
except UnicodeEncodeError:
assert True
assert codecs.encode(data, 'translit/one/ascii', 'replace'
) == b'? ? woof meaw'
<mask token>
def test_zero_width_space():
try:
char = codecs.encode(u'\u200b', 'translit/long')
assert char == u''
except TypeError:
assert False
|
<mask token>
def test_translit_long():
assert codecs.encode(data, 'translit/long') == u'GBP :-( woof meaaw'
def test_translit_short():
assert codecs.encode(data, 'translit/short') == u'GBP :-( woof meaw'
<mask token>
def test_translit_long_ascii():
data.encode('translit/long/ascii') == b'GBP :-( woof meaaw'
def test_translit_short_ascii():
data.encode('translit/short/ascii') == b'GBP :-( woof meaw'
def test_translit_one_ascii():
try:
codecs.encode(data, 'translit/one/ascii')
assert False
except UnicodeEncodeError:
assert True
assert codecs.encode(data, 'translit/one/ascii', 'replace'
) == b'? ? woof meaw'
def test_ascii_level_characters_remain():
assert codecs.encode(u"'", 'translit/long') == u"'"
def test_zero_width_space():
try:
char = codecs.encode(u'\u200b', 'translit/long')
assert char == u''
except TypeError:
assert False
|
<mask token>
def test_translit_long():
assert codecs.encode(data, 'translit/long') == u'GBP :-( woof meaaw'
def test_translit_short():
assert codecs.encode(data, 'translit/short') == u'GBP :-( woof meaw'
def test_translit_one():
assert codecs.encode(data, 'translit/one') == u'£ ☹ woof meaw'
def test_translit_long_ascii():
data.encode('translit/long/ascii') == b'GBP :-( woof meaaw'
def test_translit_short_ascii():
data.encode('translit/short/ascii') == b'GBP :-( woof meaw'
def test_translit_one_ascii():
try:
codecs.encode(data, 'translit/one/ascii')
assert False
except UnicodeEncodeError:
assert True
assert codecs.encode(data, 'translit/one/ascii', 'replace'
) == b'? ? woof meaw'
def test_ascii_level_characters_remain():
assert codecs.encode(u"'", 'translit/long') == u"'"
def test_zero_width_space():
try:
char = codecs.encode(u'\u200b', 'translit/long')
assert char == u''
except TypeError:
assert False
|
<mask token>
def test_default():
assert codecs.encode(data, 'transliterate') == u'GBP :-( woof meaaw'
def test_translit_long():
assert codecs.encode(data, 'translit/long') == u'GBP :-( woof meaaw'
def test_translit_short():
assert codecs.encode(data, 'translit/short') == u'GBP :-( woof meaw'
def test_translit_one():
assert codecs.encode(data, 'translit/one') == u'£ ☹ woof meaw'
def test_translit_long_ascii():
data.encode('translit/long/ascii') == b'GBP :-( woof meaaw'
def test_translit_short_ascii():
data.encode('translit/short/ascii') == b'GBP :-( woof meaw'
def test_translit_one_ascii():
try:
codecs.encode(data, 'translit/one/ascii')
assert False
except UnicodeEncodeError:
assert True
assert codecs.encode(data, 'translit/one/ascii', 'replace'
) == b'? ? woof meaw'
def test_ascii_level_characters_remain():
assert codecs.encode(u"'", 'translit/long') == u"'"
def test_zero_width_space():
try:
char = codecs.encode(u'\u200b', 'translit/long')
assert char == u''
except TypeError:
assert False
|
# -*- coding: utf-8 -*-
"""Very basic codec tests.
:copyright: the translitcodec authors and developers, see AUTHORS.
:license: MIT, see LICENSE for more details.
"""
import codecs
import translitcodec
data = u'£ ☹ wøóf méåw'
def test_default():
assert codecs.encode(data, 'transliterate') == u'GBP :-( woof meaaw'
def test_translit_long():
assert codecs.encode(data, 'translit/long') == u'GBP :-( woof meaaw'
def test_translit_short():
assert codecs.encode(data, 'translit/short') == u'GBP :-( woof meaw'
def test_translit_one():
assert codecs.encode(data, 'translit/one') == u'\u00a3 \u2639 woof meaw'
def test_translit_long_ascii():
    """Long transliteration to ascii bytes.

    Bug fix: the comparison result was discarded (no `assert`), so this
    test could never fail.
    """
    assert data.encode('translit/long/ascii') == b'GBP :-( woof meaaw'
def test_translit_short_ascii():
    """Short transliteration to ascii bytes.

    Bug fix: the comparison result was discarded (no `assert`), so this
    test could never fail.
    """
    assert data.encode('translit/short/ascii') == b'GBP :-( woof meaw'
def test_translit_one_ascii():
try:
codecs.encode(data, 'translit/one/ascii')
assert False
except UnicodeEncodeError:
assert True
assert codecs.encode(data, 'translit/one/ascii', 'replace') == b'? ? woof meaw'
def test_ascii_level_characters_remain():
assert codecs.encode(u"'", 'translit/long') == u"'"
def test_zero_width_space():
try:
char = codecs.encode(u'\u200b', 'translit/long')
assert char == u''
except TypeError:
assert False
|
[
6,
7,
8,
9,
12
] |
2,566 |
6ff4aff5811d2bd7ad150d7e8f925308d120ef74
|
<mask token>
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True, index=True)
username = db.Column(db.String(64), unique=True, index=True)
password_hash = db.Column(db.String(128))
games = db.relationship('Game', secondary='game_players')
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
<mask token>
class Game(db.Model):
__tablename__ = 'games'
id = db.Column(db.Integer, primary_key=True)
admin_id = db.Column(db.Integer, db.ForeignKey('users.id'))
admin = db.relationship('User', backref='admin', lazy='joined')
name = db.Column(db.String(64))
description = db.Column(db.String(100))
date = db.Column(db.Date())
time = db.Column(db.Time())
field = db.Column(db.String(100), nullable=True)
players = db.relationship('User', secondary='game_players')
def __init__(self, **kwargs):
super(Game, self).__init__(**kwargs)
self.players.append(self.admin)
class GamePlayer(db.Model):
__tablename__ = 'game_players'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
game_id = db.Column(db.Integer, db.ForeignKey('games.id'))
user = db.relationship('User', backref='user', lazy='joined')
game = db.relationship('Game', backref='game', lazy='joined')
class RegistrationForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Length(1, 64),
Email()])
username = StringField('Username', validators=[DataRequired(), Length(1,
64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
'Usernames must have only letters, numbers, dots or underscores')])
name = StringField('Name', validators=[DataRequired(), Length(1, 64)])
password = PasswordField('Password', validators=[DataRequired()])
password2 = PasswordField('Confirm password', validators=[DataRequired(
), EqualTo('password', message='Passwords must match.')])
submit = SubmitField('Register')
def validate_email(self, field):
if User.query.filter_by(email=field.data.lower()).first():
raise ValidationError('Email alreadu registered')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Username already taken')
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Length(1, 64),
Email()])
password = PasswordField('Password', validators=[DataRequired()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Log In')
class NewGameForm(FlaskForm):
name = StringField('Name', validators=[DataRequired()])
description = TextAreaField('Description', validators=[DataRequired()])
date = DateField('Date', validators=[DataRequired()], format='%Y-%m-%d')
time = TimeField('Time', format='%H:%M')
field = StringField('Field', validators=[DataRequired()])
submit = SubmitField('Create New Game')
<mask token>
|
<mask token>
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True, index=True)
username = db.Column(db.String(64), unique=True, index=True)
password_hash = db.Column(db.String(128))
games = db.relationship('Game', secondary='game_players')
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
<mask token>
class Game(db.Model):
__tablename__ = 'games'
id = db.Column(db.Integer, primary_key=True)
admin_id = db.Column(db.Integer, db.ForeignKey('users.id'))
admin = db.relationship('User', backref='admin', lazy='joined')
name = db.Column(db.String(64))
description = db.Column(db.String(100))
date = db.Column(db.Date())
time = db.Column(db.Time())
field = db.Column(db.String(100), nullable=True)
players = db.relationship('User', secondary='game_players')
def __init__(self, **kwargs):
super(Game, self).__init__(**kwargs)
self.players.append(self.admin)
class GamePlayer(db.Model):
__tablename__ = 'game_players'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
game_id = db.Column(db.Integer, db.ForeignKey('games.id'))
user = db.relationship('User', backref='user', lazy='joined')
game = db.relationship('Game', backref='game', lazy='joined')
class RegistrationForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Length(1, 64),
Email()])
username = StringField('Username', validators=[DataRequired(), Length(1,
64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
'Usernames must have only letters, numbers, dots or underscores')])
name = StringField('Name', validators=[DataRequired(), Length(1, 64)])
password = PasswordField('Password', validators=[DataRequired()])
password2 = PasswordField('Confirm password', validators=[DataRequired(
), EqualTo('password', message='Passwords must match.')])
submit = SubmitField('Register')
def validate_email(self, field):
if User.query.filter_by(email=field.data.lower()).first():
raise ValidationError('Email alreadu registered')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Username already taken')
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Length(1, 64),
Email()])
password = PasswordField('Password', validators=[DataRequired()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Log In')
class NewGameForm(FlaskForm):
name = StringField('Name', validators=[DataRequired()])
description = TextAreaField('Description', validators=[DataRequired()])
date = DateField('Date', validators=[DataRequired()], format='%Y-%m-%d')
time = TimeField('Time', format='%H:%M')
field = StringField('Field', validators=[DataRequired()])
submit = SubmitField('Create New Game')
<mask token>
@app.route('/login', methods=['GET', 'POST'])
def user_login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data.lower()).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
next = request.args.get('next')
if next is None or not next.startswith('/'):
next = url_for('index')
return redirect(next)
flash('Invalid email or password')
return render_template('login.html', form=form)
<mask token>
|
<mask token>
class User(UserMixin, db.Model):
    """Application user; passwords are stored only as salted hashes."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    email = db.Column(db.String(64), unique=True, index=True)
    username = db.Column(db.String(64), unique=True, index=True)
    password_hash = db.Column(db.String(128))
    # Games this user participates in, via the game_players join table.
    games = db.relationship('Game', secondary='game_players')
    @property
    def password(self):
        """Write-only attribute: reading the plaintext password is forbidden."""
        raise AttributeError('password is not a readable attribute')
    @password.setter
    def password(self, password):
        # Persist only the hash; the plaintext is never stored.
        self.password_hash = generate_password_hash(password)
    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)
<mask token>
class Game(db.Model):
__tablename__ = 'games'
id = db.Column(db.Integer, primary_key=True)
admin_id = db.Column(db.Integer, db.ForeignKey('users.id'))
admin = db.relationship('User', backref='admin', lazy='joined')
name = db.Column(db.String(64))
description = db.Column(db.String(100))
date = db.Column(db.Date())
time = db.Column(db.Time())
field = db.Column(db.String(100), nullable=True)
players = db.relationship('User', secondary='game_players')
def __init__(self, **kwargs):
super(Game, self).__init__(**kwargs)
self.players.append(self.admin)
class GamePlayer(db.Model):
__tablename__ = 'game_players'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
game_id = db.Column(db.Integer, db.ForeignKey('games.id'))
user = db.relationship('User', backref='user', lazy='joined')
game = db.relationship('Game', backref='game', lazy='joined')
class RegistrationForm(FlaskForm):
    """Sign-up form with uniqueness checks for email and username."""
    email = StringField('Email', validators=[DataRequired(), Length(1, 64),
        Email()])
    username = StringField('Username', validators=[DataRequired(), Length(1,
        64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
        'Usernames must have only letters, numbers, dots or underscores')])
    name = StringField('Name', validators=[DataRequired(), Length(1, 64)])
    password = PasswordField('Password', validators=[DataRequired()])
    password2 = PasswordField('Confirm password', validators=[DataRequired(
        ), EqualTo('password', message='Passwords must match.')])
    submit = SubmitField('Register')

    def validate_email(self, field):
        # WTForms invokes validate_<fieldname> automatically on validation.
        if User.query.filter_by(email=field.data.lower()).first():
            # Fixed user-facing typo: was 'Email alreadu registered'.
            raise ValidationError('Email already registered')

    def validate_username(self, field):
        if User.query.filter_by(username=field.data).first():
            raise ValidationError('Username already taken')
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Length(1, 64),
Email()])
password = PasswordField('Password', validators=[DataRequired()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Log In')
class NewGameForm(FlaskForm):
name = StringField('Name', validators=[DataRequired()])
description = TextAreaField('Description', validators=[DataRequired()])
date = DateField('Date', validators=[DataRequired()], format='%Y-%m-%d')
time = TimeField('Time', format='%H:%M')
field = StringField('Field', validators=[DataRequired()])
submit = SubmitField('Create New Game')
<mask token>
@app.route('/')
@app.route('/games')
@login_required
def index():
game_player = GamePlayer.query.filter_by(user=current_user).all()
games = list(map(render_player_game, game_player))
return render_template('index.html', games=games)
<mask token>
@app.route('/games/<int:id>/quit')
@login_required
def game_quit(id):
    """Remove the current user from game *id*, then redirect to its detail page.

    Flashes a status message for every outcome (removed / not a player /
    game missing).
    """
    game = Game.query.get(id)
    if game:
        if current_user in game.players:
            game.players.remove(current_user)
            db.session.commit()
            # Fixed typos: category was 'sucess' (which breaks flash
            # styling keyed on 'success') and the message said 'succesfully'.
            flash('You were removed successfully', 'success')
        else:
            flash('You were not in this game', 'warning')
    else:
        flash("The game you're trying to quit does not exist", 'warning')
    return redirect(url_for('game_details', id=id))
<mask token>
@app.route('/login', methods=['GET', 'POST'])
def user_login():
    """Log a user in; on success redirect to the safe `next` target or index."""
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data.lower()).first()
        if user is not None and user.verify_password(form.password.data):
            login_user(user, form.remember_me.data)
            next = request.args.get('next')
            # Only follow relative redirect targets, preventing open redirects.
            if next is None or not next.startswith('/'):
                next = url_for('index')
            return redirect(next)
        flash('Invalid email or password')
    return render_template('login.html', form=form)
<mask token>
|
<mask token>
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True, index=True)
username = db.Column(db.String(64), unique=True, index=True)
password_hash = db.Column(db.String(128))
games = db.relationship('Game', secondary='game_players')
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class Game(db.Model):
__tablename__ = 'games'
id = db.Column(db.Integer, primary_key=True)
admin_id = db.Column(db.Integer, db.ForeignKey('users.id'))
admin = db.relationship('User', backref='admin', lazy='joined')
name = db.Column(db.String(64))
description = db.Column(db.String(100))
date = db.Column(db.Date())
time = db.Column(db.Time())
field = db.Column(db.String(100), nullable=True)
players = db.relationship('User', secondary='game_players')
def __init__(self, **kwargs):
super(Game, self).__init__(**kwargs)
self.players.append(self.admin)
class GamePlayer(db.Model):
__tablename__ = 'game_players'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
game_id = db.Column(db.Integer, db.ForeignKey('games.id'))
user = db.relationship('User', backref='user', lazy='joined')
game = db.relationship('Game', backref='game', lazy='joined')
class RegistrationForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Length(1, 64),
Email()])
username = StringField('Username', validators=[DataRequired(), Length(1,
64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
'Usernames must have only letters, numbers, dots or underscores')])
name = StringField('Name', validators=[DataRequired(), Length(1, 64)])
password = PasswordField('Password', validators=[DataRequired()])
password2 = PasswordField('Confirm password', validators=[DataRequired(
), EqualTo('password', message='Passwords must match.')])
submit = SubmitField('Register')
def validate_email(self, field):
if User.query.filter_by(email=field.data.lower()).first():
raise ValidationError('Email alreadu registered')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Username already taken')
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Length(1, 64),
Email()])
password = PasswordField('Password', validators=[DataRequired()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Log In')
class NewGameForm(FlaskForm):
name = StringField('Name', validators=[DataRequired()])
description = TextAreaField('Description', validators=[DataRequired()])
date = DateField('Date', validators=[DataRequired()], format='%Y-%m-%d')
time = TimeField('Time', format='%H:%M')
field = StringField('Field', validators=[DataRequired()])
submit = SubmitField('Create New Game')
def render_player_game(game_player):
game = game_player.game
game_rendered = {'date': game.date, 'time': game.time, 'name': game.
name, 'field': game.field, 'id': game.id}
return game_rendered
@app.route('/')
@app.route('/games')
@login_required
def index():
game_player = GamePlayer.query.filter_by(user=current_user).all()
games = list(map(render_player_game, game_player))
return render_template('index.html', games=games)
@app.route('/games/join')
@app.route('/games/<int:id>')
def game_details(id=None):
game = Game.query.get(id)
return render_template('game_details.html', game=game)
@app.route('/games/<int:id>/join')
@login_required
def game_join(id):
game = Game.query.get(id)
if game:
if current_user not in game.players:
game.players.append(current_user)
db.session.commit()
flash('You were added succesfully', 'sucess')
else:
flash('You already were in this game', 'warning')
else:
flash("The game you're trying to enter does not exist", 'warning')
return redirect(url_for('game_details', id=id))
@app.route('/games/<int:id>/quit')
@login_required
def game_quit(id):
game = Game.query.get(id)
if game:
if current_user in game.players:
game.players.remove(current_user)
db.session.commit()
flash('You were removed succesfully', 'sucess')
else:
flash('You were not in this game', 'warning')
else:
flash("The game you're trying to quit does not exist", 'warning')
return redirect(url_for('game_details', id=id))
@app.route('/games/new', methods=['GET', 'POST'])
@login_required
def new_game():
form = NewGameForm()
if form.validate_on_submit():
new_game = Game(admin=current_user, name=form.name.data,
description=form.description.data, date=form.date.data, time=
form.time.data, field=form.field.data)
db.session.add(new_game)
db.session.commit()
flash('Your game was created succesfully', 'success')
return redirect(url_for('game_details', id=new_game.id))
return render_template('new_game.html', form=form)
<mask token>
@app.route('/login', methods=['GET', 'POST'])
def user_login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data.lower()).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
next = request.args.get('next')
if next is None or not next.startswith('/'):
next = url_for('index')
return redirect(next)
flash('Invalid email or password')
return render_template('login.html', form=form)
@app.route('/logout')
@login_required
def user_logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('index'))
@app.route('/test')
def test():
return render_template('layout.html', title='Test')
|
# Imports
import os
from flask import Flask, redirect, render_template, url_for, request, flash
from flask_login import LoginManager, login_user, logout_user, login_required, current_user
# Import - Database
from flask_sqlalchemy import SQLAlchemy
# Import - Models
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
# Import - Forms
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError, DateField, TimeField, TextAreaField, IntegerField
from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo
# Config
basedir = os.path.abspath(os.path.dirname(__file__))
app = Flask(__name__)
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY')
app.config['SQLALCHEMY_DATABASE_URI'] =\
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Setup
db = SQLAlchemy(app)
login_manager = LoginManager(app)
login_manager.login_view = 'user_login'
# Models
class User(UserMixin, db.Model):
    """Application account; authenticates with a hashed password."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    # Both email and username are unique login identifiers.
    email = db.Column(db.String(64), unique=True, index=True)
    username = db.Column(db.String(64), unique=True, index=True)
    password_hash = db.Column(db.String(128))
    # Many-to-many: games this user plays in (via the game_players table).
    games = db.relationship('Game', secondary="game_players")
    @property
    def password(self):
        """Write-only attribute: reading the plaintext password is forbidden."""
        raise AttributeError('password is not a readable attribute')
    @password.setter
    def password(self, password):
        # Store only a salted hash, never the plaintext.
        self.password_hash = generate_password_hash(password)
    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: reload a user from the session-stored id."""
    return User.query.get(int(user_id))
class Game(db.Model):
    """A scheduled match; the creating user (admin) is auto-enrolled."""
    __tablename__ = 'games'
    id = db.Column(db.Integer, primary_key=True)
    admin_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    admin = db.relationship('User', backref='admin', lazy='joined')
    name = db.Column(db.String(64))
    description = db.Column(db.String(100))
    date = db.Column(db.Date())
    time = db.Column(db.Time())
    field = db.Column(db.String(100), nullable=True)
    # Many-to-many: everyone enrolled in this game.
    players = db.relationship('User', secondary="game_players")
    def __init__(self, **kwargs):
        # The admin always starts as the first enrolled player.
        super(Game, self).__init__(**kwargs)
        self.players.append(self.admin)
class GamePlayer(db.Model):
    """Association row linking one user to one game."""
    __tablename__ = 'game_players'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    game_id = db.Column(db.Integer, db.ForeignKey('games.id'))
    user = db.relationship('User', backref='user', lazy='joined')
    game = db.relationship('Game', backref='game', lazy='joined')
# Forms
# Forms - User
class RegistrationForm(FlaskForm):
    """Sign-up form; rejects emails and usernames that are already taken."""
    email = StringField('Email', validators=[
        DataRequired(), Length(1, 64), Email()])
    username = StringField('Username', validators=[
        DataRequired(), Length(1, 64),
        Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
               'Usernames must have only letters, numbers, dots or '
               'underscores')
    ])
    name = StringField('Name', validators=[DataRequired(), Length(1, 64)])
    password = PasswordField('Password', validators=[DataRequired()])
    password2 = PasswordField('Confirm password', validators=[
        DataRequired(), EqualTo('password', message='Passwords must match.')])
    submit = SubmitField('Register')
    def validate_email(self, field):
        # WTForms hook (validate_<field name>): runs automatically on submit.
        # NOTE(review): 'alreadu' typo in the user-facing message.
        if User.query.filter_by(email=field.data.lower()).first():
            raise ValidationError('Email alreadu registered')
    def validate_username(self, field):
        # WTForms hook: usernames are matched case-sensitively here.
        if User.query.filter_by(username=field.data).first():
            raise ValidationError('Username already taken')
class LoginForm(FlaskForm):
    """Email/password login form with an optional remember-me cookie."""
    email = StringField('Email', validators=[DataRequired(), Length(1, 64),
        Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Keep me logged in')
    submit = SubmitField('Log In')
# Forms - Games
class NewGameForm(FlaskForm):
    """Form for creating a new game (name, description, schedule, field)."""
    name = StringField('Name', validators=[DataRequired()])
    description = TextAreaField('Description', validators=[DataRequired()])
    date = DateField('Date', validators=[
        DataRequired()], format='%Y-%m-%d')
    time = TimeField('Time', format='%H:%M')
    field = StringField('Field', validators=[DataRequired()])
    submit = SubmitField('Create New Game')
# Utils functions
def render_player_game(game_player):
    """Flatten a GamePlayer row into the dict shape the games template expects."""
    game = game_player.game
    return {attr: getattr(game, attr)
            for attr in ('date', 'time', 'name', 'field', 'id')}
# Routes
# Routes - Games
@app.route('/')
@app.route('/games')
@login_required
def index():
    """List every game the logged-in user participates in."""
    # Show all the games the user is in and a button to join game and another to create one
    game_player = GamePlayer.query.filter_by(user=current_user).all()
    games = list(map(render_player_game, game_player))
    return render_template('index.html', games=games)
@app.route('/games/join')
@app.route('/games/<int:id>')
def game_details(id=None):
    """Show one game's details and its player list.

    NOTE(review): /games/join maps here with id=None, so Game.query.get(None)
    finds no game -- presumably the template tolerates game=None; confirm.
    """
    # Show the game details and players
    # The admin has the same page, but with extra commands to remove, add...
    game = Game.query.get(id)
    return render_template('game_details.html', game=game)
@app.route('/games/<int:id>/join')
@login_required
def game_join(id):
    """Enroll the current user in game *id*, then return to its detail page."""
    game = Game.query.get(id)
    if game:
        if current_user not in game.players:
            game.players.append(current_user)
            db.session.commit()
            # Fixed 'succesfully'/'sucess' typos: the category must be
            # 'success' to match the category used by new_game().
            flash('You were added successfully', 'success')
        else:
            flash('You already were in this game', 'warning')
    else:
        flash("The game you're trying to enter does not exist", 'warning')
    return redirect(url_for('game_details', id=id))
@app.route('/games/<int:id>/quit')
@login_required
def game_quit(id):
    """Remove the current user from game *id*, then return to its detail page."""
    game = Game.query.get(id)
    if game:
        if current_user in game.players:
            game.players.remove(current_user)
            db.session.commit()
            # Fixed 'succesfully'/'sucess' typos: the category must be
            # 'success' to match the category used by new_game().
            flash('You were removed successfully', 'success')
        else:
            flash('You were not in this game', 'warning')
    else:
        flash("The game you're trying to quit does not exist", 'warning')
    return redirect(url_for('game_details', id=id))
@app.route('/games/new', methods=['GET', 'POST'])
@login_required
def new_game():
    """Create a game owned by the current user via NewGameForm."""
    # A form to create a new game
    form = NewGameForm()
    if form.validate_on_submit():
        # NOTE(review): this local name shadows the view function itself.
        new_game = Game(
            admin=current_user,
            name=form.name.data,
            description=form.description.data,
            date=form.date.data,
            time=form.time.data,
            field=form.field.data,
        )
        db.session.add(new_game)
        db.session.commit()
        # NOTE(review): 'succesfully' typo in the user-facing message.
        flash('Your game was created succesfully', 'success')
        return redirect(url_for('game_details', id=new_game.id))
    return render_template('new_game.html', form=form)
# Routes - User
@app.route('/register', methods=['GET', 'POST'])
def user_register():
    """Create an account and log the new user straight in."""
    form = RegistrationForm()
    if form.validate_on_submit():
        # The User.password setter hashes the plaintext before storage.
        user = User(
            email=form.email.data.lower(),
            username=form.username.data,
            password=form.password.data,
            name=form.name.data
        )
        db.session.add(user)
        db.session.commit()
        login_user(user)
        return redirect(url_for('index'))
    return render_template('register.html', form=form)
@app.route('/login', methods=['GET', 'POST'])
def user_login():
    """Authenticate and redirect to the originally requested page."""
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data.lower()).first()
        if user is not None and user.verify_password(form.password.data):
            login_user(user, form.remember_me.data)
            next = request.args.get('next')
            # Only follow relative 'next' targets, guarding against open redirects.
            if next is None or not next.startswith('/'):
                next = url_for('index')
            return redirect(next)
        flash('Invalid email or password')
    return render_template('login.html', form=form)
@app.route('/logout')
@login_required
def user_logout():
    """End the session and bounce back to the game list (login page)."""
    logout_user()
    flash('You have been logged out.')
    return redirect(url_for('index'))
@app.route('/test')
def test():
    """Dev-only route: render the bare layout template."""
    return render_template('layout.html', title='Test')
|
[
18,
19,
21,
28,
32
] |
2,567 |
b1fe7e318c361930c8ad00758bcb86597fd8f3bd
|
<mask token>
|
<mask token>
while True:
answer = zfile.read(nothing).decode('utf-8')
comments += zfile.getinfo(nothing).comment.decode('utf-8')
print(answer)
findRet = re.findall(target, answer)
if len(findRet) == 0:
break
nothing = findRet[0] + '.txt'
print('answer is ...')
print(comments)
|
<mask token>
zfile = zipfile.ZipFile('./channel.zip')
<mask token>
comments = ''
nothing = '90052.txt'
target = '[0-9]+'
while True:
answer = zfile.read(nothing).decode('utf-8')
comments += zfile.getinfo(nothing).comment.decode('utf-8')
print(answer)
findRet = re.findall(target, answer)
if len(findRet) == 0:
break
nothing = findRet[0] + '.txt'
print('answer is ...')
print(comments)
|
<mask token>
import zipfile
import re
zfile = zipfile.ZipFile('./channel.zip')
<mask token>
comments = ''
nothing = '90052.txt'
target = '[0-9]+'
while True:
answer = zfile.read(nothing).decode('utf-8')
comments += zfile.getinfo(nothing).comment.decode('utf-8')
print(answer)
findRet = re.findall(target, answer)
if len(findRet) == 0:
break
nothing = findRet[0] + '.txt'
print('answer is ...')
print(comments)
|
'''''''''''''''''''''''''''''
> Filename: lv6.py
> Author: Kadrick, BoGwon Kang
> Created at: 2021/10/11 16:07
> Description: zip
'''''''''''''''''''''''''''''
import zipfile
import re
# open zipfile
# Open the puzzle archive once; every "nothing" file lives inside it.
zfile = zipfile.ZipFile('./channel.zip')
# check list
'''
print(zfile.namelist())
print(zfile.read("readme.txt"))
print(zfile.read("90052.txt"))
'''
# Walk the chain of <number>.txt members, gathering each member's
# per-file zip comment; the comments spell out the final answer.
collected = []
nothing = "90052.txt"
number = re.compile(r"[0-9]+")
while True:
    answer = zfile.read(nothing).decode('utf-8')
    collected.append(zfile.getinfo(nothing).comment.decode('utf-8'))
    print(answer)
    match = number.search(answer)
    if match is None:
        break
    nothing = match.group(0) + ".txt"
print("answer is ...")
print("".join(collected))
|
[
0,
1,
2,
3,
4
] |
2,568 |
0e1ea8c7fba90c1b5d18eaa399b91f237d4defee
|
<mask token>
|
<mask token>
while m != 0:
m = int(input())
if m > maximum:
maximum = m
count = 1
elif m == maximum:
count += 1
print(count)
|
count = 0
maximum = -1
m = -1
while m != 0:
m = int(input())
if m > maximum:
maximum = m
count = 1
elif m == maximum:
count += 1
print(count)
|
# Read integers from stdin until a 0 is entered; print how many times the
# overall maximum value appeared.
# NOTE(review): the terminating 0 is itself compared before the loop exits,
# so an input consisting of just "0" reports count 1.
count = 0
maximum = -1
m = -1
while m != 0:
    m = int(input())
    if m > maximum:
        maximum = m
        count = 1
    elif m == maximum:
        count += 1
print(count)
| null |
[
0,
1,
2,
3
] |
2,569 |
b3f376f4aec81cae853f996a74062e32bb4a8fa3
|
<mask token>
class s3Obj:
<mask token>
<mask token>
|
<mask token>
class s3Obj:
<mask token>
def getACL(self, client_s3):
"""
get ACL info and update the object
"""
try:
response = client_s3.get_object_acl(Bucket=self.bucket_name,
Key=self.name)
for permission in response['Grants']:
self.acl.append(permission['Permission'])
except botocore.exceptions.ClientError as e:
raise
|
<mask token>
class s3Obj:
def __init__(self, name, bucket_name, size, last_modified, storage_class):
self.name = name
self.size = size
self.last_modified = last_modified
self.storage_class = storage_class
self.bucket_name = bucket_name
self.acl = []
def getACL(self, client_s3):
"""
get ACL info and update the object
"""
try:
response = client_s3.get_object_acl(Bucket=self.bucket_name,
Key=self.name)
for permission in response['Grants']:
self.acl.append(permission['Permission'])
except botocore.exceptions.ClientError as e:
raise
|
import botocore
class s3Obj:
def __init__(self, name, bucket_name, size, last_modified, storage_class):
self.name = name
self.size = size
self.last_modified = last_modified
self.storage_class = storage_class
self.bucket_name = bucket_name
self.acl = []
def getACL(self, client_s3):
"""
get ACL info and update the object
"""
try:
response = client_s3.get_object_acl(Bucket=self.bucket_name,
Key=self.name)
for permission in response['Grants']:
self.acl.append(permission['Permission'])
except botocore.exceptions.ClientError as e:
raise
|
import botocore
class s3Obj:
    """Record describing one S3 object plus its ACL permission strings."""

    def __init__(self, name, bucket_name, size, last_modified, storage_class):
        # Basic listing metadata for the object.
        self.name = name
        self.size = size
        self.last_modified = last_modified
        self.storage_class = storage_class
        self.bucket_name = bucket_name
        # Filled in lazily by getACL().
        self.acl = []

    def getACL(self, client_s3):
        '''
        get ACL info and update the object
        '''
        try:
            acl_response = client_s3.get_object_acl(
                Bucket=self.bucket_name, Key=self.name)
            self.acl.extend(
                grant['Permission'] for grant in acl_response['Grants'])
        except botocore.exceptions.ClientError:
            raise
|
[
1,
2,
3,
4,
5
] |
2,570 |
8c6b7032c85354740d59aa91108ad8b5279e1d45
|
#! /usr/bin/env python
# Google Code Jam 2016 Qualification, "Counting Sheep" (Python 2 syntax).
# For each input N, take successive multiples of N until every digit 0-9
# has been seen; print the last multiple, or INSOMNIA when N is 0.
t = int(raw_input())
for i in xrange(1, t+1):
    N = raw_input()
    N1 = N
    track = set()  # digit characters seen so far
    if N == '0':
        # Every multiple of 0 is 0, so the digit set never grows.
        print "Case #%s: " % i + "INSOMNIA"
        continue
    count = 2
    while len(track) !=10:
        temp = set(x for x in N1)
        track = temp | track
        N1 = str(count*int(N))
        count +=1
    # The loop overshoots by one multiple, hence the N1 - N correction.
    print "Case #%s: %d" % (i, int(N1) - int(N))
[
0
] |
2,571 |
f8b04f374e1c55d4985be793939f0ff9393c29e0
|
<mask token>
class Solution:
<mask token>
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
boolB, respB = self.refined(nums, i + 1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
<mask token>
<mask token>
|
<mask token>
class Solution:
def isMoreCompetitive(self, a, b):
if len(a) != len(b):
print("Error, len()'s do not match'")
return 'Error'
for i in range(len(a)):
if a[i] == b[i]:
continue
elif a[i] < b[i]:
return True
else:
return False
return False
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
boolB, respB = self.refined(nums, i + 1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
<mask token>
<mask token>
|
<mask token>
class Solution:
def isMoreCompetitive(self, a, b):
if len(a) != len(b):
print("Error, len()'s do not match'")
return 'Error'
for i in range(len(a)):
if a[i] == b[i]:
continue
elif a[i] < b[i]:
return True
else:
return False
return False
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
boolB, respB = self.refined(nums, i + 1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
def mostCompetitive(self, nums, k):
if len(nums) < k:
print('length mismatch @ init')
return False
ans = list(nums[0:k])
tmp = list(nums[0:k])
i = k
while i < len(nums):
del tmp[0]
tmp.append(nums[i])
if self.isMoreCompetitive(tmp, ans):
ans = list(tmp)
i += 1
shouldContinue = True
idx = 0
foundAnswer, updateAns = self.refined(nums, 0, [], ans)
if foundAnswer == True:
return updateAns
return ans
<mask token>
|
<mask token>
class Solution:
def isMoreCompetitive(self, a, b):
if len(a) != len(b):
print("Error, len()'s do not match'")
return 'Error'
for i in range(len(a)):
if a[i] == b[i]:
continue
elif a[i] < b[i]:
return True
else:
return False
return False
def refined(self, nums, i, a, ans):
if i >= len(nums):
if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
return False, None
elif len(a) != len(ans):
return False, None
else:
return True, a
elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
boolB, respB = self.refined(nums, i + 1, a, ans)
if boolA == True and boolB == True:
if self.isMoreCompetitive(respA, respB):
return True, respA
else:
return True, respB
elif boolA == True:
return boolA, respA
elif boolB == True:
return True, respB
else:
return False, None
else:
return False, None
def mostCompetitive(self, nums, k):
if len(nums) < k:
print('length mismatch @ init')
return False
ans = list(nums[0:k])
tmp = list(nums[0:k])
i = k
while i < len(nums):
del tmp[0]
tmp.append(nums[i])
if self.isMoreCompetitive(tmp, ans):
ans = list(tmp)
i += 1
shouldContinue = True
idx = 0
foundAnswer, updateAns = self.refined(nums, 0, [], ans)
if foundAnswer == True:
return updateAns
return ans
if __name__ == '__main__':
s = Solution()
print(s.mostCompetitive([3, 5, 2, 6], 2))
print(s.mostCompetitive([2, 4, 3, 3, 5, 4, 9, 6], 4))
print(s.mostCompetitive([84, 10, 71, 23, 66, 61, 62, 64, 34, 41, 80, 25,
91, 43, 4, 75, 65, 13, 37, 41, 46, 90, 55, 8, 85, 61, 95, 71], 24))
print(s.mostCompetitive([2, 4, 3, 3, 5, 4, 9, 6], 4))
[11, 52, 57, 91, 47, 95, 86, 46, 87, 47, 70, 56, 54, 61, 89, 44, 3, 73,
1, 7, 87, 48, 17, 25, 49, 54, 6, 72, 97, 62, 16, 11, 47, 34, 68, 58,
14, 36, 46, 65, 2, 15]
18
|
'''
Given []int, most mostCompetitive subsequence is
a sublist of nums.
So we calculate a score, score is ∀ x ∈ nums, score += x_n - x_n-1
You can remove as many elements are you need to.
What is the mostCompetitive subsequence that you can come up with?
[1,3,5]
[1,3,4] ← More competitive
[1,2,5] ← More competitive
[1,3,4]
This is true b/c we evaluate on the first point where the two differ.
1) We care about creating lists that contain as small of numbers as
possible. The numbers don't need to be in order, they just need to be
small.
We care about all numbers, s.t. we can create a subsequence of k or more
behind them.
Get all possible sub-sequences, with length k or more. If more than k,
iterate through how we can remove the largest elements.
We should also keep track of the smallest number that corresponds to a valid
sequence?
I'm leaning towards a brute force method.
1) Find all sequences of length k. Store the most competitive.
So we should write a function that compares two sequences to see which is more
competitive.
Do one run, with subsequence == k.
Then try to beat that run.
Keep track of what the 'winning' subsequence is, and
iterate through possible values.
So two iterations.
[2,4,3,3,5,4,9,6] | k = 4
( )
ans = 2,4,3,3
[2,4,3,3,5,4,9,6] | k = 4
( )
2,4,3,3
^
idx = 0
Once we have 'beaten' it, out of the remaining
elements, remove the max element until length of
sublist is workable.
[2, 3, 3, ]
1) Write isMoreCompetitive
2) First pass → get most competitive with sliding window len = k
3) Second + pass. If we make a change/'win', re-run again. If re-run and
no change, we are done.
'''
'''
To Review:
def mostCompetitive(self, nums, k):
to_remove = len(nums) - k
stack = []
for x in nums:
while stack and x < stack[-1] and to_remove:
to_remove -= 1
stack.pop()
stack.append(x)
for _ in range(to_remove):
stack.pop()
return stack
'''
class Solution:
    """LeetCode 1673, "Find the Most Competitive Subsequence".

    A subsequence a is more competitive than b (equal length) when, at the
    first index where they differ, a has the smaller element.
    """

    # is a more competitive than b?
    def isMoreCompetitive(self, a, b):
        """Return True iff sequence a is strictly more competitive than b.

        Both sequences must have equal length; otherwise a message is
        printed and the string "Error" is returned (kept for backward
        compatibility with the original API).
        """
        if len(a) != len(b):
            print("Error, len()'s do not match'")
            return "Error"
        for i in range(len(a)):
            if a[i] == b[i]:
                continue
            elif a[i] < b[i]:
                return True
            else:
                return False
        return False

    def refined(self, nums, i, a, ans):
        """Exhaustive search retained for backward compatibility.

        Enumerates every subsequence of nums (continuing from index i with
        prefix a) of length len(ans) and returns (True, best) when one beats
        ans, else (False, None).  Exponential time -- mostCompetitive() no
        longer relies on it.
        """
        if i >= len(nums):
            if len(a) == len(ans) and self.isMoreCompetitive(a, ans) == False:
                return False, None
            elif len(a) != len(ans):
                return False, None
            else:
                return True, a
        elif i < len(nums) and len(ans) - len(a) <= len(nums) - i:
            boolA, respA = self.refined(nums, i + 1, a + [nums[i]], ans)
            boolB, respB = self.refined(nums, i + 1, a, ans)
            if boolA == True and boolB == True:
                if self.isMoreCompetitive(respA, respB):
                    return True, respA
                else:
                    return True, respB
            elif boolA == True:
                return boolA, respA
            elif boolB == True:
                return True, respB
            else:
                return False, None
        else:
            return False, None

    def mostCompetitive(self, nums, k):
        """Return the most competitive length-k subsequence of nums.

        Monotonic-stack algorithm: O(n) time, O(k) extra space, replacing
        the original exponential sliding-window + brute-force search.
        While a smaller element arrives and removals remain (at most
        len(nums) - k discards in total), pop larger stack tops.
        """
        if len(nums) < k:
            print("length mismatch @ init")
            return False
        to_remove = len(nums) - k
        stack = []
        for x in nums:
            while stack and to_remove and x < stack[-1]:
                stack.pop()
                to_remove -= 1
            stack.append(x)
        # A non-decreasing tail can leave unused removals; trim to length k.
        return stack[:k]
if __name__ == '__main__':
s = Solution()
print(s.mostCompetitive([3,5,2,6], 2))
print(s.mostCompetitive([2,4,3,3,5,4,9,6], 4))
print(s.mostCompetitive([84,10,71,23,66,61,62,64,34,41,80,25,91,43,4,75,65,13,37,41,46,90,55,8,85,61,95,71], 24))
print(s.mostCompetitive([2,4,3,3,5,4,9,6], 4))
[11,52,57,91,47,95,86,46,87,47,70,56,54,61,89,44,3,73,1,7,87,48,17,25,49,54,6,72,97,62,16,11,47,34,68,58,14,36,46,65,2,15]
18
|
[
2,
3,
4,
5,
6
] |
2,572 |
4411c81351ac76d72512faaa6b498cd577815691
|
<mask token>
|
def factorial(n):
"""returns n!"""
return 1 if n < 2 else n * factorial(n - 1)
<mask token>
|
def factorial(n):
"""returns n!"""
return 1 if n < 2 else n * factorial(n - 1)
<mask token>
print(list(map(fact, range(6))))
print([fact(n) for n in range(6)])
print(list(map(factorial, filter(lambda n: n % 2, range(6)))))
print([factorial(n) for n in range(6) if n % 2])
|
def factorial(n):
"""returns n!"""
return 1 if n < 2 else n * factorial(n - 1)
fact = factorial
print(list(map(fact, range(6))))
print([fact(n) for n in range(6)])
print(list(map(factorial, filter(lambda n: n % 2, range(6)))))
print([factorial(n) for n in range(6) if n % 2])
|
# Treating functions as first-class objects
def factorial(n):
    """Return n! (the factorial of n)."""
    result = 1
    for factor in range(2, n + 1):
        result *= factor
    return result
fact = factorial
print(list(map(fact, range(6))))  # Build the list of factorials 0! .. 5!.
print([fact(n) for n in range(6)])  # Same result via a list comprehension.
# filter() keeps only the elements for which the predicate is truthy; here
# the lambda keeps odd n, so both lines below compute the factorials of the
# odd numbers under 6 -- the comprehension avoids map/filter/lambda entirely.
print(list(map(factorial, filter(lambda n: n % 2, range(6)))))
print([factorial(n) for n in range(6) if n % 2])
|
[
0,
1,
2,
3,
4
] |
2,573 |
3ab1de77147f6abfabeea10f2a4e85686edffd6f
|
<mask token>
|
<mask token>
class Migration(migrations.Migration):
<mask token>
<mask token>
|
<mask token>
class Migration(migrations.Migration):
dependencies = [('autotasks', '0017_auto_20210210_1512')]
operations = [migrations.AddField(model_name='automatedtask', name=
'run_asap_after_missed', field=models.BooleanField(default=False))]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('autotasks', '0017_auto_20210210_1512')]
operations = [migrations.AddField(model_name='automatedtask', name=
'run_asap_after_missed', field=models.BooleanField(default=False))]
|
# Generated by Django 3.1.7 on 2021-02-24 05:37
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add AutomatedTask.run_asap_after_missed (BooleanField, default False)."""
    dependencies = [
        ('autotasks', '0017_auto_20210210_1512'),
    ]
    operations = [
        migrations.AddField(
            model_name='automatedtask',
            name='run_asap_after_missed',
            # NOTE(review): presumably makes a missed scheduled task run as
            # soon as possible afterwards -- confirm against scheduler code.
            field=models.BooleanField(default=False),
        ),
    ]
|
[
0,
1,
2,
3,
4
] |
2,574 |
d1ee33ce6fb071aae800b0597a09e7039a209ec8
|
<mask token>
|
<mask token>
def reverse(string):
if len(string) == 0:
return
temp = string[0]
reverse(string[1:])
print(temp, end='')
<mask token>
|
<mask token>
def reverse(string):
if len(string) == 0:
return
temp = string[0]
reverse(string[1:])
print(temp, end='')
<mask token>
reverse(string)
|
<mask token>
def reverse(string):
if len(string) == 0:
return
temp = string[0]
reverse(string[1:])
print(temp, end='')
string = input()
reverse(string)
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 24 20:59:36 2021
@author: Abeg
"""
#factorial using recursion
"""def factorial(n):
if n==0 or n==1:
return 1
elif n==2:
return n
else:
return n*factorial(n-1)
n=int(input("enter the no"))
print(factorial(n))"""
#fibonancci using recursion
"""def fiborecursively(n):
if n<=1:
return n
else:
return(fiborecursively(n-1) + fiborecursively(n-2))
for i in range(0,10):
print(fiborecursively(i))"""
#reverse a string with recursion
def reverse(string):
    """Recursively print *string* back to front, one character per call."""
    if not string:
        return
    first = string[0]
    reverse(string[1:])
    print(first, end="")
string=(input())
reverse(string)
|
[
0,
1,
2,
3,
4
] |
2,575 |
7277b045f85d58383f26ab0d3299feb166f45e36
|
<mask token>
|
while True:
print('Light Levels:' + input.light_level())
if input.light_level() < 6:
light.set_all(light.rgb(255, 0, 255))
elif input.light_level() < 13:
light.set_all(light.rgb(255, 0, 0))
else:
light.clear()
|
# Poll the board's light sensor forever and color the pixels by brightness.
while True:
    # str() is required: light_level() yields a number and concatenating
    # str + int raises TypeError at runtime.
    print("Light Levels:" + str(input.light_level()))
    if input.light_level() < 6:
        # Dark: magenta.
        light.set_all(light.rgb(255, 0, 255))
    elif input.light_level() < 13:
        # Dim: red.
        light.set_all(light.rgb(255, 0, 0))
    else:
        # Bright: pixels off.
        light.clear()
| null | null |
[
0,
1,
2
] |
2,576 |
ee0f90b84df73ae5783ca0b8a52fe6fe9c979f15
|
<mask token>
@app.route('/unlock')
def web_unlock():
if not (request.args.get('token') and request.args.get('state')):
return 'Error'
else:
with shelve.open('Settings.conf') as settings:
if 'token' in settings:
token = settings['token']
else:
return 'System not setup !'
if request.args.get('token') != token:
return 'Invalid Token'
if request.args.get('state') == 'open':
lock.unlock()
elif request.args.get('state') == 'close':
lock.lock()
elif request.args.get('state') == 'switch':
lock.switch()
else:
return 'Invalid State'
return 'Done'
@app.route('/state')
def web_state():
return str(lock.state())
<mask token>
|
<mask token>
@app.route('/unlock')
def web_unlock():
if not (request.args.get('token') and request.args.get('state')):
return 'Error'
else:
with shelve.open('Settings.conf') as settings:
if 'token' in settings:
token = settings['token']
else:
return 'System not setup !'
if request.args.get('token') != token:
return 'Invalid Token'
if request.args.get('state') == 'open':
lock.unlock()
elif request.args.get('state') == 'close':
lock.lock()
elif request.args.get('state') == 'switch':
lock.switch()
else:
return 'Invalid State'
return 'Done'
@app.route('/state')
def web_state():
return str(lock.state())
if __name__ == '__main__':
app.run(debug=True, port=5000, host='0.0.0.0')
|
<mask token>
app = Flask(__name__)
@app.route('/unlock')
def web_unlock():
if not (request.args.get('token') and request.args.get('state')):
return 'Error'
else:
with shelve.open('Settings.conf') as settings:
if 'token' in settings:
token = settings['token']
else:
return 'System not setup !'
if request.args.get('token') != token:
return 'Invalid Token'
if request.args.get('state') == 'open':
lock.unlock()
elif request.args.get('state') == 'close':
lock.lock()
elif request.args.get('state') == 'switch':
lock.switch()
else:
return 'Invalid State'
return 'Done'
@app.route('/state')
def web_state():
return str(lock.state())
if __name__ == '__main__':
app.run(debug=True, port=5000, host='0.0.0.0')
|
from flask import request, Flask
import lock, shelve
app = Flask(__name__)
@app.route('/unlock')
def web_unlock():
if not (request.args.get('token') and request.args.get('state')):
return 'Error'
else:
with shelve.open('Settings.conf') as settings:
if 'token' in settings:
token = settings['token']
else:
return 'System not setup !'
if request.args.get('token') != token:
return 'Invalid Token'
if request.args.get('state') == 'open':
lock.unlock()
elif request.args.get('state') == 'close':
lock.lock()
elif request.args.get('state') == 'switch':
lock.switch()
else:
return 'Invalid State'
return 'Done'
@app.route('/state')
def web_state():
return str(lock.state())
if __name__ == '__main__':
app.run(debug=True, port=5000, host='0.0.0.0')
|
from flask import request, Flask
import lock, shelve
app = Flask(__name__)
@app.route("/unlock")
def web_unlock():
if not (request.args.get("token") and request.args.get("state")):
return "Error"
else:
with shelve.open("Settings.conf") as settings:
if "token" in settings:
token = settings["token"]
else:
return "System not setup !"
if request.args.get("token") != token:
return "Invalid Token"
if request.args.get("state") == "open":
lock.unlock()
elif request.args.get("state") == "close":
lock.lock()
elif request.args.get("state") == "switch":
lock.switch()
else:
return "Invalid State"
return "Done"
@app.route("/state")
def web_state():
return str(lock.state())
if __name__ == "__main__":
app.run(debug=True, port=5000, host="0.0.0.0")
|
[
2,
3,
4,
5,
6
] |
2,577 |
11dfb09286b8a5742550b5300c776ed82e69ead5
|
<mask token>
@app.route('/')
def addRegion():
return render_template('Website WordCount.html')
<mask token>
|
<mask token>
@app.route('/')
def addRegion():
return render_template('Website WordCount.html')
@app.route('/output_data', methods=['POST', 'GET'])
def output_data():
unique_links = []
link_len = {}
out_arr = []
if request.method == 'POST':
url = request.form['url']
main = re.sub('([\\w:///.]+com|info|in|org)([\\w///?/=/&/_-]*)',
'\\1', url, 0, re.MULTILINE | re.UNICODE | re.IGNORECASE)
req = Request(main, headers={'User-Agent':
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Ubuntu/11.04 Chromium/12.0.742.112 Chrome/12.0.742.112 Safari/534.30'
})
sample = urlopen(req)
soap = bs(sample, 'lxml')
for data in soap.find_all('a', href=True):
links = data['href']
links = links if links.startswith(main) else str(main) + str(links
) if links.startswith('/') else str(main) + '/' + str(links)
if links in unique_links:
continue
unique_links.append(links)
req = Request(links, headers={'User-Agent':
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Ubuntu/11.04 Chromium/12.0.742.112 Chrome/12.0.742.112 Safari/534.30'
})
sample1 = urlopen(req)
soap1 = bs(sample1, 'lxml')
[x.extract() for x in soap1.findAll(['script', 'style'])]
data = soap1.text
stri = re.sub('[.,/!"@:+*&^%~#=-_]', '', data)
stri = stri.split()
num_word = len(stri)
if num_word < 5:
continue
link_len['link'] = links
link_len['wordCount'] = num_word
out_arr.append(link_len)
print(out_arr)
return out_arr
if __name__ == '__main__':
app.run(debug=True, host='192.168.43.164')
|
<mask token>
app = Flask(__name__)
@app.route('/')
def addRegion():
return render_template('Website WordCount.html')
@app.route('/output_data', methods=['POST', 'GET'])
def output_data():
unique_links = []
link_len = {}
out_arr = []
if request.method == 'POST':
url = request.form['url']
main = re.sub('([\\w:///.]+com|info|in|org)([\\w///?/=/&/_-]*)',
'\\1', url, 0, re.MULTILINE | re.UNICODE | re.IGNORECASE)
req = Request(main, headers={'User-Agent':
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Ubuntu/11.04 Chromium/12.0.742.112 Chrome/12.0.742.112 Safari/534.30'
})
sample = urlopen(req)
soap = bs(sample, 'lxml')
for data in soap.find_all('a', href=True):
links = data['href']
links = links if links.startswith(main) else str(main) + str(links
) if links.startswith('/') else str(main) + '/' + str(links)
if links in unique_links:
continue
unique_links.append(links)
req = Request(links, headers={'User-Agent':
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Ubuntu/11.04 Chromium/12.0.742.112 Chrome/12.0.742.112 Safari/534.30'
})
sample1 = urlopen(req)
soap1 = bs(sample1, 'lxml')
[x.extract() for x in soap1.findAll(['script', 'style'])]
data = soap1.text
stri = re.sub('[.,/!"@:+*&^%~#=-_]', '', data)
stri = stri.split()
num_word = len(stri)
if num_word < 5:
continue
link_len['link'] = links
link_len['wordCount'] = num_word
out_arr.append(link_len)
print(out_arr)
return out_arr
if __name__ == '__main__':
app.run(debug=True, host='192.168.43.164')
|
from flask import request, Flask, render_template
from bs4 import BeautifulSoup as bs
from urllib.request import Request, urlopen
import re
app = Flask(__name__)
@app.route('/')
def addRegion():
return render_template('Website WordCount.html')
@app.route('/output_data', methods=['POST', 'GET'])
def output_data():
unique_links = []
link_len = {}
out_arr = []
if request.method == 'POST':
url = request.form['url']
main = re.sub('([\\w:///.]+com|info|in|org)([\\w///?/=/&/_-]*)',
'\\1', url, 0, re.MULTILINE | re.UNICODE | re.IGNORECASE)
req = Request(main, headers={'User-Agent':
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Ubuntu/11.04 Chromium/12.0.742.112 Chrome/12.0.742.112 Safari/534.30'
})
sample = urlopen(req)
soap = bs(sample, 'lxml')
for data in soap.find_all('a', href=True):
links = data['href']
links = links if links.startswith(main) else str(main) + str(links
) if links.startswith('/') else str(main) + '/' + str(links)
if links in unique_links:
continue
unique_links.append(links)
req = Request(links, headers={'User-Agent':
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Ubuntu/11.04 Chromium/12.0.742.112 Chrome/12.0.742.112 Safari/534.30'
})
sample1 = urlopen(req)
soap1 = bs(sample1, 'lxml')
[x.extract() for x in soap1.findAll(['script', 'style'])]
data = soap1.text
stri = re.sub('[.,/!"@:+*&^%~#=-_]', '', data)
stri = stri.split()
num_word = len(stri)
if num_word < 5:
continue
link_len['link'] = links
link_len['wordCount'] = num_word
out_arr.append(link_len)
print(out_arr)
return out_arr
if __name__ == '__main__':
app.run(debug=True, host='192.168.43.164')
|
from flask import request,Flask, render_template
from bs4 import BeautifulSoup as bs
from urllib.request import Request,urlopen
import re
app = Flask(__name__)
@app.route('/')
def addRegion():
return render_template('Website WordCount.html')
@app.route('/output_data', methods=['POST','GET'])
def output_data():
unique_links=[]
link_len={}
out_arr=[]
if request.method == 'POST':
url = request.form['url']
main = re.sub(r"([\w:///.]+com|info|in|org)([\w///?/=/&/_-]*)",r"\1",url,0, re.MULTILINE | re.UNICODE | re.IGNORECASE)
req =Request(main, headers={'User-Agent' : "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Ubuntu/11.04 Chromium/12.0.742.112 Chrome/12.0.742.112 Safari/534.30"})
sample=urlopen(req)
soap=bs(sample,"lxml")
for data in soap.find_all('a', href=True):
links=data['href']
links=links if links.startswith(main) else (str(main)+str(links) if links.startswith( '/' ) else str(main)+"/"+str(links))
if(links in unique_links):
continue
unique_links.append(links)
req =Request(links, headers={'User-Agent' : "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.30 (KHTML, like Gecko) Ubuntu/11.04 Chromium/12.0.742.112 Chrome/12.0.742.112 Safari/534.30"})
sample1=urlopen(req)
soap1=bs(sample1,"lxml")
[x.extract() for x in soap1.findAll(['script', 'style'])]
data=soap1.text
stri=re.sub('[.,/!"@:+*&^%~#=-_]','',data)
stri=stri.split()
num_word=len(stri)
if(num_word<5):
continue
link_len['link']=links
link_len['wordCount']=num_word
out_arr.append(link_len)
print(out_arr)
return(out_arr)
if __name__ == '__main__':
app.run(debug = True,host='192.168.43.164')
|
[
1,
3,
4,
5,
6
] |
2,578 |
74f3b4001a0520a25a314ff537719b679ba0fca4
|
<mask token>
class DecompFactors(object):
<mask token>
def __init__(self, control, params, state, fluxes, met_data):
"""
Parameters
----------
control : integers, structure
model control flags
params: floats, structure
model parameters
state: floats, structure
model state
fluxes : floats, structure
model fluxes
met_data : floats, dictionary
meteorological forcing data
"""
self.params = params
self.fluxes = fluxes
self.control = control
self.state = state
self.met_data = met_data
self.wb = WaterBalance(self.control, self.params, self.state, self.
fluxes, self.met_data)
<mask token>
def soil_temp_factor(self, project_day):
"""Soil-temperature activity factor (A9).
Parameters:
-----------
project_day : int
current simulation day (index)
Returns:
--------
tfac : float
soil temperature factor [degC]
"""
tsoil = self.met_data['tsoil'][project_day]
if float_gt(tsoil, 0.0):
tfac = 0.0326 + 0.00351 * tsoil ** 1.652 - (tsoil / 41.748) ** 7.19
if float_lt(tfac, 0.0):
tfac = 0.0
else:
tfac = 0.0
return tfac
|
<mask token>
class DecompFactors(object):
""" Calculate C and N litter production rates """
def __init__(self, control, params, state, fluxes, met_data):
"""
Parameters
----------
control : integers, structure
model control flags
params: floats, structure
model parameters
state: floats, structure
model state
fluxes : floats, structure
model fluxes
met_data : floats, dictionary
meteorological forcing data
"""
self.params = params
self.fluxes = fluxes
self.control = control
self.state = state
self.met_data = met_data
self.wb = WaterBalance(self.control, self.params, self.state, self.
fluxes, self.met_data)
def decay_rates(self, project_day):
""" Model decay rates - temperature dependency (i.e. increase with temp)
[See section A8 in Comins and McMurtrie 1993].
Parameters:
-----------
project_day : int
current simulation day (index)
"""
tempact = self.soil_temp_factor(project_day)
wtfac = self.wb.calculate_soil_water_fac(topsoil=True)
self.params.decayrate[0] = self.params.kdec1 * math.exp(-3.0 * self
.params.ligshoot) * tempact * wtfac
self.params.decayrate[1] = self.params.kdec2 * tempact * wtfac
self.params.decayrate[2] = self.params.kdec3 * math.exp(-3.0 * self
.params.ligroot) * tempact * wtfac
self.params.decayrate[3] = self.params.kdec4 * tempact * wtfac
self.params.decayrate[4] = self.params.kdec5 * (1.0 - 0.75 * self.
params.finesoil) * tempact * wtfac
self.params.decayrate[5] = self.params.kdec6 * tempact * wtfac
self.params.decayrate[6] = self.params.kdec7 * tempact * wtfac
def soil_temp_factor(self, project_day):
"""Soil-temperature activity factor (A9).
Parameters:
-----------
project_day : int
current simulation day (index)
Returns:
--------
tfac : float
soil temperature factor [degC]
"""
tsoil = self.met_data['tsoil'][project_day]
if float_gt(tsoil, 0.0):
tfac = 0.0326 + 0.00351 * tsoil ** 1.652 - (tsoil / 41.748) ** 7.19
if float_lt(tfac, 0.0):
tfac = 0.0
else:
tfac = 0.0
return tfac
|
<mask token>
__author__ = 'Martin De Kauwe'
__version__ = '1.0 (25.02.2011)'
__email__ = '[email protected]'
class DecompFactors(object):
""" Calculate C and N litter production rates """
def __init__(self, control, params, state, fluxes, met_data):
"""
Parameters
----------
control : integers, structure
model control flags
params: floats, structure
model parameters
state: floats, structure
model state
fluxes : floats, structure
model fluxes
met_data : floats, dictionary
meteorological forcing data
"""
self.params = params
self.fluxes = fluxes
self.control = control
self.state = state
self.met_data = met_data
self.wb = WaterBalance(self.control, self.params, self.state, self.
fluxes, self.met_data)
def decay_rates(self, project_day):
""" Model decay rates - temperature dependency (i.e. increase with temp)
[See section A8 in Comins and McMurtrie 1993].
Parameters:
-----------
project_day : int
current simulation day (index)
"""
tempact = self.soil_temp_factor(project_day)
wtfac = self.wb.calculate_soil_water_fac(topsoil=True)
self.params.decayrate[0] = self.params.kdec1 * math.exp(-3.0 * self
.params.ligshoot) * tempact * wtfac
self.params.decayrate[1] = self.params.kdec2 * tempact * wtfac
self.params.decayrate[2] = self.params.kdec3 * math.exp(-3.0 * self
.params.ligroot) * tempact * wtfac
self.params.decayrate[3] = self.params.kdec4 * tempact * wtfac
self.params.decayrate[4] = self.params.kdec5 * (1.0 - 0.75 * self.
params.finesoil) * tempact * wtfac
self.params.decayrate[5] = self.params.kdec6 * tempact * wtfac
self.params.decayrate[6] = self.params.kdec7 * tempact * wtfac
def soil_temp_factor(self, project_day):
"""Soil-temperature activity factor (A9).
Parameters:
-----------
project_day : int
current simulation day (index)
Returns:
--------
tfac : float
soil temperature factor [degC]
"""
tsoil = self.met_data['tsoil'][project_day]
if float_gt(tsoil, 0.0):
tfac = 0.0326 + 0.00351 * tsoil ** 1.652 - (tsoil / 41.748) ** 7.19
if float_lt(tfac, 0.0):
tfac = 0.0
else:
tfac = 0.0
return tfac
|
<mask token>
import math
from water_balance import WaterBalance
from utilities import float_eq, float_lt, float_le, float_gt, float_ge, clip
__author__ = 'Martin De Kauwe'
__version__ = '1.0 (25.02.2011)'
__email__ = '[email protected]'
class DecompFactors(object):
""" Calculate C and N litter production rates """
def __init__(self, control, params, state, fluxes, met_data):
"""
Parameters
----------
control : integers, structure
model control flags
params: floats, structure
model parameters
state: floats, structure
model state
fluxes : floats, structure
model fluxes
met_data : floats, dictionary
meteorological forcing data
"""
self.params = params
self.fluxes = fluxes
self.control = control
self.state = state
self.met_data = met_data
self.wb = WaterBalance(self.control, self.params, self.state, self.
fluxes, self.met_data)
def decay_rates(self, project_day):
""" Model decay rates - temperature dependency (i.e. increase with temp)
[See section A8 in Comins and McMurtrie 1993].
Parameters:
-----------
project_day : int
current simulation day (index)
"""
tempact = self.soil_temp_factor(project_day)
wtfac = self.wb.calculate_soil_water_fac(topsoil=True)
self.params.decayrate[0] = self.params.kdec1 * math.exp(-3.0 * self
.params.ligshoot) * tempact * wtfac
self.params.decayrate[1] = self.params.kdec2 * tempact * wtfac
self.params.decayrate[2] = self.params.kdec3 * math.exp(-3.0 * self
.params.ligroot) * tempact * wtfac
self.params.decayrate[3] = self.params.kdec4 * tempact * wtfac
self.params.decayrate[4] = self.params.kdec5 * (1.0 - 0.75 * self.
params.finesoil) * tempact * wtfac
self.params.decayrate[5] = self.params.kdec6 * tempact * wtfac
self.params.decayrate[6] = self.params.kdec7 * tempact * wtfac
def soil_temp_factor(self, project_day):
"""Soil-temperature activity factor (A9).
Parameters:
-----------
project_day : int
current simulation day (index)
Returns:
--------
tfac : float
soil temperature factor [degC]
"""
tsoil = self.met_data['tsoil'][project_day]
if float_gt(tsoil, 0.0):
tfac = 0.0326 + 0.00351 * tsoil ** 1.652 - (tsoil / 41.748) ** 7.19
if float_lt(tfac, 0.0):
tfac = 0.0
else:
tfac = 0.0
return tfac
|
""" Soil and water decomposition rates """
import math
from water_balance import WaterBalance
from utilities import float_eq, float_lt, float_le, float_gt, float_ge, clip
__author__ = "Martin De Kauwe"
__version__ = "1.0 (25.02.2011)"
__email__ = "[email protected]"
class DecompFactors(object):
""" Calculate C and N litter production rates """
def __init__(self, control, params, state, fluxes, met_data):
"""
Parameters
----------
control : integers, structure
model control flags
params: floats, structure
model parameters
state: floats, structure
model state
fluxes : floats, structure
model fluxes
met_data : floats, dictionary
meteorological forcing data
"""
self.params = params
self.fluxes = fluxes
self.control = control
self.state = state
self.met_data = met_data
self.wb = WaterBalance(self.control, self.params, self.state,
self.fluxes, self.met_data)
def decay_rates(self, project_day):
""" Model decay rates - temperature dependency (i.e. increase with temp)
[See section A8 in Comins and McMurtrie 1993].
Parameters:
-----------
project_day : int
current simulation day (index)
"""
# temperature and water factors for decomposition
tempact = self.soil_temp_factor(project_day)
wtfac = self.wb.calculate_soil_water_fac(topsoil=True)
# decay rate of surface structural pool
self.params.decayrate[0] = (self.params.kdec1 *
math.exp(-3. * self.params.ligshoot) *
tempact * wtfac)
# decay rate of surface metabolic pool
self.params.decayrate[1] = self.params.kdec2 * tempact * wtfac
# decay rate of soil structural pool
self.params.decayrate[2] = (self.params.kdec3 *
math.exp(-3. * self.params.ligroot) *
tempact * wtfac)
# decay rate of soil metabolic pool
self.params.decayrate[3] = self.params.kdec4 * tempact * wtfac
# decay rate of active pool
self.params.decayrate[4] = (self.params.kdec5 *
(1.0 - 0.75 * self.params.finesoil) *
tempact * wtfac)
# decay rate of slow pool
self.params.decayrate[5] = self.params.kdec6 * tempact * wtfac
# decay rate of passive pool
self.params.decayrate[6] = self.params.kdec7 * tempact * wtfac
def soil_temp_factor(self, project_day):
"""Soil-temperature activity factor (A9).
Parameters:
-----------
project_day : int
current simulation day (index)
Returns:
--------
tfac : float
soil temperature factor [degC]
"""
tsoil = self.met_data['tsoil'][project_day]
if float_gt(tsoil, 0.0):
tfac = (0.0326 + 0.00351 * tsoil**1.652 - (tsoil / 41.748)**7.19)
if float_lt(tfac, 0.0):
tfac = 0.0
else:
# negative number cannot be raised to a fractional power
# number would need to be complex
tfac = 0.0
return tfac
|
[
3,
5,
6,
7,
8
] |
2,579 |
19ab44cec863560513aadd88b5fd4bb40f75e371
|
<mask token>
class TestInterpreter(unittest.TestCase):
<mask token>
def test_HelloWorld(self):
result = run_program(
"""
++++++++++[>+++++++>++++++++++>+++>+<<<<-]>++.>+.
+++++++..+++.>++.<<+++++++++++++++.>.+++.------.-
-------.>+.>."""
)
self.assertEquals(result, 'Hello World!')
def test_Squares(self):
result = run_program(
"""
++++[>+++++<-]>[<+++++>-]+<+[>[>+>+<<-]++>>[<<+>>
-]>>>[-]++>[-]+>>>+[[-]++++++>>>]<<<[[<++++++++<+
+>>-]+<.<[>----<-]<]<<[>>>>>[>>>[-]+++++++++<[>-<
-]+++++++++>[-[<->-]+[<<<]]<[>+<-]>]<<-]<<-]"""
)
expected_result = '\n'.join([str(x ** 2) for x in range(101)])
self.assertEquals(result, expected_result)
def test_ROT13(self):
result = run_program(
"""
-,+[-[>>++++[>++++++++<-]<+<-[>+>+>-[>>>]<[[>+<-]
>>+>]<<<<<-]]>>>[-]+>--[-[<->+++[-]]]<[++++++++++
++<[>-[>+>>]>[+[<+>-]>+>>]<<<<<-]>>[<+>-]>[-[-<<[
-]>>]<<[<<->>-]>>]<<[<<+>>-]]<[-]<.[-]<-,+]"""
, 'applesauce')
self.assertEquals(result, 'nccyrfnhpr')
def test_Clean(self):
self.assertRaises(Exception, brainfuck.clean, '[[]')
self.assertRaises(Exception, brainfuck.clean, '][')
<mask token>
|
<mask token>
class TestInterpreter(unittest.TestCase):
def setUp(self):
brainfuck.set_cell_size()
def test_HelloWorld(self):
result = run_program(
"""
++++++++++[>+++++++>++++++++++>+++>+<<<<-]>++.>+.
+++++++..+++.>++.<<+++++++++++++++.>.+++.------.-
-------.>+.>."""
)
self.assertEquals(result, 'Hello World!')
def test_Squares(self):
result = run_program(
"""
++++[>+++++<-]>[<+++++>-]+<+[>[>+>+<<-]++>>[<<+>>
-]>>>[-]++>[-]+>>>+[[-]++++++>>>]<<<[[<++++++++<+
+>>-]+<.<[>----<-]<]<<[>>>>>[>>>[-]+++++++++<[>-<
-]+++++++++>[-[<->-]+[<<<]]<[>+<-]>]<<-]<<-]"""
)
expected_result = '\n'.join([str(x ** 2) for x in range(101)])
self.assertEquals(result, expected_result)
def test_ROT13(self):
result = run_program(
"""
-,+[-[>>++++[>++++++++<-]<+<-[>+>+>-[>>>]<[[>+<-]
>>+>]<<<<<-]]>>>[-]+>--[-[<->+++[-]]]<[++++++++++
++<[>-[>+>>]>[+[<+>-]>+>>]<<<<<-]>>[<+>-]>[-[-<<[
-]>>]<<[<<->>-]>>]<<[<<+>>-]]<[-]<.[-]<-,+]"""
, 'applesauce')
self.assertEquals(result, 'nccyrfnhpr')
def test_Clean(self):
self.assertRaises(Exception, brainfuck.clean, '[[]')
self.assertRaises(Exception, brainfuck.clean, '][')
<mask token>
|
<mask token>
def run_program(program, input=None):
old_stdout = sys.stdout
old_stdin = sys.stdin
try:
out = StringIO()
sys.stdout = out
if input is not None:
input = StringIO(input)
sys.stdin = input
brainfuck.brainfuck(program)
finally:
sys.stdout = old_stdout
sys.stdin = old_stdin
return out.getvalue().strip()
class TestInterpreter(unittest.TestCase):
def setUp(self):
brainfuck.set_cell_size()
def test_HelloWorld(self):
result = run_program(
"""
++++++++++[>+++++++>++++++++++>+++>+<<<<-]>++.>+.
+++++++..+++.>++.<<+++++++++++++++.>.+++.------.-
-------.>+.>."""
)
self.assertEquals(result, 'Hello World!')
def test_Squares(self):
result = run_program(
"""
++++[>+++++<-]>[<+++++>-]+<+[>[>+>+<<-]++>>[<<+>>
-]>>>[-]++>[-]+>>>+[[-]++++++>>>]<<<[[<++++++++<+
+>>-]+<.<[>----<-]<]<<[>>>>>[>>>[-]+++++++++<[>-<
-]+++++++++>[-[<->-]+[<<<]]<[>+<-]>]<<-]<<-]"""
)
expected_result = '\n'.join([str(x ** 2) for x in range(101)])
self.assertEquals(result, expected_result)
def test_ROT13(self):
result = run_program(
"""
-,+[-[>>++++[>++++++++<-]<+<-[>+>+>-[>>>]<[[>+<-]
>>+>]<<<<<-]]>>>[-]+>--[-[<->+++[-]]]<[++++++++++
++<[>-[>+>>]>[+[<+>-]>+>>]<<<<<-]>>[<+>-]>[-[-<<[
-]>>]<<[<<->>-]>>]<<[<<+>>-]]<[-]<.[-]<-,+]"""
, 'applesauce')
self.assertEquals(result, 'nccyrfnhpr')
def test_Clean(self):
self.assertRaises(Exception, brainfuck.clean, '[[]')
self.assertRaises(Exception, brainfuck.clean, '][')
if __name__ == '__main__':
unittest.main()
|
import unittest
import brainfuck
import sys
from StringIO import StringIO
def run_program(program, input=None):
old_stdout = sys.stdout
old_stdin = sys.stdin
try:
out = StringIO()
sys.stdout = out
if input is not None:
input = StringIO(input)
sys.stdin = input
brainfuck.brainfuck(program)
finally:
sys.stdout = old_stdout
sys.stdin = old_stdin
return out.getvalue().strip()
class TestInterpreter(unittest.TestCase):
def setUp(self):
brainfuck.set_cell_size()
def test_HelloWorld(self):
result = run_program(
"""
++++++++++[>+++++++>++++++++++>+++>+<<<<-]>++.>+.
+++++++..+++.>++.<<+++++++++++++++.>.+++.------.-
-------.>+.>."""
)
self.assertEquals(result, 'Hello World!')
def test_Squares(self):
result = run_program(
"""
++++[>+++++<-]>[<+++++>-]+<+[>[>+>+<<-]++>>[<<+>>
-]>>>[-]++>[-]+>>>+[[-]++++++>>>]<<<[[<++++++++<+
+>>-]+<.<[>----<-]<]<<[>>>>>[>>>[-]+++++++++<[>-<
-]+++++++++>[-[<->-]+[<<<]]<[>+<-]>]<<-]<<-]"""
)
expected_result = '\n'.join([str(x ** 2) for x in range(101)])
self.assertEquals(result, expected_result)
def test_ROT13(self):
result = run_program(
"""
-,+[-[>>++++[>++++++++<-]<+<-[>+>+>-[>>>]<[[>+<-]
>>+>]<<<<<-]]>>>[-]+>--[-[<->+++[-]]]<[++++++++++
++<[>-[>+>>]>[+[<+>-]>+>>]<<<<<-]>>[<+>-]>[-[-<<[
-]>>]<<[<<->>-]>>]<<[<<+>>-]]<[-]<.[-]<-,+]"""
, 'applesauce')
self.assertEquals(result, 'nccyrfnhpr')
def test_Clean(self):
self.assertRaises(Exception, brainfuck.clean, '[[]')
self.assertRaises(Exception, brainfuck.clean, '][')
if __name__ == '__main__':
unittest.main()
|
import unittest
import brainfuck
import sys
from StringIO import StringIO
def run_program(program, input = None):
old_stdout = sys.stdout
old_stdin = sys.stdin
try:
out = StringIO()
sys.stdout = out
if input is not None:
input = StringIO(input)
sys.stdin = input
brainfuck.brainfuck(program)
finally:
sys.stdout = old_stdout
sys.stdin = old_stdin
return out.getvalue().strip()
class TestInterpreter(unittest.TestCase):
def setUp(self):
brainfuck.set_cell_size()
def test_HelloWorld(self):
result = run_program("""
++++++++++[>+++++++>++++++++++>+++>+<<<<-]>++.>+.
+++++++..+++.>++.<<+++++++++++++++.>.+++.------.-
-------.>+.>.""")
self.assertEquals(result, "Hello World!")
def test_Squares(self):
result = run_program("""
++++[>+++++<-]>[<+++++>-]+<+[>[>+>+<<-]++>>[<<+>>
-]>>>[-]++>[-]+>>>+[[-]++++++>>>]<<<[[<++++++++<+
+>>-]+<.<[>----<-]<]<<[>>>>>[>>>[-]+++++++++<[>-<
-]+++++++++>[-[<->-]+[<<<]]<[>+<-]>]<<-]<<-]""")
expected_result = "\n".join([str(x**2) for x in range(101)])
self.assertEquals(result, expected_result)
def test_ROT13(self):
result = run_program("""
-,+[-[>>++++[>++++++++<-]<+<-[>+>+>-[>>>]<[[>+<-]
>>+>]<<<<<-]]>>>[-]+>--[-[<->+++[-]]]<[++++++++++
++<[>-[>+>>]>[+[<+>-]>+>>]<<<<<-]>>[<+>-]>[-[-<<[
-]>>]<<[<<->>-]>>]<<[<<+>>-]]<[-]<.[-]<-,+]""", "applesauce")
self.assertEquals(result, "nccyrfnhpr")
def test_Clean(self):
self.assertRaises(Exception, brainfuck.clean, "[[]")
self.assertRaises(Exception, brainfuck.clean, "][")
if __name__ == '__main__':
unittest.main()
|
[
5,
6,
8,
9,
10
] |
2,580 |
a7e2b016131dfdb75e537e86875e1b2f19fb3d9d
|
<mask token>
class ClusterTestCase(unittest.TestCase):
<mask token>
<mask token>
|
<mask token>
class ClusterTestCase(unittest.TestCase):
def test_cluster(self):
n = 10
experiments, outcomes = utilities.load_flu_data()
data = outcomes['infected fraction R1'][0:n, :]
distances = clusterer.calculate_cid(data)
self.assertEqual(distances.shape, (n, n))
clusterer.plot_dendrogram(distances)
plt.draw()
assignment = clusterer.apply_agglomerative_clustering(distances, 2)
self.assertEqual(assignment.shape, (10,))
distances = clusterer.calculate_cid(data, condensed_form=True)
self.assertEqual(distances.shape, sum(np.arange(0, n)))
clusterer.plot_dendrogram(distances)
plt.draw()
plt.close('all')
<mask token>
|
<mask token>
class ClusterTestCase(unittest.TestCase):
def test_cluster(self):
n = 10
experiments, outcomes = utilities.load_flu_data()
data = outcomes['infected fraction R1'][0:n, :]
distances = clusterer.calculate_cid(data)
self.assertEqual(distances.shape, (n, n))
clusterer.plot_dendrogram(distances)
plt.draw()
assignment = clusterer.apply_agglomerative_clustering(distances, 2)
self.assertEqual(assignment.shape, (10,))
distances = clusterer.calculate_cid(data, condensed_form=True)
self.assertEqual(distances.shape, sum(np.arange(0, n)))
clusterer.plot_dendrogram(distances)
plt.draw()
plt.close('all')
if __name__ == '__main__':
unittest.main()
|
import matplotlib.pyplot as plt
import numpy as np
import unittest
from ema_workbench.analysis import clusterer
from test import utilities
class ClusterTestCase(unittest.TestCase):
def test_cluster(self):
n = 10
experiments, outcomes = utilities.load_flu_data()
data = outcomes['infected fraction R1'][0:n, :]
distances = clusterer.calculate_cid(data)
self.assertEqual(distances.shape, (n, n))
clusterer.plot_dendrogram(distances)
plt.draw()
assignment = clusterer.apply_agglomerative_clustering(distances, 2)
self.assertEqual(assignment.shape, (10,))
distances = clusterer.calculate_cid(data, condensed_form=True)
self.assertEqual(distances.shape, sum(np.arange(0, n)))
clusterer.plot_dendrogram(distances)
plt.draw()
plt.close('all')
if __name__ == '__main__':
unittest.main()
|
import matplotlib.pyplot as plt
import numpy as np
import unittest
from ema_workbench.analysis import clusterer
from test import utilities
class ClusterTestCase(unittest.TestCase):
def test_cluster(self):
n = 10
experiments, outcomes = utilities.load_flu_data()
data = outcomes["infected fraction R1"][0:n, :]
distances = clusterer.calculate_cid(data)
self.assertEqual(distances.shape, (n, n))
clusterer.plot_dendrogram(distances)
plt.draw()
assignment = clusterer.apply_agglomerative_clustering(distances, 2)
self.assertEqual(assignment.shape, (10,))
distances = clusterer.calculate_cid(data, condensed_form=True)
self.assertEqual(distances.shape, sum(np.arange(0, n)))
clusterer.plot_dendrogram(distances)
plt.draw()
plt.close("all")
if __name__ == "__main__":
unittest.main()
|
[
1,
2,
3,
4,
5
] |
2,581 |
538e582df7bfcf281973a5296adc14ca067be0a5
|
<mask token>
|
<mask token>
admin.autodiscover()
<mask token>
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<mask token>
admin.autodiscover()
urlpatterns = patterns('', ('', include(application.urls)), url('^admin/',
include(admin.site.urls)), url('^logout$', logout, name='logout'))
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf.urls import patterns, include, url
from django.contrib.auth.views import login, logout
from django.contrib import admin
from magmag_core.app import application
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from magmag import settings
admin.autodiscover()
urlpatterns = patterns('', ('', include(application.urls)), url('^admin/',
include(admin.site.urls)), url('^logout$', logout, name='logout'))
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib.auth.views import login, logout
from django.contrib import admin
from magmag_core.app import application
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from magmag import settings
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'magmag.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
(r'', include(application.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^logout$', logout,name='logout' ),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
0,
1,
2,
3,
4
] |
2,582 |
e3631a2a003f98fbf05c45a019250e76d3366949
|
<mask token>
def predict_babelnet(input_path: str, output_path: str, resources_path: str
) ->None:
global mfs_counter
"""
DO NOT MODIFY THE SIGNATURE!
This is the skeleton of the prediction function.
The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
:param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
:param output_path: the path of the output file (where you save your predictions)
:param resources_path: the path of the resources folder containing your model and stuff you might need.
:return: None
"""
print('>>>> BABELNET PREDICTION')
prediction_results, sentences_xml_elements = __predict(input_path,
resources_path)
vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
correctly_saved = 0
filename = os.path.normpath(input_path)
filename = filename.split(os.sep)[-1]
filename = filename[:-3] + 'babelnet.gold.key.txt'
for index in range(len(prediction_results)):
correctly_saved += __write_result(filename, sentences_xml_elements[
index], resources_path, output_path, prediction_results[index][
0][0], vocab=vocab_label_bn, enable_coarse_grained=1,
vocab_for_coarse=None)
print('Successfully saved {} out of {}'.format(correctly_saved, len(
prediction_results)))
del prediction_results
print('Of these, {} were MFS'.format(mfs_counter))
mfs_counter = 0
return
def predict_wordnet_domains(input_path: str, output_path: str,
    resources_path: str) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <wordnetDomain>" format (e.g. "d000.s000.t000 sport").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print('>>>> WORDNET DOMAINS PREDICTION')
    network_output, xml_sentences = __predict(input_path, resources_path)
    # Label vocabulary and Babelnet->WordNet-Domains fallback mapping.
    domain_vocab = create_mapping_dictionary(resources_path, mode='wndmn')
    bn2wndom = get_bn2wndomains()
    saved_count = 0
    base_name = os.path.normpath(input_path).split(os.sep)[-1]
    out_name = base_name[:-3] + 'wndomains.gold.key.txt'
    for idx, sentence_result in enumerate(network_output):
        # sentence_result[1] is the WordNet-Domains output head.
        saved_count += __write_result(out_name, xml_sentences[idx],
            resources_path, output_path, sentence_result[1][0],
            vocab=domain_vocab, enable_coarse_grained=2,
            vocab_for_coarse=bn2wndom)
    print('Successfully saved {} out of {}'.format(saved_count, len(
        network_output)))
    del network_output
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def predict_lexicographer(input_path: str, output_path: str, resources_path:
    str) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <lexicographerId>" format (e.g. "d000.s000.t000 noun.animal").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print('>>>> LEXICOGRAPHER PREDICTION')
    network_output, xml_sentences = __predict(input_path, resources_path)
    # Label vocabulary and Babelnet->lexicographer fallback mapping.
    lex_vocab = create_mapping_dictionary(resources_path, mode='lex')
    bn2lex = get_bn2lex()
    saved_count = 0
    base_name = os.path.normpath(input_path).split(os.sep)[-1]
    out_name = base_name[:-3] + 'lexicon.gold.key.txt'
    for idx, sentence_result in enumerate(network_output):
        # sentence_result[2] is the lexicographer output head.
        saved_count += __write_result(out_name, xml_sentences[idx],
            resources_path, output_path, sentence_result[2][0],
            vocab=lex_vocab, enable_coarse_grained=3,
            vocab_for_coarse=bn2lex)
    print('Successfully saved {} out of {}'.format(saved_count, len(
        network_output)))
    del network_output
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
<mask token>
def __write_result(filename: str, frase, resources_path: str, outputh_path:
    str, predictions, vocab=None, enable_coarse_grained: int=1,
    vocab_for_coarse=None) ->int:
    """
    Write the disambiguation results for one sentence to the file system.
    :param filename: the name of the file to save
    :param frase: the XML sentence element whose children are <wf>/<instance> nodes
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param outputh_path: the path of the output directory (where you save your predictions)
    :param predictions: the network predictions for this sentence (one score vector per token)
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')
    to_write = []
    for index, parola in enumerate(frase):
        # Only <instance> elements require disambiguation; other children
        # (plain words) are skipped.
        name = parola.xpath('name()')
        if name == 'instance':
            id = parola.get('id')
            # Candidate WordNet senses for this surface form; a falsy result
            # means the word is unknown to the mapping.
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # Unknown word: fall back to the Most Frequent Sense.
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(
                list_of_possible_senses_first_step, bn2wn)
            # Score each Babelnet candidate against the network prediction
            # for this token position.
            candidates, list_of_possible_senses_bn_version = (
                create_custom_label(list_of_possible_senses_bn_version,
                parola.text, vocab, predictions[index],
                enable_coarse_grained=enable_coarse_grained))
            the_actual_meaning = None
            if candidates:
                # Pick the candidate with the highest network score.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                # No scored candidate: fall back to MFS and count it.
                mfs_counter += 1
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # Append mode: successive sentences accumulate into the same key file.
    with open(outputh_path + '/' + filename, 'a') as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + ' ' + tupla[1] + '\n')
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy.
    WordNet lists synsets by frequency, so the first synset for a lemma is
    its most frequent sense.
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense (Babelnet id -> WordNet id(s))
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    :raises ValueError: if WordNet has no synset for the lemma/POS pair
    :raises KeyError: if the WordNet synset has no Babelnet mapping in *vocab*
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    synsets = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    if not synsets:
        # FIX: the original wrapped the [0] access in a bare `except:` that
        # only printed and then crashed with an opaque AttributeError on the
        # next line; fail loudly with a descriptive error instead.
        raise ValueError('No WordNet synset for lemma {!r} (pos={!r})'.
            format(parola.get('lemma'), pos))
    wordnet_object = synsets[0]
    # Build the "wn:<8-digit offset><pos>" key, e.g. "wn:00001740n".
    wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8
        ) + wordnet_object.pos()
    # FIX: next() with a default instead of an unhandled StopIteration.
    the_actual_meaning = next((key for key, value in vocab.items() if
        wn_synset in value), None)
    if the_actual_meaning is None:
        raise KeyError('WordNet synset {} has no Babelnet mapping'.format(
            wn_synset))
    return __extrapolate_value_for_MFS(the_actual_meaning, vocab=vocab2,
        pred_case=pred_case)
def __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:
    Dict=None) ->str:
    """
    Resolve the Babelnet id found by MFS into the final prediction string.
    :param value: the value (string or list) from which to extrapolate the actual meaning found
    :param pred_case: which prediction task is running. Possible values:
    1 --> Babelnet: the id is the answer, no further mapping needed
    2 --> WordNet Domains: map through *vocab*; unmapped ids fall back to the empty class "factotum"
    3 --> Lexicographer: map through *vocab*
    :param vocab: the vocab in support of mode 2 or 3.
    :return: the actual meaning found with MFS
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        # Babelnet predictions need no further translation.
        return meaning
    if pred_case == 2:
        domain = vocab.get(meaning)
        # "factotum" is the catch-all domain for unmapped synsets.
        return domain[0] if domain else 'factotum'
    if pred_case == 3:
        return vocab.get(meaning)[0]
def __type_checker(value: object) ->str:
    """
    Normalize *value* to a plain string.
    Accepts either a string (returned unchanged) or a list whose first
    element is the value of interest.
    :param value: the value to examinate
    :return: the string form of *value*, or None for any other type
    """
    # isinstance instead of `type(x) == str`: idiomatic and subclass-safe.
    if isinstance(value, str):
        return value
    if isinstance(value, list):
        return value[0]
    # Explicit None for unsupported types (was an implicit fall-through).
    return None
<mask token>
|
<mask token>
def predict_babelnet(input_path: str, output_path: str, resources_path: str
    ) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    # FIX: the docstring must be the function's first statement to be a real
    # docstring; the original put it after `global`, making it a discarded
    # string expression.
    global mfs_counter
    print('>>>> BABELNET PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    # Mapping between Babelnet ids and the network's output labels.
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    correctly_saved = 0
    # Derive the gold-key filename from the input XML name: ".xml" -> suffix.
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    filename = filename[:-3] + 'babelnet.gold.key.txt'
    for index in range(len(prediction_results)):
        # prediction_results[index][0] holds the Babelnet-task output head.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, prediction_results[index][
            0][0], vocab=vocab_label_bn, enable_coarse_grained=1,
            vocab_for_coarse=None)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0  # reset the module-level counter for the next run
    return
def predict_wordnet_domains(input_path: str, output_path: str,
    resources_path: str) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <wordnetDomain>" format (e.g. "d000.s000.t000 sport").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print('>>>> WORDNET DOMAINS PREDICTION')
    network_output, xml_sentences = __predict(input_path, resources_path)
    # Label vocabulary and Babelnet->WordNet-Domains fallback mapping.
    domain_vocab = create_mapping_dictionary(resources_path, mode='wndmn')
    bn2wndom = get_bn2wndomains()
    saved_count = 0
    base_name = os.path.normpath(input_path).split(os.sep)[-1]
    out_name = base_name[:-3] + 'wndomains.gold.key.txt'
    for idx, sentence_result in enumerate(network_output):
        # sentence_result[1] is the WordNet-Domains output head.
        saved_count += __write_result(out_name, xml_sentences[idx],
            resources_path, output_path, sentence_result[1][0],
            vocab=domain_vocab, enable_coarse_grained=2,
            vocab_for_coarse=bn2wndom)
    print('Successfully saved {} out of {}'.format(saved_count, len(
        network_output)))
    del network_output
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def predict_lexicographer(input_path: str, output_path: str, resources_path:
    str) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <lexicographerId>" format (e.g. "d000.s000.t000 noun.animal").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print('>>>> LEXICOGRAPHER PREDICTION')
    network_output, xml_sentences = __predict(input_path, resources_path)
    # Label vocabulary and Babelnet->lexicographer fallback mapping.
    lex_vocab = create_mapping_dictionary(resources_path, mode='lex')
    bn2lex = get_bn2lex()
    saved_count = 0
    base_name = os.path.normpath(input_path).split(os.sep)[-1]
    out_name = base_name[:-3] + 'lexicon.gold.key.txt'
    for idx, sentence_result in enumerate(network_output):
        # sentence_result[2] is the lexicographer output head.
        saved_count += __write_result(out_name, xml_sentences[idx],
            resources_path, output_path, sentence_result[2][0],
            vocab=lex_vocab, enable_coarse_grained=3,
            vocab_for_coarse=bn2lex)
    print('Successfully saved {} out of {}'.format(saved_count, len(
        network_output)))
    del network_output
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def __predict(input_path: str, resources_path: str) ->Tuple:
    """
    Runs the WSD network over every sentence of the input file.
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: a tuple (list of per-sentence network predictions, list of XML sentence elements)
    """
    train, etree_data = load_dataset(input_path)
    # Drop empty sentences.
    train = [dato for dato in train if dato]
    # The three label vocabularies; their sizes define the network's three
    # output layers (Babelnet, WordNet Domains, lexicographer).
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    modello = WSD(resources_path + '/vocabularies/bert_vocab.txt', [len(
        vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)],
        dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)
    tokenizatore = modello.tokenizatore
    # Restore the trained weights from the resources folder.
    modello.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5'
        )
    to_return = []
    # All sentence elements: document -> sentence is two levels below root.
    sentences_xml_elements = etree_data.xpath('/*/*/*')
    for sentence in train:
        # BERT-style features: token ids, attention mask, segment ids.
        feature_1, feature_2, feature_3 = (
            convert_sentence_to_features_no_padding(sentence, tokenizatore))
        results = modello.model.predict({'input_word_ids': feature_1,
            'input_mask': feature_2, 'segment_ids': feature_3}, verbose=1)
        to_return.append(results)
    del vocab_label_lex
    del vocab_label_wndmn
    del vocab_label_bn
    return to_return, sentences_xml_elements
def __write_result(filename: str, frase, resources_path: str, outputh_path:
    str, predictions, vocab=None, enable_coarse_grained: int=1,
    vocab_for_coarse=None) ->int:
    """
    Write the disambiguation results for one sentence to the file system.
    :param filename: the name of the file to save
    :param frase: the XML sentence element whose children are <wf>/<instance> nodes
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param outputh_path: the path of the output directory (where you save your predictions)
    :param predictions: the network predictions for this sentence (one score vector per token)
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')
    to_write = []
    for index, parola in enumerate(frase):
        # Only <instance> elements require disambiguation; other children
        # (plain words) are skipped.
        name = parola.xpath('name()')
        if name == 'instance':
            id = parola.get('id')
            # Candidate WordNet senses for this surface form; a falsy result
            # means the word is unknown to the mapping.
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # Unknown word: fall back to the Most Frequent Sense.
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(
                list_of_possible_senses_first_step, bn2wn)
            # Score each Babelnet candidate against the network prediction
            # for this token position.
            candidates, list_of_possible_senses_bn_version = (
                create_custom_label(list_of_possible_senses_bn_version,
                parola.text, vocab, predictions[index],
                enable_coarse_grained=enable_coarse_grained))
            the_actual_meaning = None
            if candidates:
                # Pick the candidate with the highest network score.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                # No scored candidate: fall back to MFS and count it.
                mfs_counter += 1
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # Append mode: successive sentences accumulate into the same key file.
    with open(outputh_path + '/' + filename, 'a') as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + ' ' + tupla[1] + '\n')
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy.
    WordNet lists synsets by frequency, so the first synset for a lemma is
    its most frequent sense.
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense (Babelnet id -> WordNet id(s))
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    :raises ValueError: if WordNet has no synset for the lemma/POS pair
    :raises KeyError: if the WordNet synset has no Babelnet mapping in *vocab*
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    synsets = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    if not synsets:
        # FIX: the original wrapped the [0] access in a bare `except:` that
        # only printed and then crashed with an opaque AttributeError on the
        # next line; fail loudly with a descriptive error instead.
        raise ValueError('No WordNet synset for lemma {!r} (pos={!r})'.
            format(parola.get('lemma'), pos))
    wordnet_object = synsets[0]
    # Build the "wn:<8-digit offset><pos>" key, e.g. "wn:00001740n".
    wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8
        ) + wordnet_object.pos()
    # FIX: next() with a default instead of an unhandled StopIteration.
    the_actual_meaning = next((key for key, value in vocab.items() if
        wn_synset in value), None)
    if the_actual_meaning is None:
        raise KeyError('WordNet synset {} has no Babelnet mapping'.format(
            wn_synset))
    return __extrapolate_value_for_MFS(the_actual_meaning, vocab=vocab2,
        pred_case=pred_case)
def __extrapolate_value_for_MFS(value: object, pred_case: int=1, vocab:
    Dict=None) ->str:
    """
    Resolve the Babelnet id found by MFS into the final prediction string.
    :param value: the value (string or list) from which to extrapolate the actual meaning found
    :param pred_case: which prediction task is running. Possible values:
    1 --> Babelnet: the id is the answer, no further mapping needed
    2 --> WordNet Domains: map through *vocab*; unmapped ids fall back to the empty class "factotum"
    3 --> Lexicographer: map through *vocab*
    :param vocab: the vocab in support of mode 2 or 3.
    :return: the actual meaning found with MFS
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        # Babelnet predictions need no further translation.
        return meaning
    if pred_case == 2:
        domain = vocab.get(meaning)
        # "factotum" is the catch-all domain for unmapped synsets.
        return domain[0] if domain else 'factotum'
    if pred_case == 3:
        return vocab.get(meaning)[0]
def __type_checker(value: object) ->str:
    """
    Normalize *value* to a plain string.
    Accepts either a string (returned unchanged) or a list whose first
    element is the value of interest.
    :param value: the value to examinate
    :return: the string form of *value*, or None for any other type
    """
    # isinstance instead of `type(x) == str`: idiomatic and subclass-safe.
    if isinstance(value, str):
        return value
    if isinstance(value, list):
        return value[0]
    # Explicit None for unsupported types (was an implicit fall-through).
    return None
def __decide_pos(pos: str) ->str:
    """
    Maps a coarse POS tag to its single-letter WordNet counterpart.
    :param pos: the pos to interpret with WordNet
    :return: the WordNet representation of the given pos, or None if unknown
    """
    # Table lookup replaces the original if-chain; unknown tags yield None.
    wn_pos_by_tag = {'NOUN': 'n', 'VERB': 'v', 'ADJ': 'a', 'ADV': 'r'}
    return wn_pos_by_tag.get(pos)
def convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) ->List:
    """
    Cast the given list (which contains only WN ids) to Babelnet IDs.
    :param list_of_bn: the list of WordNet ids to cast
    :param vocab: mapping from Babelnet id to a WordNet id (str) or list of ids
    :return: the converted list; WordNet ids with no Babelnet mapping are skipped
    """
    list_of_possible_senses_bn_version = []
    for candidate in list_of_bn:
        # FIX: next() with a default. The original had no default, so an
        # unmapped id raised StopIteration instead of being skipped, even
        # though the `if is_it_here:` guard shows skipping was intended.
        is_it_here = next((key for key, value in vocab.items() if
            candidate in value), None)
        if is_it_here:
            list_of_possible_senses_bn_version.append(is_it_here if
                isinstance(is_it_here, str) else is_it_here[0])
    return list_of_possible_senses_bn_version
<mask token>
|
<mask token>
def predict_babelnet(input_path: str, output_path: str, resources_path: str
    ) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    # FIX: the docstring must be the function's first statement to be a real
    # docstring; the original put it after `global`, making it a discarded
    # string expression.
    global mfs_counter
    print('>>>> BABELNET PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    # Mapping between Babelnet ids and the network's output labels.
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    correctly_saved = 0
    # Derive the gold-key filename from the input XML name: ".xml" -> suffix.
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    filename = filename[:-3] + 'babelnet.gold.key.txt'
    for index in range(len(prediction_results)):
        # prediction_results[index][0] holds the Babelnet-task output head.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, prediction_results[index][
            0][0], vocab=vocab_label_bn, enable_coarse_grained=1,
            vocab_for_coarse=None)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0  # reset the module-level counter for the next run
    return
def predict_wordnet_domains(input_path: str, output_path: str,
    resources_path: str) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <wordnetDomain>" format (e.g. "d000.s000.t000 sport").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print('>>>> WORDNET DOMAINS PREDICTION')
    network_output, xml_sentences = __predict(input_path, resources_path)
    # Label vocabulary and Babelnet->WordNet-Domains fallback mapping.
    domain_vocab = create_mapping_dictionary(resources_path, mode='wndmn')
    bn2wndom = get_bn2wndomains()
    saved_count = 0
    base_name = os.path.normpath(input_path).split(os.sep)[-1]
    out_name = base_name[:-3] + 'wndomains.gold.key.txt'
    for idx, sentence_result in enumerate(network_output):
        # sentence_result[1] is the WordNet-Domains output head.
        saved_count += __write_result(out_name, xml_sentences[idx],
            resources_path, output_path, sentence_result[1][0],
            vocab=domain_vocab, enable_coarse_grained=2,
            vocab_for_coarse=bn2wndom)
    print('Successfully saved {} out of {}'.format(saved_count, len(
        network_output)))
    del network_output
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def predict_lexicographer(input_path: str, output_path: str, resources_path:
    str) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <lexicographerId>" format (e.g. "d000.s000.t000 noun.animal").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print('>>>> LEXICOGRAPHER PREDICTION')
    network_output, xml_sentences = __predict(input_path, resources_path)
    # Label vocabulary and Babelnet->lexicographer fallback mapping.
    lex_vocab = create_mapping_dictionary(resources_path, mode='lex')
    bn2lex = get_bn2lex()
    saved_count = 0
    base_name = os.path.normpath(input_path).split(os.sep)[-1]
    out_name = base_name[:-3] + 'lexicon.gold.key.txt'
    for idx, sentence_result in enumerate(network_output):
        # sentence_result[2] is the lexicographer output head.
        saved_count += __write_result(out_name, xml_sentences[idx],
            resources_path, output_path, sentence_result[2][0],
            vocab=lex_vocab, enable_coarse_grained=3,
            vocab_for_coarse=bn2lex)
    print('Successfully saved {} out of {}'.format(saved_count, len(
        network_output)))
    del network_output
    print('Of these, {} were MFS'.format(mfs_counter))
    mfs_counter = 0
    return
def __predict(input_path: str, resources_path: str) ->Tuple:
    """
    Runs the WSD network over every sentence of the input file.
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: a tuple (list of per-sentence network predictions, list of XML sentence elements)
    """
    train, etree_data = load_dataset(input_path)
    # Drop empty sentences.
    train = [dato for dato in train if dato]
    # The three label vocabularies; their sizes define the network's three
    # output layers (Babelnet, WordNet Domains, lexicographer).
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    modello = WSD(resources_path + '/vocabularies/bert_vocab.txt', [len(
        vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)],
        dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)
    tokenizatore = modello.tokenizatore
    # Restore the trained weights from the resources folder.
    modello.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5'
        )
    to_return = []
    # All sentence elements: document -> sentence is two levels below root.
    sentences_xml_elements = etree_data.xpath('/*/*/*')
    for sentence in train:
        # BERT-style features: token ids, attention mask, segment ids.
        feature_1, feature_2, feature_3 = (
            convert_sentence_to_features_no_padding(sentence, tokenizatore))
        results = modello.model.predict({'input_word_ids': feature_1,
            'input_mask': feature_2, 'segment_ids': feature_3}, verbose=1)
        to_return.append(results)
    del vocab_label_lex
    del vocab_label_wndmn
    del vocab_label_bn
    return to_return, sentences_xml_elements
def __write_result(filename: str, frase, resources_path: str, outputh_path:
    str, predictions, vocab=None, enable_coarse_grained: int=1,
    vocab_for_coarse=None) ->int:
    """
    Write the disambiguation results for one sentence to the file system.
    :param filename: the name of the file to save
    :param frase: the XML sentence element whose children are <wf>/<instance> nodes
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param outputh_path: the path of the output directory (where you save your predictions)
    :param predictions: the network predictions for this sentence (one score vector per token)
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')
    to_write = []
    for index, parola in enumerate(frase):
        # Only <instance> elements require disambiguation; other children
        # (plain words) are skipped.
        name = parola.xpath('name()')
        if name == 'instance':
            id = parola.get('id')
            # Candidate WordNet senses for this surface form; a falsy result
            # means the word is unknown to the mapping.
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # Unknown word: fall back to the Most Frequent Sense.
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(
                list_of_possible_senses_first_step, bn2wn)
            # Score each Babelnet candidate against the network prediction
            # for this token position.
            candidates, list_of_possible_senses_bn_version = (
                create_custom_label(list_of_possible_senses_bn_version,
                parola.text, vocab, predictions[index],
                enable_coarse_grained=enable_coarse_grained))
            the_actual_meaning = None
            if candidates:
                # Pick the candidate with the highest network score.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                # No scored candidate: fall back to MFS and count it.
                mfs_counter += 1
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # Append mode: successive sentences accumulate into the same key file.
    with open(outputh_path + '/' + filename, 'a') as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + ' ' + tupla[1] + '\n')
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    # WordNet lists synsets most-frequent first, so [0] is the MFS pick.
    wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    try:
        wordnet_object = wordnet_object[0]
    except:
        # NOTE(review): bare except only prints; if no synset exists,
        # wordnet_object stays a (empty) list and .offset() below fails.
        print(wordnet_object)
        print(parola.text)
    # Build the "wn:<zero-padded offset><pos>" key used by the bn->wn vocab.
    wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8
        ) + wordnet_object.pos()
    # NOTE(review): next() without default raises StopIteration when the
    # synset is missing from vocab — TODO confirm this cannot happen here.
    the_actual_meaning = next(key for key, value in vocab.items() if
        wn_synset in value)
    to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=
        vocab2, pred_case=pred_case)
    return to_return
def __extrapolate_value_for_MFS(value: object, pred_case: int = 1, vocab: Dict = None) -> str:
    """
    Resolve the Babelnet ID held by *value* into the final prediction string
    for the requested prediction mode.

    :param value: the value (str or one-element-of-interest list) holding the
        Babelnet ID found by MFS
    :param pred_case: prediction mode:
        1 --> Babelnet: the ID itself is the answer
        2 --> WordNet Domains: look the ID up in *vocab*; fall back to the
              empty class "factotum" when absent
        3 --> Lexicon: look the ID up in *vocab*
    :param vocab: the support vocabulary for modes 2 and 3
    :return: the actual meaning found with MFS
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        return meaning
    if pred_case == 2:
        mapped = vocab.get(meaning)
        return mapped[0] if mapped else 'factotum'
    if pred_case == 3:
        return vocab.get(meaning)[0]
def __type_checker(value: object) -> str:
    """
    Normalizes *value* to a single string.

    :param value: the value to examine: either the string itself, or a list
        whose first element is the string of interest
    :return: the extracted string, or None for any other type (made explicit
        here; the original fell through and returned None implicitly)
    """
    # isinstance() instead of `type(x) == T`: idiomatic and accepts subclasses.
    if isinstance(value, str):
        return value
    if isinstance(value, list):
        return value[0]
    return None
def __decide_pos(pos: str) -> str:
    """
    Decides the WN representation of the given pos in input
    :param pos: the pos to interpret with WordNet
    :return: the WN representation of the given pos (None for unknown tags,
        matching the original fall-through)
    """
    # Table lookup replaces the original if-chain; behavior is identical.
    pos_map = {'NOUN': 'n', 'VERB': 'v', 'ADJ': 'a', 'ADV': 'r'}
    return pos_map.get(pos)
def convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) -> List:
    """
    Cast the given list (which contains only WN ids) to Babelnet IDs.

    :param list_of_bn: the list of WordNet ids to cast
    :param vocab: mapping Babelnet id -> WordNet id(s) used for the reverse lookup
    :return: the converted list; WN ids with no matching vocab entry are skipped
    """
    converted = []
    for candidate in list_of_bn:
        # Bug fix: next(..., None) instead of next(...) — the original raised
        # StopIteration when no vocab entry contained the candidate, although
        # the truthiness check below clearly intended a None fallback.
        match = next((key for key, value in vocab.items() if candidate in
            value), None)
        if match:
            converted.append(match if isinstance(match, str) else match[0])
    return converted
def create_custom_label(list_of_possible_senses: List, word: str, vocab:
    Dict, predictions, enable_coarse_grained: int=1) ->Tuple:
    """
    Converts the list of babelnet IDs into their prediction scores, dropping
    the IDs that are unknown to the vocabulary.

    :param list_of_possible_senses: the list that contains all the babelnet's IDs
    :param word: the word for which we are predicting the sense in a specific moment
    :param vocab: the vocabulary Word -> Serial to exploit for the conversion
    :param predictions: the predictions made by the system (indexable by serial)
    :param enable_coarse_grained: changes the flow of the function. Possible values:
    1 --> fine grained: the lookup key is "<word>_<babelnet id>"
    2,3 -> coarse grained: the lookup key is the babelnet id itself
    :return: a tuple (scores, surviving_ids): scores[i] is the prediction
        score of surviving_ids[i]
    """
    scores = []
    surviving = []
    for sense in list_of_possible_senses:
        key = word + '_' + sense if enable_coarse_grained == 1 else sense
        try:
            # Narrowed from a bare except: only the lookup/conversion errors
            # the original intended to swallow are caught now.
            serial = int(vocab[key])
            score = predictions[serial]
        except (KeyError, ValueError, TypeError, IndexError):
            # Sense unknown to the vocab (or serial out of range): drop it.
            continue
        scores.append(score)
        surviving.append(sense)
    # Annotation fixed to Tuple: the function has always returned a pair.
    return scores, surviving
<mask token>
|
<mask token>
mfs_counter = 0
def predict_babelnet(input_path: str, output_path: str, resources_path: str
    ) ->None:
    global mfs_counter
    # NOTE(review): this string literal follows the `global` statement, so it
    # is a dead expression rather than the function's docstring.
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    print('>>>> BABELNET PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    correctly_saved = 0
    # Output file name mirrors the input XML name, e.g.
    # "foo.data.xml" -> "foo.data.babelnet.gold.key.txt".
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    filename = filename[:-3] + 'babelnet.gold.key.txt'
    for index in range(len(prediction_results)):
        # prediction_results[index][0] is the Babelnet head's output for
        # sentence `index`; mode 1 = fine-grained Babelnet prediction.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, prediction_results[index][
            0][0], vocab=vocab_label_bn, enable_coarse_grained=1,
            vocab_for_coarse=None)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    # Reset the module-level MFS counter for the next prediction run.
    mfs_counter = 0
    return
def predict_wordnet_domains(input_path: str, output_path: str,
    resources_path: str) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <wordnetDomain>" format (e.g. "d000.s000.t000 sport").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print('>>>> WORDNET DOMAINS PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    correctly_saved = 0
    # Babelnet id -> WordNet-domains mapping, used as the MFS fallback vocab.
    bn2wndom = get_bn2wndomains()
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    filename = filename[:-3] + 'wndomains.gold.key.txt'
    for index in range(len(prediction_results)):
        # prediction_results[index][1] is the WordNet-domains head's output;
        # mode 2 = coarse-grained with "factotum" fallback.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, prediction_results[index][
            1][0], vocab=vocab_label_wndmn, enable_coarse_grained=2,
            vocab_for_coarse=bn2wndom)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    # Reset the module-level MFS counter for the next prediction run.
    mfs_counter = 0
    return
def predict_lexicographer(input_path: str, output_path: str, resources_path:
    str) ->None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <lexicographerId>" format (e.g. "d000.s000.t000 noun.animal").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print('>>>> LEXICOGRAPHER PREDICTION')
    prediction_results, sentences_xml_elements = __predict(input_path,
        resources_path)
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    correctly_saved = 0
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    # Babelnet id -> lexicographer-id mapping, used as the MFS fallback vocab.
    bn2lex = get_bn2lex()
    filename = filename[:-3] + 'lexicon.gold.key.txt'
    for index in range(len(prediction_results)):
        # prediction_results[index][2] is the lexicographer head's output;
        # mode 3 = coarse-grained lexicon prediction.
        correctly_saved += __write_result(filename, sentences_xml_elements[
            index], resources_path, output_path, prediction_results[index][
            2][0], vocab=vocab_label_lex, enable_coarse_grained=3,
            vocab_for_coarse=bn2lex)
    print('Successfully saved {} out of {}'.format(correctly_saved, len(
        prediction_results)))
    del prediction_results
    print('Of these, {} were MFS'.format(mfs_counter))
    # Reset the module-level MFS counter for the next prediction run.
    mfs_counter = 0
    return
def __predict(input_path: str, resources_path: str) ->Tuple:
    """
    Actually predicts a sentence and returns the predictions in the requested formats
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: The actual prediction by the network
    """
    train, etree_data = load_dataset(input_path)
    # Drop empty sentences so the model never receives an empty input.
    train = [dato for dato in train if dato]
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    # Three output sizes: one softmax head per task (bn, wndmn, lex).
    modello = WSD(resources_path + '/vocabularies/bert_vocab.txt', [len(
        vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)],
        dropout=0.1, recurrent_dropout=0.1, learning_rate=0.0003)
    tokenizatore = modello.tokenizatore
    modello.model.load_weights(resources_path + '/saved_model/model_20_2.14.h5'
        )
    to_return = []
    # All third-level XML nodes, i.e. the <sentence> elements of the corpus.
    sentences_xml_elements = etree_data.xpath('/*/*/*')
    for sentence in train:
        # BERT-style inputs: token ids, attention mask, segment ids.
        feature_1, feature_2, feature_3 = (
            convert_sentence_to_features_no_padding(sentence, tokenizatore))
        results = modello.model.predict({'input_word_ids': feature_1,
            'input_mask': feature_2, 'segment_ids': feature_3}, verbose=1)
        to_return.append(results)
    del vocab_label_lex
    del vocab_label_wndmn
    del vocab_label_bn
    return to_return, sentences_xml_elements
def __write_result(filename: str, frase, resources_path: str, outputh_path:
    str, predictions, vocab=None, enable_coarse_grained: int=1,
    vocab_for_coarse=None) ->int:
    """
    Write results in the file system
    :param filename: the name of the file to save
    :param frase: the object from which recover the sentence
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param output_path: the path of the output file (where you save your predictions)
    :param predictions: the predictions made by the system
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path + '/mapping/lemma2wn.txt')
    to_write = []
    for index, parola in enumerate(frase):
        name = parola.xpath('name()')
        # Only <instance> elements carry a sense to disambiguate.
        if name == 'instance':
            id = parola.get('id')
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # Lemma unknown to the mapping: fall back to MFS.
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(
                list_of_possible_senses_first_step, bn2wn)
            candidates, list_of_possible_senses_bn_version = (
                create_custom_label(list_of_possible_senses_bn_version,
                parola.text, vocab, predictions[index],
                enable_coarse_grained=enable_coarse_grained))
            the_actual_meaning = None
            if candidates:
                # Pick the candidate sense with the highest network score.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                # No candidate survived the vocab lookup: fall back to MFS.
                mfs_counter += 1
                the_actual_meaning = MFS(parola, bn2wn, vocab2=
                    vocab_for_coarse, pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # Append mode: successive sentences accumulate in the same key file.
    with open(outputh_path + '/' + filename, 'a') as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + ' ' + tupla[1] + '\n')
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2: Dict=None, pred_case: int=1) ->str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    # WordNet lists synsets most-frequent first, so [0] is the MFS pick.
    wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    try:
        wordnet_object = wordnet_object[0]
    except:
        # NOTE(review): bare except only prints; if no synset exists,
        # wordnet_object stays a (empty) list and .offset() below fails.
        print(wordnet_object)
        print(parola.text)
    # Build the "wn:<zero-padded offset><pos>" key used by the bn->wn vocab.
    wn_synset = 'wn:' + str(wordnet_object.offset()).zfill(8
        ) + wordnet_object.pos()
    # NOTE(review): next() without default raises StopIteration when the
    # synset is missing from vocab — TODO confirm this cannot happen here.
    the_actual_meaning = next(key for key, value in vocab.items() if
        wn_synset in value)
    to_return = __extrapolate_value_for_MFS(the_actual_meaning, vocab=
        vocab2, pred_case=pred_case)
    return to_return
def __extrapolate_value_for_MFS(value: object, pred_case: int = 1, vocab: Dict = None) -> str:
    """
    Resolve the Babelnet ID held by *value* into the final prediction string
    for the requested prediction mode.

    :param value: the value (str or one-element-of-interest list) holding the
        Babelnet ID found by MFS
    :param pred_case: prediction mode:
        1 --> Babelnet: the ID itself is the answer
        2 --> WordNet Domains: look the ID up in *vocab*; fall back to the
              empty class "factotum" when absent
        3 --> Lexicon: look the ID up in *vocab*
    :param vocab: the support vocabulary for modes 2 and 3
    :return: the actual meaning found with MFS
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        return meaning
    if pred_case == 2:
        mapped = vocab.get(meaning)
        return mapped[0] if mapped else 'factotum'
    if pred_case == 3:
        return vocab.get(meaning)[0]
def __type_checker(value: object) -> str:
    """
    Normalizes *value* to a single string.

    :param value: the value to examine: either the string itself, or a list
        whose first element is the string of interest
    :return: the extracted string, or None for any other type (made explicit
        here; the original fell through and returned None implicitly)
    """
    # isinstance() instead of `type(x) == T`: idiomatic and accepts subclasses.
    if isinstance(value, str):
        return value
    if isinstance(value, list):
        return value[0]
    return None
def __decide_pos(pos: str) -> str:
    """
    Decides the WN representation of the given pos in input
    :param pos: the pos to interpret with WordNet
    :return: the WN representation of the given pos (None for unknown tags,
        matching the original fall-through)
    """
    # Table lookup replaces the original if-chain; behavior is identical.
    pos_map = {'NOUN': 'n', 'VERB': 'v', 'ADJ': 'a', 'ADV': 'r'}
    return pos_map.get(pos)
def convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) -> List:
    """
    Cast the given list (which contains only WN ids) to Babelnet IDs.

    :param list_of_bn: the list of WordNet ids to cast
    :param vocab: mapping Babelnet id -> WordNet id(s) used for the reverse lookup
    :return: the converted list; WN ids with no matching vocab entry are skipped
    """
    converted = []
    for candidate in list_of_bn:
        # Bug fix: next(..., None) instead of next(...) — the original raised
        # StopIteration when no vocab entry contained the candidate, although
        # the truthiness check below clearly intended a None fallback.
        match = next((key for key, value in vocab.items() if candidate in
            value), None)
        if match:
            converted.append(match if isinstance(match, str) else match[0])
    return converted
def create_custom_label(list_of_possible_senses: List, word: str, vocab:
    Dict, predictions, enable_coarse_grained: int=1) ->Tuple:
    """
    Converts the list of babelnet IDs into their prediction scores, dropping
    the IDs that are unknown to the vocabulary.

    :param list_of_possible_senses: the list that contains all the babelnet's IDs
    :param word: the word for which we are predicting the sense in a specific moment
    :param vocab: the vocabulary Word -> Serial to exploit for the conversion
    :param predictions: the predictions made by the system (indexable by serial)
    :param enable_coarse_grained: changes the flow of the function. Possible values:
    1 --> fine grained: the lookup key is "<word>_<babelnet id>"
    2,3 -> coarse grained: the lookup key is the babelnet id itself
    :return: a tuple (scores, surviving_ids): scores[i] is the prediction
        score of surviving_ids[i]
    """
    scores = []
    surviving = []
    for sense in list_of_possible_senses:
        key = word + '_' + sense if enable_coarse_grained == 1 else sense
        try:
            # Narrowed from a bare except: only the lookup/conversion errors
            # the original intended to swallow are caught now.
            serial = int(vocab[key])
            score = predictions[serial]
        except (KeyError, ValueError, TypeError, IndexError):
            # Sense unknown to the vocab (or serial out of range): drop it.
            continue
        scores.append(score)
        surviving.append(sense)
    # Annotation fixed to Tuple: the function has always returned a pair.
    return scores, surviving
if __name__ == '__main__':
    # Local smoke test only: these are hardcoded developer-machine paths,
    # not part of the public API (the graders call predict_* directly).
    predict_babelnet(
        '/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml'
        , '../output',
        '/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources'
        )
|
from model import WSD
from data_preprocessing import load_dataset, create_mapping_dictionary, reload_word_mapping,get_bn2wn,get_bn2wndomains, get_bn2lex
from typing import List, Dict, Tuple
from prova import convert_sentence_to_features_no_padding
import numpy as np
import os
from nltk.corpus import wordnet
mfs_counter = 0
def predict_babelnet(input_path : str, output_path : str, resources_path : str) -> None:
    global mfs_counter
    # NOTE(review): this string literal follows the `global` statement, so it
    # is a dead expression rather than the function's docstring.
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <BABELSynset>" format (e.g. "d000.s000.t000 bn:01234567n").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    print(">>>> BABELNET PREDICTION")
    prediction_results, sentences_xml_elements = __predict(input_path,resources_path)
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    correctly_saved = 0
    # Output file name mirrors the input XML name, e.g.
    # "foo.data.xml" -> "foo.data.babelnet.gold.key.txt".
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    filename = filename[:-3]+"babelnet.gold.key.txt"
    for index in range(len(prediction_results)):
        # prediction_results[index][0] is the Babelnet head's output for
        # sentence `index`; mode 1 = fine-grained Babelnet prediction.
        correctly_saved += __write_result(filename,
                                          sentences_xml_elements[index],
                                          resources_path, output_path,
                                          prediction_results[index][0][0],
                                          vocab=vocab_label_bn,
                                          enable_coarse_grained=1,
                                          vocab_for_coarse=None)
    print("Successfully saved {} out of {}".format(correctly_saved, len(prediction_results)))
    del prediction_results
    print("Of these, {} were MFS".format(mfs_counter))
    # Reset the module-level MFS counter for the next prediction run.
    mfs_counter = 0
    return
def predict_wordnet_domains(input_path : str, output_path : str, resources_path : str) -> None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <wordnetDomain>" format (e.g. "d000.s000.t000 sport").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print(">>>> WORDNET DOMAINS PREDICTION")
    prediction_results, sentences_xml_elements = __predict(input_path,resources_path)
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    correctly_saved = 0
    # Babelnet id -> WordNet-domains mapping, used as the MFS fallback vocab.
    bn2wndom = get_bn2wndomains()
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    filename = filename[:-3]+"wndomains.gold.key.txt"
    for index in range(len(prediction_results)):
        # prediction_results[index][1] is the WordNet-domains head's output;
        # mode 2 = coarse-grained with "factotum" fallback.
        correctly_saved += __write_result(filename,
                                          sentences_xml_elements[index],
                                          resources_path, output_path,
                                          prediction_results[index][1][0],
                                          vocab=vocab_label_wndmn,
                                          enable_coarse_grained=2,
                                          vocab_for_coarse=bn2wndom)
    print("Successfully saved {} out of {}".format(correctly_saved, len(prediction_results)))
    del prediction_results
    print("Of these, {} were MFS".format(mfs_counter))
    # Reset the module-level MFS counter for the next prediction run.
    mfs_counter = 0
    return
def predict_lexicographer(input_path : str, output_path : str, resources_path : str) -> None:
    """
    DO NOT MODIFY THE SIGNATURE!
    This is the skeleton of the prediction function.
    The predict function will build your model, load the weights from the checkpoint and write a new file (output_path)
    with your predictions in the "<id> <lexicographerId>" format (e.g. "d000.s000.t000 noun.animal").
    The resources folder should contain everything you need to make the predictions. It is the "resources" folder in your submission.
    N.B. DO NOT HARD CODE PATHS IN HERE. Use resource_path instead, otherwise we will not be able to run the code.
    If you don't know what HARD CODING means see: https://en.wikipedia.org/wiki/Hard_coding
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: None
    """
    global mfs_counter
    print(">>>> LEXICOGRAPHER PREDICTION")
    prediction_results, sentences_xml_elements = __predict(input_path, resources_path)
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    correctly_saved = 0
    filename = os.path.normpath(input_path)
    filename = filename.split(os.sep)[-1]
    # Babelnet id -> lexicographer-id mapping, used as the MFS fallback vocab.
    bn2lex = get_bn2lex()
    filename = filename[:-3] + "lexicon.gold.key.txt"
    for index in range(len(prediction_results)):
        # prediction_results[index][2] is the lexicographer head's output;
        # mode 3 = coarse-grained lexicon prediction.
        correctly_saved += __write_result(filename,
                                          sentences_xml_elements[index],
                                          resources_path,output_path,
                                          prediction_results[index][2][0],
                                          vocab= vocab_label_lex,
                                          enable_coarse_grained=3,
                                          vocab_for_coarse=bn2lex)
    print("Successfully saved {} out of {}".format(correctly_saved, len(prediction_results)))
    del prediction_results
    print("Of these, {} were MFS".format(mfs_counter))
    # Reset the module-level MFS counter for the next prediction run.
    mfs_counter = 0
    return
def __predict(input_path : str, resources_path : str) -> Tuple:
    """
    Actually predicts a sentence and returns the predictions in the requested formats
    :param input_path: the path of the input file to predict in the same format as Raganato's framework (XML files you downloaded).
    :param output_path: the path of the output file (where you save your predictions)
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :return: The actual prediction by the network
    """
    train, etree_data = load_dataset(input_path)
    # Drop empty sentences so the model never receives an empty input.
    train = [dato for dato in train if dato]
    vocab_label_wndmn = create_mapping_dictionary(resources_path, mode='wndmn')
    vocab_label_bn = create_mapping_dictionary(resources_path, mode='bn')
    vocab_label_lex = create_mapping_dictionary(resources_path, mode='lex')
    # Three output sizes: one softmax head per task (bn, wndmn, lex).
    modello = WSD(resources_path+"/vocabularies/bert_vocab.txt", [len(vocab_label_bn), len(vocab_label_wndmn), len(vocab_label_lex)], dropout=0.1, recurrent_dropout=0.1,learning_rate=0.0003)
    tokenizatore = modello.tokenizatore
    modello.model.load_weights(resources_path+"/saved_model/model_20_2.14.h5")
    to_return = []
    # All third-level XML nodes, i.e. the <sentence> elements of the corpus.
    sentences_xml_elements = etree_data.xpath("/*/*/*")
    for sentence in train:
        # BERT-style inputs: token ids, attention mask, segment ids.
        feature_1, feature_2, feature_3 = convert_sentence_to_features_no_padding(sentence,tokenizatore)
        results = modello.model.predict(
            {'input_word_ids': feature_1, 'input_mask': feature_2, 'segment_ids': feature_3},
            verbose=1
        )
        to_return.append(results)
    del vocab_label_lex
    del vocab_label_wndmn
    del vocab_label_bn
    return to_return, sentences_xml_elements
def __write_result(filename: str,
                   frase,
                   resources_path: str,
                   outputh_path: str,
                   predictions,
                   vocab = None,
                   enable_coarse_grained: int = 1,
                   vocab_for_coarse = None) -> int:
    """
    Write results in the file system
    :param filename: the name of the file to save
    :param frase: the object from which recover the sentence
    :param resources_path: the path of the resources folder containing your model and stuff you might need.
    :param output_path: the path of the output file (where you save your predictions)
    :param predictions: the predictions made by the system
    :param vocab: the vocab needed for giving a sense
    :param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to 1. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :param vocab_for_coarse: The vocab in support of mode 2 or 3
    :return: 1 if succeeds
    """
    global mfs_counter
    bn2wn = get_bn2wn()
    lemma2wn = reload_word_mapping(resources_path+"/mapping/lemma2wn.txt")
    to_write = []
    for index, parola in enumerate(frase):
        name = parola.xpath('name()')
        # Only <instance> elements carry a sense to disambiguate.
        if name == 'instance':
            id = parola.get('id')
            list_of_possible_senses_first_step = lemma2wn.get(parola.text)
            if not list_of_possible_senses_first_step:
                # MFS fallback: lemma unknown to the mapping.
                the_actual_meaning = MFS(parola,
                                         bn2wn,
                                         vocab2=vocab_for_coarse,
                                         pred_case=enable_coarse_grained)
                mfs_counter += 1
                to_write.append((id, the_actual_meaning))
                continue
            list_of_possible_senses_bn_version = convert_from_wnlist_2_bnlist(list_of_possible_senses_first_step, bn2wn)
            candidates,list_of_possible_senses_bn_version = create_custom_label(list_of_possible_senses_bn_version,
                                                     parola.text,
                                                     vocab,
                                                     predictions[index],
                                                     enable_coarse_grained=enable_coarse_grained)
            the_actual_meaning = None
            if candidates:
                # Pick the candidate sense with the highest network score.
                argmax = np.argmax(candidates)
                the_actual_meaning = list_of_possible_senses_bn_version[argmax]
            else:
                #MFS fallback: no candidate survived the vocab lookup.
                mfs_counter += 1
                the_actual_meaning = MFS(parola,
                                         bn2wn,
                                         vocab2=vocab_for_coarse,
                                         pred_case=enable_coarse_grained)
            to_write.append((id, the_actual_meaning))
    # Append mode: successive sentences accumulate in the same key file.
    with open(outputh_path + "/"+filename, "a") as test_saving:
        for tupla in to_write:
            test_saving.write(tupla[0] + " " + tupla[1]+"\n")
    del to_write
    del lemma2wn
    del bn2wn
    return 1
def MFS(parola, vocab: Dict, vocab2:Dict = None, pred_case: int = 1) -> str:
    """
    Returns the sense by applying the Most Frequent Sense (MFS) strategy
    :param parola: the Element object to which associate a sense
    :param vocab: the vocab needed for giving a sense
    :param vocab2: default to None. The other vocabulary to use if coarse-grained mode is enabled. Has to be populated if enable_coarse_grained
    :param pred_case: whether to adopt a "rollback" strategy such as MFS or not. Possible values:
    1 --> Means I'm predicting with Babelnet. No extra precautions needed
    2 --> Means I'm predicting with WordNet Domains. Need to consult the vocab. If I don't find anything, the empty class "factotum" is returned instead
    3 --> Means I'm predicting with Lexicon. Need to consult the vocab.
    :return: the chosen sense with the MFS technique
    """
    pos = parola.get('pos')
    pos_input = __decide_pos(pos)
    # WordNet lists synsets most-frequent first, so [0] is the MFS pick.
    wordnet_object = wordnet.synsets(parola.get('lemma'), pos=pos_input)
    try:
        wordnet_object = wordnet_object[0]
    except:
        # NOTE(review): bare except only prints; if no synset exists,
        # wordnet_object stays a (empty) list and .offset() below fails.
        print(wordnet_object)
        print(parola.text)
    # Build the "wn:<zero-padded offset><pos>" key used by the bn->wn vocab.
    wn_synset = "wn:" + str(wordnet_object.offset()).zfill(8) + wordnet_object.pos()
    # NOTE(review): next() without default raises StopIteration when the
    # synset is missing from vocab — TODO confirm this cannot happen here.
    the_actual_meaning = next(key for key, value in vocab.items() if wn_synset in value)
    to_return = __extrapolate_value_for_MFS(the_actual_meaning,vocab=vocab2, pred_case=pred_case)
    return to_return
def __extrapolate_value_for_MFS(value: object, pred_case: int = 1, vocab: Dict = None) -> str:
    """
    Resolve the Babelnet ID held by *value* into the final prediction string
    for the requested prediction mode.

    :param value: the value (str or one-element-of-interest list) holding the
        Babelnet ID found by MFS
    :param pred_case: prediction mode:
        1 --> Babelnet: the ID itself is the answer
        2 --> WordNet Domains: look the ID up in *vocab*; fall back to the
              empty class "factotum" when absent
        3 --> Lexicon: look the ID up in *vocab*
    :param vocab: the support vocabulary for modes 2 and 3
    :return: the actual meaning found with MFS
    """
    meaning = __type_checker(value)
    if pred_case == 1:
        return meaning
    if pred_case == 2:
        mapped = vocab.get(meaning)
        return mapped[0] if mapped else "factotum"
    if pred_case == 3:
        return vocab.get(meaning)[0]
def __type_checker(value: object) -> str:
"""
Checks the type of the object and, accordingly, returns it
:param value: the value to examinate
:return: a string that is the value expected
"""
if type(value) == str:
return value
if type(value) == list:
return value[0]
def __decide_pos(pos: str) -> str:
"""
Decides the WN representation of the given pos in input
:param pos: the pos to interpret with WordNet
:return: the WN representation of the given pos
"""
to_return = None
if pos == 'NOUN':
to_return = "n"
if pos == 'VERB':
to_return = 'v'
if pos == 'ADJ':
to_return = 'a'
if pos == 'ADV':
to_return = 'r'
return to_return
def convert_from_wnlist_2_bnlist(list_of_bn: List, vocab: Dict) -> List:
"""
Cast the given list (which contains only WN ids) to Babelnet IDs
:param list_of_bn: the list to cast
:param vocab: the vocabulary to use to perform the conversion
:return: the converted list
"""
list_of_possible_senses_bn_version = []
for candidate in list_of_bn:
is_it_here = next(key for key, value in vocab.items() if candidate in value)
if is_it_here:
list_of_possible_senses_bn_version.append(is_it_here if type(is_it_here) == str else is_it_here[0])
return list_of_possible_senses_bn_version
def create_custom_label(list_of_possible_senses: List, word: str, vocab: Dict, predictions, enable_coarse_grained: int = 1) -> List:
"""
Converts the list of babelnet IDS to a number and outputs the converted list
:param list_of_possible_senses: the list that contains all the babelnet's IDs
:param word: the word for which we are predicting the sense in a specific moment
:param vocab: the vocabulary Word -> Serial to exploit for the conversion
:param predictions: the predictions made by the system
:param enable_coarse_grained: changes the flow of the function from fine-grained to coarse-grained. Default to None. Possible values:
1 --> The flow will still be the same
2,3 -> Flow will change, triggering the first step for the coarse-grained approach.
:return: a List with the IDs converted
"""
to_return = []
list_of_indices_to_delete = []
for indice in range(len(list_of_possible_senses)):
new_string = word + "_" + list_of_possible_senses[indice] if enable_coarse_grained == 1 else list_of_possible_senses[indice]
conversion = None
try:
conversion = int(vocab[new_string])
to_return.append(predictions[conversion])
except:
list_of_indices_to_delete.append(indice)
continue
if list_of_indices_to_delete:
list_of_possible_senses = [list_of_possible_senses[prov_index] for prov_index in range(len(list_of_possible_senses)) if prov_index not in list_of_indices_to_delete]
return to_return, list_of_possible_senses
if __name__ == "__main__":
predict_babelnet("/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml", "../output", "/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources")
#predict_wordnet_domains("/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml", "../output", "/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources")
#predict_lexicographer("/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/dataset/test/senseval3.data.xml", "../output", "/Users/gimmi/Desktop/Università/MAGISTRALE/NLP/nlp-finalproject/resources")
|
[
7,
10,
11,
13,
15
] |
2,583 |
7bb9455e6f0c15ab0be6963cff06ff41df73e6e0
|
<mask token>
class Config(object):
def __init__(self, name=None):
"""
Load config for colin.
:param name: str (name of the config file (without .json), default is "default"
"""
self.name = name or 'default'
config_path = os.path.join(get_config_directory(), self.name + JSON)
try:
with open(config_path, mode='r') as config_file:
self.config_dict = json.load(config_file)
except Exception as ex:
raise ColinConfigException("Config file '{}' cannot be loaded."
.format(config_path))
def get_checks(self, target_type, group=None, severity=None, tags=None):
"""
Get all checks for given type/group/severity/tags.
:param target_type: TargetType enum
:param group: str (if not group, get checks from all groups/directories)
:param severity: str (optional x required)
:param tags: list of str
:return: list of check instances
"""
check_files = self._get_check_files(group=group, severity=severity)
groups = {}
for group, check_files in iteritems(check_files):
checks = []
for severity, check_file in check_files:
check_classes = load_check_implementation(path=check_file,
severity=severity)
for check_class in check_classes:
if is_compatible(target_type, check_class, severity, tags):
checks.append(check_class)
groups[group] = checks
return groups
<mask token>
<mask token>
def _get_check_groups(self, group=None):
"""
Get check group to validate
:param group: str (if None, all from the config will be used)
:return: list of str (group names)
"""
groups = [g for g in self.config_dict]
if group:
if group in groups:
check_groups = [group]
else:
check_groups = []
else:
check_groups = groups
return check_groups
def _get_check_files(self, group=None, severity=None):
"""
Get file names with checks filtered by group and severity.
:param group: str (if None, all groups will be used)
:param severity: str (if None, all severities will be used)
:return: list of str (absolute paths)
"""
groups = {}
for g in self._get_check_groups(group):
check_files = []
for sev, files in iteritems(self.config_dict[g]):
if not severity or severity == sev:
check_files += Config.get_check_files(group=g, names=
files, severity=sev)
groups[g] = check_files
return groups
<mask token>
|
<mask token>
class Config(object):
def __init__(self, name=None):
"""
Load config for colin.
:param name: str (name of the config file (without .json), default is "default"
"""
self.name = name or 'default'
config_path = os.path.join(get_config_directory(), self.name + JSON)
try:
with open(config_path, mode='r') as config_file:
self.config_dict = json.load(config_file)
except Exception as ex:
raise ColinConfigException("Config file '{}' cannot be loaded."
.format(config_path))
def get_checks(self, target_type, group=None, severity=None, tags=None):
"""
Get all checks for given type/group/severity/tags.
:param target_type: TargetType enum
:param group: str (if not group, get checks from all groups/directories)
:param severity: str (optional x required)
:param tags: list of str
:return: list of check instances
"""
check_files = self._get_check_files(group=group, severity=severity)
groups = {}
for group, check_files in iteritems(check_files):
checks = []
for severity, check_file in check_files:
check_classes = load_check_implementation(path=check_file,
severity=severity)
for check_class in check_classes:
if is_compatible(target_type, check_class, severity, tags):
checks.append(check_class)
groups[group] = checks
return groups
@staticmethod
def get_check_file(group, name):
"""
Get the check file from given group with given name.
:param group: str
:param name: str
:return: str (path)
"""
return os.path.join(get_checks_path(), group, name + '.py')
<mask token>
def _get_check_groups(self, group=None):
"""
Get check group to validate
:param group: str (if None, all from the config will be used)
:return: list of str (group names)
"""
groups = [g for g in self.config_dict]
if group:
if group in groups:
check_groups = [group]
else:
check_groups = []
else:
check_groups = groups
return check_groups
def _get_check_files(self, group=None, severity=None):
"""
Get file names with checks filtered by group and severity.
:param group: str (if None, all groups will be used)
:param severity: str (if None, all severities will be used)
:return: list of str (absolute paths)
"""
groups = {}
for g in self._get_check_groups(group):
check_files = []
for sev, files in iteritems(self.config_dict[g]):
if not severity or severity == sev:
check_files += Config.get_check_files(group=g, names=
files, severity=sev)
groups[g] = check_files
return groups
<mask token>
|
<mask token>
class Config(object):
def __init__(self, name=None):
"""
Load config for colin.
:param name: str (name of the config file (without .json), default is "default"
"""
self.name = name or 'default'
config_path = os.path.join(get_config_directory(), self.name + JSON)
try:
with open(config_path, mode='r') as config_file:
self.config_dict = json.load(config_file)
except Exception as ex:
raise ColinConfigException("Config file '{}' cannot be loaded."
.format(config_path))
def get_checks(self, target_type, group=None, severity=None, tags=None):
"""
Get all checks for given type/group/severity/tags.
:param target_type: TargetType enum
:param group: str (if not group, get checks from all groups/directories)
:param severity: str (optional x required)
:param tags: list of str
:return: list of check instances
"""
check_files = self._get_check_files(group=group, severity=severity)
groups = {}
for group, check_files in iteritems(check_files):
checks = []
for severity, check_file in check_files:
check_classes = load_check_implementation(path=check_file,
severity=severity)
for check_class in check_classes:
if is_compatible(target_type, check_class, severity, tags):
checks.append(check_class)
groups[group] = checks
return groups
@staticmethod
def get_check_file(group, name):
"""
Get the check file from given group with given name.
:param group: str
:param name: str
:return: str (path)
"""
return os.path.join(get_checks_path(), group, name + '.py')
@staticmethod
def get_check_files(group, names, severity):
"""
Get the check files from given group with given names.
:param severity: str
:param group: str
:param names: list of str
:return: list of str (paths)
"""
check_files = []
for f in names:
check_file = Config.get_check_file(group=group, name=f)
check_files.append((severity, check_file))
return check_files
def _get_check_groups(self, group=None):
"""
Get check group to validate
:param group: str (if None, all from the config will be used)
:return: list of str (group names)
"""
groups = [g for g in self.config_dict]
if group:
if group in groups:
check_groups = [group]
else:
check_groups = []
else:
check_groups = groups
return check_groups
def _get_check_files(self, group=None, severity=None):
"""
Get file names with checks filtered by group and severity.
:param group: str (if None, all groups will be used)
:param severity: str (if None, all severities will be used)
:return: list of str (absolute paths)
"""
groups = {}
for g in self._get_check_groups(group):
check_files = []
for sev, files in iteritems(self.config_dict[g]):
if not severity or severity == sev:
check_files += Config.get_check_files(group=g, names=
files, severity=sev)
groups[g] = check_files
return groups
<mask token>
|
<mask token>
class Config(object):
def __init__(self, name=None):
"""
Load config for colin.
:param name: str (name of the config file (without .json), default is "default"
"""
self.name = name or 'default'
config_path = os.path.join(get_config_directory(), self.name + JSON)
try:
with open(config_path, mode='r') as config_file:
self.config_dict = json.load(config_file)
except Exception as ex:
raise ColinConfigException("Config file '{}' cannot be loaded."
.format(config_path))
def get_checks(self, target_type, group=None, severity=None, tags=None):
"""
Get all checks for given type/group/severity/tags.
:param target_type: TargetType enum
:param group: str (if not group, get checks from all groups/directories)
:param severity: str (optional x required)
:param tags: list of str
:return: list of check instances
"""
check_files = self._get_check_files(group=group, severity=severity)
groups = {}
for group, check_files in iteritems(check_files):
checks = []
for severity, check_file in check_files:
check_classes = load_check_implementation(path=check_file,
severity=severity)
for check_class in check_classes:
if is_compatible(target_type, check_class, severity, tags):
checks.append(check_class)
groups[group] = checks
return groups
@staticmethod
def get_check_file(group, name):
"""
Get the check file from given group with given name.
:param group: str
:param name: str
:return: str (path)
"""
return os.path.join(get_checks_path(), group, name + '.py')
@staticmethod
def get_check_files(group, names, severity):
"""
Get the check files from given group with given names.
:param severity: str
:param group: str
:param names: list of str
:return: list of str (paths)
"""
check_files = []
for f in names:
check_file = Config.get_check_file(group=group, name=f)
check_files.append((severity, check_file))
return check_files
def _get_check_groups(self, group=None):
"""
Get check group to validate
:param group: str (if None, all from the config will be used)
:return: list of str (group names)
"""
groups = [g for g in self.config_dict]
if group:
if group in groups:
check_groups = [group]
else:
check_groups = []
else:
check_groups = groups
return check_groups
def _get_check_files(self, group=None, severity=None):
"""
Get file names with checks filtered by group and severity.
:param group: str (if None, all groups will be used)
:param severity: str (if None, all severities will be used)
:return: list of str (absolute paths)
"""
groups = {}
for g in self._get_check_groups(group):
check_files = []
for sev, files in iteritems(self.config_dict[g]):
if not severity or severity == sev:
check_files += Config.get_check_files(group=g, names=
files, severity=sev)
groups[g] = check_files
return groups
def get_checks_path():
"""
Get path to checks.
:return: str (absolute path of directory with checks)
"""
rel_path = os.path.join(os.pardir, os.pardir, os.pardir, 'checks')
return os.path.abspath(os.path.join(__file__, rel_path))
<mask token>
|
import json
import os
from six import iteritems
from ..exceptions import ColinConfigException
from ..constant import CONFIG_DIRECTORY, JSON
from ..loader import load_check_implementation
from ..target import is_compatible
class Config(object):
def __init__(self, name=None):
"""
Load config for colin.
:param name: str (name of the config file (without .json), default is "default"
"""
self.name = name or "default"
config_path = os.path.join(get_config_directory(), self.name + JSON)
try:
with open(config_path, mode='r') as config_file:
self.config_dict = json.load(config_file)
except Exception as ex:
raise ColinConfigException("Config file '{}' cannot be loaded.".format(config_path))
def get_checks(self, target_type, group=None, severity=None, tags=None):
"""
Get all checks for given type/group/severity/tags.
:param target_type: TargetType enum
:param group: str (if not group, get checks from all groups/directories)
:param severity: str (optional x required)
:param tags: list of str
:return: list of check instances
"""
check_files = self._get_check_files(group=group,
severity=severity)
groups = {}
for (group, check_files) in iteritems(check_files):
checks = []
for severity, check_file in check_files:
check_classes = load_check_implementation(path=check_file, severity=severity)
for check_class in check_classes:
if is_compatible(target_type, check_class, severity, tags):
checks.append(check_class)
groups[group] = checks
return groups
@staticmethod
def get_check_file(group, name):
"""
Get the check file from given group with given name.
:param group: str
:param name: str
:return: str (path)
"""
return os.path.join(get_checks_path(), group, name + ".py")
@staticmethod
def get_check_files(group, names, severity):
"""
Get the check files from given group with given names.
:param severity: str
:param group: str
:param names: list of str
:return: list of str (paths)
"""
check_files = []
for f in names:
check_file = Config.get_check_file(group=group,
name=f)
check_files.append((severity, check_file))
return check_files
def _get_check_groups(self, group=None):
"""
Get check group to validate
:param group: str (if None, all from the config will be used)
:return: list of str (group names)
"""
groups = [g for g in self.config_dict]
if group:
if group in groups:
check_groups = [group]
else:
check_groups = []
else:
check_groups = groups
return check_groups
def _get_check_files(self, group=None, severity=None):
"""
Get file names with checks filtered by group and severity.
:param group: str (if None, all groups will be used)
:param severity: str (if None, all severities will be used)
:return: list of str (absolute paths)
"""
groups = {}
for g in self._get_check_groups(group):
check_files = []
for sev, files in iteritems(self.config_dict[g]):
if (not severity) or severity == sev:
check_files += Config.get_check_files(group=g,
names=files,
severity=sev)
groups[g] = check_files
return groups
def get_checks_path():
"""
Get path to checks.
:return: str (absolute path of directory with checks)
"""
rel_path = os.path.join(os.pardir, os.pardir, os.pardir, "checks")
return os.path.abspath(os.path.join(__file__, rel_path))
def get_config_directory():
"""
Get the directory with config files
:return: str
"""
local_share = os.path.join(os.path.expanduser("~"),
".local",
CONFIG_DIRECTORY)
if os.path.isdir(local_share) and os.path.exists(local_share):
return local_share
usr_local_share = os.path.join("/usr/local", CONFIG_DIRECTORY)
if os.path.isdir(usr_local_share) and os.path.exists(usr_local_share):
return usr_local_share
raise ColinConfigException("Config directory cannot be found.")
|
[
5,
6,
7,
8,
11
] |
2,584 |
de003440be513d53b87f526ea95c0fbbc4a9f66f
|
<mask token>
|
<mask token>
def getlessonlist():
path = os.path.expanduser('~/.buzzers')
dirs = os.walk(os.path.expanduser('~/.buzzers/lessons'))
lessons = []
for root, d, fs in dirs:
fullfs = [(root + '/' + f) for f in fs]
lessons.extend(fs)
return lessons
|
<mask token>
def savelesson(text):
os.path.expanduser('~/.buzzers/lessons')
def getlessonlist():
path = os.path.expanduser('~/.buzzers')
dirs = os.walk(os.path.expanduser('~/.buzzers/lessons'))
lessons = []
for root, d, fs in dirs:
fullfs = [(root + '/' + f) for f in fs]
lessons.extend(fs)
return lessons
|
import os
def savelesson(text):
os.path.expanduser('~/.buzzers/lessons')
def getlessonlist():
path = os.path.expanduser('~/.buzzers')
dirs = os.walk(os.path.expanduser('~/.buzzers/lessons'))
lessons = []
for root, d, fs in dirs:
fullfs = [(root + '/' + f) for f in fs]
lessons.extend(fs)
return lessons
|
import os
def savelesson(text):
os.path.expanduser("~/.buzzers/lessons")
def getlessonlist():
path = os.path.expanduser("~/.buzzers")
dirs = os.walk(os.path.expanduser("~/.buzzers/lessons"))
#"/home/loadquo/files/lhsgghc/Programs/PCSoftware/src/admin/lessons")
lessons = []
for root, d, fs in dirs:
fullfs = [root +"/"+ f for f in fs]
lessons.extend(fs)
return lessons
|
[
0,
1,
2,
3,
4
] |
2,585 |
1a979933eb02e9d12dc034021448cbade59abc48
|
<mask token>
class RosEnvImg(RosEnvAbs):
<mask token>
<mask token>
def get_observation_(self):
"""
Function returns state that will be fed to the rl-agent
It includes
the laserscan and the waypoint information stored in an image.
:return: state
"""
obs = np.zeros(self.STATE_SIZE, dtype=np.float)
obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.
STATE_SIZE[0:2])
if self.debug_:
self.debugger_.show_input_occ_grid(self.input_img_)
self.debugger_.show_input_image(obs[:, :, 0])
return obs
|
<mask token>
class RosEnvImg(RosEnvAbs):
<mask token>
def __init__(self, ns, state_collector, execution_mode, task_mode,
state_size, observation_space, stack_offset, action_size,
action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):
state_collector.set_state_mode(0)
super(RosEnvImg, self).__init__(ns, state_collector, execution_mode,
task_mode, state_size, observation_space, stack_offset,
action_size, action_space, debug, goal_radius, wp_radius,
robot_radius, reward_fnc)
def get_observation_(self):
"""
Function returns state that will be fed to the rl-agent
It includes
the laserscan and the waypoint information stored in an image.
:return: state
"""
obs = np.zeros(self.STATE_SIZE, dtype=np.float)
obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.
STATE_SIZE[0:2])
if self.debug_:
self.debugger_.show_input_occ_grid(self.input_img_)
self.debugger_.show_input_image(obs[:, :, 0])
return obs
|
<mask token>
class RosEnvImg(RosEnvAbs):
"""
This (abstract) class is a simulation environment wrapper for
the X-Image Representation.
"""
def __init__(self, ns, state_collector, execution_mode, task_mode,
state_size, observation_space, stack_offset, action_size,
action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):
state_collector.set_state_mode(0)
super(RosEnvImg, self).__init__(ns, state_collector, execution_mode,
task_mode, state_size, observation_space, stack_offset,
action_size, action_space, debug, goal_radius, wp_radius,
robot_radius, reward_fnc)
def get_observation_(self):
"""
Function returns state that will be fed to the rl-agent
It includes
the laserscan and the waypoint information stored in an image.
:return: state
"""
obs = np.zeros(self.STATE_SIZE, dtype=np.float)
obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.
STATE_SIZE[0:2])
if self.debug_:
self.debugger_.show_input_occ_grid(self.input_img_)
self.debugger_.show_input_image(obs[:, :, 0])
return obs
|
<mask token>
import numpy as np
from rl_agent.env_wrapper.ros_env import RosEnvAbs
import rospy
class RosEnvImg(RosEnvAbs):
"""
This (abstract) class is a simulation environment wrapper for
the X-Image Representation.
"""
def __init__(self, ns, state_collector, execution_mode, task_mode,
state_size, observation_space, stack_offset, action_size,
action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):
state_collector.set_state_mode(0)
super(RosEnvImg, self).__init__(ns, state_collector, execution_mode,
task_mode, state_size, observation_space, stack_offset,
action_size, action_space, debug, goal_radius, wp_radius,
robot_radius, reward_fnc)
def get_observation_(self):
"""
Function returns state that will be fed to the rl-agent
It includes
the laserscan and the waypoint information stored in an image.
:return: state
"""
obs = np.zeros(self.STATE_SIZE, dtype=np.float)
obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.
STATE_SIZE[0:2])
if self.debug_:
self.debugger_.show_input_occ_grid(self.input_img_)
self.debugger_.show_input_image(obs[:, :, 0])
return obs
|
'''
@name: ros_env_img.py
@brief: This (abstract) class is a simulation environment wrapper for
the X-Image Representation.
@author: Ronja Gueldenring
@version: 3.5
@date: 2019/04/05
'''
# python relevant
import numpy as np
# custom classes
from rl_agent.env_wrapper.ros_env import RosEnvAbs
# ros-relevant
import rospy
class RosEnvImg(RosEnvAbs):
'''
This (abstract) class is a simulation environment wrapper for
the X-Image Representation.
'''
def __init__(self, ns, state_collector, execution_mode, task_mode, state_size, observation_space, stack_offset, action_size, action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):
state_collector.set_state_mode(0)
super(RosEnvImg, self).__init__(ns, state_collector, execution_mode, task_mode, state_size, observation_space, stack_offset, action_size, action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc)
def get_observation_(self):
"""
Function returns state that will be fed to the rl-agent
It includes
the laserscan and the waypoint information stored in an image.
:return: state
"""
obs = np.zeros(self.STATE_SIZE, dtype=np.float)
obs[:,:,0] = np.array(self.input_img_.data).reshape((self.STATE_SIZE[0:2]))
if self.debug_:
self.debugger_.show_input_occ_grid(self.input_img_)
self.debugger_.show_input_image(obs[:,:,0])
return obs
|
[
2,
3,
4,
5,
6
] |
2,586 |
74e70056ddfd8963a254f1a789a9058554c5489e
|
<mask token>
|
<mask token>
class BertBasedTODModel(nn.Module):
<mask token>
def forward(self, input_ids, attention_mask, token_type_ids):
sequence_output, cls = self.bert_model(input_ids=input_ids,
attention_mask=attention_mask, token_type_ids=token_type_ids)
intent_preds = self.intent_classifier(cls)
slot_preds = self.slot_classifier(sequence_output)
return intent_preds, slot_preds
|
<mask token>
class BertBasedTODModel(nn.Module):
def __init__(self, bert_type, num_intent_labels, num_slot_labels):
super(BertBasedTODModel, self).__init__()
self.bert_model = BertModel.from_pretrained(bert_type)
self.num_intent_labels = num_intent_labels
self.num_slot_labels = num_slot_labels
self.bert_output_dim = 768
self.intent_classifier = nn.Sequential(nn.Dropout(0.2), nn.Linear(
self.bert_output_dim, self.num_intent_labels))
self.slot_classifier = nn.Sequential(nn.Dropout(0.2), nn.Linear(
self.bert_output_dim, self.num_slot_labels))
def forward(self, input_ids, attention_mask, token_type_ids):
sequence_output, cls = self.bert_model(input_ids=input_ids,
attention_mask=attention_mask, token_type_ids=token_type_ids)
intent_preds = self.intent_classifier(cls)
slot_preds = self.slot_classifier(sequence_output)
return intent_preds, slot_preds
|
import torch.nn as nn
from transformers import BertModel
class BertBasedTODModel(nn.Module):
def __init__(self, bert_type, num_intent_labels, num_slot_labels):
super(BertBasedTODModel, self).__init__()
self.bert_model = BertModel.from_pretrained(bert_type)
self.num_intent_labels = num_intent_labels
self.num_slot_labels = num_slot_labels
self.bert_output_dim = 768
self.intent_classifier = nn.Sequential(nn.Dropout(0.2), nn.Linear(
self.bert_output_dim, self.num_intent_labels))
self.slot_classifier = nn.Sequential(nn.Dropout(0.2), nn.Linear(
self.bert_output_dim, self.num_slot_labels))
def forward(self, input_ids, attention_mask, token_type_ids):
sequence_output, cls = self.bert_model(input_ids=input_ids,
attention_mask=attention_mask, token_type_ids=token_type_ids)
intent_preds = self.intent_classifier(cls)
slot_preds = self.slot_classifier(sequence_output)
return intent_preds, slot_preds
|
import torch.nn as nn
from transformers import BertModel
class BertBasedTODModel(nn.Module):
def __init__(self, bert_type, num_intent_labels, num_slot_labels):
super(BertBasedTODModel, self).__init__()
self.bert_model = BertModel.from_pretrained(bert_type)
self.num_intent_labels = num_intent_labels
self.num_slot_labels = num_slot_labels
self.bert_output_dim = 768
self.intent_classifier = nn.Sequential(nn.Dropout(0.2), nn.Linear(self.bert_output_dim, self.num_intent_labels))
self.slot_classifier = nn.Sequential(nn.Dropout(0.2), nn.Linear(self.bert_output_dim, self.num_slot_labels))
def forward(self, input_ids, attention_mask, token_type_ids):
sequence_output, cls = self.bert_model(input_ids=input_ids, attention_mask=attention_mask,
token_type_ids=token_type_ids)
intent_preds = self.intent_classifier(cls)
slot_preds = self.slot_classifier(sequence_output)
return intent_preds, slot_preds
|
[
0,
2,
3,
4,
5
] |
2,587 |
6050e83e73faaf40cbd5455efd3ad01e4e131188
|
<mask token>
|
<mask token>
while a != 0:
t = a % 10
s = s + t
a = a // 10
print(s)
|
a = int(input())
s = 0
t = 0
while a != 0:
t = a % 10
s = s + t
a = a // 10
print(s)
|
a=int(input())
s=0
t=0
while(a!=0):
t=a%10
s=s+t
a=a//10
print(s)
| null |
[
0,
1,
2,
3
] |
2,588 |
700d876dd45548b74b563ed86f8124fa666e1739
|
a=10
b=20
c=400
d=100
e=500
f=30
z=a+b+c+d+e+f
print "The total sum is",z
print "variable d added"
print "Variable e added"
print "Variable f is equal to 30"
print "You are coming from test branch"
print "Your are very new in this branch"
| null | null | null | null |
[
0
] |
2,589 |
9cb4e550a0d19b44ec8357882f353b04748b213b
|
<mask token>
|
class Config(object):
<mask token>
<mask token>
|
class Config(object):
<mask token>
def get(self, section, name):
return self.config_dict[section][name]
|
class Config(object):
def __init__(self):
self.config_dict = {'data_path': {'vocab_path':
'../data/rumor/cnews.vocab.txt', 'trainingSet_path':
'../data/rumor/train_list.txt', 'valSet_path':
'../data/rumor/val_list.txt', 'testingSet_path':
'../data/rumor/test_list.txt'}, 'CNN_training_rule': {
'embedding_dim': 64, 'seq_length': 200, 'num_classes': 2,
'conv1_num_filters': 128, 'conv1_kernel_size': 1,
'conv2_num_filters': 128, 'conv2_kernel_size': 1, 'vocab_size':
5000, 'hidden_dim': 256, 'dropout_keep_prob': 0.5,
'learning_rate': 0.001, 'batch_size': 64, 'epochs': 5,
'print_per_batch': 50, 'save_per_batch': 500}, 'LSTM': {
'seq_length': 300, 'num_classes': 2, 'vocab_size': 5000,
'batch_size': 64}, 'result': {'CNN_model_path': 'CNN_model.h5',
'LSTM_model_path': 'LSTM_model.h5'}}
def get(self, section, name):
return self.config_dict[section][name]
|
# -*- coding: utf-8 -*-
class Config(object):
def __init__(self):
self.config_dict = {
"data_path": {
# "vocab_path": "../data/cnews/cnews.vocab.txt",
"vocab_path": "../data/rumor/cnews.vocab.txt",
# "trainingSet_path": "../data/cnews/cnews.train.txt",
"trainingSet_path": "../data/rumor/train_list.txt",
# "valSet_path": "../data/cnews/cnews.val.txt",
"valSet_path": "../data/rumor/val_list.txt",
# "testingSet_path": "../data/cnews/cnews.test.txt",
"testingSet_path": "../data/rumor/test_list.txt"
},
"CNN_training_rule": {
"embedding_dim": 64,
"seq_length": 200,
"num_classes": 2,
"conv1_num_filters": 128,
"conv1_kernel_size": 1,
"conv2_num_filters": 128,
"conv2_kernel_size": 1,
"vocab_size": 5000,
"hidden_dim": 256,
"dropout_keep_prob": 0.5,
"learning_rate": 1e-3,
"batch_size": 64,
"epochs": 5,
"print_per_batch": 50,
"save_per_batch": 500
},
"LSTM": {
"seq_length": 300,
"num_classes": 2,
"vocab_size": 5000,
"batch_size": 64
},
"result": {
"CNN_model_path": "CNN_model.h5",
"LSTM_model_path": "LSTM_model.h5"
}
}
def get(self, section, name):
return self.config_dict[section][name]
|
[
0,
1,
2,
3,
4
] |
2,590 |
2aee4af2e5a5c3f59dde4d9dd46f8d124a32fb27
|
<mask token>
def basic_block_coverage(r2, translation_blocks):
"""
Calculate the basic block coverage based on the covered TBs.
Returns a set of *covered* basic block start addresses
"""
covered_bbs = set()
for func_addr in function_addrs(r2):
graph = r2.cmdj('agj 0x%x' % func_addr)
assert len(graph) == 1
graph = graph[0]
for tb_start_addr, tb_end_addr in translation_blocks:
for bb in graph['blocks']:
bb_start_addr = bb['offset']
bb_end_addr = bb_start_addr + bb['size']
if (bb_end_addr >= tb_start_addr >= bb_start_addr or
bb_start_addr <= tb_end_addr <= bb_end_addr):
covered_bbs.add(bb_start_addr)
return covered_bbs
def render_functions(r2, covered_bbs, output_dir):
"""
Renders SVG graphs of each of the functions in the program. Basic blocks
that were executed by S2E are coloured green.
The resulting SVG images are written to `output_dir`.
"""
for func_addr in function_addrs(r2):
func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']
dot_str = r2.cmd('ag 0x%x' % func_addr)
dot = pydot.graph_from_dot_data(dot_str)
if not dot:
continue
else:
dot = dot[0]
for node in dot.get_nodes():
node_name = node.get_name()
try:
if node_name.startswith('"'):
node_name = node_name[1:-1]
node_addr = int(node_name, 16)
except ValueError:
continue
if node_addr in covered_bbs:
node.set_fillcolor('darkolivegreen2')
svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name,
func_addr))
with open(svg_path, 'wb') as f:
svg = dot.create_svg()
f.write(svg)
<mask token>
|
<mask token>
def function_addrs(r2):
"""
Yield a list of all the function's start addresses.
"""
for addr in r2.cmdj('aflqj'):
yield int(addr, 16)
<mask token>
def basic_block_coverage(r2, translation_blocks):
"""
Calculate the basic block coverage based on the covered TBs.
Returns a set of *covered* basic block start addresses
"""
covered_bbs = set()
for func_addr in function_addrs(r2):
graph = r2.cmdj('agj 0x%x' % func_addr)
assert len(graph) == 1
graph = graph[0]
for tb_start_addr, tb_end_addr in translation_blocks:
for bb in graph['blocks']:
bb_start_addr = bb['offset']
bb_end_addr = bb_start_addr + bb['size']
if (bb_end_addr >= tb_start_addr >= bb_start_addr or
bb_start_addr <= tb_end_addr <= bb_end_addr):
covered_bbs.add(bb_start_addr)
return covered_bbs
def render_functions(r2, covered_bbs, output_dir):
    """
    Render an SVG control-flow graph for every function in the program,
    colouring the basic blocks that were executed by S2E green.

    The resulting SVG images are written to `output_dir`.
    """
    for func_addr in function_addrs(r2):
        func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']
        graphs = pydot.graph_from_dot_data(r2.cmd('ag 0x%x' % func_addr))
        if not graphs:
            continue
        graph = graphs[0]
        for node in graph.get_nodes():
            label = node.get_name()
            # Node names are (possibly quoted) hex addresses; anything that
            # does not parse as hex is not a basic block node.
            if label.startswith('"'):
                label = label[1:-1]
            try:
                node_addr = int(label, 16)
            except ValueError:
                continue
            if node_addr in covered_bbs:
                node.set_fillcolor('darkolivegreen2')
        svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name, func_addr))
        with open(svg_path, 'wb') as svg_file:
            svg_file.write(graph.create_svg())
<mask token>
|
<mask token>
def function_addrs(r2):
    """
    Yield the start address (as an int) of every function r2 knows about.
    """
    hex_addrs = r2.cmdj('aflqj')
    for hex_addr in hex_addrs:
        yield int(hex_addr, 16)
<mask token>
def basic_block_coverage(r2, translation_blocks):
    """
    Calculate the basic block coverage based on the covered TBs.

    ``translation_blocks`` is an iterable of ``(start, end)`` address pairs.
    Returns a set of *covered* basic block start addresses.
    """
    covered_bbs = set()
    for func_addr in function_addrs(r2):
        graph = r2.cmdj('agj 0x%x' % func_addr)
        assert len(graph) == 1
        graph = graph[0]
        for tb_start_addr, tb_end_addr in translation_blocks:
            for bb in graph['blocks']:
                bb_start_addr = bb['offset']
                bb_end_addr = bb_start_addr + bb['size']
                # A basic block is covered if its range intersects a covered
                # translation block at all. The interval-overlap test also
                # catches a BB lying strictly inside a TB, which checking
                # only TB-endpoint containment would miss.
                if tb_start_addr <= bb_end_addr and bb_start_addr <= tb_end_addr:
                    covered_bbs.add(bb_start_addr)
    return covered_bbs
def render_functions(r2, covered_bbs, output_dir):
    """
    Render an SVG control-flow graph for every function in the program,
    colouring the basic blocks that were executed by S2E green.

    The resulting SVG images are written to `output_dir`.
    """
    for func_addr in function_addrs(r2):
        func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']
        graphs = pydot.graph_from_dot_data(r2.cmd('ag 0x%x' % func_addr))
        if not graphs:
            continue
        graph = graphs[0]
        for node in graph.get_nodes():
            label = node.get_name()
            # Node names are (possibly quoted) hex addresses; anything that
            # does not parse as hex is not a basic block node.
            if label.startswith('"'):
                label = label[1:-1]
            try:
                node_addr = int(label, 16)
            except ValueError:
                continue
            if node_addr in covered_bbs:
                node.set_fillcolor('darkolivegreen2')
        svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name, func_addr))
        with open(svg_path, 'wb') as svg_file:
            svg_file.write(graph.create_svg())
def generate_graph(s2e_output_dir, s2e_num, project_name):
    """
    Generate coverage-annotated SVG function graphs for an S2E analysis.

    The images are written to ``<s2e_output_dir>/functions``. Returns a list
    of ``[function_name, relative_svg_path]`` pairs, or ``None`` when the
    S2E environment/project layout is invalid or no coverage data is found.
    """
    s2e_env_path = S2E_settings.S2E_ENVIRONMENT_FOLDER_PATH
    output_dir = os.path.join(s2e_output_dir, 'functions')
    # Create the output directory on first use; guarded so that re-running
    # the analysis does not crash on an already-existing directory.
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    # Check that the given S2E environment is legitimate
    if not os.path.isfile(os.path.join(s2e_env_path, 's2e.yaml')):
        print('ERROR: %s is not an S2E environment' % s2e_env_path)
        return
    # Check that the given project exists in the environment
    project_path = os.path.join(s2e_env_path, 'projects', project_name)
    if not os.path.isdir(project_path):
        print('ERROR: %s is not a valid project' % project_name)
        return
    # Check that the project has been executed at least once
    s2e_last_path = os.path.join(project_path, 's2e-last')
    if not os.path.isdir(s2e_last_path):
        print('ERROR: %s has no s2e-last' % project_name)
        return
    # Gather TB coverage files, whether written per-node or at the top level
    tb_coverage_files = (glob.glob(os.path.join(s2e_last_path, '*', 'tbcoverage-*.json')) +
                         glob.glob(os.path.join(s2e_last_path, 'tbcoverage-*.json')))
    if not tb_coverage_files:
        print('ERROR: No translation block coverage files found in s2e-last. '
              'Did you enable the ``TranslationBlockCoverage`` plugin in '
              's2e-config.lua?')
        return
    # Parse the TB coverage files
    covered_tbs = set()
    for tb_coverage_file in tb_coverage_files:
        # XXX A project can have a different name to the target program
        tb_coverage_data = parse_tb_file(tb_coverage_file, project_name)
        if not tb_coverage_data:
            continue
        covered_tbs.update((start, end) for start, end, _ in tb_coverage_data)
    # Open the program in radare2 and run its initial analysis
    r2 = r2pipe.open(os.path.join(project_path, project_name))
    r2.cmd('aaa')
    # Compute basic block coverage and render one SVG per function
    covered_bbs = basic_block_coverage(r2, covered_tbs)
    render_functions(r2, covered_bbs, output_dir)
    base_path = os.path.join(project_name, 's2e-out-%d' % s2e_num, 'functions')
    # Strip the '.svg' suffix for the display name of each rendered function
    return [[file_[0:-4], os.path.join(base_path, file_)] for file_ in os.listdir(output_dir)]
|
<mask token>
from __future__ import print_function
import glob
import json
import os
import pydot
import r2pipe
import s2e_web.S2E_settings as S2E_settings
def function_addrs(r2):
    """
    Yield the start address (as an int) of every function r2 knows about.
    """
    hex_addrs = r2.cmdj('aflqj')
    for hex_addr in hex_addrs:
        yield int(hex_addr, 16)
def parse_tb_file(path, module):
    """
    Parse a translation block coverage file generated by S2E's
    ``TranslationBlockCoverage`` plugin.

    Returns the coverage entries recorded for ``module``, or ``None`` if the
    file cannot be parsed, contains no data, or does not mention ``module``.
    """
    with open(path, 'r') as cov_file:
        try:
            coverage = json.load(cov_file)
        except Exception:
            print('WARN: Failed to parse translation block JSON file %s' % path)
            return None

    if not coverage:
        print('WARN: Translation block JSON file %s is empty' % path)
        return None

    if module in coverage:
        return coverage[module]

    print('WARN: Target %s not found in translation block JSON file %s' %
          (module, path))
    return None
def basic_block_coverage(r2, translation_blocks):
    """
    Calculate the basic block coverage based on the covered TBs.

    ``translation_blocks`` is an iterable of ``(start, end)`` address pairs.
    Returns a set of *covered* basic block start addresses.
    """
    covered_bbs = set()
    for func_addr in function_addrs(r2):
        graph = r2.cmdj('agj 0x%x' % func_addr)
        assert len(graph) == 1
        graph = graph[0]
        for tb_start_addr, tb_end_addr in translation_blocks:
            for bb in graph['blocks']:
                bb_start_addr = bb['offset']
                bb_end_addr = bb_start_addr + bb['size']
                # A basic block is covered if its range intersects a covered
                # translation block at all. The interval-overlap test also
                # catches a BB lying strictly inside a TB, which checking
                # only TB-endpoint containment would miss.
                if tb_start_addr <= bb_end_addr and bb_start_addr <= tb_end_addr:
                    covered_bbs.add(bb_start_addr)
    return covered_bbs
def render_functions(r2, covered_bbs, output_dir):
    """
    Render an SVG control-flow graph for every function in the program,
    colouring the basic blocks that were executed by S2E green.

    The resulting SVG images are written to `output_dir`.
    """
    for func_addr in function_addrs(r2):
        func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']
        graphs = pydot.graph_from_dot_data(r2.cmd('ag 0x%x' % func_addr))
        if not graphs:
            continue
        graph = graphs[0]
        for node in graph.get_nodes():
            label = node.get_name()
            # Node names are (possibly quoted) hex addresses; anything that
            # does not parse as hex is not a basic block node.
            if label.startswith('"'):
                label = label[1:-1]
            try:
                node_addr = int(label, 16)
            except ValueError:
                continue
            if node_addr in covered_bbs:
                node.set_fillcolor('darkolivegreen2')
        svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name, func_addr))
        with open(svg_path, 'wb') as svg_file:
            svg_file.write(graph.create_svg())
def generate_graph(s2e_output_dir, s2e_num, project_name):
    """
    Generate coverage-annotated SVG function graphs for an S2E analysis.

    The images are written to ``<s2e_output_dir>/functions``. Returns a list
    of ``[function_name, relative_svg_path]`` pairs, or ``None`` when the
    S2E environment/project layout is invalid or no coverage data is found.
    """
    s2e_env_path = S2E_settings.S2E_ENVIRONMENT_FOLDER_PATH
    output_dir = os.path.join(s2e_output_dir, 'functions')
    # Create the output directory on first use; guarded so that re-running
    # the analysis does not crash on an already-existing directory.
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    # Check that the given S2E environment is legitimate
    if not os.path.isfile(os.path.join(s2e_env_path, 's2e.yaml')):
        print('ERROR: %s is not an S2E environment' % s2e_env_path)
        return
    # Check that the given project exists in the environment
    project_path = os.path.join(s2e_env_path, 'projects', project_name)
    if not os.path.isdir(project_path):
        print('ERROR: %s is not a valid project' % project_name)
        return
    # Check that the project has been executed at least once
    s2e_last_path = os.path.join(project_path, 's2e-last')
    if not os.path.isdir(s2e_last_path):
        print('ERROR: %s has no s2e-last' % project_name)
        return
    # Gather TB coverage files, whether written per-node or at the top level
    tb_coverage_files = (glob.glob(os.path.join(s2e_last_path, '*', 'tbcoverage-*.json')) +
                         glob.glob(os.path.join(s2e_last_path, 'tbcoverage-*.json')))
    if not tb_coverage_files:
        print('ERROR: No translation block coverage files found in s2e-last. '
              'Did you enable the ``TranslationBlockCoverage`` plugin in '
              's2e-config.lua?')
        return
    # Parse the TB coverage files
    covered_tbs = set()
    for tb_coverage_file in tb_coverage_files:
        # XXX A project can have a different name to the target program
        tb_coverage_data = parse_tb_file(tb_coverage_file, project_name)
        if not tb_coverage_data:
            continue
        covered_tbs.update((start, end) for start, end, _ in tb_coverage_data)
    # Open the program in radare2 and run its initial analysis
    r2 = r2pipe.open(os.path.join(project_path, project_name))
    r2.cmd('aaa')
    # Compute basic block coverage and render one SVG per function
    covered_bbs = basic_block_coverage(r2, covered_tbs)
    render_functions(r2, covered_bbs, output_dir)
    base_path = os.path.join(project_name, 's2e-out-%d' % s2e_num, 'functions')
    # Strip the '.svg' suffix for the display name of each rendered function
    return [[file_[0:-4], os.path.join(base_path, file_)] for file_ in os.listdir(output_dir)]
|
"""
Copyright (C) Adrian Herrera, 2017
You will need to install r2pipe and pydot:
```
pip install r2pipe pydot
```
"""
from __future__ import print_function
import glob
import json
import os
import pydot
import r2pipe
import s2e_web.S2E_settings as S2E_settings
def function_addrs(r2):
    """
    Yield the start address (as an int) of every function r2 knows about.
    """
    hex_addrs = r2.cmdj('aflqj')
    for hex_addr in hex_addrs:
        yield int(hex_addr, 16)
def parse_tb_file(path, module):
    """
    Parse a translation block coverage file generated by S2E's
    ``TranslationBlockCoverage`` plugin.

    Returns the coverage entries recorded for ``module``, or ``None`` if the
    file cannot be parsed, contains no data, or does not mention ``module``.
    """
    with open(path, 'r') as cov_file:
        try:
            coverage = json.load(cov_file)
        except Exception:
            print('WARN: Failed to parse translation block JSON file %s' % path)
            return None

    if not coverage:
        print('WARN: Translation block JSON file %s is empty' % path)
        return None

    if module in coverage:
        return coverage[module]

    print('WARN: Target %s not found in translation block JSON file %s' %
          (module, path))
    return None
def basic_block_coverage(r2, translation_blocks):
    """
    Calculate the basic block coverage based on the covered TBs.

    ``translation_blocks`` is an iterable of ``(start, end)`` address pairs.
    Returns a set of *covered* basic block start addresses.
    """
    covered_bbs = set()
    for func_addr in function_addrs(r2):
        graph = r2.cmdj('agj 0x%x' % func_addr)
        assert len(graph) == 1
        graph = graph[0]
        for tb_start_addr, tb_end_addr in translation_blocks:
            for bb in graph['blocks']:
                bb_start_addr = bb['offset']
                bb_end_addr = bb_start_addr + bb['size']
                # A basic block is covered if its range intersects a covered
                # translation block at all. The interval-overlap test also
                # catches a BB lying strictly inside a TB, which checking
                # only TB-endpoint containment would miss.
                if tb_start_addr <= bb_end_addr and bb_start_addr <= tb_end_addr:
                    covered_bbs.add(bb_start_addr)
    return covered_bbs
def render_functions(r2, covered_bbs, output_dir):
    """
    Render an SVG control-flow graph for every function in the program,
    colouring the basic blocks that were executed by S2E green.

    The resulting SVG images are written to `output_dir`.
    """
    for func_addr in function_addrs(r2):
        func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']
        graphs = pydot.graph_from_dot_data(r2.cmd('ag 0x%x' % func_addr))
        if not graphs:
            continue
        graph = graphs[0]
        for node in graph.get_nodes():
            label = node.get_name()
            # Node names are (possibly quoted) hex addresses; anything that
            # does not parse as hex is not a basic block node.
            if label.startswith('"'):
                label = label[1:-1]
            try:
                node_addr = int(label, 16)
            except ValueError:
                continue
            if node_addr in covered_bbs:
                node.set_fillcolor('darkolivegreen2')
        svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name, func_addr))
        with open(svg_path, 'wb') as svg_file:
            svg_file.write(graph.create_svg())
def generate_graph(s2e_output_dir, s2e_num, project_name):
    """
    Generate coverage-annotated SVG function graphs for an S2E analysis.

    The images are written to ``<s2e_output_dir>/functions``. Returns a list
    of ``[function_name, relative_svg_path]`` pairs, or ``None`` when the
    S2E environment/project layout is invalid or no coverage data is found.
    """
    s2e_env_path = S2E_settings.S2E_ENVIRONMENT_FOLDER_PATH
    output_dir = os.path.join(s2e_output_dir, 'functions')
    # Create the output directory on first use; guarded so that re-running
    # the analysis does not crash on an already-existing directory.
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    # Check that the given S2E environment is legitimate
    if not os.path.isfile(os.path.join(s2e_env_path, 's2e.yaml')):
        print('ERROR: %s is not an S2E environment' % s2e_env_path)
        return
    # Check that the given project exists in the environment
    project_path = os.path.join(s2e_env_path, 'projects', project_name)
    if not os.path.isdir(project_path):
        print('ERROR: %s is not a valid project' % project_name)
        return
    # Check that the project has been executed at least once
    s2e_last_path = os.path.join(project_path, 's2e-last')
    if not os.path.isdir(s2e_last_path):
        print('ERROR: %s has no s2e-last' % project_name)
        return
    # Gather TB coverage files, whether written per-node or at the top level
    tb_coverage_files = (glob.glob(os.path.join(s2e_last_path, '*', 'tbcoverage-*.json')) +
                         glob.glob(os.path.join(s2e_last_path, 'tbcoverage-*.json')))
    if not tb_coverage_files:
        print('ERROR: No translation block coverage files found in s2e-last. '
              'Did you enable the ``TranslationBlockCoverage`` plugin in '
              's2e-config.lua?')
        return
    # Parse the TB coverage files
    covered_tbs = set()
    for tb_coverage_file in tb_coverage_files:
        # XXX A project can have a different name to the target program
        tb_coverage_data = parse_tb_file(tb_coverage_file, project_name)
        if not tb_coverage_data:
            continue
        covered_tbs.update((start, end) for start, end, _ in tb_coverage_data)
    # Open the program in radare2 and run its initial analysis
    r2 = r2pipe.open(os.path.join(project_path, project_name))
    r2.cmd('aaa')
    # Compute basic block coverage and render one SVG per function
    covered_bbs = basic_block_coverage(r2, covered_tbs)
    render_functions(r2, covered_bbs, output_dir)
    base_path = os.path.join(project_name, 's2e-out-%d' % s2e_num, 'functions')
    # Strip the '.svg' suffix for the display name of each rendered function
    return [[file_[0:-4], os.path.join(base_path, file_)] for file_ in os.listdir(output_dir)]
|
[
2,
3,
4,
6,
7
] |
2,591 |
779ef8942bfb55bf017a8da9dfe34c03ac574a9a
|
<mask token>
class ElementarySortTest(unittest.TestCase):
<mask token>
def test_insertion_sort(self):
insertion = Insertion()
actual = Utilities.generate_random_array(self.n)
expected = list(actual)
actual.sort()
insertion.sort(expected)
self.assertEqual(expected, actual)
self.assertLess(insertion.compares, (self.n ** 2 - self.n) / 2)
self.assertLess(insertion.swaps, (self.n ** 2 - self.n) / 2)
def test_insertion_sort_sub_array(self):
insertion = Insertion()
input = Utilities.generate_random_array(self.n)
low = math.floor(0.1 * self.n)
high = math.floor(0.9 * self.n)
insertion.sort(input, low, high)
self.assertTrue(Utilities.is_sorted(input, low, high))
self.assertFalse(Utilities.is_sorted(input, 0, len(input)))
def test_selection_sort(self):
selection = Selection()
actual = Utilities.generate_random_array(self.n)
expected = list(actual)
actual.sort()
selection.sort(expected)
self.assertEqual(expected, actual)
self.assertEqual(499500, selection.compares)
self.assertGreaterEqual(selection.swaps, 999)
self.assertLessEqual(selection.swaps, 1000)
def test_shell_sort(self):
shell = Shell()
actual = Utilities.generate_random_array(self.n)
expected = list(actual)
actual.sort()
shell.sort(expected)
self.assertEqual(expected, actual)
self.assertLess(13000, shell.compares)
self.assertLess(8000, shell.swaps)
<mask token>
|
<mask token>
class ElementarySortTest(unittest.TestCase):
    """Checks the elementary sorts against Python's built-in sorting."""

    def setUp(self):
        # Size of the randomly generated arrays used by every test.
        self.n = 1000

    def test_insertion_sort(self):
        insertion = Insertion()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        insertion.sort(data)
        self.assertEqual(data, reference)
        # Random input needs strictly fewer than the worst-case
        # n*(n-1)/2 compares and swaps.
        worst_case = (self.n ** 2 - self.n) / 2
        self.assertLess(insertion.compares, worst_case)
        self.assertLess(insertion.swaps, worst_case)

    def test_insertion_sort_sub_array(self):
        insertion = Insertion()
        data = Utilities.generate_random_array(self.n)
        low = math.floor(0.1 * self.n)
        high = math.floor(0.9 * self.n)
        insertion.sort(data, low, high)
        # Only the requested slice was sorted; the whole array was not.
        self.assertTrue(Utilities.is_sorted(data, low, high))
        self.assertFalse(Utilities.is_sorted(data, 0, len(data)))

    def test_selection_sort(self):
        selection = Selection()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        selection.sort(data)
        self.assertEqual(data, reference)
        # Selection sort always does exactly n*(n-1)/2 compares and at most
        # n swaps regardless of input order.
        self.assertEqual(499500, selection.compares)
        self.assertGreaterEqual(selection.swaps, 999)
        self.assertLessEqual(selection.swaps, 1000)

    def test_shell_sort(self):
        shell = Shell()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        shell.sort(data)
        self.assertEqual(data, reference)
        self.assertLess(13000, shell.compares)
        self.assertLess(8000, shell.swaps)
<mask token>
|
<mask token>
class ElementarySortTest(unittest.TestCase):
    """Checks the elementary sorts against Python's built-in sorting."""

    def setUp(self):
        # Size of the randomly generated arrays used by every test.
        self.n = 1000

    def test_insertion_sort(self):
        insertion = Insertion()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        insertion.sort(data)
        self.assertEqual(data, reference)
        # Random input needs strictly fewer than the worst-case
        # n*(n-1)/2 compares and swaps.
        worst_case = (self.n ** 2 - self.n) / 2
        self.assertLess(insertion.compares, worst_case)
        self.assertLess(insertion.swaps, worst_case)

    def test_insertion_sort_sub_array(self):
        insertion = Insertion()
        data = Utilities.generate_random_array(self.n)
        low = math.floor(0.1 * self.n)
        high = math.floor(0.9 * self.n)
        insertion.sort(data, low, high)
        # Only the requested slice was sorted; the whole array was not.
        self.assertTrue(Utilities.is_sorted(data, low, high))
        self.assertFalse(Utilities.is_sorted(data, 0, len(data)))

    def test_selection_sort(self):
        selection = Selection()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        selection.sort(data)
        self.assertEqual(data, reference)
        # Selection sort always does exactly n*(n-1)/2 compares and at most
        # n swaps regardless of input order.
        self.assertEqual(499500, selection.compares)
        self.assertGreaterEqual(selection.swaps, 999)
        self.assertLessEqual(selection.swaps, 1000)

    def test_shell_sort(self):
        shell = Shell()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        shell.sort(data)
        self.assertEqual(data, reference)
        self.assertLess(13000, shell.compares)
        self.assertLess(8000, shell.swaps)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
import unittest
import math
from python.src.sort.insertion import Insertion
from python.src.sort.selection import Selection
from python.src.sort.shell import Shell
from python.test.util.utilities import Utilities
class ElementarySortTest(unittest.TestCase):
    """Checks the elementary sorts against Python's built-in sorting."""

    def setUp(self):
        # Size of the randomly generated arrays used by every test.
        self.n = 1000

    def test_insertion_sort(self):
        insertion = Insertion()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        insertion.sort(data)
        self.assertEqual(data, reference)
        # Random input needs strictly fewer than the worst-case
        # n*(n-1)/2 compares and swaps.
        worst_case = (self.n ** 2 - self.n) / 2
        self.assertLess(insertion.compares, worst_case)
        self.assertLess(insertion.swaps, worst_case)

    def test_insertion_sort_sub_array(self):
        insertion = Insertion()
        data = Utilities.generate_random_array(self.n)
        low = math.floor(0.1 * self.n)
        high = math.floor(0.9 * self.n)
        insertion.sort(data, low, high)
        # Only the requested slice was sorted; the whole array was not.
        self.assertTrue(Utilities.is_sorted(data, low, high))
        self.assertFalse(Utilities.is_sorted(data, 0, len(data)))

    def test_selection_sort(self):
        selection = Selection()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        selection.sort(data)
        self.assertEqual(data, reference)
        # Selection sort always does exactly n*(n-1)/2 compares and at most
        # n swaps regardless of input order.
        self.assertEqual(499500, selection.compares)
        self.assertGreaterEqual(selection.swaps, 999)
        self.assertLessEqual(selection.swaps, 1000)

    def test_shell_sort(self):
        shell = Shell()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        shell.sort(data)
        self.assertEqual(data, reference)
        self.assertLess(13000, shell.compares)
        self.assertLess(8000, shell.swaps)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
import unittest
import math
from python.src.sort.insertion import Insertion
from python.src.sort.selection import Selection
from python.src.sort.shell import Shell
from python.test.util.utilities import Utilities
class ElementarySortTest(unittest.TestCase):
    """Checks the elementary sorts against Python's built-in sorting."""

    def setUp(self):
        # Size of the randomly generated arrays used by every test.
        self.n = 1000

    def test_insertion_sort(self):
        insertion = Insertion()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        insertion.sort(data)
        self.assertEqual(data, reference)
        # Random input needs strictly fewer than the worst-case
        # n*(n-1)/2 compares and swaps.
        worst_case = (self.n ** 2 - self.n) / 2
        self.assertLess(insertion.compares, worst_case)
        self.assertLess(insertion.swaps, worst_case)

    def test_insertion_sort_sub_array(self):
        insertion = Insertion()
        data = Utilities.generate_random_array(self.n)
        low = math.floor(0.1 * self.n)
        high = math.floor(0.9 * self.n)
        insertion.sort(data, low, high)
        # Only the requested slice was sorted; the whole array was not.
        self.assertTrue(Utilities.is_sorted(data, low, high))
        self.assertFalse(Utilities.is_sorted(data, 0, len(data)))

    def test_selection_sort(self):
        selection = Selection()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        selection.sort(data)
        self.assertEqual(data, reference)
        # Selection sort always does exactly n*(n-1)/2 compares and at most
        # n swaps regardless of input order.
        self.assertEqual(499500, selection.compares)
        self.assertGreaterEqual(selection.swaps, 999)
        self.assertLessEqual(selection.swaps, 1000)

    def test_shell_sort(self):
        shell = Shell()
        data = Utilities.generate_random_array(self.n)
        reference = sorted(data)
        shell.sort(data)
        self.assertEqual(data, reference)
        self.assertLess(13000, shell.compares)
        self.assertLess(8000, shell.swaps)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
[
5,
6,
7,
8,
9
] |
2,592 |
c8aa93a33a6513129b4980180c4eb8d5d5eb3b5b
|
<mask token>
|
<mask token>
# URL routes for the shop app: storefront, auth, cart/order flow, order
# tracking, search, checkout and product detail views.
# NOTE(review): the name 'checkout' is bound to BOTH the 'check-out' route
# (views.CheckOut) and the 'checkout/' route (views.check); reverse('checkout')
# resolves to the last registration — confirm which one templates expect.
urlpatterns = [path('', views.index, name='index'), path('login', Login.
    as_view(), name='login'), path('logout', logout, name='logout'), path(
    'cart/', views.cart, name='cart'), path('order/', views.order, name=
    'order'), path('check-out', views.CheckOut, name='checkout'), path(
    'track/', views.tracker, name='tracker'), path('search/', views.search,
    name='search'), path('checkout/', views.check, name='checkout'), path(
    'productview/', views.proview, name='see')]
|
from django.contrib import admin
from django.urls import path
from . import views
from .views import index
from .views import Login, logout
from .views import CheckOut
# URL routes for the shop app: storefront, auth, cart/order flow, order
# tracking, search, checkout and product detail views.
# NOTE(review): the name 'checkout' is bound to BOTH the 'check-out' route
# (views.CheckOut) and the 'checkout/' route (views.check); reverse('checkout')
# resolves to the last registration — confirm which one templates expect.
urlpatterns = [path('', views.index, name='index'), path('login', Login.
    as_view(), name='login'), path('logout', logout, name='logout'), path(
    'cart/', views.cart, name='cart'), path('order/', views.order, name=
    'order'), path('check-out', views.CheckOut, name='checkout'), path(
    'track/', views.tracker, name='tracker'), path('search/', views.search,
    name='search'), path('checkout/', views.check, name='checkout'), path(
    'productview/', views.proview, name='see')]
|
from django.contrib import admin
from django.urls import path
from . import views
from .views import index
from .views import Login , logout
from .views import CheckOut
# URL routes for the shop app: storefront, auth, cart/order flow, order
# tracking, search, checkout and product detail views.
# NOTE(review): the name 'checkout' is bound to BOTH the 'check-out' route
# (views.CheckOut) and the 'checkout/' route (views.check); reverse('checkout')
# resolves to the last registration — confirm which one templates expect.
urlpatterns = [
    path("",views.index, name="index"),
    path('login', Login.as_view(), name='login'),
    path('logout', logout , name='logout'),
    path("cart/",views.cart , name="cart"),
    path("order/",views.order , name="order"),
    path('check-out', views.CheckOut , name='checkout'),
    path("track/",views.tracker, name="tracker"),
    path("search/",views.search, name="search"),
    path("checkout/",views.check, name="checkout"),
    path("productview/",views.proview, name="see"),
]
| null |
[
0,
1,
2,
3
] |
2,593 |
0ae9ad7af26e3d19f2d3967c02611503c32aea70
|
<mask token>
class Config(object):
<mask token>
def __init__(self):
self._opts = {}
for opt, value in self._DEFAULT.items():
if not weechat.config_is_set_plugin(opt):
weechat.config_set_plugin(opt, value)
self.update()
def update(self):
for opt in self._DEFAULT.keys():
self._opts[opt] = weechat.config_get_plugin(opt)
def __getitem__(self, key):
return self._opts[key]
<mask token>
|
<mask token>
class Config(object):
    """Plugin configuration backed by WeeChat's plugin option store."""

    _DEFAULT = {
        'url': 'http://localhost:9999/notify',
        'title': 'IRC Notification',
        'activate_label': '',
        'sound': '',
    }

    def __init__(self):
        self._opts = {}
        # Seed any missing options with their defaults, then load everything.
        for name, default in self._DEFAULT.items():
            if not weechat.config_is_set_plugin(name):
                weechat.config_set_plugin(name, default)
        self.update()

    def update(self):
        """Re-read every known option from WeeChat into the local cache."""
        for name in self._DEFAULT:
            self._opts[name] = weechat.config_get_plugin(name)

    def __getitem__(self, key):
        return self._opts[key]
def config_cb(data, option, value):
    """WeeChat config-change hook: reload the cached plugin options."""
    cfg.update()
    return weechat.WEECHAT_RC_OK
def send_notify(**kwargs):
    """POST the notification payload to the configured notify server as JSON."""
    payload = json.dumps(kwargs)
    request = urllib2.Request(cfg['url'], payload,
                              {'Content-Type': 'application/json'})
    handle = urllib2.urlopen(request)
    handle.read()
    handle.close()
<mask token>
|
<mask token>
class Config(object):
    """Plugin configuration backed by WeeChat's plugin option store."""

    _DEFAULT = {
        'url': 'http://localhost:9999/notify',
        'title': 'IRC Notification',
        'activate_label': '',
        'sound': '',
    }

    def __init__(self):
        self._opts = {}
        # Seed any missing options with their defaults, then load everything.
        for name, default in self._DEFAULT.items():
            if not weechat.config_is_set_plugin(name):
                weechat.config_set_plugin(name, default)
        self.update()

    def update(self):
        """Re-read every known option from WeeChat into the local cache."""
        for name in self._DEFAULT:
            self._opts[name] = weechat.config_get_plugin(name)

    def __getitem__(self, key):
        return self._opts[key]
def config_cb(data, option, value):
    """WeeChat config-change hook: reload the cached plugin options."""
    cfg.update()
    return weechat.WEECHAT_RC_OK
def send_notify(**kwargs):
    """POST the notification payload to the configured notify server as JSON."""
    payload = json.dumps(kwargs)
    request = urllib2.Request(cfg['url'], payload,
                              {'Content-Type': 'application/json'})
    handle = urllib2.urlopen(request)
    handle.read()
    handle.close()
<mask token>
def handle_msg(data, pbuffer, date, tags, displayed, highlight, prefix, message
    ):
    """WeeChat print hook: notify on private messages and channel highlights."""
    # NOTE(review): WeeChat may pass `highlight` as the string '0'/'1', in
    # which case bool('0') is truthy — confirm the callback argument types
    # for the targeted WeeChat API version.
    highlight = bool(highlight)
    buffer_type = weechat.buffer_get_string(pbuffer, 'localvar_type')
    buffer_name = weechat.buffer_get_string(pbuffer, 'short_name')
    # NOTE(review): `away` is computed but never used — dead local?
    away = weechat.buffer_get_string(pbuffer, 'localvar_away')
    if buffer_type == 'private':
        notify('Private message from {}'.format(buffer_name), message)
    elif buffer_type == 'channel' and highlight:
        notify('Highlight {}@{}'.format(prefix, buffer_name), message)
    return weechat.WEECHAT_RC_OK
<mask token>
|
<mask token>
class Config(object):
    """Plugin configuration backed by WeeChat's plugin option store."""

    _DEFAULT = {
        'url': 'http://localhost:9999/notify',
        'title': 'IRC Notification',
        'activate_label': '',
        'sound': '',
    }

    def __init__(self):
        self._opts = {}
        # Seed any missing options with their defaults, then load everything.
        for name, default in self._DEFAULT.items():
            if not weechat.config_is_set_plugin(name):
                weechat.config_set_plugin(name, default)
        self.update()

    def update(self):
        """Re-read every known option from WeeChat into the local cache."""
        for name in self._DEFAULT:
            self._opts[name] = weechat.config_get_plugin(name)

    def __getitem__(self, key):
        return self._opts[key]
def config_cb(data, option, value):
    """WeeChat config-change hook: reload the cached plugin options."""
    cfg.update()
    return weechat.WEECHAT_RC_OK
def send_notify(**kwargs):
    """POST the notification payload to the configured notify server as JSON."""
    payload = json.dumps(kwargs)
    request = urllib2.Request(cfg['url'], payload,
                              {'Content-Type': 'application/json'})
    handle = urllib2.urlopen(request)
    handle.read()
    handle.close()
def notify(subtitle, message):
    """Send a notification, attaching the optional activate/sound settings."""
    extras = {}
    if cfg['activate_label']:
        extras['activate'] = cfg['activate_label']
    if cfg['sound']:
        extras['sound'] = cfg['sound']
    send_notify(title=cfg['title'], subtitle=subtitle, message=message, **extras)
def handle_msg(data, pbuffer, date, tags, displayed, highlight, prefix, message
    ):
    """WeeChat print hook: notify on private messages and channel highlights."""
    # NOTE(review): WeeChat may pass `highlight` as the string '0'/'1', in
    # which case bool('0') is truthy — confirm the callback argument types
    # for the targeted WeeChat API version.
    highlight = bool(highlight)
    buffer_type = weechat.buffer_get_string(pbuffer, 'localvar_type')
    buffer_name = weechat.buffer_get_string(pbuffer, 'short_name')
    # NOTE(review): `away` is computed but never used — dead local?
    away = weechat.buffer_get_string(pbuffer, 'localvar_away')
    if buffer_type == 'private':
        notify('Private message from {}'.format(buffer_name), message)
    elif buffer_type == 'channel' and highlight:
        notify('Highlight {}@{}'.format(prefix, buffer_name), message)
    return weechat.WEECHAT_RC_OK
<mask token>
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Brandon Bennett <[email protected]>
#
# Send a notification via notifyserver (https://github.com/nemith/notifyserver)
# on highlight/private message or new DCC.
#
# History:
#
# 2015-02-07, Brandon Bennett <[email protected]>:
# version 0.1: initial release
#
# Script metadata registered with WeeChat on startup.
SCRIPT_NAME = 'notifyserver'
SCRIPT_AUTHOR = 'Brandon Bennett <[email protected]>'
SCRIPT_VERSION = '0.1'
SCRIPT_LICENSE = 'MIT'
SCRIPT_DESC = 'Send a notification to a notifyserver on highlight/private message or new DCC'
# Flipped to False by the import guards below when a module is unavailable.
import_ok = True
try:
import weechat
except:
print('This script must be run under WeeChat.')
print('Get WeeChat now at: http://www.weechat.org/')
import_ok = False
try:
import json, urllib2
except ImportError as message:
print('Missing package(s) for %s: %s' % (SCRIPT_NAME, message))
import_ok = False
cfg = None
class Config(object):
    """Plugin configuration backed by WeeChat's plugin option store."""

    _DEFAULT = {
        'url': 'http://localhost:9999/notify',
        'title': 'IRC Notification',
        'activate_label': '',
        'sound': '',
    }

    def __init__(self):
        self._opts = {}
        # Seed any missing options with their defaults, then load everything.
        for name, default in self._DEFAULT.items():
            if not weechat.config_is_set_plugin(name):
                weechat.config_set_plugin(name, default)
        self.update()

    def update(self):
        """Re-read every known option from WeeChat into the local cache."""
        for name in self._DEFAULT:
            self._opts[name] = weechat.config_get_plugin(name)

    def __getitem__(self, key):
        return self._opts[key]
def config_cb(data, option, value):
    """WeeChat config-change hook: reload the cached plugin options."""
    cfg.update()
    return weechat.WEECHAT_RC_OK
def send_notify(**kwargs):
    """POST the notification payload to the configured notify server as JSON."""
    payload = json.dumps(kwargs)
    request = urllib2.Request(cfg['url'], payload,
                              {'Content-Type': 'application/json'})
    handle = urllib2.urlopen(request)
    handle.read()
    handle.close()
def notify(subtitle, message):
    """Send a notification, attaching the optional activate/sound settings."""
    extras = {}
    if cfg['activate_label']:
        extras['activate'] = cfg['activate_label']
    if cfg['sound']:
        extras['sound'] = cfg['sound']
    send_notify(title=cfg['title'], subtitle=subtitle, message=message, **extras)
def handle_msg(data, pbuffer, date, tags, displayed, highlight, prefix, message):
    """WeeChat print hook: notify on private messages and channel highlights."""
    # NOTE(review): WeeChat may pass `highlight` as the string '0'/'1', in
    # which case bool('0') is truthy — confirm the callback argument types
    # for the targeted WeeChat API version.
    highlight = bool(highlight)
    buffer_type = weechat.buffer_get_string(pbuffer, "localvar_type")
    buffer_name = weechat.buffer_get_string(pbuffer, "short_name")
    # NOTE(review): `away` is computed but never used — dead local?
    away = weechat.buffer_get_string(pbuffer, "localvar_away")
    if buffer_type == 'private':
        notify("Private message from {}".format(buffer_name), message)
    elif buffer_type == 'channel' and highlight:
        notify("Highlight {}@{}".format(prefix, buffer_name), message)
    return weechat.WEECHAT_RC_OK
# Register the script with WeeChat and install the hooks: one to track
# config changes, one to inspect every printed message for notifications.
if __name__ == '__main__' and import_ok:
    if weechat.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION,
            SCRIPT_LICENSE, SCRIPT_DESC, '', ''):
        cfg = Config()
        weechat.hook_config("plugins.var.python." + SCRIPT_NAME + ".*", "config_cb", "")
        weechat.hook_print("", "", "", 1, "handle_msg", "")
|
[
4,
7,
8,
9,
12
] |
2,594 |
9731f45b19d40a031216f8a430c09764fd34e984
|
<mask token>
def get_blogs_common_data(request, blogs_all_list):
    """
    Build the template context shared by the blog list pages: the requested
    page of blogs, an ellipsized page-number range, all blog types, and a
    month -> post-count mapping for the archive sidebar.
    """
    page_num = request.GET.get('page', 1)
    paginator = Paginator(blogs_all_list, settings.BLOGS_PER_PAGE)
    page_of_blogs = paginator.get_page(page_num)
    current_page_num = page_of_blogs.number
    # A window of up to two page numbers either side of the current page.
    page_range = list(range(max(1, current_page_num - 2), min(paginator.
        num_pages + 1, current_page_num + 3)))
    # Always show page 1, with '...' when there is a gap after it. The index
    # arithmetic below deliberately runs AFTER the in-place insert of 1.
    if page_range[0] != 1:
        page_range.insert(0, 1)
        if page_range[1] - page_range[0] >= 2:
            page_range.insert(1, '...')
    # Likewise always show the last page, with '...' before it when needed.
    if page_range[-1] != paginator.num_pages:
        page_range.append(paginator.num_pages)
        if page_range[-1] - page_range[-2] >= 2:
            page_range.insert(-1, '...')
    # Count posts per (year, month) for the archive sidebar.
    blog_dates = dict()
    all_dates = Blog.objects.dates('created_time', 'month', order='DESC')
    for blogs_date in all_dates:
        blogs_count = Blog.objects.filter(created_time__year=blogs_date.
            year, created_time__month=blogs_date.month).count()
        blog_dates[blogs_date] = blogs_count
    context = dict()
    context['blogs'] = page_of_blogs
    context['page_range'] = page_range
    context['blog_types'] = BlogType.objects.all()
    context['blog_dates'] = blog_dates
    return context
def blog_list(request):
    """Render the paginated list of all blog posts."""
    context = get_blogs_common_data(request, Blog.objects.all())
    return render(request, 'blog/blog_list.html', context)
<mask token>
def blog_detail(request, blog_pk):
    """Render a single blog post and mark it as read via a client cookie."""
    blog = get_object_or_404(Blog, pk=blog_pk)
    read_cookie_key = read_statistics_once_read(request, blog)
    context = {
        'blog': blog,
        'blog_author': blog.author.get_nickname_or_username(),
        'login_form': LoginForm(),
        # Neighbouring posts by creation time, for prev/next navigation.
        'pre_blog': Blog.objects.filter(created_time__gt=blog.created_time).last(),
        'next_blog': Blog.objects.filter(created_time__lt=blog.created_time).first(),
        'blog_dates': Blog.objects.dates('created_time', 'month', order='DESC'),
    }
    response = render(request, 'blog/blog_detail.html', context)
    # Mark this post as read for this client so repeat views aren't counted.
    response.set_cookie(read_cookie_key, 'true')
    return response
|
<mask token>
def get_blogs_common_data(request, blogs_all_list):
    """
    Build the template context shared by the blog list pages: the requested
    page of blogs, an ellipsized page-number range, all blog types, and a
    month -> post-count mapping for the archive sidebar.
    """
    page_num = request.GET.get('page', 1)
    paginator = Paginator(blogs_all_list, settings.BLOGS_PER_PAGE)
    page_of_blogs = paginator.get_page(page_num)
    current_page_num = page_of_blogs.number
    # A window of up to two page numbers either side of the current page.
    page_range = list(range(max(1, current_page_num - 2), min(paginator.
        num_pages + 1, current_page_num + 3)))
    # Always show page 1, with '...' when there is a gap after it. The index
    # arithmetic below deliberately runs AFTER the in-place insert of 1.
    if page_range[0] != 1:
        page_range.insert(0, 1)
        if page_range[1] - page_range[0] >= 2:
            page_range.insert(1, '...')
    # Likewise always show the last page, with '...' before it when needed.
    if page_range[-1] != paginator.num_pages:
        page_range.append(paginator.num_pages)
        if page_range[-1] - page_range[-2] >= 2:
            page_range.insert(-1, '...')
    # Count posts per (year, month) for the archive sidebar.
    blog_dates = dict()
    all_dates = Blog.objects.dates('created_time', 'month', order='DESC')
    for blogs_date in all_dates:
        blogs_count = Blog.objects.filter(created_time__year=blogs_date.
            year, created_time__month=blogs_date.month).count()
        blog_dates[blogs_date] = blogs_count
    context = dict()
    context['blogs'] = page_of_blogs
    context['page_range'] = page_range
    context['blog_types'] = BlogType.objects.all()
    context['blog_dates'] = blog_dates
    return context
def blog_list(request):
    """Render the paginated list of all blog posts."""
    context = get_blogs_common_data(request, Blog.objects.all())
    return render(request, 'blog/blog_list.html', context)
def blogs_with_type(request, blog_type_pk):
blog_type = get_object_or_404(BlogType, pk=blog_type_pk)
blogs_all_list = Blog.objects.filter(blog_type=blog_type)
context = get_blogs_common_data(request, blogs_all_list)
context['blog_type'] = blog_type
return render(request, 'blog/blogs_with_type.html', context)
<mask token>
def blog_detail(request, blog_pk):
blog = get_object_or_404(Blog, pk=blog_pk)
read_cookie_key = read_statistics_once_read(request, blog)
context = dict()
context['blog'] = blog
context['blog_author'] = blog.author.get_nickname_or_username()
context['login_form'] = LoginForm()
context['pre_blog'] = Blog.objects.filter(created_time__gt=blog.
created_time).last()
context['next_blog'] = Blog.objects.filter(created_time__lt=blog.
created_time).first()
context['blog_dates'] = Blog.objects.dates('created_time', 'month',
order='DESC')
response = render(request, 'blog/blog_detail.html', context)
response.set_cookie(read_cookie_key, 'true')
return response
|
<mask token>
def get_blogs_common_data(request, blogs_all_list):
page_num = request.GET.get('page', 1)
paginator = Paginator(blogs_all_list, settings.BLOGS_PER_PAGE)
page_of_blogs = paginator.get_page(page_num)
current_page_num = page_of_blogs.number
page_range = list(range(max(1, current_page_num - 2), min(paginator.
num_pages + 1, current_page_num + 3)))
if page_range[0] != 1:
page_range.insert(0, 1)
if page_range[1] - page_range[0] >= 2:
page_range.insert(1, '...')
if page_range[-1] != paginator.num_pages:
page_range.append(paginator.num_pages)
if page_range[-1] - page_range[-2] >= 2:
page_range.insert(-1, '...')
blog_dates = dict()
all_dates = Blog.objects.dates('created_time', 'month', order='DESC')
for blogs_date in all_dates:
blogs_count = Blog.objects.filter(created_time__year=blogs_date.
year, created_time__month=blogs_date.month).count()
blog_dates[blogs_date] = blogs_count
context = dict()
context['blogs'] = page_of_blogs
context['page_range'] = page_range
context['blog_types'] = BlogType.objects.all()
context['blog_dates'] = blog_dates
return context
def blog_list(request):
blogs_all_list = Blog.objects.all()
context = get_blogs_common_data(request, blogs_all_list)
return render(request, 'blog/blog_list.html', context)
def blogs_with_type(request, blog_type_pk):
blog_type = get_object_or_404(BlogType, pk=blog_type_pk)
blogs_all_list = Blog.objects.filter(blog_type=blog_type)
context = get_blogs_common_data(request, blogs_all_list)
context['blog_type'] = blog_type
return render(request, 'blog/blogs_with_type.html', context)
def blogs_with_date(request, year, month):
blogs_all_list = Blog.objects.filter(created_time__year=year,
created_time__month=month)
context = get_blogs_common_data(request, blogs_all_list)
context['blogs_with_date'] = '%s年%s' % (year, month)
return render(request, 'blog/blogs_with_date.html', context)
def blog_detail(request, blog_pk):
blog = get_object_or_404(Blog, pk=blog_pk)
read_cookie_key = read_statistics_once_read(request, blog)
context = dict()
context['blog'] = blog
context['blog_author'] = blog.author.get_nickname_or_username()
context['login_form'] = LoginForm()
context['pre_blog'] = Blog.objects.filter(created_time__gt=blog.
created_time).last()
context['next_blog'] = Blog.objects.filter(created_time__lt=blog.
created_time).first()
context['blog_dates'] = Blog.objects.dates('created_time', 'month',
order='DESC')
response = render(request, 'blog/blog_detail.html', context)
response.set_cookie(read_cookie_key, 'true')
return response
|
from django.shortcuts import render, get_object_or_404
from django.core.paginator import Paginator
from .models import Blog, BlogType
from django.conf import settings
from read_statistics.utils import read_statistics_once_read
from user.forms import LoginForm
def get_blogs_common_data(request, blogs_all_list):
page_num = request.GET.get('page', 1)
paginator = Paginator(blogs_all_list, settings.BLOGS_PER_PAGE)
page_of_blogs = paginator.get_page(page_num)
current_page_num = page_of_blogs.number
page_range = list(range(max(1, current_page_num - 2), min(paginator.
num_pages + 1, current_page_num + 3)))
if page_range[0] != 1:
page_range.insert(0, 1)
if page_range[1] - page_range[0] >= 2:
page_range.insert(1, '...')
if page_range[-1] != paginator.num_pages:
page_range.append(paginator.num_pages)
if page_range[-1] - page_range[-2] >= 2:
page_range.insert(-1, '...')
blog_dates = dict()
all_dates = Blog.objects.dates('created_time', 'month', order='DESC')
for blogs_date in all_dates:
blogs_count = Blog.objects.filter(created_time__year=blogs_date.
year, created_time__month=blogs_date.month).count()
blog_dates[blogs_date] = blogs_count
context = dict()
context['blogs'] = page_of_blogs
context['page_range'] = page_range
context['blog_types'] = BlogType.objects.all()
context['blog_dates'] = blog_dates
return context
def blog_list(request):
blogs_all_list = Blog.objects.all()
context = get_blogs_common_data(request, blogs_all_list)
return render(request, 'blog/blog_list.html', context)
def blogs_with_type(request, blog_type_pk):
blog_type = get_object_or_404(BlogType, pk=blog_type_pk)
blogs_all_list = Blog.objects.filter(blog_type=blog_type)
context = get_blogs_common_data(request, blogs_all_list)
context['blog_type'] = blog_type
return render(request, 'blog/blogs_with_type.html', context)
def blogs_with_date(request, year, month):
blogs_all_list = Blog.objects.filter(created_time__year=year,
created_time__month=month)
context = get_blogs_common_data(request, blogs_all_list)
context['blogs_with_date'] = '%s年%s' % (year, month)
return render(request, 'blog/blogs_with_date.html', context)
def blog_detail(request, blog_pk):
blog = get_object_or_404(Blog, pk=blog_pk)
read_cookie_key = read_statistics_once_read(request, blog)
context = dict()
context['blog'] = blog
context['blog_author'] = blog.author.get_nickname_or_username()
context['login_form'] = LoginForm()
context['pre_blog'] = Blog.objects.filter(created_time__gt=blog.
created_time).last()
context['next_blog'] = Blog.objects.filter(created_time__lt=blog.
created_time).first()
context['blog_dates'] = Blog.objects.dates('created_time', 'month',
order='DESC')
response = render(request, 'blog/blog_detail.html', context)
response.set_cookie(read_cookie_key, 'true')
return response
|
from django.shortcuts import render, get_object_or_404
from django.core.paginator import Paginator
from .models import Blog, BlogType
from django.conf import settings
from read_statistics.utils import read_statistics_once_read
from user.forms import LoginForm
# Create your views here.
#分页函数
def get_blogs_common_data(request, blogs_all_list):
# 分页器
page_num = request.GET.get('page', 1)
paginator = Paginator(blogs_all_list, settings.BLOGS_PER_PAGE)
page_of_blogs = paginator.get_page(page_num)
current_page_num = page_of_blogs.number
page_range = list(range(max(1, current_page_num - 2), min(paginator.num_pages + 1, current_page_num + 3)))
if page_range[0] != 1:
page_range.insert(0, 1)
if page_range[1] - page_range[0] >= 2:
page_range.insert(1, '...')
if page_range[-1] != paginator.num_pages:
page_range.append(paginator.num_pages)
if page_range[-1] - page_range[-2] >= 2:
page_range.insert(-1, '...')
# 获取日期归档的博客统计数量
blog_dates = dict()
all_dates = Blog.objects.dates('created_time', 'month', order='DESC')
for blogs_date in all_dates:
blogs_count = Blog.objects.filter(created_time__year=blogs_date.year,
created_time__month=blogs_date.month).count()
blog_dates[blogs_date] = blogs_count
# 获取公共的数据
context = dict()
context['blogs'] = page_of_blogs
context['page_range'] = page_range
# 运用annotate方法给对象添加注释
#context['blog_types'] = BlogType.objects.annotate(blog_count=Count('blog'))
context['blog_types'] = BlogType.objects.all()
context['blog_dates'] = blog_dates
return context
def blog_list(request):
blogs_all_list = Blog.objects.all()
context = get_blogs_common_data(request, blogs_all_list)
return render(request, 'blog/blog_list.html', context)
def blogs_with_type(request, blog_type_pk):
blog_type = get_object_or_404(BlogType, pk=blog_type_pk)
blogs_all_list = Blog.objects.filter(blog_type=blog_type)
context = get_blogs_common_data(request, blogs_all_list)
context['blog_type'] = blog_type
return render(request, 'blog/blogs_with_type.html', context)
def blogs_with_date(request, year, month):
blogs_all_list = Blog.objects.filter(created_time__year=year, created_time__month=month)
context = get_blogs_common_data(request, blogs_all_list)
context['blogs_with_date'] = '%s年%s' % (year, month)
return render(request, 'blog/blogs_with_date.html', context)
def blog_detail(request, blog_pk):
blog = get_object_or_404(Blog, pk=blog_pk)
read_cookie_key = read_statistics_once_read(request, blog)
context = dict()
context['blog'] = blog
context['blog_author'] = blog.author.get_nickname_or_username()
context['login_form'] = LoginForm()
context['pre_blog'] = Blog.objects.filter(created_time__gt=blog.created_time).last()
context['next_blog'] = Blog.objects.filter(created_time__lt=blog.created_time).first()
context['blog_dates'] = Blog.objects.dates('created_time', 'month', order='DESC')
response = render(request, 'blog/blog_detail.html', context)
response.set_cookie(read_cookie_key, 'true')
return response
|
[
3,
4,
5,
6,
7
] |
2,595 |
db684185c2b0a26cb101dc40090c84b64c554eeb
|
<mask token>
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir', help='Directory path.', dest=
'dir_path', required=True)
parser.add_argument('--file-prefix', required=True, help=
'File name prefix.')
parser.add_argument('-c', action='store_true', dest='create_tt', help=
'Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match('{0}\\d{{3}}'.format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, '{}{:03d}'.format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
<mask token>
|
<mask token>
def create_test_file(filename):
with open(filename, 'w') as f:
f.write('foobar')
def create_test_files(test_dir, file_prefix):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
for i in range(1, 10):
if i in [2, 8]:
i += 1
testfile_path = os.path.join(test_dir, '{}{:03d}'.format(
file_prefix, i))
create_test_file(testfile_path)
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir', help='Directory path.', dest=
'dir_path', required=True)
parser.add_argument('--file-prefix', required=True, help=
'File name prefix.')
parser.add_argument('-c', action='store_true', dest='create_tt', help=
'Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match('{0}\\d{{3}}'.format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, '{}{:03d}'.format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
<mask token>
|
<mask token>
def create_test_file(filename):
with open(filename, 'w') as f:
f.write('foobar')
def create_test_files(test_dir, file_prefix):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
for i in range(1, 10):
if i in [2, 8]:
i += 1
testfile_path = os.path.join(test_dir, '{}{:03d}'.format(
file_prefix, i))
create_test_file(testfile_path)
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir', help='Directory path.', dest=
'dir_path', required=True)
parser.add_argument('--file-prefix', required=True, help=
'File name prefix.')
parser.add_argument('-c', action='store_true', dest='create_tt', help=
'Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match('{0}\\d{{3}}'.format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, '{}{:03d}'.format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
if __name__ == '__main__':
main()
|
import os
import shutil
import re
import argparse
def create_test_file(filename):
with open(filename, 'w') as f:
f.write('foobar')
def create_test_files(test_dir, file_prefix):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
for i in range(1, 10):
if i in [2, 8]:
i += 1
testfile_path = os.path.join(test_dir, '{}{:03d}'.format(
file_prefix, i))
create_test_file(testfile_path)
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir', help='Directory path.', dest=
'dir_path', required=True)
parser.add_argument('--file-prefix', required=True, help=
'File name prefix.')
parser.add_argument('-c', action='store_true', dest='create_tt', help=
'Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match('{0}\\d{{3}}'.format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, '{}{:03d}'.format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
if __name__ == '__main__':
main()
|
#! python3
import os
import shutil
import re
import argparse
def create_test_file(filename):
with open(filename, "w") as f:
f.write("foobar")
def create_test_files(test_dir, file_prefix):
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
for i in range(1, 10):
# Introduce gaps
if i in [2, 8]:
i += 1
testfile_path = os.path.join(test_dir,
"{}{:03d}".format(file_prefix, i))
create_test_file(testfile_path)
def main():
parser = argparse.ArgumentParser(description='Filling In The Gaps program')
parser.add_argument('-d', '--dir',
help="Directory path.",
dest='dir_path',
required=True)
parser.add_argument('--file-prefix',
required=True,
help='File name prefix.')
parser.add_argument('-c', action='store_true',
dest='create_tt',
help='Create test tree.')
args = parser.parse_args()
if args.create_tt:
create_test_files(args.dir_path, args.file_prefix)
prev_num = 0
for filename in os.listdir(args.dir_path):
if re.match(r"{0}\d{{3}}".format(args.file_prefix), filename):
curr_num = int(filename.split(args.file_prefix)[1])
expected_num = prev_num + 1
if curr_num != expected_num:
old_path = os.path.join(args.dir_path, filename)
new_path = os.path.join(args.dir_path, "{}{:03d}".format(
args.file_prefix, expected_num))
shutil.move(old_path, new_path)
curr_num = expected_num
prev_num = curr_num
if __name__ == "__main__":
main()
|
[
1,
3,
4,
5,
6
] |
2,596 |
a6ee2be7bed59b419fa66fd6cfe4b5fff3fac260
|
<mask token>
|
<mask token>
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup, find_packages
setup(name='stripe-requests', version='1.9.1-dev', description=
'Stripe python bindings using requests', author='Allan Lei',
author_email='[email protected]', url=
'https://github.com/allanlei/stripe-requests', license=open('LICENSE').
read(), packages=find_packages(), package_data={'stripe': [
'data/ca-certificates.crt']}, install_requires=[
'requests >= 1.2.0, < 1.3.0'], test_suite='stripe.tests', classifiers=(
'Intended Audience :: Developers', 'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy'))
|
import os
import sys
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup, find_packages
setup(name='stripe-requests', version='1.9.1-dev', description=
'Stripe python bindings using requests', author='Allan Lei',
author_email='[email protected]', url=
'https://github.com/allanlei/stripe-requests', license=open('LICENSE').
read(), packages=find_packages(), package_data={'stripe': [
'data/ca-certificates.crt']}, install_requires=[
'requests >= 1.2.0, < 1.3.0'], test_suite='stripe.tests', classifiers=(
'Intended Audience :: Developers', 'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy'))
|
import os
import sys
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup, find_packages
setup(
name='stripe-requests',
version='1.9.1-dev',
description='Stripe python bindings using requests',
author='Allan Lei',
author_email='[email protected]',
url='https://github.com/allanlei/stripe-requests',
license=open('LICENSE').read(),
packages=find_packages(),
package_data={'stripe': ['data/ca-certificates.crt']},
install_requires=[
'requests >= 1.2.0, < 1.3.0',
],
test_suite='stripe.tests',
classifiers=(
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
),
)
| null |
[
0,
1,
2,
3
] |
2,597 |
8edca4c50e48734073e80de85088964837247696
|
<mask token>
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = SentimenSerializer
queryset = Sentiment.objects.all()
|
<mask token>
class HistoryListView(generics.GenericAPIView):
<mask token>
def post(self, request):
serializer_class = self.serializer_class(data=request.data)
serializer_class.is_valid(raise_exception=True)
return Response(serializer_class.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = HistorySerializer
queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
queryset = Sentiment.objects.all()
serializer_class = SentimenSerializer(many=True)
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = SentimenSerializer
queryset = Sentiment.objects.all()
|
<mask token>
class HistoryListView(generics.GenericAPIView):
serializer_class = HistorySerializer
def post(self, request):
serializer_class = self.serializer_class(data=request.data)
serializer_class.is_valid(raise_exception=True)
return Response(serializer_class.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = HistorySerializer
queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
queryset = Sentiment.objects.all()
serializer_class = SentimenSerializer(many=True)
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = SentimenSerializer
queryset = Sentiment.objects.all()
|
<mask token>
class HistoryMyList(generics.ListCreateAPIView):
<mask token>
<mask token>
class HistoryListView(generics.GenericAPIView):
serializer_class = HistorySerializer
def post(self, request):
serializer_class = self.serializer_class(data=request.data)
serializer_class.is_valid(raise_exception=True)
return Response(serializer_class.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = HistorySerializer
queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
queryset = Sentiment.objects.all()
serializer_class = SentimenSerializer(many=True)
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = SentimenSerializer
queryset = Sentiment.objects.all()
|
from django.shortcuts import render
from rest_framework.response import Response
from .serializers import *
from rest_framework import generics, status
class HistoryMyList(generics.ListCreateAPIView):
serializer_class = HistorySer
queryset = History.objects.all()
class HistoryListView(generics.GenericAPIView):
serializer_class = HistorySerializer
def post(self, request):
serializer_class = self.serializer_class(data=request.data)
serializer_class.is_valid(raise_exception=True)
return Response(serializer_class.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = HistorySerializer
queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
queryset = Sentiment.objects.all()
serializer_class = SentimenSerializer(many=True)
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
serializer_class = SentimenSerializer
queryset = Sentiment.objects.all()
|
[
2,
8,
9,
10,
13
] |
2,598 |
19f17044d48c8cc0f9d366cde7edc846ff343462
|
<mask token>
def search(query, finalIndexPath):
listOfDicts = list()
queryList = set()
tempList = query.strip().lower().replace("'", '').split(' ')
for word in tempList:
if word not in stopWords:
queryList.add(word)
print('Cleaned query tokens:')
print(queryList, '\n')
queryList = list(queryList)
for word in queryList:
charPath = word[0]
jsonFilePath = str(Path(finalIndexPath) / charPath / word) + '.json'
try:
with open(jsonFilePath, 'r') as file:
data = file.read()
jsonObj = json.loads(data)
docsDict = jsonObj['docList']
listOfDicts.append(docsDict)
except:
pass
return intersectDicts(listOfDicts)
def getDocURLs(intersectedDocs, indexPath, cacheURLs):
listUrls = list()
for docID in intersectedDocs:
if docID in cacheURLs:
fileUrl = cacheURLs[docID]
listUrls.append((fileUrl, intersectedDocs[docID]))
return listUrls
def intersectDicts(listOfDicts):
if len(listOfDicts) == 1:
return listOfDicts[0]
intersection = {}
for dictItem in listOfDicts:
for doc in dictItem:
if doc not in intersection:
intersection[doc] = dictItem[doc]
else:
intersection[doc] += dictItem[doc]
print('intersection = ', intersection)
return intersection
def flaskBackendQuery(queryUser, cacheURLs):
indexPath = GLOBALS.FINAL_INDEX
if queryUser.strip() == '':
print('Query needs to be at least one character')
unsortedDocs = search(queryUser, indexPath)
unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
return sortedURLs[0:10]
<mask token>
|
<mask token>
def search(query, finalIndexPath):
listOfDicts = list()
queryList = set()
tempList = query.strip().lower().replace("'", '').split(' ')
for word in tempList:
if word not in stopWords:
queryList.add(word)
print('Cleaned query tokens:')
print(queryList, '\n')
queryList = list(queryList)
for word in queryList:
charPath = word[0]
jsonFilePath = str(Path(finalIndexPath) / charPath / word) + '.json'
try:
with open(jsonFilePath, 'r') as file:
data = file.read()
jsonObj = json.loads(data)
docsDict = jsonObj['docList']
listOfDicts.append(docsDict)
except:
pass
return intersectDicts(listOfDicts)
def getDocURLs(intersectedDocs, indexPath, cacheURLs):
listUrls = list()
for docID in intersectedDocs:
if docID in cacheURLs:
fileUrl = cacheURLs[docID]
listUrls.append((fileUrl, intersectedDocs[docID]))
return listUrls
def intersectDicts(listOfDicts):
if len(listOfDicts) == 1:
return listOfDicts[0]
intersection = {}
for dictItem in listOfDicts:
for doc in dictItem:
if doc not in intersection:
intersection[doc] = dictItem[doc]
else:
intersection[doc] += dictItem[doc]
print('intersection = ', intersection)
return intersection
def flaskBackendQuery(queryUser, cacheURLs):
indexPath = GLOBALS.FINAL_INDEX
if queryUser.strip() == '':
print('Query needs to be at least one character')
unsortedDocs = search(queryUser, indexPath)
unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
return sortedURLs[0:10]
if __name__ == '__main__':
indexPath = 'C:\\1_Repos\\developer'
finalIndexPath = 'C:\\1_Repos\\developer'
query = input('Enter a search query: ')
if query.strip() == '':
print('Query needs to be at least one character')
unsortedDocs = search(query, finalIndexPath)
unsortedURLs = getDocURLs(unsortedDocs, indexPath)
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
print(f"\n------------ Top 5 Docs for '{query}' ------------\n")
for i, doc in enumerate(sortedURLs):
if i > 5:
break
print(doc[0], ' = ', doc[1])
print('\n------------ DONE! ------------\n')
|
<mask token>
stopWords = {'a', 'about', 'above', 'after', 'again', 'against', 'all',
'am', 'an', 'and', 'any', 'are', "aren't", 'as', 'at', 'be', 'because',
'been', 'before', 'being', 'below', 'between', 'both', 'but', 'by',
"can't", 'cannot', 'could', "couldn't", 'did', "didn't", 'do', 'does',
"doesn't", 'doing', "don't", 'down', 'during', 'each', 'few', 'for',
'from', 'further', 'had', "hadn't", 'has', "hasn't", 'have', "haven't",
'having', 'he', "he'd", "he'll", "he's", 'her', 'here', "here's",
'hers', 'herself', 'him', 'himself', 'his', 'how', "how's", 'i', "i'd",
"i'll", "i'm", "i've", 'if', 'in', 'into', 'is', "isn't", 'it', "it's",
'its', 'itself', "let's", 'me', 'more', 'most', "mustn't", 'my',
'myself', 'no', 'nor', 'not', 'of', 'off', 'on', 'once', 'only', 'or',
'other', 'ought', 'our', 'ours', 'ourselves', 'out', 'over', 'own',
'same', "shan't", 'she', "she'd", "she'll", "she's", 'should',
"shouldn't", 'so', 'some', 'such', 'than', 'that', "that's", 'the',
'their', 'theirs', 'them', 'themselves', 'then', 'there', "there's",
'these', 'they', "they'd", "they'll", "they're", "they've", 'this',
'those', 'through', 'to', 'too', 'under', 'until', 'up', 'very', 'was',
"wasn't", 'we', "we'd", "we'll", "we're", "we've", 'were', "weren't",
'what', "what's", 'when', "when's", 'where', "where's", 'which',
'while', 'who', "who's", 'whom', 'why', "why's", 'with', "won't",
'would', "wouldn't", 'you', "you'd", "you'll", "you're", "you've",
'your', 'yours', 'yourself', 'yourselves'}
def search(query, finalIndexPath):
listOfDicts = list()
queryList = set()
tempList = query.strip().lower().replace("'", '').split(' ')
for word in tempList:
if word not in stopWords:
queryList.add(word)
print('Cleaned query tokens:')
print(queryList, '\n')
queryList = list(queryList)
for word in queryList:
charPath = word[0]
jsonFilePath = str(Path(finalIndexPath) / charPath / word) + '.json'
try:
with open(jsonFilePath, 'r') as file:
data = file.read()
jsonObj = json.loads(data)
docsDict = jsonObj['docList']
listOfDicts.append(docsDict)
except:
pass
return intersectDicts(listOfDicts)
def getDocURLs(intersectedDocs, indexPath, cacheURLs):
listUrls = list()
for docID in intersectedDocs:
if docID in cacheURLs:
fileUrl = cacheURLs[docID]
listUrls.append((fileUrl, intersectedDocs[docID]))
return listUrls
def intersectDicts(listOfDicts):
if len(listOfDicts) == 1:
return listOfDicts[0]
intersection = {}
for dictItem in listOfDicts:
for doc in dictItem:
if doc not in intersection:
intersection[doc] = dictItem[doc]
else:
intersection[doc] += dictItem[doc]
print('intersection = ', intersection)
return intersection
def flaskBackendQuery(queryUser, cacheURLs):
indexPath = GLOBALS.FINAL_INDEX
if queryUser.strip() == '':
print('Query needs to be at least one character')
unsortedDocs = search(queryUser, indexPath)
unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
return sortedURLs[0:10]
if __name__ == '__main__':
indexPath = 'C:\\1_Repos\\developer'
finalIndexPath = 'C:\\1_Repos\\developer'
query = input('Enter a search query: ')
if query.strip() == '':
print('Query needs to be at least one character')
unsortedDocs = search(query, finalIndexPath)
unsortedURLs = getDocURLs(unsortedDocs, indexPath)
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
print(f"\n------------ Top 5 Docs for '{query}' ------------\n")
for i, doc in enumerate(sortedURLs):
if i > 5:
break
print(doc[0], ' = ', doc[1])
print('\n------------ DONE! ------------\n')
|
from multiprocessing import Pool
from pathlib import Path
import os
import re
import json
import string
import math
import GLOBALS
stopWords = {'a', 'about', 'above', 'after', 'again', 'against', 'all',
'am', 'an', 'and', 'any', 'are', "aren't", 'as', 'at', 'be', 'because',
'been', 'before', 'being', 'below', 'between', 'both', 'but', 'by',
"can't", 'cannot', 'could', "couldn't", 'did', "didn't", 'do', 'does',
"doesn't", 'doing', "don't", 'down', 'during', 'each', 'few', 'for',
'from', 'further', 'had', "hadn't", 'has', "hasn't", 'have', "haven't",
'having', 'he', "he'd", "he'll", "he's", 'her', 'here', "here's",
'hers', 'herself', 'him', 'himself', 'his', 'how', "how's", 'i', "i'd",
"i'll", "i'm", "i've", 'if', 'in', 'into', 'is', "isn't", 'it', "it's",
'its', 'itself', "let's", 'me', 'more', 'most', "mustn't", 'my',
'myself', 'no', 'nor', 'not', 'of', 'off', 'on', 'once', 'only', 'or',
'other', 'ought', 'our', 'ours', 'ourselves', 'out', 'over', 'own',
'same', "shan't", 'she', "she'd", "she'll", "she's", 'should',
"shouldn't", 'so', 'some', 'such', 'than', 'that', "that's", 'the',
'their', 'theirs', 'them', 'themselves', 'then', 'there', "there's",
'these', 'they', "they'd", "they'll", "they're", "they've", 'this',
'those', 'through', 'to', 'too', 'under', 'until', 'up', 'very', 'was',
"wasn't", 'we', "we'd", "we'll", "we're", "we've", 'were', "weren't",
'what', "what's", 'when', "when's", 'where', "where's", 'which',
'while', 'who', "who's", 'whom', 'why', "why's", 'with', "won't",
'would', "wouldn't", 'you', "you'd", "you'll", "you're", "you've",
'your', 'yours', 'yourself', 'yourselves'}
def search(query, finalIndexPath):
listOfDicts = list()
queryList = set()
tempList = query.strip().lower().replace("'", '').split(' ')
for word in tempList:
if word not in stopWords:
queryList.add(word)
print('Cleaned query tokens:')
print(queryList, '\n')
queryList = list(queryList)
for word in queryList:
charPath = word[0]
jsonFilePath = str(Path(finalIndexPath) / charPath / word) + '.json'
try:
with open(jsonFilePath, 'r') as file:
data = file.read()
jsonObj = json.loads(data)
docsDict = jsonObj['docList']
listOfDicts.append(docsDict)
except:
pass
return intersectDicts(listOfDicts)
def getDocURLs(intersectedDocs, indexPath, cacheURLs):
listUrls = list()
for docID in intersectedDocs:
if docID in cacheURLs:
fileUrl = cacheURLs[docID]
listUrls.append((fileUrl, intersectedDocs[docID]))
return listUrls
def intersectDicts(listOfDicts):
if len(listOfDicts) == 1:
return listOfDicts[0]
intersection = {}
for dictItem in listOfDicts:
for doc in dictItem:
if doc not in intersection:
intersection[doc] = dictItem[doc]
else:
intersection[doc] += dictItem[doc]
print('intersection = ', intersection)
return intersection
def flaskBackendQuery(queryUser, cacheURLs):
indexPath = GLOBALS.FINAL_INDEX
if queryUser.strip() == '':
print('Query needs to be at least one character')
unsortedDocs = search(queryUser, indexPath)
unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
return sortedURLs[0:10]
if __name__ == '__main__':
indexPath = 'C:\\1_Repos\\developer'
finalIndexPath = 'C:\\1_Repos\\developer'
query = input('Enter a search query: ')
if query.strip() == '':
print('Query needs to be at least one character')
unsortedDocs = search(query, finalIndexPath)
unsortedURLs = getDocURLs(unsortedDocs, indexPath)
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
print(f"\n------------ Top 5 Docs for '{query}' ------------\n")
for i, doc in enumerate(sortedURLs):
if i > 5:
break
print(doc[0], ' = ', doc[1])
print('\n------------ DONE! ------------\n')
|
from multiprocessing import Pool
from pathlib import Path
import os
import re
import json
import string
import math
import GLOBALS
stopWords = {"a", "about", "above", "after", "again", "against", "all", "am", "an", "and", "any", "are", "aren't",
"as", "at", "be", "because", "been", "before", "being", "below", "between", "both", "but", "by",
"can't",
"cannot", "could", "couldn't", "did", "didn't", "do", "does", "doesn't", "doing", "don't", "down",
"during",
"each", "few", "for", "from", "further", "had", "hadn't", "has", "hasn't", "have", "haven't", "having",
"he", "he'd",
"he'll", "he's", "her", "here", "here's", "hers", "herself", "him", "himself", "his", "how", "how's",
"i", "i'd", "i'll",
"i'm", "i've", "if", "in", "into", "is", "isn't", "it", "it's", "its", "itself", "let's", "me", "more",
"most", "mustn't", "my",
"myself", "no", "nor", "not", "of", "off", "on", "once", "only", "or", "other", "ought", "our", "ours",
"ourselves", "out", "over",
"own", "same", "shan't", "she", "she'd", "she'll", "she's", "should", "shouldn't", "so", "some",
"such", "than", "that", "that's",
"the", "their", "theirs", "them", "themselves", "then", "there", "there's", "these", "they", "they'd",
"they'll", "they're", "they've",
"this", "those", "through", "to", "too", "under", "until", "up", "very", "was", "wasn't", "we", "we'd",
"we'll", "we're", "we've", "were", "weren't",
"what", "what's", "when", "when's", "where", "where's", "which", "while", "who", "who's", "whom",
"why", "why's", "with", "won't", "would", "wouldn't",
"you", "you'd", "you'll", "you're", "you've", "your", "yours", "yourself", "yourselves"}
# Main Functions (aka functions called in __main__)
# Takes in query as str. Returns list of docs that match the OR query (inclusive)
def search(query, finalIndexPath):
listOfDicts = list()
queryList = set() # We use set() to remove duplicate terms, and we won't have to open a file twice
tempList = query.strip().lower().replace("'", "").split(" ")
for word in tempList:
if word not in stopWords:
queryList.add(word)
print("Cleaned query tokens:")
print(queryList, "\n") # query tokens with stopwords removed and replacing apostrohe and lower()
#convert set to list to enumerate
queryList = list(queryList)
for word in queryList:
charPath = word[0] #Get 1st char of current word, use to find subdir
# Get the file path of the final_indexed token.json file
jsonFilePath = str(Path(finalIndexPath) / charPath / word) + ".json"
try:
with open(jsonFilePath, "r") as file:
data = file.read()
jsonObj = json.loads(data)
docsDict = jsonObj["docList"]
listOfDicts.append(docsDict)
except:
pass
return intersectDicts(listOfDicts)
def getDocURLs(intersectedDocs, indexPath, cacheURLs):
listUrls = list() # holds unique file paths of .json files
#
# hashTablePath = Path(indexPath) / "hashurls.txt"
# with open(hashTablePath, "r") as file:
# data = file.read()
# hashSet = json.loads(data)
for docID in intersectedDocs:
if(docID in cacheURLs):
fileUrl = cacheURLs[docID]
listUrls.append( (fileUrl, intersectedDocs[docID]) )
return listUrls
# Helper Functions (aka functions called by other functions)
# Returns unique dict of file urls from hashurl.txt (or hasthtable.txt)
def intersectDicts(listOfDicts):
if len(listOfDicts) == 1:
return listOfDicts[0]
intersection = {}
for dictItem in listOfDicts:
for doc in dictItem:
if doc not in intersection:
intersection[doc] = dictItem[doc] #
else:
intersection[doc] += dictItem[doc] #adding tfidf weights
print("intersection = ", intersection)
return intersection
def flaskBackendQuery(queryUser, cacheURLs):
indexPath = GLOBALS.FINAL_INDEX
if (queryUser.strip() == ""):
print("Query needs to be at least one character")
unsortedDocs = search(queryUser, indexPath) #list of dictionaries
# Change filepaths to website URLs for displaying
unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)
# Sort docs by the TF-IDF score
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True) #highest scores shown first
return sortedURLs[0:10] #return 10 results
if __name__ == '__main__':
#####
# Aljon
# finalIndexPath = "C:\\Users\\aljon\\Documents\\CS_121\\Assignment_3\\CS121_InvertedIndex\\final_index"
# indexPath = "C:\\Users\\aljon\\Documents\\CS_121\\Assignment_3\\CS121_InvertedIndex\\index"
# William
# folderPath = "C:\\1_Repos\\developer\\partial_indexes"
# folderPath = "C:\\Anaconda3\\envs\\Projects\\developer\\partial_indexes"
indexPath = "C:\\1_Repos\\developer"
finalIndexPath = "C:\\1_Repos\\developer"
# Jerome
#folderPath = "C:\\Users\\arkse\\Desktop\\CS121_InvertedIndex\\DEV"
# Art
# windows
#folderPath = "C:\\Users\\aghar\\Downloads\\DEV"
# linux
#folderPath = "/home/anon/Downloads/DEV"
#####
# Get query from user
query = input("Enter a search query: ")
if(query.strip() == ""):
print("Query needs to be at least one character")
# Fetch all results of query, intersect them to follow Bool-AND logic
unsortedDocs = search(query, finalIndexPath)
# Change filepaths to website URLs for displaying
unsortedURLs = getDocURLs(unsortedDocs, indexPath)
# Sort docs by the TF-IDF score
sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
# Print top 5 ranked file-urls for given query
print(f"\n------------ Top 5 Docs for '{query}' ------------\n")
for i, doc in enumerate(sortedURLs):
if (i > 5):
break
print(doc[0], " = ", doc[1])
print("\n------------ DONE! ------------\n")
|
[
4,
5,
6,
7,
8
] |
2,599 |
5f242ae801a239dde6a22e4fb68b4ef4b2459be6
|
<mask token>
def adVector(v, w):
n = len(v)
r = []
for k in range(n):
r += [lc.cplxsum(v[k], w[k])]
return r
<mask token>
def MultEscalarVector(v, w):
n = len(w)
r = []
for k in range(n):
r += [lc.cplxproduct(v, w[k])]
return r
def sumaMatrix(v, w):
m = len(w)
n = len(v[0])
fila = []
r = [fila] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r += [lc.cplxsum(v[j][k], w[j][k])]
return r
def invAdMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r[j] += [lc.cplxproduct((-1, 0), v[j][k])]
return r
def MultEscMtx(v, w):
m = len(w)
n = len(w[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [lc.cplxproduct(v, w[j][k])]
return r
<mask token>
def ProdMtx(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
def MtxVec(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
def vectorPrInt(v):
r = int(lc.cplxmod(v) ** 2)
return r
def vectorNorm(v):
r = int(lc.cplxmod(v))
return r
def disV(v, w):
ele = 0
s = 0
for i in range(len(v)):
ele = v[i] - w[i]
ele = ele ** 2
s += ele
n = s ** (1 / 2)
return n
<mask token>
|
<mask token>
def adVector(v, w):
n = len(v)
r = []
for k in range(n):
r += [lc.cplxsum(v[k], w[k])]
return r
<mask token>
def MultEscalarVector(v, w):
n = len(w)
r = []
for k in range(n):
r += [lc.cplxproduct(v, w[k])]
return r
def sumaMatrix(v, w):
m = len(w)
n = len(v[0])
fila = []
r = [fila] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r += [lc.cplxsum(v[j][k], w[j][k])]
return r
def invAdMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r[j] += [lc.cplxproduct((-1, 0), v[j][k])]
return r
def MultEscMtx(v, w):
m = len(w)
n = len(w[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [lc.cplxproduct(v, w[j][k])]
return r
def trMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [v[k][j]]
return r
<mask token>
def ProdMtx(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
def MtxVec(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
def vectorPrInt(v):
r = int(lc.cplxmod(v) ** 2)
return r
def vectorNorm(v):
r = int(lc.cplxmod(v))
return r
def disV(v, w):
ele = 0
s = 0
for i in range(len(v)):
ele = v[i] - w[i]
ele = ele ** 2
s += ele
n = s ** (1 / 2)
return n
def hermMtx(v):
if adjMtx(v) == v:
return True
<mask token>
|
<mask token>
def adVector(v, w):
n = len(v)
r = []
for k in range(n):
r += [lc.cplxsum(v[k], w[k])]
return r
def invVector(v):
n = len(v)
r = []
for k in range(n):
r += [lc.cplxproduct((-1, 0), v[k])]
return r
def MultEscalarVector(v, w):
n = len(w)
r = []
for k in range(n):
r += [lc.cplxproduct(v, w[k])]
return r
def sumaMatrix(v, w):
m = len(w)
n = len(v[0])
fila = []
r = [fila] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r += [lc.cplxsum(v[j][k], w[j][k])]
return r
def invAdMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r[j] += [lc.cplxproduct((-1, 0), v[j][k])]
return r
def MultEscMtx(v, w):
m = len(w)
n = len(w[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [lc.cplxproduct(v, w[j][k])]
return r
def trMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [v[k][j]]
return r
<mask token>
def ProdMtx(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
def MtxVec(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
def vectorPrInt(v):
r = int(lc.cplxmod(v) ** 2)
return r
def vectorNorm(v):
r = int(lc.cplxmod(v))
return r
def disV(v, w):
ele = 0
s = 0
for i in range(len(v)):
ele = v[i] - w[i]
ele = ele ** 2
s += ele
n = s ** (1 / 2)
return n
def hermMtx(v):
if adjMtx(v) == v:
return True
<mask token>
|
<mask token>
def adVector(v, w):
n = len(v)
r = []
for k in range(n):
r += [lc.cplxsum(v[k], w[k])]
return r
def invVector(v):
n = len(v)
r = []
for k in range(n):
r += [lc.cplxproduct((-1, 0), v[k])]
return r
def MultEscalarVector(v, w):
n = len(w)
r = []
for k in range(n):
r += [lc.cplxproduct(v, w[k])]
return r
def sumaMatrix(v, w):
m = len(w)
n = len(v[0])
fila = []
r = [fila] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r += [lc.cplxsum(v[j][k], w[j][k])]
return r
def invAdMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r[j] += [lc.cplxproduct((-1, 0), v[j][k])]
return r
def MultEscMtx(v, w):
m = len(w)
n = len(w[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [lc.cplxproduct(v, w[j][k])]
return r
def trMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [v[k][j]]
return r
<mask token>
def ProdMtx(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
def MtxVec(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
def vectorPrInt(v):
r = int(lc.cplxmod(v) ** 2)
return r
def vectorNorm(v):
r = int(lc.cplxmod(v))
return r
def disV(v, w):
ele = 0
s = 0
for i in range(len(v)):
ele = v[i] - w[i]
ele = ele ** 2
s += ele
n = s ** (1 / 2)
return n
def hermMtx(v):
if adjMtx(v) == v:
return True
def vectorTsorProduct(A, B):
na = len(A)
nb = len(B)
nr = nb * na
R = [(0, 0)] * nr
index = 0
for j in range(na):
for k in range(nb):
R[index] = MultEscalarVector(A[j], B[k])
index = index + 1
return R
|
import Libcplx as lc
# 1.Adición de vectores complejos
def adVector(v, w):
n = len(v)
r = []
for k in range(n):
r += [lc.cplxsum(v[k], w[k])]
return r
# 2.Inverso (aditivo) de un vector complejo
def invVector(v):
n = len(v)
r = []
for k in range(n):
r += [lc.cplxproduct((-1, 0), v[k])]
return r
# 3.Multiplicación de un escalar complejo
def MultEscalarVector(v, w):
n = len(w)
r = []
for k in range(n):
r += [lc.cplxproduct(v, w[k])]
return r
# 4.Adición de matrices complejas
def sumaMatrix(v, w):
m = len(w)
n = len(v[0])
fila = []
r = [fila] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r += [lc.cplxsum(v[j][k], w[j][k])]
return r
# 5.Inversa (aditiva) de una matriz compleja
def invAdMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(m):
fila = []
r[j] = fila
for k in range(n):
r[j] += [lc.cplxproduct((-1,0), v[j][k])]
return r
# 6. Multiplicación de un escalar por una matriz compleja
def MultEscMtx(v, w):
m = len(w)
n = len(w[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [lc.cplxproduct(v, w[j][k])]
return r
# 7. Transpuesta de una matriz/vector
def trMtx(v):
m = len(v)
n = len(v[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [v[k][j]]
return r
# 8. Conjugada de una matriz/vector
def conjMtx(A):
m = len(A)
n = len(A[0])
r = [n] * m
for j in range(n):
fila = []
r[j] = fila
for k in range(m):
r[j] += [lc.cplxconj((-1,0), A[j][k])]
return r
# 9.Adjunta (daga) de una matriz/vector
def adjMtx(A):
n = len(A)
m = len(A[0])
r = [n] * m
for j in range(n):
fila = [] * n
r[j] = fila
for k in range(m):
r[j] += [lc.cplxconj((-1,0), A[k][j])]
return r
# 10.Producto de dos matrices (de tamaños compatibles)
def ProdMtx(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
# 11. Función para calcular la "acción" de una matriz sobre un vector
def MtxVec(A, B):
m = len(A)
n = len(A[0])
fila = [(0, 0)] * n
r = [fila] * m
for j in range(m):
fila = [(0, 0)] * n
r[j] = fila
for k in range(n):
r[j][k] = lc.cplxproduct(A[j][k], B[j][k])
return r
# 12. Producto interno de dos vectores
def vectorPrInt(v):
r = int(lc.cplxmod(v) ** 2)
return r
# 13. Norma de un vector
def vectorNorm(v):
r = int(lc.cplxmod(v))
return r
# 14. Distancia entre dos vectores
def disV(v,w):
ele = 0
s = 0
for i in range(len(v)):
ele = v[i]-w[i]
ele = ele**2
s += ele
n = s ** (1/2)
return n
# 15. Revisar si una matriz es unitaria
# 16. Revisar si una matriz es Hermitiana
def hermMtx(v):
if adjMtx(v) == v:
return True
# 17. Producto tensor de dos matrices/vectores
def vectorTsorProduct(A, B):
na = len(A)
nb = len(B)
nr = nb * na
R = [(0, 0)] * nr
index = 0
for j in range(na):
for k in range(nb):
R[index] = MultEscalarVector(A[j], B[k])
index = index + 1
return R
# print(adVector(v,w))
# print(invVector(v))
# print(MultEscalarVector(v,w))
# print(sumaMatrix(v,w))
# print(InvAdMtx(v,w))
# print(MultEscMtx(v,w))
# print(trMtx(v))
|
[
10,
12,
13,
14,
18
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.