# ********************************************************************************** #
# #
# Project: Data Frame Explorer #
# Author: Pawel Rosikiewicz #
# Contact: prosikiewicz(a)gmail.com #
# #
# License: MIT License #
# Copyright (C) 2021.01.30 Pawel Rosikiewicz #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
# #
# ********************************************************************************** #
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import pandas as pd
import random
import glob
import re
import os
import seaborn as sns
from IPython.display import display
from pandas.api.types import is_numeric_dtype
from pandas.api.types import is_string_dtype
# Function, ............................................................................
def find_and_display_patter_in_series(*, series, pattern):
"I used that function when i don't remeber full name of a given column"
res = series.loc[series.str.contains(pattern)]
return res
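# Usage sketch (added for illustration; the column names are hypothetical):
# cols = pd.Series(["user_id", "user_name", "created_at"])
# find_and_display_patter_in_series(series=cols, pattern="user")
# -> returns a Series holding "user_id" and "user_name"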
# Function, ...........................................................................................
def load_csv(*, path, filename, sep="\t", verbose=True):
"""
Loads csv into pandas df, based on pandas.read_scv(),
Returns error, if file or directoy not found
Parameters/Input
_________________ _______________________________________________________________________________
* path full path to directory
* csv_name. full csv file name
* separator "\t", by default
* display_head bool, True, by default, display df.head(),
irrespectively when the futions was called.
Returns
_________________ _______________________________________________________________________________
* DataFrame by Pandas
"""
os.chdir(path)
if len(glob.glob(filename))==1:
df = pd.read_csv(filename, sep=sep, low_memory=False)
# display example,
if verbose==True:
display(df.head(3))
print(df.shape)
else:
pass
# return,
return df
else:
if verbose==True:
print(f"""ERROR :csv file {filename}, was not found in: \n {path}""")
else:
pass
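# Usage sketch (added for illustration; path and file name are hypothetical):
# df = load_csv(path="/path/to/data", filename="sales.csv", sep="\t", verbose=True)
# -> displays df.head(3) and df.shape, then returns the DataFrame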
# Function, ............................................................................
def find_patter_in_series(*, s, pat, tolist=True):
    '''
    I use this function when I don't remember the full name of a given column
    '''
res = s.loc[s.str.contains(pat)]
if tolist==True:
return res.values.tolist()
else:
return res
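# Usage sketch (added for illustration; the pattern is hypothetical):
# find_patter_in_series(s=pd.Series(df.columns), pat="date", tolist=True)
# -> same lookup as above, but returned as a plain python list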
# Function, ...........................................................................................
def format_to_datetime(*, data, pattern_list, timezone='UTC', unixtime=False, dt_format='%Y-%m-%d %H:%M:%S', verbose=False):
    '''
    formats selected columns in df to datetime dtype, and converts all times to one timezone (UTC by default)
    works with unix time units, ie. the number of seconds since 1970
    columns in df are found using full column names or keywords in the column name
    '''
assert type(data)==pd.DataFrame, "please provide data in pandas dataframe format"
if isinstance(pattern_list, str):
pattern_list = [pattern_list]
else:
pass
for pat in pattern_list:
# find column names using provided patterns or their full names,
columns_with_potential_datetime_obj = list(find_and_display_patter_in_series(series=pd.Series(data.columns), pattern=pat))
# replace
for i in columns_with_potential_datetime_obj:
            # keep an example of an old cell (first row by position, not index label 0)
            before_formatting = str(data[i].iloc[0])
# convert to one format
if unixtime==True:
                s = pd.to_datetime(data.loc[:, i], errors="coerce", unit='s').copy() # format cannot be combined with unit="s"; the result is equivalent
data.loc[:, i] = s
if timezone!=None:
data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone)
else:
pass
else:
s = pd.to_datetime(data.loc[:, i], errors="coerce",format=dt_format).copy()
data.loc[:, i] = s
                if timezone!=None:
                    data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone) # strings parsed with a format are tz-naive, so localize them (tz_convert would raise)
else:
pass
# info
if verbose==True:
print(f"date time formatted in: {i}")
print(f" - {data.loc[:, i].isnull().sum()} NaN were instroduced by coerce")
print(f" - Example: {before_formatting} -->> {str(data.loc[0, i])}", end="\n")
else:
pass
return data
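# Usage sketch (added for illustration; the frame below is hypothetical and
# exercises both branches, string dates and unix seconds):
# df = pd.DataFrame({"event_time": ["2021-01-30 12:00:00"], "event_unix": [1612008000]})
# df = format_to_datetime(data=df, pattern_list="event_time", verbose=True)
# df = format_to_datetime(data=df, pattern_list="event_unix", unixtime=True, verbose=True)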
# Function, ...........................................................................................
def replace_text(*,df ,pat="", colnames="all", fillna=np.nan, verbose=True):
"""
searches string with a given pattern and replace it with a new patter (fillna), eg: nan,
Parameters/Input
_________________ _______________________________________________________________________________
* df Pandas Dataframe
* searched_pattern "", str literal, used by pd.Series.str.contains()
* colnames default, "all", or list with selected colnames in df
* fillna default numpy.nan, or str literal
- what do you want to place instead of searched pattern in df
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFramne.copy() with new values,
* display messages. number of replaced straings in each column, and examples of replcaced values
"""
# for older version,
searched_pattern = pat
col_names = colnames
# check col_names with values to replace,
if col_names=="all":
sel_col_names = list(df.columns)
else:
sel_col_names = col_names
# display message header,
if verbose==True:
print(f"""\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\n""")
if verbose==False:
pass
    # exchange the searched pattern in each column separately,
    for i, col_name in enumerate(sel_col_names):
        # .. test that the column really holds string values; otherwise it may be float (all NaN in the column), and no action will be taken
if is_string_dtype(df[col_name]):
try:
                # .... find positions with a given pattern and select three examples to display for the user,
                positions_to_replace = df[col_name].str.contains(searched_pattern, na=False).values # arr
                examples_to_display = [str(x) for x in list(df.loc[list(positions_to_replace), col_name].str[0:20].values.tolist()[0:3])]
                # .... replace positions, and find examples of unchanged positions,
                df.loc[list(positions_to_replace), col_name] = [fillna]*positions_to_replace.sum()
                examples_of_positions_that_were_not_replaced = [str(x) for x in list(df.loc[list(positions_to_replace==False), col_name].str[0:20].values.tolist()[0:3])]
                # .... display info,
                if verbose==True:
                    perc_of_replaced_pos_in_col = f"{positions_to_replace.sum()/df.shape[0]*100:.1f}%"
                    print(f"{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}")
                    print(f" - three examples of replaced positions: {'; '.join(examples_to_display)}", end="\n")
                    print(f" - three examples of unchanged positions: {'; '.join(examples_of_positions_that_were_not_replaced)}", end="\n\n")
                    # the prints above show the first three examples of exchanged values, to verify what was done,
else:
pass
            except Exception:
                if verbose==True:
                    print(f"{i} - {col_name} - - probably only missing data detected, Values were not replaced! \n")
else:
pass
else:
if verbose==True:
print(f"{i} - {col_name} - - is not of string type, Values were not replaced! \n")
else:
pass
return df.copy()
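# Usage sketch (added for illustration; frame and pattern are hypothetical):
# df = pd.DataFrame({"status": ["ok", "n/a", "ok", "n/a"]})
# df = replace_text(df=df, pat="n/a", colnames=["status"], fillna=np.nan)
# -> both "n/a" cells become NaN; the report lists counts and examples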
# Function, ...........................................................................................
def replace_numeric_values(*, df, colnames="all", lower_limit="none", upper_limit="none", equal=False, replace_with=np.nan, verbose=True):
"""
Replace numerical values that are outside of range of a values
prediced with a theoretical limits of a given variable,
eg less then 0 in weight of a product,
Provide examples and numbers of replaced instances
Parameters/Input
_________________ _______________________________________________________________________________
* df : Pandas DataFrame
* cols_in_df : list, exact colnames of selected or all columns in df
* lower_limit : int,float,"none", if "none" no action is taken
* upper_limit : int,float,"none", if "none" no action is taken
* replace_with : str, np.nan, int, float
* equal : bool, if True, >= and <= values then limits will be replaced,
if False (default), > and < values then limits will be replaced,
Returns
_________________ _______________________________________________________________________________
* DataFrame DataFramne.copy() with new values,
* display messages. number of replaced straings in each column, and examples of replcaced values
"""
cols_names = colnames
# .. check provided col_names,
if cols_names=="all":
cols = list(df.columns)
else:
cols = cols_names
# .. info, header,
if verbose==True:
print(f"""\n{"".join(["-"]*80)} \n Replacing Numerical Values in {len(cols)} columns""")
print(f" lower filter={lower_limit}, upper filter ={upper_limit}")
if equal==True:
print(f" Caution, equal=True, ie. values >= and <= then requested limits will be replaced")
print(f'{"".join(["-"]*80)}\n')
if verbose==False:
pass
# .. intelligent info,
total_count=[]
# .. count, to limit the number of displayed messages,
count = 0
# .. replace values and collect examples,
for i, j in enumerate(cols):
# ..... assume no values were replaced, so the messages work later,
info_lower_filter = 0
info_upper_filter = 0
# ..... test if the column is of the numeric type:
# from pandas.api.types import is_numeric_dtype
if is_numeric_dtype(df[j]):
# * replace values < or <= lower limit,
# - ----------------------------------
if lower_limit!="none":
if equal == True:
lower_filter = df.loc[:,j]<=lower_limit
if equal == False:
lower_filter = df.loc[:,j]<lower_limit
# info,
info_lower_filter=lower_filter.sum()
df.loc[list(lower_filter),j]=replace_with
# * replace values > or >= upper limit,
# - ----------------------------------
if upper_limit!="none":
if equal == True:
upper_filter = df.loc[:,j]>=upper_limit
if equal == False:
upper_filter = df.loc[:,j]>upper_limit
# info,
info_upper_filter=upper_filter.sum()
df.loc[list(upper_filter),j]=replace_with
# * find how many values were replaced, and add that to the total_count list
total_count.append(info_upper_filter+info_lower_filter)
            # * display examples for the first 3 columns with replaced values,
            if verbose==True:
                if info_upper_filter+info_lower_filter>0 and count <4:
                    print(f"eg: {i}, {j} : {info_lower_filter} values <{lower_limit}, ...{info_upper_filter} values >{upper_limit}")
                    # * add 1 to count, to limit the number of displayed examples,
                    count += 1
                else:
                    pass
else:
if verbose==True:
print(f"{i, j} is not of numeric type, values were not replaced !")
else:
pass
    # .. additional message, if more than 3 columns had replaced values,
    if verbose==True:
        if len(total_count)>3 and pd.Series(total_count).sum()>0:
            print(f".. in total, {pd.Series(total_count).sum()} values were replaced across {len(total_count)} numeric columns \n")
        # .. message in case no values were replaced at all,
        if pd.Series(total_count).sum()==0:
            print("No values were replaced in requested columns....")
else:
pass
# .. return,
return df.copy()
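# Usage sketch (added for illustration; the values are hypothetical): replace
# physically impossible weights, eg negative or absurdly large ones,
# df = pd.DataFrame({"weight": [2.5, -1.0, 3.2, 999.0]})
# df = replace_numeric_values(df=df, colnames=["weight"], lower_limit=0, upper_limit=100)
# -> -1.0 and 999.0 become NaN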
# function, ...................................................
def drop_nan(df, method="any", row=True, verbose=True):
    '''
    function to dropna with thresholds, applied to rows or columns
    . method
        . any        : rows/columns with any missing data are removed
        . all        : rows/columns with only missing data are removed
        . int, >0    : keeps rows/columns with at least this number of non-missing values
        . float, >0  : as above, but given as a fraction of the row/column length
    '''
assert type(df)==pd.DataFrame, "incorrect df dtype"
df = df.copy()
if verbose==True:
print(df.shape)
else:
pass
    # set the function for rows or columns,
if row==True:
shapeidx, dfaxis = 1, 0
else:
shapeidx, dfaxis = 0, 1
# use threshold or "all", or None for do nothing,
if method==None:
pass
elif isinstance(method, str):
        df = df.dropna(how=method, axis=dfaxis) # removes rows/columns with NaN in "any" or "all" positions
elif isinstance(method, int):
tr = method
if tr==0:
pass
else:
if tr>=df.shape[shapeidx]:
tr=df.shape[shapeidx]
else:
pass
            df = df.dropna(thresh=tr, axis=dfaxis) # keep only rows/columns with at least tr non-NA values
elif isinstance(method, float):
tr = int(np.ceil(df.shape[shapeidx]*(method)))
if tr==0:
pass
else:
if tr>=df.shape[shapeidx]:
tr=df.shape[shapeidx]
else:
pass
            df = df.dropna(thresh=tr, axis=dfaxis) # keep only rows/columns with at least tr non-NA values
else:
pass
# info and return
if verbose==True:
print(df.shape)
else:
pass
return df
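# Usage sketch (added for illustration): the three threshold styles side by side,
# df = drop_nan(df, method="all")   # drop rows made entirely of NaN
# df = drop_nan(df, method=2)       # keep rows with at least 2 non-NaN values
# df = drop_nan(df, method=0.5)     # keep rows with at least 50% non-NaN values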
# Function, ...........................................................................................
def drop_columns(*, df, columns_to_drop, verbose=True):
"""
Small function to quickly remove columns from,
by column names stored in the list
- created to give info on removed columns and whether I am chnaging df in proper way,
- the function allows for column name duplicates,
"""
assert type(df)==pd.DataFrame, "please provide df in pandas dataframe format"
df = df.copy()
    # keep only the unique names in the list, in case of accidental duplicates,
columns_to_drop = list(pd.Series(columns_to_drop).unique())
# .. info, header,
if verbose==True:
print(f"""Removing {len(columns_to_drop)} columns from df""")
else:
pass
# remove columns one by one,
for i,j in enumerate(columns_to_drop):
try:
            df.drop(columns=[j], inplace=True)
if verbose==True:
print(f"{i} removing: {j}, ==> new df.shape: {df.shape}")
else:
pass
        except KeyError:
if verbose==True:
print(f"{i} .... column: {j}, was not found in df, check if name is correct....")
else:
pass
return df
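# Usage sketch (added for illustration; the column names are hypothetical):
# df = drop_columns(df=df, columns_to_drop=["temp_col", "temp_col", "no_such_col"])
# -> "temp_col" is dropped once (the list is deduplicated), and the missing
#    "no_such_col" is only reported, not raised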
|
normal
|
{
"blob_id": "5f50b20bd044471ebb8e1350d1a75a250b255d8f",
"index": 8854,
"step-1": "<mask token>\n\n\ndef find_and_display_patter_in_series(*, series, pattern):\n \"\"\"I used that function when i don't remeber full name of a given column\"\"\"\n res = series.loc[series.str.contains(pattern)]\n return res\n\n\n<mask token>\n\n\ndef find_patter_in_series(*, s, pat, tolist=True):\n \"\"\"\n I used that function when i don't remeber full name of a given column\n \"\"\"\n res = s.loc[s.str.contains(pat)]\n if tolist == True:\n return res.values.tolist()\n else:\n return res\n\n\ndef format_to_datetime(*, data, pattern_list, timezone='UTC', unixtime=\n False, dt_format='%Y-%m-%d %H:%M:%S', verbose=False):\n \"\"\"\n formats columns in df into datetime dtype, and set all times to UTC\n work with unix time units, ie. second number since 1970\n columns in df, are find using full comlumn name or keywords in column name\n \"\"\"\n assert type(data\n ) == pd.DataFrame, 'please provide data in pandas dataframe format'\n if isinstance(pattern_list, str):\n pattern_list = [pattern_list]\n else:\n pass\n for pat in pattern_list:\n columns_with_potential_datetime_obj = list(\n find_and_display_patter_in_series(series=pd.Series(data.columns\n ), pattern=pat))\n for i in columns_with_potential_datetime_obj:\n before_formatting = str(data.loc[0, i])\n if unixtime == True:\n s = pd.to_datetime(data.loc[:, i], errors='coerce', unit='s'\n ).copy()\n data.loc[:, i] = s\n if timezone != None:\n data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone)\n else:\n pass\n else:\n s = pd.to_datetime(data.loc[:, i], errors='coerce', format=\n dt_format).copy()\n data.loc[:, i] = s\n if timezone != None:\n data.loc[:, i] = data.loc[:, i].dt.tz_convert(timezone)\n else:\n pass\n if verbose == True:\n print(f'date time formatted in: {i}')\n print(\n f' - {data.loc[:, i].isnull().sum()} NaN were instroduced by coerce'\n )\n print(\n f' - Example: {before_formatting} -->> {str(data.loc[0, i])}'\n , end='\\n')\n else:\n pass\n return data\n\n\ndef replace_text(*, df, pat='', colnames='all', fillna=np.nan, verbose=True):\n \"\"\" \n searches string with a given pattern and replace it with a new patter (fillna), eg: nan,\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * df Pandas Dataframe\n * searched_pattern \"\", str literal, used by pd.Series.str.contains() \n * colnames default, \"all\", or list with selected colnames in df\n * fillna default numpy.nan, or str literal \n - what do you want to place instead of searched pattern in df\n \n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame DataFramne.copy() with new values,\n * display messages. 
number of replaced straings in each column, and examples of replcaced values\n \"\"\"\n searched_pattern = pat\n col_names = colnames\n if col_names == 'all':\n sel_col_names = list(df.columns)\n else:\n sel_col_names = col_names\n if verbose == True:\n print(\n f'\\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\\n'\n )\n if verbose == False:\n pass\n for i, col_name in enumerate(sel_col_names):\n if is_string_dtype(df[col_name]):\n try:\n positions_to_replace = df[col_name].str.contains(\n searched_pattern, na=False).values\n examples_to_display = [str(x) for x in list(df.loc[list(\n positions_to_replace), col_name].str[0:20].values.\n tolist()[0:3])]\n df.loc[list(positions_to_replace), col_name] = [fillna\n ] * positions_to_replace.sum()\n examples_of_positions_that_were_not_replaced = [str(x) for\n x in list(df.loc[list(positions_to_replace == False),\n col_name].str[0:20].values.tolist()[0:3])]\n if verbose == True:\n perc_of_replaced_pos_in_col = ''.join([str(\n positions_to_replace.sum() / df.shape[0] * 100), '%'])\n print(\n f'{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}'\n )\n print(\n f\" - three examples of replaced postions: {'; '.join(examples_to_display)}\"\n , end='\\n')\n print(\n f\" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}\"\n , end='\\n\\n')\n else:\n pass\n except:\n if verbose == True:\n print(\n f\"\"\"{i} - {col_name} - - probably only missing data datected, Values were not replaced! \n\"\"\"\n )\n else:\n pass\n elif verbose == True:\n print(\n f'{i} - {col_name} - - is not of string type, Values were not replaced! \\n'\n )\n else:\n pass\n return df.copy()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef find_and_display_patter_in_series(*, series, pattern):\n \"\"\"I used that function when i don't remeber full name of a given column\"\"\"\n res = series.loc[series.str.contains(pattern)]\n return res\n\n\n<mask token>\n\n\ndef find_patter_in_series(*, s, pat, tolist=True):\n \"\"\"\n I used that function when i don't remeber full name of a given column\n \"\"\"\n res = s.loc[s.str.contains(pat)]\n if tolist == True:\n return res.values.tolist()\n else:\n return res\n\n\ndef format_to_datetime(*, data, pattern_list, timezone='UTC', unixtime=\n False, dt_format='%Y-%m-%d %H:%M:%S', verbose=False):\n \"\"\"\n formats columns in df into datetime dtype, and set all times to UTC\n work with unix time units, ie. second number since 1970\n columns in df, are find using full comlumn name or keywords in column name\n \"\"\"\n assert type(data\n ) == pd.DataFrame, 'please provide data in pandas dataframe format'\n if isinstance(pattern_list, str):\n pattern_list = [pattern_list]\n else:\n pass\n for pat in pattern_list:\n columns_with_potential_datetime_obj = list(\n find_and_display_patter_in_series(series=pd.Series(data.columns\n ), pattern=pat))\n for i in columns_with_potential_datetime_obj:\n before_formatting = str(data.loc[0, i])\n if unixtime == True:\n s = pd.to_datetime(data.loc[:, i], errors='coerce', unit='s'\n ).copy()\n data.loc[:, i] = s\n if timezone != None:\n data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone)\n else:\n pass\n else:\n s = pd.to_datetime(data.loc[:, i], errors='coerce', format=\n dt_format).copy()\n data.loc[:, i] = s\n if timezone != None:\n data.loc[:, i] = data.loc[:, i].dt.tz_convert(timezone)\n else:\n pass\n if verbose == True:\n print(f'date time formatted in: {i}')\n print(\n f' - {data.loc[:, i].isnull().sum()} NaN were instroduced by coerce'\n )\n print(\n f' - Example: {before_formatting} -->> {str(data.loc[0, i])}'\n , end='\\n')\n else:\n pass\n return data\n\n\ndef replace_text(*, df, pat='', colnames='all', fillna=np.nan, verbose=True):\n \"\"\" \n searches string with a given pattern and replace it with a new patter (fillna), eg: nan,\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * df Pandas Dataframe\n * searched_pattern \"\", str literal, used by pd.Series.str.contains() \n * colnames default, \"all\", or list with selected colnames in df\n * fillna default numpy.nan, or str literal \n - what do you want to place instead of searched pattern in df\n \n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame DataFramne.copy() with new values,\n * display messages. 
number of replaced straings in each column, and examples of replcaced values\n \"\"\"\n searched_pattern = pat\n col_names = colnames\n if col_names == 'all':\n sel_col_names = list(df.columns)\n else:\n sel_col_names = col_names\n if verbose == True:\n print(\n f'\\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\\n'\n )\n if verbose == False:\n pass\n for i, col_name in enumerate(sel_col_names):\n if is_string_dtype(df[col_name]):\n try:\n positions_to_replace = df[col_name].str.contains(\n searched_pattern, na=False).values\n examples_to_display = [str(x) for x in list(df.loc[list(\n positions_to_replace), col_name].str[0:20].values.\n tolist()[0:3])]\n df.loc[list(positions_to_replace), col_name] = [fillna\n ] * positions_to_replace.sum()\n examples_of_positions_that_were_not_replaced = [str(x) for\n x in list(df.loc[list(positions_to_replace == False),\n col_name].str[0:20].values.tolist()[0:3])]\n if verbose == True:\n perc_of_replaced_pos_in_col = ''.join([str(\n positions_to_replace.sum() / df.shape[0] * 100), '%'])\n print(\n f'{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}'\n )\n print(\n f\" - three examples of replaced postions: {'; '.join(examples_to_display)}\"\n , end='\\n')\n print(\n f\" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}\"\n , end='\\n\\n')\n else:\n pass\n except:\n if verbose == True:\n print(\n f\"\"\"{i} - {col_name} - - probably only missing data datected, Values were not replaced! \n\"\"\"\n )\n else:\n pass\n elif verbose == True:\n print(\n f'{i} - {col_name} - - is not of string type, Values were not replaced! \\n'\n )\n else:\n pass\n return df.copy()\n\n\n<mask token>\n\n\ndef drop_nan(df, method='any', row=True, verbose=True):\n \"\"\"\n function to dropna with thresholds from rows and columns\n . method\n . any : row/column wiht any missing data are removed\n . all : row/column only wiht missing data are removed\n . int, >0 : keeps row/clumns wiht this or larger number of non missing data\n . float, >0 : as in the above, as fraction\n \n \"\"\"\n assert type(df) == pd.DataFrame, 'incorrect df dtype'\n df = df.copy()\n if verbose == True:\n print(df.shape)\n else:\n pass\n if row == True:\n shapeidx, dfaxis = 1, 0\n else:\n shapeidx, dfaxis = 0, 1\n if method == None:\n pass\n elif isinstance(method, str):\n df = df.dropna(how=method, axis=dfaxis)\n elif isinstance(method, int):\n tr = method\n if tr == 0:\n pass\n else:\n if tr >= df.shape[shapeidx]:\n tr = df.shape[shapeidx]\n else:\n pass\n df = df.dropna(thresh=tr, axis=dfaxis)\n elif isinstance(method, float):\n tr = int(np.ceil(df.shape[shapeidx] * method))\n if tr == 0:\n pass\n else:\n if tr >= df.shape[shapeidx]:\n tr = df.shape[shapeidx]\n else:\n pass\n df = df.dropna(thresh=tr, axis=dfaxis)\n else:\n pass\n if verbose == True:\n print(df.shape)\n else:\n pass\n return df\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef find_and_display_patter_in_series(*, series, pattern):\n \"\"\"I used that function when i don't remeber full name of a given column\"\"\"\n res = series.loc[series.str.contains(pattern)]\n return res\n\n\n<mask token>\n\n\ndef find_patter_in_series(*, s, pat, tolist=True):\n \"\"\"\n I used that function when i don't remeber full name of a given column\n \"\"\"\n res = s.loc[s.str.contains(pat)]\n if tolist == True:\n return res.values.tolist()\n else:\n return res\n\n\ndef format_to_datetime(*, data, pattern_list, timezone='UTC', unixtime=\n False, dt_format='%Y-%m-%d %H:%M:%S', verbose=False):\n \"\"\"\n formats columns in df into datetime dtype, and set all times to UTC\n work with unix time units, ie. second number since 1970\n columns in df, are find using full comlumn name or keywords in column name\n \"\"\"\n assert type(data\n ) == pd.DataFrame, 'please provide data in pandas dataframe format'\n if isinstance(pattern_list, str):\n pattern_list = [pattern_list]\n else:\n pass\n for pat in pattern_list:\n columns_with_potential_datetime_obj = list(\n find_and_display_patter_in_series(series=pd.Series(data.columns\n ), pattern=pat))\n for i in columns_with_potential_datetime_obj:\n before_formatting = str(data.loc[0, i])\n if unixtime == True:\n s = pd.to_datetime(data.loc[:, i], errors='coerce', unit='s'\n ).copy()\n data.loc[:, i] = s\n if timezone != None:\n data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone)\n else:\n pass\n else:\n s = pd.to_datetime(data.loc[:, i], errors='coerce', format=\n dt_format).copy()\n data.loc[:, i] = s\n if timezone != None:\n data.loc[:, i] = data.loc[:, i].dt.tz_convert(timezone)\n else:\n pass\n if verbose == True:\n print(f'date time formatted in: {i}')\n print(\n f' - {data.loc[:, i].isnull().sum()} NaN were instroduced by coerce'\n )\n print(\n f' - Example: {before_formatting} -->> {str(data.loc[0, i])}'\n , end='\\n')\n else:\n pass\n return data\n\n\ndef replace_text(*, df, pat='', colnames='all', fillna=np.nan, verbose=True):\n \"\"\" \n searches string with a given pattern and replace it with a new patter (fillna), eg: nan,\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * df Pandas Dataframe\n * searched_pattern \"\", str literal, used by pd.Series.str.contains() \n * colnames default, \"all\", or list with selected colnames in df\n * fillna default numpy.nan, or str literal \n - what do you want to place instead of searched pattern in df\n \n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame DataFramne.copy() with new values,\n * display messages. 
number of replaced straings in each column, and examples of replcaced values\n \"\"\"\n searched_pattern = pat\n col_names = colnames\n if col_names == 'all':\n sel_col_names = list(df.columns)\n else:\n sel_col_names = col_names\n if verbose == True:\n print(\n f'\\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\\n'\n )\n if verbose == False:\n pass\n for i, col_name in enumerate(sel_col_names):\n if is_string_dtype(df[col_name]):\n try:\n positions_to_replace = df[col_name].str.contains(\n searched_pattern, na=False).values\n examples_to_display = [str(x) for x in list(df.loc[list(\n positions_to_replace), col_name].str[0:20].values.\n tolist()[0:3])]\n df.loc[list(positions_to_replace), col_name] = [fillna\n ] * positions_to_replace.sum()\n examples_of_positions_that_were_not_replaced = [str(x) for\n x in list(df.loc[list(positions_to_replace == False),\n col_name].str[0:20].values.tolist()[0:3])]\n if verbose == True:\n perc_of_replaced_pos_in_col = ''.join([str(\n positions_to_replace.sum() / df.shape[0] * 100), '%'])\n print(\n f'{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}'\n )\n print(\n f\" - three examples of replaced postions: {'; '.join(examples_to_display)}\"\n , end='\\n')\n print(\n f\" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}\"\n , end='\\n\\n')\n else:\n pass\n except:\n if verbose == True:\n print(\n f\"\"\"{i} - {col_name} - - probably only missing data datected, Values were not replaced! \n\"\"\"\n )\n else:\n pass\n elif verbose == True:\n print(\n f'{i} - {col_name} - - is not of string type, Values were not replaced! \\n'\n )\n else:\n pass\n return df.copy()\n\n\ndef replace_numeric_values(*, df, colnames='all', lower_limit='none',\n upper_limit='none', equal=False, replace_with=np.nan, verbose=True):\n \"\"\" \n\n Replace numerical values that are outside of range of a values \n prediced with a theoretical limits of a given variable, \n eg less then 0 in weight of a product, \n Provide examples and numbers of replaced instances\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * df : Pandas DataFrame\n * cols_in_df : list, exact colnames of selected or all columns in df\n * lower_limit : int,float,\"none\", if \"none\" no action is taken\n * upper_limit : int,float,\"none\", if \"none\" no action is taken\n * replace_with : str, np.nan, int, float\n * equal : bool, if True, >= and <= values then limits will be replaced,\n if False (default), > and < values then limits will be replaced,\n \n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame DataFramne.copy() with new values,\n * display messages. number of replaced straings in each column, and examples of replcaced values\n \"\"\"\n cols_names = colnames\n if cols_names == 'all':\n cols = list(df.columns)\n else:\n cols = cols_names\n if verbose == True:\n print(\n f\"\\n{''.join(['-'] * 80)} \\n Replacing Numerical Values in {len(cols)} columns\"\n )\n print(\n f' lower filter={lower_limit}, upper filter ={upper_limit}')\n if equal == True:\n print(\n f' Caution, equal=True, ie. 
values >= and <= then requested limits will be replaced'\n )\n print(f\"{''.join(['-'] * 80)}\\n\")\n if verbose == False:\n pass\n total_count = []\n count = 0\n for i, j in enumerate(cols):\n info_lower_filter = 0\n info_upper_filter = 0\n if is_numeric_dtype(df[j]):\n if lower_limit != 'none':\n if equal == True:\n lower_filter = df.loc[:, j] <= lower_limit\n if equal == False:\n lower_filter = df.loc[:, j] < lower_limit\n info_lower_filter = lower_filter.sum()\n df.loc[list(lower_filter), j] = replace_with\n if upper_limit != 'none':\n if equal == True:\n upper_filter = df.loc[:, j] >= upper_limit\n if equal == False:\n upper_filter = df.loc[:, j] > upper_limit\n info_upper_filter = upper_filter.sum()\n df.loc[list(upper_filter), j] = replace_with\n total_count.append(info_upper_filter + info_lower_filter)\n if verbose == True:\n if info_upper_filter + info_lower_filter > 0 and count < 4:\n print(\n f'eg: {i}, {j} : {info_lower_filter} values <{lower_limit}, ...{info_upper_filter} values <{upper_limit}'\n )\n else:\n pass\n count += 1\n elif verbose == True:\n print(f'{i, j} is not of numeric type, values were not replaced !')\n else:\n pass\n if verbose == True:\n if len(total_count) > 3 and pd.Series(total_count).sum() > 0:\n print(\n f\"\"\". and {len(total_count) - 3} other columns had in total {pd.Series(total_count).sum()} replaced values \n\"\"\"\n )\n if pd.Series(total_count).sum() == 0:\n print('No values were replaced in requested columns....')\n else:\n pass\n return df.copy()\n\n\ndef drop_nan(df, method='any', row=True, verbose=True):\n \"\"\"\n function to dropna with thresholds from rows and columns\n . method\n . any : row/column wiht any missing data are removed\n . all : row/column only wiht missing data are removed\n . int, >0 : keeps row/clumns wiht this or larger number of non missing data\n . float, >0 : as in the above, as fraction\n \n \"\"\"\n assert type(df) == pd.DataFrame, 'incorrect df dtype'\n df = df.copy()\n if verbose == True:\n print(df.shape)\n else:\n pass\n if row == True:\n shapeidx, dfaxis = 1, 0\n else:\n shapeidx, dfaxis = 0, 1\n if method == None:\n pass\n elif isinstance(method, str):\n df = df.dropna(how=method, axis=dfaxis)\n elif isinstance(method, int):\n tr = method\n if tr == 0:\n pass\n else:\n if tr >= df.shape[shapeidx]:\n tr = df.shape[shapeidx]\n else:\n pass\n df = df.dropna(thresh=tr, axis=dfaxis)\n elif isinstance(method, float):\n tr = int(np.ceil(df.shape[shapeidx] * method))\n if tr == 0:\n pass\n else:\n if tr >= df.shape[shapeidx]:\n tr = df.shape[shapeidx]\n else:\n pass\n df = df.dropna(thresh=tr, axis=dfaxis)\n else:\n pass\n if verbose == True:\n print(df.shape)\n else:\n pass\n return df\n\n\ndef drop_columns(*, df, columns_to_drop, verbose=True):\n \"\"\"\n Small function to quickly remove columns from, \n by column names stored in the list\n - created to give info on removed columns and whether I am chnaging df in proper way,\n - the function allows for column name duplicates, \n \"\"\"\n assert type(df\n ) == pd.DataFrame, 'please provide df in pandas dataframe format'\n df = df.copy()\n columns_to_drop = list(pd.Series(columns_to_drop).unique())\n if verbose == True:\n print(f'Removing {len(columns_to_drop)} columns from df')\n else:\n pass\n for i, j in enumerate(columns_to_drop):\n try:\n df.drop(columns=[j], axis=1, inplace=True)\n if verbose == True:\n print(f'{i} removing: {j}, ==> new df.shape: {df.shape}')\n else:\n pass\n except:\n if verbose == True:\n print(\n f'{i} .... 
column: {j}, was not found in df, check if name is correct....'\n )\n else:\n pass\n return df\n",
"step-4": "<mask token>\n\n\ndef find_and_display_patter_in_series(*, series, pattern):\n \"\"\"I used that function when i don't remeber full name of a given column\"\"\"\n res = series.loc[series.str.contains(pattern)]\n return res\n\n\ndef load_csv(*, path, filename, sep='\\t', verbose=True):\n \"\"\" \n Loads csv into pandas df, based on pandas.read_scv(), \n Returns error, if file or directoy not found\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * path full path to directory\n * csv_name. full csv file name\n * separator \"\t\", by default\n * display_head bool, True, by default, display df.head(), \n irrespectively when the futions was called. \n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame by Pandas\n\n \"\"\"\n os.chdir(path)\n if len(glob.glob(filename)) == 1:\n df = pd.read_csv(filename, sep=sep, low_memory=False)\n if verbose == True:\n display(df.head(3))\n print(df.shape)\n else:\n pass\n return df\n elif verbose == True:\n print(f'ERROR :csv file {filename}, was not found in: \\n {path}')\n else:\n pass\n\n\ndef find_patter_in_series(*, s, pat, tolist=True):\n \"\"\"\n I used that function when i don't remeber full name of a given column\n \"\"\"\n res = s.loc[s.str.contains(pat)]\n if tolist == True:\n return res.values.tolist()\n else:\n return res\n\n\ndef format_to_datetime(*, data, pattern_list, timezone='UTC', unixtime=\n False, dt_format='%Y-%m-%d %H:%M:%S', verbose=False):\n \"\"\"\n formats columns in df into datetime dtype, and set all times to UTC\n work with unix time units, ie. second number since 1970\n columns in df, are find using full comlumn name or keywords in column name\n \"\"\"\n assert type(data\n ) == pd.DataFrame, 'please provide data in pandas dataframe format'\n if isinstance(pattern_list, str):\n pattern_list = [pattern_list]\n else:\n pass\n for pat in pattern_list:\n columns_with_potential_datetime_obj = list(\n find_and_display_patter_in_series(series=pd.Series(data.columns\n ), pattern=pat))\n for i in columns_with_potential_datetime_obj:\n before_formatting = str(data.loc[0, i])\n if unixtime == True:\n s = pd.to_datetime(data.loc[:, i], errors='coerce', unit='s'\n ).copy()\n data.loc[:, i] = s\n if timezone != None:\n data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone)\n else:\n pass\n else:\n s = pd.to_datetime(data.loc[:, i], errors='coerce', format=\n dt_format).copy()\n data.loc[:, i] = s\n if timezone != None:\n data.loc[:, i] = data.loc[:, i].dt.tz_convert(timezone)\n else:\n pass\n if verbose == True:\n print(f'date time formatted in: {i}')\n print(\n f' - {data.loc[:, i].isnull().sum()} NaN were instroduced by coerce'\n )\n print(\n f' - Example: {before_formatting} -->> {str(data.loc[0, i])}'\n , end='\\n')\n else:\n pass\n return data\n\n\ndef replace_text(*, df, pat='', colnames='all', fillna=np.nan, verbose=True):\n \"\"\" \n searches string with a given pattern and replace it with a new patter (fillna), eg: nan,\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * df Pandas Dataframe\n * searched_pattern \"\", str literal, used by pd.Series.str.contains() \n * colnames default, \"all\", or list with selected colnames in df\n * fillna default numpy.nan, or str literal \n - what do you want to place instead of searched pattern in df\n \n Returns \n _________________ 
_______________________________________________________________________________ \n\n * DataFrame DataFramne.copy() with new values,\n * display messages. number of replaced straings in each column, and examples of replcaced values\n \"\"\"\n searched_pattern = pat\n col_names = colnames\n if col_names == 'all':\n sel_col_names = list(df.columns)\n else:\n sel_col_names = col_names\n if verbose == True:\n print(\n f'\\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\\n'\n )\n if verbose == False:\n pass\n for i, col_name in enumerate(sel_col_names):\n if is_string_dtype(df[col_name]):\n try:\n positions_to_replace = df[col_name].str.contains(\n searched_pattern, na=False).values\n examples_to_display = [str(x) for x in list(df.loc[list(\n positions_to_replace), col_name].str[0:20].values.\n tolist()[0:3])]\n df.loc[list(positions_to_replace), col_name] = [fillna\n ] * positions_to_replace.sum()\n examples_of_positions_that_were_not_replaced = [str(x) for\n x in list(df.loc[list(positions_to_replace == False),\n col_name].str[0:20].values.tolist()[0:3])]\n if verbose == True:\n perc_of_replaced_pos_in_col = ''.join([str(\n positions_to_replace.sum() / df.shape[0] * 100), '%'])\n print(\n f'{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}'\n )\n print(\n f\" - three examples of replaced postions: {'; '.join(examples_to_display)}\"\n , end='\\n')\n print(\n f\" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}\"\n , end='\\n\\n')\n else:\n pass\n except:\n if verbose == True:\n print(\n f\"\"\"{i} - {col_name} - - probably only missing data datected, Values were not replaced! \n\"\"\"\n )\n else:\n pass\n elif verbose == True:\n print(\n f'{i} - {col_name} - - is not of string type, Values were not replaced! \\n'\n )\n else:\n pass\n return df.copy()\n\n\ndef replace_numeric_values(*, df, colnames='all', lower_limit='none',\n upper_limit='none', equal=False, replace_with=np.nan, verbose=True):\n \"\"\" \n\n Replace numerical values that are outside of range of a values \n prediced with a theoretical limits of a given variable, \n eg less then 0 in weight of a product, \n Provide examples and numbers of replaced instances\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * df : Pandas DataFrame\n * cols_in_df : list, exact colnames of selected or all columns in df\n * lower_limit : int,float,\"none\", if \"none\" no action is taken\n * upper_limit : int,float,\"none\", if \"none\" no action is taken\n * replace_with : str, np.nan, int, float\n * equal : bool, if True, >= and <= values then limits will be replaced,\n if False (default), > and < values then limits will be replaced,\n \n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame DataFramne.copy() with new values,\n * display messages. number of replaced straings in each column, and examples of replcaced values\n \"\"\"\n cols_names = colnames\n if cols_names == 'all':\n cols = list(df.columns)\n else:\n cols = cols_names\n if verbose == True:\n print(\n f\"\\n{''.join(['-'] * 80)} \\n Replacing Numerical Values in {len(cols)} columns\"\n )\n print(\n f' lower filter={lower_limit}, upper filter ={upper_limit}')\n if equal == True:\n print(\n f' Caution, equal=True, ie. 
values >= and <= then requested limits will be replaced'\n )\n print(f\"{''.join(['-'] * 80)}\\n\")\n if verbose == False:\n pass\n total_count = []\n count = 0\n for i, j in enumerate(cols):\n info_lower_filter = 0\n info_upper_filter = 0\n if is_numeric_dtype(df[j]):\n if lower_limit != 'none':\n if equal == True:\n lower_filter = df.loc[:, j] <= lower_limit\n if equal == False:\n lower_filter = df.loc[:, j] < lower_limit\n info_lower_filter = lower_filter.sum()\n df.loc[list(lower_filter), j] = replace_with\n if upper_limit != 'none':\n if equal == True:\n upper_filter = df.loc[:, j] >= upper_limit\n if equal == False:\n upper_filter = df.loc[:, j] > upper_limit\n info_upper_filter = upper_filter.sum()\n df.loc[list(upper_filter), j] = replace_with\n total_count.append(info_upper_filter + info_lower_filter)\n if verbose == True:\n if info_upper_filter + info_lower_filter > 0 and count < 4:\n print(\n f'eg: {i}, {j} : {info_lower_filter} values <{lower_limit}, ...{info_upper_filter} values <{upper_limit}'\n )\n else:\n pass\n count += 1\n elif verbose == True:\n print(f'{i, j} is not of numeric type, values were not replaced !')\n else:\n pass\n if verbose == True:\n if len(total_count) > 3 and pd.Series(total_count).sum() > 0:\n print(\n f\"\"\". and {len(total_count) - 3} other columns had in total {pd.Series(total_count).sum()} replaced values \n\"\"\"\n )\n if pd.Series(total_count).sum() == 0:\n print('No values were replaced in requested columns....')\n else:\n pass\n return df.copy()\n\n\ndef drop_nan(df, method='any', row=True, verbose=True):\n \"\"\"\n function to dropna with thresholds from rows and columns\n . method\n . any : row/column wiht any missing data are removed\n . all : row/column only wiht missing data are removed\n . int, >0 : keeps row/clumns wiht this or larger number of non missing data\n . float, >0 : as in the above, as fraction\n \n \"\"\"\n assert type(df) == pd.DataFrame, 'incorrect df dtype'\n df = df.copy()\n if verbose == True:\n print(df.shape)\n else:\n pass\n if row == True:\n shapeidx, dfaxis = 1, 0\n else:\n shapeidx, dfaxis = 0, 1\n if method == None:\n pass\n elif isinstance(method, str):\n df = df.dropna(how=method, axis=dfaxis)\n elif isinstance(method, int):\n tr = method\n if tr == 0:\n pass\n else:\n if tr >= df.shape[shapeidx]:\n tr = df.shape[shapeidx]\n else:\n pass\n df = df.dropna(thresh=tr, axis=dfaxis)\n elif isinstance(method, float):\n tr = int(np.ceil(df.shape[shapeidx] * method))\n if tr == 0:\n pass\n else:\n if tr >= df.shape[shapeidx]:\n tr = df.shape[shapeidx]\n else:\n pass\n df = df.dropna(thresh=tr, axis=dfaxis)\n else:\n pass\n if verbose == True:\n print(df.shape)\n else:\n pass\n return df\n\n\ndef drop_columns(*, df, columns_to_drop, verbose=True):\n \"\"\"\n Small function to quickly remove columns from, \n by column names stored in the list\n - created to give info on removed columns and whether I am chnaging df in proper way,\n - the function allows for column name duplicates, \n \"\"\"\n assert type(df\n ) == pd.DataFrame, 'please provide df in pandas dataframe format'\n df = df.copy()\n columns_to_drop = list(pd.Series(columns_to_drop).unique())\n if verbose == True:\n print(f'Removing {len(columns_to_drop)} columns from df')\n else:\n pass\n for i, j in enumerate(columns_to_drop):\n try:\n df.drop(columns=[j], axis=1, inplace=True)\n if verbose == True:\n print(f'{i} removing: {j}, ==> new df.shape: {df.shape}')\n else:\n pass\n except:\n if verbose == True:\n print(\n f'{i} .... 
column: {j}, was not found in df, check if name is correct....'\n )\n else:\n pass\n return df\n",
"step-5": "# ********************************************************************************** #\n# #\n# Project: Data Frame Explorer # \n# Author: Pawel Rosikiewicz #\n# Contact: prosikiewicz(a)gmail.com #\n# #\n# License: MIT License #\n# Copyright (C) 2021.01.30 Pawel Rosikiewicz #\n# #\n# Permission is hereby granted, free of charge, to any person obtaining a copy #\n# of this software and associated documentation files (the \"Software\"), to deal #\n# in the Software without restriction, including without limitation the rights #\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #\n# copies of the Software, and to permit persons to whom the Software is #\n# furnished to do so, subject to the following conditions: #\n# # \n# The above copyright notice and this permission notice shall be included in all #\n# copies or substantial portions of the Software. #\n# #\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #\n# SOFTWARE. #\n# #\n# ********************************************************************************** #\n\n\n# -*- coding: utf-8 -*-\nimport matplotlib.pyplot as plt\nimport matplotlib as mpl\nimport numpy as np\nimport pandas as pd\nimport random\nimport glob\nimport re\nimport os\nimport seaborn as sns\n\nfrom IPython.display import display\nfrom pandas.api.types import is_numeric_dtype\nfrom pandas.api.types import is_string_dtype\n\n\n\n\n\n\n# Function, ............................................................................\ndef find_and_display_patter_in_series(*, series, pattern):\n \"I used that function when i don't remeber full name of a given column\"\n res = series.loc[series.str.contains(pattern)]\n return res\n\n\n\n# Function, ...........................................................................................\ndef load_csv(*, path, filename, sep=\"\\t\", verbose=True):\n \"\"\" \n Loads csv into pandas df, based on pandas.read_scv(), \n Returns error, if file or directoy not found\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * path full path to directory\n * csv_name. full csv file name\n * separator \"\\t\", by default\n * display_head bool, True, by default, display df.head(), \n irrespectively when the futions was called. 
\n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame by Pandas\n\n \"\"\"\n \n os.chdir(path)\n if len(glob.glob(filename))==1: \n df = pd.read_csv(filename, sep=sep, low_memory=False)\n \n # display example,\n if verbose==True:\n display(df.head(3))\n print(df.shape)\n else:\n pass\n \n # return,\n return df\n \n else:\n if verbose==True:\n print(f\"\"\"ERROR :csv file {filename}, was not found in: \\n {path}\"\"\")\n else:\n pass\n\n\n \n \n \n \n# Function, ............................................................................\ndef find_patter_in_series(*, s, pat, tolist=True):\n '''\n I used that function when i don't remeber full name of a given column\n '''\n res = s.loc[s.str.contains(pat)]\n \n if tolist==True:\n return res.values.tolist()\n else:\n return res \n \n \n \n \n\n \n# Function, ........................................................................................... \ndef format_to_datetime(*, data, pattern_list, timezone='UTC', unixtime=False, dt_format='%Y-%m-%d %H:%M:%S', verbose=False):\n '''\n formats columns in df into datetime dtype, and set all times to UTC\n work with unix time units, ie. second number since 1970\n columns in df, are find using full comlumn name or keywords in column name\n '''\n assert type(data)==pd.DataFrame, \"please provide data in pandas dataframe format\"\n \n if isinstance(pattern_list, str):\n pattern_list = [pattern_list]\n else: \n pass\n \n for pat in pattern_list: \n # find column names using provided patterns or their full names, \n columns_with_potential_datetime_obj = list(find_and_display_patter_in_series(series=pd.Series(data.columns), pattern=pat))\n \n # replace \n for i in columns_with_potential_datetime_obj:\n # keep example of old cell \n before_formatting = str(data.loc[0, i])\n \n # convert to one format\n if unixtime==True:\n s = pd.to_datetime(data.loc[:, i], errors=\"coerce\", unit='s').copy()#,format cannot be used with unit=\"s\", but it will be the same\n data.loc[:, i] = s\n if timezone!=None:\n data.loc[:, i] = data.loc[:, i].dt.tz_localize(timezone)\n else:\n pass\n \n else: \n s = pd.to_datetime(data.loc[:, i], errors=\"coerce\",format=dt_format).copy()\n data.loc[:, i] = s\n if timezone!=None:\n data.loc[:, i] = data.loc[:, i].dt.tz_convert(timezone)\n else:\n pass\n \n # info\n if verbose==True:\n print(f\"date time formatted in: {i}\") \n print(f\" - {data.loc[:, i].isnull().sum()} NaN were instroduced by coerce\")\n print(f\" - Example: {before_formatting} -->> {str(data.loc[0, i])}\", end=\"\\n\")\n else:\n pass\n\n return data \n \n \n \n \n \n \n \n# Function, ...........................................................................................\ndef replace_text(*,df ,pat=\"\", colnames=\"all\", fillna=np.nan, verbose=True):\n \"\"\" \n searches string with a given pattern and replace it with a new patter (fillna), eg: nan,\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * df Pandas Dataframe\n * searched_pattern \"\", str literal, used by pd.Series.str.contains() \n * colnames default, \"all\", or list with selected colnames in df\n * fillna default numpy.nan, or str literal \n - what do you want to place instead of searched pattern in df\n \n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame DataFramne.copy() with new values,\n * display messages. 
number of replaced straings in each column, and examples of replcaced values\n \"\"\"\n \n # for older version, \n searched_pattern = pat\n col_names = colnames\n \n # check col_names with values to replace, \n if col_names==\"all\": \n sel_col_names = list(df.columns)\n else: \n sel_col_names = col_names \n\n # display message header, \n if verbose==True:\n print(f\"\"\"\\nReplacing Text in {len(sel_col_names)} columns: {sel_col_names}\\n\"\"\") \n \n if verbose==False:\n pass\n\n # exchnage searched pattern in each column separately, \n for i, col_name in enumerate(sel_col_names):\n \n # .. test if you really have string values in that column, otherwise it masy be float for all NaN in a column, and no action will be taken \n if is_string_dtype(df[col_name]):\n \n try:\n # .... find postions with a given pattern and select three examples to display for the user, \n positions_to_replace = df[col_name].str.contains(searched_pattern, na=False).values# arr\n examples_to_display = [str(x) for x in list(df.loc[list(positions_to_replace), col_name].str[0:20].values.tolist()[0:3])]\n\n # .... replace postions, and find examples of unchnaged postions,\n df.loc[list(positions_to_replace), col_name] = [fillna]*positions_to_replace.sum() \n examples_of_positions_that_were_not_replaced = [str(x) for x in list(df.loc[list(positions_to_replace==False), col_name].str[0:20].values.tolist()[0:3])]\n\n # .... diplay info,\n if verbose==True:\n perc_of_replaced_pos_in_col = \"\".join([str(positions_to_replace.sum()/df.shape[0]*100),\"%\"])\n print(f\"{i} - {col_name} - - {positions_to_replace.sum()} positions out of {df.shape[0]}, were replaced with {fillna}, ie. {perc_of_replaced_pos_in_col}\")\n print(f\" - three examples of replaced postions: {'; '.join(examples_to_display)}\", end=\"\\n\")\n print(f\" - three examples of unchanged postions: {'; '.join(examples_of_positions_that_were_not_replaced)}\", end=\"\\n\\n\")\n # the second print returns three first examples of exchanged values, just to see what i did,\n else:\n pass\n \n except:\n if verbose==True:\n print(f\"{i} - {col_name} - - probably only missing data datected, Values were not replaced! \\n\") \n else:\n pass\n \n else:\n if verbose==True:\n print(f\"{i} - {col_name} - - is not of string type, Values were not replaced! 
\\n\") \n else:\n pass\n \n return df.copy()\n\n\n \n \n \n\n\n\n# Function, ...........................................................................................\ndef replace_numeric_values(*, df, colnames=\"all\", lower_limit=\"none\", upper_limit=\"none\", equal=False, replace_with=np.nan, verbose=True):\n \"\"\" \n\n Replace numerical values that are outside of range of a values \n prediced with a theoretical limits of a given variable, \n eg less then 0 in weight of a product, \n Provide examples and numbers of replaced instances\n \n Parameters/Input \n _________________ _______________________________________________________________________________ \n\n * df : Pandas DataFrame\n * cols_in_df : list, exact colnames of selected or all columns in df\n * lower_limit : int,float,\"none\", if \"none\" no action is taken\n * upper_limit : int,float,\"none\", if \"none\" no action is taken\n * replace_with : str, np.nan, int, float\n * equal : bool, if True, >= and <= values then limits will be replaced,\n if False (default), > and < values then limits will be replaced,\n \n Returns \n _________________ _______________________________________________________________________________ \n\n * DataFrame DataFramne.copy() with new values,\n * display messages. number of replaced straings in each column, and examples of replcaced values\n \"\"\" \n\n \n cols_names = colnames\n \n # .. check provided col_names,\n if cols_names==\"all\": \n cols = list(df.columns)\n else: \n cols = cols_names \n\n # .. info, header, \n if verbose==True:\n print(f\"\"\"\\n{\"\".join([\"-\"]*80)} \\n Replacing Numerical Values in {len(cols)} columns\"\"\") \n print(f\" lower filter={lower_limit}, upper filter ={upper_limit}\")\n if equal==True:\n print(f\" Caution, equal=True, ie. values >= and <= then requested limits will be replaced\")\n print(f'{\"\".join([\"-\"]*80)}\\n') \n \n if verbose==False:\n pass\n \n \n # .. intelligent info,\n total_count=[]\n\n # .. count, to limit the number of displayed messages,\n count = 0\n\n # .. replace values and collect examples, \n for i, j in enumerate(cols):\n\n # ..... assume no values were replaced, so the messages work later, \n info_lower_filter = 0\n info_upper_filter = 0 \n \n # ..... 
test if the column is of the numeric type:\n # from pandas.api.types import is_numeric_dtype\n if is_numeric_dtype(df[j]):\n \n \n # * replace values < or <= lower limit,\n # - ----------------------------------\n if lower_limit!=\"none\": \n if equal == True:\n lower_filter = df.loc[:,j]<=lower_limit\n if equal == False:\n lower_filter = df.loc[:,j]<lower_limit\n \n # info,\n info_lower_filter=lower_filter.sum()\n df.loc[list(lower_filter),j]=replace_with\n \n \n # * replace values > or >= upper limit,\n # - ----------------------------------\n if upper_limit!=\"none\": \n if equal == True:\n upper_filter = df.loc[:,j]>=upper_limit\n if equal == False:\n upper_filter = df.loc[:,j]>upper_limit\n \n # info,\n info_upper_filter=upper_filter.sum()\n df.loc[list(upper_filter),j]=replace_with \n \n # * find how many values were replaced, and add that to the total_count list \n total_count.append(info_upper_filter+info_lower_filter)\n \n # * display examples for 3 first columns with replaced values,\n if verbose==True:\n if info_upper_filter+info_lower_filter>0 and count <4:\n print(f\"eg: {i}, {j} : {info_lower_filter} values <{lower_limit}, ...{info_upper_filter} values <{upper_limit}\")\n else:\n pass\n\n # * add 1 to count, to limit the number of displayed examples,\n count += 1 \n \n else:\n if verbose==True:\n print(f\"{i, j} is not of numeric type, values were not replaced !\")\n else:\n pass\n \n # .. additional message, if more then 2 columns had replaced values, \n if verbose==True:\n if len(total_count)>3 and pd.Series(total_count).sum()>0:\n print(f\". and {len(total_count)-3} other columns had in total {pd.Series(total_count).sum()} replaced values \\n\")\n\n # .. message in case no values vere replaced at all, \n if pd.Series(total_count).sum()==0:\n print(\"No values were replaced in requested columns....\")\n \n else:\n pass\n \n # .. return, \n return df.copy()\n \n \n \n \n\n \n \n# function, ...................................................\ndef drop_nan(df, method=\"any\", row=True, verbose=True): \n '''\n function to dropna with thresholds from rows and columns\n . method\n . any : row/column wiht any missing data are removed\n . all : row/column only wiht missing data are removed\n . int, >0 : keeps row/clumns wiht this or larger number of non missing data\n . 
float, >0 : as in the above, as fraction\n \n '''\n \n assert type(df)==pd.DataFrame, \"incorrect df dtype\"\n df = df.copy()\n \n if verbose==True:\n print(df.shape)\n else:\n pass\n \n # set funtion for rows or columns, \n if row==True:\n shapeidx, dfaxis = 1, 0\n else:\n shapeidx, dfaxis = 0, 1\n \n # use threshold or \"all\", or None for do nothing, \n if method==None:\n pass\n\n elif isinstance(method, str):\n df = df.dropna(how=method, axis=dfaxis) # removes rows with NaN in all columns \n\n elif isinstance(method, int):\n tr = method\n if tr==0:\n pass\n else:\n if tr>=df.shape[shapeidx]:\n tr=df.shape[shapeidx]\n else:\n pass \n df = df.dropna(thresh=tr, axis=dfaxis) # eg Keep only the rows with at least 2 non-NA value\n\n elif isinstance(method, float):\n tr = int(np.ceil(df.shape[shapeidx]*(method)))\n if tr==0:\n pass\n else:\n if tr>=df.shape[shapeidx]:\n tr=df.shape[shapeidx]\n else:\n pass \n df = df.dropna(thresh=tr, axis=dfaxis) # eg Keep only the rows with at least 2 non-NA value\n else:\n pass\n \n # info and return\n if verbose==True:\n print(df.shape)\n else:\n pass\n return df\n \n \n \n \n \n \n \n \n# Function, ...........................................................................................\ndef drop_columns(*, df, columns_to_drop, verbose=True):\n \"\"\"\n Small function to quickly remove columns from, \n by column names stored in the list\n - created to give info on removed columns and whether I am chnaging df in proper way,\n - the function allows for column name duplicates, \n \"\"\"\n \n assert type(df)==pd.DataFrame, \"please provide df in pandas dataframe format\"\n df = df.copy()\n \n # find unique values in a list, just in case I made the mistake, \n columns_to_drop = list(pd.Series(columns_to_drop).unique())\n\n # .. info, header, \n if verbose==True:\n print(f\"\"\"Removing {len(columns_to_drop)} columns from df\"\"\") \n else:\n pass\n\n \n # remove columns one by one, \n for i,j in enumerate(columns_to_drop):\n try:\n df.drop(columns=[j], axis=1, inplace=True)\n if verbose==True:\n print(f\"{i} removing: {j}, ==> new df.shape: {df.shape}\")\n else:\n pass\n \n except:\n if verbose==True:\n print(f\"{i} .... column: {j}, was not found in df, check if name is correct....\")\n else:\n pass\n \n return df\n\n",
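# ---------------------------------------------------------------------------
# Usage sketch for the three helpers above (illustrative only; the toy data
# and the limits below are made up for the example):
if __name__ == "__main__":
    toy = pd.DataFrame({
        "weight": [2.5, -1.0, 3.2, 120.0],  # -1.0 and 120.0 fall outside the assumed limits
        "label": ["a", "b", None, "d"],
    })
    # replace physically impossible weights (<0 or >100) with NaN,
    toy = replace_numeric_values(df=toy, colnames=["weight"], lower_limit=0, upper_limit=100, verbose=False)
    # keep only the rows with at least 2 non-missing values,
    toy = drop_nan(toy, method=2, row=True, verbose=False)
    # finally, drop a column by its name,
    toy = drop_columns(df=toy, columns_to_drop=["label"], verbose=False)
    print(toy)
# ---------------------------------------------------------------------------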
"step-ids": [
4,
5,
7,
8,
10
]
}
|
[
4,
5,
7,
8,
10
] |
y = 10
x = 'Peace and quiet'  # translated; the original Russian sample text was 'Тишь да гладь'
print(f'Text: {x}')
print(f'Number: {y}')
a1 = input('Enter the first number: ')
a2 = input('Enter the second number: ')
b1 = input('Enter the first string: ')
b2 = input('Enter the second string: ')
print(f'You entered the numbers: {a1}/{a2}')
print(f'You entered the strings: {b1} / {b2}')
# coding=utf-8
import pytest
from twitter_tunes.scripts import redis_data
from mock import patch
REDIS_PARSE = [
(b"{'trend3': 'url3', 'trend2': 'url2', 'trend1': 'url1'}",
{'trend1': 'url1', 'trend2': 'url2', 'trend3': 'url3'}),
(b"{}", {}),
(b"{'hello':'its me'}", {'hello': 'its me'}),
(b"{'trends': ['trend1', 'trend2', 'trend3']}",
{'trends': ['trend1', 'trend2', 'trend3']}),
(b"{'bob': []}",
{'bob': []}),
(b"{'hello': [u'its me']}", {'hello': ['its me']}),
]
GOOD_REDIS_RETURN = b"{'trend3': 'url3', 'trend2': 'url2', 'trend1': 'url1'}"
TWITTER_TRENDS = ["D'Angelo Russell",
'#ThenItAllWentHorriblyWrong',
'#SELFIEFORSEB',
'#ILikeWhatYouHave',
'#DolanTwinsNewVideo',
'#ManateePickUpLines',
'Wendy Bell',
'Brannen Greene',
'Jon Lester',
'Alison Rapp']
PARSE_LIST = [
(["D'Angelo Russell"], ['D Angelo Russell']),
(["B'O'B"], ['B O B']),
(["D''Angelo Russell"], ['D Angelo Russell']),
(["''"], [' ']),
(["D'Angelo Russ'ell"], ['D Angelo Russ ell']),
]
@pytest.mark.parametrize('data, parsed', REDIS_PARSE)
def test_parse_redis_data(data, parsed):
"""Test to see if data dict in bytes is parsed."""
assert redis_data.parse_redis_data(data) == parsed
def test_parse_redis_data_error():
"""Test to see if parse redis raises value error if bad input."""
with pytest.raises(ValueError):
redis_data.parse_redis_data(b"this is some data")
@patch('redis.from_url')
def test_get_redis_data_good_redis_key(from_url):
"""Test to see if get redis data returns data dictionary."""
mock_method = from_url().get
mock_method.return_value = GOOD_REDIS_RETURN
assert redis_data.get_redis_data('trends') == {'trend1': 'url1',
'trend2': 'url2',
'trend3': 'url3'}
@patch('redis.from_url')
def test_get_redis_data_bad_redis_key(from_url):
"""Test to see if get redis data returns data dictionary."""
mock_method = from_url().get
mock_method.return_value = None
assert redis_data.get_redis_data('bad') == {}
@patch('redis.from_url')
def test_set_redis_data(from_url):
"""Test to see if set redis data is called."""
mock_method = from_url().set
redis_data.set_redis_data('trends', 'val')
assert mock_method.call_count == 1
@patch('redis.from_url')
def test_set_redis_data_empty(from_url):
"""Test to see if set redis data is called with empty data."""
mock_method = from_url().set
redis_data.set_redis_data('trends', {})
assert mock_method.call_count == 1
def test_set_redis_no_val():
"""Test if set data fails with no arguments."""
with pytest.raises(TypeError):
redis_data.set_redis_data('key')
@pytest.mark.parametrize('data, result', PARSE_LIST)
def test_parse_redis_twitter_trends(data, result):
"""Test trend parser to remove apostrophes from trends."""
assert redis_data.redis_parse_twitter_trends(data) == result
@patch('redis.from_url')
def test_redis_set_trends(from_url):
"""Test the redis main function."""
mock_method = from_url().set
redis_data.set_redis_trend_list(TWITTER_TRENDS)
assert mock_method.call_count == 1
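# ---------------------------------------------------------------------------
# A minimal sketch of functions that would satisfy the tests above
# (hypothetical -- inferred from the expected behaviour, not the actual
# twitter_tunes.scripts.redis_data implementation):
import ast
import re


def _sketch_parse_redis_data(data):
    """Parse the bytes repr of a dict back into a dict; ValueError on bad input."""
    try:
        parsed = ast.literal_eval(data.decode('utf-8'))
    except (SyntaxError, ValueError):
        raise ValueError('could not parse redis data')
    if not isinstance(parsed, dict):
        raise ValueError('expected a dict literal')
    return parsed


def _sketch_parse_twitter_trends(trends):
    """Collapse each run of apostrophes in a trend name into a single space."""
    return [re.sub(r"'+", ' ', trend) for trend in trends]
# ---------------------------------------------------------------------------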
from unittest import mock
import pytest
from lms.models import GroupInfo
from lms.services.group_info import GroupInfoService
from tests import factories
class TestGroupInfoService:
AUTHORITY = "TEST_AUTHORITY_PROVIDED_ID"
def test_upsert_group_info_adds_a_new_if_none_exists(self, db_session, svc, params):
course = factories.Course(authority_provided_id=self.AUTHORITY)
svc.upsert_group_info(course, params=params)
group_info = self.get_inserted_group_info(db_session)
assert group_info.application_instance == course.application_instance
assert group_info.context_title == params["context_title"]
assert group_info.context_label == params["context_label"]
assert group_info.type == "course_group"
def test_upsert_group_info_updates_an_existing_if_one_already_exists(
self, db_session, svc, params, pre_existing_group
):
db_session.add(pre_existing_group)
new_application_instance = factories.ApplicationInstance()
# Sanity check that we can change the application instance
assert pre_existing_group.application_instance != new_application_instance
svc.upsert_group_info(
factories.Course(
authority_provided_id=self.AUTHORITY,
application_instance=new_application_instance,
),
params=dict(params, context_title="NEW_TITLE"),
)
group_info = self.get_inserted_group_info(db_session)
# This is very strange, but you can "steal" a group info row from
# another application instance
assert group_info.application_instance == new_application_instance
assert group_info.context_label == params["context_label"]
assert group_info.context_title == "NEW_TITLE"
assert group_info.type == "course_group"
def test_upsert_group_info_ignores_non_metadata_params(
self, db_session, svc, params
):
svc.upsert_group_info(
factories.Course(authority_provided_id=self.AUTHORITY),
params=dict(
params,
id="IGNORE ME 1",
authority_provided_id="IGNORE ME 2",
something_unrelated="IGNORED ME 3",
),
)
group_info = self.get_inserted_group_info(db_session)
assert group_info.authority_provided_id == self.AUTHORITY
assert group_info.id != "IGNORE ME 1"
@pytest.mark.usefixtures("user_is_instructor")
def test_upsert_group_info_records_instructors_with_group_info(
self, db_session, svc, pyramid_request
):
svc.upsert_group_info(
factories.Course(authority_provided_id=self.AUTHORITY), params={}
)
group_info = self.get_inserted_group_info(db_session)
assert len(group_info.instructors) == 1
assert (
group_info.instructors[0]["username"]
== pyramid_request.lti_user.h_user.username
)
assert group_info.instructors[0]["email"] == "test_email"
@pytest.mark.usefixtures("user_is_learner")
def test_upsert_group_info_doesnt_record_learners_with_group_info(
self, db_session, svc
):
svc.upsert_group_info(
factories.Course(authority_provided_id=self.AUTHORITY), params={}
)
group_info = self.get_inserted_group_info(db_session)
assert group_info.instructors == []
def get_inserted_group_info(self, db_session):
return (
db_session.query(GroupInfo)
.filter_by(authority_provided_id=self.AUTHORITY)
.one()
)
@pytest.fixture
def svc(self, pyramid_request):
return GroupInfoService(mock.sentinel.context, pyramid_request)
@pytest.fixture
def params(self):
return {
column: f"TEST_{column.upper()}"
for column in GroupInfo.columns()
if column not in ("consumer_key", "_info", "application_instance_id")
}
@pytest.fixture(
params=(True, False), ids=["GroupInfo w/o info", "GroupInfo w/info"]
)
def pre_existing_group(self, application_instance, request, params):
pre_existing_group = GroupInfo(
**dict(
params,
id=None,
authority_provided_id=self.AUTHORITY,
application_instance_id=application_instance.id,
)
)
if request.param:
pre_existing_group.info = None
return pre_existing_group
@pytest.fixture(autouse=True)
def with_existing_group_infos(self):
# Add some "noise" GroupInfo to make the tests more realistic
factories.GroupInfo.build_batch(3)
@pytest.fixture
def pyramid_request(self, pyramid_request):
pyramid_request.lti_user.email = "test_email"
return pyramid_request
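# ---------------------------------------------------------------------------
# Toy, dependency-free illustration of the upsert semantics the tests above
# exercise: rows are keyed by authority_provided_id, metadata params are
# merged in, and an existing row's application_instance can be "stolen" by a
# later caller. All names below are illustrative assumptions, not the actual
# lms implementation.
def _toy_upsert(rows, authority_provided_id, application_instance, **metadata):
    row = rows.setdefault(authority_provided_id, {})
    row["application_instance"] = application_instance  # may "steal" the row
    row.update(metadata)
    row["type"] = "course_group"
    return row


_rows = {}
_toy_upsert(_rows, "AUTH_ID", "instance-1", context_title="OLD_TITLE")
_toy_upsert(_rows, "AUTH_ID", "instance-2", context_title="NEW_TITLE")
assert _rows["AUTH_ID"]["application_instance"] == "instance-2"
assert _rows["AUTH_ID"]["context_title"] == "NEW_TITLE"
# ---------------------------------------------------------------------------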
import logging
from typing import Sequence
from django.core.exceptions import ValidationError
from django.db import IntegrityError
from django.db.models import F, Q
from django.utils import timezone
from sentry_sdk import capture_exception
from sentry.models import (
Environment,
Project,
Release,
ReleaseEnvironment,
ReleaseProjectEnvironment,
ReleaseStatus,
)
from sentry.release_health import release_monitor
from sentry.release_health.release_monitor.base import Totals
from sentry.tasks.base import instrumented_task
from sentry.utils import metrics
CHUNK_SIZE = 1000
MAX_SECONDS = 60
logger = logging.getLogger("sentry.tasks.releasemonitor")
@instrumented_task(
name="sentry.release_health.tasks.monitor_release_adoption",
queue="releasemonitor",
default_retry_delay=5,
max_retries=5,
) # type: ignore
def monitor_release_adoption(**kwargs) -> None:
metrics.incr("sentry.tasks.monitor_release_adoption.start", sample_rate=1.0)
with metrics.timer(
"sentry.tasks.monitor_release_adoption.process_projects_with_sessions", sample_rate=1.0
):
for org_id, project_ids in release_monitor.fetch_projects_with_recent_sessions().items():
process_projects_with_sessions.delay(org_id, project_ids)
@instrumented_task(
name="sentry.tasks.process_projects_with_sessions",
queue="releasemonitor",
default_retry_delay=5,
max_retries=5,
) # type: ignore
def process_projects_with_sessions(org_id, project_ids) -> None:
# Takes a single org id and a list of project ids
with metrics.timer("sentry.tasks.monitor_release_adoption.process_projects_with_sessions.core"):
# Set the `has_sessions` flag for these projects
Project.objects.filter(
organization_id=org_id,
id__in=project_ids,
flags=F("flags").bitand(~Project.flags.has_sessions),
).update(flags=F("flags").bitor(Project.flags.has_sessions))
totals = release_monitor.fetch_project_release_health_totals(org_id, project_ids)
adopted_ids = adopt_releases(org_id, totals)
cleanup_adopted_releases(project_ids, adopted_ids)
def adopt_releases(org_id: int, totals: Totals) -> Sequence[int]:
    # Using the per-project/per-environment totals fetched above, mark any releases as adopted if they reach a threshold.
adopted_ids = []
with metrics.timer(
"sentry.tasks.monitor_release_adoption.process_projects_with_sessions.updates"
):
for project_id, project_totals in totals.items():
for environment, environment_totals in project_totals.items():
total_releases = len(environment_totals["releases"])
for release_version in environment_totals["releases"]:
threshold = 0.1 / total_releases
if (
environment
and environment_totals["total_sessions"] != 0
and environment_totals["releases"][release_version]
/ environment_totals["total_sessions"]
>= threshold
):
rpe = None
try:
rpe = ReleaseProjectEnvironment.objects.get(
project_id=project_id,
release_id=Release.objects.get(
organization=org_id, version=release_version
).id,
environment__name=environment,
environment__organization_id=org_id,
)
updates = {}
if rpe.adopted is None:
updates["adopted"] = timezone.now()
if rpe.unadopted is not None:
updates["unadopted"] = None
if updates:
rpe.update(**updates)
except (Release.DoesNotExist, ReleaseProjectEnvironment.DoesNotExist):
metrics.incr("sentry.tasks.process_projects_with_sessions.creating_rpe")
try:
env = Environment.objects.get_or_create(
name=environment, organization_id=org_id
)[0]
try:
release = Release.objects.get_or_create(
organization_id=org_id,
version=release_version,
defaults={
"status": ReleaseStatus.OPEN,
},
)[0]
except IntegrityError:
release = Release.objects.get(
organization_id=org_id, version=release_version
)
except ValidationError:
release = None
logger.exception(
"sentry.tasks.process_projects_with_sessions.creating_rpe.ValidationError",
extra={
"org_id": org_id,
"release_version": release_version,
},
)
if release:
release.add_project(Project.objects.get(id=project_id))
ReleaseEnvironment.objects.get_or_create(
environment=env, organization_id=org_id, release=release
)
rpe = ReleaseProjectEnvironment.objects.create(
project_id=project_id,
release_id=release.id,
environment=env,
adopted=timezone.now(),
)
except (
Project.DoesNotExist,
Environment.DoesNotExist,
Release.DoesNotExist,
ReleaseEnvironment.DoesNotExist,
) as exc:
metrics.incr(
"sentry.tasks.process_projects_with_sessions.skipped_update"
)
capture_exception(exc)
if rpe:
adopted_ids.append(rpe.id)
return adopted_ids
def cleanup_adopted_releases(project_ids: Sequence[int], adopted_ids: Sequence[int]) -> None:
# Cleanup; adopted releases need to be marked as unadopted if they are not in `adopted_ids`
with metrics.timer(
"sentry.tasks.monitor_release_adoption.process_projects_with_sessions.cleanup"
):
ReleaseProjectEnvironment.objects.filter(
project_id__in=project_ids, unadopted__isnull=True
).exclude(Q(adopted=None) | Q(id__in=adopted_ids)).update(unadopted=timezone.now())
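# ---------------------------------------------------------------------------
# Minimal illustration of the adoption rule used in adopt_releases above:
# within one project/environment, a release counts as adopted once its share
# of sessions reaches 0.1 / <number of releases seen in that environment>.
# (Standalone toy; the numbers below are illustrative only.)
def _is_adopted(release_sessions: int, total_sessions: int, total_releases: int) -> bool:
    if total_sessions == 0:
        return False
    threshold = 0.1 / total_releases
    return release_sessions / total_sessions >= threshold


assert _is_adopted(release_sessions=5, total_sessions=1000, total_releases=20)      # 0.5% >= 0.5%
assert not _is_adopted(release_sessions=4, total_sessions=1000, total_releases=20)  # 0.4% < 0.5%
# ---------------------------------------------------------------------------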
"step-5": "import logging\nfrom typing import Sequence\n\nfrom django.core.exceptions import ValidationError\nfrom django.db import IntegrityError\nfrom django.db.models import F, Q\nfrom django.utils import timezone\nfrom sentry_sdk import capture_exception\n\nfrom sentry.models import (\n Environment,\n Project,\n Release,\n ReleaseEnvironment,\n ReleaseProjectEnvironment,\n ReleaseStatus,\n)\nfrom sentry.release_health import release_monitor\nfrom sentry.release_health.release_monitor.base import Totals\nfrom sentry.tasks.base import instrumented_task\nfrom sentry.utils import metrics\n\nCHUNK_SIZE = 1000\nMAX_SECONDS = 60\n\nlogger = logging.getLogger(\"sentry.tasks.releasemonitor\")\n\n\n@instrumented_task(\n name=\"sentry.release_health.tasks.monitor_release_adoption\",\n queue=\"releasemonitor\",\n default_retry_delay=5,\n max_retries=5,\n) # type: ignore\ndef monitor_release_adoption(**kwargs) -> None:\n metrics.incr(\"sentry.tasks.monitor_release_adoption.start\", sample_rate=1.0)\n with metrics.timer(\n \"sentry.tasks.monitor_release_adoption.process_projects_with_sessions\", sample_rate=1.0\n ):\n for org_id, project_ids in release_monitor.fetch_projects_with_recent_sessions().items():\n process_projects_with_sessions.delay(org_id, project_ids)\n\n\n@instrumented_task(\n name=\"sentry.tasks.process_projects_with_sessions\",\n queue=\"releasemonitor\",\n default_retry_delay=5,\n max_retries=5,\n) # type: ignore\ndef process_projects_with_sessions(org_id, project_ids) -> None:\n # Takes a single org id and a list of project ids\n\n with metrics.timer(\"sentry.tasks.monitor_release_adoption.process_projects_with_sessions.core\"):\n # Set the `has_sessions` flag for these projects\n Project.objects.filter(\n organization_id=org_id,\n id__in=project_ids,\n flags=F(\"flags\").bitand(~Project.flags.has_sessions),\n ).update(flags=F(\"flags\").bitor(Project.flags.has_sessions))\n\n totals = release_monitor.fetch_project_release_health_totals(org_id, project_ids)\n\n adopted_ids = adopt_releases(org_id, totals)\n\n cleanup_adopted_releases(project_ids, adopted_ids)\n\n\ndef adopt_releases(org_id: int, totals: Totals) -> Sequence[int]:\n # Using the totals calculated in sum_sessions_and_releases, mark any releases as adopted if they reach a threshold.\n adopted_ids = []\n with metrics.timer(\n \"sentry.tasks.monitor_release_adoption.process_projects_with_sessions.updates\"\n ):\n for project_id, project_totals in totals.items():\n for environment, environment_totals in project_totals.items():\n total_releases = len(environment_totals[\"releases\"])\n for release_version in environment_totals[\"releases\"]:\n threshold = 0.1 / total_releases\n if (\n environment\n and environment_totals[\"total_sessions\"] != 0\n and environment_totals[\"releases\"][release_version]\n / environment_totals[\"total_sessions\"]\n >= threshold\n ):\n rpe = None\n try:\n rpe = ReleaseProjectEnvironment.objects.get(\n project_id=project_id,\n release_id=Release.objects.get(\n organization=org_id, version=release_version\n ).id,\n environment__name=environment,\n environment__organization_id=org_id,\n )\n\n updates = {}\n if rpe.adopted is None:\n updates[\"adopted\"] = timezone.now()\n\n if rpe.unadopted is not None:\n updates[\"unadopted\"] = None\n\n if updates:\n rpe.update(**updates)\n\n except (Release.DoesNotExist, ReleaseProjectEnvironment.DoesNotExist):\n metrics.incr(\"sentry.tasks.process_projects_with_sessions.creating_rpe\")\n try:\n env = Environment.objects.get_or_create(\n name=environment, 
organization_id=org_id\n )[0]\n try:\n release = Release.objects.get_or_create(\n organization_id=org_id,\n version=release_version,\n defaults={\n \"status\": ReleaseStatus.OPEN,\n },\n )[0]\n except IntegrityError:\n release = Release.objects.get(\n organization_id=org_id, version=release_version\n )\n except ValidationError:\n release = None\n logger.exception(\n \"sentry.tasks.process_projects_with_sessions.creating_rpe.ValidationError\",\n extra={\n \"org_id\": org_id,\n \"release_version\": release_version,\n },\n )\n\n if release:\n release.add_project(Project.objects.get(id=project_id))\n\n ReleaseEnvironment.objects.get_or_create(\n environment=env, organization_id=org_id, release=release\n )\n\n rpe = ReleaseProjectEnvironment.objects.create(\n project_id=project_id,\n release_id=release.id,\n environment=env,\n adopted=timezone.now(),\n )\n except (\n Project.DoesNotExist,\n Environment.DoesNotExist,\n Release.DoesNotExist,\n ReleaseEnvironment.DoesNotExist,\n ) as exc:\n metrics.incr(\n \"sentry.tasks.process_projects_with_sessions.skipped_update\"\n )\n capture_exception(exc)\n if rpe:\n adopted_ids.append(rpe.id)\n\n return adopted_ids\n\n\ndef cleanup_adopted_releases(project_ids: Sequence[int], adopted_ids: Sequence[int]) -> None:\n # Cleanup; adopted releases need to be marked as unadopted if they are not in `adopted_ids`\n with metrics.timer(\n \"sentry.tasks.monitor_release_adoption.process_projects_with_sessions.cleanup\"\n ):\n ReleaseProjectEnvironment.objects.filter(\n project_id__in=project_ids, unadopted__isnull=True\n ).exclude(Q(adopted=None) | Q(id__in=adopted_ids)).update(unadopted=timezone.now())\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
"""
This is the interface that allows for creating nested lists.
You should not implement it, or speculate about its implementation
class NestedInteger(object):
def isInteger(self):
# @return {boolean} True if this NestedInteger holds a single integer,
# rather than a nested list.
def getInteger(self):
# @return {int} the single integer that this NestedInteger holds,
# if it holds a single integer
# Return None if this NestedInteger holds a nested list
def getList(self):
# @return {NestedInteger[]} the nested list that this NestedInteger holds,
# if it holds a nested list
# Return None if this NestedInteger holds a single integer
"""
# Version 1: DFS Recursive
class Solution(object):
# @param {NestedInteger[]} nestedList a list of NestedInteger Object
# @return {int} an integer
def depthSum(self, nestedList):
return self.dfs(nestedList, 1)
def dfs(self, nestedList, depth):
sum = 0
for item in nestedList:
if item.isInteger():
sum += item.getInteger() * depth
else:
sum += self.dfs(item.getList(), depth + 1)
return sum
# Version 2: BFS, Non-Recursive
class Solution(object):
# @param {NestedInteger[]} nestedList a list of NestedInteger Object
# @return {int} an integer
def depthSum(self, nestedList):
if len(nestedList) == 0:
return 0
from queue import Queue
q = Queue()
sum = 0
depth = 1
for item in nestedList:
q.put(item)
while not q.empty():
for _ in range(q.qsize()):
item = q.get()
if item.isInteger():
sum += item.getInteger() * depth
else:
for next in item.getList():
q.put(next)
depth += 1
return sum
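
# Local sanity check -- a minimal, hypothetical stand-in for the
# NestedInteger interface (not part of the judge's API), driving the
# BFS version above:
class NI(object):
    def __init__(self, value):
        self.value = value
    def isInteger(self):
        return isinstance(self.value, int)
    def getInteger(self):
        return self.value if self.isInteger() else None
    def getList(self):
        return None if self.isInteger() else self.value

# [[1,1],2,[1,1]]: four 1s at depth 2 plus one 2 at depth 1 -> 10
nested = [NI([NI(1), NI(1)]), NI(2), NI([NI(1), NI(1)])]
print(Solution().depthSum(nested))  # 10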
|
normal
|
{
"blob_id": "bb81027ed5311e625591d98193997e5c7b533b70",
"index": 4945,
"step-1": "<mask token>\n\n\nclass Solution(object):\n\n def depthSum(self, nestedList):\n if len(nestedList) == 0:\n return 0\n from queue import Queue\n q = Queue()\n sum = 0\n depth = 1\n for item in nestedList:\n q.put(item)\n while not q.empty():\n for _ in range(q.qsize()):\n item = q.get()\n if item.isInteger():\n sum += item.getInteger() * depth\n else:\n for next in item.getList():\n q.put(next)\n depth += 1\n return sum\n",
"step-2": "<mask token>\n\n\nclass Solution(object):\n <mask token>\n <mask token>\n\n\nclass Solution(object):\n\n def depthSum(self, nestedList):\n if len(nestedList) == 0:\n return 0\n from queue import Queue\n q = Queue()\n sum = 0\n depth = 1\n for item in nestedList:\n q.put(item)\n while not q.empty():\n for _ in range(q.qsize()):\n item = q.get()\n if item.isInteger():\n sum += item.getInteger() * depth\n else:\n for next in item.getList():\n q.put(next)\n depth += 1\n return sum\n",
"step-3": "<mask token>\n\n\nclass Solution(object):\n\n def depthSum(self, nestedList):\n return self.dfs(nestedList, 1)\n <mask token>\n\n\nclass Solution(object):\n\n def depthSum(self, nestedList):\n if len(nestedList) == 0:\n return 0\n from queue import Queue\n q = Queue()\n sum = 0\n depth = 1\n for item in nestedList:\n q.put(item)\n while not q.empty():\n for _ in range(q.qsize()):\n item = q.get()\n if item.isInteger():\n sum += item.getInteger() * depth\n else:\n for next in item.getList():\n q.put(next)\n depth += 1\n return sum\n",
"step-4": "<mask token>\n\n\nclass Solution(object):\n\n def depthSum(self, nestedList):\n return self.dfs(nestedList, 1)\n\n def dfs(self, nestedList, depth):\n sum = 0\n for item in nestedList:\n if item.isInteger():\n sum += item.getInteger() * depth\n else:\n sum += self.dfs(item.getList(), depth + 1)\n return sum\n\n\nclass Solution(object):\n\n def depthSum(self, nestedList):\n if len(nestedList) == 0:\n return 0\n from queue import Queue\n q = Queue()\n sum = 0\n depth = 1\n for item in nestedList:\n q.put(item)\n while not q.empty():\n for _ in range(q.qsize()):\n item = q.get()\n if item.isInteger():\n sum += item.getInteger() * depth\n else:\n for next in item.getList():\n q.put(next)\n depth += 1\n return sum\n",
"step-5": "\"\"\"\nThis is the interface that allows for creating nested lists.\nYou should not implement it, or speculate about its implementation\n\nclass NestedInteger(object):\n def isInteger(self):\n # @return {boolean} True if this NestedInteger holds a single integer,\n # rather than a nested list.\n\n def getInteger(self):\n # @return {int} the single integer that this NestedInteger holds,\n # if it holds a single integer\n # Return None if this NestedInteger holds a nested list\n\n def getList(self):\n # @return {NestedInteger[]} the nested list that this NestedInteger holds,\n # if it holds a nested list\n # Return None if this NestedInteger holds a single integer\n\"\"\"\n\n\n# Version 1: DFS Recursive\nclass Solution(object):\n # @param {NestedInteger[]} nestedList a list of NestedInteger Object\n # @return {int} an integer\n def depthSum(self, nestedList):\n return self.dfs(nestedList, 1)\n\n def dfs(self, nestedList, depth):\n sum = 0\n for item in nestedList:\n if item.isInteger():\n sum += item.getInteger() * depth\n else:\n sum += self.dfs(item.getList(), depth + 1)\n\n return sum\n\n\n\n\n# Version 2: BFS, Non-Recursive\nclass Solution(object):\n # @param {NestedInteger[]} nestedList a list of NestedInteger Object\n # @return {int} an integer\n def depthSum(self, nestedList):\n if len(nestedList) == 0:\n return 0\n\n from queue import Queue\n q = Queue()\n sum = 0\n depth = 1\n\n for item in nestedList:\n q.put(item)\n\n while not q.empty():\n for _ in range(q.qsize()):\n item = q.get()\n if item.isInteger():\n sum += item.getInteger() * depth\n else:\n for next in item.getList():\n q.put(next)\n depth += 1\n\n return sum",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import base64
import os

from flask import Flask
from flask import request

result = ""
app = Flask(__name__)


@app.route('/postjson', methods=['POST'])
def postJsonHandler():
    global result
    content = request.get_json()
    result = content["aaa"]  # crop name selected by the client
    os.chdir("/home/ec2-user/sdpd")
    # The image arrives as a base64-encoded string.
    with open("image.jpg", "wb") as fh:
        fh.write(base64.b64decode(content["encode"]))
    return 'JSON posted'


@app.route('/getjson')
def getJsonHandler():
    global result
    print(result)
    if result == "tomato":
        os.chdir("/home/ec2-user/sdpd/tomato")
    elif result == "potato":
        os.chdir("/home/ec2-user/sdpd/tensor")
    elif result == "corn":
        os.chdir("/home/ec2-user/sdpd/corn")
    elif result == "grape":
        os.chdir("/home/ec2-user/sdpd/grape")
    # Run the retrained classifier for the selected crop; output lands in a.txt.
    os.system("python -m scripts.label_image --graph=tf_files/retrained_graph.pb"
              " --image=/home/ec2-user/sdpd/image.jpg > a.txt")
    # The first line of a.txt starts with the label; stop at the first digit (the score).
    aa = ""
    with open("a.txt", "r") as file:
        for i in file.readline():
            if i.isdigit():
                break
            aa = aa + i
    baa = aa.replace(" ", "")
    os.chdir("/home/ec2-user/sdpd")
    # <label>.txt holds the remedy text for the detected disease.
    with open(baa + ".txt", "r") as file1:
        aa = aa + " \n \n \n \n" + file1.read()
    return aa


app.run(host='ec2-13-127-4-47.ap-south-1.compute.amazonaws.com', port=8090)
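
# A minimal client sketch for the two endpoints above (assumes the
# third-party requests library; "leaf.jpg" is a placeholder path):
#
#   import base64
#   import requests
#
#   with open("leaf.jpg", "rb") as fh:
#       encoded = base64.b64encode(fh.read()).decode("ascii")
#
#   base = "http://ec2-13-127-4-47.ap-south-1.compute.amazonaws.com:8090"
#   requests.post(base + "/postjson", json={"aaa": "tomato", "encode": encoded})
#   print(requests.get(base + "/getjson").text)  # label plus remedy text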
|
normal
|
{
"blob_id": "607fc97c4520c7f54ee44e768776ceae2b70c378",
"index": 190,
"step-1": "import os\nfrom flask import Flask\nfrom flask import request\nresult=\"\" \napp = Flask(__name__)\n \[email protected]('/postjson', methods = ['POST'])\ndef postJsonHandler():\n global result\n #print (request.is_json)\n content = request.get_json()\n #print (content)\n #print (\"true\")\n #print (content[\"encode\"])\n #print (content[\"aaa\"])\n result=(content[\"aaa\"])\n os.chdir(\"/home/ec2-user/sdpd\")\n with open(\"image.jpg\", \"wb\") as fh:\n \tfh.write(content[\"encode\"].decode('base64'))\n \t\n return 'JSON posted'\n\[email protected]('/getjson')\ndef getJsonHandler():\n global result\n print result\n if (result == \"tomato\"):\n \tos.chdir(\"/home/ec2-user/sdpd/tomato\")\n \tos.system(\"python -m scripts.label_image --graph=tf_files/retrained_graph.pb --image=/home/ec2-user/sdpd/image.jpg > a.txt\")\n elif (result == \"potato\"):\n \tos.chdir(\"/home/ec2-user/sdpd/tensor\")\n \tos.system(\"python -m scripts.label_image --graph=tf_files/retrained_graph.pb --image=/home/ec2-user/sdpd/image.jpg > a.txt\")\n elif (result == \"corn\"):\n os.chdir(\"/home/ec2-user/sdpd/corn\")\n os.system(\"python -m scripts.label_image --graph=tf_files/retrained_graph.pb --image=/home/ec2-user/sdpd/image.jpg > a.txt\")\n elif (result == \"grape\"):\n os.chdir(\"/home/ec2-user/sdpd/grape\")\n os.system(\"python -m scripts.label_image --graph=tf_files/retrained_graph.pb --image=/home/ec2-user/sdpd/image.jpg > a.txt\")\n\n file = open(\"a.txt\", \"r\") \n aa=\"\"\n for i in file.readline():\n if (i.isdigit()):\n break\n aa= aa+i\n baa = aa.replace(\" \",\"\")\n os.chdir(\"/home/ec2-user/sdpd\")\n file1 = open(baa + \".txt\",\"r\")\n aa = aa + \" \\n \\n \\n \\n\" + file1.read()\n return aa \n #return 'string posted' \n \n \napp.run(host='ec2-13-127-4-47.ap-south-1.compute.amazonaws.com', port= 8090)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import pytest

from moa.primitives import NDArray, UnaryOperation, BinaryOperation, Function
from moa.yaccer import build_parser


@pytest.mark.parametrize("expression,result", [
    ("< 1 2 3>", NDArray(shape=(3,), data=[1, 2, 3], constant=False)),
])
def test_parse_vector(expression, result):
    parser = build_parser(start='vector')
    assert parser.parse(expression) == result


@pytest.mark.parametrize("expression, result", [
    ("const array A^3 <4 3 5>", NDArray(
        shape=(4, 3, 5), data=None, constant=True, identifier='A')),
])
def test_parse_constant_arrays(expression, result):
    parser = build_parser(start='constant_array')
    assert parser.parse(expression) == result


@pytest.mark.parametrize("expression, result", [
    ("array Zasdf_asdf^1 <3>", NDArray(
        shape=(3,), data=None, constant=False, identifier='Zasdf_asdf')),
])
def test_parse_arrays(expression, result):
    parser = build_parser(start='array')
    assert parser.parse(expression) == result


@pytest.mark.parametrize("expression, result", [
    ("j psi x", BinaryOperation(
        operator='PSI',
        left=NDArray(shape=None, data=None, constant=False, identifier='j'),
        right=NDArray(shape=None, data=None, constant=False, identifier='x'))),
    ("A omega <1 2>", BinaryOperation(
        operator='OMEGA',
        left=NDArray(shape=None, data=None, constant=False, identifier='A'),
        right=NDArray(shape=(2,), data=[1, 2], constant=False, identifier=None))),
    ("A omega B cat C", BinaryOperation(
        operator='CAT',
        left=BinaryOperation(
            operator='OMEGA',
            left=NDArray(shape=None, data=None, constant=False, identifier='A'),
            right=NDArray(shape=None, data=None, constant=False, identifier='B')),
        right=NDArray(shape=None, data=None, constant=False, identifier='C'))),
    ("(A omega B) cat C", BinaryOperation(
        operator='CAT',
        left=BinaryOperation(
            operator='OMEGA',
            left=NDArray(shape=None, data=None, constant=False, identifier='A'),
            right=NDArray(shape=None, data=None, constant=False, identifier='B')),
        right=NDArray(shape=None, data=None, constant=False, identifier='C'))),
    ("dim A cat B", BinaryOperation(
        operator='CAT',
        left=UnaryOperation(
            operator='DIM',
            right=NDArray(shape=None, data=None, constant=False, identifier='A')),
        right=NDArray(shape=None, data=None, constant=False, identifier='B'))),
    ("dim (A cat B)", UnaryOperation(
        operator='DIM',
        right=BinaryOperation(
            operator='CAT',
            left=NDArray(shape=None, data=None, constant=False, identifier='A'),
            right=NDArray(shape=None, data=None, constant=False, identifier='B')))),
])
def test_parse_terms_and_operators(expression, result):
    parser = build_parser(start='term')
    assert parser.parse(expression) == result


@pytest.mark.parametrize("expression, result", [
    ('main(){}', Function(arguments=[], statements=[], identifier='main')),
    ('foo_bar(array A^1 <5>){}', Function(
        arguments=[NDArray(shape=(5,), data=None, constant=False, identifier='A')],
        statements=[],
        identifier='foo_bar')),
    ('BizBAZZ(array A^2 < 3 5>, array B^3 <6 5 8>){}', Function(
        arguments=[
            NDArray(shape=(3, 5), data=None, constant=False, identifier='A'),
            NDArray(shape=(6, 5, 8), data=None, constant=False, identifier='B')],
        statements=[],
        identifier='BizBAZZ')),
    ('A_2_3_a(array A^2 <9 1>, array B^2 <3 1>, array ASDF^1 <9>){}', Function(
        arguments=[
            NDArray(shape=(9, 1), data=None, constant=False, identifier='A'),
            NDArray(shape=(3, 1), data=None, constant=False, identifier='B'),
            NDArray(shape=(9,), data=None, constant=False, identifier='ASDF')],
        statements=[],
        identifier='A_2_3_a')),
])
def test_parse_function(expression, result):
    parser = build_parser(start='function')
    assert parser.parse(expression) == result
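
# The parser can also be exercised interactively; a minimal sketch,
# assuming the moa package is importable:
#
#   parser = build_parser(start='vector')
#   print(parser.parse('< 1 2 3>'))
#   # -> NDArray(shape=(3,), data=[1, 2, 3], constant=False)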
|
normal
|
{
"blob_id": "a8b5cf45e5f75ae4b493f5fc9bb4555319f1a725",
"index": 5294,
"step-1": "<mask token>\n\n\[email protected]('expression,result', [('< 1 2 3>', NDArray(shape=(\n 3,), data=[1, 2, 3], constant=False))])\ndef test_parse_vector(expression, result):\n parser = build_parser(start='vector')\n assert parser.parse(expression) == result\n\n\n<mask token>\n\n\[email protected]('expression, result', [('j psi x', BinaryOperation\n (operator='PSI', left=NDArray(shape=None, data=None, constant=False,\n identifier='j'), right=NDArray(shape=None, data=None, constant=False,\n identifier='x'))), ('A omega <1 2>', BinaryOperation(operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=(2,), data=[1, 2], constant=False, identifier=None)\n )), ('A omega B cat C', BinaryOperation(operator='CAT', left=\n BinaryOperation(operator='OMEGA', left=NDArray(shape=None, data=None,\n constant=False, identifier='A'), right=NDArray(shape=None, data=None,\n constant=False, identifier='B')), right=NDArray(shape=None, data=None,\n constant=False, identifier='C'))), ('(A omega B) cat C',\n BinaryOperation(operator='CAT', left=BinaryOperation(operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=None, data=None, constant=False, identifier='B')),\n right=NDArray(shape=None, data=None, constant=False, identifier='C'))),\n ('dim A cat B', BinaryOperation(operator='CAT', left=UnaryOperation(\n operator='DIM', right=NDArray(shape=None, data=None, constant=False,\n identifier='A')), right=NDArray(shape=None, data=None, constant=False,\n identifier='B'))), ('dim (A cat B)', UnaryOperation(operator='DIM',\n right=BinaryOperation(operator='CAT', left=NDArray(shape=None, data=\n None, constant=False, identifier='A'), right=NDArray(shape=None, data=\n None, constant=False, identifier='B'))))])\ndef test_parse_terms_and_operators(expression, result):\n parser = build_parser(start='term')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('main(){}', Function(\n arguments=[], statements=[], identifier='main')), (\n 'foo_bar(array A^1 <5>){}', Function(arguments=[NDArray(shape=(5,),\n data=None, constant=False, identifier='A')], statements=[], identifier=\n 'foo_bar')), ('BizBAZZ(array A^2 < 3 5>, array B^3 <6 5 8>){}',\n Function(arguments=[NDArray(shape=(3, 5), data=None, constant=False,\n identifier='A'), NDArray(shape=(6, 5, 8), data=None, constant=False,\n identifier='B')], statements=[], identifier='BizBAZZ')), (\n 'A_2_3_a(array A^2 <9 1>, array B^2 <3 1>, array ASDF^1 <9>){}',\n Function(arguments=[NDArray(shape=(9, 1), data=None, constant=False,\n identifier='A'), NDArray(shape=(3, 1), data=None, constant=False,\n identifier='B'), NDArray(shape=(9,), data=None, constant=False,\n identifier='ASDF')], statements=[], identifier='A_2_3_a'))])\ndef test_parse_function(expression, result):\n parser = build_parser(start='function')\n assert parser.parse(expression) == result\n",
"step-2": "<mask token>\n\n\[email protected]('expression,result', [('< 1 2 3>', NDArray(shape=(\n 3,), data=[1, 2, 3], constant=False))])\ndef test_parse_vector(expression, result):\n parser = build_parser(start='vector')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('const array A^3 <4 3 5>',\n NDArray(shape=(4, 3, 5), data=None, constant=True, identifier='A'))])\ndef test_parse_constant_arrays(expression, result):\n parser = build_parser(start='constant_array')\n assert parser.parse(expression) == result\n\n\n<mask token>\n\n\[email protected]('expression, result', [('j psi x', BinaryOperation\n (operator='PSI', left=NDArray(shape=None, data=None, constant=False,\n identifier='j'), right=NDArray(shape=None, data=None, constant=False,\n identifier='x'))), ('A omega <1 2>', BinaryOperation(operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=(2,), data=[1, 2], constant=False, identifier=None)\n )), ('A omega B cat C', BinaryOperation(operator='CAT', left=\n BinaryOperation(operator='OMEGA', left=NDArray(shape=None, data=None,\n constant=False, identifier='A'), right=NDArray(shape=None, data=None,\n constant=False, identifier='B')), right=NDArray(shape=None, data=None,\n constant=False, identifier='C'))), ('(A omega B) cat C',\n BinaryOperation(operator='CAT', left=BinaryOperation(operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=None, data=None, constant=False, identifier='B')),\n right=NDArray(shape=None, data=None, constant=False, identifier='C'))),\n ('dim A cat B', BinaryOperation(operator='CAT', left=UnaryOperation(\n operator='DIM', right=NDArray(shape=None, data=None, constant=False,\n identifier='A')), right=NDArray(shape=None, data=None, constant=False,\n identifier='B'))), ('dim (A cat B)', UnaryOperation(operator='DIM',\n right=BinaryOperation(operator='CAT', left=NDArray(shape=None, data=\n None, constant=False, identifier='A'), right=NDArray(shape=None, data=\n None, constant=False, identifier='B'))))])\ndef test_parse_terms_and_operators(expression, result):\n parser = build_parser(start='term')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('main(){}', Function(\n arguments=[], statements=[], identifier='main')), (\n 'foo_bar(array A^1 <5>){}', Function(arguments=[NDArray(shape=(5,),\n data=None, constant=False, identifier='A')], statements=[], identifier=\n 'foo_bar')), ('BizBAZZ(array A^2 < 3 5>, array B^3 <6 5 8>){}',\n Function(arguments=[NDArray(shape=(3, 5), data=None, constant=False,\n identifier='A'), NDArray(shape=(6, 5, 8), data=None, constant=False,\n identifier='B')], statements=[], identifier='BizBAZZ')), (\n 'A_2_3_a(array A^2 <9 1>, array B^2 <3 1>, array ASDF^1 <9>){}',\n Function(arguments=[NDArray(shape=(9, 1), data=None, constant=False,\n identifier='A'), NDArray(shape=(3, 1), data=None, constant=False,\n identifier='B'), NDArray(shape=(9,), data=None, constant=False,\n identifier='ASDF')], statements=[], identifier='A_2_3_a'))])\ndef test_parse_function(expression, result):\n parser = build_parser(start='function')\n assert parser.parse(expression) == result\n",
"step-3": "<mask token>\n\n\[email protected]('expression,result', [('< 1 2 3>', NDArray(shape=(\n 3,), data=[1, 2, 3], constant=False))])\ndef test_parse_vector(expression, result):\n parser = build_parser(start='vector')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('const array A^3 <4 3 5>',\n NDArray(shape=(4, 3, 5), data=None, constant=True, identifier='A'))])\ndef test_parse_constant_arrays(expression, result):\n parser = build_parser(start='constant_array')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('array Zasdf_asdf^1 <3>',\n NDArray(shape=(3,), data=None, constant=False, identifier='Zasdf_asdf'))])\ndef test_parse_arrays(expression, result):\n parser = build_parser(start='array')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('j psi x', BinaryOperation\n (operator='PSI', left=NDArray(shape=None, data=None, constant=False,\n identifier='j'), right=NDArray(shape=None, data=None, constant=False,\n identifier='x'))), ('A omega <1 2>', BinaryOperation(operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=(2,), data=[1, 2], constant=False, identifier=None)\n )), ('A omega B cat C', BinaryOperation(operator='CAT', left=\n BinaryOperation(operator='OMEGA', left=NDArray(shape=None, data=None,\n constant=False, identifier='A'), right=NDArray(shape=None, data=None,\n constant=False, identifier='B')), right=NDArray(shape=None, data=None,\n constant=False, identifier='C'))), ('(A omega B) cat C',\n BinaryOperation(operator='CAT', left=BinaryOperation(operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=None, data=None, constant=False, identifier='B')),\n right=NDArray(shape=None, data=None, constant=False, identifier='C'))),\n ('dim A cat B', BinaryOperation(operator='CAT', left=UnaryOperation(\n operator='DIM', right=NDArray(shape=None, data=None, constant=False,\n identifier='A')), right=NDArray(shape=None, data=None, constant=False,\n identifier='B'))), ('dim (A cat B)', UnaryOperation(operator='DIM',\n right=BinaryOperation(operator='CAT', left=NDArray(shape=None, data=\n None, constant=False, identifier='A'), right=NDArray(shape=None, data=\n None, constant=False, identifier='B'))))])\ndef test_parse_terms_and_operators(expression, result):\n parser = build_parser(start='term')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('main(){}', Function(\n arguments=[], statements=[], identifier='main')), (\n 'foo_bar(array A^1 <5>){}', Function(arguments=[NDArray(shape=(5,),\n data=None, constant=False, identifier='A')], statements=[], identifier=\n 'foo_bar')), ('BizBAZZ(array A^2 < 3 5>, array B^3 <6 5 8>){}',\n Function(arguments=[NDArray(shape=(3, 5), data=None, constant=False,\n identifier='A'), NDArray(shape=(6, 5, 8), data=None, constant=False,\n identifier='B')], statements=[], identifier='BizBAZZ')), (\n 'A_2_3_a(array A^2 <9 1>, array B^2 <3 1>, array ASDF^1 <9>){}',\n Function(arguments=[NDArray(shape=(9, 1), data=None, constant=False,\n identifier='A'), NDArray(shape=(3, 1), data=None, constant=False,\n identifier='B'), NDArray(shape=(9,), data=None, constant=False,\n identifier='ASDF')], statements=[], identifier='A_2_3_a'))])\ndef test_parse_function(expression, result):\n parser = build_parser(start='function')\n assert parser.parse(expression) == result\n",
"step-4": "import pytest\nfrom moa.primitives import NDArray, UnaryOperation, BinaryOperation, Function\nfrom moa.yaccer import build_parser\n\n\[email protected]('expression,result', [('< 1 2 3>', NDArray(shape=(\n 3,), data=[1, 2, 3], constant=False))])\ndef test_parse_vector(expression, result):\n parser = build_parser(start='vector')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('const array A^3 <4 3 5>',\n NDArray(shape=(4, 3, 5), data=None, constant=True, identifier='A'))])\ndef test_parse_constant_arrays(expression, result):\n parser = build_parser(start='constant_array')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('array Zasdf_asdf^1 <3>',\n NDArray(shape=(3,), data=None, constant=False, identifier='Zasdf_asdf'))])\ndef test_parse_arrays(expression, result):\n parser = build_parser(start='array')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('j psi x', BinaryOperation\n (operator='PSI', left=NDArray(shape=None, data=None, constant=False,\n identifier='j'), right=NDArray(shape=None, data=None, constant=False,\n identifier='x'))), ('A omega <1 2>', BinaryOperation(operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=(2,), data=[1, 2], constant=False, identifier=None)\n )), ('A omega B cat C', BinaryOperation(operator='CAT', left=\n BinaryOperation(operator='OMEGA', left=NDArray(shape=None, data=None,\n constant=False, identifier='A'), right=NDArray(shape=None, data=None,\n constant=False, identifier='B')), right=NDArray(shape=None, data=None,\n constant=False, identifier='C'))), ('(A omega B) cat C',\n BinaryOperation(operator='CAT', left=BinaryOperation(operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=None, data=None, constant=False, identifier='B')),\n right=NDArray(shape=None, data=None, constant=False, identifier='C'))),\n ('dim A cat B', BinaryOperation(operator='CAT', left=UnaryOperation(\n operator='DIM', right=NDArray(shape=None, data=None, constant=False,\n identifier='A')), right=NDArray(shape=None, data=None, constant=False,\n identifier='B'))), ('dim (A cat B)', UnaryOperation(operator='DIM',\n right=BinaryOperation(operator='CAT', left=NDArray(shape=None, data=\n None, constant=False, identifier='A'), right=NDArray(shape=None, data=\n None, constant=False, identifier='B'))))])\ndef test_parse_terms_and_operators(expression, result):\n parser = build_parser(start='term')\n assert parser.parse(expression) == result\n\n\[email protected]('expression, result', [('main(){}', Function(\n arguments=[], statements=[], identifier='main')), (\n 'foo_bar(array A^1 <5>){}', Function(arguments=[NDArray(shape=(5,),\n data=None, constant=False, identifier='A')], statements=[], identifier=\n 'foo_bar')), ('BizBAZZ(array A^2 < 3 5>, array B^3 <6 5 8>){}',\n Function(arguments=[NDArray(shape=(3, 5), data=None, constant=False,\n identifier='A'), NDArray(shape=(6, 5, 8), data=None, constant=False,\n identifier='B')], statements=[], identifier='BizBAZZ')), (\n 'A_2_3_a(array A^2 <9 1>, array B^2 <3 1>, array ASDF^1 <9>){}',\n Function(arguments=[NDArray(shape=(9, 1), data=None, constant=False,\n identifier='A'), NDArray(shape=(3, 1), data=None, constant=False,\n identifier='B'), NDArray(shape=(9,), data=None, constant=False,\n identifier='ASDF')], statements=[], identifier='A_2_3_a'))])\ndef test_parse_function(expression, result):\n parser = 
build_parser(start='function')\n assert parser.parse(expression) == result\n",
"step-5": "import pytest\n\nfrom moa.primitives import NDArray, UnaryOperation, BinaryOperation, Function\nfrom moa.yaccer import build_parser\n\n\[email protected](\"expression,result\", [\n (\"< 1 2 3>\", NDArray(shape=(3,), data=[1, 2, 3], constant=False)),\n])\ndef test_parse_vector(expression, result):\n parser = build_parser(start='vector')\n assert parser.parse(expression) == result\n\n\[email protected](\"expression, result\", [\n (\"const array A^3 <4 3 5>\", NDArray(\n shape=(4, 3, 5), data=None, constant=True, identifier='A')),\n])\ndef test_parse_constant_arrays(expression, result):\n parser = build_parser(start='constant_array')\n assert parser.parse(expression) == result\n\n\[email protected](\"expression, result\", [\n (\"array Zasdf_asdf^1 <3>\", NDArray(\n shape=(3,), data=None, constant=False, identifier='Zasdf_asdf')),\n])\ndef test_parse_arrays(expression, result):\n parser = build_parser(start='array')\n assert parser.parse(expression) == result\n\n\[email protected](\"expression, result\", [\n (\"j psi x\", BinaryOperation(\n operator='PSI',\n left=NDArray(shape=None, data=None, constant=False, identifier='j'),\n right=NDArray(shape=None, data=None, constant=False, identifier='x'))),\n (\"A omega <1 2>\", BinaryOperation(\n operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=(2,), data=[1, 2], constant=False, identifier=None))),\n (\"A omega B cat C\", BinaryOperation(\n operator='CAT',\n left=BinaryOperation(\n operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=None, data=None, constant=False, identifier='B')),\n right=NDArray(shape=None, data=None, constant=False, identifier='C'))),\n (\"(A omega B) cat C\", BinaryOperation(\n operator='CAT',\n left=BinaryOperation(\n operator='OMEGA',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=None, data=None, constant=False, identifier='B')),\n right=NDArray(shape=None, data=None, constant=False, identifier='C'))),\n (\"dim A cat B\", BinaryOperation(\n operator='CAT',\n left=UnaryOperation(\n operator='DIM',\n right=NDArray(shape=None, data=None, constant=False, identifier='A')),\n right=NDArray(shape=None, data=None, constant=False, identifier='B'))),\n (\"dim (A cat B)\", UnaryOperation(\n operator='DIM',\n right=BinaryOperation(\n operator='CAT',\n left=NDArray(shape=None, data=None, constant=False, identifier='A'),\n right=NDArray(shape=None, data=None, constant=False, identifier='B')))),\n])\ndef test_parse_terms_and_operators(expression, result):\n parser = build_parser(start='term')\n assert parser.parse(expression) == result\n\n\[email protected](\"expression, result\", [\n ('main(){}', Function(arguments=[], statements=[], identifier='main')),\n ('foo_bar(array A^1 <5>){}', Function(\n arguments=[NDArray(shape=(5,), data=None, constant=False, identifier='A')],\n statements=[],\n identifier='foo_bar')),\n ('BizBAZZ(array A^2 < 3 5>, array B^3 <6 5 8>){}', Function(\n arguments=[\n NDArray(shape=(3, 5), data=None, constant=False, identifier='A'),\n NDArray(shape=(6, 5, 8), data=None, constant=False, identifier='B')],\n statements=[],\n identifier='BizBAZZ')),\n ('A_2_3_a(array A^2 <9 1>, array B^2 <3 1>, array ASDF^1 <9>){}', Function(\n arguments=[\n NDArray(shape=(9, 1), data=None, constant=False, identifier='A'),\n NDArray(shape=(3, 1), data=None, constant=False, identifier='B'),\n NDArray(shape=(9,), data=None, constant=False, identifier='ASDF')],\n 
statements=[],\n identifier='A_2_3_a')),\n])\ndef test_parse_function(expression, result):\n parser = build_parser(start='function')\n assert parser.parse(expression) == result\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import pickle


class myPickle:

    def make(self, obj, fileName):
        print("myPickle make file", fileName)
        # Context managers ensure the file handle is always closed.
        with open(fileName, 'wb') as fh:
            pickle.dump(obj, fh)
        print(" DONE")

    def load(self, fileName):
        print("myPickle load file", fileName)
        with open(fileName, 'rb') as fh:
            tr = pickle.load(fh)
        print(" DONE")
        return tr
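
# Round-trip usage sketch (file name is arbitrary):
#
#   p = myPickle()
#   p.make({"a": 1, "b": [2, 3]}, "obj.pkl")
#   assert p.load("obj.pkl") == {"a": 1, "b": [2, 3]}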
|
normal
|
{
"blob_id": "e50feccd583d7e33877d5fcc377a1d79dc247d3a",
"index": 3117,
"step-1": "<mask token>\n\n\nclass myPickle:\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass myPickle:\n\n def make(self, obj, fileName):\n print('myPickle make file', fileName)\n pickle.dump(obj, open(fileName, 'wb'))\n print(' DONE')\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass myPickle:\n\n def make(self, obj, fileName):\n print('myPickle make file', fileName)\n pickle.dump(obj, open(fileName, 'wb'))\n print(' DONE')\n\n def load(self, fileName):\n print('myPickle load file', fileName)\n tr = pickle.load(open(fileName, 'rb'))\n print(' DONE')\n return tr\n",
"step-4": "import pickle\n\n\nclass myPickle:\n\n def make(self, obj, fileName):\n print('myPickle make file', fileName)\n pickle.dump(obj, open(fileName, 'wb'))\n print(' DONE')\n\n def load(self, fileName):\n print('myPickle load file', fileName)\n tr = pickle.load(open(fileName, 'rb'))\n print(' DONE')\n return tr\n",
"step-5": "\nimport pickle\n\nclass myPickle:\n \n def make(self, obj,fileName):\n print(\"myPickle make file\",fileName)\n pickle.dump( obj, open(fileName,'wb') )\n print(\" DONE\")\n \n def load(self, fileName):\n print(\"myPickle load file\",fileName)\n tr = pickle.load( open(fileName,'rb') )\n print(\" DONE\")\n return tr\n ",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import math


class Solution(object):
    def smallestGoodBase(self, n):
        """
        :type n: str
        :rtype: str
        """
        # k is the base and the representation is m digits of 1.
        # From the geometric series, (k**m - 1) / (k - 1) = n,
        # so m = log_k(n * k - n + 1), and m must be an integer.
        # Since k >= 2, m is largest when k = 2.
        m_max = int(math.ceil(math.log(1 + int(n), 2)))
        for m in range(m_max, 1, -1):
            # Solve the high-order equation k**m - n*k + n - 1 = 0
            # for an integer k via binary search.
            res = self.solve_equation(m, int(n))
            if res is not False:
                return str(res)

    # k**m - n*k + n - 1 = 0, searched over k in [2, n-1].
    # TODO: the binary search assumes the left-hand side behaves
    # monotonically on this range, which is hard to prove in general.
    def solve_equation(self, m, n):
        k_l, k_h = 2, n - 1
        while k_l <= k_h:
            mid = (k_l + k_h) // 2  # integer midpoint
            val = mid ** m - n * mid + n - 1
            if val == 0:
                return mid
            elif val < 0:
                k_l = mid + 1
            else:
                k_h = mid - 1
        return False
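
# Quick sanity check: 13 = 1*9 + 1*3 + 1, i.e. "111" in base 3, and no
# smaller base writes 13 as all ones (base 2 gives 1101):
#
#   print(Solution().smallestGoodBase("13"))  # -> "3"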
|
normal
|
{
"blob_id": "de287d1bc644fdfd0f47bd8667580786b74444d0",
"index": 8863,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n <mask token>\n",
"step-3": "class Solution(object):\n <mask token>\n\n def solve_equation(self, m, n):\n k_l, k_h = 2, n - 1\n while k_l <= k_h:\n mid = (k_l + k_h) / 2\n val = mid ** m - n * mid + n - 1\n if val == 0:\n return mid\n elif val < 0:\n k_l = mid + 1\n else:\n k_h = mid - 1\n return False\n",
"step-4": "class Solution(object):\n\n def smallestGoodBase(self, n):\n \"\"\"\n :type n: str\n :rtype: str\n \"\"\"\n m_max = int(math.ceil(math.log(1 + int(n), 2)))\n for m in range(m_max, 1, -1):\n res = self.solve_equation(m, int(n))\n if res != False:\n return str(res)\n\n def solve_equation(self, m, n):\n k_l, k_h = 2, n - 1\n while k_l <= k_h:\n mid = (k_l + k_h) / 2\n val = mid ** m - n * mid + n - 1\n if val == 0:\n return mid\n elif val < 0:\n k_l = mid + 1\n else:\n k_h = mid - 1\n return False\n",
"step-5": "class Solution(object):\n def smallestGoodBase(self, n):\n \"\"\"\n :type n: str\n :rtype: str\n \"\"\"\n # k is the base and the representation is\n # m bits of 1\n # We then have from math\n # (k**m - 1) / (k-1) = n\n # m = log_k (n * k - n + 1)\n # m needs to be integer\n \n # we know that k = 2 m will be largest\n m_max = int(math.ceil(math.log(1 + int(n), 2)))\n for m in range(m_max, 1, -1):\n # solve high order equation\n # k**m - nk + n - 1 = 0\n \n # Find k using newton approach\n res = self.solve_equation(m, int(n))\n if res != False:\n return str(res)\n \n\n # k**m - nk + n - 1 = 0\n # TODO: Why newton approach always work here.\n # Hard to prove they are always monotonic\n def solve_equation(self, m, n):\n k_l, k_h = 2, n - 1\n while k_l <= k_h:\n mid = (k_l + k_h) / 2\n val = mid ** m - n * mid + n - 1 \n if val == 0:\n return mid\n elif val < 0:\n k_l = mid + 1\n else:\n k_h = mid - 1\n return False\n \n\n ",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
# pymd2mc.xyzfile
"""
Calculates the omegaAB interaction parameter from a hexagonal-lattice
trajectory stored in an xyz file.
"""

__author__ = 'Mateusz Lis'
__version__ = '0.1'


from optparse import OptionParser
import sys
from time import time

from constants import R, T
from energyCalc import EnergyCalculator
from latticeProjector import LatticeProjectorSimple
from lattices import HexLattice
from structures.xyzfile import XYZFile
from utils import delLine, clearFile


def main():
    options = parseCommandLine()
    inFile = XYZFile(options.inXyzFilename)

    clearFile(options.outDatFilename)
    outFile = open(options.outDatFilename, 'w')
    i = 0
    startTime = time()
    omegas = []
    sumOmegas = 0
    calc = EnergyCalculator(inFile, R, T)

    while True:
        i += 1
        if options.verbose:
            delLine()
            print(i, end=' ')
        omega = calc.getNextEnergy(options.symbol)
        if omega is None:
            break
        omega, sim, diff = omega
        # Keep only frames whose omega is numerically sane.
        if omega > -10**4 and omega < 10**10:
            omegas.append(omega)
            sumOmegas += omega
            outFile.write("%d %f %f %f \n" % (i, omega, sim, diff))

    outFile.close()
    if options.verbose:
        print("Done. Execution time=%f" % (time() - startTime))
        print("omegas", sumOmegas, sum(omegas))
    midOmega = sum(omegas) / len(omegas)
    print("Result omegaAB = %f" % midOmega)
    sd = 0
    for omega in omegas:
        sd += (midOmega - omega) ** 2
    sd /= len(omegas)
    sd **= 1. / 2.
    print("Standard deviation = %f" % sd)


def parseCommandLine():
    """
    Sets up command line arguments and parses them
    """
    parser = OptionParser(usage="%prog ", version="%prog " + __version__,
                          description='''
        This program calculates omegaAB value from a hexagonal lattice trajectory
        stored in xyz file (see for more details)''')
    parser.add_option("-f", "--traj", dest="inXyzFilename", default="hexTraj.xyz",
                      help="xyz input trajectory file (default hexTraj.xyz)", metavar="INXYZFILE")
    parser.add_option("-r", "--reference", dest="symbol", default="P11",
                      help="reference particle name", metavar="ADATOM")
    parser.add_option("-o", "--output", dest="outDatFilename", default="omega.dat",
                      help="output dat file with omega values for each frame. WARNING: it will be overwritten", metavar="OUTXYZFILE")
    parser.add_option("-q", "--quiet",
                      action="store_false", dest="verbose", default=True,
                      help="don't print status messages to stdout")

    (options, _) = parser.parse_args()

    return options


if __name__ == '__main__':
    sys.exit(main())
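
# Typical invocation, using the flags defined in parseCommandLine above
# (the script file name is a placeholder):
#
#   python pymd2mc_omega.py -f hexTraj.xyz -r P11 -o omega.dat
#
# Each line of omega.dat then holds: frame index, omega, sim, diff.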
|
normal
|
{
"blob_id": "a325feba1c2bb588321429a045133d6eede9e8cf",
"index": 9350,
"step-1": "#!/usr/bin/python\n# pymd2mc.xyzfile\n\"\"\"\n\n\"\"\"\n\n__author__ = 'Mateusz Lis'\n__version__= '0.1'\n\n\nfrom optparse import OptionParser\nimport sys\nfrom time import time\n\nfrom constants import R, T\nfrom energyCalc import EnergyCalculator\nfrom latticeProjector import LatticeProjectorSimple\nfrom lattices import HexLattice\nfrom structures.xyzfile import XYZFile\nfrom utils import delLine, clearFile\n \n\n \ndef main():\n \n options = parseCommandLine()\n inFile = XYZFile(options.inXyzFilename)\n \n clearFile(options.outDatFilename)\n outFile = open(options.outDatFilename, 'w')\n i = 0\n startTime = time()\n omegas = []\n\n sumOmegas = 0L\n calc = EnergyCalculator(inFile, R, T)\n \n while True:\n i += 1\n if options.verbose:\n delLine()\n print i, \n omega = calc.getNextEnergy(options.symbol)\n if omega is None:\n break\n omega , sim, diff = omega\n if omega > -10**4 and omega < 10**10: \n omegas.append(omega)\n sumOmegas += omega\n outFile.write(\"%d %f %f %f \\n\" % (i, omega, sim, diff))\n\n \n outFile.close()\n if options.verbose: \n print \"Done. Execution time=%f\" % (time() - startTime) \n print \"omegas\" ,sumOmegas, (sum(omegas))\n lenOmegas = len(omegas)\n midOmega = (sum(omegas)/len(omegas))\n print \"Result omegaAB = %f\" % midOmega\n sd = 0\n for omega in omegas:\n sd += (midOmega - omega)**2\n sd /= len(omegas)\n sd **= (1./2.)\n print \"Standard deviation = %f\" % sd\ndef parseCommandLine():\n \"\"\"\n Sets up command line arguments and parses them\n \"\"\"\n parser = OptionParser(usage=\"%prog \", version=\"%prog \" + __version__,\n description='''\n This program calculates omegaAB value from a hexagonal lattice trajectory\n stored in xyz file (see for more details)''')\n parser.add_option(\"-f\", \"--traj\", dest=\"inXyzFilename\",default = \"hexTraj.xyz\",\n help=\"xyz input trajectory file (default traj.xyz)\", metavar=\"INXYZFILE\")\n parser.add_option(\"-r\", \"--reference\", dest=\"symbol\",default = \"P11\",\n help=\"reference particle name\", metavar=\"ADATOM\")\n parser.add_option(\"-o\", \"--output\", dest=\"outDatFilename\", default=\"omega.dat\",\n help=\"output dat file with omega values for each frame. WARNING: it will be overriden\", metavar=\"OUTXYZFILE\")\n \n parser.add_option(\"-q\", \"--quiet\",\n action=\"store_false\", dest=\"verbose\", default=True,\n help=\"don't print status messages to stdout\")\n\n (options, _) = parser.parse_args()\n\n return options \n \n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django.db import models

from django.utils import timezone
from django.utils.text import slugify

from django.db.models.signals import pre_save

from NetFlix.db.models import PublishStateOptions
from NetFlix.db.receivers import publicado_stado_pre_save, slugify_pre_save


class VideoQuerySet(models.QuerySet):
    def publicado(self):
        ahora = timezone.now()
        return self.filter(
            stado=PublishStateOptions.PUBLISH,
            tiempo_publicado__lte=ahora
        )


class VideoManager(models.Manager):
    def get_queryset(self):
        return VideoQuerySet(self.model, using=self._db)

    def publicado(self):
        return self.get_queryset().publicado()


class Video(models.Model):
    titulo = models.CharField(max_length=120)
    descripcion = models.TextField(blank=True, null=True)
    slug = models.SlugField(blank=True, null=True)
    activo = models.BooleanField(default=True)
    video_id = models.CharField(max_length=120, unique=True)

    timestamp = models.DateTimeField(auto_now_add=True)
    update = models.DateTimeField(auto_now=True)

    stado = models.CharField(max_length=2, choices=PublishStateOptions.choices, default=PublishStateOptions.DRAFT)

    tiempo_publicado = models.DateTimeField(auto_now_add=False, auto_now=False, blank=True, null=True)

    objects = VideoManager()

    def __str__(self):
        return self.titulo

    def get_video_id(self):
        if not self.es_publicado:
            return None
        return self.video_id

    def get_descripcion_trailer(self):
        return self.descripcion

    @property
    def es_publicado(self):
        if self.activo is False:
            return False
        estado = self.stado
        if estado != PublishStateOptions.PUBLISH:
            return False
        tiempo_publicado = self.tiempo_publicado
        if tiempo_publicado is None:
            return False
        ahora = timezone.now()
        return tiempo_publicado <= ahora

    def get_playlista_ids(self):
        # Reverse foreign-key lookup (self.<foreignkey_obj>_set.all());
        # here the reverse accessor is named `playlist_destacado`.
        return list(self.playlist_destacado.all().values_list('id', flat=True))

    # Replaced by the pre_save receivers connected below:
    # def save(self, *args, **kwargs):
    #     if self.stado == PublishStateOptions.PUBLISH and self.tiempo_publicado is None:
    #         self.tiempo_publicado = timezone.now()
    #     elif self.stado == PublishStateOptions.DRAFT:
    #         self.tiempo_publicado = None
    #     if self.slug is None:
    #         self.slug = slugify(self.titulo)
    #     super().save(*args, **kwargs)


class ProxiTodoLosVideo(Video):
    class Meta:
        proxy = True
        verbose_name = "Todo los Video"
        verbose_name_plural = "Todos los Videos"


class VideoPublicadoProxy(Video):
    class Meta:
        proxy = True
        verbose_name = 'Video Publicado'
        verbose_name_plural = 'Videos Publicados'


pre_save.connect(publicado_stado_pre_save, sender=Video)
pre_save.connect(slugify_pre_save, sender=Video)

pre_save.connect(publicado_stado_pre_save, sender=ProxiTodoLosVideo)
pre_save.connect(slugify_pre_save, sender=ProxiTodoLosVideo)

pre_save.connect(publicado_stado_pre_save, sender=VideoPublicadoProxy)
pre_save.connect(slugify_pre_save, sender=VideoPublicadoProxy)
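
# Usage sketch for the custom manager (hypothetical shell session; the
# import path of this models module is a placeholder). The pre_save
# receiver is assumed to stamp tiempo_publicado, mirroring the
# commented-out save() above:
#
#   from NetFlix.db.models import PublishStateOptions
#   from videos.models import Video  # placeholder app path
#
#   v = Video.objects.create(titulo="Mi Video", video_id="abc123",
#                            stado=PublishStateOptions.PUBLISH)
#   assert v.es_publicado
#   Video.objects.publicado()  # only active, published, time-reached videos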
|
normal
|
{
"blob_id": "9c98ecde2e8aac00a33da7db6e5e6023519e4b84",
"index": 7731,
"step-1": "<mask token>\n\n\nclass Video(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.titulo\n\n def get_video_id(self):\n if not self.es_publicado:\n return None\n return self.video_id\n\n def get_descripcion_trailer(self):\n return self.descripcion\n\n @property\n def es_publicado(self):\n if self.activo is False:\n return False\n estado = self.stado\n if estado != PublishStateOptions.PUBLISH:\n return False\n tiempo_publicado = self.tiempo_publicado\n if tiempo_publicado is None:\n return False\n ahora = timezone.now()\n return tiempo_publicado <= ahora\n\n def get_playlista_ids(self):\n return list(self.playlist_destacado.all().values_list('id', flat=True))\n\n\nclass ProxiTodoLosVideo(Video):\n\n\n class Meta:\n proxy = True\n verbose_name = 'Todo los Video'\n verbose_name_plural = 'Todos los Publicados'\n\n\nclass VideoPublicadoProxy(Video):\n\n\n class Meta:\n proxy = True\n verbose_name = 'Video Publicado'\n verbose_name_plural = 'Videos Publicados'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass VideoQuerySet(models.QuerySet):\n\n def publicado(self):\n ahora = timezone.now()\n return self.filter(stado=PublishStateOptions.PUBLISH,\n tiempo_publicado__lte=ahora)\n\n\nclass VideoManager(models.Manager):\n\n def get_queryset(self):\n return VideoQuerySet(self.model, using=self._db)\n\n def publicado(self):\n return self.get_queryset().publicado()\n\n\nclass Video(models.Model):\n titulo = models.CharField(max_length=120)\n descripcion = models.TextField(blank=True, null=True)\n slug = models.SlugField(blank=True, null=True)\n activo = models.BooleanField(default=True)\n video_id = models.CharField(max_length=120, unique=True)\n timestamp = models.DateTimeField(auto_now_add=True)\n update = models.DateTimeField(auto_now=True)\n stado = models.CharField(max_length=2, choices=PublishStateOptions.\n choices, default=PublishStateOptions.DRAFT)\n tiempo_publicado = models.DateTimeField(auto_now_add=False, auto_now=\n False, blank=True, null=True)\n objects = VideoManager()\n\n def __str__(self):\n return self.titulo\n\n def get_video_id(self):\n if not self.es_publicado:\n return None\n return self.video_id\n\n def get_descripcion_trailer(self):\n return self.descripcion\n\n @property\n def es_publicado(self):\n if self.activo is False:\n return False\n estado = self.stado\n if estado != PublishStateOptions.PUBLISH:\n return False\n tiempo_publicado = self.tiempo_publicado\n if tiempo_publicado is None:\n return False\n ahora = timezone.now()\n return tiempo_publicado <= ahora\n\n def get_playlista_ids(self):\n return list(self.playlist_destacado.all().values_list('id', flat=True))\n\n\nclass ProxiTodoLosVideo(Video):\n\n\n class Meta:\n proxy = True\n verbose_name = 'Todo los Video'\n verbose_name_plural = 'Todos los Publicados'\n\n\nclass VideoPublicadoProxy(Video):\n\n\n class Meta:\n proxy = True\n verbose_name = 'Video Publicado'\n verbose_name_plural = 'Videos Publicados'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass VideoQuerySet(models.QuerySet):\n\n def publicado(self):\n ahora = timezone.now()\n return self.filter(stado=PublishStateOptions.PUBLISH,\n tiempo_publicado__lte=ahora)\n\n\nclass VideoManager(models.Manager):\n\n def get_queryset(self):\n return VideoQuerySet(self.model, using=self._db)\n\n def publicado(self):\n return self.get_queryset().publicado()\n\n\nclass Video(models.Model):\n titulo = models.CharField(max_length=120)\n descripcion = models.TextField(blank=True, null=True)\n slug = models.SlugField(blank=True, null=True)\n activo = models.BooleanField(default=True)\n video_id = models.CharField(max_length=120, unique=True)\n timestamp = models.DateTimeField(auto_now_add=True)\n update = models.DateTimeField(auto_now=True)\n stado = models.CharField(max_length=2, choices=PublishStateOptions.\n choices, default=PublishStateOptions.DRAFT)\n tiempo_publicado = models.DateTimeField(auto_now_add=False, auto_now=\n False, blank=True, null=True)\n objects = VideoManager()\n\n def __str__(self):\n return self.titulo\n\n def get_video_id(self):\n if not self.es_publicado:\n return None\n return self.video_id\n\n def get_descripcion_trailer(self):\n return self.descripcion\n\n @property\n def es_publicado(self):\n if self.activo is False:\n return False\n estado = self.stado\n if estado != PublishStateOptions.PUBLISH:\n return False\n tiempo_publicado = self.tiempo_publicado\n if tiempo_publicado is None:\n return False\n ahora = timezone.now()\n return tiempo_publicado <= ahora\n\n def get_playlista_ids(self):\n return list(self.playlist_destacado.all().values_list('id', flat=True))\n\n\nclass ProxiTodoLosVideo(Video):\n\n\n class Meta:\n proxy = True\n verbose_name = 'Todo los Video'\n verbose_name_plural = 'Todos los Publicados'\n\n\nclass VideoPublicadoProxy(Video):\n\n\n class Meta:\n proxy = True\n verbose_name = 'Video Publicado'\n verbose_name_plural = 'Videos Publicados'\n\n\npre_save.connect(publicado_stado_pre_save, sender=Video)\npre_save.connect(slugify_pre_save, sender=Video)\npre_save.connect(publicado_stado_pre_save, sender=ProxiTodoLosVideo)\npre_save.connect(slugify_pre_save, sender=ProxiTodoLosVideo)\npre_save.connect(publicado_stado_pre_save, sender=VideoPublicadoProxy)\npre_save.connect(slugify_pre_save, sender=VideoPublicadoProxy)\n",
"step-4": "from django.db import models\nfrom django.utils import timezone\nfrom django.utils.text import slugify\nfrom django.db.models.signals import pre_save\nfrom NetFlix.db.models import PublishStateOptions\nfrom NetFlix.db.receivers import publicado_stado_pre_save, slugify_pre_save\n\n\nclass VideoQuerySet(models.QuerySet):\n\n def publicado(self):\n ahora = timezone.now()\n return self.filter(stado=PublishStateOptions.PUBLISH,\n tiempo_publicado__lte=ahora)\n\n\nclass VideoManager(models.Manager):\n\n def get_queryset(self):\n return VideoQuerySet(self.model, using=self._db)\n\n def publicado(self):\n return self.get_queryset().publicado()\n\n\nclass Video(models.Model):\n titulo = models.CharField(max_length=120)\n descripcion = models.TextField(blank=True, null=True)\n slug = models.SlugField(blank=True, null=True)\n activo = models.BooleanField(default=True)\n video_id = models.CharField(max_length=120, unique=True)\n timestamp = models.DateTimeField(auto_now_add=True)\n update = models.DateTimeField(auto_now=True)\n stado = models.CharField(max_length=2, choices=PublishStateOptions.\n choices, default=PublishStateOptions.DRAFT)\n tiempo_publicado = models.DateTimeField(auto_now_add=False, auto_now=\n False, blank=True, null=True)\n objects = VideoManager()\n\n def __str__(self):\n return self.titulo\n\n def get_video_id(self):\n if not self.es_publicado:\n return None\n return self.video_id\n\n def get_descripcion_trailer(self):\n return self.descripcion\n\n @property\n def es_publicado(self):\n if self.activo is False:\n return False\n estado = self.stado\n if estado != PublishStateOptions.PUBLISH:\n return False\n tiempo_publicado = self.tiempo_publicado\n if tiempo_publicado is None:\n return False\n ahora = timezone.now()\n return tiempo_publicado <= ahora\n\n def get_playlista_ids(self):\n return list(self.playlist_destacado.all().values_list('id', flat=True))\n\n\nclass ProxiTodoLosVideo(Video):\n\n\n class Meta:\n proxy = True\n verbose_name = 'Todo los Video'\n verbose_name_plural = 'Todos los Publicados'\n\n\nclass VideoPublicadoProxy(Video):\n\n\n class Meta:\n proxy = True\n verbose_name = 'Video Publicado'\n verbose_name_plural = 'Videos Publicados'\n\n\npre_save.connect(publicado_stado_pre_save, sender=Video)\npre_save.connect(slugify_pre_save, sender=Video)\npre_save.connect(publicado_stado_pre_save, sender=ProxiTodoLosVideo)\npre_save.connect(slugify_pre_save, sender=ProxiTodoLosVideo)\npre_save.connect(publicado_stado_pre_save, sender=VideoPublicadoProxy)\npre_save.connect(slugify_pre_save, sender=VideoPublicadoProxy)\n",
"step-5": "from django.db import models\n\nfrom django.utils import timezone\nfrom django.utils.text import slugify\n\nfrom django.db.models.signals import pre_save\n\nfrom NetFlix.db.models import PublishStateOptions\nfrom NetFlix.db.receivers import publicado_stado_pre_save, slugify_pre_save\n\n\nclass VideoQuerySet(models.QuerySet):\n\tdef publicado(self):\n\t\tahora = timezone.now()\n\t\treturn self.filter(\n\t\t\tstado = PublishStateOptions.PUBLISH,\n\t\t\ttiempo_publicado__lte=ahora\n\t\t)\n\n\nclass VideoManager(models.Manager):\n\tdef get_queryset(self):\n\t\treturn VideoQuerySet(self.model, using=self._db)\n\n\tdef publicado(self):\n\t\treturn self.get_queryset().publicado()\n\n\nclass Video(models.Model):\n\n\ttitulo = models.CharField(max_length=120)\n\tdescripcion = models.TextField(blank=True, null=True)\n\tslug = models.SlugField(blank=True, null=True)\n\tactivo = models.BooleanField(default=True)\n\tvideo_id = models.CharField(max_length=120, unique=True)\n\n\ttimestamp = models.DateTimeField(auto_now_add=True)\n\tupdate = models.DateTimeField(auto_now=True)\n\n\tstado = models.CharField(max_length=2, choices=PublishStateOptions.choices, default=PublishStateOptions.DRAFT)\n\n\ttiempo_publicado = models.DateTimeField(auto_now_add=False, auto_now=False, blank=True, null=True)\n\n\tobjects = VideoManager()\n\n\tdef __str__(self):\n\t\treturn self.titulo\n\n\tdef get_video_id(self):\n\t\tif not self.es_publicado:\n\t\t\treturn None\n\t\treturn self.video_id\n\n\tdef get_descripcion_trailer(self):\n\t\treturn self.descripcion\n\n\t@property\n\tdef es_publicado(self):\n\t\tif self.activo is False:\n\t\t\treturn False\n\t\testado = self.stado\n\t\tif estado != PublishStateOptions.PUBLISH:\n\t\t\treturn False\n\t\ttiempo_publicado = self.tiempo_publicado\n\t\tif tiempo_publicado is None:\n\t\t\treturn False\n\t\tahora = timezone.now()\n\t\treturn tiempo_publicado <= ahora\n\n\tdef get_playlista_ids(self):\n\t\t#self.<foreingkey_obj>_set.all()\n\t\t#return list(self.playlist_set.all().values_list('id', flat=True))playlist_destacado\n\t\treturn list(self.playlist_destacado.all().values_list('id', flat=True))\n\n\t#def save(self, *args, **kwargs):\n\t#\tif self.stado == self.PublishStateOptions.PUBLISH and self.tiempo_publicado is None:\n\t#\t\tprint(\"Guardado el tiempo de publicado\")\n\t#\t\tself.tiempo_publicado = timezone.now()\n\n\t#\telif self.stado == self.PublishStateOptions.DRAFT:\n\t#\t\tself.tiempo_publicado = None\n\t#\tif self.slug is None:\n\t#\t\tself.slug = slugify(self.titulo)\n\t#\tsuper().save(*args, **kwargs)\n\nclass ProxiTodoLosVideo(Video):\n\tclass Meta:\n\t\tproxy = True\n\t\tverbose_name = \"Todo los Video\"\n\t\tverbose_name_plural=\"Todos los Publicados\"\n\nclass VideoPublicadoProxy(Video):\n\tclass Meta:\n\t\tproxy =True\n\t\tverbose_name ='Video Publicado'\n\t\tverbose_name_plural = 'Videos Publicados'\n\npre_save.connect(publicado_stado_pre_save, sender=Video)\npre_save.connect(slugify_pre_save, sender=Video)\n\npre_save.connect(publicado_stado_pre_save, sender=ProxiTodoLosVideo)\npre_save.connect(slugify_pre_save, sender=ProxiTodoLosVideo)\n\npre_save.connect(publicado_stado_pre_save, sender=VideoPublicadoProxy)\npre_save.connect(slugify_pre_save, sender=VideoPublicadoProxy)\n\n",
"step-ids": [
8,
14,
15,
16,
17
]
}
|
[
8,
14,
15,
16,
17
] |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import urllib2, os, logging, webapp2, random
#use logging.info("") to print stuff
from google.appengine.ext import webapp
from webapp2_extras import sessions
from google.appengine.ext.webapp import template
from google.appengine.ext import db
from conf import USERS, SESSION_KEY
from google.appengine.ext.db import BadValueError
class Job(db.Model):
title = db.StringProperty()
link = db.LinkProperty()
notes = db.TextProperty()
location = db.StringProperty()
compensation = db.StringProperty()
user = db.StringProperty()
class BaseHandler(webapp2.RequestHandler):
def unset_session(self):
self.session['user'] = ""
def dispatch(self):
self.session_store = sessions.get_store(request=self.request)
try:
webapp2.RequestHandler.dispatch(self)
finally:
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
return self.session_store.get_session()
def render_restricted_template(self, view_filename, params={}):
if ('user' in self.session and self.session['user'] != ""):
self.render_template(view_filename, params)
else:
self.render_template('message.html', {'msg': 'Not Logged in.', 'login': True, 'Error': True})
def render_template(self, view_filename, params={}):
path = os.path.join(os.path.dirname(__file__), 'templates', view_filename)
self.response.out.write(template.render(path, params))
class MainHandler(BaseHandler):
def get(self):
jobs = db.GqlQuery("SELECT * FROM Job WHERE user =:username", username=self.session['user'])
jobs_wid = []
for job in jobs:
jobs_wid.append([job, job.key().id()])
self.render_restricted_template('index.html', {'jobs': jobs_wid})
class ActionHandler(BaseHandler):
def get(self):
self.render_restricted_template('index.html', {})
def post(self):
#modify param value
if self.request.get('action') == 'modify' and self.request.get('id') and self.request.get('param') and self.request.get('value'):
job = Job.get_by_id(int(self.request.get('id')))
setattr(job, self.request.get('param'), self.request.get('value'))
job.put()
elif self.request.get('action') == 'delete' and self.request.get('id'):
job = Job.get_by_id(int(self.request.get('id')))
job.delete()
self.render_restricted_template('index.html', {})
class AddJobHandler(BaseHandler):
def get(self):
self.render_restricted_template('index.html', {})
def post(self):
try:
if self.request.get('link'):
link = self.request.get('link')
else:
link = None
job = Job(title=self.request.get('title'), link=link, notes=self.request.get('notes'), location=self.request.get('location'), compensation=self.request.get('compensation'), user=self.session['user'])
job.put()
self.render_restricted_template('index.html', {})
except BadValueError:
self.render_template('message.html', {'msg': 'Invalid Link', 'login': False, 'Error': True})
class LoginHandler(BaseHandler):
def get(self):
self.render_template('message.html', {'msg': 'Not Logged in.', 'login': True, 'Error': True})
def post(self):
if self.request.get('username') in USERS and USERS[self.request.get('username')] == self.request.get('password'):
self.session['user'] = self.request.get('username')
self.render_template('index.html', {'login': True})
else:
self.render_template('message.html', {'msg': 'Incorrect Credentials.', 'login': True, 'Error': True})
class LogoutHandler(BaseHandler):
def get(self):
self.session['user'] = ""
self.render_template('message.html', {'msg': 'Successfully Logged Out.'})
config = {'webapp2_extras.sessions': {'secret_key': SESSION_KEY}}
app = webapp2.WSGIApplication([
webapp2.Route('/', MainHandler, name='home'),
webapp2.Route('/login', LoginHandler, name='login'),
webapp2.Route('/logout', LogoutHandler, name='logout'),
webapp2.Route('/action', ActionHandler, name='action'),
webapp2.Route('/addjob', AddJobHandler, name='addjob')
], config=config, debug=True)
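
# A quick local smoke test, not part of the original app: webapp2 requests can be
# dispatched without a dev server via webapp2.Request.blank/get_response. This is
# a minimal sketch and assumes the templates directory is present so that
# render_template can succeed.
def smoke_test():
    request = webapp2.Request.blank('/login')
    response = request.get_response(app)
    print(response.status_int)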
|
normal
|
{
    "blob_id": "e7ef8debbff20cb178a3870b9618cbb0652af5af",
    "index": 1626,
    "step-1": "#!/usr/bin/env python\n#\n# Copyright 2007 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport urllib2, os, logging, webapp2, random\n#use logging.info(\"\") to print stuff\nfrom google.appengine.ext import webapp\nfrom webapp2_extras import sessions\nfrom google.appengine.ext.webapp import template\nfrom google.appengine.ext import db\nfrom conf import USERS, SESSION_KEY\nfrom google.appengine.ext.db import BadValueError\n\nclass Job(db.Model):\n\ttitle = db.StringProperty()\n\tlink = db.LinkProperty()\n\tnotes = db.TextProperty()\n\tlocation = db.StringProperty()\n\tcompensation = db.StringProperty()\n\tuser = db.StringProperty()\n\nclass BaseHandler(webapp2.RequestHandler):\n\tdef unset_session(self):\n\t\tself.session['user'] = \"\"\n\n\tdef dispatch(self):\n\t\tself.session_store = sessions.get_store(request=self.request)\n\t\ttry:\n\t\t\twebapp2.RequestHandler.dispatch(self)\n\t\tfinally:\n\t\t\tself.session_store.save_sessions(self.response)\n\n\[email protected]_property\n\tdef session(self):\n\t\treturn self.session_store.get_session()\n\n\tdef render_restricted_template(self, view_filename, params={}):\n\t\tif ('user' in self.session and self.session['user'] != \"\"):\n\t\t\tself.render_template(view_filename, params)\n\t\telse:\n\t\t\tself.render_template('message.html', {'msg': 'Not Logged in.', 'login': True, 'Error': True})\n\t\t\n\tdef render_template(self, view_filename, params={}):\n\t\tpath = os.path.join(os.path.dirname(__file__), 'templates', view_filename)\n\t\tself.response.out.write(template.render(path, params))\n\nclass MainHandler(BaseHandler):\n\tdef get(self):\n\t\tjobs = db.GqlQuery(\"SELECT * FROM Job WHERE user =:username\", username=self.session['user'])\n\t\tjobs_wid = []\n\t\tfor job in jobs:\n\t\t\tjobs_wid.append([job, job.key().id()])\n\t\tself.render_restricted_template('index.html', {'jobs': jobs_wid})\n\nclass ActionHandler(BaseHandler):\n\tdef get(self):\n\t\tself.render_restricted_template('index.html', {})\n\tdef post(self):\n\t\t#modify param value\n\t\tif self.request.get('action') == 'modify' and self.request.get('id') and self.request.get('param') and self.request.get('value'):\n\t\t\tjob = Job.get_by_id(int(self.request.get('id')))\n\t\t\tsetattr(job, self.request.get('param'), self.request.get('value'))\n\t\t\tjob.put()\n\t\telif self.request.get('action') == 'delete' and self.request.get('id'):\n\t\t\tjob = Job.get_by_id(int(self.request.get('id')))\n\t\t\tjob.delete()\n\t\tself.render_restricted_template('index.html', {})\n\nclass AddJobHandler(BaseHandler):\n\tdef get(self):\n\t\tself.render_restricted_template('index.html', {})\n\tdef post(self):\n\t\ttry:\n\t\t\tif self.request.get('link'):\n\t\t\t\tlink = self.request.get('link')\n\t\t\telse:\n\t\t\t\tlink = None\n\t\t\tjob = Job(title=self.request.get('title'), link=link, notes=self.request.get('notes'), location=self.request.get('location'), compensation=self.request.get('compensation'), user=self.session['user'])\n\t\t\tjob.put()\n\t\t\tself.render_restricted_template('index.html', {})\n\t\texcept BadValueError:\n\t\t\tself.render_template('message.html', {'msg': 'Invalid Link', 'login': False, 'Error': True})\n\n\nclass LoginHandler(BaseHandler):\n\tdef get(self):\n\t\tself.render_template('message.html', {'msg': 'Not Logged in.', 'login': True, 'Error': True})\n\tdef post(self):\n\t\tif self.request.get('username') in USERS and USERS[self.request.get('username')] == self.request.get('password'):\n\t\t\tself.session['user'] = self.request.get('username')\n\t\t\tself.render_template('index.html', {'login': True})\n\t\telse:\n\t\t\tself.render_template('message.html', {'msg': 'Incorrect Credentials.', 'login': True, 'Error': True})\n\nclass LogoutHandler(BaseHandler):\n    def get(self):\n\t\tself.session['user'] = \"\"\n\t\tself.render_template('message.html', {'msg': 'Successfully Logged Out.'})\n\nconfig = {'webapp2_extras.sessions': {'secret_key': SESSION_KEY}}\napp = webapp2.WSGIApplication([\n    webapp2.Route('/', MainHandler, name='home'),\n    webapp2.Route('/login', LoginHandler, name='login'),\n    webapp2.Route('/logout', LogoutHandler, name='logout'),\n    webapp2.Route('/action', ActionHandler, name='action'),\n    webapp2.Route('/addjob', AddJobHandler, name='addjob')\n], config=config, debug=True)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import sys
sys.path.append("..")
from packages import bitso as BS
from packages import account as ACCOUNT
from packages import currency_pair as CP
account=ACCOUNT.Account('577e4a03-540f9610-f686d434-qz5c4v5b6n','dd7b02f5-c286e9d4-f2cc78c3-bfab3')
bs=BS.Bitso(account)
currency_pair=CP.CurrencyPair('btc','xmn')
depth=bs.depth(currency_pair)
a=1
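
# A hedged inspection sketch: the local packages module is not shown, so the
# assumed layout of the returned order book ({'bids': [...], 'asks': [...]},
# best price first) is a guess, not the documented return type of bs.depth().
if isinstance(depth, dict) and depth.get('bids') and depth.get('asks'):
    print('best bid:', depth['bids'][0], 'best ask:', depth['asks'][0])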
|
normal
|
{
"blob_id": "03147de944c4f75417006a5087e75354dba644ec",
"index": 6339,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsys.path.append('..')\n<mask token>\n",
"step-3": "<mask token>\nsys.path.append('..')\n<mask token>\naccount = ACCOUNT.Account('577e4a03-540f9610-f686d434-qz5c4v5b6n',\n 'dd7b02f5-c286e9d4-f2cc78c3-bfab3')\nbs = BS.Bitso(account)\ncurrency_pair = CP.CurrencyPair('btc', 'xmn')\ndepth = bs.depth(currency_pair)\na = 1\n",
"step-4": "import sys\nsys.path.append('..')\nfrom packages import bitso as BS\nfrom packages import account as ACCOUNT\nfrom packages import currency_pair as CP\naccount = ACCOUNT.Account('577e4a03-540f9610-f686d434-qz5c4v5b6n',\n 'dd7b02f5-c286e9d4-f2cc78c3-bfab3')\nbs = BS.Bitso(account)\ncurrency_pair = CP.CurrencyPair('btc', 'xmn')\ndepth = bs.depth(currency_pair)\na = 1\n",
"step-5": "import sys\nsys.path.append(\"..\")\nfrom packages import bitso as BS\nfrom packages import account as ACCOUNT\nfrom packages import currency_pair as CP\n\naccount=ACCOUNT.Account('577e4a03-540f9610-f686d434-qz5c4v5b6n','dd7b02f5-c286e9d4-f2cc78c3-bfab3')\nbs=BS.Bitso(account)\n\ncurrency_pair=CP.CurrencyPair('btc','xmn')\ndepth=bs.depth(currency_pair)\na=1\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# tests whether an application in test mode is being built
def test_config(app):
assert app.testing
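
# The `app` fixture normally lives in a conftest.py that is not shown here; a
# minimal stand-in, assuming a Flask-style app under test (Flask itself is an
# assumption, not confirmed by this file):
import pytest
from flask import Flask

@pytest.fixture
def app():
    flask_app = Flask(__name__)
    flask_app.config['TESTING'] = True  # also flips flask_app.testing to True
    yield flask_app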
|
normal
|
{
"blob_id": "96d7963faf720a3dc0d96b55ad65ee7ac83c1818",
"index": 5798,
"step-1": "<mask token>\n",
"step-2": "def test_config(app):\n assert app.testing\n",
"step-3": "# testa se uma aplicacao em modo de teste esta sendo construida\ndef test_config(app):\n assert app.testing\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# groupby()
# groupby() picks out adjacent repeated elements of an iterator and groups them together:
import itertools
for key, group in itertools.groupby('ABAABBBCCAAA'):
print(key, list(group))
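
# Because groupby() only merges *adjacent* duplicates, the input is usually
# sorted first when all equal elements should end up in a single group:
for key, group in itertools.groupby(sorted('ABAABBBCCAAA')):
    print(key, list(group))   # A x6, B x4, C x2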
# Summary
# All of the functions provided by the itertools module operate on iteration; they return Iterators rather than lists, and the actual computation only happens when you iterate over them with a for loop.
|
normal
|
{
"blob_id": "b5568e84e19719f0fd72197ead47bd050e09f55d",
"index": 7310,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor key, group in itertools.groupby('ABAABBBCCAAA'):\n print(key, list(group))\n",
"step-3": "import itertools\nfor key, group in itertools.groupby('ABAABBBCCAAA'):\n print(key, list(group))\n",
"step-4": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\n\n# groupby()\n# groupby()把迭代器中相邻的重复元素挑出来放在一起:\nimport itertools\nfor key, group in itertools.groupby('ABAABBBCCAAA'):\n print(key, list(group))\n\n\n# 小结\n# itertools模块提供的全部是处理迭代功能的函数,它们的返回值不是list,而是Iterator,只有用for循环迭代的时候才真正计算。\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import hashlib
import math
import random
from set5.ch_4 import get_num_byte_len
class Server:
def __init__(self):
self.private_key = random.randint(0, 2**100)
self.salt = random.randint(0, 2**100)
self.salt_bytes = self.salt.to_bytes(
byteorder="big",
length=get_num_byte_len(self.salt)
)
self.u = random.randint(0, 2**128)
def agree_params(self, n, g, password):
self.n = n
self.g = g
self.generate_password_params(password)
def generate_password_params(self, password):
hasher = hashlib.sha256()
hasher.update(self.salt_bytes + password.encode("ascii"))
x = int(hasher.digest().hex(), 16)
self.v = pow(self.g, x, self.n)
def send_salt_public_key_u(self, client):
self.public_key = pow(self.g, self.private_key, self.n)
client.accept_salt_public_key_u(self.salt, self.public_key, self.u)
def accept_public_key(self, client_public_key):
self.client_public_key = client_public_key
def compute_hashes(self):
self.s = pow(self.client_public_key * pow(self.v, self.u, self.n), self.private_key, self.n)
s_bytes = self.s.to_bytes(
byteorder="big",
length=get_num_byte_len(self.s)
)
hasher = hashlib.sha256()
hasher.update(s_bytes)
self.k = hasher.digest()
def authenticate(self, client_hmac):
hasher = hashlib.sha256()
hasher.update(self.k + self.salt_bytes)
check_hmac = hasher.digest().hex()
if check_hmac == client_hmac:
return True
else:
print(check_hmac, client_hmac)
return False
class Client:
def __init__(self, n, g, password):
self.n = n
self.g = g
self.password = password
self.private_key = random.randint(0, 2**100)
def agree_params(self, server):
server.agree_params(self.n, self.g, self.password)
def accept_salt_public_key_u(self, salt, server_public_key, u):
self.salt = salt
self.salt_bytes = self.salt.to_bytes(
byteorder="big",
length=get_num_byte_len(self.salt)
)
self.server_public_key = server_public_key
self.u = u
def send_public_key(self, server):
self.public_key = pow(self.g, self.private_key, self.n)
server.accept_public_key(self.public_key)
def compute_hashes(self):
hasher = hashlib.sha256()
hasher.update(self.salt_bytes + self.password.encode("ascii"))
x = int(hasher.digest().hex(), 16)
self.s = pow(self.server_public_key, self.private_key + (self.u * x), self.n)
s_bytes = self.s.to_bytes(
byteorder="big",
length=get_num_byte_len(self.s)
)
hasher = hashlib.sha256()
hasher.update(s_bytes)
self.k = hasher.digest()
def authenticate(self, server):
hasher = hashlib.sha256()
hasher.update(self.k + self.salt_bytes)
client_hmac = hasher.digest().hex()
if server.authenticate(client_hmac):
print("Successfully authenticated")
else:
raise Exception("Failed to authenticate")
class BadServer(Server):
def __init__(self, n, g):
self.private_key = random.randint(0, 2**100)
self.salt = random.randint(0, 2**100)
self.salt_bytes = self.salt.to_bytes(
byteorder="big",
length=get_num_byte_len(self.salt)
)
self.u = random.randint(0, 2**128)
self.n = n
self.g = g
def compute_hashes(self):
pass
def authenticate(self, client_hmac):
self.client_hmac = client_hmac
return True
def load_dict(self, path_to_dict):
with open(path_to_dict) as dict_file:
self.valid_words = set(dict_file.read().split())
def crack_password(self, path_to_dict):
self.load_dict(path_to_dict)
for w in self.valid_words:
hasher_x = hashlib.sha256()
hasher_x.update(self.salt_bytes + w.encode("ascii"))
x = int(hasher_x.digest().hex(), 16)
v = pow(self.g, x, self.n)
s = pow(self.client_public_key * pow(v, self.u, self.n), self.private_key, self.n)
s_bytes = s.to_bytes(
byteorder="big",
length=get_num_byte_len(s)
)
hasher_k = hashlib.sha256()
hasher_k.update(s_bytes)
k = hasher_k.digest()
hasher_hmac = hashlib.sha256()
hasher_hmac.update(k + self.salt_bytes)
check_hmac = hasher_hmac.digest().hex()
if check_hmac == self.client_hmac:
print("Successfully cracked password. Password = {}".format(w))
return
raise Exception("Failed to crack password")
def attempt_simple_srp_authenticate(client, server):
client.agree_params(server)
client.send_public_key(server)
server.send_salt_public_key_u(client)
server.compute_hashes()
client.compute_hashes()
client.authenticate(server)
def crack_simple_srp(client, server):
client.send_public_key(server)
server.send_salt_public_key_u(client)
server.compute_hashes()
client.compute_hashes()
client.authenticate(server)
server.crack_password("/Users/Adam/Dev/cryptopals_resources/words.txt")
if __name__=="__main__":
nist_p_hex = "ffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca237327ffffffffffffffff"
nist_p_bytearr = bytearray.fromhex(nist_p_hex)
n = int.from_bytes(nist_p_bytearr, byteorder="big")
g = 2
password = "castle"
client = Client(n, g, password)
server = Server()
attempt_simple_srp_authenticate(client, server)
naive_client = Client(n, g, password)
bad_server = BadServer(n, g)
crack_simple_srp(naive_client, bad_server)
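
# Hardening note (an addition, not part of the original challenge code): the
# authenticate() methods above compare MAC hex strings with ==, which is not
# constant-time; the standard library offers a timing-safe alternative.
import hmac

def hmacs_match(a_hex, b_hex):
    # compares in constant time, unlike a_hex == b_hex
    return hmac.compare_digest(a_hex, b_hex)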
|
normal
|
{
"blob_id": "cf7aeacedec211e76f2bfcb7f6e3cb06dbfdc36e",
"index": 3907,
"step-1": "<mask token>\n\n\nclass Server:\n\n def __init__(self):\n self.private_key = random.randint(0, 2 ** 100)\n self.salt = random.randint(0, 2 ** 100)\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.u = random.randint(0, 2 ** 128)\n\n def agree_params(self, n, g, password):\n self.n = n\n self.g = g\n self.generate_password_params(password)\n <mask token>\n <mask token>\n <mask token>\n\n def compute_hashes(self):\n self.s = pow(self.client_public_key * pow(self.v, self.u, self.n),\n self.private_key, self.n)\n s_bytes = self.s.to_bytes(byteorder='big', length=get_num_byte_len(\n self.s))\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n <mask token>\n\n\nclass Client:\n\n def __init__(self, n, g, password):\n self.n = n\n self.g = g\n self.password = password\n self.private_key = random.randint(0, 2 ** 100)\n\n def agree_params(self, server):\n server.agree_params(self.n, self.g, self.password)\n\n def accept_salt_public_key_u(self, salt, server_public_key, u):\n self.salt = salt\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.server_public_key = server_public_key\n self.u = u\n\n def send_public_key(self, server):\n self.public_key = pow(self.g, self.private_key, self.n)\n server.accept_public_key(self.public_key)\n\n def compute_hashes(self):\n hasher = hashlib.sha256()\n hasher.update(self.salt_bytes + self.password.encode('ascii'))\n x = int(hasher.digest().hex(), 16)\n self.s = pow(self.server_public_key, self.private_key + self.u * x,\n self.n)\n s_bytes = self.s.to_bytes(byteorder='big', length=get_num_byte_len(\n self.s))\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, server):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n client_hmac = hasher.digest().hex()\n if server.authenticate(client_hmac):\n print('Successfully authenticated')\n else:\n raise Exception('Failed to authenticate')\n\n\nclass BadServer(Server):\n\n def __init__(self, n, g):\n self.private_key = random.randint(0, 2 ** 100)\n self.salt = random.randint(0, 2 ** 100)\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.u = random.randint(0, 2 ** 128)\n self.n = n\n self.g = g\n\n def compute_hashes(self):\n pass\n\n def authenticate(self, client_hmac):\n self.client_hmac = client_hmac\n return True\n\n def load_dict(self, path_to_dict):\n with open(path_to_dict) as dict_file:\n self.valid_words = set(dict_file.read().split())\n\n def crack_password(self, path_to_dict):\n self.load_dict(path_to_dict)\n for w in self.valid_words:\n hasher_x = hashlib.sha256()\n hasher_x.update(self.salt_bytes + w.encode('ascii'))\n x = int(hasher_x.digest().hex(), 16)\n v = pow(self.g, x, self.n)\n s = pow(self.client_public_key * pow(v, self.u, self.n), self.\n private_key, self.n)\n s_bytes = s.to_bytes(byteorder='big', length=get_num_byte_len(s))\n hasher_k = hashlib.sha256()\n hasher_k.update(s_bytes)\n k = hasher_k.digest()\n hasher_hmac = hashlib.sha256()\n hasher_hmac.update(k + self.salt_bytes)\n check_hmac = hasher_hmac.digest().hex()\n if check_hmac == self.client_hmac:\n print('Successfully cracked password. Password = {}'.format(w))\n return\n raise Exception('Failed to crack password')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Server:\n\n def __init__(self):\n self.private_key = random.randint(0, 2 ** 100)\n self.salt = random.randint(0, 2 ** 100)\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.u = random.randint(0, 2 ** 128)\n\n def agree_params(self, n, g, password):\n self.n = n\n self.g = g\n self.generate_password_params(password)\n <mask token>\n <mask token>\n\n def accept_public_key(self, client_public_key):\n self.client_public_key = client_public_key\n\n def compute_hashes(self):\n self.s = pow(self.client_public_key * pow(self.v, self.u, self.n),\n self.private_key, self.n)\n s_bytes = self.s.to_bytes(byteorder='big', length=get_num_byte_len(\n self.s))\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, client_hmac):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n check_hmac = hasher.digest().hex()\n if check_hmac == client_hmac:\n return True\n else:\n print(check_hmac, client_hmac)\n return False\n\n\nclass Client:\n\n def __init__(self, n, g, password):\n self.n = n\n self.g = g\n self.password = password\n self.private_key = random.randint(0, 2 ** 100)\n\n def agree_params(self, server):\n server.agree_params(self.n, self.g, self.password)\n\n def accept_salt_public_key_u(self, salt, server_public_key, u):\n self.salt = salt\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.server_public_key = server_public_key\n self.u = u\n\n def send_public_key(self, server):\n self.public_key = pow(self.g, self.private_key, self.n)\n server.accept_public_key(self.public_key)\n\n def compute_hashes(self):\n hasher = hashlib.sha256()\n hasher.update(self.salt_bytes + self.password.encode('ascii'))\n x = int(hasher.digest().hex(), 16)\n self.s = pow(self.server_public_key, self.private_key + self.u * x,\n self.n)\n s_bytes = self.s.to_bytes(byteorder='big', length=get_num_byte_len(\n self.s))\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, server):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n client_hmac = hasher.digest().hex()\n if server.authenticate(client_hmac):\n print('Successfully authenticated')\n else:\n raise Exception('Failed to authenticate')\n\n\nclass BadServer(Server):\n\n def __init__(self, n, g):\n self.private_key = random.randint(0, 2 ** 100)\n self.salt = random.randint(0, 2 ** 100)\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.u = random.randint(0, 2 ** 128)\n self.n = n\n self.g = g\n\n def compute_hashes(self):\n pass\n\n def authenticate(self, client_hmac):\n self.client_hmac = client_hmac\n return True\n\n def load_dict(self, path_to_dict):\n with open(path_to_dict) as dict_file:\n self.valid_words = set(dict_file.read().split())\n\n def crack_password(self, path_to_dict):\n self.load_dict(path_to_dict)\n for w in self.valid_words:\n hasher_x = hashlib.sha256()\n hasher_x.update(self.salt_bytes + w.encode('ascii'))\n x = int(hasher_x.digest().hex(), 16)\n v = pow(self.g, x, self.n)\n s = pow(self.client_public_key * pow(v, self.u, self.n), self.\n private_key, self.n)\n s_bytes = s.to_bytes(byteorder='big', length=get_num_byte_len(s))\n hasher_k = hashlib.sha256()\n hasher_k.update(s_bytes)\n k = hasher_k.digest()\n hasher_hmac = hashlib.sha256()\n hasher_hmac.update(k + self.salt_bytes)\n check_hmac = hasher_hmac.digest().hex()\n 
if check_hmac == self.client_hmac:\n print('Successfully cracked password. Password = {}'.format(w))\n return\n raise Exception('Failed to crack password')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Server:\n\n def __init__(self):\n self.private_key = random.randint(0, 2 ** 100)\n self.salt = random.randint(0, 2 ** 100)\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.u = random.randint(0, 2 ** 128)\n\n def agree_params(self, n, g, password):\n self.n = n\n self.g = g\n self.generate_password_params(password)\n <mask token>\n\n def send_salt_public_key_u(self, client):\n self.public_key = pow(self.g, self.private_key, self.n)\n client.accept_salt_public_key_u(self.salt, self.public_key, self.u)\n\n def accept_public_key(self, client_public_key):\n self.client_public_key = client_public_key\n\n def compute_hashes(self):\n self.s = pow(self.client_public_key * pow(self.v, self.u, self.n),\n self.private_key, self.n)\n s_bytes = self.s.to_bytes(byteorder='big', length=get_num_byte_len(\n self.s))\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, client_hmac):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n check_hmac = hasher.digest().hex()\n if check_hmac == client_hmac:\n return True\n else:\n print(check_hmac, client_hmac)\n return False\n\n\nclass Client:\n\n def __init__(self, n, g, password):\n self.n = n\n self.g = g\n self.password = password\n self.private_key = random.randint(0, 2 ** 100)\n\n def agree_params(self, server):\n server.agree_params(self.n, self.g, self.password)\n\n def accept_salt_public_key_u(self, salt, server_public_key, u):\n self.salt = salt\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.server_public_key = server_public_key\n self.u = u\n\n def send_public_key(self, server):\n self.public_key = pow(self.g, self.private_key, self.n)\n server.accept_public_key(self.public_key)\n\n def compute_hashes(self):\n hasher = hashlib.sha256()\n hasher.update(self.salt_bytes + self.password.encode('ascii'))\n x = int(hasher.digest().hex(), 16)\n self.s = pow(self.server_public_key, self.private_key + self.u * x,\n self.n)\n s_bytes = self.s.to_bytes(byteorder='big', length=get_num_byte_len(\n self.s))\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, server):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n client_hmac = hasher.digest().hex()\n if server.authenticate(client_hmac):\n print('Successfully authenticated')\n else:\n raise Exception('Failed to authenticate')\n\n\nclass BadServer(Server):\n\n def __init__(self, n, g):\n self.private_key = random.randint(0, 2 ** 100)\n self.salt = random.randint(0, 2 ** 100)\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.u = random.randint(0, 2 ** 128)\n self.n = n\n self.g = g\n\n def compute_hashes(self):\n pass\n\n def authenticate(self, client_hmac):\n self.client_hmac = client_hmac\n return True\n\n def load_dict(self, path_to_dict):\n with open(path_to_dict) as dict_file:\n self.valid_words = set(dict_file.read().split())\n\n def crack_password(self, path_to_dict):\n self.load_dict(path_to_dict)\n for w in self.valid_words:\n hasher_x = hashlib.sha256()\n hasher_x.update(self.salt_bytes + w.encode('ascii'))\n x = int(hasher_x.digest().hex(), 16)\n v = pow(self.g, x, self.n)\n s = pow(self.client_public_key * pow(v, self.u, self.n), self.\n private_key, self.n)\n s_bytes = s.to_bytes(byteorder='big', length=get_num_byte_len(s))\n hasher_k = hashlib.sha256()\n 
hasher_k.update(s_bytes)\n k = hasher_k.digest()\n hasher_hmac = hashlib.sha256()\n hasher_hmac.update(k + self.salt_bytes)\n check_hmac = hasher_hmac.digest().hex()\n if check_hmac == self.client_hmac:\n print('Successfully cracked password. Password = {}'.format(w))\n return\n raise Exception('Failed to crack password')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Server:\n\n def __init__(self):\n self.private_key = random.randint(0, 2 ** 100)\n self.salt = random.randint(0, 2 ** 100)\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.u = random.randint(0, 2 ** 128)\n\n def agree_params(self, n, g, password):\n self.n = n\n self.g = g\n self.generate_password_params(password)\n\n def generate_password_params(self, password):\n hasher = hashlib.sha256()\n hasher.update(self.salt_bytes + password.encode('ascii'))\n x = int(hasher.digest().hex(), 16)\n self.v = pow(self.g, x, self.n)\n\n def send_salt_public_key_u(self, client):\n self.public_key = pow(self.g, self.private_key, self.n)\n client.accept_salt_public_key_u(self.salt, self.public_key, self.u)\n\n def accept_public_key(self, client_public_key):\n self.client_public_key = client_public_key\n\n def compute_hashes(self):\n self.s = pow(self.client_public_key * pow(self.v, self.u, self.n),\n self.private_key, self.n)\n s_bytes = self.s.to_bytes(byteorder='big', length=get_num_byte_len(\n self.s))\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, client_hmac):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n check_hmac = hasher.digest().hex()\n if check_hmac == client_hmac:\n return True\n else:\n print(check_hmac, client_hmac)\n return False\n\n\nclass Client:\n\n def __init__(self, n, g, password):\n self.n = n\n self.g = g\n self.password = password\n self.private_key = random.randint(0, 2 ** 100)\n\n def agree_params(self, server):\n server.agree_params(self.n, self.g, self.password)\n\n def accept_salt_public_key_u(self, salt, server_public_key, u):\n self.salt = salt\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.server_public_key = server_public_key\n self.u = u\n\n def send_public_key(self, server):\n self.public_key = pow(self.g, self.private_key, self.n)\n server.accept_public_key(self.public_key)\n\n def compute_hashes(self):\n hasher = hashlib.sha256()\n hasher.update(self.salt_bytes + self.password.encode('ascii'))\n x = int(hasher.digest().hex(), 16)\n self.s = pow(self.server_public_key, self.private_key + self.u * x,\n self.n)\n s_bytes = self.s.to_bytes(byteorder='big', length=get_num_byte_len(\n self.s))\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, server):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n client_hmac = hasher.digest().hex()\n if server.authenticate(client_hmac):\n print('Successfully authenticated')\n else:\n raise Exception('Failed to authenticate')\n\n\nclass BadServer(Server):\n\n def __init__(self, n, g):\n self.private_key = random.randint(0, 2 ** 100)\n self.salt = random.randint(0, 2 ** 100)\n self.salt_bytes = self.salt.to_bytes(byteorder='big', length=\n get_num_byte_len(self.salt))\n self.u = random.randint(0, 2 ** 128)\n self.n = n\n self.g = g\n\n def compute_hashes(self):\n pass\n\n def authenticate(self, client_hmac):\n self.client_hmac = client_hmac\n return True\n\n def load_dict(self, path_to_dict):\n with open(path_to_dict) as dict_file:\n self.valid_words = set(dict_file.read().split())\n\n def crack_password(self, path_to_dict):\n self.load_dict(path_to_dict)\n for w in self.valid_words:\n hasher_x = hashlib.sha256()\n hasher_x.update(self.salt_bytes + w.encode('ascii'))\n x = int(hasher_x.digest().hex(), 16)\n v = pow(self.g, x, self.n)\n s 
= pow(self.client_public_key * pow(v, self.u, self.n), self.\n private_key, self.n)\n s_bytes = s.to_bytes(byteorder='big', length=get_num_byte_len(s))\n hasher_k = hashlib.sha256()\n hasher_k.update(s_bytes)\n k = hasher_k.digest()\n hasher_hmac = hashlib.sha256()\n hasher_hmac.update(k + self.salt_bytes)\n check_hmac = hasher_hmac.digest().hex()\n if check_hmac == self.client_hmac:\n print('Successfully cracked password. Password = {}'.format(w))\n return\n raise Exception('Failed to crack password')\n\n\ndef attempt_simple_srp_authenticate(client, server):\n client.agree_params(server)\n client.send_public_key(server)\n server.send_salt_public_key_u(client)\n server.compute_hashes()\n client.compute_hashes()\n client.authenticate(server)\n\n\ndef crack_simple_srp(client, server):\n client.send_public_key(server)\n server.send_salt_public_key_u(client)\n server.compute_hashes()\n client.compute_hashes()\n client.authenticate(server)\n server.crack_password('/Users/Adam/Dev/cryptopals_resources/words.txt')\n\n\nif __name__ == '__main__':\n nist_p_hex = (\n 'ffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca237327ffffffffffffffff'\n )\n nist_p_bytearr = bytearray.fromhex(nist_p_hex)\n n = int.from_bytes(nist_p_bytearr, byteorder='big')\n g = 2\n password = 'castle'\n client = Client(n, g, password)\n server = Server()\n attempt_simple_srp_authenticate(client, server)\n naive_client = Client(n, g, password)\n bad_server = BadServer(n, g)\n crack_simple_srp(naive_client, bad_server)\n",
"step-5": "import hashlib\nimport math\nimport random \n\nfrom set5.ch_4 import get_num_byte_len\n\nclass Server:\n def __init__(self):\n self.private_key = random.randint(0, 2**100)\n self.salt = random.randint(0, 2**100)\n self.salt_bytes = self.salt.to_bytes(\n byteorder=\"big\", \n length=get_num_byte_len(self.salt)\n )\n self.u = random.randint(0, 2**128)\n\n def agree_params(self, n, g, password):\n self.n = n\n self.g = g\n self.generate_password_params(password)\n\n def generate_password_params(self, password):\n hasher = hashlib.sha256()\n hasher.update(self.salt_bytes + password.encode(\"ascii\"))\n x = int(hasher.digest().hex(), 16)\n self.v = pow(self.g, x, self.n)\n\n def send_salt_public_key_u(self, client):\n self.public_key = pow(self.g, self.private_key, self.n)\n client.accept_salt_public_key_u(self.salt, self.public_key, self.u)\n\n def accept_public_key(self, client_public_key):\n self.client_public_key = client_public_key\n\n def compute_hashes(self):\n self.s = pow(self.client_public_key * pow(self.v, self.u, self.n), self.private_key, self.n)\n s_bytes = self.s.to_bytes(\n byteorder=\"big\", \n length=get_num_byte_len(self.s)\n )\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, client_hmac):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n check_hmac = hasher.digest().hex()\n if check_hmac == client_hmac:\n return True\n else:\n print(check_hmac, client_hmac)\n return False\n\nclass Client:\n def __init__(self, n, g, password):\n self.n = n\n self.g = g\n self.password = password\n self.private_key = random.randint(0, 2**100)\n\n def agree_params(self, server):\n server.agree_params(self.n, self.g, self.password)\n\n def accept_salt_public_key_u(self, salt, server_public_key, u):\n self.salt = salt\n self.salt_bytes = self.salt.to_bytes(\n byteorder=\"big\", \n length=get_num_byte_len(self.salt)\n )\n self.server_public_key = server_public_key\n self.u = u\n\n def send_public_key(self, server):\n self.public_key = pow(self.g, self.private_key, self.n)\n server.accept_public_key(self.public_key)\n\n def compute_hashes(self):\n hasher = hashlib.sha256()\n hasher.update(self.salt_bytes + self.password.encode(\"ascii\"))\n x = int(hasher.digest().hex(), 16)\n self.s = pow(self.server_public_key, self.private_key + (self.u * x), self.n)\n s_bytes = self.s.to_bytes(\n byteorder=\"big\", \n length=get_num_byte_len(self.s)\n )\n hasher = hashlib.sha256()\n hasher.update(s_bytes)\n self.k = hasher.digest()\n\n def authenticate(self, server):\n hasher = hashlib.sha256()\n hasher.update(self.k + self.salt_bytes)\n client_hmac = hasher.digest().hex()\n if server.authenticate(client_hmac):\n print(\"Successfully authenticated\") \n else:\n raise Exception(\"Failed to authenticate\")\n\n\nclass BadServer(Server):\n def __init__(self, n, g):\n self.private_key = random.randint(0, 2**100)\n self.salt = random.randint(0, 2**100)\n self.salt_bytes = self.salt.to_bytes(\n byteorder=\"big\", \n length=get_num_byte_len(self.salt)\n )\n self.u = random.randint(0, 2**128)\n self.n = n\n self.g = g\n\n \n def compute_hashes(self):\n pass\n\n def authenticate(self, client_hmac):\n self.client_hmac = client_hmac \n return True\n\n def load_dict(self, path_to_dict):\n with open(path_to_dict) as dict_file:\n self.valid_words = set(dict_file.read().split())\n\n def crack_password(self, path_to_dict):\n self.load_dict(path_to_dict)\n for w in self.valid_words:\n hasher_x = hashlib.sha256()\n 
hasher_x.update(self.salt_bytes + w.encode(\"ascii\"))\n x = int(hasher_x.digest().hex(), 16)\n v = pow(self.g, x, self.n)\n s = pow(self.client_public_key * pow(v, self.u, self.n), self.private_key, self.n)\n s_bytes = s.to_bytes(\n byteorder=\"big\", \n length=get_num_byte_len(s)\n )\n hasher_k = hashlib.sha256() \n hasher_k.update(s_bytes)\n k = hasher_k.digest()\n hasher_hmac = hashlib.sha256()\n hasher_hmac.update(k + self.salt_bytes)\n check_hmac = hasher_hmac.digest().hex()\n if check_hmac == self.client_hmac:\n print(\"Successfully cracked password. Password = {}\".format(w))\n return\n raise Exception(\"Failed to crack password\") \n\n \n\ndef attempt_simple_srp_authenticate(client, server):\n client.agree_params(server)\n client.send_public_key(server)\n server.send_salt_public_key_u(client)\n server.compute_hashes()\n client.compute_hashes()\n client.authenticate(server)\n\ndef crack_simple_srp(client, server):\n client.send_public_key(server)\n server.send_salt_public_key_u(client)\n server.compute_hashes()\n client.compute_hashes()\n client.authenticate(server)\n server.crack_password(\"/Users/Adam/Dev/cryptopals_resources/words.txt\")\n\nif __name__==\"__main__\":\n nist_p_hex = \"ffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca237327ffffffffffffffff\"\n nist_p_bytearr = bytearray.fromhex(nist_p_hex)\n n = int.from_bytes(nist_p_bytearr, byteorder=\"big\")\n g = 2\n \n password = \"castle\"\n\n client = Client(n, g, password)\n server = Server()\n attempt_simple_srp_authenticate(client, server)\n\n naive_client = Client(n, g, password)\n bad_server = BadServer(n, g)\n crack_simple_srp(naive_client, bad_server)\n",
"step-ids": [
17,
19,
20,
24,
26
]
}
|
[
17,
19,
20,
24,
26
] |
import sys
import pygame
import pygame.camera
from pygame.locals import *
from PIL import Image
pygame.init()
pygame.camera.init()
camlist = pygame.camera.list_cameras()
print(camlist)
# images = map(Image.open, ['Test1.jpg', 'Test2.jpg', 'Test3.jpg'])
# widths, heights = zip(*(i.size for i in images))
# total_width = sum(widths)
# max_height = max(heights)
# new_im = Image.new('RGB', (total_width, max_height))
# x_offset = 0
# for im in images:
# new_im.paste(im, (x_offset,0))
# x_offset += im.size[0]
# new_im.save('test.jpg')
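
# A minimal capture sketch, assuming at least one camera was listed above;
# Camera/start/get_image/stop are the standard pygame.camera calls.
if camlist:
    cam = pygame.camera.Camera(camlist[0], (640, 480))
    cam.start()
    snapshot = cam.get_image()                   # a pygame.Surface
    cam.stop()
    pygame.image.save(snapshot, 'snapshot.jpg')  # output filename is arbitrary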
|
normal
|
{
"blob_id": "aae280e049c00e70e2214662a07eee8bfa29227e",
"index": 6632,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npygame.init()\npygame.camera.init()\n<mask token>\nprint(camlist)\n",
"step-3": "<mask token>\npygame.init()\npygame.camera.init()\ncamlist = pygame.camera.list_cameras()\nprint(camlist)\n",
"step-4": "import sys\nimport pygame\nimport pygame.camera\nfrom pygame.locals import *\nfrom PIL import Image\npygame.init()\npygame.camera.init()\ncamlist = pygame.camera.list_cameras()\nprint(camlist)\n",
"step-5": "import sys\nimport pygame\nimport pygame.camera\nfrom pygame.locals import *\nfrom PIL import Image\n\n\npygame.init()\npygame.camera.init()\n\ncamlist = pygame.camera.list_cameras()\n\nprint(camlist)\n\n# images = map(Image.open, ['Test1.jpg', 'Test2.jpg', 'Test3.jpg'])\n# widths, heights = zip(*(i.size for i in images))\n\n# total_width = sum(widths)\n# max_height = max(heights)\n\n# new_im = Image.new('RGB', (total_width, max_height))\n\n# x_offset = 0\n# for im in images:\n# new_im.paste(im, (x_offset,0))\n# x_offset += im.size[0]\n\n# new_im.save('test.jpg')",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import cv2
import imutils
import detect
def detectByPathVideo(path, writer):
video = cv2.VideoCapture(path)
check, frame = video.read()
if check == False:
print('Video Not Found. Please Enter a Valid Path (Full path of Video Should be Provided).')
return
print('Detecting people...')
while video.isOpened():
#check is True if reading was successful
check, frame = video.read()
if check:
frame = imutils.resize(frame , width=min(800,frame.shape[1]))
frame = detect.detect(frame)
if writer is not None:
writer.write(frame)
key = cv2.waitKey(1)
if key== ord('q'):
break
else:
break
video.release()
cv2.destroyAllWindows()
def detectByCamera(writer):
video = cv2.VideoCapture(0)
print('Detecting people...')
while True:
check, frame = video.read()
frame = detect.detect(frame)
if writer is not None:
writer.write(frame)
key = cv2.waitKey(1)
if key == ord('q'):
break
video.release()
cv2.destroyAllWindows()
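
# The local detect module is not shown; a plausible stand-in for detect.detect
# using OpenCV's built-in HOG person detector (an assumption about what the
# module does, not its actual implementation):
def hog_detect(frame):
    hog = cv2.HOGDescriptor()
    hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())
    # detectMultiScale returns bounding boxes and confidence weights
    boxes, _weights = hog.detectMultiScale(frame, winStride=(8, 8))
    for x, y, w, h in boxes:
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
    return frame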
|
normal
|
{
"blob_id": "5044b8bc8cabd7762df6a0327828df4546ab8d96",
"index": 9000,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef detectByPathVideo(path, writer):\n video = cv2.VideoCapture(path)\n check, frame = video.read()\n if check == False:\n print(\n 'Video Not Found. Please Enter a Valid Path (Full path of Video Should be Provided).'\n )\n return\n print('Detecting people...')\n while video.isOpened():\n check, frame = video.read()\n if check:\n frame = imutils.resize(frame, width=min(800, frame.shape[1]))\n frame = detect.detect(frame)\n if writer is not None:\n writer.write(frame)\n key = cv2.waitKey(1)\n if key == ord('q'):\n break\n else:\n break\n video.release()\n cv2.destroyAllWindows()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef detectByPathVideo(path, writer):\n video = cv2.VideoCapture(path)\n check, frame = video.read()\n if check == False:\n print(\n 'Video Not Found. Please Enter a Valid Path (Full path of Video Should be Provided).'\n )\n return\n print('Detecting people...')\n while video.isOpened():\n check, frame = video.read()\n if check:\n frame = imutils.resize(frame, width=min(800, frame.shape[1]))\n frame = detect.detect(frame)\n if writer is not None:\n writer.write(frame)\n key = cv2.waitKey(1)\n if key == ord('q'):\n break\n else:\n break\n video.release()\n cv2.destroyAllWindows()\n\n\ndef detectByCamera(writer):\n video = cv2.VideoCapture(0)\n print('Detecting people...')\n while True:\n check, frame = video.read()\n frame = detect.detect(frame)\n if writer is not None:\n writer.write(frame)\n key = cv2.waitKey(1)\n if key == ord('q'):\n break\n video.release()\n cv2.destroyAllWindows()\n",
"step-4": "import cv2\nimport imutils\nimport detect\n\n\ndef detectByPathVideo(path, writer):\n video = cv2.VideoCapture(path)\n check, frame = video.read()\n if check == False:\n print(\n 'Video Not Found. Please Enter a Valid Path (Full path of Video Should be Provided).'\n )\n return\n print('Detecting people...')\n while video.isOpened():\n check, frame = video.read()\n if check:\n frame = imutils.resize(frame, width=min(800, frame.shape[1]))\n frame = detect.detect(frame)\n if writer is not None:\n writer.write(frame)\n key = cv2.waitKey(1)\n if key == ord('q'):\n break\n else:\n break\n video.release()\n cv2.destroyAllWindows()\n\n\ndef detectByCamera(writer):\n video = cv2.VideoCapture(0)\n print('Detecting people...')\n while True:\n check, frame = video.read()\n frame = detect.detect(frame)\n if writer is not None:\n writer.write(frame)\n key = cv2.waitKey(1)\n if key == ord('q'):\n break\n video.release()\n cv2.destroyAllWindows()\n",
"step-5": "import cv2\r\nimport imutils\r\nimport detect\r\n\r\ndef detectByPathVideo(path, writer):\r\n\r\n video = cv2.VideoCapture(path)\r\n check, frame = video.read()\r\n if check == False:\r\n print('Video Not Found. Please Enter a Valid Path (Full path of Video Should be Provided).')\r\n return\r\n\r\n print('Detecting people...')\r\n while video.isOpened():\r\n #check is True if reading was successful \r\n check, frame = video.read()\r\n\r\n if check:\r\n frame = imutils.resize(frame , width=min(800,frame.shape[1]))\r\n frame = detect.detect(frame)\r\n \r\n if writer is not None:\r\n writer.write(frame)\r\n \r\n key = cv2.waitKey(1)\r\n if key== ord('q'):\r\n break\r\n else:\r\n break\r\n video.release()\r\n cv2.destroyAllWindows()\r\n\r\ndef detectByCamera(writer): \r\n video = cv2.VideoCapture(0)\r\n print('Detecting people...')\r\n\r\n while True:\r\n check, frame = video.read()\r\n\r\n frame = detect.detect(frame)\r\n if writer is not None:\r\n writer.write(frame)\r\n\r\n key = cv2.waitKey(1)\r\n if key == ord('q'):\r\n break\r\n\r\n video.release()\r\n cv2.destroyAllWindows()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import pytest
from domain.story import Story
from tests.dot_dictionary import DotDict
@pytest.fixture()
def deployed_story_over_a_weekend():
revision_0 = DotDict({
'CreationDate': "2019-07-11T14:33:20.000Z"
})
revision_1 = DotDict({
'CreationDate': "2019-07-31T15:33:20.000Z",
'Description': "SCHEDULE STATE changed from [To-Do] to [In-Progress], READY changed from [true] to [false]"
})
revision_2 = DotDict({
'CreationDate': "2019-08-06T16:33:20.000Z",
'Description': "SCHEDULE STATE changed from [Ready For Prod] to [Deployed]"
})
rally_story = DotDict({
'ScheduleState': 'Completed',
'RevisionHistory': DotDict({
'Revisions': [revision_2, revision_1, revision_0]
})
    })
return Story(rally_story, ['Backlog', 'To-Do', 'In-Progress', 'Completed', 'Ready For Prod', 'Deployed'],
{'In-Progress', 'Development'}, {'Deployed', 'Prod - ON'})
def test_cycle_time_only_includes_business_days(deployed_story_over_a_weekend):
assert deployed_story_over_a_weekend.cycle_time == 7
def test_find_current_start_state():
assert 'In-Progress' == Story.find_current_state_name({'Backlog', 'To-Do', 'In-Progress', 'Completed', 'Ready For Prod', 'Deployed'}, {'In-Progress', 'Development'})
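
# tests.dot_dictionary is not shown; a minimal stand-in consistent with how
# DotDict is used above (attribute access over a plain dict) could be:
class DotDictSketch(dict):
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)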
|
normal
|
{
"blob_id": "d10c74338ea18ef3e5fb6a4dd2224faa4f94aa62",
"index": 9950,
"step-1": "<mask token>\n\n\ndef test_find_current_start_state():\n assert 'In-Progress' == Story.find_current_state_name({'Backlog',\n 'To-Do', 'In-Progress', 'Completed', 'Ready For Prod', 'Deployed'},\n {'In-Progress', 'Development'})\n",
"step-2": "<mask token>\n\n\[email protected]()\ndef deployed_story_over_a_weekend():\n revision_0 = DotDict({'CreationDate': '2019-07-11T14:33:20.000Z'})\n revision_1 = DotDict({'CreationDate': '2019-07-31T15:33:20.000Z',\n 'Description':\n 'SCHEDULE STATE changed from [To-Do] to [In-Progress], READY changed from [true] to [false]'\n })\n revision_2 = DotDict({'CreationDate': '2019-08-06T16:33:20.000Z',\n 'Description':\n 'SCHEDULE STATE changed from [Ready For Prod] to [Deployed]'})\n rally_story = DotDict({'ScheduleState': 'Completed', 'RevisionHistory':\n DotDict({'Revisions': [revision_2, revision_1, revision_0]})})\n return Story(rally_story, ['Backlog', 'To-Do', 'In-Progress',\n 'Completed', 'Ready For Prod', 'Deployed'], {'In-Progress',\n 'Development'}, {'Deployed', 'Prod - ON'})\n\n\n<mask token>\n\n\ndef test_find_current_start_state():\n assert 'In-Progress' == Story.find_current_state_name({'Backlog',\n 'To-Do', 'In-Progress', 'Completed', 'Ready For Prod', 'Deployed'},\n {'In-Progress', 'Development'})\n",
"step-3": "<mask token>\n\n\[email protected]()\ndef deployed_story_over_a_weekend():\n revision_0 = DotDict({'CreationDate': '2019-07-11T14:33:20.000Z'})\n revision_1 = DotDict({'CreationDate': '2019-07-31T15:33:20.000Z',\n 'Description':\n 'SCHEDULE STATE changed from [To-Do] to [In-Progress], READY changed from [true] to [false]'\n })\n revision_2 = DotDict({'CreationDate': '2019-08-06T16:33:20.000Z',\n 'Description':\n 'SCHEDULE STATE changed from [Ready For Prod] to [Deployed]'})\n rally_story = DotDict({'ScheduleState': 'Completed', 'RevisionHistory':\n DotDict({'Revisions': [revision_2, revision_1, revision_0]})})\n return Story(rally_story, ['Backlog', 'To-Do', 'In-Progress',\n 'Completed', 'Ready For Prod', 'Deployed'], {'In-Progress',\n 'Development'}, {'Deployed', 'Prod - ON'})\n\n\ndef test_cycle_time_only_includes_business_days(deployed_story_over_a_weekend):\n assert deployed_story_over_a_weekend.cycle_time == 7\n\n\ndef test_find_current_start_state():\n assert 'In-Progress' == Story.find_current_state_name({'Backlog',\n 'To-Do', 'In-Progress', 'Completed', 'Ready For Prod', 'Deployed'},\n {'In-Progress', 'Development'})\n",
"step-4": "import pytest\nfrom domain.story import Story\nfrom tests.dot_dictionary import DotDict\n\n\[email protected]()\ndef deployed_story_over_a_weekend():\n revision_0 = DotDict({'CreationDate': '2019-07-11T14:33:20.000Z'})\n revision_1 = DotDict({'CreationDate': '2019-07-31T15:33:20.000Z',\n 'Description':\n 'SCHEDULE STATE changed from [To-Do] to [In-Progress], READY changed from [true] to [false]'\n })\n revision_2 = DotDict({'CreationDate': '2019-08-06T16:33:20.000Z',\n 'Description':\n 'SCHEDULE STATE changed from [Ready For Prod] to [Deployed]'})\n rally_story = DotDict({'ScheduleState': 'Completed', 'RevisionHistory':\n DotDict({'Revisions': [revision_2, revision_1, revision_0]})})\n return Story(rally_story, ['Backlog', 'To-Do', 'In-Progress',\n 'Completed', 'Ready For Prod', 'Deployed'], {'In-Progress',\n 'Development'}, {'Deployed', 'Prod - ON'})\n\n\ndef test_cycle_time_only_includes_business_days(deployed_story_over_a_weekend):\n assert deployed_story_over_a_weekend.cycle_time == 7\n\n\ndef test_find_current_start_state():\n assert 'In-Progress' == Story.find_current_state_name({'Backlog',\n 'To-Do', 'In-Progress', 'Completed', 'Ready For Prod', 'Deployed'},\n {'In-Progress', 'Development'})\n",
"step-5": "import pytest\nfrom domain.story import Story\nfrom tests.dot_dictionary import DotDict\n\[email protected]()\ndef deployed_story_over_a_weekend():\n revision_0 = DotDict({\n 'CreationDate': \"2019-07-11T14:33:20.000Z\"\n })\n revision_1 = DotDict({\n 'CreationDate': \"2019-07-31T15:33:20.000Z\",\n 'Description': \"SCHEDULE STATE changed from [To-Do] to [In-Progress], READY changed from [true] to [false]\"\n })\n revision_2 = DotDict({\n 'CreationDate': \"2019-08-06T16:33:20.000Z\",\n 'Description': \"SCHEDULE STATE changed from [Ready For Prod] to [Deployed]\"\n })\n rally_story = DotDict({\n 'ScheduleState': 'Completed',\n 'RevisionHistory': DotDict({\n 'Revisions': [revision_2, revision_1, revision_0]\n })\n });\n return Story(rally_story, ['Backlog', 'To-Do', 'In-Progress', 'Completed', 'Ready For Prod', 'Deployed'],\n {'In-Progress', 'Development'}, {'Deployed', 'Prod - ON'})\n\n\ndef test_cycle_time_only_includes_business_days(deployed_story_over_a_weekend):\n assert deployed_story_over_a_weekend.cycle_time == 7\n\n\ndef test_find_current_start_state() :\n assert 'In-Progress' == Story.find_current_state_name({'Backlog', 'To-Do', 'In-Progress', 'Completed', 'Ready For Prod', 'Deployed'}, {'In-Progress', 'Development'})\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from .base import paw_test
class warning_test(paw_test):
def test_warning_badchars(self):
self.paw.cset_lookup(self.badchar)
self.assertEqual(1, self.paw.wcount)
|
normal
|
{
"blob_id": "b4c6075aabe833f6fe23471f608d928edd25ef63",
"index": 372,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass warning_test(paw_test):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass warning_test(paw_test):\n\n def test_warning_badchars(self):\n self.paw.cset_lookup(self.badchar)\n self.assertEqual(1, self.paw.wcount)\n",
"step-4": "from .base import paw_test\n\n\nclass warning_test(paw_test):\n\n def test_warning_badchars(self):\n self.paw.cset_lookup(self.badchar)\n self.assertEqual(1, self.paw.wcount)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Generated by Django 2.1.2 on 2018-10-26 12:40
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20181010_0852'),
('accounts', '0004_playercards'),
]
operations = [
migrations.RenameModel(
old_name='PlayerCards',
new_name='PlayerCard',
),
migrations.RemoveField(
model_name='profile',
name='cards',
),
]
|
normal
|
{
"blob_id": "59596c69df6a2c453fd147a9c8a2c7d47ed79fb3",
"index": 3222,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('core', '0007_auto_20181010_0852'), ('accounts',\n '0004_playercards')]\n operations = [migrations.RenameModel(old_name='PlayerCards', new_name=\n 'PlayerCard'), migrations.RemoveField(model_name='profile', name=\n 'cards')]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('core', '0007_auto_20181010_0852'), ('accounts',\n '0004_playercards')]\n operations = [migrations.RenameModel(old_name='PlayerCards', new_name=\n 'PlayerCard'), migrations.RemoveField(model_name='profile', name=\n 'cards')]\n",
"step-5": "# Generated by Django 2.1.2 on 2018-10-26 12:40\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0007_auto_20181010_0852'),\n ('accounts', '0004_playercards'),\n ]\n\n operations = [\n migrations.RenameModel(\n old_name='PlayerCards',\n new_name='PlayerCard',\n ),\n migrations.RemoveField(\n model_name='profile',\n name='cards',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import math
import backtrader as bt
from datetime import datetime
from bots.TelegramBot import TelegramBot
import logging
class Volume(bt.Strategy):
params = (('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25))
def __init__(self):
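        # signal: True when the current bar's volume is at least 'ratio' times the 'avg_volume_period'-day average volume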
self.mysignal = (self.data.volume / bt.ind.Average(self.data.volume, period=self.params.avg_volume_period)) >= self.params.ratio
def next(self):
self.step_date = self.data.datetime.date().strftime("%Y-%m-%d")
self.today = datetime.now().strftime("%Y-%m-%d")
if self.mysignal and self.step_date == self.today:
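            # message (Vietnamese): "<ticker> - trading volume is greater than the average volume of the last <avg_volume_period> days"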
TelegramBot.send("{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.".format(self.params.ticker, self.params.avg_volume_period))
|
normal
|
{
"blob_id": "acbe9a9501c6a8532249496f327c2470c1d2f8e0",
"index": 898,
"step-1": "<mask token>\n\n\nclass Volume(bt.Strategy):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Volume(bt.Strategy):\n <mask token>\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Volume(bt.Strategy):\n params = ('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25)\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n\n def next(self):\n self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')\n self.today = datetime.now().strftime('%Y-%m-%d')\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\n '{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.\n format(self.params.ticker, self.params.avg_volume_period))\n",
"step-4": "import math\nimport backtrader as bt\nfrom datetime import datetime\nfrom bots.TelegramBot import TelegramBot\nimport logging\n\n\nclass Volume(bt.Strategy):\n params = ('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25)\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n\n def next(self):\n self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')\n self.today = datetime.now().strftime('%Y-%m-%d')\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\n '{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.\n format(self.params.ticker, self.params.avg_volume_period))\n",
"step-5": "import math\nimport backtrader as bt\nfrom datetime import datetime\nfrom bots.TelegramBot import TelegramBot\nimport logging\nclass Volume(bt.Strategy):\n params = (('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25))\n\n def __init__(self):\n self.mysignal = (self.data.volume / bt.ind.Average(self.data.volume, period=self.params.avg_volume_period)) >= self.params.ratio\n def next(self):\n self.step_date = self.data.datetime.date().strftime(\"%Y-%m-%d\")\n self.today = datetime.now().strftime(\"%Y-%m-%d\")\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\"{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.\".format(self.params.ticker, self.params.avg_volume_period))\n ",
"step-ids": [
1,
2,
4,
5,
6
]
}
|
[
1,
2,
4,
5,
6
] |
import os
import requests
from pprint import pprint as pp
from lxml import html
from bs4 import BeautifulSoup
from dotenv import load_dotenv
import datetime
load_dotenv()
class PrometeoAPI:
def __init__(self, user, pwd):
self.base_url = 'https://prometeoapi.com'
self.session = requests.Session()
self.__user = user
self.__pwd = pwd
self._login()
def _generate_csrf_token(self, url):
'''
        This function gets the csrf token from the login page, which is
        required for the request that logs into the website
'''
response = self.session.get(url)
content = response.content
tree = html.fromstring(content)
csrf_element = tree.xpath("//input[@name='csrfmiddlewaretoken']")[0]
csrf = csrf_element.get('value')
return csrf
def _login(self):
'''
        This function takes the username and password, logs in and sets api_key, username, and
        the amount of requests this month, data available from the dashboard received after the log in
'''
url = f'{self.base_url}/dashboard/login/'
csrf = self._generate_csrf_token(url)
payload = {
'csrfmiddlewaretoken': csrf,
'username': self.__user,
'password': self.__pwd
}
response = self.session.request('POST', url, data=payload)
tree = html.fromstring(response.content)
page_title_element = tree.xpath("//title")[0]
page_title = str(page_title_element.text_content()).strip()
if 'Login - Prometeo' in page_title:
error = tree.xpath("//div[contains(@class,'alert')]")[0]
error_msj = self._strip_text(error)
raise Exception(f'Failed to log into the site, response text: {error_msj}')
username_element = tree.xpath("//nav//*[contains(@class,'login-info__data')]/p[contains(@class,'text-white')]")[
0]
self.username = self._strip_text(username_element)
api_key_element = tree.xpath("//p[contains(@class,'api-key-field')]")[0]
self.api_key = self._strip_text(api_key_element)
# requests_mes_element = tree.xpath("//p[contains(.,'Requests este mes:')]/b")[0]
# self.requests_mes = str(requests_mes_element.text_content()).strip()
def get_requests_current_month(self):
current_date = datetime.datetime.now()
request_url = f'{self.base_url}/dashboard/filter_requests/?format=json&month={current_date.month}&user_id=&year={current_date.year}'
response = self.session.get(request_url)
if response.status_code == 200:
json_table = response.json()
return json_table.get('usage_table')
def refresh_api_key(self):
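        # resetting the key needs a fresh CSRF token; the dashboard answers with the regenerated key as JSON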
csrf = self._generate_csrf_token(f'{self.base_url}/dashboard/')
headers = {'X-CSRFToken': csrf}
request_url = f'{self.base_url}/dashboard/reset-key/'
response = self.session.post(request_url, headers=headers)
self.api_key = response.json().get('api_key')
return self.api_key
def _strip_text(self, element):
return str(element.text_content()).strip()
if __name__ == '__main__':
api = PrometeoAPI(user=os.environ.get('PROMETEO_USERNAME'), pwd=os.environ.get('PROMETEO_PASSWORD'))
print(api.api_key)
print(api.username)
print(api.refresh_api_key())
pp(api.get_requests_current_month())
|
normal
|
{
"blob_id": "f3e654a589cc1c16b36203dd358671d0426556e6",
"index": 2676,
"step-1": "<mask token>\n\n\nclass PrometeoAPI:\n\n def __init__(self, user, pwd):\n self.base_url = 'https://prometeoapi.com'\n self.session = requests.Session()\n self.__user = user\n self.__pwd = pwd\n self._login()\n\n def _generate_csrf_token(self, url):\n \"\"\"\n This function gets the csrf token from the login page needed to\n do request in order log into the website\n\n \"\"\"\n response = self.session.get(url)\n content = response.content\n tree = html.fromstring(content)\n csrf_element = tree.xpath(\"//input[@name='csrfmiddlewaretoken']\")[0]\n csrf = csrf_element.get('value')\n return csrf\n <mask token>\n\n def get_requests_current_month(self):\n current_date = datetime.datetime.now()\n request_url = (\n f'{self.base_url}/dashboard/filter_requests/?format=json&month={current_date.month}&user_id=&year={current_date.year}'\n )\n response = self.session.get(request_url)\n if response.status_code == 200:\n json_table = response.json()\n return json_table.get('usage_table')\n <mask token>\n\n def _strip_text(self, element):\n return str(element.text_content()).strip()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass PrometeoAPI:\n\n def __init__(self, user, pwd):\n self.base_url = 'https://prometeoapi.com'\n self.session = requests.Session()\n self.__user = user\n self.__pwd = pwd\n self._login()\n\n def _generate_csrf_token(self, url):\n \"\"\"\n This function gets the csrf token from the login page needed to\n do request in order log into the website\n\n \"\"\"\n response = self.session.get(url)\n content = response.content\n tree = html.fromstring(content)\n csrf_element = tree.xpath(\"//input[@name='csrfmiddlewaretoken']\")[0]\n csrf = csrf_element.get('value')\n return csrf\n <mask token>\n\n def get_requests_current_month(self):\n current_date = datetime.datetime.now()\n request_url = (\n f'{self.base_url}/dashboard/filter_requests/?format=json&month={current_date.month}&user_id=&year={current_date.year}'\n )\n response = self.session.get(request_url)\n if response.status_code == 200:\n json_table = response.json()\n return json_table.get('usage_table')\n\n def refresh_api_key(self):\n csrf = self._generate_csrf_token(f'{self.base_url}/dashboard/')\n headers = {'X-CSRFToken': csrf}\n request_url = f'{self.base_url}/dashboard/reset-key/'\n response = self.session.post(request_url, headers=headers)\n self.api_key = response.json().get('api_key')\n return self.api_key\n\n def _strip_text(self, element):\n return str(element.text_content()).strip()\n\n\n<mask token>\n",
"step-3": "<mask token>\nload_dotenv()\n\n\nclass PrometeoAPI:\n\n def __init__(self, user, pwd):\n self.base_url = 'https://prometeoapi.com'\n self.session = requests.Session()\n self.__user = user\n self.__pwd = pwd\n self._login()\n\n def _generate_csrf_token(self, url):\n \"\"\"\n This function gets the csrf token from the login page needed to\n do request in order log into the website\n\n \"\"\"\n response = self.session.get(url)\n content = response.content\n tree = html.fromstring(content)\n csrf_element = tree.xpath(\"//input[@name='csrfmiddlewaretoken']\")[0]\n csrf = csrf_element.get('value')\n return csrf\n\n def _login(self):\n \"\"\"\n This function takes the username and password, logs in and sets api_key, user name, and\n ammount of requests of the month, data available from the dashboard recieved after the log in\n \"\"\"\n url = f'{self.base_url}/dashboard/login/'\n csrf = self._generate_csrf_token(url)\n payload = {'csrfmiddlewaretoken': csrf, 'username': self.__user,\n 'password': self.__pwd}\n response = self.session.request('POST', url, data=payload)\n tree = html.fromstring(response.content)\n page_title_element = tree.xpath('//title')[0]\n page_title = str(page_title_element.text_content()).strip()\n if 'Login - Prometeo' in page_title:\n error = tree.xpath(\"//div[contains(@class,'alert')]\")[0]\n error_msj = self._strip_text(error)\n raise Exception(\n f'Failed to log into the site, response text: {error_msj}')\n username_element = tree.xpath(\n \"//nav//*[contains(@class,'login-info__data')]/p[contains(@class,'text-white')]\"\n )[0]\n self.username = self._strip_text(username_element)\n api_key_element = tree.xpath(\"//p[contains(@class,'api-key-field')]\")[0\n ]\n self.api_key = self._strip_text(api_key_element)\n\n def get_requests_current_month(self):\n current_date = datetime.datetime.now()\n request_url = (\n f'{self.base_url}/dashboard/filter_requests/?format=json&month={current_date.month}&user_id=&year={current_date.year}'\n )\n response = self.session.get(request_url)\n if response.status_code == 200:\n json_table = response.json()\n return json_table.get('usage_table')\n\n def refresh_api_key(self):\n csrf = self._generate_csrf_token(f'{self.base_url}/dashboard/')\n headers = {'X-CSRFToken': csrf}\n request_url = f'{self.base_url}/dashboard/reset-key/'\n response = self.session.post(request_url, headers=headers)\n self.api_key = response.json().get('api_key')\n return self.api_key\n\n def _strip_text(self, element):\n return str(element.text_content()).strip()\n\n\nif __name__ == '__main__':\n api = PrometeoAPI(user=os.environ.get('PROMETEO_USERNAME'), pwd=os.\n environ.get('PROMETEO_PASSWORD'))\n print(api.api_key)\n print(api.username)\n print(api.refresh_api_key())\n pp(api.get_requests_current_month())\n",
"step-4": "import os\nimport requests\nfrom pprint import pprint as pp\nfrom lxml import html\nfrom bs4 import BeautifulSoup\nfrom dotenv import load_dotenv\nimport datetime\nload_dotenv()\n\n\nclass PrometeoAPI:\n\n def __init__(self, user, pwd):\n self.base_url = 'https://prometeoapi.com'\n self.session = requests.Session()\n self.__user = user\n self.__pwd = pwd\n self._login()\n\n def _generate_csrf_token(self, url):\n \"\"\"\n This function gets the csrf token from the login page needed to\n do request in order log into the website\n\n \"\"\"\n response = self.session.get(url)\n content = response.content\n tree = html.fromstring(content)\n csrf_element = tree.xpath(\"//input[@name='csrfmiddlewaretoken']\")[0]\n csrf = csrf_element.get('value')\n return csrf\n\n def _login(self):\n \"\"\"\n This function takes the username and password, logs in and sets api_key, user name, and\n ammount of requests of the month, data available from the dashboard recieved after the log in\n \"\"\"\n url = f'{self.base_url}/dashboard/login/'\n csrf = self._generate_csrf_token(url)\n payload = {'csrfmiddlewaretoken': csrf, 'username': self.__user,\n 'password': self.__pwd}\n response = self.session.request('POST', url, data=payload)\n tree = html.fromstring(response.content)\n page_title_element = tree.xpath('//title')[0]\n page_title = str(page_title_element.text_content()).strip()\n if 'Login - Prometeo' in page_title:\n error = tree.xpath(\"//div[contains(@class,'alert')]\")[0]\n error_msj = self._strip_text(error)\n raise Exception(\n f'Failed to log into the site, response text: {error_msj}')\n username_element = tree.xpath(\n \"//nav//*[contains(@class,'login-info__data')]/p[contains(@class,'text-white')]\"\n )[0]\n self.username = self._strip_text(username_element)\n api_key_element = tree.xpath(\"//p[contains(@class,'api-key-field')]\")[0\n ]\n self.api_key = self._strip_text(api_key_element)\n\n def get_requests_current_month(self):\n current_date = datetime.datetime.now()\n request_url = (\n f'{self.base_url}/dashboard/filter_requests/?format=json&month={current_date.month}&user_id=&year={current_date.year}'\n )\n response = self.session.get(request_url)\n if response.status_code == 200:\n json_table = response.json()\n return json_table.get('usage_table')\n\n def refresh_api_key(self):\n csrf = self._generate_csrf_token(f'{self.base_url}/dashboard/')\n headers = {'X-CSRFToken': csrf}\n request_url = f'{self.base_url}/dashboard/reset-key/'\n response = self.session.post(request_url, headers=headers)\n self.api_key = response.json().get('api_key')\n return self.api_key\n\n def _strip_text(self, element):\n return str(element.text_content()).strip()\n\n\nif __name__ == '__main__':\n api = PrometeoAPI(user=os.environ.get('PROMETEO_USERNAME'), pwd=os.\n environ.get('PROMETEO_PASSWORD'))\n print(api.api_key)\n print(api.username)\n print(api.refresh_api_key())\n pp(api.get_requests_current_month())\n",
"step-5": "import os\n\nimport requests\nfrom pprint import pprint as pp\nfrom lxml import html\nfrom bs4 import BeautifulSoup\nfrom dotenv import load_dotenv\nimport datetime\n\nload_dotenv()\n\n\nclass PrometeoAPI:\n def __init__(self, user, pwd):\n self.base_url = 'https://prometeoapi.com'\n self.session = requests.Session()\n self.__user = user\n self.__pwd = pwd\n self._login()\n\n def _generate_csrf_token(self, url):\n '''\n This function gets the csrf token from the login page needed to\n do request in order log into the website\n\n '''\n response = self.session.get(url)\n\n content = response.content\n tree = html.fromstring(content)\n\n csrf_element = tree.xpath(\"//input[@name='csrfmiddlewaretoken']\")[0]\n csrf = csrf_element.get('value')\n\n return csrf\n\n def _login(self):\n '''\n This function takes the username and password, logs in and sets api_key, user name, and\n ammount of requests of the month, data available from the dashboard recieved after the log in\n '''\n\n url = f'{self.base_url}/dashboard/login/'\n\n csrf = self._generate_csrf_token(url)\n\n payload = {\n 'csrfmiddlewaretoken': csrf,\n 'username': self.__user,\n 'password': self.__pwd\n }\n\n response = self.session.request('POST', url, data=payload)\n\n tree = html.fromstring(response.content)\n\n page_title_element = tree.xpath(\"//title\")[0]\n page_title = str(page_title_element.text_content()).strip()\n\n if 'Login - Prometeo' in page_title:\n error = tree.xpath(\"//div[contains(@class,'alert')]\")[0]\n error_msj = self._strip_text(error)\n raise Exception(f'Failed to log into the site, response text: {error_msj}')\n\n username_element = tree.xpath(\"//nav//*[contains(@class,'login-info__data')]/p[contains(@class,'text-white')]\")[\n 0]\n self.username = self._strip_text(username_element)\n\n api_key_element = tree.xpath(\"//p[contains(@class,'api-key-field')]\")[0]\n self.api_key = self._strip_text(api_key_element)\n\n # requests_mes_element = tree.xpath(\"//p[contains(.,'Requests este mes:')]/b\")[0]\n # self.requests_mes = str(requests_mes_element.text_content()).strip()\n\n def get_requests_current_month(self):\n\n current_date = datetime.datetime.now()\n\n request_url = f'{self.base_url}/dashboard/filter_requests/?format=json&month={current_date.month}&user_id=&year={current_date.year}'\n response = self.session.get(request_url)\n\n if response.status_code == 200:\n json_table = response.json()\n return json_table.get('usage_table')\n\n def refresh_api_key(self):\n csrf = self._generate_csrf_token(f'{self.base_url}/dashboard/')\n headers = {'X-CSRFToken': csrf}\n\n request_url = f'{self.base_url}/dashboard/reset-key/'\n response = self.session.post(request_url, headers=headers)\n self.api_key = response.json().get('api_key')\n\n return self.api_key\n\n def _strip_text(self, element):\n return str(element.text_content()).strip()\n\n\nif __name__ == '__main__':\n api = PrometeoAPI(user=os.environ.get('PROMETEO_USERNAME'), pwd=os.environ.get('PROMETEO_PASSWORD'))\n\n print(api.api_key)\n print(api.username)\n print(api.refresh_api_key())\n pp(api.get_requests_current_month())\n",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
from pydispatch import dispatcher
import time
import serial
import threading
from queue import Queue
PORT='/dev/ttys005'
#PORT='/dev/tty.usbmodem1461'
SPEED=4800.0
class GcodeSender(object):
PEN_LIFT_PULSE = 1500
PEN_DROP_PULSE = 800
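    # servo pulse widths sent via M340: the longer pulse (1500) lifts the pen, the shorter one (800) drops it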
def __init__(self, **kwargs):
super(GcodeSender, self).__init__(**kwargs)
self._stop = threading.Event()
self.parsing_thread = None
self.command_queue = Queue()
self.line_number = 1
self.plotter = None
dispatcher.connect(self.on_pen_lift, signal='PEN_LIFT', sender=dispatcher.Any)
dispatcher.connect(self.on_move_to_point, signal='MOVE_TO_POINT', sender=dispatcher.Any)
dispatcher.connect(self.on_pen_drop, signal='PEN_DROP', sender=dispatcher.Any)
def on_move_to_point(self, x, y):
print('X{0:.3f} Y{1:.3f}'.format(x,y))
command = 'G1 X{0:.3f} Y{1:.3f} F{2:.1f}'.format(x,y,SPEED)
self.command_queue.put_nowait(command)
def on_pen_drop(self):
#print("pen drop")
self.command_queue.put_nowait("M400")
self.command_queue.put_nowait("M340 P0 S{}".format(self.PEN_DROP_PULSE))
self.command_queue.put_nowait("G4 S1")
def on_pen_lift(self):
#print("pen lift")
self.command_queue.put_nowait("M400")
self.command_queue.put_nowait("M340 P0 S{}".format(self.PEN_LIFT_PULSE))
self.command_queue.put_nowait("G4 P500")
def start(self):
self._stop.clear()
self.parsing_thread = threading.Thread(target=self.start_processing)
self.parsing_thread.daemon = True
self.parsing_thread.start()
def stop(self):
if(self.plotter):
self.plotter.close()
self.plotter = None
def __del__(self):
self.stop_thread()
self.stop()
def start_processing(self):
self.command_queue.put_nowait('M110 N2')
self.command_queue.put_nowait('G90')
self.command_queue.put_nowait('G28')
self.plotter = serial.Serial(PORT, 115200, timeout=1)
self._read_and_process_and_wait_for_ok(break_on_timeout=True)
while True:
while not self.command_queue.empty():
command = self.command_queue.get_nowait()
self.command_queue.task_done()
self._send_line(command)
self._read_and_process_and_wait_for_ok()
time.sleep(0.5)
def _send_line(self, line):
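        # frame the line as 'N<line number> <gcode> *<checksum>' so the firmware can detect corrupted lines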
        self.last_line = line  # remembered so a firmware resend request can be honoured
        command = 'N{} {} '.format(self.line_number, line)
command = '{}*{}\n'.format(command, self._checksum(command))
#print("SEND: {}".format(command))
self.line_number += 1
self.plotter.write(command.encode('utf-8'))
def _read_line(self):
response = self.plotter.readline()
print("READ: {}".format(response))
return response.decode('utf-8')
def _checksum(self, command):
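        # RepRap/Marlin-style checksum: XOR of every byte of the framed command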
checksum = 0
for char in command:
byte_char = char.encode('utf-8')
int_char = int.from_bytes(byte_char, 'big')
checksum = checksum ^ int_char
return checksum
def _read_and_process_and_wait_for_ok(self, break_on_timeout=False):
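        # keep reading until the firmware acknowledges with 'ok', handling resend requests, faults, comments and wait messages along the way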
response = self._read_line()
if not response.strip() and break_on_timeout:
return
previous_line_number = self.line_number-1
while not response.startswith('ok'):
if response.startswith((f"rs {previous_line_number}", f"Resend:{previous_line_number}")):
print('resend request: {}'.format(response))
self.line_number = self.line_number-1
                self._send_line(self.last_line)
response = self._read_line()
elif response.startswith(('rs', 'Resend')):
raise Exception('requested resend of some other line number: {}'.format(response))
elif response.startswith('!!'):
raise Exception('printer fault')
elif response.startswith('//'):
print('comment: {}'.format(response))
response = self._read_line()
elif response.startswith('wait'):
response = self._read_line()
time.sleep(0.5)
elif response.startswith('start'):
return
else:
print('unknown response: {}'.format(response))
response = self._read_line()
#raise Exception('unknown response: {}'.format(response))
def stop_thread(self):
self._stop.set()
self.parsing_thread = None
|
normal
|
{
"blob_id": "10d35ba3c04d9cd09e152c575e74b0382ff60572",
"index": 48,
"step-1": "<mask token>\n\n\nclass GcodeSender(object):\n <mask token>\n <mask token>\n\n def __init__(self, **kwargs):\n super(GcodeSender, self).__init__(**kwargs)\n self._stop = threading.Event()\n self.parsing_thread = None\n self.command_queue = Queue()\n self.line_number = 1\n self.plotter = None\n dispatcher.connect(self.on_pen_lift, signal='PEN_LIFT', sender=\n dispatcher.Any)\n dispatcher.connect(self.on_move_to_point, signal='MOVE_TO_POINT',\n sender=dispatcher.Any)\n dispatcher.connect(self.on_pen_drop, signal='PEN_DROP', sender=\n dispatcher.Any)\n <mask token>\n <mask token>\n\n def on_pen_lift(self):\n self.command_queue.put_nowait('M400')\n self.command_queue.put_nowait('M340 P0 S{}'.format(self.PEN_LIFT_PULSE)\n )\n self.command_queue.put_nowait('G4 P500')\n <mask token>\n\n def stop(self):\n if self.plotter:\n self.plotter.close()\n self.plotter = None\n <mask token>\n\n def start_processing(self):\n self.command_queue.put_nowait('M110 N2')\n self.command_queue.put_nowait('G90')\n self.command_queue.put_nowait('G28')\n self.plotter = serial.Serial(PORT, 115200, timeout=1)\n self._read_and_process_and_wait_for_ok(break_on_timeout=True)\n while True:\n while not self.command_queue.empty():\n command = self.command_queue.get_nowait()\n self.command_queue.task_done()\n self._send_line(command)\n self._read_and_process_and_wait_for_ok()\n time.sleep(0.5)\n\n def _send_line(self, line):\n command = 'N{} {} '.format(self.line_number, line)\n command = '{}*{}\\n'.format(command, self._checksum(command))\n self.line_number += 1\n self.plotter.write(command.encode('utf-8'))\n\n def _read_line(self):\n response = self.plotter.readline()\n print('READ: {}'.format(response))\n return response.decode('utf-8')\n\n def _checksum(self, command):\n checksum = 0\n for char in command:\n byte_char = char.encode('utf-8')\n int_char = int.from_bytes(byte_char, 'big')\n checksum = checksum ^ int_char\n return checksum\n\n def _read_and_process_and_wait_for_ok(self, break_on_timeout=False):\n response = self._read_line()\n if not response.strip() and break_on_timeout:\n return\n previous_line_number = self.line_number - 1\n while not response.startswith('ok'):\n if response.startswith((f'rs {previous_line_number}',\n f'Resend:{previous_line_number}')):\n print('resend request: {}'.format(response))\n self.line_number = self.line_number - 1\n self._send_line(command)\n response = self._read_line()\n elif response.startswith(('rs', 'Resend')):\n raise Exception(\n 'requested resend of some other line number: {}'.format\n (response))\n elif response.startswith('!!'):\n raise Exception('printer fault')\n elif response.startswith('//'):\n print('comment: {}'.format(response))\n response = self._read_line()\n elif response.startswith('wait'):\n response = self._read_line()\n time.sleep(0.5)\n elif response.startswith('start'):\n return\n else:\n print('unknown response: {}'.format(response))\n response = self._read_line()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass GcodeSender(object):\n <mask token>\n <mask token>\n\n def __init__(self, **kwargs):\n super(GcodeSender, self).__init__(**kwargs)\n self._stop = threading.Event()\n self.parsing_thread = None\n self.command_queue = Queue()\n self.line_number = 1\n self.plotter = None\n dispatcher.connect(self.on_pen_lift, signal='PEN_LIFT', sender=\n dispatcher.Any)\n dispatcher.connect(self.on_move_to_point, signal='MOVE_TO_POINT',\n sender=dispatcher.Any)\n dispatcher.connect(self.on_pen_drop, signal='PEN_DROP', sender=\n dispatcher.Any)\n\n def on_move_to_point(self, x, y):\n print('X{0:.3f} Y{1:.3f}'.format(x, y))\n command = 'G1 X{0:.3f} Y{1:.3f} F{2:.1f}'.format(x, y, SPEED)\n self.command_queue.put_nowait(command)\n\n def on_pen_drop(self):\n self.command_queue.put_nowait('M400')\n self.command_queue.put_nowait('M340 P0 S{}'.format(self.PEN_DROP_PULSE)\n )\n self.command_queue.put_nowait('G4 S1')\n\n def on_pen_lift(self):\n self.command_queue.put_nowait('M400')\n self.command_queue.put_nowait('M340 P0 S{}'.format(self.PEN_LIFT_PULSE)\n )\n self.command_queue.put_nowait('G4 P500')\n\n def start(self):\n self._stop.clear()\n self.parsing_thread = threading.Thread(target=self.start_processing)\n self.parsing_thread.daemon = True\n self.parsing_thread.start()\n\n def stop(self):\n if self.plotter:\n self.plotter.close()\n self.plotter = None\n\n def __del__(self):\n self.stop_thread()\n self.stop()\n\n def start_processing(self):\n self.command_queue.put_nowait('M110 N2')\n self.command_queue.put_nowait('G90')\n self.command_queue.put_nowait('G28')\n self.plotter = serial.Serial(PORT, 115200, timeout=1)\n self._read_and_process_and_wait_for_ok(break_on_timeout=True)\n while True:\n while not self.command_queue.empty():\n command = self.command_queue.get_nowait()\n self.command_queue.task_done()\n self._send_line(command)\n self._read_and_process_and_wait_for_ok()\n time.sleep(0.5)\n\n def _send_line(self, line):\n command = 'N{} {} '.format(self.line_number, line)\n command = '{}*{}\\n'.format(command, self._checksum(command))\n self.line_number += 1\n self.plotter.write(command.encode('utf-8'))\n\n def _read_line(self):\n response = self.plotter.readline()\n print('READ: {}'.format(response))\n return response.decode('utf-8')\n\n def _checksum(self, command):\n checksum = 0\n for char in command:\n byte_char = char.encode('utf-8')\n int_char = int.from_bytes(byte_char, 'big')\n checksum = checksum ^ int_char\n return checksum\n\n def _read_and_process_and_wait_for_ok(self, break_on_timeout=False):\n response = self._read_line()\n if not response.strip() and break_on_timeout:\n return\n previous_line_number = self.line_number - 1\n while not response.startswith('ok'):\n if response.startswith((f'rs {previous_line_number}',\n f'Resend:{previous_line_number}')):\n print('resend request: {}'.format(response))\n self.line_number = self.line_number - 1\n self._send_line(command)\n response = self._read_line()\n elif response.startswith(('rs', 'Resend')):\n raise Exception(\n 'requested resend of some other line number: {}'.format\n (response))\n elif response.startswith('!!'):\n raise Exception('printer fault')\n elif response.startswith('//'):\n print('comment: {}'.format(response))\n response = self._read_line()\n elif response.startswith('wait'):\n response = self._read_line()\n time.sleep(0.5)\n elif response.startswith('start'):\n return\n else:\n print('unknown response: {}'.format(response))\n response = self._read_line()\n\n def stop_thread(self):\n 
self._stop.set()\n self.parsing_thread = None\n",
"step-3": "<mask token>\n\n\nclass GcodeSender(object):\n PEN_LIFT_PULSE = 1500\n PEN_DROP_PULSE = 800\n\n def __init__(self, **kwargs):\n super(GcodeSender, self).__init__(**kwargs)\n self._stop = threading.Event()\n self.parsing_thread = None\n self.command_queue = Queue()\n self.line_number = 1\n self.plotter = None\n dispatcher.connect(self.on_pen_lift, signal='PEN_LIFT', sender=\n dispatcher.Any)\n dispatcher.connect(self.on_move_to_point, signal='MOVE_TO_POINT',\n sender=dispatcher.Any)\n dispatcher.connect(self.on_pen_drop, signal='PEN_DROP', sender=\n dispatcher.Any)\n\n def on_move_to_point(self, x, y):\n print('X{0:.3f} Y{1:.3f}'.format(x, y))\n command = 'G1 X{0:.3f} Y{1:.3f} F{2:.1f}'.format(x, y, SPEED)\n self.command_queue.put_nowait(command)\n\n def on_pen_drop(self):\n self.command_queue.put_nowait('M400')\n self.command_queue.put_nowait('M340 P0 S{}'.format(self.PEN_DROP_PULSE)\n )\n self.command_queue.put_nowait('G4 S1')\n\n def on_pen_lift(self):\n self.command_queue.put_nowait('M400')\n self.command_queue.put_nowait('M340 P0 S{}'.format(self.PEN_LIFT_PULSE)\n )\n self.command_queue.put_nowait('G4 P500')\n\n def start(self):\n self._stop.clear()\n self.parsing_thread = threading.Thread(target=self.start_processing)\n self.parsing_thread.daemon = True\n self.parsing_thread.start()\n\n def stop(self):\n if self.plotter:\n self.plotter.close()\n self.plotter = None\n\n def __del__(self):\n self.stop_thread()\n self.stop()\n\n def start_processing(self):\n self.command_queue.put_nowait('M110 N2')\n self.command_queue.put_nowait('G90')\n self.command_queue.put_nowait('G28')\n self.plotter = serial.Serial(PORT, 115200, timeout=1)\n self._read_and_process_and_wait_for_ok(break_on_timeout=True)\n while True:\n while not self.command_queue.empty():\n command = self.command_queue.get_nowait()\n self.command_queue.task_done()\n self._send_line(command)\n self._read_and_process_and_wait_for_ok()\n time.sleep(0.5)\n\n def _send_line(self, line):\n command = 'N{} {} '.format(self.line_number, line)\n command = '{}*{}\\n'.format(command, self._checksum(command))\n self.line_number += 1\n self.plotter.write(command.encode('utf-8'))\n\n def _read_line(self):\n response = self.plotter.readline()\n print('READ: {}'.format(response))\n return response.decode('utf-8')\n\n def _checksum(self, command):\n checksum = 0\n for char in command:\n byte_char = char.encode('utf-8')\n int_char = int.from_bytes(byte_char, 'big')\n checksum = checksum ^ int_char\n return checksum\n\n def _read_and_process_and_wait_for_ok(self, break_on_timeout=False):\n response = self._read_line()\n if not response.strip() and break_on_timeout:\n return\n previous_line_number = self.line_number - 1\n while not response.startswith('ok'):\n if response.startswith((f'rs {previous_line_number}',\n f'Resend:{previous_line_number}')):\n print('resend request: {}'.format(response))\n self.line_number = self.line_number - 1\n self._send_line(command)\n response = self._read_line()\n elif response.startswith(('rs', 'Resend')):\n raise Exception(\n 'requested resend of some other line number: {}'.format\n (response))\n elif response.startswith('!!'):\n raise Exception('printer fault')\n elif response.startswith('//'):\n print('comment: {}'.format(response))\n response = self._read_line()\n elif response.startswith('wait'):\n response = self._read_line()\n time.sleep(0.5)\n elif response.startswith('start'):\n return\n else:\n print('unknown response: {}'.format(response))\n response = self._read_line()\n\n def 
stop_thread(self):\n self._stop.set()\n self.parsing_thread = None\n",
"step-4": "<mask token>\nPORT = '/dev/ttys005'\nSPEED = 4800.0\n\n\nclass GcodeSender(object):\n PEN_LIFT_PULSE = 1500\n PEN_DROP_PULSE = 800\n\n def __init__(self, **kwargs):\n super(GcodeSender, self).__init__(**kwargs)\n self._stop = threading.Event()\n self.parsing_thread = None\n self.command_queue = Queue()\n self.line_number = 1\n self.plotter = None\n dispatcher.connect(self.on_pen_lift, signal='PEN_LIFT', sender=\n dispatcher.Any)\n dispatcher.connect(self.on_move_to_point, signal='MOVE_TO_POINT',\n sender=dispatcher.Any)\n dispatcher.connect(self.on_pen_drop, signal='PEN_DROP', sender=\n dispatcher.Any)\n\n def on_move_to_point(self, x, y):\n print('X{0:.3f} Y{1:.3f}'.format(x, y))\n command = 'G1 X{0:.3f} Y{1:.3f} F{2:.1f}'.format(x, y, SPEED)\n self.command_queue.put_nowait(command)\n\n def on_pen_drop(self):\n self.command_queue.put_nowait('M400')\n self.command_queue.put_nowait('M340 P0 S{}'.format(self.PEN_DROP_PULSE)\n )\n self.command_queue.put_nowait('G4 S1')\n\n def on_pen_lift(self):\n self.command_queue.put_nowait('M400')\n self.command_queue.put_nowait('M340 P0 S{}'.format(self.PEN_LIFT_PULSE)\n )\n self.command_queue.put_nowait('G4 P500')\n\n def start(self):\n self._stop.clear()\n self.parsing_thread = threading.Thread(target=self.start_processing)\n self.parsing_thread.daemon = True\n self.parsing_thread.start()\n\n def stop(self):\n if self.plotter:\n self.plotter.close()\n self.plotter = None\n\n def __del__(self):\n self.stop_thread()\n self.stop()\n\n def start_processing(self):\n self.command_queue.put_nowait('M110 N2')\n self.command_queue.put_nowait('G90')\n self.command_queue.put_nowait('G28')\n self.plotter = serial.Serial(PORT, 115200, timeout=1)\n self._read_and_process_and_wait_for_ok(break_on_timeout=True)\n while True:\n while not self.command_queue.empty():\n command = self.command_queue.get_nowait()\n self.command_queue.task_done()\n self._send_line(command)\n self._read_and_process_and_wait_for_ok()\n time.sleep(0.5)\n\n def _send_line(self, line):\n command = 'N{} {} '.format(self.line_number, line)\n command = '{}*{}\\n'.format(command, self._checksum(command))\n self.line_number += 1\n self.plotter.write(command.encode('utf-8'))\n\n def _read_line(self):\n response = self.plotter.readline()\n print('READ: {}'.format(response))\n return response.decode('utf-8')\n\n def _checksum(self, command):\n checksum = 0\n for char in command:\n byte_char = char.encode('utf-8')\n int_char = int.from_bytes(byte_char, 'big')\n checksum = checksum ^ int_char\n return checksum\n\n def _read_and_process_and_wait_for_ok(self, break_on_timeout=False):\n response = self._read_line()\n if not response.strip() and break_on_timeout:\n return\n previous_line_number = self.line_number - 1\n while not response.startswith('ok'):\n if response.startswith((f'rs {previous_line_number}',\n f'Resend:{previous_line_number}')):\n print('resend request: {}'.format(response))\n self.line_number = self.line_number - 1\n self._send_line(command)\n response = self._read_line()\n elif response.startswith(('rs', 'Resend')):\n raise Exception(\n 'requested resend of some other line number: {}'.format\n (response))\n elif response.startswith('!!'):\n raise Exception('printer fault')\n elif response.startswith('//'):\n print('comment: {}'.format(response))\n response = self._read_line()\n elif response.startswith('wait'):\n response = self._read_line()\n time.sleep(0.5)\n elif response.startswith('start'):\n return\n else:\n print('unknown response: {}'.format(response))\n response = 
self._read_line()\n\n def stop_thread(self):\n self._stop.set()\n self.parsing_thread = None\n",
"step-5": "from pydispatch import dispatcher\nimport time\nimport serial\nimport threading\nfrom queue import Queue\n\nPORT='/dev/ttys005'\n#PORT='/dev/tty.usbmodem1461'\nSPEED=4800.0\n\nclass GcodeSender(object):\n\n PEN_LIFT_PULSE = 1500\n PEN_DROP_PULSE = 800\n\n def __init__(self, **kwargs):\n super(GcodeSender, self).__init__(**kwargs)\n self._stop = threading.Event()\n self.parsing_thread = None\n\n self.command_queue = Queue()\n self.line_number = 1\n self.plotter = None\n\n dispatcher.connect(self.on_pen_lift, signal='PEN_LIFT', sender=dispatcher.Any)\n dispatcher.connect(self.on_move_to_point, signal='MOVE_TO_POINT', sender=dispatcher.Any)\n dispatcher.connect(self.on_pen_drop, signal='PEN_DROP', sender=dispatcher.Any)\n\n def on_move_to_point(self, x, y):\n print('X{0:.3f} Y{1:.3f}'.format(x,y))\n command = 'G1 X{0:.3f} Y{1:.3f} F{2:.1f}'.format(x,y,SPEED)\n self.command_queue.put_nowait(command)\n\n def on_pen_drop(self):\n #print(\"pen drop\")\n self.command_queue.put_nowait(\"M400\")\n self.command_queue.put_nowait(\"M340 P0 S{}\".format(self.PEN_DROP_PULSE))\n self.command_queue.put_nowait(\"G4 S1\")\n\n def on_pen_lift(self):\n #print(\"pen lift\")\n self.command_queue.put_nowait(\"M400\")\n self.command_queue.put_nowait(\"M340 P0 S{}\".format(self.PEN_LIFT_PULSE))\n self.command_queue.put_nowait(\"G4 P500\")\n\n def start(self):\n self._stop.clear()\n self.parsing_thread = threading.Thread(target=self.start_processing)\n self.parsing_thread.daemon = True\n self.parsing_thread.start()\n\n def stop(self):\n if(self.plotter):\n self.plotter.close()\n self.plotter = None\n\n def __del__(self):\n self.stop_thread()\n self.stop()\n\n def start_processing(self):\n self.command_queue.put_nowait('M110 N2')\n self.command_queue.put_nowait('G90')\n self.command_queue.put_nowait('G28')\n self.plotter = serial.Serial(PORT, 115200, timeout=1)\n\n self._read_and_process_and_wait_for_ok(break_on_timeout=True)\n\n while True:\n while not self.command_queue.empty():\n command = self.command_queue.get_nowait()\n self.command_queue.task_done()\n self._send_line(command)\n self._read_and_process_and_wait_for_ok()\n\n time.sleep(0.5)\n\n def _send_line(self, line):\n command = 'N{} {} '.format(self.line_number, line)\n command = '{}*{}\\n'.format(command, self._checksum(command))\n #print(\"SEND: {}\".format(command))\n self.line_number += 1\n self.plotter.write(command.encode('utf-8'))\n \n def _read_line(self):\n response = self.plotter.readline()\n print(\"READ: {}\".format(response))\n return response.decode('utf-8')\n\n def _checksum(self, command):\n checksum = 0\n for char in command:\n byte_char = char.encode('utf-8')\n int_char = int.from_bytes(byte_char, 'big')\n checksum = checksum ^ int_char\n return checksum\n\n def _read_and_process_and_wait_for_ok(self, break_on_timeout=False):\n response = self._read_line()\n\n if not response.strip() and break_on_timeout:\n return\n\n previous_line_number = self.line_number-1\n while not response.startswith('ok'):\n if response.startswith((f\"rs {previous_line_number}\", f\"Resend:{previous_line_number}\")):\n print('resend request: {}'.format(response))\n self.line_number = self.line_number-1\n self._send_line(command)\n response = self._read_line()\n elif response.startswith(('rs', 'Resend')):\n raise Exception('requested resend of some other line number: {}'.format(response))\n elif response.startswith('!!'):\n raise Exception('printer fault')\n elif response.startswith('//'):\n print('comment: {}'.format(response))\n response = 
self._read_line()\n elif response.startswith('wait'):\n response = self._read_line()\n time.sleep(0.5)\n elif response.startswith('start'):\n return\n else:\n print('unknown response: {}'.format(response))\n response = self._read_line()\n #raise Exception('unknown response: {}'.format(response))\n\n def stop_thread(self):\n self._stop.set()\n self.parsing_thread = None\n\n",
"step-ids": [
9,
14,
15,
16,
18
]
}
|
[
9,
14,
15,
16,
18
] |
# coding=utf-8
# __author__ = 'liwenxuan'
import random
chars = "1234567890ABCDEF"
ids = ["{0}{1}{2}{3}".format(i, j, k, l) for i in chars for j in chars for k in chars for l in chars]
def random_peer_id(prefix="F"*8, server_id="0000"):
"""
    Generates a random peer_id (the last four characters are random)
    :param prefix: the first eight characters of the peer_id; tests use prefix "FFFFFFFF"
    :param server_id: identifier distinguishing servers; "0000" when servers are not distinguished
:return:
"""
assert len(str(prefix)) == 8 and len(str(server_id)) == 4
return str(prefix) + str(server_id) + "0"*16 + random.choice(ids) # length: 8+4+16+4 = 32
def random_file_id(file_id_prefix="F"*8, server_id="0000"):
"""
    Generates a random file_id (the last four characters are random)
    :param file_id_prefix: the first eight characters of the file_id; tests use prefix "FFFFFFFF"
    :param server_id: identifier distinguishing servers; "0000" when servers are not distinguished
:return:
"""
assert len(str(file_id_prefix)) <= 8 and len(str(server_id)) == 4
return str(file_id_prefix).ljust(8, "F") + str(server_id) + "F"*16 + random.choice(ids) # length: 8+4+16+4 = 32
if __name__ == "__main__":
pass
print "peer_id", random_peer_id()
print "file_id", random_file_id()
|
normal
|
{
"blob_id": "c77ca4aa720b172d75aff2ceda096a4969057a00",
"index": 9735,
"step-1": "# coding=utf-8\n# __author__ = 'liwenxuan'\n\nimport random\n\nchars = \"1234567890ABCDEF\"\nids = [\"{0}{1}{2}{3}\".format(i, j, k, l) for i in chars for j in chars for k in chars for l in chars]\n\n\ndef random_peer_id(prefix=\"F\"*8, server_id=\"0000\"):\n \"\"\"\n 用于生成随机的peer_id(后四位随机)\n :param prefix: 生成的peer_id的前八位, 测试用prefix为\"FFFFFFFF\"\n :param server_id: 区分不同server的标识, 不区分server时, server_id为\"0000\"\n :return:\n \"\"\"\n assert len(str(prefix)) == 8 and len(str(server_id)) == 4\n return str(prefix) + str(server_id) + \"0\"*16 + random.choice(ids) # length: 8+4+16+4 = 32\n\n\ndef random_file_id(file_id_prefix=\"F\"*8, server_id=\"0000\"):\n \"\"\"\n 用于生成随机的file_id(后四位随机)\n :param file_id_prefix: 生成的file_id的前八位, 测试用prefix为\"FFFFFFFF\"\n :param server_id: 区分不同server的标识, 不区分server时, server_id为\"0000\"\n :return:\n \"\"\"\n assert len(str(file_id_prefix)) <= 8 and len(str(server_id)) == 4\n return str(file_id_prefix).ljust(8, \"F\") + str(server_id) + \"F\"*16 + random.choice(ids) # length: 8+4+16+4 = 32\n\n\nif __name__ == \"__main__\":\n pass\n print \"peer_id\", random_peer_id()\n print \"file_id\", random_file_id()\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# coding:utf-8
import requests
import io
from zipfile import ZipFile
if __name__ == '__main__':
sentence_url = "http://www.manythings.org/anki/deu-eng.zip"
r = requests.get(sentence_url)
z = ZipFile(io.BytesIO(r.content))
file = z.read('deu.txt')
eng_ger_data = file.decode()
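    # round-tripping through ASCII with errors='ignore' strips every non-ASCII character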
eng_ger_data = eng_ger_data.encode('ascii', errors='ignore')
eng_ger_data = eng_ger_data.decode().split('\n')
eng_ger_data = [x.split('\t') for x in eng_ger_data if len(x) >= 1]
[english_sentence, german_sentence] = [list(x) for x in zip(*eng_ger_data)]
print(len(english_sentence))
print(len(german_sentence))
print(eng_ger_data[9])
print(eng_ger_data[10])
print(german_sentence)
|
normal
|
{
"blob_id": "559c665e5544dd864d2f020c967ac8a8665af134",
"index": 6805,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n sentence_url = 'http://www.manythings.org/anki/deu-eng.zip'\n r = requests.get(sentence_url)\n z = ZipFile(io.BytesIO(r.content))\n file = z.read('deu.txt')\n eng_ger_data = file.decode()\n eng_ger_data = eng_ger_data.encode('ascii', errors='ignore')\n eng_ger_data = eng_ger_data.decode().split('\\n')\n eng_ger_data = [x.split('\\t') for x in eng_ger_data if len(x) >= 1]\n [english_sentence, german_sentence] = [list(x) for x in zip(*eng_ger_data)]\n print(len(english_sentence))\n print(len(german_sentence))\n print(eng_ger_data[9])\n print(eng_ger_data[10])\n print(german_sentence)\n",
"step-3": "import requests\nimport io\nfrom zipfile import ZipFile\nif __name__ == '__main__':\n sentence_url = 'http://www.manythings.org/anki/deu-eng.zip'\n r = requests.get(sentence_url)\n z = ZipFile(io.BytesIO(r.content))\n file = z.read('deu.txt')\n eng_ger_data = file.decode()\n eng_ger_data = eng_ger_data.encode('ascii', errors='ignore')\n eng_ger_data = eng_ger_data.decode().split('\\n')\n eng_ger_data = [x.split('\\t') for x in eng_ger_data if len(x) >= 1]\n [english_sentence, german_sentence] = [list(x) for x in zip(*eng_ger_data)]\n print(len(english_sentence))\n print(len(german_sentence))\n print(eng_ger_data[9])\n print(eng_ger_data[10])\n print(german_sentence)\n",
"step-4": "# coding:utf-8\nimport requests\nimport io\nfrom zipfile import ZipFile\n\nif __name__ == '__main__':\n sentence_url = \"http://www.manythings.org/anki/deu-eng.zip\"\n r = requests.get(sentence_url)\n z = ZipFile(io.BytesIO(r.content))\n file = z.read('deu.txt')\n eng_ger_data = file.decode()\n eng_ger_data = eng_ger_data.encode('ascii', errors='ignore')\n eng_ger_data = eng_ger_data.decode().split('\\n')\n eng_ger_data = [x.split('\\t') for x in eng_ger_data if len(x) >= 1]\n [english_sentence, german_sentence] = [list(x) for x in zip(*eng_ger_data)]\n print(len(english_sentence))\n print(len(german_sentence))\n print(eng_ger_data[9])\n print(eng_ger_data[10])\n print(german_sentence)\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import copy

from tensorflow import keras
class SkippableSeq(keras.utils.Sequence):
def __init__(self, seq):
super(SkippableSeq, self).__init__()
self.start = 0
self.seq = seq
def __iter__(self):
return self
def __next__(self):
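        # return the element at the cursor and advance it, wrapping back to the start of the sequence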
res = self.seq[self.start]
self.start = (self.start + 1) % len(self)
return res
def __getitem__(self, i):
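        # seq[n:] returns a copy whose cursor starts n items in; integer indexing is relative to the cursor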
if isinstance(i, slice):
assert i.step == None == i.stop and self.start == 0, \
'only one suffix slicing allowed'
oth = copy.copy(self)
oth.start = i.start
return oth
else:
return self.seq[(self.start + i) % len(self)]
def __len__(self):
return len(self.seq)
class PostprocessSeq(SkippableSeq):
def __init__(self, postprocess, seq):
super(PostprocessSeq, self).__init__(seq)
self.postprocess = postprocess
def __next__(self):
return self.postprocess(super(PostprocessSeq, self).__next__())
def __getitem__(self, i):
return self.postprocess(super(PostprocessSeq, self).__getitem__(i))
def make_enqueuer_generator(sequence, workers):
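    # OrderedEnqueuer runs the Sequence in background workers and yields its batches in order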
data_enqueuer = keras.utils.OrderedEnqueuer(sequence)
data_enqueuer.start(workers=workers, max_queue_size=workers + 1)
return data_enqueuer.get()
|
normal
|
{
"blob_id": "2417dd4f3787742832fec53fec4592165d0fccfc",
"index": 9513,
"step-1": "<mask token>\n\n\nclass SkippableSeq(keras.utils.Sequence):\n\n def __init__(self, seq):\n super(SkippableSeq, self).__init__()\n self.start = 0\n self.seq = seq\n\n def __iter__(self):\n return self\n\n def __next__(self):\n res = self.seq[self.start]\n self.start = (self.start + 1) % len(self)\n return res\n <mask token>\n\n def __len__(self):\n return len(self.seq)\n\n\nclass PostprocessSeq(SkippableSeq):\n\n def __init__(self, postprocess, seq):\n super(PostprocessSeq, self).__init__(seq)\n self.postprocess = postprocess\n\n def __next__(self):\n return self.postprocess(super(PostprocessSeq, self).__next__())\n\n def __getitem__(self, i):\n return self.postprocess(super(PostprocessSeq, self).__getitem__(i))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SkippableSeq(keras.utils.Sequence):\n\n def __init__(self, seq):\n super(SkippableSeq, self).__init__()\n self.start = 0\n self.seq = seq\n\n def __iter__(self):\n return self\n\n def __next__(self):\n res = self.seq[self.start]\n self.start = (self.start + 1) % len(self)\n return res\n\n def __getitem__(self, i):\n if isinstance(i, slice):\n assert i.step == None == i.stop and self.start == 0, 'only one suffix slicing allowed'\n oth = copy.copy(self)\n oth.start = i.start\n return oth\n else:\n return self.seq[(self.start + i) % len(self)]\n\n def __len__(self):\n return len(self.seq)\n\n\nclass PostprocessSeq(SkippableSeq):\n\n def __init__(self, postprocess, seq):\n super(PostprocessSeq, self).__init__(seq)\n self.postprocess = postprocess\n\n def __next__(self):\n return self.postprocess(super(PostprocessSeq, self).__next__())\n\n def __getitem__(self, i):\n return self.postprocess(super(PostprocessSeq, self).__getitem__(i))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SkippableSeq(keras.utils.Sequence):\n\n def __init__(self, seq):\n super(SkippableSeq, self).__init__()\n self.start = 0\n self.seq = seq\n\n def __iter__(self):\n return self\n\n def __next__(self):\n res = self.seq[self.start]\n self.start = (self.start + 1) % len(self)\n return res\n\n def __getitem__(self, i):\n if isinstance(i, slice):\n assert i.step == None == i.stop and self.start == 0, 'only one suffix slicing allowed'\n oth = copy.copy(self)\n oth.start = i.start\n return oth\n else:\n return self.seq[(self.start + i) % len(self)]\n\n def __len__(self):\n return len(self.seq)\n\n\nclass PostprocessSeq(SkippableSeq):\n\n def __init__(self, postprocess, seq):\n super(PostprocessSeq, self).__init__(seq)\n self.postprocess = postprocess\n\n def __next__(self):\n return self.postprocess(super(PostprocessSeq, self).__next__())\n\n def __getitem__(self, i):\n return self.postprocess(super(PostprocessSeq, self).__getitem__(i))\n\n\ndef make_enqueuer_generator(sequence, workers):\n data_enqueuer = keras.utils.OrderedEnqueuer(sequence)\n data_enqueuer.start(workers=workers, max_queue_size=workers + 1)\n return data_enqueuer.get()\n",
"step-4": "from tensorflow import keras\n\n\nclass SkippableSeq(keras.utils.Sequence):\n\n def __init__(self, seq):\n super(SkippableSeq, self).__init__()\n self.start = 0\n self.seq = seq\n\n def __iter__(self):\n return self\n\n def __next__(self):\n res = self.seq[self.start]\n self.start = (self.start + 1) % len(self)\n return res\n\n def __getitem__(self, i):\n if isinstance(i, slice):\n assert i.step == None == i.stop and self.start == 0, 'only one suffix slicing allowed'\n oth = copy.copy(self)\n oth.start = i.start\n return oth\n else:\n return self.seq[(self.start + i) % len(self)]\n\n def __len__(self):\n return len(self.seq)\n\n\nclass PostprocessSeq(SkippableSeq):\n\n def __init__(self, postprocess, seq):\n super(PostprocessSeq, self).__init__(seq)\n self.postprocess = postprocess\n\n def __next__(self):\n return self.postprocess(super(PostprocessSeq, self).__next__())\n\n def __getitem__(self, i):\n return self.postprocess(super(PostprocessSeq, self).__getitem__(i))\n\n\ndef make_enqueuer_generator(sequence, workers):\n data_enqueuer = keras.utils.OrderedEnqueuer(sequence)\n data_enqueuer.start(workers=workers, max_queue_size=workers + 1)\n return data_enqueuer.get()\n",
"step-5": "from tensorflow import keras\n\n\nclass SkippableSeq(keras.utils.Sequence):\n def __init__(self, seq):\n super(SkippableSeq, self).__init__()\n self.start = 0\n self.seq = seq\n\n def __iter__(self):\n return self\n\n def __next__(self):\n res = self.seq[self.start]\n self.start = (self.start + 1) % len(self)\n return res\n\n def __getitem__(self, i):\n if isinstance(i, slice):\n assert i.step == None == i.stop and self.start == 0, \\\n 'only one suffix slicing allowed'\n oth = copy.copy(self)\n oth.start = i.start\n return oth\n else:\n return self.seq[(self.start + i) % len(self)]\n\n def __len__(self):\n return len(self.seq)\n\n\nclass PostprocessSeq(SkippableSeq):\n def __init__(self, postprocess, seq):\n super(PostprocessSeq, self).__init__(seq)\n self.postprocess = postprocess\n\n def __next__(self):\n return self.postprocess(super(PostprocessSeq, self).__next__())\n\n def __getitem__(self, i):\n return self.postprocess(super(PostprocessSeq, self).__getitem__(i))\n\n\ndef make_enqueuer_generator(sequence, workers):\n data_enqueuer = keras.utils.OrderedEnqueuer(sequence)\n data_enqueuer.start(workers=workers, max_queue_size=workers + 1)\n return data_enqueuer.get()\n",
"step-ids": [
9,
10,
11,
12,
13
]
}
|
[
9,
10,
11,
12,
13
] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
@project= Life_is_short_you_need_python
@file= judgement
@author= wubingyu
@create_time= 2017/12/21 2:58 PM
"""
#a if condition else b
#(falseValue,trueValue)[test]
#(falseValue,trueValue)[test==True]
#(falseValue,trueValue)[bool(<expression>)]
|
normal
|
{
"blob_id": "73e23b3560294ca24428e7dd4cc995b97767335c",
"index": 4202,
"step-1": "<mask token>\n",
"step-2": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\"\"\"\n@project= Life_is_short_you_need_python\n@file= judgement\n@author= wubingyu\n@create_time= 2017/12/21 下午2:58\n\"\"\"\n\n#a if condition else b\n#(falseValue,trueValue)[test]\n#(falseValue,trueValue)[test==True]\n#(falseValue,trueValue)[bool(<expression>)]\n\n\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from PyQt5 import QtCore
from PyQt5.QtWidgets import QTableWidgetItem, QDialog
from QT_view.PassportAdd import PassportAddDialog
from QT_view.PassportWin import Ui_Dialog
from Repository.Rep_Passport import PassportRepository
class PassportQt(QDialog):
def __init__(self):
super(PassportQt, self).__init__()
self.passport_rep = PassportRepository()
self.initUI()
def initUI(self):
self.ui = Ui_Dialog()
self.ui.setupUi(self)
self.ui.tableWidget.setColumnWidth(1, 259)
self.ui.tableWidget.setSelectionBehavior(1)
self.ui.tableWidget.setSelectionMode(1)
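        # 1 = QAbstractItemView.SelectRows / SingleSelection: whole-row selection, one row at a time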
self.ui.pushButton.clicked.connect(self.click_add)
self.ui.pushButton_2.clicked.connect(self.click_edit)
self.ui.pushButton_3.clicked.connect(self.click_del)
self.ui.pushButton_4.clicked.connect(self.click_cancel)
passport = self.passport_rep.get_passports()
self.ui.tableWidget.setRowCount(len(passport))
row = 0
for i in passport:
id_passport = QTableWidgetItem(str(i['id']))
id_passport.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
serial_passport = QTableWidgetItem(i['serial'])
serial_passport.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
number_passport = QTableWidgetItem(i['number'])
number_passport.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
self.ui.tableWidget.setItem(row, 0, id_passport)
self.ui.tableWidget.setItem(row, 1, serial_passport)
self.ui.tableWidget.setItem(row, 2, number_passport)
row += 1
def click_add(self):
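        # id -1 marks a new, unsaved record; the repository presumably assigns the real id on insert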
p_dict = {'id': -1, 'serial': "", 'number': ""}
self.passport_rep.set_dict(p_dict)
passport_add = PassportAddDialog(self.passport_rep)
if (passport_add.exec()):
passport_d = self.passport_rep.get_dict()
count_row = self.ui.tableWidget.rowCount()
self.ui.tableWidget.setRowCount(count_row + 1)
id_passport = QTableWidgetItem(str(passport_d['id']))
id_passport.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
serial = QTableWidgetItem(passport_d['serial'])
serial.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
number = QTableWidgetItem(passport_d['number'])
number.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
self.ui.tableWidget.setItem(count_row, 0, id_passport)
self.ui.tableWidget.setItem(count_row, 1, serial)
self.ui.tableWidget.setItem(count_row, 2, number)
def click_edit(self):
edit_list = self.ui.tableWidget.selectedItems()
if (len(edit_list)):
select_row = self.ui.tableWidget.currentRow()
edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1].text(), 'number': edit_list[2].text()}
self.passport_rep.set_dict(edit_d)
passport_edit = PassportAddDialog(self.passport_rep)
if (passport_edit.exec()):
passport_d = self.passport_rep.get_dict()
id_passport = QTableWidgetItem(str(passport_d['id']))
id_passport.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
serial = QTableWidgetItem(passport_d['serial'])
serial.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
number = QTableWidgetItem(passport_d['number'])
number.setFlags(
QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
)
self.ui.tableWidget.setItem(select_row, 0, id_passport)
self.ui.tableWidget.setItem(select_row, 1, serial)
self.ui.tableWidget.setItem(select_row, 2, number)

    def click_del(self):
del_list = self.ui.tableWidget.selectedItems()
if (len(del_list)):
del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].text(), 'number': del_list[2].text()}
self.passport_rep.del_passport(del_p)
self.ui.tableWidget.removeRow(del_list[0].row())

    def click_cancel(self):
self.accept()
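
# A minimal launch sketch; assumes the QT_view and Repository modules imported above
# are available on the path:
if __name__ == '__main__':
    import sys
    from PyQt5.QtWidgets import QApplication
    app = QApplication(sys.argv)
    dialog = PassportQt()
    dialog.exec()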
|
normal
|
{
"blob_id": "3f1715763a066fb337b3ff3d03e3736d0fb36b3f",
"index": 7325,
"step-1": "<mask token>\n\n\nclass PassportQt(QDialog):\n\n def __init__(self):\n super(PassportQt, self).__init__()\n self.passport_rep = PassportRepository()\n self.initUI()\n <mask token>\n\n def click_add(self):\n p_dict = {'id': -1, 'serial': '', 'number': ''}\n self.passport_rep.set_dict(p_dict)\n passport_add = PassportAddDialog(self.passport_rep)\n if passport_add.exec():\n passport_d = self.passport_rep.get_dict()\n count_row = self.ui.tableWidget.rowCount()\n self.ui.tableWidget.setRowCount(count_row + 1)\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\n self.ui.tableWidget.setItem(count_row, 1, serial)\n self.ui.tableWidget.setItem(count_row, 2, number)\n <mask token>\n\n def click_del(self):\n del_list = self.ui.tableWidget.selectedItems()\n if len(del_list):\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].\n text(), 'number': del_list[2].text()}\n self.passport_rep.del_passport(del_p)\n self.ui.tableWidget.removeRow(del_list[0].row())\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass PassportQt(QDialog):\n\n def __init__(self):\n super(PassportQt, self).__init__()\n self.passport_rep = PassportRepository()\n self.initUI()\n\n def initUI(self):\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n self.ui.tableWidget.setColumnWidth(1, 259)\n self.ui.tableWidget.setSelectionBehavior(1)\n self.ui.tableWidget.setSelectionMode(1)\n self.ui.pushButton.clicked.connect(self.click_add)\n self.ui.pushButton_2.clicked.connect(self.click_edit)\n self.ui.pushButton_3.clicked.connect(self.click_del)\n self.ui.pushButton_4.clicked.connect(self.click_cancel)\n passport = self.passport_rep.get_passports()\n self.ui.tableWidget.setRowCount(len(passport))\n row = 0\n for i in passport:\n id_passport = QTableWidgetItem(str(i['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial_passport = QTableWidgetItem(i['serial'])\n serial_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n number_passport = QTableWidgetItem(i['number'])\n number_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n self.ui.tableWidget.setItem(row, 0, id_passport)\n self.ui.tableWidget.setItem(row, 1, serial_passport)\n self.ui.tableWidget.setItem(row, 2, number_passport)\n row += 1\n\n def click_add(self):\n p_dict = {'id': -1, 'serial': '', 'number': ''}\n self.passport_rep.set_dict(p_dict)\n passport_add = PassportAddDialog(self.passport_rep)\n if passport_add.exec():\n passport_d = self.passport_rep.get_dict()\n count_row = self.ui.tableWidget.rowCount()\n self.ui.tableWidget.setRowCount(count_row + 1)\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\n self.ui.tableWidget.setItem(count_row, 1, serial)\n self.ui.tableWidget.setItem(count_row, 2, number)\n\n def click_edit(self):\n edit_list = self.ui.tableWidget.selectedItems()\n if len(edit_list):\n select_row = self.ui.tableWidget.currentRow()\n edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1\n ].text(), 'number': edit_list[2].text()}\n self.passport_rep.set_dict(edit_d)\n passport_edit = PassportAddDialog(self.passport_rep)\n if passport_edit.exec():\n passport_d = self.passport_rep.get_dict()\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(select_row, 0, id_passport)\n self.ui.tableWidget.setItem(select_row, 1, serial)\n self.ui.tableWidget.setItem(select_row, 2, number)\n\n def click_del(self):\n del_list = self.ui.tableWidget.selectedItems()\n if len(del_list):\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].\n text(), 'number': del_list[2].text()}\n self.passport_rep.del_passport(del_p)\n self.ui.tableWidget.removeRow(del_list[0].row())\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass PassportQt(QDialog):\n\n def __init__(self):\n super(PassportQt, self).__init__()\n self.passport_rep = PassportRepository()\n self.initUI()\n\n def initUI(self):\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n self.ui.tableWidget.setColumnWidth(1, 259)\n self.ui.tableWidget.setSelectionBehavior(1)\n self.ui.tableWidget.setSelectionMode(1)\n self.ui.pushButton.clicked.connect(self.click_add)\n self.ui.pushButton_2.clicked.connect(self.click_edit)\n self.ui.pushButton_3.clicked.connect(self.click_del)\n self.ui.pushButton_4.clicked.connect(self.click_cancel)\n passport = self.passport_rep.get_passports()\n self.ui.tableWidget.setRowCount(len(passport))\n row = 0\n for i in passport:\n id_passport = QTableWidgetItem(str(i['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial_passport = QTableWidgetItem(i['serial'])\n serial_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n number_passport = QTableWidgetItem(i['number'])\n number_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n self.ui.tableWidget.setItem(row, 0, id_passport)\n self.ui.tableWidget.setItem(row, 1, serial_passport)\n self.ui.tableWidget.setItem(row, 2, number_passport)\n row += 1\n\n def click_add(self):\n p_dict = {'id': -1, 'serial': '', 'number': ''}\n self.passport_rep.set_dict(p_dict)\n passport_add = PassportAddDialog(self.passport_rep)\n if passport_add.exec():\n passport_d = self.passport_rep.get_dict()\n count_row = self.ui.tableWidget.rowCount()\n self.ui.tableWidget.setRowCount(count_row + 1)\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\n self.ui.tableWidget.setItem(count_row, 1, serial)\n self.ui.tableWidget.setItem(count_row, 2, number)\n\n def click_edit(self):\n edit_list = self.ui.tableWidget.selectedItems()\n if len(edit_list):\n select_row = self.ui.tableWidget.currentRow()\n edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1\n ].text(), 'number': edit_list[2].text()}\n self.passport_rep.set_dict(edit_d)\n passport_edit = PassportAddDialog(self.passport_rep)\n if passport_edit.exec():\n passport_d = self.passport_rep.get_dict()\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(select_row, 0, id_passport)\n self.ui.tableWidget.setItem(select_row, 1, serial)\n self.ui.tableWidget.setItem(select_row, 2, number)\n\n def click_del(self):\n del_list = self.ui.tableWidget.selectedItems()\n if len(del_list):\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].\n text(), 'number': del_list[2].text()}\n self.passport_rep.del_passport(del_p)\n self.ui.tableWidget.removeRow(del_list[0].row())\n\n def click_cancel(self):\n self.accept()\n",
"step-4": "from PyQt5 import QtCore\nfrom PyQt5.QtWidgets import QTableWidgetItem, QDialog\nfrom QT_view.PassportAdd import PassportAddDialog\nfrom QT_view.PassportWin import Ui_Dialog\nfrom Repository.Rep_Passport import PassportRepository\n\n\nclass PassportQt(QDialog):\n\n def __init__(self):\n super(PassportQt, self).__init__()\n self.passport_rep = PassportRepository()\n self.initUI()\n\n def initUI(self):\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n self.ui.tableWidget.setColumnWidth(1, 259)\n self.ui.tableWidget.setSelectionBehavior(1)\n self.ui.tableWidget.setSelectionMode(1)\n self.ui.pushButton.clicked.connect(self.click_add)\n self.ui.pushButton_2.clicked.connect(self.click_edit)\n self.ui.pushButton_3.clicked.connect(self.click_del)\n self.ui.pushButton_4.clicked.connect(self.click_cancel)\n passport = self.passport_rep.get_passports()\n self.ui.tableWidget.setRowCount(len(passport))\n row = 0\n for i in passport:\n id_passport = QTableWidgetItem(str(i['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial_passport = QTableWidgetItem(i['serial'])\n serial_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n number_passport = QTableWidgetItem(i['number'])\n number_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n self.ui.tableWidget.setItem(row, 0, id_passport)\n self.ui.tableWidget.setItem(row, 1, serial_passport)\n self.ui.tableWidget.setItem(row, 2, number_passport)\n row += 1\n\n def click_add(self):\n p_dict = {'id': -1, 'serial': '', 'number': ''}\n self.passport_rep.set_dict(p_dict)\n passport_add = PassportAddDialog(self.passport_rep)\n if passport_add.exec():\n passport_d = self.passport_rep.get_dict()\n count_row = self.ui.tableWidget.rowCount()\n self.ui.tableWidget.setRowCount(count_row + 1)\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\n self.ui.tableWidget.setItem(count_row, 1, serial)\n self.ui.tableWidget.setItem(count_row, 2, number)\n\n def click_edit(self):\n edit_list = self.ui.tableWidget.selectedItems()\n if len(edit_list):\n select_row = self.ui.tableWidget.currentRow()\n edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1\n ].text(), 'number': edit_list[2].text()}\n self.passport_rep.set_dict(edit_d)\n passport_edit = PassportAddDialog(self.passport_rep)\n if passport_edit.exec():\n passport_d = self.passport_rep.get_dict()\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(select_row, 0, id_passport)\n self.ui.tableWidget.setItem(select_row, 1, serial)\n self.ui.tableWidget.setItem(select_row, 2, number)\n\n def click_del(self):\n del_list = self.ui.tableWidget.selectedItems()\n if len(del_list):\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].\n text(), 'number': 
del_list[2].text()}\n self.passport_rep.del_passport(del_p)\n self.ui.tableWidget.removeRow(del_list[0].row())\n\n def click_cancel(self):\n self.accept()\n",
"step-5": "from PyQt5 import QtCore\r\nfrom PyQt5.QtWidgets import QTableWidgetItem, QDialog\r\n\r\nfrom QT_view.PassportAdd import PassportAddDialog\r\nfrom QT_view.PassportWin import Ui_Dialog\r\n\r\nfrom Repository.Rep_Passport import PassportRepository\r\n\r\nclass PassportQt(QDialog):\r\n def __init__(self):\r\n super(PassportQt, self).__init__()\r\n self.passport_rep = PassportRepository()\r\n self.initUI()\r\n\r\n def initUI(self):\r\n self.ui = Ui_Dialog()\r\n self.ui.setupUi(self)\r\n self.ui.tableWidget.setColumnWidth(1, 259)\r\n self.ui.tableWidget.setSelectionBehavior(1)\r\n self.ui.tableWidget.setSelectionMode(1)\r\n\r\n self.ui.pushButton.clicked.connect(self.click_add)\r\n self.ui.pushButton_2.clicked.connect(self.click_edit)\r\n self.ui.pushButton_3.clicked.connect(self.click_del)\r\n self.ui.pushButton_4.clicked.connect(self.click_cancel)\r\n\r\n passport = self.passport_rep.get_passports()\r\n self.ui.tableWidget.setRowCount(len(passport))\r\n row = 0\r\n for i in passport:\r\n id_passport = QTableWidgetItem(str(i['id']))\r\n id_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n serial_passport = QTableWidgetItem(i['serial'])\r\n serial_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n number_passport = QTableWidgetItem(i['number'])\r\n number_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n self.ui.tableWidget.setItem(row, 0, id_passport)\r\n self.ui.tableWidget.setItem(row, 1, serial_passport)\r\n self.ui.tableWidget.setItem(row, 2, number_passport)\r\n row += 1\r\n\r\n def click_add(self):\r\n p_dict = {'id': -1, 'serial': \"\", 'number': \"\"}\r\n self.passport_rep.set_dict(p_dict)\r\n passport_add = PassportAddDialog(self.passport_rep)\r\n if (passport_add.exec()):\r\n passport_d = self.passport_rep.get_dict()\r\n count_row = self.ui.tableWidget.rowCount()\r\n self.ui.tableWidget.setRowCount(count_row + 1)\r\n id_passport = QTableWidgetItem(str(passport_d['id']))\r\n id_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n serial = QTableWidgetItem(passport_d['serial'])\r\n serial.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n number = QTableWidgetItem(passport_d['number'])\r\n number.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\r\n self.ui.tableWidget.setItem(count_row, 1, serial)\r\n self.ui.tableWidget.setItem(count_row, 2, number)\r\n\r\n def click_edit(self):\r\n edit_list = self.ui.tableWidget.selectedItems()\r\n if (len(edit_list)):\r\n select_row = self.ui.tableWidget.currentRow()\r\n edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1].text(), 'number': edit_list[2].text()}\r\n self.passport_rep.set_dict(edit_d)\r\n passport_edit = PassportAddDialog(self.passport_rep)\r\n if (passport_edit.exec()):\r\n passport_d = self.passport_rep.get_dict()\r\n id_passport = QTableWidgetItem(str(passport_d['id']))\r\n id_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n serial = QTableWidgetItem(passport_d['serial'])\r\n serial.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n number = QTableWidgetItem(passport_d['number'])\r\n number.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n self.ui.tableWidget.setItem(select_row, 0, id_passport)\r\n self.ui.tableWidget.setItem(select_row, 1, serial)\r\n 
self.ui.tableWidget.setItem(select_row, 2, number)\r\n def click_del(self):\r\n del_list = self.ui.tableWidget.selectedItems()\r\n if (len(del_list)):\r\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].text(), 'number': del_list[2].text()}\r\n self.passport_rep.del_passport(del_p)\r\n self.ui.tableWidget.removeRow(del_list[0].row())\r\n def click_cancel(self):\r\n self.accept()",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
from flask import Flask
from flask import render_template
# Creates a Flask application called 'app'
app = Flask(__name__, template_folder=r'C:\Users\jwhitehead\Documents\Webdev\Angular Web App')  # raw string: '\U' in a plain literal is an invalid unicode escape
# The route to display the HTML template on
@app.route('/')
def host():
return render_template('index.html')
# Run the Flask application
if __name__ == "__main__":
    app.run(host='localhost', port=80)
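# A more portable alternative to the absolute Windows path above (a sketch; the
# relative 'templates' folder name is an assumption):
# import os
# app = Flask(__name__, template_folder=os.path.join(os.path.dirname(__file__), 'templates'))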
|
normal
|
{
"blob_id": "3e1e2de555667bf09162cd6c62cad35dabbd0f54",
"index": 2482,
"step-1": "from flask import Flask\nfrom flask import render_template\n\n# Creates a Flask application called 'app'\napp = Flask(__name__, template_folder='C:\\Users\\jwhitehead\\Documents\\Webdev\\Angular Web App')\n\n# The route to display the HTML template on\[email protected]('/')\ndef host():\n return render_template('index.html')\n\n# Run the Flask application\nif __name__ == \"__main__\":\n app.run(host='localhost', port='80')\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django.apps import AppConfig


class AutomationserverConfig(AppConfig):
name = 'automationserver'
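# Registration sketch (assumption: the project's settings.py), so Django loads the app:
# INSTALLED_APPS = [
#     ...,
#     'automationserver.apps.AutomationserverConfig',
# ]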
|
normal
|
{
"blob_id": "3153218fe1d67fdc1c1957ffcfdb380688c159c1",
"index": 6483,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AutomationserverConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AutomationserverConfig(AppConfig):\n name = 'automationserver'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass AutomationserverConfig(AppConfig):\n name = 'automationserver'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from IPython import display
display.Image("./image.png")
|
normal
|
{
"blob_id": "3f5096ef5677373a1e436f454109c7b7577c0205",
"index": 6169,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndisplay.Image('./image.png')\n",
"step-3": "from IPython import display\ndisplay.Image('./image.png')\n",
"step-4": "from IPython import display\ndisplay.Image(\"./image.png\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from manim import *


class SlidingDoorIllustration(Scene):
def construct(self):
waiting_room = Rectangle(color=BLUE, stroke_width=8)
waiting_room.shift(LEFT + DOWN)
workspace = Rectangle(color=BLUE, stroke_width=8)
workspace.next_to(waiting_room, RIGHT + UP, buff=0)
workspace.shift(LEFT)
t1 = Text("Waiting Room").move_to(waiting_room.get_center()).scale(0.5)
t2 = Text("Workspace").move_to(workspace.get_center()).scale(0.5)
doors = Line(workspace.get_corner(DL) + LEFT, waiting_room.get_corner(UR), color=RED, stroke_width=8)
door = Line(workspace.get_corner(DL), waiting_room.get_corner(UR), color=GREEN, stroke_width=8)
self.add(waiting_room, workspace, t1, t2, doors, door)
self.play(door.animate.shift(LEFT))
self.wait()
self.play(door.animate.shift(RIGHT))
self.wait()
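
# To render this scene with the Manim Community CLI (assuming this file is saved as
# scene.py):
#   manim -pql scene.py SlidingDoorIllustration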
|
normal
|
{
"blob_id": "e93d5461a2604d3b8015489397c68e16d1cb222e",
"index": 3695,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass SlidingDoorIllustration(Scene):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass SlidingDoorIllustration(Scene):\n\n def construct(self):\n waiting_room = Rectangle(color=BLUE, stroke_width=8)\n waiting_room.shift(LEFT + DOWN)\n workspace = Rectangle(color=BLUE, stroke_width=8)\n workspace.next_to(waiting_room, RIGHT + UP, buff=0)\n workspace.shift(LEFT)\n t1 = Text('Waiting Room').move_to(waiting_room.get_center()).scale(0.5)\n t2 = Text('Workspace').move_to(workspace.get_center()).scale(0.5)\n doors = Line(workspace.get_corner(DL) + LEFT, waiting_room.\n get_corner(UR), color=RED, stroke_width=8)\n door = Line(workspace.get_corner(DL), waiting_room.get_corner(UR),\n color=GREEN, stroke_width=8)\n self.add(waiting_room, workspace, t1, t2, doors, door)\n self.play(door.animate.shift(LEFT))\n self.wait()\n self.play(door.animate.shift(RIGHT))\n self.wait()\n",
"step-4": "from manim import *\n\n\nclass SlidingDoorIllustration(Scene):\n\n def construct(self):\n waiting_room = Rectangle(color=BLUE, stroke_width=8)\n waiting_room.shift(LEFT + DOWN)\n workspace = Rectangle(color=BLUE, stroke_width=8)\n workspace.next_to(waiting_room, RIGHT + UP, buff=0)\n workspace.shift(LEFT)\n t1 = Text('Waiting Room').move_to(waiting_room.get_center()).scale(0.5)\n t2 = Text('Workspace').move_to(workspace.get_center()).scale(0.5)\n doors = Line(workspace.get_corner(DL) + LEFT, waiting_room.\n get_corner(UR), color=RED, stroke_width=8)\n door = Line(workspace.get_corner(DL), waiting_room.get_corner(UR),\n color=GREEN, stroke_width=8)\n self.add(waiting_room, workspace, t1, t2, doors, door)\n self.play(door.animate.shift(LEFT))\n self.wait()\n self.play(door.animate.shift(RIGHT))\n self.wait()\n",
"step-5": "from manim import *\n\n\nclass SlidingDoorIllustration(Scene):\n def construct(self):\n waiting_room = Rectangle(color=BLUE, stroke_width=8)\n waiting_room.shift(LEFT + DOWN)\n workspace = Rectangle(color=BLUE, stroke_width=8)\n workspace.next_to(waiting_room, RIGHT + UP, buff=0)\n workspace.shift(LEFT)\n t1 = Text(\"Waiting Room\").move_to(waiting_room.get_center()).scale(0.5)\n t2 = Text(\"Workspace\").move_to(workspace.get_center()).scale(0.5)\n doors = Line(workspace.get_corner(DL) + LEFT, waiting_room.get_corner(UR), color=RED, stroke_width=8)\n door = Line(workspace.get_corner(DL), waiting_room.get_corner(UR), color=GREEN, stroke_width=8)\n self.add(waiting_room, workspace, t1, t2, doors, door)\n self.play(door.animate.shift(LEFT))\n self.wait()\n self.play(door.animate.shift(RIGHT))\n self.wait()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import random
import string
import steembase
import struct
import steem
from time import sleep
from time import time
from steem.transactionbuilder import TransactionBuilder
from steembase import operations
from steembase.transactions import SignedTransaction
from resultthread import MyThread
from charm.toolbox.pairinggroup import PairingGroup, ZR, G1, G2, GT, pair
from charm.toolbox.secretutil import SecretUtil
class GroupSignature():
def __init__(self, groupObj):
global util, group
util = SecretUtil(groupObj, debug)
self.group = groupObj
def pkGen(self, h1str):
gstr = "[6172776968119684165170291368128433652817636448173749093457023424948260385279837018774774149930982188956916913145008943931711059687988096415181819433817738, 8687587692191287108886119971783525001480020593934954052605681527814232399216375005546606067382536684351686344089456732201641997200939472924879001214689004]"
g2str = "[7648994551207171188393784904797547917038803147671542540175090956205316897431443264058433935237605598252399113847934759009659621851760599508222321653067284, 922489308494109901795721463782161260386164061515796674638135394871842997698175772871045949554746517321480649326465484116060959631197509151923296896589720]"
u0str = "[180015966842918451436547451263180245588308971597733548673037049536176684754209695288737508087729924028686259002375511049961436438196866049956546630518033, 1295050197915669955783867959538729894307963685491173858450359845766785488725907727220684060845012524740394664162328817669422178637925195059862486690053923]"
u1str = "[2555472719769037960206282327195096320915753855199743796256065902544200822503613205017219993060986152240852358189992579821797745072366030183800897743028220, 7573705235093543416041007636313631591000596820214067724084077929638801811700093589294454562385664531190678890366928407286293582994146887505184778221562373]"
u2str = "[6876276970903121931083294698771200898345396507892092532649392211995185517437159402176975528760594250374462299539306423347676182899798006533425047523984724, 5323739238507219125881988073888745575030677585404965990610324901624530474522642705344792075909082041735695801098770187248023797265998906693745587936574078]"
u3str = "[6628726193389375981104409894060310698729022957801238449570622103067828518416602275957863668289683360250722835022304456841105526036470008237775051984811323, 862537748555943361001122447731987661405436458862545177179548603003392540530328380518694788420155531238391922289886044667763424887444361610972254938158280]"
u4str = "[8157254219580822599577995921928211211847392705248772673869189421041858895589817404931780741226510985762564598862965174380020566416411083236239871342674775, 4736677719200783513058679582227494204159737596114643136852532046080608159561620208171676599501713934575216178076006396924589443776642926902969084668055006]"
hstr = "[6248393417805371388321299785844751688345516419281230263497475615452026459314582553252281068616984105757749673095320346188725995701858182333525688832492249, 351368339412205819108519989143352052898751906937356995136442397753142226531384069336237369861919799955237545207977716196031001184146017796598836939617335]"
nstr = "[75201312764006187596691102237923705656296213254701583615255122742135170369075831428394751330697143847448434841509551532135632624530360013837581615049543, 3886258599652934715331576083899336629981754505948456216299528998628273512432828729344158706718479567056972375128622026273382126529171409058157562418608963]"
g = self.group.fromstr(gstr, 10, G1)
g2 = self.group.fromstr(g2str, 10, G2)
u0 = self.group.fromstr(u0str, 10, G2)
u1 = self.group.fromstr(u1str, 10, G2)
u2 = self.group.fromstr(u2str, 10, G2)
u3 = self.group.fromstr(u3str, 10, G2)
u4 = self.group.fromstr(u4str, 10, G2)
h = self.group.fromstr(hstr, 10, G1)
n = self.group.fromstr(nstr, 10, GT)
h1 = self.group.fromstr(h1str, 10, G1)
pk = {'g': g, 'g2': g2, 'u0': u0, 'u1': u1, 'u2': u2, 'u3': u3, 'u4': u4, 'h': h, 'n': n, 'h1': h1}
return pk
def uskGen(self, usklist, pk, GID, UID, L, k):
t1 = time()
b0 = self.group.gen1_0(1)
b3 = self.group.gen1_0(1)
b4 = self.group.gen1_0(1)
b5 = self.group.gen1_0(1)
r2 = self.group.random(ZR)
for i in range(k):
b0 = b0 * (usklist[i]['b0'] ** L[i])
b3 = b3 * (usklist[i]['b3'] ** L[i])
b4 = b4 * (usklist[i]['b4'] ** L[i])
b5 = b5 * (usklist[i]['b5'] ** L[i])
b0 = b0 * (pk['u0'] * (pk['u1'] ** GID) * (pk['u2'] ** UID)) ** r2
b3 = b3 * (pk['u3'] ** r2)
b4 = b4 * (pk['u4'] ** r2)
b5 = b5 * (pk['g'] ** r2)
usk = {'b0': b0, 'b3': b3, 'b4': b4, 'b5': b5}
t2 = time()
with open("extracttime.txt", 'a') as f:
f.write(str(t2 - t1))
f.write('\n')
return usk
def LGen(self, n, k):
L = []
I = self.group.random(ZR)
J = self.group.random(ZR)
for i in range(n):
L.append(self.group.random(ZR))
L[i].set(1)
I.set(i + 1)
for j in range(1, k + 1):
print(j)
J.set(j)
if (i + 1) != j:
L[i] = L[i] * ((J) / (J - I))
return L
def verifyUsk(self, usk, vk, pk, GID, UID):
g = pk['g']
g2 = pk['g2']
u0 = pk['u0']
u1 = pk['u1']
u2 = pk['u2']
u3 = pk['u3']
u4 = pk['u4']
b0 = usk['b0']
b5 = usk['b5']
b3 = usk['b3']
b4 = usk['b4']
return pair(g, b0) == (pair(vk, g2) * pair(b5, u0) * pair(b5, u1 ** GID) * pair(b5, u2 ** UID)) and pair(g,
b3) == pair(
b5, u3) and pair(g, b4) == pair(b5, u4)
def sign(self, title, usk, pk, GID, UID, groupID):
t1 = time()
m = self.group.hash(title)
b0 = usk['b0']
b3 = usk['b3']
b4 = usk['b4']
b5 = usk['b5']
r4 = self.group.random(ZR)
r3 = self.group.random(ZR)
k = self.group.random(ZR)
c0 = b0 * (b3 ** m) * (b4 ** r4) * (
(pk['u0'] * (pk['u1'] ** GID) * (pk['u2'] ** UID) * (pk['u3'] ** m) * (pk['u4'] ** r4)) ** r3)
c5 = b5 * (pk['g'] ** r3)
c6 = (pk['u2'] ** UID) * (pk['u4'] ** r4)
e1 = pk['g'] ** k
e2 = (pk['u0'] * (pk['u1'] ** GID)) ** k
e3 = (pk['n'] ** UID) * (pair(pk['h1'], pk['g2']) ** k)
        # generate the proof of knowledge (PoK)
f = pk['u0'] * (pk['u1'] ** GID)
gp = pair(pk['h1'], pk['g2'])
k1 = self.group.random(ZR)
k2 = self.group.random(ZR)
k3 = self.group.random(ZR)
r1 = (pk['u2'] ** k1) * (pk['u4'] ** k2)
r2 = pk['g'] ** k3
r3 = f ** k3
t4 = (pk['n'] ** k1) * (gp ** k3)
hashstr = str(r1) + str(r2) + str(r3) + str(t4)
c = self.group.hash(hashstr)
s1 = k1 + c * UID
s2 = k2 + c * r4
s3 = k3 + c * k
signature = {'c0': c0, 'c5': c5, 'c6': c6, 'e1': e1, 'e2': e2, 'e3': e3, 'c': c, 's1': s1, 's2': s2, 's3': s3}
t2 = time()
with open("gssigntime.txt", 'a') as f:
f.write(str(t2 - t1))
f.write('\n')
print("gs time", t2 - t1)
return signature
def open(self, okliststr, L, k):
t1 = time()
oklist = []
for ok in okliststr:
oklist.append({'ok1': self.group.fromstr(ok['ok1'], 10, GT), 'ok2': self.group.fromstr(ok['ok2'], 10, GT)})
ok1 = self.group.gen1_0(1)
ok2 = self.group.gen1_0(1)
for i in range(k):
ok1 = ok1 * (oklist[i]['ok1'] ** L[i])
ok2 = ok2 * (oklist[i]['ok2'] ** L[i])
t2 = time()
with open("opentime.txt", 'a') as f:
f.write(str(t2 - t1))
f.write('\n')
print("open time", t2 - t1)
return ok1 / ok2
def get_usk(userID, GID, UID, h1str="", count=0):
pk = {}
for i in range(n):
vkliststr.append(clientlist[i].get_vk()['vk'])
vklist.append(group_signature.group.fromstr(vkliststr[i], 10, G1))
uskliststr.append(clientlist[i].user_extract(userID))
usklist.append({})
usklist[i]['b0'] = group_signature.group.fromstr(uskliststr[i]['b0'], 10, G2)
usklist[i]['b3'] = group_signature.group.fromstr(uskliststr[i]['b3'], 10, G2)
usklist[i]['b4'] = group_signature.group.fromstr(uskliststr[i]['b4'], 10, G2)
usklist[i]['b5'] = group_signature.group.fromstr(uskliststr[i]['b5'], 10, G1)
print(usklist[i])
if h1str == "" or h1str == "0" or h1str == 0:
h1str = clientlist[i].get_pk()['pk']
print("h1str", h1str)
pk = group_signature.pkGen(h1str)
print("pk---------------\n", pk)
if (group_signature.verifyUsk(usklist[i], vklist[i], pk, GID, UID)):
count = count + 1
else:
print("key is invalide\n\n")
usk = group_signature.uskGen(usklist, pk, GID, UID, L, k)
print("usk---------------\n", usk)
return pk, usk
def get_lam(sig):
okliststr = []
i = 0
for client in clientlist:
okstr = client.get_ok(str(sig['e1']), str(sig['e2']))
print(okstr)
okliststr.append(okstr)
i = i + 1
if i < k:
print("the number of ok is not enough\n")
return
lam = group_signature.open(okliststr, L, k)
return lam
def tx_build_broad(op, steemd_instance, wallet_instance, account):
tx = TransactionBuilder(steemd_instance=steemd_instance, wallet_instance=wallet_instance,
no_broadcast=False)
tx.appendOps(op)
tx.appendSigner(account, 'posting')
tx.sign()
# print("txsign",tx)
re = tx.broadcast()
return re
def tx_build(op, steemd_instance, wallet_instance, account):
tx = TransactionBuilder(steemd_instance=steemd_instance, wallet_instance=wallet_instance,
no_broadcast=False)
tx.appendOps(op)
tx.appendSigner(account, 'posting')
tx.sign()
# print("txsign",tx)
# re = tx.broadcast()
return tx
def annoy_commit(account, usk, pk, GID, UID, title="paper_title", body="paper_body", groupID="computer"):
annoy_author = 'nya'
    # group signature: the title is required here; it is hashed and the hash is signed with usk
sig = group_signature.sign(title, usk, pk, GID, UID, groupID)
permlink = ''.join(random.choices(string.digits, k=7))
print("permlink is " + permlink)
op = operations.CommitPaper(
**{
"account": account,
"author": annoy_author,
"permlink": permlink,
"title": title,
"body": body,
"json_metadata": "",
"c0": str(sig['c0']),
"c5": str(sig['c5']),
"c6": str(sig['c6']),
"e1": str(sig['e1']),
"e2": str(sig['e2']),
"e3": str(sig['e3']),
"c": str(sig['c']),
"s1": str(sig['s1']),
"s2": str(sig['s2']),
"s3": str(sig['s3'])
}
)
print("commitop", op)
return op, sig, permlink
def open_op(account, sig, userID, permlink):
lam = get_lam(sig)
    # E = (pk['n'] ** UID) * lam  # would recompute the signature's e3; equality confirms the opening
op = operations.ApplyOpen(
**{
'account': account,
'author': userID,
'lambda': str(lam),
'permlink': permlink,
'json_metadata': ""
}
)
return op
def annoy_commit_tx(account, usk, pk, GID, UID, steemd_instance, wallet_instance, title="paper_title",
body="paper_body"):
    commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID, title=title, body=body,
                                            groupID="computer")
re = tx_build_broad(commitop, steemd_instance, wallet_instance, account)
print("commit-re", re)
return ssig, permlink
def open_tx(account, ssig, userID, permlink, steemd_instance, wallet_instance):
openop = open_op(account, ssig, userID, permlink)
re = tx_build_broad(openop, steemd_instance, wallet_instance, account)
print("open-re", re)
# concurrent transaction generation on a single node
def one_mul_annoy_tx(account, usk, pk, UID, steemd, wallet):
ssiglistone = []
permlinklistone = []
threads = []
for i in range(nodeTX):
t = MyThread(annoy_commit_tx, args=(account, usk, pk, GID, UID, steemd, wallet))
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
for t in threads:
ssig, permlink = t.get_result()
ssiglistone.append(ssig)
permlinklistone.append(permlink)
return ssiglistone, permlinklistone
def one_mul_open_tx(account, ssiglistone, userID, permlinklistone, steemd, wallet):
threads = []
for i in range(nodeTX):
t = MyThread(open_tx,
args=(account, ssiglistone[i], userID, permlinklistone[i], steemd, wallet))
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
def mul_annoy_tx(usk, pk, UID):
ssiglist = []
permlinklist = []
threads = []
for i in range(n):
# t = MyThread(annoy_commit_tx, args=(accountlist[i], usk, pk, GID, UID, clientlist[i].steemd, clientlist[i].wallet))
t = MyThread(one_mul_annoy_tx,
args=(accountlist[i], usk, pk, UID, clientlist[i].steemd, clientlist[i].wallet))
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
for t in threads:
ssig, permlink = t.get_result()
ssiglist.append(ssig)
permlinklist.append(permlink)
return ssiglist, permlinklist
# multiple nodes, each generating transactions concurrently
def mul_open_tx(ssiglist, permlinklist, userID):
threads = []
for i in range(n):
# t = MyThread(open_tx,
# args=(accountlist[i], ssiglist[i], userID, permlinklist[i], clientlist[i].steemd, clientlist[i].wallet))
t = MyThread(one_mul_open_tx,
args=(
accountlist[i], ssiglist[i], userID, permlinklist[i], clientlist[i].steemd, clientlist[i].wallet))
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
# for t in threads:
# t.get_result()
# build the tx only, without broadcasting it
def creat_commit_tx(account, usk, pk, GID, UID, steemd_instance, wallet_instance, title="paper_title",
body="paper_body"):
commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID, title, body, groupID="computer")
commit_tx = tx_build(commitop, steemd_instance, wallet_instance, account)
return ssig, permlink, commit_tx
def creat_num_commit_tx(num, account, usk, pk, GID, UID, steemd_instance, wallet_instance, ttitle="paper_title",
tbody="paper_body"):
ssiglist = []
permlinklist = []
txlist = []
threads = []
for i in range(num):
t = MyThread(creat_commit_tx, args=(account, usk, pk, GID, UID, steemd_instance, wallet_instance, ttitle,
tbody))
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
for t in threads:
ssig, permlink, commit_tx = t.get_result()
ssiglist.append(ssig)
permlinklist.append(permlink)
txlist.append(commit_tx)
return ssiglist, permlinklist, txlist
def creat_open_tx(account, ssig, userID, permlink, steemd_instance, wallet_instance):
openop = open_op(account, ssig, userID, permlink)
open_tx = tx_build(openop, steemd_instance, wallet_instance, account)
return open_tx
def creat_num_open_tx(num, account, ssiglist, userID, permlinklist, steemd_instance, wallet_instance):
opentxlist = []
threads = []
for i in range(num):
t = MyThread(creat_open_tx,
args=(account, ssiglist[i], userID, permlinklist[i], steemd_instance,
wallet_instance))
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
for t in threads:
opentx = t.get_result()
opentxlist.append(opentx)
return opentxlist
def tx_broad(tx):
tx.broadcast()
def mul_tx_broad(txlist):
threads = []
for tx in txlist:
t = MyThread(tx_broad, args=(tx,))
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
# public params
nodeTX = 5
k = 2
n = 3  # (k, n) threshold: any k of the n key servers are enough to extract/open
# node addresses
nodelist = [
'http://101.76.208.83:8090',
'http://101.76.208.83:8094',
'http://101.76.208.83:8098'
]
accountlist = ["initminer2", "zy1", "zy2", "zy3", "zy4", "zy5", "zy6", "zy7", "zy8", "zy9", "zy10", "zy11", "zy12",
"zy13", "zy14", "zy15", "zy16", "zy17", "zy18", "zy19", "zy20"]
# all but the first are posting keys: 5Hs4jcm5X4sanCnUKNFCjrq2irN8sH1Krzsb13Qd6DHqutZbhqu
keylist = ['5J3yMruND2TADZ7cZc6Cnp4VePrnehei2wvGdnLgf3aEj2nDGhc', '5Hs4jcm5X4sanCnUKNFCjrq2irN8sH1Krzsb13Qd6DHqutZbhqu', "5KPLLsQ3MuWgKvNYqAFRjziWZenBqefDhSe4K1uYuj8hT3zQoKv"]
debug = True
# group signature setup
groupobj = PairingGroup('SS512')
group_signature = GroupSignature(groupobj)
L = group_signature.LGen(n, k)
# key material
clientlist = []
for i in range(n):
clientlist.append(steem.Steem(nodes=[nodelist[i]], keys=keylist[i]))
vkliststr = []
uskliststr = []
vklist = []
usklist = []
# steem testchain info
steembase.chains.known_chains['TEST'] = {
'chain_id': '18dcf0a285365fc58b71f18b3d3fec954aa0c141c44e4e5cb4cf777b9eab274e',
'prefix': 'TST', 'steem_symbol': 'TESTS', 'sbd_symbol': 'TBD', 'vests_symbol': 'VESTS'
}
groupID = "computer"
GID = group_signature.group.hash(groupID)
def main():
    # assume no unavailable nodes (node status cannot be checked)
userID = "zhou"
UID = group_signature.group.hash(userID)
print("uid", UID)
    # obtain the usk
pk, usk = get_usk(userID, GID, UID)
ssig, permlink = annoy_commit_tx(accountlist[0], usk, pk, GID, UID, clientlist[0].steemd, clientlist[0].wallet, title="paper_title",
body="paper_body")
sleep(3)
open_tx(accountlist[0], ssig, userID, permlink, clientlist[0].steemd, clientlist[0].wallet)
return
if __name__ == "__main__":
main()
print("end")
|
normal
|
{
"blob_id": "a90b7e44cc54d4f96a13e5e6e2d15b632d3c4983",
"index": 290,
"step-1": "<mask token>\n\n\nclass GroupSignature:\n\n def __init__(self, groupObj):\n global util, group\n util = SecretUtil(groupObj, debug)\n self.group = groupObj\n\n def pkGen(self, h1str):\n gstr = (\n '[6172776968119684165170291368128433652817636448173749093457023424948260385279837018774774149930982188956916913145008943931711059687988096415181819433817738, 8687587692191287108886119971783525001480020593934954052605681527814232399216375005546606067382536684351686344089456732201641997200939472924879001214689004]'\n )\n g2str = (\n '[7648994551207171188393784904797547917038803147671542540175090956205316897431443264058433935237605598252399113847934759009659621851760599508222321653067284, 922489308494109901795721463782161260386164061515796674638135394871842997698175772871045949554746517321480649326465484116060959631197509151923296896589720]'\n )\n u0str = (\n '[180015966842918451436547451263180245588308971597733548673037049536176684754209695288737508087729924028686259002375511049961436438196866049956546630518033, 1295050197915669955783867959538729894307963685491173858450359845766785488725907727220684060845012524740394664162328817669422178637925195059862486690053923]'\n )\n u1str = (\n '[2555472719769037960206282327195096320915753855199743796256065902544200822503613205017219993060986152240852358189992579821797745072366030183800897743028220, 7573705235093543416041007636313631591000596820214067724084077929638801811700093589294454562385664531190678890366928407286293582994146887505184778221562373]'\n )\n u2str = (\n '[6876276970903121931083294698771200898345396507892092532649392211995185517437159402176975528760594250374462299539306423347676182899798006533425047523984724, 5323739238507219125881988073888745575030677585404965990610324901624530474522642705344792075909082041735695801098770187248023797265998906693745587936574078]'\n )\n u3str = (\n '[6628726193389375981104409894060310698729022957801238449570622103067828518416602275957863668289683360250722835022304456841105526036470008237775051984811323, 862537748555943361001122447731987661405436458862545177179548603003392540530328380518694788420155531238391922289886044667763424887444361610972254938158280]'\n )\n u4str = (\n '[8157254219580822599577995921928211211847392705248772673869189421041858895589817404931780741226510985762564598862965174380020566416411083236239871342674775, 4736677719200783513058679582227494204159737596114643136852532046080608159561620208171676599501713934575216178076006396924589443776642926902969084668055006]'\n )\n hstr = (\n '[6248393417805371388321299785844751688345516419281230263497475615452026459314582553252281068616984105757749673095320346188725995701858182333525688832492249, 351368339412205819108519989143352052898751906937356995136442397753142226531384069336237369861919799955237545207977716196031001184146017796598836939617335]'\n )\n nstr = (\n '[75201312764006187596691102237923705656296213254701583615255122742135170369075831428394751330697143847448434841509551532135632624530360013837581615049543, 3886258599652934715331576083899336629981754505948456216299528998628273512432828729344158706718479567056972375128622026273382126529171409058157562418608963]'\n )\n g = self.group.fromstr(gstr, 10, G1)\n g2 = self.group.fromstr(g2str, 10, G2)\n u0 = self.group.fromstr(u0str, 10, G2)\n u1 = self.group.fromstr(u1str, 10, G2)\n u2 = self.group.fromstr(u2str, 10, G2)\n u3 = self.group.fromstr(u3str, 10, G2)\n u4 = self.group.fromstr(u4str, 10, G2)\n h = self.group.fromstr(hstr, 10, G1)\n n = self.group.fromstr(nstr, 10, GT)\n h1 = 
self.group.fromstr(h1str, 10, G1)\n pk = {'g': g, 'g2': g2, 'u0': u0, 'u1': u1, 'u2': u2, 'u3': u3,\n 'u4': u4, 'h': h, 'n': n, 'h1': h1}\n return pk\n\n def uskGen(self, usklist, pk, GID, UID, L, k):\n t1 = time()\n b0 = self.group.gen1_0(1)\n b3 = self.group.gen1_0(1)\n b4 = self.group.gen1_0(1)\n b5 = self.group.gen1_0(1)\n r2 = self.group.random(ZR)\n for i in range(k):\n b0 = b0 * usklist[i]['b0'] ** L[i]\n b3 = b3 * usklist[i]['b3'] ** L[i]\n b4 = b4 * usklist[i]['b4'] ** L[i]\n b5 = b5 * usklist[i]['b5'] ** L[i]\n b0 = b0 * (pk['u0'] * pk['u1'] ** GID * pk['u2'] ** UID) ** r2\n b3 = b3 * pk['u3'] ** r2\n b4 = b4 * pk['u4'] ** r2\n b5 = b5 * pk['g'] ** r2\n usk = {'b0': b0, 'b3': b3, 'b4': b4, 'b5': b5}\n t2 = time()\n with open('extracttime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n return usk\n\n def LGen(self, n, k):\n L = []\n I = self.group.random(ZR)\n J = self.group.random(ZR)\n for i in range(n):\n L.append(self.group.random(ZR))\n L[i].set(1)\n I.set(i + 1)\n for j in range(1, k + 1):\n print(j)\n J.set(j)\n if i + 1 != j:\n L[i] = L[i] * (J / (J - I))\n return L\n\n def verifyUsk(self, usk, vk, pk, GID, UID):\n g = pk['g']\n g2 = pk['g2']\n u0 = pk['u0']\n u1 = pk['u1']\n u2 = pk['u2']\n u3 = pk['u3']\n u4 = pk['u4']\n b0 = usk['b0']\n b5 = usk['b5']\n b3 = usk['b3']\n b4 = usk['b4']\n return pair(g, b0) == pair(vk, g2) * pair(b5, u0) * pair(b5, u1 ** GID\n ) * pair(b5, u2 ** UID) and pair(g, b3) == pair(b5, u3) and pair(g,\n b4) == pair(b5, u4)\n\n def sign(self, title, usk, pk, GID, UID, groupID):\n t1 = time()\n m = self.group.hash(title)\n b0 = usk['b0']\n b3 = usk['b3']\n b4 = usk['b4']\n b5 = usk['b5']\n r4 = self.group.random(ZR)\n r3 = self.group.random(ZR)\n k = self.group.random(ZR)\n c0 = b0 * b3 ** m * b4 ** r4 * (pk['u0'] * pk['u1'] ** GID * pk[\n 'u2'] ** UID * pk['u3'] ** m * pk['u4'] ** r4) ** r3\n c5 = b5 * pk['g'] ** r3\n c6 = pk['u2'] ** UID * pk['u4'] ** r4\n e1 = pk['g'] ** k\n e2 = (pk['u0'] * pk['u1'] ** GID) ** k\n e3 = pk['n'] ** UID * pair(pk['h1'], pk['g2']) ** k\n f = pk['u0'] * pk['u1'] ** GID\n gp = pair(pk['h1'], pk['g2'])\n k1 = self.group.random(ZR)\n k2 = self.group.random(ZR)\n k3 = self.group.random(ZR)\n r1 = pk['u2'] ** k1 * pk['u4'] ** k2\n r2 = pk['g'] ** k3\n r3 = f ** k3\n t4 = pk['n'] ** k1 * gp ** k3\n hashstr = str(r1) + str(r2) + str(r3) + str(t4)\n c = self.group.hash(hashstr)\n s1 = k1 + c * UID\n s2 = k2 + c * r4\n s3 = k3 + c * k\n signature = {'c0': c0, 'c5': c5, 'c6': c6, 'e1': e1, 'e2': e2, 'e3':\n e3, 'c': c, 's1': s1, 's2': s2, 's3': s3}\n t2 = time()\n with open('gssigntime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print('gs time', t2 - t1)\n return signature\n\n def open(self, okliststr, L, k):\n t1 = time()\n oklist = []\n for ok in okliststr:\n oklist.append({'ok1': self.group.fromstr(ok['ok1'], 10, GT),\n 'ok2': self.group.fromstr(ok['ok2'], 10, GT)})\n ok1 = self.group.gen1_0(1)\n ok2 = self.group.gen1_0(1)\n for i in range(k):\n ok1 = ok1 * oklist[i]['ok1'] ** L[i]\n ok2 = ok2 * oklist[i]['ok2'] ** L[i]\n t2 = time()\n with open('opentime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print('open time', t2 - t1)\n return ok1 / ok2\n\n\n<mask token>\n\n\ndef creat_commit_tx(account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, title='paper_title', body='paper_body'):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID,\n title, body, groupID='computer')\n commit_tx = tx_build(commitop, steemd_instance, wallet_instance, account)\n return 
ssig, permlink, commit_tx\n\n\n<mask token>\n\n\ndef mul_tx_broad(txlist):\n threads = []\n for tx in txlist:\n t = MyThread(tx_broad, args=(tx,))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass GroupSignature:\n\n def __init__(self, groupObj):\n global util, group\n util = SecretUtil(groupObj, debug)\n self.group = groupObj\n\n def pkGen(self, h1str):\n gstr = (\n '[6172776968119684165170291368128433652817636448173749093457023424948260385279837018774774149930982188956916913145008943931711059687988096415181819433817738, 8687587692191287108886119971783525001480020593934954052605681527814232399216375005546606067382536684351686344089456732201641997200939472924879001214689004]'\n )\n g2str = (\n '[7648994551207171188393784904797547917038803147671542540175090956205316897431443264058433935237605598252399113847934759009659621851760599508222321653067284, 922489308494109901795721463782161260386164061515796674638135394871842997698175772871045949554746517321480649326465484116060959631197509151923296896589720]'\n )\n u0str = (\n '[180015966842918451436547451263180245588308971597733548673037049536176684754209695288737508087729924028686259002375511049961436438196866049956546630518033, 1295050197915669955783867959538729894307963685491173858450359845766785488725907727220684060845012524740394664162328817669422178637925195059862486690053923]'\n )\n u1str = (\n '[2555472719769037960206282327195096320915753855199743796256065902544200822503613205017219993060986152240852358189992579821797745072366030183800897743028220, 7573705235093543416041007636313631591000596820214067724084077929638801811700093589294454562385664531190678890366928407286293582994146887505184778221562373]'\n )\n u2str = (\n '[6876276970903121931083294698771200898345396507892092532649392211995185517437159402176975528760594250374462299539306423347676182899798006533425047523984724, 5323739238507219125881988073888745575030677585404965990610324901624530474522642705344792075909082041735695801098770187248023797265998906693745587936574078]'\n )\n u3str = (\n '[6628726193389375981104409894060310698729022957801238449570622103067828518416602275957863668289683360250722835022304456841105526036470008237775051984811323, 862537748555943361001122447731987661405436458862545177179548603003392540530328380518694788420155531238391922289886044667763424887444361610972254938158280]'\n )\n u4str = (\n '[8157254219580822599577995921928211211847392705248772673869189421041858895589817404931780741226510985762564598862965174380020566416411083236239871342674775, 4736677719200783513058679582227494204159737596114643136852532046080608159561620208171676599501713934575216178076006396924589443776642926902969084668055006]'\n )\n hstr = (\n '[6248393417805371388321299785844751688345516419281230263497475615452026459314582553252281068616984105757749673095320346188725995701858182333525688832492249, 351368339412205819108519989143352052898751906937356995136442397753142226531384069336237369861919799955237545207977716196031001184146017796598836939617335]'\n )\n nstr = (\n '[75201312764006187596691102237923705656296213254701583615255122742135170369075831428394751330697143847448434841509551532135632624530360013837581615049543, 3886258599652934715331576083899336629981754505948456216299528998628273512432828729344158706718479567056972375128622026273382126529171409058157562418608963]'\n )\n g = self.group.fromstr(gstr, 10, G1)\n g2 = self.group.fromstr(g2str, 10, G2)\n u0 = self.group.fromstr(u0str, 10, G2)\n u1 = self.group.fromstr(u1str, 10, G2)\n u2 = self.group.fromstr(u2str, 10, G2)\n u3 = self.group.fromstr(u3str, 10, G2)\n u4 = self.group.fromstr(u4str, 10, G2)\n h = self.group.fromstr(hstr, 10, G1)\n n = self.group.fromstr(nstr, 10, GT)\n h1 = 
self.group.fromstr(h1str, 10, G1)\n pk = {'g': g, 'g2': g2, 'u0': u0, 'u1': u1, 'u2': u2, 'u3': u3,\n 'u4': u4, 'h': h, 'n': n, 'h1': h1}\n return pk\n\n def uskGen(self, usklist, pk, GID, UID, L, k):\n t1 = time()\n b0 = self.group.gen1_0(1)\n b3 = self.group.gen1_0(1)\n b4 = self.group.gen1_0(1)\n b5 = self.group.gen1_0(1)\n r2 = self.group.random(ZR)\n for i in range(k):\n b0 = b0 * usklist[i]['b0'] ** L[i]\n b3 = b3 * usklist[i]['b3'] ** L[i]\n b4 = b4 * usklist[i]['b4'] ** L[i]\n b5 = b5 * usklist[i]['b5'] ** L[i]\n b0 = b0 * (pk['u0'] * pk['u1'] ** GID * pk['u2'] ** UID) ** r2\n b3 = b3 * pk['u3'] ** r2\n b4 = b4 * pk['u4'] ** r2\n b5 = b5 * pk['g'] ** r2\n usk = {'b0': b0, 'b3': b3, 'b4': b4, 'b5': b5}\n t2 = time()\n with open('extracttime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n return usk\n\n def LGen(self, n, k):\n L = []\n I = self.group.random(ZR)\n J = self.group.random(ZR)\n for i in range(n):\n L.append(self.group.random(ZR))\n L[i].set(1)\n I.set(i + 1)\n for j in range(1, k + 1):\n print(j)\n J.set(j)\n if i + 1 != j:\n L[i] = L[i] * (J / (J - I))\n return L\n\n def verifyUsk(self, usk, vk, pk, GID, UID):\n g = pk['g']\n g2 = pk['g2']\n u0 = pk['u0']\n u1 = pk['u1']\n u2 = pk['u2']\n u3 = pk['u3']\n u4 = pk['u4']\n b0 = usk['b0']\n b5 = usk['b5']\n b3 = usk['b3']\n b4 = usk['b4']\n return pair(g, b0) == pair(vk, g2) * pair(b5, u0) * pair(b5, u1 ** GID\n ) * pair(b5, u2 ** UID) and pair(g, b3) == pair(b5, u3) and pair(g,\n b4) == pair(b5, u4)\n\n def sign(self, title, usk, pk, GID, UID, groupID):\n t1 = time()\n m = self.group.hash(title)\n b0 = usk['b0']\n b3 = usk['b3']\n b4 = usk['b4']\n b5 = usk['b5']\n r4 = self.group.random(ZR)\n r3 = self.group.random(ZR)\n k = self.group.random(ZR)\n c0 = b0 * b3 ** m * b4 ** r4 * (pk['u0'] * pk['u1'] ** GID * pk[\n 'u2'] ** UID * pk['u3'] ** m * pk['u4'] ** r4) ** r3\n c5 = b5 * pk['g'] ** r3\n c6 = pk['u2'] ** UID * pk['u4'] ** r4\n e1 = pk['g'] ** k\n e2 = (pk['u0'] * pk['u1'] ** GID) ** k\n e3 = pk['n'] ** UID * pair(pk['h1'], pk['g2']) ** k\n f = pk['u0'] * pk['u1'] ** GID\n gp = pair(pk['h1'], pk['g2'])\n k1 = self.group.random(ZR)\n k2 = self.group.random(ZR)\n k3 = self.group.random(ZR)\n r1 = pk['u2'] ** k1 * pk['u4'] ** k2\n r2 = pk['g'] ** k3\n r3 = f ** k3\n t4 = pk['n'] ** k1 * gp ** k3\n hashstr = str(r1) + str(r2) + str(r3) + str(t4)\n c = self.group.hash(hashstr)\n s1 = k1 + c * UID\n s2 = k2 + c * r4\n s3 = k3 + c * k\n signature = {'c0': c0, 'c5': c5, 'c6': c6, 'e1': e1, 'e2': e2, 'e3':\n e3, 'c': c, 's1': s1, 's2': s2, 's3': s3}\n t2 = time()\n with open('gssigntime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print('gs time', t2 - t1)\n return signature\n\n def open(self, okliststr, L, k):\n t1 = time()\n oklist = []\n for ok in okliststr:\n oklist.append({'ok1': self.group.fromstr(ok['ok1'], 10, GT),\n 'ok2': self.group.fromstr(ok['ok2'], 10, GT)})\n ok1 = self.group.gen1_0(1)\n ok2 = self.group.gen1_0(1)\n for i in range(k):\n ok1 = ok1 * oklist[i]['ok1'] ** L[i]\n ok2 = ok2 * oklist[i]['ok2'] ** L[i]\n t2 = time()\n with open('opentime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print('open time', t2 - t1)\n return ok1 / ok2\n\n\n<mask token>\n\n\ndef get_lam(sig):\n okliststr = []\n i = 0\n for client in clientlist:\n okstr = client.get_ok(str(sig['e1']), str(sig['e2']))\n print(okstr)\n okliststr.append(okstr)\n i = i + 1\n if i < k:\n print('the number of ok is not enough\\n')\n return\n lam = group_signature.open(okliststr, L, k)\n return lam\n\n\ndef 
tx_build_broad(op, steemd_instance, wallet_instance, account):\n tx = TransactionBuilder(steemd_instance=steemd_instance,\n wallet_instance=wallet_instance, no_broadcast=False)\n tx.appendOps(op)\n tx.appendSigner(account, 'posting')\n tx.sign()\n re = tx.broadcast()\n return re\n\n\n<mask token>\n\n\ndef annoy_commit_tx(account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, title='paper_title', body='paper_body'):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID,\n title='paper_title', body='paper_body', groupID='computer')\n re = tx_build_broad(commitop, steemd_instance, wallet_instance, account)\n print('commit-re', re)\n return ssig, permlink\n\n\n<mask token>\n\n\ndef one_mul_annoy_tx(account, usk, pk, UID, steemd, wallet):\n ssiglistone = []\n permlinklistone = []\n threads = []\n for i in range(nodeTX):\n t = MyThread(annoy_commit_tx, args=(account, usk, pk, GID, UID,\n steemd, wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink = t.get_result()\n ssiglistone.append(ssig)\n permlinklistone.append(permlink)\n return ssiglistone, permlinklistone\n\n\ndef one_mul_open_tx(account, ssiglistone, userID, permlinklistone, steemd,\n wallet):\n threads = []\n for i in range(nodeTX):\n t = MyThread(open_tx, args=(account, ssiglistone[i], userID,\n permlinklistone[i], steemd, wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\n<mask token>\n\n\ndef creat_commit_tx(account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, title='paper_title', body='paper_body'):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID,\n title, body, groupID='computer')\n commit_tx = tx_build(commitop, steemd_instance, wallet_instance, account)\n return ssig, permlink, commit_tx\n\n\ndef creat_num_commit_tx(num, account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, ttitle='paper_title', tbody='paper_body'):\n ssiglist = []\n permlinklist = []\n txlist = []\n threads = []\n for i in range(num):\n t = MyThread(creat_commit_tx, args=(account, usk, pk, GID, UID,\n steemd_instance, wallet_instance, ttitle, tbody))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink, commit_tx = t.get_result()\n ssiglist.append(ssig)\n permlinklist.append(permlink)\n txlist.append(commit_tx)\n return ssiglist, permlinklist, txlist\n\n\ndef creat_open_tx(account, ssig, userID, permlink, steemd_instance,\n wallet_instance):\n openop = open_op(account, ssig, userID, permlink)\n open_tx = tx_build(openop, steemd_instance, wallet_instance, account)\n return open_tx\n\n\ndef creat_num_open_tx(num, account, ssiglist, userID, permlinklist,\n steemd_instance, wallet_instance):\n opentxlist = []\n threads = []\n for i in range(num):\n t = MyThread(creat_open_tx, args=(account, ssiglist[i], userID,\n permlinklist[i], steemd_instance, wallet_instance))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n opentx = t.get_result()\n opentxlist.append(opentx)\n return opentxlist\n\n\ndef tx_broad(tx):\n tx.broadcast()\n\n\ndef mul_tx_broad(txlist):\n threads = []\n for tx in txlist:\n t = MyThread(tx_broad, args=(tx,))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass GroupSignature:\n\n def __init__(self, groupObj):\n global util, group\n util = SecretUtil(groupObj, debug)\n self.group = groupObj\n\n def pkGen(self, h1str):\n gstr = (\n '[6172776968119684165170291368128433652817636448173749093457023424948260385279837018774774149930982188956916913145008943931711059687988096415181819433817738, 8687587692191287108886119971783525001480020593934954052605681527814232399216375005546606067382536684351686344089456732201641997200939472924879001214689004]'\n )\n g2str = (\n '[7648994551207171188393784904797547917038803147671542540175090956205316897431443264058433935237605598252399113847934759009659621851760599508222321653067284, 922489308494109901795721463782161260386164061515796674638135394871842997698175772871045949554746517321480649326465484116060959631197509151923296896589720]'\n )\n u0str = (\n '[180015966842918451436547451263180245588308971597733548673037049536176684754209695288737508087729924028686259002375511049961436438196866049956546630518033, 1295050197915669955783867959538729894307963685491173858450359845766785488725907727220684060845012524740394664162328817669422178637925195059862486690053923]'\n )\n u1str = (\n '[2555472719769037960206282327195096320915753855199743796256065902544200822503613205017219993060986152240852358189992579821797745072366030183800897743028220, 7573705235093543416041007636313631591000596820214067724084077929638801811700093589294454562385664531190678890366928407286293582994146887505184778221562373]'\n )\n u2str = (\n '[6876276970903121931083294698771200898345396507892092532649392211995185517437159402176975528760594250374462299539306423347676182899798006533425047523984724, 5323739238507219125881988073888745575030677585404965990610324901624530474522642705344792075909082041735695801098770187248023797265998906693745587936574078]'\n )\n u3str = (\n '[6628726193389375981104409894060310698729022957801238449570622103067828518416602275957863668289683360250722835022304456841105526036470008237775051984811323, 862537748555943361001122447731987661405436458862545177179548603003392540530328380518694788420155531238391922289886044667763424887444361610972254938158280]'\n )\n u4str = (\n '[8157254219580822599577995921928211211847392705248772673869189421041858895589817404931780741226510985762564598862965174380020566416411083236239871342674775, 4736677719200783513058679582227494204159737596114643136852532046080608159561620208171676599501713934575216178076006396924589443776642926902969084668055006]'\n )\n hstr = (\n '[6248393417805371388321299785844751688345516419281230263497475615452026459314582553252281068616984105757749673095320346188725995701858182333525688832492249, 351368339412205819108519989143352052898751906937356995136442397753142226531384069336237369861919799955237545207977716196031001184146017796598836939617335]'\n )\n nstr = (\n '[75201312764006187596691102237923705656296213254701583615255122742135170369075831428394751330697143847448434841509551532135632624530360013837581615049543, 3886258599652934715331576083899336629981754505948456216299528998628273512432828729344158706718479567056972375128622026273382126529171409058157562418608963]'\n )\n g = self.group.fromstr(gstr, 10, G1)\n g2 = self.group.fromstr(g2str, 10, G2)\n u0 = self.group.fromstr(u0str, 10, G2)\n u1 = self.group.fromstr(u1str, 10, G2)\n u2 = self.group.fromstr(u2str, 10, G2)\n u3 = self.group.fromstr(u3str, 10, G2)\n u4 = self.group.fromstr(u4str, 10, G2)\n h = self.group.fromstr(hstr, 10, G1)\n n = self.group.fromstr(nstr, 10, GT)\n h1 = 
self.group.fromstr(h1str, 10, G1)\n pk = {'g': g, 'g2': g2, 'u0': u0, 'u1': u1, 'u2': u2, 'u3': u3,\n 'u4': u4, 'h': h, 'n': n, 'h1': h1}\n return pk\n\n def uskGen(self, usklist, pk, GID, UID, L, k):\n t1 = time()\n b0 = self.group.gen1_0(1)\n b3 = self.group.gen1_0(1)\n b4 = self.group.gen1_0(1)\n b5 = self.group.gen1_0(1)\n r2 = self.group.random(ZR)\n for i in range(k):\n b0 = b0 * usklist[i]['b0'] ** L[i]\n b3 = b3 * usklist[i]['b3'] ** L[i]\n b4 = b4 * usklist[i]['b4'] ** L[i]\n b5 = b5 * usklist[i]['b5'] ** L[i]\n b0 = b0 * (pk['u0'] * pk['u1'] ** GID * pk['u2'] ** UID) ** r2\n b3 = b3 * pk['u3'] ** r2\n b4 = b4 * pk['u4'] ** r2\n b5 = b5 * pk['g'] ** r2\n usk = {'b0': b0, 'b3': b3, 'b4': b4, 'b5': b5}\n t2 = time()\n with open('extracttime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n return usk\n\n def LGen(self, n, k):\n L = []\n I = self.group.random(ZR)\n J = self.group.random(ZR)\n for i in range(n):\n L.append(self.group.random(ZR))\n L[i].set(1)\n I.set(i + 1)\n for j in range(1, k + 1):\n print(j)\n J.set(j)\n if i + 1 != j:\n L[i] = L[i] * (J / (J - I))\n return L\n\n def verifyUsk(self, usk, vk, pk, GID, UID):\n g = pk['g']\n g2 = pk['g2']\n u0 = pk['u0']\n u1 = pk['u1']\n u2 = pk['u2']\n u3 = pk['u3']\n u4 = pk['u4']\n b0 = usk['b0']\n b5 = usk['b5']\n b3 = usk['b3']\n b4 = usk['b4']\n return pair(g, b0) == pair(vk, g2) * pair(b5, u0) * pair(b5, u1 ** GID\n ) * pair(b5, u2 ** UID) and pair(g, b3) == pair(b5, u3) and pair(g,\n b4) == pair(b5, u4)\n\n def sign(self, title, usk, pk, GID, UID, groupID):\n t1 = time()\n m = self.group.hash(title)\n b0 = usk['b0']\n b3 = usk['b3']\n b4 = usk['b4']\n b5 = usk['b5']\n r4 = self.group.random(ZR)\n r3 = self.group.random(ZR)\n k = self.group.random(ZR)\n c0 = b0 * b3 ** m * b4 ** r4 * (pk['u0'] * pk['u1'] ** GID * pk[\n 'u2'] ** UID * pk['u3'] ** m * pk['u4'] ** r4) ** r3\n c5 = b5 * pk['g'] ** r3\n c6 = pk['u2'] ** UID * pk['u4'] ** r4\n e1 = pk['g'] ** k\n e2 = (pk['u0'] * pk['u1'] ** GID) ** k\n e3 = pk['n'] ** UID * pair(pk['h1'], pk['g2']) ** k\n f = pk['u0'] * pk['u1'] ** GID\n gp = pair(pk['h1'], pk['g2'])\n k1 = self.group.random(ZR)\n k2 = self.group.random(ZR)\n k3 = self.group.random(ZR)\n r1 = pk['u2'] ** k1 * pk['u4'] ** k2\n r2 = pk['g'] ** k3\n r3 = f ** k3\n t4 = pk['n'] ** k1 * gp ** k3\n hashstr = str(r1) + str(r2) + str(r3) + str(t4)\n c = self.group.hash(hashstr)\n s1 = k1 + c * UID\n s2 = k2 + c * r4\n s3 = k3 + c * k\n signature = {'c0': c0, 'c5': c5, 'c6': c6, 'e1': e1, 'e2': e2, 'e3':\n e3, 'c': c, 's1': s1, 's2': s2, 's3': s3}\n t2 = time()\n with open('gssigntime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print('gs time', t2 - t1)\n return signature\n\n def open(self, okliststr, L, k):\n t1 = time()\n oklist = []\n for ok in okliststr:\n oklist.append({'ok1': self.group.fromstr(ok['ok1'], 10, GT),\n 'ok2': self.group.fromstr(ok['ok2'], 10, GT)})\n ok1 = self.group.gen1_0(1)\n ok2 = self.group.gen1_0(1)\n for i in range(k):\n ok1 = ok1 * oklist[i]['ok1'] ** L[i]\n ok2 = ok2 * oklist[i]['ok2'] ** L[i]\n t2 = time()\n with open('opentime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print('open time', t2 - t1)\n return ok1 / ok2\n\n\n<mask token>\n\n\ndef get_lam(sig):\n okliststr = []\n i = 0\n for client in clientlist:\n okstr = client.get_ok(str(sig['e1']), str(sig['e2']))\n print(okstr)\n okliststr.append(okstr)\n i = i + 1\n if i < k:\n print('the number of ok is not enough\\n')\n return\n lam = group_signature.open(okliststr, L, k)\n return lam\n\n\ndef 
tx_build_broad(op, steemd_instance, wallet_instance, account):\n tx = TransactionBuilder(steemd_instance=steemd_instance,\n wallet_instance=wallet_instance, no_broadcast=False)\n tx.appendOps(op)\n tx.appendSigner(account, 'posting')\n tx.sign()\n re = tx.broadcast()\n return re\n\n\n<mask token>\n\n\ndef annoy_commit(account, usk, pk, GID, UID, title='paper_title', body=\n 'paper_body', groupID='computer'):\n annoy_author = 'nya'\n sig = group_signature.sign(title, usk, pk, GID, UID, groupID)\n permlink = ''.join(random.choices(string.digits, k=7))\n print('permlink is ' + permlink)\n op = operations.CommitPaper(**{'account': account, 'author':\n annoy_author, 'permlink': permlink, 'title': title, 'body': body,\n 'json_metadata': '', 'c0': str(sig['c0']), 'c5': str(sig['c5']),\n 'c6': str(sig['c6']), 'e1': str(sig['e1']), 'e2': str(sig['e2']),\n 'e3': str(sig['e3']), 'c': str(sig['c']), 's1': str(sig['s1']),\n 's2': str(sig['s2']), 's3': str(sig['s3'])})\n print('commitop', op)\n return op, sig, permlink\n\n\n<mask token>\n\n\ndef annoy_commit_tx(account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, title='paper_title', body='paper_body'):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID,\n title='paper_title', body='paper_body', groupID='computer')\n re = tx_build_broad(commitop, steemd_instance, wallet_instance, account)\n print('commit-re', re)\n return ssig, permlink\n\n\n<mask token>\n\n\ndef one_mul_annoy_tx(account, usk, pk, UID, steemd, wallet):\n ssiglistone = []\n permlinklistone = []\n threads = []\n for i in range(nodeTX):\n t = MyThread(annoy_commit_tx, args=(account, usk, pk, GID, UID,\n steemd, wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink = t.get_result()\n ssiglistone.append(ssig)\n permlinklistone.append(permlink)\n return ssiglistone, permlinklistone\n\n\ndef one_mul_open_tx(account, ssiglistone, userID, permlinklistone, steemd,\n wallet):\n threads = []\n for i in range(nodeTX):\n t = MyThread(open_tx, args=(account, ssiglistone[i], userID,\n permlinklistone[i], steemd, wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\ndef mul_annoy_tx(usk, pk, UID):\n ssiglist = []\n permlinklist = []\n threads = []\n for i in range(n):\n t = MyThread(one_mul_annoy_tx, args=(accountlist[i], usk, pk, UID,\n clientlist[i].steemd, clientlist[i].wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink = t.get_result()\n ssiglist.append(ssig)\n permlinklist.append(permlink)\n return ssiglist, permlinklist\n\n\ndef mul_open_tx(ssiglist, permlinklist, userID):\n threads = []\n for i in range(n):\n t = MyThread(one_mul_open_tx, args=(accountlist[i], ssiglist[i],\n userID, permlinklist[i], clientlist[i].steemd, clientlist[i].\n wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\ndef creat_commit_tx(account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, title='paper_title', body='paper_body'):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID,\n title, body, groupID='computer')\n commit_tx = tx_build(commitop, steemd_instance, wallet_instance, account)\n return ssig, permlink, commit_tx\n\n\ndef creat_num_commit_tx(num, account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, ttitle='paper_title', tbody='paper_body'):\n ssiglist = []\n permlinklist = []\n txlist = []\n threads = []\n for 
i in range(num):\n t = MyThread(creat_commit_tx, args=(account, usk, pk, GID, UID,\n steemd_instance, wallet_instance, ttitle, tbody))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink, commit_tx = t.get_result()\n ssiglist.append(ssig)\n permlinklist.append(permlink)\n txlist.append(commit_tx)\n return ssiglist, permlinklist, txlist\n\n\ndef creat_open_tx(account, ssig, userID, permlink, steemd_instance,\n wallet_instance):\n openop = open_op(account, ssig, userID, permlink)\n open_tx = tx_build(openop, steemd_instance, wallet_instance, account)\n return open_tx\n\n\ndef creat_num_open_tx(num, account, ssiglist, userID, permlinklist,\n steemd_instance, wallet_instance):\n opentxlist = []\n threads = []\n for i in range(num):\n t = MyThread(creat_open_tx, args=(account, ssiglist[i], userID,\n permlinklist[i], steemd_instance, wallet_instance))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n opentx = t.get_result()\n opentxlist.append(opentx)\n return opentxlist\n\n\ndef tx_broad(tx):\n tx.broadcast()\n\n\ndef mul_tx_broad(txlist):\n threads = []\n for tx in txlist:\n t = MyThread(tx_broad, args=(tx,))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass GroupSignature:\n\n def __init__(self, groupObj):\n global util, group\n util = SecretUtil(groupObj, debug)\n self.group = groupObj\n\n def pkGen(self, h1str):\n gstr = (\n '[6172776968119684165170291368128433652817636448173749093457023424948260385279837018774774149930982188956916913145008943931711059687988096415181819433817738, 8687587692191287108886119971783525001480020593934954052605681527814232399216375005546606067382536684351686344089456732201641997200939472924879001214689004]'\n )\n g2str = (\n '[7648994551207171188393784904797547917038803147671542540175090956205316897431443264058433935237605598252399113847934759009659621851760599508222321653067284, 922489308494109901795721463782161260386164061515796674638135394871842997698175772871045949554746517321480649326465484116060959631197509151923296896589720]'\n )\n u0str = (\n '[180015966842918451436547451263180245588308971597733548673037049536176684754209695288737508087729924028686259002375511049961436438196866049956546630518033, 1295050197915669955783867959538729894307963685491173858450359845766785488725907727220684060845012524740394664162328817669422178637925195059862486690053923]'\n )\n u1str = (\n '[2555472719769037960206282327195096320915753855199743796256065902544200822503613205017219993060986152240852358189992579821797745072366030183800897743028220, 7573705235093543416041007636313631591000596820214067724084077929638801811700093589294454562385664531190678890366928407286293582994146887505184778221562373]'\n )\n u2str = (\n '[6876276970903121931083294698771200898345396507892092532649392211995185517437159402176975528760594250374462299539306423347676182899798006533425047523984724, 5323739238507219125881988073888745575030677585404965990610324901624530474522642705344792075909082041735695801098770187248023797265998906693745587936574078]'\n )\n u3str = (\n '[6628726193389375981104409894060310698729022957801238449570622103067828518416602275957863668289683360250722835022304456841105526036470008237775051984811323, 862537748555943361001122447731987661405436458862545177179548603003392540530328380518694788420155531238391922289886044667763424887444361610972254938158280]'\n )\n u4str = (\n '[8157254219580822599577995921928211211847392705248772673869189421041858895589817404931780741226510985762564598862965174380020566416411083236239871342674775, 4736677719200783513058679582227494204159737596114643136852532046080608159561620208171676599501713934575216178076006396924589443776642926902969084668055006]'\n )\n hstr = (\n '[6248393417805371388321299785844751688345516419281230263497475615452026459314582553252281068616984105757749673095320346188725995701858182333525688832492249, 351368339412205819108519989143352052898751906937356995136442397753142226531384069336237369861919799955237545207977716196031001184146017796598836939617335]'\n )\n nstr = (\n '[75201312764006187596691102237923705656296213254701583615255122742135170369075831428394751330697143847448434841509551532135632624530360013837581615049543, 3886258599652934715331576083899336629981754505948456216299528998628273512432828729344158706718479567056972375128622026273382126529171409058157562418608963]'\n )\n g = self.group.fromstr(gstr, 10, G1)\n g2 = self.group.fromstr(g2str, 10, G2)\n u0 = self.group.fromstr(u0str, 10, G2)\n u1 = self.group.fromstr(u1str, 10, G2)\n u2 = self.group.fromstr(u2str, 10, G2)\n u3 = self.group.fromstr(u3str, 10, G2)\n u4 = self.group.fromstr(u4str, 10, G2)\n h = self.group.fromstr(hstr, 10, G1)\n n = self.group.fromstr(nstr, 10, GT)\n h1 = 
self.group.fromstr(h1str, 10, G1)\n pk = {'g': g, 'g2': g2, 'u0': u0, 'u1': u1, 'u2': u2, 'u3': u3,\n 'u4': u4, 'h': h, 'n': n, 'h1': h1}\n return pk\n\n def uskGen(self, usklist, pk, GID, UID, L, k):\n t1 = time()\n b0 = self.group.gen1_0(1)\n b3 = self.group.gen1_0(1)\n b4 = self.group.gen1_0(1)\n b5 = self.group.gen1_0(1)\n r2 = self.group.random(ZR)\n for i in range(k):\n b0 = b0 * usklist[i]['b0'] ** L[i]\n b3 = b3 * usklist[i]['b3'] ** L[i]\n b4 = b4 * usklist[i]['b4'] ** L[i]\n b5 = b5 * usklist[i]['b5'] ** L[i]\n b0 = b0 * (pk['u0'] * pk['u1'] ** GID * pk['u2'] ** UID) ** r2\n b3 = b3 * pk['u3'] ** r2\n b4 = b4 * pk['u4'] ** r2\n b5 = b5 * pk['g'] ** r2\n usk = {'b0': b0, 'b3': b3, 'b4': b4, 'b5': b5}\n t2 = time()\n with open('extracttime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n return usk\n\n def LGen(self, n, k):\n L = []\n I = self.group.random(ZR)\n J = self.group.random(ZR)\n for i in range(n):\n L.append(self.group.random(ZR))\n L[i].set(1)\n I.set(i + 1)\n for j in range(1, k + 1):\n print(j)\n J.set(j)\n if i + 1 != j:\n L[i] = L[i] * (J / (J - I))\n return L\n\n def verifyUsk(self, usk, vk, pk, GID, UID):\n g = pk['g']\n g2 = pk['g2']\n u0 = pk['u0']\n u1 = pk['u1']\n u2 = pk['u2']\n u3 = pk['u3']\n u4 = pk['u4']\n b0 = usk['b0']\n b5 = usk['b5']\n b3 = usk['b3']\n b4 = usk['b4']\n return pair(g, b0) == pair(vk, g2) * pair(b5, u0) * pair(b5, u1 ** GID\n ) * pair(b5, u2 ** UID) and pair(g, b3) == pair(b5, u3) and pair(g,\n b4) == pair(b5, u4)\n\n def sign(self, title, usk, pk, GID, UID, groupID):\n t1 = time()\n m = self.group.hash(title)\n b0 = usk['b0']\n b3 = usk['b3']\n b4 = usk['b4']\n b5 = usk['b5']\n r4 = self.group.random(ZR)\n r3 = self.group.random(ZR)\n k = self.group.random(ZR)\n c0 = b0 * b3 ** m * b4 ** r4 * (pk['u0'] * pk['u1'] ** GID * pk[\n 'u2'] ** UID * pk['u3'] ** m * pk['u4'] ** r4) ** r3\n c5 = b5 * pk['g'] ** r3\n c6 = pk['u2'] ** UID * pk['u4'] ** r4\n e1 = pk['g'] ** k\n e2 = (pk['u0'] * pk['u1'] ** GID) ** k\n e3 = pk['n'] ** UID * pair(pk['h1'], pk['g2']) ** k\n f = pk['u0'] * pk['u1'] ** GID\n gp = pair(pk['h1'], pk['g2'])\n k1 = self.group.random(ZR)\n k2 = self.group.random(ZR)\n k3 = self.group.random(ZR)\n r1 = pk['u2'] ** k1 * pk['u4'] ** k2\n r2 = pk['g'] ** k3\n r3 = f ** k3\n t4 = pk['n'] ** k1 * gp ** k3\n hashstr = str(r1) + str(r2) + str(r3) + str(t4)\n c = self.group.hash(hashstr)\n s1 = k1 + c * UID\n s2 = k2 + c * r4\n s3 = k3 + c * k\n signature = {'c0': c0, 'c5': c5, 'c6': c6, 'e1': e1, 'e2': e2, 'e3':\n e3, 'c': c, 's1': s1, 's2': s2, 's3': s3}\n t2 = time()\n with open('gssigntime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print('gs time', t2 - t1)\n return signature\n\n def open(self, okliststr, L, k):\n t1 = time()\n oklist = []\n for ok in okliststr:\n oklist.append({'ok1': self.group.fromstr(ok['ok1'], 10, GT),\n 'ok2': self.group.fromstr(ok['ok2'], 10, GT)})\n ok1 = self.group.gen1_0(1)\n ok2 = self.group.gen1_0(1)\n for i in range(k):\n ok1 = ok1 * oklist[i]['ok1'] ** L[i]\n ok2 = ok2 * oklist[i]['ok2'] ** L[i]\n t2 = time()\n with open('opentime.txt', 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print('open time', t2 - t1)\n return ok1 / ok2\n\n\ndef get_usk(userID, GID, UID, h1str='', count=0):\n pk = {}\n for i in range(n):\n vkliststr.append(clientlist[i].get_vk()['vk'])\n vklist.append(group_signature.group.fromstr(vkliststr[i], 10, G1))\n uskliststr.append(clientlist[i].user_extract(userID))\n usklist.append({})\n usklist[i]['b0'] = 
group_signature.group.fromstr(uskliststr[i]['b0'\n ], 10, G2)\n usklist[i]['b3'] = group_signature.group.fromstr(uskliststr[i]['b3'\n ], 10, G2)\n usklist[i]['b4'] = group_signature.group.fromstr(uskliststr[i]['b4'\n ], 10, G2)\n usklist[i]['b5'] = group_signature.group.fromstr(uskliststr[i]['b5'\n ], 10, G1)\n print(usklist[i])\n if h1str == '' or h1str == '0' or h1str == 0:\n h1str = clientlist[i].get_pk()['pk']\n print('h1str', h1str)\n pk = group_signature.pkGen(h1str)\n print('pk---------------\\n', pk)\n if group_signature.verifyUsk(usklist[i], vklist[i], pk, GID, UID):\n count = count + 1\n else:\n print('key is invalide\\n\\n')\n usk = group_signature.uskGen(usklist, pk, GID, UID, L, k)\n print('usk---------------\\n', usk)\n return pk, usk\n\n\ndef get_lam(sig):\n okliststr = []\n i = 0\n for client in clientlist:\n okstr = client.get_ok(str(sig['e1']), str(sig['e2']))\n print(okstr)\n okliststr.append(okstr)\n i = i + 1\n if i < k:\n print('the number of ok is not enough\\n')\n return\n lam = group_signature.open(okliststr, L, k)\n return lam\n\n\ndef tx_build_broad(op, steemd_instance, wallet_instance, account):\n tx = TransactionBuilder(steemd_instance=steemd_instance,\n wallet_instance=wallet_instance, no_broadcast=False)\n tx.appendOps(op)\n tx.appendSigner(account, 'posting')\n tx.sign()\n re = tx.broadcast()\n return re\n\n\n<mask token>\n\n\ndef annoy_commit(account, usk, pk, GID, UID, title='paper_title', body=\n 'paper_body', groupID='computer'):\n annoy_author = 'nya'\n sig = group_signature.sign(title, usk, pk, GID, UID, groupID)\n permlink = ''.join(random.choices(string.digits, k=7))\n print('permlink is ' + permlink)\n op = operations.CommitPaper(**{'account': account, 'author':\n annoy_author, 'permlink': permlink, 'title': title, 'body': body,\n 'json_metadata': '', 'c0': str(sig['c0']), 'c5': str(sig['c5']),\n 'c6': str(sig['c6']), 'e1': str(sig['e1']), 'e2': str(sig['e2']),\n 'e3': str(sig['e3']), 'c': str(sig['c']), 's1': str(sig['s1']),\n 's2': str(sig['s2']), 's3': str(sig['s3'])})\n print('commitop', op)\n return op, sig, permlink\n\n\n<mask token>\n\n\ndef annoy_commit_tx(account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, title='paper_title', body='paper_body'):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID,\n title='paper_title', body='paper_body', groupID='computer')\n re = tx_build_broad(commitop, steemd_instance, wallet_instance, account)\n print('commit-re', re)\n return ssig, permlink\n\n\n<mask token>\n\n\ndef one_mul_annoy_tx(account, usk, pk, UID, steemd, wallet):\n ssiglistone = []\n permlinklistone = []\n threads = []\n for i in range(nodeTX):\n t = MyThread(annoy_commit_tx, args=(account, usk, pk, GID, UID,\n steemd, wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink = t.get_result()\n ssiglistone.append(ssig)\n permlinklistone.append(permlink)\n return ssiglistone, permlinklistone\n\n\ndef one_mul_open_tx(account, ssiglistone, userID, permlinklistone, steemd,\n wallet):\n threads = []\n for i in range(nodeTX):\n t = MyThread(open_tx, args=(account, ssiglistone[i], userID,\n permlinklistone[i], steemd, wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\ndef mul_annoy_tx(usk, pk, UID):\n ssiglist = []\n permlinklist = []\n threads = []\n for i in range(n):\n t = MyThread(one_mul_annoy_tx, args=(accountlist[i], usk, pk, UID,\n clientlist[i].steemd, clientlist[i].wallet))\n 
threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink = t.get_result()\n ssiglist.append(ssig)\n permlinklist.append(permlink)\n return ssiglist, permlinklist\n\n\ndef mul_open_tx(ssiglist, permlinklist, userID):\n threads = []\n for i in range(n):\n t = MyThread(one_mul_open_tx, args=(accountlist[i], ssiglist[i],\n userID, permlinklist[i], clientlist[i].steemd, clientlist[i].\n wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\ndef creat_commit_tx(account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, title='paper_title', body='paper_body'):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID,\n title, body, groupID='computer')\n commit_tx = tx_build(commitop, steemd_instance, wallet_instance, account)\n return ssig, permlink, commit_tx\n\n\ndef creat_num_commit_tx(num, account, usk, pk, GID, UID, steemd_instance,\n wallet_instance, ttitle='paper_title', tbody='paper_body'):\n ssiglist = []\n permlinklist = []\n txlist = []\n threads = []\n for i in range(num):\n t = MyThread(creat_commit_tx, args=(account, usk, pk, GID, UID,\n steemd_instance, wallet_instance, ttitle, tbody))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink, commit_tx = t.get_result()\n ssiglist.append(ssig)\n permlinklist.append(permlink)\n txlist.append(commit_tx)\n return ssiglist, permlinklist, txlist\n\n\ndef creat_open_tx(account, ssig, userID, permlink, steemd_instance,\n wallet_instance):\n openop = open_op(account, ssig, userID, permlink)\n open_tx = tx_build(openop, steemd_instance, wallet_instance, account)\n return open_tx\n\n\ndef creat_num_open_tx(num, account, ssiglist, userID, permlinklist,\n steemd_instance, wallet_instance):\n opentxlist = []\n threads = []\n for i in range(num):\n t = MyThread(creat_open_tx, args=(account, ssiglist[i], userID,\n permlinklist[i], steemd_instance, wallet_instance))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n opentx = t.get_result()\n opentxlist.append(opentx)\n return opentxlist\n\n\ndef tx_broad(tx):\n tx.broadcast()\n\n\ndef mul_tx_broad(txlist):\n threads = []\n for tx in txlist:\n t = MyThread(tx_broad, args=(tx,))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\n<mask token>\n\n\ndef main():\n userID = 'zhou'\n UID = group_signature.group.hash(userID)\n print('uid', UID)\n pk, usk = get_usk(userID, GID, UID)\n ssig, permlink = annoy_commit_tx(accountlist[0], usk, pk, GID, UID,\n clientlist[0].steemd, clientlist[0].wallet, title='paper_title',\n body='paper_body')\n sleep(3)\n open_tx(accountlist[0], ssig, userID, permlink, clientlist[0].steemd,\n clientlist[0].wallet)\n return\n\n\n<mask token>\n",
"step-5": "import random\nimport string\nimport steembase\nimport struct\nimport steem\nfrom time import sleep\nfrom time import time\nfrom steem.transactionbuilder import TransactionBuilder\nfrom steembase import operations\nfrom steembase.transactions import SignedTransaction\nfrom resultthread import MyThread\nfrom charm.toolbox.pairinggroup import PairingGroup, ZR, G1, G2, GT, pair\nfrom charm.toolbox.secretutil import SecretUtil\n\n\nclass GroupSignature():\n\n def __init__(self, groupObj):\n global util, group\n util = SecretUtil(groupObj, debug)\n self.group = groupObj\n\n def pkGen(self, h1str):\n gstr = \"[6172776968119684165170291368128433652817636448173749093457023424948260385279837018774774149930982188956916913145008943931711059687988096415181819433817738, 8687587692191287108886119971783525001480020593934954052605681527814232399216375005546606067382536684351686344089456732201641997200939472924879001214689004]\"\n g2str = \"[7648994551207171188393784904797547917038803147671542540175090956205316897431443264058433935237605598252399113847934759009659621851760599508222321653067284, 922489308494109901795721463782161260386164061515796674638135394871842997698175772871045949554746517321480649326465484116060959631197509151923296896589720]\"\n u0str = \"[180015966842918451436547451263180245588308971597733548673037049536176684754209695288737508087729924028686259002375511049961436438196866049956546630518033, 1295050197915669955783867959538729894307963685491173858450359845766785488725907727220684060845012524740394664162328817669422178637925195059862486690053923]\"\n u1str = \"[2555472719769037960206282327195096320915753855199743796256065902544200822503613205017219993060986152240852358189992579821797745072366030183800897743028220, 7573705235093543416041007636313631591000596820214067724084077929638801811700093589294454562385664531190678890366928407286293582994146887505184778221562373]\"\n u2str = \"[6876276970903121931083294698771200898345396507892092532649392211995185517437159402176975528760594250374462299539306423347676182899798006533425047523984724, 5323739238507219125881988073888745575030677585404965990610324901624530474522642705344792075909082041735695801098770187248023797265998906693745587936574078]\"\n u3str = \"[6628726193389375981104409894060310698729022957801238449570622103067828518416602275957863668289683360250722835022304456841105526036470008237775051984811323, 862537748555943361001122447731987661405436458862545177179548603003392540530328380518694788420155531238391922289886044667763424887444361610972254938158280]\"\n u4str = \"[8157254219580822599577995921928211211847392705248772673869189421041858895589817404931780741226510985762564598862965174380020566416411083236239871342674775, 4736677719200783513058679582227494204159737596114643136852532046080608159561620208171676599501713934575216178076006396924589443776642926902969084668055006]\"\n hstr = \"[6248393417805371388321299785844751688345516419281230263497475615452026459314582553252281068616984105757749673095320346188725995701858182333525688832492249, 351368339412205819108519989143352052898751906937356995136442397753142226531384069336237369861919799955237545207977716196031001184146017796598836939617335]\"\n nstr = \"[75201312764006187596691102237923705656296213254701583615255122742135170369075831428394751330697143847448434841509551532135632624530360013837581615049543, 3886258599652934715331576083899336629981754505948456216299528998628273512432828729344158706718479567056972375128622026273382126529171409058157562418608963]\"\n\n g = 
self.group.fromstr(gstr, 10, G1)\n g2 = self.group.fromstr(g2str, 10, G2)\n u0 = self.group.fromstr(u0str, 10, G2)\n u1 = self.group.fromstr(u1str, 10, G2)\n u2 = self.group.fromstr(u2str, 10, G2)\n u3 = self.group.fromstr(u3str, 10, G2)\n u4 = self.group.fromstr(u4str, 10, G2)\n h = self.group.fromstr(hstr, 10, G1)\n n = self.group.fromstr(nstr, 10, GT)\n h1 = self.group.fromstr(h1str, 10, G1)\n\n pk = {'g': g, 'g2': g2, 'u0': u0, 'u1': u1, 'u2': u2, 'u3': u3, 'u4': u4, 'h': h, 'n': n, 'h1': h1}\n\n return pk\n\n def uskGen(self, usklist, pk, GID, UID, L, k):\n t1 = time()\n b0 = self.group.gen1_0(1)\n b3 = self.group.gen1_0(1)\n b4 = self.group.gen1_0(1)\n b5 = self.group.gen1_0(1)\n\n r2 = self.group.random(ZR)\n\n for i in range(k):\n b0 = b0 * (usklist[i]['b0'] ** L[i])\n b3 = b3 * (usklist[i]['b3'] ** L[i])\n b4 = b4 * (usklist[i]['b4'] ** L[i])\n b5 = b5 * (usklist[i]['b5'] ** L[i])\n\n b0 = b0 * (pk['u0'] * (pk['u1'] ** GID) * (pk['u2'] ** UID)) ** r2\n b3 = b3 * (pk['u3'] ** r2)\n b4 = b4 * (pk['u4'] ** r2)\n b5 = b5 * (pk['g'] ** r2)\n\n usk = {'b0': b0, 'b3': b3, 'b4': b4, 'b5': b5}\n t2 = time()\n with open(\"extracttime.txt\", 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n return usk\n\n def LGen(self, n, k):\n L = []\n I = self.group.random(ZR)\n J = self.group.random(ZR)\n for i in range(n):\n L.append(self.group.random(ZR))\n L[i].set(1)\n I.set(i + 1)\n for j in range(1, k + 1):\n print(j)\n J.set(j)\n if (i + 1) != j:\n L[i] = L[i] * ((J) / (J - I))\n return L\n\n def verifyUsk(self, usk, vk, pk, GID, UID):\n g = pk['g']\n g2 = pk['g2']\n u0 = pk['u0']\n u1 = pk['u1']\n u2 = pk['u2']\n u3 = pk['u3']\n u4 = pk['u4']\n\n b0 = usk['b0']\n b5 = usk['b5']\n b3 = usk['b3']\n b4 = usk['b4']\n\n return pair(g, b0) == (pair(vk, g2) * pair(b5, u0) * pair(b5, u1 ** GID) * pair(b5, u2 ** UID)) and pair(g,\n b3) == pair(\n b5, u3) and pair(g, b4) == pair(b5, u4)\n\n def sign(self, title, usk, pk, GID, UID, groupID):\n t1 = time()\n m = self.group.hash(title)\n b0 = usk['b0']\n b3 = usk['b3']\n b4 = usk['b4']\n b5 = usk['b5']\n\n r4 = self.group.random(ZR)\n r3 = self.group.random(ZR)\n k = self.group.random(ZR)\n\n c0 = b0 * (b3 ** m) * (b4 ** r4) * (\n (pk['u0'] * (pk['u1'] ** GID) * (pk['u2'] ** UID) * (pk['u3'] ** m) * (pk['u4'] ** r4)) ** r3)\n c5 = b5 * (pk['g'] ** r3)\n c6 = (pk['u2'] ** UID) * (pk['u4'] ** r4)\n e1 = pk['g'] ** k\n e2 = (pk['u0'] * (pk['u1'] ** GID)) ** k\n e3 = (pk['n'] ** UID) * (pair(pk['h1'], pk['g2']) ** k)\n\n # 产生pok\n f = pk['u0'] * (pk['u1'] ** GID)\n gp = pair(pk['h1'], pk['g2'])\n\n k1 = self.group.random(ZR)\n k2 = self.group.random(ZR)\n k3 = self.group.random(ZR)\n\n r1 = (pk['u2'] ** k1) * (pk['u4'] ** k2)\n r2 = pk['g'] ** k3\n r3 = f ** k3\n t4 = (pk['n'] ** k1) * (gp ** k3)\n\n hashstr = str(r1) + str(r2) + str(r3) + str(t4)\n\n c = self.group.hash(hashstr)\n\n s1 = k1 + c * UID\n\n s2 = k2 + c * r4\n\n s3 = k3 + c * k\n\n signature = {'c0': c0, 'c5': c5, 'c6': c6, 'e1': e1, 'e2': e2, 'e3': e3, 'c': c, 's1': s1, 's2': s2, 's3': s3}\n t2 = time()\n with open(\"gssigntime.txt\", 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print(\"gs time\", t2 - t1)\n return signature\n\n def open(self, okliststr, L, k):\n t1 = time()\n oklist = []\n for ok in okliststr:\n oklist.append({'ok1': self.group.fromstr(ok['ok1'], 10, GT), 'ok2': self.group.fromstr(ok['ok2'], 10, GT)})\n ok1 = self.group.gen1_0(1)\n ok2 = self.group.gen1_0(1)\n for i in range(k):\n ok1 = ok1 * (oklist[i]['ok1'] ** L[i])\n ok2 = ok2 * (oklist[i]['ok2'] ** L[i])\n t2 = 
time()\n with open(\"opentime.txt\", 'a') as f:\n f.write(str(t2 - t1))\n f.write('\\n')\n print(\"open time\", t2 - t1)\n return ok1 / ok2\n\n\ndef get_usk(userID, GID, UID, h1str=\"\", count=0):\n pk = {}\n for i in range(n):\n vkliststr.append(clientlist[i].get_vk()['vk'])\n vklist.append(group_signature.group.fromstr(vkliststr[i], 10, G1))\n\n uskliststr.append(clientlist[i].user_extract(userID))\n usklist.append({})\n usklist[i]['b0'] = group_signature.group.fromstr(uskliststr[i]['b0'], 10, G2)\n usklist[i]['b3'] = group_signature.group.fromstr(uskliststr[i]['b3'], 10, G2)\n usklist[i]['b4'] = group_signature.group.fromstr(uskliststr[i]['b4'], 10, G2)\n usklist[i]['b5'] = group_signature.group.fromstr(uskliststr[i]['b5'], 10, G1)\n print(usklist[i])\n if h1str == \"\" or h1str == \"0\" or h1str == 0:\n h1str = clientlist[i].get_pk()['pk']\n print(\"h1str\", h1str)\n pk = group_signature.pkGen(h1str)\n print(\"pk---------------\\n\", pk)\n\n if (group_signature.verifyUsk(usklist[i], vklist[i], pk, GID, UID)):\n count = count + 1\n else:\n print(\"key is invalide\\n\\n\")\n usk = group_signature.uskGen(usklist, pk, GID, UID, L, k)\n\n print(\"usk---------------\\n\", usk)\n return pk, usk\n\n\ndef get_lam(sig):\n okliststr = []\n i = 0\n for client in clientlist:\n okstr = client.get_ok(str(sig['e1']), str(sig['e2']))\n print(okstr)\n okliststr.append(okstr)\n i = i + 1\n\n if i < k:\n print(\"the number of ok is not enough\\n\")\n return\n\n lam = group_signature.open(okliststr, L, k)\n return lam\n\n\ndef tx_build_broad(op, steemd_instance, wallet_instance, account):\n tx = TransactionBuilder(steemd_instance=steemd_instance, wallet_instance=wallet_instance,\n no_broadcast=False)\n tx.appendOps(op)\n tx.appendSigner(account, 'posting')\n tx.sign()\n # print(\"txsign\",tx)\n re = tx.broadcast()\n return re\n\n\ndef tx_build(op, steemd_instance, wallet_instance, account):\n tx = TransactionBuilder(steemd_instance=steemd_instance, wallet_instance=wallet_instance,\n no_broadcast=False)\n tx.appendOps(op)\n tx.appendSigner(account, 'posting')\n tx.sign()\n # print(\"txsign\",tx)\n # re = tx.broadcast()\n return tx\n\n\ndef annoy_commit(account, usk, pk, GID, UID, title=\"paper_title\", body=\"paper_body\", groupID=\"computer\"):\n annoy_author = 'nya'\n # group signature ------title 必须 这里面是对title进行hash 然后使用usk对hash进行签名\n sig = group_signature.sign(title, usk, pk, GID, UID, groupID)\n\n permlink = ''.join(random.choices(string.digits, k=7))\n print(\"permlink is \" + permlink)\n op = operations.CommitPaper(\n **{\n \"account\": account,\n \"author\": annoy_author,\n \"permlink\": permlink,\n \"title\": title,\n \"body\": body,\n \"json_metadata\": \"\",\n \"c0\": str(sig['c0']),\n \"c5\": str(sig['c5']),\n \"c6\": str(sig['c6']),\n \"e1\": str(sig['e1']),\n \"e2\": str(sig['e2']),\n \"e3\": str(sig['e3']),\n \"c\": str(sig['c']),\n \"s1\": str(sig['s1']),\n \"s2\": str(sig['s2']),\n \"s3\": str(sig['s3'])\n }\n )\n print(\"commitop\", op)\n return op, sig, permlink\n\n\ndef open_op(account, sig, userID, permlink):\n lam = get_lam(sig)\n # E = (pk['n'] ** UID) * lam #计算出e3 即签名的e3 判断是否相等\n op = operations.ApplyOpen(\n **{\n 'account': account,\n 'author': userID,\n 'lambda': str(lam),\n 'permlink': permlink,\n 'json_metadata': \"\"\n }\n )\n return op\n\n\ndef annoy_commit_tx(account, usk, pk, GID, UID, steemd_instance, wallet_instance, title=\"paper_title\",\n body=\"paper_body\"):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID, title=\"paper_title\", 
body=\"paper_body\",\n groupID=\"computer\")\n re = tx_build_broad(commitop, steemd_instance, wallet_instance, account)\n print(\"commit-re\", re)\n return ssig, permlink\n\n\ndef open_tx(account, ssig, userID, permlink, steemd_instance, wallet_instance):\n openop = open_op(account, ssig, userID, permlink)\n re = tx_build_broad(openop, steemd_instance, wallet_instance, account)\n print(\"open-re\", re)\n\n\n# 一个节点的 并发产生交易\ndef one_mul_annoy_tx(account, usk, pk, UID, steemd, wallet):\n ssiglistone = []\n permlinklistone = []\n threads = []\n for i in range(nodeTX):\n t = MyThread(annoy_commit_tx, args=(account, usk, pk, GID, UID, steemd, wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink = t.get_result()\n ssiglistone.append(ssig)\n permlinklistone.append(permlink)\n return ssiglistone, permlinklistone\n\n\ndef one_mul_open_tx(account, ssiglistone, userID, permlinklistone, steemd, wallet):\n threads = []\n for i in range(nodeTX):\n t = MyThread(open_tx,\n args=(account, ssiglistone[i], userID, permlinklistone[i], steemd, wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\ndef mul_annoy_tx(usk, pk, UID):\n ssiglist = []\n permlinklist = []\n threads = []\n for i in range(n):\n # t = MyThread(annoy_commit_tx, args=(accountlist[i], usk, pk, GID, UID, clientlist[i].steemd, clientlist[i].wallet))\n t = MyThread(one_mul_annoy_tx,\n args=(accountlist[i], usk, pk, UID, clientlist[i].steemd, clientlist[i].wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink = t.get_result()\n ssiglist.append(ssig)\n permlinklist.append(permlink)\n return ssiglist, permlinklist\n\n\n# 多个节点, 每个节点并发\ndef mul_open_tx(ssiglist, permlinklist, userID):\n threads = []\n for i in range(n):\n # t = MyThread(open_tx,\n # args=(accountlist[i], ssiglist[i], userID, permlinklist[i], clientlist[i].steemd, clientlist[i].wallet))\n t = MyThread(one_mul_open_tx,\n args=(\n accountlist[i], ssiglist[i], userID, permlinklist[i], clientlist[i].steemd, clientlist[i].wallet))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n # for t in threads:\n # t.get_result()\n\n\n# 仅创造tx 不广播\ndef creat_commit_tx(account, usk, pk, GID, UID, steemd_instance, wallet_instance, title=\"paper_title\",\n body=\"paper_body\"):\n commitop, ssig, permlink = annoy_commit(account, usk, pk, GID, UID, title, body, groupID=\"computer\")\n commit_tx = tx_build(commitop, steemd_instance, wallet_instance, account)\n return ssig, permlink, commit_tx\n\n\ndef creat_num_commit_tx(num, account, usk, pk, GID, UID, steemd_instance, wallet_instance, ttitle=\"paper_title\",\n tbody=\"paper_body\"):\n ssiglist = []\n permlinklist = []\n txlist = []\n threads = []\n for i in range(num):\n t = MyThread(creat_commit_tx, args=(account, usk, pk, GID, UID, steemd_instance, wallet_instance, ttitle,\n tbody))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n ssig, permlink, commit_tx = t.get_result()\n ssiglist.append(ssig)\n permlinklist.append(permlink)\n txlist.append(commit_tx)\n return ssiglist, permlinklist, txlist\n\n\ndef creat_open_tx(account, ssig, userID, permlink, steemd_instance, wallet_instance):\n openop = open_op(account, ssig, userID, permlink)\n open_tx = tx_build(openop, steemd_instance, wallet_instance, account)\n return open_tx\n\n\ndef creat_num_open_tx(num, account, ssiglist, 
userID, permlinklist, steemd_instance, wallet_instance):\n opentxlist = []\n threads = []\n for i in range(num):\n t = MyThread(creat_open_tx,\n args=(account, ssiglist[i], userID, permlinklist[i], steemd_instance,\n wallet_instance))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n for t in threads:\n opentx = t.get_result()\n opentxlist.append(opentx)\n return opentxlist\n\n\ndef tx_broad(tx):\n tx.broadcast()\n\n\ndef mul_tx_broad(txlist):\n threads = []\n for tx in txlist:\n t = MyThread(tx_broad, args=(tx,))\n threads.append(t)\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n\n# public parma\nnodeTX = 5\nk = 2\nn = 3 # (k,n)\n# 节点地址\nnodelist = [\n 'http://101.76.208.83:8090',\n 'http://101.76.208.83:8094',\n 'http://101.76.208.83:8098'\n\n]\naccountlist = [\"initminer2\", \"zy1\", \"zy2\", \"zy3\", \"zy4\", \"zy5\", \"zy6\", \"zy7\", \"zy8\", \"zy9\", \"zy10\", \"zy11\", \"zy12\",\n \"zy13\", \"zy14\", \"zy15\", \"zy16\", \"zy17\", \"zy18\", \"zy19\", \"zy20\"]\n# 除了第一个 其他的都是posting key 5Hs4jcm5X4sanCnUKNFCjrq2irN8sH1Krzsb13Qd6DHqutZbhqu\nkeylist = ['5J3yMruND2TADZ7cZc6Cnp4VePrnehei2wvGdnLgf3aEj2nDGhc', '5Hs4jcm5X4sanCnUKNFCjrq2irN8sH1Krzsb13Qd6DHqutZbhqu', \"5KPLLsQ3MuWgKvNYqAFRjziWZenBqefDhSe4K1uYuj8hT3zQoKv\"]\ndebug = True\n# 群签名相关\ngroupobj = PairingGroup('SS512')\ngroup_signature = GroupSignature(groupobj)\nL = group_signature.LGen(n, k)\n# 密钥相关\nclientlist = []\nfor i in range(n):\n clientlist.append(steem.Steem(nodes=[nodelist[i]], keys=keylist[i]))\n\nvkliststr = []\nuskliststr = []\nvklist = []\nusklist = []\n# steem testchain信息\nsteembase.chains.known_chains['TEST'] = {\n 'chain_id': '18dcf0a285365fc58b71f18b3d3fec954aa0c141c44e4e5cb4cf777b9eab274e',\n 'prefix': 'TST', 'steem_symbol': 'TESTS', 'sbd_symbol': 'TBD', 'vests_symbol': 'VESTS'\n}\ngroupID = \"computer\"\nGID = group_signature.group.hash(groupID)\n\n\ndef main():\n # 假设不存在不可用节点(无法判断节点状态)\n userID = \"zhou\"\n UID = group_signature.group.hash(userID)\n print(\"uid\", UID)\n # 获取usk\n pk, usk = get_usk(userID, GID, UID)\n\n ssig, permlink = annoy_commit_tx(accountlist[0], usk, pk, GID, UID, clientlist[0].steemd, clientlist[0].wallet, title=\"paper_title\",\n body=\"paper_body\")\n sleep(3)\n open_tx(accountlist[0], ssig, userID, permlink, clientlist[0].steemd, clientlist[0].wallet)\n return\n\n\nif __name__ == \"__main__\":\n main()\n\nprint(\"end\")\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
10,
19,
22,
24,
31
]
}
|
[
10,
19,
22,
24,
31
] |
class Date:
def __init__(self, strDate):
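        # expects a date string in "DD.MM.YYYY" form, e.g. "25.12.2021"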
strDate = strDate.split('.')
self.day = strDate[0]
self.month = strDate[1]
self.year = strDate[2]
|
normal
|
{
"blob_id": "805fc9a26650f85227d14da972311ffbd9dbd555",
"index": 16,
"step-1": "<mask token>\n",
"step-2": "class Date:\n <mask token>\n",
"step-3": "class Date:\n\n def __init__(self, strDate):\n strDate = strDate.split('.')\n self.day = strDate[0]\n self.month = strDate[1]\n self.year = strDate[2]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from flask import Flask, render_template, url_for, request, jsonify
from model.model import load_site_config, load_hero_mapping, load_pretrained_model, valid_input, data_to_feature
from model.model import combine_list, hero_ids
from itertools import product
import numpy as np
app = Flask(__name__, static_folder='./static')
@app.route('/')
def demo():
    return render_template("home.html", hero_mapping=hero_mapping)
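
# /predict expects a JSON array of hero ids (presumably five per side, the
# same slot layout /recommend relies on) and returns the predicted win
# probability for each side.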
@app.route('/predict', methods=['POST'])
def predict():
    # validate the raw input; on failure, res carries the error response
valid, res = valid_input(list(request.json))
if not valid:
return res
else:
feature = data_to_feature(res)
prob = model.predict_proba(feature)[0]
        # prob holds the predicted probabilities for the two sides
ret_val = dict()
ret_val[0] = prob[0]
ret_val[1] = prob[1]
return ret_val
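
# /recommend expects the same array with exactly one open slot marked -1;
# it fills that slot with every candidate hero and returns the pick that
# maximizes the predicted win probability for that slot's side.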
@app.route('/recommend', methods=['POST'])
def recommend():
    idx = -1
    raw_data = list(request.json)
    # locate the single open slot, marked by -1 (cast to str so either an
    # int or a string marker matches)
    for i, id_str in enumerate(raw_data):
        if str(id_str) == '-1':
            idx = i
            break
    if idx == -1:
        return "ERROR: illegal input, no open slot (-1) found."
predict_side = 0 if idx < 5 else 1
hero_2_prob = dict()
max_prob = 0
recommended_hero_id = -1
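    # brute-force search: try each hero id in the open slot and keep the one
    # with the highest predicted win probability for predict_side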
for hero_id in hero_ids:
raw_data[idx] = str(hero_id)
valid, current_data = valid_input(raw_data)
if not valid:
continue
feature = data_to_feature(current_data)
prob = model.predict_proba(feature)[0,predict_side]
hero_2_prob[hero_id] = prob
if prob > max_prob:
recommended_hero_id = hero_id
max_prob = prob
ret_val = dict()
ret_val['hero_id'] = recommended_hero_id
ret_val['hero_name'] = inverse_hero_mapping[recommended_hero_id]
return ret_val
if __name__ == '__main__':
# site initialization
config = load_site_config('App/model/site_config.json')
hero_mapping, inverse_hero_mapping = load_hero_mapping(config['hero_mapping_path'])
model = load_pretrained_model(config['model_path'])
app.run(debug=True)
|
normal
|
{
"blob_id": "06605bbd91c62a02a66770ca3f37a9d2d1401ccb",
"index": 9929,
"step-1": "<mask token>\n\n\[email protected]('/')\ndef demo():\n return render_template('home.html', hero_mapping=hero_mapping)\n\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n valid, res = valid_input(list(request.json))\n if not valid:\n return res\n else:\n feature = data_to_feature(res)\n prob = model.predict_proba(feature)[0]\n ret_val = dict()\n ret_val[0] = prob[0]\n ret_val[1] = prob[1]\n return ret_val\n\n\[email protected]('/recommend', methods=['POST'])\ndef recommend():\n idx = -1\n raw_data = list(request.json)\n for i, id_str in enumerate(list(request.json)):\n if id_str == -1:\n idx = i\n break\n if idx == -1:\n return 'ERROR: illegal input.'\n predict_side = 0 if idx < 5 else 1\n hero_2_prob = dict()\n max_prob = 0\n recommended_hero_id = -1\n for hero_id in hero_ids:\n raw_data[idx] = str(hero_id)\n valid, current_data = valid_input(raw_data)\n if not valid:\n continue\n feature = data_to_feature(current_data)\n prob = model.predict_proba(feature)[0, predict_side]\n hero_2_prob[hero_id] = prob\n if prob > max_prob:\n recommended_hero_id = hero_id\n max_prob = prob\n ret_val = dict()\n ret_val['hero_id'] = recommended_hero_id\n ret_val['hero_name'] = inverse_hero_mapping[recommended_hero_id]\n return ret_val\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef demo():\n return render_template('home.html', hero_mapping=hero_mapping)\n\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n valid, res = valid_input(list(request.json))\n if not valid:\n return res\n else:\n feature = data_to_feature(res)\n prob = model.predict_proba(feature)[0]\n ret_val = dict()\n ret_val[0] = prob[0]\n ret_val[1] = prob[1]\n return ret_val\n\n\[email protected]('/recommend', methods=['POST'])\ndef recommend():\n idx = -1\n raw_data = list(request.json)\n for i, id_str in enumerate(list(request.json)):\n if id_str == -1:\n idx = i\n break\n if idx == -1:\n return 'ERROR: illegal input.'\n predict_side = 0 if idx < 5 else 1\n hero_2_prob = dict()\n max_prob = 0\n recommended_hero_id = -1\n for hero_id in hero_ids:\n raw_data[idx] = str(hero_id)\n valid, current_data = valid_input(raw_data)\n if not valid:\n continue\n feature = data_to_feature(current_data)\n prob = model.predict_proba(feature)[0, predict_side]\n hero_2_prob[hero_id] = prob\n if prob > max_prob:\n recommended_hero_id = hero_id\n max_prob = prob\n ret_val = dict()\n ret_val['hero_id'] = recommended_hero_id\n ret_val['hero_name'] = inverse_hero_mapping[recommended_hero_id]\n return ret_val\n\n\nif __name__ == '__main__':\n config = load_site_config('App/model/site_config.json')\n hero_mapping, inverse_hero_mapping = load_hero_mapping(config[\n 'hero_mapping_path'])\n model = load_pretrained_model(config['model_path'])\n app.run(debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__, static_folder='./static')\n\n\[email protected]('/')\ndef demo():\n return render_template('home.html', hero_mapping=hero_mapping)\n\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n valid, res = valid_input(list(request.json))\n if not valid:\n return res\n else:\n feature = data_to_feature(res)\n prob = model.predict_proba(feature)[0]\n ret_val = dict()\n ret_val[0] = prob[0]\n ret_val[1] = prob[1]\n return ret_val\n\n\[email protected]('/recommend', methods=['POST'])\ndef recommend():\n idx = -1\n raw_data = list(request.json)\n for i, id_str in enumerate(list(request.json)):\n if id_str == -1:\n idx = i\n break\n if idx == -1:\n return 'ERROR: illegal input.'\n predict_side = 0 if idx < 5 else 1\n hero_2_prob = dict()\n max_prob = 0\n recommended_hero_id = -1\n for hero_id in hero_ids:\n raw_data[idx] = str(hero_id)\n valid, current_data = valid_input(raw_data)\n if not valid:\n continue\n feature = data_to_feature(current_data)\n prob = model.predict_proba(feature)[0, predict_side]\n hero_2_prob[hero_id] = prob\n if prob > max_prob:\n recommended_hero_id = hero_id\n max_prob = prob\n ret_val = dict()\n ret_val['hero_id'] = recommended_hero_id\n ret_val['hero_name'] = inverse_hero_mapping[recommended_hero_id]\n return ret_val\n\n\nif __name__ == '__main__':\n config = load_site_config('App/model/site_config.json')\n hero_mapping, inverse_hero_mapping = load_hero_mapping(config[\n 'hero_mapping_path'])\n model = load_pretrained_model(config['model_path'])\n app.run(debug=True)\n",
"step-4": "from flask import Flask, render_template, url_for, request, jsonify\nfrom model.model import load_site_config, load_hero_mapping, load_pretrained_model, valid_input, data_to_feature\nfrom model.model import combine_list, hero_ids\nfrom itertools import product\nimport numpy as np\napp = Flask(__name__, static_folder='./static')\n\n\[email protected]('/')\ndef demo():\n return render_template('home.html', hero_mapping=hero_mapping)\n\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n valid, res = valid_input(list(request.json))\n if not valid:\n return res\n else:\n feature = data_to_feature(res)\n prob = model.predict_proba(feature)[0]\n ret_val = dict()\n ret_val[0] = prob[0]\n ret_val[1] = prob[1]\n return ret_val\n\n\[email protected]('/recommend', methods=['POST'])\ndef recommend():\n idx = -1\n raw_data = list(request.json)\n for i, id_str in enumerate(list(request.json)):\n if id_str == -1:\n idx = i\n break\n if idx == -1:\n return 'ERROR: illegal input.'\n predict_side = 0 if idx < 5 else 1\n hero_2_prob = dict()\n max_prob = 0\n recommended_hero_id = -1\n for hero_id in hero_ids:\n raw_data[idx] = str(hero_id)\n valid, current_data = valid_input(raw_data)\n if not valid:\n continue\n feature = data_to_feature(current_data)\n prob = model.predict_proba(feature)[0, predict_side]\n hero_2_prob[hero_id] = prob\n if prob > max_prob:\n recommended_hero_id = hero_id\n max_prob = prob\n ret_val = dict()\n ret_val['hero_id'] = recommended_hero_id\n ret_val['hero_name'] = inverse_hero_mapping[recommended_hero_id]\n return ret_val\n\n\nif __name__ == '__main__':\n config = load_site_config('App/model/site_config.json')\n hero_mapping, inverse_hero_mapping = load_hero_mapping(config[\n 'hero_mapping_path'])\n model = load_pretrained_model(config['model_path'])\n app.run(debug=True)\n",
"step-5": "from flask import Flask, render_template, url_for, request, jsonify\nfrom model.model import load_site_config, load_hero_mapping, load_pretrained_model, valid_input, data_to_feature\nfrom model.model import combine_list, hero_ids\nfrom itertools import product\nimport numpy as np\n\napp = Flask(__name__,static_folder='./static')\n\n\[email protected]('/')\ndef demo():\n return render_template(\"home.html\",hero_mapping = hero_mapping)\n\[email protected]('/predict', methods=['POST'])\ndef predict():\n # do check to validate data input\n valid, res = valid_input(list(request.json))\n if not valid:\n return res\n else:\n feature = data_to_feature(res)\n prob = model.predict_proba(feature)[0]\n # prob: probabilities\n ret_val = dict()\n ret_val[0] = prob[0]\n ret_val[1] = prob[1]\n return ret_val\n\[email protected]('/recommend', methods=['POST'])\ndef recommend():\n idx = -1\n raw_data = list(request.json)\n for i, id_str in enumerate(list(request.json)):\n if id_str == -1:\n idx = i\n break\n if idx == -1:\n return \"ERROR: illegal input.\"\n \n predict_side = 0 if idx < 5 else 1\n hero_2_prob = dict()\n max_prob = 0\n recommended_hero_id = -1\n for hero_id in hero_ids:\n raw_data[idx] = str(hero_id)\n valid, current_data = valid_input(raw_data)\n if not valid:\n continue\n feature = data_to_feature(current_data)\n prob = model.predict_proba(feature)[0,predict_side]\n hero_2_prob[hero_id] = prob\n if prob > max_prob:\n recommended_hero_id = hero_id\n max_prob = prob\n ret_val = dict()\n ret_val['hero_id'] = recommended_hero_id\n ret_val['hero_name'] = inverse_hero_mapping[recommended_hero_id]\n return ret_val\n\n\nif __name__ == '__main__':\n\n # site initialization\n config = load_site_config('App/model/site_config.json')\n hero_mapping, inverse_hero_mapping = load_hero_mapping(config['hero_mapping_path'])\n model = load_pretrained_model(config['model_path'])\n \n app.run(debug=True)",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
#!/usr/bin/env python
# coding=UTF-8
# for models.py
from django.db import models
from django.db.models import F, Q, Sum, Avg
from django.db import transaction
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.sites.models import Site
# from apps.router.models import User
# from django.contrib.auth.models import Message
# from django.contrib import messages  TODO: wangqi 20150521: the old Message model seems unused; if it is still needed, replace it with the messages framework
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.template.loader import render_to_string
from datetime import datetime, timedelta, date
# from apps.common.utils.utils_collection import *
# from apps.common.utils.utils_datetime import *
# from apps.common.utils.utils_mysql import *
# from apps.common.utils.utils_number import *
# from apps.common.utils.utils_render import *
# from apps.common.biz_utils.utils_sorter import *
# from apps.common.utils.utils_string import *
# from apps.common.biz_utils.utils_misc import *
# from apilib import *
# from apilib import tsapi
|
normal
|
{
"blob_id": "d551cab1856fbdb91918f9171d5c02b8dab84aba",
"index": 8223,
"step-1": "<mask token>\n",
"step-2": "from django.db import models\nfrom django.db.models import F, Q, Sum, Avg\nfrom django.db import transaction\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.contrib.contenttypes import generic\nfrom django.contrib.sites.models import Site\nfrom django.conf import settings\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.template.loader import render_to_string\nfrom datetime import datetime, timedelta, date\n",
"step-3": "# coding=UTF-8\n#!/usr/bin/env python\n\n# for models.py\nfrom django.db import models\nfrom django.db.models import F, Q, Sum, Avg\nfrom django.db import transaction\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.contrib.contenttypes import generic\nfrom django.contrib.sites.models import Site\n# from apps.router.models import User\n# from django.contrib.auth.models import Message\n# from django.contrib import messages TODO: wangqi 20150521 Message�ƺ�û�õ��ˣ����Ҫ�������������滻\nfrom django.conf import settings\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.template.loader import render_to_string\nfrom datetime import datetime, timedelta, date\n\n# from apps.common.utils.utils_collection import *\n# from apps.common.utils.utils_datetime import *\n# from apps.common.utils.utils_mysql import *\n# from apps.common.utils.utils_number import *\n# from apps.common.utils.utils_render import *\n# from apps.common.biz_utils.utils_sorter import *\n# from apps.common.utils.utils_string import *\n# from apps.common.biz_utils.utils_misc import *\n# from apilib import *\n# from apilib import tsapi\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
"""
Implements Single Instance Learning SVM
From https://github.com/garydoranjr/misvm/blob/master/misvm/sil.py
Modified by Nicolas
"""
from __future__ import print_function, division
import numpy as np
import inspect
from sklearn.svm import LinearSVC as SVM
from milsvm.util import slices
class SIL(SVM):
"""
Single-Instance Learning applied to MI data
"""
    def __init__(self, C=1.0, scale_C=True,
                 verbose=True, sv_cutoff=1e-7, **kwargs):
        """
        @param C : the loss/regularization tradeoff constant [default: 1.0]
        @param scale_C : if True [default], scale C by the number of examples
        @param verbose : print optimization status messages [default: True]
        @param sv_cutoff : the numerical cutoff for an example to be considered
                           a support vector [default: 1e-7]
        @param kwargs : remaining keyword arguments are passed through to the
                        underlying sklearn LinearSVC (linear kernel only)
        """
self._bags = None
self._bag_predictions = None
self.scale_C = scale_C
self.verbose = verbose
self.sv_cutoff = sv_cutoff
self.C = C
self._X = None
self._y = None
self._objective = None
self._alphas = None
self._sv = None
self._sv_alphas = None
self._sv_X = None
self._sv_y = None
self._b = None
self._predictions = None
        # pass C through explicitly; otherwise LinearSVC.__init__ would reset
        # self.C to its own default and silently discard the value given here
        super(SIL, self).__init__(C=C, **kwargs)
def fit(self, bags, y):
"""
@param bags : a sequence of n bags; each bag is an m-by-k array-like
object containing m instances with k features
@param y : an array-like object of length n containing -1/+1 labels
"""
self._bags = [np.asmatrix(bag) for bag in bags]
y = np.asmatrix(y).reshape((-1, 1))
svm_X = np.vstack(self._bags)
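        # SIL reduction: every instance inherits its bag's label, then a
        # standard supervised SVM is trained on the flattened instances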
svm_y = np.vstack([float(cls) * np.matrix(np.ones((len(bag), 1)))
for bag, cls in zip(self._bags, y)])
super(SIL, self).fit(svm_X, svm_y)
def _compute_separator(self, K):
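        # NOTE: this expects the parent class to provide _compute_separator;
        # sklearn's LinearSVC has no such method, so this code path is
        # vestigial here and only works with the original misvm SVM base.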
super(SIL, self)._compute_separator(K)
self._bag_predictions = _inst_to_bag_preds(self._predictions, self._bags)
def predict(self, bags, instancePrediction = None):
"""
@param bags : a sequence of n bags; each bag is an m-by-k array-like
object containing m instances with k features
@param instancePrediction : flag to indicate if instance predictions
should be given as output.
@return : an array of length n containing real-valued label predictions
(threshold at zero to produce binary predictions)
"""
if instancePrediction is None:
instancePrediction = False
bags = [np.asmatrix(bag) for bag in bags]
inst_preds = super(SIL, self).predict(np.vstack(bags))
if instancePrediction:
return _inst_to_bag_preds(inst_preds, bags), inst_preds
else:
return _inst_to_bag_preds(inst_preds, bags)
    def get_params(self, deep=True):
        """
        return params
        """
        # inspect.getargspec was removed in Python 3.11 and rejects
        # keyword-only arguments; getfullargspec handles both
        spec = inspect.getfullargspec(super(SIL, self).__init__)
        args = spec.args[1:] + spec.kwonlyargs  # drop 'self'
        return {key: getattr(self, key, None) for key in args}
def _inst_to_bag_preds(inst_preds, bags):
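    # MIL max rule: a bag's prediction is the max over its instances' scores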
return np.array([np.max(inst_preds[slice(*bidx)])
for bidx in slices(map(len, bags))])
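

# Example usage (a minimal sketch; data and names below are illustrative):
#   bags = [np.random.randn(m, 5) for m in (3, 7, 4)]   # 3 bags, 5 features
#   labels = [1, -1, 1]                                 # -1/+1 bag labels
#   clf = SIL(C=1.0)
#   clf.fit(bags, labels)
#   bag_preds = clf.predict(bags)                       # one score per bag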
|
normal
|
{
"blob_id": "f125269d5b52da41734ce94683139c44f0c4a66a",
"index": 3402,
"step-1": "<mask token>\n\n\nclass SIL(SVM):\n <mask token>\n <mask token>\n\n def fit(self, bags, y):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param y : an array-like object of length n containing -1/+1 labels\n \"\"\"\n self._bags = [np.asmatrix(bag) for bag in bags]\n y = np.asmatrix(y).reshape((-1, 1))\n svm_X = np.vstack(self._bags)\n svm_y = np.vstack([(float(cls) * np.matrix(np.ones((len(bag), 1)))) for\n bag, cls in zip(self._bags, y)])\n super(SIL, self).fit(svm_X, svm_y)\n <mask token>\n\n def predict(self, bags, instancePrediction=None):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param instancePrediction : flag to indicate if instance predictions \n should be given as output.\n @return : an array of length n containing real-valued label predictions\n (threshold at zero to produce binary predictions)\n \"\"\"\n if instancePrediction is None:\n instancePrediction = False\n bags = [np.asmatrix(bag) for bag in bags]\n inst_preds = super(SIL, self).predict(np.vstack(bags))\n if instancePrediction:\n return _inst_to_bag_preds(inst_preds, bags), inst_preds\n else:\n return _inst_to_bag_preds(inst_preds, bags)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SIL(SVM):\n <mask token>\n\n def __init__(self, C=1.0, scale_C=True, verbose=True, sv_cutoff=1e-07,\n **kwargs):\n \"\"\"\n @param kernel : the desired kernel function; can be linear, quadratic,\n polynomial, or rbf [default: linear]\n @param C : the loss/regularization tradeoff constant [default: 1.0]\n @param scale_C : if False [default], scale C by the number of examples\n @param p : polynomial degree when a 'polynomial' kernel is used\n [default: 3]\n @param gamma : RBF scale parameter when an 'rbf' kernel is used\n [default: 1.0]\n @param verbose : print optimization status messages [default: True]\n @param sv_cutoff : the numerical cutoff for an example to be considered\n a support vector [default: 1e-7]\n \"\"\"\n self._bags = None\n self._bag_predictions = None\n self.scale_C = scale_C\n self.verbose = verbose\n self.sv_cutoff = sv_cutoff\n self.C = C\n self._X = None\n self._y = None\n self._objective = None\n self._alphas = None\n self._sv = None\n self._sv_alphas = None\n self._sv_X = None\n self._sv_y = None\n self._b = None\n self._predictions = None\n super(SIL, self).__init__(**kwargs)\n\n def fit(self, bags, y):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param y : an array-like object of length n containing -1/+1 labels\n \"\"\"\n self._bags = [np.asmatrix(bag) for bag in bags]\n y = np.asmatrix(y).reshape((-1, 1))\n svm_X = np.vstack(self._bags)\n svm_y = np.vstack([(float(cls) * np.matrix(np.ones((len(bag), 1)))) for\n bag, cls in zip(self._bags, y)])\n super(SIL, self).fit(svm_X, svm_y)\n <mask token>\n\n def predict(self, bags, instancePrediction=None):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param instancePrediction : flag to indicate if instance predictions \n should be given as output.\n @return : an array of length n containing real-valued label predictions\n (threshold at zero to produce binary predictions)\n \"\"\"\n if instancePrediction is None:\n instancePrediction = False\n bags = [np.asmatrix(bag) for bag in bags]\n inst_preds = super(SIL, self).predict(np.vstack(bags))\n if instancePrediction:\n return _inst_to_bag_preds(inst_preds, bags), inst_preds\n else:\n return _inst_to_bag_preds(inst_preds, bags)\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SIL(SVM):\n \"\"\"\n Single-Instance Learning applied to MI data\n \"\"\"\n\n def __init__(self, C=1.0, scale_C=True, verbose=True, sv_cutoff=1e-07,\n **kwargs):\n \"\"\"\n @param kernel : the desired kernel function; can be linear, quadratic,\n polynomial, or rbf [default: linear]\n @param C : the loss/regularization tradeoff constant [default: 1.0]\n @param scale_C : if False [default], scale C by the number of examples\n @param p : polynomial degree when a 'polynomial' kernel is used\n [default: 3]\n @param gamma : RBF scale parameter when an 'rbf' kernel is used\n [default: 1.0]\n @param verbose : print optimization status messages [default: True]\n @param sv_cutoff : the numerical cutoff for an example to be considered\n a support vector [default: 1e-7]\n \"\"\"\n self._bags = None\n self._bag_predictions = None\n self.scale_C = scale_C\n self.verbose = verbose\n self.sv_cutoff = sv_cutoff\n self.C = C\n self._X = None\n self._y = None\n self._objective = None\n self._alphas = None\n self._sv = None\n self._sv_alphas = None\n self._sv_X = None\n self._sv_y = None\n self._b = None\n self._predictions = None\n super(SIL, self).__init__(**kwargs)\n\n def fit(self, bags, y):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param y : an array-like object of length n containing -1/+1 labels\n \"\"\"\n self._bags = [np.asmatrix(bag) for bag in bags]\n y = np.asmatrix(y).reshape((-1, 1))\n svm_X = np.vstack(self._bags)\n svm_y = np.vstack([(float(cls) * np.matrix(np.ones((len(bag), 1)))) for\n bag, cls in zip(self._bags, y)])\n super(SIL, self).fit(svm_X, svm_y)\n\n def _compute_separator(self, K):\n super(SIL, self)._compute_separator(K)\n self._bag_predictions = _inst_to_bag_preds(self._predictions, self.\n _bags)\n\n def predict(self, bags, instancePrediction=None):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param instancePrediction : flag to indicate if instance predictions \n should be given as output.\n @return : an array of length n containing real-valued label predictions\n (threshold at zero to produce binary predictions)\n \"\"\"\n if instancePrediction is None:\n instancePrediction = False\n bags = [np.asmatrix(bag) for bag in bags]\n inst_preds = super(SIL, self).predict(np.vstack(bags))\n if instancePrediction:\n return _inst_to_bag_preds(inst_preds, bags), inst_preds\n else:\n return _inst_to_bag_preds(inst_preds, bags)\n\n def get_params(self, deep=True):\n \"\"\"\n return params\n \"\"\"\n args, _, _, _ = inspect.getargspec(super(SIL, self).__init__)\n args.pop(0)\n return {key: getattr(self, key, None) for key in args}\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass SIL(SVM):\n \"\"\"\n Single-Instance Learning applied to MI data\n \"\"\"\n\n def __init__(self, C=1.0, scale_C=True, verbose=True, sv_cutoff=1e-07,\n **kwargs):\n \"\"\"\n @param kernel : the desired kernel function; can be linear, quadratic,\n polynomial, or rbf [default: linear]\n @param C : the loss/regularization tradeoff constant [default: 1.0]\n @param scale_C : if False [default], scale C by the number of examples\n @param p : polynomial degree when a 'polynomial' kernel is used\n [default: 3]\n @param gamma : RBF scale parameter when an 'rbf' kernel is used\n [default: 1.0]\n @param verbose : print optimization status messages [default: True]\n @param sv_cutoff : the numerical cutoff for an example to be considered\n a support vector [default: 1e-7]\n \"\"\"\n self._bags = None\n self._bag_predictions = None\n self.scale_C = scale_C\n self.verbose = verbose\n self.sv_cutoff = sv_cutoff\n self.C = C\n self._X = None\n self._y = None\n self._objective = None\n self._alphas = None\n self._sv = None\n self._sv_alphas = None\n self._sv_X = None\n self._sv_y = None\n self._b = None\n self._predictions = None\n super(SIL, self).__init__(**kwargs)\n\n def fit(self, bags, y):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param y : an array-like object of length n containing -1/+1 labels\n \"\"\"\n self._bags = [np.asmatrix(bag) for bag in bags]\n y = np.asmatrix(y).reshape((-1, 1))\n svm_X = np.vstack(self._bags)\n svm_y = np.vstack([(float(cls) * np.matrix(np.ones((len(bag), 1)))) for\n bag, cls in zip(self._bags, y)])\n super(SIL, self).fit(svm_X, svm_y)\n\n def _compute_separator(self, K):\n super(SIL, self)._compute_separator(K)\n self._bag_predictions = _inst_to_bag_preds(self._predictions, self.\n _bags)\n\n def predict(self, bags, instancePrediction=None):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param instancePrediction : flag to indicate if instance predictions \n should be given as output.\n @return : an array of length n containing real-valued label predictions\n (threshold at zero to produce binary predictions)\n \"\"\"\n if instancePrediction is None:\n instancePrediction = False\n bags = [np.asmatrix(bag) for bag in bags]\n inst_preds = super(SIL, self).predict(np.vstack(bags))\n if instancePrediction:\n return _inst_to_bag_preds(inst_preds, bags), inst_preds\n else:\n return _inst_to_bag_preds(inst_preds, bags)\n\n def get_params(self, deep=True):\n \"\"\"\n return params\n \"\"\"\n args, _, _, _ = inspect.getargspec(super(SIL, self).__init__)\n args.pop(0)\n return {key: getattr(self, key, None) for key in args}\n\n\ndef _inst_to_bag_preds(inst_preds, bags):\n return np.array([np.max(inst_preds[slice(*bidx)]) for bidx in slices(\n map(len, bags))])\n",
"step-5": "\"\"\"\nImplements Single Instance Learning SVM\nFrom https://github.com/garydoranjr/misvm/blob/master/misvm/sil.py\nModified by Nicolas\n\"\"\"\nfrom __future__ import print_function, division\nimport numpy as np\nimport inspect\nfrom sklearn.svm import LinearSVC as SVM\nfrom milsvm.util import slices\n\n\nclass SIL(SVM):\n \"\"\"\n Single-Instance Learning applied to MI data\n \"\"\"\n\n def __init__(self,C=1.0, scale_C=True,\n verbose=True, sv_cutoff=1e-7, **kwargs):\n \"\"\"\n @param kernel : the desired kernel function; can be linear, quadratic,\n polynomial, or rbf [default: linear]\n @param C : the loss/regularization tradeoff constant [default: 1.0]\n @param scale_C : if False [default], scale C by the number of examples\n @param p : polynomial degree when a 'polynomial' kernel is used\n [default: 3]\n @param gamma : RBF scale parameter when an 'rbf' kernel is used\n [default: 1.0]\n @param verbose : print optimization status messages [default: True]\n @param sv_cutoff : the numerical cutoff for an example to be considered\n a support vector [default: 1e-7]\n \"\"\"\n \n self._bags = None\n self._bag_predictions = None\n self.scale_C = scale_C\n self.verbose = verbose\n self.sv_cutoff = sv_cutoff\n self.C = C\n\n self._X = None\n self._y = None\n self._objective = None\n self._alphas = None\n self._sv = None\n self._sv_alphas = None\n self._sv_X = None\n self._sv_y = None\n self._b = None\n self._predictions = None\n super(SIL, self).__init__(**kwargs)\n\n def fit(self, bags, y):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param y : an array-like object of length n containing -1/+1 labels\n \"\"\"\n self._bags = [np.asmatrix(bag) for bag in bags]\n y = np.asmatrix(y).reshape((-1, 1))\n svm_X = np.vstack(self._bags)\n svm_y = np.vstack([float(cls) * np.matrix(np.ones((len(bag), 1)))\n for bag, cls in zip(self._bags, y)])\n super(SIL, self).fit(svm_X, svm_y)\n\n def _compute_separator(self, K):\n super(SIL, self)._compute_separator(K)\n self._bag_predictions = _inst_to_bag_preds(self._predictions, self._bags)\n\n def predict(self, bags, instancePrediction = None):\n \"\"\"\n @param bags : a sequence of n bags; each bag is an m-by-k array-like\n object containing m instances with k features\n @param instancePrediction : flag to indicate if instance predictions \n should be given as output.\n @return : an array of length n containing real-valued label predictions\n (threshold at zero to produce binary predictions)\n \"\"\"\n if instancePrediction is None:\n instancePrediction = False\n \n bags = [np.asmatrix(bag) for bag in bags]\n inst_preds = super(SIL, self).predict(np.vstack(bags))\n\n if instancePrediction: \n return _inst_to_bag_preds(inst_preds, bags), inst_preds\n else:\n return _inst_to_bag_preds(inst_preds, bags)\n\n def get_params(self, deep=True):\n \"\"\"\n return params\n \"\"\"\n args, _, _, _ = inspect.getargspec(super(SIL, self).__init__)\n args.pop(0)\n return {key: getattr(self, key, None) for key in args}\n\n\ndef _inst_to_bag_preds(inst_preds, bags):\n return np.array([np.max(inst_preds[slice(*bidx)])\n for bidx in slices(map(len, bags))])\n",
"step-ids": [
3,
4,
7,
8,
10
]
}
|
[
3,
4,
7,
8,
10
] |
from numpy import exp, array, dot
from read import normalized_set
class NeuralNetwork():
def __init__(self, layer1, layer2):
self.layer1 = layer1
self.layer2 = layer2
def __sigmoid(self, x):
return 1 / (1 + exp(-x))
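    # note: x is expected to already be a sigmoid activation, so the
    # derivative simplifies to x * (1 - x)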
def __sigmoid_derivative(self, x):
return x * (1 - x)
def train(self, training_set_inputs, training_set_outputs, number_of_training_iterations):
for iteration in range(number_of_training_iterations):
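            # forward pass, then manual backpropagation of the prediction
            # error through both sigmoid layers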
output_from_layer_1, output_from_layer_2 = self.think(training_set_inputs)
layer2_error = training_set_outputs - output_from_layer_2
layer2_delta = layer2_error * self.__sigmoid_derivative(output_from_layer_2)
layer1_error = layer2_delta.dot(self.layer2.T)
layer1_delta = layer1_error * self.__sigmoid_derivative(output_from_layer_1)
layer1_adjustment = training_set_inputs.T.dot(layer1_delta)
layer2_adjustment = output_from_layer_1.T.dot(layer2_delta)
self.layer1 += layer1_adjustment
self.layer2 += layer2_adjustment
def think(self, inputs):
output_from_layer1 = self.__sigmoid(dot(inputs, self.layer1))
output_from_layer2 = self.__sigmoid(dot(output_from_layer1, self.layer2))
return output_from_layer1, output_from_layer2
def print_weights(self):
print(self.layer1)
print(self.layer2)
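# a 3-2-1 network: three inputs, two hidden sigmoid units, one sigmoid output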
if __name__ == "__main__":
layer1 = array([[0.2, 0.1], [0.3, 0.1], [0.2, 0.1]])
layer2 = array([[0.5, 0.1]]).T
neural_network = NeuralNetwork(layer1, layer2)
neural_network.print_weights()
training_set_inputs = array(
[
[normalized_set['input1'][0], normalized_set['input2'][0], normalized_set['input3'][0]],
[normalized_set['input1'][1], normalized_set['input2'][1], normalized_set['input3'][1]],
[normalized_set['input1'][2], normalized_set['input2'][2], normalized_set['input3'][2]],
[normalized_set['input1'][3], normalized_set['input2'][3], normalized_set['input3'][3]],
[normalized_set['input1'][4], normalized_set['input2'][4], normalized_set['input3'][4]],
[normalized_set['input1'][5], normalized_set['input2'][5], normalized_set['input3'][5]]
])
training_set_outputs = array(
[[
normalized_set['output'][0],
normalized_set['output'][1],
normalized_set['output'][2],
normalized_set['output'][3],
normalized_set['output'][4],
normalized_set['output'][5]
]]).T
print("Inputs", training_set_inputs)
print("Output", training_set_outputs)
neural_network.train(training_set_inputs, training_set_outputs, 60000)
print("Weights ")
neural_network.print_weights()
output = neural_network.think(array([0.5, 0.6, 0.1]))
print("Weights", output[0])
print("Out ", output[1])
|
normal
|
{
"blob_id": "8109fcc136b967e0ed4ca06077b32612605d5e5f",
"index": 1136,
"step-1": "<mask token>\n\n\nclass NeuralNetwork:\n\n def __init__(self, layer1, layer2):\n self.layer1 = layer1\n self.layer2 = layer2\n <mask token>\n <mask token>\n <mask token>\n\n def think(self, inputs):\n output_from_layer1 = self.__sigmoid(dot(inputs, self.layer1))\n output_from_layer2 = self.__sigmoid(dot(output_from_layer1, self.\n layer2))\n return output_from_layer1, output_from_layer2\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass NeuralNetwork:\n\n def __init__(self, layer1, layer2):\n self.layer1 = layer1\n self.layer2 = layer2\n <mask token>\n\n def __sigmoid_derivative(self, x):\n return x * (1 - x)\n\n def train(self, training_set_inputs, training_set_outputs,\n number_of_training_iterations):\n for iteration in range(number_of_training_iterations):\n output_from_layer_1, output_from_layer_2 = self.think(\n training_set_inputs)\n layer2_error = training_set_outputs - output_from_layer_2\n layer2_delta = layer2_error * self.__sigmoid_derivative(\n output_from_layer_2)\n layer1_error = layer2_delta.dot(self.layer2.T)\n layer1_delta = layer1_error * self.__sigmoid_derivative(\n output_from_layer_1)\n layer1_adjustment = training_set_inputs.T.dot(layer1_delta)\n layer2_adjustment = output_from_layer_1.T.dot(layer2_delta)\n self.layer1 += layer1_adjustment\n self.layer2 += layer2_adjustment\n\n def think(self, inputs):\n output_from_layer1 = self.__sigmoid(dot(inputs, self.layer1))\n output_from_layer2 = self.__sigmoid(dot(output_from_layer1, self.\n layer2))\n return output_from_layer1, output_from_layer2\n\n def print_weights(self):\n print(self.layer1)\n print(self.layer2)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass NeuralNetwork:\n\n def __init__(self, layer1, layer2):\n self.layer1 = layer1\n self.layer2 = layer2\n\n def __sigmoid(self, x):\n return 1 / (1 + exp(-x))\n\n def __sigmoid_derivative(self, x):\n return x * (1 - x)\n\n def train(self, training_set_inputs, training_set_outputs,\n number_of_training_iterations):\n for iteration in range(number_of_training_iterations):\n output_from_layer_1, output_from_layer_2 = self.think(\n training_set_inputs)\n layer2_error = training_set_outputs - output_from_layer_2\n layer2_delta = layer2_error * self.__sigmoid_derivative(\n output_from_layer_2)\n layer1_error = layer2_delta.dot(self.layer2.T)\n layer1_delta = layer1_error * self.__sigmoid_derivative(\n output_from_layer_1)\n layer1_adjustment = training_set_inputs.T.dot(layer1_delta)\n layer2_adjustment = output_from_layer_1.T.dot(layer2_delta)\n self.layer1 += layer1_adjustment\n self.layer2 += layer2_adjustment\n\n def think(self, inputs):\n output_from_layer1 = self.__sigmoid(dot(inputs, self.layer1))\n output_from_layer2 = self.__sigmoid(dot(output_from_layer1, self.\n layer2))\n return output_from_layer1, output_from_layer2\n\n def print_weights(self):\n print(self.layer1)\n print(self.layer2)\n\n\nif __name__ == '__main__':\n layer1 = array([[0.2, 0.1], [0.3, 0.1], [0.2, 0.1]])\n layer2 = array([[0.5, 0.1]]).T\n neural_network = NeuralNetwork(layer1, layer2)\n neural_network.print_weights()\n training_set_inputs = array([[normalized_set['input1'][0],\n normalized_set['input2'][0], normalized_set['input3'][0]], [\n normalized_set['input1'][1], normalized_set['input2'][1],\n normalized_set['input3'][1]], [normalized_set['input1'][2],\n normalized_set['input2'][2], normalized_set['input3'][2]], [\n normalized_set['input1'][3], normalized_set['input2'][3],\n normalized_set['input3'][3]], [normalized_set['input1'][4],\n normalized_set['input2'][4], normalized_set['input3'][4]], [\n normalized_set['input1'][5], normalized_set['input2'][5],\n normalized_set['input3'][5]]])\n training_set_outputs = array([[normalized_set['output'][0],\n normalized_set['output'][1], normalized_set['output'][2],\n normalized_set['output'][3], normalized_set['output'][4],\n normalized_set['output'][5]]]).T\n print('Inputs', training_set_inputs)\n print('Output', training_set_outputs)\n neural_network.train(training_set_inputs, training_set_outputs, 60000)\n print('Weights ')\n neural_network.print_weights()\n output = neural_network.think(array([0.5, 0.6, 0.1]))\n print('Weights', output[0])\n print('Out ', output[1])\n",
"step-4": "from numpy import exp, array, dot\nfrom read import normalized\n\n\nclass NeuralNetwork:\n\n def __init__(self, layer1, layer2):\n self.layer1 = layer1\n self.layer2 = layer2\n\n def __sigmoid(self, x):\n return 1 / (1 + exp(-x))\n\n def __sigmoid_derivative(self, x):\n return x * (1 - x)\n\n def train(self, training_set_inputs, training_set_outputs,\n number_of_training_iterations):\n for iteration in range(number_of_training_iterations):\n output_from_layer_1, output_from_layer_2 = self.think(\n training_set_inputs)\n layer2_error = training_set_outputs - output_from_layer_2\n layer2_delta = layer2_error * self.__sigmoid_derivative(\n output_from_layer_2)\n layer1_error = layer2_delta.dot(self.layer2.T)\n layer1_delta = layer1_error * self.__sigmoid_derivative(\n output_from_layer_1)\n layer1_adjustment = training_set_inputs.T.dot(layer1_delta)\n layer2_adjustment = output_from_layer_1.T.dot(layer2_delta)\n self.layer1 += layer1_adjustment\n self.layer2 += layer2_adjustment\n\n def think(self, inputs):\n output_from_layer1 = self.__sigmoid(dot(inputs, self.layer1))\n output_from_layer2 = self.__sigmoid(dot(output_from_layer1, self.\n layer2))\n return output_from_layer1, output_from_layer2\n\n def print_weights(self):\n print(self.layer1)\n print(self.layer2)\n\n\nif __name__ == '__main__':\n layer1 = array([[0.2, 0.1], [0.3, 0.1], [0.2, 0.1]])\n layer2 = array([[0.5, 0.1]]).T\n neural_network = NeuralNetwork(layer1, layer2)\n neural_network.print_weights()\n training_set_inputs = array([[normalized_set['input1'][0],\n normalized_set['input2'][0], normalized_set['input3'][0]], [\n normalized_set['input1'][1], normalized_set['input2'][1],\n normalized_set['input3'][1]], [normalized_set['input1'][2],\n normalized_set['input2'][2], normalized_set['input3'][2]], [\n normalized_set['input1'][3], normalized_set['input2'][3],\n normalized_set['input3'][3]], [normalized_set['input1'][4],\n normalized_set['input2'][4], normalized_set['input3'][4]], [\n normalized_set['input1'][5], normalized_set['input2'][5],\n normalized_set['input3'][5]]])\n training_set_outputs = array([[normalized_set['output'][0],\n normalized_set['output'][1], normalized_set['output'][2],\n normalized_set['output'][3], normalized_set['output'][4],\n normalized_set['output'][5]]]).T\n print('Inputs', training_set_inputs)\n print('Output', training_set_outputs)\n neural_network.train(training_set_inputs, training_set_outputs, 60000)\n print('Weights ')\n neural_network.print_weights()\n output = neural_network.think(array([0.5, 0.6, 0.1]))\n print('Weights', output[0])\n print('Out ', output[1])\n",
"step-5": "from numpy import exp, array, dot\n\nfrom read import normalized\n\nclass NeuralNetwork():\n def __init__(self, layer1, layer2):\n self.layer1 = layer1\n self.layer2 = layer2\n\n def __sigmoid(self, x):\n return 1 / (1 + exp(-x))\n\n def __sigmoid_derivative(self, x):\n return x * (1 - x)\n\n def train(self, training_set_inputs, training_set_outputs, number_of_training_iterations):\n for iteration in range(number_of_training_iterations):\n \n output_from_layer_1, output_from_layer_2 = self.think(training_set_inputs)\n\n layer2_error = training_set_outputs - output_from_layer_2\n layer2_delta = layer2_error * self.__sigmoid_derivative(output_from_layer_2)\n\n layer1_error = layer2_delta.dot(self.layer2.T)\n layer1_delta = layer1_error * self.__sigmoid_derivative(output_from_layer_1)\n\n layer1_adjustment = training_set_inputs.T.dot(layer1_delta)\n layer2_adjustment = output_from_layer_1.T.dot(layer2_delta)\n\n self.layer1 += layer1_adjustment\n self.layer2 += layer2_adjustment\n\n\n def think(self, inputs):\n output_from_layer1 = self.__sigmoid(dot(inputs, self.layer1))\n output_from_layer2 = self.__sigmoid(dot(output_from_layer1, self.layer2))\n return output_from_layer1, output_from_layer2\n\n\n def print_weights(self):\n print(self.layer1)\n print(self.layer2)\n\n\nif __name__ == \"__main__\":\n \n layer1 = array([[0.2, 0.1], [0.3, 0.1], [0.2, 0.1]])\n\n layer2 = array([[0.5, 0.1]]).T\n\n neural_network = NeuralNetwork(layer1, layer2)\n\n neural_network.print_weights()\n\n training_set_inputs = array(\n [\n [normalized_set['input1'][0], normalized_set['input2'][0], normalized_set['input3'][0]],\n [normalized_set['input1'][1], normalized_set['input2'][1], normalized_set['input3'][1]],\n [normalized_set['input1'][2], normalized_set['input2'][2], normalized_set['input3'][2]],\n [normalized_set['input1'][3], normalized_set['input2'][3], normalized_set['input3'][3]],\n [normalized_set['input1'][4], normalized_set['input2'][4], normalized_set['input3'][4]],\n [normalized_set['input1'][5], normalized_set['input2'][5], normalized_set['input3'][5]]\n ])\n\n training_set_outputs = array(\n [[\n normalized_set['output'][0],\n normalized_set['output'][1],\n normalized_set['output'][2],\n normalized_set['output'][3],\n normalized_set['output'][4],\n normalized_set['output'][5]\n ]]).T\n\n print(\"Inputs\", training_set_inputs)\n print(\"Output\", training_set_outputs)\n\n neural_network.train(training_set_inputs, training_set_outputs, 60000)\n\n \n print(\"Weights \")\n neural_network.print_weights()\n\n \n output = neural_network.think(array([0.5, 0.6, 0.1]))\n print(\"Weights\", output[0])\n print(\"Out \", output[1])\n\n ",
"step-ids": [
3,
6,
8,
9,
10
]
}
|
[
3,
6,
8,
9,
10
] |
import torch
import torch.nn as nn
import numpy as np
class EuclideanLoss(nn.Module):
def __init__(self, c_p, c_h):
super().__init__()
self.c_p = c_p
self.c_h = c_h
def forward(self, y, d):
'''
y: prediction, size = (n_product, n_obs)
d: actual sales, size = (n_product, n_obs)
'''
        # asymmetric deviation: c_p weights over-prediction (y > d),
        # c_h weights under-prediction (y < d)
        diff = torch.add(y, -d)
        diff = torch.add(torch.mul(torch.max(diff, torch.zeros(1)), self.c_p), torch.mul(torch.max(-diff, torch.zeros(1)), self.c_h))
        # torch.norm already reduces to a scalar, so no further sum is needed
        diff = torch.norm(diff)
        return diff
class CostFunction(nn.Module):
def __init__(self, c_p, c_h):
super().__init__()
self.c_p = c_p
self.c_h = c_h
def forward(self, y, d):
'''
y: prediction, size = (n_product, n_obs)
d: actual sales, size = (n_product, n_obs)
'''
cost = torch.add(y, -d)
cost = torch.add(torch.mul(torch.max(cost, torch.zeros(1)), self.c_p), torch.mul(torch.max(-cost, torch.zeros(1)), self.c_h))
cost = torch.sum(cost)
return cost
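# A minimal usage sketch (illustrative shapes and cost values only):
if __name__ == '__main__':
    loss_fn = CostFunction(c_p=2.0, c_h=1.0)
    y = torch.rand(4, 8)  # predictions, size = (n_product, n_obs)
    d = torch.rand(4, 8)  # actual sales, size = (n_product, n_obs)
    print(loss_fn(y, d))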
|
normal
|
{
"blob_id": "67be25e8fdf004515e18e1c20b8d0238222a2172",
"index": 1401,
"step-1": "<mask token>\n\n\nclass EuclideanLoss(nn.Module):\n <mask token>\n <mask token>\n\n\nclass CostFunction(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n\n def forward(self, y, d):\n \"\"\"\n y: prediction, size = (n_product, n_obs)\n d: actual sales, size = (n_product, n_obs)\n \"\"\"\n cost = torch.add(y, -d)\n cost = torch.add(torch.mul(torch.max(cost, torch.zeros(1)), self.\n c_p), torch.mul(torch.max(-cost, torch.zeros(1)), self.c_h))\n cost = torch.sum(cost)\n return cost\n",
"step-2": "<mask token>\n\n\nclass EuclideanLoss(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n <mask token>\n\n\nclass CostFunction(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n\n def forward(self, y, d):\n \"\"\"\n y: prediction, size = (n_product, n_obs)\n d: actual sales, size = (n_product, n_obs)\n \"\"\"\n cost = torch.add(y, -d)\n cost = torch.add(torch.mul(torch.max(cost, torch.zeros(1)), self.\n c_p), torch.mul(torch.max(-cost, torch.zeros(1)), self.c_h))\n cost = torch.sum(cost)\n return cost\n",
"step-3": "<mask token>\n\n\nclass EuclideanLoss(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n\n def forward(self, y, d):\n \"\"\"\n y: prediction, size = (n_product, n_obs)\n d: actual sales, size = (n_product, n_obs)\n \"\"\"\n diff = torch.add(y, -d)\n diff = torch.add(torch.mul(torch.max(diff, torch.zeros(1)), self.\n c_p), torch.mul(torch.max(-diff, torch.zeros(1)), self.c_h))\n diff = torch.norm(diff)\n diff = torch.sum(diff)\n return diff\n\n\nclass CostFunction(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n\n def forward(self, y, d):\n \"\"\"\n y: prediction, size = (n_product, n_obs)\n d: actual sales, size = (n_product, n_obs)\n \"\"\"\n cost = torch.add(y, -d)\n cost = torch.add(torch.mul(torch.max(cost, torch.zeros(1)), self.\n c_p), torch.mul(torch.max(-cost, torch.zeros(1)), self.c_h))\n cost = torch.sum(cost)\n return cost\n",
"step-4": "import torch\nimport torch.nn as nn\nimport numpy as np\n\n\nclass EuclideanLoss(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n\n def forward(self, y, d):\n \"\"\"\n y: prediction, size = (n_product, n_obs)\n d: actual sales, size = (n_product, n_obs)\n \"\"\"\n diff = torch.add(y, -d)\n diff = torch.add(torch.mul(torch.max(diff, torch.zeros(1)), self.\n c_p), torch.mul(torch.max(-diff, torch.zeros(1)), self.c_h))\n diff = torch.norm(diff)\n diff = torch.sum(diff)\n return diff\n\n\nclass CostFunction(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n\n def forward(self, y, d):\n \"\"\"\n y: prediction, size = (n_product, n_obs)\n d: actual sales, size = (n_product, n_obs)\n \"\"\"\n cost = torch.add(y, -d)\n cost = torch.add(torch.mul(torch.max(cost, torch.zeros(1)), self.\n c_p), torch.mul(torch.max(-cost, torch.zeros(1)), self.c_h))\n cost = torch.sum(cost)\n return cost\n",
"step-5": "import torch\nimport torch.nn as nn\nimport numpy as np\n\nclass EuclideanLoss(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n\n def forward(self, y, d):\n '''\n y: prediction, size = (n_product, n_obs)\n d: actual sales, size = (n_product, n_obs)\n '''\n\n diff = torch.add(y, -d)\n diff = torch.add(torch.mul(torch.max(diff, torch.zeros(1)), self.c_p), torch.mul(torch.max(-diff, torch.zeros(1)), self.c_h))\n diff = torch.norm(diff)\n diff = torch.sum(diff)\n return diff\n\nclass CostFunction(nn.Module):\n\n def __init__(self, c_p, c_h):\n super().__init__()\n self.c_p = c_p\n self.c_h = c_h\n\n def forward(self, y, d):\n '''\n y: prediction, size = (n_product, n_obs)\n d: actual sales, size = (n_product, n_obs)\n '''\n\n cost = torch.add(y, -d)\n cost = torch.add(torch.mul(torch.max(cost, torch.zeros(1)), self.c_p), torch.mul(torch.max(-cost, torch.zeros(1)), self.c_h))\n cost = torch.sum(cost)\n\n return cost",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#Recursively parse a string for a pattern that can be either 1 or 2 characters long
|
normal
|
{
"blob_id": "4d524bb4b88b571c9567c651be1b1f1f19fd3c0b",
"index": 6296,
"step-1": "#Recursively parse a string for a pattern that can be either 1 or 2 characters long",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-01-13 15:01
import pickle
import numpy as np
from bert_serving.client import BertClient
from pyhanlp import *
CharTable = JClass('com.hankcs.hanlp.dictionary.other.CharTable')
# bc = BertClient(ip='192.168.1.88') # ip address of the server
bc = BertClient(ip='127.0.0.1') # ip address of the GPU machine
def embed_last_token(text):
result = bc.encode(text, show_tokens=True)
# print(result)
batch = []
for sent, tensor, tokens in zip(text, result[0], result[1]):
valid = []
tid = 0
buffer = ''
words = sent.lower().split()
for i, t in enumerate(tokens):
if t == '[CLS]' or t == '[SEP]':
continue
else:
if t.startswith('##'):
t = t[2:]
elif t == '[UNK]':
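                    # recover the single source character that the WordPiece
                    # vocabulary could not encode, so the alignment keeps advancing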
t = words[tid][len(buffer)]
buffer += t
if buffer == words[tid]:
valid.append(i)
buffer = ''
tid += 1
if len(valid) != len(sent.split()) or tid != len(words):
print(valid)
print(sent.split())
print(result[1])
batch.append(tensor[valid, :])
return batch
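# note: despite its name, embed_sum mean-pools the sub-token vectors of each
# word (see the .mean(axis=0) below) rather than summing them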
def embed_sum(text):
result = bc.encode(text, show_tokens=True)
# print(result)
batch = []
for sent, tensor, tokens in zip(text, result[0], result[1]):
token_tensor = []
sent_tensor = []
tid = 0
buffer = ''
words = sent.lower().split()
for i, t in enumerate(tokens):
if t == '[CLS]' or t == '[SEP]':
continue
else:
if t.startswith('##'):
t = t[2:]
elif t == '[UNK]':
t = words[tid][len(buffer)]
buffer += t
token_tensor.append(tensor[i, :])
if buffer == words[tid]:
sent_tensor.append(np.stack(token_tensor).mean(axis=0))
token_tensor = []
buffer = ''
tid += 1
if tid != len(words) or len(sent_tensor) != len(words):
print(sent.split())
print(tokens)
exit()
batch.append(np.stack(sent_tensor))
return batch
def generate_bert(path, output, embed_fun=embed_sum):
print(output)
total = 0
with open(path) as src:
batch = []
tensor = []
for line in src:
line = line.strip()
if len(line) == 0:
continue
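            # normalize full-width and rare characters so BERT tokenization
            # stays aligned with the whitespace-split source words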
batch.append(CharTable.convert(line).replace('—', '-')
.replace('‘', '\'')
.replace('…', '.')
.replace('坜', '壢')
.replace('唛', '麦')
.replace('ㄅㄆㄇㄈ', '呀呀')
.replace('’', '\''))
if len(batch) and len(batch) % 100 == 0:
tensor.extend(embed_fun(batch))
total += len(batch)
print(total)
batch = []
if len(batch):
tensor.extend(embed_fun(batch))
total += len(batch)
print(total)
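    # the pickle holds a list with one (n_words, hidden_dim) array per sentence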
with open(output, 'wb') as f:
pickle.dump(tensor, f)
if __name__ == '__main__':
# generate_bert('data/SemEval-2016/news.test.sent.txt', 'data/SemEval-2016/news.test.bert', embed_fun=embed_sum)
# generate_bert('data/SemEval-2016/news.valid.sent.txt', 'data/SemEval-2016/news.valid.bert', embed_fun=embed_sum)
# generate_bert('data/SemEval-2016/news.train.sent.txt', 'data/SemEval-2016/news.train.bert', embed_fun=embed_sum)
#
# generate_bert('data/SemEval-2016/text.test.sent.txt', 'data/SemEval-2016/text.test.bert', embed_fun=embed_sum)
# generate_bert('data/SemEval-2016/text.valid.sent.txt', 'data/SemEval-2016/text.valid.bert', embed_fun=embed_sum)
# generate_bert('data/SemEval-2016/text.train.sent.txt', 'data/SemEval-2016/text.train.bert', embed_fun=embed_sum)
generate_bert('data/semeval15/cz.pas.dev.sent.txt', 'data/embedding/bert_base_sum/cz.pas.dev.bert',
embed_fun=embed_sum)
generate_bert('data/semeval15/cz.pas.train.sent.txt', 'data/embedding/bert_base_sum/cz.pas.train.bert',
embed_fun=embed_sum)
generate_bert('data/semeval15/cz.id.pas.sent.txt', 'data/embedding/bert_base_sum/cz.id.pas.bert',
embed_fun=embed_sum)
# generate_bert('data/ctb5.1-pos/dev.short.sent.txt', 'data/embedding/bert_base_sum/ctb.pos.dev.bert',
# embed_fun=embed_sum)
# generate_bert('data/ctb5.1-pos/test.short.sent.txt', 'data/embedding/bert_base_sum/ctb.pos.test.bert',
# embed_fun=embed_sum)
# generate_bert('data/ctb5.1-pos/train.short.sent.txt', 'data/embedding/bert_base_sum/ctb.pos.train.bert',
# embed_fun=embed_sum)
# generate_bert('data/msra/dev.short.sent.txt', 'data/embedding/bert_base_sum/msra.dev.bert',
# embed_fun=embed_sum)
# generate_bert('data/msra/test.short.sent.txt', 'data/embedding/bert_base_sum/msra.test.bert',
# embed_fun=embed_sum)
# generate_bert('data/msra/train.short.sent.txt', 'data/embedding/bert_base_sum/msra.train.bert',
# embed_fun=embed_sum)
# generate_bert('data/msra/test.auto.short.sent.txt', 'data/embedding/bert_base_sum/msra.test.auto.bert',
# embed_fun=embed_sum)
# generate_bert('data/msra/test.auto.short.sent.txt', 'data/embedding/bert_base_sum/msra.test.auto.bert',
# embed_fun=embed_sum)
# generate_bert('data/msra/dev.auto.short.sent.txt', 'data/embedding/bert_base_sum/msra.dev.auto.bert',
# embed_fun=embed_sum)
# generate_bert('data/msra/train.auto.short.sent.txt', 'data/embedding/bert_base_sum/msra.train.auto.bert',
# embed_fun=embed_sum)
# generate_bert('data/ctb5/dev.sent.txt', 'data/embedding/bert_base_sum/ctb.dev.bert',
# embed_fun=embed_sum)
# generate_bert('data/ctb5/test.sent.txt', 'data/embedding/bert_base_sum/ctb.test.bert',
# embed_fun=embed_sum)
# generate_bert('data/ctb5/train.sent.txt', 'data/embedding/bert_base_sum/ctb.train.bert',
# embed_fun=embed_sum)
|
normal
|
{
"blob_id": "38e167630519b73bffea4ff527bc7b7272a49f1a",
"index": 348,
"step-1": "<mask token>\n\n\ndef embed_last_token(text):\n result = bc.encode(text, show_tokens=True)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n valid = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n if buffer == words[tid]:\n valid.append(i)\n buffer = ''\n tid += 1\n if len(valid) != len(sent.split()) or tid != len(words):\n print(valid)\n print(sent.split())\n print(result[1])\n batch.append(tensor[valid, :])\n return batch\n\n\ndef embed_sum(text):\n result = bc.encode(text, show_tokens=True)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n token_tensor = []\n sent_tensor = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n token_tensor.append(tensor[i, :])\n if buffer == words[tid]:\n sent_tensor.append(np.stack(token_tensor).mean(axis=0))\n token_tensor = []\n buffer = ''\n tid += 1\n if tid != len(words) or len(sent_tensor) != len(words):\n print(sent.split())\n print(tokens)\n exit()\n batch.append(np.stack(sent_tensor))\n return batch\n\n\ndef generate_bert(path, output, embed_fun=embed_sum):\n print(output)\n total = 0\n with open(path) as src:\n batch = []\n tensor = []\n for line in src:\n line = line.strip()\n if len(line) == 0:\n continue\n batch.append(CharTable.convert(line).replace('—', '-').replace(\n '‘', \"'\").replace('…', '.').replace('坜', '壢').replace('唛',\n '麦').replace('ㄅㄆㄇㄈ', '呀呀').replace('’', \"'\"))\n if len(batch) and len(batch) % 100 == 0:\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n batch = []\n if len(batch):\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n with open(output, 'wb') as f:\n pickle.dump(tensor, f)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef embed_last_token(text):\n result = bc.encode(text, show_tokens=True)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n valid = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n if buffer == words[tid]:\n valid.append(i)\n buffer = ''\n tid += 1\n if len(valid) != len(sent.split()) or tid != len(words):\n print(valid)\n print(sent.split())\n print(result[1])\n batch.append(tensor[valid, :])\n return batch\n\n\ndef embed_sum(text):\n result = bc.encode(text, show_tokens=True)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n token_tensor = []\n sent_tensor = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n token_tensor.append(tensor[i, :])\n if buffer == words[tid]:\n sent_tensor.append(np.stack(token_tensor).mean(axis=0))\n token_tensor = []\n buffer = ''\n tid += 1\n if tid != len(words) or len(sent_tensor) != len(words):\n print(sent.split())\n print(tokens)\n exit()\n batch.append(np.stack(sent_tensor))\n return batch\n\n\ndef generate_bert(path, output, embed_fun=embed_sum):\n print(output)\n total = 0\n with open(path) as src:\n batch = []\n tensor = []\n for line in src:\n line = line.strip()\n if len(line) == 0:\n continue\n batch.append(CharTable.convert(line).replace('—', '-').replace(\n '‘', \"'\").replace('…', '.').replace('坜', '壢').replace('唛',\n '麦').replace('ㄅㄆㄇㄈ', '呀呀').replace('’', \"'\"))\n if len(batch) and len(batch) % 100 == 0:\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n batch = []\n if len(batch):\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n with open(output, 'wb') as f:\n pickle.dump(tensor, f)\n\n\nif __name__ == '__main__':\n generate_bert('data/semeval15/cz.pas.dev.sent.txt',\n 'data/embedding/bert_base_sum/cz.pas.dev.bert', embed_fun=embed_sum)\n generate_bert('data/semeval15/cz.pas.train.sent.txt',\n 'data/embedding/bert_base_sum/cz.pas.train.bert', embed_fun=embed_sum)\n generate_bert('data/semeval15/cz.id.pas.sent.txt',\n 'data/embedding/bert_base_sum/cz.id.pas.bert', embed_fun=embed_sum)\n",
"step-3": "<mask token>\nCharTable = JClass('com.hankcs.hanlp.dictionary.other.CharTable')\nbc = BertClient(ip='127.0.0.1')\n\n\ndef embed_last_token(text):\n result = bc.encode(text, show_tokens=True)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n valid = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n if buffer == words[tid]:\n valid.append(i)\n buffer = ''\n tid += 1\n if len(valid) != len(sent.split()) or tid != len(words):\n print(valid)\n print(sent.split())\n print(result[1])\n batch.append(tensor[valid, :])\n return batch\n\n\ndef embed_sum(text):\n result = bc.encode(text, show_tokens=True)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n token_tensor = []\n sent_tensor = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n token_tensor.append(tensor[i, :])\n if buffer == words[tid]:\n sent_tensor.append(np.stack(token_tensor).mean(axis=0))\n token_tensor = []\n buffer = ''\n tid += 1\n if tid != len(words) or len(sent_tensor) != len(words):\n print(sent.split())\n print(tokens)\n exit()\n batch.append(np.stack(sent_tensor))\n return batch\n\n\ndef generate_bert(path, output, embed_fun=embed_sum):\n print(output)\n total = 0\n with open(path) as src:\n batch = []\n tensor = []\n for line in src:\n line = line.strip()\n if len(line) == 0:\n continue\n batch.append(CharTable.convert(line).replace('—', '-').replace(\n '‘', \"'\").replace('…', '.').replace('坜', '壢').replace('唛',\n '麦').replace('ㄅㄆㄇㄈ', '呀呀').replace('’', \"'\"))\n if len(batch) and len(batch) % 100 == 0:\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n batch = []\n if len(batch):\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n with open(output, 'wb') as f:\n pickle.dump(tensor, f)\n\n\nif __name__ == '__main__':\n generate_bert('data/semeval15/cz.pas.dev.sent.txt',\n 'data/embedding/bert_base_sum/cz.pas.dev.bert', embed_fun=embed_sum)\n generate_bert('data/semeval15/cz.pas.train.sent.txt',\n 'data/embedding/bert_base_sum/cz.pas.train.bert', embed_fun=embed_sum)\n generate_bert('data/semeval15/cz.id.pas.sent.txt',\n 'data/embedding/bert_base_sum/cz.id.pas.bert', embed_fun=embed_sum)\n",
"step-4": "import pickle\nimport numpy as np\nfrom bert_serving.client import BertClient\nfrom pyhanlp import *\nCharTable = JClass('com.hankcs.hanlp.dictionary.other.CharTable')\nbc = BertClient(ip='127.0.0.1')\n\n\ndef embed_last_token(text):\n result = bc.encode(text, show_tokens=True)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n valid = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n if buffer == words[tid]:\n valid.append(i)\n buffer = ''\n tid += 1\n if len(valid) != len(sent.split()) or tid != len(words):\n print(valid)\n print(sent.split())\n print(result[1])\n batch.append(tensor[valid, :])\n return batch\n\n\ndef embed_sum(text):\n result = bc.encode(text, show_tokens=True)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n token_tensor = []\n sent_tensor = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n token_tensor.append(tensor[i, :])\n if buffer == words[tid]:\n sent_tensor.append(np.stack(token_tensor).mean(axis=0))\n token_tensor = []\n buffer = ''\n tid += 1\n if tid != len(words) or len(sent_tensor) != len(words):\n print(sent.split())\n print(tokens)\n exit()\n batch.append(np.stack(sent_tensor))\n return batch\n\n\ndef generate_bert(path, output, embed_fun=embed_sum):\n print(output)\n total = 0\n with open(path) as src:\n batch = []\n tensor = []\n for line in src:\n line = line.strip()\n if len(line) == 0:\n continue\n batch.append(CharTable.convert(line).replace('—', '-').replace(\n '‘', \"'\").replace('…', '.').replace('坜', '壢').replace('唛',\n '麦').replace('ㄅㄆㄇㄈ', '呀呀').replace('’', \"'\"))\n if len(batch) and len(batch) % 100 == 0:\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n batch = []\n if len(batch):\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n with open(output, 'wb') as f:\n pickle.dump(tensor, f)\n\n\nif __name__ == '__main__':\n generate_bert('data/semeval15/cz.pas.dev.sent.txt',\n 'data/embedding/bert_base_sum/cz.pas.dev.bert', embed_fun=embed_sum)\n generate_bert('data/semeval15/cz.pas.train.sent.txt',\n 'data/embedding/bert_base_sum/cz.pas.train.bert', embed_fun=embed_sum)\n generate_bert('data/semeval15/cz.id.pas.sent.txt',\n 'data/embedding/bert_base_sum/cz.id.pas.bert', embed_fun=embed_sum)\n",
"step-5": "# -*- coding:utf-8 -*-\n# Author: hankcs\n# Date: 2019-01-13 15:01\nimport pickle\n\nimport numpy as np\nfrom bert_serving.client import BertClient\nfrom pyhanlp import *\n\nCharTable = JClass('com.hankcs.hanlp.dictionary.other.CharTable')\n\n# bc = BertClient(ip='192.168.1.88') # ip address of the server\nbc = BertClient(ip='127.0.0.1') # ip address of the GPU machine\n\n\ndef embed_last_token(text):\n result = bc.encode(text, show_tokens=True)\n # print(result)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n valid = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n if buffer == words[tid]:\n valid.append(i)\n buffer = ''\n tid += 1\n # print(len(valid))\n # exit()\n if len(valid) != len(sent.split()) or tid != len(words):\n print(valid)\n print(sent.split())\n print(result[1])\n batch.append(tensor[valid, :])\n return batch\n\n\ndef embed_sum(text):\n result = bc.encode(text, show_tokens=True)\n # print(result)\n batch = []\n for sent, tensor, tokens in zip(text, result[0], result[1]):\n token_tensor = []\n sent_tensor = []\n tid = 0\n buffer = ''\n words = sent.lower().split()\n for i, t in enumerate(tokens):\n if t == '[CLS]' or t == '[SEP]':\n continue\n else:\n if t.startswith('##'):\n t = t[2:]\n elif t == '[UNK]':\n t = words[tid][len(buffer)]\n buffer += t\n token_tensor.append(tensor[i, :])\n if buffer == words[tid]:\n sent_tensor.append(np.stack(token_tensor).mean(axis=0))\n token_tensor = []\n buffer = ''\n tid += 1\n # print(len(valid))\n # exit()\n if tid != len(words) or len(sent_tensor) != len(words):\n print(sent.split())\n print(tokens)\n exit()\n batch.append(np.stack(sent_tensor))\n return batch\n\n\ndef generate_bert(path, output, embed_fun=embed_sum):\n print(output)\n total = 0\n with open(path) as src:\n batch = []\n tensor = []\n for line in src:\n line = line.strip()\n if len(line) == 0:\n continue\n batch.append(CharTable.convert(line).replace('—', '-')\n .replace('‘', '\\'')\n .replace('…', '.')\n .replace('坜', '壢')\n .replace('唛', '麦')\n .replace('ㄅㄆㄇㄈ', '呀呀')\n .replace('’', '\\''))\n if len(batch) and len(batch) % 100 == 0:\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n batch = []\n if len(batch):\n tensor.extend(embed_fun(batch))\n total += len(batch)\n print(total)\n with open(output, 'wb') as f:\n pickle.dump(tensor, f)\n\n\nif __name__ == '__main__':\n # generate_bert('data/SemEval-2016/news.test.sent.txt', 'data/SemEval-2016/news.test.bert', embed_fun=embed_sum)\n # generate_bert('data/SemEval-2016/news.valid.sent.txt', 'data/SemEval-2016/news.valid.bert', embed_fun=embed_sum)\n # generate_bert('data/SemEval-2016/news.train.sent.txt', 'data/SemEval-2016/news.train.bert', embed_fun=embed_sum)\n #\n # generate_bert('data/SemEval-2016/text.test.sent.txt', 'data/SemEval-2016/text.test.bert', embed_fun=embed_sum)\n # generate_bert('data/SemEval-2016/text.valid.sent.txt', 'data/SemEval-2016/text.valid.bert', embed_fun=embed_sum)\n # generate_bert('data/SemEval-2016/text.train.sent.txt', 'data/SemEval-2016/text.train.bert', embed_fun=embed_sum)\n\n generate_bert('data/semeval15/cz.pas.dev.sent.txt', 'data/embedding/bert_base_sum/cz.pas.dev.bert',\n embed_fun=embed_sum)\n generate_bert('data/semeval15/cz.pas.train.sent.txt', 'data/embedding/bert_base_sum/cz.pas.train.bert',\n embed_fun=embed_sum)\n 
generate_bert('data/semeval15/cz.id.pas.sent.txt', 'data/embedding/bert_base_sum/cz.id.pas.bert',\n embed_fun=embed_sum)\n\n # generate_bert('data/ctb5.1-pos/dev.short.sent.txt', 'data/embedding/bert_base_sum/ctb.pos.dev.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/ctb5.1-pos/test.short.sent.txt', 'data/embedding/bert_base_sum/ctb.pos.test.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/ctb5.1-pos/train.short.sent.txt', 'data/embedding/bert_base_sum/ctb.pos.train.bert',\n # embed_fun=embed_sum)\n\n # generate_bert('data/msra/dev.short.sent.txt', 'data/embedding/bert_base_sum/msra.dev.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/msra/test.short.sent.txt', 'data/embedding/bert_base_sum/msra.test.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/msra/train.short.sent.txt', 'data/embedding/bert_base_sum/msra.train.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/msra/test.auto.short.sent.txt', 'data/embedding/bert_base_sum/msra.test.auto.bert',\n # embed_fun=embed_sum)\n\n # generate_bert('data/msra/test.auto.short.sent.txt', 'data/embedding/bert_base_sum/msra.test.auto.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/msra/dev.auto.short.sent.txt', 'data/embedding/bert_base_sum/msra.dev.auto.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/msra/train.auto.short.sent.txt', 'data/embedding/bert_base_sum/msra.train.auto.bert',\n # embed_fun=embed_sum)\n\n # generate_bert('data/ctb5/dev.sent.txt', 'data/embedding/bert_base_sum/ctb.dev.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/ctb5/test.sent.txt', 'data/embedding/bert_base_sum/ctb.test.bert',\n # embed_fun=embed_sum)\n # generate_bert('data/ctb5/train.sent.txt', 'data/embedding/bert_base_sum/ctb.train.bert',\n # embed_fun=embed_sum)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from django.core.urlresolvers import reverse
from keptar import settings
import os, os.path
import Image  # legacy PIL-style import; on modern Pillow use: from PIL import Image
try:
from collections import OrderedDict
except ImportError:
from keptar.odict import OrderedDict
class AccessDenied(Exception):
pass
class FileNotFound(Exception):
pass
class NotDirectory(Exception):
pass
def enrich(filelist, relpath='', thumbnails=True):
"""A kep neveihez hozzateszi a szukseges adatokat"""
files = OrderedDict()
for f in filelist:
abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, relpath, f))
if os.path.isdir(abspath):
thumb = settings.KEPTAR_ICONS.get('dir', None)
url = reverse('keptar.views.listdir', args=[os.path.join(relpath, f)])
direct_url = None
type = 'dir'
else:
if thumbnails:
try:
thumb = get_thumbnail(abspath)
except:
thumb = None
else:
thumb = settings.KEPTAR_ICONS.get('file', None)
url = reverse('keptar.views.showfile', args=[os.path.join(relpath, f)])
direct_url = getattr(settings, 'KEPTAR_URL', '/media/')+relpath+f
type = 'file'
        # TODO: additional metadata
files[f] = {
'relpath': relpath,
'url': url,
'abspath': abspath,
'thumb': thumb,
'type': type,
'direct_url': direct_url,
}
return files
def get_parent(path):
"""A megadott elem szulokonyvtarat adja meg"""
# security check
parent = os.path.dirname(path)
try:
get_abspath(parent)
except:
parent = ''
return parent
def get_abspath(path):
"""AccessDenied exceptiont dob, ha valaki cselezni akar"""
abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))
    # is the target directory really inside the root? guards against /../ traversal
if not abspath.startswith(settings.KEPTAR_ROOT):
raise AccessDenied("%s < %s" % (abspath, settings.KEPTAR_ROOT))
return abspath
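# example: get_abspath('../etc') raises AccessDenied, since the resolved path
# falls outside settings.KEPTAR_ROOT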
def get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', False), thumbnails=True):
"""Visszaadja a ``path`` konyvtarban levo konyvtarak es fileok listajat.
A ``path`` a ``settings.KEPTAR_ROOT``-hoz relativ.
A konyvtarak es a fileok listajat ket kulon dict-ben adja vissza,
mindenfele extra parameterrel.
A ``settings.KEPTAR_EXTENSIONS``-nel allithatoak a tamogatott
kiterjesztesek.
"""
abspath = get_abspath(path)
if not os.path.isdir(abspath):
raise NotDirectory(abspath)
dirs = []
pictures = []
for fname in os.listdir(abspath):
file = os.path.join(abspath, fname)
if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):
dirs.append(fname)
if os.path.isfile(file):
            # is the extension supported
ext = file[file.rfind('.')+1:]
if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or not fname.startswith('.')):
pictures.append(fname)
dirs.sort()
pictures.sort()
return enrich(dirs+pictures, relpath=path)
def get_thumbnail(file, type='', regenerate=False):
"""Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz
tartozo thumbnailt.
A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre
van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.
Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.
"""
ext = file[file.rfind('.')+1:]
if not os.path.isfile(file) or ext.lower() not in settings.KEPTAR_EXTENSIONS:
raise FileNotFound(file)
basename = os.path.basename(file)
dirname = os.path.dirname(file)
thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'], basename)
if regenerate or not os.path.isfile(thumbname):
if not os.path.isdir(os.path.dirname(thumbname)):
os.mkdir(os.path.dirname(thumbname))
generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type]['size'])
thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(settings.KEPTAR_ROOT):]
return thumburl
def generate_thumbnail(file, thumbname, size):
image = Image.open(file)
image.thumbnail(size)
image.save(thumbname, image.format)
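# Example with hypothetical settings: with KEPTAR_THUMBS = {'': {'dir': '.thumbs',
# 'size': (160, 120)}}, get_thumbnail('/path/under/KEPTAR_ROOT/cat.jpg') creates
# .../.thumbs/cat.jpg on first access and returns its media URL.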
|
normal
|
{
"blob_id": "d9156c20e046f608563bc6779575e14cc60f4c25",
"index": 896,
"step-1": "<mask token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\n<mask token>\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied('%s < %s' % (abspath, settings.KEPTAR_ROOT))\n return abspath\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\n<mask token>\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied('%s < %s' % (abspath, settings.KEPTAR_ROOT))\n return abspath\n\n\n<mask token>\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n ext = file[file.rfind('.') + 1:]\n if not os.path.isfile(file) or ext.lower(\n ) not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'],\n basename)\n if regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type][\n 'size'])\n thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(\n settings.KEPTAR_ROOT):]\n return thumburl\n\n\n<mask token>\n",
"step-3": "<mask token>\ntry:\n from collections import OrderedDict\nexcept ImportError:\n from keptar.odict import OrderedDict\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n files = OrderedDict()\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT,\n relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(\n relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(\n relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/'\n ) + relpath + f\n type = 'file'\n files[f] = {'relpath': relpath, 'url': url, 'abspath': abspath,\n 'thumb': thumb, 'type': type, 'direct_url': direct_url}\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied('%s < %s' % (abspath, settings.KEPTAR_ROOT))\n return abspath\n\n\ndef get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', \n False), thumbnails=True):\n \"\"\"Visszaadja a ``path`` konyvtarban levo konyvtarak es fileok listajat.\n A ``path`` a ``settings.KEPTAR_ROOT``-hoz relativ.\n A konyvtarak es a fileok listajat ket kulon dict-ben adja vissza, \n mindenfele extra parameterrel.\n A ``settings.KEPTAR_EXTENSIONS``-nel allithatoak a tamogatott \n kiterjesztesek.\n \"\"\"\n abspath = get_abspath(path)\n if not os.path.isdir(abspath):\n raise NotDirectory(abspath)\n dirs = []\n pictures = []\n for fname in os.listdir(abspath):\n file = os.path.join(abspath, fname)\n if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):\n dirs.append(fname)\n if os.path.isfile(file):\n ext = file[file.rfind('.') + 1:]\n if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or\n not fname.startswith('.')):\n pictures.append(fname)\n dirs.sort()\n pictures.sort()\n return enrich(dirs + pictures, relpath=path)\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n ext = file[file.rfind('.') + 1:]\n if not os.path.isfile(file) or ext.lower(\n ) not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'],\n basename)\n if regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type][\n 'size'])\n thumburl = getattr(settings, 'KEPTAR_URL', 
'/media') + thumbname[len(\n settings.KEPTAR_ROOT):]\n return thumburl\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"step-4": "from django.core.urlresolvers import reverse\nfrom keptar import settings\nimport os, os.path\nimport Image\ntry:\n from collections import OrderedDict\nexcept ImportError:\n from keptar.odict import OrderedDict\n\n\nclass AccessDenied(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass NotDirectory(Exception):\n pass\n\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n files = OrderedDict()\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT,\n relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(\n relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(\n relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/'\n ) + relpath + f\n type = 'file'\n files[f] = {'relpath': relpath, 'url': url, 'abspath': abspath,\n 'thumb': thumb, 'type': type, 'direct_url': direct_url}\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n parent = os.path.dirname(path)\n try:\n get_abspath(parent)\n except:\n parent = ''\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied('%s < %s' % (abspath, settings.KEPTAR_ROOT))\n return abspath\n\n\ndef get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', \n False), thumbnails=True):\n \"\"\"Visszaadja a ``path`` konyvtarban levo konyvtarak es fileok listajat.\n A ``path`` a ``settings.KEPTAR_ROOT``-hoz relativ.\n A konyvtarak es a fileok listajat ket kulon dict-ben adja vissza, \n mindenfele extra parameterrel.\n A ``settings.KEPTAR_EXTENSIONS``-nel allithatoak a tamogatott \n kiterjesztesek.\n \"\"\"\n abspath = get_abspath(path)\n if not os.path.isdir(abspath):\n raise NotDirectory(abspath)\n dirs = []\n pictures = []\n for fname in os.listdir(abspath):\n file = os.path.join(abspath, fname)\n if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):\n dirs.append(fname)\n if os.path.isfile(file):\n ext = file[file.rfind('.') + 1:]\n if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or\n not fname.startswith('.')):\n pictures.append(fname)\n dirs.sort()\n pictures.sort()\n return enrich(dirs + pictures, relpath=path)\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n ext = file[file.rfind('.') + 1:]\n if not os.path.isfile(file) or ext.lower(\n ) not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'],\n basename)\n if regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, 
thumbname, settings.KEPTAR_THUMBS[type][\n 'size'])\n thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(\n settings.KEPTAR_ROOT):]\n return thumburl\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n",
"step-5": "from django.core.urlresolvers import reverse\nfrom keptar import settings\nimport os, os.path\nimport Image\ntry:\n from collections import OrderedDict\nexcept ImportError:\n from keptar.odict import OrderedDict\n\nclass AccessDenied(Exception):\n pass\n\nclass FileNotFound(Exception):\n pass\n\nclass NotDirectory(Exception):\n pass\n\ndef enrich(filelist, relpath='', thumbnails=True):\n \"\"\"A kep neveihez hozzateszi a szukseges adatokat\"\"\"\n\n files = OrderedDict()\n\n for f in filelist:\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, relpath, f))\n if os.path.isdir(abspath):\n thumb = settings.KEPTAR_ICONS.get('dir', None)\n url = reverse('keptar.views.listdir', args=[os.path.join(relpath, f)])\n direct_url = None\n type = 'dir'\n else:\n if thumbnails:\n try:\n thumb = get_thumbnail(abspath)\n except:\n thumb = None\n else:\n thumb = settings.KEPTAR_ICONS.get('file', None)\n url = reverse('keptar.views.showfile', args=[os.path.join(relpath, f)])\n direct_url = getattr(settings, 'KEPTAR_URL', '/media/')+relpath+f\n type = 'file'\n\n # TODO: egyeb adatok\n files[f] = {\n 'relpath': relpath,\n 'url': url,\n 'abspath': abspath,\n 'thumb': thumb,\n 'type': type,\n 'direct_url': direct_url,\n }\n\n return files\n\n\ndef get_parent(path):\n \"\"\"A megadott elem szulokonyvtarat adja meg\"\"\"\n\n # security check\n parent = os.path.dirname(path)\n\n try:\n get_abspath(parent)\n except:\n parent = ''\n\n return parent\n\n\ndef get_abspath(path):\n \"\"\"AccessDenied exceptiont dob, ha valaki cselezni akar\"\"\"\n\n abspath = os.path.abspath(os.path.join(settings.KEPTAR_ROOT, path))\n # vajon a celkonyvtar valoban a root-on belul talalhato? - /../... miatt\n if not abspath.startswith(settings.KEPTAR_ROOT):\n raise AccessDenied(\"%s < %s\" % (abspath, settings.KEPTAR_ROOT))\n \n return abspath\n\n\ndef get_filelist(path, show_hidden=getattr(settings, 'KEPTAR_SHOW_HIDDEN', False), thumbnails=True):\n \"\"\"Visszaadja a ``path`` konyvtarban levo konyvtarak es fileok listajat.\n A ``path`` a ``settings.KEPTAR_ROOT``-hoz relativ.\n A konyvtarak es a fileok listajat ket kulon dict-ben adja vissza, \n mindenfele extra parameterrel.\n A ``settings.KEPTAR_EXTENSIONS``-nel allithatoak a tamogatott \n kiterjesztesek.\n \"\"\"\n\n abspath = get_abspath(path)\n\n if not os.path.isdir(abspath):\n raise NotDirectory(abspath)\n\n dirs = []\n pictures = []\n\n for fname in os.listdir(abspath):\n file = os.path.join(abspath, fname)\n if os.path.isdir(file) and (show_hidden or not fname.startswith('.')):\n dirs.append(fname)\n if os.path.isfile(file):\n # a kiterjesztes tamogatott-e\n ext = file[file.rfind('.')+1:]\n if ext.lower() in settings.KEPTAR_EXTENSIONS and (show_hidden or not fname.startswith('.')):\n pictures.append(fname)\n\n dirs.sort()\n pictures.sort()\n\n return enrich(dirs+pictures, relpath=path)\n\n\ndef get_thumbnail(file, type='', regenerate=False):\n \"\"\"Visszaadja, illetve ha nem letezik, akkor legeneralja a ``file``-hoz\n tartozo thumbnailt.\n A ``type``-on keresztul mondhatjuk meg, hogy milyen tipusu thumbnailre\n van szuksegunk, a tipusok parametereit a ``settings.py``-ben allithatjuk.\n Ha a ``regenerate`` ``True``, akkor ujrageneralja a thumbnailt.\n \"\"\"\n\n ext = file[file.rfind('.')+1:]\n if not os.path.isfile(file) or ext.lower() not in settings.KEPTAR_EXTENSIONS:\n raise FileNotFound(file)\n \n basename = os.path.basename(file)\n dirname = os.path.dirname(file)\n thumbname = os.path.join(dirname, settings.KEPTAR_THUMBS[type]['dir'], basename)\n if 
regenerate or not os.path.isfile(thumbname):\n if not os.path.isdir(os.path.dirname(thumbname)):\n os.mkdir(os.path.dirname(thumbname))\n generate_thumbnail(file, thumbname, settings.KEPTAR_THUMBS[type]['size'])\n \n thumburl = getattr(settings, 'KEPTAR_URL', '/media') + thumbname[len(settings.KEPTAR_ROOT):]\n\n return thumburl\n\n\ndef generate_thumbnail(file, thumbname, size):\n image = Image.open(file)\n image.thumbnail(size)\n image.save(thumbname, image.format)\n\n",
"step-ids": [
5,
6,
10,
11,
12
]
}
|
[
5,
6,
10,
11,
12
] |
from djitellopy import Tello
import time
import threading
import pandas as pd
class DataTello:
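    """Collects Tello state telemetry during a timed flight and writes it to a CSV file."""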
def __init__(self):
        # Create the Tello control object
self.tello = Tello()
        # Lists where the data collected from the Tello is stored
self.__data = []
self.__array = []
        # Flight time in milliseconds
self.tempoVoo = 420000
        '''
        ___Naming convention for the table files___
        Where x is the table number and y the flight duration in seconds
        1. Window closed and door closed: x_tudoFechado_y.csv
        2. Window open and door open: x_janelaPortaAberta_y.csv
        3. Window and door open, with a fan blowing toward the drone: x_janelaPortaAbertaVentilador_y.csv
        '''
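        # Example: '2_tudoFechado_420' -> table 2, everything closed, 420-second flight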
        # File name following the convention above
self.nomeArquivo = '2_tudoFechado_420'
self.__df = pd.DataFrame(columns=['timestamp', 'pitch', 'roll',
'yaw', 'vgx', 'vgy', 'vgz',
'templ', 'temph', 'tof',
'height', 'battery', 'barometer',
'time', 'agx', 'agy', 'agz'])
        # NOTE: the threaded collector below is disabled; fly() gathers the data inline instead
        '''
self.__startCollector = False
self.__endProgram = False
threadCollector = threading.Thread(target=self.dataCollector, args=())
threadCollector.daemon = False
threadCollector.start()
def dataCollector(self):
while True:
if self.__startCollector:
self.__data.append(self.tello.get_states())
if self.__endProgram:
for item in self.__data:
                    timestamp = int(round(time.time() * 1000)) # Timestamp created when the rows are flushed, not when each sample arrived
                    self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3], item[5], item[7],
                                                     item[9], item[11], item[13], item[15], item[17], item[19],
                                                     item[21], item[23], item[25], item[27], item[29], item[31]] # Append the values as a new DataFrame row
self.__df.to_csv('{}.csv'.format(self.nomeArquivo))
break
'''
def fly(self):
        # Take off, sample the Tello state until tempoVoo milliseconds elapse, then land and write the CSV
self.tello.connect()
self.tello.takeoff()
timestampInicial = int(round(time.time() * 1000))
timestampFinal = timestampInicial
while ((timestampFinal - timestampInicial) < self.tempoVoo):
try:
                timestampFinal = int(round(time.time() * 1000)) # Timestamp for this iteration, taken as the state sample is received
self.__data.append(self.tello.get_states())
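                # Resend 'command' so the SDK connection stays alive (skipped on every 20th sample)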
                if len(self.__data) % 20 != 0:
                    self.tello.send_command_without_return('command')
except KeyboardInterrupt:
                print('\n . . .\n')
self.tello.end()
break
self.tello.land()
self.tello.end()
for item in self.__data:
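            # get_states() yields alternating key/value tokens; odd indices 1, 3, ..., 31 appear to hold the 16 state values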
            timestamp = int(round(time.time() * 1000)) # Timestamp created when the row is written, after landing (not when the sample was received)
            self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3], item[5], item[7],
                                             item[9], item[11], item[13], item[15], item[17], item[19],
                                             item[21], item[23], item[25], item[27], item[29], item[31]] # Append the values as a new DataFrame row
self.__df.to_csv('{}.csv'.format(self.nomeArquivo))
def stop(self):
self.tello.end()
def run(self):
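        """Short test flight: hovers for roughly 10 seconds of flight time while sampling attitude and state."""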
self.tello.connect()
self.tello.takeoff()
tempo1 = self.tello.get_flight_time()
tempo1 = tempo1[0:(len(tempo1)-1)]
#time.sleep(3)
bateria = self.tello.get_battery()
tempo2 = self.tello.get_flight_time()
tempo2 = tempo2[0:(len(tempo2)-1)]
        print('Battery level: {}'.format(str(bateria)))
        print('Start time was {}'.format(str(tempo1)))
        print('End time was {}'.format(str(tempo2)))
while ((int(tempo2) - int(tempo1)) < 10):
            bateria = self.tello.get_battery()  # refresh the battery reading each iteration
            print('Battery level: ' + str(bateria))
self.__array.append(self.tello.get_attitude())
self.__data.append(self.tello.get_states())
tempo2 = self.tello.get_flight_time()
tempo2 = tempo2[0:(len(tempo2)-1)]
self.tello.land()
self.tello.end()
print(self.__array)
print(self.__data)
def main():
dataTello = DataTello()
dataTello.fly()
#dataTello.stop()
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "9e751bbddabbec7c5e997578d99ef1b8c35efe06",
"index": 8108,
"step-1": "<mask token>\n\n\nclass DataTello:\n\n def __init__(self):\n self.tello = Tello()\n self.__data = []\n self.__array = []\n self.tempoVoo = 420000\n \"\"\"\n ___Padrão para nome dos arquivos das tabelas___\n Onde x é o nº da tabela e y a quantidade de tempo em segundos do voo\n \n 1. Para a janela fechada e porta fechada: x_tudoFechado_y.csv\n 2. Para a janela aberta e porta aberta: x_janelaPortaAberta_y.csv\n 3. Para a janela e porta aberta, com ventilador ligado na direção do drone: x_janelaPortaAbertaVentilador_y.csv\n \"\"\"\n self.nomeArquivo = '2_tudoFechado_420'\n self.__df = pd.DataFrame(columns=['timestamp', 'pitch', 'roll',\n 'yaw', 'vgx', 'vgy', 'vgz', 'templ', 'temph', 'tof', 'height',\n 'battery', 'barometer', 'time', 'agx', 'agy', 'agz'])\n \"\"\"\n self.__startCollector = False\n self.__endProgram = False\n threadCollector = threading.Thread(target=self.dataCollector, args=())\n threadCollector.daemon = False\n threadCollector.start()\n\n def dataCollector(self):\n while True:\n if self.__startCollector:\n self.__data.append(self.tello.get_states())\n\n if self.__endProgram:\n for item in self.__data:\n timestamp = int(round(time.time() * 1000)) # Cria timestamp no momento que recebe os dados\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3], item[5], item[7], \n item[9], item[11], item[13], item[15], item[17], item[19], \n item[21], item[23], item[25], item[27], item[29], item[31]] # Adiciona os novos valores em uma nova linha do DataFrame\n\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n break \n \"\"\"\n\n def fly(self):\n self.tello.connect()\n self.tello.takeoff()\n timestampInicial = int(round(time.time() * 1000))\n timestampFinal = timestampInicial\n while timestampFinal - timestampInicial < self.tempoVoo:\n try:\n timestampFinal = int(round(time.time() * 1000))\n self.__data.append(self.tello.get_states())\n if not len(self.__data) % 20 == 0:\n self.tello.send_command_without_return('command')\n except KeyboardInterrupt:\n print('\\n . . .\\n')\n self.tello.end()\n break\n self.tello.land()\n self.tello.end()\n for item in self.__data:\n timestamp = int(round(time.time() * 1000))\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3],\n item[5], item[7], item[9], item[11], item[13], item[15],\n item[17], item[19], item[21], item[23], item[25], item[27],\n item[29], item[31]]\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n def stop(self):\n self.tello.end()\n\n def run(self):\n self.tello.connect()\n self.tello.takeoff()\n tempo1 = self.tello.get_flight_time()\n tempo1 = tempo1[0:len(tempo1) - 1]\n bateria = self.tello.get_battery()\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:len(tempo2) - 1]\n print('Nivel da bateria é: {}'.format(str(bateria)))\n print('Tempo de início foi {}'.format(str(tempo1)))\n print('Tempo de término foi de {}'.format(str(tempo2)))\n while int(tempo2) - int(tempo1) < 10:\n print('Nivel da bateria é: ' + str(bateria))\n self.__array.append(self.tello.get_attitude())\n self.__data.append(self.tello.get_states())\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:len(tempo2) - 1]\n self.tello.land()\n self.tello.end()\n print(self.__array)\n print(self.__data)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass DataTello:\n\n def __init__(self):\n self.tello = Tello()\n self.__data = []\n self.__array = []\n self.tempoVoo = 420000\n \"\"\"\n ___Padrão para nome dos arquivos das tabelas___\n Onde x é o nº da tabela e y a quantidade de tempo em segundos do voo\n \n 1. Para a janela fechada e porta fechada: x_tudoFechado_y.csv\n 2. Para a janela aberta e porta aberta: x_janelaPortaAberta_y.csv\n 3. Para a janela e porta aberta, com ventilador ligado na direção do drone: x_janelaPortaAbertaVentilador_y.csv\n \"\"\"\n self.nomeArquivo = '2_tudoFechado_420'\n self.__df = pd.DataFrame(columns=['timestamp', 'pitch', 'roll',\n 'yaw', 'vgx', 'vgy', 'vgz', 'templ', 'temph', 'tof', 'height',\n 'battery', 'barometer', 'time', 'agx', 'agy', 'agz'])\n \"\"\"\n self.__startCollector = False\n self.__endProgram = False\n threadCollector = threading.Thread(target=self.dataCollector, args=())\n threadCollector.daemon = False\n threadCollector.start()\n\n def dataCollector(self):\n while True:\n if self.__startCollector:\n self.__data.append(self.tello.get_states())\n\n if self.__endProgram:\n for item in self.__data:\n timestamp = int(round(time.time() * 1000)) # Cria timestamp no momento que recebe os dados\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3], item[5], item[7], \n item[9], item[11], item[13], item[15], item[17], item[19], \n item[21], item[23], item[25], item[27], item[29], item[31]] # Adiciona os novos valores em uma nova linha do DataFrame\n\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n break \n \"\"\"\n\n def fly(self):\n self.tello.connect()\n self.tello.takeoff()\n timestampInicial = int(round(time.time() * 1000))\n timestampFinal = timestampInicial\n while timestampFinal - timestampInicial < self.tempoVoo:\n try:\n timestampFinal = int(round(time.time() * 1000))\n self.__data.append(self.tello.get_states())\n if not len(self.__data) % 20 == 0:\n self.tello.send_command_without_return('command')\n except KeyboardInterrupt:\n print('\\n . . .\\n')\n self.tello.end()\n break\n self.tello.land()\n self.tello.end()\n for item in self.__data:\n timestamp = int(round(time.time() * 1000))\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3],\n item[5], item[7], item[9], item[11], item[13], item[15],\n item[17], item[19], item[21], item[23], item[25], item[27],\n item[29], item[31]]\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n def stop(self):\n self.tello.end()\n\n def run(self):\n self.tello.connect()\n self.tello.takeoff()\n tempo1 = self.tello.get_flight_time()\n tempo1 = tempo1[0:len(tempo1) - 1]\n bateria = self.tello.get_battery()\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:len(tempo2) - 1]\n print('Nivel da bateria é: {}'.format(str(bateria)))\n print('Tempo de início foi {}'.format(str(tempo1)))\n print('Tempo de término foi de {}'.format(str(tempo2)))\n while int(tempo2) - int(tempo1) < 10:\n print('Nivel da bateria é: ' + str(bateria))\n self.__array.append(self.tello.get_attitude())\n self.__data.append(self.tello.get_states())\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:len(tempo2) - 1]\n self.tello.land()\n self.tello.end()\n print(self.__array)\n print(self.__data)\n\n\ndef main():\n dataTello = DataTello()\n dataTello.fly()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass DataTello:\n\n def __init__(self):\n self.tello = Tello()\n self.__data = []\n self.__array = []\n self.tempoVoo = 420000\n \"\"\"\n ___Padrão para nome dos arquivos das tabelas___\n Onde x é o nº da tabela e y a quantidade de tempo em segundos do voo\n \n 1. Para a janela fechada e porta fechada: x_tudoFechado_y.csv\n 2. Para a janela aberta e porta aberta: x_janelaPortaAberta_y.csv\n 3. Para a janela e porta aberta, com ventilador ligado na direção do drone: x_janelaPortaAbertaVentilador_y.csv\n \"\"\"\n self.nomeArquivo = '2_tudoFechado_420'\n self.__df = pd.DataFrame(columns=['timestamp', 'pitch', 'roll',\n 'yaw', 'vgx', 'vgy', 'vgz', 'templ', 'temph', 'tof', 'height',\n 'battery', 'barometer', 'time', 'agx', 'agy', 'agz'])\n \"\"\"\n self.__startCollector = False\n self.__endProgram = False\n threadCollector = threading.Thread(target=self.dataCollector, args=())\n threadCollector.daemon = False\n threadCollector.start()\n\n def dataCollector(self):\n while True:\n if self.__startCollector:\n self.__data.append(self.tello.get_states())\n\n if self.__endProgram:\n for item in self.__data:\n timestamp = int(round(time.time() * 1000)) # Cria timestamp no momento que recebe os dados\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3], item[5], item[7], \n item[9], item[11], item[13], item[15], item[17], item[19], \n item[21], item[23], item[25], item[27], item[29], item[31]] # Adiciona os novos valores em uma nova linha do DataFrame\n\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n break \n \"\"\"\n\n def fly(self):\n self.tello.connect()\n self.tello.takeoff()\n timestampInicial = int(round(time.time() * 1000))\n timestampFinal = timestampInicial\n while timestampFinal - timestampInicial < self.tempoVoo:\n try:\n timestampFinal = int(round(time.time() * 1000))\n self.__data.append(self.tello.get_states())\n if not len(self.__data) % 20 == 0:\n self.tello.send_command_without_return('command')\n except KeyboardInterrupt:\n print('\\n . . .\\n')\n self.tello.end()\n break\n self.tello.land()\n self.tello.end()\n for item in self.__data:\n timestamp = int(round(time.time() * 1000))\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3],\n item[5], item[7], item[9], item[11], item[13], item[15],\n item[17], item[19], item[21], item[23], item[25], item[27],\n item[29], item[31]]\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n def stop(self):\n self.tello.end()\n\n def run(self):\n self.tello.connect()\n self.tello.takeoff()\n tempo1 = self.tello.get_flight_time()\n tempo1 = tempo1[0:len(tempo1) - 1]\n bateria = self.tello.get_battery()\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:len(tempo2) - 1]\n print('Nivel da bateria é: {}'.format(str(bateria)))\n print('Tempo de início foi {}'.format(str(tempo1)))\n print('Tempo de término foi de {}'.format(str(tempo2)))\n while int(tempo2) - int(tempo1) < 10:\n print('Nivel da bateria é: ' + str(bateria))\n self.__array.append(self.tello.get_attitude())\n self.__data.append(self.tello.get_states())\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:len(tempo2) - 1]\n self.tello.land()\n self.tello.end()\n print(self.__array)\n print(self.__data)\n\n\ndef main():\n dataTello = DataTello()\n dataTello.fly()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from djitellopy import Tello\nimport time\nimport threading\nimport pandas as pd\n\n\nclass DataTello:\n\n def __init__(self):\n self.tello = Tello()\n self.__data = []\n self.__array = []\n self.tempoVoo = 420000\n \"\"\"\n ___Padrão para nome dos arquivos das tabelas___\n Onde x é o nº da tabela e y a quantidade de tempo em segundos do voo\n \n 1. Para a janela fechada e porta fechada: x_tudoFechado_y.csv\n 2. Para a janela aberta e porta aberta: x_janelaPortaAberta_y.csv\n 3. Para a janela e porta aberta, com ventilador ligado na direção do drone: x_janelaPortaAbertaVentilador_y.csv\n \"\"\"\n self.nomeArquivo = '2_tudoFechado_420'\n self.__df = pd.DataFrame(columns=['timestamp', 'pitch', 'roll',\n 'yaw', 'vgx', 'vgy', 'vgz', 'templ', 'temph', 'tof', 'height',\n 'battery', 'barometer', 'time', 'agx', 'agy', 'agz'])\n \"\"\"\n self.__startCollector = False\n self.__endProgram = False\n threadCollector = threading.Thread(target=self.dataCollector, args=())\n threadCollector.daemon = False\n threadCollector.start()\n\n def dataCollector(self):\n while True:\n if self.__startCollector:\n self.__data.append(self.tello.get_states())\n\n if self.__endProgram:\n for item in self.__data:\n timestamp = int(round(time.time() * 1000)) # Cria timestamp no momento que recebe os dados\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3], item[5], item[7], \n item[9], item[11], item[13], item[15], item[17], item[19], \n item[21], item[23], item[25], item[27], item[29], item[31]] # Adiciona os novos valores em uma nova linha do DataFrame\n\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n break \n \"\"\"\n\n def fly(self):\n self.tello.connect()\n self.tello.takeoff()\n timestampInicial = int(round(time.time() * 1000))\n timestampFinal = timestampInicial\n while timestampFinal - timestampInicial < self.tempoVoo:\n try:\n timestampFinal = int(round(time.time() * 1000))\n self.__data.append(self.tello.get_states())\n if not len(self.__data) % 20 == 0:\n self.tello.send_command_without_return('command')\n except KeyboardInterrupt:\n print('\\n . . .\\n')\n self.tello.end()\n break\n self.tello.land()\n self.tello.end()\n for item in self.__data:\n timestamp = int(round(time.time() * 1000))\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3],\n item[5], item[7], item[9], item[11], item[13], item[15],\n item[17], item[19], item[21], item[23], item[25], item[27],\n item[29], item[31]]\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n def stop(self):\n self.tello.end()\n\n def run(self):\n self.tello.connect()\n self.tello.takeoff()\n tempo1 = self.tello.get_flight_time()\n tempo1 = tempo1[0:len(tempo1) - 1]\n bateria = self.tello.get_battery()\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:len(tempo2) - 1]\n print('Nivel da bateria é: {}'.format(str(bateria)))\n print('Tempo de início foi {}'.format(str(tempo1)))\n print('Tempo de término foi de {}'.format(str(tempo2)))\n while int(tempo2) - int(tempo1) < 10:\n print('Nivel da bateria é: ' + str(bateria))\n self.__array.append(self.tello.get_attitude())\n self.__data.append(self.tello.get_states())\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:len(tempo2) - 1]\n self.tello.land()\n self.tello.end()\n print(self.__array)\n print(self.__data)\n\n\ndef main():\n dataTello = DataTello()\n dataTello.fly()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from djitellopy import Tello\nimport time\nimport threading\nimport pandas as pd\n\nclass DataTello:\n \n def __init__(self):\n # Inicia objeto de controle do Tello\n self.tello = Tello()\n \n # Array onde será armazenado a lista de dados coletado pelo Tello\n self.__data = []\n self.__array = []\n\n # Tempo de voo em mili segundos\n self.tempoVoo = 420000\n\n '''\n ___Padrão para nome dos arquivos das tabelas___\n Onde x é o nº da tabela e y a quantidade de tempo em segundos do voo\n \n 1. Para a janela fechada e porta fechada: x_tudoFechado_y.csv\n 2. Para a janela aberta e porta aberta: x_janelaPortaAberta_y.csv\n 3. Para a janela e porta aberta, com ventilador ligado na direção do drone: x_janelaPortaAbertaVentilador_y.csv\n '''\n\n # Padrão de nome\n self.nomeArquivo = '2_tudoFechado_420'\n self.__df = pd.DataFrame(columns=['timestamp', 'pitch', 'roll', \n 'yaw', 'vgx', 'vgy', 'vgz', \n 'templ', 'temph', 'tof', \n 'height', 'battery', 'barometer', \n 'time', 'agx', 'agy', 'agz'])\n '''\n self.__startCollector = False\n self.__endProgram = False\n threadCollector = threading.Thread(target=self.dataCollector, args=())\n threadCollector.daemon = False\n threadCollector.start()\n\n def dataCollector(self):\n while True:\n if self.__startCollector:\n self.__data.append(self.tello.get_states())\n\n if self.__endProgram:\n for item in self.__data:\n timestamp = int(round(time.time() * 1000)) # Cria timestamp no momento que recebe os dados\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3], item[5], item[7], \n item[9], item[11], item[13], item[15], item[17], item[19], \n item[21], item[23], item[25], item[27], item[29], item[31]] # Adiciona os novos valores em uma nova linha do DataFrame\n\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n break \n ''' \n\n def fly(self):\n #\n self.tello.connect()\n self.tello.takeoff()\n timestampInicial = int(round(time.time() * 1000))\n timestampFinal = timestampInicial\n\n while ((timestampFinal - timestampInicial) < self.tempoVoo):\n try:\n timestampFinal = int(round(time.time() * 1000)) # Cria timestamp no momento que recebe os dados\n self.__data.append(self.tello.get_states())\n if (not len(self.__data) % 20 == 0):\n self.tello.send_command_without_return('command')\n except KeyboardInterrupt:\n print ('\\n . . 
.\\n')\n self.tello.end() \n break\n\n self.tello.land()\n self.tello.end()\n\n for item in self.__data:\n timestamp = int(round(time.time() * 1000)) # Cria timestamp no momento que recebe os dados\n self.__df.loc[len(self.__df)] = [timestamp, item[1], item[3], item[5], item[7], \n item[9], item[11], item[13], item[15], item[17], item[19], \n item[21], item[23], item[25], item[27], item[29], item[31]] # Adiciona os novos valores em uma nova linha do DataFrame\n\n self.__df.to_csv('{}.csv'.format(self.nomeArquivo))\n\n def stop(self):\n self.tello.end()\n\n \n\n def run(self):\n self.tello.connect()\n self.tello.takeoff()\n tempo1 = self.tello.get_flight_time()\n tempo1 = tempo1[0:(len(tempo1)-1)]\n #time.sleep(3)\n bateria = self.tello.get_battery()\n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:(len(tempo2)-1)]\n \n print('Nivel da bateria é: {}'.format(str(bateria)))\n \n print('Tempo de início foi {}'.format(str(tempo1)))\n print('Tempo de término foi de {}'.format(str(tempo2)))\n \n while ((int(tempo2) - int(tempo1)) < 10):\n print('Nivel da bateria é: ' + str(bateria))\n self.__array.append(self.tello.get_attitude())\n self.__data.append(self.tello.get_states()) \n tempo2 = self.tello.get_flight_time()\n tempo2 = tempo2[0:(len(tempo2)-1)]\n\n self.tello.land()\n self.tello.end()\n print(self.__array)\n print(self.__data)\n\n\ndef main():\n dataTello = DataTello()\n dataTello.fly()\n #dataTello.stop()\n\nif __name__ == \"__main__\":\n main() ",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
import chainer
import chainer.functions as F
import numpy as np
import argparse
from model import Generator, Discriminator
from chainer import cuda, serializers
from pathlib import Path
from utils import set_optimizer
from dataset import DatasetLoader
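
# GPU setup below assumes a CUDA device and cupy are available (device 0 is used)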
xp = cuda.cupy
cuda.get_device(0).use()
class CycleGANVC2LossCalculator:
def __init__(self):
pass
@staticmethod
def dis_loss(discriminator, y, t):
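        # Non-saturating softplus GAN loss: real samples t are scored high, generated samples y low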
y_dis = discriminator(y)
t_dis = discriminator(t)
return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))
@staticmethod
def gen_loss(discriminator, y):
y_dis = discriminator(y)
return F.mean(F.softplus(-y_dis))
@staticmethod
def cycle_loss(y, t):
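        # L1 cycle-consistency term; the 10.0 weight matches lambda_cyc in CycleGAN-VC2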
return 10.0 * F.mean_absolute_error(y, t)
@staticmethod
def identity_loss(y, t):
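        # L1 identity-mapping term; the 5.0 weight matches lambda_id in CycleGAN-VC2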
return 5.0 * F.mean_absolute_error(y, t)
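
# Quick sanity sketch of the weighting (assumes float32 numpy inputs):
#   CycleGANVC2LossCalculator.cycle_loss(np.zeros((2, 2), np.float32),
#                                        np.ones((2, 2), np.float32))
#   evaluates to 10.0, since the mean absolute error is 1.0 and the weight is 10.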
def train(epochs,
iterations,
batchsize,
modeldir,
extension,
time_width,
mel_bins,
sampling_rate,
g_learning_rate,
d_learning_rate,
beta1,
beta2,
identity_epoch,
second_step,
src_path,
tgt_path):
    # Dataset definition
dataset = DatasetLoader(src_path,
tgt_path,
extension,
time_width,
mel_bins,
sampling_rate)
print(dataset)
# Model & Optimizer definition
generator_xy = Generator()
generator_xy.to_gpu()
gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)
generator_yx = Generator()
generator_yx.to_gpu()
gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)
discriminator_y = Discriminator()
discriminator_y.to_gpu()
dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)
discriminator_x = Discriminator()
discriminator_x.to_gpu()
dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)
discriminator_xyx = Discriminator()
discriminator_xyx.to_gpu()
dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1, beta2)
discriminator_yxy = Discriminator()
discriminator_yxy.to_gpu()
dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1, beta2)
# Loss function definition
lossfunc = CycleGANVC2LossCalculator()
for epoch in range(epochs):
sum_dis_loss = 0
sum_gen_loss = 0
for batch in range(0, iterations, batchsize):
x, y = dataset.train(batchsize)
xy = generator_xy(x)
xyx = generator_yx(xy)
yx = generator_yx(y)
yxy = generator_xy(yx)
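            # Detach the generated samples so the discriminator update does not backprop into the generators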
xy.unchain_backward()
xyx.unchain_backward()
yx.unchain_backward()
yxy.unchain_backward()
dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)
dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)
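            # Second adversarial step of CycleGAN-VC2: extra discriminators score the cycle-reconstructed samples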
if second_step:
dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)
dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)
discriminator_xyx.cleargrads()
discriminator_yxy.cleargrads()
discriminator_x.cleargrads()
discriminator_y.cleargrads()
dis_loss.backward()
dis_x_opt.update()
dis_y_opt.update()
if second_step:
dis_xyx_opt.update()
dis_yxy_opt.update()
dis_loss.unchain_backward()
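            # Recompute the forward passes with fresh computation graphs for the generator update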
xy = generator_xy(x)
xyx = generator_yx(xy)
id_y = generator_xy(y)
yx = generator_yx(y)
yxy = generator_xy(yx)
id_x = generator_yx(x)
gen_loss = lossfunc.gen_loss(discriminator_y, xy)
gen_loss += lossfunc.gen_loss(discriminator_x, yx)
if second_step:
gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)
gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)
            gen_loss += lossfunc.cycle_loss(x, xyx)
            gen_loss += lossfunc.cycle_loss(y, yxy)  # cycle-consistency for y uses its own reconstruction yxy
if epoch < identity_epoch:
gen_loss += lossfunc.identity_loss(id_y, y)
gen_loss += lossfunc.identity_loss(id_x, x)
generator_xy.cleargrads()
generator_yx.cleargrads()
gen_loss.backward()
gen_xy_opt.update()
gen_yx_opt.update()
gen_loss.unchain_backward()
sum_dis_loss += dis_loss.data
sum_gen_loss += gen_loss.data
if batch == 0:
serializers.save_npz(f"{modeldir}/generator_xy_{epoch}.model", generator_xy)
serializers.save_npz(f"{modeldir}/generator_yx_{epoch}.model", generator_yx)
print('epoch : {}'.format(epoch))
print('Generator loss : {}'.format(sum_gen_loss / iterations))
print('Discriminator loss : {}'.format(sum_dis_loss / iterations))
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="CycleGANVC2")
parser.add_argument('--e', type=int, default=50, help="the number of epochs")
parser.add_argument('--i', type=int, default=1000, help="the number of iterations")
parser.add_argument('--b', type=int, default=16, help="batch size")
parser.add_argument('--modeldir', type=Path, default="modeldir", help="model output directory")
parser.add_argument('--ext', type=str, default=".npy", help="extension of training data")
parser.add_argument('--tw', type=int, default=128, help="time width of spectral envelope")
parser.add_argument('--mb', type=int, default=36, help="mel bins of spectral envelope")
parser.add_argument('--sr', type=int, default=22050, help="sampling rate of audio data")
parser.add_argument('--glr', type=float, default=0.0002, help="learning rate of Adam on generator")
parser.add_argument('--dlr', type=float, default=0.0001, help="learning rate of Adam on discriminator")
parser.add_argument('--b1', type=float, default=0.5, help="beta1 of Adam")
parser.add_argument('--b2', type=float, default=0.999, help="beta2 of Adam")
    parser.add_argument('--ie', type=int, default=20, help="number of epochs during which the identity mapping loss is applied")
    parser.add_argument('--second', action="store_true", help="enable the second step of adversarial loss")
parser.add_argument('--src', type=Path, help="path which includes source data")
parser.add_argument('--tgt', type=Path, help="path which includes target data")
args = parser.parse_args()
modeldir = args.modeldir
modeldir.mkdir(exist_ok=True)
train(args.e, args.i, args.b, modeldir, args.ext, args.tw, args.mb, args.sr,
args.glr, args.dlr, args.b1, args.b2, args.ie, args.second,
args.src, args.tgt)
|
normal
|
{
"blob_id": "32105a245f6945dbe8749140d811b20d634289bc",
"index": 2481,
"step-1": "<mask token>\n\n\nclass CycleGANVC2LossCalculator:\n\n def __init__(self):\n pass\n <mask token>\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n return F.mean(F.softplus(-y_dis))\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CycleGANVC2LossCalculator:\n\n def __init__(self):\n pass\n\n @staticmethod\n def dis_loss(discriminator, y, t):\n y_dis = discriminator(y)\n t_dis = discriminator(t)\n return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n return F.mean(F.softplus(-y_dis))\n\n @staticmethod\n def cycle_loss(y, t):\n return 10.0 * F.mean_absolute_error(y, t)\n\n @staticmethod\n def identity_loss(y, t):\n return 5.0 * F.mean_absolute_error(y, t)\n\n\ndef train(epochs, iterations, batchsize, modeldir, extension, time_width,\n mel_bins, sampling_rate, g_learning_rate, d_learning_rate, beta1, beta2,\n identity_epoch, second_step, src_path, tgt_path):\n dataset = DatasetLoader(src_path, tgt_path, extension, time_width,\n mel_bins, sampling_rate)\n print(dataset)\n generator_xy = Generator()\n generator_xy.to_gpu()\n gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)\n generator_yx = Generator()\n generator_yx.to_gpu()\n gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)\n discriminator_y = Discriminator()\n discriminator_y.to_gpu()\n dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)\n discriminator_x = Discriminator()\n discriminator_x.to_gpu()\n dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)\n discriminator_xyx = Discriminator()\n discriminator_xyx.to_gpu()\n dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1,\n beta2)\n discriminator_yxy = Discriminator()\n discriminator_yxy.to_gpu()\n dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1,\n beta2)\n lossfunc = CycleGANVC2LossCalculator()\n for epoch in range(epochs):\n sum_dis_loss = 0\n sum_gen_loss = 0\n for batch in range(0, iterations, batchsize):\n x, y = dataset.train(batchsize)\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n xy.unchain_backward()\n xyx.unchain_backward()\n yx.unchain_backward()\n yxy.unchain_backward()\n dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)\n dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)\n if second_step:\n dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)\n dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)\n discriminator_xyx.cleargrads()\n discriminator_yxy.cleargrads()\n discriminator_x.cleargrads()\n discriminator_y.cleargrads()\n dis_loss.backward()\n dis_x_opt.update()\n dis_y_opt.update()\n if second_step:\n dis_xyx_opt.update()\n dis_yxy_opt.update()\n dis_loss.unchain_backward()\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n id_y = generator_xy(y)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n id_x = generator_yx(x)\n gen_loss = lossfunc.gen_loss(discriminator_y, xy)\n gen_loss += lossfunc.gen_loss(discriminator_x, yx)\n if second_step:\n gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)\n gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)\n gen_loss += lossfunc.cycle_loss(x, xyx)\n gen_loss += lossfunc.cycle_loss(y, xyx)\n if epoch < identity_epoch:\n gen_loss += lossfunc.identity_loss(id_y, y)\n gen_loss += lossfunc.identity_loss(id_x, x)\n generator_xy.cleargrads()\n generator_yx.cleargrads()\n gen_loss.backward()\n gen_xy_opt.update()\n gen_yx_opt.update()\n gen_loss.unchain_backward()\n sum_dis_loss += dis_loss.data\n sum_gen_loss += gen_loss.data\n if batch == 0:\n serializers.save_npz(f'{modeldir}/generator_xy_{epoch}.model',\n generator_xy)\n 
serializers.save_npz(f'{modeldir}/generator_yx_{epoch}.model',\n generator_yx)\n print('epoch : {}'.format(epoch))\n print('Generator loss : {}'.format(sum_gen_loss / iterations))\n print('Discriminator loss : {}'.format(sum_dis_loss / iterations))\n\n\n<mask token>\n",
"step-3": "<mask token>\ncuda.get_device(0).use()\n\n\nclass CycleGANVC2LossCalculator:\n\n def __init__(self):\n pass\n\n @staticmethod\n def dis_loss(discriminator, y, t):\n y_dis = discriminator(y)\n t_dis = discriminator(t)\n return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n return F.mean(F.softplus(-y_dis))\n\n @staticmethod\n def cycle_loss(y, t):\n return 10.0 * F.mean_absolute_error(y, t)\n\n @staticmethod\n def identity_loss(y, t):\n return 5.0 * F.mean_absolute_error(y, t)\n\n\ndef train(epochs, iterations, batchsize, modeldir, extension, time_width,\n mel_bins, sampling_rate, g_learning_rate, d_learning_rate, beta1, beta2,\n identity_epoch, second_step, src_path, tgt_path):\n dataset = DatasetLoader(src_path, tgt_path, extension, time_width,\n mel_bins, sampling_rate)\n print(dataset)\n generator_xy = Generator()\n generator_xy.to_gpu()\n gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)\n generator_yx = Generator()\n generator_yx.to_gpu()\n gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)\n discriminator_y = Discriminator()\n discriminator_y.to_gpu()\n dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)\n discriminator_x = Discriminator()\n discriminator_x.to_gpu()\n dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)\n discriminator_xyx = Discriminator()\n discriminator_xyx.to_gpu()\n dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1,\n beta2)\n discriminator_yxy = Discriminator()\n discriminator_yxy.to_gpu()\n dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1,\n beta2)\n lossfunc = CycleGANVC2LossCalculator()\n for epoch in range(epochs):\n sum_dis_loss = 0\n sum_gen_loss = 0\n for batch in range(0, iterations, batchsize):\n x, y = dataset.train(batchsize)\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n xy.unchain_backward()\n xyx.unchain_backward()\n yx.unchain_backward()\n yxy.unchain_backward()\n dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)\n dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)\n if second_step:\n dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)\n dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)\n discriminator_xyx.cleargrads()\n discriminator_yxy.cleargrads()\n discriminator_x.cleargrads()\n discriminator_y.cleargrads()\n dis_loss.backward()\n dis_x_opt.update()\n dis_y_opt.update()\n if second_step:\n dis_xyx_opt.update()\n dis_yxy_opt.update()\n dis_loss.unchain_backward()\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n id_y = generator_xy(y)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n id_x = generator_yx(x)\n gen_loss = lossfunc.gen_loss(discriminator_y, xy)\n gen_loss += lossfunc.gen_loss(discriminator_x, yx)\n if second_step:\n gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)\n gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)\n gen_loss += lossfunc.cycle_loss(x, xyx)\n gen_loss += lossfunc.cycle_loss(y, xyx)\n if epoch < identity_epoch:\n gen_loss += lossfunc.identity_loss(id_y, y)\n gen_loss += lossfunc.identity_loss(id_x, x)\n generator_xy.cleargrads()\n generator_yx.cleargrads()\n gen_loss.backward()\n gen_xy_opt.update()\n gen_yx_opt.update()\n gen_loss.unchain_backward()\n sum_dis_loss += dis_loss.data\n sum_gen_loss += gen_loss.data\n if batch == 0:\n serializers.save_npz(f'{modeldir}/generator_xy_{epoch}.model',\n generator_xy)\n 
serializers.save_npz(f'{modeldir}/generator_yx_{epoch}.model',\n generator_yx)\n print('epoch : {}'.format(epoch))\n print('Generator loss : {}'.format(sum_gen_loss / iterations))\n print('Discriminator loss : {}'.format(sum_dis_loss / iterations))\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='StarGANVC2')\n parser.add_argument('--e', type=int, default=50, help=\n 'the number of epochs')\n parser.add_argument('--i', type=int, default=1000, help=\n 'the number of iterations')\n parser.add_argument('--b', type=int, default=16, help='batch size')\n parser.add_argument('--modeldir', type=Path, default='modeldir', help=\n 'model output directory')\n parser.add_argument('--ext', type=str, default='.npy', help=\n 'extension of training data')\n parser.add_argument('--tw', type=int, default=128, help=\n 'time width of spectral envelope')\n parser.add_argument('--mb', type=int, default=36, help=\n 'mel bins of spectral envelope')\n parser.add_argument('--sr', type=int, default=22050, help=\n 'sampling rate of audio data')\n parser.add_argument('--glr', type=float, default=0.0002, help=\n 'learning rate of Adam on generator')\n parser.add_argument('--dlr', type=float, default=0.0001, help=\n 'learning rate of Adam on discriminator')\n parser.add_argument('--b1', type=float, default=0.5, help='beta1 of Adam')\n parser.add_argument('--b2', type=float, default=0.999, help='beta2 of Adam'\n )\n parser.add_argument('--ie', type=int, default=20, help=\n 'time spans enabling identity mapping loss')\n parser.add_argument('--second', action='store_true', help=\n 'enabling second step of adversaria loss')\n parser.add_argument('--src', type=Path, help=\n 'path which includes source data')\n parser.add_argument('--tgt', type=Path, help=\n 'path which includes target data')\n args = parser.parse_args()\n modeldir = args.modeldir\n modeldir.mkdir(exist_ok=True)\n train(args.e, args.i, args.b, modeldir, args.ext, args.tw, args.mb,\n args.sr, args.glr, args.dlr, args.b1, args.b2, args.ie, args.second,\n args.src, args.tgt)\n",
"step-4": "<mask token>\nxp = cuda.cupy\ncuda.get_device(0).use()\n\n\nclass CycleGANVC2LossCalculator:\n\n def __init__(self):\n pass\n\n @staticmethod\n def dis_loss(discriminator, y, t):\n y_dis = discriminator(y)\n t_dis = discriminator(t)\n return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n return F.mean(F.softplus(-y_dis))\n\n @staticmethod\n def cycle_loss(y, t):\n return 10.0 * F.mean_absolute_error(y, t)\n\n @staticmethod\n def identity_loss(y, t):\n return 5.0 * F.mean_absolute_error(y, t)\n\n\ndef train(epochs, iterations, batchsize, modeldir, extension, time_width,\n mel_bins, sampling_rate, g_learning_rate, d_learning_rate, beta1, beta2,\n identity_epoch, second_step, src_path, tgt_path):\n dataset = DatasetLoader(src_path, tgt_path, extension, time_width,\n mel_bins, sampling_rate)\n print(dataset)\n generator_xy = Generator()\n generator_xy.to_gpu()\n gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)\n generator_yx = Generator()\n generator_yx.to_gpu()\n gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)\n discriminator_y = Discriminator()\n discriminator_y.to_gpu()\n dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)\n discriminator_x = Discriminator()\n discriminator_x.to_gpu()\n dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)\n discriminator_xyx = Discriminator()\n discriminator_xyx.to_gpu()\n dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1,\n beta2)\n discriminator_yxy = Discriminator()\n discriminator_yxy.to_gpu()\n dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1,\n beta2)\n lossfunc = CycleGANVC2LossCalculator()\n for epoch in range(epochs):\n sum_dis_loss = 0\n sum_gen_loss = 0\n for batch in range(0, iterations, batchsize):\n x, y = dataset.train(batchsize)\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n xy.unchain_backward()\n xyx.unchain_backward()\n yx.unchain_backward()\n yxy.unchain_backward()\n dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)\n dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)\n if second_step:\n dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)\n dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)\n discriminator_xyx.cleargrads()\n discriminator_yxy.cleargrads()\n discriminator_x.cleargrads()\n discriminator_y.cleargrads()\n dis_loss.backward()\n dis_x_opt.update()\n dis_y_opt.update()\n if second_step:\n dis_xyx_opt.update()\n dis_yxy_opt.update()\n dis_loss.unchain_backward()\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n id_y = generator_xy(y)\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n id_x = generator_yx(x)\n gen_loss = lossfunc.gen_loss(discriminator_y, xy)\n gen_loss += lossfunc.gen_loss(discriminator_x, yx)\n if second_step:\n gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)\n gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)\n gen_loss += lossfunc.cycle_loss(x, xyx)\n gen_loss += lossfunc.cycle_loss(y, xyx)\n if epoch < identity_epoch:\n gen_loss += lossfunc.identity_loss(id_y, y)\n gen_loss += lossfunc.identity_loss(id_x, x)\n generator_xy.cleargrads()\n generator_yx.cleargrads()\n gen_loss.backward()\n gen_xy_opt.update()\n gen_yx_opt.update()\n gen_loss.unchain_backward()\n sum_dis_loss += dis_loss.data\n sum_gen_loss += gen_loss.data\n if batch == 0:\n serializers.save_npz(f'{modeldir}/generator_xy_{epoch}.model',\n 
generator_xy)\n serializers.save_npz(f'{modeldir}/generator_yx_{epoch}.model',\n generator_yx)\n print('epoch : {}'.format(epoch))\n print('Generator loss : {}'.format(sum_gen_loss / iterations))\n print('Discriminator loss : {}'.format(sum_dis_loss / iterations))\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='StarGANVC2')\n parser.add_argument('--e', type=int, default=50, help=\n 'the number of epochs')\n parser.add_argument('--i', type=int, default=1000, help=\n 'the number of iterations')\n parser.add_argument('--b', type=int, default=16, help='batch size')\n parser.add_argument('--modeldir', type=Path, default='modeldir', help=\n 'model output directory')\n parser.add_argument('--ext', type=str, default='.npy', help=\n 'extension of training data')\n parser.add_argument('--tw', type=int, default=128, help=\n 'time width of spectral envelope')\n parser.add_argument('--mb', type=int, default=36, help=\n 'mel bins of spectral envelope')\n parser.add_argument('--sr', type=int, default=22050, help=\n 'sampling rate of audio data')\n parser.add_argument('--glr', type=float, default=0.0002, help=\n 'learning rate of Adam on generator')\n parser.add_argument('--dlr', type=float, default=0.0001, help=\n 'learning rate of Adam on discriminator')\n parser.add_argument('--b1', type=float, default=0.5, help='beta1 of Adam')\n parser.add_argument('--b2', type=float, default=0.999, help='beta2 of Adam'\n )\n parser.add_argument('--ie', type=int, default=20, help=\n 'time spans enabling identity mapping loss')\n parser.add_argument('--second', action='store_true', help=\n 'enabling second step of adversaria loss')\n parser.add_argument('--src', type=Path, help=\n 'path which includes source data')\n parser.add_argument('--tgt', type=Path, help=\n 'path which includes target data')\n args = parser.parse_args()\n modeldir = args.modeldir\n modeldir.mkdir(exist_ok=True)\n train(args.e, args.i, args.b, modeldir, args.ext, args.tw, args.mb,\n args.sr, args.glr, args.dlr, args.b1, args.b2, args.ie, args.second,\n args.src, args.tgt)\n",
"step-5": "import chainer\nimport chainer.functions as F\nimport numpy as np\nimport argparse\n\nfrom model import Generator, Discriminator\nfrom chainer import cuda, serializers\nfrom pathlib import Path\nfrom utils import set_optimizer\nfrom dataset import DatasetLoader\n\nxp = cuda.cupy\ncuda.get_device(0).use()\n\n\nclass CycleGANVC2LossCalculator:\n def __init__(self):\n pass\n\n @staticmethod\n def dis_loss(discriminator, y, t):\n y_dis = discriminator(y)\n t_dis = discriminator(t)\n\n return F.mean(F.softplus(-t_dis)) + F.mean(F.softplus(y_dis))\n\n @staticmethod\n def gen_loss(discriminator, y):\n y_dis = discriminator(y)\n\n return F.mean(F.softplus(-y_dis))\n\n @staticmethod\n def cycle_loss(y, t):\n return 10.0 * F.mean_absolute_error(y, t)\n\n @staticmethod\n def identity_loss(y, t):\n return 5.0 * F.mean_absolute_error(y, t)\n\n\ndef train(epochs,\n iterations,\n batchsize,\n modeldir,\n extension,\n time_width,\n mel_bins,\n sampling_rate,\n g_learning_rate,\n d_learning_rate,\n beta1,\n beta2,\n identity_epoch,\n second_step,\n src_path,\n tgt_path):\n\n # Dataset definiton\n dataset = DatasetLoader(src_path,\n tgt_path,\n extension,\n time_width,\n mel_bins,\n sampling_rate)\n print(dataset)\n\n # Model & Optimizer definition\n generator_xy = Generator()\n generator_xy.to_gpu()\n gen_xy_opt = set_optimizer(generator_xy, g_learning_rate, beta1, beta2)\n\n generator_yx = Generator()\n generator_yx.to_gpu()\n gen_yx_opt = set_optimizer(generator_yx, g_learning_rate, beta1, beta2)\n\n discriminator_y = Discriminator()\n discriminator_y.to_gpu()\n dis_y_opt = set_optimizer(discriminator_y, d_learning_rate, beta1, beta2)\n\n discriminator_x = Discriminator()\n discriminator_x.to_gpu()\n dis_x_opt = set_optimizer(discriminator_x, d_learning_rate, beta1, beta2)\n\n discriminator_xyx = Discriminator()\n discriminator_xyx.to_gpu()\n dis_xyx_opt = set_optimizer(discriminator_xyx, d_learning_rate, beta1, beta2)\n\n discriminator_yxy = Discriminator()\n discriminator_yxy.to_gpu()\n dis_yxy_opt = set_optimizer(discriminator_yxy, d_learning_rate, beta1, beta2)\n\n # Loss function definition\n lossfunc = CycleGANVC2LossCalculator()\n\n for epoch in range(epochs):\n sum_dis_loss = 0\n sum_gen_loss = 0\n\n for batch in range(0, iterations, batchsize):\n x, y = dataset.train(batchsize)\n\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n\n xy.unchain_backward()\n xyx.unchain_backward()\n yx.unchain_backward()\n yxy.unchain_backward()\n\n dis_loss = lossfunc.dis_loss(discriminator_y, xy, y)\n dis_loss += lossfunc.dis_loss(discriminator_x, yx, x)\n\n if second_step:\n dis_loss += lossfunc.dis_loss(discriminator_xyx, xyx, x)\n dis_loss += lossfunc.dis_loss(discriminator_yxy, yxy, y)\n\n discriminator_xyx.cleargrads()\n discriminator_yxy.cleargrads()\n\n discriminator_x.cleargrads()\n discriminator_y.cleargrads()\n\n dis_loss.backward()\n dis_x_opt.update()\n dis_y_opt.update()\n\n if second_step:\n dis_xyx_opt.update()\n dis_yxy_opt.update()\n\n dis_loss.unchain_backward()\n\n xy = generator_xy(x)\n xyx = generator_yx(xy)\n id_y = generator_xy(y)\n\n yx = generator_yx(y)\n yxy = generator_xy(yx)\n id_x = generator_yx(x)\n\n gen_loss = lossfunc.gen_loss(discriminator_y, xy)\n gen_loss += lossfunc.gen_loss(discriminator_x, yx)\n\n if second_step:\n gen_loss += lossfunc.gen_loss(discriminator_yxy, yxy)\n gen_loss += lossfunc.gen_loss(discriminator_xyx, xyx)\n\n gen_loss += lossfunc.cycle_loss(x, xyx)\n gen_loss += lossfunc.cycle_loss(y, xyx)\n\n if 
epoch < identity_epoch:\n gen_loss += lossfunc.identity_loss(id_y, y)\n gen_loss += lossfunc.identity_loss(id_x, x)\n\n generator_xy.cleargrads()\n generator_yx.cleargrads()\n gen_loss.backward()\n gen_xy_opt.update()\n gen_yx_opt.update()\n gen_loss.unchain_backward()\n\n sum_dis_loss += dis_loss.data\n sum_gen_loss += gen_loss.data\n\n if batch == 0:\n serializers.save_npz(f\"{modeldir}/generator_xy_{epoch}.model\", generator_xy)\n serializers.save_npz(f\"{modeldir}/generator_yx_{epoch}.model\", generator_yx)\n\n print('epoch : {}'.format(epoch))\n print('Generator loss : {}'.format(sum_gen_loss / iterations))\n print('Discriminator loss : {}'.format(sum_dis_loss / iterations))\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser(description=\"StarGANVC2\")\n parser.add_argument('--e', type=int, default=50, help=\"the number of epochs\")\n parser.add_argument('--i', type=int, default=1000, help=\"the number of iterations\")\n parser.add_argument('--b', type=int, default=16, help=\"batch size\")\n parser.add_argument('--modeldir', type=Path, default=\"modeldir\", help=\"model output directory\")\n parser.add_argument('--ext', type=str, default=\".npy\", help=\"extension of training data\")\n parser.add_argument('--tw', type=int, default=128, help=\"time width of spectral envelope\")\n parser.add_argument('--mb', type=int, default=36, help=\"mel bins of spectral envelope\")\n parser.add_argument('--sr', type=int, default=22050, help=\"sampling rate of audio data\")\n parser.add_argument('--glr', type=float, default=0.0002, help=\"learning rate of Adam on generator\")\n parser.add_argument('--dlr', type=float, default=0.0001, help=\"learning rate of Adam on discriminator\")\n parser.add_argument('--b1', type=float, default=0.5, help=\"beta1 of Adam\")\n parser.add_argument('--b2', type=float, default=0.999, help=\"beta2 of Adam\")\n parser.add_argument('--ie', type=int, default=20, help=\"time spans enabling identity mapping loss\")\n parser.add_argument('--second', action=\"store_true\", help=\"enabling second step of adversaria loss\")\n parser.add_argument('--src', type=Path, help=\"path which includes source data\")\n parser.add_argument('--tgt', type=Path, help=\"path which includes target data\")\n args = parser.parse_args()\n\n modeldir = args.modeldir\n modeldir.mkdir(exist_ok=True)\n\n train(args.e, args.i, args.b, modeldir, args.ext, args.tw, args.mb, args.sr,\n args.glr, args.dlr, args.b1, args.b2, args.ie, args.second,\n args.src, args.tgt)\n",
"step-ids": [
3,
7,
8,
9,
11
]
}
|
[
3,
7,
8,
9,
11
] |
import unittest
def is_multiple(value, base):
return 0 == (value % base)
def fizz_buzz(value):
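    # Test divisibility by both 3 and 5 first so that multiples of 15 return "FizzBuzz"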
if is_multiple(value, 5) and is_multiple(value, 3):
return "FizzBuzz"
if is_multiple(value, 3):
return "Fizz"
if is_multiple(value, 5):
return "Buzz"
return str(value)
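
# Examples: fizz_buzz(9) -> "Fizz", fizz_buzz(20) -> "Buzz", fizz_buzz(30) -> "FizzBuzz", fizz_buzz(7) -> "7"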
class FizzBuzzTest(unittest.TestCase):
def check_fizz_buzz(self, value, expected):
result = fizz_buzz(value)
self.assertEqual(expected, result)
def test_fizz_buzz__fizz_buzz_1_1(self):
self.check_fizz_buzz(1, "1")
def test_fizz_buzz__fizz_buzz_2_2(self):
self.check_fizz_buzz(2, "2")
def test_fizz_buzz__fizz_buzz_3_Fizz(self):
self.check_fizz_buzz(3, "Fizz")
def test_fizz_buzz__fizz_buzz_5_Buzz(self):
self.check_fizz_buzz(5, "Buzz")
def test_fizz_buzz__fizz_buzz_6_Fizz(self):
self.check_fizz_buzz(6, "Fizz")
def test_fizz_buzz__fizz_buzz_10_Buzz(self):
self.check_fizz_buzz(10, "Buzz")
def test_fizz_buzz__fizz_buzz_15_FizzBuzz(self):
self.check_fizz_buzz(15, "FizzBuzz")
if __name__ == "__main__":
print("Running all unit tests...")
unittest.main()
|
normal
|
{
"blob_id": "59d543ed443c156ac65f9c806ba5bada6bcd0c21",
"index": 6891,
"step-1": "<mask token>\n\n\nclass FizzBuzzTest(unittest.TestCase):\n\n def check_fizz_buzz(self, value, expected):\n result = fizz_buzz(value)\n self.assertEqual(expected, result)\n <mask token>\n\n def test_fizz_buzz__fizz_buzz_2_2(self):\n self.check_fizz_buzz(2, '2')\n\n def test_fizz_buzz__fizz_buzz_3_Fizz(self):\n self.check_fizz_buzz(3, 'Fizz')\n\n def test_fizz_buzz__fizz_buzz_5_Buzz(self):\n self.check_fizz_buzz(5, 'Buzz')\n <mask token>\n\n def test_fizz_buzz__fizz_buzz_10_Buzz(self):\n self.check_fizz_buzz(10, 'Buzz')\n\n def test_fizz_buzz__fizz_buzz_15_FizzBuzz(self):\n self.check_fizz_buzz(15, 'FizzBuzz')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fizz_buzz(value):\n if is_multiple(value, 5) and is_multiple(value, 3):\n return 'FizzBuzz'\n if is_multiple(value, 3):\n return 'Fizz'\n if is_multiple(value, 5):\n return 'Buzz'\n return str(value)\n\n\nclass FizzBuzzTest(unittest.TestCase):\n\n def check_fizz_buzz(self, value, expected):\n result = fizz_buzz(value)\n self.assertEqual(expected, result)\n\n def test_fizz_buzz__fizz_buzz_1_1(self):\n self.check_fizz_buzz(1, '1')\n\n def test_fizz_buzz__fizz_buzz_2_2(self):\n self.check_fizz_buzz(2, '2')\n\n def test_fizz_buzz__fizz_buzz_3_Fizz(self):\n self.check_fizz_buzz(3, 'Fizz')\n\n def test_fizz_buzz__fizz_buzz_5_Buzz(self):\n self.check_fizz_buzz(5, 'Buzz')\n\n def test_fizz_buzz__fizz_buzz_6_Fizz(self):\n self.check_fizz_buzz(6, 'Fizz')\n\n def test_fizz_buzz__fizz_buzz_10_Buzz(self):\n self.check_fizz_buzz(10, 'Buzz')\n\n def test_fizz_buzz__fizz_buzz_15_FizzBuzz(self):\n self.check_fizz_buzz(15, 'FizzBuzz')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef is_multiple(value, base):\n return 0 == value % base\n\n\ndef fizz_buzz(value):\n if is_multiple(value, 5) and is_multiple(value, 3):\n return 'FizzBuzz'\n if is_multiple(value, 3):\n return 'Fizz'\n if is_multiple(value, 5):\n return 'Buzz'\n return str(value)\n\n\nclass FizzBuzzTest(unittest.TestCase):\n\n def check_fizz_buzz(self, value, expected):\n result = fizz_buzz(value)\n self.assertEqual(expected, result)\n\n def test_fizz_buzz__fizz_buzz_1_1(self):\n self.check_fizz_buzz(1, '1')\n\n def test_fizz_buzz__fizz_buzz_2_2(self):\n self.check_fizz_buzz(2, '2')\n\n def test_fizz_buzz__fizz_buzz_3_Fizz(self):\n self.check_fizz_buzz(3, 'Fizz')\n\n def test_fizz_buzz__fizz_buzz_5_Buzz(self):\n self.check_fizz_buzz(5, 'Buzz')\n\n def test_fizz_buzz__fizz_buzz_6_Fizz(self):\n self.check_fizz_buzz(6, 'Fizz')\n\n def test_fizz_buzz__fizz_buzz_10_Buzz(self):\n self.check_fizz_buzz(10, 'Buzz')\n\n def test_fizz_buzz__fizz_buzz_15_FizzBuzz(self):\n self.check_fizz_buzz(15, 'FizzBuzz')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef is_multiple(value, base):\n return 0 == value % base\n\n\ndef fizz_buzz(value):\n if is_multiple(value, 5) and is_multiple(value, 3):\n return 'FizzBuzz'\n if is_multiple(value, 3):\n return 'Fizz'\n if is_multiple(value, 5):\n return 'Buzz'\n return str(value)\n\n\nclass FizzBuzzTest(unittest.TestCase):\n\n def check_fizz_buzz(self, value, expected):\n result = fizz_buzz(value)\n self.assertEqual(expected, result)\n\n def test_fizz_buzz__fizz_buzz_1_1(self):\n self.check_fizz_buzz(1, '1')\n\n def test_fizz_buzz__fizz_buzz_2_2(self):\n self.check_fizz_buzz(2, '2')\n\n def test_fizz_buzz__fizz_buzz_3_Fizz(self):\n self.check_fizz_buzz(3, 'Fizz')\n\n def test_fizz_buzz__fizz_buzz_5_Buzz(self):\n self.check_fizz_buzz(5, 'Buzz')\n\n def test_fizz_buzz__fizz_buzz_6_Fizz(self):\n self.check_fizz_buzz(6, 'Fizz')\n\n def test_fizz_buzz__fizz_buzz_10_Buzz(self):\n self.check_fizz_buzz(10, 'Buzz')\n\n def test_fizz_buzz__fizz_buzz_15_FizzBuzz(self):\n self.check_fizz_buzz(15, 'FizzBuzz')\n\n\nif __name__ == '__main__':\n print('Running all unit tests...')\n unittest.main()\n",
"step-5": "import unittest\n\n\ndef is_multiple(value, base):\n return 0 == (value % base)\n\n\ndef fizz_buzz(value):\n if is_multiple(value, 5) and is_multiple(value, 3):\n return \"FizzBuzz\"\n if is_multiple(value, 3):\n return \"Fizz\"\n if is_multiple(value, 5):\n return \"Buzz\"\n return str(value)\n\n\nclass FizzBuzzTest(unittest.TestCase):\n def check_fizz_buzz(self, value, expected):\n result = fizz_buzz(value)\n\n self.assertEqual(expected, result)\n\n def test_fizz_buzz__fizz_buzz_1_1(self):\n self.check_fizz_buzz(1, \"1\")\n\n def test_fizz_buzz__fizz_buzz_2_2(self):\n self.check_fizz_buzz(2, \"2\")\n\n def test_fizz_buzz__fizz_buzz_3_Fizz(self):\n self.check_fizz_buzz(3, \"Fizz\")\n\n def test_fizz_buzz__fizz_buzz_5_Buzz(self):\n self.check_fizz_buzz(5, \"Buzz\")\n\n def test_fizz_buzz__fizz_buzz_6_Fizz(self):\n self.check_fizz_buzz(6, \"Fizz\")\n\n def test_fizz_buzz__fizz_buzz_10_Buzz(self):\n self.check_fizz_buzz(10, \"Buzz\")\n\n def test_fizz_buzz__fizz_buzz_15_FizzBuzz(self):\n self.check_fizz_buzz(15, \"FizzBuzz\")\n\n\nif __name__ == \"__main__\":\n print(\"Running all unit tests...\")\n unittest.main()\n",
"step-ids": [
7,
10,
11,
12,
14
]
}
|
[
7,
10,
11,
12,
14
] |
import requests
from urllib.parse import urlparse, urlencode
from json import JSONDecodeError
from requests.exceptions import HTTPError
def validate_response(response):
"""
raise exception if error response occurred
"""
r = response
try:
r.raise_for_status()
except HTTPError as e:
message = dict(status_code=r.status_code, exception=e)
try:
response = r.json()
message['response'] = response
except JSONDecodeError as e:
message['response'] = r.content
raise HTTPError(message)
class CpmsConnector:
"""The CpmsConnector object allow you communicate through
cpms between application.
"""
ORDER_STATUS = ('NEW', 'IN_PROGRESS', 'COMPLETED', 'CANCELED', 'ERROR')
def __init__(self, config):
"""initialize with config
config(dict): must supply username, api_key, api_url
"""
self.username = config['username']
self.api_key = config['api_key']
self.api_url = config['api_url']
self._token = None
self._set_token()
@property
def _fulfillment_url(self):
netloc = f'fulfillment.{urlparse(self.api_url).netloc}'
return urlparse(self.api_url)._replace(netloc=netloc).geturl()
def _update_headers(self, token):
self.headers = {
'X-Subject-Token': token
}
@property
def token(self):
return self._token
def _set_token(self):
path = '/identity/token'
payload = {
"auth":
{
"apiKeyCredentials":
{
"username": self.username,
"apiKey": self.api_key
}
}
}
url = urlparse(self.api_url)._replace(path=path).geturl()
r = requests.post(url, json=payload)
validate_response(r)
token = r.json()['token']['token_id']
self._update_headers(token)
self._token = token
def get_order(self, channel_id, order_id):
"""retrieve single order of sales order
Args:
url(str): url for retrieval sales order
"""
path = f'/channel/{channel_id}/order/{order_id}'
url = urlparse(self._fulfillment_url)._replace(path=path).geturl()
r = requests.get(url, headers=self.headers)
validate_response(r)
return r.json()
def get_orders_status(self, channel_id=None, partner_id=None, list_id=None,
since=None, order_status=None):
"""Get list order status of sales order
Args:
channel_id(str): channel_id of cpms
partner_id(str): merchant/partner id of cpms
list_id(list): list of order id
since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z
order_status(str): (NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)
Returns:
list: all orders
"""
if order_status and order_status not in self.ORDER_STATUS:
raise ValueError(
'invalid order_status eg. '
'(NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)'
)
url = urlparse(self._fulfillment_url)
        # ensure either channel_id or partner_id is supplied
if channel_id:
path = f'/channel/{channel_id}'
elif partner_id:
path = f'/partner/{partner_id}'
else:
raise ValueError(
'must supply either channel_id or partner_id args')
# append sales-order-status path
path += '/sales-order-status'
        # ensure either list_id or since is supplied
if list_id:
if len(list_id) > 10:
                raise ValueError('list_id can\'t contain more than 10 ids')
path += '/id'
query_string = {'id': list_id}
        elif since:
            query_string = {'since': since}
if order_status in self.ORDER_STATUS:
query_string.update({'orderStatus': order_status})
else:
raise ValueError('must supply either list_id or since args')
query_string = urlencode(query_string, doseq=True)
url = url._replace(path=path, query=query_string).geturl()
r = requests.get(url, headers=self.headers)
validate_response(r)
orders = r.json()
next_url = r.links['next']['url'] if 'next' in r.links else None
return orders, next_url
def create_order(self, channel_id, order_id, payload):
"""create order to acommerce (CPMS)
Args:
channel_id(str): channel_id of cpms
order_id(str): order_id of merchant or partner
payload(dict): order body
Returns:
response or exception
"""
path = f'/channel/{channel_id}/order/{order_id}'
url = urlparse(self._fulfillment_url)._replace(path=path).geturl()
r = requests.put(url=url, json=payload, headers=self.headers)
validate_response(r)
return {
'code': r.status_code,
'message': 'Order has been successfully created'
}
def get_stocks(self, channel_id, partner_id, since):
"""Get list stock of partner from specifics channel/marketplace
Args:
channel_id(str): channel_id cpms
partner_id(str): partner/merchant id
since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z
Returns (list): list of stock
"""
path = f'/channel/{channel_id}/allocation/merchant/{partner_id}'
query_string = urlencode({'since': since})
url = urlparse(self._fulfillment_url)._replace(
path=path, query=query_string).geturl()
r = requests.get(url, headers=self.headers)
validate_response(r)
next_link = r.links['next']['url'] if 'next' in r.links else None
        # pass the pagination link on to the caller when more pages exist
        return {'data': r.json(), 'url': next_link} \
            if next_link else {'data': r.json()}
def _get_webhook_path(self, channel_id, partner_id):
if not (channel_id or partner_id):
            raise ValueError('must supply either channel_id or partner_id')
return f'/channel/{channel_id}' \
if channel_id else f'/partner/{partner_id}'
def create_webhook(self, payload, channel_id=None, partner_id=None):
"""Create webhook registration end point to acommerce either using
channel_id or partner_id
Args:
channel_id(str): channel_id of acommerce (CPMS)
partner_id(str): merchant or partner id acommerce (CPMS)
payload(str): webhook data format acommerce
Returns (dict): webhook data informations
"""
path = self._get_webhook_path(channel_id, partner_id)
path += '/hooks'
url = urlparse(self.api_url)._replace(path=path).geturl()
r = requests.post(url=url, json=payload, headers=self.headers)
validate_response(r)
return r.json()
def retrieve_webhook(self, webhook_id, channel_id=None, partner_id=None):
"""Retrieve specific webhook information using webhook_id.
must supply either partner_id or channel_id
Args:
webhook_id: registered webhook id
channel_id(str): channel_id of acommerce (CPMS)
partner_id(str): merchant or partner id acommerce (CPMS)
        Returns (dict): webhook data information
"""
path = self._get_webhook_path(channel_id, partner_id)
path += f'/hooks/{webhook_id}'
url = urlparse(self.api_url)._replace(path=path).geturl()
r = requests.get(url=url, headers=self.headers)
validate_response(r)
return r.json()
def get_webhook(self, channel_id=None, partner_id=None):
"""Get list registered webhook from acommerce using either partner_id
or channel_id
Args:
channel_id(str): channel_id of acommerce (CPMS)
partner_id(str): merchant or partner id acommerce (CPMS)
Returns (list): webhook data informations
"""
path = self._get_webhook_path(channel_id, partner_id)
path += '/hooks'
        url = urlparse(self.api_url)._replace(path=path).geturl()
r = requests.get(url, headers=self.headers)
validate_response(r)
return r.json()
def delete_webhook(self, webhook_id, channel_id=None, partner_id=None):
"""remove a registered webhook
Args:
webhook_id: registered webhook id
channel_id(str): channel_id of acommerce (CPMS)
partner_id(str): merchant or partner id acommerce (CPMS)
Returns No Content HTTP 204
"""
path = self._get_webhook_path(channel_id, partner_id)
        path += f'/hooks/{webhook_id}'
url = urlparse(self.api_url)._replace(path=path).geturl()
r = requests.delete(url, headers=self.headers)
validate_response(r)
return {
'code': r.status_code,
'message': 'Web Hook has been successfully deleted'
}
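
# A minimal usage sketch (the credentials and ids below are placeholder
# values, not part of the original module):
#
#     config = {
#         'username': 'acme',
#         'api_key': 'secret-key',
#         'api_url': 'https://api.example.com',
#     }
#     connector = CpmsConnector(config)
#     order = connector.get_order('channel-1', 'order-123')
#     orders, next_url = connector.get_orders_status(
#         channel_id='channel-1', since='2015-06-18T10:30:40Z')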
|
normal
|
{
"blob_id": "5bd2cf2ae68708d2b1dbbe0323a5f83837f7b564",
"index": 7842,
"step-1": "<mask token>\n\n\nclass CpmsConnector:\n <mask token>\n <mask token>\n\n def __init__(self, config):\n \"\"\"initialize with config\n config(dict): must supply username, api_key, api_url\n \"\"\"\n self.username = config['username']\n self.api_key = config['api_key']\n self.api_url = config['api_url']\n self._token = None\n self._set_token()\n\n @property\n def _fulfillment_url(self):\n netloc = f'fulfillment.{urlparse(self.api_url).netloc}'\n return urlparse(self.api_url)._replace(netloc=netloc).geturl()\n\n def _update_headers(self, token):\n self.headers = {'X-Subject-Token': token}\n <mask token>\n\n def _set_token(self):\n path = '/identity/token'\n payload = {'auth': {'apiKeyCredentials': {'username': self.username,\n 'apiKey': self.api_key}}}\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url, json=payload)\n validate_response(r)\n token = r.json()['token']['token_id']\n self._update_headers(token)\n self._token = token\n\n def get_order(self, channel_id, order_id):\n \"\"\"retrieve single order of sales order\n\n Args:\n url(str): url for retrieval sales order\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def get_orders_status(self, channel_id=None, partner_id=None, list_id=\n None, since=None, order_status=None):\n \"\"\"Get list order status of sales order\n\n Args:\n channel_id(str): channel_id of cpms\n partner_id(str): merchant/partner id of cpms\n list_id(list): list of order id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n order_status(str): (NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)\n\n Returns:\n list: all orders\n \"\"\"\n if order_status and order_status not in self.ORDER_STATUS:\n raise ValueError(\n 'invalid order_status eg. 
(NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)'\n )\n url = urlparse(self._fulfillment_url)\n if channel_id:\n path = f'/channel/{channel_id}'\n elif partner_id:\n path = f'/partner/{partner_id}'\n else:\n raise ValueError('must supply either channel_id or partner_id args'\n )\n path += '/sales-order-status'\n if list_id:\n if len(list_id) > 10:\n raise ValueError(\"list_id can't be more than 10 length\")\n path += '/id'\n query_string = {'id': list_id}\n elif since:\n query_string = {'id': list_id}\n if order_status in self.ORDER_STATUS:\n query_string.update({'orderStatus': order_status})\n else:\n raise ValueError('must supply either list_id or since args')\n query_string = urlencode(query_string, doseq=True)\n url = url._replace(path=path, query=query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n orders = r.json()\n next_url = r.links['next']['url'] if 'next' in r.links else None\n return orders, next_url\n\n def create_order(self, channel_id, order_id, payload):\n \"\"\"create order to acommerce (CPMS)\n\n Args:\n channel_id(str): channel_id of cpms\n order_id(str): order_id of merchant or partner\n payload(dict): order body\n\n Returns:\n response or exception\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.put(url=url, json=payload, headers=self.headers)\n validate_response(r)\n return {'code': r.status_code, 'message':\n 'Order has been successfully created'}\n\n def get_stocks(self, channel_id, partner_id, since):\n \"\"\"Get list stock of partner from specifics channel/marketplace\n\n Args:\n channel_id(str): channel_id cpms\n partner_id(str): partner/merchant id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n\n Returns (list): list of stock\n\n \"\"\"\n path = f'/channel/{channel_id}/allocation/merchant/{partner_id}'\n query_string = urlencode({'since': since})\n url = urlparse(self._fulfillment_url)._replace(path=path, query=\n query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n next_link = r.links['next']['url'] if 'next' in r.links else None\n return {'data': r.json(), 'url': url} if next_link else {'data': r.\n json()}\n\n def _get_webhook_path(self, channel_id, partner_id):\n if not (channel_id or partner_id):\n raise ValueError('channel_id or partner_id must be fill')\n return (f'/channel/{channel_id}' if channel_id else\n f'/partner/{partner_id}')\n\n def create_webhook(self, payload, channel_id=None, partner_id=None):\n \"\"\"Create webhook registration end point to acommerce either using\n channel_id or partner_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n payload(str): webhook data format acommerce\n\n Returns (dict): webhook data informations\n\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url=url, json=payload, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def retrieve_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"Retrieve specific webhook information using webhook_id.\n must supply either partner_id or channel_id\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (dict): webhook data informations\n \"\"\"\n path = 
self._get_webhook_path(channel_id, partner_id)\n path += f'/hooks/{webhook_id}'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.get(url=url, headers=self.headers)\n validate_response(r)\n return r.json()\n <mask token>\n\n def delete_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"remove a registered webhook\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns No Content HTTP 204\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.delete(url, headers=self.headers)\n validate_response(r)\n return {'code': r.status_code, 'message':\n 'Web Hook has been successfully deleted'}\n",
"step-2": "<mask token>\n\n\nclass CpmsConnector:\n <mask token>\n ORDER_STATUS = 'NEW', 'IN_PROGRESS', 'COMPLETED', 'CANCELED', 'ERROR'\n\n def __init__(self, config):\n \"\"\"initialize with config\n config(dict): must supply username, api_key, api_url\n \"\"\"\n self.username = config['username']\n self.api_key = config['api_key']\n self.api_url = config['api_url']\n self._token = None\n self._set_token()\n\n @property\n def _fulfillment_url(self):\n netloc = f'fulfillment.{urlparse(self.api_url).netloc}'\n return urlparse(self.api_url)._replace(netloc=netloc).geturl()\n\n def _update_headers(self, token):\n self.headers = {'X-Subject-Token': token}\n\n @property\n def token(self):\n return self._token\n\n def _set_token(self):\n path = '/identity/token'\n payload = {'auth': {'apiKeyCredentials': {'username': self.username,\n 'apiKey': self.api_key}}}\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url, json=payload)\n validate_response(r)\n token = r.json()['token']['token_id']\n self._update_headers(token)\n self._token = token\n\n def get_order(self, channel_id, order_id):\n \"\"\"retrieve single order of sales order\n\n Args:\n url(str): url for retrieval sales order\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def get_orders_status(self, channel_id=None, partner_id=None, list_id=\n None, since=None, order_status=None):\n \"\"\"Get list order status of sales order\n\n Args:\n channel_id(str): channel_id of cpms\n partner_id(str): merchant/partner id of cpms\n list_id(list): list of order id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n order_status(str): (NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)\n\n Returns:\n list: all orders\n \"\"\"\n if order_status and order_status not in self.ORDER_STATUS:\n raise ValueError(\n 'invalid order_status eg. 
(NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)'\n )\n url = urlparse(self._fulfillment_url)\n if channel_id:\n path = f'/channel/{channel_id}'\n elif partner_id:\n path = f'/partner/{partner_id}'\n else:\n raise ValueError('must supply either channel_id or partner_id args'\n )\n path += '/sales-order-status'\n if list_id:\n if len(list_id) > 10:\n raise ValueError(\"list_id can't be more than 10 length\")\n path += '/id'\n query_string = {'id': list_id}\n elif since:\n query_string = {'id': list_id}\n if order_status in self.ORDER_STATUS:\n query_string.update({'orderStatus': order_status})\n else:\n raise ValueError('must supply either list_id or since args')\n query_string = urlencode(query_string, doseq=True)\n url = url._replace(path=path, query=query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n orders = r.json()\n next_url = r.links['next']['url'] if 'next' in r.links else None\n return orders, next_url\n\n def create_order(self, channel_id, order_id, payload):\n \"\"\"create order to acommerce (CPMS)\n\n Args:\n channel_id(str): channel_id of cpms\n order_id(str): order_id of merchant or partner\n payload(dict): order body\n\n Returns:\n response or exception\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.put(url=url, json=payload, headers=self.headers)\n validate_response(r)\n return {'code': r.status_code, 'message':\n 'Order has been successfully created'}\n\n def get_stocks(self, channel_id, partner_id, since):\n \"\"\"Get list stock of partner from specifics channel/marketplace\n\n Args:\n channel_id(str): channel_id cpms\n partner_id(str): partner/merchant id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n\n Returns (list): list of stock\n\n \"\"\"\n path = f'/channel/{channel_id}/allocation/merchant/{partner_id}'\n query_string = urlencode({'since': since})\n url = urlparse(self._fulfillment_url)._replace(path=path, query=\n query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n next_link = r.links['next']['url'] if 'next' in r.links else None\n return {'data': r.json(), 'url': url} if next_link else {'data': r.\n json()}\n\n def _get_webhook_path(self, channel_id, partner_id):\n if not (channel_id or partner_id):\n raise ValueError('channel_id or partner_id must be fill')\n return (f'/channel/{channel_id}' if channel_id else\n f'/partner/{partner_id}')\n\n def create_webhook(self, payload, channel_id=None, partner_id=None):\n \"\"\"Create webhook registration end point to acommerce either using\n channel_id or partner_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n payload(str): webhook data format acommerce\n\n Returns (dict): webhook data informations\n\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url=url, json=payload, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def retrieve_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"Retrieve specific webhook information using webhook_id.\n must supply either partner_id or channel_id\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (dict): webhook data informations\n \"\"\"\n path = 
self._get_webhook_path(channel_id, partner_id)\n path += f'/hooks/{webhook_id}'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.get(url=url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def get_webhook(self, channel_id=None, partner_id=None):\n \"\"\"Get list registered webhook from acommerce using either partner_id\n or channel_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (list): webhook data informations\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def delete_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"remove a registered webhook\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns No Content HTTP 204\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.delete(url, headers=self.headers)\n validate_response(r)\n return {'code': r.status_code, 'message':\n 'Web Hook has been successfully deleted'}\n",
"step-3": "<mask token>\n\n\nclass CpmsConnector:\n \"\"\"The CpmsConnector object allow you communicate through\n cpms between application.\n \"\"\"\n ORDER_STATUS = 'NEW', 'IN_PROGRESS', 'COMPLETED', 'CANCELED', 'ERROR'\n\n def __init__(self, config):\n \"\"\"initialize with config\n config(dict): must supply username, api_key, api_url\n \"\"\"\n self.username = config['username']\n self.api_key = config['api_key']\n self.api_url = config['api_url']\n self._token = None\n self._set_token()\n\n @property\n def _fulfillment_url(self):\n netloc = f'fulfillment.{urlparse(self.api_url).netloc}'\n return urlparse(self.api_url)._replace(netloc=netloc).geturl()\n\n def _update_headers(self, token):\n self.headers = {'X-Subject-Token': token}\n\n @property\n def token(self):\n return self._token\n\n def _set_token(self):\n path = '/identity/token'\n payload = {'auth': {'apiKeyCredentials': {'username': self.username,\n 'apiKey': self.api_key}}}\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url, json=payload)\n validate_response(r)\n token = r.json()['token']['token_id']\n self._update_headers(token)\n self._token = token\n\n def get_order(self, channel_id, order_id):\n \"\"\"retrieve single order of sales order\n\n Args:\n url(str): url for retrieval sales order\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def get_orders_status(self, channel_id=None, partner_id=None, list_id=\n None, since=None, order_status=None):\n \"\"\"Get list order status of sales order\n\n Args:\n channel_id(str): channel_id of cpms\n partner_id(str): merchant/partner id of cpms\n list_id(list): list of order id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n order_status(str): (NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)\n\n Returns:\n list: all orders\n \"\"\"\n if order_status and order_status not in self.ORDER_STATUS:\n raise ValueError(\n 'invalid order_status eg. 
(NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)'\n )\n url = urlparse(self._fulfillment_url)\n if channel_id:\n path = f'/channel/{channel_id}'\n elif partner_id:\n path = f'/partner/{partner_id}'\n else:\n raise ValueError('must supply either channel_id or partner_id args'\n )\n path += '/sales-order-status'\n if list_id:\n if len(list_id) > 10:\n raise ValueError(\"list_id can't be more than 10 length\")\n path += '/id'\n query_string = {'id': list_id}\n elif since:\n query_string = {'id': list_id}\n if order_status in self.ORDER_STATUS:\n query_string.update({'orderStatus': order_status})\n else:\n raise ValueError('must supply either list_id or since args')\n query_string = urlencode(query_string, doseq=True)\n url = url._replace(path=path, query=query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n orders = r.json()\n next_url = r.links['next']['url'] if 'next' in r.links else None\n return orders, next_url\n\n def create_order(self, channel_id, order_id, payload):\n \"\"\"create order to acommerce (CPMS)\n\n Args:\n channel_id(str): channel_id of cpms\n order_id(str): order_id of merchant or partner\n payload(dict): order body\n\n Returns:\n response or exception\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.put(url=url, json=payload, headers=self.headers)\n validate_response(r)\n return {'code': r.status_code, 'message':\n 'Order has been successfully created'}\n\n def get_stocks(self, channel_id, partner_id, since):\n \"\"\"Get list stock of partner from specifics channel/marketplace\n\n Args:\n channel_id(str): channel_id cpms\n partner_id(str): partner/merchant id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n\n Returns (list): list of stock\n\n \"\"\"\n path = f'/channel/{channel_id}/allocation/merchant/{partner_id}'\n query_string = urlencode({'since': since})\n url = urlparse(self._fulfillment_url)._replace(path=path, query=\n query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n next_link = r.links['next']['url'] if 'next' in r.links else None\n return {'data': r.json(), 'url': url} if next_link else {'data': r.\n json()}\n\n def _get_webhook_path(self, channel_id, partner_id):\n if not (channel_id or partner_id):\n raise ValueError('channel_id or partner_id must be fill')\n return (f'/channel/{channel_id}' if channel_id else\n f'/partner/{partner_id}')\n\n def create_webhook(self, payload, channel_id=None, partner_id=None):\n \"\"\"Create webhook registration end point to acommerce either using\n channel_id or partner_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n payload(str): webhook data format acommerce\n\n Returns (dict): webhook data informations\n\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url=url, json=payload, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def retrieve_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"Retrieve specific webhook information using webhook_id.\n must supply either partner_id or channel_id\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (dict): webhook data informations\n \"\"\"\n path = 
self._get_webhook_path(channel_id, partner_id)\n path += f'/hooks/{webhook_id}'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.get(url=url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def get_webhook(self, channel_id=None, partner_id=None):\n \"\"\"Get list registered webhook from acommerce using either partner_id\n or channel_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (list): webhook data informations\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def delete_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"remove a registered webhook\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns No Content HTTP 204\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.delete(url, headers=self.headers)\n validate_response(r)\n return {'code': r.status_code, 'message':\n 'Web Hook has been successfully deleted'}\n",
"step-4": "import requests\nfrom urllib.parse import urlparse, urlencode\nfrom json import JSONDecodeError\nfrom requests.exceptions import HTTPError\n\n\ndef validate_response(response):\n \"\"\"\n raise exception if error response occurred\n \"\"\"\n r = response\n try:\n r.raise_for_status()\n except HTTPError as e:\n message = dict(status_code=r.status_code, exception=e)\n try:\n response = r.json()\n message['response'] = response\n except JSONDecodeError as e:\n message['response'] = r.content\n raise HTTPError(message)\n\n\nclass CpmsConnector:\n \"\"\"The CpmsConnector object allow you communicate through\n cpms between application.\n \"\"\"\n ORDER_STATUS = 'NEW', 'IN_PROGRESS', 'COMPLETED', 'CANCELED', 'ERROR'\n\n def __init__(self, config):\n \"\"\"initialize with config\n config(dict): must supply username, api_key, api_url\n \"\"\"\n self.username = config['username']\n self.api_key = config['api_key']\n self.api_url = config['api_url']\n self._token = None\n self._set_token()\n\n @property\n def _fulfillment_url(self):\n netloc = f'fulfillment.{urlparse(self.api_url).netloc}'\n return urlparse(self.api_url)._replace(netloc=netloc).geturl()\n\n def _update_headers(self, token):\n self.headers = {'X-Subject-Token': token}\n\n @property\n def token(self):\n return self._token\n\n def _set_token(self):\n path = '/identity/token'\n payload = {'auth': {'apiKeyCredentials': {'username': self.username,\n 'apiKey': self.api_key}}}\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url, json=payload)\n validate_response(r)\n token = r.json()['token']['token_id']\n self._update_headers(token)\n self._token = token\n\n def get_order(self, channel_id, order_id):\n \"\"\"retrieve single order of sales order\n\n Args:\n url(str): url for retrieval sales order\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def get_orders_status(self, channel_id=None, partner_id=None, list_id=\n None, since=None, order_status=None):\n \"\"\"Get list order status of sales order\n\n Args:\n channel_id(str): channel_id of cpms\n partner_id(str): merchant/partner id of cpms\n list_id(list): list of order id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n order_status(str): (NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)\n\n Returns:\n list: all orders\n \"\"\"\n if order_status and order_status not in self.ORDER_STATUS:\n raise ValueError(\n 'invalid order_status eg. 
(NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)'\n )\n url = urlparse(self._fulfillment_url)\n if channel_id:\n path = f'/channel/{channel_id}'\n elif partner_id:\n path = f'/partner/{partner_id}'\n else:\n raise ValueError('must supply either channel_id or partner_id args'\n )\n path += '/sales-order-status'\n if list_id:\n if len(list_id) > 10:\n raise ValueError(\"list_id can't be more than 10 length\")\n path += '/id'\n query_string = {'id': list_id}\n elif since:\n query_string = {'id': list_id}\n if order_status in self.ORDER_STATUS:\n query_string.update({'orderStatus': order_status})\n else:\n raise ValueError('must supply either list_id or since args')\n query_string = urlencode(query_string, doseq=True)\n url = url._replace(path=path, query=query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n orders = r.json()\n next_url = r.links['next']['url'] if 'next' in r.links else None\n return orders, next_url\n\n def create_order(self, channel_id, order_id, payload):\n \"\"\"create order to acommerce (CPMS)\n\n Args:\n channel_id(str): channel_id of cpms\n order_id(str): order_id of merchant or partner\n payload(dict): order body\n\n Returns:\n response or exception\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.put(url=url, json=payload, headers=self.headers)\n validate_response(r)\n return {'code': r.status_code, 'message':\n 'Order has been successfully created'}\n\n def get_stocks(self, channel_id, partner_id, since):\n \"\"\"Get list stock of partner from specifics channel/marketplace\n\n Args:\n channel_id(str): channel_id cpms\n partner_id(str): partner/merchant id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n\n Returns (list): list of stock\n\n \"\"\"\n path = f'/channel/{channel_id}/allocation/merchant/{partner_id}'\n query_string = urlencode({'since': since})\n url = urlparse(self._fulfillment_url)._replace(path=path, query=\n query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n next_link = r.links['next']['url'] if 'next' in r.links else None\n return {'data': r.json(), 'url': url} if next_link else {'data': r.\n json()}\n\n def _get_webhook_path(self, channel_id, partner_id):\n if not (channel_id or partner_id):\n raise ValueError('channel_id or partner_id must be fill')\n return (f'/channel/{channel_id}' if channel_id else\n f'/partner/{partner_id}')\n\n def create_webhook(self, payload, channel_id=None, partner_id=None):\n \"\"\"Create webhook registration end point to acommerce either using\n channel_id or partner_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n payload(str): webhook data format acommerce\n\n Returns (dict): webhook data informations\n\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url=url, json=payload, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def retrieve_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"Retrieve specific webhook information using webhook_id.\n must supply either partner_id or channel_id\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (dict): webhook data informations\n \"\"\"\n path = 
self._get_webhook_path(channel_id, partner_id)\n path += f'/hooks/{webhook_id}'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.get(url=url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def get_webhook(self, channel_id=None, partner_id=None):\n \"\"\"Get list registered webhook from acommerce using either partner_id\n or channel_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (list): webhook data informations\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def delete_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"remove a registered webhook\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns No Content HTTP 204\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.delete(url, headers=self.headers)\n validate_response(r)\n return {'code': r.status_code, 'message':\n 'Web Hook has been successfully deleted'}\n",
"step-5": "import requests\nfrom urllib.parse import urlparse, urlencode\nfrom json import JSONDecodeError\nfrom requests.exceptions import HTTPError\n\n\ndef validate_response(response):\n \"\"\"\n raise exception if error response occurred\n \"\"\"\n\n r = response\n try:\n r.raise_for_status()\n except HTTPError as e:\n message = dict(status_code=r.status_code, exception=e)\n\n try:\n response = r.json()\n message['response'] = response\n except JSONDecodeError as e:\n message['response'] = r.content\n\n raise HTTPError(message)\n\n\nclass CpmsConnector:\n \"\"\"The CpmsConnector object allow you communicate through\n cpms between application.\n \"\"\"\n\n ORDER_STATUS = ('NEW', 'IN_PROGRESS', 'COMPLETED', 'CANCELED', 'ERROR')\n\n def __init__(self, config):\n \"\"\"initialize with config\n config(dict): must supply username, api_key, api_url\n \"\"\"\n self.username = config['username']\n self.api_key = config['api_key']\n self.api_url = config['api_url']\n self._token = None\n self._set_token()\n\n @property\n def _fulfillment_url(self):\n netloc = f'fulfillment.{urlparse(self.api_url).netloc}'\n return urlparse(self.api_url)._replace(netloc=netloc).geturl()\n\n def _update_headers(self, token):\n self.headers = {\n 'X-Subject-Token': token\n }\n\n @property\n def token(self):\n return self._token\n\n def _set_token(self):\n path = '/identity/token'\n\n payload = {\n \"auth\":\n {\n \"apiKeyCredentials\":\n {\n \"username\": self.username,\n \"apiKey\": self.api_key\n }\n }\n }\n\n url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.post(url, json=payload)\n validate_response(r)\n token = r.json()['token']['token_id']\n self._update_headers(token)\n self._token = token\n\n def get_order(self, channel_id, order_id):\n \"\"\"retrieve single order of sales order\n\n Args:\n url(str): url for retrieval sales order\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n return r.json()\n\n def get_orders_status(self, channel_id=None, partner_id=None, list_id=None,\n since=None, order_status=None):\n \"\"\"Get list order status of sales order\n\n Args:\n channel_id(str): channel_id of cpms\n partner_id(str): merchant/partner id of cpms\n list_id(list): list of order id\n since(str): ISO 8601 format eg. 2015-06-18T10:30:40Z\n order_status(str): (NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)\n\n Returns:\n list: all orders\n \"\"\"\n\n if order_status and order_status not in self.ORDER_STATUS:\n raise ValueError(\n 'invalid order_status eg. 
'\n '(NEW, IN_PROGRESS, COMPLETED, CANCELED, ERROR)'\n )\n\n url = urlparse(self._fulfillment_url)\n\n # make sure channel_id or partner_id being supply\n if channel_id:\n path = f'/channel/{channel_id}'\n\n elif partner_id:\n path = f'/partner/{partner_id}'\n\n else:\n raise ValueError(\n 'must supply either channel_id or partner_id args')\n\n # append sales-order-status path\n path += '/sales-order-status'\n\n # make sure list_id or since being supply\n if list_id:\n if len(list_id) > 10:\n raise ValueError('list_id can\\'t be more than 10 length')\n path += '/id'\n query_string = {'id': list_id}\n\n elif since:\n query_string = {'id': list_id}\n if order_status in self.ORDER_STATUS:\n query_string.update({'orderStatus': order_status})\n else:\n raise ValueError('must supply either list_id or since args')\n\n query_string = urlencode(query_string, doseq=True)\n url = url._replace(path=path, query=query_string).geturl()\n\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n orders = r.json()\n next_url = r.links['next']['url'] if 'next' in r.links else None\n return orders, next_url\n\n def create_order(self, channel_id, order_id, payload):\n \"\"\"create order to acommerce (CPMS)\n\n Args:\n channel_id(str): channel_id of cpms\n order_id(str): order_id of merchant or partner\n payload(dict): order body\n\n Returns:\n response or exception\n \"\"\"\n path = f'/channel/{channel_id}/order/{order_id}'\n url = urlparse(self._fulfillment_url)._replace(path=path).geturl()\n\n r = requests.put(url=url, json=payload, headers=self.headers)\n validate_response(r)\n\n return {\n 'code': r.status_code,\n 'message': 'Order has been successfully created'\n }\n\n def get_stocks(self, channel_id, partner_id, since):\n \"\"\"Get list stock of partner from specifics channel/marketplace\n\n Args:\n channel_id(str): channel_id cpms\n partner_id(str): partner/merchant id\n since(str): ISO 8601 format eg. 
2015-06-18T10:30:40Z\n\n Returns (list): list of stock\n\n \"\"\"\n path = f'/channel/{channel_id}/allocation/merchant/{partner_id}'\n query_string = urlencode({'since': since})\n url = urlparse(self._fulfillment_url)._replace(\n path=path, query=query_string).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n\n next_link = r.links['next']['url'] if 'next' in r.links else None\n return {'data': r.json(), 'url': url} \\\n if next_link else {'data': r.json()}\n\n def _get_webhook_path(self, channel_id, partner_id):\n if not (channel_id or partner_id):\n raise ValueError('channel_id or partner_id must be fill')\n return f'/channel/{channel_id}' \\\n if channel_id else f'/partner/{partner_id}'\n\n def create_webhook(self, payload, channel_id=None, partner_id=None):\n \"\"\"Create webhook registration end point to acommerce either using\n channel_id or partner_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n payload(str): webhook data format acommerce\n\n Returns (dict): webhook data informations\n\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n\n url = urlparse(self.api_url)._replace(path=path).geturl()\n\n r = requests.post(url=url, json=payload, headers=self.headers)\n validate_response(r)\n\n return r.json()\n\n def retrieve_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"Retrieve specific webhook information using webhook_id.\n must supply either partner_id or channel_id\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (dict): webhook data informations\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += f'/hooks/{webhook_id}'\n\n url = urlparse(self.api_url)._replace(path=path).geturl()\n\n r = requests.get(url=url, headers=self.headers)\n validate_response(r)\n\n return r.json()\n\n def get_webhook(self, channel_id=None, partner_id=None):\n \"\"\"Get list registered webhook from acommerce using either partner_id\n or channel_id\n\n Args:\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns (list): webhook data informations\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = url = urlparse(self.api_url)._replace(path=path).geturl()\n r = requests.get(url, headers=self.headers)\n validate_response(r)\n\n return r.json()\n\n def delete_webhook(self, webhook_id, channel_id=None, partner_id=None):\n \"\"\"remove a registered webhook\n\n Args:\n webhook_id: registered webhook id\n channel_id(str): channel_id of acommerce (CPMS)\n partner_id(str): merchant or partner id acommerce (CPMS)\n\n Returns No Content HTTP 204\n \"\"\"\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n\n r = requests.delete(url, headers=self.headers)\n validate_response(r)\n\n return {\n 'code': r.status_code,\n 'message': 'Web Hook has been successfully deleted'\n }\n",
"step-ids": [
13,
16,
17,
19,
20
]
}
|
[
13,
16,
17,
19,
20
] |
import mclient
from mclient import instruments
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
#from pulseseq import sequencer, pulselib
mpl.rcParams['figure.figsize']=[6,4]
qubit_info = mclient.get_qubit_info('qubit_info')
qubit_ef_info = mclient.get_qubit_info('qubit_ef_info')
vspec = instruments['vspec']
awg1 = instruments['AWG1']
qubit_brick = instruments['qubit_brick']
qubit_ef_brick = instruments['qubit_ef_brick']
va_lo = instruments['va_lo']
funcgen = instruments['funcgen']
alazar = instruments['alazar']
spec_brick = instruments['spec_brick']
spec_info = mclient.get_qubit_info('spec_info')
cavity_info = mclient.get_qubit_info('cavity_info')
field = 0.0
temp = 'cd'
#voltage = laser_info.get_DCOffset()
################################################################################################################################################
from scripts.single_qubit import T1measurement, T2measurement
# from scripts.single_qubit import T1measurement_QP, T2measurement_QP
# from scripts.single_qubit import FT1measurement, EFT2measurement, GFT2measurement
# from scripts.single_qubit import efrabi
# from scripts.single_qubit import efrabi_QP
# from scripts.single_qubit import QPdecay
from scripts.single_qubit import rabi
def try_twice(func, N=2, **kwargs):
for i in range(N):
try:
return func(**kwargs)
        except Exception as e:
            print('Error %s' % (e,))
    print('Failed to do %s %s times...' % (func, N))
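# Hypothetical usage sketch for try_twice: retry a flaky measurement,
# forwarding its keyword arguments, e.g.
#     try_twice(do_T1, N=3, qubit_info=qubit_info,
#               delays=np.linspace(0, 60e3, 51))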
# work in progress. For looping over multiple qubits
# def T1T2Loop(qubit_params):
# # from scripts.single_qubit.t1t2_plotting import do_T1_plot, do_T2_plot, do_T2echo_plot
# T1s={}
# T2s={}
# T2Es={}
# rep_rates = [500]
# for qubit in enumerate(qubit_params)
# T1s[qubit] = {'t1s':[], 't1s_err':[], 'ofs':[], 'ofs_err':[], 'amps':[], 'amps_err':[],}
# T2s[qubit] = {'t2s':[], 't2s_err':[], 't2freqs':[], 't2freqs_err':[], 'amps':[], 'amps_err':[], 't22s':[], 't22s_err':[], 't22freqs':[], 't22freqs_err':[], 'amp2s':[], 'amp2s_err':[],}
# T2Es[qubit] = {'t2es':[], 't2es_err':[]}
# for i in range(1000): #set number of repetitions.
# for qubit, params in enumerate(qubit_params)
# qubit_info = params[1]
# qubit_freq = params[2]
# if 1:
# for rep_rate in rep_rates:
# funcgen.set_frequency(rep_rate)
# do_T1_plot(qubit_info, 500, np.concatenate((np.linspace(0, 10e3, 21), np.linspace(11e3, 60e3, 50))), T1s[qubit_info], 300*(qubit_ind+1))
# do_T2_plot(qubit_info, 500, np.linspace(0, 10e3, 101), 1000e3, T2s[qubit_info], 301*(qubit_ind+1), double_freq=False)
# do_T2echo_plot(qubit_info, 500, np.linspace(1e3, 20e3, 101), 500e3, T2Es[qubit_info], 302*(qubit_ind+1))
def do_ROspec_plot(qubit_info, n_avg, freqs, ro_powers, ro_fits, fig_num, var=None):
from scripts.single_cavity import rocavspectroscopy
alazar.set_naverages(n_avg)
rospec = rocavspectroscopy.ROCavSpectroscopy(qubit_info, ro_powers, freqs) #qubit_pulse=np.pi/2
rospec.measure()
plt.close()
ro_fits['x0s'].append(rospec.fit_params[0][2])
ro_fits['x0s_err'].append(rospec.fit_params[1][2])
ro_fits['As'].append(rospec.fit_params[0][1])
ro_fits['As_err'].append(rospec.fit_params[1][1])
ro_fits['ws'].append(rospec.fit_params[0][3])
ro_fits['ws_err'].append(rospec.fit_params[1][3])
if var!=None:
ro_fits['vars'].append(var)
plt.figure(fig_num)
plt.clf()
if ro_fits['vars']==[]:
plt.subplot(311).axis(xmin=-len(ro_fits['x0s'])*0.10, xmax=len(ro_fits['x0s'])*1.10)
plt.errorbar(range(len(ro_fits['x0s'])),ro_fits['x0s'],ro_fits['x0s_err'],fmt='go')
else:
xmin=min(ro_fits['vars'])
xmax=max(ro_fits['vars'])
plt.subplot(311).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(ro_fits['vars'],ro_fits['x0s'],ro_fits['x0s_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Center frequency(MHz)")
if ro_fits['vars']==[]:
plt.subplot(312).axis(xmin=-len(ro_fits['As'])*0.10, xmax=len(ro_fits['As'])*1.10)
plt.errorbar(range(len(ro_fits['As'])),ro_fits['As'],ro_fits['As_err'],fmt='go')
else:
xmin=min(ro_fits['vars'])
xmax=max(ro_fits['vars'])
plt.subplot(312).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(ro_fits['vars'],ro_fits['As'],ro_fits['As_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Amplitude")
if ro_fits['vars']==[]:
plt.subplot(313).axis(xmin=-len(ro_fits['ws'])*0.10, xmax=len(ro_fits['ws'])*1.10)
plt.errorbar(range(len(ro_fits['ws'])),ro_fits['ws'],ro_fits['ws_err'],fmt='go')
else:
xmin=min(ro_fits['vars'])
xmax=max(ro_fits['vars'])
plt.subplot(313).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(ro_fits['vars'],ro_fits['ws'],ro_fits['ws_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Width")
return rospec
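# Hypothetical call sketch: ro_fits must be pre-initialized with the keys
# appended above (the frequencies and powers here are placeholder values):
#     ro_fits = {'x0s': [], 'x0s_err': [], 'As': [], 'As_err': [],
#                'ws': [], 'ws_err': [], 'vars': []}
#     do_ROspec_plot(qubit_info, 2000, np.linspace(9.0e9, 9.1e9, 101),
#                    [1.0], ro_fits, 100)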
def do_spec_plot(qubit_info, n_avg, freqs, spec_params, spec_fits, fig_num, plen=50000, amp=0.01,var=None):
from scripts.single_qubit import spectroscopy as spectroscopy
alazar.set_naverages(n_avg)
s = spectroscopy.Spectroscopy(qubit_info, freqs, spec_params,
plen, amp, plot_seqs=False,subtraction = False) #1=1ns5
s.measure()
plt.close()
spec_fits['x0s'].append(s.fit_params['x0'].value)
spec_fits['x0s_err'].append(s.fit_params['x0'].stderr)
spec_fits['ofs'].append(s.fit_params['ofs'].value)
spec_fits['ofs_err'].append(s.fit_params['ofs'].stderr)
spec_fits['ws'].append(s.fit_params['w'].value)
spec_fits['ws_err'].append(s.fit_params['w'].stderr)
if var!=None:
spec_fits['vars'].append(var)
plt.figure(fig_num)
plt.clf()
if spec_fits['vars']==[]:
plt.subplot(311).axis(xmin=-len(spec_fits['x0s'])*0.10, xmax=len(spec_fits['x0s'])*1.10)
plt.errorbar(range(len(spec_fits['x0s'])),spec_fits['x0s'],spec_fits['x0s_err'],fmt='go')
else:
xmin=min(spec_fits['vars'])
xmax=max(spec_fits['vars'])
plt.subplot(311).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(spec_fits['vars'],spec_fits['x0s'],spec_fits['x0s_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Center frequency(MHz)")
if spec_fits['vars']==[]:
plt.subplot(312).axis(xmin=-len(spec_fits['ofs'])*0.10, xmax=len(spec_fits['ofs'])*1.10)
plt.errorbar(range(len(spec_fits['ofs'])),spec_fits['ofs'],spec_fits['ofs_err'],fmt='go')
else:
xmin=min(spec_fits['vars'])
xmax=max(spec_fits['vars'])
plt.subplot(312).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(spec_fits['vars'],spec_fits['ofs'],spec_fits['ofs_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Offset")
if spec_fits['vars']==[]:
plt.subplot(313).axis(xmin=-len(spec_fits['ws'])*0.10, xmax=len(spec_fits['ws'])*1.10)
plt.errorbar(range(len(spec_fits['ws'])),spec_fits['ws'],spec_fits['ws_err'],fmt='go')
else:
xmin=min(spec_fits['vars'])
xmax=max(spec_fits['vars'])
plt.subplot(313).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(spec_fits['vars'],spec_fits['ws'],spec_fits['ws_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Width")
return s
def do_T1(qubit_info, delays, double_exp = False):
from scripts.single_qubit import T1measurement
t1 = T1measurement.T1Measurement(qubit_info, delays)
t1.data.set_attrs(field_current=field)
t1.data.set_attrs(temperature=temp)
# t1.data.set_attrs(laser_power=voltage)
t1.measure()
plt.close()
return t1
def do_T1_plot(qubit_info, n_avg, delays, t1_fits, fig_num, double_exp = False, var=None):
alazar.set_naverages(n_avg)
t1 = do_T1(qubit_info, delays)
t1_fits['t1s'].append(t1.fit_params['tau'].value)
t1_fits['t1s_err'].append(t1.fit_params['tau'].stderr)
t1_fits['ofs'].append(t1.fit_params['ofs'].value)
t1_fits['ofs_err'].append(t1.fit_params['ofs'].stderr)
t1_fits['amps'].append(t1.fit_params['A'].value)
t1_fits['amps_err'].append(t1.fit_params['A'].stderr)
if var!=None:
t1_fits['vars'].append(var)
plt.figure(fig_num)
plt.clf()
if t1_fits['vars']==[]:
plt.subplot(211).axis(xmin=-len(t1_fits['t1s'])*0.10, xmax=len(t1_fits['t1s'])*1.10)
plt.errorbar(range(len(t1_fits['t1s'])),t1_fits['t1s'],t1_fits['t1s_err'],fmt='go')
else:
xmin=min(t1_fits['vars'])
xmax=max(t1_fits['vars'])
plt.subplot(211).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(t1_fits['vars'],t1_fits['t1s'],t1_fits['t1s_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("T1(us)")
if t1_fits['vars']==[]:
plt.subplot(212).axis(xmin=-len(t1_fits['t1s'])*0.10, xmax=len(t1_fits['t1s'])*1.10)
plt.errorbar(range(len(t1_fits['amps'])),t1_fits['amps'],t1_fits['amps_err'],fmt='go')
else:
xmin=min(t1_fits['vars'])
xmax=max(t1_fits['vars'])
plt.subplot(212).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(t1_fits['vars'],t1_fits['amps'],t1_fits['amps_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Amplitude")
def do_T1_phonon(qubit_info, delays, amp, piLength, sigma = 10):
from scripts.single_qubit import stark_swap
t1 = stark_swap.phonon_T1(qubit_info,
delays, phonon_pi = piLength, amp = amp,
sigma = sigma,
)
t1.measure()
plt.close()
return t1
def do_T1_phonon_plot(qubit_info, n_avg, delays, amp, piLength, t1_fits, fig_num, sigma = 10, var=None):
alazar.set_naverages(n_avg)
t1 = do_T1_phonon(qubit_info, delays, amp, piLength, sigma)
t1_fits['t1s'].append(t1.fit_params['tau'].value)
t1_fits['t1s_err'].append(t1.fit_params['tau'].stderr)
t1_fits['ofs'].append(t1.fit_params['ofs'].value)
t1_fits['ofs_err'].append(t1.fit_params['ofs'].stderr)
t1_fits['amps'].append(t1.fit_params['A'].value)
t1_fits['amps_err'].append(t1.fit_params['A'].stderr)
    if var is not None:
t1_fits['vars'].append(var)
plt.figure(fig_num)
plt.clf()
if t1_fits['vars']==[]:
plt.subplot(211).axis(xmin=-len(t1_fits['t1s'])*0.10, xmax=len(t1_fits['t1s'])*1.10)
plt.errorbar(range(len(t1_fits['t1s'])),t1_fits['t1s'],t1_fits['t1s_err'],fmt='go')
else:
xmin=min(t1_fits['vars'])
xmax=max(t1_fits['vars'])
plt.subplot(211).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(t1_fits['vars'],t1_fits['t1s'],t1_fits['t1s_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("T1(us)")
if t1_fits['vars']==[]:
        plt.subplot(212).axis(xmin=-len(t1_fits['amps'])*0.10, xmax=len(t1_fits['amps'])*1.10)
plt.errorbar(range(len(t1_fits['amps'])),t1_fits['amps'],t1_fits['amps_err'],fmt='go')
else:
xmin=min(t1_fits['vars'])
xmax=max(t1_fits['vars'])
plt.subplot(212).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax))
plt.errorbar(t1_fits['vars'],t1_fits['amps'],t1_fits['amps_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Amplitude")
def do_T2(qubit_info, delays, detune, fix_freq=None, fit_type='exp_decay_sine'):
from scripts.single_qubit import T2measurement
t2 = T2measurement.T2Measurement(qubit_info, delays, detune=detune, fix_freq = fix_freq, fit_type = fit_type)
t2.data.set_attrs(field_current=field)
t2.data.set_attrs(temperature=temp)
# t2.data.set_attrs(laser_power=voltage)
t2.measure()
plt.close()
return t2
def do_T2_plot(qubit_info, n_avg, delays, detune, t2_fits, fig_num, fix_freq=None, fit_type='exp_decay_sine', var=None):
alazar.set_naverages(n_avg)
t2 = do_T2(qubit_info, delays, detune, fix_freq, fit_type)
    if t2 is not None:
t2_fits['t2s'].append(t2.fit_params['tau'].value)
t2_fits['t2s_err'].append(t2.fit_params['tau'].stderr)
t2_fits['t2freqs'].append(t2.fit_params['f'].value*1000 - detune/1e6)
t2_fits['t2freqs_err'].append(t2.fit_params['f'].stderr*1000.0)
t2_fits['amps'].append(t2.fit_params['A'].value)
t2_fits['amps_err'].append(t2.fit_params['A'].stderr)
# if double_freq == True:
# t2_fits['t22s'].append(t2.fit_params['tau2'].value)
# t2_fits['t22s_err'].append(t2.fit_params['tau2'].stderr)
# t2_fits['t22freqs'].append(t2.fit_params['freq2'].value*1000 -detune/1e6)
# t2_fits['t22freqs_err'].append(t2.fit_params['freq2'].stderr*1000.0)
# t2_fits['amp2s'].append(t2.fit_params['amp2'].value)
# t2_fits['amp2s_err'].append(t2.fit_params['amp2'].stderr)
if var!=None:
t2_fits['vars'].append(var)
if fit_type == 'exp_decay_sine':
plt.figure(fig_num)
plt.clf()
if t2_fits['vars']==[]:
plt.subplot(211).axis(xmin=-len(t2_fits['t2s'])*0.10, xmax=len(t2_fits['t2s'])*1.10, ymin= min(t2_fits['t2s'])*0.7, ymax=max(t2_fits['t2s'])*1.3)
plt.errorbar(range(len(t2_fits['t2s'])),t2_fits['t2s'],t2_fits['t2s_err'],fmt='rs')
else:
xmin=min(t2_fits['vars'])
xmax=max(t2_fits['vars'])
plt.subplot(211).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax), ymin= min(t2_fits['t2s'])*0.7, ymax=max(t2_fits['t2s'])*1.3)
plt.errorbar(t2_fits['vars'],t2_fits['t2s'],t2_fits['t2s_err'],fmt='rs')
plt.xlabel("Measurement iterations")
plt.ylabel("T2(us)")
if t2_fits['vars']==[]:
plt.subplot(212).axis(xmin=-len(t2_fits['t2freqs'])*0.10, xmax=len(t2_fits['t2freqs'])*1.10, ymin=min(t2_fits['t2freqs'])-0.02, ymax=max(t2_fits['t2freqs'])+0.02)
plt.errorbar(range(len(t2_fits['t2freqs'])),t2_fits['t2freqs'],t2_fits['t2freqs_err'],fmt='b^')
else:
xmin=min(t2_fits['vars'])
xmax=max(t2_fits['vars'])
plt.subplot(212).axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax), ymin=min(t2_fits['t2freqs'])-0.02, ymax=max(t2_fits['t2freqs'])+0.02)
plt.errorbar(t2_fits['vars'],t2_fits['t2freqs'],t2_fits['t2freqs_err'],fmt='b^')
plt.xlabel("Measurement iterations")
plt.ylabel("Ramsey Freq.(MHz) (= Actual Qubit Freq. - Drive Freq.)")
# if fit_type == 'exp_decay_sine':
# plt.figure(fig_num)
# plt.clf()
# plt.subplot(311).axis(xmin=-len(t2_fits['t2s'])*0.10, xmax=len(t2_fits['t2s'])*1.10, ymin= min(t2_fits['t2s'])*0.7, ymax=max(t2_fits['t22s'])*1.3)
# plt.errorbar(range(len(t2_fits['t2s'])),t2_fits['t2s'],t2_fits['t2s_err'],fmt='rs')
# plt.errorbar(range(len(t2_fits['t22s'])),t2_fits['t22s'],t2_fits['t22s_err'],fmt='b^')
# plt.ylabel("T2(us)")
# plt.subplot(312).axis(xmin=-len(t2_fits['t2freqs'])*0.10, xmax=len(t2_fits['t2freqs'])*1.10,ymin= min(min(t2_fits['t2freqs']),min(t2_fits['t22freqs']))-0.02, ymax=max(max(t2_fits['t2freqs']), max(t2_fits['t22freqs']))+0.02)
# plt.errorbar(range(len(t2_fits['t2freqs'])),t2_fits['t2freqs'],t2_fits['t2freqs_err'],fmt='rs')
# plt.errorbar(range(len(t2_fits['t22freqs'])),t2_fits['t22freqs'],t2_fits['t22freqs_err'],fmt='b^')
# plt.ylabel("Ramsey Freq.(MHz) (= Actual Qubit Freq. - Drive Freq.)")
# plt.subplot(313).axis(xmin=-len(t2_fits['amps'])*0.10, xmax=len(t2_fits['amps'])*1.10,ymin= min(t2_fits['amp2s'])*0.8, ymax=max(t2_fits['amps'])*1.2)
# plt.errorbar(range(len(t2_fits['amps'])),t2_fits['amps'],t2_fits['amps_err'],fmt='rs')
# plt.errorbar(range(len(t2_fits['amp2s'])),t2_fits['amp2s'],t2_fits['amp2s_err'],fmt='b^')
# plt.xlabel("Measurement iterations")
# plt.ylabel("Amplitudes (AU)")
# plt.semilogy()
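
# Likewise, do_T2_plot expects a caller-built accumulator. A minimal sketch (keys
# taken from the appends above; the commented double-freq branch would additionally
# need 't22s', 't22freqs', 'amp2s' and their *_err counterparts):
def make_t2_fits():
    """Return an empty accumulator for do_T2_plot."""
    return {'t2s': [], 't2s_err': [],
            't2freqs': [], 't2freqs_err': [],
            'amps': [], 'amps_err': [],
            'vars': []}
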
def do_T2echo(qubit_info, delays, detune, fix_freq=None, fit_type='exp_decay_sine'):
# t2e = T2measurement.T2Measurement(qubit_info, delays, detune, echotype=T2measurement.ECHO_HAHN, title='T2 Echo')
from scripts.single_qubit import T2measurement
t2e = T2measurement.T2Measurement(qubit_info, delays, detune, echotype=T2measurement.ECHO_CPMG, fix_freq = fix_freq, fit_type = fit_type, title='T2 Echo')
t2e.data.set_attrs(field_current=field)
t2e.data.set_attrs(temperature=temp)
# t2e.data.set_attrs(laser_power=voltage)
t2e.measure()
plt.close()
return t2e
def do_T2echo_plot(qubit_info, n_avg, delays, detune, t2E_fits, fig_num, fix_freq=None, fit_type='exp_decay_sine', var=None):
alazar.set_naverages(n_avg)
t2e = do_T2echo(qubit_info, delays, detune, fix_freq, fit_type)
if fit_type == 'gaussian_decay':
tname = 'sigma'
else:
tname = 'tau'
    if t2e is not None:
t2E_fits['t2es'].append(t2e.fit_params[tname].value)
t2E_fits['t2es_err'].append(t2e.fit_params[tname].stderr)
if var!=None:
t2E_fits['vars'].append(var)
plt.figure(fig_num)
plt.clf()
if t2E_fits['vars']==[]:
plt.axis(xmin=-len(t2E_fits['t2es'])*0.10, xmax=len(t2E_fits['t2es'])*1.10, ymin= min(t2E_fits['t2es'])*0.8, ymax=max(t2E_fits['t2es'])*1.2)
plt.errorbar(range(len(t2E_fits['t2es'])),t2E_fits['t2es'],t2E_fits['t2es_err'],fmt='mv') # magenta color and v-shape markers
else:
xmin=min(t2E_fits['vars'])
xmax=max(t2E_fits['vars'])
plt.axis(xmin=xmin-0.1*abs(xmin), xmax=xmax+0.1*abs(xmax), ymin= min(t2E_fits['t2es'])*0.8, ymax=max(t2E_fits['t2es'])*1.2)
plt.errorbar(t2E_fits['vars'],t2E_fits['t2es'],t2E_fits['t2es_err'],fmt='mv') # magenta color and v-shape markers
plt.xlabel("Measurement iterations")
plt.ylabel("T2Echo(us)")
def smart_T1_delays(T1_int=90e3, QPT1=1.5e6, half_decay_point=1e6, eff_T1_delay=800.0, probe_point=0.5, meas_per_QPinj=30, meas_per_reptime=5):
    """
    T1_int = 90e3 # Intrinsic T1 of the qubit
    QPT1 = 1.5e6 # Guessed lifetime of the quasiparticles
    half_decay_point = 1e6 # The QP_delay time that would make the qubit relax halfway to the ground state with T1_delay=0, i.e. relax during the readout pulse
    eff_T1_delay = 800.0 # The effective T1_delay due to the finite length of the readout pulse, usually taken as readout pulse length/2
    probe_point = 0.5 # Target decay fraction at readout for each chosen delay
    meas_per_QPinj = 30 # Measurements per QP-injection cycle
    meas_per_reptime = 5 # Measurements per repetition period
    """
# rep_time = 1.0e9/fg.get_frequency()
# T1_QPref = 1/(np.log(2)/eff_T1_delay-1/T1_int) # T1 at half decay point = effective readout delay/ln(2), excluding intrinsic part giving the T1 due to quasiparticles
# n_delayless = int(half_decay_point/rep_time) # Number of points with T1_delay = 0
#
## QP_times_s = np.linspace(rep_time, half_decay_point, n_delayless)
# T1_delays_s = np.linspace(0, 0, n_delayless)
# QP_times_l = np.linspace(half_decay_point+rep_time, meas_per_QPinj*rep_time, meas_per_QPinj-n_delayless)
# T1_delays_l = np.log(2)/(1/T1_int+1/T1_QPref*np.exp(-(QP_times_l-half_decay_point)/QPT1))-eff_T1_delay
## QP_times = np.concatenate((QP_times_s, QP_times_l))
# T1_delays = np.concatenate((T1_delays_s, T1_delays_l))
rep_time = 1.0e9/fg.get_frequency()
n_points = meas_per_QPinj * meas_per_reptime
step_time = rep_time / meas_per_reptime
T1_QPref = 1/(np.log(2)/eff_T1_delay-1/T1_int) # T1 at half decay point = effective readout delay/ln(2), excluding intrinsic part giving the T1 due to quasiparticles
    QP_times = np.linspace(0, (n_points-1)*step_time, n_points)
    # Estimated total T1 vs. time since injection: intrinsic rate plus a QP rate that decays with time constant QPT1
    T1_est = 1/(1/T1_int+1/T1_QPref*np.exp(-(QP_times-half_decay_point)/QPT1))
    # Pick each delay so the qubit has decayed to probe_point by readout:
    # exp(-(T1_delay + eff_T1_delay)/T1_est) = probe_point
    T1_delays = -np.log(probe_point)*T1_est-eff_T1_delay
    for j, delay in enumerate(T1_delays):
        if delay < 0:
            T1_delays[j] = 0.0  # delays cannot be negative; clamp to zero
    return T1_delays
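
# Worked derivation for T1_QPref above (a sketch, following the inline comment):
# at QP_time == half_decay_point the qubit should relax halfway to the ground
# state during the effective readout delay, so
#     exp(-eff_T1_delay / T1_total) = 1/2   =>   T1_total = eff_T1_delay / ln(2).
# Splitting the total rate into intrinsic and quasiparticle parts,
#     1/T1_total = 1/T1_int + 1/T1_QPref
#     =>  T1_QPref = 1 / (ln(2)/eff_T1_delay - 1/T1_int),
# which is the expression used in the code.
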
def do_QPdecay(qubit_info, T1_delay, **kwargs):
    from scripts.single_qubit import QPdecay  # local import, following the other helpers
    rep_time = 1e9/fg.get_frequency()
    qpd = QPdecay.QPdecay(qubit_info, T1_delay, rep_time, **kwargs)
qpd.data.set_attrs(field_current=field)
qpd.data.set_attrs(temperature=temp)
# qpd.data.set_attrs(T1_delay=T1_delay)
qpd.data.set_attrs(inj_power=ag3.get_power())
# qpd.data.set_attrs(laser_voltage=laser_info.get_DCOffset())
# qpd.measure()
# plt.close()
return qpd
def do_QPdecay_plot(qubit_info, n_avg, T1_delay, qpd_fits, fig_num, **kwargs):
alz.set_naverages(n_avg)
ag3.set_rf_on(True)
qpd = do_QPdecay(qubit_info, T1_delay, **kwargs)
qpd.measure()
plt.close()
    if qpd is not None:
qpd_fits['qpt1s'].append(qpd.fit_params['tau'].value/1000.0)
qpd_fits['qpt1s_err'].append(qpd.fit_params['tau'].stderr/1000.0)
qpd_fits['qpofs'].append(qpd.fit_params['ofs'].value)
qpd_fits['qpofs_err'].append(qpd.fit_params['ofs'].stderr)
# qpd_fits['amps'].append(qpd.fit_params['amplitude'].value)
qpofs_array = np.array(qpd_fits['qpofs'])
qpofs_err_array = np.array(qpd_fits['qpofs_err'])
plt.figure(fig_num)
plt.clf()
plt.subplot(211).axis(xmin=-len(qpd_fits['qpt1s'])*0.10, xmax=len(qpd_fits['qpt1s'])*1.10)#, ymin=0, ymax=1)
plt.errorbar(range(len(qpd_fits['qpt1s'])),qpd_fits['qpt1s'],qpd_fits['qpt1s_err'],fmt='go')
plt.ylabel("Tau QP(ms)")
plt.subplot(212).axis(xmin=-len(np.array(qpd_fits['qpofs']))*0.10, xmax=len(np.array(qpd_fits['qpofs']))*1.10)#, ymin=10, ymax=30)
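    # error bars for the plotted 1/ofs follow propagation of uncertainty: sigma(1/x) = sigma_x / x**2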
plt.errorbar(range(len(qpofs_array)), 1/qpofs_array, qpofs_err_array/qpofs_array/qpofs_array, fmt='b^')
plt.xlabel("Measurement iterations")
plt.ylabel("Qubit T1-floor(us)")
ag3.set_rf_on(False)
return qpd
def do_FT1(qubit_info, ef_info, delays):
    from scripts.single_qubit import FT1measurement  # local import, following the other helpers
    ft1 = FT1measurement.FT1Measurement(qubit_info, ef_info, delays)
ft1.data.set_attrs(field_current=field)
ft1.data.set_attrs(temperature=temp)
ft1.measure()
plt.close()
return ft1
def do_FT1_plot(qubit_info, ef_info, n_avg, delays, ft1_fits, fig_num):
alz.set_naverages(n_avg)
brick1.set_rf_on(True)
ft1 = do_FT1(qubit_info, ef_info, delays)
    if ft1 is not None:
ft1_fits['ft1s'].append(ft1.fit_params['tau'].value/1000.0)
ft1_fits['ft1s_err'].append(ft1.fit_params['tau'].stderr/1000.0)
ft1_fits['ofs'].append(ft1.fit_params['ofs'].value)
ft1_fits['amps'].append(ft1.fit_params['amplitude'].value)
plt.figure(fig_num)
plt.clf()
plt.axis(xmin=-len(ft1_fits['ft1s'])*0.10, xmax=len(ft1_fits['ft1s'])*1.10, ymin= min(ft1_fits['ft1s'])*0.8, ymax=max(ft1_fits['ft1s'])*1.2)
plt.errorbar(range(len(ft1_fits['ft1s'])),ft1_fits['ft1s'],ft1_fits['ft1s_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("FT1(us)")
brick1.set_rf_on(False)
def do_EFT2(qubit_info, ef_info, delays, detune, double_freq=False, QP_injection_delay=None, QP_injection_length=10e3):
    from scripts.single_qubit import EFT2measurement  # local import, following the other helpers
    # NOTE: QP_injection_delay/QP_injection_length are accepted but not forwarded here.
    eft2 = EFT2measurement.EFT2Measurement(qubit_info, ef_info, delays, detune=detune, double_freq=double_freq)
eft2.data.set_attrs(field_current=field)
eft2.data.set_attrs(temperature=temp)
eft2.measure()
plt.close()
return eft2
def do_EFT2_plot(qubit_info, ef_info, n_avg, delays, detune, ft2_fits, fig_num, double_freq=False, QP_injection_delay=None, QP_injection_length=10e3, laser_power = None):
alz.set_naverages(n_avg)
brick1.set_rf_on(True)
eft2 = do_EFT2(qubit_info, ef_info, delays, detune, double_freq, QP_injection_delay, QP_injection_length)
    if eft2 is not None:
ft2_fits['eft2s'].append(eft2.fit_params['tau'].value/1000)
ft2_fits['eft2s_err'].append(eft2.fit_params['tau'].stderr/1000.0)
ft2_fits['eft2freqs'].append(eft2.fit_params['freq'].value*1000 - detune/1e6)
ft2_fits['eft2freqs_err'].append(eft2.fit_params['freq'].stderr*1000.0)
ft2_fits['eft2amps'].append(eft2.fit_params['amp'].value)
ft2_fits['eft2amps_err'].append(eft2.fit_params['amp'].stderr)
        if double_freq:
ft2_fits['eft22s'].append(eft2.fit_params['tau2'].value/1000)
ft2_fits['eft22s_err'].append(eft2.fit_params['tau2'].stderr/1000.0)
ft2_fits['eft22freqs'].append(eft2.fit_params['freq2'].value*1000 -detune/1e6)
ft2_fits['eft22freqs_err'].append(eft2.fit_params['freq2'].stderr*1000.0)
ft2_fits['eft2amp2s'].append(eft2.fit_params['amp2'].value)
ft2_fits['eft2amp2s_err'].append(eft2.fit_params['amp2'].stderr)
if QP_injection_delay is not None:
ft2_fits['eft2s_QP'].append(eft2.fit_params['tau'].value/1000)
ft2_fits['eft2s_QP_err'].append(eft2.fit_params['tau'].stderr/1000.0)
ft2_fits['eft2freqs_QP'].append(eft2.fit_params['freq'].value*1000 -detune/1e6)
ft2_fits['eft2freqs_QP_err'].append(eft2.fit_params['freq'].stderr*1000.0)
if double_freq == False and QP_injection_delay is None:
plt.figure(fig_num)
plt.clf()
plt.subplot(211).axis(xmin=-len(ft2_fits['eft2s'])*0.10, xmax=len(ft2_fits['eft2s'])*1.10, ymin= min(ft2_fits['eft2s'])*0.7, ymax=max(ft2_fits['eft2s'])*1.3)
plt.errorbar(range(len(ft2_fits['eft2s'])),ft2_fits['eft2s'],ft2_fits['eft2s_err'],fmt='rs')
plt.ylabel("EFT2(us)")
plt.subplot(212).axis(xmin=-len(ft2_fits['eft2freqs'])*0.10, xmax=len(ft2_fits['eft2freqs'])*1.10, ymin=min(ft2_fits['eft2freqs'])-0.02, ymax=max(ft2_fits['eft2freqs'])+0.02)
plt.errorbar(range(len(ft2_fits['eft2freqs'])),ft2_fits['eft2freqs'],ft2_fits['eft2freqs_err'],fmt='b^')
plt.xlabel("Measurement iterations")
plt.ylabel("Ramsey Freq.(MHz) (= Actual Qubit Freq. - Drive Freq.)")
if double_freq == False and QP_injection_delay is not None:
plt.figure(fig_num)
plt.clf()
plt.subplot(211).axis(xmin=-len(ft2_fits['eft2s_QP'])*0.10, xmax=len(ft2_fits['eft2s_QP'])*1.10, ymin= min(ft2_fits['eft2s_QP'])*0.7, ymax=max(ft2_fits['eft2s_QP'])*1.3)
plt.errorbar(range(len(ft2_fits['eft2s_QP'])),ft2_fits['eft2s_QP'],ft2_fits['eft2s_QP_err'],fmt='rs')
plt.ylabel("EFT2 with QP injection (us)")
plt.subplot(212).axis(xmin=-len(ft2_fits['eft2freqs_QP'])*0.10, xmax=len(ft2_fits['eft2freqs_QP'])*1.10, ymin=min(ft2_fits['eft2freqs_QP'])-0.02, ymax=max(ft2_fits['eft2freqs_QP'])+0.02)
plt.errorbar(range(len(ft2_fits['eft2freqs_QP'])),ft2_fits['eft2freqs_QP'],ft2_fits['eft2freqs_QP_err'],fmt='b^')
plt.xlabel("Measurement iterations")
plt.ylabel("Ramsey Freq.(MHz) (= Actual Qubit Freq. - Drive Freq.)")
if double_freq is True:
plt.figure(fig_num)
plt.clf()
plt.subplot(311).axis(xmin=-len(ft2_fits['eft2s'])*0.10, xmax=len(ft2_fits['eft2s'])*1.10, ymin= min(ft2_fits['eft2s'])*0.7, ymax=max(ft2_fits['eft22s'])*1.3)
plt.errorbar(range(len(ft2_fits['eft2s'])),ft2_fits['eft2s'],ft2_fits['eft2s_err'],fmt='rs')
plt.errorbar(range(len(ft2_fits['eft22s'])),ft2_fits['eft22s'],ft2_fits['eft22s_err'],fmt='b^')
plt.ylabel("EFT2(us)")
plt.subplot(312).axis(xmin=-len(ft2_fits['eft2freqs'])*0.10, xmax=len(ft2_fits['eft2freqs'])*1.10,ymin= min(min(ft2_fits['eft2freqs']),min(ft2_fits['eft22freqs']))-0.02, ymax=max(max(ft2_fits['eft2freqs']), max(ft2_fits['eft22freqs']))+0.02)
plt.errorbar(range(len(ft2_fits['eft2freqs'])),ft2_fits['eft2freqs'],ft2_fits['eft2freqs_err'],fmt='rs')
plt.errorbar(range(len(ft2_fits['eft22freqs'])),ft2_fits['eft22freqs'],ft2_fits['eft22freqs_err'],fmt='b^')
plt.ylabel("Ramsey Freq.(MHz) (= Actual Qubit Freq. - Drive Freq.)")
plt.subplot(313).axis(xmin=-len(ft2_fits['eft2amps'])*0.10, xmax=len(ft2_fits['eft2amps'])*1.10,ymin= min(ft2_fits['eft2amp2s'])*0.8, ymax=max(ft2_fits['eft2amps'])*1.2)
plt.errorbar(range(len(ft2_fits['eft2amps'])),ft2_fits['eft2amps'],ft2_fits['eft2amps_err'],fmt='rs')
plt.errorbar(range(len(ft2_fits['eft2amp2s'])),ft2_fits['eft2amp2s'],ft2_fits['eft2amp2s_err'],fmt='b^')
plt.xlabel("Measurement iterations")
plt.ylabel("Amplitudes (AU)")
brick1.set_rf_on(False)
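
# do_EFT2_plot draws from a large, partly conditional key set. A minimal sketch of
# its accumulator (keys taken from the appends above; the GFT2 helpers below mirror
# this with 'gft2*' keys):
def make_eft2_fits():
    """Return an empty accumulator for do_EFT2_plot."""
    keys = ['eft2s', 'eft2freqs', 'eft2amps',        # always filled
            'eft22s', 'eft22freqs', 'eft2amp2s',     # double_freq=True
            'eft2s_QP', 'eft2freqs_QP']              # QP_injection_delay given
    d = {}
    for k in keys:
        d[k] = []
        d[k + '_err'] = []
    return d
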
def do_EFT2echo(qubit_info, ef_info, delays, detune, laser_power = None):
    from scripts.single_qubit import EFT2measurement  # local import, following the other helpers
    eft2e = EFT2measurement.EFT2Measurement(qubit_info, ef_info, delays, detune, echotype=EFT2measurement.ECHO_HAHN, title='EFT2 Echo')
eft2e.data.set_attrs(field_current=field)
eft2e.data.set_attrs(temperature=temp)
# t2e.data.set_attrs(laser_power=voltage)
eft2e.measure()
plt.close()
return eft2e
def do_EFT2echo_plot(qubit_info, ef_info, n_avg, delays, detune, t2E_fits, fig_num, laser_power = None):
alz.set_naverages(n_avg)
brick1.set_rf_on(True)
eft2e = do_EFT2echo(qubit_info, ef_info, delays, detune, laser_power = laser_power)
    if eft2e is not None:
t2E_fits['eft2es'].append(eft2e.fit_params['tau'].value/1000)
t2E_fits['eft2es_err'].append(eft2e.fit_params['tau'].stderr/1000)
plt.figure(fig_num)
plt.clf()
plt.axis(xmin=-len(t2E_fits['eft2es'])*0.10, xmax=len(t2E_fits['eft2es'])*1.10, ymin= min(t2E_fits['eft2es'])*0.8, ymax=max(t2E_fits['eft2es'])*1.2)
plt.errorbar(range(len(t2E_fits['eft2es'])),t2E_fits['eft2es'],t2E_fits['eft2es_err'],fmt='mv') # magenta color and v-shape markers
plt.xlabel("Measurement iterations")
plt.ylabel("EFT2Echo(us)")
brick1.set_rf_on(False)
def do_GFT2(qubit_info, ef_info, delays, detune, double_freq=False, QP_injection_delay=None, QP_injection_length=10e3):
    from scripts.single_qubit import GFT2measurement  # local import, following the other helpers
    # NOTE: QP_injection_delay/QP_injection_length are accepted but not forwarded here.
    gft2 = GFT2measurement.GFT2Measurement(qubit_info, ef_info, delays, detune=detune, double_freq=double_freq)
gft2.data.set_attrs(field_current=field)
gft2.data.set_attrs(temperature=temp)
gft2.measure()
plt.close()
return gft2
def do_GFT2_plot(qubit_info, ef_info, n_avg, delays, detune, ft2_fits, fig_num, double_freq=False, QP_injection_delay=None, QP_injection_length=10e3, laser_power = None):
alz.set_naverages(n_avg)
brick1.set_rf_on(True)
gft2 = do_GFT2(qubit_info, ef_info, delays, detune, double_freq, QP_injection_delay, QP_injection_length)
    if gft2 is not None:
ft2_fits['gft2s'].append(gft2.fit_params['tau'].value/1000)
ft2_fits['gft2s_err'].append(gft2.fit_params['tau'].stderr/1000.0)
ft2_fits['gft2freqs'].append(gft2.fit_params['freq'].value*1000 - detune/1e6)
ft2_fits['gft2freqs_err'].append(gft2.fit_params['freq'].stderr*1000.0)
ft2_fits['gft2amps'].append(gft2.fit_params['amp'].value)
ft2_fits['gft2amps_err'].append(gft2.fit_params['amp'].stderr)
        if double_freq:
ft2_fits['gft22s'].append(gft2.fit_params['tau2'].value/1000)
ft2_fits['gft22s_err'].append(gft2.fit_params['tau2'].stderr/1000.0)
ft2_fits['gft22freqs'].append(gft2.fit_params['freq2'].value*1000 -detune/1e6)
ft2_fits['gft22freqs_err'].append(gft2.fit_params['freq2'].stderr*1000.0)
ft2_fits['gft2amp2s'].append(gft2.fit_params['amp2'].value)
ft2_fits['gft2amp2s_err'].append(gft2.fit_params['amp2'].stderr)
if QP_injection_delay is not None:
ft2_fits['gft2s_QP'].append(gft2.fit_params['tau'].value/1000)
ft2_fits['gft2s_QP_err'].append(gft2.fit_params['tau'].stderr/1000.0)
ft2_fits['gft2freqs_QP'].append(gft2.fit_params['freq'].value*1000 -detune/1e6)
ft2_fits['gft2freqs_QP_err'].append(gft2.fit_params['freq'].stderr*1000.0)
if double_freq == False and QP_injection_delay is None:
plt.figure(fig_num)
plt.clf()
plt.subplot(211).axis(xmin=-len(ft2_fits['gft2s'])*0.10, xmax=len(ft2_fits['gft2s'])*1.10, ymin= min(ft2_fits['gft2s'])*0.7, ymax=max(ft2_fits['gft2s'])*1.3)
plt.errorbar(range(len(ft2_fits['gft2s'])),ft2_fits['gft2s'],ft2_fits['gft2s_err'],fmt='ks')
plt.ylabel("GFT2(us)")
plt.subplot(212).axis(xmin=-len(ft2_fits['gft2freqs'])*0.10, xmax=len(ft2_fits['gft2freqs'])*1.10, ymin=min(ft2_fits['gft2freqs'])-0.02, ymax=max(ft2_fits['gft2freqs'])+0.02)
plt.errorbar(range(len(ft2_fits['gft2freqs'])),ft2_fits['gft2freqs'],ft2_fits['gft2freqs_err'],fmt='c^')
plt.xlabel("Measurement iterations")
plt.ylabel("Ramsey Freq.(MHz) (= Actual Qubit Freq. - Drive Freq.)")
if double_freq == False and QP_injection_delay is not None:
plt.figure(fig_num)
plt.clf()
plt.subplot(211).axis(xmin=-len(ft2_fits['gft2s_QP'])*0.10, xmax=len(ft2_fits['gft2s_QP'])*1.10, ymin= min(ft2_fits['gft2s_QP'])*0.7, ymax=max(ft2_fits['gft2s_QP'])*1.3)
plt.errorbar(range(len(ft2_fits['gft2s_QP'])),ft2_fits['gft2s_QP'],ft2_fits['gft2s_QP_err'],fmt='ks')
plt.ylabel("GFT2 with QP injection (us)")
plt.subplot(212).axis(xmin=-len(ft2_fits['gft2freqs_QP'])*0.10, xmax=len(ft2_fits['gft2freqs_QP'])*1.10, ymin=min(ft2_fits['gft2freqs_QP'])-0.02, ymax=max(ft2_fits['gft2freqs_QP'])+0.02)
plt.errorbar(range(len(ft2_fits['gft2freqs_QP'])),ft2_fits['gft2freqs_QP'],ft2_fits['gft2freqs_QP_err'],fmt='c^')
plt.xlabel("Measurement iterations")
plt.ylabel("Ramsey Freq.(MHz) (= Actual Qubit Freq. - Drive Freq.)")
if double_freq is True:
plt.figure(fig_num)
plt.clf()
plt.subplot(311).axis(xmin=-len(ft2_fits['gft2s'])*0.10, xmax=len(ft2_fits['gft2s'])*1.10, ymin= min(ft2_fits['gft2s'])*0.7, ymax=max(ft2_fits['gft22s'])*1.3)
plt.errorbar(range(len(ft2_fits['gft2s'])),ft2_fits['gft2s'],ft2_fits['gft2s_err'],fmt='ks')
plt.errorbar(range(len(ft2_fits['gft22s'])),ft2_fits['gft22s'],ft2_fits['gft22s_err'],fmt='c^')
plt.ylabel("GFT2(us)")
plt.subplot(312).axis(xmin=-len(ft2_fits['gft2freqs'])*0.10, xmax=len(ft2_fits['gft2freqs'])*1.10,ymin= min(min(ft2_fits['gft2freqs']),min(ft2_fits['gft22freqs']))-0.02, ymax=max(max(ft2_fits['gft2freqs']), max(ft2_fits['gft22freqs']))+0.02)
plt.errorbar(range(len(ft2_fits['gft2freqs'])),ft2_fits['gft2freqs'],ft2_fits['gft2freqs_err'],fmt='ks')
plt.errorbar(range(len(ft2_fits['gft22freqs'])),ft2_fits['gft22freqs'],ft2_fits['gft22freqs_err'],fmt='c^')
plt.ylabel("Ramsey Freq.(MHz) (= Actual Qubit Freq. - Drive Freq.)")
plt.subplot(313).axis(xmin=-len(ft2_fits['gft2amps'])*0.10, xmax=len(ft2_fits['gft2amps'])*1.10,ymin= min(ft2_fits['gft2amp2s'])*0.8, ymax=max(ft2_fits['gft2amps'])*1.2)
plt.errorbar(range(len(ft2_fits['gft2amps'])),ft2_fits['gft2amps'],ft2_fits['gft2amps_err'],fmt='ks')
plt.errorbar(range(len(ft2_fits['gft2amp2s'])),ft2_fits['gft2amp2s'],ft2_fits['gft2amp2s_err'],fmt='c^')
plt.xlabel("Measurement iterations")
plt.ylabel("Amplitudes (AU)")
brick1.set_rf_on(False)
def do_GFT2echo(qubit_info, ef_info, delays, detune, laser_power = None):
    from scripts.single_qubit import GFT2measurement, EFT2measurement  # local imports, following the other helpers
    # NOTE: the echo type constant is taken from EFT2measurement, as in the original code.
    gft2e = GFT2measurement.GFT2Measurement(qubit_info, ef_info, delays, detune, echotype=EFT2measurement.ECHO_HAHN, title='GFT2 Echo')
gft2e.data.set_attrs(field_current=field)
gft2e.data.set_attrs(temperature=temp)
# t2e.data.set_attrs(laser_power=voltage)
gft2e.measure()
plt.close()
return gft2e
def do_GFT2echo_plot(qubit_info, ef_info, n_avg, delays, detune, t2E_fits, fig_num, laser_power = None):
alz.set_naverages(n_avg)
brick1.set_rf_on(True)
gft2e = do_GFT2echo(qubit_info, ef_info, delays, detune, laser_power = laser_power)
    if gft2e is not None:
t2E_fits['gft2es'].append(gft2e.fit_params['tau'].value/1000)
t2E_fits['gft2es_err'].append(gft2e.fit_params['tau'].stderr/1000)
plt.figure(fig_num)
plt.clf()
plt.axis(xmin=-len(t2E_fits['gft2es'])*0.10, xmax=len(t2E_fits['gft2es'])*1.10, ymin= min(t2E_fits['gft2es'])*0.8, ymax=max(t2E_fits['gft2es'])*1.2)
plt.errorbar(range(len(t2E_fits['gft2es'])),t2E_fits['gft2es'],t2E_fits['gft2es_err'],fmt='yv') # yellow color and v-shape markers
plt.xlabel("Measurement iterations")
plt.ylabel("GFT2Echo(us)")
brick1.set_rf_on(False)
def do_FT2echo_plot(qubit_info, ef_info, n_avg, delays, detune, t2E_fits, fig_num, laser_power = None):
alz.set_naverages(n_avg)
brick1.set_rf_on(True)
eft2e = do_EFT2echo(qubit_info, ef_info, delays, detune, laser_power = laser_power)
    if eft2e is not None:
t2E_fits['eft2es'].append(eft2e.fit_params['tau'].value/1000)
t2E_fits['eft2es_err'].append(eft2e.fit_params['tau'].stderr/1000)
plt.figure(fig_num)
plt.clf()
plt.axis(xmin=-len(t2E_fits['eft2es'])*0.10, xmax=len(t2E_fits['eft2es'])*1.10, ymin= min(t2E_fits['eft2es'])*0.8, ymax=max(t2E_fits['eft2es'])*1.2)
plt.errorbar(range(len(t2E_fits['eft2es'])),t2E_fits['eft2es'],t2E_fits['eft2es_err'],fmt='mv', label='EFT2echo') # magenta color and v-shape markers
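        # note: the GFT2echo trace drawn here still shows data from the previous iteration; it is redrawn below once gft2e has been measured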
plt.errorbar(range(len(t2E_fits['gft2es'])),t2E_fits['gft2es'],t2E_fits['gft2es_err'],fmt='yv', label='GFT2echo') # yellow color and v-shape markers
plt.xlabel("Measurement iterations")
plt.ylabel("FT2Echo(us)")
gft2e = do_GFT2echo(qubit_info, ef_info, delays, detune, laser_power = laser_power)
    if gft2e is not None:
t2E_fits['gft2es'].append(gft2e.fit_params['tau'].value/1000)
t2E_fits['gft2es_err'].append(gft2e.fit_params['tau'].stderr/1000)
plt.figure(fig_num)
plt.clf()
plt.axis(xmin=-len(t2E_fits['gft2es'])*0.10, xmax=len(t2E_fits['gft2es'])*1.10, ymin= min(t2E_fits['eft2es'])*0.8, ymax=max(t2E_fits['gft2es'])*1.2)
plt.errorbar(range(len(t2E_fits['eft2es'])),t2E_fits['eft2es'],t2E_fits['eft2es_err'],fmt='mv', label='EFT2echo') # magenta color and v-shape markers
plt.errorbar(range(len(t2E_fits['gft2es'])),t2E_fits['gft2es'],t2E_fits['gft2es_err'],fmt='yv', label='GFT2echo') # yellow color and v-shape markers
plt.xlabel("Measurement iterations")
plt.ylabel("FT2Echo(us)")
brick1.set_rf_on(False)
def do_rabiup(qubit_info, ef_info, amps, QP_injection_delay=None, laser_power=None):
    from scripts.single_qubit import efrabi, efrabi_QP  # local imports, following the other helpers
    if QP_injection_delay is None:
        rabiup = efrabi.EFRabi(qubit_info, ef_info, amps, laser_power = laser_power)
    else:
        rabiup = efrabi_QP.EFRabi_QP(qubit_info, ef_info, amps, QP_injection_delay, laser_power = laser_power)
rabiup.data.set_attrs(QP_delay=QP_injection_delay)
rabiup.data.set_attrs(field_current=field)
rabiup.data.set_attrs(temperature=temp)
rabiup.data.set_attrs(laser_power=laser_power)
rabiup.measure()
plt.close()
return rabiup
def do_rabinoup(qubit_info, ef_info, amps, force_period, QP_injection_delay=None, laser_power=None):
    from scripts.single_qubit import efrabi, efrabi_QP  # local imports, following the other helpers
    if QP_injection_delay is None:
        rabinoup = efrabi.EFRabi(qubit_info, ef_info, amps, first_pi=False, force_period=force_period, laser_power = laser_power)
    else:
        rabinoup = efrabi_QP.EFRabi_QP(qubit_info, ef_info, amps, first_pi=False, force_period=force_period, QP_delay=QP_injection_delay)
rabinoup.data.set_attrs(QP_delay=QP_injection_delay)
rabinoup.data.set_attrs(field_current=field)
rabinoup.data.set_attrs(temperature=temp)
rabinoup.data.set_attrs(laser_power=laser_power)
rabinoup.measure()
#population = 100*rabinoup.fit_params['amp'].value/(rabiup.fit_params['amp'].value+rabinoup.fit_params['amp'].value)
plt.close()
return rabinoup
def do_population_plot(qubit_info, ef_info, n_avg_rabiup, n_avg_rabinoup, amps, pops_fits, fig_num, QP_injection_delay=None, laser_power = None):
brick1.set_rf_on(True)
alz.set_naverages(n_avg_rabiup)
rabiup = do_rabiup(qubit_info, ef_info, amps, QP_injection_delay, laser_power = laser_power)
    if rabiup is not None:
pops_fits['rabiupAmp'].append(abs(rabiup.fit_params['amp'].value))
pops_fits['rabiupAmp_err'].append(rabiup.fit_params['amp'].stderr)
plt.figure(fig_num).show()
# plt.clf()
plt.subplot(211).axis(xmin=-len(pops_fits['rabiupAmp'])*0.10, xmax=len(pops_fits['rabiupAmp'])*1.10, ymin=min(pops_fits['rabiupAmp'])*0.7, ymax=max(pops_fits['rabiupAmp'])*1.3)
plt.errorbar(range(len(pops_fits['rabiupAmp'])),pops_fits['rabiupAmp'],pops_fits['rabiupAmp_err'],fmt='b^')
#plt.xlabel("Measurement iterations")
plt.ylabel("Rabiup")
alz.set_naverages(n_avg_rabinoup)
rabinoup = do_rabinoup(qubit_info, ef_info, amps, force_period=rabiup.fit_params['period'].value, QP_injection_delay=QP_injection_delay, laser_power = laser_power)
    if rabinoup is not None:
pops_fits['rabinoupAmp'].append(abs(rabinoup.fit_params['amp'].value))
pops_fits['rabinoupAmp_err'].append(rabinoup.fit_params['amp'].stderr)
#population.append(population)
plt.figure(fig_num).show()
plt.subplot(212).axis(xmin=-len(pops_fits['rabinoupAmp'])*0.10, xmax=len(pops_fits['rabinoupAmp'])*1.10, ymin=0.0, ymax=max(pops_fits['rabinoupAmp'])*2.0)
plt.errorbar(range(len(pops_fits['rabinoupAmp'])),pops_fits['rabinoupAmp'],pops_fits['rabinoupAmp_err'],fmt='go')
plt.xlabel("Measurement iterations")
plt.ylabel("Rabinoup")
brick1.set_rf_on(False)
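
# The commented-out line in do_rabinoup indicates how the paired Rabi amplitudes
# combine into an excited-state population estimate. A hypothetical helper
# implementing that formula (an addition; not called by the original flow):
def ef_population_pct(rabiup_amp, rabinoup_amp):
    """Percent e-state population from paired EF Rabi amplitudes."""
    return 100.0 * abs(rabinoup_amp) / (abs(rabiup_amp) + abs(rabinoup_amp))
# e.g. ef_population_pct(pops_fits['rabiupAmp'][-1], pops_fits['rabinoupAmp'][-1])
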
'''
def do_qubitSSBspec():
    from scripts.single_qubit import ssbspec
    qubitSSBspec = ssbspec.SSBSpec(qubit_info, np.linspace(-3e6, 3e6, 51), plot_seqs=False)
    qubitSSBspec.measure()
    return qubitSSBspec
'''
plt.ylabel(\"FT2Echo(us)\")\r\n brick1.set_rf_on(False)\r\n\r\n\r\ndef do_rabiup(qubit_info, ef_info, amps, QP_injection_delay=None, laser_power= None):\r\n if QP_injection_delay == None:\r\n rabiup = efrabi.EFRabi(qubit_info, ef_info, amps, laser_power = laser_power)\r\n else:\r\n rabiup = efrabi_QP.EFRabi_QP(qubit_info, ef_info, amps, QP_injection_delay, laser_power = laser_power)\r\n rabiup.data.set_attrs(QP_delay=QP_injection_delay)\r\n rabiup.data.set_attrs(field_current=field)\r\n rabiup.data.set_attrs(temperature=temp)\r\n rabiup.data.set_attrs(laser_power=laser_power)\r\n rabiup.measure()\r\n plt.close()\r\n return rabiup\r\n\r\ndef do_rabinoup(qubit_info, ef_info, amps, force_period, QP_injection_delay=None, laser_power=None):\r\n if QP_injection_delay == None:\r\n rabinoup = efrabi.EFRabi(qubit_info, ef_info, amps, first_pi=False, force_period=force_period,laser_power = laser_power)\r\n else:\r\n rabinoup = efrabi_QP.EFRabi_QP(qubit_info, ef_info, amps, first_pi=False, force_period=force_period, QP_delay=QP_injection_delay)\r\n rabinoup.data.set_attrs(QP_delay=QP_injection_delay)\r\n rabinoup.data.set_attrs(field_current=field)\r\n rabinoup.data.set_attrs(temperature=temp)\r\n rabinoup.data.set_attrs(laser_power=laser_power)\r\n rabinoup.measure()\r\n #population = 100*rabinoup.fit_params['amp'].value/(rabiup.fit_params['amp'].value+rabinoup.fit_params['amp'].value)\r\n plt.close()\r\n return rabinoup\r\n\r\ndef do_population_plot(qubit_info, ef_info, n_avg_rabiup, n_avg_rabinoup, amps, pops_fits, fig_num, QP_injection_delay=None, laser_power = None):\r\n brick1.set_rf_on(True)\r\n alz.set_naverages(n_avg_rabiup)\r\n rabiup = do_rabiup(qubit_info, ef_info, amps, QP_injection_delay, laser_power = laser_power)\r\n if rabiup!=None:\r\n pops_fits['rabiupAmp'].append(abs(rabiup.fit_params['amp'].value))\r\n pops_fits['rabiupAmp_err'].append(rabiup.fit_params['amp'].stderr)\r\n plt.figure(fig_num).show()\r\n# plt.clf()\r\n plt.subplot(211).axis(xmin=-len(pops_fits['rabiupAmp'])*0.10, xmax=len(pops_fits['rabiupAmp'])*1.10, ymin=min(pops_fits['rabiupAmp'])*0.7, ymax=max(pops_fits['rabiupAmp'])*1.3)\r\n plt.errorbar(range(len(pops_fits['rabiupAmp'])),pops_fits['rabiupAmp'],pops_fits['rabiupAmp_err'],fmt='b^')\r\n #plt.xlabel(\"Measurement iterations\")\r\n plt.ylabel(\"Rabiup\")\r\n\r\n alz.set_naverages(n_avg_rabinoup)\r\n rabinoup = do_rabinoup(qubit_info, ef_info, amps, force_period=rabiup.fit_params['period'].value, QP_injection_delay=QP_injection_delay, laser_power = laser_power)\r\n if rabinoup!=None:\r\n pops_fits['rabinoupAmp'].append(abs(rabinoup.fit_params['amp'].value))\r\n pops_fits['rabinoupAmp_err'].append(rabinoup.fit_params['amp'].stderr)\r\n #population.append(population)\r\n plt.figure(fig_num).show()\r\n plt.subplot(212).axis(xmin=-len(pops_fits['rabinoupAmp'])*0.10, xmax=len(pops_fits['rabinoupAmp'])*1.10, ymin=0.0, ymax=max(pops_fits['rabinoupAmp'])*2.0)\r\n plt.errorbar(range(len(pops_fits['rabinoupAmp'])),pops_fits['rabinoupAmp'],pops_fits['rabinoupAmp_err'],fmt='go')\r\n plt.xlabel(\"Measurement iterations\")\r\n plt.ylabel(\"Rabinoup\")\r\n brick1.set_rf_on(False)\r\n\r\n'''\r\ndef do_qubitSSBspec()\r\n from scripts.single_qubit import ssbspec\r\n qubitSSBspec = ssbspec.SSBSpec(qubit_info, np.linspace(-3e6, 3e6, 51), plot_seqs=False)\r\n qubitSSBspec.measure()\r\n return qubitSSBspec\r\n'''\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django.db import models
from django.conf import settings
from django.utils.text import slugify
from ckeditor_uploader.fields import RichTextUploadingField
# Create your models here.
class topic(models.Model):
name = models.CharField(max_length=255, primary_key=True)
    showname = models.CharField(max_length=255, null=True)
def __str__(self):
return self.name
class article(models.Model):
title = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255, unique=True, blank=True, editable=True, null=True)
topic = models.ForeignKey(topic, on_delete=models.CASCADE)
author = models.CharField(max_length=255)
opening = models.TextField()
body = RichTextUploadingField()
date = models.DateTimeField(auto_now_add=True)
    image = models.ImageField(null=True)
view = models.IntegerField(default=0, null=True)
def __str__(self):
return self.title
def save(self, *args, **kwargs):
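        # Note: the slug is regenerated from the title on every save, so
        # renaming an article changes its URL; setting it only when empty
        # would keep existing URLs stable.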
self.slug = slugify(self.title)
super(article, self).save(*args, **kwargs)
class Comment(models.Model):
post = models.ForeignKey(article, on_delete=models.CASCADE, related_name='comments')
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
body = models.TextField()
date = models.DateTimeField(auto_now_add=True)
|
normal
|
{
"blob_id": "31801f62942337b0cdf0e022dc75a9e125be54e3",
"index": 4191,
"step-1": "<mask token>\n\n\nclass article(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n\n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE,\n related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n",
"step-2": "<mask token>\n\n\nclass topic(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass article(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique=True, blank=True,\n editable=True, null=True)\n topic = models.ForeignKey(topic, on_delete=models.CASCADE)\n author = models.CharField(max_length=255)\n opening = models.TextField()\n body = RichTextUploadingField()\n date = models.DateTimeField(auto_now_add=True)\n image = models.ImageField(null=True)\n view = models.IntegerField(default=0, null=True)\n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n\n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE,\n related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n",
"step-3": "<mask token>\n\n\nclass topic(models.Model):\n name = models.CharField(max_length=255, primary_key=True)\n showname = models.CharField(max_length=255, null=True)\n\n def __str__(self):\n return self.name\n\n\nclass article(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique=True, blank=True,\n editable=True, null=True)\n topic = models.ForeignKey(topic, on_delete=models.CASCADE)\n author = models.CharField(max_length=255)\n opening = models.TextField()\n body = RichTextUploadingField()\n date = models.DateTimeField(auto_now_add=True)\n image = models.ImageField(null=True)\n view = models.IntegerField(default=0, null=True)\n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n\n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE,\n related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n",
"step-4": "from django.db import models\nfrom django.conf import settings\nfrom django.utils.text import slugify\nfrom six import python_2_unicode_compatible\nfrom ckeditor_uploader.fields import RichTextUploadingField\nfrom ckeditor.fields import RichTextField\n\n\nclass topic(models.Model):\n name = models.CharField(max_length=255, primary_key=True)\n showname = models.CharField(max_length=255, null=True)\n\n def __str__(self):\n return self.name\n\n\nclass article(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique=True, blank=True,\n editable=True, null=True)\n topic = models.ForeignKey(topic, on_delete=models.CASCADE)\n author = models.CharField(max_length=255)\n opening = models.TextField()\n body = RichTextUploadingField()\n date = models.DateTimeField(auto_now_add=True)\n image = models.ImageField(null=True)\n view = models.IntegerField(default=0, null=True)\n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n\n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE,\n related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n",
"step-5": "from django.db import models\nfrom django.conf import settings\nfrom django.utils.text import slugify\nfrom six import python_2_unicode_compatible\nfrom ckeditor_uploader.fields import RichTextUploadingField\nfrom ckeditor.fields import RichTextField\n# Create your models here.\nclass topic(models.Model):\n name = models.CharField(max_length=255, primary_key=True)\n showname = models.CharField(max_length=255, null= True)\n\n def __str__(self):\n return self.name\n\nclass article(models.Model):\n title = models.CharField(max_length=255)\n slug = models.SlugField(max_length=255, unique= True, blank=True, editable=True, null = True)\n topic = models.ForeignKey(topic, on_delete=models.CASCADE)\n author = models.CharField(max_length=255)\n opening = models.TextField()\n body = RichTextUploadingField()\n date = models.DateTimeField(auto_now_add=True)\n image = models.ImageField(null = True)\n view = models.IntegerField(default=0, null=True)\n \n\n def __str__(self):\n return self.title\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.title)\n super(article, self).save(*args, **kwargs)\n \n\nclass Comment(models.Model):\n post = models.ForeignKey(article, on_delete=models.CASCADE, related_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n body = models.TextField()\n date = models.DateTimeField(auto_now_add=True)\n\n\n\n ",
"step-ids": [
5,
7,
9,
10,
11
]
}
|
[
5,
7,
9,
10,
11
] |
"""Main application for FastAPI"""
from typing import Dict
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi
from cool_seq_tool.routers import default, mane, mappings, SERVICE_NAME
from cool_seq_tool.version import __version__
app = FastAPI(
docs_url=f"/{SERVICE_NAME}",
openapi_url=f"/{SERVICE_NAME}/openapi.json",
swagger_ui_parameters={"tryItOutEnabled": True}
)
app.include_router(default.router)
app.include_router(mane.router)
app.include_router(mappings.router)
def custom_openapi() -> Dict:
"""Generate custom fields for OpenAPI response."""
if app.openapi_schema:
return app.openapi_schema
openapi_schema = get_openapi(
title="The GenomicMedLab Cool Seq Tool",
version=__version__,
description="Common Operations On Lots-of Sequences Tool.",
routes=app.routes
)
openapi_schema["info"]["contact"] = {
"name": "Alex H. Wagner",
"email": "[email protected]",
"url": "https://www.nationwidechildrens.org/specialties/institute-for-genomic-medicine/research-labs/wagner-lab" # noqa: E501
}
app.openapi_schema = openapi_schema
return app.openapi_schema
app.openapi = custom_openapi
|
normal
|
{
"blob_id": "c6fa8c33630fc2f7ffb08aace1a260e6805ddfa2",
"index": 7670,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp.include_router(default.router)\napp.include_router(mane.router)\napp.include_router(mappings.router)\n\n\ndef custom_openapi() ->Dict:\n \"\"\"Generate custom fields for OpenAPI response.\"\"\"\n if app.openapi_schema:\n return app.openapi_schema\n openapi_schema = get_openapi(title='The GenomicMedLab Cool Seq Tool',\n version=__version__, description=\n 'Common Operations On Lots-of Sequences Tool.', routes=app.routes)\n openapi_schema['info']['contact'] = {'name': 'Alex H. Wagner', 'email':\n '[email protected]', 'url':\n 'https://www.nationwidechildrens.org/specialties/institute-for-genomic-medicine/research-labs/wagner-lab'\n }\n app.openapi_schema = openapi_schema\n return app.openapi_schema\n\n\n<mask token>\n",
"step-3": "<mask token>\napp = FastAPI(docs_url=f'/{SERVICE_NAME}', openapi_url=\n f'/{SERVICE_NAME}/openapi.json', swagger_ui_parameters={\n 'tryItOutEnabled': True})\napp.include_router(default.router)\napp.include_router(mane.router)\napp.include_router(mappings.router)\n\n\ndef custom_openapi() ->Dict:\n \"\"\"Generate custom fields for OpenAPI response.\"\"\"\n if app.openapi_schema:\n return app.openapi_schema\n openapi_schema = get_openapi(title='The GenomicMedLab Cool Seq Tool',\n version=__version__, description=\n 'Common Operations On Lots-of Sequences Tool.', routes=app.routes)\n openapi_schema['info']['contact'] = {'name': 'Alex H. Wagner', 'email':\n '[email protected]', 'url':\n 'https://www.nationwidechildrens.org/specialties/institute-for-genomic-medicine/research-labs/wagner-lab'\n }\n app.openapi_schema = openapi_schema\n return app.openapi_schema\n\n\napp.openapi = custom_openapi\n",
"step-4": "<mask token>\nfrom typing import Dict\nfrom fastapi import FastAPI\nfrom fastapi.openapi.utils import get_openapi\nfrom cool_seq_tool.routers import default, mane, mappings, SERVICE_NAME\nfrom cool_seq_tool.version import __version__\napp = FastAPI(docs_url=f'/{SERVICE_NAME}', openapi_url=\n f'/{SERVICE_NAME}/openapi.json', swagger_ui_parameters={\n 'tryItOutEnabled': True})\napp.include_router(default.router)\napp.include_router(mane.router)\napp.include_router(mappings.router)\n\n\ndef custom_openapi() ->Dict:\n \"\"\"Generate custom fields for OpenAPI response.\"\"\"\n if app.openapi_schema:\n return app.openapi_schema\n openapi_schema = get_openapi(title='The GenomicMedLab Cool Seq Tool',\n version=__version__, description=\n 'Common Operations On Lots-of Sequences Tool.', routes=app.routes)\n openapi_schema['info']['contact'] = {'name': 'Alex H. Wagner', 'email':\n '[email protected]', 'url':\n 'https://www.nationwidechildrens.org/specialties/institute-for-genomic-medicine/research-labs/wagner-lab'\n }\n app.openapi_schema = openapi_schema\n return app.openapi_schema\n\n\napp.openapi = custom_openapi\n",
"step-5": "\"\"\"Main application for FastAPI\"\"\"\nfrom typing import Dict\n\nfrom fastapi import FastAPI\nfrom fastapi.openapi.utils import get_openapi\n\n\nfrom cool_seq_tool.routers import default, mane, mappings, SERVICE_NAME\nfrom cool_seq_tool.version import __version__\n\n\napp = FastAPI(\n docs_url=f\"/{SERVICE_NAME}\",\n openapi_url=f\"/{SERVICE_NAME}/openapi.json\",\n swagger_ui_parameters={\"tryItOutEnabled\": True}\n)\n\n\napp.include_router(default.router)\napp.include_router(mane.router)\napp.include_router(mappings.router)\n\n\ndef custom_openapi() -> Dict:\n \"\"\"Generate custom fields for OpenAPI response.\"\"\"\n if app.openapi_schema:\n return app.openapi_schema\n openapi_schema = get_openapi(\n title=\"The GenomicMedLab Cool Seq Tool\",\n version=__version__,\n description=\"Common Operations On Lots-of Sequences Tool.\",\n routes=app.routes\n )\n\n openapi_schema[\"info\"][\"contact\"] = {\n \"name\": \"Alex H. Wagner\",\n \"email\": \"[email protected]\",\n \"url\": \"https://www.nationwidechildrens.org/specialties/institute-for-genomic-medicine/research-labs/wagner-lab\" # noqa: E501\n }\n app.openapi_schema = openapi_schema\n return app.openapi_schema\n\n\napp.openapi = custom_openapi\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
class product(object):
def __init__(self, item_name, price, weight, brand, status = "for sale"):
self.item_name = item_name
self.price = price
self.weight = weight
self.brand = brand
self.cost = price
self.status = status
self.displayInfo()
    def displayInfo(self):
        print("Item name:", self.item_name)
        print("Price:", self.price)
        print("Weight:", self.weight)
        print("Brand:", self.brand)
        print("Cost:", self.cost)
        print("Status:", self.status)
        return self
def sell(self):
self.status = "Sold"
return self
def addTax(self, num):
self.cost = self.cost * (1+num)
return self
def Return(self, reason):
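        # Refund policy: defective items are fully refunded and flagged,
        # opened items are restocked at 80% of the current cost, and
        # unopened ("Box") returns simply go back on sale.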
if reason == "Defective":
self.cost = 0
self.status = reason
elif reason == "Opened":
self.cost = self.cost * 0.80
self.status = "for sale"
elif reason == "Box":
self.status = "for sale"
return self
print "add items to inv"
product1 = product("Kona Dew", 499, 1.2, "Kona")
product2 = product("Kona Dew Plus", 799, 1.5, "Kona")
product3 = product("Kona Dr.Dew", 999, 1.2, "Kona")
product1.addTax(0.10)
product2.addTax(0.15)
product3.addTax(0.11)
print "add tax"
product1.displayInfo()
product2.displayInfo()
product3.displayInfo()
product1.sell()
product2.sell()
product3.sell()
print "sell items"
product1.displayInfo()
product2.displayInfo()
product3.displayInfo()
product1.Return("Defective")
product2.Return("Box")
product3.Return("Opened")
print "return items"
product1.displayInfo()
product2.displayInfo()
product3.displayInfo()
|
normal
|
{
"blob_id": "303d56c18cce922ace45de1b8e195ebfdd874e23",
"index": 7394,
"step-1": "class product(object):\n def __init__(self, item_name, price, weight, brand, status = \"for sale\"):\n self.item_name = item_name\n self.price = price\n self.weight = weight\n self.brand = brand\n self.cost = price\n self.status = status\n self.displayInfo()\n def displayInfo(self):\n print \"Item name:\", self.item_name\n print \"Price:\", self.price\n print \"Weight:\", self.weight\n print \"Brand:\", self.brand\n print \"Cost:\", self.cost\n print \"Status:\", self.status\n return self\n def sell(self):\n self.status = \"Sold\"\n return self\n def addTax(self, num):\n self.cost = self.cost * (1+num)\n return self\n def Return(self, reason):\n if reason == \"Defective\":\n self.cost = 0\n self.status = reason\n elif reason == \"Opened\":\n self.cost = self.cost * 0.80\n self.status = \"for sale\"\n elif reason == \"Box\":\n self.status = \"for sale\"\n return self\nprint \"add items to inv\"\nproduct1 = product(\"Kona Dew\", 499, 1.2, \"Kona\")\nproduct2 = product(\"Kona Dew Plus\", 799, 1.5, \"Kona\")\nproduct3 = product(\"Kona Dr.Dew\", 999, 1.2, \"Kona\")\nproduct1.addTax(0.10)\nproduct2.addTax(0.15)\nproduct3.addTax(0.11)\nprint \"add tax\"\nproduct1.displayInfo()\nproduct2.displayInfo()\nproduct3.displayInfo()\nproduct1.sell()\nproduct2.sell()\nproduct3.sell()\nprint \"sell items\"\nproduct1.displayInfo()\nproduct2.displayInfo()\nproduct3.displayInfo()\nproduct1.Return(\"Defective\")\nproduct2.Return(\"Box\")\nproduct3.Return(\"Opened\")\nprint \"return items\"\nproduct1.displayInfo()\nproduct2.displayInfo()\nproduct3.displayInfo()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 31 14:35:49 2019
@author: devinpowers
"""
# Lab 1 in CSE 231
#Quadratic Formula
# Find the roots in the Quadratic Formula
a = float(input("Enter the coeddicient a: "))
b = float(input("Enter the coeddicient b: "))
c = float(input("Enter the coeddicient c: "))
print (" Coefficients:")
print( " Coefficient of a = ", a)
print( " Coefficient of b = ", b)
print( " Coefficient of c = ", c)
root_1 = (-b+(b**2-4*a*c)**(0.5))/(2*a)
root_2 = (-b-(b**2-4*a*c)**(0.5))/(2*a)
print("The roots of the equation:")
print( " Root 1 =", root_1)
print( " Root 2 =", root_2)
|
normal
|
{
"blob_id": "2acfd0bbad68bb9d55aeb39b180f4326a225f6d5",
"index": 1218,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(' Coefficients:')\nprint(' Coefficient of a = ', a)\nprint(' Coefficient of b = ', b)\nprint(' Coefficient of c = ', c)\n<mask token>\nprint('The roots of the equation:')\nprint(' Root 1 =', root_1)\nprint(' Root 2 =', root_2)\n",
"step-3": "<mask token>\na = float(input('Enter the coeddicient a: '))\nb = float(input('Enter the coeddicient b: '))\nc = float(input('Enter the coeddicient c: '))\nprint(' Coefficients:')\nprint(' Coefficient of a = ', a)\nprint(' Coefficient of b = ', b)\nprint(' Coefficient of c = ', c)\nroot_1 = (-b + (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)\nroot_2 = (-b - (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)\nprint('The roots of the equation:')\nprint(' Root 1 =', root_1)\nprint(' Root 2 =', root_2)\n",
"step-4": "<mask token>\nimport math\na = float(input('Enter the coeddicient a: '))\nb = float(input('Enter the coeddicient b: '))\nc = float(input('Enter the coeddicient c: '))\nprint(' Coefficients:')\nprint(' Coefficient of a = ', a)\nprint(' Coefficient of b = ', b)\nprint(' Coefficient of c = ', c)\nroot_1 = (-b + (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)\nroot_2 = (-b - (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)\nprint('The roots of the equation:')\nprint(' Root 1 =', root_1)\nprint(' Root 2 =', root_2)\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sat Aug 31 14:35:49 2019\n\n@author: devinpowers\n\"\"\"\n\n# Lab 1 in CSE 231\n#Quadratic Formula\n# Find the roots in the Quadratic Formula\n \nimport math\n\na = float(input(\"Enter the coeddicient a: \"))\nb = float(input(\"Enter the coeddicient b: \"))\nc = float(input(\"Enter the coeddicient c: \"))\n\nprint (\" Coefficients:\")\nprint( \" Coefficient of a = \", a)\nprint( \" Coefficient of b = \", b)\nprint( \" Coefficient of c = \", c)\n\nroot_1 = (-b+(b**2-4*a*c)**(0.5))/(2*a)\nroot_2 = (-b-(b**2-4*a*c)**(0.5))/(2*a)\n\nprint(\"The roots of the equation:\")\nprint( \" Root 1 =\", root_1)\nprint( \" Root 2 =\", root_2)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.urls import path
from .authentication import GetToken, RegisterUserAPIView
from .resurses import *
urlpatterns = [
path('register/', RegisterUserAPIView.as_view()),
path('get/token/', GetToken.as_view()),
path('card/list/', ShowCardsAPIView.as_view()),
path('card/create/', CreateCardAPIView.as_view()),
path('card/<int:pk>/status/raise/', RaiseStatusAPIView.as_view()),
path('card/<int:pk>/status/omit/', OmitStatusAPIView.as_view()),
path('card/<int:pk>/delete/', DeleteCardAPIView.as_view()),
path('card/<int:pk>/update/', UpdateCardAPIView.as_view()),
path('card/get/', GetCardSListAPIView.as_view()),
]
|
normal
|
{
"blob_id": "aac334256c1e05ef33a54da19925911af6645a10",
"index": 9529,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('register/', RegisterUserAPIView.as_view()), path(\n 'get/token/', GetToken.as_view()), path('card/list/', ShowCardsAPIView.\n as_view()), path('card/create/', CreateCardAPIView.as_view()), path(\n 'card/<int:pk>/status/raise/', RaiseStatusAPIView.as_view()), path(\n 'card/<int:pk>/status/omit/', OmitStatusAPIView.as_view()), path(\n 'card/<int:pk>/delete/', DeleteCardAPIView.as_view()), path(\n 'card/<int:pk>/update/', UpdateCardAPIView.as_view()), path('card/get/',\n GetCardSListAPIView.as_view())]\n",
"step-3": "from django.urls import path\nfrom .authentication import GetToken, RegisterUserAPIView\nfrom .resurses import *\nurlpatterns = [path('register/', RegisterUserAPIView.as_view()), path(\n 'get/token/', GetToken.as_view()), path('card/list/', ShowCardsAPIView.\n as_view()), path('card/create/', CreateCardAPIView.as_view()), path(\n 'card/<int:pk>/status/raise/', RaiseStatusAPIView.as_view()), path(\n 'card/<int:pk>/status/omit/', OmitStatusAPIView.as_view()), path(\n 'card/<int:pk>/delete/', DeleteCardAPIView.as_view()), path(\n 'card/<int:pk>/update/', UpdateCardAPIView.as_view()), path('card/get/',\n GetCardSListAPIView.as_view())]\n",
"step-4": "from django.urls import path\n\nfrom .authentication import GetToken, RegisterUserAPIView\nfrom .resurses import *\n\nurlpatterns = [\n path('register/', RegisterUserAPIView.as_view()),\n path('get/token/', GetToken.as_view()),\n path('card/list/', ShowCardsAPIView.as_view()),\n path('card/create/', CreateCardAPIView.as_view()),\n path('card/<int:pk>/status/raise/', RaiseStatusAPIView.as_view()),\n path('card/<int:pk>/status/omit/', OmitStatusAPIView.as_view()),\n path('card/<int:pk>/delete/', DeleteCardAPIView.as_view()),\n path('card/<int:pk>/update/', UpdateCardAPIView.as_view()),\n path('card/get/', GetCardSListAPIView.as_view()),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from airflow import DAG
from airflow.operators import BashOperator
from datetime import datetime, timedelta
# --------------------------------------------------------------------------------
# set default arguments
# --------------------------------------------------------------------------------
default_args = {
'owner': 'Jaimin',
'depends_on_past': False,
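    # Note: a dynamic start_date such as datetime.now() is generally
    # discouraged in Airflow; a fixed past date gives more predictable
    # scheduling.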
'start_date': datetime.now(),
'email': ['[email protected]'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5),
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
}
dag = DAG(
'hive_create_part_v1',
default_args=default_args,
schedule_interval="0 1 * * *",
concurrency=1)
# --------------------------------------------------------------------------------
# set tasks
# --------------------------------------------------------------------------------
task = BashOperator(
task_id='hive_create_parition',
bash_command='bash /data/appdata/airflow/script/hive_create_job.sh mnode2 ',
dag=dag)
|
normal
|
{
"blob_id": "49492ad1a1734be02ebefb77095fd560a7a7efd8",
"index": 7155,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndefault_args = {'owner': 'Jaimin', 'depends_on_past': False, 'start_date':\n datetime.now(), 'email': ['[email protected]'], 'email_on_failure': \n False, 'email_on_retry': False, 'retries': 1, 'retry_delay': timedelta(\n minutes=5)}\ndag = DAG('hive_create_part_v1', default_args=default_args,\n schedule_interval='0 1 * * *', concurrency=1)\ntask = BashOperator(task_id='hive_create_parition', bash_command=\n 'bash /data/appdata/airflow/script/hive_create_job.sh mnode2 ', dag=dag)\n",
"step-3": "import logging\nimport airflow\nfrom airflow import DAG\nfrom airflow.operators.python_operator import PythonOperator\nfrom airflow.operators import BashOperator, DummyOperator\nfrom datetime import datetime, timedelta\ndefault_args = {'owner': 'Jaimin', 'depends_on_past': False, 'start_date':\n datetime.now(), 'email': ['[email protected]'], 'email_on_failure': \n False, 'email_on_retry': False, 'retries': 1, 'retry_delay': timedelta(\n minutes=5)}\ndag = DAG('hive_create_part_v1', default_args=default_args,\n schedule_interval='0 1 * * *', concurrency=1)\ntask = BashOperator(task_id='hive_create_parition', bash_command=\n 'bash /data/appdata/airflow/script/hive_create_job.sh mnode2 ', dag=dag)\n",
"step-4": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport logging\nimport airflow\n\nfrom airflow import DAG\nfrom airflow.operators.python_operator import PythonOperator\nfrom airflow.operators import BashOperator, DummyOperator\n\nfrom datetime import datetime, timedelta\n\n\n# --------------------------------------------------------------------------------\n# set default arguments\n# --------------------------------------------------------------------------------\n\ndefault_args = {\n 'owner': 'Jaimin',\n 'depends_on_past': False,\n 'start_date': datetime.now(),\n 'email': ['[email protected]'],\n 'email_on_failure': False,\n 'email_on_retry': False,\n 'retries': 1,\n 'retry_delay': timedelta(minutes=5),\n # 'queue': 'bash_queue',\n # 'pool': 'backfill',\n # 'priority_weight': 10,\n # 'end_date': datetime(2016, 1, 1),\n}\n\ndag = DAG(\n 'hive_create_part_v1',\n default_args=default_args,\n schedule_interval=\"0 1 * * *\",\n concurrency=1)\n\n# --------------------------------------------------------------------------------\n# set tasks \n# --------------------------------------------------------------------------------\n\ntask = BashOperator(\n task_id='hive_create_parition',\n bash_command='bash /data/appdata/airflow/script/hive_create_job.sh mnode2 ',\n dag=dag)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.urls import path

from . import views
from .views import CustomObtainAuthToken
urlpatterns = [path('users/', views.UserCreateAPIView.as_view(), name=
'user-list'), path('users/login/', CustomObtainAuthToken.as_view()),
path('users/<int:pk>/', views.ReadUserAPIView.as_view()), path(
'users/<int:pk>/profile/', views.ReadUpdateProfileAPIView.as_view()),
path('charities/', views.ListCharitiesAPIView.as_view()), path(
'categories/', views.ListCategoriesAPIView.as_view())]
|
normal
|
{
"blob_id": "49d76458b8adcf6eea9db2ef127609ff96e03ad1",
"index": 6270,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('users/', views.UserCreateAPIView.as_view(), name=\n 'user-list'), path('users/login/', CustomObtainAuthToken.as_view()),\n path('users/<int:pk>/', views.ReadUserAPIView.as_view()), path(\n 'users/<int:pk>/profile/', views.ReadUpdateProfileAPIView.as_view()),\n path('charities/', views.ListCharitiesAPIView.as_view()), path(\n 'categories/', views.ListCategoriesAPIView.as_view())]\n",
"step-3": "from django.contrib import admin\nfrom django.urls import path, include\nfrom serverside.router import router\nfrom rest_framework.authtoken import views as auth_views\nfrom . import views\nfrom .views import CustomObtainAuthToken\nurlpatterns = [path('users/', views.UserCreateAPIView.as_view(), name=\n 'user-list'), path('users/login/', CustomObtainAuthToken.as_view()),\n path('users/<int:pk>/', views.ReadUserAPIView.as_view()), path(\n 'users/<int:pk>/profile/', views.ReadUpdateProfileAPIView.as_view()),\n path('charities/', views.ListCharitiesAPIView.as_view()), path(\n 'categories/', views.ListCategoriesAPIView.as_view())]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from django.conf.urls import url
from . import views
from . import admin
urlpatterns = [
url(r'^$', views.showberanda, name='showberanda'),
url(r'^sentimenanalisis/$', views.showsentimenanalisis, name='showsentimenanalisis'),
url(r'^bantuan/$', views.showbantuan, name='showbantuan'),
url(r'^tweets/', views.get_tweets),
]
|
normal
|
{
"blob_id": "077c596f71aae22e85589fdaf78d5cdae8085443",
"index": 8710,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^$', views.showberanda, name='showberanda'), url(\n '^sentimenanalisis/$', views.showsentimenanalisis, name=\n 'showsentimenanalisis'), url('^bantuan/$', views.showbantuan, name=\n 'showbantuan'), url('^tweets/', views.get_tweets)]\n",
"step-3": "from django.conf.urls import url\nfrom . import views\nfrom . import admin\nurlpatterns = [url('^$', views.showberanda, name='showberanda'), url(\n '^sentimenanalisis/$', views.showsentimenanalisis, name=\n 'showsentimenanalisis'), url('^bantuan/$', views.showbantuan, name=\n 'showbantuan'), url('^tweets/', views.get_tweets)]\n",
"step-4": "from django.conf.urls import url\nfrom . import views\nfrom .import admin\n\nurlpatterns = [\n url(r'^$', views.showberanda, name='showberanda'),\n url(r'^sentimenanalisis/$', views.showsentimenanalisis, name='showsentimenanalisis'),\n url(r'^bantuan/$', views.showbantuan, name='showbantuan'),\n url(r'^tweets/', views.get_tweets),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from mathgraph3D.core.plot import *
from mathgraph3D.core.functions import *
|
normal
|
{
"blob_id": "b58cc08f8f10220373fa78f5d7249bc883b447bf",
"index": 6991,
"step-1": "<mask token>\n",
"step-2": "from mathgraph3D.core.plot import *\nfrom mathgraph3D.core.functions import *\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from django.db import models
class Survey(models.Model):
"""Survey representation.
"""
name = models.CharField(max_length=255)
description = models.TextField()
start_date = models.DateTimeField()
end_date = models.DateTimeField()
def __str__(self):
return self.name
class Question(models.Model):
"""Survey's question respresentation.
"""
QUESTION_TYPE_CHOICES = (
(1, 'Text answer'),
(2, 'One choice answer'),
(3, 'Multiple choices answer')
)
survey = models.ForeignKey(
Survey,
on_delete=models.CASCADE,
related_name='questions')
text = models.TextField()
question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)
def __str__(self):
return self.text
class AnswerChoice(models.Model):
"""Represantation of question's
answer's choice.
"""
question = models.ForeignKey(
Question,
on_delete=models.CASCADE,
related_name='choices')
text = models.TextField()
def __str__(self):
return self.text
class CompletedSurvey(models.Model):
"""Representation of survey,
completed by the user.
"""
user_id = models.IntegerField(null=True, blank=True)
survey = models.ForeignKey(
Survey,
on_delete=models.SET_NULL,
null=True,
related_name='completed_surveys')
def __str__(self):
return f"{self.user_id} - {self.survey.name}"
class Answer(models.Model):
"""Representations of question's answer.
"""
completed_survey = models.ForeignKey(
CompletedSurvey,
on_delete=models.CASCADE,
related_name='answers')
question = models.ForeignKey(
Question,
on_delete=models.CASCADE,
related_name='answers')
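    # Depending on the question's type, either text_answer or answer_choices
    # is expected to carry the response; both are optional at the model level.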
text_answer = models.TextField(blank=True)
answer_choices = models.ManyToManyField(AnswerChoice, blank=True)
def __str__(self):
return f"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}"
|
normal
|
{
"blob_id": "2c4f27e7d1bfe6d68fd0836094b9e350946913f6",
"index": 5480,
"step-1": "<mask token>\n\n\nclass Question(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True,\n related_name='completed_surveys')\n\n def __str__(self):\n return f'{self.user_id} - {self.survey.name}'\n\n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models.\n CASCADE, related_name='answers')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return (\n f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"\n )\n",
"step-2": "<mask token>\n\n\nclass Survey(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Question(models.Model):\n \"\"\"Survey's question respresentation.\n \"\"\"\n QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), (\n 3, 'Multiple choices answer')\n survey = models.ForeignKey(Survey, on_delete=models.CASCADE,\n related_name='questions')\n text = models.TextField()\n question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True,\n related_name='completed_surveys')\n\n def __str__(self):\n return f'{self.user_id} - {self.survey.name}'\n\n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models.\n CASCADE, related_name='answers')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return (\n f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"\n )\n",
"step-3": "<mask token>\n\n\nclass Survey(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.name\n\n\nclass Question(models.Model):\n \"\"\"Survey's question respresentation.\n \"\"\"\n QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), (\n 3, 'Multiple choices answer')\n survey = models.ForeignKey(Survey, on_delete=models.CASCADE,\n related_name='questions')\n text = models.TextField()\n question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True,\n related_name='completed_surveys')\n\n def __str__(self):\n return f'{self.user_id} - {self.survey.name}'\n\n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models.\n CASCADE, related_name='answers')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return (\n f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"\n )\n",
"step-4": "<mask token>\n\n\nclass Survey(models.Model):\n \"\"\"Survey representation.\n \"\"\"\n name = models.CharField(max_length=255)\n description = models.TextField()\n start_date = models.DateTimeField()\n end_date = models.DateTimeField()\n\n def __str__(self):\n return self.name\n\n\nclass Question(models.Model):\n \"\"\"Survey's question respresentation.\n \"\"\"\n QUESTION_TYPE_CHOICES = (1, 'Text answer'), (2, 'One choice answer'), (\n 3, 'Multiple choices answer')\n survey = models.ForeignKey(Survey, on_delete=models.CASCADE,\n related_name='questions')\n text = models.TextField()\n question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(Survey, on_delete=models.SET_NULL, null=True,\n related_name='completed_surveys')\n\n def __str__(self):\n return f'{self.user_id} - {self.survey.name}'\n\n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n completed_survey = models.ForeignKey(CompletedSurvey, on_delete=models.\n CASCADE, related_name='answers')\n question = models.ForeignKey(Question, on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return (\n f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"\n )\n",
"step-5": "from django.db import models\n\n\nclass Survey(models.Model):\n \"\"\"Survey representation.\n \"\"\"\n\n name = models.CharField(max_length=255)\n description = models.TextField()\n start_date = models.DateTimeField()\n end_date = models.DateTimeField()\n\n def __str__(self):\n return self.name\n\n\nclass Question(models.Model):\n \"\"\"Survey's question respresentation.\n \"\"\"\n\n QUESTION_TYPE_CHOICES = (\n (1, 'Text answer'),\n (2, 'One choice answer'),\n (3, 'Multiple choices answer')\n )\n\n survey = models.ForeignKey(\n Survey, \n on_delete=models.CASCADE, \n related_name='questions')\n text = models.TextField()\n question_type = models.IntegerField(choices=QUESTION_TYPE_CHOICES)\n\n def __str__(self):\n return self.text\n\n\nclass AnswerChoice(models.Model):\n \"\"\"Represantation of question's \n answer's choice.\n \"\"\"\n\n question = models.ForeignKey(\n Question, \n on_delete=models.CASCADE, \n related_name='choices')\n text = models.TextField()\n\n def __str__(self):\n return self.text\n\n\nclass CompletedSurvey(models.Model):\n \"\"\"Representation of survey, \n completed by the user.\n \"\"\"\n\n user_id = models.IntegerField(null=True, blank=True)\n survey = models.ForeignKey(\n Survey, \n on_delete=models.SET_NULL, \n null=True, \n related_name='completed_surveys')\n\n def __str__(self):\n return f\"{self.user_id} - {self.survey.name}\"\n \n\nclass Answer(models.Model):\n \"\"\"Representations of question's answer.\n \"\"\"\n\n completed_survey = models.ForeignKey(\n CompletedSurvey,\n on_delete=models.CASCADE,\n related_name='answers')\n question = models.ForeignKey(\n Question,\n on_delete=models.CASCADE,\n related_name='answers')\n text_answer = models.TextField(blank=True)\n answer_choices = models.ManyToManyField(AnswerChoice, blank=True)\n\n def __str__(self):\n return f\"Answer for survey '{str(self.completed_survey)}' made by user {self.completed_survey.user_id}\"",
"step-ids": [
14,
17,
18,
20,
22
]
}
|
[
14,
17,
18,
20,
22
] |
# -*- coding: utf-8 -*-
from plone import api
from plone.dexterity.content import Container
from sc.microsite.interfaces import IMicrosite
from zope.interface import implementer
@implementer(IMicrosite)
class Microsite(Container):
"""A microsite."""
def getLocallyAllowedTypes(self):
"""
        For now we allow all allowed types without constraint.
TODO: fully implement ISelectableConstrainTypes
"""
portal_types = api.portal.get_tool('portal_types')
my_type = portal_types.getTypeInfo(self)
result = portal_types.listTypeInfo()
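        # Keep only the types this container allows and that may actually
        # be constructed here.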
return [t for t in result if my_type.allowType(t.getId()) and
t.isConstructionAllowed(self)]
def getImmediatelyAddableTypes(self, context=None):
"""
        For now we allow all allowed types without constraint.
TODO: fully implement ISelectableConstrainTypes
"""
return self.getLocallyAllowedTypes()
|
normal
|
{
"blob_id": "3d5d88edca5d746b830363cc9451bda94c1d7aa4",
"index": 2905,
"step-1": "<mask token>\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n <mask token>\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and t.\n isConstructionAllowed(self)]\n <mask token>\n",
"step-2": "<mask token>\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n <mask token>\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and t.\n isConstructionAllowed(self)]\n\n def getImmediatelyAddableTypes(self, context=None):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n return self.getLocallyAllowedTypes()\n",
"step-3": "<mask token>\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n \"\"\"A microsite.\"\"\"\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and t.\n isConstructionAllowed(self)]\n\n def getImmediatelyAddableTypes(self, context=None):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n return self.getLocallyAllowedTypes()\n",
"step-4": "from plone import api\nfrom plone.dexterity.content import Container\nfrom sc.microsite.interfaces import IMicrosite\nfrom zope.interface import implementer\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n \"\"\"A microsite.\"\"\"\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and t.\n isConstructionAllowed(self)]\n\n def getImmediatelyAddableTypes(self, context=None):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n return self.getLocallyAllowedTypes()\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom plone import api\nfrom plone.dexterity.content import Container\nfrom sc.microsite.interfaces import IMicrosite\nfrom zope.interface import implementer\n\n\n@implementer(IMicrosite)\nclass Microsite(Container):\n \"\"\"A microsite.\"\"\"\n\n def getLocallyAllowedTypes(self):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n portal_types = api.portal.get_tool('portal_types')\n my_type = portal_types.getTypeInfo(self)\n result = portal_types.listTypeInfo()\n return [t for t in result if my_type.allowType(t.getId()) and\n t.isConstructionAllowed(self)]\n\n def getImmediatelyAddableTypes(self, context=None):\n \"\"\"\n By now we allow all allowed types without constrain.\n TODO: fully implement ISelectableConstrainTypes\n \"\"\"\n return self.getLocallyAllowedTypes()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from . import colorbar_artist
from . import subplot_artist
from . import surface_3d_with_shadows
from .colorbar_artist import *
from .subplot_artist import *
from .surface_3d_with_shadows import *
__all__ = ['colorbar_artist', 'subplot_artist', 'surface_3d_with_shadows']
__all__.extend(colorbar_artist.__all__)
__all__.extend(subplot_artist.__all__)
__all__.extend(surface_3d_with_shadows.__all__)
|
normal
|
{
"blob_id": "16c4dbd472f9d32e5fa48a28dff4a40914f7d29e",
"index": 8231,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__.extend(colorbar_artist.__all__)\n__all__.extend(subplot_artist.__all__)\n__all__.extend(surface_3d_with_shadows.__all__)\n",
"step-3": "<mask token>\n__all__ = ['colorbar_artist', 'subplot_artist', 'surface_3d_with_shadows']\n__all__.extend(colorbar_artist.__all__)\n__all__.extend(subplot_artist.__all__)\n__all__.extend(surface_3d_with_shadows.__all__)\n",
"step-4": "from . import colorbar_artist\nfrom . import subplot_artist\nfrom . import surface_3d_with_shadows\nfrom .colorbar_artist import *\nfrom .subplot_artist import *\nfrom .surface_3d_with_shadows import *\n__all__ = ['colorbar_artist', 'subplot_artist', 'surface_3d_with_shadows']\n__all__.extend(colorbar_artist.__all__)\n__all__.extend(subplot_artist.__all__)\n__all__.extend(surface_3d_with_shadows.__all__)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""Utilities for AnalysisModules."""
import inspect
from mongoengine import QuerySet
from numpy import percentile
from .modules import AnalysisModule
def get_primary_module(package):
"""Extract AnalysisModule primary module from package."""
def test_submodule(submodule):
"""Test a submodule to see if it is an AnalysisModule module."""
is_correct_subclass = issubclass(submodule, AnalysisModule)
# Ensure submodule is defined within the package we are inspecting (and not 'base')
is_correct_module = package.__name__ in submodule.__module__
return is_correct_subclass and is_correct_module
submodules = inspect.getmembers(package, inspect.isclass)
module = next(submodule for _, submodule in submodules
if test_submodule(submodule))
return module
def scrub_object(obj):
"""Remove protected fields from object (dict or list)."""
if isinstance(obj, list):
return [scrub_object(item) for item in obj]
if isinstance(obj, dict):
clean_dict = {key: scrub_object(value)
for key, value in obj.items()
if not key.startswith('_')}
return clean_dict
return obj
def jsonify(mongo_doc):
"""Convert Mongo document to JSON for serialization."""
    if isinstance(mongo_doc, (QuerySet, list)):
return [jsonify(element) for element in mongo_doc]
result_dict = mongo_doc.to_mongo().to_dict()
clean_dict = scrub_object(result_dict)
return clean_dict
def boxplot(values):
"""Calculate percentiles needed for a boxplot."""
percentiles = percentile(values, [0, 25, 50, 75, 100])
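    # Note: percentiles[2] is the 50th percentile (the median), even though
    # the key below is named 'mean_val'.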
result = {'min_val': percentiles[0],
'q1_val': percentiles[1],
'mean_val': percentiles[2],
'q3_val': percentiles[3],
'max_val': percentiles[4]}
return result
def scrub_category_val(category_val):
"""Make sure that category val is a string with positive length."""
if not isinstance(category_val, str):
category_val = str(category_val)
if category_val.lower() == 'nan':
category_val = 'NaN'
if not category_val:
category_val = 'NaN'
return category_val
def collate_samples(tool_name, fields, samples):
"""Group a set of ToolResult fields from a set of samples by sample name."""
sample_dict = {}
for sample in samples:
sample_name = sample['name']
sample_dict[sample_name] = {}
tool_result = sample[tool_name]
for field in fields:
sample_dict[sample_name][field] = tool_result[field]
return sample_dict
def categories_from_metadata(samples, min_size=2):
"""
Create dict of categories and their values from sample metadata.
Parameters
----------
samples : list
List of sample models.
min_size: int
Minimum number of values required for a given metadata item to
be included in returned categories.
Returns
-------
dict
Dictionary of form {<category_name>: [category_value[, category_value]]}
"""
categories = {}
# Gather categories and values
all_metadata = [sample['metadata'] for sample in samples]
for metadata in all_metadata:
properties = [prop for prop in metadata.keys()]
for prop in properties:
if prop not in categories:
categories[prop] = set([])
category_val = metadata[prop]
category_val = scrub_category_val(category_val)
categories[prop].add(category_val)
# Filter for minimum number of values
categories = {category_name: list(category_values)
for category_name, category_values in categories.items()
if len(category_values) >= min_size}
return categories
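# --- added usage sketch (hedged; not part of the original module) ---
# A minimal demonstration of the helpers above on toy inputs. The sample
# dicts are made-up stand-ins for real sample models, and because of the
# relative import at the top this only runs inside its package, e.g. via
# `python -m <package>.<module>` (package and module names unknown here).
if __name__ == '__main__':
    print(boxplot([1, 2, 3, 4, 100]))
    # -> {'min_val': 1.0, 'q1_val': 2.0, 'mean_val': 3.0,
    #     'q3_val': 4.0, 'max_val': 100.0}
    print(scrub_category_val(float('nan')))  # -> 'NaN'
    toy_samples = [{'metadata': {'site': 'A'}}, {'metadata': {'site': 'B'}}]
    print(categories_from_metadata(toy_samples))
    # -> {'site': ['A', 'B']} (list order may vary; values come from a set)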
|
normal
|
{
"blob_id": "3472dc0c9d00c10ab0690c052e70fbf6a4bdb13d",
"index": 7889,
"step-1": "<mask token>\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0], 'q1_val': percentiles[1],\n 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val':\n percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n categories = {category_name: list(category_values) for category_name,\n category_values in categories.items() if len(category_values) >=\n min_size}\n return categories\n",
"step-2": "<mask token>\n\n\ndef get_primary_module(package):\n \"\"\"Extract AnalysisModule primary module from package.\"\"\"\n\n def test_submodule(submodule):\n \"\"\"Test a submodule to see if it is an AnalysisModule module.\"\"\"\n is_correct_subclass = issubclass(submodule, AnalysisModule)\n is_correct_module = package.__name__ in submodule.__module__\n return is_correct_subclass and is_correct_module\n submodules = inspect.getmembers(package, inspect.isclass)\n module = next(submodule for _, submodule in submodules if\n test_submodule(submodule))\n return module\n\n\ndef scrub_object(obj):\n \"\"\"Remove protected fields from object (dict or list).\"\"\"\n if isinstance(obj, list):\n return [scrub_object(item) for item in obj]\n if isinstance(obj, dict):\n clean_dict = {key: scrub_object(value) for key, value in obj.items(\n ) if not key.startswith('_')}\n return clean_dict\n return obj\n\n\n<mask token>\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0], 'q1_val': percentiles[1],\n 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val':\n percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n categories = {category_name: list(category_values) for category_name,\n category_values in categories.items() if len(category_values) >=\n min_size}\n return categories\n",
"step-3": "<mask token>\n\n\ndef get_primary_module(package):\n \"\"\"Extract AnalysisModule primary module from package.\"\"\"\n\n def test_submodule(submodule):\n \"\"\"Test a submodule to see if it is an AnalysisModule module.\"\"\"\n is_correct_subclass = issubclass(submodule, AnalysisModule)\n is_correct_module = package.__name__ in submodule.__module__\n return is_correct_subclass and is_correct_module\n submodules = inspect.getmembers(package, inspect.isclass)\n module = next(submodule for _, submodule in submodules if\n test_submodule(submodule))\n return module\n\n\ndef scrub_object(obj):\n \"\"\"Remove protected fields from object (dict or list).\"\"\"\n if isinstance(obj, list):\n return [scrub_object(item) for item in obj]\n if isinstance(obj, dict):\n clean_dict = {key: scrub_object(value) for key, value in obj.items(\n ) if not key.startswith('_')}\n return clean_dict\n return obj\n\n\ndef jsonify(mongo_doc):\n \"\"\"Convert Mongo document to JSON for serialization.\"\"\"\n if isinstance(mongo_doc, (QuerySet, list)):\n return [jsonify(element) for element in mongo_doc]\n result_dict = mongo_doc.to_mongo().to_dict()\n clean_dict = scrub_object(result_dict)\n return clean_dict\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0], 'q1_val': percentiles[1],\n 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val':\n percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n categories = {category_name: list(category_values) for category_name,\n category_values in categories.items() if len(category_values) >=\n min_size}\n return categories\n",
"step-4": "<mask token>\nimport inspect\nfrom mongoengine import QuerySet\nfrom numpy import percentile\nfrom .modules import AnalysisModule\n\n\ndef get_primary_module(package):\n \"\"\"Extract AnalysisModule primary module from package.\"\"\"\n\n def test_submodule(submodule):\n \"\"\"Test a submodule to see if it is an AnalysisModule module.\"\"\"\n is_correct_subclass = issubclass(submodule, AnalysisModule)\n is_correct_module = package.__name__ in submodule.__module__\n return is_correct_subclass and is_correct_module\n submodules = inspect.getmembers(package, inspect.isclass)\n module = next(submodule for _, submodule in submodules if\n test_submodule(submodule))\n return module\n\n\ndef scrub_object(obj):\n \"\"\"Remove protected fields from object (dict or list).\"\"\"\n if isinstance(obj, list):\n return [scrub_object(item) for item in obj]\n if isinstance(obj, dict):\n clean_dict = {key: scrub_object(value) for key, value in obj.items(\n ) if not key.startswith('_')}\n return clean_dict\n return obj\n\n\ndef jsonify(mongo_doc):\n \"\"\"Convert Mongo document to JSON for serialization.\"\"\"\n if isinstance(mongo_doc, (QuerySet, list)):\n return [jsonify(element) for element in mongo_doc]\n result_dict = mongo_doc.to_mongo().to_dict()\n clean_dict = scrub_object(result_dict)\n return clean_dict\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0], 'q1_val': percentiles[1],\n 'mean_val': percentiles[2], 'q3_val': percentiles[3], 'max_val':\n percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n categories = {category_name: list(category_values) for category_name,\n category_values in categories.items() if len(category_values) >=\n min_size}\n return categories\n",
"step-5": "\"\"\"Utilities for AnalysisModules.\"\"\"\n\nimport inspect\n\nfrom mongoengine import QuerySet\nfrom numpy import percentile\n\nfrom .modules import AnalysisModule\n\n\ndef get_primary_module(package):\n \"\"\"Extract AnalysisModule primary module from package.\"\"\"\n def test_submodule(submodule):\n \"\"\"Test a submodule to see if it is an AnalysisModule module.\"\"\"\n is_correct_subclass = issubclass(submodule, AnalysisModule)\n # Ensure submodule is defined within the package we are inspecting (and not 'base')\n is_correct_module = package.__name__ in submodule.__module__\n return is_correct_subclass and is_correct_module\n\n submodules = inspect.getmembers(package, inspect.isclass)\n module = next(submodule for _, submodule in submodules\n if test_submodule(submodule))\n return module\n\n\ndef scrub_object(obj):\n \"\"\"Remove protected fields from object (dict or list).\"\"\"\n if isinstance(obj, list):\n return [scrub_object(item) for item in obj]\n if isinstance(obj, dict):\n clean_dict = {key: scrub_object(value)\n for key, value in obj.items()\n if not key.startswith('_')}\n return clean_dict\n return obj\n\n\ndef jsonify(mongo_doc):\n \"\"\"Convert Mongo document to JSON for serialization.\"\"\"\n if isinstance(mongo_doc, (QuerySet, list,)):\n return [jsonify(element) for element in mongo_doc]\n result_dict = mongo_doc.to_mongo().to_dict()\n clean_dict = scrub_object(result_dict)\n return clean_dict\n\n\ndef boxplot(values):\n \"\"\"Calculate percentiles needed for a boxplot.\"\"\"\n percentiles = percentile(values, [0, 25, 50, 75, 100])\n result = {'min_val': percentiles[0],\n 'q1_val': percentiles[1],\n 'mean_val': percentiles[2],\n 'q3_val': percentiles[3],\n 'max_val': percentiles[4]}\n return result\n\n\ndef scrub_category_val(category_val):\n \"\"\"Make sure that category val is a string with positive length.\"\"\"\n if not isinstance(category_val, str):\n category_val = str(category_val)\n if category_val.lower() == 'nan':\n category_val = 'NaN'\n if not category_val:\n category_val = 'NaN'\n return category_val\n\n\ndef collate_samples(tool_name, fields, samples):\n \"\"\"Group a set of ToolResult fields from a set of samples by sample name.\"\"\"\n sample_dict = {}\n for sample in samples:\n sample_name = sample['name']\n sample_dict[sample_name] = {}\n tool_result = sample[tool_name]\n for field in fields:\n sample_dict[sample_name][field] = tool_result[field]\n\n return sample_dict\n\n\ndef categories_from_metadata(samples, min_size=2):\n \"\"\"\n Create dict of categories and their values from sample metadata.\n\n Parameters\n ----------\n samples : list\n List of sample models.\n min_size: int\n Minimum number of values required for a given metadata item to\n be included in returned categories.\n\n Returns\n -------\n dict\n Dictionary of form {<category_name>: [category_value[, category_value]]}\n\n \"\"\"\n categories = {}\n\n # Gather categories and values\n all_metadata = [sample['metadata'] for sample in samples]\n for metadata in all_metadata:\n properties = [prop for prop in metadata.keys()]\n for prop in properties:\n if prop not in categories:\n categories[prop] = set([])\n category_val = metadata[prop]\n category_val = scrub_category_val(category_val)\n categories[prop].add(category_val)\n\n # Filter for minimum number of values\n categories = {category_name: list(category_values)\n for category_name, category_values in categories.items()\n if len(category_values) >= min_size}\n\n return categories\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
from setuptools import setup
import os.path

# Get the long description from the README file
with open('README.rst') as f:
    long_description = f.read()


setup(name='logging_exceptions',
      version='0.1.8',
      py_modules=['logging_exceptions'],
      author="Bernhard C. Thiel",
      author_email="[email protected]",
      description="Self-logging exceptions: Attach log messages to exceptions and output them conditionally.",
      long_description=long_description,
      url='https://github.com/Bernhard10/logging_exceptions',
      license='MIT',
      classifiers=[
          'Development Status :: 4 - Beta',
          'Intended Audience :: Developers',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.5'
      ],
      keywords='logging exceptions'

      )
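# --- added usage note (hedged; standard setuptools/pip workflow, not
# specific to this project) ---
#   pip install .                        # install from the source tree
#   python setup.py sdist bdist_wheel    # build source and wheel archives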
|
normal
|
{
"blob_id": "7f7adc367e4f3b8ee721e42f5d5d0770f40828c9",
"index": 9365,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('README.rst') as f:\n long_description = f.read()\nsetup(name='logging_exceptions', version='0.1.8', py_modules=[\n 'logging_exceptions'], author='Bernhard C. Thiel', author_email=\n '[email protected]', description=\n 'Self-logging exceptions: Attach log messages to exceptions and output them conditionally.'\n , long_description=long_description, url=\n 'https://github.com/Bernhard10/logging_exceptions', license='MIT',\n classifiers=['Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5'], keywords='logging exceptions')\n",
"step-3": "from setuptools import setup\nimport os.path\nwith open('README.rst') as f:\n long_description = f.read()\nsetup(name='logging_exceptions', version='0.1.8', py_modules=[\n 'logging_exceptions'], author='Bernhard C. Thiel', author_email=\n '[email protected]', description=\n 'Self-logging exceptions: Attach log messages to exceptions and output them conditionally.'\n , long_description=long_description, url=\n 'https://github.com/Bernhard10/logging_exceptions', license='MIT',\n classifiers=['Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5'], keywords='logging exceptions')\n",
"step-4": "from setuptools import setup\nimport os.path\n\n# Get the long description from the README file\nwith open('README.rst') as f:\n long_description = f.read()\n\n\nsetup(name='logging_exceptions',\n version='0.1.8',\n py_modules=['logging_exceptions'],\n author=\"Bernhard C. Thiel\",\n author_email=\"[email protected]\",\n description=\"Self-logging exceptions: Attach log messages to exceptions and output them conditionally.\",\n long_description=long_description,\n url='https://github.com/Bernhard10/logging_exceptions',\n license='MIT',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5'\n ],\n keywords='logging exceptions'\n\n )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import json
import datetime
import string
import random
import logging

import jwt

from main import db
from main.config import config


def execute_sql_from_file(filename):
    # Open and read the file as a single buffer
    fd = open(filename, 'r')
    sql_file = fd.read()
    fd.close()

    # All SQL commands (split on ';')
    sql_commands = sql_file.split(';')

    # Execute every command from the input file
    for command in sql_commands:
        # This will skip and report validation
        # For example, if the tables do not yet exist, this will skip over
        # the DROP TABLE commands
        try:
            db.session.execute(command.decode('utf-8'))
        except Exception as e:  # was "except Exception, e:"; the 'as' form works on Python 2.6+ and 3
            logging.exception(e)


def create_mock_data():
    execute_sql_from_file('./sql/test.sql')


def drop_tables():
    execute_sql_from_file('./sql/drop_tables.sql')


def create_headers(access_token=None):
    headers = {
        'Content-Type': 'application/json'
    }

    if access_token:
        headers.update({
            'Authorization': 'Bearer {}'.format(access_token)
        })

    return headers


def json_response(response):
    return json.loads(response.data.decode('utf-8'))


def generate_access_token(user_id, is_expired=False):
    """
    Generate JWT Token for test authentication.

    :param user_id: User ID
    :param is_expired: To generate expired tokens
    :return: JWT Token string
    """
    iat = datetime.datetime.utcnow()

    return jwt.encode({
        'sub': user_id,  # Subject of this token
        'iat': iat,  # Issued at
        'exp': iat + datetime.timedelta(hours=1)  # Expired at
        if not is_expired
        else iat - datetime.timedelta(minutes=5)
    }, config.SECRET_KEY)


def random_string(string_length=10):
    """Generate a random string of fixed length"""
    letters = string.ascii_lowercase
    return ''.join(random.choice(letters) for _ in range(string_length))
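# --- added usage sketch (hedged; the user id 42 is a made-up value) ---
# Typical test flow: mint a token for a user, then build request headers.
# Running this assumes the application package ('main') is importable,
# since this module pulls db and config from it.
if __name__ == '__main__':
    token = generate_access_token(42)
    headers = create_headers(access_token=token)
    print(headers['Authorization'])  # -> 'Bearer <jwt token>'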
|
normal
|
{
"blob_id": "a724b49c4d86400b632c02236ceca58e62ba6c86",
"index": 9116,
"step-1": "import json\nimport datetime\nimport string\nimport random\nimport logging\n\nimport jwt\n\nfrom main import db\nfrom main.config import config\n\n\ndef execute_sql_from_file(filename):\n # Open and read the file as a single buffer\n fd = open(filename, 'r')\n sql_file = fd.read()\n fd.close()\n\n # All SQL commands (split on ';')\n sql_commands = sql_file.split(';')\n\n # Execute every command from the input file\n for command in sql_commands:\n # This will skip and report validation\n # For example, if the tables do not yet exist, this will skip over\n # the DROP TABLE commands\n try:\n db.session.execute(command.decode('utf-8'))\n except Exception, e:\n logging.exception(e)\n\n\ndef create_mock_data():\n execute_sql_from_file('./sql/test.sql')\n\n\ndef drop_tables():\n execute_sql_from_file('./sql/drop_tables.sql')\n\n\ndef create_headers(access_token=None):\n headers = {\n 'Content-Type': 'application/json'\n }\n\n if access_token:\n headers.update({\n 'Authorization': 'Bearer {}'.format(access_token)\n })\n\n return headers\n\n\ndef json_response(response):\n return json.loads(response.data.decode('utf-8'))\n\n\ndef generate_access_token(user_id, is_expired=False):\n \"\"\"\n Generate JWT Token for test authentication.\n\n :param user_id: User ID\n :param is_expired: To generate expired tokens\n :return: JWT Token string\n \"\"\"\n\n iat = datetime.datetime.utcnow()\n\n return jwt.encode({\n 'sub': user_id, # Subject of this token\n 'iat': iat, # Issued at\n 'exp': iat + datetime.timedelta(hours=1) # Expired at\n if not is_expired\n else iat - datetime.timedelta(minutes=5)\n }, config.SECRET_KEY)\n\n\ndef random_string(string_length=10):\n \"\"\"Generate a random string of fixed length\"\"\"\n letters = string.ascii_lowercase\n return ''.join(random.choice(letters) for _ in range(string_length))\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import ProcessPoolExecutor
import ATLAS1
import ATLAS_v2
from atlas.config import dbConfig
import pandas as pd
import ContentCategories
import NgramMapping
import SentimentAnalysis_2
import TrigDriv_2
import TopicModeling
import logging
import traceback
from StringIO import StringIO
from atlas.models import Requests


def caller_file(full_data_dict):
#print(full_data_dict)
request = full_data_dict['filename_obj']
print("Entering File analysis", request)
filecontents = full_data_dict['file_data']
# print("filecontents:", filecontents)
# tag_dict = full_data_dict['tag_dict']
#db = pymongo.MongoClient().atlas
#s = request.encode('utf-8')
df = pd.read_csv(dbConfig.dict["requestUrl"], encoding='utf-8')
status_dict = {'status': None, "senti_list": None, 'td_list': None}
print("going to read file contents into df.")
file_contents_df = pd.read_csv(StringIO(filecontents), encoding='utf-8')
print("file contents read into df.")
if "pCategory" in file_contents_df.columns.values.tolist():
print("Calling Atlas1.main2()")
status = ATLAS1.main2(request, filecontents, full_data_dict['tag_dict'])
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '15% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "15% complete"
# file_dict = {
# '_id': binascii.hexlify(s),
# 'Product': request,
#
# 'metadata': {
# '_id': binascii.hexlify(s),
# 'lastUpdated': datetime.datetime.now().strftime("%A, %d. %B %Y %I:%M:%S %p"),
# 'name': request
# },
# 'analyticData': {
# 'sentimentData': [
#
# ],
# 'trigdrivData': {
#
# }
# }
# }
# result = db.data.insert_one(file_dict)
# sent_list = SentimentAPI_generic.senti_main(dbConfig.dict['uploadsUrl'] + request, ',')
# print sent_list
#
# target_string = "analyticData.sentimentData"
#
# db.data.update({"_id": binascii.hexlify(s)}, {"$set": {target_string: sent_list[0]}})
# print result.inserted_id
# Calling analyses files - sentiment, trigger/driver and topic modelling
try:
print("Now classifying content categories")
cc_list = ContentCategories.main(request)
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '35% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "35% complete"
except:
print("Error while classifying content categories")
print(traceback.print_exc())
# Calling analyses files - sentiment, trigger/driver and topic modelling
try:
print("Now tagging the dataset")
tagop_list = NgramMapping.main2(request, full_data_dict['tag_dict'])
#tagop_list = NgramMapping.main2("headphones", full_data_dict['tag_dict'])
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '50% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "50% complete"
except:
print("Error while tagging dataset with dictionary")
print(traceback.print_exc())
try:
print("Calling sentiment analyses to run on uploaded file...")
sent_list = SentimentAnalysis_2.senti_main2(request, filecontents, full_data_dict['senti_dict'])
#print sent_list
print("Sentiment data inserted into DB")
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '65% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "65% complete"
except:
print("Error while analysing sentiment")
#print(traceback.print_exc())
try:
td_list = TrigDriv_2.td_main2(request, full_data_dict['td_dict'])
#print td_list
print("TriggerDriver data inserted into DB")
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '80% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "80% complete"
except:
print("Error while analysing triggers/drivers")
#print(traceback.print_exc())
else:
print("Calling Atlas1.main3()")
# if 'supplements_10k_1' not in request:
status = ATLAS1.main3(request, filecontents, full_data_dict['tag_dict'])
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '15% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "15% complete"
# Calling analyses files - sentiment, trigger/driver and topic modelling
try:
print("Now classifying content categories")
cc_list = ContentCategories.main(request)
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '35% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "35% complete"
except:
print("Error while classifying content categories")
print(traceback.print_exc())
# Calling analyses files - sentiment, trigger/driver and topic modelling
try:
print("Now tagging the dataset with the dictionary provided")
tagop_list = NgramMapping.main3(request, full_data_dict['file_data'], full_data_dict['tag_dict'])
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '50% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "50% complete"
except:
print("Error while tagging dataset with dictionary")
print(traceback.print_exc())
try:
print("Calling sentiment analyses to run on uploaded file...")
sent_list = SentimentAnalysis_2.senti_main3(request, filecontents, full_data_dict['senti_dict'])
# print sent_list
print("Sentiment data inserted into DB")
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '65% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "65% complete"
except:
print("Error while analysing sentiment")
# print(traceback.print_exc())
try:
td_list = TrigDriv_2.td_main3(request, full_data_dict['td_dict'])
# print td_list
print("TriggerDriver data inserted into DB")
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '80% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "80% complete"
except:
print("Error while analysing triggers/drivers")
# print(traceback.print_exc())
# else:
# try:
# print("Now tagging the supplements dataset with the dictionary provided")
# tagop_list = NgramMapping.main3(request, full_data_dict['file_data'], full_data_dict['tag_dict'])
# except:
# print("Error while tagging supplement dataset with dictionary")
# print(traceback.print_exc())
print "Going to topic model"
# Performing Topic Modeling Analysis
num_topics = 8
topic_status = TopicModeling.main(request, num_topics)
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = 'Complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
df.ix[(df.reqKw == request), 'reqStatus'] = "Complete"
# if status == 200 and sent_list == 200 and td_list == 200 and topic_status == 200:
# # Update request csv status to completed
# df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = "Completed"
# elif status == 200 and sent_list == 200 and td_list == 200:
# df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = "Topic Modelling Failed"
# elif status == 200 and sent_list == 200:
# df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = "Trigger/Driver Failed"
# elif status == 200:
# df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = "Sentiment Failed"
# else:
# df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = "Scraping incomplete"
with open(dbConfig.dict["requestUrl"], 'w') as f:
df.to_csv(f, index=False)
print("Exiting return")
return request


def caller(request, site, full_data_dict):
print(full_data_dict['tag_dict']) # dict with default dict urls for automatic scraped data tagging
print("Entering", request, site)
# df = pd.read_csv(dbConfig.dict["requestUrl"], encoding='utf-8')
# db = pymongo.MongoClient().atlas
# s = request.encode('utf-8')
status = ATLAS_v2.main(request, site)
print("Atlas main finish")
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '15% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
# df.ix[(df.reqKw == request), 'reqStatus'] = "20% complete"
# Calling analyses files - sentiment, trigger/driver and topic modelling
try:
print("Now classifying content categories")
cc_list = ContentCategories.main(request)
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '35% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
# df.ix[(df.reqKw == request), 'reqStatus'] = "40% complete"
except:
print("Error while classifying content categories!")
print(traceback.print_exc())
# Calling analyses files - sentiment, trigger/driver and topic modelling
try:
print("Now tagging the dataset...")
tagop_list = NgramMapping.main(request, full_data_dict['tag_dict'])
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '50% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
# df.ix[(df.reqKw == request), 'reqStatus'] = "40% complete"
except:
print("Error while tagging dataset with dictionary")
print(traceback.print_exc())
try:
sent_list = SentimentAnalysis_2.senti_main(request)
#print sent_list
print("Sentiment data inserted into DB")
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '65% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
# df.ix[(df.reqKw == request), 'reqStatus'] = "60% complete"
except:
print("Error while analysing sentiment")
print(traceback.print_exc())
try:
td_list = TrigDriv_2.td_main(request)
#print td_list
print("TriggerDriver data inserted into DB")
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = '80% complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
# df.ix[(df.reqKw == request), 'reqStatus'] = "80% complete"
except:
print("Error while analysing triggers/drivers")
print(traceback.print_exc())
print "Going to topic model"
#logging.info("going to topicmodeling.main")
#
#Performing Topic Modeling Analysis
num_topics = 8
topic_status = TopicModeling.main(request, num_topics)
# df = pd.read_csv(dbConfig.dict["requestUrl"], encoding='utf-8')
# if status == 200 & sent_list[1] == 200 & topic_status == 200:
# # Update request csv status to completed
# df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = "Completed"
# else:
# df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = "Failed"
try:
req_obj = Requests.objects.get(reqKw=request)
req_obj.reqStatus = 'Complete'
req_obj.save()
except:
print("Couldn't save status update in DB!")
print(traceback.print_exc())
# df.ix[(df.reqKw == request), 'reqStatus'] = "Complete"
# with open(dbConfig.dict["requestUrl"], 'w') as f:
# df.to_csv(f, index=False)
print("Exiting Return")
return request


pool = ProcessPoolExecutor()


def pool_exe(request, site, full_data_dict):  # to Rev
    future = pool.submit(caller, request, site, full_data_dict)
    print("Exit pool exe\n")


# def pool_exe_file(request, filecontents):
#     future = pool.submit(caller_file, request, filecontents)
#     print("Exit file pool exe\n")


def pool_exe_file(full_data_dict):  # to Upl, Soc
    future = pool.submit(caller_file, full_data_dict)
    print("Exit file pool exe\n")
|
normal
|
{
"blob_id": "41698e9d8349ddf3f42aa3d4fc405c69077d1aa3",
"index": 3160,
"step-1": "from concurrent.futures import ThreadPoolExecutor\nfrom concurrent.futures import ProcessPoolExecutor\nimport ATLAS1\nimport ATLAS_v2\nfrom atlas.config import dbConfig\nimport pandas as pd\nimport ContentCategories\nimport NgramMapping\nimport SentimentAnalysis_2\nimport TrigDriv_2\nimport TopicModeling\nimport logging\nimport traceback\nfrom StringIO import StringIO\nfrom atlas.models import Requests\n\n\ndef caller_file(full_data_dict):\n #print(full_data_dict)\n request = full_data_dict['filename_obj']\n print(\"Entering File analysis\", request)\n filecontents = full_data_dict['file_data']\n # print(\"filecontents:\", filecontents)\n # tag_dict = full_data_dict['tag_dict']\n\n #db = pymongo.MongoClient().atlas\n #s = request.encode('utf-8')\n\n df = pd.read_csv(dbConfig.dict[\"requestUrl\"], encoding='utf-8')\n status_dict = {'status': None, \"senti_list\": None, 'td_list': None}\n print(\"going to read file contents into df.\")\n file_contents_df = pd.read_csv(StringIO(filecontents), encoding='utf-8')\n print(\"file contents read into df.\")\n\n if \"pCategory\" in file_contents_df.columns.values.tolist():\n print(\"Calling Atlas1.main2()\")\n status = ATLAS1.main2(request, filecontents, full_data_dict['tag_dict'])\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '15% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"15% complete\"\n\n # file_dict = {\n # '_id': binascii.hexlify(s),\n # 'Product': request,\n #\n # 'metadata': {\n # '_id': binascii.hexlify(s),\n # 'lastUpdated': datetime.datetime.now().strftime(\"%A, %d. %B %Y %I:%M:%S %p\"),\n # 'name': request\n # },\n # 'analyticData': {\n # 'sentimentData': [\n #\n # ],\n # 'trigdrivData': {\n #\n # }\n # }\n # }\n # result = db.data.insert_one(file_dict)\n # sent_list = SentimentAPI_generic.senti_main(dbConfig.dict['uploadsUrl'] + request, ',')\n # print sent_list\n #\n # target_string = \"analyticData.sentimentData\"\n #\n # db.data.update({\"_id\": binascii.hexlify(s)}, {\"$set\": {target_string: sent_list[0]}})\n # print result.inserted_id\n\n # Calling analyses files - sentiment, trigger/driver and topic modelling\n try:\n print(\"Now classifying content categories\")\n cc_list = ContentCategories.main(request)\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '35% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"35% complete\"\n except:\n print(\"Error while classifying content categories\")\n print(traceback.print_exc())\n\n # Calling analyses files - sentiment, trigger/driver and topic modelling\n try:\n print(\"Now tagging the dataset\")\n tagop_list = NgramMapping.main2(request, full_data_dict['tag_dict'])\n #tagop_list = NgramMapping.main2(\"headphones\", full_data_dict['tag_dict'])\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '50% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"50% complete\"\n except:\n print(\"Error while tagging dataset with dictionary\")\n print(traceback.print_exc())\n\n try:\n print(\"Calling sentiment analyses to run on uploaded file...\")\n sent_list = SentimentAnalysis_2.senti_main2(request, filecontents, full_data_dict['senti_dict'])\n #print sent_list\n 
print(\"Sentiment data inserted into DB\")\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '65% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"65% complete\"\n\n except:\n print(\"Error while analysing sentiment\")\n #print(traceback.print_exc())\n\n try:\n td_list = TrigDriv_2.td_main2(request, full_data_dict['td_dict'])\n #print td_list\n print(\"TriggerDriver data inserted into DB\")\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '80% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"80% complete\"\n except:\n print(\"Error while analysing triggers/drivers\")\n #print(traceback.print_exc())\n\n else:\n print(\"Calling Atlas1.main3()\")\n # if 'supplements_10k_1' not in request:\n status = ATLAS1.main3(request, filecontents, full_data_dict['tag_dict'])\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '15% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"15% complete\"\n\n # Calling analyses files - sentiment, trigger/driver and topic modelling\n try:\n print(\"Now classifying content categories\")\n cc_list = ContentCategories.main(request)\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '35% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"35% complete\"\n except:\n print(\"Error while classifying content categories\")\n print(traceback.print_exc())\n\n # Calling analyses files - sentiment, trigger/driver and topic modelling\n try:\n print(\"Now tagging the dataset with the dictionary provided\")\n tagop_list = NgramMapping.main3(request, full_data_dict['file_data'], full_data_dict['tag_dict'])\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '50% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"50% complete\"\n except:\n print(\"Error while tagging dataset with dictionary\")\n print(traceback.print_exc())\n\n try:\n print(\"Calling sentiment analyses to run on uploaded file...\")\n sent_list = SentimentAnalysis_2.senti_main3(request, filecontents, full_data_dict['senti_dict'])\n # print sent_list\n print(\"Sentiment data inserted into DB\")\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '65% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"65% complete\"\n\n except:\n print(\"Error while analysing sentiment\")\n # print(traceback.print_exc())\n\n try:\n td_list = TrigDriv_2.td_main3(request, full_data_dict['td_dict'])\n # print td_list\n print(\"TriggerDriver data inserted into DB\")\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '80% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"80% complete\"\n except:\n print(\"Error while analysing triggers/drivers\")\n # print(traceback.print_exc())\n # 
else:\n # try:\n # print(\"Now tagging the supplements dataset with the dictionary provided\")\n # tagop_list = NgramMapping.main3(request, full_data_dict['file_data'], full_data_dict['tag_dict'])\n # except:\n # print(\"Error while tagging supplement dataset with dictionary\")\n # print(traceback.print_exc())\n\n print \"Going to topic model\"\n # Performing Topic Modeling Analysis\n num_topics = 8\n topic_status = TopicModeling.main(request, num_topics)\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = 'Complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n df.ix[(df.reqKw == request), 'reqStatus'] = \"Complete\"\n\n # if status == 200 and sent_list == 200 and td_list == 200 and topic_status == 200:\n # # Update request csv status to completed\n # df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = \"Completed\"\n # elif status == 200 and sent_list == 200 and td_list == 200:\n # df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = \"Topic Modelling Failed\"\n # elif status == 200 and sent_list == 200:\n # df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = \"Trigger/Driver Failed\"\n # elif status == 200:\n # df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = \"Sentiment Failed\"\n # else:\n # df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = \"Scraping incomplete\"\n\n with open(dbConfig.dict[\"requestUrl\"], 'w') as f:\n df.to_csv(f, index=False)\n\n print(\"Exiting return\")\n return request\n\n\ndef caller(request, site, full_data_dict):\n print(full_data_dict['tag_dict']) # dict with default dict urls for automatic scraped data tagging\n print(\"Entering\", request, site)\n # df = pd.read_csv(dbConfig.dict[\"requestUrl\"], encoding='utf-8')\n # db = pymongo.MongoClient().atlas\n # s = request.encode('utf-8')\n\n status = ATLAS_v2.main(request, site)\n print(\"Atlas main finish\")\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '15% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n # df.ix[(df.reqKw == request), 'reqStatus'] = \"20% complete\"\n\n # Calling analyses files - sentiment, trigger/driver and topic modelling\n try:\n print(\"Now classifying content categories\")\n cc_list = ContentCategories.main(request)\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '35% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n # df.ix[(df.reqKw == request), 'reqStatus'] = \"40% complete\"\n except:\n print(\"Error while classifying content categories!\")\n print(traceback.print_exc())\n\n # Calling analyses files - sentiment, trigger/driver and topic modelling\n try:\n print(\"Now tagging the dataset...\")\n tagop_list = NgramMapping.main(request, full_data_dict['tag_dict'])\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '50% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n # df.ix[(df.reqKw == request), 'reqStatus'] = \"40% complete\"\n except:\n print(\"Error while tagging dataset with dictionary\")\n print(traceback.print_exc())\n\n try:\n sent_list = SentimentAnalysis_2.senti_main(request)\n #print sent_list\n print(\"Sentiment data inserted into DB\")\n try:\n req_obj = 
Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '65% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n # df.ix[(df.reqKw == request), 'reqStatus'] = \"60% complete\"\n except:\n print(\"Error while analysing sentiment\")\n print(traceback.print_exc())\n\n\n try:\n td_list = TrigDriv_2.td_main(request)\n #print td_list\n print(\"TriggerDriver data inserted into DB\")\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = '80% complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n # df.ix[(df.reqKw == request), 'reqStatus'] = \"80% complete\"\n except:\n print(\"Error while analysing triggers/drivers\")\n print(traceback.print_exc())\n\n print \"Going to topic model\"\n #logging.info(\"going to topicmodeling.main\")\n #\n #Performing Topic Modeling Analysis\n num_topics = 8\n topic_status = TopicModeling.main(request, num_topics)\n\n # df = pd.read_csv(dbConfig.dict[\"requestUrl\"], encoding='utf-8')\n # if status == 200 & sent_list[1] == 200 & topic_status == 200:\n # # Update request csv status to completed\n # df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = \"Completed\"\n # else:\n # df.ix[(df.reqKw == request) & (df.reqStatus == 'Pending'), 'reqStatus'] = \"Failed\"\n try:\n req_obj = Requests.objects.get(reqKw=request)\n req_obj.reqStatus = 'Complete'\n req_obj.save()\n except:\n print(\"Couldn't save status update in DB!\")\n print(traceback.print_exc())\n # df.ix[(df.reqKw == request), 'reqStatus'] = \"Complete\"\n # with open(dbConfig.dict[\"requestUrl\"], 'w') as f:\n # df.to_csv(f, index=False)\n\n print(\"Exiting Return\")\n return request\n\n\npool = ProcessPoolExecutor()\n\n\ndef pool_exe(request, site, full_data_dict): # to Rev\n future = pool.submit(caller, request, site, full_data_dict)\n print (\"Exit pool exe\\n\")\n\n\n#def pool_exe_file(request,filecontents):\n# future = pool.submit(caller_file, request, filecontents)\n# print(\"Exit file pool exe\\n\")\n\n\ndef pool_exe_file(full_data_dict): # to Upl, Soc\n future = pool.submit(caller_file, full_data_dict)\n print(\"Exit file pool exe\\n\")\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django.db.models import Q
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import render, redirect
from django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView
from carga_horaria.models import Profesor, AsignaturaBase, Asignatura, Asistente
from carga_horaria.formsAlexis import ProfesorForm, AsignaturaBaseForm, AsignaturaCreateForm, AsignaturaUpdateForm, AsistenteForm
from django.core.urlresolvers import reverse_lazy, reverse
from guardian.shortcuts import get_objects_for_user
from .models import Persona
from .models import Fundacion
from .models import Colegio
from .models import Periodo
from .models import Nivel


class LevelFilterMixin(object):
def get_context_data(self, *args, **kwargs):
ctx = super().get_context_data(*args, **kwargs)
ctx['levels'] = [(tag.name, tag.value) for tag in Nivel][::-1]
ctx['nivel_actual'] = self.request.GET.get('nivel')
return ctx

    def get_queryset(self):
qs = super().get_queryset()
nivel = self.request.GET.get('nivel')
if nivel:
qs = qs.filter(plan__nivel=nivel)
return qs


# FIXME: I will leave it like this for now,
# but it's still possible for somebody to poke object ids
# to see what they shouldn't see. Fix this!
class SearchMixin(object):
def get_queryset(self):
qs = super(SearchMixin, self).get_queryset()
q = self.request.GET.get('q', None)
if q:
if qs.model == Profesor:
qs = qs.filter(Q(persona__nombre__unaccent__icontains=q) | Q(persona__rut__unaccent__icontains=q) | Q(asignacionextra__descripcion__unaccent__icontains=q) | Q(asignacionnoaula__descripcion__unaccent__icontains=q))
else:
qs = qs.filter(Q(persona__nombre__unaccent__icontains=q) | Q(persona__rut__unaccent__icontains=q) | Q(asignacionasistente__descripcion__unaccent__icontains=q) | Q(funcion__unaccent__icontains=q))
return qs


def get_for_user(request, qs, lookup, user):
periodo = request.session.get('periodo', 2020)
if not user.is_superuser:
colegios = [c.pk for c in get_objects_for_user(user, "carga_horaria.change_colegio")]
# new logic for colegio switcher
selected = request.session.get('colegio__pk', None)
if selected:
colegios = [selected]
# end
kwargs = {"{}__in".format(lookup): colegios,
"{}periode".format(lookup[:-2]): periodo}
return qs.filter(**kwargs).distinct()
else:
colegios = [c.pk for c in Colegio.objects.all()]
# new logic for colegio switcher
selected = request.session.get('colegio__pk', None)
if selected:
colegios = [selected]
# end
kwargs = {"{}__in".format(lookup): colegios,
"{}periode".format(lookup[:-2]): periodo}
return qs.filter(**kwargs).distinct()
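# --- added usage note (hedged; the Profesor queryset is just an example) ---
# Typical call pattern for get_for_user(): scope a colegio-linked queryset to
# the colegios the current user may edit, within the periodo in session:
#
#     qs = get_for_user(request, Profesor.objects.all(), 'colegio__pk',
#                       request.user)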


class GetObjectsForUserMixin(object):
def get_queryset(self):
qs = super(GetObjectsForUserMixin, self).get_queryset()
periodo = self.request.session.get('periodo', 2020)
if not self.request.user.is_superuser:
colegios = [c.pk for c in get_objects_for_user(self.request.user, "carga_horaria.change_colegio")]
# new logic for colegio switcher
selected = self.request.session.get('colegio__pk', None)
if selected:
colegios = [selected]
# end
kwargs = {"{}__in".format(self.lookup): colegios,
"{}periode".format(self.lookup[:-2]): periodo}
return qs.filter(**kwargs).distinct()
else:
colegios = [c.pk for c in Colegio.objects.all()]
# new logic for colegio switcher
selected = self.request.session.get('colegio__pk', None)
if selected:
colegios = [selected]
# end
kwargs = {"{}__in".format(self.lookup): colegios,
"{}periode".format(self.lookup[:-2]): periodo}
return qs.filter(**kwargs).distinct()


class ObjPermissionRequiredMixin(object):
def get_object(self, *args, **kwargs):
obj = super(ObjPermissionRequiredMixin, self).get_object(*args, **kwargs)
if self.request.user.has_perm(self.permission, obj):
return obj
else:
raise Http404
"""
Comienzo Crud Profesor
"""
class ProfesorListView(LoginRequiredMixin, SearchMixin, GetObjectsForUserMixin, ListView):
"""
    List of professors
"""
model = Profesor
lookup = 'colegio__pk'
template_name = 'carga_horaria/profesor/listado_profesor.html'
search_fields = ['nombre', 'horas']
paginate_by = 6


class ProfesorDetailView(LoginRequiredMixin, DetailView):
    """
    Professor detail
    """
model = Profesor
template_name = 'carga_horaria/profesor/detalle_profesor.html'


class ProfesorCreateView(LoginRequiredMixin, CreateView):
model = Profesor
form_class = ProfesorForm
template_name = 'carga_horaria/profesor/nuevo_profesor.html'
success_url = reverse_lazy('carga-horaria:profesores')

    def get_form_kwargs(self, *args, **kwargs):
kwargs = super(ProfesorCreateView, self).get_form_kwargs(*args, **kwargs)
colegio_pk = self.request.session.get('colegio__pk', None)
if colegio_pk:
kwargs.update({'user': self.request.user,
'colegio': colegio_pk,
'fundacion': Colegio.objects.get(pk=self.request.session.get('colegio__pk', None)).fundacion.pk})
else:
kwargs.update({'user': self.request.user})
return kwargs

    def form_valid(self, form):
profesor = form.save(commit=False)
profesor.persona, _ = Persona.objects.update_or_create(rut=form.cleaned_data['rut'],
defaults={'nombre': form.cleaned_data['nombre'],
'direccion': form.cleaned_data['direccion'],
'comuna': form.cleaned_data['comuna'],
'nacionalidad': form.cleaned_data['nacionalidad'],
'telefono': form.cleaned_data['telefono'],
'email_personal': form.cleaned_data['email_personal'],
'email_institucional': form.cleaned_data['email_institucional'],
'estado_civil': form.cleaned_data['estado_civil'],
'discapacidad': form.cleaned_data['discapacidad'],
'recibe_pension': form.cleaned_data['recibe_pension'],
'adventista': form.cleaned_data['adventista'],
'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})
profesor.save()
return redirect(reverse('carga-horaria:profesores'))


class ProfesorUpdateView(LoginRequiredMixin, UpdateView):
model = Profesor
form_class = ProfesorForm
template_name = 'carga_horaria/profesor/editar_profesor.html'

    def get_form_kwargs(self, *args, **kwargs):
kwargs = super(ProfesorUpdateView, self).get_form_kwargs(*args, **kwargs)
colegio_pk = self.request.session.get('colegio__pk', None)
if colegio_pk:
kwargs.update({'user': self.request.user,
'colegio': colegio_pk,
'fundacion': Colegio.objects.get(pk=self.request.session.get('colegio__pk', None)).fundacion.pk})
else:
kwargs.update({'user': self.request.user})
return kwargs

    def form_valid(self, form):
profesor = form.save(commit=False)
profesor.persona, _ = Persona.objects.update_or_create(rut=form.cleaned_data['rut'],
defaults={'nombre': form.cleaned_data['nombre'],
'direccion': form.cleaned_data['direccion'],
'comuna': form.cleaned_data['comuna'],
'nacionalidad': form.cleaned_data['nacionalidad'],
'telefono': form.cleaned_data['telefono'],
'email_personal': form.cleaned_data['email_personal'],
'email_institucional': form.cleaned_data['email_institucional'],
'estado_civil': form.cleaned_data['estado_civil'],
'discapacidad': form.cleaned_data['discapacidad'],
'recibe_pension': form.cleaned_data['recibe_pension'],
'adventista': form.cleaned_data['adventista'],
'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})
profesor.save()
return redirect(self.get_success_url())

    def get_success_url(self):
return reverse(
'carga-horaria:profesor',
kwargs={
'pk': self.object.pk,
}
)


class ProfesorDeleteView(LoginRequiredMixin, DeleteView):
model = Profesor
success_url = reverse_lazy('carga-horaria:profesores')

    def get(self, request, *args, **kwargs):
return self.post(request, *args, **kwargs)
# """
# Comienzo Crud Curso
# """
# class CursoListView(ListView):
# """
# List of courses
# """
# model = Curso
# template_name = 'carga_horaria/curso/listado_curso.html'
# search_fields = ['periodo', 'letra']
# paginate_by = 6
# class CursoDetailView(DetailView):
# """
# Course detail
# """
# model = Curso
# template_name = 'carga_horaria/curso/detalle_curso.html'
# class CursoCreateView(CreateView):
# model = Curso
# form_class = CursoForm
# template_name = 'carga_horaria/curso/nuevo_curso.html'
# success_url = reverse_lazy('carga-horaria:cursos')
# class CursoUpdateView(UpdateView):
# model = Curso
# form_class = CursoForm
# template_name = 'carga_horaria/curso/editar_curso.html'
# def get_success_url(self):
# return reverse(
# 'carga-horaria:curso',
# kwargs={
# 'pk': self.object.pk,
# }
# )
# class CursoDeleteView(DeleteView):
# model = Curso
# success_url = reverse_lazy('carga-horaria:cursos')
# def get(self, request, *args, **kwargs):
# return self.post(request, *args, **kwargs)
"""
Comienzo Crud Asistente
"""
class AsistenteListView(LoginRequiredMixin, SearchMixin, GetObjectsForUserMixin, ListView):
"""
    Asistente list
"""
model = Asistente
lookup = 'colegio__pk'
template_name = 'carga_horaria/asistente/listado_asistente.html'
search_fields = ['nombre', 'horas']
paginate_by = 6


class AsistenteDetailView(LoginRequiredMixin, DetailView):
    """
    Asistente detail
    """
model = Asistente
template_name = 'carga_horaria/asistente/detalle_asistente.html'


class AsistenteCreateView(LoginRequiredMixin, CreateView):
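    """
    Create an Asistente together with its underlying Persona.
    """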
model = Asistente
form_class = AsistenteForm
template_name = 'carga_horaria/asistente/nuevo_asistente.html'
success_url = reverse_lazy('carga-horaria:asistentes')
def get_form_kwargs(self, *args, **kwargs):
kwargs = super(AsistenteCreateView, self).get_form_kwargs(*args, **kwargs)
        colegio_pk = self.request.session.get('colegio__pk', None)
        if colegio_pk:
            kwargs.update({'user': self.request.user,
                           'colegio': colegio_pk,
                           'fundacion': Colegio.objects.get(pk=colegio_pk).fundacion.pk})
else:
kwargs.update({'user': self.request.user})
return kwargs
def form_valid(self, form):
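        # Same Persona upsert as for Profesor: keyed by RUT, update or create.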
asistente = form.save(commit=False)
asistente.persona, _ = Persona.objects.update_or_create(rut=form.cleaned_data['rut'],
defaults={'nombre': form.cleaned_data['nombre'],
'direccion': form.cleaned_data['direccion'],
'comuna': form.cleaned_data['comuna'],
'nacionalidad': form.cleaned_data['nacionalidad'],
'telefono': form.cleaned_data['telefono'],
'email_personal': form.cleaned_data['email_personal'],
'email_institucional': form.cleaned_data['email_institucional'],
'estado_civil': form.cleaned_data['estado_civil'],
'discapacidad': form.cleaned_data['discapacidad'],
'recibe_pension': form.cleaned_data['recibe_pension'],
'adventista': form.cleaned_data['adventista'],
'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})
asistente.save()
return redirect(reverse('carga-horaria:asistentes'))


class AsistenteUpdateView(LoginRequiredMixin, UpdateView):
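    """
    Edit an existing Asistente, keeping its linked Persona in sync.
    """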
model = Asistente
form_class = AsistenteForm
template_name = 'carga_horaria/asistente/editar_asistente.html'
def get_success_url(self):
return reverse(
'carga-horaria:asistente',
kwargs={
'pk': self.object.pk,
}
)
def form_valid(self, form):
asistente = form.save(commit=False)
asistente.persona, _ = Persona.objects.update_or_create(rut=form.cleaned_data['rut'],
defaults={'nombre': form.cleaned_data['nombre'],
'direccion': form.cleaned_data['direccion'],
'comuna': form.cleaned_data['comuna'],
'nacionalidad': form.cleaned_data['nacionalidad'],
'telefono': form.cleaned_data['telefono'],
'email_personal': form.cleaned_data['email_personal'],
'email_institucional': form.cleaned_data['email_institucional'],
'estado_civil': form.cleaned_data['estado_civil'],
'discapacidad': form.cleaned_data['discapacidad'],
'recibe_pension': form.cleaned_data['recibe_pension'],
'adventista': form.cleaned_data['adventista'],
'fecha_nacimiento': form.cleaned_data['fecha_nacimiento']})
asistente.save()
return redirect(self.get_success_url())


class AsistenteDeleteView(LoginRequiredMixin, DeleteView):
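    """
    Delete an Asistente; GET is routed to POST so no confirmation page is shown.
    """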
model = Asistente
success_url = reverse_lazy('carga-horaria:asistentes')
def get(self, request, *args, **kwargs):
return self.post(request, *args, **kwargs)
"""
Comienzo Crud Asignatura Base
"""
class AsignaturaBaseListView(LoginRequiredMixin, GetObjectsForUserMixin, ListView):
"""
    AsignaturaBase list
"""
model = AsignaturaBase
lookup = 'plan__colegio__pk'
template_name = 'carga_horaria/asignaturabase/listado_asignaturabase.html'
search_fields = ['nombre', 'plan']
paginate_by = 10
def get_context_data(self, *args, **kwargs):
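        # Expose the Nivel choices and the currently selected nivel to the template.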
ctx = super().get_context_data(*args, **kwargs)
ctx['levels'] = [(tag.name, tag.value) for tag in Nivel]
ctx['nivel_actual'] = self.request.GET.get('nivel')
return ctx
def get_queryset(self):
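        # Optionally narrow the listing by the ?nivel= GET parameter.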
qs = super().get_queryset()
nivel = self.request.GET.get('nivel')
if nivel:
qs = qs.filter(plan__nivel=nivel)
return qs


class AsignaturaBaseDetailView(LoginRequiredMixin, DetailView):
    """
    AsignaturaBase detail
    """
model = AsignaturaBase
template_name = 'carga_horaria/asignaturabase/detalle_asignaturabase.html'


class AsignaturaBaseCreateView(LoginRequiredMixin, CreateView):
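    """
    Create an AsignaturaBase, scoping the form to the session-selected colegio.
    """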
model = AsignaturaBase
form_class = AsignaturaBaseForm
template_name = 'carga_horaria/asignaturabase/nuevo_asignaturabase.html'
success_url = reverse_lazy('carga-horaria:asignaturasbase')
def get_form_kwargs(self, *args, **kwargs):
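        # Pass the requesting user and the session-selected colegio to the form.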
kwargs = super(AsignaturaBaseCreateView, self).get_form_kwargs(*args, **kwargs)
kwargs.update({'user': self.request.user,
'colegio': self.request.session.get('colegio__pk', None)})
return kwargs


class AsignaturaBaseUpdateView(LoginRequiredMixin, UpdateView):
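    """
    Edit an existing AsignaturaBase.
    """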
model = AsignaturaBase
form_class = AsignaturaBaseForm
template_name = 'carga_horaria/asignaturabase/editar_asignaturabase.html'
def get_success_url(self):
return reverse(
'carga-horaria:asignaturabase',
kwargs={
'pk': self.object.pk,
}
)


class AsignaturaBaseDeleteView(LoginRequiredMixin, DeleteView):
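    """
    Delete an AsignaturaBase; GET is routed to POST so no confirmation page is shown.
    """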
model = AsignaturaBase
success_url = reverse_lazy('carga-horaria:asignaturasbase')
def get(self, request, *args, **kwargs):
return self.post(request, *args, **kwargs)
"""
Comienzo Crud Asignatura
"""
class AsignaturaListView(LoginRequiredMixin, ListView):
"""
    Asignatura list
"""
model = Asignatura
template_name = 'carga_horaria/asignatura/listado_asignatura.html'
search_fields = ['base', 'periodo']
paginate_by = 10
def get_context_data(self, *args, **kwargs):
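        # Expose the Nivel choices (reversed) and the current selection to the template.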
ctx = super().get_context_data(*args, **kwargs)
ctx['levels'] = [(tag.name, tag.value) for tag in Nivel][::-1]
ctx['nivel_actual'] = self.request.GET.get('nivel')
return ctx
def get_queryset(self):
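        # Narrow by the optional ?nivel= and ?periodo= GET parameters.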
qs = super().get_queryset()
nivel = self.request.GET.get('nivel')
if nivel:
qs = qs.filter(base__plan__nivel=nivel)
periodo = self.request.GET.get('periodo')
if periodo:
qs = qs.filter(periodo__pk=periodo)
return qs


class AsignaturaDetailView(LoginRequiredMixin, DetailView):
    """
    Asignatura detail
    """
model = Asignatura
template_name = 'carga_horaria/asignatura/detalle_asignatura.html'
def get_context_data(self, *args, **kwargs):
ctx = super().get_context_data(*args, **kwargs)
ctx['periodo'] = Periodo.objects.get(pk=self.kwargs['periodo_pk'])
return ctx


class AsignaturaCreateView(LoginRequiredMixin, CreateView):
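    """
    Create an Asignatura inside a Periodo, rejecting hours that exceed
    the period's remaining availability.
    """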
model = Asignatura
form_class = AsignaturaCreateForm
template_name = 'carga_horaria/asignatura/nuevo_asignatura.html'
def form_valid(self, form):
        # Lightweight validation: reject the form when the requested hours
        # exceed what the Periodo still has available.
periodo = Periodo.objects.get(pk=self.kwargs['pk'])
horas = form.cleaned_data['horas']
available = periodo.available
if horas > available:
form.add_error('horas', "Horas superan el tiempo disponible ({})".format(available))
return self.form_invalid(form)
else:
self.object = form.save()
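            # Link the new Asignatura to the Periodo it was created under.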
self.object.periodos.add(periodo)
return HttpResponseRedirect(self.get_success_url())
def get_success_url(self):
return reverse(
'carga-horaria:periodo',
kwargs={
'pk': self.kwargs['pk'],
}
)


class AsignaturaUpdateView(LoginRequiredMixin, UpdateView):
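    """
    Edit an Asignatura, re-validating hours against the period's availability
    and the base plan's minimum.
    """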
model = Asignatura
form_class = AsignaturaUpdateForm
template_name = 'carga_horaria/asignatura/editar_asignatura.html'
def get_success_url(self):
return reverse('carga-horaria:periodo', kwargs={'pk': self.kwargs['periodo_pk']})
def form_valid(self, form):
        # Lightweight validation: the hours delta must fit into the period's
        # availability, and hours may not fall below the base plan's minimum.
periodo = Periodo.objects.get(pk=self.kwargs['periodo_pk'])
horas = form.cleaned_data['horas']
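        # self.object already carries the submitted value at this point, so the
        # previous hours are re-read from the database to compute the delta.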
old_horas = Asignatura.objects.get(pk=self.object.pk).horas
delta = horas - old_horas
available = periodo.available
if delta > available:
form.add_error('horas', "Horas superan el tiempo disponible ({})".format(available + old_horas))
return self.form_invalid(form)
elif self.object.base:
if periodo.colegio.jec:
horas_base = self.object.base.horas_jec
else:
horas_base = self.object.base.horas_nec
if horas < horas_base:
form.add_error('horas', "Horas deben ser como mínimo las del plan de estudios original ({})".format(horas_base))
return self.form_invalid(form)
return super().form_valid(form)


class AsignaturaDeleteView(LoginRequiredMixin, DeleteView):
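    """
    Delete an Asignatura; GET is routed to POST, redirecting back to the
    owning Periodo.
    """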
model = Asignatura
def get(self, request, *args, **kwargs):
return self.post(request, *args, **kwargs)
def get_success_url(self):
return reverse(
'carga-horaria:periodo',
kwargs={
'pk': self.kwargs['periodo_pk'],
}
)
return reverse(\n 'carga-horaria:periodo',\n kwargs={\n 'pk': self.kwargs['periodo_pk'],\n }\n )\n",
"step-ids": [
52,
53,
56,
73,
85
]
}
|
[
52,
53,
56,
73,
85
] |
import sys
from PyQt5 import QtWidgets
from PyQt5.QtWidgets import QMainWindow, QApplication
#--- Import that loads the .ui file at runtime ---#
from PyQt5.uic import loadUi
import detechRs_rc  #--- registers the images compiled from the .qrc file into detechRs_rc.py ---#
#--- Window class that loads the UI file ---#
class Login(QMainWindow):
    def __init__(self):
        super(Login, self).__init__()
        #--- build the widget tree from the Designer file ---#
        loadUi("login_UI.ui", self)
        #--- when the login button is clicked, call loginFunction ---#
        self.loginButton.clicked.connect(self.loginFunction)
    def loginFunction(self):
        lgUserLine = self.lgUserLine.text()  #--- text of the lgUserLine QLineEdit ---#
        lgPassLine = self.lgPassLine.text()  #--- text of the lgPassLine QLineEdit ---#
        #--- echo what was typed into the textboxes (QLineEdit) to the terminal ---#
        print("Success, ", lgUserLine, "and ", lgPassLine)
app = QApplication(sys.argv)
loginWindow = Login()
widget = QtWidgets.QStackedWidget()
widget.addWidget(loginWindow)  #--- the stack shows the login window ---#
widget.setFixedWidth(1190)   #--- fixed window width ---#
widget.setFixedHeight(782)   #--- fixed window height ---#
widget.show()
app.exec_()  #--- enter the Qt event loop ---#
|
normal
|
{
"blob_id": "a9b1cc9b928b8999450b6c95656b863c476b273b",
"index": 7355,
"step-1": "<mask token>\n\n\nclass Login(QMainWindow):\n\n def __init__(self):\n super(Login, self).__init__()\n loadUi('login_UI.ui', self)\n self.loginButton.clicked.connect(self.loginFunction)\n\n def loginFunction(self):\n lgUserLine = self.lgUserLine.text()\n lgPassLine = self.lgPassLine.text()\n print('Success, ', lgUserLine, 'and ', lgPassLine)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Login(QMainWindow):\n\n def __init__(self):\n super(Login, self).__init__()\n loadUi('login_UI.ui', self)\n self.loginButton.clicked.connect(self.loginFunction)\n\n def loginFunction(self):\n lgUserLine = self.lgUserLine.text()\n lgPassLine = self.lgPassLine.text()\n print('Success, ', lgUserLine, 'and ', lgPassLine)\n\n\n<mask token>\nwidget.addWidget(loginWindow)\nwidget.setFixedWidth(1190)\nwidget.setFixedHeight(782)\nwidget.show()\napp.exec_()\n",
"step-3": "<mask token>\n\n\nclass Login(QMainWindow):\n\n def __init__(self):\n super(Login, self).__init__()\n loadUi('login_UI.ui', self)\n self.loginButton.clicked.connect(self.loginFunction)\n\n def loginFunction(self):\n lgUserLine = self.lgUserLine.text()\n lgPassLine = self.lgPassLine.text()\n print('Success, ', lgUserLine, 'and ', lgPassLine)\n\n\napp = QApplication(sys.argv)\nloginWindow = Login()\nwidget = QtWidgets.QStackedWidget()\nwidget.addWidget(loginWindow)\nwidget.setFixedWidth(1190)\nwidget.setFixedHeight(782)\nwidget.show()\napp.exec_()\n",
"step-4": "import sys\nfrom PyQt5 import QtWidgets\nfrom PyQt5.QtWidgets import QMainWindow, QApplication\nfrom PyQt5.uic import loadUi\nimport detechRs_rc\n\n\nclass Login(QMainWindow):\n\n def __init__(self):\n super(Login, self).__init__()\n loadUi('login_UI.ui', self)\n self.loginButton.clicked.connect(self.loginFunction)\n\n def loginFunction(self):\n lgUserLine = self.lgUserLine.text()\n lgPassLine = self.lgPassLine.text()\n print('Success, ', lgUserLine, 'and ', lgPassLine)\n\n\napp = QApplication(sys.argv)\nloginWindow = Login()\nwidget = QtWidgets.QStackedWidget()\nwidget.addWidget(loginWindow)\nwidget.setFixedWidth(1190)\nwidget.setFixedHeight(782)\nwidget.show()\napp.exec_()\n",
"step-5": "import sys\r\nfrom PyQt5 import QtWidgets\r\nfrom PyQt5.QtWidgets import QMainWindow, QApplication\r\n\r\n#---Import that will load the UI file---#\r\nfrom PyQt5.uic import loadUi\r\n\r\nimport detechRs_rc #---THIS IMPORT WILL DISPLAY THE IMAGES STORED IN THE QRC FILE AND _rc.py FILE--#\r\n\r\n#--CLASS CREATED THAT WILL LOAD THE UI FILE\r\nclass Login(QMainWindow):\r\n def __init__(self):\r\n super(Login, self).__init__()\r\n # --- FROM THE IMPORT PYQT5.UIC IMPORT LOADUI---##\r\n loadUi(\"login_UI.ui\",self)\r\n\r\n #--- a code once the login button clicked, will call the loginFunction ---#\r\n self.loginButton.clicked.connect(self.loginFunction)\r\n\r\n #-- Created a function called \"loginFunction\" --#\r\n def loginFunction(self):\r\n lgUserLine=self.lgUserLine.text() #-- Getting the textbox context lgUserline --#\r\n lgPassLine=self.lgPassLine.text() #-- Getting the textbox context lgPassline --#\r\n\r\n #-- Will display at the terminal what you wrote in the textbox(QLineEdit) --#\r\n print(\"Success, \", lgUserLine, \"and \", lgPassLine)\r\n\r\n\r\n\r\napp=QApplication(sys.argv)\r\nloginWindow=Login()\r\nwidget=QtWidgets.QStackedWidget()\r\nwidget.addWidget(loginWindow) #-- displays all design widgets of the UI Window --#\r\nwidget.setFixedWidth(1190) #-- setting the fixed window size in width --#\r\nwidget.setFixedHeight(782) #-- setting the fixed window size in height--#\r\nwidget.show()\r\napp.exec_() #-- window execution --#",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
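A note on the row above: loadUi re-parses the XML on every instantiation and binds each widget under its Designer objectName, which is where self.loginButton and self.lgUserLine come from. A sketch of the loadUiType alternative, which compiles the same assumed login_UI.ui once at import time:

from PyQt5 import uic
from PyQt5.QtWidgets import QMainWindow

FormClass, BaseClass = uic.loadUiType("login_UI.ui")  # parse the Designer XML once

class Login(BaseClass, FormClass):
    def __init__(self):
        super(Login, self).__init__()
        self.setupUi(self)  # creates self.loginButton etc. from each objectName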
from pyparsing import ParseException
from pytest import raises
from easymql.expressions import Expression as exp
class TestComparisonExpression:
def test_cmp(self):
assert exp.parse('CMP(1, 2)') == {'$cmp': [1, 2]}
with raises(ParseException):
exp.parse('CMP(1)')
with raises(ParseException):
exp.parse('CMP(1, 2, 3)')
assert exp.parse('CMP(1, 3 + 2)') == {'$cmp': [1, {'$add': [3, 2]}]}
|
normal
|
{
"blob_id": "91959f6621f05b1b814a025f0b95c55cf683ded3",
"index": 5856,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestComparisonExpression:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TestComparisonExpression:\n\n def test_cmp(self):\n assert exp.parse('CMP(1, 2)') == {'$cmp': [1, 2]}\n with raises(ParseException):\n exp.parse('CMP(1)')\n with raises(ParseException):\n exp.parse('CMP(1, 2, 3)')\n assert exp.parse('CMP(1, 3 + 2)') == {'$cmp': [1, {'$add': [3, 2]}]}\n",
"step-4": "from pyparsing import ParseException\nfrom pytest import raises\nfrom easymql.expressions import Expression as exp\n\n\nclass TestComparisonExpression:\n\n def test_cmp(self):\n assert exp.parse('CMP(1, 2)') == {'$cmp': [1, 2]}\n with raises(ParseException):\n exp.parse('CMP(1)')\n with raises(ParseException):\n exp.parse('CMP(1, 2, 3)')\n assert exp.parse('CMP(1, 3 + 2)') == {'$cmp': [1, {'$add': [3, 2]}]}\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
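For context on the test above: MongoDB's $cmp resolves to -1, 0, or 1 at evaluation time; the parser only builds the expression tree, which is why 3 + 2 stays as {'$add': [3, 2]} rather than being folded to 5. If the grammar is symmetric, the mirrored case should parse the same way (a sketch, not taken from the test file):

assert exp.parse('CMP(1 + 1, 2)') == {'$cmp': [{'$add': [1, 1]}, 2]}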
from objet import Objet
class Piece(Objet):
""" Représente une piece qui permet d'acheter dans la boutique """
def ramasser(self, joueur):
joueur.addPiece()
def depenser(self,joueur):
joueur.depenserPiece()
def description(self):
return "Vous avez trouvé une piece, peut etre trouverez vous un marchand"
|
normal
|
{
"blob_id": "b6898b923e286c66673df1e07105adf789c3151c",
"index": 6335,
"step-1": "<mask token>\n\n\nclass Piece(Objet):\n <mask token>\n\n def ramasser(self, joueur):\n joueur.addPiece()\n\n def depenser(self, joueur):\n joueur.depenserPiece()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Piece(Objet):\n <mask token>\n\n def ramasser(self, joueur):\n joueur.addPiece()\n\n def depenser(self, joueur):\n joueur.depenserPiece()\n\n def description(self):\n return (\n 'Vous avez trouvé une piece, peut etre trouverez vous un marchand')\n",
"step-3": "<mask token>\n\n\nclass Piece(Objet):\n \"\"\" Représente une piece qui permet d'acheter dans la boutique \"\"\"\n\n def ramasser(self, joueur):\n joueur.addPiece()\n\n def depenser(self, joueur):\n joueur.depenserPiece()\n\n def description(self):\n return (\n 'Vous avez trouvé une piece, peut etre trouverez vous un marchand')\n",
"step-4": "from objet import Objet\n\n\nclass Piece(Objet):\n \"\"\" Représente une piece qui permet d'acheter dans la boutique \"\"\"\n\n def ramasser(self, joueur):\n joueur.addPiece()\n\n def depenser(self, joueur):\n joueur.depenserPiece()\n\n def description(self):\n return (\n 'Vous avez trouvé une piece, peut etre trouverez vous un marchand')\n",
"step-5": "from objet import Objet\n\nclass Piece(Objet):\n \"\"\" Représente une piece qui permet d'acheter dans la boutique \"\"\"\n \n def ramasser(self, joueur):\n joueur.addPiece()\n\n def depenser(self,joueur):\n joueur.depenserPiece()\n \n def description(self):\n return \"Vous avez trouvé une piece, peut etre trouverez vous un marchand\"",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import glob
pyfiles = glob.glob('*.py')
modulenames = [f.split('.')[0] for f in pyfiles]
# print(modulenames)
for f in pyfiles:
    contents = open(f).read()
    for m in modulenames:
        v1 = "import " + m
        v2 = "from " + m
        # `v1 or v2 in contents` tests the truthiness of v1, not membership
        if v1 in contents or v2 in contents:
            # "import .m" is invalid syntax; the relative forms are
            # "from . import m" and "from .m import ..."
            contents = contents.replace(v1, "from . import " + m)
            contents = contents.replace(v2, "from ." + m)
    with open('new_' + f, 'w') as outf:
        outf.write(contents)
|
normal
|
{
"blob_id": "d6a73365aa32c74798b6887ff46c0ed2323ed1a6",
"index": 2324,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor f in pyfiles:\n contents = open(f).read()\n for m in modulenames:\n v1 = 'import ' + m\n v2 = 'from ' + m\n if v1 or v2 in contents:\n contents = contents.replace(v1, 'import .' + m)\n contents = contents.replace(v2, 'from .' + m)\n with open('new_' + f, 'w') as outf:\n outf.write(contents)\n",
"step-3": "<mask token>\npyfiles = glob.glob('*.py')\nmodulenames = [f.split('.')[0] for f in pyfiles]\nfor f in pyfiles:\n contents = open(f).read()\n for m in modulenames:\n v1 = 'import ' + m\n v2 = 'from ' + m\n if v1 or v2 in contents:\n contents = contents.replace(v1, 'import .' + m)\n contents = contents.replace(v2, 'from .' + m)\n with open('new_' + f, 'w') as outf:\n outf.write(contents)\n",
"step-4": "import glob\npyfiles = glob.glob('*.py')\nmodulenames = [f.split('.')[0] for f in pyfiles]\nfor f in pyfiles:\n contents = open(f).read()\n for m in modulenames:\n v1 = 'import ' + m\n v2 = 'from ' + m\n if v1 or v2 in contents:\n contents = contents.replace(v1, 'import .' + m)\n contents = contents.replace(v2, 'from .' + m)\n with open('new_' + f, 'w') as outf:\n outf.write(contents)\n",
"step-5": "import glob\n\npyfiles = glob.glob('*.py')\n\nmodulenames = [f.split('.')[0] for f in pyfiles]\n\n# print(modulenames)\n\nfor f in pyfiles:\n contents = open(f).read()\n for m in modulenames:\n v1 = \"import \" + m\n v2 = \"from \" + m\n if v1 or v2 in contents:\n contents = contents.replace(v1, \"import .\"+m)\n contents = contents.replace(v2, \"from .\"+m)\n with open('new_'+f, 'w') as outf:\n outf.write(contents)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
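The plain substring replacement above can still rewrite an import whose module name merely starts with a local name (a local `util` would match `import utilities`). A regex sketch with line anchors and word boundaries avoids that:

import re

def relativize(contents, modulenames):
    # ^ anchors each line (re.M); \b stops partial-name matches
    for m in modulenames:
        contents = re.sub(r'^import %s\b' % re.escape(m),
                          'from . import %s' % m, contents, flags=re.M)
        contents = re.sub(r'^from %s\b' % re.escape(m),
                          'from .%s' % m, contents, flags=re.M)
    return contents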
import pandas as pd
import numpy as np
import urllib.request
import urllib.parse
import json
def predict(input_text):
URL = "http://127.0.0.1:8000/api/v1/predict/"
    data = urllib.parse.urlencode({'input_text': input_text}).encode('utf-8')
    request = urllib.request.Request(URL, data)  # a data payload makes this a POST
    response = urllib.request.urlopen(request)
    result = json.loads(response.read())
return result['neg_pos']
if __name__ == '__main__':
print("Start if __name__ == '__main__'")
print('load csv file ....')
df = pd.read_csv("test.csv", engine="python", encoding="utf-8-sig")
df["PREDICT"] = np.nan #予測列を追加
print('Getting prediction results ....')
for index, row in df.iterrows():
df.at[index, "PREDICT"] = predict(row['INPUT'])
print('save results to csv file')
    df.to_csv("predicted_test.csv", encoding="utf-8-sig", index=False)
print('Processing terminated normally.')
|
normal
|
{
"blob_id": "b7632cc7d8fc2f9096f7a6bb61c471dc61689f70",
"index": 8342,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef predict(input_text):\n URL = 'http://127.0.0.1:8000/api/v1/predict/'\n values = {'format': 'json', 'input_text': input_text}\n data = urllib.parse.urlencode({'input_text': input_text}).encode('utf-8')\n request = urllib.request.Request(URL, data)\n response = urllib.request.urlopen(request)\n result = json.loads(response.read())\n return result['neg_pos']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef predict(input_text):\n URL = 'http://127.0.0.1:8000/api/v1/predict/'\n values = {'format': 'json', 'input_text': input_text}\n data = urllib.parse.urlencode({'input_text': input_text}).encode('utf-8')\n request = urllib.request.Request(URL, data)\n response = urllib.request.urlopen(request)\n result = json.loads(response.read())\n return result['neg_pos']\n\n\nif __name__ == '__main__':\n print(\"Start if __name__ == '__main__'\")\n print('load csv file ....')\n df = pd.read_csv('test.csv', engine='python', encoding='utf-8-sig')\n df['PREDICT'] = np.nan\n print('Getting prediction results ....')\n for index, row in df.iterrows():\n df.at[index, 'PREDICT'] = predict(row['INPUT'])\n print('save results to csv file')\n df.to_csv('predicted_test .csv', encoding='utf-8-sig', index=False)\n print('Processing terminated normally.')\n",
"step-4": "import pandas as pd\nimport numpy as np\nimport urllib.request\nimport urllib.parse\nimport json\n\n\ndef predict(input_text):\n URL = 'http://127.0.0.1:8000/api/v1/predict/'\n values = {'format': 'json', 'input_text': input_text}\n data = urllib.parse.urlencode({'input_text': input_text}).encode('utf-8')\n request = urllib.request.Request(URL, data)\n response = urllib.request.urlopen(request)\n result = json.loads(response.read())\n return result['neg_pos']\n\n\nif __name__ == '__main__':\n print(\"Start if __name__ == '__main__'\")\n print('load csv file ....')\n df = pd.read_csv('test.csv', engine='python', encoding='utf-8-sig')\n df['PREDICT'] = np.nan\n print('Getting prediction results ....')\n for index, row in df.iterrows():\n df.at[index, 'PREDICT'] = predict(row['INPUT'])\n print('save results to csv file')\n df.to_csv('predicted_test .csv', encoding='utf-8-sig', index=False)\n print('Processing terminated normally.')\n",
"step-5": "import pandas as pd\r\nimport numpy as np\r\nimport urllib.request\r\nimport urllib.parse\r\nimport json\r\n\r\ndef predict(input_text):\r\n URL = \"http://127.0.0.1:8000/api/v1/predict/\"\r\n values = {\r\n \"format\": \"json\",\r\n \"input_text\": input_text,\r\n }\r\n data = urllib.parse.urlencode({'input_text': input_text}).encode('utf-8')\r\n request = urllib.request.Request(URL, data)\r\n response = urllib.request.urlopen(request)\r\n result= json.loads(response.read())\r\n return result['neg_pos']\r\n\r\nif __name__ == '__main__':\r\n print(\"Start if __name__ == '__main__'\")\r\n print('load csv file ....')\r\n df = pd.read_csv(\"test.csv\", engine=\"python\", encoding=\"utf-8-sig\")\r\n df[\"PREDICT\"] = np.nan #予測列を追加\r\n print('Getting prediction results ....')\r\n for index, row in df.iterrows():\r\n df.at[index, \"PREDICT\"] = predict(row['INPUT'])\r\n print('save results to csv file')\r\n df.to_csv(\"predicted_test .csv\", encoding=\"utf-8-sig\", index=False)\r\n print('Processing terminated normally.')\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
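Because a data payload is passed to urllib.request.Request, the call above is a POST. The same client written with the requests library, as a sketch that assumes the endpoint keeps returning JSON of the form {"neg_pos": ...}:

import requests

def predict(input_text):
    r = requests.post("http://127.0.0.1:8000/api/v1/predict/",
                      data={"input_text": input_text})
    return r.json()["neg_pos"]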
# Return min number of hacks (swap of adjacent instructions)
# in p so that total damage <= d.
# If impossible, return -1
def min_hacks(d, p):
# list containing number of shoot commands per
# damage level. Each element is represents a
# damage level; 1, 2, 4, 8, ... and so on.
shots = [0]
damage = 0
for c in p:
if c == "S":
shots[-1] += 1
# we can also calculate damage here.
damage += 2 ** (len(shots) - 1)
else:
shots.append(0)
# each hack represents moving 1 shot down 1 element
# in the shots list. So keep doing this until
# damage is <= d.
hacks = 0
while damage > d:
# move 1 shot from highest element possible down 1 element.
hacked = False
for i in range(len(shots)-1, 0, -1):
if shots[i] > 0:
shots[i] -= 1
shots[i-1] += 1
damage -= 2 ** (i - 1) # damage = damage - 2**i + 2**(i-1)
hacks += 1
hacked = True
break
if not hacked:
# impossible to get damage <= d!
return -1
return hacks
num_cases = int(input())
for i in range(1, num_cases+1):
current_case = input().split()
d = int(current_case[0])
p = current_case[1]
solution = min_hacks(d, p)
if solution < 0:
solution_string = "IMPOSSIBLE"
else:
solution_string = str(solution)
print("Case #{:d}: {:s}".format(i, solution_string))
|
normal
|
{
"blob_id": "607700faebc2018327d66939419cc24a563c3900",
"index": 6515,
"step-1": "<mask token>\n",
"step-2": "def min_hacks(d, p):\n shots = [0]\n damage = 0\n for c in p:\n if c == 'S':\n shots[-1] += 1\n damage += 2 ** (len(shots) - 1)\n else:\n shots.append(0)\n hacks = 0\n while damage > d:\n hacked = False\n for i in range(len(shots) - 1, 0, -1):\n if shots[i] > 0:\n shots[i] -= 1\n shots[i - 1] += 1\n damage -= 2 ** (i - 1)\n hacks += 1\n hacked = True\n break\n if not hacked:\n return -1\n return hacks\n\n\n<mask token>\n",
"step-3": "def min_hacks(d, p):\n shots = [0]\n damage = 0\n for c in p:\n if c == 'S':\n shots[-1] += 1\n damage += 2 ** (len(shots) - 1)\n else:\n shots.append(0)\n hacks = 0\n while damage > d:\n hacked = False\n for i in range(len(shots) - 1, 0, -1):\n if shots[i] > 0:\n shots[i] -= 1\n shots[i - 1] += 1\n damage -= 2 ** (i - 1)\n hacks += 1\n hacked = True\n break\n if not hacked:\n return -1\n return hacks\n\n\n<mask token>\nfor i in range(1, num_cases + 1):\n current_case = input().split()\n d = int(current_case[0])\n p = current_case[1]\n solution = min_hacks(d, p)\n if solution < 0:\n solution_string = 'IMPOSSIBLE'\n else:\n solution_string = str(solution)\n print('Case #{:d}: {:s}'.format(i, solution_string))\n",
"step-4": "def min_hacks(d, p):\n shots = [0]\n damage = 0\n for c in p:\n if c == 'S':\n shots[-1] += 1\n damage += 2 ** (len(shots) - 1)\n else:\n shots.append(0)\n hacks = 0\n while damage > d:\n hacked = False\n for i in range(len(shots) - 1, 0, -1):\n if shots[i] > 0:\n shots[i] -= 1\n shots[i - 1] += 1\n damage -= 2 ** (i - 1)\n hacks += 1\n hacked = True\n break\n if not hacked:\n return -1\n return hacks\n\n\nnum_cases = int(input())\nfor i in range(1, num_cases + 1):\n current_case = input().split()\n d = int(current_case[0])\n p = current_case[1]\n solution = min_hacks(d, p)\n if solution < 0:\n solution_string = 'IMPOSSIBLE'\n else:\n solution_string = str(solution)\n print('Case #{:d}: {:s}'.format(i, solution_string))\n",
"step-5": "# Return min number of hacks (swap of adjacent instructions)\n# in p so that total damage <= d.\n# If impossible, return -1\ndef min_hacks(d, p):\n\n # list containing number of shoot commands per\n # damage level. Each element is represents a\n # damage level; 1, 2, 4, 8, ... and so on.\n shots = [0]\n damage = 0\n for c in p:\n if c == \"S\":\n shots[-1] += 1\n # we can also calculate damage here.\n damage += 2 ** (len(shots) - 1)\n else:\n shots.append(0)\n\n # each hack represents moving 1 shot down 1 element\n # in the shots list. So keep doing this until\n # damage is <= d.\n hacks = 0\n while damage > d:\n # move 1 shot from highest element possible down 1 element.\n hacked = False\n for i in range(len(shots)-1, 0, -1):\n if shots[i] > 0:\n shots[i] -= 1\n shots[i-1] += 1\n damage -= 2 ** (i - 1) # damage = damage - 2**i + 2**(i-1)\n hacks += 1\n hacked = True\n break\n\n if not hacked:\n # impossible to get damage <= d!\n return -1\n\n return hacks\n\nnum_cases = int(input())\nfor i in range(1, num_cases+1):\n current_case = input().split()\n d = int(current_case[0])\n p = current_case[1]\n solution = min_hacks(d, p)\n if solution < 0:\n solution_string = \"IMPOSSIBLE\"\n else:\n solution_string = str(solution)\n print(\"Case #{:d}: {:s}\".format(i, solution_string))\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
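A worked check of min_hacks, traceable by hand against the code above:

# "SCS" deals 1 + 2 = 3 damage (one shot at level 0, one at level 1).
# Swapping the trailing "CS" to "SC" moves the second shot down a level,
# leaving 1 + 1 = 2 damage, so a single hack suffices for d = 2.
assert min_hacks(3, "SCS") == 0
assert min_hacks(2, "SCS") == 1
assert min_hacks(1, "SS") == -1  # two shots always deal at least 2 damage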
from matasano import *
ec = EC_M(233970423115425145524320034830162017933,534,1,4,order=233970423115425145498902418297807005944)
assert(ec.scale(4,ec.order) == 0)
aPriv = randint(1,ec.order-1)
aPub = ec.scale(4,aPriv)
print("Factoring...")
twist_ord = 2*ec.prime+2 - ec.order
factors = []
x = twist_ord
for i in range(2,2**24):
if x%i == 0:
if x%(i*i) != 0:
factors.append(i)
x = pp(x,i)
print("Getting remainders...")
rems = []
for f in factors:
u = 0
while u == 0:
while isQRes((u**3+ec.A*u**2+u)%ec.prime,ec.prime):
u = randint(1,ec.prime-1)
u = ec.scale(u,pp(twist_ord,f))
while ec.scale(u,f) != 0:
u = ec.scale(u,f)
shared = ec.scale(u,aPriv) #Not generating the MAC this time
for i in range(f):
if ec.scale(u,i) == shared:
print("\tSolved mod %d"%f)
rems.append(i)
break
#Now aPriv is +-rems[i] mod factors[i]
#Do them 2 at a time to get down to 2 values mod Prod factors[i]
print("Correcting parities...")
for i in range(len(factors)):
if rems[i] != 0:
break
fixed = i
for i in range(len(factors)):
if i == fixed:
continue
u = 0
while u == 0:
while isQRes((u**3+ec.A*u**2+u)%ec.prime,ec.prime):
u = randint(1,ec.prime-1)
u = ec.scale(u,pp(pp(twist_ord,factors[fixed]),factors[i]))
if ec.scale(u,factors[fixed]) == 0:
u = 0
elif ec.scale(u,factors[i]) == 0:
u = 0
shared = ec.scale(u,aPriv)
r,_ = crt([rems[fixed],rems[i]],[factors[fixed],factors[i]])
if ec.scale(u,r) != shared:
rems[i] = (-rems[i])%factors[i]
#Now I need to run down the remaining bits
|
normal
|
{
"blob_id": "b5275fc068526063fd8baf13210052971b05503f",
"index": 585,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nassert ec.scale(4, ec.order) == 0\n<mask token>\nprint('Factoring...')\n<mask token>\nfor i in range(2, 2 ** 24):\n if x % i == 0:\n if x % (i * i) != 0:\n factors.append(i)\n x = pp(x, i)\nprint('Getting remainders...')\n<mask token>\nfor f in factors:\n u = 0\n while u == 0:\n while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):\n u = randint(1, ec.prime - 1)\n u = ec.scale(u, pp(twist_ord, f))\n while ec.scale(u, f) != 0:\n u = ec.scale(u, f)\n shared = ec.scale(u, aPriv)\n for i in range(f):\n if ec.scale(u, i) == shared:\n print('\\tSolved mod %d' % f)\n rems.append(i)\n break\nprint('Correcting parities...')\nfor i in range(len(factors)):\n if rems[i] != 0:\n break\n<mask token>\nfor i in range(len(factors)):\n if i == fixed:\n continue\n u = 0\n while u == 0:\n while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):\n u = randint(1, ec.prime - 1)\n u = ec.scale(u, pp(pp(twist_ord, factors[fixed]), factors[i]))\n if ec.scale(u, factors[fixed]) == 0:\n u = 0\n elif ec.scale(u, factors[i]) == 0:\n u = 0\n shared = ec.scale(u, aPriv)\n r, _ = crt([rems[fixed], rems[i]], [factors[fixed], factors[i]])\n if ec.scale(u, r) != shared:\n rems[i] = -rems[i] % factors[i]\n",
"step-3": "<mask token>\nec = EC_M(233970423115425145524320034830162017933, 534, 1, 4, order=\n 233970423115425145498902418297807005944)\nassert ec.scale(4, ec.order) == 0\naPriv = randint(1, ec.order - 1)\naPub = ec.scale(4, aPriv)\nprint('Factoring...')\ntwist_ord = 2 * ec.prime + 2 - ec.order\nfactors = []\nx = twist_ord\nfor i in range(2, 2 ** 24):\n if x % i == 0:\n if x % (i * i) != 0:\n factors.append(i)\n x = pp(x, i)\nprint('Getting remainders...')\nrems = []\nfor f in factors:\n u = 0\n while u == 0:\n while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):\n u = randint(1, ec.prime - 1)\n u = ec.scale(u, pp(twist_ord, f))\n while ec.scale(u, f) != 0:\n u = ec.scale(u, f)\n shared = ec.scale(u, aPriv)\n for i in range(f):\n if ec.scale(u, i) == shared:\n print('\\tSolved mod %d' % f)\n rems.append(i)\n break\nprint('Correcting parities...')\nfor i in range(len(factors)):\n if rems[i] != 0:\n break\nfixed = i\nfor i in range(len(factors)):\n if i == fixed:\n continue\n u = 0\n while u == 0:\n while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):\n u = randint(1, ec.prime - 1)\n u = ec.scale(u, pp(pp(twist_ord, factors[fixed]), factors[i]))\n if ec.scale(u, factors[fixed]) == 0:\n u = 0\n elif ec.scale(u, factors[i]) == 0:\n u = 0\n shared = ec.scale(u, aPriv)\n r, _ = crt([rems[fixed], rems[i]], [factors[fixed], factors[i]])\n if ec.scale(u, r) != shared:\n rems[i] = -rems[i] % factors[i]\n",
"step-4": "from matasano import *\nec = EC_M(233970423115425145524320034830162017933, 534, 1, 4, order=\n 233970423115425145498902418297807005944)\nassert ec.scale(4, ec.order) == 0\naPriv = randint(1, ec.order - 1)\naPub = ec.scale(4, aPriv)\nprint('Factoring...')\ntwist_ord = 2 * ec.prime + 2 - ec.order\nfactors = []\nx = twist_ord\nfor i in range(2, 2 ** 24):\n if x % i == 0:\n if x % (i * i) != 0:\n factors.append(i)\n x = pp(x, i)\nprint('Getting remainders...')\nrems = []\nfor f in factors:\n u = 0\n while u == 0:\n while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):\n u = randint(1, ec.prime - 1)\n u = ec.scale(u, pp(twist_ord, f))\n while ec.scale(u, f) != 0:\n u = ec.scale(u, f)\n shared = ec.scale(u, aPriv)\n for i in range(f):\n if ec.scale(u, i) == shared:\n print('\\tSolved mod %d' % f)\n rems.append(i)\n break\nprint('Correcting parities...')\nfor i in range(len(factors)):\n if rems[i] != 0:\n break\nfixed = i\nfor i in range(len(factors)):\n if i == fixed:\n continue\n u = 0\n while u == 0:\n while isQRes((u ** 3 + ec.A * u ** 2 + u) % ec.prime, ec.prime):\n u = randint(1, ec.prime - 1)\n u = ec.scale(u, pp(pp(twist_ord, factors[fixed]), factors[i]))\n if ec.scale(u, factors[fixed]) == 0:\n u = 0\n elif ec.scale(u, factors[i]) == 0:\n u = 0\n shared = ec.scale(u, aPriv)\n r, _ = crt([rems[fixed], rems[i]], [factors[fixed], factors[i]])\n if ec.scale(u, r) != shared:\n rems[i] = -rems[i] % factors[i]\n",
"step-5": "from matasano import *\r\n\r\nec = EC_M(233970423115425145524320034830162017933,534,1,4,order=233970423115425145498902418297807005944)\r\nassert(ec.scale(4,ec.order) == 0)\r\n\r\naPriv = randint(1,ec.order-1)\r\naPub = ec.scale(4,aPriv)\r\n\r\nprint(\"Factoring...\")\r\ntwist_ord = 2*ec.prime+2 - ec.order\r\nfactors = []\r\nx = twist_ord\r\nfor i in range(2,2**24):\r\n\tif x%i == 0:\r\n\t\tif x%(i*i) != 0:\r\n\t\t\tfactors.append(i)\r\n\t\tx = pp(x,i)\r\n\t\t\r\nprint(\"Getting remainders...\")\r\nrems = []\r\nfor f in factors:\r\n\tu = 0\r\n\twhile u == 0:\r\n\t\twhile isQRes((u**3+ec.A*u**2+u)%ec.prime,ec.prime):\r\n\t\t\tu = randint(1,ec.prime-1)\r\n\t\tu = ec.scale(u,pp(twist_ord,f))\r\n\twhile ec.scale(u,f) != 0:\r\n\t\tu = ec.scale(u,f)\r\n\tshared = ec.scale(u,aPriv)\t#Not generating the MAC this time\r\n\tfor i in range(f):\r\n\t\tif ec.scale(u,i) == shared:\r\n\t\t\tprint(\"\\tSolved mod %d\"%f)\r\n\t\t\trems.append(i)\r\n\t\t\tbreak\r\n\r\n#Now aPriv is +-rems[i] mod factors[i]\r\n#Do them 2 at a time to get down to 2 values mod Prod factors[i]\r\nprint(\"Correcting parities...\")\r\nfor i in range(len(factors)):\r\n\tif rems[i] != 0:\r\n\t\tbreak\r\nfixed = i\r\nfor i in range(len(factors)):\r\n\tif i == fixed:\r\n\t\tcontinue\r\n\tu = 0\r\n\twhile u == 0:\r\n\t\twhile isQRes((u**3+ec.A*u**2+u)%ec.prime,ec.prime):\r\n\t\t\tu = randint(1,ec.prime-1)\r\n\t\tu = ec.scale(u,pp(pp(twist_ord,factors[fixed]),factors[i]))\r\n\t\tif ec.scale(u,factors[fixed]) == 0:\r\n\t\t\tu = 0\r\n\t\telif ec.scale(u,factors[i]) == 0:\r\n\t\t\tu = 0\r\n\tshared = ec.scale(u,aPriv)\r\n\tr,_ = crt([rems[fixed],rems[i]],[factors[fixed],factors[i]])\r\n\tif ec.scale(u,r) != shared:\r\n\t\trems[i] = (-rems[i])%factors[i]\r\n\t\t\r\n#Now I need to run down the remaining bits\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
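Two identities anchor the row above: a curve and its quadratic twist over F_p satisfy #E + #E_twist = 2p + 2 (hence the twist_ord line), and each small factor f of the twist order pins aPriv only up to sign, aPriv = +-r (mod f), which is what the parity pass fixes. A sketch of the remaining combination step, assuming crt generalizes from its two-element use above to a (remainders, moduli) -> (value, modulus) signature; in the full attack the leftover interval is normally closed with Pollard's kangaroo rather than this linear scan:

r, M = crt(rems, factors)        # aPriv = +-r + t*M for some t
for cand in (r, (-r) % M):       # overall sign of the residue is still unknown
    t = 0
    while cand + t*M < ec.order:
        if ec.scale(4, cand + t*M) == aPub:   # base point x = 4, as above
            print("Recovered aPriv:", cand + t*M)
            break
        t += 1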
from ImageCoord import ImageCoord
import os
import tkinter
from tkinter import filedialog
from folium.features import DivIcon
# Path of the folder the images are read from
racine = tkinter.Tk()
racine.title("listPhoto")
racine.directory = filedialog.askdirectory()
cheminDossier = racine.directory
dirImage = os.listdir(cheminDossier)
listImage = []
# Walk through the image folder
for index in range(0, len(dirImage)):
    img = ImageCoord(os.path.join(cheminDossier, dirImage[index]))
    # keep only the images that carry coordinates
    if img.has_coord():
        listImage.append(img)
# Sort the images
listImage.sort()
|
normal
|
{
"blob_id": "f5b8d8c291d18c6f320704a89985acbcae97ca2f",
"index": 2954,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nracine.title('listPhoto')\n<mask token>\nfor index in range(0, len(dirImage)):\n img = ImageCoord(cheminDossier + '\\\\' + dirImage[index])\n if img.has_coord():\n listImage.append(img)\nlistImage.sort()\n",
"step-3": "<mask token>\nracine = tkinter.Tk()\nracine.title('listPhoto')\nracine.directory = filedialog.askdirectory()\ncheminDossier = racine.directory\ndirImage = os.listdir(cheminDossier)\nlistImage = []\nfor index in range(0, len(dirImage)):\n img = ImageCoord(cheminDossier + '\\\\' + dirImage[index])\n if img.has_coord():\n listImage.append(img)\nlistImage.sort()\n",
"step-4": "from ImageCoord import ImageCoord\nimport os\nimport sys\nfrom folium.features import DivIcon\nracine = tkinter.Tk()\nracine.title('listPhoto')\nracine.directory = filedialog.askdirectory()\ncheminDossier = racine.directory\ndirImage = os.listdir(cheminDossier)\nlistImage = []\nfor index in range(0, len(dirImage)):\n img = ImageCoord(cheminDossier + '\\\\' + dirImage[index])\n if img.has_coord():\n listImage.append(img)\nlistImage.sort()\n",
"step-5": "from ImageCoord import ImageCoord\nimport os\nimport sys\nfrom folium.features import DivIcon\n\n# Chemin du dossier ou l'on recupere les images\n\nracine = tkinter.Tk()\nracine.title(\"listPhoto\")\nracine.directory = filedialog.askdirectory()\ncheminDossier = racine.directory\ndirImage = os.listdir(cheminDossier)\n\nlistImage = []\n\n# Parcour du dossier d'images\nfor index in range(0,len(dirImage)) :\n #parcours du dossier\n img = ImageCoord(cheminDossier + '\\\\' + dirImage[index])\n\n #Insertion des image avec coordonné\n if img.has_coord() :\n listImage.append(img)\n\n# Tri des images\nlistImage.sort()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
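One assumption hidden in listImage.sort() above: ImageCoord must define an ordering (in Python 3, at least __lt__). A hypothetical minimal version, with the attribute name invented for illustration:

# inside ImageCoord (sketch; 'date_taken' is an assumed attribute):
# def __lt__(self, other):
#     return self.date_taken < other.date_taken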
import konlpy
import nltk
# POS tag a sentence
sentence = u'만 6세 이하의 초등학교 취학 전 자녀를 양육하기 위해서는'
words = konlpy.tag.Twitter().pos(sentence)
# Define a chunk grammar, or chunking rules, then chunk
grammar = """
NP: {<N.*>*<Suffix>?} # Noun phrase
VP: {<V.*>*} # Verb phrase
AP: {<A.*>*} # Adjective phrase
"""
parser = nltk.RegexpParser(grammar)
chunks = parser.parse(words)
print("# Print whole tree")
print(chunks.pprint())
print("\n# Print noun phrases only")
for subtree in chunks.subtrees():
if subtree.label()=='NP':
print(' '.join((e[0] for e in list(subtree))))
print(subtree.pprint())
# Display the chunk tree
chunks.draw()
|
normal
|
{
"blob_id": "6b647dc2775f54706a6c18ee91145ba60d70be21",
"index": 4453,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('# Print whole tree')\nprint(chunks.pprint())\nprint(\"\"\"\n# Print noun phrases only\"\"\")\nfor subtree in chunks.subtrees():\n if subtree.label() == 'NP':\n print(' '.join(e[0] for e in list(subtree)))\n print(subtree.pprint())\nchunks.draw()\n",
"step-3": "<mask token>\nsentence = u'만 6세 이하의 초등학교 취학 전 자녀를 양육하기 위해서는'\nwords = konlpy.tag.Twitter().pos(sentence)\ngrammar = \"\"\"\nNP: {<N.*>*<Suffix>?} # Noun phrase\nVP: {<V.*>*} # Verb phrase\nAP: {<A.*>*} # Adjective phrase\n\"\"\"\nparser = nltk.RegexpParser(grammar)\nchunks = parser.parse(words)\nprint('# Print whole tree')\nprint(chunks.pprint())\nprint(\"\"\"\n# Print noun phrases only\"\"\")\nfor subtree in chunks.subtrees():\n if subtree.label() == 'NP':\n print(' '.join(e[0] for e in list(subtree)))\n print(subtree.pprint())\nchunks.draw()\n",
"step-4": "import konlpy\nimport nltk\nsentence = u'만 6세 이하의 초등학교 취학 전 자녀를 양육하기 위해서는'\nwords = konlpy.tag.Twitter().pos(sentence)\ngrammar = \"\"\"\nNP: {<N.*>*<Suffix>?} # Noun phrase\nVP: {<V.*>*} # Verb phrase\nAP: {<A.*>*} # Adjective phrase\n\"\"\"\nparser = nltk.RegexpParser(grammar)\nchunks = parser.parse(words)\nprint('# Print whole tree')\nprint(chunks.pprint())\nprint(\"\"\"\n# Print noun phrases only\"\"\")\nfor subtree in chunks.subtrees():\n if subtree.label() == 'NP':\n print(' '.join(e[0] for e in list(subtree)))\n print(subtree.pprint())\nchunks.draw()\n",
"step-5": "import konlpy\nimport nltk\n\n# POS tag a sentence\nsentence = u'만 6세 이하의 초등학교 취학 전 자녀를 양육하기 위해서는'\nwords = konlpy.tag.Twitter().pos(sentence)\n\n# Define a chunk grammar, or chunking rules, then chunk\ngrammar = \"\"\"\nNP: {<N.*>*<Suffix>?} # Noun phrase\nVP: {<V.*>*} # Verb phrase\nAP: {<A.*>*} # Adjective phrase\n\"\"\"\nparser = nltk.RegexpParser(grammar)\nchunks = parser.parse(words)\nprint(\"# Print whole tree\")\nprint(chunks.pprint())\n\nprint(\"\\n# Print noun phrases only\")\nfor subtree in chunks.subtrees():\n if subtree.label()=='NP':\n print(' '.join((e[0] for e in list(subtree))))\n print(subtree.pprint())\n\n# Display the chunk tree\nchunks.draw()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
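The chunk grammar above matches POS tags, not words: the NP rule <N.*>*<Suffix>? greedily groups consecutive tokens whose tag starts with N, plus one optional Suffix token. A toy run on hand-tagged tokens from the same sentence (expected output shape from nltk.RegexpParser, not verified against a particular konlpy version):

toy = [(u'초등학교', 'Noun'), (u'취학', 'Noun'), (u'전', 'Noun')]
print(parser.parse(toy))
# Tree('S', [Tree('NP', [('초등학교', 'Noun'), ('취학', 'Noun'), ('전', 'Noun')])])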
from flask import Flask
from flask import render_template
import datetime
from person import Person
import requests
from post import Post
app = Flask(__name__)
all_posts = requests.get(
    "https://api.npoint.io/5abcca6f4e39b4955965").json()
post_objects = []
for post in all_posts:
post_obj = Post(post["id"], post["title"], post["subtitle"], post["body"])
post_objects.append(post_obj)
@app.route('/')
def home_page():
year = datetime.datetime.today().year
return render_template("index.html",
current_year=year)
@app.route('/guess/<name>')
def guesser(name):
person = Person(name=name)
return render_template("guess.html",
name=person.name,
gender=person.gender,
age=person.age,
country=person.country,
)
@app.route('/blog')
def blog():
return render_template("blog.html", posts=post_objects)
@app.route('/post/<int:id>')
def blog_post(id):
requested_post = None
for post in post_objects:
if post.id == id:
requested_post = post
return render_template("post.html", post=requested_post)
if __name__ == "__main__":
app.run(debug=True)
|
normal
|
{
"blob_id": "895ece0b8d45cd64e43f8ddc54824f7647254185",
"index": 2547,
"step-1": "<mask token>\n\n\[email protected]('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template('guess.html', name=person.name, gender=person.\n gender, age=person.age, country=person.country)\n\n\n<mask token>\n\n\[email protected]('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template('post.html', post=requested_post)\n\n\n<mask token>\n",
"step-2": "<mask token>\nfor post in all_posts:\n post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])\n post_objects.append(post_obj)\n\n\[email protected]('/')\ndef home_page():\n year = datetime.datetime.today().year\n return render_template('index.html', current_year=year)\n\n\[email protected]('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template('guess.html', name=person.name, gender=person.\n gender, age=person.age, country=person.country)\n\n\[email protected]('/blog')\ndef blog():\n return render_template('blog.html', posts=post_objects)\n\n\[email protected]('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template('post.html', post=requested_post)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\nall_posts = all_posts = requests.get(\n 'https://api.npoint.io/5abcca6f4e39b4955965').json()\npost_objects = []\nfor post in all_posts:\n post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])\n post_objects.append(post_obj)\n\n\[email protected]('/')\ndef home_page():\n year = datetime.datetime.today().year\n return render_template('index.html', current_year=year)\n\n\[email protected]('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template('guess.html', name=person.name, gender=person.\n gender, age=person.age, country=person.country)\n\n\[email protected]('/blog')\ndef blog():\n return render_template('blog.html', posts=post_objects)\n\n\[email protected]('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template('post.html', post=requested_post)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask\nfrom flask import render_template\nimport datetime\nfrom person import Person\nimport requests\nfrom post import Post\napp = Flask(__name__)\nall_posts = all_posts = requests.get(\n 'https://api.npoint.io/5abcca6f4e39b4955965').json()\npost_objects = []\nfor post in all_posts:\n post_obj = Post(post['id'], post['title'], post['subtitle'], post['body'])\n post_objects.append(post_obj)\n\n\[email protected]('/')\ndef home_page():\n year = datetime.datetime.today().year\n return render_template('index.html', current_year=year)\n\n\[email protected]('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template('guess.html', name=person.name, gender=person.\n gender, age=person.age, country=person.country)\n\n\[email protected]('/blog')\ndef blog():\n return render_template('blog.html', posts=post_objects)\n\n\[email protected]('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template('post.html', post=requested_post)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask\nfrom flask import render_template\nimport datetime\nfrom person import Person\nimport requests\nfrom post import Post\n\napp = Flask(__name__)\nall_posts = all_posts = requests.get(\n \"https://api.npoint.io/5abcca6f4e39b4955965\").json()\npost_objects = []\n\nfor post in all_posts:\n post_obj = Post(post[\"id\"], post[\"title\"], post[\"subtitle\"], post[\"body\"])\n post_objects.append(post_obj)\n\n\[email protected]('/')\ndef home_page():\n year = datetime.datetime.today().year\n return render_template(\"index.html\",\n current_year=year)\n\n\[email protected]('/guess/<name>')\ndef guesser(name):\n person = Person(name=name)\n return render_template(\"guess.html\",\n name=person.name,\n gender=person.gender,\n age=person.age,\n country=person.country,\n )\n\n\[email protected]('/blog')\ndef blog():\n return render_template(\"blog.html\", posts=post_objects)\n\n\[email protected]('/post/<int:id>')\ndef blog_post(id):\n requested_post = None\n for post in post_objects:\n if post.id == id:\n requested_post = post\n return render_template(\"post.html\", post=requested_post)\n\n\nif __name__ == \"__main__\":\n app.run(debug=True)\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
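The Post constructor above pins down the shape each element of the npoint payload must have; the keys come straight from the code, the values are illustrative:

# [
#   {"id": 1, "title": "...", "subtitle": "...", "body": "..."},
#   ...
# ]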
# Generated by Django 2.1.2 on 2018-10-19 22:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='mascota',
name='descripcion',
field=models.CharField(max_length=200),
),
]
|
normal
|
{
"blob_id": "fcfec60a2302ee0c1385add053d4371040a2aff4",
"index": 3667,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('core', '0001_initial')]\n operations = [migrations.AlterField(model_name='mascota', name=\n 'descripcion', field=models.CharField(max_length=200))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('core', '0001_initial')]\n operations = [migrations.AlterField(model_name='mascota', name=\n 'descripcion', field=models.CharField(max_length=200))]\n",
"step-5": "# Generated by Django 2.1.2 on 2018-10-19 22:13\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='mascota',\n name='descripcion',\n field=models.CharField(max_length=200),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
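To preview the ALTER TABLE statement a field alteration like this produces, Django's standard sqlmigrate command prints the SQL without applying it (the placeholder stands for whatever name Django auto-generated for this migration file):

# python manage.py sqlmigrate core <this_migration_name>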
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class Main {
    // Minimal binary-tree node, inferred from how main() and createTree() use it;
    // the project this was taken from presumably defines Node in its own file.
    static class Node {
        int value;
        Node left, right;
        Node(int value) {
            this.value = value;
        }
    }
    public static void findSubNode(Node root) {
    }
public static void main(String args[]) throws IOException {
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
String[] strings = br.readLine().split(" ");
int n = Integer.parseInt(strings[0]);
int root = Integer.parseInt(strings[1]);
Node head1 = new Node(root);
int[][] arr1 = new int[100 + 1][2];
for (int i = 0; i < n; i++) {
strings = br.readLine().split(" ");
arr1[Integer.parseInt(strings[0])][0] = Integer.parseInt(strings[1]);
arr1[Integer.parseInt(strings[0])][1] = Integer.parseInt(strings[2]);
}
int t = Integer.parseInt(br.readLine());
if (arr1[t][0] == 0 && arr1[t][1] == 0){
System.out.println(0);
} else if(arr1[t][0] != 0){
System.out.println(arr1[t][0] );
}else {
System.out.println(arr1[t][1] );
}
// createTree(head1, arr1);
}
public static void createTree(Node head, int[][] arr) {
if (head == null) {
return;
}
if (arr[head.value][0] != 0) {
head.left = new Node(arr[head.value][0]);
createTree(head.left, arr);
}
if (arr[head.value][1] != 0) {
head.right = new Node(arr[head.value][1]);
createTree(head.right, arr);
}
}
}
|
normal
|
{
"blob_id": "6d0a945c9eaf6564a327928880df1f0aeed2e5d0",
"index": 9649,
"step-1": "import java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\n\npublic class Main {\n\n public static void findSubNode(Node root) {\n\n }\n\n public static void main(String args[]) throws IOException {\n BufferedReader br = new BufferedReader(new InputStreamReader(System.in));\n String[] strings = br.readLine().split(\" \");\n int n = Integer.parseInt(strings[0]);\n int root = Integer.parseInt(strings[1]);\n Node head1 = new Node(root);\n int[][] arr1 = new int[100 + 1][2];\n for (int i = 0; i < n; i++) {\n strings = br.readLine().split(\" \");\n arr1[Integer.parseInt(strings[0])][0] = Integer.parseInt(strings[1]);\n arr1[Integer.parseInt(strings[0])][1] = Integer.parseInt(strings[2]);\n }\n int t = Integer.parseInt(br.readLine());\n if (arr1[t][0] == 0 && arr1[t][1] == 0){\n System.out.println(0);\n } else if(arr1[t][0] != 0){\n System.out.println(arr1[t][0] );\n }else {\n System.out.println(arr1[t][1] );\n }\n// createTree(head1, arr1);\n }\n\n public static void createTree(Node head, int[][] arr) {\n if (head == null) {\n return;\n }\n if (arr[head.value][0] != 0) {\n head.left = new Node(arr[head.value][0]);\n createTree(head.left, arr);\n }\n if (arr[head.value][1] != 0) {\n head.right = new Node(arr[head.value][1]);\n createTree(head.right, arr);\n }\n }\n}\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# name: Ali
# date: 7/12/2016
# description: uses openweathermap.org's api to get weather data about
# the city that is inputted
# unbreakable? = idk
import json
import urllib2
from collections import OrderedDict
from pprint import pprint
api_key = "&APPID=507e30d896f751513350c41899382d89"
city_name_url = "http://api.openweathermap.org/data/2.5/weather?q="
units = "&units=metric"
general_info = {
"Humidity (%)": 0,
"Pressure": 0,
"Temperature(C)": 0,
"Max. Temp.(C)": 0,
"Min. Temp.(C)": 0
}
def connectapi():
global parsed
global data
urlrequest = city_name_url + city_input + units + api_key
response = urllib2.urlopen(urlrequest)
content = response.read()
data = json.loads(content, object_pairs_hook=OrderedDict)
parsed = json.dumps(data, indent=4, sort_keys=True)
print parsed
def find_data():
global country_name
global city_name
global general_info
global weather_description
global formatted_general_info
city_name = str(data['name'])
country_name = str(data['sys']['country'])
    # data['weather'] is a JSON array, hence the [0] index
    weather_description = str(data['weather'][0]['description'])
for key, value in data['main'].iteritems():
if key == "humidity":
general_info['Humidity (%)'] = value
elif key == "pressure":
general_info['Pressure'] = value
elif key == "temp":
general_info['Temperature(C)'] = value
elif key == "temp_max":
general_info['Max. Temp.(C)'] = value
elif key == "temp_min":
general_info['Min. Temp.(C)'] = value
else:
continue
print "Weather Lookup\n\nEnter the name of the city that you want\nto look at the weather details of.\n"
while True:
    try:
        city_input = str(raw_input("What city would you like to look at?"))
    except ValueError:
        print "Please enter a city name."
        continue
    try:
        connectapi()
    except urllib2.HTTPError:
        # e.g. a 404 for an unknown city name
        print "Something went wrong, would you like to try again?"
        continue
    if "name" in data:
        find_data()
        print "\n%r in %r:\n" % (city_name, country_name)
        print """General info:"""
        pprint(general_info)
        print "\nWeather Description:\n\t%s\n" % weather_description
    else:
        print "Something went wrong, would you like to try again?"
        continue
|
normal
|
{
"blob_id": "94540561ba29d2fc1766dac7b199e0cbbbeecdfc",
"index": 8046,
"step-1": "# name: Ali\n# date: 7/12/2016\n# description: uses openweathermap.org's api to get weather data about\n# the city that is inputted\n\n# unbreakable? = idk\nimport json\nimport urllib2\nfrom collections import OrderedDict\nfrom pprint import pprint\napi_key = \"&APPID=507e30d896f751513350c41899382d89\"\ncity_name_url = \"http://api.openweathermap.org/data/2.5/weather?q=\"\nunits = \"&units=metric\"\n\ngeneral_info = {\n \"Humidity (%)\": 0,\n \"Pressure\": 0,\n \"Temperature(C)\": 0,\n \"Max. Temp.(C)\": 0,\n \"Min. Temp.(C)\": 0\n }\n\ndef connectapi():\n global parsed\n global data\n urlrequest = city_name_url + city_input + units + api_key\n response = urllib2.urlopen(urlrequest)\n content = response.read()\n\n data = json.loads(content, object_pairs_hook=OrderedDict)\n parsed = json.dumps(data, indent=4, sort_keys=True)\n print parsed\n\n\ndef find_data():\n global country_name\n global city_name\n global general_info\n global weather_description\n global formatted_general_info\n city_name = str(data['name'])\n country_name = str(data['sys']['country'])\n #weather_description = data['weather']['description']\n for key, value in data['main'].iteritems():\n if key == \"humidity\":\n general_info['Humidity (%)'] = value\n elif key == \"pressure\":\n general_info['Pressure'] = value\n elif key == \"temp\":\n general_info['Temperature(C)'] = value\n elif key == \"temp_max\":\n general_info['Max. Temp.(C)'] = value\n elif key == \"temp_min\":\n general_info['Min. Temp.(C)'] = value\n else:\n continue\n\n\n\n\nprint \"Weather Lookup\\n\\nEnter the name of the city that you want\\nto look at the weather details of.\\n\"\nwhile True:\n\n try:\n city_input = str(raw_input(\"What city would you like to look at?\"))\n except ValueError:\n print\"Please enter a city name.\"\n\n connectapi()\n if \"name\" in data:\n find_data()\n print \"\\n%r in %r:\\n\"% (city_name, country_name)\n print \"\"\"General info:\"\"\"\n pprint(general_info)\n print \"\\nWeather Description:\\n\\tidk why it doesn't let me take this data so annoying\\n\"\n else:\n print \"Something went wrong, would you like to try again?\"\n continue\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
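For reference, the slice of an OpenWeatherMap /data/2.5/weather response the script reads (field names as in the API, values illustrative):

# {
#   "name": "London",
#   "sys": {"country": "GB"},
#   "main": {"temp": 11.3, "temp_min": 10.0, "temp_max": 12.5,
#            "pressure": 1012, "humidity": 81},
#   "weather": [{"description": "light rain"}]
# }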
from Config_paar import *
from Envelopefkt import *
from Kinematik import *
def A_m_n(M,N,x_plus,p_el,p_pos,k_photon,k_laser):
def f1(p):
return -(m*a0)/(pk(p)) * g(phi,sigma,Envelope) *( pe(1,p) * cos(ksi) * cos(phi) + pe(2,p) * sin(ksi) * sin(phi) )
def f2(p):
return -(m*a0)**2/(2.*pk(p))*g(phi,sigma,Envelope)**2*((cos(ksi)*cos(phi))**2+(sin(ksi)*sin(phi))**2)
def f(p):
return f1(p)+f2(p)
def f1_SVEA(p):
return -(m*a0)/(pk(p))*g(phi,sigma,Envelope)*(pe(1,p)*cos(ksi)*sin(phi)-pe(2,p)*sin(ksi)*cos(phi))
def f2_SVEA(p):
return -(m*a0)**2/(4.*pk(p))*(Int_g_2(phi,sigma,Envelope)+g(phi,sigma,Envelope)**2*cos(phi)*sin(phi)*(cos(ksi)**2-sin(ksi)**2))
def f_SVEA(p):
return f1_SVEA(p)+f2_SVEA(p)
pk = lambda imp: (imp * k_laser)
pe = lambda l,imp: (imp * eps_laser(l))
P_ = p_pos.minus() + p_el.minus() - k_photon.minus()
s = P_/k_laser.minus()
phi = w_laser * x_plus
H_plus = s*phi - f_SVEA(p_el) + f_SVEA(-p_pos)
if M == 0:
A = -1./s * (f(-p_pos) - f(p_el)) * exp(1j * H_plus)
else:
A = g(phi,sigma,Envelope)**M *exp( 1j* ( H_plus + N*phi))
return A
def A_m_n_nSVEA(M,N,x_plus,p_el,p_pos,k_photon,k_laser):
def f1(p):
if Envelope == 'cos^2':
fakt_a = sigma/(sigma-pi)
fakt_b = sigma/(sigma+pi)
Int_sin = -0.25 *( fakt_a * cos( phi/fakt_a ) + fakt_b * cos( phi/fakt_b ) +2.*cos(phi) )
Int_cos = 0.25 *( fakt_a * sin( phi/fakt_a ) + fakt_b * sin( phi/fakt_b ) +2.*sin(phi) )
return -(m*a0)/(pk(p)) *( pe(1,p) * cos(ksi) * Int_cos + pe(2,p) * sin(ksi) * Int_sin )
elif Envelope == 'cos^4':
fakt_a = lambda n: ( 1. + n*pi/sigma )
fakt_b = lambda n: ( -1. + n*pi/sigma )
Int_sin = 0.25 *( ( - cos( fakt_a(2.)*phi ) / fakt_a(2.) + cos( fakt_b(2.)*phi ) / fakt_b(2.) ) * 0.25 \
- cos( fakt_a(1.)*phi ) / fakt_a(1.) + cos( fakt_b(1.)*phi ) / fakt_b(1.) - 3./2. * cos(phi) )
Int_cos = 0.25 *( ( sin( fakt_a(2.)*phi ) / fakt_a(2.) + sin( fakt_b(2.)*phi ) / fakt_b(2.) ) * 0.25 \
+ sin( fakt_a(1.)*phi ) / fakt_a(1.) + sin( fakt_b(1.)*phi ) / fakt_b(1.) - 3./2. * sin(phi) )
return -(m*a0)/(pk(p)) * ( pe(1,p) * cos(ksi) * Int_cos + pe(2,p) * sin(ksi) * Int_sin )
        elif Envelope == 'cosh':
            raise IOError,'cosh not implemented yet -> use SVEA'
        else:
            raise IOError,'Not analytically solvable -> use SVEA'
def f2(p):
if Envelope == 'cos^2':
a = pi/sigma/2.
F = lambda l,n: ( l + n*a )
Int_cos = 1./8. *( 1.5*phi + 0.75*sin(2.*phi) + sin(F(0,4.)*phi)/F(0,8.) + sin(F(0,2.)*phi)/a \
+ sin(F(-2.,4.)*phi)/F(-2.,4.)/4. + sin(F(2.,4.)*phi)/F(2.,4.)/4. \
+ sin(F(-2.,2.)*phi)/F(-2.,2.) + sin(F(2.,2.)*phi)/F(2.,2.) )
Int_sin = 1./8. *( 1.5*phi - 0.75*sin(2.*phi) + sin(F(0,4.)*phi)/F(0,8.) + sin(F(0,2.)*phi)/a \
- sin(F(-2.,4.)*phi)/F(-2.,4.)/4. - sin(F(2.,4.)*phi)/F(2.,4.)/4. \
- sin(F(-2.,2.)*phi)/F(-2.,2.) - sin(F(2.,2.)*phi)/F(2.,2.) )
return -( m*a0 )**2 / (2.*pk(p)) * ( cos(ksi)**2 * Int_cos + sin(ksi)**2 * Int_sin )
elif Envelope == 'cos^4':
Faktor = lambda l,n: ( l + n*pi/sigma )
Int_sin = 1./64. *( (- sin( Faktor(-2.,4.)*phi ) / Faktor(-2.,4.) - sin( Faktor(2.,4.)*phi ) / Faktor(2.,4.) ) / 8. \
- sin( Faktor(-2.,3.)*phi ) / Faktor(-2.,3.) - sin( Faktor(2.,3.)*phi ) / Faktor(2.,3.) \
-( sin( Faktor(-2.,2.)*phi ) / Faktor(-2.,2.) + sin( Faktor(2.,2.)*phi ) / Faktor(2.,2.) ) * 3.5 \
-( sin( Faktor(-2.,1.)*phi ) / Faktor(-2.,1.) + sin( Faktor(2.,1.)*phi ) / Faktor(2.,1.) ) * 7. \
+ sin( Faktor( 0.,4.)*phi ) / Faktor(0.,16.) + sin( Faktor(0.,3.)*phi ) / Faktor(0.,1.5) \
+( sin( Faktor( 0.,2.)*phi ) / Faktor(0.,2.) + sin( Faktor(0.,1.)*phi ) / Faktor(0.,0.5)) * 7. \
+ 35./4. * phi - 35./8. * sin( 2*phi ) )
Int_cos = 1./64. *( ( sin( Faktor(-2.,4.)*phi ) / Faktor(-2.,4.) + sin( Faktor(2.,4.)*phi ) / Faktor(2.,4.) ) / 8. \
+ sin( Faktor(-2.,3.)*phi ) / Faktor(-2.,3.) + sin( Faktor(2.,3.)*phi ) / Faktor(2.,3.) \
+( sin( Faktor(-2.,2.)*phi ) / Faktor(-2.,2.) + sin( Faktor(2.,2.)*phi ) / Faktor(2.,2.) ) * 3.5 \
+( sin( Faktor(-2.,1.)*phi ) / Faktor(-2.,1.) + sin( Faktor(2.,1.)*phi ) / Faktor(2.,1.) ) * 7. \
+ sin( Faktor( 0.,4.)*phi ) / Faktor(0.,16.) + sin( Faktor(0.,3.)*phi ) / Faktor(0.,1.5) \
+( sin( Faktor( 0.,2.)*phi ) / Faktor(0.,2.) + sin( Faktor(0.,1.)*phi ) / Faktor(0.,0.5)) * 7. \
+ 35./4. * phi - 35./8. * sin( 2*phi ) )
return -( m*a0 )**2 / (2.*pk(p)) * ( cos(ksi)**2 * Int_cos + sin(ksi)**2 * Int_sin )
elif Envelope == 'cosh':
            raise IOError('cosh not implemented yet -> use SVEA')
else:
            raise IOError('not analytically solvable -> use SVEA')
def f(p):
return f1(p)+f2(p)
pk = lambda imp: (imp * k_laser)
pe = lambda l,imp: (imp * eps_laser(l))
P_ = p_pos.minus() + p_el.minus() - k_photon.minus()
s = P_/k_laser.minus()
phi = w_laser * x_plus
H_plus = s*phi - f(p_el) + f(-p_pos)
A = g(phi,sigma,Envelope)**M *exp( 1j* ( H_plus + N*phi))
return A
def A_0_0 (A11,A1_1,A20,A22,A2_2):
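    # Descriptive note: builds the (M=0, N=0) amplitude from the A_{1,+1},
    # A_{1,-1} and A_{2,*} terms together with the polarization vectors
    # eps_m/eps_p returned by kinematik().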
p_pos,p_el,k_laser,k_photon,q_pos,eps_m,eps_p = kinematik()
pk = lambda p: (p * k_laser)
d_p = lambda p: m*a0 / ( 4.* pk(p) )
P_ = p_pos.minus() + p_el.minus() - k_photon.minus()
s = P_/k_laser.minus()
Wert = 2./s * ( ( d_p(p_pos)*p_pos*eps_m - d_p(p_el)*p_el*eps_m ) * A11 \
+ ( d_p(p_pos)*p_pos*eps_p - d_p(p_el)*p_el*eps_p ) * A1_1 \
- k_laser*k_photon*d_p(p_pos)*d_p(p_el) \
* ( 2.*A20 + (cos(ksi)**2 - sin(ksi)**2) * (A22 + A2_2) ) )
return Wert
from external.odds.betclic.api import get_odds
# FDJ parsing is broken: their UI has been refactored onto a JS framework with a
# protected async JSON API (requires HEADERS), which makes match odds harder to
# isolate and group; hence the move to another betting website that is still
# fully HTML-rendered.
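
# For reference, a minimal sketch of reading a header-protected JSON API with
# `requests` (the endpoint URL, header set and response fields below are
# hypothetical, not the actual FDJ/Betclic API):
#
#   import requests
#
#   HEADERS = {"User-Agent": "Mozilla/5.0", "Accept": "application/json"}
#   resp = requests.get("https://example.com/api/matches",
#                       headers=HEADERS, timeout=10)
#   resp.raise_for_status()
#   for match in resp.json().get("matches", []):
#       print(match["name"], match["odds"])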
from xgboost import XGBRegressor
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import pandas as pd
import numpy as np
import random
import matplotlib.pyplot as plt
from ghg import GHGPredictor
predictor = GHGPredictor()
dataset_df = pd.read_csv("db-wheat.csv", index_col=0)
# print(dataset_df.iloc[1])
dataset_df_2 = dataset_df.drop(columns=['Area', 'Year', 'Crop', 'Previous crop'])
# print(dataset_df_2)
dataset = dataset_df_2.to_numpy()
# print(dataset)
X, Y = dataset[:, :-1], dataset[:, -1:]
# print(X)
# print(Y)
seed = 10
test_size = 0.2
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=test_size, random_state=seed)
# print(len(X_train))
# print(len(X_test))
# print(len(Y_train))
# print(len(Y_test))
model = XGBRegressor()
model.fit(X_train, Y_train)
# print(model)
print(dataset_df_2.columns)
print(model.feature_importances_)
# print(X_test.shape)
y_pred = model.predict(X_test)
# predictions = [round(value) for value in y_pred]
Y_test = list(map(lambda x: x[0], Y_test))  # flatten the (n, 1) column into a plain list
# print(Y_test)
res = zip(y_pred, Y_test)
# print(list(res))
ghg_predictor = GHGPredictor()
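# predict() sweeps the nitrogen input of one row from 90% to 110% of its
# original value, predicts the yield at each step and estimates the matching
# GHG emissions (fuel + managed soils); returns [nitrogen, yield, GHG] triples.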
def predict(model, row):
preds = []
# print(row)
    # print(row.shape)
for perc in range(-10, 11):
new_row = row.copy()
row_copy = row.copy()
# new_row = new_row.iloc[0]
new_row = new_row.drop(labels=['Area', 'Year', 'Crop', 'Previous crop', 'Yield'])
# print(new_row.labels)
# new_row = new_row.tolist()
# print(new_row)
# print(type(new_row))
nitrogen = new_row['N'] * ((100 + perc) / 100)
new_row['N'] = nitrogen
row_copy['N'] = nitrogen
new_row = np.array([new_row])
# print(new_row)
pred = model.predict(new_row)
row_df = pd.DataFrame([row_copy])
fuel_ghg = predictor.fuel_ghg_emissions(row_df["Area"], unit="kg")
fuel_ghg = fuel_ghg.values[0]
ms_ghg = predictor.managed_soils_ghg(row_df['N'], row_df['Manure'], row_df['Area'], row_df['Crop'], row_df['Yield'])
ms_ghg = ms_ghg.values[0]
sum_ghg = fuel_ghg + ms_ghg
area = row_df['Area'].iloc[0]
# print(area)
# print(sum_ghg)
# print(row_df['N'])
# print(sum_ghg)
# GHG
# fuel = ghg_predictor.fuel_ghg_emissions()
preds.append([nitrogen, pred[0], sum_ghg])
print('{:4}% | Yield: {:.2f} | Area {} | C02_ha {:.5f} | C02 {:.5f}'.format(100 + perc, pred[0], area, sum_ghg / area, sum_ghg))
return preds
# accuracy = accuracy_score(Y_test, predictions)
# print("Accuracy: %.2f%%" % (accuracy * 100.0))
rand_ind = random.randrange(0, len(dataset))
rand_row = dataset_df.iloc[rand_ind]
while rand_row['N'] == 0:
rand_ind = random.randrange(0, len(dataset))
rand_row = dataset_df.iloc[rand_ind]
# rand_row = rand_row[:-1]
preds = predict(model, rand_row)
fig, ax1 = plt.subplots()
n_amount = [x[0] for x in preds]
yield_p = [x[1] for x in preds]
ghg_p = [x[2] for x in preds]
color = 'tab:red'
ax1.set_xlabel('N')
ax1.set_ylabel('Yield (t)', color=color)
ax1.set_title(f'GHG and yield predictions (Area: {rand_row["Area"]} ha)')
ax1.plot(n_amount, yield_p, color=color)
ax1.tick_params(axis='y', labelcolor=color)
ax2 = ax1.twinx() # instantiate a second axes that shares the same x-axis
color = 'tab:blue'
ax2.set_ylabel('CO2 (kg)', color=color) # we already handled the x-label with ax1
ax2.plot(n_amount, ghg_p, color=color)
ax2.tick_params(axis='y', labelcolor=color)
print(n_amount)
fig.tight_layout() # otherwise the right y-label is slightly clipped
plt.show()
import pygame
import time as time_
import random
import os
from pygame.locals import *
from math import sin, cos, pi
from sys import exit
# ---------------------------
from unzip import *
unzip()
# ---------------------------
from others import *
from gaster_blaster import *
from board import *
from bone import *
from sans import *
from player import *
from functions import *
# ----------------------------------------------------------------
'''Initialization'''
os.environ["SDL_VIDEO_WINDOW_POS"] = "100,100"
pygame.init()
if FULL_SCREEN:
display = pygame.display.set_mode((1920, 1080), FULLSCREEN)
else:
display = pygame.display.set_mode(SCREEN_SIZE)
screen = pygame.Surface(SCREEN_SIZE).convert_alpha()
mask_surface_blue = pygame.Surface(SCREEN_SIZE).convert_alpha() # mask for blue attacks
mask_surface_orange = pygame.Surface(SCREEN_SIZE).convert_alpha() # mask for orange attacks
mask_surface_normal = pygame.Surface(SCREEN_SIZE).convert_alpha() # mask for normal attacks
pygame.display.set_caption("UPPERTALE") # window title
pygame.display.set_icon(pygame.image.load("res/icon-32.png")) # window icon
fps = pygame.time.Clock() # frame clock
frames = 60
# -----------------------------------
'''Functions that have to live in the main file
because they need to modify global variables'''
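# players_turn registers a pseudo-attack that clears every projectile and
# hands control back to the player, with `text` shown in the dialogue box.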
def players_turn(text):
def tmp():
global is_players_turn, battle_text, shown_index
is_players_turn = True
battle_text = text
shown_index = 0
bones.clear()
blasters.clear()
boards.clear()
attacks.append(tmp)
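# set_turn_time schedules the end of the current turn: after `time` frames the
# queued task resets `stop`, letting the main loop run the next attack.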
def set_turn_time(time):
def next_turn(screen):
global stop
stop = False
tasks.append(Task(next_turn, time))
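# Decorator that appends a function to the attack queue in definition order.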
def add_attack(func):
attacks.append(func)
return func
def shake(screen):
global screen_shaking
screen_shaking = True
def unshake(screen):
global screen_shaking
screen_shaking = False
def set_screen_angle(angle):
global screen_angle
screen_angle = angle
def start_testing():
attacks.clear()
# -------------------------------------
'''Turns'''
# intro chant
@add_attack
def yinchang_1():
global BOX_POS, BOX_SIZE
BOX_POS = [230, 230]
BOX_SIZE = [170, 160]
if DEBUG:
        # start of test area
pass
        # end of test area
sans.say("准备好了?")
# opening kill
@add_attack
def first_round1():
set_turn_time(50)
sans.hand_direction = DOWN
player.type = BLUE_SOUL
player.direction = DOWN
player.falling_speed = 10
player.falling = True
tasks.append(Task(shake,
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
tasks.append(Task(unshake,
((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 10):
bones.append(
Bone(
pos=[x, BOX_POS[1] + BOX_SIZE[1] - 7],
speed=[0, -5],
direction=UP,
time1=8,
time2=40,
length=1000,
type_=1
)
)
bones.append(
Bone(
pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47],
speed=[0, 0],
direction=UP,
time1=200,
time2=48,
length=1000,
type_=1
)
)
bones.append(
Bone(
pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47],
speed=[0, 5],
direction=UP,
time1=8,
time2=248,
length=1000,
type_=1
)
)
@add_attack
def first_round2():
set_turn_time(50)
sans.hand_direction = LEFT
player.type = BLUE_SOUL
player.direction = LEFT
player.falling_speed = 10
player.falling = True
tasks.append(Task(shake,
(player.pos[0] - BOX_POS[0]) // 10))
tasks.append(Task(unshake,
((player.pos[0] - BOX_POS[0]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(player.pos[0] - BOX_POS[0]) // 10))
for y in range(BOX_POS[1], BOX_POS[1] + BOX_SIZE[1], 10):
bones.append(
Bone(
pos=[BOX_POS[0] - 7, y],
speed=[0, 0, 5],
direction=LEFT,
time1=8,
time2=30,
length=0,
type_=2
)
)
bones.append(
Bone(
pos=[BOX_POS[0] - 7, y],
speed=[0, 0, 0],
direction=LEFT,
time1=150,
time2=38,
length=40,
type_=2
)
)
bones.append(
Bone(
pos=[BOX_POS[0] - 7, y],
speed=[0, 0, -5],
direction=LEFT,
time1=8,
time2=188,
length=40,
type_=2
)
)
@add_attack
def first_round3():
set_turn_time(450)
player.type = RED_SOUL
for _ in range(0, 300, 2):
bones.append(
Bone(
pos=BOX_POS,
length=40 + sin(_ / 20) * 40,
direction=UP,
speed=[7, 0],
time1=1000,
time2=_,
)
)
bones.append(
Bone(
pos=[BOX_POS[0], BOX_POS[1] + 25 + (sin(_ / 20) * 40) + 60],
length=1000,
direction=UP,
speed=[7, 0],
time1=1000,
time2=_,
)
)
@add_attack
def first_round4():
sans.headtype = SANS_LOOK_LEFT
sans.say("只是第一个回合而已,何必用尽全力?")
@add_attack
def first_round5():
set_turn_time(1)
sans.headtype = SANS_NORMAL
pygame.mixer.music.play(-1)
players_turn("* ...")
@add_attack
def zjj_1():
set_turn_time(60)
global BOX_POS, BOX_SIZE
BOX_POS = [200, 230]
BOX_SIZE = [200, 150]
sans.hand_direction = DOWN
player.type = BLUE_SOUL
player.direction = DOWN
player.falling_speed = 10
tasks.append(Task(shake,
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
tasks.append(Task(unshake,
((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
@add_attack
def zjj_2():
set_turn_time(11 * 100)
def zjj(screen):
angle = random.randint(240, 300)
blasters.append(GasterBlaster(
pos=[
player.pos[0] + math.cos(math.radians(angle)) * 200,
player.pos[1] + math.sin(math.radians(angle)) * 200],
angle=angle - 180,
time1=10,
time2=30,
width=30,
color=BLUE
))
for _ in range(10):
tasks.append(Task(zjj, _ * 100))
bones.append(
Bone(
pos=[BOX_POS[0] - 20, BOX_POS[1] - 8],
length=BOX_SIZE[1] - 30 - 16,
direction=DOWN,
time1=1000,
time2=_ * 100 + 60,
speed=[2, 0],
type_=2
))
bones.append(
Bone(
pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1] - 8],
length=BOX_SIZE[1] - 30 - 16,
direction=DOWN,
time1=1000,
time2=_ * 100 + 60,
speed=[-2, 0],
type_=2
))
bones.append(
Bone(
pos=[BOX_POS[0] - 20, BOX_POS[1] + BOX_SIZE[1] - 10 - 8],
length=1000,
direction=DOWN,
time1=1000,
time2=_ * 100 + 60,
speed=[2, 0],
type_=1
))
bones.append(
Bone(
pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1] + BOX_SIZE[1] - 10 - 8],
length=1000,
direction=DOWN,
time1=1000,
time2=_ * 100 + 60,
speed=[-2, 0],
type_=1
))
players_turn("* ...")
@add_attack
def blue_bone():
set_turn_time(700)
global BOX_POS, BOX_SIZE
BOX_POS = [150, 250]
BOX_SIZE = [350, 120]
sans.hand_direction = DOWN
player.type = BLUE_SOUL
player.direction = DOWN
player.falling_speed = 10
tasks.append(Task(shake,
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
tasks.append(Task(unshake,
((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
for _ in range(10):
bones.append(
Bone(
pos=[BOX_POS[0], BOX_POS[1] - 8],
length=BOX_SIZE[1] - 30 - 16,
direction=DOWN,
time1=1000,
time2=_ * 60 + 60,
speed=[4, 0],
type_=2
))
bones.append(
Bone(
pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 10 - 8],
length=1000,
direction=DOWN,
time1=1000,
time2=_ * 60 + 60,
speed=[4, 0],
type_=1
))
bones.append(
Bone(
pos=BOX_POS,
length=1000,
direction=DOWN,
time1=1000,
time2=_ * 60 + 60 + 16,
speed=[4, 0],
type_=1,
color=BLUE
))
@add_attack
def orange_bone():
def start_spinning(screen):
global spinning_left
spinning_left = True
def stop_spinning(screen):
global spinning_left
spinning_left = False
tasks.append(Task(start_spinning, 0))
tasks.append(Task(stop_spinning, 180))
tasks.append(Task(lambda screen:set_screen_angle(180), 181))
tasks.append(Task(start_spinning, 520))
tasks.append(Task(stop_spinning, 700))
tasks.append(Task(lambda screen:set_screen_angle(0), 701))
set_turn_time(700)
sans.hand_direction = UP
player.type = BLUE_SOUL
player.direction = UP
player.falling_speed = 10
tasks.append(Task(shake,
(player.pos[1] - BOX_POS[1]) // 10))
tasks.append(Task(unshake,
((player.pos[1] - BOX_POS[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
for _ in range(10):
bones.append(
Bone(
pos=[BOX_POS[0], BOX_POS[1] - 8],
length=10,
direction=DOWN,
time1=1000,
time2=_ * 60 + 60,
speed=[8, 0],
type_=2
))
bones.append(
Bone(
pos=[BOX_POS[0], BOX_POS[1] + 30 + 16],
length=1000,
direction=DOWN,
time1=1000,
time2=_ * 60 + 60,
speed=[8, 0],
type_=1
))
bones.append(
Bone(
pos=BOX_POS,
length=1000,
direction=DOWN,
time1=1000,
time2=_ * 60 + 60 + 8,
speed=[8, 0],
type_=1,
color=ORANGE
))
players_turn("* ...")
@add_attack
def bone_gap():
set_turn_time(1000)
global BOX_POS, BOX_SIZE
BOX_POS = [150, 230]
BOX_SIZE = [300, 150]
sans.hand_direction = DOWN
player.type = BLUE_SOUL
player.direction = DOWN
player.falling_speed = 10
tasks.append(Task(shake,
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
tasks.append(Task(unshake,
((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
for _ in range(10):
x = BOX_POS[0] + random.randint(100, BOX_SIZE[0] - 100)
bones.append(Bone(
pos=[x, BOX_POS[1]],
time1=10,
time2=_ * 100,
speed=[0, 0, BOX_SIZE[1] / 10],
length=0,
direction=DOWN,
color=BLUE
))
bones.append(Bone(
pos=[x, BOX_POS[1]],
time1=10,
time2=_ * 100 + 10,
speed=[0, 0, -BOX_SIZE[1] / 10],
length=BOX_SIZE[1],
direction=DOWN,
color=BLUE
))
tasks.append(Task(shake,_ * 100 + 10))
tasks.append(Task(unshake,_ * 100 + 15))
tasks.append(Task(lambda screen : slam_sound.play(),
_ * 100 + 15))
y = BOX_POS[1] + random.randint(70, BOX_SIZE[1] - 30)
bones.append(Bone(
pos=[BOX_POS[0], y],
time1=10,
time2=_ * 100,
speed=[0, 0, BOX_SIZE[0] / 10],
length=0,
direction=RIGHT,
color=ORANGE
))
bones.append(Bone(
pos=[BOX_POS[0], y],
time1=10,
time2=_ * 100 + 10,
speed=[0, 0, -BOX_SIZE[0] / 10],
length=BOX_SIZE[0],
direction=RIGHT,
color=ORANGE
))
bones.append(
Bone(
pos=[BOX_POS[0], BOX_POS[1] - 8],
length=y - BOX_POS[1] - 16,
direction=DOWN,
time1=1000,
time2=_ * 100 + 60,
speed=[(x - BOX_POS[0]) / 30, 0],
type_=2
))
bones.append(
Bone(
pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 8],
length=y - BOX_POS[1] - 16,
direction=DOWN,
time1=1000,
time2=_ * 100 + 60,
speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],
type_=2
))
bones.append(
Bone(
pos=[BOX_POS[0], y + 8],
length=1000,
direction=DOWN,
time1=1000,
time2=_ * 100 + 60,
speed=[(x - BOX_POS[0]) / 30, 0],
type_=1
))
bones.append(
Bone(
pos=[BOX_POS[0] + BOX_SIZE[0], y + 8],
length=1000,
direction=DOWN,
time1=1000,
time2=_ * 100 + 60,
speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],
type_=1
))
players_turn("* ...")
@add_attack
def board_1():
set_turn_time(10)
global BOX_POS, BOX_SIZE
BOX_POS = [50, 240]
BOX_SIZE = [500, 140]
sans.hand_direction = DOWN
player.type = BLUE_SOUL
player.direction = DOWN
player.falling_speed = 10
tasks.append(Task(shake,
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
tasks.append(Task(unshake,
((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
@add_attack
def board_2():
set_turn_time(600)
tasks.append(Task(shake, 70))
tasks.append(Task(unshake, 75))
blasters.append(
GasterBlaster(
pos=[10, BOX_POS[1] + BOX_SIZE[1]],
angle=0,
time1=10,
time2=70,
time3=10,
width=70
)
)
blasters.append(
GasterBlaster(
pos=[10, BOX_POS[1]],
angle=0,
time1=10,
time2=70,
time3=10,
width=30
)
)
for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):
bones.append(
Bone(
pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],
length=1000,
direction=UP,
time1=1000,
time2=100,
speed=[0, 0],
type_=1
)
)
bones.append(
Bone(
pos=[x, BOX_POS[1] - 8],
length=5,
direction=DOWN,
time1=1000,
time2=100,
speed=[0, 0],
type_=2
)
)
boards.append(
Board(
pos=[BOX_POS[0],BOX_POS[1] + BOX_SIZE[1] - 40],
length=40,
speed=[1, 0],
time1=BOX_SIZE[0],
time2=100,
direction=UP
)
)
for _ in range(0, 20, 4):
bones.append(
Bone(
pos=[BOX_POS[0] + BOX_SIZE[0],
BOX_POS[1] + BOX_SIZE[1] - 40 - 25],
length=1000,
direction=UP,
time1=BOX_SIZE[0] // 4,
time2=150 + (_ * 30),
speed=[-4, 0]
)
)
def start_spinning(screen):
global spinning_left
spinning_left = True
def stop_spinning(screen):
global spinning_left
spinning_left = False
tasks.append(Task(start_spinning, 200))
tasks.append(Task(stop_spinning, 380))
tasks.append(Task(start_spinning, 500))
tasks.append(Task(stop_spinning, 680))
tasks.append(Task(lambda screen:set_screen_angle(0), 682))
@add_attack
def board_3():
set_turn_time(100)
sans.hand_direction = LEFT
player.type = BLUE_SOUL
player.direction = LEFT
player.falling_speed = 10
tasks.append(Task(shake,
(player.pos[0] - BOX_POS[0]) // 10))
tasks.append(Task(unshake,
((player.pos[0] - BOX_POS[0]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(player.pos[0] - BOX_POS[0]) // 10))
tasks.append(Task(shake, 60))
tasks.append(Task(unshake, 65))
blasters.append(
GasterBlaster(
pos=[BOX_POS[0], 10],
angle=90,
time1=10,
time2=50,
time3=0,
width=50
)
)
@add_attack
def board_4():
set_turn_time(0)
bones.clear()
players_turn("* ...")
@add_attack
def board_2_1():
set_turn_time(10)
global BOX_POS, BOX_SIZE
BOX_POS = [50, 240]
BOX_SIZE = [500, 140]
sans.hand_direction = DOWN
player.type = BLUE_SOUL
player.direction = DOWN
player.falling_speed = 10
tasks.append(Task(shake,
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
tasks.append(Task(unshake,
((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
@add_attack
def board_2_2():
set_turn_time(600)
tasks.append(Task(shake, 70))
tasks.append(Task(unshake, 75))
blasters.append(
GasterBlaster(
pos=[10, BOX_POS[1] + BOX_SIZE[1]],
angle=0,
time1=10,
time2=70,
time3=10,
width=70
)
)
tasks.append(Task(shake, 250))
tasks.append(Task(unshake, 255))
blasters.append(
GasterBlaster(
pos=[10, BOX_POS[1] + BOX_SIZE[1] - 20],
angle=0,
time1=10,
time2=70,
time3=250,
width=70
)
)
boards.append(
Board(
pos=[BOX_POS[0] + BOX_SIZE[0],
BOX_POS[1] + BOX_SIZE[1] - 30 - 10],
time1=1000,
time2=0,
speed=[-2, 0],
length=40
)
)
boards.append(
Board(
pos=[BOX_POS[0] + BOX_SIZE[0],
BOX_POS[1] + BOX_SIZE[1] - 30 - 10],
time1=1000,
time2=100,
speed=[-1.5, 0],
length=40
)
)
boards.append(
Board(
pos=[BOX_POS[0] + BOX_SIZE[0],
BOX_POS[1] + BOX_SIZE[1] - 30 - 10],
time1=1000,
time2=200,
speed=[-1, 0],
length=40
)
)
boards.append(
Board(
pos=[BOX_POS[0] + BOX_SIZE[0],
BOX_POS[1] + BOX_SIZE[1] - 30 - 30],
time1=1000,
time2=300,
speed=[-3, 0],
length=80
)
)
for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):
bones.append(
Bone(
pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],
length=1000,
direction=UP,
time1=400,
time2=100,
speed=[0, 0],
type_=1
)
)
bones.append(
Bone(
pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],
length=1000,
direction=UP,
time1=1000,
time2=500,
speed=[0, 0],
type_=1
)
)
players_turn("* ...")
@add_attack
def bone_lid1():
set_turn_time(70)
global BOX_SIZE, BOX_POS
BOX_POS = [200, 240]
BOX_SIZE = [200, 150]
sans.hand_direction = DOWN
player.type = BLUE_SOUL
player.direction = DOWN
player.falling_speed = 10
tasks.append(Task(shake,
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
tasks.append(Task(unshake,
((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
bones.append(
RotatableBone(
pos=[BOX_POS[0] - 70, BOX_POS[1] + BOX_SIZE[1]],
time1=1000,
length=130,
angle=45,
speed=[5, 0, 0, 0]
)
)
bones.append(
RotatableBone(
pos=[BOX_POS[0] + BOX_SIZE[0] + 70, BOX_POS[1] + BOX_SIZE[1]],
time1=1000,
length=130,
angle=-45,
speed=[-5, 0, 0, 0]
)
)
@add_attack
def bone_lid2():
set_turn_time(60)
sans.hand_direction = UP
player.type = BLUE_SOUL
player.direction = UP
player.falling_speed = 10
player.falling = True
tasks.append(Task(shake,
(player.pos[1] - BOX_POS[1]) // 10))
tasks.append(Task(unshake,
((player.pos[1] - BOX_POS[1]) // 10) + 5))
tasks.append(Task(lambda screen : slam_sound.play(),
(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
bones.append(
RotatableBone(
pos=[BOX_POS[0] - 20, BOX_POS[1]],
time1=1000,
length=130,
angle=-45,
speed=[5, 0, 0, 0]
)
)
bones.append(
RotatableBone(
pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1]],
time1=1000,
length=130,
angle=45,
speed=[-5, 0, 0, 0]
)
)
@add_attack
def bone_lid3():
set_turn_time(1300)
player.type = RED_SOUL
for _ in range(20):
bones.append(
RotatableBone(
pos=[BOX_POS[0], BOX_POS[1] - 20],
time1=1000,
time2=_ * 60,
length=260,
angle=-45,
speed=[0, 2, 0, 0]
)
)
bones.append(
RotatableBone(
pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] + 20],
time1=1000,
time2=_ * 60,
length=260,
angle=45,
speed=[0, -2, 0, 0]
)
)
bones.append(
RotatableBone(
pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 20],
time1=1000,
time2=_ * 60 + 30,
length=260,
angle=45,
speed=[0, 2, 0, 0]
)
)
bones.append(
RotatableBone(
pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] + BOX_SIZE[1] + 20],
time1=1000,
time2=_ * 60 + 30,
length=260,
angle=-45,
speed=[0, -2, 0, 0]
)
)
players_turn("* ...")
@add_attack
def mercy1():
pygame.mixer.music.pause()
sans.say("好了,我也累了,不如我们休息一下?")
@add_attack
def mercy2():
sans.say("这也是一个改过自新的机会,")
@add_attack
def mercy3():
sans.say("赶紧按下饶恕,")
@add_attack
def mercy4():
sans.headtype = SANS_NO_EYES
sans.say("否则你绝对不想见到下一个回合")
@add_attack
def mercy5():
set_turn_time(0)
sans.headtype = SANS_NORMAL
players_turn("* ...")
@add_attack
def before_flash():
sans.say("好吧,看来你已经做出了自己的选择。")
@add_attack
def flash_round():
set_turn_time(10)
global blackout
flash_sound.play()
blackout = True
bones.clear()
blasters.clear()
boards.clear()
def flash(screen):
global blackout
blackout = False
flash_sound.play()
pygame.mixer.music.unpause()
tasks.append(Task(flash, 10))
def flash_round_1():
set_turn_time(150)
global _boxsize, _boxpos, BOX_POS, BOX_SIZE
player.type = BLUE_SOUL
player.direction = DOWN
BOX_SIZE = _boxsize = [150, 150]
BOX_POS = _boxpos = [230, 230]
player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
100000]
direction = random.randint(0, 1)
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] - 30, BOX_POS[1] + BOX_SIZE[1] - 30],
angle=0,
time1=0,
time2=30,
time3=10,
width=90
)
)
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] - 30, BOX_POS[1] - 30],
angle=0,
time1=0,
time2=30,
time3=60,
width=90
)
)
if direction:
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 30],
angle=90,
time1=0,
time2=30,
time3=10,
width=90
)
)
blasters.append(
GasterBlaster(
pos=[BOX_POS[0], BOX_POS[1] - 30],
angle=90,
time1=0,
time2=30,
time3=60,
width=90
)
)
else:
blasters.append(
GasterBlaster(
pos=[BOX_POS[0], BOX_POS[1] - 30],
angle=90,
time1=0,
time2=30,
time3=10,
width=90
)
)
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 30],
angle=90,
time1=0,
time2=30,
time3=60,
width=90
)
)
for angle in range(0, 360, 10):
bones.append(RotatableBone(
pos=[BOX_POS[0] + BOX_SIZE[0] / 2 + cos(radians(angle)) * BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2 + 25 + sin(radians(angle)) * BOX_SIZE[1] / 2],
length=25,
angle=angle,
time1=150
)
)
if angle % 30 == 0:
bones.append(RotatableBone(
pos=[BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2 + 25],
length=40,
angle=angle,
speed=[0, 0, 0, 5],
time1=130,
time2=20
)
)
def flash_round_2():
set_turn_time(100)
global _boxsize, _boxpos, BOX_POS, BOX_SIZE
BOX_SIZE = _boxsize = [150, 150]
BOX_POS = _boxpos = [230, 230]
player.type = RED_SOUL
player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2]
def zjj(screen):
angle = random.randint(-140, -40)
d = random.randint(10, 200)
blasters.append(GasterBlaster(
pos=[
player.pos[0] + math.cos(math.radians(angle)) * d,
player.pos[1] + math.sin(math.radians(angle)) * d],
angle=angle - 180,
time1=0,
time2=20,
width=50
))
for _ in range(0, 50):
tasks.append(Task(zjj, _ / 2))
def flash_round_3():
set_turn_time(100)
global _boxsize, _boxpos, BOX_POS, BOX_SIZE
BOX_SIZE = _boxsize = [150, 150]
BOX_POS = _boxpos = [200, 230]
player.type = RED_SOUL
player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2]
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] + BOX_SIZE[0] / 2, 50],
angle=90,
time1=10,
time2=70,
time3=0,
width=60
)
)
blasters.append(
GasterBlaster(
pos=[50, BOX_POS[1] + BOX_SIZE[1] / 2],
angle=0,
time1=10,
time2=70,
time3=0,
width=60
)
)
def flash_round_4():
set_turn_time(100)
global _boxsize, _boxpos, BOX_POS, BOX_SIZE
BOX_SIZE = _boxsize = [150, 150]
BOX_POS = _boxpos = [230, 230]
player.type = RED_SOUL
player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2]
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] - 10, BOX_POS[1] - 10],
angle=45,
time1=10,
time2=70,
time3=0,
width=60
)
)
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] - 10, BOX_POS[1] + BOX_SIZE[1] + 10],
angle=-45,
time1=10,
time2=70,
time3=0,
width=60
)
)
def flash_round_5():
set_turn_time(100)
global _boxsize, _boxpos, BOX_POS, BOX_SIZE
BOX_SIZE = _boxsize = [150, 150]
BOX_POS = _boxpos = [230, 230]
player.type = RED_SOUL
player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2]
blasters.append(
GasterBlaster(
pos=[BOX_POS[0], 50],
angle=90,
time1=10,
time2=70,
time3=0,
width=60
)
)
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] + BOX_SIZE[0], 50],
angle=90,
time1=10,
time2=70,
time3=0,
width=60
)
)
blasters.append(
GasterBlaster(
pos=[50, BOX_POS[1] + 50],
angle=0,
time1=10,
time2=70,
time3=0,
width=100
)
)
def flash_round_6():
set_turn_time(100)
global _boxsize, _boxpos, BOX_POS, BOX_SIZE
BOX_SIZE = _boxsize = [150, 150]
BOX_POS = _boxpos = [230, 230]
player.type = RED_SOUL
player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2]
blasters.append(
GasterBlaster(
pos=[BOX_POS[0], 50],
angle=90,
time1=10,
time2=70,
time3=0,
width=60
)
)
blasters.append(
GasterBlaster(
pos=[BOX_POS[0] + BOX_SIZE[0], 50],
angle=90,
time1=10,
time2=70,
time3=0,
width=60
)
)
blasters.append(
GasterBlaster(
pos=[50, BOX_POS[1] + BOX_SIZE[1] - 50],
angle=0,
time1=10,
time2=70,
time3=0,
width=100
)
)
def flash_round_7():
set_turn_time(150)
global BOX_SIZE, BOX_POS, _boxpos, _boxsize
BOX_POS = _boxpos = [230, 230]
BOX_SIZE = _boxsize = [150, 150]
player.type = RED_SOUL
player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2]
for _ in range(3):
bones.append(
RotatableBone(
pos=[BOX_POS[0], BOX_POS[1] - 20],
time1=1000,
time2=_ * 50 + 20,
length=150,
angle=-20,
speed=[0, 4, 0, 0]
)
)
bones.append(
RotatableBone(
pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] + 20],
time1=1000,
time2=_ * 50 + 20,
length=150,
angle=20,
speed=[0, -4, 0, 0]
)
)
bones.append(
RotatableBone(
pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 20],
time1=1000,
time2=_ * 50 + 50,
length=150,
angle=20,
speed=[0, 4, 0, 0]
)
)
bones.append(
RotatableBone(
pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] + BOX_SIZE[1] + 20],
time1=1000,
time2=_ * 50 + 50,
length=150,
angle=-20,
speed=[0, -4, 0, 0]
)
)
random_attacks = [flash_round_1,
flash_round_2,
flash_round_3,
flash_round_4,
flash_round_5,
flash_round_6,
flash_round_7]
for _ in range(5):
attacks.append(random.choice(random_attacks))
attacks.append(flash_round)
players_turn("* ...")
@add_attack
def windmill():
set_turn_time(1200)
global BOX_POS, BOX_SIZE, before_strike, after_strike
def before_strike():
global sans_damage
sans_damage = 1
after_strike = lambda : ...
BOX_POS = [150, 240]
BOX_SIZE = [150, 150]
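    # movegb nudges all four blasters by one degree per scheduled task,
    # sweeping them around the box like a windmill.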
def movegb(screen):
for i in range(4):
blasters[i].angle += 1
blasters[i].end_angle += 1
blasters[i].radian += radians(-1)
blasters[i].back_speed = 0
for angle in range(360 * 5):
tasks.append(Task(movegb, angle * 0.4 + 100))
def enablerecoil(screen):
for b in blasters:
b.norecoil = False
tasks.append(Task(enablerecoil, 800))
for angle in range(0, 360, 90):
blasters.append(GasterBlaster(
pos=[150 + 150 / 2, 240 + 150 / 2],
angle=angle,
time1=10,
time2=1000,
width=30,
time3=0,
norecoil=True
))
players_turn("* ...")
@add_attack
def gameend():
...
# ------------------------------------
"""主程序"""
while True:
# ---------------------------------------------------------
    '''Instantiation'''
from locals_ import *
time = 0
_boxpos = [0, 0]
_boxsize = SCREEN_SIZE[:]
rightdown = SCREEN_SIZE[:]
time1 = 0
time2 = 0
delta = 1
blasters = []
bones = []
tasks = []
warns = []
texts = []
boards = []
before_strike = None
after_strike = None
sans = Sans([280, 80])
player = Player([0, 0])
actions = {
"* check" : CHECK_SANS,
"* heal ({} time(s) left)" : HEAL_SANS
}
mc_actions = {
"* spare" : MERCY_SANS_SPARE,
"* flee" : MERCY_SANS_FLEE
}
pygame.mixer.music.stop()
if FULL_SCREEN:
display = pygame.display.set_mode((1920, 1080), FULLSCREEN)
else:
display = pygame.display.set_mode(SCREEN_SIZE)
while True:
time1 = time_.time()
        # screen shake
if screen_shaking:
screen_offset[0] = random.randint(-5, 5)
screen_offset[1] = random.randint(-5, 5)
else:
screen_offset = [0, 0]
        # screen rotation (left)
if spinning_left:
screen_angle -= 1
        # screen rotation (right)
if spinning_right:
screen_angle += 1
        # test area
if DEBUG:...
        # battle box position (eased toward BOX_POS)
if _boxpos[0] != BOX_POS[0]:
if abs(BOX_POS[0] - _boxpos[0]) < 0.1:
_boxpos[0] = BOX_POS[0]
else:
_boxpos[0] += (BOX_POS[0] - _boxpos[0]) / 5
if _boxpos[1] != BOX_POS[1]:
if abs(BOX_POS[1] - _boxpos[1]) < 0.1:
_boxpos[1] = BOX_POS[1]
else:
_boxpos[1] += (BOX_POS[1] - _boxpos[1]) / 5
        # battle box size (eased toward BOX_SIZE)
if rightdown[0] != BOX_POS[0] + BOX_SIZE[0]:
if abs(BOX_POS[0] + BOX_SIZE[0] - rightdown[0]) < 0.1:
rightdown[0] = BOX_POS[0] + BOX_SIZE[0]
else:
rightdown[0] += (BOX_POS[0] + BOX_SIZE[0] - rightdown[0]) / 5
if rightdown[1] != BOX_POS[1] + BOX_SIZE[1]:
if abs(BOX_POS[1] + BOX_SIZE[1] - rightdown[1]) < 0.1:
rightdown[1] = BOX_POS[1] + BOX_SIZE[1]
else:
rightdown[1] += (BOX_POS[1] + BOX_SIZE[1] - rightdown[1]) / 5
_boxsize = [
rightdown[0] - _boxpos[0],
rightdown[1] - _boxpos[1]
]
if time >= len(attacks):
exit()
if not stop and not is_players_turn:
attacks[time]()
time += 1
stop = True
screen.fill((0, 0, 0, 255))
display.fill((0, 0, 0))
mask_surface_blue.fill((0, 0, 0, 0))
mask_surface_orange.fill((0, 0, 0, 0))
mask_surface_normal.fill((0, 0, 0, 0))
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
exit()
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
pygame.quit()
exit()
if event.key in (K_z, K_RETURN):
if sans.show_index >= len(sans.text) and sans.show_text == True:
sans.show_text = False
stop = False
elif page in (CHECK_SANS, HEAL_SANS, HEAL_SANS_CANT) and shown_index >= len(battle_text):
is_players_turn = False
stop = False
page = MAIN_PAGE
player.pos = [
BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2
]
player.select_sound.play()
else:
player.choose = is_players_turn
if is_players_turn and page != FIGHT_SANS:
player.select_sound.play()
if event.key in (K_x, K_RSHIFT):
sans.show_index = len(sans.text)
shown_index = len(battle_text)
player.back = True
player.choice = 0
if event.key == K_UP:
player.going_up = True
if event.key == K_DOWN:
player.going_down = True
if event.key == K_LEFT:
player.going_left = True
if event.key == K_RIGHT:
player.going_right = True
if event.key == K_F4:
if FULL_SCREEN:
display = pygame.display.set_mode(SCREEN_SIZE)
FULL_SCREEN = 0
else:
display = pygame.display.set_mode((1920, 1080), FULLSCREEN)
FULL_SCREEN = 1
if event.key == K_F2:
restarting = True
if DEBUG:
if event.key == K_n:
bones.clear()
boards.clear()
blasters.clear()
stop = False
if event.key == K_EQUALS:
frames += 1
if event.key == K_MINUS:
frames -= 1
if event.type == KEYUP:
if event.key == K_UP:
player.going_up = False
if event.key == K_DOWN:
player.going_down = False
if event.key == K_LEFT:
player.going_left = False
if event.key == K_RIGHT:
player.going_right = False
if event.key == K_ESCAPE:
pygame.quit()
exit()
if event.key in (K_z, K_RETURN):
player.choose = False
if event.key in (K_x, K_RSHIFT):
player.back = False
        '''Detection & updates'''
        # battle box
pygame.draw.rect(screen, (255, 255, 255, 255), pygame.Rect((_boxpos[0] - 5, _boxpos[1] - 5),
(_boxsize[0] + 10, _boxsize[1] + 10)))
        pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect(_boxpos, _boxsize)) # inner cover
        # bones
for b in bones:
b.show(screen,
mask_surface_blue,
mask_surface_orange,
mask_surface_normal)
if b.stop:
bones.remove(b)
        # warning boxes
for w in warns:
w.show(screen)
if w.stop:
warns.remove(w)
        # boards (platforms)
for b in boards:
b.show(screen)
if b.stop:
boards.remove(b)
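            # a falling player who lands on a board is carried along with it
            # and snapped onto the side they fell against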
if b.rect.colliderect(player.rect) and player.falling:
player.pos[0] += b.speed[0]
player.pos[1] += b.speed[1]
if player.direction == DOWN:
player.pos[1] = b.rect.top - 7
elif player.direction == UP:
player.pos[1] = b.rect.bottom - 1
elif player.direction == RIGHT:
player.pos[0] = b.rect.left - 7
elif player.direction == LEFT:
player.pos[0] = b.rect.right - 1
player.falling = False
"""外遮挡"""
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, 0), (SCREEN_SIZE[0], _boxpos[1] - 5)))
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, _boxpos[1] - 5), (_boxpos[0] - 5, _boxsize[1] + 10)))
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, _boxpos[1] + _boxsize[1] + 5),
(SCREEN_SIZE[0], SCREEN_SIZE[1] - (_boxpos[1] + _boxsize[1]) - 5)))
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((_boxpos[0] + _boxsize[0] + 5, _boxpos[1] - 5),
(SCREEN_SIZE[0] - (_boxpos[0] + _boxsize[0]) - 5, _boxsize[1] + 10)))
        '''Draw the UI (outside the box)'''
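        # HP bar: red background, a magenta segment for pending KR (karma)
        # damage drawn over it, then a yellow segment for the remaining HP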
pygame.draw.rect(screen, (191, 0, 0, 255), pygame.Rect((275, 400), (92, 20)))
if player.KR:
pygame.draw.rect(screen, (255, 0, 255, 255), pygame.Rect((275 + player.HP, 400), (round(player.KR), 20)))
pygame.draw.rect(screen, (255, 255, 0, 255), pygame.Rect((275, 400), (player.HP, 20)))
screen.blit(
font2.render(
"{:0>2.0f} / 92".format(player.HP + player.KR),
True,
(255, 255, 255) if not round(player.KR) else (255, 0, 255)
),
(
415,
400
)
)
screen.blit(hp_image, (240, 405))
screen.blit(kr_image, (375, 405))
screen.blit(
font2.render(
"Chara LV 19", True, (255, 255, 255)
), (30, 400)
)
        # draw floating texts
for text in texts:
screen.blit(
font.render(
text[1], True, (255, 255, 255)
), text[0]
)
if DEBUG:
screen.blit(
font2.render(
"DEBUG", True, (0, 0, 255)
), (200, 0)
)
        # draw FPS
screen.blit(
font2.render(
"FPS:{:0>3d}".format(round(1 / delta)), True, (0, 0, 255)
), (0, 0)
)
if fight:
screen.blit(fight_highlight_image, fight_pos)
else:
screen.blit(fight_default_image, fight_pos)
if act:
screen.blit(act_highlight_image, act_pos)
else:
screen.blit(act_default_image, act_pos)
if item:
screen.blit(item_highlight_image, item_pos)
else:
screen.blit(item_default_image, item_pos)
if mercy:
screen.blit(mercy_highlight_image, mercy_pos)
else:
screen.blit(mercy_default_image, mercy_pos)
        # sans (must be drawn outside the box)
sans.show(screen)
if show_sans_damage:
if sans_damage == MISS:
screen.blit(miss_image, (250, 60))
        # Gaster Blasters (must be drawn outside the box)
for t in blasters:
t.show(screen,
mask_surface_blue,
mask_surface_orange,
mask_surface_normal)
if t.stop:
blasters.remove(t)
        # other stuff, blah blah blah (outside)
for t in tasks:
t.show(screen)
if t.stop:
tasks.remove(t)
        if is_players_turn: # player's turn
BOX_POS = [30, 250]
BOX_SIZE = [570, 130]
if page == MAIN_PAGE:
if shown_index < len(battle_text):
shown_index += 1
text_sound.play()
x = 40
y = 250
for char in battle_text[:shown_index]:
if char != '\n':
screen.blit(
battle_font.render(char, True, (255, 255, 255)),
(x, y)
)
x += 12
if x > BOX_POS[0] + BOX_SIZE[0] or char == "\n":
y += 16
x = 40
player.type = CURSOR_SOUL
player.options = (
(fight_pos[0] + 10, fight_pos[1] + 15),
( act_pos[0] + 10, act_pos[1] + 15),
( item_pos[0] + 10, item_pos[1] + 15),
(mercy_pos[0] + 10, mercy_pos[1] + 15)
)
if player.choice == 0:
fight = True
act = False
item = False
mercy = False
if player.choice == 1:
fight = False
act = True
item = False
mercy = False
if player.choice == 2:
fight = False
act = False
item = True
mercy = False
if player.choice == 3:
fight = False
act = False
item = False
mercy = True
if player.choose:
page = [FIGHT, ACT, 0, MERCY][player.choice]
player.choose = False
player.choice = 0
fight = False
act = False
item = False
mercy = False
if page == ACT:
player.options = [(40, 255)]
screen.blit(
battle_font.render("* sans", True, (255, 255, 255)),
(40, 250)
)
if player.choose:
page = [ACT_SANS][player.choice]
player.choose = False
player.choice = 0
if player.back:
page = MAIN_PAGE
if page == ACT_SANS:
player.options = []
y = 250
for _ in actions.keys():
if actions[_] == HEAL_SANS:
_ = _.format(heal_times_left)
screen.blit(
battle_font.render(_, True, (255, 255, 255)),
(40, y)
)
player.options.append((40, y + 5))
y += 20
if player.choose:
page = list(actions.values())[player.choice]
if page == HEAL_SANS:
if heal_times_left > 0:
heal(player, 92)
heal_times_left -= 1
else:
page = HEAL_SANS_CANT
player.choose = False
player.choice = 0
if player.back:
page = ACT
if page == CHECK_SANS:
player.type = RED_SOUL
player.pos = [
-100,
-100
]
battle_text = "* Sans\n The TRUE HERO.\n ATK:1\n DEF:1\n Nothing to say."
if shown_index < len(battle_text):
shown_index += 1
text_sound.play()
x = 40
y = 250
for char in battle_text[:shown_index]:
if char != '\n':
screen.blit(
battle_font.render(char, True, (255, 255, 255)),
(x, y)
)
x += 12
if x > BOX_POS[0] + BOX_SIZE[0] or char == "\n":
y += 20
x = 40
if page == HEAL_SANS:
player.type = RED_SOUL
player.pos = [
-100,
-100
]
battle_text = "* You are healthy again now.\n* {} time(s) left.".format(heal_times_left)
if shown_index < len(battle_text):
shown_index += 1
text_sound.play()
x = 40
y = 250
for char in battle_text[:shown_index]:
if char != '\n':
screen.blit(
battle_font.render(char, True, (255, 255, 255)),
(x, y)
)
x += 12
if x > BOX_POS[0] + BOX_SIZE[0] or char == "\n":
y += 20
x = 40
if page == HEAL_SANS_CANT:
player.type = RED_SOUL
player.pos = [
-100,
-100
]
battle_text = "* No more times for you to heal!"
if shown_index < len(battle_text):
shown_index += 1
text_sound.play()
x = 40
y = 250
for char in battle_text[:shown_index]:
if char != '\n':
screen.blit(
battle_font.render(char, True, (255, 255, 255)),
(x, y)
)
x += 12
if x > BOX_POS[0] + BOX_SIZE[0] or char == "\n":
y += 20
x = 40
if page == FIGHT:
player.options = [(40, 255)]
screen.blit(
battle_font.render("* sans", True, (255, 255, 255)),
(40, 250)
)
if player.choose:
page = [FIGHT_SANS][player.choice]
player.choose = False
player.choice = 0
choice_pos = [50, 250]
if player.back:
page = MAIN_PAGE
if page == FIGHT_SANS:
player.type = RED_SOUL
player.pos = [
-100,
-100
]
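# FIGHT mini-game: fade the target bar in, sweep the cursor across it, and
# resolve the strike once the player confirms (or the cursor runs off the bar).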
target_img.set_alpha(target_alpha)
if not choice_blink:
if target_alpha >= 255:
choice_going = True
else:
target_alpha += 10
screen.blit(target_img, [BOX_POS[0] + 10, BOX_POS[1] + 5])
screen.blit([choice_img, choice_blink_img][choice_ani_index // 5 % 2], choice_pos)  # alternate sprites every 5 ticks while blinking
choice_ani_index += choice_blink
choice_pos[0] += choice_going * 8
if choice_going and (player.choose or choice_pos[0] > BOX_POS[0] + BOX_SIZE[0]):
choice_going = False
choice_blink = True
tasks.append(Strike(sans.pos[:]))
if not before_strike:
    sans.target_pos = [100, 80]  # default: sans sidesteps to the left
else:
    before_strike()  # per-round override hook
if choice_blink:
blink_time += 1
if blink_time > 60:
show_sans_damage = False
choice_going = False
choice_blink = False
choice_ani_index = 0
target_alpha = 0
blink_time = 0
is_players_turn = False
stop = False
page = MAIN_PAGE
if not after_strike:
    sans.target_pos = [250, 80]  # default: sans drifts back toward center
else:
    after_strike()
player.pos = [
BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2
]
elif blink_time > 30:
target_alpha -= 10
show_sans_damage = True
if page == MERCY:
player.options = [(40, 255)]
screen.blit(
battle_font.render("* sans", True, (255, 255, 255)),
(40, 250)
)
if player.choose:
page = [MERCY_SANS][player.choice]
player.choose = False
player.choice = 0
if player.back:
page = MAIN_PAGE
if page == MERCY_SANS:
player.options = []
y = 250
for label in mc_actions.keys():
    screen.blit(
        battle_font.render(label, True, (255, 255, 255)),
        (40, y)
    )
    player.options.append((40, y + 5))
    y += 20
if player.choose:
page = list(mc_actions.values())[player.choice]
player.choose = False
player.choice = 0
if player.back:
page = MERCY
if page == MERCY_SANS_SPARE:  # you spared him; presumably you don't want to keep playing
exit()
if page == MERCY_SANS_FLEE:  # you fled; presumably you don't want to keep playing
exit()
# you died
if player.HP + player.KR <= 0:
DEAD = True
if DEAD or restarting:
break
# damage detection (pixel-perfect, via pygame masks)
blue_mask = pygame.mask.from_surface(mask_surface_blue)
orange_mask = pygame.mask.from_surface(mask_surface_orange)
normal_mask = pygame.mask.from_surface(mask_surface_normal)
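# blue attacks hurt only a moving soul, orange only a stationary one,
# and normal attacks hurt on any contact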
if mask_collide(blue_mask, player.mask, [0, 0], player.mask_pos):
if any([player.going_up, player.going_down, player.going_left, player.going_right, player.falling]):
damage(player)
if mask_collide(orange_mask, player.mask, [0, 0], player.mask_pos):
if not any([player.going_up, player.going_down, player.going_left, player.going_right, player.falling]):
damage(player)
if mask_collide(normal_mask, player.mask, [0, 0], player.mask_pos):
damage(player)
# the player's soul
player.show(screen, _boxpos, _boxsize)
# blackout attack: hide everything
if blackout:
screen.fill(0x000000)
"""将screen的图像加工后放入display"""
if not FULL_SCREEN:
rotated_screen = pygame.transform.rotate(screen, screen_angle)
else:
screen_rect = screen.get_rect()
rotated_screen = pygame.transform.rotate(
pygame.transform.scale(
screen,
(
round(screen_rect.size[1] / screen_rect.size[0] * 1920),
1080
)
),
screen_angle
)
rotated_rect = rotated_screen.get_rect()
if not FULL_SCREEN:
rotated_rect.center = [SCREEN_SIZE[0] // 2, SCREEN_SIZE[1] // 2]
else:
rotated_rect.center = [960, 540]
display.blit(rotated_screen,
(rotated_rect.x + screen_offset[0],
rotated_rect.y + screen_offset[1]))
fps.tick(frames)
pygame.display.update()
time2 = time_.time()
delta = time2 - time1
if not restarting:
ticks = 0
heart_offset = [0, 0]
while True:
'''post-death sequence'''
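# timeline in ticks: <60 alive_img, 60 split sound + cracked dead_img,
# 100-159 the cracked heart shakes, 160 second split sound, 200 sequence ends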
pygame.mixer.music.stop()
ticks += 1
screen.fill((0, 0, 0, 255))
if ticks >= 200:
break
if ticks >= 160:
screen.blit(alive_img, player.rect)
if ticks == 160:
split_sound.play()
elif ticks >= 100:
screen.blit(dead_img,
(player.rect.x + heart_offset[0],
player.rect.y + heart_offset[1]))
heart_offset = [random.randint(-2, 2), random.randint(-2, 2)]
elif ticks >= 60:
screen.blit(dead_img, player.rect)
if ticks == 60:
split_sound.play()
else:
screen.blit(alive_img, player.rect)
if not FULL_SCREEN:
rotated_screen = pygame.transform.rotate(screen, screen_angle)
else:
screen_rect = screen.get_rect()
rotated_screen = pygame.transform.rotate(
pygame.transform.scale(
screen,
(
round(screen_rect.size[1] / screen_rect.size[0] * 1920),
1080
)
),
screen_angle
)
rotated_rect = rotated_screen.get_rect()
if not FULL_SCREEN:
rotated_rect.center = [SCREEN_SIZE[0] // 2, SCREEN_SIZE[1] // 2]
else:
rotated_rect.center = [960, 540]
display.blit(rotated_screen,
(rotated_rect.x + screen_offset[0],
rotated_rect.y + screen_offset[1]))
fps.tick(frames)
pygame.display.update()
bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 10 - 8],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60,\n speed=[4, 0],\n type_=1\n ))\n \n bones.append(\n Bone(\n pos=BOX_POS,\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60 + 16,\n speed=[4, 0],\n type_=1,\n color=BLUE\n ))\n \n@add_attack\ndef orange_bone():\n def start_spinning(screen):\n global spinning_left\n spinning_left = True\n def stop_spinning(screen):\n global spinning_left\n spinning_left = False\n tasks.append(Task(start_spinning, 0))\n tasks.append(Task(stop_spinning, 180))\n tasks.append(Task(lambda screen:set_screen_angle(180), 181))\n tasks.append(Task(start_spinning, 520))\n tasks.append(Task(stop_spinning, 700))\n tasks.append(Task(lambda screen:set_screen_angle(0), 701))\n set_turn_time(700)\n sans.hand_direction = UP\n player.type = BLUE_SOUL\n player.direction = UP\n player.falling_speed = 10\n tasks.append(Task(shake,\n (player.pos[1] - BOX_POS[1]) // 10))\n tasks.append(Task(unshake,\n ((player.pos[1] - BOX_POS[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] - 8],\n length=10,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60,\n speed=[8, 0],\n type_=2\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] + 30 + 16],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60,\n speed=[8, 0],\n type_=1\n ))\n \n bones.append(\n Bone(\n pos=BOX_POS,\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60 + 8,\n speed=[8, 0],\n type_=1,\n color=ORANGE\n ))\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef bone_gap():\n set_turn_time(1000)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 230]\n BOX_SIZE = [300, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n x = BOX_POS[0] + random.randint(100, BOX_SIZE[0] - 100)\n bones.append(Bone(\n pos=[x, BOX_POS[1]],\n time1=10,\n time2=_ * 100,\n speed=[0, 0, BOX_SIZE[1] / 10],\n length=0,\n direction=DOWN,\n color=BLUE\n ))\n bones.append(Bone(\n pos=[x, BOX_POS[1]],\n time1=10,\n time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[1] / 10],\n length=BOX_SIZE[1],\n direction=DOWN,\n color=BLUE\n ))\n tasks.append(Task(shake,_ * 100 + 10))\n tasks.append(Task(unshake,_ * 100 + 15))\n tasks.append(Task(lambda screen : slam_sound.play(),\n _ * 100 + 15))\n \n y = BOX_POS[1] + random.randint(70, BOX_SIZE[1] - 30)\n bones.append(Bone(\n pos=[BOX_POS[0], y],\n time1=10,\n time2=_ * 100,\n speed=[0, 0, BOX_SIZE[0] / 10],\n length=0,\n direction=RIGHT,\n color=ORANGE\n ))\n bones.append(Bone(\n pos=[BOX_POS[0], y],\n time1=10,\n time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[0] / 10],\n length=BOX_SIZE[0],\n direction=RIGHT,\n color=ORANGE\n ))\n\n \n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] - 8],\n length=y - BOX_POS[1] - 16,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[(x - BOX_POS[0]) / 30, 0],\n type_=2\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 8],\n length=y - BOX_POS[1] - 16,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n 
speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],\n type_=2\n ))\n\n \n bones.append(\n Bone(\n pos=[BOX_POS[0], y + 8],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[(x - BOX_POS[0]) / 30, 0],\n type_=1\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0] + BOX_SIZE[0], y + 8],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],\n type_=1\n ))\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef board_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n \n@add_attack\ndef board_2():\n set_turn_time(600)\n tasks.append(Task(shake, 70))\n tasks.append(Task(unshake, 75))\n blasters.append(\n GasterBlaster(\n pos=[10, BOX_POS[1] + BOX_SIZE[1]],\n angle=0,\n time1=10,\n time2=70,\n time3=10,\n width=70\n )\n )\n\n blasters.append(\n GasterBlaster(\n pos=[10, BOX_POS[1]],\n angle=0,\n time1=10,\n time2=70,\n time3=10,\n width=30\n )\n )\n\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],\n length=1000,\n direction=UP,\n time1=1000,\n time2=100,\n speed=[0, 0],\n type_=1\n )\n )\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] - 8],\n length=5,\n direction=DOWN,\n time1=1000,\n time2=100,\n speed=[0, 0],\n type_=2\n )\n )\n boards.append(\n Board(\n pos=[BOX_POS[0],BOX_POS[1] + BOX_SIZE[1] - 40],\n length=40,\n speed=[1, 0],\n time1=BOX_SIZE[0],\n time2=100,\n direction=UP\n )\n )\n\n for _ in range(0, 20, 4):\n bones.append(\n Bone(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 40 - 25],\n length=1000,\n direction=UP,\n time1=BOX_SIZE[0] // 4,\n time2=150 + (_ * 30),\n speed=[-4, 0]\n )\n )\n def start_spinning(screen):\n global spinning_left\n spinning_left = True\n def stop_spinning(screen):\n global spinning_left\n spinning_left = False\n tasks.append(Task(start_spinning, 200))\n tasks.append(Task(stop_spinning, 380))\n tasks.append(Task(start_spinning, 500))\n tasks.append(Task(stop_spinning, 680))\n tasks.append(Task(lambda screen:set_screen_angle(0), 682))\n\n@add_attack\ndef board_3():\n set_turn_time(100)\n sans.hand_direction = LEFT\n player.type = BLUE_SOUL\n player.direction = LEFT\n player.falling_speed = 10\n tasks.append(Task(shake,\n (player.pos[0] - BOX_POS[0]) // 10))\n tasks.append(Task(unshake,\n ((player.pos[0] - BOX_POS[0]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (player.pos[0] - BOX_POS[0]) // 10))\n \n tasks.append(Task(shake, 60))\n tasks.append(Task(unshake, 65))\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], 10],\n angle=90,\n time1=10,\n time2=50,\n time3=0,\n width=50\n )\n )\n\n@add_attack\ndef board_4():\n set_turn_time(0)\n bones.clear()\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef board_2_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + 
BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n\n@add_attack\ndef board_2_2():\n set_turn_time(600)\n tasks.append(Task(shake, 70))\n tasks.append(Task(unshake, 75))\n blasters.append(\n GasterBlaster(\n pos=[10, BOX_POS[1] + BOX_SIZE[1]],\n angle=0,\n time1=10,\n time2=70,\n time3=10,\n width=70\n )\n )\n \n tasks.append(Task(shake, 250))\n tasks.append(Task(unshake, 255))\n blasters.append(\n GasterBlaster(\n pos=[10, BOX_POS[1] + BOX_SIZE[1] - 20],\n angle=0,\n time1=10,\n time2=70,\n time3=250,\n width=70\n )\n )\n\n boards.append(\n Board(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 30 - 10],\n time1=1000,\n time2=0,\n speed=[-2, 0],\n length=40\n )\n )\n\n boards.append(\n Board(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 30 - 10],\n time1=1000,\n time2=100,\n speed=[-1.5, 0],\n length=40\n )\n )\n\n boards.append(\n Board(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 30 - 10],\n time1=1000,\n time2=200,\n speed=[-1, 0],\n length=40\n )\n )\n\n boards.append(\n Board(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 30 - 30],\n time1=1000,\n time2=300,\n speed=[-3, 0],\n length=80\n )\n )\n \n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],\n length=1000,\n direction=UP,\n time1=400,\n time2=100,\n speed=[0, 0],\n type_=1\n )\n )\n\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],\n length=1000,\n direction=UP,\n time1=1000,\n time2=500,\n speed=[0, 0],\n type_=1\n )\n )\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef bone_lid1():\n set_turn_time(70)\n global BOX_SIZE, BOX_POS\n BOX_POS = [200, 240]\n BOX_SIZE = [200, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] - 70, BOX_POS[1] + BOX_SIZE[1]],\n time1=1000,\n length=130,\n angle=45,\n speed=[5, 0, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0] + 70, BOX_POS[1] + BOX_SIZE[1]],\n time1=1000,\n length=130,\n angle=-45,\n speed=[-5, 0, 0, 0]\n )\n )\n\n@add_attack\ndef bone_lid2():\n set_turn_time(60)\n sans.hand_direction = UP\n player.type = BLUE_SOUL\n player.direction = UP\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake,\n (player.pos[1] - BOX_POS[1]) // 10))\n tasks.append(Task(unshake,\n ((player.pos[1] - BOX_POS[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] - 20, BOX_POS[1]],\n time1=1000,\n length=130,\n angle=-45,\n speed=[5, 0, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1]],\n time1=1000,\n length=130,\n angle=45,\n speed=[-5, 0, 0, 0]\n )\n )\n\n@add_attack\ndef bone_lid3():\n set_turn_time(1300)\n player.type = RED_SOUL\n for _ in range(20):\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0], BOX_POS[1] - 20],\n time1=1000,\n time2=_ * 60,\n length=260,\n angle=-45,\n speed=[0, 2, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n 
pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] + 20],\n time1=1000,\n time2=_ * 60,\n length=260,\n angle=45,\n speed=[0, -2, 0, 0]\n )\n )\n \n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 20],\n time1=1000,\n time2=_ * 60 + 30,\n length=260,\n angle=45,\n speed=[0, 2, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] + BOX_SIZE[1] + 20],\n time1=1000,\n time2=_ * 60 + 30,\n length=260,\n angle=-45,\n speed=[0, -2, 0, 0]\n )\n )\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef mercy1():\n pygame.mixer.music.pause()\n sans.say(\"好了,我也累了,不如我们休息一下?\")\n\n@add_attack\ndef mercy2():\n sans.say(\"这也是一个改过自新的机会,\")\n\n@add_attack\ndef mercy3():\n sans.say(\"赶紧按下饶恕,\")\n\n@add_attack\ndef mercy4():\n sans.headtype = SANS_NO_EYES\n sans.say(\"否则你绝对不想见到下一个回合\")\n\n@add_attack\ndef mercy5():\n set_turn_time(0)\n sans.headtype = SANS_NORMAL\n \nplayers_turn(\"* ...\")\n@add_attack\ndef before_flash():\n sans.say(\"好吧,看来你已经做出了自己的选择。\")\n \n@add_attack\ndef flash_round():\n set_turn_time(10)\n global blackout\n flash_sound.play()\n blackout = True\n bones.clear()\n blasters.clear()\n boards.clear()\n def flash(screen):\n global blackout\n blackout = False\n flash_sound.play()\n pygame.mixer.music.unpause()\n tasks.append(Task(flash, 10))\n \ndef flash_round_1():\n set_turn_time(150)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n player.type = BLUE_SOUL\n player.direction = DOWN\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n 100000]\n direction = random.randint(0, 1)\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] - 30, BOX_POS[1] + BOX_SIZE[1] - 30],\n angle=0,\n time1=0,\n time2=30,\n time3=10,\n width=90\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] - 30, BOX_POS[1] - 30],\n angle=0,\n time1=0,\n time2=30,\n time3=60,\n width=90\n )\n )\n if direction:\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 30],\n angle=90,\n time1=0,\n time2=30,\n time3=10,\n width=90\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], BOX_POS[1] - 30],\n angle=90,\n time1=0,\n time2=30,\n time3=60,\n width=90\n )\n )\n else:\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], BOX_POS[1] - 30],\n angle=90,\n time1=0,\n time2=30,\n time3=10,\n width=90\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 30],\n angle=90,\n time1=0,\n time2=30,\n time3=60,\n width=90\n )\n )\n for angle in range(0, 360, 10):\n bones.append(RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0] / 2 + cos(radians(angle)) * BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2 + 25 + sin(radians(angle)) * BOX_SIZE[1] / 2],\n length=25,\n angle=angle,\n time1=150\n )\n )\n if angle % 30 == 0:\n bones.append(RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2 + 25],\n length=40,\n angle=angle,\n speed=[0, 0, 0, 5],\n time1=130,\n time2=20\n )\n )\n\ndef flash_round_2():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n def zjj(screen):\n angle = random.randint(-140, -40)\n d = random.randint(10, 200)\n blasters.append(GasterBlaster(\n pos=[\n player.pos[0] + math.cos(math.radians(angle)) * d,\n player.pos[1] + math.sin(math.radians(angle)) * d],\n angle=angle - 180,\n time1=0,\n time2=20,\n width=50\n ))\n 
for _ in range(0, 50):\n tasks.append(Task(zjj, _ / 2))\n\ndef flash_round_3():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [200, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0] / 2, 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[50, BOX_POS[1] + BOX_SIZE[1] / 2],\n angle=0,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n \ndef flash_round_4():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] - 10, BOX_POS[1] - 10],\n angle=45,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] - 10, BOX_POS[1] + BOX_SIZE[1] + 10],\n angle=-45,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n \ndef flash_round_5():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0], 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[50, BOX_POS[1] + 50],\n angle=0,\n time1=10,\n time2=70,\n time3=0,\n width=100\n )\n )\n \ndef flash_round_6():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0], 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[50, BOX_POS[1] + BOX_SIZE[1] - 50],\n angle=0,\n time1=10,\n time2=70,\n time3=0,\n width=100\n )\n )\n \ndef flash_round_7():\n set_turn_time(150)\n global BOX_SIZE, BOX_POS, _boxpos, _boxsize\n BOX_POS = _boxpos = [230, 230]\n BOX_SIZE = _boxsize = [150, 150]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n for _ in range(3):\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0], BOX_POS[1] - 20],\n time1=1000,\n time2=_ * 50 + 20,\n length=150,\n angle=-20,\n speed=[0, 4, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] + 20],\n time1=1000,\n time2=_ * 50 + 20,\n length=150,\n angle=20,\n speed=[0, -4, 0, 0]\n )\n )\n \n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 20],\n time1=1000,\n time2=_ * 50 + 50,\n length=150,\n angle=20,\n speed=[0, 4, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] + BOX_SIZE[1] + 20],\n time1=1000,\n time2=_ * 50 + 50,\n length=150,\n angle=-20,\n speed=[0, -4, 0, 0]\n )\n )\n \n\nrandom_attacks = [flash_round_1,\n flash_round_2,\n 
flash_round_3,\n flash_round_4,\n flash_round_5,\n flash_round_6,\n flash_round_7]\nfor _ in range(5):\n attacks.append(random.choice(random_attacks))\n attacks.append(flash_round)\n \nplayers_turn(\"* ...\")\n \n@add_attack\ndef windmill():\n set_turn_time(1200)\n global BOX_POS, BOX_SIZE, before_strike, after_strike\n def before_strike():\n global sans_damage\n sans_damage = 1\n after_strike = lambda : ...\n BOX_POS = [150, 240]\n BOX_SIZE = [150, 150]\n\n def movegb(screen):\n for i in range(4):\n blasters[i].angle += 1\n blasters[i].end_angle += 1\n blasters[i].radian += radians(-1)\n blasters[i].back_speed = 0\n\n for angle in range(360 * 5):\n tasks.append(Task(movegb, angle * 0.4 + 100))\n \n def enablerecoil(screen):\n for b in blasters:\n b.norecoil = False\n\n tasks.append(Task(enablerecoil, 800))\n\n for angle in range(0, 360, 90):\n blasters.append(GasterBlaster(\n pos=[150 + 150 / 2, 240 + 150 / 2],\n angle=angle,\n time1=10,\n time2=1000,\n width=30,\n time3=0,\n norecoil=True\n ))\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef gameend():\n ...\n\n# ------------------------------------\n\"\"\"主程序\"\"\"\n\nwhile True:\n # ---------------------------------------------------------\n '''实例化'''\n from locals_ import *\n time = 0\n _boxpos = [0, 0]\n _boxsize = SCREEN_SIZE[:]\n rightdown = SCREEN_SIZE[:]\n\n time1 = 0\n time2 = 0\n delta = 1\n blasters = []\n bones = []\n tasks = []\n warns = []\n texts = []\n boards = []\n before_strike = None\n after_strike = None\n sans = Sans([280, 80])\n player = Player([0, 0])\n actions = {\n \"* check\" : CHECK_SANS,\n \"* heal ({} time(s) left)\" : HEAL_SANS\n }\n mc_actions = {\n \"* spare\" : MERCY_SANS_SPARE,\n \"* flee\" : MERCY_SANS_FLEE\n }\n pygame.mixer.music.stop()\n if FULL_SCREEN:\n display = pygame.display.set_mode((1920, 1080), FULLSCREEN)\n else:\n display = pygame.display.set_mode(SCREEN_SIZE)\n while True:\n time1 = time_.time()\n # 屏幕震动\n if screen_shaking:\n screen_offset[0] = random.randint(-5, 5)\n screen_offset[1] = random.randint(-5, 5)\n else:\n screen_offset = [0, 0]\n # 屏幕旋转\n if spinning_left:\n screen_angle -= 1\n # 屏幕旋转\n if spinning_right:\n screen_angle += 1\n # 测试区\n if DEBUG:...\n # 战斗框位移\n if _boxpos[0] != BOX_POS[0]:\n if abs(BOX_POS[0] - _boxpos[0]) < 0.1:\n _boxpos[0] = BOX_POS[0]\n else:\n _boxpos[0] += (BOX_POS[0] - _boxpos[0]) / 5\n if _boxpos[1] != BOX_POS[1]:\n if abs(BOX_POS[1] - _boxpos[1]) < 0.1:\n _boxpos[1] = BOX_POS[1]\n else:\n _boxpos[1] += (BOX_POS[1] - _boxpos[1]) / 5\n\n # 战斗框大小\n if rightdown[0] != BOX_POS[0] + BOX_SIZE[0]:\n if abs(BOX_POS[0] + BOX_SIZE[0] - rightdown[0]) < 0.1:\n rightdown[0] = BOX_POS[0] + BOX_SIZE[0]\n else:\n rightdown[0] += (BOX_POS[0] + BOX_SIZE[0] - rightdown[0]) / 5\n if rightdown[1] != BOX_POS[1] + BOX_SIZE[1]:\n if abs(BOX_POS[1] + BOX_SIZE[1] - rightdown[1]) < 0.1:\n rightdown[1] = BOX_POS[1] + BOX_SIZE[1]\n else:\n rightdown[1] += (BOX_POS[1] + BOX_SIZE[1] - rightdown[1]) / 5\n _boxsize = [\n rightdown[0] - _boxpos[0],\n rightdown[1] - _boxpos[1]\n ]\n\n if time >= len(attacks):\n exit()\n if not stop and not is_players_turn:\n attacks[time]()\n time += 1\n stop = True\n\n screen.fill((0, 0, 0, 255))\n display.fill((0, 0, 0))\n mask_surface_blue.fill((0, 0, 0, 0))\n mask_surface_orange.fill((0, 0, 0, 0))\n mask_surface_normal.fill((0, 0, 0, 0))\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n exit()\n if event.type == KEYDOWN:\n if event.key == K_ESCAPE:\n pygame.quit()\n exit()\n if event.key in (K_z, K_RETURN):\n if 
sans.show_index >= len(sans.text) and sans.show_text == True:\n sans.show_text = False\n stop = False\n elif page in (CHECK_SANS, HEAL_SANS, HEAL_SANS_CANT) and shown_index >= len(battle_text):\n is_players_turn = False\n stop = False\n page = MAIN_PAGE\n player.pos = [\n BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2\n ]\n player.select_sound.play()\n else:\n player.choose = is_players_turn\n if is_players_turn and page != FIGHT_SANS:\n player.select_sound.play()\n if event.key in (K_x, K_RSHIFT):\n sans.show_index = len(sans.text)\n shown_index = len(battle_text)\n player.back = True\n player.choice = 0\n if event.key == K_UP:\n player.going_up = True\n if event.key == K_DOWN:\n player.going_down = True\n if event.key == K_LEFT:\n player.going_left = True\n if event.key == K_RIGHT:\n player.going_right = True\n if event.key == K_F4:\n if FULL_SCREEN:\n display = pygame.display.set_mode(SCREEN_SIZE)\n FULL_SCREEN = 0\n else:\n display = pygame.display.set_mode((1920, 1080), FULLSCREEN)\n FULL_SCREEN = 1\n if event.key == K_F2:\n restarting = True\n \n if DEBUG:\n if event.key == K_n:\n bones.clear()\n boards.clear()\n blasters.clear()\n stop = False\n if event.key == K_EQUALS:\n frames += 1\n if event.key == K_MINUS:\n frames -= 1\n if event.type == KEYUP:\n if event.key == K_UP:\n player.going_up = False\n if event.key == K_DOWN:\n player.going_down = False\n if event.key == K_LEFT:\n player.going_left = False\n if event.key == K_RIGHT:\n player.going_right = False\n if event.key == K_ESCAPE:\n pygame.quit()\n exit()\n if event.key in (K_z, K_RETURN):\n player.choose = False\n if event.key in (K_x, K_RSHIFT):\n player.back = False\n\n '''检测&更新'''\n \n # 战斗框\n pygame.draw.rect(screen, (255, 255, 255, 255), pygame.Rect((_boxpos[0] - 5, _boxpos[1] - 5),\n (_boxsize[0] + 10, _boxsize[1] + 10)))\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect(_boxpos, _boxsize)) # 内遮挡\n # 骨头\n for b in bones:\n b.show(screen,\n mask_surface_blue,\n mask_surface_orange,\n mask_surface_normal)\n if b.stop:\n bones.remove(b)\n # 警告框\n for w in warns:\n w.show(screen)\n if w.stop:\n warns.remove(w)\n # 板子\n for b in boards:\n b.show(screen)\n if b.stop:\n boards.remove(b)\n \n if b.rect.colliderect(player.rect) and player.falling:\n player.pos[0] += b.speed[0]\n player.pos[1] += b.speed[1]\n if player.direction == DOWN:\n player.pos[1] = b.rect.top - 7\n elif player.direction == UP:\n player.pos[1] = b.rect.bottom - 1\n elif player.direction == RIGHT:\n player.pos[0] = b.rect.left - 7\n elif player.direction == LEFT:\n player.pos[0] = b.rect.right - 1\n player.falling = False\n\n \"\"\"外遮挡\"\"\"\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, 0), (SCREEN_SIZE[0], _boxpos[1] - 5)))\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, _boxpos[1] - 5), (_boxpos[0] - 5, _boxsize[1] + 10)))\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, _boxpos[1] + _boxsize[1] + 5),\n (SCREEN_SIZE[0], SCREEN_SIZE[1] - (_boxpos[1] + _boxsize[1]) - 5)))\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((_boxpos[0] + _boxsize[0] + 5, _boxpos[1] - 5),\n (SCREEN_SIZE[0] - (_boxpos[0] + _boxsize[0]) - 5, _boxsize[1] + 10)))\n \n '''显示UI(外面)'''\n pygame.draw.rect(screen, (191, 0, 0, 255), pygame.Rect((275, 400), (92, 20)))\n if player.KR:\n pygame.draw.rect(screen, (255, 0, 255, 255), pygame.Rect((275 + player.HP, 400), (round(player.KR), 20)))\n pygame.draw.rect(screen, (255, 255, 0, 255), pygame.Rect((275, 400), (player.HP, 20)))\n screen.blit(\n font2.render(\n \"{:0>2.0f} / 
92\".format(player.HP + player.KR),\n True,\n (255, 255, 255) if not round(player.KR) else (255, 0, 255)\n ),\n (\n 415,\n 400\n )\n )\n screen.blit(hp_image, (240, 405))\n screen.blit(kr_image, (375, 405))\n screen.blit(\n font2.render(\n \"Chara LV 19\", True, (255, 255, 255)\n ), (30, 400)\n )\n \n # 显示文本\n for text in texts:\n screen.blit(\n font.render(\n text[1], True, (255, 255, 255)\n ), text[0]\n )\n\n if DEBUG:\n screen.blit(\n font2.render(\n \"DEBUG\", True, (0, 0, 255)\n ), (200, 0)\n )\n # 显示帧数\n screen.blit(\n font2.render(\n \"FPS:{:0>3d}\".format(round(1 / delta)), True, (0, 0, 255)\n ), (0, 0)\n )\n if fight:\n screen.blit(fight_highlight_image, fight_pos)\n else:\n screen.blit(fight_default_image, fight_pos)\n if act:\n screen.blit(act_highlight_image, act_pos)\n else:\n screen.blit(act_default_image, act_pos)\n if item:\n screen.blit(item_highlight_image, item_pos)\n else:\n screen.blit(item_default_image, item_pos)\n if mercy:\n screen.blit(mercy_highlight_image, mercy_pos)\n else:\n screen.blit(mercy_default_image, mercy_pos)\n \n # 鳝丝(要放在外面)\n sans.show(screen)\n if show_sans_damage:\n if sans_damage == MISS:\n screen.blit(miss_image, (250, 60))\n \n # GB炮(要放在外面)\n for t in blasters:\n t.show(screen,\n mask_surface_blue,\n mask_surface_orange,\n mask_surface_normal)\n if t.stop:\n blasters.remove(t)\n\n # 其他东西,blahblahblah(外面)\n for t in tasks:\n t.show(screen)\n if t.stop:\n tasks.remove(t)\n\n if is_players_turn: # 玩家回合\n BOX_POS = [30, 250]\n BOX_SIZE = [570, 130]\n if page == MAIN_PAGE:\n if shown_index < len(battle_text):\n shown_index += 1\n text_sound.play()\n x = 40\n y = 250\n for char in battle_text[:shown_index]:\n if char != '\\n':\n screen.blit(\n battle_font.render(char, True, (255, 255, 255)),\n (x, y)\n )\n x += 12\n if x > BOX_POS[0] + BOX_SIZE[0] or char == \"\\n\":\n y += 16\n x = 40\n player.type = CURSOR_SOUL\n player.options = (\n (fight_pos[0] + 10, fight_pos[1] + 15),\n ( act_pos[0] + 10, act_pos[1] + 15),\n ( item_pos[0] + 10, item_pos[1] + 15),\n (mercy_pos[0] + 10, mercy_pos[1] + 15)\n )\n\n if player.choice == 0:\n fight = True\n act = False\n item = False\n mercy = False\n\n if player.choice == 1:\n fight = False\n act = True\n item = False\n mercy = False\n\n if player.choice == 2:\n fight = False\n act = False\n item = True\n mercy = False\n\n if player.choice == 3:\n fight = False\n act = False\n item = False\n mercy = True\n\n if player.choose:\n page = [FIGHT, ACT, 0, MERCY][player.choice]\n player.choose = False\n player.choice = 0\n fight = False\n act = False\n item = False\n mercy = False\n\n if page == ACT:\n player.options = [(40, 255)]\n screen.blit(\n battle_font.render(\"* sans\", True, (255, 255, 255)),\n (40, 250)\n )\n if player.choose:\n page = [ACT_SANS][player.choice]\n player.choose = False\n player.choice = 0\n if player.back:\n page = MAIN_PAGE\n\n if page == ACT_SANS:\n player.options = []\n y = 250\n for _ in actions.keys():\n if actions[_] == HEAL_SANS:\n _ = _.format(heal_times_left)\n screen.blit(\n battle_font.render(_, True, (255, 255, 255)),\n (40, y)\n )\n player.options.append((40, y + 5))\n y += 20\n \n if player.choose:\n page = list(actions.values())[player.choice]\n if page == HEAL_SANS:\n if heal_times_left > 0:\n heal(player, 92)\n heal_times_left -= 1\n else:\n page = HEAL_SANS_CANT\n player.choose = False\n player.choice = 0\n if player.back:\n page = ACT\n\n if page == CHECK_SANS:\n player.type = RED_SOUL\n player.pos = [\n -100,\n -100\n ]\n battle_text = \"* Sans\\n The TRUE HERO.\\n ATK:1\\n 
DEF:1\\n Nothing to say.\"\n if shown_index < len(battle_text):\n shown_index += 1\n text_sound.play()\n x = 40\n y = 250\n for char in battle_text[:shown_index]:\n if char != '\\n':\n screen.blit(\n battle_font.render(char, True, (255, 255, 255)),\n (x, y)\n )\n x += 12\n if x > BOX_POS[0] + BOX_SIZE[0] or char == \"\\n\":\n y += 20\n x = 40\n\n if page == HEAL_SANS:\n player.type = RED_SOUL\n player.pos = [\n -100,\n -100\n ]\n battle_text = \"* You are healthy again now.\\n* {} time(s) left.\".format(heal_times_left)\n if shown_index < len(battle_text):\n shown_index += 1\n text_sound.play()\n x = 40\n y = 250\n for char in battle_text[:shown_index]:\n if char != '\\n':\n screen.blit(\n battle_font.render(char, True, (255, 255, 255)),\n (x, y)\n )\n x += 12\n if x > BOX_POS[0] + BOX_SIZE[0] or char == \"\\n\":\n y += 20\n x = 40\n\n if page == HEAL_SANS_CANT:\n player.type = RED_SOUL\n player.pos = [\n -100,\n -100\n ]\n battle_text = \"* No more times for you to heal!\"\n if shown_index < len(battle_text):\n shown_index += 1\n text_sound.play()\n x = 40\n y = 250\n for char in battle_text[:shown_index]:\n if char != '\\n':\n screen.blit(\n battle_font.render(char, True, (255, 255, 255)),\n (x, y)\n )\n x += 12\n if x > BOX_POS[0] + BOX_SIZE[0] or char == \"\\n\":\n y += 20\n x = 40\n\n if page == FIGHT:\n player.options = [(40, 255)]\n screen.blit(\n battle_font.render(\"* sans\", True, (255, 255, 255)),\n (40, 250)\n )\n if player.choose:\n page = [FIGHT_SANS][player.choice]\n player.choose = False\n player.choice = 0\n choice_pos = [50, 250]\n if player.back:\n page = MAIN_PAGE\n\n if page == FIGHT_SANS:\n player.type = RED_SOUL\n player.pos = [\n -100,\n -100\n ]\n target_img.set_alpha(target_alpha)\n if not choice_blink:\n if target_alpha >= 255:\n choice_going = True\n else:\n target_alpha += 10\n screen.blit(target_img, [BOX_POS[0] + 10, BOX_POS[1] + 5])\n screen.blit([choice_img, choice_blink_img][choice_ani_index // 5 % 2], choice_pos)\n choice_ani_index += choice_blink\n choice_pos[0] += choice_going * 8\n if choice_going and (player.choose or choice_pos[0] > BOX_POS[0] + BOX_SIZE[0]):\n choice_going = False\n choice_blink = True\n tasks.append(Strike(sans.pos[:]))\n if not before_strike:\n sans.target_pos = [100, 80]\n else:\n before_strike()\n if choice_blink:\n blink_time += 1\n if blink_time > 60:\n show_sans_damage = False\n choice_going = False\n choice_blink = False\n choice_ani_index = 0\n target_alpha = 0\n blink_time = 0\n is_players_turn = False\n stop = False\n page = MAIN_PAGE\n if not after_strike:\n sans.target_pos = [250, 80]\n else:\n after_strike()\n player.pos = [\n BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2\n ]\n elif blink_time > 30:\n target_alpha -= 10\n show_sans_damage = True\n\n if page == MERCY:\n player.options = [(40, 255)]\n screen.blit(\n battle_font.render(\"* sans\", True, (255, 255, 255)),\n (40, 250)\n )\n if player.choose:\n page = [MERCY_SANS][player.choice]\n player.choose = False\n player.choice = 0\n if player.back:\n page = MAIN_PAGE\n\n if page == MERCY_SANS:\n player.options = []\n y = 250\n for _ in mc_actions.keys():\n screen.blit(\n battle_font.render(_, True, (255, 255, 255)),\n (40, y)\n )\n player.options.append((40, y + 5))\n y += 20\n \n if player.choose:\n page = list(mc_actions.values())[player.choice]\n player.choose = False\n player.choice = 0\n if player.back:\n page = MERCY\n\n if page == MERCY_SANS_SPARE: # 你都饶恕了,想必也不想继续玩了()\n exit()\n\n if page == MERCY_SANS_FLEE: # 你都逃跑了,想必也不想继续玩了()\n exit()\n\n # 
你死了\n if player.HP + player.KR <= 0:\n DEAD = True\n if DEAD or restarting:\n break\n\n # 判定伤害\n blue_mask = pygame.mask.from_surface(mask_surface_blue)\n orange_mask = pygame.mask.from_surface(mask_surface_orange)\n normal_mask = pygame.mask.from_surface(mask_surface_normal)\n if mask_collide(blue_mask, player.mask, [0, 0], player.mask_pos):\n if any([player.going_up, player.going_down, player.going_left, player.going_right, player.falling]):\n damage(player)\n if mask_collide(orange_mask, player.mask, [0, 0], player.mask_pos):\n if not any([player.going_up, player.going_down, player.going_left, player.going_right, player.falling]):\n damage(player)\n if mask_collide(normal_mask, player.mask, [0, 0], player.mask_pos):\n damage(player)\n\n # 玩家\n player.show(screen, _boxpos, _boxsize)\n\n # 黑屏攻击\n if blackout:\n screen.fill(0x000000)\n\n \"\"\"将screen的图像加工后放入display\"\"\"\n if not FULL_SCREEN:\n rotated_screen = pygame.transform.rotate(screen, screen_angle)\n else:\n screen_rect = screen.get_rect()\n rotated_screen = pygame.transform.rotate(\n pygame.transform.scale(\n screen,\n (\n round(screen_rect.size[1] / screen_rect.size[0] * 1920),\n 1080\n )\n ),\n screen_angle\n )\n rotated_rect = rotated_screen.get_rect()\n if not FULL_SCREEN:\n rotated_rect.center = [SCREEN_SIZE[0] // 2, SCREEN_SIZE[1] // 2]\n else:\n rotated_rect.center = [960, 540]\n display.blit(rotated_screen,\n (rotated_rect.x + screen_offset[0],\n rotated_rect.y + screen_offset[1]))\n fps.tick(frames)\n pygame.display.update()\n time2 = time_.time()\n delta = time2 - time1\n\n if not restarting:\n ticks = 0\n heart_offset = [0, 0]\n while True:\n '''死后的'''\n pygame.mixer.music.stop()\n ticks += 1\n screen.fill((0, 0, 0, 255))\n if ticks >= 200:\n break\n \n if ticks >= 160:\n screen.blit(alive_img, player.rect)\n if ticks == 160:\n split_sound.play()\n \n elif ticks >= 100:\n screen.blit(dead_img,\n (player.rect.x + heart_offset[0],\n player.rect.y + heart_offset[1]))\n heart_offset = [random.randint(-2, 2), random.randint(-2, 2)]\n \n elif ticks >= 60:\n screen.blit(dead_img, player.rect)\n if ticks == 60:\n split_sound.play()\n \n else:\n screen.blit(alive_img, player.rect)\n \n if not FULL_SCREEN:\n rotated_screen = pygame.transform.rotate(screen, screen_angle)\n else:\n screen_rect = screen.get_rect()\n rotated_screen = pygame.transform.rotate(\n pygame.transform.scale(\n screen,\n (\n round(screen_rect.size[1] / screen_rect.size[0] * 1920),\n 1080\n )\n ),\n screen_angle\n )\n rotated_rect = rotated_screen.get_rect()\n if not FULL_SCREEN:\n rotated_rect.center = [SCREEN_SIZE[0] // 2, SCREEN_SIZE[1] // 2]\n else:\n rotated_rect.center = [960, 540]\n display.blit(rotated_screen,\n (rotated_rect.x + screen_offset[0],\n rotated_rect.y + screen_offset[1]))\n fps.tick(frames)\n pygame.display.update()\n",
"step-ids": [
16,
26,
28,
32,
47
]
}
|
[
16,
26,
28,
32,
47
] |
# Generated by Django 2.2.3 on 2019-07-11 22:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app1', '0002_property_details'),
]
operations = [
migrations.AlterField(
model_name='property_details',
name='flat_type',
field=models.CharField(choices=[('1', '1BHK'), ('2', '2BHK'), ('3', '3BHK')], max_length=20),
),
migrations.AlterField(
model_name='property_details',
name='possession',
field=models.CharField(choices=[('1', 'ready to move'), ('2', 'work on progress')], max_length=20),
),
migrations.AlterField(
model_name='property_details',
name='price_range',
field=models.CharField(choices=[('1', '$5000'), ('2', '$15,000'), ('3', '$25,000'), ('4', '$40,000'), ('5', '$50,000')], max_length=50),
),
]
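
# Note: with these choices the database stores the keys ('1', '2', ...); the
# human-readable labels come from Django's get_FOO_display(), e.g. a
# hypothetical instance `pd` would expose the label via pd.get_flat_type_display().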
|
normal
|
{
"blob_id": "8cdd7646dbf23259e160186f332b5cb02b67291b",
"index": 5121,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('app1', '0002_property_details')]\n operations = [migrations.AlterField(model_name='property_details', name\n ='flat_type', field=models.CharField(choices=[('1', '1BHK'), ('2',\n '2BHK'), ('3', '3BHK')], max_length=20)), migrations.AlterField(\n model_name='property_details', name='possession', field=models.\n CharField(choices=[('1', 'ready to move'), ('2', 'work on progress'\n )], max_length=20)), migrations.AlterField(model_name=\n 'property_details', name='price_range', field=models.CharField(\n choices=[('1', '$5000'), ('2', '$15,000'), ('3', '$25,000'), ('4',\n '$40,000'), ('5', '$50,000')], max_length=50))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('app1', '0002_property_details')]\n operations = [migrations.AlterField(model_name='property_details', name\n ='flat_type', field=models.CharField(choices=[('1', '1BHK'), ('2',\n '2BHK'), ('3', '3BHK')], max_length=20)), migrations.AlterField(\n model_name='property_details', name='possession', field=models.\n CharField(choices=[('1', 'ready to move'), ('2', 'work on progress'\n )], max_length=20)), migrations.AlterField(model_name=\n 'property_details', name='price_range', field=models.CharField(\n choices=[('1', '$5000'), ('2', '$15,000'), ('3', '$25,000'), ('4',\n '$40,000'), ('5', '$50,000')], max_length=50))]\n",
"step-5": "# Generated by Django 2.2.3 on 2019-07-11 22:04\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('app1', '0002_property_details'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='property_details',\n name='flat_type',\n field=models.CharField(choices=[('1', '1BHK'), ('2', '2BHK'), ('3', '3BHK')], max_length=20),\n ),\n migrations.AlterField(\n model_name='property_details',\n name='possession',\n field=models.CharField(choices=[('1', 'ready to move'), ('2', 'work on progress')], max_length=20),\n ),\n migrations.AlterField(\n model_name='property_details',\n name='price_range',\n field=models.CharField(choices=[('1', '$5000'), ('2', '$15,000'), ('3', '$25,000'), ('4', '$40,000'), ('5', '$50,000')], max_length=50),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
#1
def longest_substring(string1, string2):
    """Return the longest common substring of string1 and string2.

    mat[x, y] holds the length of the common substring that ends at
    string1[x] and string2[y] (the classic dynamic-programming table).
    """
    mat = np.zeros(shape=(len(string1), len(string2)))
    for x in range(len(string1)):
        for y in range(len(string2)):
            if string1[x] == string2[y]:
                if x == 0 or y == 0:
                    # a match in the first row/column starts a run of length 1
                    mat[x, y] = 1
                else:
                    # extend the run that ends at the previous character pair
                    mat[x, y] = mat[x - 1, y - 1] + 1
    # row index of the cell holding the longest run, and that run's length
    end_index = np.unravel_index(np.argmax(mat), mat.shape)[0]
    length = int(np.max(mat))
    return string1[end_index - length + 1:end_index + 1]
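
# Worked example (illustration only): for "jsanad" vs "anasc" the matches fill
# mat[2, 0] = 1 ('a'), mat[3, 1] = 2 ('an') and mat[4, 2] = 3 ('ana'); the
# maximum 3 sits at row index 4, so the function returns string1[2:5] == "ana".
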
if __name__ == '__main__':
assert longest_substring("jsanad","anasc") == "ana"
assert longest_substring("ilovebioinformatics","icantwaitformax") == "forma"
assert longest_substring("ironmansaregreat","triathlonforever") == "on"
assert longest_substring("ihatewalking","nobikenolife") == "i"
assert longest_substring("gofaster","govegan") == "go"
|
normal
|
{
"blob_id": "6bb7dafea73aff7aca9b0ddc1393e4db6fcf0151",
"index": 4828,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef longest_substring(string1, string2):\n mat = np.zeros(shape=(len(string1), len(string2)))\n for x in range(len(string1)):\n for y in range(len(string2)):\n if x == 0 or y == 0:\n if string1[x] == string2[y]:\n mat[x, y] = 1\n elif string1[x] == string2[y]:\n mat[x, y] = mat[x - 1, y - 1] + 1\n agmx = np.argmax(mat)\n iofagmx = np.unravel_index(agmx, mat.shape)\n numbofstr = int(np.max(mat))\n endstring = string1[iofagmx[0] - numbofstr + 1:iofagmx[0] + 1]\n return endstring\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef longest_substring(string1, string2):\n mat = np.zeros(shape=(len(string1), len(string2)))\n for x in range(len(string1)):\n for y in range(len(string2)):\n if x == 0 or y == 0:\n if string1[x] == string2[y]:\n mat[x, y] = 1\n elif string1[x] == string2[y]:\n mat[x, y] = mat[x - 1, y - 1] + 1\n agmx = np.argmax(mat)\n iofagmx = np.unravel_index(agmx, mat.shape)\n numbofstr = int(np.max(mat))\n endstring = string1[iofagmx[0] - numbofstr + 1:iofagmx[0] + 1]\n return endstring\n\n\nif __name__ == '__main__':\n assert longest_substring('jsanad', 'anasc') == 'ana'\n assert longest_substring('ilovebioinformatics', 'icantwaitformax'\n ) == 'forma'\n assert longest_substring('ironmansaregreat', 'triathlonforever') == 'on'\n assert longest_substring('ihatewalking', 'nobikenolife') == 'i'\n assert longest_substring('gofaster', 'govegan') == 'go'\n",
"step-4": "import numpy as np\n\n\ndef longest_substring(string1, string2):\n mat = np.zeros(shape=(len(string1), len(string2)))\n for x in range(len(string1)):\n for y in range(len(string2)):\n if x == 0 or y == 0:\n if string1[x] == string2[y]:\n mat[x, y] = 1\n elif string1[x] == string2[y]:\n mat[x, y] = mat[x - 1, y - 1] + 1\n agmx = np.argmax(mat)\n iofagmx = np.unravel_index(agmx, mat.shape)\n numbofstr = int(np.max(mat))\n endstring = string1[iofagmx[0] - numbofstr + 1:iofagmx[0] + 1]\n return endstring\n\n\nif __name__ == '__main__':\n assert longest_substring('jsanad', 'anasc') == 'ana'\n assert longest_substring('ilovebioinformatics', 'icantwaitformax'\n ) == 'forma'\n assert longest_substring('ironmansaregreat', 'triathlonforever') == 'on'\n assert longest_substring('ihatewalking', 'nobikenolife') == 'i'\n assert longest_substring('gofaster', 'govegan') == 'go'\n",
"step-5": "import numpy as np\n#1\ndef longest_substring(string1,string2):\n mat=np.zeros(shape=(len(string1),len(string2)))\n for x in range(len(string1)):\n for y in range(len(string2)):\n if x==0 or y==0:\n if string1[x]==string2[y]:\n mat[x,y]=1\n else:\n if string1[x]==string2[y]:\n mat[x,y]=mat[x-1,y-1]+1\n agmx=np.argmax(mat)\n iofagmx=np.unravel_index(agmx,mat.shape)\n numbofstr=int(np.max(mat))\n endstring=string1[iofagmx[0]-numbofstr+1:iofagmx[0]+1]\n return endstring\n \nif __name__ == '__main__':\n assert longest_substring(\"jsanad\",\"anasc\") == \"ana\"\n assert longest_substring(\"ilovebioinformatics\",\"icantwaitformax\") == \"forma\"\n assert longest_substring(\"ironmansaregreat\",\"triathlonforever\") == \"on\"\n assert longest_substring(\"ihatewalking\",\"nobikenolife\") == \"i\"\n assert longest_substring(\"gofaster\",\"govegan\") == \"go\" \n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import sqlite3
from typing import Any
from direct_geocoder import get_table_columns
from reverse_geocoder import is_point_in_polygon
from utils import zip_table_columns_with_table_rows, get_average_point
def get_organizations_by_address_border(city: str,
nodes: list[tuple[float, float]]) \
-> list[dict[str, Any]]:
result = []
    radius = 0.0025  # half-size of the search window around the polygon, in degrees
with sqlite3.connect(os.path.join('db', f'{city}.db')) as connection:
cursor = connection.cursor()
lat, lon = get_average_point(nodes)
        # bounding box around the polygon's average point: a cheap pre-filter
        # before the exact point-in-polygon check at the end of the function
        south, north = lat - radius, lat + radius
        west, east = lon - radius, lon + radius
        # select named organizations (shops/amenities), skipping road nodes
        request_template = "SELECT * FROM nodes WHERE " \
                           "(lat BETWEEN ? AND ?) AND " \
                           "(lon BETWEEN ? AND ?) AND " \
                           "(highway IS NULL) AND " \
                           "(NOT(name IS NULL) OR " \
                           "NOT(shop IS NULL) OR " \
                           "NOT(amenity IS NULL))"
organizations_within_radius = []
nodes_columns = get_table_columns(cursor, 'nodes')
ways_columns = get_table_columns(cursor, 'ways')
cursor.execute(request_template, (south, north, west, east))
organizations_within_radius += zip_table_columns_with_table_rows(
nodes_columns,
cursor.fetchall())
request_template = request_template.replace('nodes', 'ways')
cursor.execute(request_template, (south, north, west, east))
organizations_within_radius += zip_table_columns_with_table_rows(
ways_columns,
cursor.fetchall())
for organization in organizations_within_radius:
if is_point_in_polygon((organization['lat'], organization['lon']),
nodes):
result.append(organization)
return result
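

# Hypothetical usage sketch (the city name and polygon below are made up; a
# matching SQLite file at db/<city>.db, built elsewhere in the project, is
# assumed):
#
#     border = [(56.83, 60.59), (56.84, 60.59), (56.84, 60.61), (56.83, 60.61)]
#     for org in get_organizations_by_address_border('yekaterinburg', border):
#         print(org.get('name'), org.get('shop'), org.get('amenity'))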
|
normal
|
{
"blob_id": "79f945694f853e5886b590020bb661ecd418510d",
"index": 4567,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_organizations_by_address_border(city: str, nodes: list[tuple[float,\n float]]) ->list[dict[str, Any]]:\n result = []\n radius = 0.0025\n with sqlite3.connect(os.path.join('db', f'{city}.db')) as connection:\n cursor = connection.cursor()\n lat, lon = get_average_point(nodes)\n south, north = lat - radius, lat + radius\n west, east = lon - radius, lon + radius\n request_template = (\n f'SELECT * FROM nodes WHERE (lat BETWEEN ? AND ?) AND (lon BETWEEN ? AND ?) AND (highway IS NULL) AND(NOT(name IS NULL) OR NOT(shop IS NULL) OR NOT(amenity IS NULL))'\n )\n organizations_within_radius = []\n nodes_columns = get_table_columns(cursor, 'nodes')\n ways_columns = get_table_columns(cursor, 'ways')\n cursor.execute(request_template, (south, north, west, east))\n organizations_within_radius += zip_table_columns_with_table_rows(\n nodes_columns, cursor.fetchall())\n request_template = request_template.replace('nodes', 'ways')\n cursor.execute(request_template, (south, north, west, east))\n organizations_within_radius += zip_table_columns_with_table_rows(\n ways_columns, cursor.fetchall())\n for organization in organizations_within_radius:\n if is_point_in_polygon((organization['lat'], organization['lon']),\n nodes):\n result.append(organization)\n return result\n",
"step-3": "import os\nimport sqlite3\nfrom typing import Any\nfrom direct_geocoder import get_table_columns\nfrom reverse_geocoder import is_point_in_polygon\nfrom utils import zip_table_columns_with_table_rows, get_average_point\n\n\ndef get_organizations_by_address_border(city: str, nodes: list[tuple[float,\n float]]) ->list[dict[str, Any]]:\n result = []\n radius = 0.0025\n with sqlite3.connect(os.path.join('db', f'{city}.db')) as connection:\n cursor = connection.cursor()\n lat, lon = get_average_point(nodes)\n south, north = lat - radius, lat + radius\n west, east = lon - radius, lon + radius\n request_template = (\n f'SELECT * FROM nodes WHERE (lat BETWEEN ? AND ?) AND (lon BETWEEN ? AND ?) AND (highway IS NULL) AND(NOT(name IS NULL) OR NOT(shop IS NULL) OR NOT(amenity IS NULL))'\n )\n organizations_within_radius = []\n nodes_columns = get_table_columns(cursor, 'nodes')\n ways_columns = get_table_columns(cursor, 'ways')\n cursor.execute(request_template, (south, north, west, east))\n organizations_within_radius += zip_table_columns_with_table_rows(\n nodes_columns, cursor.fetchall())\n request_template = request_template.replace('nodes', 'ways')\n cursor.execute(request_template, (south, north, west, east))\n organizations_within_radius += zip_table_columns_with_table_rows(\n ways_columns, cursor.fetchall())\n for organization in organizations_within_radius:\n if is_point_in_polygon((organization['lat'], organization['lon']),\n nodes):\n result.append(organization)\n return result\n",
"step-4": "import os\nimport sqlite3\nfrom typing import Any\n\nfrom direct_geocoder import get_table_columns\nfrom reverse_geocoder import is_point_in_polygon\nfrom utils import zip_table_columns_with_table_rows, get_average_point\n\n\ndef get_organizations_by_address_border(city: str,\n nodes: list[tuple[float, float]]) \\\n -> list[dict[str, Any]]:\n result = []\n radius = 0.0025\n with sqlite3.connect(os.path.join('db', f'{city}.db')) as connection:\n cursor = connection.cursor()\n lat, lon = get_average_point(nodes)\n south, north = lat - radius, lat + radius\n west, east = lon - radius, lon + radius\n request_template = f\"SELECT * FROM nodes WHERE \" \\\n f\"(lat BETWEEN ? AND ?) AND \" \\\n f\"(lon BETWEEN ? AND ?) AND \" \\\n f\"(highway IS NULL) AND\" \\\n f\"(NOT(name IS NULL) OR \" \\\n f\"NOT(shop IS NULL) OR \" \\\n f\"NOT(amenity IS NULL))\"\n organizations_within_radius = []\n nodes_columns = get_table_columns(cursor, 'nodes')\n ways_columns = get_table_columns(cursor, 'ways')\n cursor.execute(request_template, (south, north, west, east))\n organizations_within_radius += zip_table_columns_with_table_rows(\n nodes_columns,\n cursor.fetchall())\n request_template = request_template.replace('nodes', 'ways')\n cursor.execute(request_template, (south, north, west, east))\n organizations_within_radius += zip_table_columns_with_table_rows(\n ways_columns,\n cursor.fetchall())\n for organization in organizations_within_radius:\n if is_point_in_polygon((organization['lat'], organization['lon']),\n nodes):\n result.append(organization)\n return result\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.db import models
from django.contrib.auth.models import User, Group
from userena.models import UserenaBaseProfile
from django.db.models.signals import post_save
from tastypie.models import create_api_key
class UserProfile(UserenaBaseProfile):
# user reference
user = models.OneToOneField(User)
facebook_id = models.CharField(max_length = 128, blank = True, null = True)
class Meta:
permissions = (
('change_profile', 'Change profile'),
('view_profile', 'View profile'),
('delete_profile', 'Delete profile'),
)
def create_user_profile(sender, instance, created, **kwargs):
    """
    Create the user profile and set the permissions
    """
    # skip the anonymous user (userena/guardian give it a negative pk)
    if created and instance.pk >= 0:
        UserProfile.objects.create(user=instance)

        # add the default group, if it has been created already
        try:
            default_group = Group.objects.get(name = "default_users")
            instance.groups.add(default_group)
        except Group.DoesNotExist:
            pass
post_save.connect(create_user_profile, sender=User)
# generate api key for the user when the user is created
post_save.connect(create_api_key, sender=User)
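
# Usage sketch (illustration only; assumes a configured Django project):
# saving a new User fires both post_save handlers above, so the profile and
# the tastypie API key appear automatically:
#
#     user = User.objects.create_user('alice', 'alice@example.com', 's3cret')
#     user.userprofile   # created by create_user_profile
#     user.api_key.key   # created by tastypie's create_api_key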
|
normal
|
{
"blob_id": "6e6f153857879da625f57f0382f1997fcae4f6c8",
"index": 6041,
"step-1": "<mask token>\n\n\nclass UserProfile(UserenaBaseProfile):\n user = models.OneToOneField(User)\n facebook_id = models.CharField(max_length=128, blank=True, null=True)\n\n\n class Meta:\n permissions = ('change_profile', 'Change profile'), ('view_profile',\n 'View profile'), ('delete_profile', 'Delete profile')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass UserProfile(UserenaBaseProfile):\n user = models.OneToOneField(User)\n facebook_id = models.CharField(max_length=128, blank=True, null=True)\n\n\n class Meta:\n permissions = ('change_profile', 'Change profile'), ('view_profile',\n 'View profile'), ('delete_profile', 'Delete profile')\n\n\ndef create_user_profile(sender, instance, created, **kwargs):\n \"\"\"\n Create user profie and set the permissions\n \"\"\"\n if created and instance.pk >= 0:\n UserProfile.objects.create(user=instance)\n try:\n default_group = Group.objects.get(name='default_users')\n instance.groups.add(default_group)\n except:\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass UserProfile(UserenaBaseProfile):\n user = models.OneToOneField(User)\n facebook_id = models.CharField(max_length=128, blank=True, null=True)\n\n\n class Meta:\n permissions = ('change_profile', 'Change profile'), ('view_profile',\n 'View profile'), ('delete_profile', 'Delete profile')\n\n\ndef create_user_profile(sender, instance, created, **kwargs):\n \"\"\"\n Create user profie and set the permissions\n \"\"\"\n if created and instance.pk >= 0:\n UserProfile.objects.create(user=instance)\n try:\n default_group = Group.objects.get(name='default_users')\n instance.groups.add(default_group)\n except:\n pass\n\n\npost_save.connect(create_user_profile, sender=User)\npost_save.connect(create_api_key, sender=User)\n",
"step-4": "from django.db import models\nfrom django.contrib.auth.models import User, Group\nfrom userena.models import UserenaBaseProfile\nfrom django.db.models.signals import post_save\nfrom tastypie.models import create_api_key\n\n\nclass UserProfile(UserenaBaseProfile):\n user = models.OneToOneField(User)\n facebook_id = models.CharField(max_length=128, blank=True, null=True)\n\n\n class Meta:\n permissions = ('change_profile', 'Change profile'), ('view_profile',\n 'View profile'), ('delete_profile', 'Delete profile')\n\n\ndef create_user_profile(sender, instance, created, **kwargs):\n \"\"\"\n Create user profie and set the permissions\n \"\"\"\n if created and instance.pk >= 0:\n UserProfile.objects.create(user=instance)\n try:\n default_group = Group.objects.get(name='default_users')\n instance.groups.add(default_group)\n except:\n pass\n\n\npost_save.connect(create_user_profile, sender=User)\npost_save.connect(create_api_key, sender=User)\n",
"step-5": "from django.db import models\nfrom django.contrib.auth.models import User, Group\nfrom userena.models import UserenaBaseProfile\nfrom django.db.models.signals import post_save\nfrom tastypie.models import create_api_key\n\nclass UserProfile(UserenaBaseProfile):\n # user reference\n user = models.OneToOneField(User)\n \n facebook_id = models.CharField(max_length = 128, blank = True, null = True)\n \n class Meta:\n permissions = (\n ('change_profile', 'Change profile'),\n ('view_profile', 'View profile'),\n ('delete_profile', 'Delete profile'),\n )\n \ndef create_user_profile(sender, instance, created, **kwargs):\n \"\"\"\n Create user profie and set the permissions\n \"\"\"\n if created and instance.pk >= 0:\n UserProfile.objects.create(user=instance)\n \n # get default group, but not for anonymous\n try:\n default_group = Group.objects.get(name = \"default_users\")\n instance.groups.add(default_group)\n except:\n pass\n \npost_save.connect(create_user_profile, sender=User)\n\n# generate api key for the user when the user is created\npost_save.connect(create_api_key, sender=User)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import os
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import copy, get, replace_in_file, rmdir
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration
required_conan_version = ">=1.57.0"
class RuyConan(ConanFile):
name = "ruy"
description = "ruy is a matrix multiplication library.\n" \
"Its focus is to cover the matrix multiplication needs of neural network inference engines\n"
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/google/ruy"
license = "Apache-2.0"
topics = ("matrix", "multiplication", "neural", "network", "AI", "tensorflow")
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
}
@property
def _minimum_compilers_version(self):
return {
"Visual Studio": "15",
"msvc": "191",
"gcc": "5",
"clang": "3.4",
"apple-clang": "5.1",
}
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, 14)
minimum_version = self._minimum_compilers_version.get(str(self.settings.compiler), False)
if not minimum_version:
self.output.warning("Compiler is unknown. Assuming it supports C++14.")
elif Version(self.settings.compiler.version) < minimum_version:
raise ConanInvalidConfiguration("Build requires support for C++14. Minimum version for {} is {}"
.format(str(self.settings.compiler), minimum_version))
if str(self.settings.compiler) == "clang" and Version(self.settings.compiler.version) <= 5 and self.settings.build_type == "Debug":
raise ConanInvalidConfiguration("Debug builds are not supported on older versions of Clang (<=5)")
def config_options(self):
if self.settings.os == "Windows":
self.options.rm_safe("fPIC")
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def requirements(self):
self.requires("cpuinfo/cci.20220228")
def layout(self):
cmake_layout(self, src_folder="src")
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.cache_variables["RUY_MINIMAL_BUILD"] = True
tc.cache_variables["RUY_FIND_CPUINFO"] = True
# Ruy public headers don't have API decorators,
# export everything to support shared libraries on Windows
tc.variables["CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS"] = True
tc.generate()
deps = CMakeDeps(self)
deps.generate()
def _patch_sources(self):
cmakelists = os.path.join(self.source_folder, "CMakeLists.txt")
patches = {
#Remove the invocation after project(), see https://github.com/google/ruy/issues/328
"cmake_minimum_required(VERSION 3.13)": "",
# Ensure `cmake_minimum_required` is called first
"# Copyright 2021 Google LLC": "# Copyright 2021 Google LLC\ncmake_minimum_required(VERSION 3.13)",
}
for pattern, patch in patches.items():
replace_in_file(self, cmakelists, pattern, patch)
# 1. Allow Shared builds
replace_in_file(self, os.path.join(self.source_folder, "cmake", "ruy_cc_library.cmake"),
"add_library(${_NAME} STATIC",
"add_library(${_NAME}"
)
def build(self):
self._patch_sources()
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
copy(self, "LICENSE", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
def package_info(self):
self.cpp_info.libs = ["ruy_frontend",
"ruy_context",
"ruy_trmul",
"ruy_thread_pool",
"ruy_blocking_counter",
"ruy_prepare_packed_matrices",
"ruy_ctx",
"ruy_allocator",
"ruy_prepacked_cache",
"ruy_tune",
"ruy_wait",
"ruy_apply_multiplier",
"ruy_block_map",
"ruy_context_get_ctx",
"ruy_cpuinfo",
"ruy_denormal",
"ruy_have_built_path_for_avx",
"ruy_have_built_path_for_avx2_fma",
"ruy_have_built_path_for_avx512",
"ruy_kernel_arm",
"ruy_kernel_avx",
"ruy_kernel_avx2_fma",
"ruy_kernel_avx512",
"ruy_pack_arm",
"ruy_pack_avx",
"ruy_pack_avx2_fma",
"ruy_pack_avx512",
"ruy_system_aligned_alloc",
"ruy_profiler_instrumentation",
"ruy_profiler_profiler"
]
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.system_libs.extend(["m", "pthread"])
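# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the recipe above): assuming a standard
# conan-center layout where conandata.yml supplies the source URL for the
# requested version, the package could be built and consumed roughly like
# this. The version "3.0" is a placeholder, not taken from the recipe.
#
#   conan create . --version=3.0 -o "ruy/*:shared=True"
#
# A minimal Conan 2 consumer would then declare:
#
#   from conan import ConanFile
#
#   class RuyConsumer(ConanFile):
#       settings = "os", "arch", "compiler", "build_type"
#       generators = "CMakeDeps", "CMakeToolchain"
#       def requirements(self):
#           self.requires("ruy/3.0")  # placeholder version
#
# and link against the split static libraries listed in package_info() above.
# ---------------------------------------------------------------------------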
|
normal
|
{
"blob_id": "fe1c499efe492dbd4f5c9b99bd6339c503c7902b",
"index": 5766,
"step-1": "<mask token>\n\n\nclass RuyConan(ConanFile):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def configure(self):\n if self.options.shared:\n self.options.rm_safe('fPIC')\n\n def requirements(self):\n self.requires('cpuinfo/cci.20220228')\n <mask token>\n\n def source(self):\n get(self, **self.conan_data['sources'][self.version], strip_root=True)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RuyConan(ConanFile):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def _minimum_compilers_version(self):\n return {'Visual Studio': '15', 'msvc': '191', 'gcc': '5', 'clang':\n '3.4', 'apple-clang': '5.1'}\n\n def validate(self):\n if self.settings.compiler.get_safe('cppstd'):\n check_min_cppstd(self, 14)\n minimum_version = self._minimum_compilers_version.get(str(self.\n settings.compiler), False)\n if not minimum_version:\n self.output.warning(\n 'Compiler is unknown. Assuming it supports C++14.')\n elif Version(self.settings.compiler.version) < minimum_version:\n raise ConanInvalidConfiguration(\n 'Build requires support for C++14. Minimum version for {} is {}'\n .format(str(self.settings.compiler), minimum_version))\n if str(self.settings.compiler) == 'clang' and Version(self.settings\n .compiler.version) <= 5 and self.settings.build_type == 'Debug':\n raise ConanInvalidConfiguration(\n 'Debug builds are not supported on older versions of Clang (<=5)'\n )\n\n def config_options(self):\n if self.settings.os == 'Windows':\n self.options.rm_safe('fPIC')\n\n def configure(self):\n if self.options.shared:\n self.options.rm_safe('fPIC')\n\n def requirements(self):\n self.requires('cpuinfo/cci.20220228')\n <mask token>\n\n def source(self):\n get(self, **self.conan_data['sources'][self.version], strip_root=True)\n\n def generate(self):\n tc = CMakeToolchain(self)\n tc.cache_variables['RUY_MINIMAL_BUILD'] = True\n tc.cache_variables['RUY_FIND_CPUINFO'] = True\n tc.variables['CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS'] = True\n tc.generate()\n deps = CMakeDeps(self)\n deps.generate()\n\n def _patch_sources(self):\n cmakelists = os.path.join(self.source_folder, 'CMakeLists.txt')\n patches = {'cmake_minimum_required(VERSION 3.13)': '',\n '# Copyright 2021 Google LLC':\n \"\"\"# Copyright 2021 Google LLC\ncmake_minimum_required(VERSION 3.13)\"\"\"\n }\n for pattern, patch in patches.items():\n replace_in_file(self, cmakelists, pattern, patch)\n replace_in_file(self, os.path.join(self.source_folder, 'cmake',\n 'ruy_cc_library.cmake'), 'add_library(${_NAME} STATIC',\n 'add_library(${_NAME}')\n\n def build(self):\n self._patch_sources()\n cmake = CMake(self)\n cmake.configure()\n cmake.build()\n\n def package(self):\n cmake = CMake(self)\n cmake.install()\n copy(self, 'LICENSE', dst=os.path.join(self.package_folder,\n 'licenses'), src=self.source_folder)\n rmdir(self, os.path.join(self.package_folder, 'lib', 'cmake'))\n\n def package_info(self):\n self.cpp_info.libs = ['ruy_frontend', 'ruy_context', 'ruy_trmul',\n 'ruy_thread_pool', 'ruy_blocking_counter',\n 'ruy_prepare_packed_matrices', 'ruy_ctx', 'ruy_allocator',\n 'ruy_prepacked_cache', 'ruy_tune', 'ruy_wait',\n 'ruy_apply_multiplier', 'ruy_block_map', 'ruy_context_get_ctx',\n 'ruy_cpuinfo', 'ruy_denormal', 'ruy_have_built_path_for_avx',\n 'ruy_have_built_path_for_avx2_fma',\n 'ruy_have_built_path_for_avx512', 'ruy_kernel_arm',\n 'ruy_kernel_avx', 'ruy_kernel_avx2_fma', 'ruy_kernel_avx512',\n 'ruy_pack_arm', 'ruy_pack_avx', 'ruy_pack_avx2_fma',\n 'ruy_pack_avx512', 'ruy_system_aligned_alloc',\n 'ruy_profiler_instrumentation', 'ruy_profiler_profiler']\n if self.settings.os in ['Linux', 'FreeBSD']:\n self.cpp_info.system_libs.extend(['m', 'pthread'])\n",
"step-3": "<mask token>\n\n\nclass RuyConan(ConanFile):\n name = 'ruy'\n description = \"\"\"ruy is a matrix multiplication library.\nIts focus is to cover the matrix multiplication needs of neural network inference engines\n\"\"\"\n url = 'https://github.com/conan-io/conan-center-index'\n homepage = 'https://github.com/google/ruy'\n license = 'Apache-2.0'\n topics = ('matrix', 'multiplication', 'neural', 'network', 'AI',\n 'tensorflow')\n settings = 'os', 'arch', 'compiler', 'build_type'\n options = {'shared': [True, False], 'fPIC': [True, False]}\n default_options = {'shared': False, 'fPIC': True}\n\n @property\n def _minimum_compilers_version(self):\n return {'Visual Studio': '15', 'msvc': '191', 'gcc': '5', 'clang':\n '3.4', 'apple-clang': '5.1'}\n\n def validate(self):\n if self.settings.compiler.get_safe('cppstd'):\n check_min_cppstd(self, 14)\n minimum_version = self._minimum_compilers_version.get(str(self.\n settings.compiler), False)\n if not minimum_version:\n self.output.warning(\n 'Compiler is unknown. Assuming it supports C++14.')\n elif Version(self.settings.compiler.version) < minimum_version:\n raise ConanInvalidConfiguration(\n 'Build requires support for C++14. Minimum version for {} is {}'\n .format(str(self.settings.compiler), minimum_version))\n if str(self.settings.compiler) == 'clang' and Version(self.settings\n .compiler.version) <= 5 and self.settings.build_type == 'Debug':\n raise ConanInvalidConfiguration(\n 'Debug builds are not supported on older versions of Clang (<=5)'\n )\n\n def config_options(self):\n if self.settings.os == 'Windows':\n self.options.rm_safe('fPIC')\n\n def configure(self):\n if self.options.shared:\n self.options.rm_safe('fPIC')\n\n def requirements(self):\n self.requires('cpuinfo/cci.20220228')\n\n def layout(self):\n cmake_layout(self, src_folder='src')\n\n def source(self):\n get(self, **self.conan_data['sources'][self.version], strip_root=True)\n\n def generate(self):\n tc = CMakeToolchain(self)\n tc.cache_variables['RUY_MINIMAL_BUILD'] = True\n tc.cache_variables['RUY_FIND_CPUINFO'] = True\n tc.variables['CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS'] = True\n tc.generate()\n deps = CMakeDeps(self)\n deps.generate()\n\n def _patch_sources(self):\n cmakelists = os.path.join(self.source_folder, 'CMakeLists.txt')\n patches = {'cmake_minimum_required(VERSION 3.13)': '',\n '# Copyright 2021 Google LLC':\n \"\"\"# Copyright 2021 Google LLC\ncmake_minimum_required(VERSION 3.13)\"\"\"\n }\n for pattern, patch in patches.items():\n replace_in_file(self, cmakelists, pattern, patch)\n replace_in_file(self, os.path.join(self.source_folder, 'cmake',\n 'ruy_cc_library.cmake'), 'add_library(${_NAME} STATIC',\n 'add_library(${_NAME}')\n\n def build(self):\n self._patch_sources()\n cmake = CMake(self)\n cmake.configure()\n cmake.build()\n\n def package(self):\n cmake = CMake(self)\n cmake.install()\n copy(self, 'LICENSE', dst=os.path.join(self.package_folder,\n 'licenses'), src=self.source_folder)\n rmdir(self, os.path.join(self.package_folder, 'lib', 'cmake'))\n\n def package_info(self):\n self.cpp_info.libs = ['ruy_frontend', 'ruy_context', 'ruy_trmul',\n 'ruy_thread_pool', 'ruy_blocking_counter',\n 'ruy_prepare_packed_matrices', 'ruy_ctx', 'ruy_allocator',\n 'ruy_prepacked_cache', 'ruy_tune', 'ruy_wait',\n 'ruy_apply_multiplier', 'ruy_block_map', 'ruy_context_get_ctx',\n 'ruy_cpuinfo', 'ruy_denormal', 'ruy_have_built_path_for_avx',\n 'ruy_have_built_path_for_avx2_fma',\n 'ruy_have_built_path_for_avx512', 'ruy_kernel_arm',\n 'ruy_kernel_avx', 
'ruy_kernel_avx2_fma', 'ruy_kernel_avx512',\n 'ruy_pack_arm', 'ruy_pack_avx', 'ruy_pack_avx2_fma',\n 'ruy_pack_avx512', 'ruy_system_aligned_alloc',\n 'ruy_profiler_instrumentation', 'ruy_profiler_profiler']\n if self.settings.os in ['Linux', 'FreeBSD']:\n self.cpp_info.system_libs.extend(['m', 'pthread'])\n",
"step-4": "<mask token>\nrequired_conan_version = '>=1.57.0'\n\n\nclass RuyConan(ConanFile):\n name = 'ruy'\n description = \"\"\"ruy is a matrix multiplication library.\nIts focus is to cover the matrix multiplication needs of neural network inference engines\n\"\"\"\n url = 'https://github.com/conan-io/conan-center-index'\n homepage = 'https://github.com/google/ruy'\n license = 'Apache-2.0'\n topics = ('matrix', 'multiplication', 'neural', 'network', 'AI',\n 'tensorflow')\n settings = 'os', 'arch', 'compiler', 'build_type'\n options = {'shared': [True, False], 'fPIC': [True, False]}\n default_options = {'shared': False, 'fPIC': True}\n\n @property\n def _minimum_compilers_version(self):\n return {'Visual Studio': '15', 'msvc': '191', 'gcc': '5', 'clang':\n '3.4', 'apple-clang': '5.1'}\n\n def validate(self):\n if self.settings.compiler.get_safe('cppstd'):\n check_min_cppstd(self, 14)\n minimum_version = self._minimum_compilers_version.get(str(self.\n settings.compiler), False)\n if not minimum_version:\n self.output.warning(\n 'Compiler is unknown. Assuming it supports C++14.')\n elif Version(self.settings.compiler.version) < minimum_version:\n raise ConanInvalidConfiguration(\n 'Build requires support for C++14. Minimum version for {} is {}'\n .format(str(self.settings.compiler), minimum_version))\n if str(self.settings.compiler) == 'clang' and Version(self.settings\n .compiler.version) <= 5 and self.settings.build_type == 'Debug':\n raise ConanInvalidConfiguration(\n 'Debug builds are not supported on older versions of Clang (<=5)'\n )\n\n def config_options(self):\n if self.settings.os == 'Windows':\n self.options.rm_safe('fPIC')\n\n def configure(self):\n if self.options.shared:\n self.options.rm_safe('fPIC')\n\n def requirements(self):\n self.requires('cpuinfo/cci.20220228')\n\n def layout(self):\n cmake_layout(self, src_folder='src')\n\n def source(self):\n get(self, **self.conan_data['sources'][self.version], strip_root=True)\n\n def generate(self):\n tc = CMakeToolchain(self)\n tc.cache_variables['RUY_MINIMAL_BUILD'] = True\n tc.cache_variables['RUY_FIND_CPUINFO'] = True\n tc.variables['CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS'] = True\n tc.generate()\n deps = CMakeDeps(self)\n deps.generate()\n\n def _patch_sources(self):\n cmakelists = os.path.join(self.source_folder, 'CMakeLists.txt')\n patches = {'cmake_minimum_required(VERSION 3.13)': '',\n '# Copyright 2021 Google LLC':\n \"\"\"# Copyright 2021 Google LLC\ncmake_minimum_required(VERSION 3.13)\"\"\"\n }\n for pattern, patch in patches.items():\n replace_in_file(self, cmakelists, pattern, patch)\n replace_in_file(self, os.path.join(self.source_folder, 'cmake',\n 'ruy_cc_library.cmake'), 'add_library(${_NAME} STATIC',\n 'add_library(${_NAME}')\n\n def build(self):\n self._patch_sources()\n cmake = CMake(self)\n cmake.configure()\n cmake.build()\n\n def package(self):\n cmake = CMake(self)\n cmake.install()\n copy(self, 'LICENSE', dst=os.path.join(self.package_folder,\n 'licenses'), src=self.source_folder)\n rmdir(self, os.path.join(self.package_folder, 'lib', 'cmake'))\n\n def package_info(self):\n self.cpp_info.libs = ['ruy_frontend', 'ruy_context', 'ruy_trmul',\n 'ruy_thread_pool', 'ruy_blocking_counter',\n 'ruy_prepare_packed_matrices', 'ruy_ctx', 'ruy_allocator',\n 'ruy_prepacked_cache', 'ruy_tune', 'ruy_wait',\n 'ruy_apply_multiplier', 'ruy_block_map', 'ruy_context_get_ctx',\n 'ruy_cpuinfo', 'ruy_denormal', 'ruy_have_built_path_for_avx',\n 'ruy_have_built_path_for_avx2_fma',\n 'ruy_have_built_path_for_avx512', 'ruy_kernel_arm',\n 
'ruy_kernel_avx', 'ruy_kernel_avx2_fma', 'ruy_kernel_avx512',\n 'ruy_pack_arm', 'ruy_pack_avx', 'ruy_pack_avx2_fma',\n 'ruy_pack_avx512', 'ruy_system_aligned_alloc',\n 'ruy_profiler_instrumentation', 'ruy_profiler_profiler']\n if self.settings.os in ['Linux', 'FreeBSD']:\n self.cpp_info.system_libs.extend(['m', 'pthread'])\n",
"step-5": "import os\nfrom conan import ConanFile\nfrom conan.tools.build import check_min_cppstd\nfrom conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout\nfrom conan.tools.files import copy, get, replace_in_file, rmdir\nfrom conan.tools.scm import Version\nfrom conan.errors import ConanInvalidConfiguration\n\nrequired_conan_version = \">=1.57.0\"\n\n\nclass RuyConan(ConanFile):\n name = \"ruy\"\n description = \"ruy is a matrix multiplication library.\\n\" \\\n \"Its focus is to cover the matrix multiplication needs of neural network inference engines\\n\"\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://github.com/google/ruy\"\n license = \"Apache-2.0\"\n topics = (\"matrix\", \"multiplication\", \"neural\", \"network\", \"AI\", \"tensorflow\")\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n options = {\n \"shared\": [True, False],\n \"fPIC\": [True, False],\n }\n default_options = {\n \"shared\": False,\n \"fPIC\": True,\n }\n\n @property\n def _minimum_compilers_version(self):\n return {\n \"Visual Studio\": \"15\",\n \"msvc\": \"191\", \n \"gcc\": \"5\",\n \"clang\": \"3.4\",\n \"apple-clang\": \"5.1\",\n }\n\n def validate(self):\n if self.settings.compiler.get_safe(\"cppstd\"):\n check_min_cppstd(self, 14)\n\n minimum_version = self._minimum_compilers_version.get(str(self.settings.compiler), False)\n if not minimum_version:\n self.output.warning(\"Compiler is unknown. Assuming it supports C++14.\")\n elif Version(self.settings.compiler.version) < minimum_version:\n raise ConanInvalidConfiguration(\"Build requires support for C++14. Minimum version for {} is {}\"\n .format(str(self.settings.compiler), minimum_version))\n\n if str(self.settings.compiler) == \"clang\" and Version(self.settings.compiler.version) <= 5 and self.settings.build_type == \"Debug\":\n raise ConanInvalidConfiguration(\"Debug builds are not supported on older versions of Clang (<=5)\")\n\n def config_options(self):\n if self.settings.os == \"Windows\":\n self.options.rm_safe(\"fPIC\")\n\n def configure(self):\n if self.options.shared:\n self.options.rm_safe(\"fPIC\")\n\n def requirements(self):\n self.requires(\"cpuinfo/cci.20220228\")\n\n def layout(self):\n cmake_layout(self, src_folder=\"src\")\n\n def source(self):\n get(self, **self.conan_data[\"sources\"][self.version], strip_root=True)\n\n def generate(self):\n tc = CMakeToolchain(self)\n tc.cache_variables[\"RUY_MINIMAL_BUILD\"] = True\n tc.cache_variables[\"RUY_FIND_CPUINFO\"] = True\n # Ruy public headers don't have API decorators,\n # export everything to support shared libraries on Windows\n tc.variables[\"CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS\"] = True\n tc.generate()\n\n deps = CMakeDeps(self)\n deps.generate()\n\n def _patch_sources(self):\n cmakelists = os.path.join(self.source_folder, \"CMakeLists.txt\")\n patches = {\n #Remove the invocation after project(), see https://github.com/google/ruy/issues/328\n \"cmake_minimum_required(VERSION 3.13)\": \"\",\n # Ensure `cmake_minimum_required` is called first \n \"# Copyright 2021 Google LLC\": \"# Copyright 2021 Google LLC\\ncmake_minimum_required(VERSION 3.13)\", \n }\n\n for pattern, patch in patches.items():\n replace_in_file(self, cmakelists, pattern, patch)\n\n # 1. 
Allow Shared builds\n replace_in_file(self, os.path.join(self.source_folder, \"cmake\", \"ruy_cc_library.cmake\"),\n \"add_library(${_NAME} STATIC\",\n \"add_library(${_NAME}\"\n )\n\n def build(self):\n self._patch_sources()\n cmake = CMake(self)\n cmake.configure()\n cmake.build()\n\n def package(self):\n cmake = CMake(self)\n cmake.install()\n copy(self, \"LICENSE\", dst=os.path.join(self.package_folder, \"licenses\"), src=self.source_folder)\n rmdir(self, os.path.join(self.package_folder, \"lib\", \"cmake\"))\n\n def package_info(self):\n self.cpp_info.libs = [\"ruy_frontend\",\n \"ruy_context\",\n \"ruy_trmul\",\n \"ruy_thread_pool\",\n \"ruy_blocking_counter\",\n \"ruy_prepare_packed_matrices\",\n \"ruy_ctx\",\n \"ruy_allocator\",\n \"ruy_prepacked_cache\",\n \"ruy_tune\",\n \"ruy_wait\",\n \"ruy_apply_multiplier\",\n \"ruy_block_map\",\n \"ruy_context_get_ctx\",\n \"ruy_cpuinfo\",\n \"ruy_denormal\",\n \"ruy_have_built_path_for_avx\",\n \"ruy_have_built_path_for_avx2_fma\",\n \"ruy_have_built_path_for_avx512\",\n \"ruy_kernel_arm\",\n \"ruy_kernel_avx\",\n \"ruy_kernel_avx2_fma\",\n \"ruy_kernel_avx512\",\n \"ruy_pack_arm\",\n \"ruy_pack_avx\",\n \"ruy_pack_avx2_fma\",\n \"ruy_pack_avx512\",\n \"ruy_system_aligned_alloc\",\n \"ruy_profiler_instrumentation\",\n \"ruy_profiler_profiler\"\n ]\n if self.settings.os in [\"Linux\", \"FreeBSD\"]:\n self.cpp_info.system_libs.extend([\"m\", \"pthread\"])\n",
"step-ids": [
4,
12,
14,
15,
17
]
}
|
[
4,
12,
14,
15,
17
] |
#!/usr/bin/env python
# $Id: iprscan5_urllib2.py 2809 2015-03-13 16:10:25Z uludag $
# ======================================================================
#
# Copyright 2009-2014 EMBL - European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ======================================================================
# InterProScan 5 (REST) Python client using urllib.request and
# xmltramp (http://www.aaronsw.com/2002/xmltramp/).
#
# Ported from the original urllib2-based client; requires Python 3.
#
# See:
# http://www.ebi.ac.uk/Tools/webservices/services/pfa/iprscan5_rest
# http://www.ebi.ac.uk/Tools/webservices/tutorials/python
# ======================================================================
# Load libraries
import urllib.request, urllib.error, urllib.parse
import time
import sys
import re
import os
import platform
import argparse
import xmltramp
# Base URL for service
baseUrl = 'http://www.ebi.ac.uk/Tools/services/rest/iprscan5'
# Set interval for checking status
checkInterval = 10
# Output level
outputLevel = 1
# Debug level
debugLevel = 0
# Number of option arguments.
numOpts = len(sys.argv)
# Usage message
parser = argparse.ArgumentParser()
# Tool specific options
parser.add_argument('--input', required=True, help='input FASTA file')
parser.add_argument('--appl',
help='signature methods to use, see --paramDetail appl')
parser.add_argument('--crc', action="store_true",
help='enable InterProScan Matches look-up (ignored)')
parser.add_argument('--nocrc', action="store_true",
help='disable InterProScan Matches look-up (ignored)')
parser.add_argument('--goterms', action="store_true",
help='enable inclusion of GO terms')
parser.add_argument('--nogoterms', action="store_true",
help='disable inclusion of GO terms')
parser.add_argument('--pathways', action="store_true",
help='enable inclusion of pathway terms')
parser.add_argument('--nopathways', action="store_true",
help='disable inclusion of pathway terms')
parser.add_argument('--sequence', help='input sequence file name')
# General options
parser.add_argument('--email', required=True, help='e-mail address')
parser.add_argument('--title', help='job title')
parser.add_argument('--outfile', help='file name for results')
parser.add_argument('--outformat', help='output format for results')
# 'async' is a reserved word in Python 3, so store the flag under a safe name
parser.add_argument('--async', dest='async_', action='store_true',
                    help='asynchronous mode')
parser.add_argument('--jobid', help='job identifier')
parser.add_argument('--polljob', action="store_true", help='get job result')
parser.add_argument('--status', action="store_true", help='get job status')
parser.add_argument('--resultTypes', action='store_true',
help='get result types')
parser.add_argument('--params', action='store_true',
help='list input parameters')
parser.add_argument('--paramDetail', help='get details for parameter')
parser.add_argument('--quiet', action='store_true',
help='decrease output level')
parser.add_argument('--verbose', action='store_true',
help='increase output level')
parser.add_argument('--baseURL', default=baseUrl, help='Base URL for service')
parser.add_argument('--debugLevel', type=int,
default=debugLevel, help='debug output level')
options = parser.parse_args()
# Increase output level
if options.verbose:
outputLevel += 1
# Decrease output level
if options.quiet:
outputLevel -= 1
# Debug level
if options.debugLevel:
debugLevel = options.debugLevel
# Debug print
def printDebugMessage(functionName, message, level):
if(level <= debugLevel):
print('[' + functionName + '] ' + message, file=sys.stderr)
# User-agent for request (see RFC2616).
def getUserAgent():
printDebugMessage('getUserAgent', 'Begin', 11)
    # Agent string for the urllib library.
    urllib_agent = 'Python-urllib/%s' % urllib.request.__version__
clientRevision = '$Revision: 2809 $'
clientVersion = '0'
if len(clientRevision) > 11:
clientVersion = clientRevision[11:-2]
# Prepend client specific agent string.
user_agent = 'EBI-Sample-Client/%s (%s; Python %s; %s) %s' % (
clientVersion, os.path.basename(__file__),
platform.python_version(), platform.system(),
urllib_agent
)
printDebugMessage('getUserAgent', 'user_agent: ' + user_agent, 12)
printDebugMessage('getUserAgent', 'End', 11)
return user_agent
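# Illustration (hypothetical environment): with clientRevision '$Revision: 2809 $'
# the slice above yields '2809', so the assembled string looks roughly like
#   EBI-Sample-Client/2809 (iprscan5_urllib2.py; Python 3.10.12; Linux) Python-urllib/3.10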
# Wrapper for a REST (HTTP GET) request
def restRequest(url):
printDebugMessage('restRequest', 'Begin', 11)
printDebugMessage('restRequest', 'url: ' + url, 11)
# Errors are indicated by HTTP status codes.
try:
# Set the User-agent.
user_agent = getUserAgent()
http_headers = {'User-Agent': user_agent}
req = urllib.request.Request(url, None, http_headers)
# Make the request (HTTP GET).
reqH = urllib.request.urlopen(req)
        result = reqH.read().decode('utf-8')  # decode so callers get text, not bytes
reqH.close()
# Errors are indicated by HTTP status codes.
except urllib.error.HTTPError as ex:
# Trap exception and output the document to get error message.
print(ex.read(), file=sys.stderr)
raise
printDebugMessage('restRequest', 'End', 11)
return result
# Get input parameters list
def serviceGetParameters():
printDebugMessage('serviceGetParameters', 'Begin', 1)
requestUrl = baseUrl + '/parameters'
printDebugMessage('serviceGetParameters', 'requestUrl: ' + requestUrl, 2)
xmlDoc = restRequest(requestUrl)
doc = xmltramp.parse(xmlDoc)
printDebugMessage('serviceGetParameters', 'End', 1)
return doc['id':]
# Print list of parameters
def printGetParameters():
printDebugMessage('printGetParameters', 'Begin', 1)
idList = serviceGetParameters()
for id in idList:
print(id)
printDebugMessage('printGetParameters', 'End', 1)
# Get input parameter information
def serviceGetParameterDetails(paramName):
printDebugMessage('serviceGetParameterDetails', 'Begin', 1)
printDebugMessage('serviceGetParameterDetails',
'paramName: ' + paramName, 2)
requestUrl = baseUrl + '/parameterdetails/' + paramName
printDebugMessage('serviceGetParameterDetails',
'requestUrl: ' + requestUrl, 2)
xmlDoc = restRequest(requestUrl)
doc = xmltramp.parse(xmlDoc)
printDebugMessage('serviceGetParameterDetails', 'End', 1)
return doc
# Print description of a parameter
def printGetParameterDetails(paramName):
printDebugMessage('printGetParameterDetails', 'Begin', 1)
doc = serviceGetParameterDetails(paramName)
print(str(doc.name) + "\t" + str(doc.type))
print(doc.description)
for value in doc.values:
print(value.value, end=' ')
if str(value.defaultValue) == 'true':
print('default', end=' ')
print()
print("\t" + str(value.label))
if(hasattr(value, 'properties')):
for wsProperty in value.properties:
print("\t" + str(wsProperty.key) + "\t" + str(wsProperty.value))
#print doc
printDebugMessage('printGetParameterDetails', 'End', 1)
# Submit job
def serviceRun(email, title, params):
printDebugMessage('serviceRun', 'Begin', 1)
# Insert e-mail and title into params
params['email'] = email
if title:
params['title'] = title
requestUrl = baseUrl + '/run/'
printDebugMessage('serviceRun', 'requestUrl: ' + requestUrl, 2)
# Signature methods requires special handling (list)
applData = ''
if 'appl' in params:
# So extract from params
applList = params['appl']
del params['appl']
# Build the method data options
for appl in applList:
applData += '&appl=' + appl
# Get the data for the other options
requestData = urllib.parse.urlencode(params)
# Concatenate the two parts.
requestData += applData
printDebugMessage('serviceRun', 'requestData: ' + requestData, 2)
# Errors are indicated by HTTP status codes.
try:
# Set the HTTP User-agent.
user_agent = getUserAgent()
http_headers = {'User-Agent': user_agent}
req = urllib.request.Request(requestUrl, None, http_headers)
        # Make the submission (HTTP POST); POST data must be bytes in Python 3.
        reqH = urllib.request.urlopen(req, requestData.encode('utf-8'))
        jobId = reqH.read().decode('utf-8')
reqH.close()
except urllib.error.HTTPError as ex:
# Trap exception and output the document to get error message.
print(ex.read(), file=sys.stderr)
raise
printDebugMessage('serviceRun', 'jobId: ' + jobId, 2)
printDebugMessage('serviceRun', 'End', 1)
return jobId
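# Illustration (hypothetical values): with params containing sequence and
# email, and appl = ['Pfam', 'SMART'], the body assembled above looks like
#   sequence=MKT...&email=user%40example.org&appl=Pfam&appl=SMART
# i.e. 'appl' is removed from the urlencoded part and re-appended as repeated
# fields, presumably because the endpoint expects list-valued options that way.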
# Get job status
def serviceGetStatus(jobId):
printDebugMessage('serviceGetStatus', 'Begin', 1)
printDebugMessage('serviceGetStatus', 'jobId: ' + jobId, 2)
requestUrl = baseUrl + '/status/' + jobId
printDebugMessage('serviceGetStatus', 'requestUrl: ' + requestUrl, 2)
status = restRequest(requestUrl)
printDebugMessage('serviceGetStatus', 'status: ' + status, 2)
printDebugMessage('serviceGetStatus', 'End', 1)
return status
# Print the status of a job
def printGetStatus(jobId):
printDebugMessage('printGetStatus', 'Begin', 1)
status = serviceGetStatus(jobId)
print(status)
printDebugMessage('printGetStatus', 'End', 1)
# Get available result types for job
def serviceGetResultTypes(jobId):
printDebugMessage('serviceGetResultTypes', 'Begin', 1)
printDebugMessage('serviceGetResultTypes', 'jobId: ' + jobId, 2)
requestUrl = baseUrl + '/resulttypes/' + jobId
printDebugMessage('serviceGetResultTypes', 'requestUrl: ' + requestUrl, 2)
xmlDoc = restRequest(requestUrl)
doc = xmltramp.parse(xmlDoc)
printDebugMessage('serviceGetResultTypes', 'End', 1)
return doc['type':]
# Print list of available result types for a job.
def printGetResultTypes(jobId):
printDebugMessage('printGetResultTypes', 'Begin', 1)
resultTypeList = serviceGetResultTypes(jobId)
for resultType in resultTypeList:
print(resultType['identifier'])
if(hasattr(resultType, 'label')):
print("\t", resultType['label'])
if(hasattr(resultType, 'description')):
print("\t", resultType['description'])
if(hasattr(resultType, 'mediaType')):
print("\t", resultType['mediaType'])
if(hasattr(resultType, 'fileSuffix')):
print("\t", resultType['fileSuffix'])
printDebugMessage('printGetResultTypes', 'End', 1)
# Get result
def serviceGetResult(jobId, type_):
printDebugMessage('serviceGetResult', 'Begin', 1)
printDebugMessage('serviceGetResult', 'jobId: ' + jobId, 2)
printDebugMessage('serviceGetResult', 'type_: ' + type_, 2)
requestUrl = baseUrl + '/result/' + jobId + '/' + type_
result = restRequest(requestUrl)
printDebugMessage('serviceGetResult', 'End', 1)
return result
# Client-side poll
def clientPoll(jobId):
printDebugMessage('clientPoll', 'Begin', 1)
result = 'PENDING'
while result == 'RUNNING' or result == 'PENDING':
result = serviceGetStatus(jobId)
print(result, file=sys.stderr)
if result == 'RUNNING' or result == 'PENDING':
time.sleep(checkInterval)
printDebugMessage('clientPoll', 'End', 1)
# Get result for a jobid
def getResult(jobId):
printDebugMessage('getResult', 'Begin', 1)
printDebugMessage('getResult', 'jobId: ' + jobId, 1)
# Check status and wait if necessary
clientPoll(jobId)
# Get available result types
resultTypes = serviceGetResultTypes(jobId)
for resultType in resultTypes:
# Derive the filename for the result
if options.outfile:
filename = options.outfile + '.' + \
str(resultType['identifier']) + '.' + \
str(resultType['fileSuffix'])
else:
filename = jobId + '.' + \
str(resultType['identifier']) + '.' + \
str(resultType['fileSuffix'])
# Write a result file
if not options.outformat or options.outformat == str(resultType['identifier']):
# Get the result
result = serviceGetResult(jobId, str(resultType['identifier']))
fh = open(filename, 'w')
fh.write(result)
fh.close()
print(filename)
printDebugMessage('getResult', 'End', 1)
# Read a file
def readFile(filename):
printDebugMessage('readFile', 'Begin', 1)
fh = open(filename, 'r')
data = fh.read()
fh.close()
printDebugMessage('readFile', 'End', 1)
return data
# No options... print help.
if numOpts < 2:
parser.print_help()
# List parameters
elif options.params:
printGetParameters()
# Get parameter details
elif options.paramDetail:
printGetParameterDetails(options.paramDetail)
# Submit job
elif options.email and not options.jobid:
params = {}
    if options.input:
if os.access(options.input, os.R_OK): # Read file into content
params['sequence'] = readFile(options.input)
else: # Argument is a sequence id
params['sequence'] = options.input
elif options.sequence: # Specified via option
if os.access(options.sequence, os.R_OK): # Read file into content
params['sequence'] = readFile(options.sequence)
else: # Argument is a sequence id
params['sequence'] = options.sequence
# Map flag options to boolean values.
# if options.crc:
# params['crc'] = True
# elif options.nocrc:
# params['crc'] = False
if options.goterms:
params['goterms'] = True
elif options.nogoterms:
params['goterms'] = False
if options.pathways:
params['pathways'] = True
elif options.nopathways:
params['pathways'] = False
# Add the other options (if defined)
if options.appl:
params['appl'] = re.split('[ \t\n,;]+', options.appl)
# Submit the job
jobid = serviceRun(options.email, options.title, params)
    if options.async_: # Async mode
print(jobid)
else: # Sync mode
print(jobid, file=sys.stderr)
time.sleep(5)
getResult(jobid)
# Get job status
elif options.status and options.jobid:
printGetStatus(options.jobid)
# List result types for job
elif options.resultTypes and options.jobid:
printGetResultTypes(options.jobid)
# Get results for job
elif options.polljob and options.jobid:
getResult(options.jobid)
else:
print('Error: unrecognised argument combination', file=sys.stderr)
parser.print_help()
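# ---------------------------------------------------------------------------
# Hedged usage sketch (assumptions: 'sequence.fasta' exists locally and the
# EBI service is reachable; option names match the argparse definitions above):
#
#   python iprscan5_urllib2.py --email [email protected] --input sequence.fasta \
#       --goterms --pathways --async
#   python iprscan5_urllib2.py --email [email protected] --input x \
#       --status --jobid <jobid printed by the submission step>
#
# Note that --email and --input are declared required, so even the status and
# polling calls need them in this version of the client.
# ---------------------------------------------------------------------------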
|
normal
|
{
"blob_id": "3dd9ce6d5d1ba0bebadae4068e2c898802180e1d",
"index": 8825,
"step-1": "#!/usr/bin/env python\n# $Id: iprscan5_urllib2.py 2809 2015-03-13 16:10:25Z uludag $\n# ======================================================================\n#\n# Copyright 2009-2014 EMBL - European Bioinformatics Institute\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# ======================================================================\n# InterProScan 5 (REST) Python client using urllib2 and\n# xmltramp (http://www.aaronsw.com/2002/xmltramp/).\n#\n# Tested with:\n# Python 2.6.5 (Ubuntu 10.04 LTS)\n# Python 2.7.3 (Ubuntu 12.04 LTS)\n#\n# See:\n# http://www.ebi.ac.uk/Tools/webservices/services/pfa/iprscan5_rest\n# http://www.ebi.ac.uk/Tools/webservices/tutorials/python\n# ======================================================================\n# Base URL for service\nimport urllib.request, urllib.error, urllib.parse\nimport urllib.request, urllib.parse, urllib.error\nimport time\nimport sys\nimport re\nimport os\nimport platform\nimport argparse\nimport xmltramp\nbaseUrl = 'http://www.ebi.ac.uk/Tools/services/rest/iprscan5'\n\n# Load libraries\n\n# Set interval for checking status\ncheckInterval = 10\n# Output level\noutputLevel = 1\n# Debug level\ndebugLevel = 0\n# Number of option arguments.\nnumOpts = len(sys.argv)\n\n# Usage message\nparser = argparse.ArgumentParser()\n# Tool specific options\nparser.add_argument('--input', required=True, help='input FASTA file')\nparser.add_argument('--appl', \n\t\t\t\t\thelp='signature methods to use, see --paramDetail appl')\nparser.add_argument('--crc', action=\"store_true\",\n help='enable InterProScan Matches look-up (ignored)')\nparser.add_argument('--nocrc', action=\"store_true\",\n help='disable InterProScan Matches look-up (ignored)')\nparser.add_argument('--goterms', action=\"store_true\",\n help='enable inclusion of GO terms')\nparser.add_argument('--nogoterms', action=\"store_true\",\n help='disable inclusion of GO terms')\nparser.add_argument('--pathways', action=\"store_true\",\n help='enable inclusion of pathway terms')\nparser.add_argument('--nopathways', action=\"store_true\",\n help='disable inclusion of pathway terms')\nparser.add_argument('--sequence', help='input sequence file name')\n# General options\nparser.add_argument('--email', required=True, help='e-mail address')\nparser.add_argument('--title', help='job title')\nparser.add_argument('--outfile', help='file name for results')\nparser.add_argument('--outformat', help='output format for results')\nparser.add_argument('--async', action='store_true', help='asynchronous mode')\nparser.add_argument('--jobid', help='job identifier')\nparser.add_argument('--polljob', action=\"store_true\", help='get job result')\nparser.add_argument('--status', action=\"store_true\", help='get job status')\nparser.add_argument('--resultTypes', action='store_true',\n help='get result types')\nparser.add_argument('--params', action='store_true',\n help='list input parameters')\nparser.add_argument('--paramDetail', help='get details for parameter')\nparser.add_argument('--quiet', 
action='store_true',\n help='decrease output level')\nparser.add_argument('--verbose', action='store_true',\n help='increase output level')\nparser.add_argument('--baseURL', default=baseUrl, help='Base URL for service')\nparser.add_argument('--debugLevel', type=int,\n default=debugLevel, help='debug output level')\noptions = parser.parse_args()\n\n# Increase output level\nif options.verbose:\n outputLevel += 1\n\n# Decrease output level\nif options.quiet:\n outputLevel -= 1\n\n# Debug level\nif options.debugLevel:\n debugLevel = options.debugLevel\n\n# Debug print\n\n\ndef printDebugMessage(functionName, message, level):\n if(level <= debugLevel):\n print('[' + functionName + '] ' + message, file=sys.stderr)\n\n# User-agent for request (see RFC2616).\n\n\ndef getUserAgent():\n printDebugMessage('getUserAgent', 'Begin', 11)\n # Agent string for urllib2 library.\n urllib_agent = 'Python-urllib/%s' % urllib2.__version__\n clientRevision = '$Revision: 2809 $'\n clientVersion = '0'\n if len(clientRevision) > 11:\n clientVersion = clientRevision[11:-2]\n # Prepend client specific agent string.\n user_agent = 'EBI-Sample-Client/%s (%s; Python %s; %s) %s' % (\n clientVersion, os.path.basename(__file__),\n platform.python_version(), platform.system(),\n urllib_agent\n )\n printDebugMessage('getUserAgent', 'user_agent: ' + user_agent, 12)\n printDebugMessage('getUserAgent', 'End', 11)\n return user_agent\n\n# Wrapper for a REST (HTTP GET) request\n\n\ndef restRequest(url):\n printDebugMessage('restRequest', 'Begin', 11)\n printDebugMessage('restRequest', 'url: ' + url, 11)\n # Errors are indicated by HTTP status codes.\n try:\n # Set the User-agent.\n user_agent = getUserAgent()\n http_headers = {'User-Agent': user_agent}\n req = urllib.request.Request(url, None, http_headers)\n # Make the request (HTTP GET).\n reqH = urllib.request.urlopen(req)\n result = reqH.read()\n reqH.close()\n # Errors are indicated by HTTP status codes.\n except urllib.error.HTTPError as ex:\n # Trap exception and output the document to get error message.\n print(ex.read(), file=sys.stderr)\n raise\n printDebugMessage('restRequest', 'End', 11)\n return result\n\n# Get input parameters list\n\n\ndef serviceGetParameters():\n printDebugMessage('serviceGetParameters', 'Begin', 1)\n requestUrl = baseUrl + '/parameters'\n printDebugMessage('serviceGetParameters', 'requestUrl: ' + requestUrl, 2)\n xmlDoc = restRequest(requestUrl)\n doc = xmltramp.parse(xmlDoc)\n printDebugMessage('serviceGetParameters', 'End', 1)\n return doc['id':]\n\n# Print list of parameters\n\n\ndef printGetParameters():\n printDebugMessage('printGetParameters', 'Begin', 1)\n idList = serviceGetParameters()\n for id in idList:\n print(id)\n printDebugMessage('printGetParameters', 'End', 1)\n\n# Get input parameter information\n\n\ndef serviceGetParameterDetails(paramName):\n printDebugMessage('serviceGetParameterDetails', 'Begin', 1)\n printDebugMessage('serviceGetParameterDetails',\n 'paramName: ' + paramName, 2)\n requestUrl = baseUrl + '/parameterdetails/' + paramName\n printDebugMessage('serviceGetParameterDetails',\n 'requestUrl: ' + requestUrl, 2)\n xmlDoc = restRequest(requestUrl)\n doc = xmltramp.parse(xmlDoc)\n printDebugMessage('serviceGetParameterDetails', 'End', 1)\n return doc\n\n# Print description of a parameter\n\n\ndef printGetParameterDetails(paramName):\n printDebugMessage('printGetParameterDetails', 'Begin', 1)\n doc = serviceGetParameterDetails(paramName)\n print(str(doc.name) + \"\\t\" + str(doc.type))\n print(doc.description)\n for 
value in doc.values:\n print(value.value, end=' ')\n if str(value.defaultValue) == 'true':\n print('default', end=' ')\n print()\n print(\"\\t\" + str(value.label))\n if(hasattr(value, 'properties')):\n for wsProperty in value.properties:\n print(\"\\t\" + str(wsProperty.key) + \"\\t\" + str(wsProperty.value))\n #print doc\n printDebugMessage('printGetParameterDetails', 'End', 1)\n\n# Submit job\n\n\ndef serviceRun(email, title, params):\n printDebugMessage('serviceRun', 'Begin', 1)\n # Insert e-mail and title into params\n params['email'] = email\n if title:\n params['title'] = title\n requestUrl = baseUrl + '/run/'\n printDebugMessage('serviceRun', 'requestUrl: ' + requestUrl, 2)\n # Signature methods requires special handling (list)\n applData = ''\n if 'appl' in params:\n # So extract from params\n applList = params['appl']\n del params['appl']\n # Build the method data options\n for appl in applList:\n applData += '&appl=' + appl\n # Get the data for the other options\n requestData = urllib.parse.urlencode(params)\n # Concatenate the two parts.\n requestData += applData\n printDebugMessage('serviceRun', 'requestData: ' + requestData, 2)\n # Errors are indicated by HTTP status codes.\n try:\n # Set the HTTP User-agent.\n user_agent = getUserAgent()\n http_headers = {'User-Agent': user_agent}\n req = urllib.request.Request(requestUrl, None, http_headers)\n # Make the submission (HTTP POST).\n reqH = urllib.request.urlopen(req, requestData)\n jobId = reqH.read()\n reqH.close()\n except urllib.error.HTTPError as ex:\n # Trap exception and output the document to get error message.\n print(ex.read(), file=sys.stderr)\n raise\n printDebugMessage('serviceRun', 'jobId: ' + jobId, 2)\n printDebugMessage('serviceRun', 'End', 1)\n return jobId\n\n# Get job status\n\n\ndef serviceGetStatus(jobId):\n printDebugMessage('serviceGetStatus', 'Begin', 1)\n printDebugMessage('serviceGetStatus', 'jobId: ' + jobId, 2)\n requestUrl = baseUrl + '/status/' + jobId\n printDebugMessage('serviceGetStatus', 'requestUrl: ' + requestUrl, 2)\n status = restRequest(requestUrl)\n printDebugMessage('serviceGetStatus', 'status: ' + status, 2)\n printDebugMessage('serviceGetStatus', 'End', 1)\n return status\n\n# Print the status of a job\n\n\ndef printGetStatus(jobId):\n printDebugMessage('printGetStatus', 'Begin', 1)\n status = serviceGetStatus(jobId)\n print(status)\n printDebugMessage('printGetStatus', 'End', 1)\n\n\n# Get available result types for job\ndef serviceGetResultTypes(jobId):\n printDebugMessage('serviceGetResultTypes', 'Begin', 1)\n printDebugMessage('serviceGetResultTypes', 'jobId: ' + jobId, 2)\n requestUrl = baseUrl + '/resulttypes/' + jobId\n printDebugMessage('serviceGetResultTypes', 'requestUrl: ' + requestUrl, 2)\n xmlDoc = restRequest(requestUrl)\n doc = xmltramp.parse(xmlDoc)\n printDebugMessage('serviceGetResultTypes', 'End', 1)\n return doc['type':]\n\n# Print list of available result types for a job.\n\n\ndef printGetResultTypes(jobId):\n printDebugMessage('printGetResultTypes', 'Begin', 1)\n resultTypeList = serviceGetResultTypes(jobId)\n for resultType in resultTypeList:\n print(resultType['identifier'])\n if(hasattr(resultType, 'label')):\n print(\"\\t\", resultType['label'])\n if(hasattr(resultType, 'description')):\n print(\"\\t\", resultType['description'])\n if(hasattr(resultType, 'mediaType')):\n print(\"\\t\", resultType['mediaType'])\n if(hasattr(resultType, 'fileSuffix')):\n print(\"\\t\", resultType['fileSuffix'])\n printDebugMessage('printGetResultTypes', 'End', 1)\n\n# Get 
result\n\n\ndef serviceGetResult(jobId, type_):\n printDebugMessage('serviceGetResult', 'Begin', 1)\n printDebugMessage('serviceGetResult', 'jobId: ' + jobId, 2)\n printDebugMessage('serviceGetResult', 'type_: ' + type_, 2)\n requestUrl = baseUrl + '/result/' + jobId + '/' + type_\n result = restRequest(requestUrl)\n printDebugMessage('serviceGetResult', 'End', 1)\n return result\n\n# Client-side poll\n\n\ndef clientPoll(jobId):\n printDebugMessage('clientPoll', 'Begin', 1)\n result = 'PENDING'\n while result == 'RUNNING' or result == 'PENDING':\n result = serviceGetStatus(jobId)\n print(result, file=sys.stderr)\n if result == 'RUNNING' or result == 'PENDING':\n time.sleep(checkInterval)\n printDebugMessage('clientPoll', 'End', 1)\n\n# Get result for a jobid\n\n\ndef getResult(jobId):\n printDebugMessage('getResult', 'Begin', 1)\n printDebugMessage('getResult', 'jobId: ' + jobId, 1)\n # Check status and wait if necessary\n clientPoll(jobId)\n # Get available result types\n resultTypes = serviceGetResultTypes(jobId)\n for resultType in resultTypes:\n # Derive the filename for the result\n if options.outfile:\n filename = options.outfile + '.' + \\\n str(resultType['identifier']) + '.' + \\\n str(resultType['fileSuffix'])\n else:\n filename = jobId + '.' + \\\n str(resultType['identifier']) + '.' + \\\n str(resultType['fileSuffix'])\n # Write a result file\n if not options.outformat or options.outformat == str(resultType['identifier']):\n # Get the result\n result = serviceGetResult(jobId, str(resultType['identifier']))\n fh = open(filename, 'w')\n fh.write(result)\n fh.close()\n print(filename)\n printDebugMessage('getResult', 'End', 1)\n\n# Read a file\n\n\ndef readFile(filename):\n printDebugMessage('readFile', 'Begin', 1)\n fh = open(filename, 'r')\n data = fh.read()\n fh.close()\n printDebugMessage('readFile', 'End', 1)\n return data\n\n\n# No options... 
print help.\nif numOpts < 2:\n parser.print_help()\n# List parameters\nelif options.params:\n printGetParameters()\n# Get parameter details\nelif options.paramDetail:\n printGetParameterDetails(options.paramDetail)\n# Submit job\nelif options.email and not options.jobid:\n params = {}\n if 1 > 0:\n if os.access(options.input, os.R_OK): # Read file into content\n params['sequence'] = readFile(options.input)\n else: # Argument is a sequence id\n params['sequence'] = options.input\n elif options.sequence: # Specified via option\n if os.access(options.sequence, os.R_OK): # Read file into content\n params['sequence'] = readFile(options.sequence)\n else: # Argument is a sequence id\n params['sequence'] = options.sequence\n # Map flag options to boolean values.\n # if options.crc:\n # params['crc'] = True\n # elif options.nocrc:\n # params['crc'] = False\n if options.goterms:\n params['goterms'] = True\n elif options.nogoterms:\n params['goterms'] = False\n if options.pathways:\n params['pathways'] = True\n elif options.nopathways:\n params['pathways'] = False\n # Add the other options (if defined)\n if options.appl:\n params['appl'] = re.split('[ \\t\\n,;]+', options.appl)\n\n # Submit the job\n jobid = serviceRun(options.email, options.title, params)\n if options.async: # Async mode\n print(jobid)\n else: # Sync mode\n print(jobid, file=sys.stderr)\n time.sleep(5)\n getResult(jobid)\n# Get job status\nelif options.status and options.jobid:\n printGetStatus(options.jobid)\n# List result types for job\nelif options.resultTypes and options.jobid:\n printGetResultTypes(options.jobid)\n# Get results for job\nelif options.polljob and options.jobid:\n getResult(options.jobid)\nelse:\n print('Error: unrecognised argument combination', file=sys.stderr)\n parser.print_help()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import numpy as np
import pickle as p
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
from numpy.random import randn
from neural_network import network
net = network([1,8,8,1], filename='./data/x', bias=True)
# net.load_random()
net.load()
n = 32
# Training set: n samples of a shifted sine on [0, 1], scaled into [0, 1].
x = np.array([[x] for x in np.linspace(0,1,n)])
y = (1+np.sin(10*x))/2
# Flat copies of the training points for plotting later.
X = [xx[0] for xx in x]
Y = [yy[0] for yy in y]
plt.plot(x,y)
c = 1
ii = 0
# Train for 1001 passes, saving the weights after each one.
for ii in range(1001):
# c = net.gradient_training(x,y,dw=0.1)
c = net.retarded_training(x,y)
print(ii,c)
net.save()
# if ii%10==0 and ii!=0:
# net.shake(x,y,n=10)
# net.save()
# # ii+=1
N = 128
# Overlay the training points and the network's fit on a denser grid.
plt.plot(X,Y, 'ro')
X = np.linspace(0,1,N)
Y = []
for x in X:
Y += [net.forward([x])[0]]
plt.plot(X,np.array(Y))
plt.show()
# for i in range(len(self.z)):
# if i==0:
# yHat = self.forward(x)
# delta = np.multiply(yHat - y, sigmoidPrime(self.z[-1]))
# dJdW = np.dot(self.a[-2].T, delta)
# else:
# delta = np.dot(delta, self.W[-i].T)*sigmoidPrime(self.z[-1-i])
# dJdW = np.dot(self.a[-2-i].T, delta)
# dJ += [dJdW]
# dJ = dJ[::-1]
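# ---------------------------------------------------------------------------
# Hedged sketch of the vectorized backward pass the commented fragment above
# describes. It assumes the `network` object exposes W (weight list), z
# (pre-activations) and a (activations, including the input layer) as those
# comments suggest; sigmoid_prime is defined here because sigmoidPrime is not
# imported in this script.
# ---------------------------------------------------------------------------
def sigmoid_prime(z):
    s = 1.0/(1.0 + np.exp(-z))
    return s*(1.0 - s)

def cost_gradients(net, x, y):
    """dJ/dW per layer of a sigmoid MLP under a squared-error cost."""
    yHat = net.forward(x)
    dJ = []
    # Output-layer error, then propagate backwards through each weight matrix.
    delta = np.multiply(yHat - y, sigmoid_prime(net.z[-1]))
    dJ.append(np.dot(net.a[-2].T, delta))
    for i in range(1, len(net.z)):
        delta = np.dot(delta, net.W[-i].T)*sigmoid_prime(net.z[-1-i])
        dJ.append(np.dot(net.a[-2-i].T, delta))
    return dJ[::-1]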
|
normal
|
{
"blob_id": "cf07344808f2d91d8949cfc4beb9f923926e6851",
"index": 6208,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnet.load()\n<mask token>\nplt.plot(x, y)\n<mask token>\nfor ii in range(1001):\n c = net.retarded_training(x, y)\n print(ii, c)\n net.save()\n<mask token>\nplt.plot(X, Y, 'ro')\n<mask token>\nfor x in X:\n Y += [net.forward([x])[0]]\nplt.plot(X, np.array(Y))\nplt.show()\n",
"step-3": "<mask token>\nnet = network([1, 8, 8, 1], filename='./data/x', bias=True)\nnet.load()\nn = 32\nx = np.array([[x] for x in np.linspace(0, 1, n)])\ny = (1 + np.sin(10 * x)) / 2\nX = [xx[0] for xx in x]\nY = [yy[0] for yy in y]\nplt.plot(x, y)\nc = 1\nii = 0\nfor ii in range(1001):\n c = net.retarded_training(x, y)\n print(ii, c)\n net.save()\nN = 128\nplt.plot(X, Y, 'ro')\nX = np.linspace(0, 1, N)\nY = []\nfor x in X:\n Y += [net.forward([x])[0]]\nplt.plot(X, np.array(Y))\nplt.show()\n",
"step-4": "import numpy as np\nimport pickle as p\nfrom mpl_toolkits.mplot3d import axes3d\nimport matplotlib.pyplot as plt\nfrom numpy.random import randn\nfrom neural_network import network\nnet = network([1, 8, 8, 1], filename='./data/x', bias=True)\nnet.load()\nn = 32\nx = np.array([[x] for x in np.linspace(0, 1, n)])\ny = (1 + np.sin(10 * x)) / 2\nX = [xx[0] for xx in x]\nY = [yy[0] for yy in y]\nplt.plot(x, y)\nc = 1\nii = 0\nfor ii in range(1001):\n c = net.retarded_training(x, y)\n print(ii, c)\n net.save()\nN = 128\nplt.plot(X, Y, 'ro')\nX = np.linspace(0, 1, N)\nY = []\nfor x in X:\n Y += [net.forward([x])[0]]\nplt.plot(X, np.array(Y))\nplt.show()\n",
"step-5": "import numpy as np\nimport pickle as p\nfrom mpl_toolkits.mplot3d import axes3d\nimport matplotlib.pyplot as plt\nfrom numpy.random import randn\nfrom neural_network import network\n\nnet = network([1,8,8,1], filename='./data/x', bias=True)\n# net.load_random()\nnet.load()\n\nn = 32\n\nx = np.array([[x] for x in np.linspace(0,1,n)])\ny = (1+np.sin(10*x))/2\n\nX = [xx[0] for xx in x]\nY = [yy[0] for yy in y]\n\nplt.plot(x,y)\n\nc = 1\nii = 0\n\nfor ii in range(1001):\n # c = net.gradient_training(x,y,dw=0.1)\n c = net.retarded_training(x,y)\n print(ii,c)\n net.save()\n # if ii%10==0 and ii!=0:\n # net.shake(x,y,n=10)\n # net.save()\n # # ii+=1\n\nN = 128\n\nplt.plot(X,Y, 'ro')\n\nX = np.linspace(0,1,N)\nY = []\n\nfor x in X:\n Y += [net.forward([x])[0]]\n\nplt.plot(X,np.array(Y))\nplt.show()\n\n\n# for i in range(len(self.z)):\n# if i==0:\n# yHat = self.forward(x)\n# delta = np.multiply(yHat - y, sigmoidPrime(self.z[-1]))\n# dJdW = np.dot(self.a[-2].T, delta)\n# else:\n# delta = np.dot(delta, self.W[-i].T)*sigmoidPrime(self.z[-1-i])\n# dJdW = np.dot(self.a[-2-i].T, delta)\n\n# dJ += [dJdW]\n\n# dJ = dJ[::-1]",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
frase = "todos somos promgramadores"
palabras = frase.split()
for p in palabras:
print(palabras[p])
#if p[-2] == "o":
|
normal
|
{
"blob_id": "00c57e7e26a3181ab23697a25257aca479d9ee05",
"index": 5755,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor p in palabras:\n print(palabras[p])\n",
"step-3": "frase = 'todos somos promgramadores'\npalabras = frase.split()\nfor p in palabras:\n print(palabras[p])\n",
"step-4": "frase = \"todos somos promgramadores\"\r\npalabras = frase.split()\r\nfor p in palabras:\r\n print(palabras[p])\r\n\r\n\r\n #if p[-2] == \"o\":\r\n \r\n ",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import json
import requests as requests
from flask import Flask
from flask import request
from tools import AESCipher, tokenId, TokenKey, appId
from tools import TCApplyNeedleUrl, TCCreditNeedleUrl, TCWJNeedleUrl
app = Flask(__name__)
@app.route('/', methods=['POST'])
def hello_world():
if request.method == "POST":
json_data = request.get_data().decode('utf-8')
_data = json.loads(json_data)
        orderNo = _data['orderNo']  # parsed but not used downstream
        name = _data['name']
        idcard = _data['idcard']
        mobile = _data['mobile']
        # AES-encrypt the applicant fields, keyed by the dash-stripped tokenId.
        json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})
        param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')), encoding="utf-8")
        parameter = ("param=%s" % (param))
        parameterXY = ("name=%s,idCard=%s,mobile=%s" % (name, idcard, mobile))
        # Each request is signed with a tokenKey derived from its parameters and target URL.
        XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY, TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard': idcard,
                      'mobile': mobile}
        WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter, TCWJNeedleUrl), 'appId': appId, 'param': param}
        ANparams = {'tokenKey': TokenKey.getTokenKey(parameter, TCApplyNeedleUrl), 'appId': appId, 'param': param}
        # Credit needle query.
        r1 = requests.post(TCCreditNeedleUrl, XYTZparams)
        TCdata = r1.text
        print(TCdata)
        # Initialise so the return below cannot hit an unbound name if a
        # lookup reports a non-zero status.
        TCdata1 = TCdata2 = None
        # WJ needle query; a zero status carries an AES-encrypted payload.
        r2 = requests.post(TCWJNeedleUrl, WJTZparams)
        print(r2.text)
        rep = json.loads(r2.text)
        if rep["status"] == 0:
            data = rep["data"]
            TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print("TCdata1 decrypted:", TCdata1)
        # Apply needle query; its decrypted payload is what the caller receives.
        r3 = requests.post(TCApplyNeedleUrl, ANparams)
        print(r3.text)
        rep = json.loads(r3.text)
        if rep["status"] == 0:
            data = rep["data"]
            TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))
            print("TCdata2 decrypted:", TCdata2)
        return json.dumps(TCdata2)
if __name__ == '__main__':
app.run()
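# ---------------------------------------------------------------------------
# Hedged usage sketch: exercising the endpoint above with the requests
# library. All field values are placeholders, not real identity data.
#
#   import json, requests
#   payload = {"orderNo": "ORDER-0001", "name": "placeholder",
#              "idcard": "000000000000000000", "mobile": "13800000000"}
#   r = requests.post("http://127.0.0.1:5000/", data=json.dumps(payload))
#   print(r.text)
# ---------------------------------------------------------------------------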
|
normal
|
{
"blob_id": "4652cd5548b550cc21d126fc4fbe3e316ecb71b2",
"index": 143,
"step-1": "<mask token>\n\n\[email protected]('/', methods=['POST'])\ndef hello_world():\n if request.method == 'POST':\n json_data = request.get_data().decode('utf-8')\n _data = json.loads(json_data)\n orderNo = _data['orderNo']\n name = _data['name']\n idcard = _data['idcard']\n mobile = _data['mobile']\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),\n encoding='utf-8')\n parameter = 'param=%s' % param\n parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,\n TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':\n idcard, 'mobile': mobile}\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCWJNeedleUrl), 'appId': appId, 'param': param}\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCApplyNeedleUrl), 'appId': appId, 'param': param}\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\n TCdata = r1.text\n print(TCdata)\n r2 = requests.post(TCWJNeedleUrl, WJTZparams)\n print(r2.text)\n rep = json.loads(r2.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata1解密后', TCdata1)\n r3 = requests.post(TCApplyNeedleUrl, ANparams)\n print(r3.text)\n rep = json.loads(r3.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata2解密后', TCdata2)\n return json.dumps(TCdata2)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/', methods=['POST'])\ndef hello_world():\n if request.method == 'POST':\n json_data = request.get_data().decode('utf-8')\n _data = json.loads(json_data)\n orderNo = _data['orderNo']\n name = _data['name']\n idcard = _data['idcard']\n mobile = _data['mobile']\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),\n encoding='utf-8')\n parameter = 'param=%s' % param\n parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,\n TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':\n idcard, 'mobile': mobile}\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCWJNeedleUrl), 'appId': appId, 'param': param}\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCApplyNeedleUrl), 'appId': appId, 'param': param}\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\n TCdata = r1.text\n print(TCdata)\n r2 = requests.post(TCWJNeedleUrl, WJTZparams)\n print(r2.text)\n rep = json.loads(r2.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata1解密后', TCdata1)\n r3 = requests.post(TCApplyNeedleUrl, ANparams)\n print(r3.text)\n rep = json.loads(r3.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata2解密后', TCdata2)\n return json.dumps(TCdata2)\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\[email protected]('/', methods=['POST'])\ndef hello_world():\n if request.method == 'POST':\n json_data = request.get_data().decode('utf-8')\n _data = json.loads(json_data)\n orderNo = _data['orderNo']\n name = _data['name']\n idcard = _data['idcard']\n mobile = _data['mobile']\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),\n encoding='utf-8')\n parameter = 'param=%s' % param\n parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,\n TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':\n idcard, 'mobile': mobile}\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCWJNeedleUrl), 'appId': appId, 'param': param}\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCApplyNeedleUrl), 'appId': appId, 'param': param}\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\n TCdata = r1.text\n print(TCdata)\n r2 = requests.post(TCWJNeedleUrl, WJTZparams)\n print(r2.text)\n rep = json.loads(r2.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata1解密后', TCdata1)\n r3 = requests.post(TCApplyNeedleUrl, ANparams)\n print(r3.text)\n rep = json.loads(r3.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata2解密后', TCdata2)\n return json.dumps(TCdata2)\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-4": "import json\nimport requests as requests\nfrom flask import Flask\nfrom flask import request\nfrom tools import AESCipher, tokenId, TokenKey, appId\nfrom tools import TCApplyNeedleUrl, TCCreditNeedleUrl, TCWJNeedleUrl\napp = Flask(__name__)\n\n\[email protected]('/', methods=['POST'])\ndef hello_world():\n if request.method == 'POST':\n json_data = request.get_data().decode('utf-8')\n _data = json.loads(json_data)\n orderNo = _data['orderNo']\n name = _data['name']\n idcard = _data['idcard']\n mobile = _data['mobile']\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')),\n encoding='utf-8')\n parameter = 'param=%s' % param\n parameterXY = 'name=%s,idCard=%s,mobile=%s' % (name, idcard, mobile)\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY,\n TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard':\n idcard, 'mobile': mobile}\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCWJNeedleUrl), 'appId': appId, 'param': param}\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,\n TCApplyNeedleUrl), 'appId': appId, 'param': param}\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\n TCdata = r1.text\n print(TCdata)\n r2 = requests.post(TCWJNeedleUrl, WJTZparams)\n print(r2.text)\n rep = json.loads(r2.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata1解密后', TCdata1)\n r3 = requests.post(TCApplyNeedleUrl, ANparams)\n print(r3.text)\n rep = json.loads(r3.text)\n if rep['status'] == 0:\n data = rep['data']\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\n print('TCdata2解密后', TCdata2)\n return json.dumps(TCdata2)\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-5": "import json\r\n\r\nimport requests as requests\r\nfrom flask import Flask\r\nfrom flask import request\r\n\r\nfrom tools import AESCipher, tokenId, TokenKey, appId\r\nfrom tools import TCApplyNeedleUrl, TCCreditNeedleUrl, TCWJNeedleUrl\r\n\r\napp = Flask(__name__)\r\n\r\n\r\[email protected]('/', methods=['POST'])\r\ndef hello_world():\r\n if request.method == \"POST\":\r\n json_data = request.get_data().decode('utf-8')\r\n _data = json.loads(json_data)\r\n orderNo = _data['orderNo']\r\n name = _data['name']\r\n idcard = _data['idcard']\r\n mobile = _data['mobile']\r\n json1 = json.dumps({'name': name, 'idcard': idcard, 'mobile': mobile})\r\n param = str(AESCipher.encrypt(json1, tokenId.replace('-', '')), encoding=\"utf-8\")\r\n parameter = (\"param=%s\" % (param))\r\n parameterXY = (\"name=%s,idCard=%s,mobile=%s\" % (name, idcard, mobile))\r\n XYTZparams = {'tokenKey': TokenKey.getTokenKey(parameterXY, TCCreditNeedleUrl), 'appId': appId, 'name': name, 'idCard': idcard,\r\n 'mobile': mobile}\r\n WJTZparams = {'tokenKey': TokenKey.getTokenKey(parameter,TCWJNeedleUrl), 'appId': appId, 'param': param}\r\n ANparams = {'tokenKey': TokenKey.getTokenKey(parameter,TCApplyNeedleUrl), 'appId': appId, 'param': param}\r\n r1 = requests.post(TCCreditNeedleUrl, XYTZparams)\r\n TCdata = r1.text\r\n print(TCdata)\r\n\r\n r2 = requests.post(TCWJNeedleUrl,WJTZparams)\r\n print(r2.text)\r\n rep = json.loads(r2.text)\r\n if rep[\"status\"] == 0:\r\n data = rep[\"data\"]\r\n TCdata1 = AESCipher.decode_data(data, tokenId.replace('-', ''))\r\n print(\"TCdata1解密后\", TCdata1)\r\n\r\n r3 = requests.post(TCApplyNeedleUrl,ANparams)\r\n print(r3.text)\r\n rep = json.loads(r3.text)\r\n if rep[\"status\"] == 0:\r\n data = rep[\"data\"]\r\n TCdata2 = AESCipher.decode_data(data, tokenId.replace('-', ''))\r\n print(\"TCdata2解密后\", TCdata2)\r\n\r\n\r\n return json.dumps(TCdata2)\r\n\r\n\r\nif __name__ == '__main__':\r\n app.run()\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# Remaining credit-card balance after 12 months of minimum payments,
# with interest compounding monthly on whatever remains after each payment.
balance = 42
annualInterestRate = 0.20
monthlyPaymentRate = 0.04
monthlyir = annualInterestRate / 12  # monthly interest rate
rb = balance                         # remaining balance
for i in range(12):
    mp = monthlyPaymentRate * rb     # minimum monthly payment
    rb = rb - mp                     # payment is deducted first
    rb = rb + rb * monthlyir         # then interest accrues on the rest
print('remaining balance: ', round(rb, 2))
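
# Sanity check (a sketch, not part of the original snippet): each month maps
# rb -> rb * (1 - monthlyPaymentRate) * (1 + monthlyir), so the loop should
# agree with the closed form up to float rounding.
closed_form = balance * ((1 - monthlyPaymentRate) * (1 + monthlyir)) ** 12
assert abs(closed_form - rb) < 1e-9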
|
normal
|
{
"blob_id": "1429524b0ae3b679bc3d4386dd17ed50b0fff381",
"index": 146,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(12):\n mp = monthlyPaymentRate * rb\n rb = rb - mp\n rb = rb + rb * monthlyir\nprint('remaining balance: ', round(rb, 2))\n",
"step-3": "balance = 42\nannualInterestRate = 0.2\nmonthlyPaymentRate = 0.04\nmonthlyir = annualInterestRate / 12\nrb = balance\nfor i in range(12):\n mp = monthlyPaymentRate * rb\n rb = rb - mp\n rb = rb + rb * monthlyir\nprint('remaining balance: ', round(rb, 2))\n",
"step-4": "balance=42\n\nannualInterestRate=0.20\n\nmonthlyPaymentRate=0.04\n\n\nmonthlyir = annualInterestRate/12\n\nrb=balance\n\n\nfor i in range(12):\n mp = monthlyPaymentRate * rb\n rb=rb-mp\n rb=rb+rb*monthlyir\n\nprint('remaining balance: ',round(rb,2))\n \n \n\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3
#
# nextskeleton - An assembler skeleton for the ZX Spectrum Next
#
# Copyright (C) 2020 Richard "Shred" Körber
# https://github.com/shred/nextskeleton
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import struct
import sys
parser = argparse.ArgumentParser(description='Generate an autoexec.bas that launches a .nex file')
parser.add_argument('nex',
help='path of the .nex file to be launched')
parser.add_argument('file',
help='autoexec.bas file to be generated')
args = parser.parse_args()
command = '.nexload ' + args.nex + '\r'
contents = bytearray(128)
contents[0:8] = 'PLUS3DOS'.encode('ASCII') # +3DOS signature
contents[8] = 0x1A
contents[9:11] = [0x01, 0x00] # Issue and Version
contents += bytearray((0x00, 0x0A)) # Line number 10
contents += struct.pack('<H', len(command)) # Line length
contents += command.encode('ASCII') # BASIC line
programLength = len(contents) - 128 # Length of the BASIC program
contents[15] = 0x00 # DOS header: PROGRAM
contents[16:18] = struct.pack('<H', programLength) # DOS header: length
contents[18:20] = struct.pack('<H', 10) # DOS header: run at line 10
contents[20:22] = struct.pack('<H', programLength)   # DOS header: offset to variables
contents[11:15] = struct.pack('<L', len(contents)) # Set total length
contents[127] = sum(contents[0:126]) & 0xFF # Compute checksum
with open(args.file, 'wb') as f:
f.write(contents)
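
# Usage sketch (hypothetical paths):
#   python autoexec.py dist/skeleton.nex autoexec.bas
# The output is a +3DOS "PROGRAM" file; placed where NextZXOS looks for
# autoexec.bas, it runs at boot and launches the given .nex via .nexload.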
|
normal
|
{
"blob_id": "0744ec646e7b9303c67c25dff2997568c6171b91",
"index": 108,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('nex', help='path of the .nex file to be launched')\nparser.add_argument('file', help='autoexec.bas file to be generated')\n<mask token>\ncontents += bytearray((0, 10))\ncontents += struct.pack('<H', len(command))\ncontents += command.encode('ASCII')\n<mask token>\nwith open(args.file, 'wb') as f:\n f.write(contents)\n",
"step-3": "<mask token>\nparser = argparse.ArgumentParser(description=\n 'Generate an autoexec.bas that launches a .nex file')\nparser.add_argument('nex', help='path of the .nex file to be launched')\nparser.add_argument('file', help='autoexec.bas file to be generated')\nargs = parser.parse_args()\ncommand = '.nexload ' + args.nex + '\\r'\ncontents = bytearray(128)\ncontents[0:8] = 'PLUS3DOS'.encode('ASCII')\ncontents[8] = 26\ncontents[9:11] = [1, 0]\ncontents += bytearray((0, 10))\ncontents += struct.pack('<H', len(command))\ncontents += command.encode('ASCII')\nprogramLength = len(contents) - 128\ncontents[15] = 0\ncontents[16:18] = struct.pack('<H', programLength)\ncontents[18:20] = struct.pack('<H', 10)\ncontents[20:22] = struct.pack('<H', programLength)\ncontents[11:15] = struct.pack('<L', len(contents))\ncontents[127] = sum(contents[0:126]) & 255\nwith open(args.file, 'wb') as f:\n f.write(contents)\n",
"step-4": "import argparse\nimport struct\nimport sys\nparser = argparse.ArgumentParser(description=\n 'Generate an autoexec.bas that launches a .nex file')\nparser.add_argument('nex', help='path of the .nex file to be launched')\nparser.add_argument('file', help='autoexec.bas file to be generated')\nargs = parser.parse_args()\ncommand = '.nexload ' + args.nex + '\\r'\ncontents = bytearray(128)\ncontents[0:8] = 'PLUS3DOS'.encode('ASCII')\ncontents[8] = 26\ncontents[9:11] = [1, 0]\ncontents += bytearray((0, 10))\ncontents += struct.pack('<H', len(command))\ncontents += command.encode('ASCII')\nprogramLength = len(contents) - 128\ncontents[15] = 0\ncontents[16:18] = struct.pack('<H', programLength)\ncontents[18:20] = struct.pack('<H', 10)\ncontents[20:22] = struct.pack('<H', programLength)\ncontents[11:15] = struct.pack('<L', len(contents))\ncontents[127] = sum(contents[0:126]) & 255\nwith open(args.file, 'wb') as f:\n f.write(contents)\n",
"step-5": "#!/usr/bin/env python3\n#\n# nextskeleton - An assembler skeleton for the ZX Spectrum Next\n#\n# Copyright (C) 2020 Richard \"Shred\" Körber\n# https://github.com/shred/nextskeleton\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport argparse\nimport struct\nimport sys\n\nparser = argparse.ArgumentParser(description='Generate an autoexec.bas that launches a .nex file')\nparser.add_argument('nex',\n help='path of the .nex file to be launched')\nparser.add_argument('file',\n help='autoexec.bas file to be generated')\nargs = parser.parse_args()\n\ncommand = '.nexload ' + args.nex + '\\r'\n\ncontents = bytearray(128)\ncontents[0:8] = 'PLUS3DOS'.encode('ASCII') # +3DOS signature\ncontents[8] = 0x1A\ncontents[9:11] = [0x01, 0x00] # Issue and Version\n\ncontents += bytearray((0x00, 0x0A)) # Line number 10\ncontents += struct.pack('<H', len(command)) # Line length\ncontents += command.encode('ASCII') # BASIC line\nprogramLength = len(contents) - 128 # Length of the BASIC program\n\ncontents[15] = 0x00 # DOS header: PROGRAM\ncontents[16:18] = struct.pack('<H', programLength) # DOS header: length\ncontents[18:20] = struct.pack('<H', 10) # DOS header: run at line 10\ncontents[20:22] = struct.pack('<H', programLength) # DOS header: offset to prog\ncontents[11:15] = struct.pack('<L', len(contents)) # Set total length\ncontents[127] = sum(contents[0:126]) & 0xFF # Compute checksum\n\nwith open(args.file, 'wb') as f:\n f.write(contents)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
__author__ = 'Administrator'

import socket, os, time
import hashlib  # used below for the file checksum; missing in the original

server = socket.socket()

server.bind(("localhost", 9999))

server.listen()

while True:
    conn, addr = server.accept()

    while True:
        data = conn.recv(1024)
        if not data:
            break

        cmd, filename = data.decode().split()

        if os.path.isfile(filename):  # fixed: the colon was missing
            f = open(filename, "rb")
            m = hashlib.md5()
            file_size = os.stat(filename).st_size
            conn.send(str(file_size).encode())  # announce the size first
            conn.recv(1024)  # wait for the client's ack before streaming
            for line in f:
                m.update(line)
                conn.send(line)
            print("file_md5", m.hexdigest())
            f.close()
server.close()
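
# A matching client sketch (assumed protocol: send "get <filename>", read the
# size, ack, then receive exactly that many bytes; names are illustrative):
#   client = socket.socket()
#   client.connect(("localhost", 9999))
#   client.send(b"get test.txt")
#   size = int(client.recv(1024).decode())
#   client.send(b"ready")          # ack so the server starts streaming
#   received = b""
#   while len(received) < size:
#       received += client.recv(1024)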
|
normal
|
{
"blob_id": "0a19efea0c8d7e5e248ca3265ffcb55604dc500c",
"index": 7576,
"step-1": "__author__ = 'Administrator'\n\nimport socket,os,time\n\nserver = socket.socket()\n\nserver.bind((\"localhost\",9999))\n\nserver.listen()\n\nwhile True:\n conn,addr = server.accept()\n\n while True:\n data = conn.recv(1024)\n if not data:\n break\n\n cmd,filename = data.decode().split()\n\n if os.path.isfile(filename)\n f = open(filename,\"rb\")\n m = hashlib.md5()\n file_size = os.stat(filename).st_size\n conn.send(str(file_size).encode())\n conn.recv(1024)\n for line in f:\n m.update(line)\n conn.send(line)\n print(\"file_md5\",m.hexdigest())\n f.close()\nserver.close()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import cv2
import numpy as np
import copy
imgpath = 'D:\\DIP-Project1/b.jpg'
img = cv2.imread(imgpath)
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
cv2.imshow('img', img)
row = len(img)
col = len(img[0])
def medianflt(img, i, j, msize, mr, mc):
    # median of the msize x msize window centred on pixel (i, j)
    pxls = []
    for a in range(msize):
        for b in range(msize):
            mi = i + a - mr
            mj = j + b - mc
            pxls.append(img[mi][mj])
    pxls.sort()
    return pxls[msize * msize // 2]

def orderstatistic(img, row, col, msize=3):
    # order-statistic (median) filter; border pixels are copied unchanged
    rimg = copy.deepcopy(img)
    mr = (msize - 1) // 2
    mc = (msize - 1) // 2
    for i in range(mr, row - mr - 1):
        for j in range(mc, col - mc - 1):
            rimg[i][j] = medianflt(img, i, j, msize, mr, mc)
    return rimg

d0 = 9  # filter window size (must be odd); passed to orderstatistic as msize
rimg = orderstatistic(img, row, col, d0)
cv2.imshow('aimg', rimg)
cv2.waitKey(0)
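
# For comparison (a sketch): OpenCV's built-in median filter should agree with
# orderstatistic in the image interior; borders differ because the loop above
# skips them.
#   builtin = cv2.medianBlur(img, d0)  # ksize must be odd, e.g. the 9 used here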
|
normal
|
{
"blob_id": "cfcce8c760f6ba49ce450d78782cb8f3b5fc1188",
"index": 2857,
"step-1": "<mask token>\n\n\ndef medianflt(img, i, j, msize, mr, mc):\n pxls = []\n for a in range(msize):\n for b in range(msize):\n mi = i + a - mr\n mj = j + b - mc\n pxls.append(img[mi][mj])\n pxls.sort()\n return pxls[msize * msize // 2]\n\n\ndef orderstatistic(img, row, col, msize=3):\n rimg = copy.deepcopy(img)\n mr = (msize - 1) // 2\n mc = (msize - 1) // 2\n for i in range(mr, row - mr - 1):\n for j in range(mc, col - mc - 1):\n rimg[i][j] = medianflt(img, i, j, msize, mr, mc)\n return rimg\n\n\n<mask token>\n",
"step-2": "<mask token>\ncv2.imshow('img', img)\n<mask token>\n\n\ndef medianflt(img, i, j, msize, mr, mc):\n pxls = []\n for a in range(msize):\n for b in range(msize):\n mi = i + a - mr\n mj = j + b - mc\n pxls.append(img[mi][mj])\n pxls.sort()\n return pxls[msize * msize // 2]\n\n\ndef orderstatistic(img, row, col, msize=3):\n rimg = copy.deepcopy(img)\n mr = (msize - 1) // 2\n mc = (msize - 1) // 2\n for i in range(mr, row - mr - 1):\n for j in range(mc, col - mc - 1):\n rimg[i][j] = medianflt(img, i, j, msize, mr, mc)\n return rimg\n\n\n<mask token>\ncv2.imshow('aimg', rimg)\ncv2.waitKey(0)\n",
"step-3": "<mask token>\nimgpath = 'D:\\\\DIP-Project1/b.jpg'\nimg = cv2.imread(imgpath)\nimg = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\ncv2.imshow('img', img)\nrow = len(img)\ncol = len(img[0])\n\n\ndef medianflt(img, i, j, msize, mr, mc):\n pxls = []\n for a in range(msize):\n for b in range(msize):\n mi = i + a - mr\n mj = j + b - mc\n pxls.append(img[mi][mj])\n pxls.sort()\n return pxls[msize * msize // 2]\n\n\ndef orderstatistic(img, row, col, msize=3):\n rimg = copy.deepcopy(img)\n mr = (msize - 1) // 2\n mc = (msize - 1) // 2\n for i in range(mr, row - mr - 1):\n for j in range(mc, col - mc - 1):\n rimg[i][j] = medianflt(img, i, j, msize, mr, mc)\n return rimg\n\n\nd0 = 9\nrimg = orderstatistic(img, row, col, d0)\ncv2.imshow('aimg', rimg)\ncv2.waitKey(0)\n",
"step-4": "import cv2\nimport numpy as np\nimport copy\nimgpath = 'D:\\\\DIP-Project1/b.jpg'\nimg = cv2.imread(imgpath)\nimg = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\ncv2.imshow('img', img)\nrow = len(img)\ncol = len(img[0])\n\n\ndef medianflt(img, i, j, msize, mr, mc):\n pxls = []\n for a in range(msize):\n for b in range(msize):\n mi = i + a - mr\n mj = j + b - mc\n pxls.append(img[mi][mj])\n pxls.sort()\n return pxls[msize * msize // 2]\n\n\ndef orderstatistic(img, row, col, msize=3):\n rimg = copy.deepcopy(img)\n mr = (msize - 1) // 2\n mc = (msize - 1) // 2\n for i in range(mr, row - mr - 1):\n for j in range(mc, col - mc - 1):\n rimg[i][j] = medianflt(img, i, j, msize, mr, mc)\n return rimg\n\n\nd0 = 9\nrimg = orderstatistic(img, row, col, d0)\ncv2.imshow('aimg', rimg)\ncv2.waitKey(0)\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
import numpy as np
import torch
import torch.nn as nn
from torch.nn.functional import interpolate
from torchvision.ops.boxes import batched_nms
class MTCNN():
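    # Thin wrapper: downloads/loads pretrained PNet/RNet/ONet weights and runs
    # batched three-stage detection on lists of equal-sized images.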
def __init__(self, device=None, model=None):
if device is None:
device = 'cuda' if torch.cuda.is_available() else 'cpu'
self.device = device
url = 'https://github.com/deepware/dFace/raw/master/models/mtcnn.pt'
if model is None:
model = torch.hub.load_state_dict_from_url(url)
else:
model = torch.load(model, map_location=device)
self.pnet = PNet().to(device)
self.rnet = RNet().to(device)
self.onet = ONet().to(device)
self.pnet.load_state_dict(model['pnet'])
self.rnet.load_state_dict(model['rnet'])
self.onet.load_state_dict(model['onet'])
def detect(self, imgs, minsize=None):
if len(imgs) == 0:
return []
if isinstance(imgs[0], np.ndarray):
h, w = imgs[0].shape[:2]
else:
w, h = imgs[0].size
if minsize is None:
minsize = max(96 * min(w, h)/1080, 40)
boxes, points = [], []
with torch.no_grad():
batches = [imgs[i:i+10] for i in range(0, len(imgs), 10)]
for batch in batches:
batch_boxes, batch_points = detect_face(
batch, minsize, self.pnet, self.rnet, self.onet,
[0.7, 0.8, 0.9], 0.709, self.device)
boxes += list(batch_boxes)
points += list(batch_points)
result = []
for box, point in zip(boxes, points):
box = np.array(box)
point = np.array(point)
if len(box) == 0:
result.append(None)
else:
result.append((box[:, :4], box[:, 4], point))
return result
def empty_cache(device):
if 'cuda' in device:
with torch.cuda.device(device):
torch.cuda.empty_cache()
class PNet(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(3, 10, kernel_size=3)
self.prelu1 = nn.PReLU(10)
self.pool1 = nn.MaxPool2d(2, 2, ceil_mode=True)
self.conv2 = nn.Conv2d(10, 16, kernel_size=3)
self.prelu2 = nn.PReLU(16)
self.conv3 = nn.Conv2d(16, 32, kernel_size=3)
self.prelu3 = nn.PReLU(32)
self.conv4_1 = nn.Conv2d(32, 2, kernel_size=1)
self.softmax4_1 = nn.Softmax(dim=1)
self.conv4_2 = nn.Conv2d(32, 4, kernel_size=1)
def forward(self, x):
x = self.conv1(x)
x = self.prelu1(x)
x = self.pool1(x)
x = self.conv2(x)
x = self.prelu2(x)
x = self.conv3(x)
x = self.prelu3(x)
a = self.conv4_1(x)
a = self.softmax4_1(a)
b = self.conv4_2(x)
return b, a
class RNet(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(3, 28, kernel_size=3)
self.prelu1 = nn.PReLU(28)
self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)
self.conv2 = nn.Conv2d(28, 48, kernel_size=3)
self.prelu2 = nn.PReLU(48)
self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)
self.conv3 = nn.Conv2d(48, 64, kernel_size=2)
self.prelu3 = nn.PReLU(64)
self.dense4 = nn.Linear(576, 128)
self.prelu4 = nn.PReLU(128)
self.dense5_1 = nn.Linear(128, 2)
self.softmax5_1 = nn.Softmax(dim=1)
self.dense5_2 = nn.Linear(128, 4)
def forward(self, x):
x = self.conv1(x)
x = self.prelu1(x)
x = self.pool1(x)
x = self.conv2(x)
x = self.prelu2(x)
x = self.pool2(x)
x = self.conv3(x)
x = self.prelu3(x)
x = x.permute(0, 3, 2, 1).contiguous()
x = self.dense4(x.view(x.shape[0], -1))
x = self.prelu4(x)
a = self.dense5_1(x)
a = self.softmax5_1(a)
b = self.dense5_2(x)
return b, a
class ONet(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(3, 32, kernel_size=3)
self.prelu1 = nn.PReLU(32)
self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)
self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
self.prelu2 = nn.PReLU(64)
self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)
self.conv3 = nn.Conv2d(64, 64, kernel_size=3)
self.prelu3 = nn.PReLU(64)
self.pool3 = nn.MaxPool2d(2, 2, ceil_mode=True)
self.conv4 = nn.Conv2d(64, 128, kernel_size=2)
self.prelu4 = nn.PReLU(128)
self.dense5 = nn.Linear(1152, 256)
self.prelu5 = nn.PReLU(256)
self.dense6_1 = nn.Linear(256, 2)
self.softmax6_1 = nn.Softmax(dim=1)
self.dense6_2 = nn.Linear(256, 4)
self.dense6_3 = nn.Linear(256, 10)
def forward(self, x):
x = self.conv1(x)
x = self.prelu1(x)
x = self.pool1(x)
x = self.conv2(x)
x = self.prelu2(x)
x = self.pool2(x)
x = self.conv3(x)
x = self.prelu3(x)
x = self.pool3(x)
x = self.conv4(x)
x = self.prelu4(x)
x = x.permute(0, 3, 2, 1).contiguous()
x = self.dense5(x.view(x.shape[0], -1))
x = self.prelu5(x)
a = self.dense6_1(x)
a = self.softmax6_1(a)
b = self.dense6_2(x)
c = self.dense6_3(x)
return b, c, a
def detect_face(imgs, minsize, pnet, rnet, onet, threshold, factor, device):
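    # Three-stage cascade: PNet proposes candidate windows over an image
    # pyramid, RNet refines and filters them at 24x24, and ONet produces the
    # final boxes plus five facial landmarks at 48x48.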
if isinstance(imgs, (np.ndarray, torch.Tensor)):
imgs = torch.as_tensor(imgs, device=device)
if len(imgs.shape) == 3:
imgs = imgs.unsqueeze(0)
else:
if not isinstance(imgs, (list, tuple)):
imgs = [imgs]
if any(img.size != imgs[0].size for img in imgs):
raise Exception("MTCNN batch processing only compatible with equal-dimension images.")
imgs = np.stack([np.uint8(img) for img in imgs])
imgs = torch.as_tensor(imgs, device=device)
model_dtype = next(pnet.parameters()).dtype
imgs = imgs.permute(0, 3, 1, 2).type(model_dtype)
batch_size = len(imgs)
h, w = imgs.shape[2:4]
m = 12.0 / minsize
minl = min(h, w)
minl = minl * m
# Create scale pyramid
scale_i = m
scales = []
while minl >= 12:
scales.append(scale_i)
scale_i = scale_i * factor
minl = minl * factor
# First stage
boxes = []
image_inds = []
all_inds = []
all_i = 0
for scale in scales:
im_data = imresample(imgs, (int(h * scale + 1), int(w * scale + 1)))
im_data = (im_data - 127.5) * 0.0078125
reg, probs = pnet(im_data)
empty_cache(device)
boxes_scale, image_inds_scale = generateBoundingBox(reg, probs[:, 1], scale, threshold[0])
boxes.append(boxes_scale)
image_inds.append(image_inds_scale)
all_inds.append(all_i + image_inds_scale)
all_i += batch_size
boxes = torch.cat(boxes, dim=0)
image_inds = torch.cat(image_inds, dim=0).cpu()
all_inds = torch.cat(all_inds, dim=0)
# NMS within each scale + image
pick = batched_nms(boxes[:, :4], boxes[:, 4], all_inds, 0.5)
boxes, image_inds = boxes[pick], image_inds[pick]
# NMS within each image
pick = batched_nms(boxes[:, :4], boxes[:, 4], image_inds, 0.7)
boxes, image_inds = boxes[pick], image_inds[pick]
regw = boxes[:, 2] - boxes[:, 0]
regh = boxes[:, 3] - boxes[:, 1]
qq1 = boxes[:, 0] + boxes[:, 5] * regw
qq2 = boxes[:, 1] + boxes[:, 6] * regh
qq3 = boxes[:, 2] + boxes[:, 7] * regw
qq4 = boxes[:, 3] + boxes[:, 8] * regh
boxes = torch.stack([qq1, qq2, qq3, qq4, boxes[:, 4]]).permute(1, 0)
boxes = rerec(boxes)
y, ey, x, ex = pad(boxes, w, h)
# Second stage
if len(boxes) > 0:
im_data = []
for k in range(len(y)):
if ey[k] > (y[k] - 1) and ex[k] > (x[k] - 1):
img_k = imgs[image_inds[k], :, (y[k] - 1):ey[k], (x[k] - 1):ex[k]].unsqueeze(0)
im_data.append(imresample(img_k, (24, 24)))
im_data = torch.cat(im_data, dim=0)
im_data = (im_data - 127.5) * 0.0078125
out = []
for batch in im_data.split(2000):
out += [rnet(batch)]
z = list(zip(*out))
out = (torch.cat(z[0]), torch.cat(z[1]))
empty_cache(device)
out0 = out[0].permute(1, 0)
out1 = out[1].permute(1, 0)
score = out1[1, :]
ipass = score > threshold[1]
boxes = torch.cat((boxes[ipass, :4], score[ipass].unsqueeze(1)), dim=1)
image_inds = image_inds[ipass]
mv = out0[:, ipass].permute(1, 0)
# NMS within each image
pick = batched_nms(boxes[:, :4], boxes[:, 4], image_inds, 0.7)
boxes, image_inds, mv = boxes[pick], image_inds[pick], mv[pick]
boxes = bbreg(boxes, mv)
boxes = rerec(boxes)
# Third stage
points = torch.zeros(0, 5, 2, device=device)
if len(boxes) > 0:
y, ey, x, ex = pad(boxes, w, h)
im_data = []
for k in range(len(y)):
if ey[k] > (y[k] - 1) and ex[k] > (x[k] - 1):
img_k = imgs[image_inds[k], :, (y[k] - 1):ey[k], (x[k] - 1):ex[k]].unsqueeze(0)
im_data.append(imresample(img_k, (48, 48)))
im_data = torch.cat(im_data, dim=0)
im_data = (im_data - 127.5) * 0.0078125
out = []
for batch in im_data.split(500):
out += [onet(batch)]
z = list(zip(*out))
out = (torch.cat(z[0]), torch.cat(z[1]), torch.cat(z[2]))
empty_cache(device)
out0 = out[0].permute(1, 0)
out1 = out[1].permute(1, 0)
out2 = out[2].permute(1, 0)
score = out2[1, :]
points = out1
ipass = score > threshold[2]
points = points[:, ipass]
boxes = torch.cat((boxes[ipass, :4], score[ipass].unsqueeze(1)), dim=1)
image_inds = image_inds[ipass]
mv = out0[:, ipass].permute(1, 0)
w_i = boxes[:, 2] - boxes[:, 0] + 1
h_i = boxes[:, 3] - boxes[:, 1] + 1
points_x = w_i.repeat(5, 1) * points[:5, :] + boxes[:, 0].repeat(5, 1) - 1
points_y = h_i.repeat(5, 1) * points[5:10, :] + boxes[:, 1].repeat(5, 1) - 1
points = torch.stack((points_x, points_y)).permute(2, 1, 0)
boxes = bbreg(boxes, mv)
# NMS within each image using "Min" strategy
# pick = batched_nms(boxes[:, :4], boxes[:, 4], image_inds, 0.7)
pick = batched_nms_numpy(boxes[:, :4], boxes[:, 4], image_inds, 0.7, 'Min')
boxes, image_inds, points = boxes[pick], image_inds[pick], points[pick]
boxes = boxes.cpu().numpy()
points = points.cpu().numpy()
batch_boxes = []
batch_points = []
for b_i in range(batch_size):
b_i_inds = np.where(image_inds == b_i)
batch_boxes.append(boxes[b_i_inds].copy())
batch_points.append(points[b_i_inds].copy())
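    # note: these lists can be ragged (different box counts per image);
    # newer NumPy releases may require dtype=object in np.array below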
batch_boxes, batch_points = np.array(batch_boxes), np.array(batch_points)
empty_cache(device)
return batch_boxes, batch_points
def bbreg(boundingbox, reg):
if reg.shape[1] == 1:
reg = torch.reshape(reg, (reg.shape[2], reg.shape[3]))
w = boundingbox[:, 2] - boundingbox[:, 0] + 1
h = boundingbox[:, 3] - boundingbox[:, 1] + 1
b1 = boundingbox[:, 0] + reg[:, 0] * w
b2 = boundingbox[:, 1] + reg[:, 1] * h
b3 = boundingbox[:, 2] + reg[:, 2] * w
b4 = boundingbox[:, 3] + reg[:, 3] * h
boundingbox[:, :4] = torch.stack([b1, b2, b3, b4]).permute(1, 0)
return boundingbox
def generateBoundingBox(reg, probs, scale, thresh):
stride = 2
cellsize = 12
reg = reg.permute(1, 0, 2, 3)
mask = probs >= thresh
mask_inds = mask.nonzero(as_tuple=False)
image_inds = mask_inds[:, 0]
score = probs[mask]
reg = reg[:, mask].permute(1, 0)
bb = mask_inds[:, 1:].type(reg.dtype).flip(1)
q1 = ((stride * bb + 1) / scale).floor()
q2 = ((stride * bb + cellsize - 1 + 1) / scale).floor()
boundingbox = torch.cat([q1, q2, score.unsqueeze(1), reg], dim=1)
return boundingbox, image_inds
def nms_numpy(boxes, scores, threshold, method):
if boxes.size == 0:
return np.empty((0, 3))
x1 = boxes[:, 0].copy()
y1 = boxes[:, 1].copy()
x2 = boxes[:, 2].copy()
y2 = boxes[:, 3].copy()
s = scores
area = (x2 - x1 + 1) * (y2 - y1 + 1)
I = np.argsort(s)
pick = np.zeros_like(s, dtype=np.int16)
counter = 0
while I.size > 0:
i = I[-1]
pick[counter] = i
counter += 1
idx = I[0:-1]
xx1 = np.maximum(x1[i], x1[idx]).copy()
yy1 = np.maximum(y1[i], y1[idx]).copy()
xx2 = np.minimum(x2[i], x2[idx]).copy()
yy2 = np.minimum(y2[i], y2[idx]).copy()
w = np.maximum(0.0, xx2 - xx1 + 1).copy()
h = np.maximum(0.0, yy2 - yy1 + 1).copy()
inter = w * h
if method == "Min":
o = inter / np.minimum(area[i], area[idx])
else:
o = inter / (area[i] + area[idx] - inter)
I = I[np.where(o <= threshold)]
pick = pick[:counter].copy()
return pick
def batched_nms_numpy(boxes, scores, idxs, threshold, method):
device = boxes.device
if boxes.numel() == 0:
return torch.empty((0,), dtype=torch.int64, device=device)
# strategy: in order to perform NMS independently per class.
# we add an offset to all the boxes. The offset is dependent
# only on the class idx, and is large enough so that boxes
# from different classes do not overlap
max_coordinate = boxes.max()
offsets = idxs.to(boxes) * (max_coordinate + 1)
boxes_for_nms = boxes + offsets[:, None]
boxes_for_nms = boxes_for_nms.cpu().numpy()
scores = scores.cpu().numpy()
keep = nms_numpy(boxes_for_nms, scores, threshold, method)
return torch.as_tensor(keep, dtype=torch.long, device=device)
def pad(boxes, w, h):
boxes = boxes.trunc().int().cpu().numpy()
x = boxes[:, 0]
y = boxes[:, 1]
ex = boxes[:, 2]
ey = boxes[:, 3]
x[x < 1] = 1
y[y < 1] = 1
ex[ex > w] = w
ey[ey > h] = h
return y, ey, x, ex
def rerec(bboxA):
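    # grow each box into a square around its centre (side length max(w, h))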
h = bboxA[:, 3] - bboxA[:, 1]
w = bboxA[:, 2] - bboxA[:, 0]
l = torch.max(w, h)
bboxA[:, 0] = bboxA[:, 0] + w * 0.5 - l * 0.5
bboxA[:, 1] = bboxA[:, 1] + h * 0.5 - l * 0.5
bboxA[:, 2:4] = bboxA[:, :2] + l.repeat(2, 1).permute(1, 0)
return bboxA
def imresample(img, sz):
im_data = interpolate(img, size=sz, mode="area")
return im_data
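
# Minimal usage sketch (hypothetical file name; inputs must share dimensions):
#   from PIL import Image
#   mtcnn = MTCNN(device='cpu')
#   results = mtcnn.detect([Image.open('frame.jpg')])
#   for res in results:
#       if res is not None:
#           boxes, scores, landmarks = res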
|
normal
|
{
"blob_id": "865121e7eb5f9c70adf44d33d21f30c22f13ec56",
"index": 7012,
"step-1": "<mask token>\n\n\nclass MTCNN:\n\n def __init__(self, device=None, model=None):\n if device is None:\n device = 'cuda' if torch.cuda.is_available() else 'cpu'\n self.device = device\n url = 'https://github.com/deepware/dFace/raw/master/models/mtcnn.pt'\n if model is None:\n model = torch.hub.load_state_dict_from_url(url)\n else:\n model = torch.load(model, map_location=device)\n self.pnet = PNet().to(device)\n self.rnet = RNet().to(device)\n self.onet = ONet().to(device)\n self.pnet.load_state_dict(model['pnet'])\n self.rnet.load_state_dict(model['rnet'])\n self.onet.load_state_dict(model['onet'])\n\n def detect(self, imgs, minsize=None):\n if len(imgs) == 0:\n return []\n if isinstance(imgs[0], np.ndarray):\n h, w = imgs[0].shape[:2]\n else:\n w, h = imgs[0].size\n if minsize is None:\n minsize = max(96 * min(w, h) / 1080, 40)\n boxes, points = [], []\n with torch.no_grad():\n batches = [imgs[i:i + 10] for i in range(0, len(imgs), 10)]\n for batch in batches:\n batch_boxes, batch_points = detect_face(batch, minsize,\n self.pnet, self.rnet, self.onet, [0.7, 0.8, 0.9], 0.709,\n self.device)\n boxes += list(batch_boxes)\n points += list(batch_points)\n result = []\n for box, point in zip(boxes, points):\n box = np.array(box)\n point = np.array(point)\n if len(box) == 0:\n result.append(None)\n else:\n result.append((box[:, :4], box[:, 4], point))\n return result\n\n\n<mask token>\n\n\nclass PNet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 10, kernel_size=3)\n self.prelu1 = nn.PReLU(10)\n self.pool1 = nn.MaxPool2d(2, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(10, 16, kernel_size=3)\n self.prelu2 = nn.PReLU(16)\n self.conv3 = nn.Conv2d(16, 32, kernel_size=3)\n self.prelu3 = nn.PReLU(32)\n self.conv4_1 = nn.Conv2d(32, 2, kernel_size=1)\n self.softmax4_1 = nn.Softmax(dim=1)\n self.conv4_2 = nn.Conv2d(32, 4, kernel_size=1)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n a = self.conv4_1(x)\n a = self.softmax4_1(a)\n b = self.conv4_2(x)\n return b, a\n\n\nclass RNet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 28, kernel_size=3)\n self.prelu1 = nn.PReLU(28)\n self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(28, 48, kernel_size=3)\n self.prelu2 = nn.PReLU(48)\n self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv3 = nn.Conv2d(48, 64, kernel_size=2)\n self.prelu3 = nn.PReLU(64)\n self.dense4 = nn.Linear(576, 128)\n self.prelu4 = nn.PReLU(128)\n self.dense5_1 = nn.Linear(128, 2)\n self.softmax5_1 = nn.Softmax(dim=1)\n self.dense5_2 = nn.Linear(128, 4)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.pool2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n x = x.permute(0, 3, 2, 1).contiguous()\n x = self.dense4(x.view(x.shape[0], -1))\n x = self.prelu4(x)\n a = self.dense5_1(x)\n a = self.softmax5_1(a)\n b = self.dense5_2(x)\n return b, a\n\n\nclass ONet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 32, kernel_size=3)\n self.prelu1 = nn.PReLU(32)\n self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(32, 64, kernel_size=3)\n self.prelu2 = nn.PReLU(64)\n self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv3 = nn.Conv2d(64, 64, kernel_size=3)\n self.prelu3 = nn.PReLU(64)\n self.pool3 = nn.MaxPool2d(2, 2, 
ceil_mode=True)\n self.conv4 = nn.Conv2d(64, 128, kernel_size=2)\n self.prelu4 = nn.PReLU(128)\n self.dense5 = nn.Linear(1152, 256)\n self.prelu5 = nn.PReLU(256)\n self.dense6_1 = nn.Linear(256, 2)\n self.softmax6_1 = nn.Softmax(dim=1)\n self.dense6_2 = nn.Linear(256, 4)\n self.dense6_3 = nn.Linear(256, 10)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.pool2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n x = self.pool3(x)\n x = self.conv4(x)\n x = self.prelu4(x)\n x = x.permute(0, 3, 2, 1).contiguous()\n x = self.dense5(x.view(x.shape[0], -1))\n x = self.prelu5(x)\n a = self.dense6_1(x)\n a = self.softmax6_1(a)\n b = self.dense6_2(x)\n c = self.dense6_3(x)\n return b, c, a\n\n\n<mask token>\n\n\ndef bbreg(boundingbox, reg):\n if reg.shape[1] == 1:\n reg = torch.reshape(reg, (reg.shape[2], reg.shape[3]))\n w = boundingbox[:, 2] - boundingbox[:, 0] + 1\n h = boundingbox[:, 3] - boundingbox[:, 1] + 1\n b1 = boundingbox[:, 0] + reg[:, 0] * w\n b2 = boundingbox[:, 1] + reg[:, 1] * h\n b3 = boundingbox[:, 2] + reg[:, 2] * w\n b4 = boundingbox[:, 3] + reg[:, 3] * h\n boundingbox[:, :4] = torch.stack([b1, b2, b3, b4]).permute(1, 0)\n return boundingbox\n\n\ndef generateBoundingBox(reg, probs, scale, thresh):\n stride = 2\n cellsize = 12\n reg = reg.permute(1, 0, 2, 3)\n mask = probs >= thresh\n mask_inds = mask.nonzero(as_tuple=False)\n image_inds = mask_inds[:, 0]\n score = probs[mask]\n reg = reg[:, mask].permute(1, 0)\n bb = mask_inds[:, 1:].type(reg.dtype).flip(1)\n q1 = ((stride * bb + 1) / scale).floor()\n q2 = ((stride * bb + cellsize - 1 + 1) / scale).floor()\n boundingbox = torch.cat([q1, q2, score.unsqueeze(1), reg], dim=1)\n return boundingbox, image_inds\n\n\ndef nms_numpy(boxes, scores, threshold, method):\n if boxes.size == 0:\n return np.empty((0, 3))\n x1 = boxes[:, 0].copy()\n y1 = boxes[:, 1].copy()\n x2 = boxes[:, 2].copy()\n y2 = boxes[:, 3].copy()\n s = scores\n area = (x2 - x1 + 1) * (y2 - y1 + 1)\n I = np.argsort(s)\n pick = np.zeros_like(s, dtype=np.int16)\n counter = 0\n while I.size > 0:\n i = I[-1]\n pick[counter] = i\n counter += 1\n idx = I[0:-1]\n xx1 = np.maximum(x1[i], x1[idx]).copy()\n yy1 = np.maximum(y1[i], y1[idx]).copy()\n xx2 = np.minimum(x2[i], x2[idx]).copy()\n yy2 = np.minimum(y2[i], y2[idx]).copy()\n w = np.maximum(0.0, xx2 - xx1 + 1).copy()\n h = np.maximum(0.0, yy2 - yy1 + 1).copy()\n inter = w * h\n if method == 'Min':\n o = inter / np.minimum(area[i], area[idx])\n else:\n o = inter / (area[i] + area[idx] - inter)\n I = I[np.where(o <= threshold)]\n pick = pick[:counter].copy()\n return pick\n\n\ndef batched_nms_numpy(boxes, scores, idxs, threshold, method):\n device = boxes.device\n if boxes.numel() == 0:\n return torch.empty((0,), dtype=torch.int64, device=device)\n max_coordinate = boxes.max()\n offsets = idxs.to(boxes) * (max_coordinate + 1)\n boxes_for_nms = boxes + offsets[:, None]\n boxes_for_nms = boxes_for_nms.cpu().numpy()\n scores = scores.cpu().numpy()\n keep = nms_numpy(boxes_for_nms, scores, threshold, method)\n return torch.as_tensor(keep, dtype=torch.long, device=device)\n\n\n<mask token>\n\n\ndef imresample(img, sz):\n im_data = interpolate(img, size=sz, mode='area')\n return im_data\n",
"step-2": "<mask token>\n\n\nclass MTCNN:\n\n def __init__(self, device=None, model=None):\n if device is None:\n device = 'cuda' if torch.cuda.is_available() else 'cpu'\n self.device = device\n url = 'https://github.com/deepware/dFace/raw/master/models/mtcnn.pt'\n if model is None:\n model = torch.hub.load_state_dict_from_url(url)\n else:\n model = torch.load(model, map_location=device)\n self.pnet = PNet().to(device)\n self.rnet = RNet().to(device)\n self.onet = ONet().to(device)\n self.pnet.load_state_dict(model['pnet'])\n self.rnet.load_state_dict(model['rnet'])\n self.onet.load_state_dict(model['onet'])\n\n def detect(self, imgs, minsize=None):\n if len(imgs) == 0:\n return []\n if isinstance(imgs[0], np.ndarray):\n h, w = imgs[0].shape[:2]\n else:\n w, h = imgs[0].size\n if minsize is None:\n minsize = max(96 * min(w, h) / 1080, 40)\n boxes, points = [], []\n with torch.no_grad():\n batches = [imgs[i:i + 10] for i in range(0, len(imgs), 10)]\n for batch in batches:\n batch_boxes, batch_points = detect_face(batch, minsize,\n self.pnet, self.rnet, self.onet, [0.7, 0.8, 0.9], 0.709,\n self.device)\n boxes += list(batch_boxes)\n points += list(batch_points)\n result = []\n for box, point in zip(boxes, points):\n box = np.array(box)\n point = np.array(point)\n if len(box) == 0:\n result.append(None)\n else:\n result.append((box[:, :4], box[:, 4], point))\n return result\n\n\n<mask token>\n\n\nclass PNet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 10, kernel_size=3)\n self.prelu1 = nn.PReLU(10)\n self.pool1 = nn.MaxPool2d(2, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(10, 16, kernel_size=3)\n self.prelu2 = nn.PReLU(16)\n self.conv3 = nn.Conv2d(16, 32, kernel_size=3)\n self.prelu3 = nn.PReLU(32)\n self.conv4_1 = nn.Conv2d(32, 2, kernel_size=1)\n self.softmax4_1 = nn.Softmax(dim=1)\n self.conv4_2 = nn.Conv2d(32, 4, kernel_size=1)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n a = self.conv4_1(x)\n a = self.softmax4_1(a)\n b = self.conv4_2(x)\n return b, a\n\n\nclass RNet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 28, kernel_size=3)\n self.prelu1 = nn.PReLU(28)\n self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(28, 48, kernel_size=3)\n self.prelu2 = nn.PReLU(48)\n self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv3 = nn.Conv2d(48, 64, kernel_size=2)\n self.prelu3 = nn.PReLU(64)\n self.dense4 = nn.Linear(576, 128)\n self.prelu4 = nn.PReLU(128)\n self.dense5_1 = nn.Linear(128, 2)\n self.softmax5_1 = nn.Softmax(dim=1)\n self.dense5_2 = nn.Linear(128, 4)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.pool2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n x = x.permute(0, 3, 2, 1).contiguous()\n x = self.dense4(x.view(x.shape[0], -1))\n x = self.prelu4(x)\n a = self.dense5_1(x)\n a = self.softmax5_1(a)\n b = self.dense5_2(x)\n return b, a\n\n\nclass ONet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 32, kernel_size=3)\n self.prelu1 = nn.PReLU(32)\n self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(32, 64, kernel_size=3)\n self.prelu2 = nn.PReLU(64)\n self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv3 = nn.Conv2d(64, 64, kernel_size=3)\n self.prelu3 = nn.PReLU(64)\n self.pool3 = nn.MaxPool2d(2, 2, 
ceil_mode=True)\n self.conv4 = nn.Conv2d(64, 128, kernel_size=2)\n self.prelu4 = nn.PReLU(128)\n self.dense5 = nn.Linear(1152, 256)\n self.prelu5 = nn.PReLU(256)\n self.dense6_1 = nn.Linear(256, 2)\n self.softmax6_1 = nn.Softmax(dim=1)\n self.dense6_2 = nn.Linear(256, 4)\n self.dense6_3 = nn.Linear(256, 10)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.pool2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n x = self.pool3(x)\n x = self.conv4(x)\n x = self.prelu4(x)\n x = x.permute(0, 3, 2, 1).contiguous()\n x = self.dense5(x.view(x.shape[0], -1))\n x = self.prelu5(x)\n a = self.dense6_1(x)\n a = self.softmax6_1(a)\n b = self.dense6_2(x)\n c = self.dense6_3(x)\n return b, c, a\n\n\n<mask token>\n\n\ndef bbreg(boundingbox, reg):\n if reg.shape[1] == 1:\n reg = torch.reshape(reg, (reg.shape[2], reg.shape[3]))\n w = boundingbox[:, 2] - boundingbox[:, 0] + 1\n h = boundingbox[:, 3] - boundingbox[:, 1] + 1\n b1 = boundingbox[:, 0] + reg[:, 0] * w\n b2 = boundingbox[:, 1] + reg[:, 1] * h\n b3 = boundingbox[:, 2] + reg[:, 2] * w\n b4 = boundingbox[:, 3] + reg[:, 3] * h\n boundingbox[:, :4] = torch.stack([b1, b2, b3, b4]).permute(1, 0)\n return boundingbox\n\n\ndef generateBoundingBox(reg, probs, scale, thresh):\n stride = 2\n cellsize = 12\n reg = reg.permute(1, 0, 2, 3)\n mask = probs >= thresh\n mask_inds = mask.nonzero(as_tuple=False)\n image_inds = mask_inds[:, 0]\n score = probs[mask]\n reg = reg[:, mask].permute(1, 0)\n bb = mask_inds[:, 1:].type(reg.dtype).flip(1)\n q1 = ((stride * bb + 1) / scale).floor()\n q2 = ((stride * bb + cellsize - 1 + 1) / scale).floor()\n boundingbox = torch.cat([q1, q2, score.unsqueeze(1), reg], dim=1)\n return boundingbox, image_inds\n\n\ndef nms_numpy(boxes, scores, threshold, method):\n if boxes.size == 0:\n return np.empty((0, 3))\n x1 = boxes[:, 0].copy()\n y1 = boxes[:, 1].copy()\n x2 = boxes[:, 2].copy()\n y2 = boxes[:, 3].copy()\n s = scores\n area = (x2 - x1 + 1) * (y2 - y1 + 1)\n I = np.argsort(s)\n pick = np.zeros_like(s, dtype=np.int16)\n counter = 0\n while I.size > 0:\n i = I[-1]\n pick[counter] = i\n counter += 1\n idx = I[0:-1]\n xx1 = np.maximum(x1[i], x1[idx]).copy()\n yy1 = np.maximum(y1[i], y1[idx]).copy()\n xx2 = np.minimum(x2[i], x2[idx]).copy()\n yy2 = np.minimum(y2[i], y2[idx]).copy()\n w = np.maximum(0.0, xx2 - xx1 + 1).copy()\n h = np.maximum(0.0, yy2 - yy1 + 1).copy()\n inter = w * h\n if method == 'Min':\n o = inter / np.minimum(area[i], area[idx])\n else:\n o = inter / (area[i] + area[idx] - inter)\n I = I[np.where(o <= threshold)]\n pick = pick[:counter].copy()\n return pick\n\n\ndef batched_nms_numpy(boxes, scores, idxs, threshold, method):\n device = boxes.device\n if boxes.numel() == 0:\n return torch.empty((0,), dtype=torch.int64, device=device)\n max_coordinate = boxes.max()\n offsets = idxs.to(boxes) * (max_coordinate + 1)\n boxes_for_nms = boxes + offsets[:, None]\n boxes_for_nms = boxes_for_nms.cpu().numpy()\n scores = scores.cpu().numpy()\n keep = nms_numpy(boxes_for_nms, scores, threshold, method)\n return torch.as_tensor(keep, dtype=torch.long, device=device)\n\n\ndef pad(boxes, w, h):\n boxes = boxes.trunc().int().cpu().numpy()\n x = boxes[:, 0]\n y = boxes[:, 1]\n ex = boxes[:, 2]\n ey = boxes[:, 3]\n x[x < 1] = 1\n y[y < 1] = 1\n ex[ex > w] = w\n ey[ey > h] = h\n return y, ey, x, ex\n\n\n<mask token>\n\n\ndef imresample(img, sz):\n im_data = interpolate(img, size=sz, mode='area')\n return im_data\n",
"step-3": "<mask token>\n\n\nclass MTCNN:\n\n def __init__(self, device=None, model=None):\n if device is None:\n device = 'cuda' if torch.cuda.is_available() else 'cpu'\n self.device = device\n url = 'https://github.com/deepware/dFace/raw/master/models/mtcnn.pt'\n if model is None:\n model = torch.hub.load_state_dict_from_url(url)\n else:\n model = torch.load(model, map_location=device)\n self.pnet = PNet().to(device)\n self.rnet = RNet().to(device)\n self.onet = ONet().to(device)\n self.pnet.load_state_dict(model['pnet'])\n self.rnet.load_state_dict(model['rnet'])\n self.onet.load_state_dict(model['onet'])\n\n def detect(self, imgs, minsize=None):\n if len(imgs) == 0:\n return []\n if isinstance(imgs[0], np.ndarray):\n h, w = imgs[0].shape[:2]\n else:\n w, h = imgs[0].size\n if minsize is None:\n minsize = max(96 * min(w, h) / 1080, 40)\n boxes, points = [], []\n with torch.no_grad():\n batches = [imgs[i:i + 10] for i in range(0, len(imgs), 10)]\n for batch in batches:\n batch_boxes, batch_points = detect_face(batch, minsize,\n self.pnet, self.rnet, self.onet, [0.7, 0.8, 0.9], 0.709,\n self.device)\n boxes += list(batch_boxes)\n points += list(batch_points)\n result = []\n for box, point in zip(boxes, points):\n box = np.array(box)\n point = np.array(point)\n if len(box) == 0:\n result.append(None)\n else:\n result.append((box[:, :4], box[:, 4], point))\n return result\n\n\ndef empty_cache(device):\n if 'cuda' in device:\n with torch.cuda.device(device):\n torch.cuda.empty_cache()\n\n\nclass PNet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 10, kernel_size=3)\n self.prelu1 = nn.PReLU(10)\n self.pool1 = nn.MaxPool2d(2, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(10, 16, kernel_size=3)\n self.prelu2 = nn.PReLU(16)\n self.conv3 = nn.Conv2d(16, 32, kernel_size=3)\n self.prelu3 = nn.PReLU(32)\n self.conv4_1 = nn.Conv2d(32, 2, kernel_size=1)\n self.softmax4_1 = nn.Softmax(dim=1)\n self.conv4_2 = nn.Conv2d(32, 4, kernel_size=1)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n a = self.conv4_1(x)\n a = self.softmax4_1(a)\n b = self.conv4_2(x)\n return b, a\n\n\nclass RNet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 28, kernel_size=3)\n self.prelu1 = nn.PReLU(28)\n self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(28, 48, kernel_size=3)\n self.prelu2 = nn.PReLU(48)\n self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv3 = nn.Conv2d(48, 64, kernel_size=2)\n self.prelu3 = nn.PReLU(64)\n self.dense4 = nn.Linear(576, 128)\n self.prelu4 = nn.PReLU(128)\n self.dense5_1 = nn.Linear(128, 2)\n self.softmax5_1 = nn.Softmax(dim=1)\n self.dense5_2 = nn.Linear(128, 4)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.pool2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n x = x.permute(0, 3, 2, 1).contiguous()\n x = self.dense4(x.view(x.shape[0], -1))\n x = self.prelu4(x)\n a = self.dense5_1(x)\n a = self.softmax5_1(a)\n b = self.dense5_2(x)\n return b, a\n\n\nclass ONet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 32, kernel_size=3)\n self.prelu1 = nn.PReLU(32)\n self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(32, 64, kernel_size=3)\n self.prelu2 = nn.PReLU(64)\n self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv3 = 
nn.Conv2d(64, 64, kernel_size=3)\n self.prelu3 = nn.PReLU(64)\n self.pool3 = nn.MaxPool2d(2, 2, ceil_mode=True)\n self.conv4 = nn.Conv2d(64, 128, kernel_size=2)\n self.prelu4 = nn.PReLU(128)\n self.dense5 = nn.Linear(1152, 256)\n self.prelu5 = nn.PReLU(256)\n self.dense6_1 = nn.Linear(256, 2)\n self.softmax6_1 = nn.Softmax(dim=1)\n self.dense6_2 = nn.Linear(256, 4)\n self.dense6_3 = nn.Linear(256, 10)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.pool2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n x = self.pool3(x)\n x = self.conv4(x)\n x = self.prelu4(x)\n x = x.permute(0, 3, 2, 1).contiguous()\n x = self.dense5(x.view(x.shape[0], -1))\n x = self.prelu5(x)\n a = self.dense6_1(x)\n a = self.softmax6_1(a)\n b = self.dense6_2(x)\n c = self.dense6_3(x)\n return b, c, a\n\n\n<mask token>\n\n\ndef bbreg(boundingbox, reg):\n if reg.shape[1] == 1:\n reg = torch.reshape(reg, (reg.shape[2], reg.shape[3]))\n w = boundingbox[:, 2] - boundingbox[:, 0] + 1\n h = boundingbox[:, 3] - boundingbox[:, 1] + 1\n b1 = boundingbox[:, 0] + reg[:, 0] * w\n b2 = boundingbox[:, 1] + reg[:, 1] * h\n b3 = boundingbox[:, 2] + reg[:, 2] * w\n b4 = boundingbox[:, 3] + reg[:, 3] * h\n boundingbox[:, :4] = torch.stack([b1, b2, b3, b4]).permute(1, 0)\n return boundingbox\n\n\ndef generateBoundingBox(reg, probs, scale, thresh):\n stride = 2\n cellsize = 12\n reg = reg.permute(1, 0, 2, 3)\n mask = probs >= thresh\n mask_inds = mask.nonzero(as_tuple=False)\n image_inds = mask_inds[:, 0]\n score = probs[mask]\n reg = reg[:, mask].permute(1, 0)\n bb = mask_inds[:, 1:].type(reg.dtype).flip(1)\n q1 = ((stride * bb + 1) / scale).floor()\n q2 = ((stride * bb + cellsize - 1 + 1) / scale).floor()\n boundingbox = torch.cat([q1, q2, score.unsqueeze(1), reg], dim=1)\n return boundingbox, image_inds\n\n\ndef nms_numpy(boxes, scores, threshold, method):\n if boxes.size == 0:\n return np.empty((0, 3))\n x1 = boxes[:, 0].copy()\n y1 = boxes[:, 1].copy()\n x2 = boxes[:, 2].copy()\n y2 = boxes[:, 3].copy()\n s = scores\n area = (x2 - x1 + 1) * (y2 - y1 + 1)\n I = np.argsort(s)\n pick = np.zeros_like(s, dtype=np.int16)\n counter = 0\n while I.size > 0:\n i = I[-1]\n pick[counter] = i\n counter += 1\n idx = I[0:-1]\n xx1 = np.maximum(x1[i], x1[idx]).copy()\n yy1 = np.maximum(y1[i], y1[idx]).copy()\n xx2 = np.minimum(x2[i], x2[idx]).copy()\n yy2 = np.minimum(y2[i], y2[idx]).copy()\n w = np.maximum(0.0, xx2 - xx1 + 1).copy()\n h = np.maximum(0.0, yy2 - yy1 + 1).copy()\n inter = w * h\n if method == 'Min':\n o = inter / np.minimum(area[i], area[idx])\n else:\n o = inter / (area[i] + area[idx] - inter)\n I = I[np.where(o <= threshold)]\n pick = pick[:counter].copy()\n return pick\n\n\ndef batched_nms_numpy(boxes, scores, idxs, threshold, method):\n device = boxes.device\n if boxes.numel() == 0:\n return torch.empty((0,), dtype=torch.int64, device=device)\n max_coordinate = boxes.max()\n offsets = idxs.to(boxes) * (max_coordinate + 1)\n boxes_for_nms = boxes + offsets[:, None]\n boxes_for_nms = boxes_for_nms.cpu().numpy()\n scores = scores.cpu().numpy()\n keep = nms_numpy(boxes_for_nms, scores, threshold, method)\n return torch.as_tensor(keep, dtype=torch.long, device=device)\n\n\ndef pad(boxes, w, h):\n boxes = boxes.trunc().int().cpu().numpy()\n x = boxes[:, 0]\n y = boxes[:, 1]\n ex = boxes[:, 2]\n ey = boxes[:, 3]\n x[x < 1] = 1\n y[y < 1] = 1\n ex[ex > w] = w\n ey[ey > h] = h\n return y, ey, x, ex\n\n\n<mask token>\n\n\ndef imresample(img, sz):\n 
im_data = interpolate(img, size=sz, mode='area')\n return im_data\n",
"step-4": "<mask token>\n\n\nclass MTCNN:\n\n def __init__(self, device=None, model=None):\n if device is None:\n device = 'cuda' if torch.cuda.is_available() else 'cpu'\n self.device = device\n url = 'https://github.com/deepware/dFace/raw/master/models/mtcnn.pt'\n if model is None:\n model = torch.hub.load_state_dict_from_url(url)\n else:\n model = torch.load(model, map_location=device)\n self.pnet = PNet().to(device)\n self.rnet = RNet().to(device)\n self.onet = ONet().to(device)\n self.pnet.load_state_dict(model['pnet'])\n self.rnet.load_state_dict(model['rnet'])\n self.onet.load_state_dict(model['onet'])\n\n def detect(self, imgs, minsize=None):\n if len(imgs) == 0:\n return []\n if isinstance(imgs[0], np.ndarray):\n h, w = imgs[0].shape[:2]\n else:\n w, h = imgs[0].size\n if minsize is None:\n minsize = max(96 * min(w, h) / 1080, 40)\n boxes, points = [], []\n with torch.no_grad():\n batches = [imgs[i:i + 10] for i in range(0, len(imgs), 10)]\n for batch in batches:\n batch_boxes, batch_points = detect_face(batch, minsize,\n self.pnet, self.rnet, self.onet, [0.7, 0.8, 0.9], 0.709,\n self.device)\n boxes += list(batch_boxes)\n points += list(batch_points)\n result = []\n for box, point in zip(boxes, points):\n box = np.array(box)\n point = np.array(point)\n if len(box) == 0:\n result.append(None)\n else:\n result.append((box[:, :4], box[:, 4], point))\n return result\n\n\ndef empty_cache(device):\n if 'cuda' in device:\n with torch.cuda.device(device):\n torch.cuda.empty_cache()\n\n\nclass PNet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 10, kernel_size=3)\n self.prelu1 = nn.PReLU(10)\n self.pool1 = nn.MaxPool2d(2, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(10, 16, kernel_size=3)\n self.prelu2 = nn.PReLU(16)\n self.conv3 = nn.Conv2d(16, 32, kernel_size=3)\n self.prelu3 = nn.PReLU(32)\n self.conv4_1 = nn.Conv2d(32, 2, kernel_size=1)\n self.softmax4_1 = nn.Softmax(dim=1)\n self.conv4_2 = nn.Conv2d(32, 4, kernel_size=1)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n a = self.conv4_1(x)\n a = self.softmax4_1(a)\n b = self.conv4_2(x)\n return b, a\n\n\nclass RNet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 28, kernel_size=3)\n self.prelu1 = nn.PReLU(28)\n self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(28, 48, kernel_size=3)\n self.prelu2 = nn.PReLU(48)\n self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv3 = nn.Conv2d(48, 64, kernel_size=2)\n self.prelu3 = nn.PReLU(64)\n self.dense4 = nn.Linear(576, 128)\n self.prelu4 = nn.PReLU(128)\n self.dense5_1 = nn.Linear(128, 2)\n self.softmax5_1 = nn.Softmax(dim=1)\n self.dense5_2 = nn.Linear(128, 4)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.pool2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n x = x.permute(0, 3, 2, 1).contiguous()\n x = self.dense4(x.view(x.shape[0], -1))\n x = self.prelu4(x)\n a = self.dense5_1(x)\n a = self.softmax5_1(a)\n b = self.dense5_2(x)\n return b, a\n\n\nclass ONet(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 32, kernel_size=3)\n self.prelu1 = nn.PReLU(32)\n self.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv2 = nn.Conv2d(32, 64, kernel_size=3)\n self.prelu2 = nn.PReLU(64)\n self.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n self.conv3 = 
nn.Conv2d(64, 64, kernel_size=3)\n self.prelu3 = nn.PReLU(64)\n self.pool3 = nn.MaxPool2d(2, 2, ceil_mode=True)\n self.conv4 = nn.Conv2d(64, 128, kernel_size=2)\n self.prelu4 = nn.PReLU(128)\n self.dense5 = nn.Linear(1152, 256)\n self.prelu5 = nn.PReLU(256)\n self.dense6_1 = nn.Linear(256, 2)\n self.softmax6_1 = nn.Softmax(dim=1)\n self.dense6_2 = nn.Linear(256, 4)\n self.dense6_3 = nn.Linear(256, 10)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.prelu1(x)\n x = self.pool1(x)\n x = self.conv2(x)\n x = self.prelu2(x)\n x = self.pool2(x)\n x = self.conv3(x)\n x = self.prelu3(x)\n x = self.pool3(x)\n x = self.conv4(x)\n x = self.prelu4(x)\n x = x.permute(0, 3, 2, 1).contiguous()\n x = self.dense5(x.view(x.shape[0], -1))\n x = self.prelu5(x)\n a = self.dense6_1(x)\n a = self.softmax6_1(a)\n b = self.dense6_2(x)\n c = self.dense6_3(x)\n return b, c, a\n\n\ndef detect_face(imgs, minsize, pnet, rnet, onet, threshold, factor, device):\n if isinstance(imgs, (np.ndarray, torch.Tensor)):\n imgs = torch.as_tensor(imgs, device=device)\n if len(imgs.shape) == 3:\n imgs = imgs.unsqueeze(0)\n else:\n if not isinstance(imgs, (list, tuple)):\n imgs = [imgs]\n if any(img.size != imgs[0].size for img in imgs):\n raise Exception(\n 'MTCNN batch processing only compatible with equal-dimension images.'\n )\n imgs = np.stack([np.uint8(img) for img in imgs])\n imgs = torch.as_tensor(imgs, device=device)\n model_dtype = next(pnet.parameters()).dtype\n imgs = imgs.permute(0, 3, 1, 2).type(model_dtype)\n batch_size = len(imgs)\n h, w = imgs.shape[2:4]\n m = 12.0 / minsize\n minl = min(h, w)\n minl = minl * m\n scale_i = m\n scales = []\n while minl >= 12:\n scales.append(scale_i)\n scale_i = scale_i * factor\n minl = minl * factor\n boxes = []\n image_inds = []\n all_inds = []\n all_i = 0\n for scale in scales:\n im_data = imresample(imgs, (int(h * scale + 1), int(w * scale + 1)))\n im_data = (im_data - 127.5) * 0.0078125\n reg, probs = pnet(im_data)\n empty_cache(device)\n boxes_scale, image_inds_scale = generateBoundingBox(reg, probs[:, 1\n ], scale, threshold[0])\n boxes.append(boxes_scale)\n image_inds.append(image_inds_scale)\n all_inds.append(all_i + image_inds_scale)\n all_i += batch_size\n boxes = torch.cat(boxes, dim=0)\n image_inds = torch.cat(image_inds, dim=0).cpu()\n all_inds = torch.cat(all_inds, dim=0)\n pick = batched_nms(boxes[:, :4], boxes[:, 4], all_inds, 0.5)\n boxes, image_inds = boxes[pick], image_inds[pick]\n pick = batched_nms(boxes[:, :4], boxes[:, 4], image_inds, 0.7)\n boxes, image_inds = boxes[pick], image_inds[pick]\n regw = boxes[:, 2] - boxes[:, 0]\n regh = boxes[:, 3] - boxes[:, 1]\n qq1 = boxes[:, 0] + boxes[:, 5] * regw\n qq2 = boxes[:, 1] + boxes[:, 6] * regh\n qq3 = boxes[:, 2] + boxes[:, 7] * regw\n qq4 = boxes[:, 3] + boxes[:, 8] * regh\n boxes = torch.stack([qq1, qq2, qq3, qq4, boxes[:, 4]]).permute(1, 0)\n boxes = rerec(boxes)\n y, ey, x, ex = pad(boxes, w, h)\n if len(boxes) > 0:\n im_data = []\n for k in range(len(y)):\n if ey[k] > y[k] - 1 and ex[k] > x[k] - 1:\n img_k = imgs[image_inds[k], :, y[k] - 1:ey[k], x[k] - 1:ex[k]\n ].unsqueeze(0)\n im_data.append(imresample(img_k, (24, 24)))\n im_data = torch.cat(im_data, dim=0)\n im_data = (im_data - 127.5) * 0.0078125\n out = []\n for batch in im_data.split(2000):\n out += [rnet(batch)]\n z = list(zip(*out))\n out = torch.cat(z[0]), torch.cat(z[1])\n empty_cache(device)\n out0 = out[0].permute(1, 0)\n out1 = out[1].permute(1, 0)\n score = out1[1, :]\n ipass = score > threshold[1]\n boxes = torch.cat((boxes[ipass, :4], 
score[ipass].unsqueeze(1)), dim=1)\n image_inds = image_inds[ipass]\n mv = out0[:, ipass].permute(1, 0)\n pick = batched_nms(boxes[:, :4], boxes[:, 4], image_inds, 0.7)\n boxes, image_inds, mv = boxes[pick], image_inds[pick], mv[pick]\n boxes = bbreg(boxes, mv)\n boxes = rerec(boxes)\n points = torch.zeros(0, 5, 2, device=device)\n if len(boxes) > 0:\n y, ey, x, ex = pad(boxes, w, h)\n im_data = []\n for k in range(len(y)):\n if ey[k] > y[k] - 1 and ex[k] > x[k] - 1:\n img_k = imgs[image_inds[k], :, y[k] - 1:ey[k], x[k] - 1:ex[k]\n ].unsqueeze(0)\n im_data.append(imresample(img_k, (48, 48)))\n im_data = torch.cat(im_data, dim=0)\n im_data = (im_data - 127.5) * 0.0078125\n out = []\n for batch in im_data.split(500):\n out += [onet(batch)]\n z = list(zip(*out))\n out = torch.cat(z[0]), torch.cat(z[1]), torch.cat(z[2])\n empty_cache(device)\n out0 = out[0].permute(1, 0)\n out1 = out[1].permute(1, 0)\n out2 = out[2].permute(1, 0)\n score = out2[1, :]\n points = out1\n ipass = score > threshold[2]\n points = points[:, ipass]\n boxes = torch.cat((boxes[ipass, :4], score[ipass].unsqueeze(1)), dim=1)\n image_inds = image_inds[ipass]\n mv = out0[:, ipass].permute(1, 0)\n w_i = boxes[:, 2] - boxes[:, 0] + 1\n h_i = boxes[:, 3] - boxes[:, 1] + 1\n points_x = w_i.repeat(5, 1) * points[:5, :] + boxes[:, 0].repeat(5, 1\n ) - 1\n points_y = h_i.repeat(5, 1) * points[5:10, :] + boxes[:, 1].repeat(5, 1\n ) - 1\n points = torch.stack((points_x, points_y)).permute(2, 1, 0)\n boxes = bbreg(boxes, mv)\n pick = batched_nms_numpy(boxes[:, :4], boxes[:, 4], image_inds, 0.7,\n 'Min')\n boxes, image_inds, points = boxes[pick], image_inds[pick], points[pick]\n boxes = boxes.cpu().numpy()\n points = points.cpu().numpy()\n batch_boxes = []\n batch_points = []\n for b_i in range(batch_size):\n b_i_inds = np.where(image_inds == b_i)\n batch_boxes.append(boxes[b_i_inds].copy())\n batch_points.append(points[b_i_inds].copy())\n batch_boxes, batch_points = np.array(batch_boxes), np.array(batch_points)\n empty_cache(device)\n return batch_boxes, batch_points\n\n\ndef bbreg(boundingbox, reg):\n if reg.shape[1] == 1:\n reg = torch.reshape(reg, (reg.shape[2], reg.shape[3]))\n w = boundingbox[:, 2] - boundingbox[:, 0] + 1\n h = boundingbox[:, 3] - boundingbox[:, 1] + 1\n b1 = boundingbox[:, 0] + reg[:, 0] * w\n b2 = boundingbox[:, 1] + reg[:, 1] * h\n b3 = boundingbox[:, 2] + reg[:, 2] * w\n b4 = boundingbox[:, 3] + reg[:, 3] * h\n boundingbox[:, :4] = torch.stack([b1, b2, b3, b4]).permute(1, 0)\n return boundingbox\n\n\ndef generateBoundingBox(reg, probs, scale, thresh):\n stride = 2\n cellsize = 12\n reg = reg.permute(1, 0, 2, 3)\n mask = probs >= thresh\n mask_inds = mask.nonzero(as_tuple=False)\n image_inds = mask_inds[:, 0]\n score = probs[mask]\n reg = reg[:, mask].permute(1, 0)\n bb = mask_inds[:, 1:].type(reg.dtype).flip(1)\n q1 = ((stride * bb + 1) / scale).floor()\n q2 = ((stride * bb + cellsize - 1 + 1) / scale).floor()\n boundingbox = torch.cat([q1, q2, score.unsqueeze(1), reg], dim=1)\n return boundingbox, image_inds\n\n\ndef nms_numpy(boxes, scores, threshold, method):\n if boxes.size == 0:\n return np.empty((0, 3))\n x1 = boxes[:, 0].copy()\n y1 = boxes[:, 1].copy()\n x2 = boxes[:, 2].copy()\n y2 = boxes[:, 3].copy()\n s = scores\n area = (x2 - x1 + 1) * (y2 - y1 + 1)\n I = np.argsort(s)\n pick = np.zeros_like(s, dtype=np.int16)\n counter = 0\n while I.size > 0:\n i = I[-1]\n pick[counter] = i\n counter += 1\n idx = I[0:-1]\n xx1 = np.maximum(x1[i], x1[idx]).copy()\n yy1 = np.maximum(y1[i], y1[idx]).copy()\n xx2 = 
np.minimum(x2[i], x2[idx]).copy()\n yy2 = np.minimum(y2[i], y2[idx]).copy()\n w = np.maximum(0.0, xx2 - xx1 + 1).copy()\n h = np.maximum(0.0, yy2 - yy1 + 1).copy()\n inter = w * h\n if method == 'Min':\n o = inter / np.minimum(area[i], area[idx])\n else:\n o = inter / (area[i] + area[idx] - inter)\n I = I[np.where(o <= threshold)]\n pick = pick[:counter].copy()\n return pick\n\n\ndef batched_nms_numpy(boxes, scores, idxs, threshold, method):\n device = boxes.device\n if boxes.numel() == 0:\n return torch.empty((0,), dtype=torch.int64, device=device)\n max_coordinate = boxes.max()\n offsets = idxs.to(boxes) * (max_coordinate + 1)\n boxes_for_nms = boxes + offsets[:, None]\n boxes_for_nms = boxes_for_nms.cpu().numpy()\n scores = scores.cpu().numpy()\n keep = nms_numpy(boxes_for_nms, scores, threshold, method)\n return torch.as_tensor(keep, dtype=torch.long, device=device)\n\n\ndef pad(boxes, w, h):\n boxes = boxes.trunc().int().cpu().numpy()\n x = boxes[:, 0]\n y = boxes[:, 1]\n ex = boxes[:, 2]\n ey = boxes[:, 3]\n x[x < 1] = 1\n y[y < 1] = 1\n ex[ex > w] = w\n ey[ey > h] = h\n return y, ey, x, ex\n\n\ndef rerec(bboxA):\n h = bboxA[:, 3] - bboxA[:, 1]\n w = bboxA[:, 2] - bboxA[:, 0]\n l = torch.max(w, h)\n bboxA[:, 0] = bboxA[:, 0] + w * 0.5 - l * 0.5\n bboxA[:, 1] = bboxA[:, 1] + h * 0.5 - l * 0.5\n bboxA[:, 2:4] = bboxA[:, :2] + l.repeat(2, 1).permute(1, 0)\n return bboxA\n\n\ndef imresample(img, sz):\n im_data = interpolate(img, size=sz, mode='area')\n return im_data\n",
"step-5": "import numpy as np\nimport torch\nimport torch.nn as nn\nfrom torch.nn.functional import interpolate\nfrom torchvision.ops.boxes import batched_nms\n\n\nclass MTCNN():\n\tdef __init__(self, device=None, model=None):\n\t\tif device is None:\n\t\t\tdevice = 'cuda' if torch.cuda.is_available() else 'cpu'\n\t\tself.device = device\n\n\t\turl = 'https://github.com/deepware/dFace/raw/master/models/mtcnn.pt'\n\t\tif model is None:\n\t\t\tmodel = torch.hub.load_state_dict_from_url(url)\n\t\telse:\n\t\t\tmodel = torch.load(model, map_location=device)\n\n\t\tself.pnet = PNet().to(device)\n\t\tself.rnet = RNet().to(device)\n\t\tself.onet = ONet().to(device)\n\n\t\tself.pnet.load_state_dict(model['pnet'])\n\t\tself.rnet.load_state_dict(model['rnet'])\n\t\tself.onet.load_state_dict(model['onet'])\n\n\n\tdef detect(self, imgs, minsize=None):\n\t\tif len(imgs) == 0:\n\t\t\treturn []\n\n\t\tif isinstance(imgs[0], np.ndarray):\n\t\t\th, w = imgs[0].shape[:2]\n\t\telse:\n\t\t\tw, h = imgs[0].size\n\n\t\tif minsize is None:\n\t\t\tminsize = max(96 * min(w, h)/1080, 40)\n\n\t\tboxes, points = [], []\n\n\t\twith torch.no_grad():\n\t\t\tbatches = [imgs[i:i+10] for i in range(0, len(imgs), 10)]\n\t\t\tfor batch in batches:\n\t\t\t\tbatch_boxes, batch_points = detect_face(\n\t\t\t\t\tbatch, minsize, self.pnet, self.rnet, self.onet,\n\t\t\t\t\t[0.7, 0.8, 0.9], 0.709, self.device)\n\t\t\t\tboxes += list(batch_boxes)\n\t\t\t\tpoints += list(batch_points)\n\n\t\tresult = []\n\t\tfor box, point in zip(boxes, points):\n\t\t\tbox = np.array(box)\n\t\t\tpoint = np.array(point)\n\t\t\tif len(box) == 0:\n\t\t\t\tresult.append(None)\n\t\t\telse:\n\t\t\t\tresult.append((box[:, :4], box[:, 4], point))\n\t\treturn result\n\n\ndef empty_cache(device):\n\tif 'cuda' in device:\n\t\twith torch.cuda.device(device):\n\t\t\ttorch.cuda.empty_cache()\n\n\nclass PNet(nn.Module):\n\n\tdef __init__(self):\n\t\tsuper().__init__()\n\n\t\tself.conv1 = nn.Conv2d(3, 10, kernel_size=3)\n\t\tself.prelu1 = nn.PReLU(10)\n\t\tself.pool1 = nn.MaxPool2d(2, 2, ceil_mode=True)\n\t\tself.conv2 = nn.Conv2d(10, 16, kernel_size=3)\n\t\tself.prelu2 = nn.PReLU(16)\n\t\tself.conv3 = nn.Conv2d(16, 32, kernel_size=3)\n\t\tself.prelu3 = nn.PReLU(32)\n\t\tself.conv4_1 = nn.Conv2d(32, 2, kernel_size=1)\n\t\tself.softmax4_1 = nn.Softmax(dim=1)\n\t\tself.conv4_2 = nn.Conv2d(32, 4, kernel_size=1)\n\n\tdef forward(self, x):\n\t\tx = self.conv1(x)\n\t\tx = self.prelu1(x)\n\t\tx = self.pool1(x)\n\t\tx = self.conv2(x)\n\t\tx = self.prelu2(x)\n\t\tx = self.conv3(x)\n\t\tx = self.prelu3(x)\n\t\ta = self.conv4_1(x)\n\t\ta = self.softmax4_1(a)\n\t\tb = self.conv4_2(x)\n\t\treturn b, a\n\n\nclass RNet(nn.Module):\n\n\tdef __init__(self):\n\t\tsuper().__init__()\n\n\t\tself.conv1 = nn.Conv2d(3, 28, kernel_size=3)\n\t\tself.prelu1 = nn.PReLU(28)\n\t\tself.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n\t\tself.conv2 = nn.Conv2d(28, 48, kernel_size=3)\n\t\tself.prelu2 = nn.PReLU(48)\n\t\tself.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n\t\tself.conv3 = nn.Conv2d(48, 64, kernel_size=2)\n\t\tself.prelu3 = nn.PReLU(64)\n\t\tself.dense4 = nn.Linear(576, 128)\n\t\tself.prelu4 = nn.PReLU(128)\n\t\tself.dense5_1 = nn.Linear(128, 2)\n\t\tself.softmax5_1 = nn.Softmax(dim=1)\n\t\tself.dense5_2 = nn.Linear(128, 4)\n\n\tdef forward(self, x):\n\t\tx = self.conv1(x)\n\t\tx = self.prelu1(x)\n\t\tx = self.pool1(x)\n\t\tx = self.conv2(x)\n\t\tx = self.prelu2(x)\n\t\tx = self.pool2(x)\n\t\tx = self.conv3(x)\n\t\tx = self.prelu3(x)\n\t\tx = x.permute(0, 3, 2, 1).contiguous()\n\t\tx = 
self.dense4(x.view(x.shape[0], -1))\n\t\tx = self.prelu4(x)\n\t\ta = self.dense5_1(x)\n\t\ta = self.softmax5_1(a)\n\t\tb = self.dense5_2(x)\n\t\treturn b, a\n\n\nclass ONet(nn.Module):\n\n\tdef __init__(self):\n\t\tsuper().__init__()\n\n\t\tself.conv1 = nn.Conv2d(3, 32, kernel_size=3)\n\t\tself.prelu1 = nn.PReLU(32)\n\t\tself.pool1 = nn.MaxPool2d(3, 2, ceil_mode=True)\n\t\tself.conv2 = nn.Conv2d(32, 64, kernel_size=3)\n\t\tself.prelu2 = nn.PReLU(64)\n\t\tself.pool2 = nn.MaxPool2d(3, 2, ceil_mode=True)\n\t\tself.conv3 = nn.Conv2d(64, 64, kernel_size=3)\n\t\tself.prelu3 = nn.PReLU(64)\n\t\tself.pool3 = nn.MaxPool2d(2, 2, ceil_mode=True)\n\t\tself.conv4 = nn.Conv2d(64, 128, kernel_size=2)\n\t\tself.prelu4 = nn.PReLU(128)\n\t\tself.dense5 = nn.Linear(1152, 256)\n\t\tself.prelu5 = nn.PReLU(256)\n\t\tself.dense6_1 = nn.Linear(256, 2)\n\t\tself.softmax6_1 = nn.Softmax(dim=1)\n\t\tself.dense6_2 = nn.Linear(256, 4)\n\t\tself.dense6_3 = nn.Linear(256, 10)\n\n\tdef forward(self, x):\n\t\tx = self.conv1(x)\n\t\tx = self.prelu1(x)\n\t\tx = self.pool1(x)\n\t\tx = self.conv2(x)\n\t\tx = self.prelu2(x)\n\t\tx = self.pool2(x)\n\t\tx = self.conv3(x)\n\t\tx = self.prelu3(x)\n\t\tx = self.pool3(x)\n\t\tx = self.conv4(x)\n\t\tx = self.prelu4(x)\n\t\tx = x.permute(0, 3, 2, 1).contiguous()\n\t\tx = self.dense5(x.view(x.shape[0], -1))\n\t\tx = self.prelu5(x)\n\t\ta = self.dense6_1(x)\n\t\ta = self.softmax6_1(a)\n\t\tb = self.dense6_2(x)\n\t\tc = self.dense6_3(x)\n\t\treturn b, c, a\n\n\ndef detect_face(imgs, minsize, pnet, rnet, onet, threshold, factor, device):\n\tif isinstance(imgs, (np.ndarray, torch.Tensor)):\n\t\timgs = torch.as_tensor(imgs, device=device)\n\t\tif len(imgs.shape) == 3:\n\t\t\timgs = imgs.unsqueeze(0)\n\telse:\n\t\tif not isinstance(imgs, (list, tuple)):\n\t\t\timgs = [imgs]\n\t\tif any(img.size != imgs[0].size for img in imgs):\n\t\t\traise Exception(\"MTCNN batch processing only compatible with equal-dimension images.\")\n\t\timgs = np.stack([np.uint8(img) for img in imgs])\n\n\timgs = torch.as_tensor(imgs, device=device)\n\n\tmodel_dtype = next(pnet.parameters()).dtype\n\timgs = imgs.permute(0, 3, 1, 2).type(model_dtype)\n\n\tbatch_size = len(imgs)\n\th, w = imgs.shape[2:4]\n\tm = 12.0 / minsize\n\tminl = min(h, w)\n\tminl = minl * m\n\n\t# Create scale pyramid\n\tscale_i = m\n\tscales = []\n\twhile minl >= 12:\n\t\tscales.append(scale_i)\n\t\tscale_i = scale_i * factor\n\t\tminl = minl * factor\n\n\t# First stage\n\tboxes = []\n\timage_inds = []\n\tall_inds = []\n\tall_i = 0\n\tfor scale in scales:\n\t\tim_data = imresample(imgs, (int(h * scale + 1), int(w * scale + 1)))\n\t\tim_data = (im_data - 127.5) * 0.0078125\n\t\treg, probs = pnet(im_data)\n\t\tempty_cache(device)\n\t\tboxes_scale, image_inds_scale = generateBoundingBox(reg, probs[:, 1], scale, threshold[0])\n\t\tboxes.append(boxes_scale)\n\t\timage_inds.append(image_inds_scale)\n\t\tall_inds.append(all_i + image_inds_scale)\n\t\tall_i += batch_size\n\n\tboxes = torch.cat(boxes, dim=0)\n\timage_inds = torch.cat(image_inds, dim=0).cpu()\n\tall_inds = torch.cat(all_inds, dim=0)\n\n\t# NMS within each scale + image\n\tpick = batched_nms(boxes[:, :4], boxes[:, 4], all_inds, 0.5)\n\tboxes, image_inds = boxes[pick], image_inds[pick]\n\n\t# NMS within each image\n\tpick = batched_nms(boxes[:, :4], boxes[:, 4], image_inds, 0.7)\n\tboxes, image_inds = boxes[pick], image_inds[pick]\n\n\tregw = boxes[:, 2] - boxes[:, 0]\n\tregh = boxes[:, 3] - boxes[:, 1]\n\tqq1 = boxes[:, 0] + boxes[:, 5] * regw\n\tqq2 = boxes[:, 1] + boxes[:, 6] * 
regh\n\tqq3 = boxes[:, 2] + boxes[:, 7] * regw\n\tqq4 = boxes[:, 3] + boxes[:, 8] * regh\n\tboxes = torch.stack([qq1, qq2, qq3, qq4, boxes[:, 4]]).permute(1, 0)\n\tboxes = rerec(boxes)\n\ty, ey, x, ex = pad(boxes, w, h)\n\n\t# Second stage\n\tif len(boxes) > 0:\n\t\tim_data = []\n\t\tfor k in range(len(y)):\n\t\t\tif ey[k] > (y[k] - 1) and ex[k] > (x[k] - 1):\n\t\t\t\timg_k = imgs[image_inds[k], :, (y[k] - 1):ey[k], (x[k] - 1):ex[k]].unsqueeze(0)\n\t\t\t\tim_data.append(imresample(img_k, (24, 24)))\n\t\tim_data = torch.cat(im_data, dim=0)\n\t\tim_data = (im_data - 127.5) * 0.0078125\n\n\t\tout = []\n\t\tfor batch in im_data.split(2000):\n\t\t\tout += [rnet(batch)]\n\t\tz = list(zip(*out))\n\t\tout = (torch.cat(z[0]), torch.cat(z[1]))\n\t\tempty_cache(device)\n\n\t\tout0 = out[0].permute(1, 0)\n\t\tout1 = out[1].permute(1, 0)\n\t\tscore = out1[1, :]\n\t\tipass = score > threshold[1]\n\t\tboxes = torch.cat((boxes[ipass, :4], score[ipass].unsqueeze(1)), dim=1)\n\t\timage_inds = image_inds[ipass]\n\t\tmv = out0[:, ipass].permute(1, 0)\n\n\t\t# NMS within each image\n\t\tpick = batched_nms(boxes[:, :4], boxes[:, 4], image_inds, 0.7)\n\t\tboxes, image_inds, mv = boxes[pick], image_inds[pick], mv[pick]\n\t\tboxes = bbreg(boxes, mv)\n\t\tboxes = rerec(boxes)\n\n\t# Third stage\n\tpoints = torch.zeros(0, 5, 2, device=device)\n\tif len(boxes) > 0:\n\t\ty, ey, x, ex = pad(boxes, w, h)\n\t\tim_data = []\n\t\tfor k in range(len(y)):\n\t\t\tif ey[k] > (y[k] - 1) and ex[k] > (x[k] - 1):\n\t\t\t\timg_k = imgs[image_inds[k], :, (y[k] - 1):ey[k], (x[k] - 1):ex[k]].unsqueeze(0)\n\t\t\t\tim_data.append(imresample(img_k, (48, 48)))\n\t\tim_data = torch.cat(im_data, dim=0)\n\t\tim_data = (im_data - 127.5) * 0.0078125\n\n\t\tout = []\n\t\tfor batch in im_data.split(500):\n\t\t\tout += [onet(batch)]\n\t\tz = list(zip(*out))\n\t\tout = (torch.cat(z[0]), torch.cat(z[1]), torch.cat(z[2]))\n\t\tempty_cache(device)\n\n\t\tout0 = out[0].permute(1, 0)\n\t\tout1 = out[1].permute(1, 0)\n\t\tout2 = out[2].permute(1, 0)\n\t\tscore = out2[1, :]\n\t\tpoints = out1\n\t\tipass = score > threshold[2]\n\t\tpoints = points[:, ipass]\n\t\tboxes = torch.cat((boxes[ipass, :4], score[ipass].unsqueeze(1)), dim=1)\n\t\timage_inds = image_inds[ipass]\n\t\tmv = out0[:, ipass].permute(1, 0)\n\n\t\tw_i = boxes[:, 2] - boxes[:, 0] + 1\n\t\th_i = boxes[:, 3] - boxes[:, 1] + 1\n\t\tpoints_x = w_i.repeat(5, 1) * points[:5, :] + boxes[:, 0].repeat(5, 1) - 1\n\t\tpoints_y = h_i.repeat(5, 1) * points[5:10, :] + boxes[:, 1].repeat(5, 1) - 1\n\t\tpoints = torch.stack((points_x, points_y)).permute(2, 1, 0)\n\t\tboxes = bbreg(boxes, mv)\n\n\t\t# NMS within each image using \"Min\" strategy\n\t\t# pick = batched_nms(boxes[:, :4], boxes[:, 4], image_inds, 0.7)\n\t\tpick = batched_nms_numpy(boxes[:, :4], boxes[:, 4], image_inds, 0.7, 'Min')\n\t\tboxes, image_inds, points = boxes[pick], image_inds[pick], points[pick]\n\n\tboxes = boxes.cpu().numpy()\n\tpoints = points.cpu().numpy()\n\n\tbatch_boxes = []\n\tbatch_points = []\n\tfor b_i in range(batch_size):\n\t\tb_i_inds = np.where(image_inds == b_i)\n\t\tbatch_boxes.append(boxes[b_i_inds].copy())\n\t\tbatch_points.append(points[b_i_inds].copy())\n\n\tbatch_boxes, batch_points = np.array(batch_boxes), np.array(batch_points)\n\tempty_cache(device)\n\n\treturn batch_boxes, batch_points\n\n\ndef bbreg(boundingbox, reg):\n\tif reg.shape[1] == 1:\n\t\treg = torch.reshape(reg, (reg.shape[2], reg.shape[3]))\n\n\tw = boundingbox[:, 2] - boundingbox[:, 0] + 1\n\th = boundingbox[:, 3] - boundingbox[:, 1] + 1\n\tb1 = 
boundingbox[:, 0] + reg[:, 0] * w\n\tb2 = boundingbox[:, 1] + reg[:, 1] * h\n\tb3 = boundingbox[:, 2] + reg[:, 2] * w\n\tb4 = boundingbox[:, 3] + reg[:, 3] * h\n\tboundingbox[:, :4] = torch.stack([b1, b2, b3, b4]).permute(1, 0)\n\n\treturn boundingbox\n\n\ndef generateBoundingBox(reg, probs, scale, thresh):\n\tstride = 2\n\tcellsize = 12\n\n\treg = reg.permute(1, 0, 2, 3)\n\n\tmask = probs >= thresh\n\tmask_inds = mask.nonzero(as_tuple=False)\n\timage_inds = mask_inds[:, 0]\n\tscore = probs[mask]\n\treg = reg[:, mask].permute(1, 0)\n\tbb = mask_inds[:, 1:].type(reg.dtype).flip(1)\n\tq1 = ((stride * bb + 1) / scale).floor()\n\tq2 = ((stride * bb + cellsize - 1 + 1) / scale).floor()\n\tboundingbox = torch.cat([q1, q2, score.unsqueeze(1), reg], dim=1)\n\treturn boundingbox, image_inds\n\n\ndef nms_numpy(boxes, scores, threshold, method):\n\tif boxes.size == 0:\n\t\treturn np.empty((0, 3))\n\n\tx1 = boxes[:, 0].copy()\n\ty1 = boxes[:, 1].copy()\n\tx2 = boxes[:, 2].copy()\n\ty2 = boxes[:, 3].copy()\n\ts = scores\n\tarea = (x2 - x1 + 1) * (y2 - y1 + 1)\n\n\tI = np.argsort(s)\n\tpick = np.zeros_like(s, dtype=np.int16)\n\tcounter = 0\n\twhile I.size > 0:\n\t\ti = I[-1]\n\t\tpick[counter] = i\n\t\tcounter += 1\n\t\tidx = I[0:-1]\n\n\t\txx1 = np.maximum(x1[i], x1[idx]).copy()\n\t\tyy1 = np.maximum(y1[i], y1[idx]).copy()\n\t\txx2 = np.minimum(x2[i], x2[idx]).copy()\n\t\tyy2 = np.minimum(y2[i], y2[idx]).copy()\n\n\t\tw = np.maximum(0.0, xx2 - xx1 + 1).copy()\n\t\th = np.maximum(0.0, yy2 - yy1 + 1).copy()\n\n\t\tinter = w * h\n\t\tif method == \"Min\":\n\t\t\to = inter / np.minimum(area[i], area[idx])\n\t\telse:\n\t\t\to = inter / (area[i] + area[idx] - inter)\n\t\tI = I[np.where(o <= threshold)]\n\n\tpick = pick[:counter].copy()\n\treturn pick\n\n\ndef batched_nms_numpy(boxes, scores, idxs, threshold, method):\n\tdevice = boxes.device\n\tif boxes.numel() == 0:\n\t\treturn torch.empty((0,), dtype=torch.int64, device=device)\n\t# strategy: in order to perform NMS independently per class.\n\t# we add an offset to all the boxes. The offset is dependent\n\t# only on the class idx, and is large enough so that boxes\n\t# from different classes do not overlap\n\tmax_coordinate = boxes.max()\n\toffsets = idxs.to(boxes) * (max_coordinate + 1)\n\tboxes_for_nms = boxes + offsets[:, None]\n\tboxes_for_nms = boxes_for_nms.cpu().numpy()\n\tscores = scores.cpu().numpy()\n\tkeep = nms_numpy(boxes_for_nms, scores, threshold, method)\n\treturn torch.as_tensor(keep, dtype=torch.long, device=device)\n\n\ndef pad(boxes, w, h):\n\tboxes = boxes.trunc().int().cpu().numpy()\n\tx = boxes[:, 0]\n\ty = boxes[:, 1]\n\tex = boxes[:, 2]\n\tey = boxes[:, 3]\n\n\tx[x < 1] = 1\n\ty[y < 1] = 1\n\tex[ex > w] = w\n\tey[ey > h] = h\n\n\treturn y, ey, x, ex\n\n\ndef rerec(bboxA):\n\th = bboxA[:, 3] - bboxA[:, 1]\n\tw = bboxA[:, 2] - bboxA[:, 0]\n\n\tl = torch.max(w, h)\n\tbboxA[:, 0] = bboxA[:, 0] + w * 0.5 - l * 0.5\n\tbboxA[:, 1] = bboxA[:, 1] + h * 0.5 - l * 0.5\n\tbboxA[:, 2:4] = bboxA[:, :2] + l.repeat(2, 1).permute(1, 0)\n\n\treturn bboxA\n\n\ndef imresample(img, sz):\n\tim_data = interpolate(img, size=sz, mode=\"area\")\n\treturn im_data",
"step-ids": [
17,
18,
19,
21,
23
]
}
|
[
17,
18,
19,
21,
23
] |
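# A minimal, hedged usage sketch for the MTCNN class serialized in the record
# above (assumes the pretrained weights download from the URL in __init__ and
# that all input frames are equally sized PIL images; 'frame.jpg' is only an
# illustrative file name):
#
# from PIL import Image
# frames = [Image.open('frame.jpg')]
# detector = MTCNN(device='cpu')
# results = detector.detect(frames)  # per image: (boxes, scores, landmarks) or None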
from django.urls import path
from .views import *
from .utils import *
app_name = 'gymapp'
urlpatterns = [
    # CLIENT PATHS ##
# general pages
path('', ClientHomeView.as_view(), name='clienthome'),
path('about/', ClientAboutView.as_view(), name='clientabout'),
path('contact/', ClientContactCreateView.as_view(), name='clientcontact'),
# path('makeanappointment/', ClientAppointmentCreateView.as_view(),
# name='clientappointmentcreate'),
path('products/', ClientProductListView.as_view(), name='clientproductlist'),
    path('product/<int:pk>/detail/', ClientProductDetailView.as_view(),
         name='clientproductdetail'),
path('trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),
path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),
name='clienttrainerdetail'),
path('services/', ClientServiceListView.as_view(),
name='clientservicelist'),
path('services/<slug:slug>/detail/',
ClientServiceDetailView.as_view(), name='clientservicedetail'),
path('schedule/<slug:slug>/detail/',
ClientScheduleDetailView.as_view(), name='clientscheduledetail'),
path('testimonial/',
TestimonialListView.as_view(), name='testimoniallist'),
# path('slider/',
# SliderListView.as_view(), name='sliderlist'),
path('facilities/', ClientFacilityListView.as_view(),
name='clientfacilitylist'),
path('facilities/<slug:slug>/details',
ClientFacilityDetailView.as_view(), name='clientfacilitydetail'),
path('events/', ClientEventListView.as_view(),
name='clienteventlist'),
path('events/<slug:slug>/details',
ClientEventDetailView.as_view(), name='clienteventdetail'),
path('notices/', ClientNoticeListView.as_view(), name='clientnoticelist'),
path('notices/<slug:slug>/details',
ClientNoticeDetailView.as_view(), name='clientnoticedetail'),
path('pages/<slug:slug>/details',
ClientPageDetailView.as_view(), name='clientpagedetail'),
path('images/', ClientImageListView.as_view(), name='clientimagelist'),
path('videos/', ClientVideoListView.as_view(), name='clientvideolist'),
path('blogs/', ClientBlogListView.as_view(), name='clientbloglist'),
path('blogs/<slug:slug>/details',
ClientBlogDetailView.as_view(), name='clientblogdetail'),
path('schedules/', ClientScheduleListView.as_view(), name='clientschedulelist'),
path('404/', ClientPageNotFoundView.as_view(), name='clientpagenotfound'),
path('subscribe/', ClientSubscriberCreateView.as_view(),
name='clientsubscribercreate'),
path('search/result/', SearchResultView.as_view(), name="searchresult"),
path('login/', ClientLoginView.as_view(), name='clientlogin'),
path('logout/', ClientLogoutView.as_view(), name='clientlogout'),
path('register/', ClientRegistrationView.as_view(), name='clientcreate'),
    path('cart_update', cart_update, name='cart_update'),
    path('carts/<int:pk>/items/total/', ClientCartTotalView.as_view(),
         name='clientcarttotal'),
]
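# A hedged sketch of resolving these namespaced routes elsewhere in the project
# (assumes the app's URLconf is included at the site root; pk=1 is only
# illustrative):
#
# from django.urls import reverse
# url = reverse('gymapp:clientproductdetail', kwargs={'pk': 1})
# # -> '/product/1/detail/'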
|
normal
|
{
"blob_id": "48a4331e4b26ea81f1c52ae76db1e92a57cb378c",
"index": 2654,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'gymapp'\nurlpatterns = [path('', ClientHomeView.as_view(), name='clienthome'), path(\n 'about/', ClientAboutView.as_view(), name='clientabout'), path(\n 'contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n path('products/', ClientProductListView.as_view(), name=\n 'clientproductlist'), path('product/<int:pk>/detail/',\n ClientProductDetailView.as_view(), name='clientproductdetail'), path(\n 'trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'), path('services/', ClientServiceListView.\n as_view(), name='clientservicelist'), path(\n 'services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name\n ='clientservicedetail'), path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path(\n 'testimonial/', TestimonialListView.as_view(), name='testimoniallist'),\n path('facilities/', ClientFacilityListView.as_view(), name=\n 'clientfacilitylist'), path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), name='clientfacilitydetail'), path(\n 'events/', ClientEventListView.as_view(), name='clienteventlist'), path\n ('events/<slug:slug>/details', ClientEventDetailView.as_view(), name=\n 'clienteventdetail'), path('notices/', ClientNoticeListView.as_view(),\n name='clientnoticelist'), path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path(\n 'pages/<slug:slug>/details', ClientPageDetailView.as_view(), name=\n 'clientpagedetail'), path('images/', ClientImageListView.as_view(),\n name='clientimagelist'), path('videos/', ClientVideoListView.as_view(),\n name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(),\n name='clientbloglist'), path('blogs/<slug:slug>/details',\n ClientBlogDetailView.as_view(), name='clientblogdetail'), path(\n 'schedules/', ClientScheduleListView.as_view(), name=\n 'clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(),\n name='clientpagenotfound'), path('subscribe/',\n ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name='searchresult'),\n path('login/', ClientLoginView.as_view(), name='clientlogin'), path(\n 'logout/', ClientLogoutView.as_view(), name='clientlogout'), path(\n 'register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update', cart_update, name='cart_update'), path(\n 'carts/<int:pk>/items/total/', ClientCartTotalView.as_view(), name=\n 'clientcarttotal')]\n",
"step-3": "from django.urls import path\nfrom .views import *\nfrom .utils import *\napp_name = 'gymapp'\nurlpatterns = [path('', ClientHomeView.as_view(), name='clienthome'), path(\n 'about/', ClientAboutView.as_view(), name='clientabout'), path(\n 'contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n path('products/', ClientProductListView.as_view(), name=\n 'clientproductlist'), path('product/<int:pk>/detail/',\n ClientProductDetailView.as_view(), name='clientproductdetail'), path(\n 'trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'), path('services/', ClientServiceListView.\n as_view(), name='clientservicelist'), path(\n 'services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name\n ='clientservicedetail'), path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path(\n 'testimonial/', TestimonialListView.as_view(), name='testimoniallist'),\n path('facilities/', ClientFacilityListView.as_view(), name=\n 'clientfacilitylist'), path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), name='clientfacilitydetail'), path(\n 'events/', ClientEventListView.as_view(), name='clienteventlist'), path\n ('events/<slug:slug>/details', ClientEventDetailView.as_view(), name=\n 'clienteventdetail'), path('notices/', ClientNoticeListView.as_view(),\n name='clientnoticelist'), path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path(\n 'pages/<slug:slug>/details', ClientPageDetailView.as_view(), name=\n 'clientpagedetail'), path('images/', ClientImageListView.as_view(),\n name='clientimagelist'), path('videos/', ClientVideoListView.as_view(),\n name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(),\n name='clientbloglist'), path('blogs/<slug:slug>/details',\n ClientBlogDetailView.as_view(), name='clientblogdetail'), path(\n 'schedules/', ClientScheduleListView.as_view(), name=\n 'clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(),\n name='clientpagenotfound'), path('subscribe/',\n ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name='searchresult'),\n path('login/', ClientLoginView.as_view(), name='clientlogin'), path(\n 'logout/', ClientLogoutView.as_view(), name='clientlogout'), path(\n 'register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update', cart_update, name='cart_update'), path(\n 'carts/<int:pk>/items/total/', ClientCartTotalView.as_view(), name=\n 'clientcarttotal')]\n",
"step-4": "from django.urls import path\nfrom .views import *\nfrom .utils import *\n\n\napp_name = 'gymapp'\n\nurlpatterns = [\n\n\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n\n # general pages\n\n path('', ClientHomeView.as_view(), name='clienthome'),\n path('about/', ClientAboutView.as_view(), name='clientabout'),\n path('contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n # path('makeanappointment/', ClientAppointmentCreateView.as_view(),\n # name='clientappointmentcreate'),\n path('products/', ClientProductListView.as_view(), name='clientproductlist'),\n path('product/<int:pk>/detail/',ClientProductDetailView.as_view(), \n name='clientproductdetail'),\n path('trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'),\n path('services/', ClientServiceListView.as_view(),\n name='clientservicelist'),\n path('services/<slug:slug>/detail/',\n ClientServiceDetailView.as_view(), name='clientservicedetail'),\n path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'),\n path('testimonial/',\n TestimonialListView.as_view(), name='testimoniallist'),\n # path('slider/',\n # SliderListView.as_view(), name='sliderlist'),\n path('facilities/', ClientFacilityListView.as_view(),\n name='clientfacilitylist'),\n path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), name='clientfacilitydetail'),\n path('events/', ClientEventListView.as_view(),\n name='clienteventlist'),\n path('events/<slug:slug>/details',\n ClientEventDetailView.as_view(), name='clienteventdetail'),\n path('notices/', ClientNoticeListView.as_view(), name='clientnoticelist'),\n path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'),\n path('pages/<slug:slug>/details',\n ClientPageDetailView.as_view(), name='clientpagedetail'),\n path('images/', ClientImageListView.as_view(), name='clientimagelist'),\n path('videos/', ClientVideoListView.as_view(), name='clientvideolist'),\n path('blogs/', ClientBlogListView.as_view(), name='clientbloglist'),\n path('blogs/<slug:slug>/details',\n ClientBlogDetailView.as_view(), name='clientblogdetail'),\n path('schedules/', ClientScheduleListView.as_view(), name='clientschedulelist'),\n path('404/', ClientPageNotFoundView.as_view(), name='clientpagenotfound'),\n path('subscribe/', ClientSubscriberCreateView.as_view(),\n name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name=\"searchresult\"),\n path('login/', ClientLoginView.as_view(), name='clientlogin'),\n path('logout/', ClientLogoutView.as_view(), name='clientlogout'),\n path('register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update',cart_update,name = 'cart_update'),\n path('carts/<int:pk>/items/total/',ClientCartTotalView.as_view(), name='clientcarttotal'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import xadmin
from .models import EmailVerifyRecord, Banner
from xadmin import views


class EmailVerifyRecordAdmin(object):
    pass


class BannerAdmin(object):
    list_display = ('title', 'url', 'index')


class BaseSetting(object):
    enable_themes = True
    use_bootswatch = True  # original "user_bootswatch" was a typo; xadmin expects "use_bootswatch"


# Set the xadmin page title and footer
class GlobalSetting(object):
    site_title = '西游记'  # "Journey to the West"
    site_footer = '咨询在线'  # "Online consulting"


xadmin.site.register(EmailVerifyRecord, EmailVerifyRecordAdmin)
xadmin.site.register(Banner, BannerAdmin)
xadmin.site.register(views.BaseAdminView, BaseSetting)
xadmin.site.register(views.CommAdminView, GlobalSetting)
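# These registrations only take effect once the project URLconf routes the
# xadmin site; a sketch of the usual wiring (exact setup varies by project):
#
# import xadmin
# from django.urls import path
# xadmin.autodiscover()
# urlpatterns = [path('xadmin/', xadmin.site.urls)]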
|
normal
|
{
"blob_id": "263a853f33eb9724101ca87f12b914282dea9981",
"index": 1441,
"step-1": "<mask token>\n\n\nclass BannerAdmin(object):\n list_display = 'title', 'url', 'index'\n\n\nclass BaseSetting(object):\n enable_themes = True\n user_bootswatch = True\n\n\nclass GlobalSetting(object):\n site_title = '西游记'\n site_footer = '咨询在线'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass EmailVerifyRecordAdmin(object):\n pass\n\n\nclass BannerAdmin(object):\n list_display = 'title', 'url', 'index'\n\n\nclass BaseSetting(object):\n enable_themes = True\n user_bootswatch = True\n\n\nclass GlobalSetting(object):\n site_title = '西游记'\n site_footer = '咨询在线'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass EmailVerifyRecordAdmin(object):\n pass\n\n\nclass BannerAdmin(object):\n list_display = 'title', 'url', 'index'\n\n\nclass BaseSetting(object):\n enable_themes = True\n user_bootswatch = True\n\n\nclass GlobalSetting(object):\n site_title = '西游记'\n site_footer = '咨询在线'\n\n\nxadmin.site.register(EmailVerifyRecord, EmailVerifyRecordAdmin)\nxadmin.site.register(Banner, BannerAdmin)\nxadmin.site.register(views.BaseAdminView, BaseSetting)\nxadmin.site.register(views.CommAdminView, GlobalSetting)\n",
"step-4": "import xadmin\nfrom .models import EmailVerifyRecord, Banner\nfrom xadmin import views\n\n\nclass EmailVerifyRecordAdmin(object):\n pass\n\n\nclass BannerAdmin(object):\n list_display = 'title', 'url', 'index'\n\n\nclass BaseSetting(object):\n enable_themes = True\n user_bootswatch = True\n\n\nclass GlobalSetting(object):\n site_title = '西游记'\n site_footer = '咨询在线'\n\n\nxadmin.site.register(EmailVerifyRecord, EmailVerifyRecordAdmin)\nxadmin.site.register(Banner, BannerAdmin)\nxadmin.site.register(views.BaseAdminView, BaseSetting)\nxadmin.site.register(views.CommAdminView, GlobalSetting)\n",
"step-5": "import xadmin\nfrom .models import EmailVerifyRecord,Banner\nfrom xadmin import views\n\nclass EmailVerifyRecordAdmin(object):\n pass\n\n\nclass BannerAdmin(object):\n list_display=('title','url','index')\n\nclass BaseSetting(object):\n enable_themes=True\n user_bootswatch=True\n#设置xadmin页面标题和页脚\nclass GlobalSetting(object):\n site_title='西游记'\n site_footer='咨询在线'\nxadmin.site.register(EmailVerifyRecord,EmailVerifyRecordAdmin)\nxadmin.site.register(Banner,BannerAdmin)\nxadmin.site.register(views.BaseAdminView,BaseSetting)\nxadmin.site.register(views.CommAdminView,GlobalSetting)\n\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
# aylat
# This program will calculate an individual's body mass index (BMI),
# based on their height and their weight
# Prompt user to input information
Name = input('Enter your full name: ')
Weight = float(input('Enter your weight in pounds: '))
Height = float(input('Enter your height in inches: '))
# Perform BMI calculation, based on user input
BMI = Weight * 703 / Height**2
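# For instance (illustrative numbers): 150 lb at 65 in gives
# 150 * 703 / 65**2 = 105450 / 4225, i.e. a BMI of about 24.96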
# Use an if/elif structure to determine the user's weight category, based on BMI
# (standard cutoffs: underweight < 18.5, ideal 18.5-24.9, overweight 25-29.9, obese >= 30;
# the original < 24.9 and < 29.9 tests misclassified values such as 24.95)
print('\n')
if BMI < 18.5:
    print(Name, ", your BMI calculation is ", format(BMI, '.1f'),
          ", which indicates your weight category is underweight.", sep='')
elif BMI < 25:
    print(Name, ", your BMI calculation is ", format(BMI, '.1f'),
          ", which indicates your weight category is ideal.", sep='')
elif BMI < 30:
    print(Name, ", your BMI calculation is ", format(BMI, '.1f'),
          ", which indicates your weight category is overweight.", sep='')
else:
    print(Name, ", your BMI calculation is ", format(BMI, '.1f'),
          ", which indicates your weight category is obese.", sep='')
|
normal
|
{
"blob_id": "8b009451e9f65ef12e5db1321a9d5347ef7fd756",
"index": 9593,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('\\n')\nif BMI < 18.5:\n print(Name, ', your BMI calculation is ', format(BMI, '.1f'),\n ', which indicates your weight category is underweight.', sep='')\nelif BMI < 24.9:\n print(Name, ', your BMI calculation is ', format(BMI, '.1f'),\n ', which indicates your weight category is ideal.', sep='')\nelif BMI < 29.9:\n print(Name, ', your BMI calculation is ', format(BMI, '.1f'),\n ', which indicates your weight category is overweight.', sep='')\nelse:\n print(Name, ', your BMI calculation is ', format(BMI, '.1f'),\n ', which indicates your weight category is obese.', sep='')\n",
"step-3": "Name = input('Enter your full name: ')\nWeight = float(input('Enter your weight in pounds: '))\nHeight = float(input('Enter your height in inches: '))\nBMI = Weight * 703 / Height ** 2\nprint('\\n')\nif BMI < 18.5:\n print(Name, ', your BMI calculation is ', format(BMI, '.1f'),\n ', which indicates your weight category is underweight.', sep='')\nelif BMI < 24.9:\n print(Name, ', your BMI calculation is ', format(BMI, '.1f'),\n ', which indicates your weight category is ideal.', sep='')\nelif BMI < 29.9:\n print(Name, ', your BMI calculation is ', format(BMI, '.1f'),\n ', which indicates your weight category is overweight.', sep='')\nelse:\n print(Name, ', your BMI calculation is ', format(BMI, '.1f'),\n ', which indicates your weight category is obese.', sep='')\n",
"step-4": "# aylat\n\n# This program will calculate an individual's body mass index (BMI), \n# based on their height and their weight\n\n# Prompt user to input information\nName = input('Enter your full name: ')\nWeight = float(input('Enter your weight in pounds: '))\nHeight = float(input('Enter your height in inches: '))\n\n# Perform BMI calculation, based on user input\nBMI = Weight * 703 / Height**2\n\n# Use an if/elif structure to determine the user's weight category, based on BMI\nprint('\\n')\n\nif BMI < 18.5: \n print(Name, \", your BMI calculation is \", format(BMI, '.1f'),\n \", which indicates your weight category is underweight.\", sep='')\n \nelif BMI < 24.9:\n print(Name, \", your BMI calculation is \", format(BMI, '.1f'),\n \", which indicates your weight category is ideal.\", sep='')\n \nelif BMI < 29.9:\n print(Name, \", your BMI calculation is \", format(BMI, '.1f'),\n \", which indicates your weight category is overweight.\", sep='')\n \nelse:\n print(Name, \", your BMI calculation is \", format(BMI, '.1f'),\n \", which indicates your weight category is obese.\", sep='')\n\n\n \n \n \n \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |