code (stringlengths 22–1.05M) | apis (listlengths 1–3.31k) | extract_api (stringlengths 75–3.25M)
---|---|---
# encoding: utf-8
from __future__ import print_function
import os
import json
from collections import OrderedDict
import numpy as np
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.ticker import Formatter
from jaqs.trade.analyze.report import Report
from jaqs.data import RemoteDataService
from jaqs.data.basic.instrument import InstManager
from jaqs.trade import common
import jaqs.util as jutil
STATIC_FOLDER = jutil.join_relative_path("trade/analyze/static")
TO_PCT = 100.0
MPL_RCPARAMS = {'figure.facecolor': '#F6F6F6',
'axes.facecolor': '#F6F6F6',
'axes.edgecolor': '#D3D3D3',
'text.color': '#555555',
'grid.color': '#B1B1B1',
'grid.alpha': 0.3,
# scale
'axes.linewidth': 2.0,
'axes.titlepad': 12,
'grid.linewidth': 1.0,
'grid.linestyle': '-',
# font size
'font.size': 13,
'axes.titlesize': 18,
'axes.labelsize': 14,
'legend.fontsize': 'small',
'lines.linewidth': 2.5,
}
class TradeRecordEmptyError(Exception):
def __init__(self, *args):
super(TradeRecordEmptyError, self).__init__(*args)
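# Note (added for clarity): MyFormatter maps integer x-positions back to dates.
# The plots below draw against np.arange(len(index)) and attach this formatter,
# the usual matplotlib trick to avoid gaps on non-trading days.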
class MyFormatter(Formatter):
def __init__(self, dates, fmt='%Y%m'):
self.dates = dates
self.fmt = fmt
def __call__(self, x, pos=0):
"""Return the label for time x at position pos"""
ind = int(np.round(x))
if ind >= len(self.dates) or ind < 0:
return ''
# return self.dates[ind].strftime(self.fmt)
return pd.to_datetime(self.dates[ind], format="%Y%m%d").strftime(self.fmt)
class BaseAnalyzer(object):
"""
Attributes
----------
_trades : pd.DataFrame
_configs : dict
data_api : BaseDataServer
_universe : set
All securities that have been traded.
"""
def __init__(self):
self.file_folder = ""
self._trades = None
self._configs = None
self.data_api = None
self.dataview = None
self._universe = []
self._closes = None
self._closes_adj = None
self.daily_position = None
self.adjust_mode = None
self.inst_map = dict()
self.performance_metrics = dict()
self.risk_metrics = dict()
self.report_dic = dict()
@property
def trades(self):
"""Read-only attribute"""
return self._trades
@property
def universe(self):
"""Read-only attribute"""
return self._universe
@property
def configs(self):
"""Read-only attribute"""
return self._configs
@property
def closes(self):
"""Read-only attribute, close prices of securities in the universe"""
return self._closes
@property
def closes_adj(self):
"""Read-only attribute, close prices of securities in the universe"""
return self._closes_adj
def initialize(self, data_api=None, dataview=None, file_folder='.'):
"""
Read trades from csv file to DataFrame of given data type.
Parameters
----------
data_api : RemoteDataService
dataview : DataView
file_folder : str
Directory path where trades and configs are stored.
"""
self.data_api = data_api
self.dataview = dataview
type_map = {'task_id': str,
'entrust_no': str,
'entrust_action': str,
'symbol': str,
'fill_price': float,
'fill_size': float,
'fill_date': np.integer,
'fill_time': np.integer,
'fill_no': str,
'commission': float}
abs_path = os.path.abspath(file_folder)
self.file_folder = abs_path
trades = pd.read_csv(os.path.join(self.file_folder, 'trades.csv'), ',', dtype=type_map)
if trades.empty:
raise TradeRecordEmptyError("No trade records found in your 'trades.csv' file. Analysis stopped.")
self._init_universe(trades.loc[:, 'symbol'].values)
self._init_configs(self.file_folder)
self._init_trades(trades)
self._init_symbol_price()
self._init_inst_data()
def _init_inst_data(self):
symbol_str = ','.join(self.universe)
if self.dataview is not None:
data_inst = self.dataview.data_inst
self.inst_map = data_inst.to_dict(orient='index')
elif self.data_api is not None:
inst_mgr = InstManager(data_api=self.data_api, symbol=symbol_str)
self.inst_map = {k: v.__dict__ for k, v in inst_mgr.inst_map.items()}
del inst_mgr
else:
raise ValueError("no dataview or dataapi provided.")
def _init_trades(self, df):
"""Add datetime column. """
df.loc[:, 'fill_dt'] = jutil.combine_date_time(df.loc[:, 'fill_date'], df.loc[:, 'fill_time'])
df = df.set_index(['symbol', 'fill_dt']).sort_index(axis=0)
# self._trades = jutil.group_df_to_dict(df, by='symbol')
self._trades = df
def _init_symbol_price(self):
"""Get close price of securities in the universe from data server."""
if self.dataview is not None:
df_close = self.dataview.get_ts('close', start_date=self.start_date, end_date=self.end_date)
df_close_adj = self.dataview.get_ts('close_adj', start_date=self.start_date, end_date=self.end_date)
else:
df, msg = self.data_api.daily(symbol=','.join(self.universe), fields='trade_date,symbol,close',
start_date=self.start_date, end_date=self.end_date)
if msg != '0,':
print(msg)
df_close = df.pivot(index='trade_date', columns='symbol', values='close')
df_adj, msg = self.data_api.daily(symbol=','.join(self.universe), fields='trade_date,symbol,close',
start_date=self.start_date, end_date=self.end_date)
if msg != '0,':
print(msg)
df_close_adj = df_adj.pivot(index='trade_date', columns='symbol', values='close')
self._closes = df_close
self._closes_adj = df_close_adj
def _init_universe(self, securities):
"""Return a set of securities."""
self._universe = set(securities)
def _init_configs(self, folder):
import codecs
with codecs.open(os.path.join(folder, 'configs.json'), 'r', encoding='utf-8') as f:
configs = json.load(f)
self._configs = configs
self.init_balance = self.configs['init_balance']
self.start_date = self.configs['start_date']
self.end_date = self.configs['end_date']
@staticmethod
def _process_trades(df):
"""Add various statistics to trades DataFrame."""
from jaqs.trade import common
# df = df.set_index('fill_date')
# pre-process
cols_to_drop = ['task_id', 'entrust_no', 'fill_no']
df = df.drop(cols_to_drop, axis=1)
def _apply(gp_df):
# calculation of non-cumulative fields
direction = gp_df['entrust_action'].apply(lambda s: 1 if common.ORDER_ACTION.is_positive(s) else -1)
fill_size, fill_price = gp_df['fill_size'], gp_df['fill_price']
turnover = fill_size * fill_price
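# direction is +1 when the action is classified as positive (a buy) and -1
# otherwise, so (direction + 1) / 2 keeps buy fills and (direction - 1) / -2
# keeps sell fills.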
gp_df.loc[:, 'BuyVolume'] = (direction + 1) / 2 * fill_size
gp_df.loc[:, 'SellVolume'] = (direction - 1) / -2 * fill_size
# Calculation of cumulative fields
gp_df.loc[:, 'CumVolume'] = fill_size.cumsum()
gp_df.loc[:, 'CumTurnOver'] = turnover.cumsum()
gp_df.loc[:, 'CumNetTurnOver'] = (turnover * -direction).cumsum()
gp_df.loc[:, 'position'] = (fill_size * direction).cumsum()
gp_df.loc[:, 'AvgPosPrice'] = calc_avg_pos_price(gp_df.loc[:, 'position'].values, fill_price.values)
gp_df.loc[:, 'CumProfit'] = (gp_df.loc[:, 'CumNetTurnOver'] + gp_df.loc[:, 'position'] * fill_price)
return gp_df
gp = df.groupby(by='symbol')
res = gp.apply(_apply)
return res
def process_trades(self):
# self._trades = {k: self._process_trades(v) for k, v in self.trades.items()}
self._trades = self._process_trades(self._trades)
def get_pos_change_info(self):
trades = pd.concat(self.trades.values(), axis=0)
gp = trades.groupby(by=['fill_date'], as_index=False)
res = OrderedDict()
account = OrderedDict()
for date, df in gp:
df_mod = df.loc[:, ['symbol', 'entrust_action', 'fill_size', 'fill_price',
'position', 'AvgPosPrice']]
df_mod.columns = ['symbol', 'action', 'size', 'price',
'position', 'cost price']
res[str(date)] = df_mod
mv = sum(df_mod.loc[:, 'price'] * df.loc[:, 'position'])
current_profit = sum(df.loc[:, 'CumProfit'])
cash = self.configs['init_balance'] + current_profit - mv
account[str(date)] = {'market_value': mv, 'cash': cash}
self.position_change = res
self.account = account
def get_daily(self):
close = self.closes
trade = self.trades
# pre-process
trade_cols = ['fill_date', 'BuyVolume', 'SellVolume', 'commission', 'position', 'AvgPosPrice', 'CumNetTurnOver']
trade = trade.loc[:, trade_cols]
gp = trade.groupby(by=['symbol', 'fill_date'])
func_last = lambda ser: ser.iat[-1]
trade = gp.agg({'BuyVolume': np.sum, 'SellVolume': np.sum, 'commission': np.sum,
'position': func_last, 'AvgPosPrice': func_last, 'CumNetTurnOver': func_last})
trade.index.names = ['symbol', 'trade_date']
# get daily position
df_position = trade['position'].unstack('symbol').fillna(method='ffill').fillna(0.0)
daily_position = df_position.reindex(close.index)
daily_position = daily_position.fillna(method='ffill').fillna(0)
self.daily_position = daily_position
# calculate statistics
close = pd.DataFrame(close.T.stack())
close.columns = ['close']
close.index.names = ['symbol', 'trade_date']
merge = pd.concat([close, trade], axis=1, join='outer')
def _apply(gp_df):
cols_nan_to_zero = ['BuyVolume', 'SellVolume', 'commission']
cols_nan_fill = ['close', 'position', 'AvgPosPrice', 'CumNetTurnOver']
# merge: pd.DataFrame
gp_df.loc[:, cols_nan_fill] = gp_df.loc[:, cols_nan_fill].fillna(method='ffill')
gp_df.loc[:, cols_nan_fill] = gp_df.loc[:, cols_nan_fill].fillna(0)
gp_df.loc[:, cols_nan_to_zero] = gp_df.loc[:, cols_nan_to_zero].fillna(0)
mask = gp_df.loc[:, 'AvgPosPrice'] < 1e-5
gp_df.loc[mask, 'AvgPosPrice'] = gp_df.loc[mask, 'close']
gp_df.loc[:, 'CumProfit'] = gp_df.loc[:, 'CumNetTurnOver'] + gp_df.loc[:, 'position'] * gp_df.loc[:, 'close']
gp_df.loc[:, 'CumProfitComm'] = gp_df['CumProfit'] - gp_df['commission'].cumsum()
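# Daily PnL split: trading_pnl marks today's net trading against today's close,
# holding_pnl is yesterday's position times today's price change.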
daily_net_turnover = gp_df['CumNetTurnOver'].diff(1).fillna(gp_df['CumNetTurnOver'].iat[0])
daily_position_change = gp_df['position'].diff(1).fillna(gp_df['position'].iat[0])
gp_df['trading_pnl'] = (daily_net_turnover + gp_df['close'] * daily_position_change)
gp_df['holding_pnl'] = (gp_df['close'].diff(1) * gp_df['position'].shift(1)).fillna(0.0)
gp_df.loc[:, 'total_pnl'] = gp_df['trading_pnl'] + gp_df['holding_pnl']
return gp_df
gp = merge.groupby(by='symbol')
res = gp.apply(_apply)
self.daily = res
'''
def get_daily(self):
"""Add various statistics to daily DataFrame."""
self.daily = self._get_daily(self.closes, self.trades)
daily_dic = dict()
for sec, df_trade in self.trades.items():
df_close = self.closes[sec].rename('close')
res = self._get_daily(df_close, df_trade)
daily_dic[sec] = res
self.daily = daily_dic
'''
def get_returns(self, compound_return=True, consider_commission=True):
cols = ['trading_pnl', 'holding_pnl', 'total_pnl', 'commission', 'CumProfitComm', 'CumProfit']
'''
dic_symbol = {sec: self.inst_map[sec]['multiplier'] * df_daily.loc[:, cols]
for sec, df_daily in self.daily.items()}
df_profit = pd.concat(dic_symbol, axis=1) # this is cumulative profit
df_profit = df_profit.fillna(method='ffill').fillna(0.0)
df_pnl = df_profit.stack(level=1)
df_pnl = df_pnl.sum(axis=1)
df_pnl = df_pnl.unstack(level=1)
'''
daily = self.daily.loc[:, cols]
daily = daily.stack().unstack('symbol')
df_pnl = daily.sum(axis=1)
df_pnl = df_pnl.unstack(level=1)
self.df_pnl = df_pnl
# TODO temporary solution
if consider_commission:
strategy_value = (df_pnl['total_pnl'] - df_pnl['commission']).cumsum() + self.init_balance
else:
strategy_value = df_pnl['total_pnl'].cumsum() + self.init_balance
market_values = pd.concat([strategy_value, self.data_benchmark], axis=1).fillna(method='ffill')
market_values.columns = ['strat', 'bench']
df_returns = market_values.pct_change(periods=1).fillna(0.0)
df_returns = df_returns.join((df_returns.loc[:, ['strat', 'bench']] + 1.0).cumprod(), rsuffix='_cum')
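# Active (excess) return: with compound_return the cumulative strategy and
# benchmark curves are differenced directly; otherwise daily simple excess
# returns are accumulated by compounding.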
if compound_return:
df_returns.loc[:, 'active_cum'] = df_returns['strat_cum'] - df_returns['bench_cum'] + 1
df_returns.loc[:, 'active'] = df_returns['active_cum'].pct_change(1).fillna(0.0)
else:
df_returns.loc[:, 'active'] = df_returns['strat'] - df_returns['bench']
df_returns.loc[:, 'active_cum'] = df_returns['active'].add(1.0).cumprod(axis=0)
start = pd.to_datetime(self.configs['start_date'], format="%Y%m%d")
end = pd.to_datetime(self.configs['end_date'], format="%Y%m%d")
years = (end - start).days / 365.0
self.performance_metrics['Annual Return (%)'] =\
100 * (np.power(df_returns.loc[:, 'active_cum'].values[-1], 1. / years) - 1)
self.performance_metrics['Annual Volatility (%)'] =\
100 * (df_returns.loc[:, 'active'].std() * np.sqrt(common.CALENDAR_CONST.TRADE_DAYS_PER_YEAR))
self.performance_metrics['Sharpe Ratio'] = (self.performance_metrics['Annual Return (%)']
/ self.performance_metrics['Annual Volatility (%)'])
self.risk_metrics['Beta'] = np.corrcoef(df_returns.loc[:, 'bench'], df_returns.loc[:, 'strat'])[0, 1]
# bt_strat_mv = pd.read_csv('bt_strat_mv.csv').set_index('trade_date')
# df_returns = df_returns.join(bt_strat_mv, how='right')
self.returns = df_returns
def plot_pnl(self, save_folder=None):
old_mpl_rcparams = {k: v for k, v in mpl.rcParams.items()}
mpl.rcParams.update(MPL_RCPARAMS)
if save_folder is None:
save_folder = self.file_folder
fig1 = plot_portfolio_bench_pnl(self.returns.loc[:, 'strat_cum'],
self.returns.loc[:, 'bench_cum'],
self.returns.loc[:, 'active_cum'])
fig1.savefig(os.path.join(save_folder,'pnl_img.png'), facecolor=fig1.get_facecolor(), dpi=fig1.get_dpi())
fig2 = plot_daily_trading_holding_pnl(self.df_pnl['trading_pnl'],
self.df_pnl['holding_pnl'],
self.df_pnl['total_pnl'],
self.df_pnl['total_pnl'].cumsum())
fig2.savefig(os.path.join(save_folder,'pnl_img_trading_holding.png'), facecolor=fig2.get_facecolor(), dpi=fig2.get_dpi())
mpl.rcParams.update(old_mpl_rcparams)
def plot_pnl_OLD(self, save_folder=None):
if save_folder is None:
save_folder = self.file_folder
fig, (ax0, ax1, ax2) = plt.subplots(3, 1, figsize=(21, 8), dpi=300, sharex=True)
idx0 = self.returns.index
idx = np.arange(len(idx0))
bar_width = 0.3
ax0.bar(idx-bar_width/2, self.df_pnl['trading_pnl'], width=bar_width, color='indianred', label='Trading PnL',)
ax0.bar(idx+bar_width/2, self.df_pnl['holding_pnl'], width=bar_width, color='royalblue', label='Holding PnL')
ax0.axhline(0.0, color='k', lw=1, ls='--')
# ax0.plot(idx, self.pnl['total_pnl'], lw=1.5, color='violet', label='Total PnL')
ax0.legend(loc='upper left')
ax1.plot(idx, self.returns.loc[:, 'bench_cum'], label='Benchmark')
ax1.plot(idx, self.returns.loc[:, 'strat_cum'], label='Strategy')
ax1.legend(loc='upper left')
ax2.plot(idx, self.returns.loc[:, 'active_cum'], label='Extra Return')
ax2.legend(loc='upper left')
ax2.set_xlabel("Date")
ax2.set_ylabel("Net Value")
ax1.set_ylabel("Net Value")
ax2.xaxis.set_major_formatter(MyFormatter(idx0, '%Y-%m-%d'))
plt.tight_layout()
fig.savefig(os.path.join(save_folder, 'pnl_img.png'))
plt.close()
def gen_report(self, source_dir, template_fn, out_folder='.', selected=None):
"""
Generate HTML (and PDF) report of the trade analysis.
Parameters
----------
source_dir : str
path of directory where HTML template and css files are stored.
template_fn : str
File name of HTML template.
out_folder : str
Output folder of report.
selected : list of str or None
List of symbols whose detailed PnL curve and position will be plotted.
# TODO: this parameter should not belong to function
"""
dic = dict()
dic['html_title'] = "Alpha Strategy Backtest Result"
dic['selected_securities'] = selected
# we do not want to show username / password in report
dic['props'] = {k: v for k, v in self.configs.items() if ('username' not in k and 'password' not in k)}
dic['performance_metrics'] = self.performance_metrics
dic['risk_metrics'] = self.risk_metrics
dic['position_change'] = self.position_change
dic['account'] = self.account
dic['df_daily'] = jutil.group_df_to_dict(self.daily, by='symbol')
dic['daily_position'] = self.daily_position
self.report_dic.update(dic)
self.returns.to_csv(os.path.join(out_folder, 'returns.csv'))
r = Report(self.report_dic, source_dir=source_dir, template_fn=template_fn, out_folder=out_folder)
r.generate_html()
r.output_html('report.html')
def do_analyze(self, result_dir, selected_sec=None):
if selected_sec is None:
selected_sec = []
print("process trades...")
self.process_trades()
print("get daily stats...")
self.get_daily()
print("calc strategy return...")
self.get_returns(consider_commission=False)
if len(selected_sec) > 0:
print("Plot single securities PnL")
for symbol in selected_sec:
df_daily = self.daily.loc[pd.IndexSlice[symbol, :], :]
df_daily.index = df_daily.index.droplevel(0)
if df_daily is not None:
plot_trades(df_daily, symbol=symbol, save_folder=self.file_folder)
print("Plot strategy PnL...")
self.plot_pnl(result_dir)
print("generate report...")
self.gen_report(source_dir=STATIC_FOLDER, template_fn='report_template.html',
out_folder=result_dir,
selected=selected_sec)
class EventAnalyzer(BaseAnalyzer):
def __init__(self):
super(EventAnalyzer, self).__init__()
self.metrics = dict()
self.daily = None
self.data_benchmark = None
self.returns = None # OrderedDict
self.position_change = None # OrderedDict
self.account = None # OrderedDict
def initialize(self, data_server_=None, dataview=None, file_folder='.'):
super(EventAnalyzer, self).initialize(data_api=data_server_, dataview=dataview,
file_folder=file_folder)
if self.dataview is not None and self.dataview.data_benchmark is not None:
self.data_benchmark = self.dataview.data_benchmark.loc[(self.dataview.data_benchmark.index >= self.start_date)
&(self.dataview.data_benchmark.index <= self.end_date)]
else:
benchmark = self.configs.get('benchmark', "")
if benchmark and data_server_:
df, msg = data_server_.daily(benchmark, start_date=self.closes.index[0], end_date=self.closes.index[-1])
self.data_benchmark = df.set_index('trade_date').loc[:, ['close']]
self.data_benchmark.columns = ['bench']
else:
self.data_benchmark = pd.DataFrame(index=self.closes.index, columns=['bench'], data=np.ones(len(self.closes), dtype=float))
class AlphaAnalyzer(BaseAnalyzer):
def __init__(self):
super(AlphaAnalyzer, self).__init__()
self.metrics = dict()
self.daily = None
self.returns = None # OrderedDict
self.position_change = None # OrderedDict
self.account = None # OrderedDict
self.df_brinson = None
self.data_benchmark = None
def initialize(self, data_api=None, dataview=None, file_folder='.'):
super(AlphaAnalyzer, self).initialize(data_api=data_api, dataview=dataview,
file_folder=file_folder)
if self.dataview is not None and self.dataview.data_benchmark is not None:
self.data_benchmark = self.dataview.data_benchmark.loc[(self.dataview.data_benchmark.index >= self.start_date)
&(self.dataview.data_benchmark.index <= self.end_date)]
@staticmethod
def _to_pct_return(arr, cumulative=False):
"""Convert portfolio value to portfolio (linear) return."""
r = np.empty_like(arr)
r[0] = 0.0
if cumulative:
r[1:] = arr[1:] / arr[0] - 1
else:
r[1:] = arr[1:] / arr[:-1] - 1
return r
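# Illustration (not part of the original source): for arr = np.array([100., 110., 121.]),
# cumulative=False gives [0.0, 0.1, 0.1] (period-over-period returns) while
# cumulative=True gives [0.0, 0.1, 0.21] (returns relative to the first value).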
'''
def get_returns_OLD(self, compound_return=True, consider_commission=True):
profit_col_name = 'CumProfitComm' if consider_commission else 'CumProfit'
vp_list = {sec: df_profit.loc[:, profit_col_name] for sec, df_profit in self.daily.items()}
df_profit = pd.concat(vp_list, axis=1) # this is cumulative profit
# TODO temporary solution
df_profit = df_profit.fillna(method='ffill').fillna(0.0)
strategy_value = df_profit.sum(axis=1) + self.configs['init_balance']
market_values = pd.concat([strategy_value, self.data_benchmark], axis=1).fillna(method='ffill')
market_values.columns = ['strat', 'bench']
df_returns = market_values.pct_change(periods=1).fillna(0.0)
df_returns = df_returns.join((df_returns.loc[:, ['strat', 'bench']] + 1.0).cumprod(), rsuffix='_cum')
if compound_return:
df_returns.loc[:, 'active_cum'] = df_returns['strat_cum'] - df_returns['bench_cum'] + 1
df_returns.loc[:, 'active'] = df_returns['active_cum'].pct_change(1).fillna(0.0)
else:
df_returns.loc[:, 'active'] = df_returns['strat'] - df_returns['bench']
df_returns.loc[:, 'active_cum'] = df_returns['active'].add(1.0).cumprod(axis=0)
start = pd.to_datetime(self.configs['start_date'], format="%Y%m%d")
end = pd.to_datetime(self.configs['end_date'], format="%Y%m%d")
years = (end - start).days / 365.0
self.metrics['yearly_return'] = np.power(df_returns.loc[:, 'active_cum'].values[-1], 1. / years) - 1
self.metrics['yearly_vol'] = df_returns.loc[:, 'active'].std() * np.sqrt(225.)
self.metrics['beta'] = np.corrcoef(df_returns.loc[:, 'bench'], df_returns.loc[:, 'strat'])[0, 1]
self.metrics['sharpe'] = self.metrics['yearly_return'] / self.metrics['yearly_vol']
# bt_strat_mv = pd.read_csv('bt_strat_mv.csv').set_index('trade_date')
# df_returns = df_returns.join(bt_strat_mv, how='right')
self.returns = df_returns
'''
def _get_index_weight(self):
if self.dataview is not None:
res = self.dataview.get_ts('index_weight', start_date=self.start_date, end_date=self.end_date)
else:
res = self.data_api.get_index_weights_daily(self.universe, self.start_date, self.end_date)
return res
def _brinson(self, close, pos, index_weight, group):
"""
Brinson Attribution.
Parameters
----------
close : pd.DataFrame
Index is date, columns are symbols.
pos : pd.DataFrame
Index is date, columns are symbols.
index_weight : pd.DataFrame
Index is date, columns are symbols.
group : pd.DataFrame
Index is date, columns are symbols.
Returns
-------
dict
"""
def group_sum(df, group_daily):
groups = np.unique(group_daily.values.flatten())
mask = np.isnan(groups.astype(float))
groups = groups[np.logical_not(mask)]
res = pd.DataFrame(index=df.index, columns=groups, data=np.nan)
for g in groups:
mask = group_daily == g
tmp = df[mask]
res.loc[:, g] = tmp.sum(axis=1)
return res
ret = close.pct_change(1)
pos_sum = pos.sum(axis=1)
pf_weight = pos.div(pos_sum, axis=0)
pf_weight.loc[pos_sum == 0, :] = 0.0
assert pf_weight.isnull().sum().sum() == 0
pf_weight = pf_weight.reindex(index=ret.index, columns=ret.columns)
pf_weight = pf_weight.fillna(0.0)
weighted_ret_pf = ret.mul(pf_weight)
weighted_ret_index = ret.mul(index_weight)
index_group_weight = group_sum(index_weight, group)
pf_group_weight = group_sum(pf_weight, group)
pf_group_ret = group_sum(weighted_ret_pf, group).div(pf_group_weight)
index_group_ret = group_sum(weighted_ret_index, group).div(index_group_weight)
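# Brinson-style decomposition per group and day, as computed below:
#   allocation  = (pf_group_weight - index_group_weight) * index_group_ret
#   selection   = index_group_weight * (pf_group_ret - index_group_ret)
#   interaction = active return - allocation - selection (residual term)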
allo_ret_group = (pf_group_weight - index_group_weight).mul(index_group_ret)
allo_ret = allo_ret_group.sum(axis=1)
selection_ret_group = (pf_group_ret - index_group_ret).mul(index_group_weight)
selection_ret = selection_ret_group.sum(axis=1)
active_ret = (weighted_ret_pf.sum(axis=1) - weighted_ret_index.sum(axis=1))
inter_ret = active_ret - selection_ret - allo_ret
df_brinson = pd.DataFrame(index=allo_ret.index,
data={'allocation': allo_ret,
'selection': selection_ret,
'interaction': inter_ret,
'total_active': active_ret})
return {'df_brinson': df_brinson, 'allocation': allo_ret_group, 'selection': selection_ret_group}
def brinson(self, group):
"""
Parameters
----------
group : str or pd.DataFrame
If group is string, this function will try to fetch the corresponding DataFrame from DataView.
If group is pd.DataFrame, it will be used as-is.
Returns
-------
"""
if isinstance(group, str):
group = self.dataview.get_ts(group, start_date=self.start_date, end_date=self.end_date)
elif isinstance(group, pd.DataFrame):
pass
else:
raise ValueError("Group must be string or DataFrame. But {} is provided.".format(group))
if group is None or group.empty:
raise ValueError("group is None or group is empty")
close = self.closes_adj
pos = self.daily_position
index_weight = self._get_index_weight()
res_dic = self._brinson(close, pos, index_weight, group)
df_brinson = res_dic['df_brinson']
self.df_brinson = df_brinson
self.report_dic['df_brinson'] = df_brinson
plot_brinson(df_brinson, save_folder=self.file_folder)
def do_analyze(self, result_dir, selected_sec=None, brinson_group=None):
if selected_sec is None:
selected_sec = []
print("process trades...")
self.process_trades()
print("get daily stats...")
self.get_daily()
print("calc strategy return...")
self.get_returns(consider_commission=False)
not_none_sec = []
if len(selected_sec) > 0:
print("Plot single securities PnL")
for symbol in selected_sec:
df_daily = self.daily.loc[pd.IndexSlice[symbol, :], :]
df_daily.index = df_daily.index.droplevel(0)
if df_daily is not None:
not_none_sec.append(symbol)
plot_trades(df_daily, symbol=symbol, save_folder=self.file_folder)
print("Plot strategy PnL...")
self.plot_pnl(result_dir)
if brinson_group is not None:
print("Do brinson attribution.")
group = self.dataview.get_ts(brinson_group)
if group is None:
raise ValueError("group data is None.")
self.brinson(group)
print("generate report...")
self.gen_report(source_dir=STATIC_FOLDER, template_fn='report_template.html',
out_folder=result_dir,
selected=not_none_sec)
def plot_daily_trading_holding_pnl(trading, holding, total, total_cum):
"""
Parameters
----------
trading, holding, total, total_cum : pd.Series
"""
idx0 = total.index
n = len(idx0)
idx = np.arange(n)
fig, (ax0, ax2, ax3) = plt.subplots(3, 1, figsize=(16, 13.5), sharex=True)
ax1 = ax0.twinx()
bar_width = 0.4
profit_color, lose_color = '#D63434', '#2DB635'
curve_color = '#174F67'
y_label = 'Profit / Loss ($)'
color_arr_raw = np.array([profit_color] * n)
color_arr = color_arr_raw.copy()
color_arr[total < 0] = lose_color
ax0.bar(idx, total, width=bar_width, color=color_arr)
ax0.set(title='Daily PnL', ylabel=y_label, xlim=[-2, n+2],)
ax0.xaxis.set_major_formatter(MyFormatter(idx0, '%y-%m-%d'))
ax1.plot(idx, total_cum, lw=1.5, color=curve_color)
ax1.set(ylabel='Cum. ' + y_label)
ax1.yaxis.label.set_color(curve_color)
color_arr = color_arr_raw.copy()
color_arr[trading < 0] = lose_color
ax2.bar(idx-bar_width/2, trading, width=bar_width, color=color_arr)
ax2.set(title='Daily Trading PnL', ylabel=y_label)
color_arr = color_arr_raw.copy()
color_arr[holding < 0] = lose_color
ax3.bar(idx+bar_width/2, holding, width=bar_width, color=color_arr)
ax3.set(title='Daily Holding PnL', ylabel=y_label, xticks=idx[: : n//10])
return fig
def plot_portfolio_bench_pnl(portfolio_cum_ret, benchmark_cum_ret, excess_cum_ret):
"""
Parameters
----------
portfolio_cum_ret, benchmark_cum_ret, excess_cum_ret : pd.Series
"""
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(16, 9), sharex=True)
idx_dt = portfolio_cum_ret.index
idx = np.arange(len(idx_dt))
y_label_ret = "Cumulative Return (%)"
ax1.plot(idx, (benchmark_cum_ret-1) * TO_PCT, label='Benchmark', color='#174F67')
ax1.plot(idx, (portfolio_cum_ret-1) * TO_PCT, label='Strategy', color='#198DD6')
ax1.legend(loc='upper left')
ax1.set(title="Absolute Return of Portfolio and Benchmark",
#xlabel="Date",
ylabel=y_label_ret)
ax1.grid(axis='y')
ax2.plot(idx, (excess_cum_ret-1) * TO_PCT, label='Extra Return', color='#C37051')
ax2.set(title="Excess Return Compared to Benchmark", ylabel=y_label_ret
#xlabel="Date",
)
ax2.grid(axis='y')
ax2.xaxis.set_major_formatter(MyFormatter(idx_dt, '%y-%m-%d')) # 17-09-31
fig.tight_layout()
return fig
def plot_brinson(df, save_folder):
"""
Parameters
----------
df : pd.DataFrame
"""
allo, selec, inter, total = df['allocation'], df['selection'], df['interaction'], df['total_active']
fig, ax1 = plt.subplots(1, 1, figsize=(21, 8))
idx0 = df.index
idx = range(len(idx0))
ax1.plot(idx, selec, lw=1.5, color='indianred', label='Selection Return')
ax1.plot(idx, allo, lw=1.5, color='royalblue', label='Allocation Return')
ax1.plot(idx, inter, lw=1.5, color='purple', label='Interaction Return')
# ax1.plot(idx, total, lw=1.5, ls='--', color='k', label='Total Active Return')
ax1.axhline(0.0, color='k', lw=0.5, ls='--')
ax1.legend(loc='upper left')
ax1.set_xlabel("Date")
ax1.set_ylabel("Return")
ax1.xaxis.set_major_formatter(MyFormatter(idx0, '%Y-%m-%d'))
plt.tight_layout()
fig.savefig(os.path.join(save_folder, 'brinson_attribution.png'))
plt.close()
def calc_avg_pos_price(pos_arr, price_arr):
"""
Calculate average cost price using position and fill price.
When position = 0, cost price = symbol price.
"""
assert len(pos_arr) == len(price_arr)
avg_price = np.zeros_like(pos_arr, dtype=float)
avg_price[0] = price_arr[0]
for i in range(pos_arr.shape[0] - 1):
if pos_arr[i+1] == 0:
avg_price[i+1] = 0.0
else:
pos_diff = pos_arr[i+1] - pos_arr[i]
if pos_arr[i] == 0 or pos_diff * pos_arr[i] > 0:
count = True
else:
count = False
if count:
avg_price[i+1] = (avg_price[i] * pos_arr[i] + pos_diff * price_arr[i+1]) * 1. / pos_arr[i+1]
else:
avg_price[i+1] = avg_price[i]
return avg_price
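# Usage sketch (illustrative, not part of the original module):
#   calc_avg_pos_price(np.array([100, 200, 0]), np.array([10.0, 11.0, 12.0]))
#   # -> array([10. , 10.5,  0. ])
# The average cost is updated on same-direction adds and reset to 0 once the
# position is fully closed.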
def plot_trades(df, symbol="", save_folder='.', marker_size_adjust_ratio=0.1):
old_mpl_rcparams = {k: v for k, v in mpl.rcParams.items()}
mpl.rcParams.update(MPL_RCPARAMS)
idx0 = df.index
idx = range(len(idx0))
price = df.loc[:, 'close']
bv, sv = df.loc[:, 'BuyVolume'].values, df.loc[:, 'SellVolume'].values
profit = df.loc[:, 'CumProfit'].values
avgpx = df.loc[:, 'AvgPosPrice']
bv_m = np.max(bv)
sv_m = np.max(sv)
if bv_m > 0:
bv = bv / bv_m * 100
if sv_m > 0:
sv = sv / sv_m * 100
fig = plt.figure(figsize=(14, 10))
ax1 = plt.subplot2grid((4, 1), (0, 0), rowspan=3)
ax3 = plt.subplot2grid((4, 1), (3, 0), rowspan=1, sharex=ax1)
ax2 = ax1.twinx()
ax1.plot(idx, price, label='Price', linestyle='-', lw=1, marker='', color='yellow')
ax1.scatter(idx, price, label='buy', marker='o', s=bv, color='indianred')
ax1.scatter(idx, price, label='sell', marker='o', s=sv, color='forestgreen')
ax1.plot(idx, avgpx, lw=1, marker='', color='green')
ax1.legend(loc='upper left')
ax1.set(title="Price, Trades and PnL for {:s}".format(symbol), ylabel="Price ($)")
ax1.xaxis.set_major_formatter(MyFormatter(idx0, '%Y-%m'))
ax2.plot(idx, profit, label='PnL', color='k', lw=1, ls='--', alpha=.4)
ax2.legend(loc='upper right')
ax2.set(ylabel="Profit / Loss ($)")
# ax1.xaxis.set_major_formatter(MyFormatter(df.index))#, '%H:%M'))
ax3.plot(idx, df.loc[:, 'position'], marker='D', markersize=3, lw=2)
ax3.axhline(0, color='k', lw=1, ls='--', alpha=0.8)
ax3.set(title="Position of {:s}".format(symbol))
fig.tight_layout()
fig.savefig(save_folder + '/' + "{}.png".format(symbol), facecolor=fig.get_facecolor(), dpi=fig.get_dpi())
mpl.rcParams.update(old_mpl_rcparams)
|
[
"numpy.sqrt",
"numpy.logical_not",
"numpy.array",
"jaqs.data.basic.instrument.InstManager",
"pandas.to_datetime",
"numpy.arange",
"jaqs.util.group_df_to_dict",
"jaqs.trade.analyze.report.Report",
"numpy.zeros_like",
"numpy.max",
"matplotlib.pyplot.close",
"matplotlib.pyplot.subplots",
"jaqs.util.join_relative_path",
"pandas.DataFrame",
"numpy.round",
"collections.OrderedDict",
"matplotlib.rcParams.update",
"numpy.corrcoef",
"jaqs.util.combine_date_time",
"numpy.power",
"jaqs.trade.common.ORDER_ACTION.is_positive",
"os.path.join",
"json.load",
"matplotlib.pyplot.figure",
"numpy.empty_like",
"matplotlib.pyplot.tight_layout",
"os.path.abspath",
"matplotlib.rcParams.items",
"pandas.concat",
"matplotlib.pyplot.subplot2grid"
] |
[((462, 510), 'jaqs.util.join_relative_path', 'jutil.join_relative_path', (['"""trade/analyze/static"""'], {}), "('trade/analyze/static')\n", (486, 510), True, 'import jaqs.util as jutil\n'), ((31059, 31071), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (31068, 31071), True, 'import numpy as np\n'), ((31104, 31155), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {'figsize': '(16, 13.5)', 'sharex': '(True)'}), '(3, 1, figsize=(16, 13.5), sharex=True)\n', (31116, 31155), True, 'import matplotlib.pyplot as plt\n'), ((31337, 31365), 'numpy.array', 'np.array', (['([profit_color] * n)'], {}), '([profit_color] * n)\n', (31345, 31365), True, 'import numpy as np\n'), ((32410, 32458), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {'figsize': '(16, 9)', 'sharex': '(True)'}), '(2, 1, figsize=(16, 9), sharex=True)\n', (32422, 32458), True, 'import matplotlib.pyplot as plt\n'), ((33525, 33560), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(21, 8)'}), '(1, 1, figsize=(21, 8))\n', (33537, 33560), True, 'import matplotlib.pyplot as plt\n'), ((34149, 34167), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (34165, 34167), True, 'import matplotlib.pyplot as plt\n'), ((34242, 34253), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (34251, 34253), True, 'import matplotlib.pyplot as plt\n'), ((34493, 34528), 'numpy.zeros_like', 'np.zeros_like', (['pos_arr'], {'dtype': 'float'}), '(pos_arr, dtype=float)\n', (34506, 34528), True, 'import numpy as np\n'), ((35240, 35273), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (['MPL_RCPARAMS'], {}), '(MPL_RCPARAMS)\n', (35259, 35273), True, 'import matplotlib as mpl\n'), ((35524, 35534), 'numpy.max', 'np.max', (['bv'], {}), '(bv)\n', (35530, 35534), True, 'import numpy as np\n'), ((35546, 35556), 'numpy.max', 'np.max', (['sv'], {}), '(sv)\n', (35552, 35556), True, 'import numpy as np\n'), ((35664, 35692), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(14, 10)'}), '(figsize=(14, 10))\n', (35674, 35692), True, 'import matplotlib.pyplot as plt\n'), ((35703, 35746), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(4, 1)', '(0, 0)'], {'rowspan': '(3)'}), '((4, 1), (0, 0), rowspan=3)\n', (35719, 35746), True, 'import matplotlib.pyplot as plt\n'), ((35757, 35812), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(4, 1)', '(3, 0)'], {'rowspan': '(1)', 'sharex': 'ax1'}), '((4, 1), (3, 0), rowspan=1, sharex=ax1)\n', (35773, 35812), True, 'import matplotlib.pyplot as plt\n'), ((36892, 36929), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (['old_mpl_rcparams'], {}), '(old_mpl_rcparams)\n', (36911, 36929), True, 'import matplotlib as mpl\n'), ((4001, 4029), 'os.path.abspath', 'os.path.abspath', (['file_folder'], {}), '(file_folder)\n', (4016, 4029), False, 'import os\n'), ((5152, 5223), 'jaqs.util.combine_date_time', 'jutil.combine_date_time', (["df.loc[:, 'fill_date']", "df.loc[:, 'fill_time']"], {}), "(df.loc[:, 'fill_date'], df.loc[:, 'fill_time'])\n", (5175, 5223), True, 'import jaqs.util as jutil\n'), ((8911, 8924), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (8922, 8924), False, 'from collections import OrderedDict\n'), ((8943, 8956), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (8954, 8956), False, 'from collections import OrderedDict\n'), ((10765, 10812), 'pandas.concat', 'pd.concat', (['[close, trade]'], {'axis': '(1)', 'join': '"""outer"""'}), "([close, trade], axis=1, join='outer')\n", (10774, 
10812), True, 'import pandas as pd\n'), ((14599, 14658), 'pandas.to_datetime', 'pd.to_datetime', (["self.configs['start_date']"], {'format': '"""%Y%m%d"""'}), "(self.configs['start_date'], format='%Y%m%d')\n", (14613, 14658), True, 'import pandas as pd\n'), ((14673, 14730), 'pandas.to_datetime', 'pd.to_datetime', (["self.configs['end_date']"], {'format': '"""%Y%m%d"""'}), "(self.configs['end_date'], format='%Y%m%d')\n", (14687, 14730), True, 'import pandas as pd\n'), ((15720, 15753), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (['MPL_RCPARAMS'], {}), '(MPL_RCPARAMS)\n', (15739, 15753), True, 'import matplotlib as mpl\n'), ((16632, 16669), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (['old_mpl_rcparams'], {}), '(old_mpl_rcparams)\n', (16651, 16669), True, 'import matplotlib as mpl\n'), ((16832, 16889), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {'figsize': '(21, 8)', 'dpi': '(300)', 'sharex': '(True)'}), '(3, 1, figsize=(21, 8), dpi=300, sharex=True)\n', (16844, 16889), True, 'import matplotlib.pyplot as plt\n'), ((17912, 17930), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (17928, 17930), True, 'import matplotlib.pyplot as plt\n'), ((18001, 18012), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (18010, 18012), True, 'import matplotlib.pyplot as plt\n'), ((19170, 19217), 'jaqs.util.group_df_to_dict', 'jutil.group_df_to_dict', (['self.daily'], {'by': '"""symbol"""'}), "(self.daily, by='symbol')\n", (19192, 19217), True, 'import jaqs.util as jutil\n'), ((19410, 19508), 'jaqs.trade.analyze.report.Report', 'Report', (['self.report_dic'], {'source_dir': 'source_dir', 'template_fn': 'template_fn', 'out_folder': 'out_folder'}), '(self.report_dic, source_dir=source_dir, template_fn=template_fn,\n out_folder=out_folder)\n', (19416, 19508), False, 'from jaqs.trade.analyze.report import Report\n'), ((23184, 23202), 'numpy.empty_like', 'np.empty_like', (['arr'], {}), '(arr)\n', (23197, 23202), True, 'import numpy as np\n'), ((27894, 28049), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'allo_ret.index', 'data': "{'allocation': allo_ret, 'selection': selection_ret, 'interaction':\n inter_ret, 'total_active': active_ret}"}), "(index=allo_ret.index, data={'allocation': allo_ret,\n 'selection': selection_ret, 'interaction': inter_ret, 'total_active':\n active_ret})\n", (27906, 28049), True, 'import pandas as pd\n'), ((34184, 34236), 'os.path.join', 'os.path.join', (['save_folder', '"""brinson_attribution.png"""'], {}), "(save_folder, 'brinson_attribution.png')\n", (34196, 34236), False, 'import os\n'), ((1565, 1576), 'numpy.round', 'np.round', (['x'], {}), '(x)\n', (1573, 1576), True, 'import numpy as np\n'), ((4095, 4139), 'os.path.join', 'os.path.join', (['self.file_folder', '"""trades.csv"""'], {}), "(self.file_folder, 'trades.csv')\n", (4107, 4139), False, 'import os\n'), ((6863, 6875), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6872, 6875), False, 'import json\n'), ((15341, 15408), 'numpy.corrcoef', 'np.corrcoef', (["df_returns.loc[:, 'bench']", "df_returns.loc[:, 'strat']"], {}), "(df_returns.loc[:, 'bench'], df_returns.loc[:, 'strat'])\n", (15352, 15408), True, 'import numpy as np\n'), ((16082, 16122), 'os.path.join', 'os.path.join', (['save_folder', '"""pnl_img.png"""'], {}), "(save_folder, 'pnl_img.png')\n", (16094, 16122), False, 'import os\n'), ((16506, 16562), 'os.path.join', 'os.path.join', (['save_folder', '"""pnl_img_trading_holding.png"""'], {}), "(save_folder, 'pnl_img_trading_holding.png')\n", (16518, 
16562), False, 'import os\n'), ((17951, 17991), 'os.path.join', 'os.path.join', (['save_folder', '"""pnl_img.png"""'], {}), "(save_folder, 'pnl_img.png')\n", (17963, 17991), False, 'import os\n'), ((19352, 19391), 'os.path.join', 'os.path.join', (['out_folder', '"""returns.csv"""'], {}), "(out_folder, 'returns.csv')\n", (19364, 19391), False, 'import os\n'), ((26513, 26570), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'df.index', 'columns': 'groups', 'data': 'np.nan'}), '(index=df.index, columns=groups, data=np.nan)\n', (26525, 26570), True, 'import pandas as pd\n'), ((35214, 35234), 'matplotlib.rcParams.items', 'mpl.rcParams.items', ([], {}), '()\n', (35232, 35234), True, 'import matplotlib as mpl\n'), ((1714, 1762), 'pandas.to_datetime', 'pd.to_datetime', (['self.dates[ind]'], {'format': '"""%Y%m%d"""'}), "(self.dates[ind], format='%Y%m%d')\n", (1728, 1762), True, 'import pandas as pd\n'), ((4803, 4857), 'jaqs.data.basic.instrument.InstManager', 'InstManager', ([], {'data_api': 'self.data_api', 'symbol': 'symbol_str'}), '(data_api=self.data_api, symbol=symbol_str)\n', (4814, 4857), False, 'from jaqs.data.basic.instrument import InstManager\n'), ((6774, 6810), 'os.path.join', 'os.path.join', (['folder', '"""configs.json"""'], {}), "(folder, 'configs.json')\n", (6786, 6810), False, 'import os\n'), ((13847, 13903), 'pandas.concat', 'pd.concat', (['[strategy_value, self.data_benchmark]'], {'axis': '(1)'}), '([strategy_value, self.data_benchmark], axis=1)\n', (13856, 13903), True, 'import pandas as pd\n'), ((14855, 14920), 'numpy.power', 'np.power', (["df_returns.loc[:, 'active_cum'].values[-1]", '(1.0 / years)'], {}), "(df_returns.loc[:, 'active_cum'].values[-1], 1.0 / years)\n", (14863, 14920), True, 'import numpy as np\n'), ((15041, 15091), 'numpy.sqrt', 'np.sqrt', (['common.CALENDAR_CONST.TRADE_DAYS_PER_YEAR'], {}), '(common.CALENDAR_CONST.TRADE_DAYS_PER_YEAR)\n', (15048, 15091), True, 'import numpy as np\n'), ((15690, 15710), 'matplotlib.rcParams.items', 'mpl.rcParams.items', ([], {}), '()\n', (15708, 15710), True, 'import matplotlib as mpl\n'), ((26473, 26493), 'numpy.logical_not', 'np.logical_not', (['mask'], {}), '(mask)\n', (26487, 26493), True, 'import numpy as np\n'), ((7555, 7589), 'jaqs.trade.common.ORDER_ACTION.is_positive', 'common.ORDER_ACTION.is_positive', (['s'], {}), '(s)\n', (7586, 7589), False, 'from jaqs.trade import common\n')]
|
import io
import sys
import unittest
import asyncio
import random
from contextlib import redirect_stdout
from .utils import *
from queuebot import QueueBot, QueueConfig, DiscordUser
config = {
"SECRET_TOKEN": "<PASSWORD>",
"TA_ROLES": ["UGTA"],
"LISTEN_CHANNELS": ["join-queue"],
"CHECK_VOICE_WAITING": "False",
"VOICE_WAITING": "waiting-room",
"ALERT_ON_FIRST_JOIN": "True",
"VOICE_OFFICES": ["Office Hours Room 1", "Office Hours Room 2", "Office Hours Room 3"],
"ALERTS_CHANNEL": "queue-alerts",
}
config = QueueConfig(config, test_mode=True)
# TODO Comment each test case
class QueueTest(unittest.TestCase):
def setUp(self):
random.seed(SEED)
self.config = config.copy()
self.bot = QueueBot(self.config, None, testing=True)
# self.bot.waiting_room = MockVoice(config.VOICE_WAITING)
self.bot.logger = MockLogger()
self.bot.office_rooms = [MockVoice(name) for name in config.VOICE_OFFICES]
def reset_vc_queue(self):
# Reset queue
russ = get_rand_element(ALL_TAS)
message = MockMessage("!q clear", russ)
with io.StringIO() as buf, redirect_stdout(buf):
run(self.bot.queue_command(message))
self.assertEqual(len(self.bot._queue), 0)
# Empty voice channels
for v in self.bot.office_rooms:
v.members = []
def test_no_tas(self):
# No TAs in rooms
student = get_rand_element(ALL_STUDENTS)
self.assertEqual(len(self.bot._queue), 0)
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q join", student)
run(self.bot.queue_command(message))
self.assertTrue(buf.getvalue().strip().startswith(
f"SEND: ✅ {student.get_mention()} you have been added at position #1"))
self.assertEqual(len(self.bot._queue), 1)
self.reset_vc_queue()
def test_one_ta(self):
ta = get_rand_element(ALL_TAS)
office_room = get_rand_element(self.bot.office_rooms)
office_room.members.append(ta)
student = get_rand_element(ALL_STUDENTS)
self.assertEqual(len(self.bot._queue), 0)
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q join", student)
run(self.bot.queue_command(message))
self.assertTrue(buf.getvalue().strip().startswith(
f"SEND: {ta.get_mention()} The queue is no longer empty"))
self.assertEqual(len(self.bot._queue), 1)
self.reset_vc_queue()
def get_mentions_from_send(self, buf):
send_str = buf.getvalue().strip().split("\n", 1)[0]
assert send_str.startswith("SEND:")
assert "<@" in send_str
assert "The queue is no longer empty" in send_str
return send_str.lstrip("SEND: ") \
.rstrip(" The queue is no longer empty") \
.split(" ")
def test_many_tas_one_room(self):
tas = get_n_rand(ALL_TAS, 3)
office_room = get_rand_element(self.bot.office_rooms)
office_room.members.extend(tas)
mention_set = set()
student = get_rand_element(ALL_STUDENTS)
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q join", student)
run(self.bot.queue_command(message))
mentions = self.get_mentions_from_send(buf)
mention_set.update(mentions)
for ta in tas:
self.assertTrue(ta.get_mention() in mention_set)
mention_set.remove(ta.get_mention())
self.assertEqual(len(mention_set), 0)
self.reset_vc_queue()
def test_many_tas_all_rooms(self):
tas = get_n_rand(ALL_TAS, 5)
tas_copy = tas.copy()
while len(tas) > 0:
for office_room in self.bot.office_rooms:
# If we run out of TAs while going through all the rooms
if len(tas) == 0:
break
office_room.add_member(tas.pop())
mention_set = set()
student = get_rand_element(ALL_STUDENTS)
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q join", student)
run(self.bot.queue_command(message))
mentions = self.get_mentions_from_send(buf)
mention_set.update(mentions)
for ta in tas_copy:
self.assertTrue(ta.get_mention() in mention_set)
mention_set.remove(ta.get_mention())
self.assertEqual(len(mention_set), 0)
self.reset_vc_queue()
def test_ta_with_student(self):
busy_room, open_room = get_n_rand(self.bot.office_rooms, 2)
busy_ta, open_ta = get_n_rand(ALL_TAS, 2)
busy_student, open_student = get_n_rand(ALL_STUDENTS, 2)
busy_room.add_many_members(busy_ta, busy_student)
open_room.add_member(open_ta)
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q join", busy_student)
run(self.bot.queue_command(message))
mentions = self.get_mentions_from_send(buf)
self.assertEqual(mentions, [open_ta.get_mention()])
def test_ta_with_student2(self):
rooms = get_n_rand(self.bot.office_rooms, 3)
busy_rooms = rooms[:-1]
open_room = rooms[-1]
busy_ta, open_ta = get_n_rand(ALL_TAS, 2)
students = [ None ]
open_student = None
while open_student in students:
students = get_n_rand(ALL_STUDENTS, 5)
open_student = get_rand_element(ALL_STUDENTS)
busy_rooms[0].add_many_members(busy_ta, *students[:-2])
busy_rooms[1].add_many_members(busy_ta, *students[-2:])
open_room.add_member(open_ta)
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q join", open_student)
run(self.bot.queue_command(message))
mentions = self.get_mentions_from_send(buf)
self.assertEqual(mentions, [open_ta.get_mention()])
def test_two_tas(self):
tas = get_n_rand(ALL_TAS, 2)
rooms = get_n_rand(self.bot.office_rooms, 2)
rooms[0].add_member(tas[0])
rooms[1].add_member(tas[1])
students = get_n_rand(ALL_STUDENTS, 2)
# Check for both alerted
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q join", students[0])
run(self.bot.queue_command(message))
ta_list = set(self.get_mentions_from_send(buf))
for ta in tas:
ta_list.remove(ta.get_mention())
self.assertEqual(len(ta_list), 0)
# Remove first student from queue
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q next", tas[0])
run(self.bot.queue_command(message))
self.assertEqual(len(self.bot._queue), 0)
# First ta helps first student
rooms[0].add_member(students[0])
# Another student joins
with io.StringIO() as buf, redirect_stdout(buf):
message = MockMessage("!q join", students[1])
run(self.bot.queue_command(message))
ta_list = self.get_mentions_from_send(buf)
self.assertEqual(ta_list, [tas[1].get_mention()])
if __name__ == '__main__':
unittest.main()
|
[
"queuebot.QueueBot",
"queuebot.QueueConfig",
"contextlib.redirect_stdout",
"random.seed",
"unittest.main",
"io.StringIO"
] |
[((543, 578), 'queuebot.QueueConfig', 'QueueConfig', (['config'], {'test_mode': '(True)'}), '(config, test_mode=True)\n', (554, 578), False, 'from queuebot import QueueBot, QueueConfig, DiscordUser\n'), ((7358, 7373), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7371, 7373), False, 'import unittest\n'), ((676, 693), 'random.seed', 'random.seed', (['SEED'], {}), '(SEED)\n', (687, 693), False, 'import random\n'), ((749, 790), 'queuebot.QueueBot', 'QueueBot', (['self.config', 'None'], {'testing': '(True)'}), '(self.config, None, testing=True)\n', (757, 790), False, 'from queuebot import QueueBot, QueueConfig, DiscordUser\n'), ((1134, 1147), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1145, 1147), False, 'import io\n'), ((1156, 1176), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (1171, 1176), False, 'from contextlib import redirect_stdout\n'), ((1545, 1558), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1556, 1558), False, 'import io\n'), ((1567, 1587), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (1582, 1587), False, 'from contextlib import redirect_stdout\n'), ((2207, 2220), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (2218, 2220), False, 'import io\n'), ((2229, 2249), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (2244, 2249), False, 'from contextlib import redirect_stdout\n'), ((3231, 3244), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (3242, 3244), False, 'import io\n'), ((3253, 3273), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (3268, 3273), False, 'from contextlib import redirect_stdout\n'), ((4151, 4164), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (4162, 4164), False, 'import io\n'), ((4173, 4193), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (4188, 4193), False, 'from contextlib import redirect_stdout\n'), ((4943, 4956), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (4954, 4956), False, 'import io\n'), ((4965, 4985), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (4980, 4985), False, 'from contextlib import redirect_stdout\n'), ((5801, 5814), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (5812, 5814), False, 'import io\n'), ((5823, 5843), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (5838, 5843), False, 'from contextlib import redirect_stdout\n'), ((6356, 6369), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (6367, 6369), False, 'import io\n'), ((6378, 6398), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (6393, 6398), False, 'from contextlib import redirect_stdout\n'), ((6734, 6747), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (6745, 6747), False, 'import io\n'), ((6756, 6776), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (6771, 6776), False, 'from contextlib import redirect_stdout\n'), ((7058, 7071), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (7069, 7071), False, 'import io\n'), ((7080, 7100), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (7095, 7100), False, 'from contextlib import redirect_stdout\n')]
|
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from tests.common.tensorio import compare_tensor
from tests.common.test_op import triangle
from akg.utils import kernel_exec as utils
from tests.common.gen_random import random_gaussian
def triangle_execute(shape, const_value, lower, dtype, attrs):
support_type = ['float16', 'float32']
assert dtype in support_type
assert len(shape) <= 2
if attrs is None:
attrs = {'enable_pre_poly_loop_partition': False}
attrs['enable_pre_poly_loop_partition'] = False
attrs['enable_post_poly_loop_partition'] = False
attrs['enable_convert_if'] = True
attrs['enable_double_buffer'] = False
output_shape = shape
if len(shape) == 1:
output_shape = [shape[0], shape[0]]
input, bench_mark = gen_data(shape, output_shape, const_value, lower, dtype)
op_attrs = [const_value, lower]
mod = triangle_compile(shape, dtype, op_attrs, attrs)
source_code = mod.imported_modules[0].get_source()
output = np.full(output_shape, np.nan, dtype)
output = utils.mod_launch(mod, (input, output), expect=bench_mark)
# compare result
compare_result = compare_tensor(output, bench_mark, rtol=5e-3, equal_nan=True)
return input, output, bench_mark, compare_result
def triangle_compile(shape, dtype, op_attrs, attrs):
return utils.op_build_test(triangle.triangle, [shape], [dtype], op_attrs, kernel_name='triangle', attrs=attrs)
def gen_data(shape, output_shape, const_value, lower, dtype):
input = random_gaussian(shape, miu=1, sigma=0.3).astype(dtype)
if len(shape) == 2:
bench_mark = input
else:
bench_mark = np.zeros(output_shape).astype(dtype)
for i in range(output_shape[0]):
bench_mark[i] = input
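# Mask the part of each replicated row that lies outside the requested triangle:
# above the diagonal for lower=True, strictly below it otherwise.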
if lower:
for i in range(output_shape[0]):
bench_mark[i][i + 1:] = const_value
else:
for i in range(output_shape[0]):
bench_mark[i][:i] = const_value
return input, bench_mark
|
[
"tests.common.gen_random.random_gaussian",
"akg.utils.kernel_exec.mod_launch",
"numpy.zeros",
"numpy.full",
"akg.utils.kernel_exec.op_build_test",
"tests.common.tensorio.compare_tensor"
] |
[((1566, 1602), 'numpy.full', 'np.full', (['output_shape', 'np.nan', 'dtype'], {}), '(output_shape, np.nan, dtype)\n', (1573, 1602), True, 'import numpy as np\n'), ((1616, 1673), 'akg.utils.kernel_exec.mod_launch', 'utils.mod_launch', (['mod', '(input, output)'], {'expect': 'bench_mark'}), '(mod, (input, output), expect=bench_mark)\n', (1632, 1673), True, 'from akg.utils import kernel_exec as utils\n'), ((1717, 1779), 'tests.common.tensorio.compare_tensor', 'compare_tensor', (['output', 'bench_mark'], {'rtol': '(0.005)', 'equal_nan': '(True)'}), '(output, bench_mark, rtol=0.005, equal_nan=True)\n', (1731, 1779), False, 'from tests.common.tensorio import compare_tensor\n'), ((1898, 2005), 'akg.utils.kernel_exec.op_build_test', 'utils.op_build_test', (['triangle.triangle', '[shape]', '[dtype]', 'op_attrs'], {'kernel_name': '"""triangle"""', 'attrs': 'attrs'}), "(triangle.triangle, [shape], [dtype], op_attrs,\n kernel_name='triangle', attrs=attrs)\n", (1917, 2005), True, 'from akg.utils import kernel_exec as utils\n'), ((2078, 2118), 'tests.common.gen_random.random_gaussian', 'random_gaussian', (['shape'], {'miu': '(1)', 'sigma': '(0.3)'}), '(shape, miu=1, sigma=0.3)\n', (2093, 2118), False, 'from tests.common.gen_random import random_gaussian\n'), ((2215, 2237), 'numpy.zeros', 'np.zeros', (['output_shape'], {}), '(output_shape)\n', (2223, 2237), True, 'import numpy as np\n')]
|
from transformer import *
from logger import logger
def find_missing():
from db import paients_source, paients_info
import re
for pi in paients_info.find():
if paients_source.find({'_id': re.compile(pi['住院号'], re.IGNORECASE)}).count()>0:
pass
else:
print(pi['住院号'])
def verify_data(collection):
'Verify whether the data format is correct.'
for d in collection.find():
info = d.get('d').get('info')
if len(info) <12 and info[0] != '1':
logger.error('invalid patient info:' + d['_id']+str(info))
if len(d.get('d').get('doctor_advice')) == 0:
logger.error('invalid doctor advice:' + d['_id'])
else:
has_long = False
has_short = False
for a in d.get('d').get('doctor_advice'):
if len(a) != 18:
logger.error('invalid doctor advice:' + d['_id'])
logger.error("invalid doctor advice: " + a)
if a[3] == '长':
has_long = True
else:
has_short = True
if not (has_long and has_short):
logger.error('invalid doctor advice: ' + d['_id'] + ', long/short: {}/{}'.format(has_long, has_short) )
def get_info(collection):
'count PE'
for d in collection.find():
if len(d.get('d').get('doctor_advice')) == 0:
print('invalid doctor advice:' + d['_id'])
else:
one_p = split_all_ad(d)
print(one_p)
break
def main():
'main entry'
from datetime import datetime
from db import paients_source
start = datetime.now()
print('hello..')
# verify_data(paients_source)
# get_info(collection)
find_missing()
print(datetime.now() - start)
if __name__ == '__main__':
main()
|
[
"datetime.datetime.now",
"db.paients_info.find",
"re.compile",
"logger.logger.error"
] |
[((149, 168), 'db.paients_info.find', 'paients_info.find', ([], {}), '()\n', (166, 168), False, 'from db import paients_source, paients_info\n'), ((1670, 1684), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1682, 1684), False, 'from datetime import datetime\n'), ((649, 698), 'logger.logger.error', 'logger.error', (["('invalid doctor advice:' + d['_id'])"], {}), "('invalid doctor advice:' + d['_id'])\n", (661, 698), False, 'from logger import logger\n'), ((1799, 1813), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1811, 1813), False, 'from datetime import datetime\n'), ((879, 928), 'logger.logger.error', 'logger.error', (["('invalid doctor advice:' + d['_id'])"], {}), "('invalid doctor advice:' + d['_id'])\n", (891, 928), False, 'from logger import logger\n'), ((949, 992), 'logger.logger.error', 'logger.error', (["('invalid doctor advice: ' + a)"], {}), "('invalid doctor advice: ' + a)\n", (961, 992), False, 'from logger import logger\n'), ((209, 245), 're.compile', 're.compile', (["pi['住院号']", 're.IGNORECASE'], {}), "(pi['住院号'], re.IGNORECASE)\n", (219, 245), False, 'import re\n')]
|
#!/usr/bin/env python
import os
import re
import subprocess
import sys
# version -> classifier
# '' means default classifier
cuda_vers = {
'11.2': ['cuda11', '']
}
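# To support another toolkit version, an entry would presumably be added here,
# e.g. '11.4': ['cuda11', ''] (hypothetical version/classifier pair).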
def check_classifier(classifier):
'''
Check the mapping from cuda version to jar classifier.
Used by maven build.
'''
cu_ver = detect_cuda_ver()
classifier_list = cuda_vers[cu_ver]
if classifier not in classifier_list:
raise Exception("Jar classifier '{}' mismatches the 'nvcc' version {} !".format(classifier, cu_ver))
def get_classifier():
cu_ver = detect_cuda_ver()
classifier_list = cuda_vers[cu_ver]
return classifier_list[0]
def get_supported_vers():
'''
Get the supported cuda versions.
'''
return cuda_vers.keys()
def get_supported_vers_str():
'''
Get the supported cuda versions and join them as a string.
Used by shell script.
'''
return ' '.join(cuda_vers.keys())
def detect_cuda_ver():
'''
Detect the cuda version from current nvcc tool.
'''
nvcc_ver_bin = subprocess.check_output('nvcc --version', shell=True)
nvcc_ver = re.search('release ([.0-9]+), V([.0-9]+)', str(nvcc_ver_bin)).group(1)
if nvcc_ver in get_supported_vers():
return nvcc_ver
else:
raise Exception("Unsupported cuda version: {}, Please check your 'nvcc' version.".format(nvcc_ver))
def cudaver():
return 'cuda{}'.format(detect_cuda_ver())
if __name__ == "__main__":
num_args = len(sys.argv)
action = sys.argv[1].lower() if num_args > 1 else 'l'
if action =='c':
classifier = sys.argv[2].lower() if num_args > 2 else ''
check_classifier(classifier)
elif action == 'd':
print(detect_cuda_ver())
elif action == 'g':
print(get_classifier())
elif action == 'l':
print(get_supported_vers_str())
else:
print("Unsupported action: " + action)
|
[
"subprocess.check_output"
] |
[((1045, 1098), 'subprocess.check_output', 'subprocess.check_output', (['"""nvcc --version"""'], {'shell': '(True)'}), "('nvcc --version', shell=True)\n", (1068, 1098), False, 'import subprocess\n')]
|
# Generated by Django 3.0.2 on 2020-03-29 19:11
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('hitchhikeapp', '0010_userdata_userid'),
]
operations = [
migrations.DeleteModel(
name='Dog',
),
]
|
[
"django.db.migrations.DeleteModel"
] |
[((229, 263), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Dog"""'}), "(name='Dog')\n", (251, 263), False, 'from django.db import migrations\n')]
|
#!/usr/bin/env python3
import logging
import torch.nn as nn
from fairseq import checkpoint_utils
from fairseq.models import BaseFairseqModel, register_model
from pytorch_translate import rnn
from pytorch_translate.rnn import (
LSTMSequenceEncoder,
RNNDecoder,
RNNEncoder,
RNNModel,
base_architecture,
)
from pytorch_translate.tasks.pytorch_translate_task import PytorchTranslateTask
logger = logging.getLogger(__name__)
@register_model("dual_learning")
class DualLearningModel(BaseFairseqModel):
"""
An architecture to jointly train primal model and dual model by leveraging
    distribution duality, which exists for both parallel data and monolingual
data.
"""
def __init__(self, args, task, primal_model, dual_model, lm_model=None):
super().__init__()
self.args = args
self.task_keys = ["primal", "dual"]
self.models = nn.ModuleDict(
{"primal": primal_model, "dual": dual_model, "lm": lm_model}
)
def forward(self, src_tokens, src_lengths, prev_output_tokens=None):
"""
If batch is monolingual, need to run beam decoding to generate
fake prev_output_tokens.
"""
# TODO: pass to dual model too
primal_encoder_out = self.models["primal"].encoder(src_tokens, src_lengths)
primal_decoder_out = self.models["primal"].decoder(
prev_output_tokens, primal_encoder_out
)
return primal_decoder_out
def max_positions(self):
return {
"primal_source": (
self.models["primal"].encoder.max_positions(),
self.models["primal"].decoder.max_positions(),
),
"dual_source": (
self.models["dual"].encoder.max_positions(),
self.models["dual"].decoder.max_positions(),
),
"primal_parallel": (
self.models["primal"].encoder.max_positions(),
self.models["primal"].decoder.max_positions(),
),
"dual_parallel": (
self.models["dual"].encoder.max_positions(),
self.models["dual"].decoder.max_positions(),
),
}
@register_model("dual_learning_rnn")
class RNNDualLearningModel(DualLearningModel):
"""Train two models for a task and its duality jointly.
    This class uses RNN arch, but can be extended to take arch as an argument.
This class takes translation as a task, but the framework is intended
to be general enough to be applied to other tasks as well.
"""
def __init__(self, args, task, primal_model, dual_model, lm_model=None):
super().__init__(args, task, primal_model, dual_model, lm_model)
@staticmethod
def add_args(parser):
rnn.RNNModel.add_args(parser)
parser.add_argument(
"--unsupervised-dual",
default=False,
action="store_true",
help="Train with dual loss from monolingual data.",
)
parser.add_argument(
"--supervised-dual",
default=False,
action="store_true",
help="Train with dual loss from parallel data.",
)
@classmethod
def build_model(cls, args, task):
""" Build both the primal and dual models.
For simplicity, both models share the same arch, i.e. the same model
params would be used to initialize both models.
Support for different models/archs would be added in further iterations.
"""
base_architecture(args)
if args.sequence_lstm:
encoder_class = LSTMSequenceEncoder
else:
encoder_class = RNNEncoder
decoder_class = RNNDecoder
encoder_embed_tokens, decoder_embed_tokens = RNNModel.build_embed_tokens(
args, task.primal_src_dict, task.primal_tgt_dict
)
primal_encoder = encoder_class(
task.primal_src_dict,
embed_dim=args.encoder_embed_dim,
embed_tokens=encoder_embed_tokens,
cell_type=args.cell_type,
num_layers=args.encoder_layers,
hidden_dim=args.encoder_hidden_dim,
dropout_in=args.encoder_dropout_in,
dropout_out=args.encoder_dropout_out,
residual_level=args.residual_level,
bidirectional=bool(args.encoder_bidirectional),
)
primal_decoder = decoder_class(
src_dict=task.primal_src_dict,
dst_dict=task.primal_tgt_dict,
embed_tokens=decoder_embed_tokens,
vocab_reduction_params=args.vocab_reduction_params,
encoder_hidden_dim=args.encoder_hidden_dim,
embed_dim=args.decoder_embed_dim,
out_embed_dim=args.decoder_out_embed_dim,
cell_type=args.cell_type,
num_layers=args.decoder_layers,
hidden_dim=args.decoder_hidden_dim,
attention_type=args.attention_type,
dropout_in=args.decoder_dropout_in,
dropout_out=args.decoder_dropout_out,
residual_level=args.residual_level,
averaging_encoder=args.averaging_encoder,
)
primal_task = PytorchTranslateTask(
args, task.primal_src_dict, task.primal_tgt_dict
)
primal_model = rnn.RNNModel(primal_task, primal_encoder, primal_decoder)
if args.pretrained_forward_checkpoint:
pretrained_forward_state = checkpoint_utils.load_checkpoint_to_cpu(
args.pretrained_forward_checkpoint
)
primal_model.load_state_dict(pretrained_forward_state["model"], strict=True)
print(
f"Loaded pretrained primal model from {args.pretrained_forward_checkpoint}"
)
encoder_embed_tokens, decoder_embed_tokens = RNNModel.build_embed_tokens(
args, task.dual_src_dict, task.dual_tgt_dict
)
dual_encoder = encoder_class(
task.dual_src_dict,
embed_dim=args.encoder_embed_dim,
embed_tokens=encoder_embed_tokens,
cell_type=args.cell_type,
num_layers=args.encoder_layers,
hidden_dim=args.encoder_hidden_dim,
dropout_in=args.encoder_dropout_in,
dropout_out=args.encoder_dropout_out,
residual_level=args.residual_level,
bidirectional=bool(args.encoder_bidirectional),
)
dual_decoder = decoder_class(
src_dict=task.dual_src_dict,
dst_dict=task.dual_tgt_dict,
embed_tokens=decoder_embed_tokens,
vocab_reduction_params=args.vocab_reduction_params,
encoder_hidden_dim=args.encoder_hidden_dim,
embed_dim=args.decoder_embed_dim,
out_embed_dim=args.decoder_out_embed_dim,
cell_type=args.cell_type,
num_layers=args.decoder_layers,
hidden_dim=args.decoder_hidden_dim,
attention_type=args.attention_type,
dropout_in=args.decoder_dropout_in,
dropout_out=args.decoder_dropout_out,
residual_level=args.residual_level,
averaging_encoder=args.averaging_encoder,
)
dual_task = PytorchTranslateTask(args, task.dual_src_dict, task.dual_tgt_dict)
dual_model = rnn.RNNModel(dual_task, dual_encoder, dual_decoder)
if args.pretrained_backward_checkpoint:
pretrained_backward_state = checkpoint_utils.load_checkpoint_to_cpu(
args.pretrained_backward_checkpoint
)
dual_model.load_state_dict(pretrained_backward_state["model"], strict=True)
print(
f"Loaded pretrained dual model from {args.pretrained_backward_checkpoint}"
)
        # TODO (T36875783): instantiate a language model
lm_model = None
return RNNDualLearningModel(args, task, primal_model, dual_model, lm_model)
|
[
"logging.getLogger",
"pytorch_translate.tasks.pytorch_translate_task.PytorchTranslateTask",
"pytorch_translate.rnn.RNNModel.add_args",
"pytorch_translate.rnn.RNNModel",
"fairseq.models.register_model",
"fairseq.checkpoint_utils.load_checkpoint_to_cpu",
"pytorch_translate.rnn.base_architecture",
"pytorch_translate.rnn.RNNModel.build_embed_tokens",
"torch.nn.ModuleDict"
] |
[((416, 443), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (433, 443), False, 'import logging\n'), ((447, 478), 'fairseq.models.register_model', 'register_model', (['"""dual_learning"""'], {}), "('dual_learning')\n", (461, 478), False, 'from fairseq.models import BaseFairseqModel, register_model\n'), ((2219, 2254), 'fairseq.models.register_model', 'register_model', (['"""dual_learning_rnn"""'], {}), "('dual_learning_rnn')\n", (2233, 2254), False, 'from fairseq.models import BaseFairseqModel, register_model\n'), ((900, 975), 'torch.nn.ModuleDict', 'nn.ModuleDict', (["{'primal': primal_model, 'dual': dual_model, 'lm': lm_model}"], {}), "({'primal': primal_model, 'dual': dual_model, 'lm': lm_model})\n", (913, 975), True, 'import torch.nn as nn\n'), ((2789, 2818), 'pytorch_translate.rnn.RNNModel.add_args', 'rnn.RNNModel.add_args', (['parser'], {}), '(parser)\n', (2810, 2818), False, 'from pytorch_translate import rnn\n'), ((3551, 3574), 'pytorch_translate.rnn.base_architecture', 'base_architecture', (['args'], {}), '(args)\n', (3568, 3574), False, 'from pytorch_translate.rnn import LSTMSequenceEncoder, RNNDecoder, RNNEncoder, RNNModel, base_architecture\n'), ((3797, 3874), 'pytorch_translate.rnn.RNNModel.build_embed_tokens', 'RNNModel.build_embed_tokens', (['args', 'task.primal_src_dict', 'task.primal_tgt_dict'], {}), '(args, task.primal_src_dict, task.primal_tgt_dict)\n', (3824, 3874), False, 'from pytorch_translate.rnn import LSTMSequenceEncoder, RNNDecoder, RNNEncoder, RNNModel, base_architecture\n'), ((5213, 5283), 'pytorch_translate.tasks.pytorch_translate_task.PytorchTranslateTask', 'PytorchTranslateTask', (['args', 'task.primal_src_dict', 'task.primal_tgt_dict'], {}), '(args, task.primal_src_dict, task.primal_tgt_dict)\n', (5233, 5283), False, 'from pytorch_translate.tasks.pytorch_translate_task import PytorchTranslateTask\n'), ((5329, 5386), 'pytorch_translate.rnn.RNNModel', 'rnn.RNNModel', (['primal_task', 'primal_encoder', 'primal_decoder'], {}), '(primal_task, primal_encoder, primal_decoder)\n', (5341, 5386), False, 'from pytorch_translate import rnn\n'), ((5847, 5920), 'pytorch_translate.rnn.RNNModel.build_embed_tokens', 'RNNModel.build_embed_tokens', (['args', 'task.dual_src_dict', 'task.dual_tgt_dict'], {}), '(args, task.dual_src_dict, task.dual_tgt_dict)\n', (5874, 5920), False, 'from pytorch_translate.rnn import LSTMSequenceEncoder, RNNDecoder, RNNEncoder, RNNModel, base_architecture\n'), ((7247, 7313), 'pytorch_translate.tasks.pytorch_translate_task.PytorchTranslateTask', 'PytorchTranslateTask', (['args', 'task.dual_src_dict', 'task.dual_tgt_dict'], {}), '(args, task.dual_src_dict, task.dual_tgt_dict)\n', (7267, 7313), False, 'from pytorch_translate.tasks.pytorch_translate_task import PytorchTranslateTask\n'), ((7335, 7386), 'pytorch_translate.rnn.RNNModel', 'rnn.RNNModel', (['dual_task', 'dual_encoder', 'dual_decoder'], {}), '(dual_task, dual_encoder, dual_decoder)\n', (7347, 7386), False, 'from pytorch_translate import rnn\n'), ((5473, 5548), 'fairseq.checkpoint_utils.load_checkpoint_to_cpu', 'checkpoint_utils.load_checkpoint_to_cpu', (['args.pretrained_forward_checkpoint'], {}), '(args.pretrained_forward_checkpoint)\n', (5512, 5548), False, 'from fairseq import checkpoint_utils\n'), ((7475, 7551), 'fairseq.checkpoint_utils.load_checkpoint_to_cpu', 'checkpoint_utils.load_checkpoint_to_cpu', (['args.pretrained_backward_checkpoint'], {}), '(args.pretrained_backward_checkpoint)\n', (7514, 7551), False, 'from fairseq import checkpoint_utils\n')]
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" Test Driver HDF5 """
import os
import pathlib
import shutil
import tempfile
import unittest
import warnings
from test import QiskitNatureTestCase
from test.drivers.second_quantization.test_driver import TestDriver
from qiskit_nature.drivers.second_quantization import HDF5Driver
from qiskit_nature.drivers import QMolecule
from qiskit_nature.properties.second_quantization.electronic import ElectronicStructureDriverResult
class TestDriverHDF5(QiskitNatureTestCase, TestDriver):
"""HDF5 Driver tests."""
def setUp(self):
super().setUp()
driver = HDF5Driver(
hdf5_input=self.get_resource_path(
"test_driver_hdf5.hdf5", "drivers/second_quantization/hdf5d"
)
)
self.driver_result = driver.run()
def test_convert(self):
"""Test the legacy-conversion method."""
legacy_file_path = self.get_resource_path(
"test_driver_hdf5_legacy.hdf5", "drivers/second_quantization/hdf5d"
)
with self.subTest("replace=True"):
# pylint: disable=consider-using-with
tmp_file = tempfile.NamedTemporaryFile(delete=False, suffix=".hdf5")
tmp_file.close()
os.unlink(tmp_file.name)
shutil.copy(legacy_file_path, tmp_file.name)
try:
driver = HDF5Driver(tmp_file.name)
# replacing file won't trigger deprecation on run
driver.convert(replace=True)
driver.run()
finally:
os.unlink(tmp_file.name)
msg_mol_ref = (
"The HDF5Driver.run with legacy HDF5 file method is deprecated as of version 0.4.0 "
"and will be removed no sooner than 3 months after the release "
". Your HDF5 file contains the legacy QMolecule object! You should "
"consider converting it to the new property framework. See also HDF5Driver.convert."
)
with self.subTest("replace=False"):
# pylint: disable=consider-using-with
tmp_file = tempfile.NamedTemporaryFile(delete=False, suffix=".hdf5")
tmp_file.close()
new_file_name = pathlib.Path(tmp_file.name).with_name(
str(pathlib.Path(tmp_file.name).stem) + "_new.hdf5"
)
os.unlink(tmp_file.name)
shutil.copy(legacy_file_path, tmp_file.name)
try:
driver = HDF5Driver(tmp_file.name)
# not replacing file will trigger deprecation on run
driver.convert(replace=False)
with warnings.catch_warnings(record=True) as c_m:
warnings.simplefilter("always")
driver.run()
self.assertEqual(str(c_m[0].message), msg_mol_ref)
# using new file won't trigger deprecation
HDF5Driver(new_file_name).run()
finally:
os.unlink(tmp_file.name)
os.unlink(new_file_name)
class TestDriverHDF5Legacy(QiskitNatureTestCase, TestDriver):
"""HDF5 Driver legacy file-support tests."""
def setUp(self):
super().setUp()
hdf5_file = self.get_resource_path(
"test_driver_hdf5_legacy.hdf5", "drivers/second_quantization/hdf5d"
)
# Using QMolecule directly here to avoid the deprecation on HDF5Driver.run method
# to be triggered and let it be handled on the method test_convert
# Those deprecation messages are shown only once and this one could prevent
# the test_convert one to show if called first.
molecule = QMolecule(hdf5_file)
molecule.load()
warnings.filterwarnings("ignore", category=DeprecationWarning)
self.driver_result = ElectronicStructureDriverResult.from_legacy_driver_result(molecule)
warnings.filterwarnings("default", category=DeprecationWarning)
if __name__ == "__main__":
unittest.main()
|
[
"qiskit_nature.properties.second_quantization.electronic.ElectronicStructureDriverResult.from_legacy_driver_result",
"pathlib.Path",
"qiskit_nature.drivers.second_quantization.HDF5Driver",
"warnings.catch_warnings",
"qiskit_nature.drivers.QMolecule",
"warnings.simplefilter",
"os.unlink",
"shutil.copy",
"tempfile.NamedTemporaryFile",
"unittest.main",
"warnings.filterwarnings"
] |
[((4436, 4451), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4449, 4451), False, 'import unittest\n'), ((4118, 4138), 'qiskit_nature.drivers.QMolecule', 'QMolecule', (['hdf5_file'], {}), '(hdf5_file)\n', (4127, 4138), False, 'from qiskit_nature.drivers import QMolecule\n'), ((4171, 4233), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'DeprecationWarning'}), "('ignore', category=DeprecationWarning)\n", (4194, 4233), False, 'import warnings\n'), ((4263, 4330), 'qiskit_nature.properties.second_quantization.electronic.ElectronicStructureDriverResult.from_legacy_driver_result', 'ElectronicStructureDriverResult.from_legacy_driver_result', (['molecule'], {}), '(molecule)\n', (4320, 4330), False, 'from qiskit_nature.properties.second_quantization.electronic import ElectronicStructureDriverResult\n'), ((4339, 4402), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""default"""'], {'category': 'DeprecationWarning'}), "('default', category=DeprecationWarning)\n", (4362, 4402), False, 'import warnings\n'), ((1600, 1657), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)', 'suffix': '""".hdf5"""'}), "(delete=False, suffix='.hdf5')\n", (1627, 1657), False, 'import tempfile\n'), ((1699, 1723), 'os.unlink', 'os.unlink', (['tmp_file.name'], {}), '(tmp_file.name)\n', (1708, 1723), False, 'import os\n'), ((1736, 1780), 'shutil.copy', 'shutil.copy', (['legacy_file_path', 'tmp_file.name'], {}), '(legacy_file_path, tmp_file.name)\n', (1747, 1780), False, 'import shutil\n'), ((2555, 2612), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)', 'suffix': '""".hdf5"""'}), "(delete=False, suffix='.hdf5')\n", (2582, 2612), False, 'import tempfile\n'), ((2803, 2827), 'os.unlink', 'os.unlink', (['tmp_file.name'], {}), '(tmp_file.name)\n', (2812, 2827), False, 'import os\n'), ((2840, 2884), 'shutil.copy', 'shutil.copy', (['legacy_file_path', 'tmp_file.name'], {}), '(legacy_file_path, tmp_file.name)\n', (2851, 2884), False, 'import shutil\n'), ((1823, 1848), 'qiskit_nature.drivers.second_quantization.HDF5Driver', 'HDF5Driver', (['tmp_file.name'], {}), '(tmp_file.name)\n', (1833, 1848), False, 'from qiskit_nature.drivers.second_quantization import HDF5Driver\n'), ((2026, 2050), 'os.unlink', 'os.unlink', (['tmp_file.name'], {}), '(tmp_file.name)\n', (2035, 2050), False, 'import os\n'), ((2927, 2952), 'qiskit_nature.drivers.second_quantization.HDF5Driver', 'HDF5Driver', (['tmp_file.name'], {}), '(tmp_file.name)\n', (2937, 2952), False, 'from qiskit_nature.drivers.second_quantization import HDF5Driver\n'), ((3435, 3459), 'os.unlink', 'os.unlink', (['tmp_file.name'], {}), '(tmp_file.name)\n', (3444, 3459), False, 'import os\n'), ((3476, 3500), 'os.unlink', 'os.unlink', (['new_file_name'], {}), '(new_file_name)\n', (3485, 3500), False, 'import os\n'), ((2670, 2697), 'pathlib.Path', 'pathlib.Path', (['tmp_file.name'], {}), '(tmp_file.name)\n', (2682, 2697), False, 'import pathlib\n'), ((3089, 3125), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (3112, 3125), False, 'import warnings\n'), ((3154, 3185), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (3175, 3185), False, 'import warnings\n'), ((3366, 3391), 'qiskit_nature.drivers.second_quantization.HDF5Driver', 'HDF5Driver', (['new_file_name'], {}), '(new_file_name)\n', (3376, 3391), False, 'from qiskit_nature.drivers.second_quantization import 
HDF5Driver\n'), ((2729, 2756), 'pathlib.Path', 'pathlib.Path', (['tmp_file.name'], {}), '(tmp_file.name)\n', (2741, 2756), False, 'import pathlib\n')]
|
from openpyxl import Workbook
wb = Workbook()
ws = wb.active
data = [
["Fruit", "Quantity"],
["Kiwi", 3],
["Grape", 15],
["Apple", 3],
["Peach", 3],
["Pomegranate", 3],
["Pear", 3],
["Tangerine", 3],
["Blueberry", 3],
["Mango", 3],
["Watermelon", 3],
["Blackberry", 3],
["Orange", 3],
["Raspberry", 3],
["Banana", 3]
]
for r in data:
ws.append(r)
ws.auto_filter.ref = "A1:B15"
ws.auto_filter.add_filter_column(0, ["Kiwi", "Apple", "Mango"])
ws.auto_filter.add_sort_condition("B2:B15")
wb.save("filtered.xlsx")
|
[
"openpyxl.Workbook"
] |
[((36, 46), 'openpyxl.Workbook', 'Workbook', ([], {}), '()\n', (44, 46), False, 'from openpyxl import Workbook\n')]
|
from bleak import BleakClient
import asyncio
import functools
# The original literal had no placeholder, so the trailing .format(0x2A19) was a
# no-op; use a placeholder so the Battery Level characteristic UUID is actually
# built from the 0x2A19 assigned number (the resulting string is unchanged).
notify_uuid = "0000{0:04x}-0000-1000-8000-00805f9b34fb".format(0x2A19)
def callback(sender, data, mac_address):
#data = bytearray(data)
dataint = int.from_bytes(data, byteorder='little', signed=True)
print(mac_address, dataint)
def run(addresses):
loop = asyncio.get_event_loop()
tasks = asyncio.gather(*(connect_to_device(address) for address in addresses))
loop.run_until_complete(tasks)
async def connect_to_device(address):
print("starting", address, "loop")
async with BleakClient(address, timeout=10.0) as client:
print("connect to", address)
try:
#model_number = await client.read_gatt_char(address)
await client.start_notify(notify_uuid, functools.partial(callback, mac_address=address))
await asyncio.sleep(1000.0)
await client.stop_notify(notify_uuid)
except Exception as e:
print(e)
print("disconnect from", address)
if __name__ == "__main__":
run(
["96E8409A-F2EB-4029-B3DC-615FADE0C838","D31CB0CA-890E-476B-80D9-80ED8A3AA69A"]
)
|
[
"asyncio.get_event_loop",
"functools.partial",
"asyncio.sleep",
"bleak.BleakClient"
] |
[((334, 358), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (356, 358), False, 'import asyncio\n'), ((573, 607), 'bleak.BleakClient', 'BleakClient', (['address'], {'timeout': '(10.0)'}), '(address, timeout=10.0)\n', (584, 607), False, 'from bleak import BleakClient\n'), ((855, 876), 'asyncio.sleep', 'asyncio.sleep', (['(1000.0)'], {}), '(1000.0)\n', (868, 876), False, 'import asyncio\n'), ((787, 835), 'functools.partial', 'functools.partial', (['callback'], {'mac_address': 'address'}), '(callback, mac_address=address)\n', (804, 835), False, 'import functools\n')]
|
#!/usr/bin/python
'''*****************************************************************************************************************
Seeed Studio Relay Board Library V2
Test Application #2
By <NAME> (https://www.johnwargo.com)
********************************************************************************************************************'''
import sys
import time
from seeed_relay_v1 import Relay
def process_loop():
# turn all of the relays on
relay.all_on()
relay.print_status_all()
# wait a second
time.sleep(1)
# turn all of the relays off
relay.all_off()
relay.print_status_all()
# wait a second
time.sleep(1)
# now cycle each relay every second in an infinite loop
while True:
# test the on/off methods
print('Testing on/off methods')
for i in range(1, 5):
relay.on(i)
relay.print_status_all()
time.sleep(1)
relay.off(i)
relay.print_status_all()
time.sleep(1)
# test the toggle method
print('Testing the toggle methods')
for i in range(1, 5):
relay.toggle_port(i)
relay.print_status_all()
time.sleep(1)
relay.toggle_port(i)
relay.print_status_all()
time.sleep(1)
print('Repeating loop')
# Now see what we're supposed to do next
if __name__ == "__main__":
# Create the relay object
relay = Relay()
try:
process_loop()
except KeyboardInterrupt:
print("\nExiting application")
# turn off all of the relays
relay.all_off()
# exit the application
sys.exit(0)
|
[
"sys.exit",
"time.sleep",
"seeed_relay_v1.Relay"
] |
[((545, 558), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (555, 558), False, 'import time\n'), ((665, 678), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (675, 678), False, 'import time\n'), ((1480, 1487), 'seeed_relay_v1.Relay', 'Relay', ([], {}), '()\n', (1485, 1487), False, 'from seeed_relay_v1 import Relay\n'), ((933, 946), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (943, 946), False, 'import time\n'), ((1021, 1034), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1031, 1034), False, 'import time\n'), ((1225, 1238), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1235, 1238), False, 'import time\n'), ((1321, 1334), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1331, 1334), False, 'import time\n'), ((1690, 1701), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1698, 1701), False, 'import sys\n')]
|
import requests
keyword = "python"
try:
kv = {'q':keyword}
r = requests.get('http://www.so.com/s', params=kv)
print(r.request.url)
r.raise_for_status()
print(len(r.text))
except:
    print('爬取失败')  # "crawling failed"
|
[
"requests.get"
] |
[((72, 118), 'requests.get', 'requests.get', (['"""http://www.so.com/s"""'], {'params': 'kv'}), "('http://www.so.com/s', params=kv)\n", (84, 118), False, 'import requests\n')]
|
import torch.nn as nn
class RODEncode(nn.Module):
def __init__(self, in_channels=2):
super(RODEncode, self).__init__()
self.conv1a = nn.Conv3d(
in_channels=in_channels,
out_channels=64,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv1a_1 = nn.Conv3d(
in_channels=64,
out_channels=64,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv1a_2 = nn.Conv3d(
in_channels=64,
out_channels=64,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv1b = nn.Conv3d(
in_channels=64,
out_channels=64,
kernel_size=(9, 5, 5),
stride=(2, 2, 2),
padding=(4, 2, 2),
)
self.conv2a = nn.Conv3d(
in_channels=64,
out_channels=128,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv2b = nn.Conv3d(
in_channels=128,
out_channels=128,
kernel_size=(9, 5, 5),
stride=(2, 2, 2),
padding=(4, 2, 2),
)
self.conv3a = nn.Conv3d(
in_channels=128,
out_channels=256,
kernel_size=(9, 5, 5),
stride=(1, 1, 1),
padding=(4, 2, 2),
)
self.conv3b = nn.Conv3d(
in_channels=256,
out_channels=256,
kernel_size=(9, 5, 5),
stride=(1, 2, 2),
padding=(4, 2, 2),
)
self.bn1a = nn.BatchNorm3d(num_features=64)
self.bn1a_1 = nn.BatchNorm3d(num_features=64)
self.bn1a_2 = nn.BatchNorm3d(num_features=64)
self.bn1b = nn.BatchNorm3d(num_features=64)
self.bn2a = nn.BatchNorm3d(num_features=128)
self.bn2b = nn.BatchNorm3d(num_features=128)
self.bn3a = nn.BatchNorm3d(num_features=256)
self.bn3b = nn.BatchNorm3d(num_features=256)
self.relu = nn.ReLU()
def forward(self, x):
x = self.relu(
self.bn1a(self.conv1a(x))
) # (B, 2, W, 128, 128) -> (B, 64, W, 128, 128)
# additional
x = self.relu(
self.bn1a_1(self.conv1a_1(x))
) # (B, 64, W, 128, 128) -> (B, 64, W, 128, 128)
x = self.relu(
self.bn1a_2(self.conv1a_2(x))
) # (B, 64, W, 128, 128) -> (B, 64, W, 128, 128)
x = self.relu(
self.bn1b(self.conv1b(x))
) # (B, 64, W, 128, 128) -> (B, 64, W/2, 64, 64)
x = self.relu(
self.bn2a(self.conv2a(x))
) # (B, 64, W/2, 64, 64) -> (B, 128, W/2, 64, 64)
x = self.relu(
self.bn2b(self.conv2b(x))
) # (B, 128, W/2, 64, 64) -> (B, 128, W/4, 32, 32)
x = self.relu(
self.bn3a(self.conv3a(x))
) # (B, 128, W/4, 32, 32) -> (B, 256, W/4, 32, 32)
x = self.relu(
self.bn3b(self.conv3b(x))
) # (B, 256, W/4, 32, 32) -> (B, 256, W/4, 16, 16)
return x
class RODDecode(nn.Module):
def __init__(self, n_class):
super(RODDecode, self).__init__()
self.convt1 = nn.ConvTranspose3d(
in_channels=256,
out_channels=128,
kernel_size=(4, 6, 6),
stride=(2, 2, 2),
padding=(1, 2, 2),
)
self.convt2 = nn.ConvTranspose3d(
in_channels=128,
out_channels=64,
kernel_size=(4, 6, 6),
stride=(2, 2, 2),
padding=(1, 2, 2),
)
self.convt3 = nn.ConvTranspose3d(
in_channels=64,
out_channels=n_class,
kernel_size=(3, 6, 6),
stride=(1, 2, 2),
padding=(1, 2, 2),
)
self.prelu = nn.PReLU()
self.sigmoid = nn.Sigmoid()
# self.upsample = nn.Upsample(size=(rodnet_configs['win_size'], radar_configs['ramap_rsize'],
# radar_configs['ramap_asize']), mode='nearest')
def forward(self, x):
x = self.prelu(self.convt1(x)) # (B, 256, W/4, 16, 16) -> (B, 128, W/2, 32, 32)
x = self.prelu(self.convt2(x)) # (B, 128, W/2, 32, 32) -> (B, 64, W, 64, 64)
x = self.convt3(x) # (B, 64, W, 64, 64) -> (B, 3, W, 128, 128)
return x
|
[
"torch.nn.Sigmoid",
"torch.nn.ReLU",
"torch.nn.Conv3d",
"torch.nn.PReLU",
"torch.nn.BatchNorm3d",
"torch.nn.ConvTranspose3d"
] |
[((155, 270), 'torch.nn.Conv3d', 'nn.Conv3d', ([], {'in_channels': 'in_channels', 'out_channels': '(64)', 'kernel_size': '(9, 5, 5)', 'stride': '(1, 1, 1)', 'padding': '(4, 2, 2)'}), '(in_channels=in_channels, out_channels=64, kernel_size=(9, 5, 5),\n stride=(1, 1, 1), padding=(4, 2, 2))\n', (164, 270), True, 'import torch.nn as nn\n'), ((362, 468), 'torch.nn.Conv3d', 'nn.Conv3d', ([], {'in_channels': '(64)', 'out_channels': '(64)', 'kernel_size': '(9, 5, 5)', 'stride': '(1, 1, 1)', 'padding': '(4, 2, 2)'}), '(in_channels=64, out_channels=64, kernel_size=(9, 5, 5), stride=(1,\n 1, 1), padding=(4, 2, 2))\n', (371, 468), True, 'import torch.nn as nn\n'), ((560, 666), 'torch.nn.Conv3d', 'nn.Conv3d', ([], {'in_channels': '(64)', 'out_channels': '(64)', 'kernel_size': '(9, 5, 5)', 'stride': '(1, 1, 1)', 'padding': '(4, 2, 2)'}), '(in_channels=64, out_channels=64, kernel_size=(9, 5, 5), stride=(1,\n 1, 1), padding=(4, 2, 2))\n', (569, 666), True, 'import torch.nn as nn\n'), ((756, 862), 'torch.nn.Conv3d', 'nn.Conv3d', ([], {'in_channels': '(64)', 'out_channels': '(64)', 'kernel_size': '(9, 5, 5)', 'stride': '(2, 2, 2)', 'padding': '(4, 2, 2)'}), '(in_channels=64, out_channels=64, kernel_size=(9, 5, 5), stride=(2,\n 2, 2), padding=(4, 2, 2))\n', (765, 862), True, 'import torch.nn as nn\n'), ((952, 1060), 'torch.nn.Conv3d', 'nn.Conv3d', ([], {'in_channels': '(64)', 'out_channels': '(128)', 'kernel_size': '(9, 5, 5)', 'stride': '(1, 1, 1)', 'padding': '(4, 2, 2)'}), '(in_channels=64, out_channels=128, kernel_size=(9, 5, 5), stride=(\n 1, 1, 1), padding=(4, 2, 2))\n', (961, 1060), True, 'import torch.nn as nn\n'), ((1149, 1258), 'torch.nn.Conv3d', 'nn.Conv3d', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'kernel_size': '(9, 5, 5)', 'stride': '(2, 2, 2)', 'padding': '(4, 2, 2)'}), '(in_channels=128, out_channels=128, kernel_size=(9, 5, 5), stride=\n (2, 2, 2), padding=(4, 2, 2))\n', (1158, 1258), True, 'import torch.nn as nn\n'), ((1347, 1456), 'torch.nn.Conv3d', 'nn.Conv3d', ([], {'in_channels': '(128)', 'out_channels': '(256)', 'kernel_size': '(9, 5, 5)', 'stride': '(1, 1, 1)', 'padding': '(4, 2, 2)'}), '(in_channels=128, out_channels=256, kernel_size=(9, 5, 5), stride=\n (1, 1, 1), padding=(4, 2, 2))\n', (1356, 1456), True, 'import torch.nn as nn\n'), ((1545, 1654), 'torch.nn.Conv3d', 'nn.Conv3d', ([], {'in_channels': '(256)', 'out_channels': '(256)', 'kernel_size': '(9, 5, 5)', 'stride': '(1, 2, 2)', 'padding': '(4, 2, 2)'}), '(in_channels=256, out_channels=256, kernel_size=(9, 5, 5), stride=\n (1, 2, 2), padding=(4, 2, 2))\n', (1554, 1654), True, 'import torch.nn as nn\n'), ((1741, 1772), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', ([], {'num_features': '(64)'}), '(num_features=64)\n', (1755, 1772), True, 'import torch.nn as nn\n'), ((1795, 1826), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', ([], {'num_features': '(64)'}), '(num_features=64)\n', (1809, 1826), True, 'import torch.nn as nn\n'), ((1849, 1880), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', ([], {'num_features': '(64)'}), '(num_features=64)\n', (1863, 1880), True, 'import torch.nn as nn\n'), ((1901, 1932), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', ([], {'num_features': '(64)'}), '(num_features=64)\n', (1915, 1932), True, 'import torch.nn as nn\n'), ((1953, 1985), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', ([], {'num_features': '(128)'}), '(num_features=128)\n', (1967, 1985), True, 'import torch.nn as nn\n'), ((2006, 2038), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', ([], {'num_features': '(128)'}), '(num_features=128)\n', (2020, 2038), 
True, 'import torch.nn as nn\n'), ((2059, 2091), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', ([], {'num_features': '(256)'}), '(num_features=256)\n', (2073, 2091), True, 'import torch.nn as nn\n'), ((2112, 2144), 'torch.nn.BatchNorm3d', 'nn.BatchNorm3d', ([], {'num_features': '(256)'}), '(num_features=256)\n', (2126, 2144), True, 'import torch.nn as nn\n'), ((2165, 2174), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (2172, 2174), True, 'import torch.nn as nn\n'), ((3336, 3453), 'torch.nn.ConvTranspose3d', 'nn.ConvTranspose3d', ([], {'in_channels': '(256)', 'out_channels': '(128)', 'kernel_size': '(4, 6, 6)', 'stride': '(2, 2, 2)', 'padding': '(1, 2, 2)'}), '(in_channels=256, out_channels=128, kernel_size=(4, 6, 6),\n stride=(2, 2, 2), padding=(1, 2, 2))\n', (3354, 3453), True, 'import torch.nn as nn\n'), ((3543, 3659), 'torch.nn.ConvTranspose3d', 'nn.ConvTranspose3d', ([], {'in_channels': '(128)', 'out_channels': '(64)', 'kernel_size': '(4, 6, 6)', 'stride': '(2, 2, 2)', 'padding': '(1, 2, 2)'}), '(in_channels=128, out_channels=64, kernel_size=(4, 6, 6),\n stride=(2, 2, 2), padding=(1, 2, 2))\n', (3561, 3659), True, 'import torch.nn as nn\n'), ((3749, 3869), 'torch.nn.ConvTranspose3d', 'nn.ConvTranspose3d', ([], {'in_channels': '(64)', 'out_channels': 'n_class', 'kernel_size': '(3, 6, 6)', 'stride': '(1, 2, 2)', 'padding': '(1, 2, 2)'}), '(in_channels=64, out_channels=n_class, kernel_size=(3, 6,\n 6), stride=(1, 2, 2), padding=(1, 2, 2))\n', (3767, 3869), True, 'import torch.nn as nn\n'), ((3958, 3968), 'torch.nn.PReLU', 'nn.PReLU', ([], {}), '()\n', (3966, 3968), True, 'import torch.nn as nn\n'), ((3992, 4004), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (4002, 4004), True, 'import torch.nn as nn\n')]
|
from django.contrib import admin
from .models import File
admin.site.register(File)
|
[
"django.contrib.admin.site.register"
] |
[((59, 84), 'django.contrib.admin.site.register', 'admin.site.register', (['File'], {}), '(File)\n', (78, 84), False, 'from django.contrib import admin\n')]
|
# LSTM with Variable Length Input Sequences to One Character Output
import numpy
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.utils import np_utils
from keras.preprocessing.sequence import pad_sequences
from theano.tensor.shared_randomstreams import RandomStreams
# fix random seed for reproducibility
numpy.random.seed(7)
# define the raw dataset
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
# create mapping of characters to integers (0-25) and the reverse
char_to_int = dict((c, i) for i, c in enumerate(alphabet))
int_to_char = dict((i, c) for i, c in enumerate(alphabet))
# prepare the dataset of input to output pairs encoded as integers
num_inputs = 16
max_len = 5
dataX = []
dataY = []
for i in range(num_inputs):
start = numpy.random.randint(len(alphabet)-2)
end = numpy.random.randint(start, min(start+max_len,len(alphabet)-1))
sequence_in = alphabet[start:end+1]
sequence_out = alphabet[end + 1]
dataX.append([char_to_int[char] for char in sequence_in])
dataY.append(char_to_int[sequence_out])
print( sequence_in, '->', sequence_out )
# convert list of lists to array and pad sequences if needed
X = pad_sequences(dataX, maxlen=max_len, dtype='float32')
# reshape X to be [samples, time steps, features]
X = numpy.reshape(X, (X.shape[0], max_len, 1))
# normalize
X = X / float(len(alphabet))
# one hot encode the output variable
y = np_utils.to_categorical(dataY)
# create and fit the model
batch_size = 1
model = Sequential()
model.add(LSTM(16, batch_input_shape=(batch_size, X.shape[1], X.shape[2]), stateful=True))
model.add(Dense(y.shape[1], activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
for i in range(1):
model.fit(X, y, nb_epoch=1, batch_size=batch_size, verbose=2, shuffle=False)
model.reset_states()
# summarize performance of the model
scores = model.evaluate(X, y, batch_size=batch_size, verbose=0)
model.reset_states()
print("Model Accuracy: %.2f%%" % (scores[1]*100))
# demonstrate some model predictions
for i in range(1):
pattern_index = numpy.random.randint(len(dataX))
pattern = dataX[pattern_index]
x = pad_sequences([pattern], maxlen=max_len, dtype='float32')
x = numpy.reshape(x, (1, max_len, 1))
x = x / float(len(alphabet))
prediction = model.predict(x, verbose=0)
index = numpy.argmax(prediction)
result = int_to_char[index]
seq_in = [int_to_char[value] for value in pattern]
print( seq_in, "->", result )
|
[
"numpy.reshape",
"numpy.argmax",
"keras.models.Sequential",
"keras.layers.LSTM",
"keras.utils.np_utils.to_categorical",
"numpy.random.seed",
"keras.layers.Dense",
"keras.preprocessing.sequence.pad_sequences"
] |
[((374, 394), 'numpy.random.seed', 'numpy.random.seed', (['(7)'], {}), '(7)\n', (391, 394), False, 'import numpy\n'), ((1226, 1279), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['dataX'], {'maxlen': 'max_len', 'dtype': '"""float32"""'}), "(dataX, maxlen=max_len, dtype='float32')\n", (1239, 1279), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((1336, 1378), 'numpy.reshape', 'numpy.reshape', (['X', '(X.shape[0], max_len, 1)'], {}), '(X, (X.shape[0], max_len, 1))\n', (1349, 1378), False, 'import numpy\n'), ((1465, 1495), 'keras.utils.np_utils.to_categorical', 'np_utils.to_categorical', (['dataY'], {}), '(dataY)\n', (1488, 1495), False, 'from keras.utils import np_utils\n'), ((1549, 1561), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1559, 1561), False, 'from keras.models import Sequential\n'), ((1573, 1652), 'keras.layers.LSTM', 'LSTM', (['(16)'], {'batch_input_shape': '(batch_size, X.shape[1], X.shape[2])', 'stateful': '(True)'}), '(16, batch_input_shape=(batch_size, X.shape[1], X.shape[2]), stateful=True)\n', (1577, 1652), False, 'from keras.layers import LSTM\n'), ((1665, 1704), 'keras.layers.Dense', 'Dense', (['y.shape[1]'], {'activation': '"""softmax"""'}), "(y.shape[1], activation='softmax')\n", (1670, 1704), False, 'from keras.layers import Dense\n'), ((2255, 2312), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['[pattern]'], {'maxlen': 'max_len', 'dtype': '"""float32"""'}), "([pattern], maxlen=max_len, dtype='float32')\n", (2268, 2312), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((2322, 2355), 'numpy.reshape', 'numpy.reshape', (['x', '(1, max_len, 1)'], {}), '(x, (1, max_len, 1))\n', (2335, 2355), False, 'import numpy\n'), ((2449, 2473), 'numpy.argmax', 'numpy.argmax', (['prediction'], {}), '(prediction)\n', (2461, 2473), False, 'import numpy\n')]
|
"""
Direction prediction based on learning dataset from reactome
PPI direction calculated from domain interaction directions
"""
# Imports
import sqlite3, csv, os
import pandas as pd
import logging
import pickle
# # Initiating logger
# logger = logging.getLogger()
# handler = logging.FileHandler('../../workflow/SLK3.log')
# logger.setLevel(logging.DEBUG)
# handler.setLevel(logging.DEBUG)
# formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# handler.setFormatter(formatter)
# logger.addHandler(handler)
class DirScore:
def __init__(self):
# Defining constants
self.REACTOME_DB = '../../SLKlib/mapper/protein/output/reactome_mapped.db'
self.PFAM_FILE = ['../prediction/direction/files/uniprot-pfam_human.tab',
'../prediction/direction/files/uniprot-pfam_drosi.tab',
'../prediction/direction/files/uniprot-pfam_danio.tab',
'../prediction/direction/files/uniprot-pfam_celegans.tab']
logging.basicConfig(level=logging.DEBUG)
self.pfam_dict = {}
self.dir_score_dict = {}
        # Add the two output dictionaries of the test_scores function to a pickle file
        # so that the next function can access them between script executions
# TODO: remove pickle files after each run
self.PICKLE_FILE = 'dir_score.pickle'
if os.path.isfile(self.PICKLE_FILE):
self.pfam_dict, self.dir_score_dict = pickle.load(open(self.PICKLE_FILE, 'rb'))
else:
self.test_scores()
pickle.dump((self.pfam_dict, self.dir_score_dict), open(self.PICKLE_FILE, 'wb'))
def test_scores(self):
# Setting as global so next script can access it
df_all = pd.DataFrame(columns=['a_dom', 'b_dom'])
conn = sqlite3.connect(self.REACTOME_DB)
# Setting up learning data set
logging.debug("Started connection to reactome dataset")
for inpfam in self.PFAM_FILE:
with open(inpfam) as infile:
infile.readline()
for line in infile:
line = line.strip().split('\t')
if len(line) == 4:
self.pfam_dict[line[0]] = line[3].split(';')[0:-1]
with conn:
c = conn.cursor()
counter = 0
# Getting PPI data
logging.debug('Getting PPI data')
c.execute("SELECT interactor_a_node_name, interactor_b_node_name FROM edge")
while True:
row = c.fetchone()
counter += 1
if row is None:
break
else:
a_node = row[0].split(':')[1]
b_node = row[1].split(':')[1]
if a_node not in self.pfam_dict or b_node not in self.pfam_dict:
continue
int_list = [self.pfam_dict[a_node], self.pfam_dict[b_node]]
for id1, id2 in zip(int_list[0], int_list[1]):
# Setting up dataframe for all domain-domain interactions
# len(df_all) sets the name of the line
df_all = df_all.set_value(len(df_all), col=['a_dom', 'b_dom'], value=[id1, id2])
# All domains in a dataframe, without direction
all_domain_df = df_all['a_dom'].append(df_all['b_dom']).reset_index(name='domain')
all_count = all_domain_df.groupby('domain').size().reset_index(name='counter')
# Getting probability of each domain
# Number of domain occurrence / Number of all domains
logging.debug('Getting probability of each domain')
prob_dom = {}
# Number of all domain occurrences
total_occurrence = all_count['counter'].sum()
# Iterating over domains
for index, domain in all_count['domain'].iteritems():
dom_count = all_count.loc[all_count['domain'] == domain, 'counter'].iloc[0]
P_domain = dom_count / total_occurrence
# Adding data into a dictionary
prob_dom[domain] = P_domain
#print(domain, P_domain)
# Getting directed domain-domain interaction probabilities
# Number of directed DDI / number of all DDIs
logging.debug('Getting DDI probabilities')
prob_inter = {}
# Getting the occurrences for each directed interaction
all_inter_counted = df_all.groupby(['a_dom', 'b_dom']).size().reset_index(name='counter')
all_inter_counter = all_inter_counted['counter'].sum()
# Iterating over interactions
for index2, count in all_inter_counted['counter'].iteritems():
P_inter = count / all_inter_counter
# Getting domain ids
a_dom = all_inter_counted.loc[all_inter_counted['counter'] == count, 'a_dom'].iloc[0]
b_dom = all_inter_counted.loc[all_inter_counted['counter'] == count, 'b_dom'].iloc[0]
                # Adding them into a dictionary
prob_inter['->'.join((a_dom, b_dom))] = P_inter
# Calculating direction score
# (P_AtoB - P_BtoA) / P_A * P_B
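        # Worked toy example of the formula above (numbers are illustrative only):
        # if A->B occurs with probability 0.06 and B->A with 0.02, and domains A and B
        # have occurrence probabilities 0.05 and 0.04, then the score is
        # ((0.06 - 0.02) / 0.05) * 0.04 = 0.032 -- evaluated left to right, exactly as
        # the expression below is written.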
logging.debug('Calculating direction scores')
for key in prob_inter.keys():
a = key.split('->')[0]
b = key.split('->')[1]
other_dir = '->'.join((b, a))
if other_dir in prob_inter.keys():
dir_score = (prob_inter[key] - prob_inter[other_dir]) / prob_dom[a] * prob_dom[b]
self.dir_score_dict[key] = dir_score
else:
dir_score = (prob_inter[key] - 0) / prob_dom[a] * prob_dom[b]
self.dir_score_dict[key] = dir_score
#print(key, dir_score)
#return self.dir_score_dict, self.pfam_dict
# LAYER 3
def apply_to_db(self):
#logger.debug(self.pfam_dict)
#logger.debug(self.dir_score_dict)
conn2 = sqlite3.connect('SLK3_layers.db')
# logger.debug("Connected to '%s" % conn2)
with conn2:
c2 = conn2.cursor()
c22 = conn2.cursor()
c2.execute("SELECT interactor_a_node_name, interactor_b_node_name FROM ATG_Reg")
while True:
row = c2.fetchone()
if row is None:
break
else:
prot_a = row[0].split(':')[1]
prot_b = row[1].split(':')[1]
dir_score_sum = 0
# Summing DDI scores
#logging.debug('Summing DDI scores')
if prot_a in self.pfam_dict.keys() and prot_b in self.pfam_dict.keys():
for dom_a, dom_b in zip(self.pfam_dict[prot_a], self.pfam_dict[prot_b]):
#print(dir_score_dict['->'.join((dom_a, dom_b))])
if '->'.join((dom_a, dom_b)) in self.dir_score_dict.keys():
dir_score_sum += self.dir_score_dict['->'.join((dom_a, dom_b))]
# To get final direction score of the unknown PPIs we calculate
# the average of each proteins' all domain interaction scores
if len(self.pfam_dict[prot_a]) * len(self.pfam_dict[prot_b]) == 0:
logging.debug(prot_a, len(self.pfam_dict[prot_a]), prot_b, len(self.pfam_dict[prot_b]))
continue
else:
dir_score_final_PPI = dir_score_sum / (len(self.pfam_dict[prot_a]) * len(self.pfam_dict[prot_b]))
#logging.debug("Updating scores")
c22.execute("UPDATE ATG_Reg SET confidence_scores = '%s' "
"WHERE ATG_Reg.interactor_a_node_name = '%s' AND ATG_Reg.interactor_b_node_name = '%s'"
% ('|dir_pred:' + str(dir_score_final_PPI), row[0], row[1]))
if __name__ == '__main__':
test = DirScore()
    # The module-level `logger` setup is commented out above, so use the logging
    # module directly here to avoid a NameError at runtime.
    logging.debug('Creating test set')
    test.test_scores()
    logging.debug('Adding scores to dataset')
    test.apply_to_db()
    logging.debug('Direction prediction done')
|
[
"logging.basicConfig",
"logging.debug",
"sqlite3.connect",
"os.path.isfile",
"pandas.DataFrame"
] |
[((1036, 1076), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (1055, 1076), False, 'import logging\n'), ((1412, 1444), 'os.path.isfile', 'os.path.isfile', (['self.PICKLE_FILE'], {}), '(self.PICKLE_FILE)\n', (1426, 1444), False, 'import sqlite3, csv, os\n'), ((1786, 1826), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['a_dom', 'b_dom']"}), "(columns=['a_dom', 'b_dom'])\n", (1798, 1826), True, 'import pandas as pd\n'), ((1847, 1880), 'sqlite3.connect', 'sqlite3.connect', (['self.REACTOME_DB'], {}), '(self.REACTOME_DB)\n', (1862, 1880), False, 'import sqlite3, csv, os\n'), ((1936, 1991), 'logging.debug', 'logging.debug', (['"""Started connection to reactome dataset"""'], {}), "('Started connection to reactome dataset')\n", (1949, 1991), False, 'import logging\n'), ((3823, 3874), 'logging.debug', 'logging.debug', (['"""Getting probability of each domain"""'], {}), "('Getting probability of each domain')\n", (3836, 3874), False, 'import logging\n'), ((4532, 4574), 'logging.debug', 'logging.debug', (['"""Getting DDI probabilities"""'], {}), "('Getting DDI probabilities')\n", (4545, 4574), False, 'import logging\n'), ((5460, 5505), 'logging.debug', 'logging.debug', (['"""Calculating direction scores"""'], {}), "('Calculating direction scores')\n", (5473, 5505), False, 'import logging\n'), ((6284, 6317), 'sqlite3.connect', 'sqlite3.connect', (['"""SLK3_layers.db"""'], {}), "('SLK3_layers.db')\n", (6299, 6317), False, 'import sqlite3, csv, os\n'), ((2474, 2507), 'logging.debug', 'logging.debug', (['"""Getting PPI data"""'], {}), "('Getting PPI data')\n", (2487, 2507), False, 'import logging\n')]
|
import json
import os.path
import sys
from exceptions import *
from create_folder_structure import create_folder_structure
def main():
try:
if len(sys.argv) != 3:
raise InvalidArgumentCount
if not os.path.exists(sys.argv[2]):
raise InvalidFilePath
if not os.path.exists(sys.argv[1]):
raise InvalidFolderPath
try:
json_object = json.load(open(sys.argv[2]))
except ValueError:
raise InvalidJsonFile
output_folder = sys.argv[1]
create_folder_structure(output_folder, json_object)
except InvalidArgumentCount:
print("""
Invalid number of arguments
Please make sure to use quotes for outputFolder and jsonFile if path includes spaces
Valid paths may be:
"file.json"
"./file.json"
"folder/file.json"
"./folder/file.json"
"absolute/path/to/file.json"
Usage:
main.py "<outputFolder>" "<jsonFile>"
""")
except InvalidFolderPath:
print("""
Output folder does not exist
""")
except InvalidFilePath:
print("""
Input json file does not exist
""")
except InvalidJsonFile:
print("""
Input json file is invalid
""")
main()
|
[
"create_folder_structure.create_folder_structure"
] |
[((547, 598), 'create_folder_structure.create_folder_structure', 'create_folder_structure', (['output_folder', 'json_object'], {}), '(output_folder, json_object)\n', (570, 598), False, 'from create_folder_structure import create_folder_structure\n')]
|
from flask import request
from google.auth.transport import requests
import google.oauth2.id_token
from server.ApplikationsAdministration import ApplikationsAdministration
# Benutzer.py, BenutzerMapper + Benutzer methods in ApplikationsAdministration
def secured(function):
    """Decorator for Google Firebase-based authentication of users.
    Since this system is a basic case study for teaching purposes, a sophisticated
    authorization concept was deliberately omitted. Instead, this decorator is meant
    to show a way to get started with authentication with reasonable technical effort.
    POLICY: The policy demonstrated here is that anyone with an account accepted by
    Firebase can log in to this system. On every login, the user's full name, e-mail
    address and Google User ID are stored or updated in our system. An extension of
    the system could then fall back on this data at a later point.
    """
firebase_request_adapter = requests.Request()
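    # Hypothetical usage sketch (route and view names are illustrative, not part of
    # the original module): a Flask view can be protected by stacking this decorator
    # under the route decorator, e.g.
    #
    #   @app.route("/api/benutzer")
    #   @secured
    #   def benutzer_liste():
    #       ...
    #
    # The wrapper defined below then verifies the Firebase ID token from the "token"
    # cookie before the wrapped view runs.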
def wrapper(*args, **kwargs):
# Verify Firebase auth.
id_token = request.cookies.get("token")
error_message = None
claims = None
objects = None
if id_token:
try:
# Verify the token against the Firebase Auth API. This example
# verifies the token on each page load. For improved performance,
# some applications may wish to cache results in an encrypted
# session store (see for instance
# http://flask.pocoo.org/docs/1.0/quickstart/#sessions).
claims = google.oauth2.id_token.verify_firebase_token(
id_token, firebase_request_adapter)
if claims is not None:
adm = ApplikationsAdministration()
google_user_id = claims.get("user_id")
email = claims.get("email")
name = claims.get("name")
user = adm.get_user_by_google_user_id(google_user_id)
                    # Use the same naming as in ApplikationsAdministration
                    if user is not None:
                        """Case: the user is already known to our system.
                        We assume that the google_user_id does not change, but the
                        associated full name (name) and e-mail address may change.
                        Therefore, to be safe, these two pieces of data are updated
                        in our system."""
user.set_name(name)
user.set_email(email)
adm.update_benutzer(user)
                        # set_name and set_email follow the naming in Benutzer.py
                        # adm.save-user follows the naming in ApplikationsAdministration.py
                    else:
                        """Case: the user has not logged in before.
                        We therefore create a new user object so that it can be
                        used later if needed.
                        """
user = adm.benutzer_anlegen(name, email, google_user_id)
                        # Use the same naming as in ApplikationsAdministration
print(request.method, request.path, "angefragt durch:", name, email)
objects = function(*args, **kwargs)
return objects
else:
return '', 401 # UNAUTHORIZED !!!
except ValueError as exc:
# This will be raised if the token is expired or any other
# verification checks fail.
error_message = str(exc)
return exc, 401 # UNAUTHORIZED !!!
return '', 401 # UNAUTHORIZED !!!
return wrapper
|
[
"flask.request.cookies.get",
"server.ApplikationsAdministration.ApplikationsAdministration",
"google.auth.transport.requests.Request"
] |
[((1086, 1104), 'google.auth.transport.requests.Request', 'requests.Request', ([], {}), '()\n', (1102, 1104), False, 'from google.auth.transport import requests\n'), ((1191, 1219), 'flask.request.cookies.get', 'request.cookies.get', (['"""token"""'], {}), "('token')\n", (1210, 1219), False, 'from flask import request\n'), ((1888, 1916), 'server.ApplikationsAdministration.ApplikationsAdministration', 'ApplikationsAdministration', ([], {}), '()\n', (1914, 1916), False, 'from server.ApplikationsAdministration import ApplikationsAdministration\n')]
|
import FWCore.ParameterSet.Config as cms
from RecoEgamma.ElectronIdentification.Identification.mvaElectronID_tools import *
# Documentation of the MVA
# https://twiki.cern.ch/twiki/bin/viewauth/CMS/MultivariateElectronIdentificationRun2
# https://rembserj.web.cern.ch/rembserj/notes/Electron_MVA_ID_2017_documentation
#
# In this file we define the locations of the MVA weights, cuts on the MVA values
# for specific working points, and configure those cuts in VID
#
# The tag is an extra string attached to the names of the products
# such as ValueMaps that needs to distinguish cases when the same MVA estimator
# class is used with different tuning/weights
mvaTag = "Fall17NoIsoV1"
# There are 6 categories in this MVA. They have to be configured in this strict order
# (cuts and weight files order):
# 0 EB1 (eta<0.8) pt 5-10 GeV | pt < ptSplit && |eta| < ebSplit
# 1 EB2 (eta>=0.8) pt 5-10 GeV | pt < ptSplit && |eta| >= ebSplit && |eta| < ebeeSplit
# 2 EE pt 5-10 GeV | pt < ptSplit && |eta| >= ebeeSplit
# 3 EB1 (eta<0.8) pt 10-inf GeV | pt >= ptSplit && |eta| < ebSplit
# 4 EB2 (eta>=0.8) pt 10-inf GeV | pt >= ptSplit && |eta| >= ebSplit && |eta| < ebeeSplit
# 5 EE pt 10-inf GeV | pt >= ptSplit && |eta| >= ebeeSplit
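# Illustrative mapping of a few (pt, |eta|) points onto the categories above, using the
# 10 GeV and 0.8 splits stated there and assuming the usual barrel/endcap boundary
# ebeeSplit ~ 1.479 (that boundary value is an assumption, not read from this file):
#   pt = 7 GeV,  |eta| = 0.5  -> category 0 (EB1, low pt)
#   pt = 35 GeV, |eta| = 1.1  -> category 4 (EB2, high pt)
#   pt = 35 GeV, |eta| = 2.0  -> category 5 (EE, high pt)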
mvaFall17WeightFiles_V1 = cms.vstring(
"RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB1_5_2017_puinfo_BDT.weights.xml.gz",
"RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB2_5_2017_puinfo_BDT.weights.xml.gz",
"RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EE_5_2017_puinfo_BDT.weights.xml.gz",
"RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB1_10_2017_puinfo_BDT.weights.xml.gz",
"RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB2_10_2017_puinfo_BDT.weights.xml.gz",
"RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EE_10_2017_puinfo_BDT.weights.xml.gz"
)
## The working point for this MVA that is expected to have about 90% signal
# WP tuned to give about 90 and 80% signal efficiency for electrons from Drell-Yan with pT > 25 GeV
# The working point for the low pt categories is just taken over from the high pt
idName90 = "mvaEleID-Fall17-noIso-V1-wp90"
MVA_WP90 = EleMVA_WP(
idName = idName90, mvaTag = mvaTag,
cutCategory0 = "0.9165112826974601 - exp(-pt / 2.7381703555094217) * 1.03549199648109", # EB1 low pt
cutCategory1 = "0.8655738322220173 - exp(-pt / 2.4027944652597073) * 0.7975615613282494", # EB2 low pt
cutCategory2 = "-3016.035055227131 - exp(-pt / -52140.61856333602) * -3016.3029387236506", # EE low pt
cutCategory3 = "0.9616542816132922 - exp(-pt / 8.757943837889817) * 3.1390200321591206", # EB1
cutCategory4 = "0.9319258011430132 - exp(-pt / 8.846057432565809) * 3.5985063793347787", # EB2
cutCategory5 = "0.8899260780999244 - exp(-pt / 10.124234115859881) * 4.352791250718547", # EE
)
idName80 = "mvaEleID-Fall17-noIso-V1-wp80"
MVA_WP80 = EleMVA_WP(
idName = idName80, mvaTag = mvaTag,
cutCategory0 = "0.9530240956555949 - exp(-pt / 2.7591425841003647) * 0.4669644718545271", # EB1 low pt
cutCategory1 = "0.9336564763961019 - exp(-pt / 2.709276284272272) * 0.33512286599215946", # EB2 low pt
cutCategory2 = "0.9313133688365339 - exp(-pt / 1.5821934800715558) * 3.8889462619659265", # EE low pt
cutCategory3 = "0.9825268564943458 - exp(-pt / 8.702601455860762) * 1.1974861596609097", # EB1
cutCategory4 = "0.9727509457929913 - exp(-pt / 8.179525631018565) * 1.7111755094657688", # EB2
cutCategory5 = "0.9562619539540145 - exp(-pt / 8.109845366281608) * 3.013927699126942", # EE
)
### WP tuned for HZZ analysis with very high efficiency (about 98%)
# The working points were found by requiring the same signal efficiencies in
# each category as for the Spring 16 HZZ ID
# (see RecoEgamma/ElectronIdentification/python/Identification/mvaElectronID_Spring16_HZZ_V1_cff.py)
idNamewpLoose = "mvaEleID-Fall17-noIso-V1-wpLoose"
MVA_WPLoose = EleMVA_WP(
idName = idNamewpLoose, mvaTag = mvaTag,
cutCategory0 = "-0.13285867293779202", # EB1 low pt
cutCategory1 = "-0.31765300958836074", # EB2 low pt
cutCategory2 = "-0.0799205914718861" , # EE low pt
cutCategory3 = "-0.856871961305474" , # EB1
cutCategory4 = "-0.8107642141584835" , # EB2
cutCategory5 = "-0.7179265933023059" # EE
)
#
# Finally, set up VID configuration for all cuts
#
# Create the PSet that will be fed to the MVA value map producer
mvaEleID_Fall17_noIso_V1_producer_config = cms.PSet(
mvaName = cms.string(mvaClassName),
mvaTag = cms.string(mvaTag),
# Category parameters
nCategories = cms.int32(6),
categoryCuts = cms.vstring(*EleMVA_6CategoriesCuts),
# Weight files and variable definitions
weightFileNames = mvaFall17WeightFiles_V1,
variableDefinition = cms.string("RecoEgamma/ElectronIdentification/data/ElectronMVAEstimatorRun2Fall17V1Variables.txt")
)
# Create the VPset's for VID cuts
mvaEleID_Fall17_V1_wpLoose = configureVIDMVAEleID( MVA_WPLoose )
mvaEleID_Fall17_V1_wp90 = configureVIDMVAEleID( MVA_WP90 )
mvaEleID_Fall17_V1_wp80 = configureVIDMVAEleID( MVA_WP80 )
mvaEleID_Fall17_V1_wpLoose.isPOGApproved = cms.untracked.bool(True)
mvaEleID_Fall17_V1_wp90.isPOGApproved = cms.untracked.bool(True)
mvaEleID_Fall17_V1_wp80.isPOGApproved = cms.untracked.bool(True)
|
[
"FWCore.ParameterSet.Config.vstring",
"FWCore.ParameterSet.Config.int32",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.string"
] |
[((1339, 1964), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB1_5_2017_puinfo_BDT.weights.xml.gz"""', '"""RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB2_5_2017_puinfo_BDT.weights.xml.gz"""', '"""RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EE_5_2017_puinfo_BDT.weights.xml.gz"""', '"""RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB1_10_2017_puinfo_BDT.weights.xml.gz"""', '"""RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB2_10_2017_puinfo_BDT.weights.xml.gz"""', '"""RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EE_10_2017_puinfo_BDT.weights.xml.gz"""'], {}), "(\n 'RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB1_5_2017_puinfo_BDT.weights.xml.gz'\n ,\n 'RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB2_5_2017_puinfo_BDT.weights.xml.gz'\n ,\n 'RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EE_5_2017_puinfo_BDT.weights.xml.gz'\n ,\n 'RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB1_10_2017_puinfo_BDT.weights.xml.gz'\n ,\n 'RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EB2_10_2017_puinfo_BDT.weights.xml.gz'\n ,\n 'RecoEgamma/ElectronIdentification/data/Fall17/EIDmva_EE_10_2017_puinfo_BDT.weights.xml.gz'\n )\n", (1350, 1964), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5293, 5317), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (5311, 5317), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5358, 5382), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (5376, 5382), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5423, 5447), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (5441, 5447), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4604, 4628), 'FWCore.ParameterSet.Config.string', 'cms.string', (['mvaClassName'], {}), '(mvaClassName)\n', (4614, 4628), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4656, 4674), 'FWCore.ParameterSet.Config.string', 'cms.string', (['mvaTag'], {}), '(mvaTag)\n', (4666, 4674), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4728, 4740), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(6)'], {}), '(6)\n', (4737, 4740), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4768, 4804), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['*EleMVA_6CategoriesCuts'], {}), '(*EleMVA_6CategoriesCuts)\n', (4779, 4804), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4927, 5035), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""RecoEgamma/ElectronIdentification/data/ElectronMVAEstimatorRun2Fall17V1Variables.txt"""'], {}), "(\n 'RecoEgamma/ElectronIdentification/data/ElectronMVAEstimatorRun2Fall17V1Variables.txt'\n )\n", (4937, 5035), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
from typing import Union, List
import pexpect
from figcli.utils.utils import Utils
import sys
class FiggyAction:
"""
Actions prevent cyclic dependencies and are designed to leverage FiggyCli for cleanup steps when running
inside of tests.
"""
def __init__(self, command, extra_args=""):
self.c = Utils.default_colors()
self.command = command
self.extra_args = extra_args
self._child = self.spawn(command)
print(f"{self.c.fg_yl}Executing action: {self._child.args}{self.c.rs}")
self._child.logfile = sys.stdout
self._child.delaybeforesend = .5
def spawn(self, command: str):
return pexpect.spawn(command, timeout=10, encoding='utf-8')
def expect_multiple(self, regexes: List[str]):
print(f'Expecting: {regexes}')
return self._child.expect(regexes)
def expect(self, regex: Union[List[str], str], retry=True):
print(f'Expecting: {regex}')
expect_list = [regex] + [pexpect.TIMEOUT] if isinstance(regex, str) else regex + [pexpect.TIMEOUT]
result = self._child.expect(expect_list)
if result == len(expect_list) - 1 and retry:
self.alert(f"EXPECT FAILED: {regex} initiating retry!")
self._child = self.spawn(self.command)
return self.expect(regex, retry=False)
else:
return result
def sendline(self, line: str):
print(f'Sending: {line}')
self._child.sendline(line)
def wait(self):
self._child.wait()
def alert(self, msg: str):
print(f"{self.c.fg_yl}-----------------------------------------{self.c.rs}")
print(f"{self.c.fg_rd} ALERT: {msg}{self.c.rs}")
print(f"{self.c.fg_yl}-----------------------------------------{self.c.rs}")
|
[
"pexpect.spawn",
"figcli.utils.utils.Utils.default_colors"
] |
[((333, 355), 'figcli.utils.utils.Utils.default_colors', 'Utils.default_colors', ([], {}), '()\n', (353, 355), False, 'from figcli.utils.utils import Utils\n'), ((679, 731), 'pexpect.spawn', 'pexpect.spawn', (['command'], {'timeout': '(10)', 'encoding': '"""utf-8"""'}), "(command, timeout=10, encoding='utf-8')\n", (692, 731), False, 'import pexpect\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: <NAME>.
@mail: <EMAIL>
"""
# from qc.__version__ import __version__
import georinex as gr
import numpy as np
from matplotlib.pyplot import figure, show
import matplotlib.pyplot as plt
obs = gr.load(
'tests/test_data/Rinex3/KLSQ00GRL_R_20213070000_01D_15S_MO.rnx',
# tlim=['2021-11-03T12:00', '2021-11-03T12:30'])
tlim=['2021-11-03T05:30', '2021-11-03T07:30'])
# tlim=['2021-11-03T15:00', '2021-11-03T18:00'])
# hdr = gr.rinexheader(
# 'tests/test_data/Rinex3/KLSQ00GRL_R_20213070000_01D_15S_MO.rnx')
# rnx_version = 3
# %% Starting test
# Copying helper functions from Multipath class - later on, it could be turned
# into a separate class with helper functions
# Pick GPS satellites
svG = []
for i in range(0, len(obs.sv)):
if str(obs.sv[i].values)[0] == 'G':
svG.append(str(obs.sv[i].values))
else:
continue
# %%
# 5:30 to 7:30, G08 and G21 give 2 cycle slips # [290:300]
# 'G01','G06','G08','G10','G12','G14','G17','G19','G21','G22','G24','G30','G32'
sat = 'G21'
sattest = obs.sel(sv=sat).dropna(dim='time', how='all')
# G02 data vars with no-nan: C1C, D1C, L1C, S1C, C1W, C2W, D2W, L2W, S1W, S2W
I_max = 0.4 # Maximal ionospheric delay [m/h]
k = 4 # criterion factor
L1 = sattest['L1C'] # GPS
L2 = sattest['L2W'] # GPS
# L1 = sattest['L1C'] # Galileo
# L2 = sattest['L8Q'] # Galileo
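# Test quantity: absolute difference of the two carrier phases (a geometry-free style combination).
# A jump in L4 between consecutive epochs larger than `criterion` is flagged as a cycle slip.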
L4 = np.abs(L1 - L2)
sigma_L4 = np.std(L4)
criterion = k*sigma_L4 + I_max
slips_nr = 0
L4_diff = []
for i in range(1, len(L4)):
L4_diff.append(np.abs(L4[i] - L4[i-1]))
if (np.abs(L4[i] - L4[i-1]) > criterion):
# If satisfied, raise cycle-slip flag
slips_nr = slips_nr + 1
ax = figure(figsize=(10, 6)).gca()
ax.plot(L2.time[1:], L4_diff, label=sat)
plt.axhline(y=criterion, label='Slip limit', linestyle='-', color='r')
ax.grid()
ax.legend()
plt.xlabel('Time [epochs]')
plt.ylabel('L4')
plt.title('Single-frequency Melbourne-Wuebbena')
show()
print('Slips:', slips_nr, ', Slip criterion:', criterion.values)
# %%
# Plot all loaded sats, L1 and L2
ax = figure(figsize=(10, 6)).gca()
for i in range(0, len(svG)):
test = obs.sel(sv=svG[i]).dropna(dim='time', how='all')
L1test = test['L1C']
L2test = test['L2W']
ax.plot(L1test.time, L1test, label=svG[i], linewidth=2.0)
#ax.plot(L2test.time, L2test, label='L2', linewidth=0.5)
ax.grid()
ax.legend()
plt.xlabel('Time [epochs]')
plt.ylabel('Carrier phases')
show()
# %%
# Plot separate sats, L1 and L2
ax = figure(figsize=(10, 6)).gca()
test = obs.sel(sv='E21').dropna(dim='time', how='all')
L1test = test['L1C']
L2test = test['L2W']
ax.plot(L1test.time, L1test, label='L1', linewidth=2.0)
ax.plot(L2test.time, L2test, label='L2', linewidth=1.0)
ax.grid()
# ax.legend()
plt.xlabel('Time [epochs]')
plt.ylabel('Carrier phases')
show()
# %% Dual-frequency Melbourne-Wuebbena testing
# 'G01','G06','G08','G10','G12','G14','G17','G19','G21','G22','G24','G30','G32'
sat = 'G21'
sattest = obs.sel(sv=sat).dropna(dim='time', how='all')
# G02 data vars with no-nan: C1C, D1C, L1C, S1C, C1W, C2W, D2W, L2W, S1W, S2W
freq = [1575.42, 1227.60, 1176.45] # L1, L2, L5 for GPS
f1 = freq[0]*1e6
f2 = freq[1]*1e6
P1 = sattest['C1C']
P2 = sattest['C2W']
L1 = sattest['L1C'] # GPS
L2 = sattest['L2W'] # GPS
# L1 = sattest['L1C'] # Galileo
# L2 = sattest['L8Q'] # Galileo
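# Melbourne-Wuebbena combination: wide-lane carrier phase minus narrow-lane pseudorange.
# It cancels geometry, clocks and (to first order) the ionosphere, so epoch-to-epoch jumps
# in L6 larger than `criterion` indicate cycle slips.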
L6 = (1/(f1-f2))*(f1*L1 - f2*L2) - (1/(f1+f2))*(f1*P1 + f2*P2)
sigma_L6 = np.std(L6)
k = 4 # criterion factor
criterion = k*sigma_L6
slips_nr = 0
L6_diff = []
for i in range(1, len(L6)):
L6_diff.append(np.abs(L6[i] - L6[i-1]))
if (np.abs(L6[i] - L6[i-1]) > criterion):
# If satisfied, raise cycle-slip flag
slips_nr = slips_nr + 1
ax = figure(figsize=(10, 6)).gca()
ax.plot(L2.time[1:], L6_diff, label=sat)
plt.axhline(y=criterion, label='Slip limit', linestyle='-', color='r')
ax.grid()
ax.legend()
plt.xlabel('Time [epochs]')
plt.ylabel('L6')
plt.title('Dual-frequency Melbourne-Wuebbena')
show()
print('Slips:', slips_nr, ', Slip criterion:', criterion.values)
# %% Work in Progress
class Slips:
"""
Class for cycle slip detection of RINEX files.
Provides options for different detection algorithms.
Parameters
----------
L1 : TYPE
DESCRIPTION.
Returns
-------
L4 : TYPE
DESCRIPTION.
"""
def __init__(self):
pass
def slips_MW_single_freq(self, obs):
"""
Cycle slip detection algorithm 1.
Based on Melbourne-Wuebbena,
but only on carrier phase data (single-frequency)
(from Vaclavovic-Dousa 2016 article)
Parameters
----------
obs : TYPE
DESCRIPTION.
Returns
-------
None.
"""
# Select a list of GPS satellites
svG = []
for i in range(0, len(obs.sv)):
if str(obs.sv[i].values)[0] == 'G':
svG.append(str(obs.sv[i].values))
else:
continue
# Melbourne-Wuebbena parameters (predetermined)
I_max = 0.4 # Maximal ionospheric delay [m/h]
k = 4 # criterion factor
# For each tracked satellite
for i in range(0, len(svG)):
current_sat = obs.sel(sv=svG[i]).dropna(dim='time', how='all')
L1 = current_sat['L1C']
L2 = current_sat['L2W']
L4 = np.abs(L1 - L2)
sigma_L4 = np.std(L4)
criterion = k*sigma_L4 + I_max
slips_nr = 0
L4_diff = []
for j in range(1, len(L4)):
L4_diff.append(np.abs(L4[j] - L4[j-1]))
if (np.abs(L4[j] - L4[j-1]) > criterion):
# If satisfied, raise cycle-slip flag
slips_nr = slips_nr + 1
print('Sat:', svG[i],
', Slips:', slips_nr,
', Slip criterion:', criterion.values)
def plot_slips(self, obs, sat_nr: str):
"""
Plot cycle slips for one satellite vehicle.
Parameters
----------
obs : TYPE
DESCRIPTION.
sat_nr : str
DESCRIPTION.
Returns
-------
None.
"""
sat = obs.sel(sv=sat_nr).dropna(dim='time', how='all')
I_max = 0.4 # Maximal ionospheric delay [m/h]
k = 4 # criterion factor
L1 = sat['L1C']
L2 = sat['L2W']
L4 = np.abs(L1 - L2)
sigma_L4 = np.std(L4)
criterion = k*sigma_L4 + I_max
slips_nr = 0
L4_diff = []
for i in range(1, len(L4)):
L4_diff.append(np.abs(L4[i] - L4[i-1]))
if (np.abs(L4[i] - L4[i-1]) > criterion):
# If satisfied, raise cycle-slip flag
slips_nr = slips_nr + 1
ax = figure(figsize=(10, 6)).gca()
ax.plot(L2.time[1:], L4_diff, label=sat_nr, linewidth=1.0)
# labelfull = 'Slip limit: ', criterion.values
plt.axhline(y=criterion, label='Slip limit', linestyle='-', color='r')
ax.grid()
ax.legend()
plt.xlabel('Time [epochs]')
plt.ylabel('L4')
show()
print('Sat:', sat_nr,
', Slips:', slips_nr,
', Slip criterion:', criterion.values)
# %% Testing first algorithm
sliptest = Slips().slips_MW_single_freq(obs)
# %% Testing plot function
sliptest = Slips().plot_slips(obs, 'G08')
|
[
"numpy.abs",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"georinex.load",
"matplotlib.pyplot.axhline",
"matplotlib.pyplot.figure",
"numpy.std",
"matplotlib.pyplot.title",
"matplotlib.pyplot.show"
] |
[((254, 377), 'georinex.load', 'gr.load', (['"""tests/test_data/Rinex3/KLSQ00GRL_R_20213070000_01D_15S_MO.rnx"""'], {'tlim': "['2021-11-03T05:30', '2021-11-03T07:30']"}), "('tests/test_data/Rinex3/KLSQ00GRL_R_20213070000_01D_15S_MO.rnx',\n tlim=['2021-11-03T05:30', '2021-11-03T07:30'])\n", (261, 377), True, 'import georinex as gr\n'), ((1410, 1425), 'numpy.abs', 'np.abs', (['(L1 - L2)'], {}), '(L1 - L2)\n', (1416, 1425), True, 'import numpy as np\n'), ((1438, 1448), 'numpy.std', 'np.std', (['L4'], {}), '(L4)\n', (1444, 1448), True, 'import numpy as np\n'), ((1781, 1851), 'matplotlib.pyplot.axhline', 'plt.axhline', ([], {'y': 'criterion', 'label': '"""Slip limit"""', 'linestyle': '"""-"""', 'color': '"""r"""'}), "(y=criterion, label='Slip limit', linestyle='-', color='r')\n", (1792, 1851), True, 'import matplotlib.pyplot as plt\n'), ((1874, 1901), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [epochs]"""'], {}), "('Time [epochs]')\n", (1884, 1901), True, 'import matplotlib.pyplot as plt\n'), ((1902, 1918), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""L4"""'], {}), "('L4')\n", (1912, 1918), True, 'import matplotlib.pyplot as plt\n'), ((1919, 1967), 'matplotlib.pyplot.title', 'plt.title', (['"""Single-frequency Melbourne-Wuebbena"""'], {}), "('Single-frequency Melbourne-Wuebbena')\n", (1928, 1967), True, 'import matplotlib.pyplot as plt\n'), ((1968, 1974), 'matplotlib.pyplot.show', 'show', ([], {}), '()\n', (1972, 1974), False, 'from matplotlib.pyplot import figure, show\n'), ((2402, 2429), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [epochs]"""'], {}), "('Time [epochs]')\n", (2412, 2429), True, 'import matplotlib.pyplot as plt\n'), ((2430, 2458), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Carrier phases"""'], {}), "('Carrier phases')\n", (2440, 2458), True, 'import matplotlib.pyplot as plt\n'), ((2459, 2465), 'matplotlib.pyplot.show', 'show', ([], {}), '()\n', (2463, 2465), False, 'from matplotlib.pyplot import figure, show\n'), ((2772, 2799), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [epochs]"""'], {}), "('Time [epochs]')\n", (2782, 2799), True, 'import matplotlib.pyplot as plt\n'), ((2800, 2828), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Carrier phases"""'], {}), "('Carrier phases')\n", (2810, 2828), True, 'import matplotlib.pyplot as plt\n'), ((2829, 2835), 'matplotlib.pyplot.show', 'show', ([], {}), '()\n', (2833, 2835), False, 'from matplotlib.pyplot import figure, show\n'), ((3441, 3451), 'numpy.std', 'np.std', (['L6'], {}), '(L6)\n', (3447, 3451), True, 'import numpy as np\n'), ((3802, 3872), 'matplotlib.pyplot.axhline', 'plt.axhline', ([], {'y': 'criterion', 'label': '"""Slip limit"""', 'linestyle': '"""-"""', 'color': '"""r"""'}), "(y=criterion, label='Slip limit', linestyle='-', color='r')\n", (3813, 3872), True, 'import matplotlib.pyplot as plt\n'), ((3895, 3922), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [epochs]"""'], {}), "('Time [epochs]')\n", (3905, 3922), True, 'import matplotlib.pyplot as plt\n'), ((3923, 3939), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""L6"""'], {}), "('L6')\n", (3933, 3939), True, 'import matplotlib.pyplot as plt\n'), ((3940, 3986), 'matplotlib.pyplot.title', 'plt.title', (['"""Dual-frequency Melbourne-Wuebbena"""'], {}), "('Dual-frequency Melbourne-Wuebbena')\n", (3949, 3986), True, 'import matplotlib.pyplot as plt\n'), ((3987, 3993), 'matplotlib.pyplot.show', 'show', ([], {}), '()\n', (3991, 3993), False, 'from matplotlib.pyplot import figure, show\n'), ((1555, 1580), 'numpy.abs', 'np.abs', 
(['(L4[i] - L4[i - 1])'], {}), '(L4[i] - L4[i - 1])\n', (1561, 1580), True, 'import numpy as np\n'), ((1588, 1613), 'numpy.abs', 'np.abs', (['(L4[i] - L4[i - 1])'], {}), '(L4[i] - L4[i - 1])\n', (1594, 1613), True, 'import numpy as np\n'), ((1710, 1733), 'matplotlib.pyplot.figure', 'figure', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (1716, 1733), False, 'from matplotlib.pyplot import figure, show\n'), ((2088, 2111), 'matplotlib.pyplot.figure', 'figure', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (2094, 2111), False, 'from matplotlib.pyplot import figure, show\n'), ((2509, 2532), 'matplotlib.pyplot.figure', 'figure', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (2515, 2532), False, 'from matplotlib.pyplot import figure, show\n'), ((3576, 3601), 'numpy.abs', 'np.abs', (['(L6[i] - L6[i - 1])'], {}), '(L6[i] - L6[i - 1])\n', (3582, 3601), True, 'import numpy as np\n'), ((3609, 3634), 'numpy.abs', 'np.abs', (['(L6[i] - L6[i - 1])'], {}), '(L6[i] - L6[i - 1])\n', (3615, 3634), True, 'import numpy as np\n'), ((3731, 3754), 'matplotlib.pyplot.figure', 'figure', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (3737, 3754), False, 'from matplotlib.pyplot import figure, show\n'), ((6444, 6459), 'numpy.abs', 'np.abs', (['(L1 - L2)'], {}), '(L1 - L2)\n', (6450, 6459), True, 'import numpy as np\n'), ((6480, 6490), 'numpy.std', 'np.std', (['L4'], {}), '(L4)\n', (6486, 6490), True, 'import numpy as np\n'), ((6984, 7054), 'matplotlib.pyplot.axhline', 'plt.axhline', ([], {'y': 'criterion', 'label': '"""Slip limit"""', 'linestyle': '"""-"""', 'color': '"""r"""'}), "(y=criterion, label='Slip limit', linestyle='-', color='r')\n", (6995, 7054), True, 'import matplotlib.pyplot as plt\n'), ((7101, 7128), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [epochs]"""'], {}), "('Time [epochs]')\n", (7111, 7128), True, 'import matplotlib.pyplot as plt\n'), ((7137, 7153), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""L4"""'], {}), "('L4')\n", (7147, 7153), True, 'import matplotlib.pyplot as plt\n'), ((7162, 7168), 'matplotlib.pyplot.show', 'show', ([], {}), '()\n', (7166, 7168), False, 'from matplotlib.pyplot import figure, show\n'), ((5396, 5411), 'numpy.abs', 'np.abs', (['(L1 - L2)'], {}), '(L1 - L2)\n', (5402, 5411), True, 'import numpy as np\n'), ((5436, 5446), 'numpy.std', 'np.std', (['L4'], {}), '(L4)\n', (5442, 5446), True, 'import numpy as np\n'), ((6637, 6662), 'numpy.abs', 'np.abs', (['(L4[i] - L4[i - 1])'], {}), '(L4[i] - L4[i - 1])\n', (6643, 6662), True, 'import numpy as np\n'), ((6678, 6703), 'numpy.abs', 'np.abs', (['(L4[i] - L4[i - 1])'], {}), '(L4[i] - L4[i - 1])\n', (6684, 6703), True, 'import numpy as np\n'), ((6824, 6847), 'matplotlib.pyplot.figure', 'figure', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (6830, 6847), False, 'from matplotlib.pyplot import figure, show\n'), ((5613, 5638), 'numpy.abs', 'np.abs', (['(L4[j] - L4[j - 1])'], {}), '(L4[j] - L4[j - 1])\n', (5619, 5638), True, 'import numpy as np\n'), ((5658, 5683), 'numpy.abs', 'np.abs', (['(L4[j] - L4[j - 1])'], {}), '(L4[j] - L4[j - 1])\n', (5664, 5683), True, 'import numpy as np\n')]
|
from collections import OrderedDict
from datetime import datetime, timezone
import unittest
from os.path import join
from tinydb import TinyDB, storages
from goldfinchsong import utils
IMAGE_NAMES = ['goldfinch1.jpg', 'goldfinch2.jpg', 'goldfinch3.jpg',
'goldfinch4.jpg', 'goldfinch5.jpg']
TEST_TEXT1 = 'This is a test of the goldfinchsong project. This test checks ' \
'abbreviations, vowel elision, length checking, and other logic. ' \
'Tests are important!'
TEST_TEXT2 = 'This is a test of the goldfinchsong project. Tests ' \
'abbreviations, vowel elision, length checking, and other logic. ' \
'Tests are important!'
class LoadContentTests(unittest.TestCase):
def test_basic_load(self):
image_directory = 'tests/images/'
db = TinyDB(storage=storages.MemoryStorage)
content = utils.load_content(db, image_directory)
full_image_path = content[0]
image_file = full_image_path.replace(image_directory, '')
status_text = content[1]
self.assertTrue(image_file in IMAGE_NAMES)
self.assertEqual(image_file.replace('.jpg', ''), status_text)
def test_storage_in_db(self):
image_directory = 'tests/images/'
# let's load a list of tweets into the db
db = TinyDB(storage=storages.MemoryStorage)
image_names = [
'goldfinch1.jpg',
'goldfinch2.jpg',
'goldfinch3.jpg',
'goldfinch4.jpg'
]
for image_name in image_names:
delivery_timestamp = datetime.now(tz=timezone.utc).isoformat()
tweet = {'image': image_name, 'delivered_on': delivery_timestamp}
db.insert(tweet)
content = utils.load_content(db, image_directory)
self.assertEqual(content[2], 'goldfinch5.jpg')
tweets = db.all()
self.assertEqual(len(tweets), 4, msg=tweets)
class UtilitiesTests(unittest.TestCase):
def test_apply_abbreviations(self):
text_conversions = {
'abbreviations': 'abbr',
'goldfinchsong': 'gf',
'important': 'impt'
}
# exhausts all conversions before reaching limit
new_text1 = utils.apply_abbreviations(TEST_TEXT1, text_conversions)
expected_text1 = 'This is a test of the gf project. This test checks ' \
'abbr, vowel elision, length checking, and other logic. ' \
'Tests are impt!'
self.assertEqual(expected_text1, new_text1)
new_text2 = utils.apply_abbreviations(TEST_TEXT2, text_conversions)
self.assertTrue(len(new_text2) <= 117)
def test_apply_vowel_elision(self):
result_text = utils.apply_vowel_elision(TEST_TEXT1)
expected_text = 'This is a tst of the gldfnchsng prjct. Ths tst chcks ' \
'abbrvtns, vwl elsn, lngth chckng, and othr lgc. Tsts ' \
'are imprtnt!'
self.assertEqual(expected_text, result_text)
def test_assemble_elided_status(self):
complete_words = ['test', 'a', 'is', 'This']
elided_words = ['systm', 'gldfnch', 'the', 'of']
result = utils.assemble_elided_status(complete_words, elided_words)
self.assertEqual('This is a test of the gldfnch systm', result)
def test_chop_words(self):
result_text = utils.chop_words(TEST_TEXT1)
expected_text = 'This is a test of the goldfinchsong project. This test checks ' \
'abbreviations, vowel elision, length checking, and'
self.assertEqual(expected_text, result_text)
def test_is_image(self):
image_files = [
'image.gif',
'image.jpg',
'image.jpeg',
'image.png',
'image.GIF',
'image.JPG',
'image.JPEG',
'image.PNG',
'image.GiF',
'image.JpG',
'image.JpEg',
'image.PnG'
]
for image_file in image_files:
self.assertTrue(utils.is_image_file(image_file))
def test_is_not_image(self):
image_files = [
'image.docx',
'image.pdf',
'image.md',
'image.html',
'image.css',
'image.odt',
'image.sh',
'image.xlsx',
'image.txt',
'image.c',
'image.py',
'image'
]
for image_file in image_files:
self.assertFalse(utils.is_image_file(image_file))
def test_trim_file_extensions(self):
image_files = [
'image.gif',
'image.jpg',
'image.jpeg',
'image.png',
'image.GIF',
'image.JPG',
'image.JPEG',
'image.PNG',
'image.GiF',
'image.JpG',
'image.JpEg',
'image.PnG'
]
for image_file in image_files:
self.assertEqual(utils.trim_file_extension(image_file), 'image')
def test_to_compact_text(self):
text_conversions = {
'abbreviations': 'abbrs',
'goldfinchsong': 'gfnch',
'important': 'importnt'
}
candidate_text1 = utils.to_compact_text(TEST_TEXT1, 100, text_conversions)
expected_text1 = 'Ths is a tst of the gfnch prjct. Ths tst chcks abbrs, ' \
'vwl elsn, lngth chckng, and othr lgc. Tsts are'
self.assertEqual(expected_text1, candidate_text1)
candidate_text2 = utils.to_compact_text(TEST_TEXT1, 50, text_conversions)
expected_text2 = 'Ths is a tst of the gfnch prjct. Ths tst chcks'
self.assertEqual(expected_text2, candidate_text2)
candidate_text3 = utils.to_compact_text(TEST_TEXT1, 20, text_conversions)
expected_text3 = 'Ths is a tst of the'
self.assertEqual(expected_text3, candidate_text3)
def test_extract_status_text(self):
conversion_data = (
('abbreviations', 'abbrs'),
('goldfinchsong', 'gfnch'),
('important', 'importnt'),
)
text_conversions = OrderedDict(conversion_data)
file = 'Some_goldfinchsong_image-file_with_a_very_long_set_of_' \
'characters_and_abbreviations_that_conveys_important_info.png'
candidate_text1 = utils.extract_status_text(file, text_conversions, maximum_length=100,)
expected_text1 = 'Some gfnch image-file with a very long set of characters and abbrs that conveys important info'
self.assertEqual(expected_text1, candidate_text1)
candidate_text2 = utils.extract_status_text(file, text_conversions, maximum_length=70,)
expected_text2 = 'Sme gfnch imge-fle wth a vry lng st of chrctrs and abbrs tht cnvys'
self.assertEqual(expected_text2, candidate_text2)
def test_get_unused_files(self):
available_files = list()
for index in range(1,101):
image_name = 'image{0}.png'.format(index)
available_files.append(image_name)
db = TinyDB(storage=storages.MemoryStorage)
for id in range(1,52):
image_name = 'image{0}.png'.format(id)
db.insert({'image': image_name})
unused_files = utils.get_unused_files(db, available_files)
self.assertEqual(len(unused_files), 49)
self.assertEqual(unused_files[0], 'image52.png')
self.assertEqual(unused_files[5], 'image57.png')
self.assertEqual(unused_files[10], 'image62.png')
self.assertEqual(unused_files[15], 'image67.png')
self.assertEqual(unused_files[20], 'image72.png')
self.assertEqual(unused_files[33], 'image85.png')
self.assertEqual(unused_files[48], 'image100.png')
def test_db_purge_when_all_posted(self):
available_files = list()
for index in range(1,101):
image_name = 'image{0}.png'.format(index)
available_files.append(image_name)
db = TinyDB(storage=storages.MemoryStorage)
for id in range(1,106):
image_name = 'image{0}.png'.format(id)
db.insert({'image': image_name})
self.assertEqual(len(db.all()), 105)
unused_files = utils.get_unused_files(db, available_files)
self.assertEqual(len(unused_files), 100)
self.assertEqual(unused_files[0], 'image1.png')
self.assertEqual(unused_files[5], 'image6.png')
self.assertEqual(unused_files[10], 'image11.png')
self.assertEqual(unused_files[33], 'image34.png')
self.assertEqual(unused_files[50], 'image51.png')
|
[
"collections.OrderedDict",
"tinydb.TinyDB",
"goldfinchsong.utils.apply_abbreviations",
"goldfinchsong.utils.is_image_file",
"goldfinchsong.utils.to_compact_text",
"goldfinchsong.utils.load_content",
"goldfinchsong.utils.chop_words",
"goldfinchsong.utils.extract_status_text",
"goldfinchsong.utils.trim_file_extension",
"datetime.datetime.now",
"goldfinchsong.utils.assemble_elided_status",
"goldfinchsong.utils.apply_vowel_elision",
"goldfinchsong.utils.get_unused_files"
] |
[((821, 859), 'tinydb.TinyDB', 'TinyDB', ([], {'storage': 'storages.MemoryStorage'}), '(storage=storages.MemoryStorage)\n', (827, 859), False, 'from tinydb import TinyDB, storages\n'), ((878, 917), 'goldfinchsong.utils.load_content', 'utils.load_content', (['db', 'image_directory'], {}), '(db, image_directory)\n', (896, 917), False, 'from goldfinchsong import utils\n'), ((1315, 1353), 'tinydb.TinyDB', 'TinyDB', ([], {'storage': 'storages.MemoryStorage'}), '(storage=storages.MemoryStorage)\n', (1321, 1353), False, 'from tinydb import TinyDB, storages\n'), ((1746, 1785), 'goldfinchsong.utils.load_content', 'utils.load_content', (['db', 'image_directory'], {}), '(db, image_directory)\n', (1764, 1785), False, 'from goldfinchsong import utils\n'), ((2224, 2279), 'goldfinchsong.utils.apply_abbreviations', 'utils.apply_abbreviations', (['TEST_TEXT1', 'text_conversions'], {}), '(TEST_TEXT1, text_conversions)\n', (2249, 2279), False, 'from goldfinchsong import utils\n'), ((2551, 2606), 'goldfinchsong.utils.apply_abbreviations', 'utils.apply_abbreviations', (['TEST_TEXT2', 'text_conversions'], {}), '(TEST_TEXT2, text_conversions)\n', (2576, 2606), False, 'from goldfinchsong import utils\n'), ((2717, 2754), 'goldfinchsong.utils.apply_vowel_elision', 'utils.apply_vowel_elision', (['TEST_TEXT1'], {}), '(TEST_TEXT1)\n', (2742, 2754), False, 'from goldfinchsong import utils\n'), ((3182, 3240), 'goldfinchsong.utils.assemble_elided_status', 'utils.assemble_elided_status', (['complete_words', 'elided_words'], {}), '(complete_words, elided_words)\n', (3210, 3240), False, 'from goldfinchsong import utils\n'), ((3367, 3395), 'goldfinchsong.utils.chop_words', 'utils.chop_words', (['TEST_TEXT1'], {}), '(TEST_TEXT1)\n', (3383, 3395), False, 'from goldfinchsong import utils\n'), ((5253, 5309), 'goldfinchsong.utils.to_compact_text', 'utils.to_compact_text', (['TEST_TEXT1', '(100)', 'text_conversions'], {}), '(TEST_TEXT1, 100, text_conversions)\n', (5274, 5309), False, 'from goldfinchsong import utils\n'), ((5552, 5607), 'goldfinchsong.utils.to_compact_text', 'utils.to_compact_text', (['TEST_TEXT1', '(50)', 'text_conversions'], {}), '(TEST_TEXT1, 50, text_conversions)\n', (5573, 5607), False, 'from goldfinchsong import utils\n'), ((5766, 5821), 'goldfinchsong.utils.to_compact_text', 'utils.to_compact_text', (['TEST_TEXT1', '(20)', 'text_conversions'], {}), '(TEST_TEXT1, 20, text_conversions)\n', (5787, 5821), False, 'from goldfinchsong import utils\n'), ((6152, 6180), 'collections.OrderedDict', 'OrderedDict', (['conversion_data'], {}), '(conversion_data)\n', (6163, 6180), False, 'from collections import OrderedDict\n'), ((6359, 6428), 'goldfinchsong.utils.extract_status_text', 'utils.extract_status_text', (['file', 'text_conversions'], {'maximum_length': '(100)'}), '(file, text_conversions, maximum_length=100)\n', (6384, 6428), False, 'from goldfinchsong import utils\n'), ((6636, 6704), 'goldfinchsong.utils.extract_status_text', 'utils.extract_status_text', (['file', 'text_conversions'], {'maximum_length': '(70)'}), '(file, text_conversions, maximum_length=70)\n', (6661, 6704), False, 'from goldfinchsong import utils\n'), ((7078, 7116), 'tinydb.TinyDB', 'TinyDB', ([], {'storage': 'storages.MemoryStorage'}), '(storage=storages.MemoryStorage)\n', (7084, 7116), False, 'from tinydb import TinyDB, storages\n'), ((7267, 7310), 'goldfinchsong.utils.get_unused_files', 'utils.get_unused_files', (['db', 'available_files'], {}), '(db, available_files)\n', (7289, 7310), False, 'from goldfinchsong import utils\n'), ((7992, 
8030), 'tinydb.TinyDB', 'TinyDB', ([], {'storage': 'storages.MemoryStorage'}), '(storage=storages.MemoryStorage)\n', (7998, 8030), False, 'from tinydb import TinyDB, storages\n'), ((8227, 8270), 'goldfinchsong.utils.get_unused_files', 'utils.get_unused_files', (['db', 'available_files'], {}), '(db, available_files)\n', (8249, 8270), False, 'from goldfinchsong import utils\n'), ((4050, 4081), 'goldfinchsong.utils.is_image_file', 'utils.is_image_file', (['image_file'], {}), '(image_file)\n', (4069, 4081), False, 'from goldfinchsong import utils\n'), ((4512, 4543), 'goldfinchsong.utils.is_image_file', 'utils.is_image_file', (['image_file'], {}), '(image_file)\n', (4531, 4543), False, 'from goldfinchsong import utils\n'), ((4991, 5028), 'goldfinchsong.utils.trim_file_extension', 'utils.trim_file_extension', (['image_file'], {}), '(image_file)\n', (5016, 5028), False, 'from goldfinchsong import utils\n'), ((1579, 1608), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (1591, 1608), False, 'from datetime import datetime, timezone\n')]
|
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Input generator for image data."""
import os
import lingvo.compat as tf
from lingvo.core import base_input_generator
from tensorflow.python.ops import io_ops
class _MnistInputBase(base_input_generator.BaseTinyDatasetInput):
"""Base input params for MNIST."""
@classmethod
def Params(cls):
"""Defaults params."""
p = super().Params()
p.data_dtype = tf.uint8
p.data_shape = (28, 28, 1)
p.label_dtype = tf.uint8
return p
def _Preprocess(self, raw):
data = tf.stack([
tf.image.per_image_standardization(img) for img in tf.unstack(raw)
])
data.set_shape(raw.shape)
return data
class MnistTrainInput(_MnistInputBase):
"""MNist training set."""
@classmethod
def Params(cls):
"""Defaults params."""
p = super().Params()
p.data = 'x_train'
p.label = 'y_train'
p.num_samples = 60000
p.batch_size = 256
p.repeat = True
return p
class MnistTestInput(_MnistInputBase):
"""MNist test set."""
@classmethod
def Params(cls):
"""Defaults params."""
p = super().Params()
p.data = 'x_test'
p.label = 'y_test'
p.num_samples = 10000
p.batch_size = 256
p.repeat = False
return p
def _GetRandomImages(batch_size):
images = tf.random.uniform((batch_size, 28, 28, 1), 0, 255, tf.int32)
return tf.cast(images, tf.uint8)
def _GetRandomLabels(batch_size):
labels = tf.random.categorical(0.1 * tf.ones((1, 10)), batch_size)
return tf.cast(labels, tf.uint8)
def FakeMnistData(tmpdir, train_size=60000, test_size=10000):
"""Fake Mnist data for unit tests."""
data_path = os.path.join(tmpdir, 'ckpt')
with tf.Graph().as_default():
tf.random.set_seed(91)
with tf.Session() as sess:
sess.run(
io_ops.save_v2(
data_path,
tensor_names=['x_train', 'y_train', 'x_test', 'y_test'],
shape_and_slices=['', '', '', ''],
tensors=[
_GetRandomImages(train_size),
_GetRandomLabels(train_size),
_GetRandomImages(test_size),
_GetRandomLabels(test_size)
]))
return data_path
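# Example (assumption: any writable temporary directory works; shown for illustration only):
#   import tempfile
#   ckpt_path = FakeMnistData(tempfile.mkdtemp())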
|
[
"lingvo.compat.random.uniform",
"lingvo.compat.image.per_image_standardization",
"lingvo.compat.cast",
"lingvo.compat.ones",
"os.path.join",
"lingvo.compat.unstack",
"lingvo.compat.Session",
"lingvo.compat.random.set_seed",
"lingvo.compat.Graph"
] |
[((1958, 2018), 'lingvo.compat.random.uniform', 'tf.random.uniform', (['(batch_size, 28, 28, 1)', '(0)', '(255)', 'tf.int32'], {}), '((batch_size, 28, 28, 1), 0, 255, tf.int32)\n', (1975, 2018), True, 'import lingvo.compat as tf\n'), ((2028, 2053), 'lingvo.compat.cast', 'tf.cast', (['images', 'tf.uint8'], {}), '(images, tf.uint8)\n', (2035, 2053), True, 'import lingvo.compat as tf\n'), ((2168, 2193), 'lingvo.compat.cast', 'tf.cast', (['labels', 'tf.uint8'], {}), '(labels, tf.uint8)\n', (2175, 2193), True, 'import lingvo.compat as tf\n'), ((2312, 2340), 'os.path.join', 'os.path.join', (['tmpdir', '"""ckpt"""'], {}), "(tmpdir, 'ckpt')\n", (2324, 2340), False, 'import os\n'), ((2377, 2399), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(91)'], {}), '(91)\n', (2395, 2399), True, 'import lingvo.compat as tf\n'), ((2129, 2145), 'lingvo.compat.ones', 'tf.ones', (['(1, 10)'], {}), '((1, 10))\n', (2136, 2145), True, 'import lingvo.compat as tf\n'), ((2409, 2421), 'lingvo.compat.Session', 'tf.Session', ([], {}), '()\n', (2419, 2421), True, 'import lingvo.compat as tf\n'), ((1225, 1264), 'lingvo.compat.image.per_image_standardization', 'tf.image.per_image_standardization', (['img'], {}), '(img)\n', (1259, 1264), True, 'import lingvo.compat as tf\n'), ((2348, 2358), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (2356, 2358), True, 'import lingvo.compat as tf\n'), ((1276, 1291), 'lingvo.compat.unstack', 'tf.unstack', (['raw'], {}), '(raw)\n', (1286, 1291), True, 'import lingvo.compat as tf\n')]
|
from src.base.solution import Solution
from src.tests.part1.q389_test_find_diff import FindDiffTestCases
class FindDiff(Solution):
def verify_output(self, test_output, output):
return test_output[0] == output[0]
def run_test(self, input):
return self.findTheDifference(input[0], input[1])
def gen_test_cases(self):
return FindDiffTestCases()
def print_output(self, output):
super(FindDiff, self).print_output(output)
def findTheDifference(self, s, t):
"""
:type s: str
:type t: str
:rtype: str
"""
lkp = dict()
for ch in s:
lkp[ch] = lkp.get(ch, 0) + 1
for ch in t:
lkp[ch] = lkp.get(ch, 0) - 1
if lkp[ch] < 0:
return ch
if __name__ == '__main__':
solution = FindDiff()
solution.run_tests()
|
[
"src.tests.part1.q389_test_find_diff.FindDiffTestCases"
] |
[((361, 380), 'src.tests.part1.q389_test_find_diff.FindDiffTestCases', 'FindDiffTestCases', ([], {}), '()\n', (378, 380), False, 'from src.tests.part1.q389_test_find_diff import FindDiffTestCases\n')]
|
import contextlib
import os
import tempfile
import warnings
from enum import Enum
import mip
class IISFinderAlgorithm(Enum):
DELETION_FILTER = 1
ADDITIVE_ALGORITHM = 2
class SubRelaxationInfeasible(Exception):
pass
class NonRelaxableModel(Exception):
pass
class ConflictFinder:
"""This class groups some IIS (Irreducible Infeasible Set) search algorithms"""
def __init__(self, model: mip.Model):
if model.status == mip.OptimizationStatus.LOADED:
print("model not runned yet, checking if feasible or not")
model.emphasis = 1 # feasibility
model.preprocess = 1 # -1 automatic, 0 off, 1 on.
model.optimize()
assert (
model.status == mip.OptimizationStatus.INFEASIBLE
), "model is not linear infeasible"
self.model = model
def find_iis(
self, method: IISFinderAlgorithm = IISFinderAlgorithm.DELETION_FILTER,
cbc_verbose: bool = False
) -> mip.ConstrList:
"""main method to find an IIS, this method is just a grouping of the other implementations
Args:
model (mip.Model): Infeasible model where to find the IIS
method (str, optional): name of the method to use ["deletion-filter", "additive_algorithm"]. Defaults to "deletion-filter".
Returns:
mip.ConstrList: IIS constraint list
"""
# assumes the model is infeasible, and not merely stopped by a time limit
with contextlib.nullcontext() if cbc_verbose else ignore_output() as iow:
if method == IISFinderAlgorithm.DELETION_FILTER:
return self.deletion_filter()
if method == IISFinderAlgorithm.ADDITIVE_ALGORITHM:
return self.additive_algorithm()
def deletion_filter(self) -> mip.ConstrList:
"""deletion filter algorithm for search an IIS
Args:
model (mip.Model): Infeasible model
Returns:
mip.ConstrList: IIS
"""
# 1. create a model with all constraints but one
aux_model = self.model.copy()
aux_model.objective = 1
aux_model.emphasis = 1 # feasibility
aux_model.preprocess = 1 # -1 automatic, 0 off, 1 on.
print("starting deletion_filter algorithm")
for inc_crt in self.model.constrs:
aux_model_inc_crt = aux_model.constr_by_name(
inc_crt.name
) # find constraint by name
aux_model.remove(aux_model_inc_crt) # temporarily remove inc_crt
aux_model.optimize()
status = aux_model.status
# 2. test feasibility, if feasible, return dropped constraint to the set
# 2.1 else removed it permanently
# print('status {}'.format(status))
if status == mip.OptimizationStatus.INFEASIBLE:
# print("removing permanently {}".format(inc_crt.name))
continue
elif status in [
mip.OptimizationStatus.FEASIBLE,
mip.OptimizationStatus.OPTIMAL,
]:
aux_model.add_constr(
inc_crt.expr, name=inc_crt.name, priority=inc_crt.priority
)
iis = aux_model.constrs
return iis
def additive_algorithm(self) -> mip.ConstrList:
"""Additive algorithm to find an IIS
Returns:
mip.ConstrList: IIS
"""
# Create some aux models to test feasibility of the set of constraints
aux_model_testing = mip.Model()
for var in self.model.vars:
aux_model_testing.add_var(
name=var.name,
lb=var.lb,
ub=var.ub,
var_type=var.var_type,
# obj= var.obj,
# column=var.column #!! libc++abi.dylib: terminating with uncaught exception of type CoinError
)
aux_model_testing.objective = 1
aux_model_testing.emphasis = 1 # feasibility
aux_model_testing.preprocess = 1 # -1 automatic, 0 off, 1 on.
aux_model_iis = (
aux_model_testing.copy()
) # a second aux model to test feasibility of the incumbent iis
# algorithm start
all_constraints = self.model.constrs
testing_crt_set = mip.ConstrList(model=aux_model_testing) # T
iis = mip.ConstrList(model=aux_model_iis) # I
while True:
for crt in all_constraints:
testing_crt_set.add(crt.expr, name=crt.name)
aux_model_testing.constrs = testing_crt_set
aux_model_testing.optimize()
if aux_model_testing.status == mip.OptimizationStatus.INFEASIBLE:
iis.add(crt.expr, name=crt.name)
aux_model_iis.constrs = iis
aux_model_iis.optimize()
if aux_model_iis.status == mip.OptimizationStatus.INFEASIBLE:
return iis
elif aux_model_iis.status in [
mip.OptimizationStatus.FEASIBLE,
mip.OptimizationStatus.OPTIMAL,
]:
testing_crt_set = mip.ConstrList(model=aux_model_testing)
for (
crt
) in (
iis
): # basically this loop is for set T=I // aux_model_iis = iis.copy()
testing_crt_set.add(crt.expr, name=crt.name)
break
def deletion_filter_milp_ir_lc_bd(self) -> mip.ConstrList:
"""Integer deletion filter algorithm (milp_ir_lc_bd)
Raises:
NotImplementedError: [description]
Returns:
mip.ConstrList: [description]
"""
raise NotImplementedError("WIP")
# major constraint sets definition
t_aux_model = mip.Model(name="t_auxiliary_model")
iis_aux_model = mip.Model(name="t_auxiliary_model")
linear_constraints = mip.ConstrList(
model=t_aux_model
) # all the linear model constraints
variable_bound_constraints = mip.ConstrList(
model=t_aux_model
) # all the linear model constraints related specifically to the variable bounds
integer_varlist_crt = mip.VarList(
model=t_aux_model
) # the variable-nature (integrality) constraints for vars with vartype Integer/Binary
# fill the above sets with the constraints
for crt in self.model.constrs:
linear_constraints.add(crt.expr, name=crt.name)
for var in self.model.vars:
if var.lb != -mip.INF:
variable_bound_constraints.add(
var >= var.lb, name="{}_lb_crt".format(var.name)
)
if var.ub != mip.INF:
variable_bound_constraints.add(
var <= var.ub, name="{}_ub_crt".format(var.name)
)
for var in self.model.vars:
if var.var_type in (mip.INTEGER, mip.BINARY):
integer_varlist_crt.add(var)
status = "IIS"
# add all LC,BD to the incumbent, T= LC + BD
for (
var
) in (
self.model.vars
) # add all variables as if they were CONTINUOUS and without bounds (because this will be handled separately)
iis_aux_model.add_var(
name=var.name, lb=-mip.INF, ub=mip.INF, var_type=mip.CONTINUOUS
)
for crt in linear_constraints + variable_bound_constraints:
iis_aux_model.add_constr(crt.expr, name=crt.name, priority=crt.priority)
iis_aux_model.optimize()
if iis_aux_model.status == mip.OptimizationStatus.INFEASIBLE:
# if infeasible here, the model is already infeasible as a plain LP relaxation
return self.deletion_filter() # (STEP 2)
# add all the integer constraints to the model
iis_aux_model.vars.remove(
[var for var in integer_varlist_crt]
) # remove all integer variables
for var in integer_varlist_crt:
iis_aux_model.add_var(
name=var.name,
lb=-mip.INF,
ub=mip.INF,
var_type=var.var_type, # this will add the var with his original type
)
# filter IR constraints that create infeasibility (STEP 1)
for var in integer_varlist_crt:
iis_aux_model.vars.remove(iis_aux_model.var_by_name(var.name))
iis_aux_model.add_var(
name=var.name,
lb=-mip.INF,
ub=mip.INF,
var_type=mip.CONTINUOUS, # relax the integer constraint over var
)
iis_aux_model.optimize()
# if infeasible then update incumbent T = T-{ir_var_crt}
# else continue
# STEP 2 filter lc constraints
# STEP 3 filter BD constraints
# return IS o IIS
def deletion_filter_milp_lc_ir_bd(self) -> mip.ConstrList:
raise NotImplementedError # TODO
class ConflictRelaxer:
def __init__(self, model: mip.Model):
if model.status == mip.OptimizationStatus.LOADED:
print("model not runned yet, checking if feasible or not")
model.emphasis = 1 # feasibility
model.preprocess = 1 # -1 automatic, 0 off, 1 on.
model.optimize()
assert (
model.status == mip.OptimizationStatus.INFEASIBLE
), "model is not linear infeasible"
self.model = model
self.iis_num_iterations = 0
self.iis_iterations = []
self.relax_slack_iterations = []
@property
def slack_by_crt(self) -> dict:
answ = {}
for slack_dict_iter in self.relax_slack_iterations:
for crt_name in slack_dict_iter.keys():
if crt_name in answ.keys():
answ[crt_name] += slack_dict_iter[crt_name]
else:
answ[crt_name] = slack_dict_iter[crt_name]
return answ
def hierarchy_relaxer(
self,
relaxer_objective: str = "min_abs_slack_val",
default_priority: mip.constants.ConstraintPriority = mip.constants.ConstraintPriority.MANDATORY,
cbc_verbose: bool = False
) -> mip.Model:
"""hierarchy relaxer algorithm, it's gonna find a IIS and then relax it using the objective function defined (`relaxer_objective`) and then update the model
with the relaxed constraints. This process runs until there's not more IIS on the model.
Args:
relaxer_objective (str, optional): objective function of the relaxer model (IIS relaxer model). Defaults to 'min_abs_slack_val'.
default_priority (ConstraintPriority, optional): If a constraint does not have a supported substring priority in the name, it will assign a default priority.
Defaults to ConstraintPriority.MANDATORY.
Raises:
NonRelaxableModel: [description]
Returns:
mip.Model: relaxed model
"""
relaxed_model = self.model.copy()
relaxed_model._status = self.model._status # TODO solve this in a different way
# map unmapped constraints to the default priority
for crt in relaxed_model.constrs:
if not crt.priority:
crt.priority = default_priority
iis_it = 0
iis_dict = {}
taboo_list_iis = []
cf = ConflictFinder(relaxed_model)
while True:
# 1. find iis
iis = cf.find_iis(IISFinderAlgorithm.DELETION_FILTER)
self.iis_iterations.append([crt.name for crt in iis]) # track iteration
self.iis_num_iterations += 1 # track iteration
iis_priority_set = set([crt.priority for crt in iis])
# check if "relaxable" model mapping
if iis_priority_set == set([mip.constants.ConstraintPriority.MANDATORY]):
raise NonRelaxableModel("Infeasible model, is not possible to relax MANDATORY constraints")
# 2. relax iis
with contextlib.nullcontext() if cbc_verbose else ignore_output() as iow:
for level, relaxing_level in enumerate(sorted(iis_priority_set, key=lambda x: x.value)):
# highest case (raise exception)
if relaxing_level == mip.constants.ConstraintPriority.MANDATORY:
raise NonRelaxableModel("Infeasible model, is not possible to relax MANDATORY constraints")
try:
slack_dict = self.relax_iis(iis, relaxer_objective=relaxer_objective, lowest_priority=relaxing_level)
except SubRelaxationInfeasible as e:
warnings.warn(f'Warning relaxing more than one level, currently on l{level} : {relaxing_level}')
continue
else:
# relaxable iis, this is will continue with the next iteration then
break
self.relax_slack_iterations.append(slack_dict)
# 3. add the slack variables to the original problem
with contextlib.nullcontext() if cbc_verbose else ignore_output() as iow:
relaxed_model = self.relax_constraints(relaxed_model, slack_dict)
# 4. check if feasible
relaxed_model.emphasis = 1 # feasibility
with contextlib.nullcontext() if cbc_verbose else ignore_output() as iow:
relaxed_model.optimize()
if relaxed_model.status in [
mip.OptimizationStatus.FEASIBLE,
mip.OptimizationStatus.OPTIMAL,
]:
print("finished relaxation process !")
break
else:
print(
"relaxed the current IIS, still infeasible, searching for a new IIS to relax"
)
print("relaxed constraints {0}".format(list(slack_dict.keys())))
iis_it += 1
# print(f'found iis_{iis_it} = {[crt.name for crt in iis]}')
iis_dict[iis_it] = {}
iis_crt = [crt.name for crt in iis]
iis_dict[iis_it]['iis'] = [{'name': crt.name, 'priority': str(crt.priority).split('.')[1]} for crt in iis]
print(f'found iis_{iis_it} : len = {len(iis_crt)} in_taboo = {(iis_crt in taboo_list_iis)}')
taboo_list_iis.append(iis_crt)
iis_dict[iis_it]['slack'] = slack_dict
return relaxed_model
@classmethod
def relax_iis(
cls, iis: mip.ConstrList, relaxer_objective: str = "min_abs_slack_val", lowest_priority: 'mip.constants.ConstraintPriority' = None
) -> dict:
"""This function is the sub module that finds the optimum relaxation for an IIS, given a crt priority mapping and a objective function
Args:
iis (mip.ConstrList): IIS constraint list
relaxer_objective (str, optional): objective function to use when relaxing. Defaults to 'min_abs_slack_val'.
Returns:
dict: a slack variable dictionary with the value of the {constraint_name:slack.value} pair to be added to each constraint in order to make the IIS feasible
"""
relax_iis_model = mip.Model()
if lowest_priority is None:
lowest_priority = min([crt.priority for crt in iis])
to_relax_crts = [crt for crt in iis if crt.priority == lowest_priority or crt.priority < lowest_priority]
# create a model that only contains the iis
slack_vars = {}
abs_slack_vars = {}
abs_slack_cod_vars = {}
for crt in iis:
# print(crt.name, crt.priority)
for var in crt._Constr__model.vars:
relax_iis_model.add_var(
name=var.name,
lb=var.lb,
ub=var.ub,
var_type=var.var_type,
obj=var.obj,
)
if crt in to_relax_crts:
# if this is a -to be relax- constraint
slack_vars[crt.name] = relax_iis_model.add_var(
name="{0}__{1}".format(crt.name, "slack"),
lb=-mip.INF,
ub=mip.INF,
var_type=mip.CONTINUOUS,
)
abs_slack_vars[crt.name] = relax_iis_model.add_var(
name="{0}_abs".format(slack_vars[crt.name].name),
lb=0,
ub=mip.INF,
var_type=mip.CONTINUOUS,
)
# add relaxed constraint to model
relax_expr = crt.expr + slack_vars[crt.name]
relax_iis_model.add_constr(
relax_expr,
name="{}_relaxed".format(crt.name),
)
# add abs(slack) variable encoding constraints
relax_iis_model.add_constr(
abs_slack_vars[crt.name] >= slack_vars[crt.name],
name="{}_positive_min_bound".format(slack_vars[crt.name].name),
)
relax_iis_model.add_constr(
abs_slack_vars[crt.name] >= -slack_vars[crt.name],
name="{}_negative_min_bound".format(slack_vars[crt.name].name),
)
else:
# if not to be relaxed we added directly to the model
relax_iis_model.add_constr(
crt.expr, name="{}_original".format(crt.name), priority=crt.priority
)
# find the min abs value of the slack variables
relax_iis_model.objective = mip.xsum(list(abs_slack_vars.values()))
relax_iis_model.sense = mip.MINIMIZE
relax_iis_model.optimize()
if relax_iis_model.status == mip.OptimizationStatus.INFEASIBLE:
raise SubRelaxationInfeasible("sub relaxation model infeasible, this could mean that in the IIS the mandatory constraints are infeasible sometimes")
slack_dict = {}
for crt in to_relax_crts:
slack_dict[crt.name] = slack_vars[crt.name].x
return slack_dict
@classmethod
def relax_constraints(cls, relaxed_model: mip.Model, slack_dict: dict) -> mip.Model:
"""this method creates a modification of the model `relaxed_model` where all the constraints in the slack_dict are
modified in order to add the slack values to make the IIS disappear
Args:
relaxed_model (mip.Model): model to relax
slack_dict (dict): pairs {constraint_name: slack_var.value}
Returns:
mip.Model: a modification of the original model where all the constraints are modified with the slack values
"""
for crt_name in slack_dict.keys():
crt_original = relaxed_model.constr_by_name(crt_name)
relax_expr = crt_original.expr + slack_dict[crt_name]
relaxed_model.add_constr(
relax_expr, name=crt_original.name, priority=crt_original.priority
)
relaxed_model.remove(crt_original) # remove constraint
return relaxed_model
@contextlib.contextmanager
def ignore_output():
with tempfile.TemporaryFile() as f:
orig_std_out = os.dup(1)
os.dup2(f.fileno(), 1)
yield # pause the coroutine to execute the with code
os.dup2(orig_std_out, 1)
os.close(orig_std_out)
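# Minimal usage sketch (assumes `model` is an already-built, infeasible mip.Model):
#   cf = ConflictFinder(model)
#   iis = cf.find_iis(IISFinderAlgorithm.DELETION_FILTER)
#   relaxer = ConflictRelaxer(model)
#   relaxed_model = relaxer.hierarchy_relaxer()
#   print(relaxer.slack_by_crt)   # total slack added per relaxed constraint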
|
[
"os.dup2",
"os.close",
"os.dup",
"mip.VarList",
"warnings.warn",
"tempfile.TemporaryFile",
"contextlib.nullcontext",
"mip.ConstrList",
"mip.Model"
] |
[((3533, 3544), 'mip.Model', 'mip.Model', ([], {}), '()\n', (3542, 3544), False, 'import mip\n'), ((4305, 4344), 'mip.ConstrList', 'mip.ConstrList', ([], {'model': 'aux_model_testing'}), '(model=aux_model_testing)\n', (4319, 4344), False, 'import mip\n'), ((4364, 4399), 'mip.ConstrList', 'mip.ConstrList', ([], {'model': 'aux_model_iis'}), '(model=aux_model_iis)\n', (4378, 4399), False, 'import mip\n'), ((5945, 5980), 'mip.Model', 'mip.Model', ([], {'name': '"""t_auxiliary_model"""'}), "(name='t_auxiliary_model')\n", (5954, 5980), False, 'import mip\n'), ((6005, 6040), 'mip.Model', 'mip.Model', ([], {'name': '"""t_auxiliary_model"""'}), "(name='t_auxiliary_model')\n", (6014, 6040), False, 'import mip\n'), ((6071, 6104), 'mip.ConstrList', 'mip.ConstrList', ([], {'model': 't_aux_model'}), '(model=t_aux_model)\n', (6085, 6104), False, 'import mip\n'), ((6200, 6233), 'mip.ConstrList', 'mip.ConstrList', ([], {'model': 't_aux_model'}), '(model=t_aux_model)\n', (6214, 6233), False, 'import mip\n'), ((6366, 6396), 'mip.VarList', 'mip.VarList', ([], {'model': 't_aux_model'}), '(model=t_aux_model)\n', (6377, 6396), False, 'import mip\n'), ((15430, 15441), 'mip.Model', 'mip.Model', ([], {}), '()\n', (15439, 15441), False, 'import mip\n'), ((19403, 19427), 'tempfile.TemporaryFile', 'tempfile.TemporaryFile', ([], {}), '()\n', (19425, 19427), False, 'import tempfile\n'), ((19457, 19466), 'os.dup', 'os.dup', (['(1)'], {}), '(1)\n', (19463, 19466), False, 'import os\n'), ((19568, 19592), 'os.dup2', 'os.dup2', (['orig_std_out', '(1)'], {}), '(orig_std_out, 1)\n', (19575, 19592), False, 'import os\n'), ((19601, 19623), 'os.close', 'os.close', (['orig_std_out'], {}), '(orig_std_out)\n', (19609, 19623), False, 'import os\n'), ((1472, 1496), 'contextlib.nullcontext', 'contextlib.nullcontext', ([], {}), '()\n', (1494, 1496), False, 'import contextlib\n'), ((12232, 12256), 'contextlib.nullcontext', 'contextlib.nullcontext', ([], {}), '()\n', (12254, 12256), False, 'import contextlib\n'), ((13313, 13337), 'contextlib.nullcontext', 'contextlib.nullcontext', ([], {}), '()\n', (13335, 13337), False, 'import contextlib\n'), ((13571, 13595), 'contextlib.nullcontext', 'contextlib.nullcontext', ([], {}), '()\n', (13593, 13595), False, 'import contextlib\n'), ((5208, 5247), 'mip.ConstrList', 'mip.ConstrList', ([], {'model': 'aux_model_testing'}), '(model=aux_model_testing)\n', (5222, 5247), False, 'import mip\n'), ((12892, 12999), 'warnings.warn', 'warnings.warn', (['f"""Warning relaxing more than one level, currently on l{level} : {relaxing_level}"""'], {}), "(\n f'Warning relaxing more than one level, currently on l{level} : {relaxing_level}'\n )\n", (12905, 12999), False, 'import warnings\n')]
|
import re
import xlsxwriter
def parse_menu_to_excel(filename,menu_dict,days_dict,results,goal_dict,food_database,reversed_ingredient_dict,grocery_dict):
# making a temporary dict to map dates and columns in excel:
temp_dates_dict = {}
i=0
for key in days_dict.keys():
temp_dates_dict[days_dict[key]['date_var'].get()] = i
i += 1
temp_meals_dict = {}
i = 0
for meal in ['Breakfast', 'Lunch','Dinner']:
temp_meals_dict[meal] = i
i += 1
# converting the menu-dict to dates and lunches
for item in list(menu_dict.keys()):
new_key = tuple(tuple(elem.replace('M1', 'Breakfast').replace('M2', 'Lunch').replace('M3', 'Dinner').replace('D1', days_dict['D1']['date_var'].get()).replace('D2',days_dict['D2']['date_var'].get()).replace('D3',days_dict['D3']['date_var'].get()).replace('D4',days_dict['D4']['date_var'].get()).replace('D5',days_dict['D5']['date_var'].get()).replace('D6',days_dict['D6']['date_var'].get()).replace('D7',days_dict['D7']['date_var'].get())
for elem in tup) for tup in item)
menu_dict[new_key] = menu_dict[item]
menu_dict.pop(item)
# putting it into an excel file:
workbook = xlsxwriter.Workbook(filename)
separator_format = workbook.add_format({'bg_color': '#000000'})
# make worksheets
menu_worksheet = workbook.add_worksheet(f"Menu - {days_dict['D1']['date_var'].get()} to {days_dict['D7']['date_var'].get()}") # for menu
temp_worksheet_dict = {}
global_groceries_worksheet = workbook.add_worksheet("your grocery list")
for group in list(menu_dict.keys()):
temp_worksheet_dict[group] = workbook.add_worksheet(f"{list(menu_dict[group].keys())[0][:31]}")
# print the menu to menu-sheet
col = 0
for key in temp_dates_dict:
menu_worksheet.write(0, col, key)
col += 1
row = 1
for key in temp_meals_dict:
menu_worksheet.write(row, 0, key)
row += 1
for group in menu_dict.keys():
for slot in group:
menu_worksheet.write(temp_meals_dict[slot[1]] + 1,temp_dates_dict[slot[0]] + 1, str(list(menu_dict[group].keys())[0]))
for i in range(0,8):
menu_worksheet.write(4,i,"",separator_format)
menu_worksheet.write(5,0, "Results:")
row = 5
for metric in results.keys():
menu_worksheet.write(row,1,str(f"{metric}: {round(results[metric],2)}"))
row += 1
menu_worksheet.write(5,2, "Goals:")
row = 6
for metric in goal_dict.keys():
menu_worksheet.write(row,3,str(f"{metric}: {round(goal_dict[metric],2)}"))
row += 1
# writing the global grocery-list:
row = 1
col = 0
global_groceries_worksheet.write(0,0,"Your grocery list:")
for ingredient in grocery_dict.keys():
ingredient_id = reversed_ingredient_dict[ingredient]
global_groceries_worksheet.write(row, col, ingredient)
global_groceries_worksheet.write(row, col + 1, str(grocery_dict[ingredient]))
global_groceries_worksheet.write(row, col + 2, str(food_database['ingredients'][ingredient_id]['unit']))
row += 1
# writing the recipe-lists:
for group in menu_dict.keys():
temp_worksheet_dict[group].write(0,0, f"Ingredient list for {list(menu_dict[group].keys())[0]}:")
row = 1
col = 0
for recipe in menu_dict[group].keys():
for ingredient in menu_dict[group][recipe].keys():
ingredient_id = reversed_ingredient_dict[ingredient]
temp_worksheet_dict[group].write(row, col, ingredient)
temp_worksheet_dict[group].write(row, col + 1, str(menu_dict[group][recipe][ingredient]))
temp_worksheet_dict[group].write(row, col + 2, str(food_database['ingredients'][ingredient_id]['unit']))
row += 1
workbook.close()
|
[
"xlsxwriter.Workbook"
] |
[((1222, 1251), 'xlsxwriter.Workbook', 'xlsxwriter.Workbook', (['filename'], {}), '(filename)\n', (1241, 1251), False, 'import xlsxwriter\n')]
|
from channels import Group
# websocket.connect
def ws_add(message):
Group("chat").add(message.reply_channel)
# websocket.receive
def ws_message(message):
Group("chat").send({
"text": message.content['text'],
})
# websocket.disconnect
def ws_disconnect(message):
Group("chat").discard(message.reply_channel)
|
[
"channels.Group"
] |
[((73, 86), 'channels.Group', 'Group', (['"""chat"""'], {}), "('chat')\n", (78, 86), False, 'from channels import Group\n'), ((164, 177), 'channels.Group', 'Group', (['"""chat"""'], {}), "('chat')\n", (169, 177), False, 'from channels import Group\n'), ((289, 302), 'channels.Group', 'Group', (['"""chat"""'], {}), "('chat')\n", (294, 302), False, 'from channels import Group\n')]
|
from copy import deepcopy
import numpy as np
import pybullet as p
import gym
from gym import spaces
from env.robot import Manipulator
from env.work import Work
class Env():
def __init__(self, reward,
step_max_pos = 0.002,
step_max_orn = 0.02,
initial_pos_noise = 0.001,
initial_orn_noise = 0.001,
step_pos_noise = 0.0002,
step_orn_noise = 0.0002):
p.connect(p.GUI)
p.setPhysicsEngineParameter(enableFileCaching=0)
p.setRealTimeSimulation(False)
p.setGravity(0, 0, -9.8)
p.configureDebugVisualizer(p.COV_ENABLE_GUI, 0)
p.setPhysicsEngineParameter(contactBreakingThreshold=0.001)
# Init
self._is_init_env = False
# Plane
self.plane_pos = [0, 0, -0.1]
p.loadURDF("urdf/plane/plane.urdf", self.plane_pos)
self.reward = reward
self.max_initial_pos_noise = initial_pos_noise
self.max_initial_orn_noise = initial_orn_noise
self.max_step_pos_noise = step_pos_noise
self.max_step_orn_noise = step_orn_noise
# robot
self.step_max_pos = step_max_pos
self.step_max_orn = step_max_orn
self.inv_scaled_force_coef = 5000
# for learning
self.action_space = spaces.Box(
low=-1,
high=1,
shape=(6,),
dtype=np.float32
)
self.observation_space = spaces.Box(
low=-1,
high=1,
shape=(12,),
dtype=np.float32
)
self._act_rel_tcp_pose = [0, 0, 0, 0, 0, 0]
def init_env(self, mode = 'rel',
robot_tcp_pose = [0, 0, 0, 0, 0, 0],
robot_base_pose = [0, 0, 0, 0, 0, 0],
robot_tool_pose = [0, 0, 0, 0, 0, 0],
work_base_pose = [0, 0, 0, 0, 0, 0]):
if self._is_init_env == False:
# Load work
self.work = Work(base_pose = work_base_pose)
self.act_abs_work_pose = work_base_pose
# Load robot
self.robot = Manipulator(tool_pose=robot_tool_pose, base_pose=robot_base_pose)
self._reset_robot_pose(mode=mode, tcp_pose=robot_tcp_pose)
self.initial_pos_noise = np.random.uniform(-self.max_initial_pos_noise,
self.max_initial_pos_noise, 3)
self.initial_orn_noise = np.random.uniform(-self.max_initial_orn_noise,
self.max_initial_orn_noise, 3)
self._is_init_env = True
return self.observe_state(mode = mode)
def _reset_robot_pose(self, mode='rel', tcp_pose=[0, 0, 0, 0, 0, 0]):
abs_tcp_pose = np.zeros(6)
if mode == 'rel':
abs_tcp_pose = np.array(self.act_abs_work_pose) + np.array(tcp_pose)
elif mode == 'abs':
abs_tcp_pose = tcp_pose
else:
print("ERROR(enviroment.py): mode is not correct.")
abs_tcp_pose = [0, 0, 0, 0, 0, 0]
self.robot.reset_pose(abs_tcp_pose=abs_tcp_pose)
def reset(self,
mode = 'rel',
tcp_pose = [0, 0, 0, 0, 0, 0],
base_pose = [0, 0, 0, 0, 0, 0],
tool_pose = [0, 0, 0, 0, 0, 0],
work_pose = [0, 0, 0, 0, 0, 0]):
if self._is_init_env == False:
return self.init_env(mode = mode,
robot_tcp_pose = tcp_pose,
robot_base_pose = base_pose,
robot_tool_pose = tool_pose,
work_base_pose = work_pose)
        # For faster processing
'''
if np.linalg.norm( np.array(tool_pose) - self.prev_tool_pose ) < 1e-6:
else:
'''
# Reset env
p.resetSimulation()
# Load Plane
p.loadURDF("urdf/plane/plane.urdf", self.plane_pos)
# Reset work
self.work.reset(base_pose = work_pose)
# Reset Robot
self.robot.reset_base(base_pose=base_pose, tool_pose=tool_pose)
self._reset_robot_pose(mode='rel', tcp_pose=tcp_pose)
self.initial_pos_noise = np.random.uniform(-self.max_initial_pos_noise,
self.max_initial_pos_noise, 3)
self.initial_orn_noise = np.random.uniform(-self.max_initial_orn_noise,
self.max_initial_orn_noise, 3)
self.prev_tool_pose = tool_pose
return self.observe_state(mode = mode)
def destory(self):
p.disconnect()
def step(self, action, step):
        # This builds the command pose, so the true (noise-free) values are preferable here
cmd_abs_tcp_pose = np.zeros(6)
cmd_abs_tcp_pose[:3] = np.array(self._act_abs_tcp_pose[:3]) + np.array(action[:3])
cmd_abs_tcp_pose[3:6] = np.array(self._act_abs_tcp_pose[3:6]) + np.array(action[3:6])
print('next_pose:', cmd_abs_tcp_pose)
self.robot.move_to_pose(cmd_abs_tcp_pose, mode='direct')
pose, force, success, out_range = self.decision()
r = self.calc_reward(relative_pose = pose,
success = success,
out_range = out_range,
act_step = step)
done = success or out_range
return np.concatenate([pose, force]), r, done, success
def decision(self):
'''
observe
act_abs_tcp_pose
act_rel_tcp_pose
act_abs_work_pose
act_force
'''
act_pose_noisy, act_force = self.observe_state(mode='rel')
scaled_act_force = act_force / self.inv_scaled_force_coef
        # [Note] success is evaluated with the true (noise-free) values here
success_range_of_pos = 0.003
success_range_of_orn = 0.04
success = (np.linalg.norm(self._act_rel_tcp_pose[:3]) <= success_range_of_pos and \
np.linalg.norm(self._act_rel_tcp_pose[3:]) <= success_range_of_orn)
        # [Note] evaluating with the true values here may not be correct.
out_range_of_pos = 0.1
out_range_of_orn = 0.8
out_range = any([abs(pos) > out_range_of_pos for pos in act_pose_noisy[:3]]) \
or any([abs(orn) > out_range_of_orn for orn in act_pose_noisy[3:6]])
return act_pose_noisy, scaled_act_force, success, out_range
def observe_state(self, mode='rel'):
self._act_abs_tcp_pose, self.act_force, _ = self.robot.get_state()
self._act_abs_work_pose = self.work.get_state()
self._act_rel_tcp_pose = np.array(self._act_abs_tcp_pose) - np.array(self._act_abs_work_pose)
'''
        Noise handling
'''
act_rel_tcp_pose_noisy = np.zeros(6)
act_rel_tcp_pose_noisy[:3] = self._act_rel_tcp_pose[:3] + self.initial_pos_noise
act_rel_tcp_pose_noisy[3:6] = self._act_rel_tcp_pose[3:6] + self.initial_orn_noise
act_rel_tcp_pose_noisy[:3] += np.random.uniform(-self.max_step_pos_noise,
self.max_step_pos_noise, 3)
act_rel_tcp_pose_noisy[3:6] += np.random.uniform(-self.max_step_orn_noise,
self.max_step_orn_noise, 3)
if mode == 'rel':
return act_rel_tcp_pose_noisy, self.act_force
elif mode == 'abs':
act_abs_tcp_pose_noisy = np.zeros(6)
act_abs_tcp_pose_noisy[:3] = self._act_abs_tcp_pose[:3] + self.initial_pos_noise
act_abs_tcp_pose_noisy[3:6] = self._act_abs_tcp_pose[3:6] + self.initial_orn_noise
act_abs_work_pose_noisy = np.zeros(6)
act_abs_work_pose_noisy[:3] = self._act_abs_work_pose[:3] + self.initial_pos_noise
act_abs_work_pose_noisy[3:6] = self._act_abs_work_pose[3:6] + self.initial_orn_noise
return act_abs_tcp_pose_noisy, act_abs_work_pose_noisy, self.act_force
def calc_reward(self, relative_pose, success, out_range, act_step):
return self.reward.reward_function(relative_pose, success, out_range, act_step)
def scale_action(self, action):
scaled_action = deepcopy(action)
scaled_action[:3]*=self.step_max_pos
scaled_action[3:]*=self.step_max_orn
return scaled_action
|
[
"env.work.Work",
"pybullet.resetSimulation",
"copy.deepcopy",
"pybullet.connect",
"numpy.linalg.norm",
"pybullet.setGravity",
"gym.spaces.Box",
"numpy.array",
"pybullet.setPhysicsEngineParameter",
"pybullet.configureDebugVisualizer",
"numpy.zeros",
"pybullet.disconnect",
"numpy.concatenate",
"numpy.random.uniform",
"pybullet.setRealTimeSimulation",
"pybullet.loadURDF",
"env.robot.Manipulator"
] |
[((458, 474), 'pybullet.connect', 'p.connect', (['p.GUI'], {}), '(p.GUI)\n', (467, 474), True, 'import pybullet as p\n'), ((483, 531), 'pybullet.setPhysicsEngineParameter', 'p.setPhysicsEngineParameter', ([], {'enableFileCaching': '(0)'}), '(enableFileCaching=0)\n', (510, 531), True, 'import pybullet as p\n'), ((540, 570), 'pybullet.setRealTimeSimulation', 'p.setRealTimeSimulation', (['(False)'], {}), '(False)\n', (563, 570), True, 'import pybullet as p\n'), ((579, 603), 'pybullet.setGravity', 'p.setGravity', (['(0)', '(0)', '(-9.8)'], {}), '(0, 0, -9.8)\n', (591, 603), True, 'import pybullet as p\n'), ((612, 659), 'pybullet.configureDebugVisualizer', 'p.configureDebugVisualizer', (['p.COV_ENABLE_GUI', '(0)'], {}), '(p.COV_ENABLE_GUI, 0)\n', (638, 659), True, 'import pybullet as p\n'), ((668, 727), 'pybullet.setPhysicsEngineParameter', 'p.setPhysicsEngineParameter', ([], {'contactBreakingThreshold': '(0.001)'}), '(contactBreakingThreshold=0.001)\n', (695, 727), True, 'import pybullet as p\n'), ((841, 892), 'pybullet.loadURDF', 'p.loadURDF', (['"""urdf/plane/plane.urdf"""', 'self.plane_pos'], {}), "('urdf/plane/plane.urdf', self.plane_pos)\n", (851, 892), True, 'import pybullet as p\n'), ((1324, 1380), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-1)', 'high': '(1)', 'shape': '(6,)', 'dtype': 'np.float32'}), '(low=-1, high=1, shape=(6,), dtype=np.float32)\n', (1334, 1380), False, 'from gym import spaces\n'), ((1472, 1529), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-1)', 'high': '(1)', 'shape': '(12,)', 'dtype': 'np.float32'}), '(low=-1, high=1, shape=(12,), dtype=np.float32)\n', (1482, 1529), False, 'from gym import spaces\n'), ((2752, 2763), 'numpy.zeros', 'np.zeros', (['(6)'], {}), '(6)\n', (2760, 2763), True, 'import numpy as np\n'), ((3800, 3819), 'pybullet.resetSimulation', 'p.resetSimulation', ([], {}), '()\n', (3817, 3819), True, 'import pybullet as p\n'), ((3849, 3900), 'pybullet.loadURDF', 'p.loadURDF', (['"""urdf/plane/plane.urdf"""', 'self.plane_pos'], {}), "('urdf/plane/plane.urdf', self.plane_pos)\n", (3859, 3900), True, 'import pybullet as p\n'), ((4160, 4237), 'numpy.random.uniform', 'np.random.uniform', (['(-self.max_initial_pos_noise)', 'self.max_initial_pos_noise', '(3)'], {}), '(-self.max_initial_pos_noise, self.max_initial_pos_noise, 3)\n', (4177, 4237), True, 'import numpy as np\n'), ((4323, 4400), 'numpy.random.uniform', 'np.random.uniform', (['(-self.max_initial_orn_noise)', 'self.max_initial_orn_noise', '(3)'], {}), '(-self.max_initial_orn_noise, self.max_initial_orn_noise, 3)\n', (4340, 4400), True, 'import numpy as np\n'), ((4575, 4589), 'pybullet.disconnect', 'p.disconnect', ([], {}), '()\n', (4587, 4589), True, 'import pybullet as p\n'), ((4681, 4692), 'numpy.zeros', 'np.zeros', (['(6)'], {}), '(6)\n', (4689, 4692), True, 'import numpy as np\n'), ((6605, 6616), 'numpy.zeros', 'np.zeros', (['(6)'], {}), '(6)\n', (6613, 6616), True, 'import numpy as np\n'), ((6836, 6907), 'numpy.random.uniform', 'np.random.uniform', (['(-self.max_step_pos_noise)', 'self.max_step_pos_noise', '(3)'], {}), '(-self.max_step_pos_noise, self.max_step_pos_noise, 3)\n', (6853, 6907), True, 'import numpy as np\n'), ((6999, 7070), 'numpy.random.uniform', 'np.random.uniform', (['(-self.max_step_orn_noise)', 'self.max_step_orn_noise', '(3)'], {}), '(-self.max_step_orn_noise, self.max_step_orn_noise, 3)\n', (7016, 7070), True, 'import numpy as np\n'), ((8019, 8035), 'copy.deepcopy', 'deepcopy', (['action'], {}), '(action)\n', (8027, 8035), False, 'from copy import deepcopy\n'), ((1964, 
1994), 'env.work.Work', 'Work', ([], {'base_pose': 'work_base_pose'}), '(base_pose=work_base_pose)\n', (1968, 1994), False, 'from env.work import Work\n'), ((2099, 2164), 'env.robot.Manipulator', 'Manipulator', ([], {'tool_pose': 'robot_tool_pose', 'base_pose': 'robot_base_pose'}), '(tool_pose=robot_tool_pose, base_pose=robot_base_pose)\n', (2110, 2164), False, 'from env.robot import Manipulator\n'), ((2273, 2350), 'numpy.random.uniform', 'np.random.uniform', (['(-self.max_initial_pos_noise)', 'self.max_initial_pos_noise', '(3)'], {}), '(-self.max_initial_pos_noise, self.max_initial_pos_noise, 3)\n', (2290, 2350), True, 'import numpy as np\n'), ((2440, 2517), 'numpy.random.uniform', 'np.random.uniform', (['(-self.max_initial_orn_noise)', 'self.max_initial_orn_noise', '(3)'], {}), '(-self.max_initial_orn_noise, self.max_initial_orn_noise, 3)\n', (2457, 2517), True, 'import numpy as np\n'), ((4724, 4760), 'numpy.array', 'np.array', (['self._act_abs_tcp_pose[:3]'], {}), '(self._act_abs_tcp_pose[:3])\n', (4732, 4760), True, 'import numpy as np\n'), ((4763, 4783), 'numpy.array', 'np.array', (['action[:3]'], {}), '(action[:3])\n', (4771, 4783), True, 'import numpy as np\n'), ((4816, 4853), 'numpy.array', 'np.array', (['self._act_abs_tcp_pose[3:6]'], {}), '(self._act_abs_tcp_pose[3:6])\n', (4824, 4853), True, 'import numpy as np\n'), ((4856, 4877), 'numpy.array', 'np.array', (['action[3:6]'], {}), '(action[3:6])\n', (4864, 4877), True, 'import numpy as np\n'), ((5297, 5326), 'numpy.concatenate', 'np.concatenate', (['[pose, force]'], {}), '([pose, force])\n', (5311, 5326), True, 'import numpy as np\n'), ((6465, 6497), 'numpy.array', 'np.array', (['self._act_abs_tcp_pose'], {}), '(self._act_abs_tcp_pose)\n', (6473, 6497), True, 'import numpy as np\n'), ((6500, 6533), 'numpy.array', 'np.array', (['self._act_abs_work_pose'], {}), '(self._act_abs_work_pose)\n', (6508, 6533), True, 'import numpy as np\n'), ((2817, 2849), 'numpy.array', 'np.array', (['self.act_abs_work_pose'], {}), '(self.act_abs_work_pose)\n', (2825, 2849), True, 'import numpy as np\n'), ((2852, 2870), 'numpy.array', 'np.array', (['tcp_pose'], {}), '(tcp_pose)\n', (2860, 2870), True, 'import numpy as np\n'), ((5756, 5798), 'numpy.linalg.norm', 'np.linalg.norm', (['self._act_rel_tcp_pose[:3]'], {}), '(self._act_rel_tcp_pose[:3])\n', (5770, 5798), True, 'import numpy as np\n'), ((5849, 5891), 'numpy.linalg.norm', 'np.linalg.norm', (['self._act_rel_tcp_pose[3:]'], {}), '(self._act_rel_tcp_pose[3:])\n', (5863, 5891), True, 'import numpy as np\n'), ((7272, 7283), 'numpy.zeros', 'np.zeros', (['(6)'], {}), '(6)\n', (7280, 7283), True, 'import numpy as np\n'), ((7510, 7521), 'numpy.zeros', 'np.zeros', (['(6)'], {}), '(6)\n', (7518, 7521), True, 'import numpy as np\n')]
|
import _thread
import time
import threading
#
# def print_time(threadName,delay):
# count = 0;
# while count < 5:
# time.sleep(delay)
# count += 1;
# print("%s: %s" % (threadName, time.ctime(time.time())))
#
# try:
# _thread.start_new(print_time,("Thread-1",2,))
# _thread.start_new(print_time("Thread-2",4))
# except:
# print("error")
#
# while 1:
# pass
# Python 3 supports threads through two standard libraries: _thread and threading.
# _thread provides low-level, primitive threads and a simple lock; its functionality is limited compared with the threading module.
# Besides all the methods of the _thread module, the threading module also provides:
# threading.currentThread(): return the current Thread object.
# threading.enumerate(): return a list of currently running threads, i.e. threads after start() and before termination; threads not yet started or already finished are excluded.
# threading.activeCount(): return the number of running threads; same result as len(threading.enumerate()).
# Besides these module-level functions, the threading module also provides the Thread class, with the following methods:
# run(): the method representing the thread's activity.
# start(): start the thread's activity.
# join([time]): if main thread A creates child thread B and calls B.join(), A waits at that point
# until B finishes before continuing. The time argument is the maximum time to wait; once it elapses
# the thread is reclaimed whether or not it has finished, and the caller continues.
# isAlive(): return whether the thread is alive.
# getName(): return the thread's name.
# setName(): set the thread's name.
exitFlag = 0
class MyThread(threading.Thread):
def __init__(self,threadID,name,counter):
threading.Thread.__init__(self)
self.threadID = threadID
self.name = name
self.counter = counter
def run(self):
print ("开始线程:" + self.name)
print_time(self.name, 2,self.counter)
print ("退出线程:" + self.name)
def print_time(threadName, delay, counter):
while counter:
# if exitFlag:
# threadName.exit()
time.sleep(delay)
print("%s: %s" % (threadName, time.ctime(time.time())))
counter -= 1
# Create new threads
thread1 = MyThread(1, "Thread-1", 5)
thread2 = MyThread(2, "Thread-2", 5)
# Start the new threads
thread1.start()
thread2.start()
thread1.join()
thread2.join()
print ("退出主线程")
|
[
"threading.Thread.__init__",
"time.time",
"time.sleep"
] |
[((1161, 1192), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (1186, 1192), False, 'import threading\n'), ((1547, 1564), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (1557, 1564), False, 'import time\n'), ((1614, 1625), 'time.time', 'time.time', ([], {}), '()\n', (1623, 1625), False, 'import time\n')]
|
import requests
from bbdata.config import output_api_url
from bbdata.util import handle_response
class Objects:
base_path = "/objects"
auth = None
def __init__(self, auth):
self.auth = auth
def get_all(self, tags=None, search=None, page=None, per_page=None,
writable=False):
"""
Get the list of accessible objects.
GET /objects
https://bbdata.daplab.ch/api/#objects_get
"""
params = {
"tags": tags,
"search": search,
"page": page,
"perPage": per_page,
"writable": writable,
}
url = output_api_url + self.base_path
r = requests.get(url, params, headers=self.auth.headers)
return handle_response(r.status_code, r.json())
def put(self, name, unit_symbol, owner, description=None):
"""
Create a new object.
PUT /objects
https://bbdata.daplab.ch/api/#objects_put
"""
json = {
"name": name,
"description": description,
"unitSymbol": unit_symbol,
'owner': owner
}
url = output_api_url + self.base_path
r = requests.put(url, json=json, headers=self.auth.headers)
return handle_response(r.status_code, r.json())
def get(self, object_id):
"""
Get an object.
GET /objects/{objectIs}
https://bbdata.daplab.ch/api/#objects__objectid__get
"""
url = output_api_url + self.base_path + "/" + str(object_id)
r = requests.get(url, headers=self.auth.headers)
# return ObjectResponse(r.json())
return handle_response(r.status_code, r.json())
def post(self, object_id, data):
"""
Edit the name and/or the description of the object.
Only the properties appearing in the body will be modified.
POST /objects/{objectId}
https://bbdata.daplab.ch/api/#objects__objectid__post
"""
# TODO The data to send isn't define in the API Docs
url = output_api_url + self.base_path + "/" + str(object_id)
r = requests.post(url, data, headers=self.auth.headers)
return handle_response(r.status_code, r.json())
def delete(self, object_id):
"""
Delete the object with the given id
POST /objects/{objectId}
https://bbdata.daplab.ch/api/#objects__objectid__delete
"""
# TODO This method is in the Postman profile but isn't in the docs
url = output_api_url + self.base_path + "/" + str(object_id)
r = requests.delete(url, headers=self.auth.headers)
return handle_response(r.status_code, r.json())
def post_disable(self, object_id):
"""
Disable this object. All associated tokens will be removed.
POST /objects/{objectId}/disable
https://bbdata.daplab.ch/api/#objects__objectid__disable_post
"""
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/disable"
r = requests.post(url, headers=self.auth.headers)
return handle_response(r.status_code, True)
def post_enable(self, object_id):
"""
Enable this object.
POST /objects/{objectId}/enable
https://bbdata.daplab.ch/api/#objects__objectid__enable_post
"""
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/enable"
r = requests.post(url, headers=self.auth.headers)
return handle_response(r.status_code, True)
def get_tokens(self, object_id, description=None):
"""
Get the list of tokens for the object. A token is used to submit new
measures (see input-api).
An optional description can be passed in the
body (max 65 characters).
GET /objects/{objectId}/tokens
https://bbdata.daplab.ch/api/#objects__objectid__tokens_get
"""
# TODO The API docs says it's possible to pass an optional description
# but it looks like it's a mistake for a GET request...
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/tokens"
json = {
"description": description
}
r = requests.get(url, json, headers=self.auth.headers)
return handle_response(r.status_code, r.json())
def put_tokens(self, object_id):
"""
Generate a new secured token.
PUT /objects/{objectId}/tokens
https://bbdata.daplab.ch/api/#objects__objectid__tokens_put
"""
# TODO The optional description should probably be added in this
# method
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/tokens"
r = requests.put(url, headers=self.auth.headers)
return handle_response(r.status_code, r.json())
def post_tokens(self, object_id, description):
"""
Edit the token's description.
POST /objects/{objectId}/tokens
https://bbdata.daplab.ch/api/#objects__objectid__tokens_post
"""
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/tokens"
json = {
"description": description
}
r = requests.post(url, json=json, headers=self.auth.headers)
return handle_response(r.status_code, r.json())
def delete_tokens(self, object_id, token_id):
"""
Revoke a token.
DELETE /objects/{objectId}/tokens
https://bbdata.daplab.ch/api/#objects__objectid__tokens_delete
"""
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/tokens"
params = {
"tokenId": token_id
}
r = requests.delete(url, params=params, headers=self.auth.headers)
return handle_response(r.status_code, True)
def put_tags(self, object_id, tags):
"""
Add tags to the object.
PUT /objects/{objectId}/tags
https://bbdata.daplab.ch/api/#objects__objectid__tags_put
"""
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/tags"
params = {
"tags": tags
}
r = requests.put(url, params=params, headers=self.auth.headers)
return handle_response(r.status_code, True)
def delete_tags(self, object_id, tags):
"""
Remove tags.
DELETE /objects/{objectId}/tags
https://bbdata.daplab.ch/api/#objects__objectid__tags_delete
"""
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/tags"
params = {
"tags": tags
}
r = requests.put(url, params=params, headers=self.auth.headers)
return handle_response(r.status_code, True)
def get_comments(self, object_id):
"""
Get all comments attached to this object. Use the /comments endpoint
for more actions.
GET /objects/{objectId}/comments
https://bbdata.daplab.ch/api/#objects__objectid__comments_get
"""
url = output_api_url + self.base_path + "/" + str(object_id) \
+ "/comments"
r = requests.get(url, headers=self.auth.headers)
return handle_response(r.status_code, r.json())
|
[
"requests.post",
"bbdata.util.handle_response",
"requests.get",
"requests.delete",
"requests.put"
] |
[((696, 748), 'requests.get', 'requests.get', (['url', 'params'], {'headers': 'self.auth.headers'}), '(url, params, headers=self.auth.headers)\n', (708, 748), False, 'import requests\n'), ((1211, 1266), 'requests.put', 'requests.put', (['url'], {'json': 'json', 'headers': 'self.auth.headers'}), '(url, json=json, headers=self.auth.headers)\n', (1223, 1266), False, 'import requests\n'), ((1576, 1620), 'requests.get', 'requests.get', (['url'], {'headers': 'self.auth.headers'}), '(url, headers=self.auth.headers)\n', (1588, 1620), False, 'import requests\n'), ((2147, 2198), 'requests.post', 'requests.post', (['url', 'data'], {'headers': 'self.auth.headers'}), '(url, data, headers=self.auth.headers)\n', (2160, 2198), False, 'import requests\n'), ((2611, 2658), 'requests.delete', 'requests.delete', (['url'], {'headers': 'self.auth.headers'}), '(url, headers=self.auth.headers)\n', (2626, 2658), False, 'import requests\n'), ((3067, 3112), 'requests.post', 'requests.post', (['url'], {'headers': 'self.auth.headers'}), '(url, headers=self.auth.headers)\n', (3080, 3112), False, 'import requests\n'), ((3128, 3164), 'bbdata.util.handle_response', 'handle_response', (['r.status_code', '(True)'], {}), '(r.status_code, True)\n', (3143, 3164), False, 'from bbdata.util import handle_response\n'), ((3473, 3518), 'requests.post', 'requests.post', (['url'], {'headers': 'self.auth.headers'}), '(url, headers=self.auth.headers)\n', (3486, 3518), False, 'import requests\n'), ((3534, 3570), 'bbdata.util.handle_response', 'handle_response', (['r.status_code', '(True)'], {}), '(r.status_code, True)\n', (3549, 3570), False, 'from bbdata.util import handle_response\n'), ((4276, 4326), 'requests.get', 'requests.get', (['url', 'json'], {'headers': 'self.auth.headers'}), '(url, json, headers=self.auth.headers)\n', (4288, 4326), False, 'import requests\n'), ((4789, 4833), 'requests.put', 'requests.put', (['url'], {'headers': 'self.auth.headers'}), '(url, headers=self.auth.headers)\n', (4801, 4833), False, 'import requests\n'), ((5287, 5343), 'requests.post', 'requests.post', (['url'], {'json': 'json', 'headers': 'self.auth.headers'}), '(url, json=json, headers=self.auth.headers)\n', (5300, 5343), False, 'import requests\n'), ((5781, 5843), 'requests.delete', 'requests.delete', (['url'], {'params': 'params', 'headers': 'self.auth.headers'}), '(url, params=params, headers=self.auth.headers)\n', (5796, 5843), False, 'import requests\n'), ((5859, 5895), 'bbdata.util.handle_response', 'handle_response', (['r.status_code', '(True)'], {}), '(r.status_code, True)\n', (5874, 5895), False, 'from bbdata.util import handle_response\n'), ((6257, 6316), 'requests.put', 'requests.put', (['url'], {'params': 'params', 'headers': 'self.auth.headers'}), '(url, params=params, headers=self.auth.headers)\n', (6269, 6316), False, 'import requests\n'), ((6332, 6368), 'bbdata.util.handle_response', 'handle_response', (['r.status_code', '(True)'], {}), '(r.status_code, True)\n', (6347, 6368), False, 'from bbdata.util import handle_response\n'), ((6728, 6787), 'requests.put', 'requests.put', (['url'], {'params': 'params', 'headers': 'self.auth.headers'}), '(url, params=params, headers=self.auth.headers)\n', (6740, 6787), False, 'import requests\n'), ((6803, 6839), 'bbdata.util.handle_response', 'handle_response', (['r.status_code', '(True)'], {}), '(r.status_code, True)\n', (6818, 6839), False, 'from bbdata.util import handle_response\n'), ((7228, 7272), 'requests.get', 'requests.get', (['url'], {'headers': 'self.auth.headers'}), '(url, 
headers=self.auth.headers)\n', (7240, 7272), False, 'import requests\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2016-11-15 07:06
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('resolwe_bio_kb', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='feature',
name='aliases',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=256), blank=True, default=[], size=None),
),
migrations.AlterField(
model_name='feature',
name='name',
field=models.CharField(max_length=1024),
),
migrations.AlterField(
model_name='feature',
name='sub_type',
field=models.CharField(choices=[(b'protein-coding', b'Protein-coding'), (b'pseudo', b'Pseudo'), (b'rRNA', b'rRNA'), (b'ncRNA', b'ncRNA'), (b'snRNA', b'snRNA'), (b'snoRNA', b'snoRNA'), (b'tRNA', b'tRNA'), (b'asRNA', b'asRNA'), (b'other', b'Other'), (b'unknown', b'Unknown')], max_length=20),
),
]
|
[
"django.db.models.CharField"
] |
[((679, 712), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1024)'}), '(max_length=1024)\n', (695, 712), False, 'from django.db import migrations, models\n'), ((837, 1139), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[(b'protein-coding', b'Protein-coding'), (b'pseudo', b'Pseudo'), (b'rRNA',\n b'rRNA'), (b'ncRNA', b'ncRNA'), (b'snRNA', b'snRNA'), (b'snoRNA',\n b'snoRNA'), (b'tRNA', b'tRNA'), (b'asRNA', b'asRNA'), (b'other',\n b'Other'), (b'unknown', b'Unknown')]", 'max_length': '(20)'}), "(choices=[(b'protein-coding', b'Protein-coding'), (\n b'pseudo', b'Pseudo'), (b'rRNA', b'rRNA'), (b'ncRNA', b'ncRNA'), (\n b'snRNA', b'snRNA'), (b'snoRNA', b'snoRNA'), (b'tRNA', b'tRNA'), (\n b'asRNA', b'asRNA'), (b'other', b'Other'), (b'unknown', b'Unknown')],\n max_length=20)\n", (853, 1139), False, 'from django.db import migrations, models\n'), ((490, 522), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (506, 522), False, 'from django.db import migrations, models\n')]
|
import pytest
from click.testing import CliRunner
from secedgar.cli import daily, filing
from secedgar.utils.exceptions import FilingTypeError
def run_cli_command(cli, user_input, directory, catch_exceptions=False):
runner = CliRunner()
user_input = user_input + " --directory {}".format(directory)
return runner.invoke(cli, user_input, catch_exceptions=catch_exceptions)
def check_bad_inputs(cli, user_input, expected_exception, directory):
# SystemExit does not raise exception by runner
if expected_exception is SystemExit:
result = run_cli_command(cli, user_input, directory)
assert result.exit_code != 0
else:
with pytest.raises(expected_exception):
run_cli_command(cli, user_input, directory)
class TestCLIFiling:
@pytest.mark.parametrize(
"user_input,expected_exception",
[
("-l aapl msft Facebook", SystemExit), # missing filing type
("-l aapl -t null", FilingTypeError), # unrecognized filing type
("-l aapl -t FILING_10Q -n abc", SystemExit), # count is not int
("-l aapl -t FILING_10Q -n 0", ValueError) # no filings available if 0 picked
]
)
def test_filing_bad_inputs(self, user_input, expected_exception, tmp_data_directory):
check_bad_inputs(filing, user_input, expected_exception, tmp_data_directory)
@pytest.mark.parametrize(
"user_input",
[
"-l aapl msft fb FILING_10Q",
"-l aapl msft fb FILING_10Q -n 10",
"-l aapl msft fb FILING_10Q -n 1"
]
)
def test_multiple_companies_input(self, user_input, tmp_data_directory):
pass
class TestCLIDaily:
@pytest.mark.parametrize(
"user_input,expected_exception",
[
("", SystemExit),
("-d 2020", ValueError)
]
)
def test_daily_bad_inputs(self, user_input, expected_exception, tmp_data_directory):
check_bad_inputs(daily, user_input, expected_exception, tmp_data_directory)
|
[
"pytest.mark.parametrize",
"pytest.raises",
"click.testing.CliRunner"
] |
[((231, 242), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (240, 242), False, 'from click.testing import CliRunner\n'), ((792, 1031), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""user_input,expected_exception"""', "[('-l aapl msft Facebook', SystemExit), ('-l aapl -t null', FilingTypeError\n ), ('-l aapl -t FILING_10Q -n abc', SystemExit), (\n '-l aapl -t FILING_10Q -n 0', ValueError)]"], {}), "('user_input,expected_exception', [(\n '-l aapl msft Facebook', SystemExit), ('-l aapl -t null',\n FilingTypeError), ('-l aapl -t FILING_10Q -n abc', SystemExit), (\n '-l aapl -t FILING_10Q -n 0', ValueError)])\n", (815, 1031), False, 'import pytest\n'), ((1386, 1530), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""user_input"""', "['-l aapl msft fb FILING_10Q', '-l aapl msft fb FILING_10Q -n 10',\n '-l aapl msft fb FILING_10Q -n 1']"], {}), "('user_input', ['-l aapl msft fb FILING_10Q',\n '-l aapl msft fb FILING_10Q -n 10', '-l aapl msft fb FILING_10Q -n 1'])\n", (1409, 1530), False, 'import pytest\n'), ((1713, 1818), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""user_input,expected_exception"""', "[('', SystemExit), ('-d 2020', ValueError)]"], {}), "('user_input,expected_exception', [('', SystemExit),\n ('-d 2020', ValueError)])\n", (1736, 1818), False, 'import pytest\n'), ((672, 705), 'pytest.raises', 'pytest.raises', (['expected_exception'], {}), '(expected_exception)\n', (685, 705), False, 'import pytest\n')]
|
import os
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import gridspec
nstep=200
nx=400
nv=3
u=np.zeros((nx,nv))
prim=np.zeros((nx,nv))
gam=5./3.
dx=1./nx
dt=1e-3
time=0
x=np.linspace(0,1,num=nx)
def ptou(pri):
u=np.zeros((nx,nv))
rho=pri[:,0]
v=pri[:,1]
prs=pri[:,2]
mom=rho*v
u[:,0]=rho
u[:,1]=mom
u[:,2]=0.5*mom*v+prs/(gam-1)
return(u)
def utop(u):
pri=np.zeros((nx,nv))
rho=u[:,0]
mom=u[:,1]
ene=u[:,2]
vel=mom/(rho+1e-6)
pri[:,0]=rho
pri[:,1]=vel
pri[:,2]=(ene-0.5*mom*vel)*(gam-1)
return(pri)
def getmaxv(pri):
rho=pri[:,0]
vel=pri[:,1]
prs=pri[:,2]
cs=np.sqrt(gam*prs/rho)
return(max(abs(vel)+cs))
def getflux(u):
f=np.zeros((nx,nv))
pri=utop(u)
rho=pri[:,0]
v=pri[:,1]
prs=pri[:,2]
mom=u[:,1]
ene=u[:,2]
f[:,0]=mom
f[:,1]=mom*v+prs
f[:,2]=(ene+prs)*v
return(f)
prim[:,0]=1.
prim[:,1]=0.
prim[:,2]=1.
for i in range(int(nx/2),nx):
prim[i,0]=0.1
prim[i,1]=0.
prim[i,2]=0.125
print (prim[:,2])
u=ptou(prim)
uold=u
pold=prim
fig = plt.figure()
gs = gridspec.GridSpec(nv,1)
ax1 = fig.add_subplot(gs[0,0])
ax2 = fig.add_subplot(gs[1,0])
ax3 = fig.add_subplot(gs[2,0])
ax1.plot(x, prim[:, 0], label='density')
ax2.plot(x, prim[:, 1], label='velocity')
ax3.plot(x, prim[:, 2], label='pressure')
fig.show()
for nstep in range(0,nstep):
print (time)
um=np.roll(u, 1,axis=0)
up=np.roll(u,-1,axis=0)
um[0,:] =um[1,:]
up[nx-1,:]=up[nx-2,:]
fm=getflux(um)
fp=getflux(up)
cfl=0.49
    dtdx=1./getmaxv(utop(u))  # 'p' is undefined on the first iteration; compute primitives from u
dt=dtdx*dx
time=time+dt
un=0.5*(um+up) - cfl*dtdx* (fp-fm)
u=un
p=utop(u)
plt.close(fig)
fig = plt.figure()
gs = gridspec.GridSpec(nv,1)
ax1 = fig.add_subplot(gs[0,0])
ax2 = fig.add_subplot(gs[1,0])
ax3 = fig.add_subplot(gs[2,0])
ax1.plot(p[:,0])
ax2.plot(p[:,1])
ax3.plot(p[:,2])
fig.show()
|
[
"numpy.sqrt",
"numpy.roll",
"matplotlib.pyplot.close",
"numpy.zeros",
"numpy.linspace",
"matplotlib.pyplot.figure",
"matplotlib.gridspec.GridSpec"
] |
[((118, 136), 'numpy.zeros', 'np.zeros', (['(nx, nv)'], {}), '((nx, nv))\n', (126, 136), True, 'import numpy as np\n'), ((141, 159), 'numpy.zeros', 'np.zeros', (['(nx, nv)'], {}), '((nx, nv))\n', (149, 159), True, 'import numpy as np\n'), ((195, 220), 'numpy.linspace', 'np.linspace', (['(0)', '(1)'], {'num': 'nx'}), '(0, 1, num=nx)\n', (206, 220), True, 'import numpy as np\n'), ((1105, 1117), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1115, 1117), True, 'import matplotlib.pyplot as plt\n'), ((1123, 1147), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['nv', '(1)'], {}), '(nv, 1)\n', (1140, 1147), False, 'from matplotlib import gridspec\n'), ((240, 258), 'numpy.zeros', 'np.zeros', (['(nx, nv)'], {}), '((nx, nv))\n', (248, 258), True, 'import numpy as np\n'), ((412, 430), 'numpy.zeros', 'np.zeros', (['(nx, nv)'], {}), '((nx, nv))\n', (420, 430), True, 'import numpy as np\n'), ((652, 676), 'numpy.sqrt', 'np.sqrt', (['(gam * prs / rho)'], {}), '(gam * prs / rho)\n', (659, 676), True, 'import numpy as np\n'), ((723, 741), 'numpy.zeros', 'np.zeros', (['(nx, nv)'], {}), '((nx, nv))\n', (731, 741), True, 'import numpy as np\n'), ((1399, 1420), 'numpy.roll', 'np.roll', (['u', '(1)'], {'axis': '(0)'}), '(u, 1, axis=0)\n', (1406, 1420), True, 'import numpy as np\n'), ((1432, 1454), 'numpy.roll', 'np.roll', (['u', '(-1)'], {'axis': '(0)'}), '(u, -1, axis=0)\n', (1439, 1454), True, 'import numpy as np\n'), ((1723, 1737), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (1732, 1737), True, 'import matplotlib.pyplot as plt\n'), ((1752, 1764), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1762, 1764), True, 'import matplotlib.pyplot as plt\n'), ((1778, 1802), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['nv', '(1)'], {}), '(nv, 1)\n', (1795, 1802), False, 'from matplotlib import gridspec\n')]
|
import unittest
from pluralizer import Pluralizer
import re
# Standard singular/plural matches.
#
# @type {Array}
BASIC_TESTS = [
# Uncountables.
['firmware', 'firmware'],
['fish', 'fish'],
['media', 'media'],
['moose', 'moose'],
['police', 'police'],
['sheep', 'sheep'],
['series', 'series'],
['agenda', 'agenda'],
['news', 'news'],
['reindeer', 'reindeer'],
['starfish', 'starfish'],
['smallpox', 'smallpox'],
['tennis', 'tennis'],
['chickenpox', 'chickenpox'],
['shambles', 'shambles'],
['garbage', 'garbage'],
['you', 'you'],
['wildlife', 'wildlife'],
['Staff', 'Staff'],
['STAFF', 'STAFF'],
['turquois', 'turquois'],
['carnivorous', 'carnivorous'],
['only', 'only'],
['aircraft', 'aircraft'],
# Latin.
['veniam', 'veniam'],
# Pluralization.
['this', 'these'],
['that', 'those'],
['is', 'are'],
['man', 'men'],
['superman', 'supermen'],
['ox', 'oxen'],
['bus', 'buses'],
['airbus', 'airbuses'],
['railbus', 'railbuses'],
['wife', 'wives'],
['guest', 'guests'],
['thing', 'things'],
['mess', 'messes'],
['guess', 'guesses'],
['person', 'people'],
['meteor', 'meteors'],
['chateau', 'chateaus'],
['lap', 'laps'],
['cough', 'coughs'],
['death', 'deaths'],
['coach', 'coaches'],
['boy', 'boys'],
['toy', 'toys'],
['guy', 'guys'],
['girl', 'girls'],
['chair', 'chairs'],
['toe', 'toes'],
['tiptoe', 'tiptoes'],
['tomato', 'tomatoes'],
['potato', 'potatoes'],
['tornado', 'tornadoes'],
['torpedo', 'torpedoes'],
['hero', 'heroes'],
['superhero', 'superheroes'],
['volcano', 'volcanoes'],
['canto', 'cantos'],
['hetero', 'heteros'],
['photo', 'photos'],
['portico', 'porticos'],
['quarto', 'quartos'],
['kimono', 'kimonos'],
['albino', 'albinos'],
['cherry', 'cherries'],
['piano', 'pianos'],
['pro', 'pros'],
['combo', 'combos'],
['turbo', 'turbos'],
['bar', 'bars'],
['crowbar', 'crowbars'],
['van', 'vans'],
['tobacco', 'tobaccos'],
['afficionado', 'afficionados'],
['monkey', 'monkeys'],
['neutrino', 'neutrinos'],
['rhino', 'rhinos'],
['steno', 'stenos'],
['latino', 'latinos'],
['casino', 'casinos'],
['avocado', 'avocados'],
['commando', 'commandos'],
['tuxedo', 'tuxedos'],
['speedo', 'speedos'],
['dingo', 'dingoes'],
['echo', 'echoes'],
['nacho', 'nachos'],
['motto', 'mottos'],
['psycho', 'psychos'],
['poncho', 'ponchos'],
['pass', 'passes'],
['ghetto', 'ghettos'],
['mango', 'mangos'],
['lady', 'ladies'],
['bath', 'baths'],
['professional', 'professionals'],
['dwarf', 'dwarves'], # Proper spelling is "dwarfs".
['encyclopedia', 'encyclopedias'],
['louse', 'lice'],
['roof', 'roofs'],
['woman', 'women'],
['formula', 'formulas'],
['polyhedron', 'polyhedra'],
['index', 'indices'], # Maybe "indexes".
['matrix', 'matrices'],
['vertex', 'vertices'],
['axe', 'axes'], # Could also be plural of "ax".
['pickaxe', 'pickaxes'],
['crisis', 'crises'],
['criterion', 'criteria'],
['phenomenon', 'phenomena'],
['addendum', 'addenda'],
['datum', 'data'],
['forum', 'forums'],
['millennium', 'millennia'],
['alumnus', 'alumni'],
['medium', 'mediums'],
['census', 'censuses'],
['genus', 'genera'],
['dogma', 'dogmata'],
['life', 'lives'],
['hive', 'hives'],
['kiss', 'kisses'],
['dish', 'dishes'],
['human', 'humans'],
['knife', 'knives'],
['phase', 'phases'],
['judge', 'judges'],
['class', 'classes'],
['witch', 'witches'],
['church', 'churches'],
['massage', 'massages'],
['prospectus', 'prospectuses'],
['syllabus', 'syllabi'],
['viscus', 'viscera'],
['cactus', 'cacti'],
['hippopotamus', 'hippopotamuses'],
['octopus', 'octopuses'],
['platypus', 'platypuses'],
['kangaroo', 'kangaroos'],
['atlas', 'atlases'],
['stigma', 'stigmata'],
['schema', 'schemata'],
['phenomenon', 'phenomena'],
['diagnosis', 'diagnoses'],
['mongoose', 'mongooses'],
['mouse', 'mice'],
['liturgist', 'liturgists'],
['box', 'boxes'],
['gas', 'gases'],
['self', 'selves'],
['chief', 'chiefs'],
['quiz', 'quizzes'],
['child', 'children'],
['shelf', 'shelves'],
['fizz', 'fizzes'],
['tooth', 'teeth'],
['thief', 'thieves'],
['day', 'days'],
['loaf', 'loaves'],
['fix', 'fixes'],
['spy', 'spies'],
['vertebra', 'vertebrae'],
['clock', 'clocks'],
['lap', 'laps'],
['cuff', 'cuffs'],
['leaf', 'leaves'],
['calf', 'calves'],
['moth', 'moths'],
['mouth', 'mouths'],
['house', 'houses'],
['proof', 'proofs'],
['hoof', 'hooves'],
['elf', 'elves'],
['turf', 'turfs'],
['craft', 'crafts'],
['die', 'dice'],
['penny', 'pennies'],
['campus', 'campuses'],
['virus', 'viri'],
['iris', 'irises'],
['bureau', 'bureaus'],
['kiwi', 'kiwis'],
['wiki', 'wikis'],
['igloo', 'igloos'],
['ninja', 'ninjas'],
['pizza', 'pizzas'],
['kayak', 'kayaks'],
['canoe', 'canoes'],
['tiding', 'tidings'],
['pea', 'peas'],
['drive', 'drives'],
['nose', 'noses'],
['movie', 'movies'],
['status', 'statuses'],
['alias', 'aliases'],
['memorandum', 'memorandums'],
['language', 'languages'],
['plural', 'plurals'],
['word', 'words'],
['multiple', 'multiples'],
['reward', 'rewards'],
['sandwich', 'sandwiches'],
['subway', 'subways'],
['direction', 'directions'],
['land', 'lands'],
['row', 'rows'],
['grow', 'grows'],
['flow', 'flows'],
['rose', 'roses'],
['raise', 'raises'],
['friend', 'friends'],
['follower', 'followers'],
['male', 'males'],
['nail', 'nails'],
['sex', 'sexes'],
['tape', 'tapes'],
['ruler', 'rulers'],
['king', 'kings'],
['queen', 'queens'],
['zero', 'zeros'],
['quest', 'quests'],
['goose', 'geese'],
['foot', 'feet'],
['ex', 'exes'],
['reflex', 'reflexes'],
['heat', 'heats'],
['train', 'trains'],
['test', 'tests'],
['pie', 'pies'],
['fly', 'flies'],
['eye', 'eyes'],
['lie', 'lies'],
['node', 'nodes'],
['trade', 'trades'],
['chinese', 'chinese'],
['please', 'pleases'],
['japanese', 'japanese'],
['regex', 'regexes'],
['license', 'licenses'],
['zebra', 'zebras'],
['general', 'generals'],
['corps', 'corps'],
['pliers', 'pliers'],
['flyer', 'flyers'],
['scissors', 'scissors'],
['fireman', 'firemen'],
['chirp', 'chirps'],
['harp', 'harps'],
['corpse', 'corpses'],
['dye', 'dyes'],
['move', 'moves'],
['zombie', 'zombies'],
['variety', 'varieties'],
['talkie', 'talkies'],
['walkie-talkie', 'walkie-talkies'],
['groupie', 'groupies'],
['goonie', 'goonies'],
['lassie', 'lassies'],
['genie', 'genies'],
['foodie', 'foodies'],
['faerie', 'faeries'],
['collie', 'collies'],
['obloquy', 'obloquies'],
['looey', 'looies'],
['osprey', 'ospreys'],
['cover', 'covers'],
['tie', 'ties'],
['groove', 'grooves'],
['bee', 'bees'],
['ave', 'aves'],
['wave', 'waves'],
['wolf', 'wolves'],
['airwave', 'airwaves'],
['archive', 'archives'],
['arch', 'arches'],
['dive', 'dives'],
['aftershave', 'aftershaves'],
['cave', 'caves'],
['grave', 'graves'],
['gift', 'gifts'],
['nerve', 'nerves'],
['nerd', 'nerds'],
['carve', 'carves'],
['rave', 'raves'],
['scarf', 'scarves'],
['sale', 'sales'],
['sail', 'sails'],
['swerve', 'swerves'],
['love', 'loves'],
['dove', 'doves'],
['glove', 'gloves'],
['wharf', 'wharves'],
['valve', 'valves'],
['werewolf', 'werewolves'],
['view', 'views'],
['emu', 'emus'],
['menu', 'menus'],
['wax', 'waxes'],
['fax', 'faxes'],
['nut', 'nuts'],
['crust', 'crusts'],
['lemma', 'lemmata'],
['anathema', 'anathemata'],
['analysis', 'analyses'],
['locus', 'loci'],
['uterus', 'uteri'],
['curriculum', 'curricula'],
['quorum', 'quora'],
['genius', 'geniuses'],
['flower', 'flowers'],
['crash', 'crashes'],
['soul', 'souls'],
['career', 'careers'],
['planet', 'planets'],
['son', 'sons'],
['sun', 'suns'],
['drink', 'drinks'],
['diploma', 'diplomas'],
['dilemma', 'dilemmas'],
['grandma', 'grandmas'],
['no', 'nos'],
['yes', 'yeses'],
['employ', 'employs'],
['employee', 'employees'],
['history', 'histories'],
['story', 'stories'],
['purchase', 'purchases'],
['order', 'orders'],
['key', 'keys'],
['bomb', 'bombs'],
['city', 'cities'],
['sanity', 'sanities'],
['ability', 'abilities'],
['activity', 'activities'],
['cutie', 'cuties'],
['validation', 'validations'],
['floaty', 'floaties'],
['nicety', 'niceties'],
['goalie', 'goalies'],
['crawly', 'crawlies'],
['duty', 'duties'],
['scrutiny', 'scrutinies'],
['deputy', 'deputies'],
['beauty', 'beauties'],
['bank', 'banks'],
['family', 'families'],
['tally', 'tallies'],
['ally', 'allies'],
['alley', 'alleys'],
['valley', 'valleys'],
['medley', 'medleys'],
['melody', 'melodies'],
['trolly', 'trollies'],
['thunk', 'thunks'],
['koala', 'koalas'],
['special', 'specials'],
['book', 'books'],
['knob', 'knobs'],
['crab', 'crabs'],
['plough', 'ploughs'],
['high', 'highs'],
['low', 'lows'],
['hiccup', 'hiccups'],
['bonus', 'bonuses'],
['circus', 'circuses'],
['abacus', 'abacuses'],
['phobia', 'phobias'],
['case', 'cases'],
['lace', 'laces'],
['trace', 'traces'],
['mage', 'mages'],
['lotus', 'lotuses'],
['motorbus', 'motorbuses'],
['cutlas', 'cutlases'],
['tequila', 'tequilas'],
['liar', 'liars'],
['delta', 'deltas'],
['visa', 'visas'],
['flea', 'fleas'],
['favela', 'favelas'],
['cobra', 'cobras'],
['finish', 'finishes'],
['gorilla', 'gorillas'],
['mass', 'masses'],
['face', 'faces'],
['rabbit', 'rabbits'],
['adventure', 'adventures'],
['breeze', 'breezes'],
['brew', 'brews'],
['canopy', 'canopies'],
['copy', 'copies'],
['spy', 'spies'],
['cave', 'caves'],
['charge', 'charges'],
['cinema', 'cinemas'],
['coffee', 'coffees'],
['favourite', 'favourites'],
['themself', 'themselves'],
['country', 'countries'],
['issue', 'issues'],
['authority', 'authorities'],
['force', 'forces'],
['objective', 'objectives'],
['present', 'presents'],
['industry', 'industries'],
['believe', 'believes'],
['century', 'centuries'],
['category', 'categories'],
['eve', 'eves'],
['fee', 'fees'],
['gene', 'genes'],
['try', 'tries'],
['currency', 'currencies'],
['pose', 'poses'],
['cheese', 'cheeses'],
['clue', 'clues'],
['cheer', 'cheers'],
['litre', 'litres'],
['money', 'monies'],
['attorney', 'attorneys'],
['balcony', 'balconies'],
['cockney', 'cockneys'],
['donkey', 'donkeys'],
['honey', 'honeys'],
['smiley', 'smilies'],
['survey', 'surveys'],
['whiskey', 'whiskeys'],
['whisky', 'whiskies'],
['volley', 'volleys'],
['tongue', 'tongues'],
['suit', 'suits'],
['suite', 'suites'],
['cruise', 'cruises'],
['eave', 'eaves'],
['consultancy', 'consultancies'],
['pouch', 'pouches'],
['wallaby', 'wallabies'],
['abyss', 'abysses'],
['weekly', 'weeklies'],
['whistle', 'whistles'],
['utilise', 'utilises'],
['utilize', 'utilizes'],
['mercy', 'mercies'],
['mercenary', 'mercenaries'],
['take', 'takes'],
['flush', 'flushes'],
['gate', 'gates'],
['evolve', 'evolves'],
['slave', 'slaves'],
['native', 'natives'],
['revolve', 'revolves'],
['twelve', 'twelves'],
['sleeve', 'sleeves'],
['subjective', 'subjectives'],
['stream', 'streams'],
['beam', 'beams'],
['foam', 'foams'],
['callus', 'calluses'],
['use', 'uses'],
['beau', 'beaus'],
['gateau', 'gateaus'],
['fetus', 'fetuses'],
['luau', 'luaus'],
['pilau', 'pilaus'],
['shoe', 'shoes'],
['sandshoe', 'sandshoes'],
['zeus', 'zeuses'],
['nucleus', 'nuclei'],
['sky', 'skies'],
['beach', 'beaches'],
['brush', 'brushes'],
['hoax', 'hoaxes'],
['scratch', 'scratches'],
['nanny', 'nannies'],
['negro', 'negroes'],
['taco', 'tacos'],
['cafe', 'cafes'],
['cave', 'caves'],
['giraffe', 'giraffes'],
['goodwife', 'goodwives'],
['housewife', 'housewives'],
['safe', 'safes'],
['save', 'saves'],
['pocketknife', 'pocketknives'],
['tartufe', 'tartufes'],
['tartuffe', 'tartuffes'],
['truffle', 'truffles'],
['jefe', 'jefes'],
['agrafe', 'agrafes'],
['agraffe', 'agraffes'],
['bouffe', 'bouffes'],
['carafe', 'carafes'],
['chafe', 'chafes'],
['pouffe', 'pouffes'],
['pouf', 'poufs'],
['piaffe', 'piaffes'],
['gaffe', 'gaffes'],
['executive', 'executives'],
['cove', 'coves'],
['dove', 'doves'],
['fave', 'faves'],
['positive', 'positives'],
['solve', 'solves'],
['trove', 'troves'],
['treasure', 'treasures'],
['suave', 'suaves'],
['bluff', 'bluffs'],
['half', 'halves'],
['knockoff', 'knockoffs'],
['handkerchief', 'handkerchiefs'],
['reed', 'reeds'],
['reef', 'reefs'],
['yourself', 'yourselves'],
['sunroof', 'sunroofs'],
['plateau', 'plateaus'],
['radius', 'radii'],
['stratum', 'strata'],
['stratus', 'strati'],
['focus', 'foci'],
['fungus', 'fungi'],
['appendix', 'appendices'],
['seraph', 'seraphim'],
['cherub', 'cherubim'],
['memo', 'memos'],
['cello', 'cellos'],
['automaton', 'automata'],
['button', 'buttons'],
['crayon', 'crayons'],
['captive', 'captives'],
['abrasive', 'abrasives'],
['archive', 'archives'],
['additive', 'additives'],
['hive', 'hives'],
['beehive', 'beehives'],
['olive', 'olives'],
['black olive', 'black olives'],
['chive', 'chives'],
['adjective', 'adjectives'],
['cattle drive', 'cattle drives'],
['explosive', 'explosives'],
['executive', 'executives'],
['negative', 'negatives'],
['fugitive', 'fugitives'],
['progressive', 'progressives'],
['laxative', 'laxatives'],
['incentive', 'incentives'],
['genesis', 'geneses'],
['surprise', 'surprises'],
['enterprise', 'enterprises'],
['relative', 'relatives'],
['positive', 'positives'],
['perspective', 'perspectives'],
['superlative', 'superlatives'],
['afterlife', 'afterlives'],
['native', 'natives'],
['detective', 'detectives'],
['collective', 'collectives'],
['lowlife', 'lowlives'],
['low-life', 'low-lives'],
['strife', 'strifes'],
['pony', 'ponies'],
['phony', 'phonies'],
['felony', 'felonies'],
['colony', 'colonies'],
['symphony', 'symphonies'],
['semicolony', 'semicolonies'],
['radiotelephony', 'radiotelephonies'],
['company', 'companies'],
['ceremony', 'ceremonies'],
['carnivore', 'carnivores'],
['emphasis', 'emphases'],
['abuse', 'abuses'],
['ass', 'asses'],
['mile', 'miles'],
['consensus', 'consensuses'],
['coatdress', 'coatdresses'],
['courthouse', 'courthouses'],
['playhouse', 'playhouses'],
['crispness', 'crispnesses'],
['racehorse', 'racehorses'],
['greatness', 'greatnesses'],
['demon', 'demons'],
['lemon', 'lemons'],
['pokemon', 'pokemon'],
['pokémon', 'pokémon'],
['christmas', 'christmases'],
['zymase', 'zymases'],
['accomplice', 'accomplices'],
['amice', 'amices'],
['titmouse', 'titmice'],
['slice', 'slices'],
['base', 'bases'],
['database', 'databases'],
['rise', 'rises'],
['uprise', 'uprises'],
['size', 'sizes'],
['prize', 'prizes'],
['booby', 'boobies'],
['hobby', 'hobbies'],
['baby', 'babies'],
['cookie', 'cookies'],
['budgie', 'budgies'],
['calorie', 'calories'],
['brownie', 'brownies'],
['lolly', 'lollies'],
['hippie', 'hippies'],
['smoothie', 'smoothies'],
['techie', 'techies'],
['specie', 'species'],
['quickie', 'quickies'],
['pixie', 'pixies'],
['rotisserie', 'rotisseries'],
['porkpie', 'porkpies'],
['newbie', 'newbies'],
['veggie', 'veggies'],
['bourgeoisie', 'bourgeoisies'],
['party', 'parties'],
['apology', 'apologies'],
['ancestry', 'ancestries'],
['anomaly', 'anomalies'],
['anniversary', 'anniversaries'],
['battery', 'batteries'],
['nappy', 'nappies'],
['hanky', 'hankies'],
['junkie', 'junkies'],
['hogtie', 'hogties'],
['footsie', 'footsies'],
['curry', 'curries'],
['fantasy', 'fantasies'],
['housefly', 'houseflies'],
['falsy', 'falsies'],
['doggy', 'doggies'],
['carny', 'carnies'],
['cabby', 'cabbies'],
['charlie', 'charlies'],
['bookie', 'bookies'],
['auntie', 'aunties'],
# Prototype inheritance.
['constructor', 'constructors'],
# Non-standard case.
['randomWord', 'randomWords'],
['camelCase', 'camelCases'],
['PascalCase', 'PascalCases'],
['Alumnus', 'Alumni'],
['CHICKEN', 'CHICKENS'],
['日本語', '日本語'],
['한국', '한국'],
['中文', '中文'],
['اللغة العربية', 'اللغة العربية'],
['四 chicken', '四 chickens'],
['Order2', 'Order2s'],
['Work Order2', 'Work Order2s'],
['SoundFX2', 'SoundFX2s'],
['oDonald', 'oDonalds']
]
#
# Odd plural to singular tests.
#
# @type {Array}
#
SINGULAR_TESTS = [
['dingo', 'dingos'],
['mango', 'mangoes'],
['echo', 'echos'],
['ghetto', 'ghettoes'],
['nucleus', 'nucleuses'],
['bureau', 'bureaux'],
['seraph', 'seraphs']
]
#
# Odd singular to plural tests.
#
# @type {Array}
#
PLURAL_TESTS = [
['plateaux', 'plateaux'],
['axis', 'axes'],
['basis', 'bases'],
['automatum', 'automata'],
['thou', 'you'],
['axiS', 'axes'],
['passerby', 'passersby']
]
class TestPluralize(unittest.TestCase):
def test_methods_plural(self):
pluralizer = Pluralizer()
for test in [*BASIC_TESTS, *PLURAL_TESTS]:
self.assertEqual(pluralizer.plural(test[0]), test[1])
def test_methods_is_plural(self):
pluralizer = Pluralizer()
for test in [*BASIC_TESTS, *PLURAL_TESTS]:
self.assertTrue(pluralizer.isPlural(test[1]), f"isPlural('{test[1]}')")
def test_methods_singular(self):
pluralizer = Pluralizer()
for test in [*BASIC_TESTS, *SINGULAR_TESTS]:
self.assertEqual(pluralizer.singular(test[1]), test[0])
def test_methods_is_singular(self):
pluralizer = Pluralizer()
for test in [*BASIC_TESTS, *SINGULAR_TESTS]:
self.assertTrue(pluralizer.isSingular(test[0]))
def test_automatically_convert_plural(self):
pluralizer = Pluralizer()
for test in [*BASIC_TESTS, *PLURAL_TESTS]:
self.assertEqual(pluralizer.pluralize(test[1], 5), test[1])
self.assertEqual(pluralizer.pluralize(test[0], 5), test[1])
def test_automatically_convert_singular(self):
pluralizer = Pluralizer()
for test in [*BASIC_TESTS, *SINGULAR_TESTS]:
self.assertEqual(pluralizer.pluralize(test[0], 1), test[0])
self.assertEqual(pluralizer.pluralize(test[1], 1), test[0])
def test_prepend_count_plural_words(self):
pluralizer = Pluralizer()
self.assertEqual(pluralizer.pluralize('test', 5, True), '5 tests')
def test_prepend_count_singular_words(self):
pluralizer = Pluralizer()
self.assertEqual(pluralizer.pluralize('test', 1, True), '1 test')
def test_add_new_uncountable_rules(self):
pluralizer = Pluralizer()
self.assertEqual(pluralizer.pluralize('paper'), 'papers')
pluralizer.addUncountableRule('paper')
self.assertEqual(pluralizer.pluralize('paper'), 'paper')
def test_add_new_irregular_words(self):
pluralizer = Pluralizer()
self.assertEqual(pluralizer.pluralize('irregular'), 'irregulars')
pluralizer.addIrregularRule('irregular', 'regular')
self.assertEqual(pluralizer.pluralize('irregular'), 'regular')
def test_return_false_for_irregular_words(self):
pluralizer = Pluralizer()
self.assertTrue(pluralizer.isPlural('irregulars'))
pluralizer.addIrregularRule('irregulars', 'regular')
self.assertFalse(pluralizer.isPlural('irregulars'))
def test_add_new_plural_matching_rules(self):
pluralizer = Pluralizer()
self.assertEqual(pluralizer.plural('regex'), 'regexes')
pluralizer.addPluralRule(re.compile(r'(?i)gex$'), 'gexii')
self.assertEqual(pluralizer.plural('regex'), 'regexii')
def test_add_new_singular_matching_rules(self):
pluralizer = Pluralizer()
self.assertEqual(pluralizer.singular('singles'), 'single')
pluralizer.addSingularRule(re.compile('singles$'), 'singular')
self.assertEqual(pluralizer.singular('singles'), 'singular')
def test_allow_new_plural_matching_rules_to_be_strings(self):
pluralizer = Pluralizer()
self.assertEqual(pluralizer.plural('person'), 'people')
pluralizer.addPluralRule('person', 'peeps')
self.assertEqual(pluralizer.plural('person'), 'peeps')
def test_allow_new_singular_matching_rules_to_be_strings(self):
pluralizer = Pluralizer()
self.assertEqual(pluralizer.singular('mornings'), 'morning')
pluralizer.addSingularRule('mornings', 'suck')
self.assertEqual(pluralizer.singular('mornings'), 'suck')
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"re.compile",
"pluralizer.Pluralizer"
] |
[((22163, 22178), 'unittest.main', 'unittest.main', ([], {}), '()\n', (22176, 22178), False, 'import unittest\n'), ((18576, 18588), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (18586, 18588), False, 'from pluralizer import Pluralizer\n'), ((18766, 18778), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (18776, 18778), False, 'from pluralizer import Pluralizer\n'), ((18973, 18985), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (18983, 18985), False, 'from pluralizer import Pluralizer\n'), ((19169, 19181), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (19179, 19181), False, 'from pluralizer import Pluralizer\n'), ((19366, 19378), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (19376, 19378), False, 'from pluralizer import Pluralizer\n'), ((19647, 19659), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (19657, 19659), False, 'from pluralizer import Pluralizer\n'), ((19926, 19938), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (19936, 19938), False, 'from pluralizer import Pluralizer\n'), ((20085, 20097), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (20095, 20097), False, 'from pluralizer import Pluralizer\n'), ((20240, 20252), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (20250, 20252), False, 'from pluralizer import Pluralizer\n'), ((20497, 20509), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (20507, 20509), False, 'from pluralizer import Pluralizer\n'), ((20790, 20802), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (20800, 20802), False, 'from pluralizer import Pluralizer\n'), ((21055, 21067), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (21065, 21067), False, 'from pluralizer import Pluralizer\n'), ((21337, 21349), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (21347, 21349), False, 'from pluralizer import Pluralizer\n'), ((21645, 21657), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (21655, 21657), False, 'from pluralizer import Pluralizer\n'), ((21927, 21939), 'pluralizer.Pluralizer', 'Pluralizer', ([], {}), '()\n', (21937, 21939), False, 'from pluralizer import Pluralizer\n'), ((21165, 21187), 're.compile', 're.compile', (['"""(?i)gex$"""'], {}), "('(?i)gex$')\n", (21175, 21187), False, 'import re\n'), ((21452, 21474), 're.compile', 're.compile', (['"""singles$"""'], {}), "('singles$')\n", (21462, 21474), False, 'import re\n')]
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests.sessions
from django.db.models import F
from promgen.version import __version__
from django.conf import settings
# Wrappers around request api to ensure we always attach our user agent
# https://github.com/requests/requests/blob/master/requests/api.py
def post(url, data=None, json=None, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.post(url, data=data, json=json, **kwargs)
def get(url, params=None, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.get(url, params=params, **kwargs)
def delete(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.delete(url, **kwargs)
def setting(key, default=None, domain=None):
"""
Settings helper based on saltstack's query
Allows a simple way to query settings from YAML
using the style `path:to:key` to represent
path:
to:
key: value
"""
rtn = settings.PROMGEN
if domain:
rtn = rtn[domain]
for index in key.split(":"):
try:
rtn = rtn[index]
except KeyError:
return default
return rtn
class HelpFor:
# Wrap a model's lower level api so that we can easily
# grab help_text for a specific field
# help_text = HelpFor(DjangoModel)
# help_test.field_name
def __init__(self, model):
self.model = model
def __getattr__(self, name):
return self.model._meta.get_field(name).help_text
def inc_for_pk(model, pk, **kwargs):
# key=F('key') + value
model.objects.filter(pk=pk).update(**{key: F(key) + kwargs[key] for key in kwargs})
|
[
"django.db.models.F"
] |
[((1947, 1953), 'django.db.models.F', 'F', (['key'], {}), '(key)\n', (1948, 1953), False, 'from django.db.models import F\n')]
|
import time
import kubernetes
import pytest
from dagster_k8s.client import DagsterK8sError, WaitForPodState
from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod
from dagster_k8s_test_infra.helm import get_helm_test_namespace
def construct_pod_spec(name, cmd):
return kubernetes.client.V1PodSpec(
restart_policy="Never",
containers=[
kubernetes.client.V1Container(name=name, image="busybox", args=["/bin/sh", "-c", cmd])
],
)
def construct_pod_manifest(name, cmd):
return kubernetes.client.V1Pod(
metadata=kubernetes.client.V1ObjectMeta(name=name), spec=construct_pod_spec(name, cmd),
)
def construct_job_manifest(name, cmd):
return kubernetes.client.V1Job(
api_version="batch/v1",
kind="Job",
metadata=kubernetes.client.V1ObjectMeta(name=name),
spec=kubernetes.client.V1JobSpec(
template=kubernetes.client.V1PodTemplateSpec(spec=construct_pod_spec(name, cmd)),
),
)
def test_wait_for_pod(cluster_provider): # pylint: disable=unused-argument
api = kubernetes.client.CoreV1Api()
with get_helm_test_namespace() as namespace:
# Without this sleep, we get the following error on kind:
# HTTP response body:
# {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"No API
# token found for service account \"default\", retry after the token is automatically
# created and added to the service
# account","reason":"ServerTimeout","details":{"name":"create
# pod","kind":"serviceaccounts","retryAfterSeconds":1},"code":500}
time.sleep(5)
try:
api.create_namespaced_pod(
body=construct_pod_manifest("sayhi1", 'echo "hello world"'), namespace=namespace
)
wait_for_pod("sayhi1", namespace=namespace)
assert retrieve_pod_logs("sayhi1", namespace=namespace) == "hello world\n"
api.create_namespaced_pod(
body=construct_pod_manifest("sayhi2", 'echo "hello world"'), namespace=namespace
)
wait_for_pod("sayhi2", namespace=namespace, wait_for_state=WaitForPodState.Terminated)
with pytest.raises(
DagsterK8sError, match="Timed out while waiting for pod to become ready"
):
api.create_namespaced_pod(
body=construct_pod_manifest("sayhi3", 'sleep 5; echo "hello world"'),
namespace=namespace,
)
wait_for_pod("sayhi3", namespace=namespace, wait_timeout=1)
with pytest.raises(DagsterK8sError) as exc_info:
api.create_namespaced_pod(
body=construct_pod_manifest("fail", 'echo "whoops!"; exit 1'),
namespace=namespace,
)
wait_for_pod("fail", namespace=namespace, wait_for_state=WaitForPodState.Terminated)
# not doing total match because integration test. unit tests test full log message
assert "Pod did not exit successfully." in str(exc_info.value)
finally:
for pod_name in ["sayhi1", "sayhi2", "sayhi3", "fail"]:
try:
api.delete_namespaced_pod(pod_name, namespace=namespace)
except kubernetes.client.rest.ApiException:
pass
def test_wait_for_job(cluster_provider): # pylint: disable=unused-argument
with get_helm_test_namespace() as namespace:
# Without this sleep, we get the following error on kind:
# HTTP response body:
# {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"No API
# token found for service account \"default\", retry after the token is automatically
# created and added to the service
# account","reason":"ServerTimeout","details":{"name":"create
# pod","kind":"serviceaccounts","retryAfterSeconds":1},"code":500}
time.sleep(5)
try:
api = kubernetes.client.BatchV1Api()
api.create_namespaced_job(
body=construct_job_manifest("sayhi1", 'echo "hello world"'), namespace=namespace
)
wait_for_job_success("sayhi1", namespace=namespace)
with pytest.raises(
DagsterK8sError, match="Timed out while waiting for job sayhi2 to complete"
):
api.create_namespaced_job(
body=construct_job_manifest("sayhi2", 'sleep 5; echo "hello world"'),
namespace=namespace,
)
wait_for_job_success("sayhi2", namespace=namespace, wait_timeout=1)
with pytest.raises(
DagsterK8sError, match="Encountered failed job pods for job fail with status:",
):
api.create_namespaced_job(
body=construct_job_manifest("fail", 'echo "whoops!"; exit 1'),
namespace=namespace,
)
wait_for_job_success("fail", namespace=namespace)
finally:
for job in ["sayhi1", "sayhi2", "fail"]:
try:
api.delete_namespaced_job(
job, namespace=namespace, propagation_policy="Foreground"
)
except kubernetes.client.rest.ApiException:
pass
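if __name__ == "__main__":
    # Stand-alone sketch (not from the original source): run one pod outside pytest,
    # reusing the helpers above. Assumes a cluster reachable via the local kubeconfig;
    # "default" and the pod name "hello" are placeholders.
    kubernetes.config.load_kube_config()
    _api = kubernetes.client.CoreV1Api()
    _api.create_namespaced_pod(
        body=construct_pod_manifest("hello", 'echo "hello world"'), namespace="default"
    )
    wait_for_pod("hello", namespace="default")
    print(retrieve_pod_logs("hello", namespace="default"))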
|
[
"kubernetes.client.BatchV1Api",
"kubernetes.client.V1ObjectMeta",
"kubernetes.client.CoreV1Api",
"time.sleep",
"dagster_k8s_test_infra.helm.get_helm_test_namespace",
"pytest.raises",
"dagster_k8s.utils.wait_for_job_success",
"dagster_k8s.utils.wait_for_pod",
"kubernetes.client.V1Container",
"dagster_k8s.utils.retrieve_pod_logs"
] |
[((1112, 1141), 'kubernetes.client.CoreV1Api', 'kubernetes.client.CoreV1Api', ([], {}), '()\n', (1139, 1141), False, 'import kubernetes\n'), ((1152, 1177), 'dagster_k8s_test_infra.helm.get_helm_test_namespace', 'get_helm_test_namespace', ([], {}), '()\n', (1175, 1177), False, 'from dagster_k8s_test_infra.helm import get_helm_test_namespace\n'), ((1674, 1687), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1684, 1687), False, 'import time\n'), ((3525, 3550), 'dagster_k8s_test_infra.helm.get_helm_test_namespace', 'get_helm_test_namespace', ([], {}), '()\n', (3548, 3550), False, 'from dagster_k8s_test_infra.helm import get_helm_test_namespace\n'), ((4047, 4060), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (4057, 4060), False, 'import time\n'), ((597, 638), 'kubernetes.client.V1ObjectMeta', 'kubernetes.client.V1ObjectMeta', ([], {'name': 'name'}), '(name=name)\n', (627, 638), False, 'import kubernetes\n'), ((828, 869), 'kubernetes.client.V1ObjectMeta', 'kubernetes.client.V1ObjectMeta', ([], {'name': 'name'}), '(name=name)\n', (858, 869), False, 'import kubernetes\n'), ((1864, 1907), 'dagster_k8s.utils.wait_for_pod', 'wait_for_pod', (['"""sayhi1"""'], {'namespace': 'namespace'}), "('sayhi1', namespace=namespace)\n", (1876, 1907), False, 'from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod\n'), ((2158, 2249), 'dagster_k8s.utils.wait_for_pod', 'wait_for_pod', (['"""sayhi2"""'], {'namespace': 'namespace', 'wait_for_state': 'WaitForPodState.Terminated'}), "('sayhi2', namespace=namespace, wait_for_state=WaitForPodState.\n Terminated)\n", (2170, 2249), False, 'from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod\n'), ((4093, 4123), 'kubernetes.client.BatchV1Api', 'kubernetes.client.BatchV1Api', ([], {}), '()\n', (4121, 4123), False, 'import kubernetes\n'), ((4287, 4338), 'dagster_k8s.utils.wait_for_job_success', 'wait_for_job_success', (['"""sayhi1"""'], {'namespace': 'namespace'}), "('sayhi1', namespace=namespace)\n", (4307, 4338), False, 'from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod\n'), ((399, 489), 'kubernetes.client.V1Container', 'kubernetes.client.V1Container', ([], {'name': 'name', 'image': '"""busybox"""', 'args': "['/bin/sh', '-c', cmd]"}), "(name=name, image='busybox', args=['/bin/sh',\n '-c', cmd])\n", (428, 489), False, 'import kubernetes\n'), ((1927, 1975), 'dagster_k8s.utils.retrieve_pod_logs', 'retrieve_pod_logs', (['"""sayhi1"""'], {'namespace': 'namespace'}), "('sayhi1', namespace=namespace)\n", (1944, 1975), False, 'from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod\n'), ((2263, 2355), 'pytest.raises', 'pytest.raises', (['DagsterK8sError'], {'match': '"""Timed out while waiting for pod to become ready"""'}), "(DagsterK8sError, match=\n 'Timed out while waiting for pod to become ready')\n", (2276, 2355), False, 'import pytest\n'), ((2590, 2649), 'dagster_k8s.utils.wait_for_pod', 'wait_for_pod', (['"""sayhi3"""'], {'namespace': 'namespace', 'wait_timeout': '(1)'}), "('sayhi3', namespace=namespace, wait_timeout=1)\n", (2602, 2649), False, 'from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod\n'), ((2668, 2698), 'pytest.raises', 'pytest.raises', (['DagsterK8sError'], {}), '(DagsterK8sError)\n', (2681, 2698), False, 'import pytest\n'), ((2913, 3002), 'dagster_k8s.utils.wait_for_pod', 'wait_for_pod', (['"""fail"""'], {'namespace': 'namespace', 'wait_for_state': 'WaitForPodState.Terminated'}), "('fail', 
namespace=namespace, wait_for_state=WaitForPodState.\n Terminated)\n", (2925, 3002), False, 'from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod\n'), ((4357, 4452), 'pytest.raises', 'pytest.raises', (['DagsterK8sError'], {'match': '"""Timed out while waiting for job sayhi2 to complete"""'}), "(DagsterK8sError, match=\n 'Timed out while waiting for job sayhi2 to complete')\n", (4370, 4452), False, 'import pytest\n'), ((4687, 4754), 'dagster_k8s.utils.wait_for_job_success', 'wait_for_job_success', (['"""sayhi2"""'], {'namespace': 'namespace', 'wait_timeout': '(1)'}), "('sayhi2', namespace=namespace, wait_timeout=1)\n", (4707, 4754), False, 'from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod\n'), ((4773, 4871), 'pytest.raises', 'pytest.raises', (['DagsterK8sError'], {'match': '"""Encountered failed job pods for job fail with status:"""'}), "(DagsterK8sError, match=\n 'Encountered failed job pods for job fail with status:')\n", (4786, 4871), False, 'import pytest\n'), ((5100, 5149), 'dagster_k8s.utils.wait_for_job_success', 'wait_for_job_success', (['"""fail"""'], {'namespace': 'namespace'}), "('fail', namespace=namespace)\n", (5120, 5149), False, 'from dagster_k8s.utils import retrieve_pod_logs, wait_for_job_success, wait_for_pod\n')]
|
# Generated by Django 3.1.6 on 2021-02-15 08:52
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Service',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('short_name', models.CharField(max_length=8)),
('medium_name', models.CharField(max_length=16)),
],
),
migrations.CreateModel(
name='Bearer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bearer_id', models.TextField()),
('cost', models.IntegerField()),
('mimeValue', models.CharField(max_length=255)),
('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='radioepg.service')),
],
),
]
|
[
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.AutoField",
"django.db.models.CharField"
] |
[((336, 429), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (352, 429), False, 'from django.db import migrations, models\n'), ((459, 489), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(8)'}), '(max_length=8)\n', (475, 489), False, 'from django.db import migrations, models\n'), ((524, 555), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(16)'}), '(max_length=16)\n', (540, 555), False, 'from django.db import migrations, models\n'), ((687, 780), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (703, 780), False, 'from django.db import migrations, models\n'), ((809, 827), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (825, 827), False, 'from django.db import migrations, models\n'), ((855, 876), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (874, 876), False, 'from django.db import migrations, models\n'), ((909, 941), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (925, 941), False, 'from django.db import migrations, models\n'), ((972, 1062), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""radioepg.service"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'radioepg.service')\n", (989, 1062), False, 'from django.db import migrations, models\n')]
|
from celery import Celery
from clients.dobie_client import send_data_to_dobie
app = Celery('qualichain_mediator')
app.config_from_object('settings', namespace='CELERY_')
@app.task()
def consume_messages_async(message):
"""
    This task receives job posting text and feeds it to the DOBIE component
"""
extracted_skills = send_data_to_dobie(message)
print(extracted_skills, flush=True)
return extracted_skills
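# A short usage sketch (not from the original source), assuming a broker and a
# running worker are configured; the job-posting text is invented:
#
#   async_result = consume_messages_async.delay("Python developer with NLP experience wanted")
#   skills = async_result.get(timeout=30)   # blocks until the worker returns DOBIE's output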
|
[
"clients.dobie_client.send_data_to_dobie",
"celery.Celery"
] |
[((86, 115), 'celery.Celery', 'Celery', (['"""qualichain_mediator"""'], {}), "('qualichain_mediator')\n", (92, 115), False, 'from celery import Celery\n'), ((338, 365), 'clients.dobie_client.send_data_to_dobie', 'send_data_to_dobie', (['message'], {}), '(message)\n', (356, 365), False, 'from clients.dobie_client import send_data_to_dobie\n')]
|
import json, math
from ingest import ingest_json_body
from housepy import config, log, strings, util
def parse(request):
log.info("ambit_geo.parse")
sample = ingest_json_body(request)
if sample is None:
return sample, "Could not parse"
data = {}
for key, value in sample.items():
if key == "UTC":
dt = util.parse_date(sample['UTC']) # these are marked UTC in the data
t = util.timestamp(dt)
data['t_utc'] = t
continue
if key == "Longitude":
data['longitude'] = math.degrees(float(sample['Longitude']))
continue
if key == "Latitude":
data['latitude'] = math.degrees(float(sample['Latitude']))
continue
if key == "GPSAltitude":
data['altitude'] = float(sample['GPSAltitude'])
continue
if type(value) != str:
continue
data[key] = strings.as_numeric(value)
try:
log.debug("%s %s %s" % (data['longitude'], data['latitude'], data['altitude']))
except:
log.error("MISSING GEO")
return data
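# A worked sketch of what parse() returns (not from the original source; the
# sample values are invented, and it assumes strings.as_numeric converts numeric
# strings to numbers):
#
#   input sample : {"UTC": "2014-08-01T12:00:00Z", "Longitude": "-1.2915436",
#                   "Latitude": "0.7100275", "GPSAltitude": "14.0", "Speed": "1.8"}
#   output data  : {"t_utc": <epoch seconds for the UTC string>,
#                   "longitude": math.degrees(-1.2915436) ~ -74.0,
#                   "latitude": math.degrees(0.7100275) ~ 40.7,
#                   "altitude": 14.0, "Speed": 1.8}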
|
[
"housepy.log.info",
"housepy.util.timestamp",
"housepy.util.parse_date",
"housepy.log.debug",
"housepy.log.error",
"housepy.strings.as_numeric",
"ingest.ingest_json_body"
] |
[((126, 153), 'housepy.log.info', 'log.info', (['"""ambit_geo.parse"""'], {}), "('ambit_geo.parse')\n", (134, 153), False, 'from housepy import config, log, strings, util\n'), ((167, 192), 'ingest.ingest_json_body', 'ingest_json_body', (['request'], {}), '(request)\n', (183, 192), False, 'from ingest import ingest_json_body\n'), ((988, 1013), 'housepy.strings.as_numeric', 'strings.as_numeric', (['value'], {}), '(value)\n', (1006, 1013), False, 'from housepy import config, log, strings, util\n'), ((1033, 1112), 'housepy.log.debug', 'log.debug', (["('%s %s %s' % (data['longitude'], data['latitude'], data['altitude']))"], {}), "('%s %s %s' % (data['longitude'], data['latitude'], data['altitude']))\n", (1042, 1112), False, 'from housepy import config, log, strings, util\n'), ((352, 382), 'housepy.util.parse_date', 'util.parse_date', (["sample['UTC']"], {}), "(sample['UTC'])\n", (367, 382), False, 'from housepy import config, log, strings, util\n'), ((434, 452), 'housepy.util.timestamp', 'util.timestamp', (['dt'], {}), '(dt)\n', (448, 452), False, 'from housepy import config, log, strings, util\n'), ((1133, 1157), 'housepy.log.error', 'log.error', (['"""MISSING GEO"""'], {}), "('MISSING GEO')\n", (1142, 1157), False, 'from housepy import config, log, strings, util\n')]
|
#!/usr/bin/env python
'''update gandi DNS domain entry, with LiveDNS v5
Cf. https://doc.livedns.gandi.net/#work-with-domains
'''
import argparse
import ipaddress
import json
import os
from subprocess import check_output
import requests
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('domain')
parser.add_argument('name')
parser.add_argument('--ip', help="defaults to ifconfig.me's return")
parser.add_argument('--api_key', help="defaults to GANDI_API_KEY env var, or the return of 'pass api/gandi'")
args = parser.parse_args()
if args.ip is None:
args.ip = requests.get('http://ifconfig.me', headers={'User-Agent': 'curl/7.61.1'}).content.decode().strip()
ip = ipaddress.ip_address(args.ip)
if args.api_key is None:
args.api_key = os.environ.get('GANDI_API_KEY', check_output(['pass', 'api/gandi'], text=True).strip())
key = {'X-Api-Key': args.api_key}
r = requests.get(f'https://dns.api.gandi.net/api/v5/domains/{args.domain}/records/{args.name}', headers=key)
r.raise_for_status()
if r.json()[0]['rrset_values'][0] == args.ip:
if args.verbose:
print('ok')
else:
type_ = 'AAAA' if isinstance(ip, ipaddress.IPv6Address) else 'A'
url = f'https://dns.api.gandi.net/api/v5/domains/{args.domain}/records/{args.name}/{type_}'
data = {'rrset_values': [args.ip]}
headers = {'Content-Type': 'application/json', **key}
r = requests.put(url, data=json.dumps(data), headers=headers)
if args.verbose:
print(r.json())
else:
r.raise_for_status()
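# Typical invocations (not from the original source; the script file name and
# the domain/record names are placeholders):
#
#   GANDI_API_KEY=xxxx python update_gandi_dns.py example.org home --verbose
#   python update_gandi_dns.py example.org home --ip 203.0.113.7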
|
[
"subprocess.check_output",
"argparse.ArgumentParser",
"json.dumps",
"requests.get",
"ipaddress.ip_address"
] |
[((248, 292), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (271, 292), False, 'import argparse\n'), ((758, 787), 'ipaddress.ip_address', 'ipaddress.ip_address', (['args.ip'], {}), '(args.ip)\n', (778, 787), False, 'import ipaddress\n'), ((961, 1075), 'requests.get', 'requests.get', (['f"""https://dns.api.gandi.net/api/v5/domains/{args.domain}/records/{args.name}"""'], {'headers': 'key'}), "(\n f'https://dns.api.gandi.net/api/v5/domains/{args.domain}/records/{args.name}'\n , headers=key)\n", (973, 1075), False, 'import requests\n'), ((1474, 1490), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1484, 1490), False, 'import json\n'), ((865, 911), 'subprocess.check_output', 'check_output', (["['pass', 'api/gandi']"], {'text': '(True)'}), "(['pass', 'api/gandi'], text=True)\n", (877, 911), False, 'from subprocess import check_output\n'), ((653, 726), 'requests.get', 'requests.get', (['"""http://ifconfig.me"""'], {'headers': "{'User-Agent': 'curl/7.61.1'}"}), "('http://ifconfig.me', headers={'User-Agent': 'curl/7.61.1'})\n", (665, 726), False, 'import requests\n')]
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
from collections import deque
from collections import defaultdict
class Solution(object):
def verticalOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
if not root:
return []
queue = deque([(root, 0)])
verticalNodeMap = defaultdict(list)
while queue:
node, horrizotalDistace = queue.popleft()
if node:
verticalNodeMap[horrizotalDistace].append(node.val)
queue.append((node.left, horrizotalDistace - 1))
queue.append((node.right, horrizotalDistace + 1))
minHorrizotalDistace, maxHorrizotalDistace = min(verticalNodeMap.keys()), max(verticalNodeMap.keys())
result = []
for key in range(minHorrizotalDistace, maxHorrizotalDistace + 1):
result.append(verticalNodeMap[key])
return result
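# Quick sanity check of the class above on a small hand-built tree (not from the
# original source; TreeNode is defined inline since LeetCode normally supplies it):
class TreeNode(object):
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right
_root = TreeNode(3, TreeNode(9), TreeNode(20, TreeNode(15), TreeNode(7)))
print(Solution().verticalOrder(_root))  # [[9], [3, 15], [20], [7]]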
# My solution during mock, getting TLE, don't know why
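# (The TLE comes from using `root` instead of `currentNode` inside the while loop
#  below: the code keeps appending root.val and re-enqueuing root.left/root.right,
#  so the queue never empties once root has a child.)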
from collections import defaultdict
from collections import deque
class Solution(object):
def verticalOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
if not root:
return []
orderMap = defaultdict(list)
queue = deque([(root, 0)])
while queue:
currentNode, vLine = queue.popleft()
if currentNode:
orderMap[vLine].append(root.val)
queue.append((root.left, vLine - 1))
queue.append((root.right, vLine + 1))
result = []
for i in range(min(orderMap.keys()), max(orderMap.keys()) + 1):
result.append(orderMap[i])
return result
|
[
"collections.deque",
"collections.defaultdict"
] |
[((471, 489), 'collections.deque', 'deque', (['[(root, 0)]'], {}), '([(root, 0)])\n', (476, 489), False, 'from collections import deque\n'), ((516, 533), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (527, 533), False, 'from collections import defaultdict\n'), ((1434, 1451), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1445, 1451), False, 'from collections import defaultdict\n'), ((1468, 1486), 'collections.deque', 'deque', (['[(root, 0)]'], {}), '([(root, 0)])\n', (1473, 1486), False, 'from collections import deque\n')]
|
import colorsys
import cv2
from PIL import Image
import pkg_resources
from .LivescoreBase import LivescoreBase
from .details import Alliance, OngoingMatchDetails
class LivescoreCommon(LivescoreBase):
def __init__(self, game_year, **kwargs):
super(LivescoreCommon, self).__init__(game_year, **kwargs)
self._match_key = None
self._match_name = None
def _getMatchKeyName(self, img, debug_img):
if self._match_key is None:
tl = self._transformPoint((220, 6))
br = self._transformPoint((570, 43))
raw_match_name = self._parseRawMatchName(self._getImgCropThresh(img, tl, br))
self._match_key = self._getMatchKey(raw_match_name)
if self._match_key:
self._match_name = raw_match_name
else:
self._match_name = None
if self._debug:
box = self._cornersToBox(tl, br)
self._drawBox(debug_img, box, (0, 255, 0))
return self._match_key, self._match_name
def _getTimeAndMode(self, img, debug_img):
# Check for match under review
review_point1 = self._transformPoint((624, 93))
review_sample1 = img[review_point1[1], review_point1[0], :]
hsvL = colorsys.rgb_to_hsv(float(review_sample1[2])/255, float(review_sample1[1])/255, float(review_sample1[0])/255)
review_point2 = self._transformPoint((1279 - 624, 93))
review_sample2 = img[review_point2[1], review_point2[0], :]
hsvR = colorsys.rgb_to_hsv(float(review_sample2[2])/255, float(review_sample2[1])/255, float(review_sample2[0])/255)
if 0.116 < hsvL[0] < 0.216 and 0.116 < hsvR[0] < 0.216:
return 0, 'post_match'
# Find time remaining
horiz_center = self._TEMPLATE_SHAPE[0]/2
tl = self._transformPoint((horiz_center-25, 56))
br = self._transformPoint((horiz_center+25, 82))
time_remaining = self._parseDigits(self._getImgCropThresh(img, tl, br))
if self._debug:
# draw a green box for time
box = self._cornersToBox(tl, br)
self._drawBox(debug_img, box, (0, 255, 0))
# Determine mode: 'pre_match', 'auto', 'teleop', or 'post_match'
mode_point = self._transformPoint((520, 70))
mode_point2 = self._transformPoint((581, 70))
mode_sample = img[mode_point[1], mode_point[0], :]
mode_sample2 = img[mode_point2[1], mode_point2[0], :]
hsv1 = colorsys.rgb_to_hsv(float(mode_sample[2])/255, float(mode_sample[1])/255, float(mode_sample[0])/255)
hsv2 = colorsys.rgb_to_hsv(float(mode_sample2[2])/255, float(mode_sample2[1])/255, float(mode_sample2[0])/255)
if time_remaining is None:
return None, None
if time_remaining == 0:
if hsv1[1] > 0.6 and hsv2[1] > 0.6: # Both saturated
mode = 'post_match'
elif hsv1[1] > 0.6: # First saturated
mode = 'auto' # End of auton
else:
mode = 'pre_match'
elif time_remaining <= 15 and hsv2[1] < 0.6:
mode = 'auto'
else:
mode = 'teleop'
if self._debug:
box = self._cornersToBox(tl, br)
self._drawBox(debug_img, box, (0, 255, 0))
cv2.circle(debug_img, review_point1, 2, (0, 255, 0), -1)
cv2.circle(debug_img, review_point2, 2, (0, 255, 0), -1)
cv2.circle(debug_img, mode_point, 2, (0, 255, 0), -1)
cv2.circle(debug_img, mode_point2, 2, (0, 255, 0), -1)
return time_remaining, mode
def _getFlipped(self, img, debug_img):
# Sample point to determine red/blue side
color_point = self._transformPoint((520, 95))
color_sample = img[color_point[1], color_point[0], :]
is_flipped = color_sample[0] > color_sample[2] # More blue than red
if self._debug:
cv2.circle(debug_img, color_point, 2, (0, 255, 0), -1)
return is_flipped
def _getScores(self, img, debug_img, is_flipped):
# Left score limits
left_tl = self._transformPoint((520, 110))
left_br = self._transformPoint((634, 155))
# Right score limits
right_tl = self._transformPoint((644, 110))
right_br = self._transformPoint((760, 155))
left_score = self._parseDigits(self._getImgCropThresh(img, left_tl, left_br, white=True))
right_score = self._parseDigits(self._getImgCropThresh(img, right_tl, right_br, white=True))
if is_flipped:
red_score = right_score
blue_score = left_score
else:
red_score = left_score
blue_score = right_score
if self._debug:
left_box = self._cornersToBox(left_tl, left_br)
right_box = self._cornersToBox(right_tl, right_br)
self._drawBox(debug_img, left_box, (255, 255, 0) if is_flipped else (255, 0, 255))
self._drawBox(debug_img, right_box, (255, 0, 255) if is_flipped else (255, 255, 0))
return red_score, blue_score
def _getMatchDetails(self, img, force_find_overlay):
debug_img = None
if self._debug:
debug_img = img.copy()
time_remaining, mode = self._getTimeAndMode(img, debug_img)
if self._is_new_overlay or force_find_overlay:
self._match_key = None
match_key, match_name = self._getMatchKeyName(img, debug_img)
is_flipped = self._getFlipped(img, debug_img)
red_score, blue_score = self._getScores(img, debug_img, is_flipped)
box = self._cornersToBox(self._transformPoint((0, 0)), self._transformPoint((1280, 170)))
self._drawBox(debug_img, box, (255, 255, 0))
if self._debug:
cv2.imshow("Match Details", debug_img)
cv2.waitKey()
if match_key is not None and red_score is not None \
and blue_score is not None and time_remaining is not None:
return OngoingMatchDetails(
match_key=match_key,
match_name=match_name,
mode=mode,
time=time_remaining,
red=Alliance(
score=red_score,
),
blue=Alliance(
score=blue_score,
)
)
else:
return None
|
[
"cv2.circle",
"cv2.waitKey",
"cv2.imshow"
] |
[((3319, 3375), 'cv2.circle', 'cv2.circle', (['debug_img', 'review_point1', '(2)', '(0, 255, 0)', '(-1)'], {}), '(debug_img, review_point1, 2, (0, 255, 0), -1)\n', (3329, 3375), False, 'import cv2\n'), ((3388, 3444), 'cv2.circle', 'cv2.circle', (['debug_img', 'review_point2', '(2)', '(0, 255, 0)', '(-1)'], {}), '(debug_img, review_point2, 2, (0, 255, 0), -1)\n', (3398, 3444), False, 'import cv2\n'), ((3457, 3510), 'cv2.circle', 'cv2.circle', (['debug_img', 'mode_point', '(2)', '(0, 255, 0)', '(-1)'], {}), '(debug_img, mode_point, 2, (0, 255, 0), -1)\n', (3467, 3510), False, 'import cv2\n'), ((3523, 3577), 'cv2.circle', 'cv2.circle', (['debug_img', 'mode_point2', '(2)', '(0, 255, 0)', '(-1)'], {}), '(debug_img, mode_point2, 2, (0, 255, 0), -1)\n', (3533, 3577), False, 'import cv2\n'), ((3939, 3993), 'cv2.circle', 'cv2.circle', (['debug_img', 'color_point', '(2)', '(0, 255, 0)', '(-1)'], {}), '(debug_img, color_point, 2, (0, 255, 0), -1)\n', (3949, 3993), False, 'import cv2\n'), ((5788, 5826), 'cv2.imshow', 'cv2.imshow', (['"""Match Details"""', 'debug_img'], {}), "('Match Details', debug_img)\n", (5798, 5826), False, 'import cv2\n'), ((5839, 5852), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (5850, 5852), False, 'import cv2\n')]
|
"""Test API utilities."""
import json
from pytradfri.api.libcoap_api import APIFactory
from pytradfri.gateway import Gateway
def test_constructor_timeout_passed_to_subprocess(monkeypatch):
"""Test that original timeout is passed to subprocess."""
capture = {}
def capture_args(*args, **kwargs):
capture.update(kwargs)
return json.dumps([])
monkeypatch.setattr("subprocess.check_output", capture_args)
api = APIFactory("anything", timeout=20, psk="abc")
api.request(Gateway().get_devices())
assert capture["timeout"] == 20
def test_custom_timeout_passed_to_subprocess(monkeypatch):
"""Test that custom timeout is passed to subprocess."""
capture = {}
def capture_args(*args, **kwargs):
capture.update(kwargs)
return json.dumps([])
monkeypatch.setattr("subprocess.check_output", capture_args)
api = APIFactory("anything", psk="abc")
api.request(Gateway().get_devices(), timeout=1)
assert capture["timeout"] == 1
|
[
"json.dumps",
"pytradfri.api.libcoap_api.APIFactory",
"pytradfri.gateway.Gateway"
] |
[((449, 494), 'pytradfri.api.libcoap_api.APIFactory', 'APIFactory', (['"""anything"""'], {'timeout': '(20)', 'psk': '"""abc"""'}), "('anything', timeout=20, psk='abc')\n", (459, 494), False, 'from pytradfri.api.libcoap_api import APIFactory\n'), ((888, 921), 'pytradfri.api.libcoap_api.APIFactory', 'APIFactory', (['"""anything"""'], {'psk': '"""abc"""'}), "('anything', psk='abc')\n", (898, 921), False, 'from pytradfri.api.libcoap_api import APIFactory\n'), ((357, 371), 'json.dumps', 'json.dumps', (['[]'], {}), '([])\n', (367, 371), False, 'import json\n'), ((796, 810), 'json.dumps', 'json.dumps', (['[]'], {}), '([])\n', (806, 810), False, 'import json\n'), ((511, 520), 'pytradfri.gateway.Gateway', 'Gateway', ([], {}), '()\n', (518, 520), False, 'from pytradfri.gateway import Gateway\n'), ((938, 947), 'pytradfri.gateway.Gateway', 'Gateway', ([], {}), '()\n', (945, 947), False, 'from pytradfri.gateway import Gateway\n')]
|
# scrapes Townes van Zandt lyrics
# sample code so I don't have to remember all of this stuff
# the next time I want to source some verses
from bs4 import BeautifulSoup as soup
import requests
import string
punctuation_trans_table = str.maketrans("", "", string.punctuation)
def strip_punctuation(s):
return s.translate(punctuation_trans_table)
base_url = "http://ippc2.orst.edu/coopl/lyrics/"
index = requests.get(base_url + "albums.html")
parsed_index = soup(index.text)
all_links = parsed_index.find_all("a") # get all <a> tags
links = [l for l in all_links if l.text] # filter out image links
def to_filename(s, path="texts/townes_van_zandt/"):
'''Quick and dirty snake-casing'''
s = s.replace("&", "and") # special case, "Poncho & Lefty"
s = strip_punctuation(s)
s = s.lower()
s = s.replace(" ", "_")
s = path + s + ".txt"
return s
def process_link(link):
title = link.text
f = open(to_filename(title), "w")
remote_file = link.get("href")
song_file = requests.get(base_url + remote_file)
verses = [l for l in soup(song_file.text).find_all("font")
if l.get("size")]
for verse in verses:
if verse.text:
f.writelines("\n".join(verse.stripped_strings))
f.write("\n\n")
|
[
"bs4.BeautifulSoup",
"requests.get"
] |
[((413, 451), 'requests.get', 'requests.get', (["(base_url + 'albums.html')"], {}), "(base_url + 'albums.html')\n", (425, 451), False, 'import requests\n'), ((467, 483), 'bs4.BeautifulSoup', 'soup', (['index.text'], {}), '(index.text)\n', (471, 483), True, 'from bs4 import BeautifulSoup as soup\n'), ((1025, 1061), 'requests.get', 'requests.get', (['(base_url + remote_file)'], {}), '(base_url + remote_file)\n', (1037, 1061), False, 'import requests\n'), ((1087, 1107), 'bs4.BeautifulSoup', 'soup', (['song_file.text'], {}), '(song_file.text)\n', (1091, 1107), True, 'from bs4 import BeautifulSoup as soup\n')]
|
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""KE Version Rule Scanner Tests."""
import unittest
import mock
from tests import unittest_utils
from google.cloud.security.common.gcp_type import (
ke_cluster as ke_cluster_type)
from google.cloud.security.common.gcp_type import (
organization as organization_type)
from google.cloud.security.common.gcp_type import project as project_type
from google.cloud.security.scanner.scanners import ke_version_scanner
# pylint: disable=bad-indentation
class FakeProjectDao(object):
def get_project(self, project_id, snapshot_timestamp=0):
return project_type.Project(project_id=project_id)
class FakeOrgDao(object):
def find_ancestors(self, resource_id, snapshot_timestamp=0):
return [organization_type.Organization(organization_id=123456)]
class KeVersionScannerTest(unittest_utils.ForsetiTestCase):
def tearDown(self):
self.org_patcher.stop()
self.project_patcher.stop()
def setUp(self):
# patch the daos
self.org_patcher = mock.patch(
'google.cloud.security.common.data_access.'
'org_resource_rel_dao.OrgResourceRelDao')
self.mock_org_rel_dao = self.org_patcher.start()
self.mock_org_rel_dao.return_value = FakeOrgDao()
self.project_patcher = mock.patch(
'google.cloud.security.common.data_access.'
'project_dao.ProjectDao')
self.mock_project_dao = self.project_patcher.start()
self.mock_project_dao.return_value = FakeProjectDao()
self.server_config = {
'defaultClusterVersion': '1.7.11-gke.1',
'validNodeVersions': [
'1.8.6-gke.0',
'1.7.11-gke.1',
'1.7.10-gke.1',
'1.6.13-gke.1',
],
'defaultImageType': 'COS',
'validImageTypes': [
'UBUNTU',
'COS'
],
'validMasterVersions': [
'1.8.6-gke.0',
'1.7.11-gke.1'
]
}
self.ke_clusters = {
# The main backend service.
'master-version-invalid': ke_cluster_type.KeCluster.from_dict(
'foo', self.server_config,
{
'name': 'master-version-invalid',
'nodePools': [{
'name': 'default-pool',
'version': '1.6.13-gke.1'
}],
'initialClusterVersion': '1.6.13-gke.1',
'currentMasterVersion': '1.6.13-gke.1',
'currentNodeVersion': '1.6.13-gke.1'
}),
'node-version-invalid': ke_cluster_type.KeCluster.from_dict(
'foo', self.server_config,
{
'name': 'node-version-invalid',
'nodePools': [{
'name': 'default-pool',
'version': '1.8.4-gke.1'
}],
'initialClusterVersion': '1.8.4-gke.1',
'currentMasterVersion': '1.8.6-gke.0',
'currentNodeVersion': '1.8.4-gke.1'
}),
'node-version-not-allowed': ke_cluster_type.KeCluster.from_dict(
'foo', self.server_config,
{
'name': 'node-version-not-allowed',
'nodePools': [{
'name': 'default-pool',
'version': '1.7.10-gke.1'
}],
'initialClusterVersion': '1.7.10-gke.1',
'currentMasterVersion': '1.7.11-gke.1',
'currentNodeVersion': '1.7.10-gke.1'
}),
'multiple-node-pools': ke_cluster_type.KeCluster.from_dict(
'foo', self.server_config,
{
'name': 'multiple-node-pools',
'nodePools': [{
'name': 'default-pool',
'version': '1.7.11-gke.1'
}, {
'name': 'secondary-pool',
'version': '1.7.11-gke.1'
}],
'initialClusterVersion': '1.7.11-gke.1',
'currentMasterVersion': '1.7.11-gke.1',
'currentNodeVersion': '1.7.11-gke.1'
})
}
self.scanner = ke_version_scanner.KeVersionScanner(
{}, {}, '',
unittest_utils.get_datafile_path(
__file__, 'ke_version_scanner_test_data.yaml'))
self.scanner._retrieve = mock.Mock(
return_value=self.ke_clusters.values())
@mock.patch.object(
ke_version_scanner.KeVersionScanner,
'_output_results_to_db', autospec=True)
def test_run_scanner(self, mock_output_results):
self.scanner.run()
expected_violations = [
{'resource_id': 'node-version-not-allowed',
'resource_type': 'ke',
'rule_index': 2,
'rule_name': 'Disallowed node pool version',
'violation_data': {'cluster_name': 'node-version-not-allowed',
'node_pool_name': 'default-pool',
'project_id': 'foo',
'violation_reason': (
"Node pool version 1.7.10-gke.1 is not "
"allowed (['>= 1.6.13-gke.1', "
"'>= 1.7.11-gke.1', '>= 1.8.4-gke.1', "
"'>= 1.9.*']).")},
'violation_type': 'KE_VERSION_VIOLATION'},
{'resource_id': 'master-version-invalid',
'resource_type': 'ke',
'rule_index': 1,
'rule_name': 'Unsupported master version',
'violation_data': {'cluster_name': 'master-version-invalid',
'node_pool_name': '',
'project_id': 'foo',
'violation_reason': (
"Master version 1.6.13-gke.1 is not "
"supported (['1.7.11-gke.1', "
"'1.8.6-gke.0']).")},
'violation_type': 'KE_VERSION_VIOLATION'},
{'resource_id': 'node-version-invalid',
'resource_type': 'ke',
'rule_index': 0,
'rule_name': 'Unsupported node pool version',
'violation_data': {'cluster_name': 'node-version-invalid',
'node_pool_name': 'default-pool',
'project_id': 'foo',
'violation_reason': (
"Node pool version 1.8.4-gke.1 is not "
"supported (['1.6.13-gke.1', "
"'1.7.10-gke.1', '1.7.11-gke.1', "
"'1.8.6-gke.0']).")},
'violation_type': 'KE_VERSION_VIOLATION'}]
mock_output_results.assert_called_once_with(mock.ANY,
expected_violations)
if __name__ == '__main__':
unittest.main()
|
[
"mock.patch",
"tests.unittest_utils.get_datafile_path",
"google.cloud.security.common.gcp_type.organization.Organization",
"google.cloud.security.common.gcp_type.project.Project",
"mock.patch.object",
"google.cloud.security.common.gcp_type.ke_cluster.KeCluster.from_dict",
"unittest.main"
] |
[((5310, 5408), 'mock.patch.object', 'mock.patch.object', (['ke_version_scanner.KeVersionScanner', '"""_output_results_to_db"""'], {'autospec': '(True)'}), "(ke_version_scanner.KeVersionScanner,\n '_output_results_to_db', autospec=True)\n", (5327, 5408), False, 'import mock\n'), ((7879, 7894), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7892, 7894), False, 'import unittest\n'), ((1179, 1222), 'google.cloud.security.common.gcp_type.project.Project', 'project_type.Project', ([], {'project_id': 'project_id'}), '(project_id=project_id)\n', (1199, 1222), True, 'from google.cloud.security.common.gcp_type import project as project_type\n'), ((1618, 1721), 'mock.patch', 'mock.patch', (['"""google.cloud.security.common.data_access.org_resource_rel_dao.OrgResourceRelDao"""'], {}), "(\n 'google.cloud.security.common.data_access.org_resource_rel_dao.OrgResourceRelDao'\n )\n", (1628, 1721), False, 'import mock\n'), ((1887, 1964), 'mock.patch', 'mock.patch', (['"""google.cloud.security.common.data_access.project_dao.ProjectDao"""'], {}), "('google.cloud.security.common.data_access.project_dao.ProjectDao')\n", (1897, 1964), False, 'import mock\n'), ((1333, 1387), 'google.cloud.security.common.gcp_type.organization.Organization', 'organization_type.Organization', ([], {'organization_id': '(123456)'}), '(organization_id=123456)\n', (1363, 1387), True, 'from google.cloud.security.common.gcp_type import organization as organization_type\n'), ((2744, 3045), 'google.cloud.security.common.gcp_type.ke_cluster.KeCluster.from_dict', 'ke_cluster_type.KeCluster.from_dict', (['"""foo"""', 'self.server_config', "{'name': 'master-version-invalid', 'nodePools': [{'name': 'default-pool',\n 'version': '1.6.13-gke.1'}], 'initialClusterVersion': '1.6.13-gke.1',\n 'currentMasterVersion': '1.6.13-gke.1', 'currentNodeVersion':\n '1.6.13-gke.1'}"], {}), "('foo', self.server_config, {'name':\n 'master-version-invalid', 'nodePools': [{'name': 'default-pool',\n 'version': '1.6.13-gke.1'}], 'initialClusterVersion': '1.6.13-gke.1',\n 'currentMasterVersion': '1.6.13-gke.1', 'currentNodeVersion':\n '1.6.13-gke.1'})\n", (2779, 3045), True, 'from google.cloud.security.common.gcp_type import ke_cluster as ke_cluster_type\n'), ((3288, 3584), 'google.cloud.security.common.gcp_type.ke_cluster.KeCluster.from_dict', 'ke_cluster_type.KeCluster.from_dict', (['"""foo"""', 'self.server_config', "{'name': 'node-version-invalid', 'nodePools': [{'name': 'default-pool',\n 'version': '1.8.4-gke.1'}], 'initialClusterVersion': '1.8.4-gke.1',\n 'currentMasterVersion': '1.8.6-gke.0', 'currentNodeVersion': '1.8.4-gke.1'}"], {}), "('foo', self.server_config, {'name':\n 'node-version-invalid', 'nodePools': [{'name': 'default-pool',\n 'version': '1.8.4-gke.1'}], 'initialClusterVersion': '1.8.4-gke.1',\n 'currentMasterVersion': '1.8.6-gke.0', 'currentNodeVersion': '1.8.4-gke.1'}\n )\n", (3323, 3584), True, 'from google.cloud.security.common.gcp_type import ke_cluster as ke_cluster_type\n'), ((3830, 4133), 'google.cloud.security.common.gcp_type.ke_cluster.KeCluster.from_dict', 'ke_cluster_type.KeCluster.from_dict', (['"""foo"""', 'self.server_config', "{'name': 'node-version-not-allowed', 'nodePools': [{'name': 'default-pool',\n 'version': '1.7.10-gke.1'}], 'initialClusterVersion': '1.7.10-gke.1',\n 'currentMasterVersion': '1.7.11-gke.1', 'currentNodeVersion':\n '1.7.10-gke.1'}"], {}), "('foo', self.server_config, {'name':\n 'node-version-not-allowed', 'nodePools': [{'name': 'default-pool',\n 'version': '1.7.10-gke.1'}], 'initialClusterVersion': 
'1.7.10-gke.1',\n 'currentMasterVersion': '1.7.11-gke.1', 'currentNodeVersion':\n '1.7.10-gke.1'})\n", (3865, 4133), True, 'from google.cloud.security.common.gcp_type import ke_cluster as ke_cluster_type\n'), ((4375, 4728), 'google.cloud.security.common.gcp_type.ke_cluster.KeCluster.from_dict', 'ke_cluster_type.KeCluster.from_dict', (['"""foo"""', 'self.server_config', "{'name': 'multiple-node-pools', 'nodePools': [{'name': 'default-pool',\n 'version': '1.7.11-gke.1'}, {'name': 'secondary-pool', 'version':\n '1.7.11-gke.1'}], 'initialClusterVersion': '1.7.11-gke.1',\n 'currentMasterVersion': '1.7.11-gke.1', 'currentNodeVersion':\n '1.7.11-gke.1'}"], {}), "('foo', self.server_config, {'name':\n 'multiple-node-pools', 'nodePools': [{'name': 'default-pool', 'version':\n '1.7.11-gke.1'}, {'name': 'secondary-pool', 'version': '1.7.11-gke.1'}],\n 'initialClusterVersion': '1.7.11-gke.1', 'currentMasterVersion':\n '1.7.11-gke.1', 'currentNodeVersion': '1.7.11-gke.1'})\n", (4410, 4728), True, 'from google.cloud.security.common.gcp_type import ke_cluster as ke_cluster_type\n'), ((5110, 5189), 'tests.unittest_utils.get_datafile_path', 'unittest_utils.get_datafile_path', (['__file__', '"""ke_version_scanner_test_data.yaml"""'], {}), "(__file__, 'ke_version_scanner_test_data.yaml')\n", (5142, 5189), False, 'from tests import unittest_utils\n')]
|
import math
def lognormal_mean(m, stddev):
""" compute mean of log x with mean and std. of x
Args:
m: mean of x
stddev: standard deviation of x
Returns: mean of log x
"""
return math.log(m) - (0.5 * math.log(1.0 + (stddev * stddev) / (m * m)))
def lognormal_stddev(m, stddev):
""" compute std. of log x with mean and std. of x
Args:
m: mean of x
stddev: standard deviation of x
Returns: std. of log x
"""
return math.sqrt(math.log((stddev * stddev) / (m * m) + 1))
def lognormal_underlying_mean(m, stddev):
""" compute mean of x with mean and std of log x
Args:
m: mean of log x
stddev: std of log x
    Returns: mean of x
"""
# if m == 0 or stddev == 0:
# print '{}'.format('why ???')
# return 0
return math.exp(m + 0.5 * stddev * stddev)
def lognormal_underlying_stddev(m, stddev):
""" compute std of x with mean and std of log x
Args:
m: mean of log x
stddev: std of log x
Returns: std of x
"""
# if m == 0 or stddev == 0:
# print '{}'.format('strange why???')
# return 0
return math.sqrt((math.exp(stddev**2.0) - 1.0) *
math.exp(2.0*m + stddev**2.0))
#return lognormal_underlying_mean(m, stddev) * \
# math.sqrt((math.exp(stddev * stddev) - 1.0))
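# Round-trip sanity check of the four helpers above (not from the original
# source; the numbers are illustrative):
m_x, s_x = 10.0, 3.0
mu, sigma = lognormal_mean(m_x, s_x), lognormal_stddev(m_x, s_x)
print(mu, sigma)                               # ~2.2595, ~0.2936
print(lognormal_underlying_mean(mu, sigma))    # ~10.0
print(lognormal_underlying_stddev(mu, sigma))  # ~3.0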
|
[
"math.exp",
"math.log"
] |
[((873, 908), 'math.exp', 'math.exp', (['(m + 0.5 * stddev * stddev)'], {}), '(m + 0.5 * stddev * stddev)\n', (881, 908), False, 'import math\n'), ((230, 241), 'math.log', 'math.log', (['m'], {}), '(m)\n', (238, 241), False, 'import math\n'), ((528, 567), 'math.log', 'math.log', (['(stddev * stddev / (m * m) + 1)'], {}), '(stddev * stddev / (m * m) + 1)\n', (536, 567), False, 'import math\n'), ((251, 292), 'math.log', 'math.log', (['(1.0 + stddev * stddev / (m * m))'], {}), '(1.0 + stddev * stddev / (m * m))\n', (259, 292), False, 'import math\n'), ((1292, 1325), 'math.exp', 'math.exp', (['(2.0 * m + stddev ** 2.0)'], {}), '(2.0 * m + stddev ** 2.0)\n', (1300, 1325), False, 'import math\n'), ((1239, 1262), 'math.exp', 'math.exp', (['(stddev ** 2.0)'], {}), '(stddev ** 2.0)\n', (1247, 1262), False, 'import math\n')]
|
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' #To suppress warnings thrown by tensorflow
from time import sleep
import numpy as np
from cv2 import cv2
import pyautogui as pg
import Sudoku_Core as SC
import OCR
s = 513//9 #Size of one cell: board size (513 px) // 9
fs = 25 #Size of the final image
def getBoard():
pg.click(266, 740)
sleep(1)
pg.click(266, 930) #Changing the difficulty to expert
sleep(2)
image = pg.screenshot(region=(10, 187, 513, 513))
image = cv2.cvtColor(np.asarray(image), cv2.COLOR_RGB2GRAY)
_,image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV)
return image
def readBoard(image):
for i in range(9):
for j in range(9):
subImage = image[i*s + 3: (i+1)*s - 3, j*s + 3: (j+1)*s - 3] #(+3, -3) is a hack to remove border contours
contour, _ = cv2.findContours(subImage, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
if contour != []:
(x, y, w, h) = cv2.boundingRect(contour[0])
img = cv2.resize(subImage[y: y+h, x: x+w], (fs, fs), interpolation=cv2.INTER_AREA)
else:
img = np.zeros((fs,fs), dtype='uint8')
SC.board[i][j] = OCR.model.predict(img.reshape(1, fs, fs, 1)).argmax()
def outputBoard():
for ((posY, posX), v) in SC.moves.items():
posX = 42 + posX * 57
posY = 216 + posY * 57
pg.moveTo(posX, posY, 0.1)
pg.click()
# vX = 42 + 55*(v-1)
# vY = 843
# pg.moveTo(vX, vY, 0.1) #To use the numpad in the app
# pg.click()
pg.typewrite(str(v)) #To send numbers from the keyboard
def main():
image = getBoard()
readBoard(image)
print('Got the board, now solving')
if SC.solve(0, 0):
outputBoard()
else:
print('Couldn\'t solve')
input('Press any key to exit')
if __name__ == '__main__':
main()
|
[
"cv2.cv2.threshold",
"pyautogui.moveTo",
"pyautogui.screenshot",
"cv2.cv2.findContours",
"numpy.asarray",
"time.sleep",
"pyautogui.click",
"Sudoku_Core.solve",
"cv2.cv2.resize",
"numpy.zeros",
"Sudoku_Core.moves.items",
"cv2.cv2.boundingRect"
] |
[((298, 316), 'pyautogui.click', 'pg.click', (['(266)', '(740)'], {}), '(266, 740)\n', (306, 316), True, 'import pyautogui as pg\n'), ((318, 326), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (323, 326), False, 'from time import sleep\n'), ((328, 346), 'pyautogui.click', 'pg.click', (['(266)', '(930)'], {}), '(266, 930)\n', (336, 346), True, 'import pyautogui as pg\n'), ((383, 391), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (388, 391), False, 'from time import sleep\n'), ((402, 443), 'pyautogui.screenshot', 'pg.screenshot', ([], {'region': '(10, 187, 513, 513)'}), '(region=(10, 187, 513, 513))\n', (415, 443), True, 'import pyautogui as pg\n'), ((516, 569), 'cv2.cv2.threshold', 'cv2.threshold', (['image', '(127)', '(255)', 'cv2.THRESH_BINARY_INV'], {}), '(image, 127, 255, cv2.THRESH_BINARY_INV)\n', (529, 569), False, 'from cv2 import cv2\n'), ((1180, 1196), 'Sudoku_Core.moves.items', 'SC.moves.items', ([], {}), '()\n', (1194, 1196), True, 'import Sudoku_Core as SC\n'), ((1551, 1565), 'Sudoku_Core.solve', 'SC.solve', (['(0)', '(0)'], {}), '(0, 0)\n', (1559, 1565), True, 'import Sudoku_Core as SC\n'), ((466, 483), 'numpy.asarray', 'np.asarray', (['image'], {}), '(image)\n', (476, 483), True, 'import numpy as np\n'), ((1249, 1275), 'pyautogui.moveTo', 'pg.moveTo', (['posX', 'posY', '(0.1)'], {}), '(posX, posY, 0.1)\n', (1258, 1275), True, 'import pyautogui as pg\n'), ((1278, 1288), 'pyautogui.click', 'pg.click', ([], {}), '()\n', (1286, 1288), True, 'import pyautogui as pg\n'), ((776, 846), 'cv2.cv2.findContours', 'cv2.findContours', (['subImage', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(subImage, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n', (792, 846), False, 'from cv2 import cv2\n'), ((887, 915), 'cv2.cv2.boundingRect', 'cv2.boundingRect', (['contour[0]'], {}), '(contour[0])\n', (903, 915), False, 'from cv2 import cv2\n'), ((926, 1004), 'cv2.cv2.resize', 'cv2.resize', (['subImage[y:y + h, x:x + w]', '(fs, fs)'], {'interpolation': 'cv2.INTER_AREA'}), '(subImage[y:y + h, x:x + w], (fs, fs), interpolation=cv2.INTER_AREA)\n', (936, 1004), False, 'from cv2 import cv2\n'), ((1026, 1059), 'numpy.zeros', 'np.zeros', (['(fs, fs)'], {'dtype': '"""uint8"""'}), "((fs, fs), dtype='uint8')\n", (1034, 1059), True, 'import numpy as np\n')]
|
from app import db, login
from flask_login import UserMixin
from datetime import datetime
from flask import url_for, redirect
from werkzeug.security import generate_password_hash, check_password_hash
class users(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
username = db.Column(db.String(100), nullable=False, unique=True)
password = db.Column(db.String(96), nullable=False)
email = db.Column(db.String(128), nullable=False, unique=True)
firstname = db.Column(db.String(130), nullable=False)
lastname = db.Column(db.String(130), nullable=False)
lastLogin = db.Column(db.DateTime)
isActive = db.Column(db.Boolean)
isAdmin = db.Column(db.Boolean)
noteHighScore = db.Column(db.Integer)
KeyHighScore = db.Column(db.Integer)
submit = db.relationship("submission", backref="submitter")
###################################################
def __init__(self):
self.isActive = True
self.isAdmin = False
self.noteHighScore = 0
self.lastLogin = None
self.KeyHighScore = 0
def set_password(self, pwd):
self.password = generate_password_hash(pwd, method="<PASSWORD>")
def check_password(self, pwd):
return check_password_hash(self.password, pwd)
def is_active(self):
return self.isActive
def validate(self):
if self.username and self.email and self.firstname and self.lastname:
return True
else:
return False
def getSubmissions(self):
res = submission.query.filter_by(creater_id=self.id).all()
return res
def __repr__(self):
return '<user %r>' % self.username
class submission(db.Model):
__tablename__ = 'submission'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
createdAt = db.Column(db.DateTime, nullable=False)
markedAt = db.Column(db.DateTime)
feedback = db.Column(db.Boolean)
totalmark = db.Column(db.Integer)
difficulty = db.Column(db.String(30), nullable=False)
passed = db.Column(db.Boolean)
creater_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
answers = db.relationship("answer", backref="submission")
def __init__(self):
self.createdAt = datetime.utcnow()
self.markedAt = None
self.feedback = False
self.totalmark = None
self.marked = False
self.passed = False
def validate(self):
if self.difficulty and self.creater_id and self.createdAt:
return True
def __repr__(self):
return '<submission %r>' % self.id
class answer(db.Model):
__tablename__ = 'answer'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
answerSeq = db.Column(db.Integer)
submittedAnswer = db.Column(db.String(400))
feedback = db.Column(db.String(400))
markreceived = db.Column(db.Boolean)
submissionId = db.Column(db.Integer, db.ForeignKey("submission.id"))
def __init__(self):
self.feedback = None
self.markreceived = False
def validate(self):
if self.answerSeq and self.submittedAnswer and self.submissionId:
return True
else:
print("missingfield")
return False
def __repr__(self):
return '<ans>'
@login.user_loader
def load_user(usr_id):
return users.query.get(int(usr_id))
@login.unauthorized_handler
def unauthorized():
return redirect(url_for("auth.login"))
|
[
"datetime.datetime.utcnow",
"app.db.String",
"flask.url_for",
"werkzeug.security.generate_password_hash",
"app.db.Column",
"app.db.ForeignKey",
"app.db.relationship",
"werkzeug.security.check_password_hash"
] |
[((273, 332), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (282, 332), False, 'from app import db, login\n'), ((657, 679), 'app.db.Column', 'db.Column', (['db.DateTime'], {}), '(db.DateTime)\n', (666, 679), False, 'from app import db, login\n'), ((695, 716), 'app.db.Column', 'db.Column', (['db.Boolean'], {}), '(db.Boolean)\n', (704, 716), False, 'from app import db, login\n'), ((731, 752), 'app.db.Column', 'db.Column', (['db.Boolean'], {}), '(db.Boolean)\n', (740, 752), False, 'from app import db, login\n'), ((773, 794), 'app.db.Column', 'db.Column', (['db.Integer'], {}), '(db.Integer)\n', (782, 794), False, 'from app import db, login\n'), ((814, 835), 'app.db.Column', 'db.Column', (['db.Integer'], {}), '(db.Integer)\n', (823, 835), False, 'from app import db, login\n'), ((849, 899), 'app.db.relationship', 'db.relationship', (['"""submission"""'], {'backref': '"""submitter"""'}), "('submission', backref='submitter')\n", (864, 899), False, 'from app import db, login\n'), ((1821, 1880), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (1830, 1880), False, 'from app import db, login\n'), ((1897, 1935), 'app.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)'}), '(db.DateTime, nullable=False)\n', (1906, 1935), False, 'from app import db, login\n'), ((1951, 1973), 'app.db.Column', 'db.Column', (['db.DateTime'], {}), '(db.DateTime)\n', (1960, 1973), False, 'from app import db, login\n'), ((1989, 2010), 'app.db.Column', 'db.Column', (['db.Boolean'], {}), '(db.Boolean)\n', (1998, 2010), False, 'from app import db, login\n'), ((2027, 2048), 'app.db.Column', 'db.Column', (['db.Integer'], {}), '(db.Integer)\n', (2036, 2048), False, 'from app import db, login\n'), ((2120, 2141), 'app.db.Column', 'db.Column', (['db.Boolean'], {}), '(db.Boolean)\n', (2129, 2141), False, 'from app import db, login\n'), ((2238, 2285), 'app.db.relationship', 'db.relationship', (['"""answer"""'], {'backref': '"""submission"""'}), "('answer', backref='submission')\n", (2253, 2285), False, 'from app import db, login\n'), ((2764, 2823), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (2773, 2823), False, 'from app import db, login\n'), ((2840, 2861), 'app.db.Column', 'db.Column', (['db.Integer'], {}), '(db.Integer)\n', (2849, 2861), False, 'from app import db, login\n'), ((2970, 2991), 'app.db.Column', 'db.Column', (['db.Boolean'], {}), '(db.Boolean)\n', (2979, 2991), False, 'from app import db, login\n'), ((358, 372), 'app.db.String', 'db.String', (['(100)'], {}), '(100)\n', (367, 372), False, 'from app import db, login\n'), ((428, 441), 'app.db.String', 'db.String', (['(96)'], {}), '(96)\n', (437, 441), False, 'from app import db, login\n'), ((481, 495), 'app.db.String', 'db.String', (['(128)'], {}), '(128)\n', (490, 495), False, 'from app import db, login\n'), ((552, 566), 'app.db.String', 'db.String', (['(130)'], {}), '(130)\n', (561, 566), False, 'from app import db, login\n'), ((609, 623), 'app.db.String', 'db.String', (['(130)'], {}), '(130)\n', (618, 623), False, 'from app import db, login\n'), ((1197, 1245), 'werkzeug.security.generate_password_hash', 'generate_password_hash', (['pwd'], {'method': '"""<PASSWORD>"""'}), "(pwd, method='<PASSWORD>')\n", (1219, 1245), False, 'from 
werkzeug.security import generate_password_hash, check_password_hash\n'), ((1297, 1336), 'werkzeug.security.check_password_hash', 'check_password_hash', (['self.password', 'pwd'], {}), '(self.password, pwd)\n', (1316, 1336), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((2076, 2089), 'app.db.String', 'db.String', (['(30)'], {}), '(30)\n', (2085, 2089), False, 'from app import db, login\n'), ((2181, 2206), 'app.db.ForeignKey', 'db.ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (2194, 2206), False, 'from app import db, login\n'), ((2336, 2353), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2351, 2353), False, 'from datetime import datetime\n'), ((2894, 2908), 'app.db.String', 'db.String', (['(400)'], {}), '(400)\n', (2903, 2908), False, 'from app import db, login\n'), ((2935, 2949), 'app.db.String', 'db.String', (['(400)'], {}), '(400)\n', (2944, 2949), False, 'from app import db, login\n'), ((3033, 3063), 'app.db.ForeignKey', 'db.ForeignKey', (['"""submission.id"""'], {}), "('submission.id')\n", (3046, 3063), False, 'from app import db, login\n'), ((3550, 3571), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (3557, 3571), False, 'from flask import url_for, redirect\n')]
|
# AUTOGENERATED! DO NOT EDIT! File to edit: source_nbs/12_top.ipynb (unless otherwise specified).
__all__ = ['empty_tensor_handling_loss', 'nan_loss_handling', 'create_dummy_if_empty', 'BaseTop', 'SequenceLabel',
'Classification', 'PreTrain', 'Seq2Seq', 'MultiLabelClassification', 'MaskLM']
# Cell
import logging
from functools import partial
from typing import Dict, Tuple, Union
import tensorflow as tf
import tensorflow_addons as tfa
import transformers
from transformers.modeling_tf_utils import TFSharedEmbeddings
from tensorflow_addons.layers.crf import CRF
from tensorflow_addons.text.crf import crf_log_likelihood
from .params import BaseParams
from .utils import gather_indexes
@tf.function
def empty_tensor_handling_loss(labels, logits, loss_fn):
if tf.equal(tf.size(labels), 0):
return 0.0
if tf.equal(tf.size(tf.shape(labels)), 0):
return 0.0
if tf.equal(tf.shape(labels)[0], 0):
return 0.0
else:
return tf.reduce_mean(loss_fn(
labels, logits, from_logits=True))
@tf.function
def nan_loss_handling(loss):
if tf.math.is_nan(loss):
return 0.0
else:
return loss
@tf.function
def create_dummy_if_empty(inp_tensor: tf.Tensor) -> tf.Tensor:
shape_tensor = tf.shape(inp_tensor)
if tf.equal(shape_tensor[0], 0):
data_type = inp_tensor.dtype
dummy_shape_first_dim = tf.convert_to_tensor([1], dtype=tf.int32)
dummy_shape = tf.concat(
[dummy_shape_first_dim, shape_tensor[1:]], axis=0)
dummy_tensor = tf.zeros(dummy_shape, dtype=data_type)
return dummy_tensor
else:
return inp_tensor
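# A brief illustration of the guard above (not from the original source): an
# empty first dimension is replaced by a single all-zero row so downstream ops
# such as the CRF never see a zero-length batch.
#
#   create_dummy_if_empty(tf.zeros([0, 5]))  # -> zeros of shape (1, 5)
#   create_dummy_if_empty(tf.ones([2, 5]))   # -> unchanged, shape (2, 5)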
class BaseTop(tf.keras.Model):
def __init__(self, params: BaseParams, problem_name: str) -> None:
super(BaseTop, self).__init__(name=problem_name)
self.params = params
self.problem_name = problem_name
def call(self, inputs: Tuple[Dict], mode: str):
raise NotImplementedError
# Cell
class SequenceLabel(tf.keras.Model):
def __init__(self, params: BaseParams, problem_name: str):
super(SequenceLabel, self).__init__(name=problem_name)
self.params = params
self.problem_name = problem_name
num_classes = self.params.num_classes[self.problem_name]
self.dense = tf.keras.layers.Dense(num_classes, activation=None)
self.dropout = tf.keras.layers.Dropout(1-params.dropout_keep_prob)
if self.params.crf:
self.crf = CRF(num_classes)
self.metric_fn = tf.keras.metrics.Accuracy(
name='{}_acc'.format(self.problem_name)
)
else:
self.metric_fn = tf.keras.metrics.SparseCategoricalAccuracy(
name='{}_acc'.format(self.problem_name))
def return_crf_result(self, labels: tf.Tensor, logits: tf.Tensor, mode: str, input_mask: tf.Tensor):
input_mask.set_shape([None, None])
logits = create_dummy_if_empty(logits)
input_mask = create_dummy_if_empty(input_mask)
viterbi_decoded, potentials, sequence_length, chain_kernel = self.crf(
logits, input_mask)
if mode != tf.estimator.ModeKeys.PREDICT:
loss = -crf_log_likelihood(potentials,
labels, sequence_length, chain_kernel)[0]
loss = tf.reduce_mean(loss)
loss = nan_loss_handling(loss)
self.add_loss(loss)
acc = self.metric_fn(
labels, viterbi_decoded, sample_weight=input_mask)
self.add_metric(acc)
# make the crf prediction has the same shape as non-crf prediction
return tf.one_hot(viterbi_decoded, name='%s_predict' % self.problem_name, depth=self.params.num_classes[self.problem_name])
def call(self, inputs, mode):
training = (mode == tf.estimator.ModeKeys.TRAIN)
feature, hidden_feature = inputs
hidden_feature = hidden_feature['seq']
if mode != tf.estimator.ModeKeys.PREDICT:
labels = feature['{}_label_ids'.format(self.problem_name)]
            # sometimes the length of labels does not equal the length of inputs
# that's caused by tf.data.experimental.bucket_by_sequence_length in multi problem scenario
pad_len = tf.shape(input=hidden_feature)[
1] - tf.shape(input=labels)[1]
# top, bottom, left, right
pad_tensor = [[0, 0], [0, pad_len]]
labels = tf.pad(tensor=labels, paddings=pad_tensor)
else:
labels = None
hidden_feature = self.dropout(hidden_feature, training)
if self.params.crf:
return self.return_crf_result(labels, hidden_feature, mode, feature['model_input_mask'])
logits = self.dense(hidden_feature)
if mode != tf.estimator.ModeKeys.PREDICT:
loss = empty_tensor_handling_loss(
labels, logits,
tf.keras.losses.sparse_categorical_crossentropy)
self.add_loss(loss)
acc = self.metric_fn(
labels, logits, sample_weight=feature['model_input_mask'])
self.add_metric(acc)
return tf.nn.softmax(
logits, name='%s_predict' % self.problem_name)
# Cell
class Classification(tf.keras.layers.Layer):
def __init__(self, params: BaseParams, problem_name: str) -> None:
super(Classification, self).__init__(name=problem_name)
self.params = params
self.problem_name = problem_name
num_classes = self.params.num_classes[self.problem_name]
self.dense = tf.keras.layers.Dense(num_classes, activation=None)
self.metric_fn = tf.keras.metrics.SparseCategoricalAccuracy(
name='{}_acc'.format(self.problem_name))
self.dropout = tf.keras.layers.Dropout(1-params.dropout_keep_prob)
def call(self, inputs, mode):
training = (mode == tf.estimator.ModeKeys.TRAIN)
feature, hidden_feature = inputs
hidden_feature = hidden_feature['pooled']
if mode != tf.estimator.ModeKeys.PREDICT:
labels = feature['{}_label_ids'.format(self.problem_name)]
else:
labels = None
hidden_feature = self.dropout(hidden_feature, training)
logits = self.dense(hidden_feature)
if mode != tf.estimator.ModeKeys.PREDICT:
# labels = tf.squeeze(labels)
# convert labels to one-hot to use label_smoothing
one_hot_labels = tf.one_hot(
labels, depth=self.params.num_classes[self.problem_name])
loss_fn = partial(tf.keras.losses.categorical_crossentropy,
from_logits=True, label_smoothing=self.params.label_smoothing)
loss = empty_tensor_handling_loss(
one_hot_labels, logits,
loss_fn)
loss = nan_loss_handling(loss)
self.add_loss(loss)
acc = self.metric_fn(labels, logits)
self.add_metric(acc)
return tf.nn.softmax(
logits, name='%s_predict' % self.problem_name)
# Cell
class PreTrain(tf.keras.Model):
def __init__(self, params: BaseParams, problem_name: str, input_embeddings: tf.Tensor=None, share_embedding=True):
super(PreTrain, self).__init__(name=problem_name)
self.params = params
self.nsp = transformers.models.bert.modeling_tf_bert.TFBertNSPHead(
self.params.bert_config)
if share_embedding is False:
self.vocab_size = self.params.bert_config.vocab_size
self.share_embedding = False
else:
word_embedding_weight = input_embeddings.word_embeddings
self.vocab_size = word_embedding_weight.shape[0]
embedding_size = word_embedding_weight.shape[-1]
share_valid = (self.params.bert_config.hidden_size ==
embedding_size)
if not share_valid and self.params.share_embedding:
logging.warning(
'Share embedding is enabled but hidden_size != embedding_size')
self.share_embedding = self.params.share_embedding & share_valid
if self.share_embedding:
self.share_embedding_layer = TFSharedEmbeddings(
vocab_size=word_embedding_weight.shape[0], hidden_size=word_embedding_weight.shape[1])
self.share_embedding_layer.build([1])
self.share_embedding_layer.weight = word_embedding_weight
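            # weight tying: MLM logits are computed against the encoder's
            # word-embedding matrix (applied in 'linear' mode in call())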
else:
self.share_embedding_layer = tf.keras.layers.Dense(self.vocab_size)
def call(self,
inputs: Tuple[Dict[str, Dict[str, tf.Tensor]], Dict[str, Dict[str, tf.Tensor]]],
mode: str) -> Tuple[tf.Tensor, tf.Tensor]:
features, hidden_features = inputs
# compute logits
nsp_logits = self.nsp(hidden_features['pooled'])
# masking is done inside the model
seq_hidden_feature = hidden_features['seq']
if mode != tf.estimator.ModeKeys.PREDICT:
positions = features['masked_lm_positions']
            # gather_indexes flattens the seq hidden_states, so we need to
            # reshape back to a 3-D tensor
input_tensor = gather_indexes(seq_hidden_feature, positions)
shape_tensor = tf.shape(positions)
shape_list = tf.concat(
[shape_tensor, [seq_hidden_feature.shape.as_list()[-1]]], axis=0)
input_tensor = tf.reshape(input_tensor, shape=shape_list)
            # set_shape to determine the rank
input_tensor.set_shape(
[None, None, seq_hidden_feature.shape.as_list()[-1]])
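            # input_tensor now has shape [batch, num_masked_positions, hidden_size]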
else:
input_tensor = seq_hidden_feature
if self.share_embedding:
mlm_logits = self.share_embedding_layer(
input_tensor, mode='linear')
else:
mlm_logits = self.share_embedding_layer(input_tensor)
if mode != tf.estimator.ModeKeys.PREDICT:
nsp_labels = features['next_sentence_label_ids']
mlm_labels = features['masked_lm_ids']
mlm_labels.set_shape([None, None])
# compute loss
nsp_loss = empty_tensor_handling_loss(
nsp_labels, nsp_logits,
tf.keras.losses.sparse_categorical_crossentropy)
mlm_loss_layer = transformers.modeling_tf_utils.TFMaskedLanguageModelingLoss()
# mlm_loss = tf.reduce_mean(
# mlm_loss_layer.compute_loss(mlm_labels, mlm_logits))
# add a useless from_logits argument to match the function signature of keras losses.
def loss_fn_wrapper(labels, logits, from_logits=True):
return mlm_loss_layer.compute_loss(labels, logits)
mlm_loss = empty_tensor_handling_loss(
mlm_labels,
mlm_logits,
loss_fn_wrapper
)
loss = nsp_loss + mlm_loss
self.add_loss(loss)
return (tf.sigmoid(nsp_logits), tf.nn.softmax(mlm_logits))
# Cell
class Seq2Seq(tf.keras.Model):
def __init__(self, params: BaseParams, problem_name: str, input_embeddings: tf.keras.layers.Layer):
super(Seq2Seq, self).__init__(name=problem_name)
# self.params = params
# self.problem_name = problem_name
# # if self.params.init_weight_from_huggingface:
# # self.decoder = load_transformer_model(
# # self.params.transformer_decoder_model_name,
# # self.params.transformer_decoder_model_loading)
# # else:
# # self.decoder = load_transformer_model(
# # self.params.bert_decoder_config, self.params.transformer_decoder_model_loading)
# # TODO: better implementation
# logging.warning(
# 'Seq2Seq model is not well supported yet. Bugs are expected.')
# config = self.params.bert_decoder_config
# # some hacky approach to share embeddings from encoder to decoder
# word_embedding_weight = input_embeddings.word_embeddings
# self.vocab_size = word_embedding_weight.shape[0]
# self.share_embedding_layer = TFSharedEmbeddings(
# vocab_size=word_embedding_weight.shape[0], hidden_size=word_embedding_weight.shape[1])
# self.share_embedding_layer.build([1])
# self.share_embedding_layer.weight = word_embedding_weight
# # self.decoder = TFBartDecoder(
# # config=config, embed_tokens=self.share_embedding_layer)
# self.decoder = TFBartDecoderForConditionalGeneration(
# config=config, embedding_layer=self.share_embedding_layer)
# self.decoder.set_bos_id(self.params.bos_id)
# self.decoder.set_eos_id(self.params.eos_id)
# self.metric_fn = tf.keras.metrics.SparseCategoricalAccuracy(
# name='{}_acc'.format(self.problem_name))
raise NotImplementedError
def _seq2seq_label_shift_right(self, labels: tf.Tensor, eos_id: int) -> tf.Tensor:
batch_eos_ids = tf.fill([tf.shape(labels)[0], 1], eos_id)
batch_eos_ids = tf.cast(batch_eos_ids, dtype=tf.int64)
        decoder_label = labels[:, 1:]
        decoder_label = tf.concat([decoder_label, batch_eos_ids], axis=1)
        return decoder_label
def call(self,
inputs: Tuple[Dict[str, Dict[str, tf.Tensor]], Dict[str, Dict[str, tf.Tensor]]],
mode: str):
features, hidden_features = inputs
encoder_mask = features['model_input_mask']
if mode == tf.estimator.ModeKeys.PREDICT:
input_ids = None
decoder_padding_mask = None
else:
input_ids = features['%s_label_ids' % self.problem_name]
decoder_padding_mask = features['{}_mask'.format(
self.problem_name)]
if mode == tf.estimator.ModeKeys.PREDICT:
return self.decoder.generate(eos_token_id=self.params.eos_id, encoder_hidden_states=hidden_features['seq'])
else:
decoder_output = self.decoder(input_ids=input_ids,
encoder_hidden_states=hidden_features['seq'],
encoder_padding_mask=encoder_mask,
decoder_padding_mask=decoder_padding_mask,
decode_max_length=self.params.decode_max_seq_len,
mode=mode)
loss = decoder_output.loss
logits = decoder_output.logits
self.add_loss(loss)
decoder_label = self._seq2seq_label_shift_right(
features['%s_label_ids' % self.problem_name], eos_id=self.params.eos_id)
acc = self.metric_fn(decoder_label, logits)
self.add_metric(acc)
return logits
# Cell
class MultiLabelClassification(tf.keras.Model):
def __init__(self, params: BaseParams, problem_name: str) -> None:
super(MultiLabelClassification, self).__init__(name=problem_name)
self.params = params
self.problem_name = problem_name
self.dense = tf.keras.layers.Dense(
self.params.num_classes[problem_name])
self.dropout = tf.keras.layers.Dropout(
1-self.params.dropout_keep_prob
)
# self.metric_fn = tfa.metrics.F1Score(
# num_classes=self.params.num_classes[problem_name],
# threshold=self.params.multi_cls_threshold,
# average='macro',
# name='{}_f1'.format(problem_name))
def call(self, inputs, mode):
training = (mode == tf.estimator.ModeKeys.TRAIN)
feature, hidden_feature = inputs
hidden_feature = hidden_feature['pooled']
if mode != tf.estimator.ModeKeys.PREDICT:
labels = feature['{}_label_ids'.format(self.problem_name)]
else:
labels = None
hidden_feature = self.dropout(hidden_feature, training)
logits = self.dense(hidden_feature)
if mode != tf.estimator.ModeKeys.PREDICT:
labels = tf.cast(labels, tf.float32)
# use weighted loss
label_weights = self.params.multi_cls_positive_weight
def _loss_fn_wrapper(x, y, from_logits=True):
return tf.nn.weighted_cross_entropy_with_logits(x, y, pos_weight=label_weights, name='{}_loss'.format(self.problem_name))
loss = empty_tensor_handling_loss(
labels, logits, _loss_fn_wrapper)
loss = nan_loss_handling(loss)
self.add_loss(loss)
# labels = create_dummy_if_empty(labels)
# logits = create_dummy_if_empty(logits)
# f1 = self.metric_fn(labels, logits)
# self.add_metric(f1)
return tf.nn.sigmoid(
logits, name='%s_predict' % self.problem_name)
# Cell
class MaskLM(tf.keras.Model):
"""Multimodal MLM top layer.
"""
def __init__(self, params: BaseParams, problem_name: str, input_embeddings: tf.keras.layers.Layer=None, share_embedding=True) -> None:
super(MaskLM, self).__init__(name=problem_name)
self.params = params
self.problem_name = problem_name
if share_embedding is False:
self.vocab_size = self.params.bert_config.vocab_size
self.share_embedding = False
else:
word_embedding_weight = input_embeddings.word_embeddings
self.vocab_size = word_embedding_weight.shape[0]
embedding_size = word_embedding_weight.shape[-1]
share_valid = (self.params.bert_config.hidden_size ==
embedding_size)
if not share_valid and self.params.share_embedding:
logging.warning(
'Share embedding is enabled but hidden_size != embedding_size')
self.share_embedding = self.params.share_embedding & share_valid
if self.share_embedding:
self.share_embedding_layer = TFSharedEmbeddings(
vocab_size=self.vocab_size, hidden_size=word_embedding_weight.shape[1])
self.share_embedding_layer.build([1])
self.share_embedding_layer.weight = word_embedding_weight
else:
self.share_embedding_layer = tf.keras.layers.Dense(self.vocab_size)
def call(self, inputs, mode):
features, hidden_features = inputs
# masking is done inside the model
seq_hidden_feature = hidden_features['seq']
if mode != tf.estimator.ModeKeys.PREDICT:
positions = features['masked_lm_positions']
            # gather_indexes flattens the seq hidden_states, so we need to
            # reshape back to a 3-D tensor
input_tensor = gather_indexes(seq_hidden_feature, positions)
shape_tensor = tf.shape(positions)
shape_list = tf.concat([shape_tensor, [seq_hidden_feature.shape.as_list()[-1]]], axis=0)
input_tensor = tf.reshape(input_tensor, shape=shape_list)
            # set_shape to determine the rank
input_tensor.set_shape(
[None, None, seq_hidden_feature.shape.as_list()[-1]])
else:
input_tensor = seq_hidden_feature
if self.share_embedding:
mlm_logits = self.share_embedding_layer(
input_tensor, mode='linear')
else:
mlm_logits = self.share_embedding_layer(input_tensor)
if mode != tf.estimator.ModeKeys.PREDICT:
mlm_labels = features['masked_lm_ids']
mlm_labels.set_shape([None, None])
# compute loss
mlm_loss = empty_tensor_handling_loss(
mlm_labels,
mlm_logits,
tf.keras.losses.sparse_categorical_crossentropy
)
loss = nan_loss_handling(mlm_loss)
self.add_loss(loss)
return tf.nn.softmax(mlm_logits)
|
[
"tensorflow.equal",
"tensorflow.shape",
"tensorflow.pad",
"tensorflow.keras.layers.Dense",
"tensorflow.nn.softmax",
"tensorflow.reduce_mean",
"tensorflow.cast",
"tensorflow_addons.layers.crf.CRF",
"tensorflow.concat",
"tensorflow.nn.sigmoid",
"transformers.modeling_tf_utils.TFMaskedLanguageModelingLoss",
"tensorflow.convert_to_tensor",
"tensorflow.size",
"tensorflow.zeros",
"tensorflow.one_hot",
"transformers.models.bert.modeling_tf_bert.TFBertNSPHead",
"tensorflow.keras.layers.Dropout",
"logging.warning",
"tensorflow.sigmoid",
"tensorflow.reshape",
"tensorflow_addons.text.crf.crf_log_likelihood",
"functools.partial",
"tensorflow.math.is_nan",
"transformers.modeling_tf_utils.TFSharedEmbeddings"
] |
[((1106, 1126), 'tensorflow.math.is_nan', 'tf.math.is_nan', (['loss'], {}), '(loss)\n', (1120, 1126), True, 'import tensorflow as tf\n'), ((1274, 1294), 'tensorflow.shape', 'tf.shape', (['inp_tensor'], {}), '(inp_tensor)\n', (1282, 1294), True, 'import tensorflow as tf\n'), ((1302, 1330), 'tensorflow.equal', 'tf.equal', (['shape_tensor[0]', '(0)'], {}), '(shape_tensor[0], 0)\n', (1310, 1330), True, 'import tensorflow as tf\n'), ((793, 808), 'tensorflow.size', 'tf.size', (['labels'], {}), '(labels)\n', (800, 808), True, 'import tensorflow as tf\n'), ((1401, 1442), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['[1]'], {'dtype': 'tf.int32'}), '([1], dtype=tf.int32)\n', (1421, 1442), True, 'import tensorflow as tf\n'), ((1465, 1525), 'tensorflow.concat', 'tf.concat', (['[dummy_shape_first_dim, shape_tensor[1:]]'], {'axis': '(0)'}), '([dummy_shape_first_dim, shape_tensor[1:]], axis=0)\n', (1474, 1525), True, 'import tensorflow as tf\n'), ((1562, 1600), 'tensorflow.zeros', 'tf.zeros', (['dummy_shape'], {'dtype': 'data_type'}), '(dummy_shape, dtype=data_type)\n', (1570, 1600), True, 'import tensorflow as tf\n'), ((2311, 2362), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['num_classes'], {'activation': 'None'}), '(num_classes, activation=None)\n', (2332, 2362), True, 'import tensorflow as tf\n'), ((2387, 2440), 'tensorflow.keras.layers.Dropout', 'tf.keras.layers.Dropout', (['(1 - params.dropout_keep_prob)'], {}), '(1 - params.dropout_keep_prob)\n', (2410, 2440), True, 'import tensorflow as tf\n'), ((3662, 3783), 'tensorflow.one_hot', 'tf.one_hot', (['viterbi_decoded'], {'name': "('%s_predict' % self.problem_name)", 'depth': 'self.params.num_classes[self.problem_name]'}), "(viterbi_decoded, name='%s_predict' % self.problem_name, depth=\n self.params.num_classes[self.problem_name])\n", (3672, 3783), True, 'import tensorflow as tf\n'), ((5181, 5241), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {'name': "('%s_predict' % self.problem_name)"}), "(logits, name='%s_predict' % self.problem_name)\n", (5194, 5241), True, 'import tensorflow as tf\n'), ((5601, 5652), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['num_classes'], {'activation': 'None'}), '(num_classes, activation=None)\n', (5622, 5652), True, 'import tensorflow as tf\n'), ((5799, 5852), 'tensorflow.keras.layers.Dropout', 'tf.keras.layers.Dropout', (['(1 - params.dropout_keep_prob)'], {}), '(1 - params.dropout_keep_prob)\n', (5822, 5852), True, 'import tensorflow as tf\n'), ((7024, 7084), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {'name': "('%s_predict' % self.problem_name)"}), "(logits, name='%s_predict' % self.problem_name)\n", (7037, 7084), True, 'import tensorflow as tf\n'), ((7365, 7450), 'transformers.models.bert.modeling_tf_bert.TFBertNSPHead', 'transformers.models.bert.modeling_tf_bert.TFBertNSPHead', (['self.params.bert_config'], {}), '(self.params.bert_config\n )\n', (7420, 7450), False, 'import transformers\n'), ((13110, 13148), 'tensorflow.cast', 'tf.cast', (['batch_eos_ids'], {'dtype': 'tf.int64'}), '(batch_eos_ids, dtype=tf.int64)\n', (13117, 13148), True, 'import tensorflow as tf\n'), ((13211, 13260), 'tensorflow.concat', 'tf.concat', (['[decoder_lable, batch_eos_ids]'], {'axis': '(1)'}), '([decoder_lable, batch_eos_ids], axis=1)\n', (13220, 13260), True, 'import tensorflow as tf\n'), ((15141, 15201), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['self.params.num_classes[problem_name]'], {}), '(self.params.num_classes[problem_name])\n', (15162, 15201), 
True, 'import tensorflow as tf\n'), ((15238, 15296), 'tensorflow.keras.layers.Dropout', 'tf.keras.layers.Dropout', (['(1 - self.params.dropout_keep_prob)'], {}), '(1 - self.params.dropout_keep_prob)\n', (15261, 15296), True, 'import tensorflow as tf\n'), ((16792, 16852), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['logits'], {'name': "('%s_predict' % self.problem_name)"}), "(logits, name='%s_predict' % self.problem_name)\n", (16805, 16852), True, 'import tensorflow as tf\n'), ((19885, 19910), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['mlm_logits'], {}), '(mlm_logits)\n', (19898, 19910), True, 'import tensorflow as tf\n'), ((857, 873), 'tensorflow.shape', 'tf.shape', (['labels'], {}), '(labels)\n', (865, 873), True, 'import tensorflow as tf\n'), ((915, 931), 'tensorflow.shape', 'tf.shape', (['labels'], {}), '(labels)\n', (923, 931), True, 'import tensorflow as tf\n'), ((2491, 2507), 'tensorflow_addons.layers.crf.CRF', 'CRF', (['num_classes'], {}), '(num_classes)\n', (2494, 2507), False, 'from tensorflow_addons.layers.crf import CRF\n'), ((3341, 3361), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['loss'], {}), '(loss)\n', (3355, 3361), True, 'import tensorflow as tf\n'), ((4474, 4516), 'tensorflow.pad', 'tf.pad', ([], {'tensor': 'labels', 'paddings': 'pad_tensor'}), '(tensor=labels, paddings=pad_tensor)\n', (4480, 4516), True, 'import tensorflow as tf\n'), ((6488, 6556), 'tensorflow.one_hot', 'tf.one_hot', (['labels'], {'depth': 'self.params.num_classes[self.problem_name]'}), '(labels, depth=self.params.num_classes[self.problem_name])\n', (6498, 6556), True, 'import tensorflow as tf\n'), ((6596, 6712), 'functools.partial', 'partial', (['tf.keras.losses.categorical_crossentropy'], {'from_logits': '(True)', 'label_smoothing': 'self.params.label_smoothing'}), '(tf.keras.losses.categorical_crossentropy, from_logits=True,\n label_smoothing=self.params.label_smoothing)\n', (6603, 6712), False, 'from functools import partial\n'), ((8247, 8357), 'transformers.modeling_tf_utils.TFSharedEmbeddings', 'TFSharedEmbeddings', ([], {'vocab_size': 'word_embedding_weight.shape[0]', 'hidden_size': 'word_embedding_weight.shape[1]'}), '(vocab_size=word_embedding_weight.shape[0], hidden_size=\n word_embedding_weight.shape[1])\n', (8265, 8357), False, 'from transformers.modeling_tf_utils import TFSharedEmbeddings\n'), ((8545, 8583), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['self.vocab_size'], {}), '(self.vocab_size)\n', (8566, 8583), True, 'import tensorflow as tf\n'), ((9299, 9318), 'tensorflow.shape', 'tf.shape', (['positions'], {}), '(positions)\n', (9307, 9318), True, 'import tensorflow as tf\n'), ((9464, 9506), 'tensorflow.reshape', 'tf.reshape', (['input_tensor'], {'shape': 'shape_list'}), '(input_tensor, shape=shape_list)\n', (9474, 9506), True, 'import tensorflow as tf\n'), ((10347, 10408), 'transformers.modeling_tf_utils.TFMaskedLanguageModelingLoss', 'transformers.modeling_tf_utils.TFMaskedLanguageModelingLoss', ([], {}), '()\n', (10406, 10408), False, 'import transformers\n'), ((10995, 11017), 'tensorflow.sigmoid', 'tf.sigmoid', (['nsp_logits'], {}), '(nsp_logits)\n', (11005, 11017), True, 'import tensorflow as tf\n'), ((11019, 11044), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['mlm_logits'], {}), '(mlm_logits)\n', (11032, 11044), True, 'import tensorflow as tf\n'), ((16091, 16118), 'tensorflow.cast', 'tf.cast', (['labels', 'tf.float32'], {}), '(labels, tf.float32)\n', (16098, 16118), True, 'import tensorflow as tf\n'), ((18002, 18097), 
'transformers.modeling_tf_utils.TFSharedEmbeddings', 'TFSharedEmbeddings', ([], {'vocab_size': 'self.vocab_size', 'hidden_size': 'word_embedding_weight.shape[1]'}), '(vocab_size=self.vocab_size, hidden_size=\n word_embedding_weight.shape[1])\n', (18020, 18097), False, 'from transformers.modeling_tf_utils import TFSharedEmbeddings\n'), ((18285, 18323), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['self.vocab_size'], {}), '(self.vocab_size)\n', (18306, 18323), True, 'import tensorflow as tf\n'), ((18821, 18840), 'tensorflow.shape', 'tf.shape', (['positions'], {}), '(positions)\n', (18829, 18840), True, 'import tensorflow as tf\n'), ((18969, 19011), 'tensorflow.reshape', 'tf.reshape', (['input_tensor'], {'shape': 'shape_list'}), '(input_tensor, shape=shape_list)\n', (18979, 19011), True, 'import tensorflow as tf\n'), ((7994, 8073), 'logging.warning', 'logging.warning', (['"""Share embedding is enabled but hidden_size != embedding_size"""'], {}), "('Share embedding is enabled but hidden_size != embedding_size')\n", (8009, 8073), False, 'import logging\n'), ((17749, 17828), 'logging.warning', 'logging.warning', (['"""Share embedding is enabled but hidden_size != embedding_size"""'], {}), "('Share embedding is enabled but hidden_size != embedding_size')\n", (17764, 17828), False, 'import logging\n'), ((3210, 3279), 'tensorflow_addons.text.crf.crf_log_likelihood', 'crf_log_likelihood', (['potentials', 'labels', 'sequence_length', 'chain_kernel'], {}), '(potentials, labels, sequence_length, chain_kernel)\n', (3228, 3279), False, 'from tensorflow_addons.text.crf import crf_log_likelihood\n'), ((4286, 4316), 'tensorflow.shape', 'tf.shape', ([], {'input': 'hidden_feature'}), '(input=hidden_feature)\n', (4294, 4316), True, 'import tensorflow as tf\n'), ((4339, 4361), 'tensorflow.shape', 'tf.shape', ([], {'input': 'labels'}), '(input=labels)\n', (4347, 4361), True, 'import tensorflow as tf\n'), ((13053, 13069), 'tensorflow.shape', 'tf.shape', (['labels'], {}), '(labels)\n', (13061, 13069), True, 'import tensorflow as tf\n')]
|
import re
regex = re.compile('[^a-zA-Z]')
def score_word(word, corpus=None):
word = regex.sub('', word) # leave only alpha
score = 0
consec_bonus = 2
for i, letter in enumerate(word):
if letter.islower():
continue
        if i > 0 and word[i-1].isupper():
score += consec_bonus
if i == 0:
score += 10
elif (i == 1) or (i == len(word)-1):
score += 3
else:
score += 1
if (i >= 1) and (corpus is not None) and (word[i:].lower() in corpus):
score += len(word[i:])-1
return score
def score_acronym(capitalized_acronym, corpus=None):
"""
For each capitalized letter in the acronym:
* 10 points if first letter in a word (with exception of first letter)
* 3 point if second or last letter in a word
* 1 point otherwise
* N bonus points if begins an N-length valid sub-word
(ex: multiVariable -> 8 bonus points)
    * 2 bonus points if immediately following a capitalized letter
"""
return sum([score_word(word, corpus=corpus) for word in capitalized_acronym.split(' ')]) - 10
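# Illustrative usage (added for clarity; not part of the original snippet).
# Scores below follow the rules in the docstring, with no corpus supplied:
#
#   score_word('MV')                  # 10 (first letter) + 3 (last letter) + 2 (consecutive caps) = 15
#   score_acronym('Multi Variable')   # 10 + 10 - 10 = 10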
|
[
"re.compile"
] |
[((19, 42), 're.compile', 're.compile', (['"""[^a-zA-Z]"""'], {}), "('[^a-zA-Z]')\n", (29, 42), False, 'import re\n')]
|
"""Extension for built-in Sass functionality."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from itertools import product
import math
import os.path
from pathlib import PurePosixPath
from six.moves import xrange
from scss.extension import Extension
from scss.namespace import Namespace
from scss.source import SourceFile
from scss.types import (
Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type)
class CoreExtension(Extension):
name = 'core'
namespace = Namespace()
def handle_import(self, name, compilation, rule):
"""Implementation of the core Sass import mechanism, which just looks
for files on disk.
"""
# TODO virtually all of this is the same as the django stuff, except
# for the bit that actually looks for and tries to open the file.
# would be much easier if you could just stick an object in the search
# path that implements the pathlib API. the only problem is what to do
# when one path is a child of another, so the same file has two names,
# but tbh i'm not actually sure that's something worth protecting
# against...? like, the only cost is that we'll parse twice (or, later
# on, not respect single-import), and the fix is to just Not Do That
# TODO i think with the new origin semantics, i've made it possible to
# import relative to the current file even if the current file isn't
# anywhere in the search path. is that right?
path = PurePosixPath(name)
if path.suffix:
search_exts = [path.suffix]
else:
search_exts = ['.scss', '.sass']
relative_to = path.parent
basename = path.stem
search_path = [] # tuple of (origin, start_from)
if relative_to.is_absolute():
relative_to = PurePosixPath(*relative_to.parts[1:])
elif rule.source_file.origin:
# Search relative to the current file first, only if not doing an
# absolute import
search_path.append(
rule.source_file.origin / rule.source_file.relpath.parent)
search_path.extend(compilation.compiler.search_path)
for prefix, suffix in product(('_', ''), search_exts):
filename = prefix + basename + suffix
for origin in search_path:
relpath = relative_to / filename
# Lexically (ignoring symlinks!) eliminate .. from the part
# of the path that exists within Sass-space. pathlib
# deliberately doesn't do this, but os.path does.
relpath = PurePosixPath(os.path.normpath(str(relpath)))
if rule.source_file.key == (origin, relpath):
# Avoid self-import
# TODO is this what ruby does?
continue
path = origin / relpath
if not path.exists():
continue
# All good!
# TODO if this file has already been imported, we'll do the
# source preparation twice. make it lazy.
return SourceFile.read(origin, relpath)
# Alias to make the below declarations less noisy
ns = CoreExtension.namespace
# ------------------------------------------------------------------------------
# Color creation
def _interpret_percentage(n, relto=1., clamp=True):
expect_type(n, Number, unit='%')
if n.is_unitless:
ret = n.value / relto
else:
ret = n.value / 100
if clamp:
if ret < 0:
return 0
elif ret > 1:
return 1
return ret
@ns.declare
def rgba(r, g, b, a):
r = _interpret_percentage(r, relto=255)
g = _interpret_percentage(g, relto=255)
b = _interpret_percentage(b, relto=255)
a = _interpret_percentage(a, relto=1)
return Color.from_rgb(r, g, b, a)
@ns.declare
def rgb(r, g, b, type='rgb'):
return rgba(r, g, b, Number(1.0))
@ns.declare
def rgba_(color, a=None):
if a is None:
alpha = 1
else:
alpha = _interpret_percentage(a)
return Color.from_rgb(*color.rgba[:3], alpha=alpha)
@ns.declare
def rgb_(color):
return rgba_(color, a=Number(1))
@ns.declare
def hsla(h, s, l, a):
return Color.from_hsl(
h.value / 360 % 1,
# Ruby sass treats plain numbers for saturation and lightness as though
# they were percentages, just without the %
_interpret_percentage(s, relto=100),
_interpret_percentage(l, relto=100),
alpha=a.value,
)
@ns.declare
def hsl(h, s, l):
return hsla(h, s, l, Number(1))
@ns.declare
def hsla_(color, a=None):
return rgba_(color, a)
@ns.declare
def hsl_(color):
return rgba_(color, a=Number(1))
@ns.declare
def mix(color1, color2, weight=Number(50, "%")):
"""
Mixes together two colors. Specifically, takes the average of each of the
RGB components, optionally weighted by the given percentage.
The opacity of the colors is also considered when weighting the components.
The weight specifies the amount of the first color that should be included
in the returned color.
    The default, 50%, means that half the first color
and half the second color should be used.
25% means that a quarter of the first color
and three quarters of the second color should be used.
For example:
mix(#f00, #00f) => #7f007f
mix(#f00, #00f, 25%) => #3f00bf
mix(rgba(255, 0, 0, 0.5), #00f) => rgba(63, 0, 191, 0.75)
"""
# This algorithm factors in both the user-provided weight
# and the difference between the alpha values of the two colors
# to decide how to perform the weighted average of the two RGB values.
#
# It works by first normalizing both parameters to be within [-1, 1],
    # where 1 indicates "only use color1", -1 indicates "only use color2",
# and all values in between indicated a proportionately weighted average.
#
# Once we have the normalized variables w and a,
# we apply the formula (w + a)/(1 + w*a)
# to get the combined weight (in [-1, 1]) of color1.
# This formula has two especially nice properties:
#
# * When either w or a are -1 or 1, the combined weight is also that
# number (cases where w * a == -1 are undefined, and handled as a
# special case).
#
# * When a is 0, the combined weight is w, and vice versa
#
# Finally, the weight of color1 is renormalized to be within [0, 1]
# and the weight of color2 is given by 1 minus the weight of color1.
#
# Algorithm from the Sass project: http://sass-lang.com/
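    # Worked example (added for illustration): for weight=25% with equal
    # alphas, p = 0.25 so w = -0.5 and a = 0; the combined weight is
    # (w + a) / (1 + w*a) = -0.5, which rescales to w1 = 0.25 and w2 = 0.75,
    # i.e. a quarter of color1 and three quarters of color2, matching the
    # 25% case in the docstring.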
p = _interpret_percentage(weight)
# Scale weight to [-1, 1]
w = p * 2 - 1
# Compute difference in alpha channels
a = color1.alpha - color2.alpha
# Weight of first color
if w * a == -1:
# Avoid zero-div case
scaled_weight1 = w
else:
scaled_weight1 = (w + a) / (1 + w * a)
# Unscale back to [0, 1] and get the weight of the other color
w1 = (scaled_weight1 + 1) / 2
w2 = 1 - w1
# Do the scaling. Note that alpha isn't scaled by alpha, as that wouldn't
# make much sense; it uses the original untwiddled weight, p.
channels = [
ch1 * w1 + ch2 * w2
for (ch1, ch2) in zip(color1.rgba[:3], color2.rgba[:3])]
alpha = color1.alpha * p + color2.alpha * (1 - p)
return Color.from_rgb(*channels, alpha=alpha)
# ------------------------------------------------------------------------------
# Color inspection
@ns.declare
def red(color):
r, g, b, a = color.rgba
return Number(r * 255)
@ns.declare
def green(color):
r, g, b, a = color.rgba
return Number(g * 255)
@ns.declare
def blue(color):
r, g, b, a = color.rgba
return Number(b * 255)
@ns.declare_alias('opacity')
@ns.declare
def alpha(color):
return Number(color.alpha)
@ns.declare
def hue(color):
h, s, l = color.hsl
return Number(h * 360, "deg")
@ns.declare
def saturation(color):
h, s, l = color.hsl
return Number(s * 100, "%")
@ns.declare
def lightness(color):
h, s, l = color.hsl
return Number(l * 100, "%")
@ns.declare
def ie_hex_str(color):
c = Color(color).value
return String("#{3:02X}{0:02X}{1:02X}{2:02X}".format(
int(round(c[0])),
int(round(c[1])),
int(round(c[2])),
int(round(c[3] * 255)),
))
# ------------------------------------------------------------------------------
# Color modification
@ns.declare_alias('fade-in')
@ns.declare_alias('fadein')
@ns.declare
def opacify(color, amount):
r, g, b, a = color.rgba
if amount.is_simple_unit('%'):
amt = amount.value / 100
else:
amt = amount.value
return Color.from_rgb(
r, g, b,
alpha=a + amt)
@ns.declare_alias('fade-out')
@ns.declare_alias('fadeout')
@ns.declare
def transparentize(color, amount):
r, g, b, a = color.rgba
if amount.is_simple_unit('%'):
amt = amount.value / 100
else:
amt = amount.value
return Color.from_rgb(
r, g, b,
alpha=a - amt)
@ns.declare
def lighten(color, amount):
return adjust_color(color, lightness=amount)
@ns.declare
def darken(color, amount):
return adjust_color(color, lightness=-amount)
@ns.declare
def saturate(color, amount):
return adjust_color(color, saturation=amount)
@ns.declare
def desaturate(color, amount):
return adjust_color(color, saturation=-amount)
@ns.declare
def greyscale(color):
h, s, l = color.hsl
return Color.from_hsl(h, 0, l, alpha=color.alpha)
@ns.declare
def grayscale(color):
if isinstance(color, Number):
# grayscale(n) and grayscale(n%) are CSS3 filters and should be left
# intact, but only when using the "a" spelling
return String.unquoted("grayscale(%s)" % (color.render(),))
else:
return greyscale(color)
@ns.declare_alias('spin')
@ns.declare
def adjust_hue(color, degrees):
h, s, l = color.hsl
delta = degrees.value / 360
return Color.from_hsl((h + delta) % 1, s, l, alpha=color.alpha)
@ns.declare
def complement(color):
h, s, l = color.hsl
return Color.from_hsl((h + 0.5) % 1, s, l, alpha=color.alpha)
@ns.declare
def invert(color):
"""Returns the inverse (negative) of a color. The red, green, and blue
values are inverted, while the opacity is left alone.
"""
r, g, b, a = color.rgba
return Color.from_rgb(1 - r, 1 - g, 1 - b, alpha=a)
@ns.declare
def adjust_lightness(color, amount):
return adjust_color(color, lightness=amount)
@ns.declare
def adjust_saturation(color, amount):
return adjust_color(color, saturation=amount)
@ns.declare
def scale_lightness(color, amount):
return scale_color(color, lightness=amount)
@ns.declare
def scale_saturation(color, amount):
return scale_color(color, saturation=amount)
@ns.declare
def adjust_color(
color, red=None, green=None, blue=None,
hue=None, saturation=None, lightness=None, alpha=None):
do_rgb = red or green or blue
do_hsl = hue or saturation or lightness
if do_rgb and do_hsl:
raise ValueError(
"Can't adjust both RGB and HSL channels at the same time")
zero = Number(0)
a = color.alpha + (alpha or zero).value
if do_rgb:
r, g, b = color.rgba[:3]
channels = [
current + (adjustment or zero).value / 255
for (current, adjustment) in zip(color.rgba, (red, green, blue))]
return Color.from_rgb(*channels, alpha=a)
else:
h, s, l = color.hsl
h = (h + (hue or zero).value / 360) % 1
s += _interpret_percentage(saturation or zero, relto=100, clamp=False)
l += _interpret_percentage(lightness or zero, relto=100, clamp=False)
return Color.from_hsl(h, s, l, a)
def _scale_channel(channel, scaleby):
if scaleby is None:
return channel
expect_type(scaleby, Number)
if not scaleby.is_simple_unit('%'):
raise ValueError("Expected percentage, got %r" % (scaleby,))
factor = scaleby.value / 100
if factor > 0:
# Add x% of the remaining range, up to 1
return channel + (1 - channel) * factor
else:
# Subtract x% of the existing channel. We add here because the factor
# is already negative
return channel * (1 + factor)
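# Worked example for _scale_channel (added for illustration): scaling a
# channel of 0.5 by +20% moves it 20% of the way toward 1, giving
# 0.5 + (1 - 0.5) * 0.2 = 0.6; scaling by -20% shrinks the channel itself,
# giving 0.5 * (1 - 0.2) = 0.4.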
@ns.declare
def scale_color(
color, red=None, green=None, blue=None,
saturation=None, lightness=None, alpha=None):
do_rgb = red or green or blue
do_hsl = saturation or lightness
if do_rgb and do_hsl:
raise ValueError(
"Can't scale both RGB and HSL channels at the same time")
scaled_alpha = _scale_channel(color.alpha, alpha)
if do_rgb:
channels = [
_scale_channel(channel, scaleby)
for channel, scaleby in zip(color.rgba, (red, green, blue))]
return Color.from_rgb(*channels, alpha=scaled_alpha)
else:
channels = [
_scale_channel(channel, scaleby)
for channel, scaleby
in zip(color.hsl, (None, saturation, lightness))]
return Color.from_hsl(*channels, alpha=scaled_alpha)
@ns.declare
def change_color(
color, red=None, green=None, blue=None,
hue=None, saturation=None, lightness=None, alpha=None):
do_rgb = red or green or blue
do_hsl = hue or saturation or lightness
if do_rgb and do_hsl:
raise ValueError(
"Can't change both RGB and HSL channels at the same time")
if alpha is None:
alpha = color.alpha
else:
alpha = alpha.value
if do_rgb:
channels = list(color.rgba[:3])
if red:
channels[0] = _interpret_percentage(red, relto=255)
if green:
channels[1] = _interpret_percentage(green, relto=255)
if blue:
channels[2] = _interpret_percentage(blue, relto=255)
return Color.from_rgb(*channels, alpha=alpha)
else:
channels = list(color.hsl)
if hue:
expect_type(hue, Number, unit=None)
channels[0] = (hue.value / 360) % 1
# Ruby sass treats plain numbers for saturation and lightness as though
# they were percentages, just without the %
if saturation:
channels[1] = _interpret_percentage(saturation, relto=100)
if lightness:
channels[2] = _interpret_percentage(lightness, relto=100)
return Color.from_hsl(*channels, alpha=alpha)
# ------------------------------------------------------------------------------
# String functions
@ns.declare_alias('e')
@ns.declare_alias('escape')
@ns.declare
def unquote(*args):
arg = List.from_maybe_starargs(args).maybe()
if isinstance(arg, String):
return String(arg.value, quotes=None)
else:
return String(arg.render(), quotes=None)
@ns.declare
def quote(*args):
arg = List.from_maybe_starargs(args).maybe()
if isinstance(arg, String):
return String(arg.value, quotes='"')
else:
return String(arg.render(), quotes='"')
@ns.declare
def str_length(string):
expect_type(string, String)
# nb: can't use `len(string)`, because that gives the Sass list length,
# which is 1
return Number(len(string.value))
# TODO this and several others should probably also require integers
# TODO and assert that the indexes are valid
@ns.declare
def str_insert(string, insert, index):
expect_type(string, String)
expect_type(insert, String)
expect_type(index, Number, unit=None)
py_index = index.to_python_index(len(string.value), check_bounds=False)
return String(
string.value[:py_index] + insert.value + string.value[py_index:],
quotes=string.quotes)
@ns.declare
def str_index(string, substring):
expect_type(string, String)
expect_type(substring, String)
# 1-based indexing, with 0 for failure
return Number(string.value.find(substring.value) + 1)
@ns.declare
def str_slice(string, start_at, end_at=None):
expect_type(string, String)
expect_type(start_at, Number, unit=None)
py_start_at = start_at.to_python_index(len(string.value))
if end_at is None:
py_end_at = None
else:
expect_type(end_at, Number, unit=None)
# Endpoint is inclusive, unlike Python
py_end_at = end_at.to_python_index(len(string.value)) + 1
return String(
string.value[py_start_at:py_end_at],
quotes=string.quotes)
@ns.declare
def to_upper_case(string):
expect_type(string, String)
return String(string.value.upper(), quotes=string.quotes)
@ns.declare
def to_lower_case(string):
expect_type(string, String)
return String(string.value.lower(), quotes=string.quotes)
# ------------------------------------------------------------------------------
# Number functions
@ns.declare
def percentage(value):
expect_type(value, Number, unit=None)
return value * Number(100, unit='%')
ns.set_function('abs', 1, Number.wrap_python_function(abs))
ns.set_function('round', 1, Number.wrap_python_function(round))
ns.set_function('ceil', 1, Number.wrap_python_function(math.ceil))
ns.set_function('floor', 1, Number.wrap_python_function(math.floor))
# ------------------------------------------------------------------------------
# List functions
def __parse_separator(separator, default_from=None):
if separator is None:
separator = 'auto'
separator = String.unquoted(separator).value
if separator == 'comma':
return True
elif separator == 'space':
return False
elif separator == 'auto':
if not default_from:
return True
elif len(default_from) < 2:
return True
else:
return default_from.use_comma
else:
raise ValueError('Separator must be auto, comma, or space')
# TODO get the compass bit outta here
@ns.declare_alias('-compass-list-size')
@ns.declare
def length(*lst):
if len(lst) == 1 and isinstance(lst[0], (list, tuple, List)):
lst = lst[0]
return Number(len(lst))
@ns.declare
def set_nth(list, n, value):
expect_type(n, Number, unit=None)
py_n = n.to_python_index(len(list))
return List(
tuple(list[:py_n]) + (value,) + tuple(list[py_n + 1:]),
use_comma=list.use_comma)
# TODO get the compass bit outta here
@ns.declare_alias('-compass-nth')
@ns.declare
def nth(lst, n):
"""Return the nth item in the list."""
expect_type(n, (String, Number), unit=None)
if isinstance(n, String):
if n.value.lower() == 'first':
i = 0
elif n.value.lower() == 'last':
i = -1
else:
raise ValueError("Invalid index %r" % (n,))
else:
# DEVIATION: nth treats lists as circular lists
i = n.to_python_index(len(lst), circular=True)
return lst[i]
@ns.declare
def join(lst1, lst2, separator=String.unquoted('auto')):
expect_type(separator, String)
ret = []
ret.extend(List.from_maybe(lst1))
ret.extend(List.from_maybe(lst2))
if separator.value == 'comma':
use_comma = True
elif separator.value == 'space':
use_comma = False
elif separator.value == 'auto':
# The Sass docs are slightly misleading here, but the algorithm is: use
# the delimiter from the first list that has at least 2 items, or
# default to spaces.
if len(lst1) > 1:
use_comma = lst1.use_comma
elif len(lst2) > 1:
use_comma = lst2.use_comma
else:
use_comma = False
else:
raise ValueError("separator for join() must be comma, space, or auto")
return List(ret, use_comma=use_comma)
@ns.declare
def min_(*lst):
if len(lst) == 1 and isinstance(lst[0], (list, tuple, List)):
lst = lst[0]
return min(lst)
@ns.declare
def max_(*lst):
if len(lst) == 1 and isinstance(lst[0], (list, tuple, List)):
lst = lst[0]
return max(lst)
@ns.declare
def append(lst, val, separator=None):
ret = []
ret.extend(List.from_maybe(lst))
ret.append(val)
use_comma = __parse_separator(separator, default_from=lst)
return List(ret, use_comma=use_comma)
@ns.declare
def index(lst, val):
for i in xrange(len(lst)):
if lst.value[i] == val:
return Number(i + 1)
return Boolean(False)
@ns.declare
def zip_(*lists):
return List(
[List(zipped) for zipped in zip(*lists)],
use_comma=True)
# TODO need a way to use "list" as the arg name without shadowing the builtin
@ns.declare
def list_separator(list):
if list.use_comma:
return String.unquoted('comma')
else:
return String.unquoted('space')
# ------------------------------------------------------------------------------
# Map functions
@ns.declare
def map_get(map, key):
return map.to_dict().get(key, Null())
@ns.declare
def map_merge(*maps):
key_order = []
index = {}
for map in maps:
for key, value in map.to_pairs():
if key not in index:
key_order.append(key)
index[key] = value
pairs = [(key, index[key]) for key in key_order]
return Map(pairs, index=index)
@ns.declare
def map_keys(map):
return List(
[k for (k, v) in map.to_pairs()],
use_comma=True)
@ns.declare
def map_values(map):
return List(
[v for (k, v) in map.to_pairs()],
use_comma=True)
@ns.declare
def map_has_key(map, key):
return Boolean(key in map.to_dict())
# DEVIATIONS: these do not exist in ruby sass
@ns.declare
def map_get3(map, key, default):
return map.to_dict().get(key, default)
@ns.declare
def map_get_nested3(map, keys, default=Null()):
for key in keys:
map = map.to_dict().get(key, None)
if map is None:
return default
return map
@ns.declare
def map_merge_deep(*maps):
pairs = []
keys = set()
for map in maps:
for key, value in map.to_pairs():
keys.add(key)
for key in keys:
values = [map.to_dict().get(key, None) for map in maps]
values = [v for v in values if v is not None]
if all(isinstance(v, Map) for v in values):
pairs.append((key, map_merge_deep(*values)))
else:
pairs.append((key, values[-1]))
return Map(pairs)
# ------------------------------------------------------------------------------
# Meta functions
@ns.declare
def type_of(obj): # -> bool, number, string, color, list
return String(obj.sass_type_name)
@ns.declare
def unit(number): # -> px, em, cm, etc.
numer = '*'.join(sorted(number.unit_numer))
denom = '*'.join(sorted(number.unit_denom))
if denom:
ret = numer + '/' + denom
else:
ret = numer
return String.unquoted(ret)
@ns.declare
def unitless(value):
if not isinstance(value, Number):
raise TypeError("Expected number, got %r" % (value,))
return Boolean(value.is_unitless)
@ns.declare
def comparable(number1, number2):
left = number1.to_base_units()
right = number2.to_base_units()
return Boolean(
left.unit_numer == right.unit_numer
and left.unit_denom == right.unit_denom)
@ns.declare
def keywords(value):
"""Extract named arguments, as a map, from an argument list."""
expect_type(value, Arglist)
return value.extract_keywords()
# ------------------------------------------------------------------------------
# Miscellaneous
@ns.declare
def if_(condition, if_true, if_false=Null()):
return if_true if condition else if_false
|
[
"scss.types.Color.from_rgb",
"scss.source.SourceFile.read",
"scss.types.List.from_maybe_starargs",
"scss.types.expect_type",
"scss.namespace.Namespace",
"scss.types.Number",
"itertools.product",
"scss.types.Color",
"scss.types.Number.wrap_python_function",
"scss.types.String.unquoted",
"scss.types.Color.from_hsl",
"scss.types.List",
"scss.types.String",
"scss.types.Boolean",
"scss.types.Map",
"pathlib.PurePosixPath",
"scss.types.List.from_maybe",
"scss.types.Null"
] |
[((598, 609), 'scss.namespace.Namespace', 'Namespace', ([], {}), '()\n', (607, 609), False, 'from scss.namespace import Namespace\n'), ((3545, 3577), 'scss.types.expect_type', 'expect_type', (['n', 'Number'], {'unit': '"""%"""'}), "(n, Number, unit='%')\n", (3556, 3577), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((4006, 4032), 'scss.types.Color.from_rgb', 'Color.from_rgb', (['r', 'g', 'b', 'a'], {}), '(r, g, b, a)\n', (4020, 4032), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((4254, 4298), 'scss.types.Color.from_rgb', 'Color.from_rgb', (['*color.rgba[:3]'], {'alpha': 'alpha'}), '(*color.rgba[:3], alpha=alpha)\n', (4268, 4298), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((4956, 4971), 'scss.types.Number', 'Number', (['(50)', '"""%"""'], {}), "(50, '%')\n", (4962, 4971), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((7771, 7809), 'scss.types.Color.from_rgb', 'Color.from_rgb', (['*channels'], {'alpha': 'alpha'}), '(*channels, alpha=alpha)\n', (7785, 7809), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((7980, 7995), 'scss.types.Number', 'Number', (['(r * 255)'], {}), '(r * 255)\n', (7986, 7995), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((8067, 8082), 'scss.types.Number', 'Number', (['(g * 255)'], {}), '(g * 255)\n', (8073, 8082), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((8153, 8168), 'scss.types.Number', 'Number', (['(b * 255)'], {}), '(b * 255)\n', (8159, 8168), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((8241, 8260), 'scss.types.Number', 'Number', (['color.alpha'], {}), '(color.alpha)\n', (8247, 8260), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((8326, 8348), 'scss.types.Number', 'Number', (['(h * 360)', '"""deg"""'], {}), "(h * 360, 'deg')\n", (8332, 8348), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((8421, 8441), 'scss.types.Number', 'Number', (['(s * 100)', '"""%"""'], {}), "(s * 100, '%')\n", (8427, 8441), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((8513, 8533), 'scss.types.Number', 'Number', (['(l * 100)', '"""%"""'], {}), "(l * 100, '%')\n", (8519, 8533), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((9119, 9157), 'scss.types.Color.from_rgb', 'Color.from_rgb', (['r', 'g', 'b'], {'alpha': '(a + amt)'}), '(r, g, b, alpha=a + amt)\n', (9133, 9157), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((9427, 9465), 'scss.types.Color.from_rgb', 'Color.from_rgb', (['r', 'g', 'b'], {'alpha': '(a - amt)'}), '(r, g, b, alpha=a - amt)\n', (9441, 9465), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((9925, 9967), 'scss.types.Color.from_hsl', 'Color.from_hsl', (['h', '(0)', 'l'], {'alpha': 'color.alpha'}), '(h, 0, l, alpha=color.alpha)\n', (9939, 9967), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, 
expect_type\n'), ((10419, 10475), 'scss.types.Color.from_hsl', 'Color.from_hsl', (['((h + delta) % 1)', 's', 'l'], {'alpha': 'color.alpha'}), '((h + delta) % 1, s, l, alpha=color.alpha)\n', (10433, 10475), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((10548, 10602), 'scss.types.Color.from_hsl', 'Color.from_hsl', (['((h + 0.5) % 1)', 's', 'l'], {'alpha': 'color.alpha'}), '((h + 0.5) % 1, s, l, alpha=color.alpha)\n', (10562, 10602), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((10817, 10861), 'scss.types.Color.from_rgb', 'Color.from_rgb', (['(1 - r)', '(1 - g)', '(1 - b)'], {'alpha': 'a'}), '(1 - r, 1 - g, 1 - b, alpha=a)\n', (10831, 10861), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((11619, 11628), 'scss.types.Number', 'Number', (['(0)'], {}), '(0)\n', (11625, 11628), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((12304, 12332), 'scss.types.expect_type', 'expect_type', (['scaleby', 'Number'], {}), '(scaleby, Number)\n', (12315, 12332), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((15533, 15560), 'scss.types.expect_type', 'expect_type', (['string', 'String'], {}), '(string, String)\n', (15544, 15560), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((15863, 15890), 'scss.types.expect_type', 'expect_type', (['string', 'String'], {}), '(string, String)\n', (15874, 15890), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((15895, 15922), 'scss.types.expect_type', 'expect_type', (['insert', 'String'], {}), '(insert, String)\n', (15906, 15922), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((15927, 15964), 'scss.types.expect_type', 'expect_type', (['index', 'Number'], {'unit': 'None'}), '(index, Number, unit=None)\n', (15938, 15964), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((16053, 16151), 'scss.types.String', 'String', (['(string.value[:py_index] + insert.value + string.value[py_index:])'], {'quotes': 'string.quotes'}), '(string.value[:py_index] + insert.value + string.value[py_index:],\n quotes=string.quotes)\n', (16059, 16151), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((16217, 16244), 'scss.types.expect_type', 'expect_type', (['string', 'String'], {}), '(string, String)\n', (16228, 16244), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((16249, 16279), 'scss.types.expect_type', 'expect_type', (['substring', 'String'], {}), '(substring, String)\n', (16260, 16279), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((16446, 16473), 'scss.types.expect_type', 'expect_type', (['string', 'String'], {}), '(string, String)\n', (16457, 16473), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((16478, 16518), 'scss.types.expect_type', 'expect_type', (['start_at', 'Number'], {'unit': 'None'}), '(start_at, Number, unit=None)\n', (16489, 16518), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, 
expect_type\n'), ((16812, 16877), 'scss.types.String', 'String', (['string.value[py_start_at:py_end_at]'], {'quotes': 'string.quotes'}), '(string.value[py_start_at:py_end_at], quotes=string.quotes)\n', (16818, 16877), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((16940, 16967), 'scss.types.expect_type', 'expect_type', (['string', 'String'], {}), '(string, String)\n', (16951, 16967), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((17076, 17103), 'scss.types.expect_type', 'expect_type', (['string', 'String'], {}), '(string, String)\n', (17087, 17103), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((17309, 17346), 'scss.types.expect_type', 'expect_type', (['value', 'Number'], {'unit': 'None'}), '(value, Number, unit=None)\n', (17320, 17346), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((17416, 17448), 'scss.types.Number.wrap_python_function', 'Number.wrap_python_function', (['abs'], {}), '(abs)\n', (17443, 17448), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((17478, 17512), 'scss.types.Number.wrap_python_function', 'Number.wrap_python_function', (['round'], {}), '(round)\n', (17505, 17512), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((17541, 17579), 'scss.types.Number.wrap_python_function', 'Number.wrap_python_function', (['math.ceil'], {}), '(math.ceil)\n', (17568, 17579), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((17609, 17648), 'scss.types.Number.wrap_python_function', 'Number.wrap_python_function', (['math.floor'], {}), '(math.floor)\n', (17636, 17648), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((18557, 18590), 'scss.types.expect_type', 'expect_type', (['n', 'Number'], {'unit': 'None'}), '(n, Number, unit=None)\n', (18568, 18590), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((18897, 18940), 'scss.types.expect_type', 'expect_type', (['n', '(String, Number)'], {'unit': 'None'}), '(n, (String, Number), unit=None)\n', (18908, 18940), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((19343, 19366), 'scss.types.String.unquoted', 'String.unquoted', (['"""auto"""'], {}), "('auto')\n", (19358, 19366), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((19373, 19403), 'scss.types.expect_type', 'expect_type', (['separator', 'String'], {}), '(separator, String)\n', (19384, 19403), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((20114, 20144), 'scss.types.List', 'List', (['ret'], {'use_comma': 'use_comma'}), '(ret, use_comma=use_comma)\n', (20118, 20144), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((20616, 20646), 'scss.types.List', 'List', (['ret'], {'use_comma': 'use_comma'}), '(ret, use_comma=use_comma)\n', (20620, 20646), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((20789, 20803), 'scss.types.Boolean', 'Boolean', (['(False)'], {}), '(False)\n', (20796, 
20803), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((21636, 21659), 'scss.types.Map', 'Map', (['pairs'], {'index': 'index'}), '(pairs, index=index)\n', (21639, 21659), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((22166, 22172), 'scss.types.Null', 'Null', ([], {}), '()\n', (22170, 22172), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((22787, 22797), 'scss.types.Map', 'Map', (['pairs'], {}), '(pairs)\n', (22790, 22797), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((22980, 23006), 'scss.types.String', 'String', (['obj.sass_type_name'], {}), '(obj.sass_type_name)\n', (22986, 23006), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((23248, 23268), 'scss.types.String.unquoted', 'String.unquoted', (['ret'], {}), '(ret)\n', (23263, 23268), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((23416, 23442), 'scss.types.Boolean', 'Boolean', (['value.is_unitless'], {}), '(value.is_unitless)\n', (23423, 23442), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((23573, 23662), 'scss.types.Boolean', 'Boolean', (['(left.unit_numer == right.unit_numer and left.unit_denom == right.unit_denom)'], {}), '(left.unit_numer == right.unit_numer and left.unit_denom == right.\n unit_denom)\n', (23580, 23662), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((23782, 23809), 'scss.types.expect_type', 'expect_type', (['value', 'Arglist'], {}), '(value, Arglist)\n', (23793, 23809), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((23995, 24001), 'scss.types.Null', 'Null', ([], {}), '()\n', (23999, 24001), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((1628, 1647), 'pathlib.PurePosixPath', 'PurePosixPath', (['name'], {}), '(name)\n', (1641, 1647), False, 'from pathlib import PurePosixPath\n'), ((2341, 2372), 'itertools.product', 'product', (["('_', '')", 'search_exts'], {}), "(('_', ''), search_exts)\n", (2348, 2372), False, 'from itertools import product\n'), ((4102, 4113), 'scss.types.Number', 'Number', (['(1.0)'], {}), '(1.0)\n', (4108, 4113), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((4765, 4774), 'scss.types.Number', 'Number', (['(1)'], {}), '(1)\n', (4771, 4774), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((8579, 8591), 'scss.types.Color', 'Color', (['color'], {}), '(color)\n', (8584, 8591), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((11891, 11925), 'scss.types.Color.from_rgb', 'Color.from_rgb', (['*channels'], {'alpha': 'a'}), '(*channels, alpha=a)\n', (11905, 11925), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((12185, 12211), 'scss.types.Color.from_hsl', 'Color.from_hsl', (['h', 's', 'l', 'a'], {}), '(h, s, l, a)\n', (12199, 12211), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((13300, 13345), 
'scss.types.Color.from_rgb', 'Color.from_rgb', (['*channels'], {'alpha': 'scaled_alpha'}), '(*channels, alpha=scaled_alpha)\n', (13314, 13345), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((13533, 13578), 'scss.types.Color.from_hsl', 'Color.from_hsl', (['*channels'], {'alpha': 'scaled_alpha'}), '(*channels, alpha=scaled_alpha)\n', (13547, 13578), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((14331, 14369), 'scss.types.Color.from_rgb', 'Color.from_rgb', (['*channels'], {'alpha': 'alpha'}), '(*channels, alpha=alpha)\n', (14345, 14369), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((14862, 14900), 'scss.types.Color.from_hsl', 'Color.from_hsl', (['*channels'], {'alpha': 'alpha'}), '(*channels, alpha=alpha)\n', (14876, 14900), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((15184, 15214), 'scss.types.String', 'String', (['arg.value'], {'quotes': 'None'}), '(arg.value, quotes=None)\n', (15190, 15214), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((15403, 15432), 'scss.types.String', 'String', (['arg.value'], {'quotes': '"""\\""""'}), '(arg.value, quotes=\'"\')\n', (15409, 15432), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((16648, 16686), 'scss.types.expect_type', 'expect_type', (['end_at', 'Number'], {'unit': 'None'}), '(end_at, Number, unit=None)\n', (16659, 16686), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((17366, 17387), 'scss.types.Number', 'Number', (['(100)'], {'unit': '"""%"""'}), "(100, unit='%')\n", (17372, 17387), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((17873, 17899), 'scss.types.String.unquoted', 'String.unquoted', (['separator'], {}), '(separator)\n', (17888, 17899), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((19433, 19454), 'scss.types.List.from_maybe', 'List.from_maybe', (['lst1'], {}), '(lst1)\n', (19448, 19454), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((19471, 19492), 'scss.types.List.from_maybe', 'List.from_maybe', (['lst2'], {}), '(lst2)\n', (19486, 19492), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((20499, 20519), 'scss.types.List.from_maybe', 'List.from_maybe', (['lst'], {}), '(lst)\n', (20514, 20519), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((21083, 21107), 'scss.types.String.unquoted', 'String.unquoted', (['"""comma"""'], {}), "('comma')\n", (21098, 21107), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((21133, 21157), 'scss.types.String.unquoted', 'String.unquoted', (['"""space"""'], {}), "('space')\n", (21148, 21157), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((21327, 21333), 'scss.types.Null', 'Null', ([], {}), '()\n', (21331, 21333), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((1958, 1995), 'pathlib.PurePosixPath', 'PurePosixPath', 
(['*relative_to.parts[1:]'], {}), '(*relative_to.parts[1:])\n', (1971, 1995), False, 'from pathlib import PurePosixPath\n'), ((4356, 4365), 'scss.types.Number', 'Number', (['(1)'], {}), '(1)\n', (4362, 4365), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((4900, 4909), 'scss.types.Number', 'Number', (['(1)'], {}), '(1)\n', (4906, 4909), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((14444, 14479), 'scss.types.expect_type', 'expect_type', (['hue', 'Number'], {'unit': 'None'}), '(hue, Number, unit=None)\n', (14455, 14479), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((15097, 15127), 'scss.types.List.from_maybe_starargs', 'List.from_maybe_starargs', (['args'], {}), '(args)\n', (15121, 15127), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((15316, 15346), 'scss.types.List.from_maybe_starargs', 'List.from_maybe_starargs', (['args'], {}), '(args)\n', (15340, 15346), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((20764, 20777), 'scss.types.Number', 'Number', (['(i + 1)'], {}), '(i + 1)\n', (20770, 20777), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((20862, 20874), 'scss.types.List', 'List', (['zipped'], {}), '(zipped)\n', (20866, 20874), False, 'from scss.types import Arglist, Boolean, Color, List, Null, Number, String, Map, expect_type\n'), ((3274, 3306), 'scss.source.SourceFile.read', 'SourceFile.read', (['origin', 'relpath'], {}), '(origin, relpath)\n', (3289, 3306), False, 'from scss.source import SourceFile\n')]
|
# Copyright 2021 Internet Corporation for Assigned Names and Numbers.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at https://mozilla.org/MPL/2.0/.
#
# Developed by Sinodun IT (sinodun.com)
#
# Aggregation client subnet statistics
import textwrap
import grafanalib.core as GCore
import grafanacommon as GCommon
def query_classification_chart(chart_title, yaxis_label, prefix_field, agginfo, nodesel):
return GCommon.BarChart(
title = chart_title,
orientation = GCommon.BAR_CHART_ORIENTATION_HORIZONTAL,
layout = GCommon.BarChartLayout(
barmode = GCommon.BAR_CHART_LAYOUT_MODE_STACK,
showlegend = True,
xaxis = GCommon.BarChartAxis(
title = 'Queries per second',
),
yaxis = GCommon.BarChartAxis(
autotick = False,
axtype = GCommon.BAR_CHART_AXIS_TYPE_CATEGORY,
tickmargin = 110,
title = yaxis_label,
),
),
traces = [
GCommon.BarChartTrace(
name = 'AForA',
x = 'AForA',
y = 'AForAPrefix',
text = 'AForA',
),
GCommon.BarChartTrace(
name = 'AForRoot',
x = 'AForRoot',
y = 'AForRootPrefix',
text = 'AForRoot',
),
GCommon.BarChartTrace(
name = 'FunnyQueryClass',
x = 'FunnyQueryClass',
y = 'FunnyQueryClassPrefix',
text = 'FunnyQueryClass',
),
GCommon.BarChartTrace(
name = 'FunnyQueryType',
x = 'FunnyQueryType',
y = 'FunnyQueryTypePrefix',
text = 'FunnyQueryType',
),
GCommon.BarChartTrace(
name = 'Localhost',
x = 'Localhost',
y = 'LocalhostPrefix',
text = 'Localhost',
),
GCommon.BarChartTrace(
name = 'NonAuthTld',
x = 'NonAuthTld',
y = 'NonAuthTldPrefix',
text = 'NonAuthTld',
),
GCommon.BarChartTrace(
name = 'Ok',
x = 'Ok',
y = 'OkPrefix',
text = 'Ok',
),
GCommon.BarChartTrace(
name = 'RFC1918Ptr',
x = 'RFC1918Ptr',
y = 'RFC1918PtrPrefix',
text = 'RFC1918Ptr',
),
GCommon.BarChartTrace(
name = 'RootServersNet',
x = 'RootServersNet',
y = 'RootServersNetPrefix',
text = 'RootServersNet',
),
GCommon.BarChartTrace(
name = 'SrcPortZero',
x = 'SrcPortZero',
y = 'SrcPortZeroPrefix',
text = 'SrcPortZero',
),
],
targets = [
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS AForAPrefix,
AForA,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(AForACount)/($to - $from) AS AForA
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'A'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS AForRootPrefix,
AForRoot,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(AForRootCount)/($to - $from) AS AForRoot
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'B'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS FunnyQueryClassPrefix,
FunnyQueryClass,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(FunnyQueryClassCount)/($to - $from) AS FunnyQueryClass
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'C'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS FunnyQueryTypePrefix,
FunnyQueryType,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(FunnyQueryTypeCount)/($to - $from) AS FunnyQueryType
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
                        ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'D'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS LocalhostPrefix,
Localhost,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(LocalhostCount)/($to - $from) AS Localhost
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'E'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS NonAuthTldPrefix,
NonAuthTld,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(NonAuthTldCount)/($to - $from) AS NonAuthTld
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'F'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS OkPrefix,
Ok,
TotalCount
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS TotalCount,
sum(Count -
(AForACount +
AForRootCount +
FunnyQueryClassCount +
FunnyQueryTypeCount +
LocalhostCount +
NonAuthTldCount +
RFC1918PtrCount +
RootServersNetCount +
SrcPortZeroCount))/($to - $from) AS Ok
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY TotalCount DESC
LIMIT 40
)
ORDER BY TotalCount ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'G'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS RFC1918PtrPrefix,
RFC1918Ptr,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(RFC1918PtrCount)/($to - $from) AS RFC1918Ptr
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'H'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS RootServersNetPrefix,
RootServersNet,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(RootServersNetCount)/($to - $from) AS RootServersNet
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'I'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Prefix AS SrcPortZeroPrefix,
SrcPortZero,
Count
FROM
(
SELECT
{prefix_field} AS Prefix,
sum(Count) AS Count,
sum(SrcPortZeroCount)/($to - $from) AS SrcPortZero
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY Count DESC
LIMIT 40
)
ORDER BY Count ASC
""".format(
prefix_field=prefix_field,
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'J'
),
],
)
def dash(myuid, agginfo, nodesel, **kwargs):
return GCommon.Dashboard(
title = "Client subnet statistics detail",
tags = [
agginfo['graph_tag']
],
uid = myuid,
rows = [
GCore.Row(
height = GCore.Pixels(50),
panels = [
GCommon.HTMLPanel('grafana/common/dashboards/aggregated/client_subnet_statistics_header.html', transparent=True),
],
),
GCore.Row(
height = GCore.Pixels(GCore.DEFAULT_ROW_HEIGHT.num * 2),
panels = [
GCommon.BarChart(
title = 'Clients by fixed subnet',
orientation = GCommon.BAR_CHART_ORIENTATION_HORIZONTAL,
layout = GCommon.BarChartLayout(
xaxis = GCommon.BarChartAxis(
title = 'Queries per second',
),
yaxis = GCommon.BarChartAxis(
autotick = False,
axtype = GCommon.BAR_CHART_AXIS_TYPE_CATEGORY,
tickmargin = 110,
title = 'Fixed Subnet',
),
),
traces = [
GCommon.BarChartTrace(
name = 'Subnet',
color = '#A352CC',
x = 'QPS',
y = 'Subnet',
text = 'QPS',
),
],
targets = [
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'BusiestClientSubnets' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Subnet,
QPS
FROM
(
SELECT
Prefix AS Subnet,
sum(Count)/($to - $from) AS QPS
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
ORDER BY QPS DESC
LIMIT 30
)
ORDER BY QPS ASC""".format(
nodesel=nodesel)),
refId = 'A'
)
],
),
],
),
GCore.Row(
height = GCore.Pixels(GCore.DEFAULT_ROW_HEIGHT.num * 2),
panels = [
GCommon.BarChart(
title = 'RCODE by clients by ASN',
orientation = GCommon.BAR_CHART_ORIENTATION_HORIZONTAL,
layout = GCommon.BarChartLayout(
barmode = GCommon.BAR_CHART_LAYOUT_MODE_STACK,
showlegend = True,
xaxis = GCommon.BarChartAxis(
title = 'Queries per second',
),
yaxis = GCommon.BarChartAxis(
autotick = False,
axtype = GCommon.BAR_CHART_AXIS_TYPE_CATEGORY,
tickmargin = 110,
title = 'ASN',
),
),
autotrace = True,
targets = [
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'BusiestClientSubnets' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
notEmpty(rcodeText) ? rcodeText : concat('RCODE', toString(rcode)) AS DisplayRcode,
sum(rcodeCount) / ($to - $from) AS rcodeCount,
ClientASN
FROM
(
SELECT
ClientASN,
rcode,
sum(rcodeCount) AS rcodeCount,
any(sCount) AS sCount
FROM
(
SELECT
ClientASN,
sum(RcodeMap.Count) AS sCount
FROM $table
ARRAY JOIN RcodeMap
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY
ClientASN
ORDER BY sCount DESC, ClientASN ASC
LIMIT 30
) AS ClientASNCounts
ALL LEFT JOIN
(
SELECT
ClientASN,
RcodeMap.ResponseRcode AS rcode,
sum(RcodeMap.Count) AS rcodeCount
FROM $table
ARRAY JOIN RcodeMap
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY
ClientASN,
rcode
UNION ALL
(
SELECT
ClientASN,
rcode,
CAST(0 AS UInt64) AS rcodeCount
FROM
(
SELECT
0 AS Zero,
ClientASN
FROM $table
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY ClientASN
) AS ZeroClientASN
ALL LEFT JOIN
(
SELECT
0 AS Zero,
RcodeMap.ResponseRcode AS rcode
FROM $table
ARRAY JOIN RcodeMap
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY rcode
) AS ZeroRcode USING Zero
)
) AS ClientASNRcodeCounts USING ClientASN
GROUP BY
ClientASN,
rcode
) AS ClientASNRcodeCountsTotal
ALL INNER JOIN
(
SELECT
value_name AS rcodeText,
toUInt16(value) AS rcode
FROM {nodeinfo_database}.iana_text
WHERE registry_name = 'RCODE'
) AS ClientASNNameCountsTotal USING rcode
GROUP BY
ClientASN,
rcode,
rcodeText
ORDER BY
sum(sCount) ASC,
rcodeText ASC,
ClientASN DESC""".format(
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'A'
)
],
),
],
),
GCore.Row(
height = GCore.Pixels(GCore.DEFAULT_ROW_HEIGHT.num * 2),
panels = [
GCommon.BarChart(
title = 'RCODE by clients by AS subnet',
orientation = GCommon.BAR_CHART_ORIENTATION_HORIZONTAL,
layout = GCommon.BarChartLayout(
barmode = GCommon.BAR_CHART_LAYOUT_MODE_STACK,
showlegend = True,
xaxis = GCommon.BarChartAxis(
title = 'Queries per second',
),
yaxis = GCommon.BarChartAxis(
autotick = False,
axtype = GCommon.BAR_CHART_AXIS_TYPE_CATEGORY,
tickmargin = 110,
title = 'AS Subnet',
),
),
autotrace = True,
targets = [
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'BGPPrefix' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
notEmpty(rcodeText) ? rcodeText : concat('RCODE', toString(rcode)) AS DisplayRcode,
sum(rcodeCount) / ($to - $from) AS rcodeCount,
Prefix
FROM
(
SELECT
Prefix,
rcode,
sum(rcodeCount) AS rcodeCount,
any(sCount) AS sCount
FROM
(
SELECT
Prefix,
sum(RcodeMap.Count) AS sCount
FROM $table
ARRAY JOIN RcodeMap
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY
Prefix
ORDER BY sCount DESC, Prefix ASC
LIMIT 30
) AS PrefixCount
ALL LEFT JOIN
(
SELECT
Prefix,
RcodeMap.ResponseRcode AS rcode,
sum(RcodeMap.Count) AS rcodeCount
FROM $table
ARRAY JOIN RcodeMap
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY
Prefix,
rcode
UNION ALL
(
SELECT
Prefix,
rcode,
CAST(0 AS UInt64) AS rcodeCount
FROM
(
SELECT
0 AS Zero,
Prefix
FROM $table
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
) AS ZeroPrefox
ALL LEFT JOIN
(
SELECT
0 AS Zero,
RcodeMap.ResponseRcode AS rcode
FROM $table
ARRAY JOIN RcodeMap
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY rcode
) AS ZeroRcode USING Zero
)
) AS PrefixRcodeCounts USING Prefix
GROUP BY
Prefix,
rcode
) AS PrefixRcodeCountsTotal
ALL INNER JOIN
(
SELECT
value_name AS rcodeText,
toUInt16(value) AS rcode
FROM {nodeinfo_database}.iana_text
WHERE registry_name = 'RCODE'
) AS PrefixNameCountsTotal USING rcode
GROUP BY
Prefix,
rcode,
rcodeText
ORDER BY
sum(sCount) ASC,
rcodeText ASC,
Prefix DESC""".format(
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'A'
)
],
),
],
),
GCore.Row(
height = GCore.Pixels(GCore.DEFAULT_ROW_HEIGHT.num * 2),
panels = [
GCommon.BarChart(
title = 'RCODE by clients by fixed subnet',
orientation = GCommon.BAR_CHART_ORIENTATION_HORIZONTAL,
layout = GCommon.BarChartLayout(
barmode = GCommon.BAR_CHART_LAYOUT_MODE_STACK,
showlegend = True,
xaxis = GCommon.BarChartAxis(
title = 'Queries per second',
),
yaxis = GCommon.BarChartAxis(
autotick = False,
axtype = GCommon.BAR_CHART_AXIS_TYPE_CATEGORY,
tickmargin = 110,
title = 'Fixed Subnet',
),
),
autotrace = True,
targets = [
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'BusiestClientSubnets' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
notEmpty(rcodeText) ? rcodeText : concat('RCODE', toString(rcode)) AS DisplayRcode,
sum(rcodeCount) / ($to - $from) AS rcodeCount,
Prefix
FROM
(
SELECT
Prefix,
rcode,
sum(rcodeCount) AS rcodeCount,
any(sCount) AS sCount
FROM
(
SELECT
Prefix,
sum(RcodeMap.Count) AS sCount
FROM $table
ARRAY JOIN RcodeMap
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY
Prefix
ORDER BY sCount DESC, Prefix ASC
LIMIT 30
) AS PrefixCount
ALL LEFT JOIN
(
SELECT
Prefix,
RcodeMap.ResponseRcode AS rcode,
sum(RcodeMap.Count) AS rcodeCount
FROM $table
ARRAY JOIN RcodeMap
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY
Prefix,
rcode
UNION ALL
(
SELECT
Prefix,
rcode,
CAST(0 AS UInt64) AS rcodeCount
FROM
(
SELECT
0 AS Zero,
Prefix
FROM $table
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY Prefix
) AS ZeroPrefix
ALL LEFT JOIN
(
SELECT
0 AS Zero,
RcodeMap.ResponseRcode AS rcode
FROM $table
ARRAY JOIN RcodeMap
WHERE
$timeFilter
AND NodeID IN {nodesel}
GROUP BY rcode
) AS ZeroRcode USING Zero
)
) AS PrefixRcodeCounts USING Prefix
GROUP BY
Prefix,
rcode
) AS PrefixRcodeCountsTotal
ALL INNER JOIN
(
SELECT
value_name AS rcodeText,
toUInt16(value) AS rcode
FROM {nodeinfo_database}.iana_text
WHERE registry_name = 'RCODE'
) AS PrefixNameCountsTotal USING rcode
GROUP BY
Prefix,
rcode,
rcodeText
ORDER BY
sum(sCount) ASC,
rcodeText ASC,
Prefix DESC""".format(
nodesel=nodesel,
nodeinfo_database=agginfo['nodeinfo_database'])),
refId = 'A'
)
],
),
],
),
GCore.Row(
height = GCore.Pixels(GCore.DEFAULT_ROW_HEIGHT.num * 2),
panels = [
GCommon.BarChart(
title = 'Root abusers by fixed subnet',
orientation = GCommon.BAR_CHART_ORIENTATION_HORIZONTAL,
layout = GCommon.BarChartLayout(
xaxis = GCommon.BarChartAxis(
title = 'Queries per second',
),
yaxis = GCommon.BarChartAxis(
autotick = False,
axtype = GCommon.BAR_CHART_AXIS_TYPE_CATEGORY,
tickmargin = 110,
title = 'Fixed Subnet',
),
),
traces = [
GCommon.BarChartTrace(
name = 'Subnet',
color = '#A352CC',
x = 'QPS',
y = 'Subnet',
text = 'QPS',
),
],
targets = [
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'QueryClassifications' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
Subnet,
QPS
FROM
(
SELECT
FixedPrefix AS Subnet,
sum(RootAbuseCount)/($to - $from) AS QPS
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}
GROUP BY FixedPrefix
ORDER BY QPS DESC
LIMIT 40
)
ORDER BY QPS ASC""".format(
nodesel=nodesel)),
refId = 'A'
)
],
),
],
),
GCore.Row(
height = GCore.Pixels(GCore.DEFAULT_ROW_HEIGHT.num * 2),
panels = [
query_classification_chart(
'Query classification by busiest fixed subnet',
'Fixed Subnet',
'FixedPrefix',
agginfo,
nodesel)
],
),
GCore.Row(
height = GCore.Pixels(GCore.DEFAULT_ROW_HEIGHT.num * 2),
panels = [
query_classification_chart(
'Query classification by busiest ASN',
'ASN',
'ClientASN',
agginfo,
nodesel)
],
),
GCore.Row(
height = GCore.Pixels(GCore.DEFAULT_ROW_HEIGHT.num * 2),
panels = [
query_classification_chart(
'Query classification by busiest AS subnet',
'AS subnet',
'ASPrefix',
agginfo,
nodesel)
],
),
]
)
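
if __name__ == '__main__':
    # Minimal illustrative driver -- not part of the original module. The agginfo keys
    # mirror the lookups made above ('database', 'table_suffix', 'round', 'graph_tag',
    # 'nodeinfo_database'); every value below is an assumption, as is the nodesel string,
    # which is spliced into the "NodeID IN {nodesel}" clauses. This also assumes the
    # GCommon helpers can construct these panels without contacting Grafana or ClickHouse.
    example_agginfo = {
        'database': 'dsv_aggregates',     # assumed ClickHouse database name
        'table_suffix': 'PerFiveMins',    # assumed aggregation table suffix
        'round': '1m',                    # assumed query rounding interval
        'graph_tag': '5min-aggregation',  # assumed dashboard tag
        'nodeinfo_database': 'dsv_info',  # assumed node metadata database
    }
    dashboard = dash('client-subnet-detail-example', example_agginfo, '($nodes)')
    print('Built dashboard object:', type(dashboard).__name__)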
|
[
"grafanacommon.BarChartTrace",
"grafanacommon.BarChartAxis",
"grafanacommon.HTMLPanel",
"grafanalib.core.Pixels"
] |
[((1152, 1229), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""AForA"""', 'x': '"""AForA"""', 'y': '"""AForAPrefix"""', 'text': '"""AForA"""'}), "(name='AForA', x='AForA', y='AForAPrefix', text='AForA')\n", (1173, 1229), True, 'import grafanacommon as GCommon\n'), ((1330, 1423), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""AForRoot"""', 'x': '"""AForRoot"""', 'y': '"""AForRootPrefix"""', 'text': '"""AForRoot"""'}), "(name='AForRoot', x='AForRoot', y='AForRootPrefix',\n text='AForRoot')\n", (1351, 1423), True, 'import grafanacommon as GCommon\n'), ((1520, 1642), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""FunnyQueryClass"""', 'x': '"""FunnyQueryClass"""', 'y': '"""FunnyQueryClassPrefix"""', 'text': '"""FunnyQueryClass"""'}), "(name='FunnyQueryClass', x='FunnyQueryClass', y=\n 'FunnyQueryClassPrefix', text='FunnyQueryClass')\n", (1541, 1642), True, 'import grafanacommon as GCommon\n'), ((1738, 1856), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""FunnyQueryType"""', 'x': '"""FunnyQueryType"""', 'y': '"""FunnyQueryTypePrefix"""', 'text': '"""FunnyQueryType"""'}), "(name='FunnyQueryType', x='FunnyQueryType', y=\n 'FunnyQueryTypePrefix', text='FunnyQueryType')\n", (1759, 1856), True, 'import grafanacommon as GCommon\n'), ((1952, 2049), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""Localhost"""', 'x': '"""Localhost"""', 'y': '"""LocalhostPrefix"""', 'text': '"""Localhost"""'}), "(name='Localhost', x='Localhost', y='LocalhostPrefix',\n text='Localhost')\n", (1973, 2049), True, 'import grafanacommon as GCommon\n'), ((2146, 2248), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""NonAuthTld"""', 'x': '"""NonAuthTld"""', 'y': '"""NonAuthTldPrefix"""', 'text': '"""NonAuthTld"""'}), "(name='NonAuthTld', x='NonAuthTld', y=\n 'NonAuthTldPrefix', text='NonAuthTld')\n", (2167, 2248), True, 'import grafanacommon as GCommon\n'), ((2344, 2409), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""Ok"""', 'x': '"""Ok"""', 'y': '"""OkPrefix"""', 'text': '"""Ok"""'}), "(name='Ok', x='Ok', y='OkPrefix', text='Ok')\n", (2365, 2409), True, 'import grafanacommon as GCommon\n'), ((2510, 2612), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""RFC1918Ptr"""', 'x': '"""RFC1918Ptr"""', 'y': '"""RFC1918PtrPrefix"""', 'text': '"""RFC1918Ptr"""'}), "(name='RFC1918Ptr', x='RFC1918Ptr', y=\n 'RFC1918PtrPrefix', text='RFC1918Ptr')\n", (2531, 2612), True, 'import grafanacommon as GCommon\n'), ((2708, 2826), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""RootServersNet"""', 'x': '"""RootServersNet"""', 'y': '"""RootServersNetPrefix"""', 'text': '"""RootServersNet"""'}), "(name='RootServersNet', x='RootServersNet', y=\n 'RootServersNetPrefix', text='RootServersNet')\n", (2729, 2826), True, 'import grafanacommon as GCommon\n'), ((2922, 3028), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""SrcPortZero"""', 'x': '"""SrcPortZero"""', 'y': '"""SrcPortZeroPrefix"""', 'text': '"""SrcPortZero"""'}), "(name='SrcPortZero', x='SrcPortZero', y=\n 'SrcPortZeroPrefix', text='SrcPortZero')\n", (2943, 3028), True, 'import grafanacommon as GCommon\n'), ((802, 850), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'title': '"""Queries per second"""'}), "(title='Queries per second')\n", (822, 850), True, 'import grafanacommon as GCommon\n'), ((905, 1026), 
'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'autotick': '(False)', 'axtype': 'GCommon.BAR_CHART_AXIS_TYPE_CATEGORY', 'tickmargin': '(110)', 'title': 'yaxis_label'}), '(autotick=False, axtype=GCommon.\n BAR_CHART_AXIS_TYPE_CATEGORY, tickmargin=110, title=yaxis_label)\n', (925, 1026), True, 'import grafanacommon as GCommon\n'), ((15236, 15252), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(50)'], {}), '(50)\n', (15248, 15252), True, 'import grafanalib.core as GCore\n'), ((15501, 15547), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(GCore.DEFAULT_ROW_HEIGHT.num * 2)'], {}), '(GCore.DEFAULT_ROW_HEIGHT.num * 2)\n', (15513, 15547), True, 'import grafanalib.core as GCore\n'), ((18132, 18178), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(GCore.DEFAULT_ROW_HEIGHT.num * 2)'], {}), '(GCore.DEFAULT_ROW_HEIGHT.num * 2)\n', (18144, 18178), True, 'import grafanalib.core as GCore\n'), ((25122, 25168), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(GCore.DEFAULT_ROW_HEIGHT.num * 2)'], {}), '(GCore.DEFAULT_ROW_HEIGHT.num * 2)\n', (25134, 25168), True, 'import grafanalib.core as GCore\n'), ((32055, 32101), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(GCore.DEFAULT_ROW_HEIGHT.num * 2)'], {}), '(GCore.DEFAULT_ROW_HEIGHT.num * 2)\n', (32067, 32101), True, 'import grafanalib.core as GCore\n'), ((39005, 39051), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(GCore.DEFAULT_ROW_HEIGHT.num * 2)'], {}), '(GCore.DEFAULT_ROW_HEIGHT.num * 2)\n', (39017, 39051), True, 'import grafanalib.core as GCore\n'), ((41660, 41706), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(GCore.DEFAULT_ROW_HEIGHT.num * 2)'], {}), '(GCore.DEFAULT_ROW_HEIGHT.num * 2)\n', (41672, 41706), True, 'import grafanalib.core as GCore\n'), ((42082, 42128), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(GCore.DEFAULT_ROW_HEIGHT.num * 2)'], {}), '(GCore.DEFAULT_ROW_HEIGHT.num * 2)\n', (42094, 42128), True, 'import grafanalib.core as GCore\n'), ((42484, 42530), 'grafanalib.core.Pixels', 'GCore.Pixels', (['(GCore.DEFAULT_ROW_HEIGHT.num * 2)'], {}), '(GCore.DEFAULT_ROW_HEIGHT.num * 2)\n', (42496, 42530), True, 'import grafanalib.core as GCore\n'), ((15301, 15423), 'grafanacommon.HTMLPanel', 'GCommon.HTMLPanel', (['"""grafana/common/dashboards/aggregated/client_subnet_statistics_header.html"""'], {'transparent': '(True)'}), "(\n 'grafana/common/dashboards/aggregated/client_subnet_statistics_header.html'\n , transparent=True)\n", (15318, 15423), True, 'import grafanacommon as GCommon\n'), ((16375, 16465), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""Subnet"""', 'color': '"""#A352CC"""', 'x': '"""QPS"""', 'y': '"""Subnet"""', 'text': '"""QPS"""'}), "(name='Subnet', color='#A352CC', x='QPS', y='Subnet',\n text='QPS')\n", (16396, 16465), True, 'import grafanacommon as GCommon\n'), ((39884, 39974), 'grafanacommon.BarChartTrace', 'GCommon.BarChartTrace', ([], {'name': '"""Subnet"""', 'color': '"""#A352CC"""', 'x': '"""QPS"""', 'y': '"""Subnet"""', 'text': '"""QPS"""'}), "(name='Subnet', color='#A352CC', x='QPS', y='Subnet',\n text='QPS')\n", (39905, 39974), True, 'import grafanacommon as GCommon\n'), ((15846, 15894), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'title': '"""Queries per second"""'}), "(title='Queries per second')\n", (15866, 15894), True, 'import grafanacommon as GCommon\n'), ((15997, 16121), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'autotick': '(False)', 'axtype': 'GCommon.BAR_CHART_AXIS_TYPE_CATEGORY', 'tickmargin': '(110)', 'title': '"""Fixed Subnet"""'}), 
"(autotick=False, axtype=GCommon.\n BAR_CHART_AXIS_TYPE_CATEGORY, tickmargin=110, title='Fixed Subnet')\n", (16017, 16121), True, 'import grafanacommon as GCommon\n'), ((18599, 18647), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'title': '"""Queries per second"""'}), "(title='Queries per second')\n", (18619, 18647), True, 'import grafanacommon as GCommon\n'), ((18750, 18865), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'autotick': '(False)', 'axtype': 'GCommon.BAR_CHART_AXIS_TYPE_CATEGORY', 'tickmargin': '(110)', 'title': '"""ASN"""'}), "(autotick=False, axtype=GCommon.\n BAR_CHART_AXIS_TYPE_CATEGORY, tickmargin=110, title='ASN')\n", (18770, 18865), True, 'import grafanacommon as GCommon\n'), ((25595, 25643), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'title': '"""Queries per second"""'}), "(title='Queries per second')\n", (25615, 25643), True, 'import grafanacommon as GCommon\n'), ((25746, 25867), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'autotick': '(False)', 'axtype': 'GCommon.BAR_CHART_AXIS_TYPE_CATEGORY', 'tickmargin': '(110)', 'title': '"""AS Subnet"""'}), "(autotick=False, axtype=GCommon.\n BAR_CHART_AXIS_TYPE_CATEGORY, tickmargin=110, title='AS Subnet')\n", (25766, 25867), True, 'import grafanacommon as GCommon\n'), ((32531, 32579), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'title': '"""Queries per second"""'}), "(title='Queries per second')\n", (32551, 32579), True, 'import grafanacommon as GCommon\n'), ((32682, 32806), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'autotick': '(False)', 'axtype': 'GCommon.BAR_CHART_AXIS_TYPE_CATEGORY', 'tickmargin': '(110)', 'title': '"""Fixed Subnet"""'}), "(autotick=False, axtype=GCommon.\n BAR_CHART_AXIS_TYPE_CATEGORY, tickmargin=110, title='Fixed Subnet')\n", (32702, 32806), True, 'import grafanacommon as GCommon\n'), ((39355, 39403), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'title': '"""Queries per second"""'}), "(title='Queries per second')\n", (39375, 39403), True, 'import grafanacommon as GCommon\n'), ((39506, 39630), 'grafanacommon.BarChartAxis', 'GCommon.BarChartAxis', ([], {'autotick': '(False)', 'axtype': 'GCommon.BAR_CHART_AXIS_TYPE_CATEGORY', 'tickmargin': '(110)', 'title': '"""Fixed Subnet"""'}), "(autotick=False, axtype=GCommon.\n BAR_CHART_AXIS_TYPE_CATEGORY, tickmargin=110, title='Fixed Subnet')\n", (39526, 39630), True, 'import grafanacommon as GCommon\n')]
|
import re
class lexical(object):
    '''Lexical features of a URL, computed from its hostname and path.
    lexical() returns, in order:
    Number of dots in hostname (int)
    Average token length of hostname (float)
    Max token length of hostname (int)
    Average token length of path (float)
    Max token length of path (int)
    '''
def __init__(self):
pass
def lexical(self,hostname,path):
dot_num=self.dots(hostname)
arr_host=self.split(hostname)
arr_path=self.split(path)
avg_host=self.avg(arr_host)
max_host=self.max(arr_host)
avg_path=self.avg(arr_path)
max_path=self.max(arr_path)
return dot_num,avg_host,max_host,avg_path,max_path
def dots(self,hostname):
# returns number of dots
return hostname.count('.')
def split(self,string):
# returns a list split by ‘/’, ‘?’, ‘.’, ‘=’, ‘-’ and ‘_’
        return re.split(r'/|\?|\.|=|-|_', string)
def avg(self,arr):
# returns average token length
return sum(len(token) for token in arr)/len(arr)
def max(self,arr):
# returns max token length
return max(len(token) for token in arr)
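
# Minimal usage sketch (not part of the original class); the URL pieces below are
# illustrative assumptions.
if __name__ == '__main__':
    features = lexical()
    hostname = 'login.example-bank.com'
    path = '/secure/update/index.php?session=abc123'
    dots, avg_host, max_host, avg_path, max_path = features.lexical(hostname, path)
    print('dots in hostname:', dots)                      # 2 for this hostname
    print('hostname token length avg/max:', avg_host, max_host)
    print('path token length avg/max:', avg_path, max_path)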
|
[
"re.split"
] |
[((876, 911), 're.split', 're.split', (['"""/|\\\\?|\\\\.|=|-|_"""', 'string'], {}), "('/|\\\\?|\\\\.|=|-|_', string)\n", (884, 911), False, 'import re\n')]
|
from setuptools import setup, find_packages
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='digicert-express',
version='1.1dev2',
description='Express Install for DigiCert, Inc.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Topic :: Security',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
url='https://github.com/digicert/digicert_express',
author='DigiCert, Inc.',
author_email='<EMAIL>',
license='MIT',
zip_safe=False,
packages=find_packages(exclude=['tests.*', '*.tests.*', '*.tests', 'tests', 'scripts']),
include_package_data=True,
install_requires=[
'python-augeas',
'requests>=2.8.1',
'ndg-httpsclient',
'pyasn1',
'pyOpenSSL' # prefer OS install but we can try here, too
],
)
|
[
"setuptools.find_packages"
] |
[((727, 805), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests.*', '*.tests.*', '*.tests', 'tests', 'scripts']"}), "(exclude=['tests.*', '*.tests.*', '*.tests', 'tests', 'scripts'])\n", (740, 805), False, 'from setuptools import setup, find_packages\n')]
|
import argparse
import json
import numpy as np
import os
import torch
import data_
import models
import utils
from matplotlib import cm, pyplot as plt
from tensorboardX import SummaryWriter
from torch import optim
from torch.utils import data
from tqdm import tqdm
from utils import io
parser = argparse.ArgumentParser()
# CUDA
parser.add_argument('--use_gpu', type=bool, default=True, help='Whether to use GPU.')
# data
parser.add_argument('--dataset_name', type=str, default='spirals',
help='Name of dataset to use.')
parser.add_argument('--n_data_points', default=int(1e6),
help='Number of unique data points in training set.')
parser.add_argument('--batch_size', type=int, default=256,
help='Size of batch used for training.')
parser.add_argument('--num_workers', type=int, default=0,
help='Number of workers used in data loaders.')
# MADE
parser.add_argument('--n_residual_blocks_made', default=4,
help='Number of residual blocks in MADE.')
parser.add_argument('--hidden_dim_made', default=256,
help='Dimensionality of hidden layers in MADE.')
parser.add_argument('--activation_made', default='relu',
help='Activation function for MADE.')
parser.add_argument('--use_batch_norm_made', default=False,
help='Whether to use batch norm in MADE.')
parser.add_argument('--dropout_probability_made', default=None,
help='Dropout probability for MADE.')
# energy net
parser.add_argument('--context_dim', default=64,
help='Dimensionality of context vector.')
parser.add_argument('--n_residual_blocks_energy_net', default=4,
help='Number of residual blocks in energy net.')
parser.add_argument('--hidden_dim_energy_net', default=128,
help='Dimensionality of hidden layers in energy net.')
parser.add_argument('--energy_upper_bound', default=0,
help='Max value for output of energy net.')
parser.add_argument('--activation_energy_net', default='relu',
help='Activation function for energy net.')
parser.add_argument('--use_batch_norm_energy_net', default=False,
help='Whether to use batch norm in energy net.')
parser.add_argument('--dropout_probability_energy_net', default=None,
help='Dropout probability for energy net.')
parser.add_argument('--scale_activation', default='softplus',
help='Activation to use for scales in proposal mixture components.')
parser.add_argument('--apply_context_activation', default=False,
help='Whether to apply activation to context vector.')
# proposal
parser.add_argument('--n_mixture_components', default=10,
help='Number of proposal mixture components (per dimension).')
parser.add_argument('--proposal_component', default='gaussian',
help='Type of location-scale family distribution '
'to use in proposal mixture.')
parser.add_argument('--n_proposal_samples_per_input', default=20,
help='Number of proposal samples used to estimate '
'normalizing constant during training.')
parser.add_argument('--n_proposal_samples_per_input_validation', default=100,
help='Number of proposal samples used to estimate '
'normalizing constant during validation.')
parser.add_argument('--mixture_component_min_scale', default=1e-3,
help='Minimum scale for proposal mixture components.')
# optimization
parser.add_argument('--learning_rate', default=5e-4,
help='Learning rate for Adam.')
parser.add_argument('--n_total_steps', default=int(4e5),
help='Number of total training steps.')
parser.add_argument('--alpha_warm_up_steps', default=5000,
help='Number of warm-up steps for AEM density.')
parser.add_argument('--hard_alpha_warm_up', default=True,
help='Whether to use a hard warm up for alpha')
# logging and checkpoints
parser.add_argument('--monitor_interval', default=100,
help='Interval in steps at which to report training stats.')
parser.add_argument('--visualize_interval', default=10000,
help='Interval in steps at which to report training stats.')
parser.add_argument('--save_interval', default=10000,
help='Interval in steps at which to save model.')
# reproducibility
parser.add_argument('--seed', default=1638128,
help='Random seed for PyTorch and NumPy.')
args = parser.parse_args()
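# Example invocation (illustrative only; the script filename is an assumption, not taken
# from this file):
#   python train_plane_aem.py --dataset_name spirals --batch_size 512 --num_workers 2
# Note that only arguments declared with an explicit type= (e.g. --batch_size) are converted
# from their command-line strings; flags declared without type= arrive as strings if
# overridden on the command line.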
torch.manual_seed(args.seed)
np.random.seed(args.seed)
if args.use_gpu and torch.cuda.is_available():
device = torch.device('cuda')
torch.set_default_tensor_type('torch.cuda.FloatTensor')
else:
device = torch.device('cpu')
# Generate data
train_dataset = data_.load_plane_dataset(args.dataset_name, args.n_data_points)
train_loader = data_.InfiniteLoader(
dataset=train_dataset,
batch_size=args.batch_size,
shuffle=True,
drop_last=True,
num_epochs=None
)
# Generate test grid data
n_points_per_axis = 512
bounds = np.array([
[-4, 4],
[-4, 4]
])
grid_dataset = data_.TestGridDataset(n_points_per_axis=n_points_per_axis, bounds=bounds)
grid_loader = data.DataLoader(
dataset=grid_dataset,
batch_size=1000,
drop_last=False
)
# various dimensions for autoregressive and energy nets
dim = 2 # D
# K + 3M: a context vector of size K plus (mixing weight, location, scale) for each of the
# M proposal mixture components, emitted for every input dimension
output_dim_multiplier = args.context_dim + 3 * args.n_mixture_components  # K + 3M
# Create MADE
made = models.ResidualMADE(
input_dim=dim,
n_residual_blocks=args.n_residual_blocks_made,
hidden_dim=args.hidden_dim_made,
output_dim_multiplier=output_dim_multiplier,
conditional=False,
activation=utils.parse_activation(args.activation_made),
use_batch_norm=args.use_batch_norm_made,
dropout_probability=args.dropout_probability_made
).to(device)
# create energy net
energy_net = models.ResidualEnergyNet(
input_dim=(args.context_dim + 1),
n_residual_blocks=args.n_residual_blocks_energy_net,
hidden_dim=args.hidden_dim_energy_net,
energy_upper_bound=args.energy_upper_bound,
activation=utils.parse_activation(args.activation_energy_net),
use_batch_norm=args.use_batch_norm_energy_net,
dropout_probability=args.dropout_probability_energy_net
).to(device)
# create AEM
aem = models.AEM(
autoregressive_net=made,
energy_net=energy_net,
context_dim=args.context_dim,
n_proposal_mixture_components=args.n_mixture_components,
proposal_component_family=args.proposal_component,
n_proposal_samples_per_input=args.n_proposal_samples_per_input,
mixture_component_min_scale=args.mixture_component_min_scale,
apply_context_activation=args.apply_context_activation
).to(device)
# make optimizer
parameters = list(made.parameters()) + list(energy_net.parameters())
optimizer = optim.Adam(parameters, lr=args.learning_rate)
scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, args.n_total_steps)
# create summary writer and write to log directory
timestamp = io.get_timestamp()
log_dir = os.path.join(io.get_log_root(), args.dataset_name, timestamp)
writer = SummaryWriter(log_dir=log_dir)
filename = os.path.join(log_dir, 'config.json')
with open(filename, 'w') as file:
json.dump(vars(args), file)
# Training loop
tbar = tqdm(range(args.n_total_steps))
alpha = 0
for step in tbar:
aem.train()
scheduler.step(step)
optimizer.zero_grad()
# training step
batch = next(train_loader).to(device)
log_density, log_proposal_density, _, log_normalizer = aem(batch)
mean_log_density = torch.mean(log_density)
mean_log_proposal_density = torch.mean(log_proposal_density)
mean_log_normalizer = torch.mean(log_normalizer)
    # Alpha warm-up: alpha gates the AEM log-density term so that only the proposal is
    # trained at first; hard warm-up switches alpha from 0 to 1 after alpha_warm_up_steps,
    # otherwise alpha ramps up linearly over the warm-up period.
    if args.alpha_warm_up_steps is not None:
        if args.hard_alpha_warm_up:
            alpha = float(step > args.alpha_warm_up_steps)
        else:
            alpha = torch.Tensor([min(step / args.alpha_warm_up_steps, 1)])
        loss = - (alpha * mean_log_density + mean_log_proposal_density)
    else:
        loss = - (mean_log_density + mean_log_proposal_density)
loss.backward()
optimizer.step()
if (step + 1) % args.monitor_interval == 0:
s = 'Loss: {:.4f}, log p: {:.4f}, log q: {:.4f}'.format(
loss.item(),
mean_log_density.item(),
mean_log_proposal_density.item()
)
tbar.set_description(s)
# write summaries
summaries = {
'loss': loss.detach(),
'log-prob-aem': mean_log_density.detach(),
'log-prob-proposal': mean_log_proposal_density.detach(),
'log-normalizer': mean_log_normalizer.detach(),
'learning-rate': torch.Tensor(scheduler.get_lr()),
}
for summary, value in summaries.items():
writer.add_scalar(tag=summary, scalar_value=value, global_step=step)
if (step + 1) % args.visualize_interval == 0:
# Plotting
aem.eval()
aem.set_n_proposal_samples_per_input_validation(
args.n_proposal_samples_per_input_validation)
log_density_np = []
log_proposal_density_np = []
for batch in grid_loader:
batch = batch.to(device)
log_density, log_proposal_density, unnormalized_log_density, log_normalizer = aem(
batch)
log_density_np = np.concatenate((
log_density_np, utils.tensor2numpy(log_density)
))
log_proposal_density_np = np.concatenate((
log_proposal_density_np, utils.tensor2numpy(log_proposal_density)
))
fig, axs = plt.subplots(1, 3, figsize=(7.5, 2.5))
axs[0].hist2d(train_dataset.data[:, 0], train_dataset.data[:, 1],
range=bounds, bins=512, cmap=cm.viridis, rasterized=False)
axs[0].set_xticks([])
axs[0].set_yticks([])
axs[1].pcolormesh(grid_dataset.X, grid_dataset.Y,
np.exp(log_proposal_density_np).reshape(grid_dataset.X.shape))
axs[1].set_xlim(bounds[0])
axs[1].set_ylim(bounds[1])
axs[1].set_xticks([])
axs[1].set_yticks([])
axs[2].pcolormesh(grid_dataset.X, grid_dataset.Y,
np.exp(log_density_np).reshape(grid_dataset.X.shape))
axs[2].set_xlim(bounds[0])
axs[2].set_ylim(bounds[1])
axs[2].set_xticks([])
axs[2].set_yticks([])
plt.tight_layout()
path = os.path.join(io.get_output_root(), 'pytorch', '{}.png'.format(args.dataset_name))
        # make sure the output directory (including the 'pytorch' subfolder) exists
        if not os.path.exists(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))
plt.savefig(path, dpi=300)
writer.add_figure(tag='test-grid', figure=fig, global_step=step)
plt.close()
if (step + 1) % args.save_interval == 0:
path = os.path.join(io.get_checkpoint_root(), 'pytorch', '{}.t'.format(args.dataset_name))
        # make sure the checkpoint directory (including the 'pytorch' subfolder) exists
        if not os.path.exists(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))
torch.save(aem.state_dict(), path)
path = os.path.join(io.get_checkpoint_root(),
'pytorch', '{}-{}.t'.format(args.dataset_name, timestamp))
torch.save(aem.state_dict(), path)
|
[
"numpy.array",
"utils.io.get_checkpoint_root",
"torch.cuda.is_available",
"data_.TestGridDataset",
"os.path.exists",
"tensorboardX.SummaryWriter",
"argparse.ArgumentParser",
"torch.mean",
"torch.set_default_tensor_type",
"matplotlib.pyplot.close",
"numpy.exp",
"utils.parse_activation",
"numpy.random.seed",
"matplotlib.pyplot.savefig",
"utils.io.get_output_root",
"utils.io.get_log_root",
"torch.device",
"torch.manual_seed",
"torch.optim.Adam",
"torch.optim.lr_scheduler.CosineAnnealingLR",
"os.path.join",
"models.AEM",
"data_.InfiniteLoader",
"matplotlib.pyplot.tight_layout",
"torch.utils.data.DataLoader",
"utils.tensor2numpy",
"utils.io.get_timestamp",
"matplotlib.pyplot.subplots",
"data_.load_plane_dataset"
] |
[((299, 324), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (322, 324), False, 'import argparse\n'), ((4721, 4749), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (4738, 4749), False, 'import torch\n'), ((4750, 4775), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (4764, 4775), True, 'import numpy as np\n'), ((4990, 5053), 'data_.load_plane_dataset', 'data_.load_plane_dataset', (['args.dataset_name', 'args.n_data_points'], {}), '(args.dataset_name, args.n_data_points)\n', (5014, 5053), False, 'import data_\n'), ((5069, 5191), 'data_.InfiniteLoader', 'data_.InfiniteLoader', ([], {'dataset': 'train_dataset', 'batch_size': 'args.batch_size', 'shuffle': '(True)', 'drop_last': '(True)', 'num_epochs': 'None'}), '(dataset=train_dataset, batch_size=args.batch_size,\n shuffle=True, drop_last=True, num_epochs=None)\n', (5089, 5191), False, 'import data_\n'), ((5270, 5298), 'numpy.array', 'np.array', (['[[-4, 4], [-4, 4]]'], {}), '([[-4, 4], [-4, 4]])\n', (5278, 5298), True, 'import numpy as np\n'), ((5324, 5397), 'data_.TestGridDataset', 'data_.TestGridDataset', ([], {'n_points_per_axis': 'n_points_per_axis', 'bounds': 'bounds'}), '(n_points_per_axis=n_points_per_axis, bounds=bounds)\n', (5345, 5397), False, 'import data_\n'), ((5412, 5483), 'torch.utils.data.DataLoader', 'data.DataLoader', ([], {'dataset': 'grid_dataset', 'batch_size': '(1000)', 'drop_last': '(False)'}), '(dataset=grid_dataset, batch_size=1000, drop_last=False)\n', (5427, 5483), False, 'from torch.utils import data\n'), ((7026, 7071), 'torch.optim.Adam', 'optim.Adam', (['parameters'], {'lr': 'args.learning_rate'}), '(parameters, lr=args.learning_rate)\n', (7036, 7071), False, 'from torch import optim\n'), ((7084, 7151), 'torch.optim.lr_scheduler.CosineAnnealingLR', 'optim.lr_scheduler.CosineAnnealingLR', (['optimizer', 'args.n_total_steps'], {}), '(optimizer, args.n_total_steps)\n', (7120, 7151), False, 'from torch import optim\n'), ((7216, 7234), 'utils.io.get_timestamp', 'io.get_timestamp', ([], {}), '()\n', (7232, 7234), False, 'from utils import io\n'), ((7316, 7346), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {'log_dir': 'log_dir'}), '(log_dir=log_dir)\n', (7329, 7346), False, 'from tensorboardX import SummaryWriter\n'), ((7358, 7394), 'os.path.join', 'os.path.join', (['log_dir', '"""config.json"""'], {}), "(log_dir, 'config.json')\n", (7370, 7394), False, 'import os\n'), ((4797, 4822), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4820, 4822), False, 'import torch\n'), ((4837, 4857), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (4849, 4857), False, 'import torch\n'), ((4862, 4917), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (['"""torch.cuda.FloatTensor"""'], {}), "('torch.cuda.FloatTensor')\n", (4891, 4917), False, 'import torch\n'), ((4937, 4956), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (4949, 4956), False, 'import torch\n'), ((7258, 7275), 'utils.io.get_log_root', 'io.get_log_root', ([], {}), '()\n', (7273, 7275), False, 'from utils import io\n'), ((7768, 7791), 'torch.mean', 'torch.mean', (['log_density'], {}), '(log_density)\n', (7778, 7791), False, 'import torch\n'), ((7824, 7856), 'torch.mean', 'torch.mean', (['log_proposal_density'], {}), '(log_proposal_density)\n', (7834, 7856), False, 'import torch\n'), ((7883, 7909), 'torch.mean', 'torch.mean', (['log_normalizer'], {}), '(log_normalizer)\n', (7893, 7909), False, 'import 
torch\n'), ((11252, 11276), 'utils.io.get_checkpoint_root', 'io.get_checkpoint_root', ([], {}), '()\n', (11274, 11276), False, 'from utils import io\n'), ((6503, 6902), 'models.AEM', 'models.AEM', ([], {'autoregressive_net': 'made', 'energy_net': 'energy_net', 'context_dim': 'args.context_dim', 'n_proposal_mixture_components': 'args.n_mixture_components', 'proposal_component_family': 'args.proposal_component', 'n_proposal_samples_per_input': 'args.n_proposal_samples_per_input', 'mixture_component_min_scale': 'args.mixture_component_min_scale', 'apply_context_activation': 'args.apply_context_activation'}), '(autoregressive_net=made, energy_net=energy_net, context_dim=args\n .context_dim, n_proposal_mixture_components=args.n_mixture_components,\n proposal_component_family=args.proposal_component,\n n_proposal_samples_per_input=args.n_proposal_samples_per_input,\n mixture_component_min_scale=args.mixture_component_min_scale,\n apply_context_activation=args.apply_context_activation)\n', (6513, 6902), False, 'import models\n'), ((9817, 9855), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(3)'], {'figsize': '(7.5, 2.5)'}), '(1, 3, figsize=(7.5, 2.5))\n', (9829, 9855), True, 'from matplotlib import cm, pyplot as plt\n'), ((10628, 10646), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (10644, 10646), True, 'from matplotlib import cm, pyplot as plt\n'), ((10836, 10862), 'matplotlib.pyplot.savefig', 'plt.savefig', (['path'], {'dpi': '(300)'}), '(path, dpi=300)\n', (10847, 10862), True, 'from matplotlib import cm, pyplot as plt\n'), ((10944, 10955), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (10953, 10955), True, 'from matplotlib import cm, pyplot as plt\n'), ((10676, 10696), 'utils.io.get_output_root', 'io.get_output_root', ([], {}), '()\n', (10694, 10696), False, 'from utils import io\n'), ((10760, 10780), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (10774, 10780), False, 'import os\n'), ((11030, 11054), 'utils.io.get_checkpoint_root', 'io.get_checkpoint_root', ([], {}), '()\n', (11052, 11054), False, 'from utils import io\n'), ((11116, 11136), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (11130, 11136), False, 'import os\n'), ((5888, 5932), 'utils.parse_activation', 'utils.parse_activation', (['args.activation_made'], {}), '(args.activation_made)\n', (5910, 5932), False, 'import utils\n'), ((6307, 6357), 'utils.parse_activation', 'utils.parse_activation', (['args.activation_energy_net'], {}), '(args.activation_energy_net)\n', (6329, 6357), False, 'import utils\n'), ((10806, 10826), 'utils.io.get_output_root', 'io.get_output_root', ([], {}), '()\n', (10824, 10826), False, 'from utils import io\n'), ((11162, 11186), 'utils.io.get_checkpoint_root', 'io.get_checkpoint_root', ([], {}), '()\n', (11184, 11186), False, 'from utils import io\n'), ((9598, 9629), 'utils.tensor2numpy', 'utils.tensor2numpy', (['log_density'], {}), '(log_density)\n', (9616, 9629), False, 'import utils\n'), ((9741, 9781), 'utils.tensor2numpy', 'utils.tensor2numpy', (['log_proposal_density'], {}), '(log_proposal_density)\n', (9759, 9781), False, 'import utils\n'), ((10157, 10188), 'numpy.exp', 'np.exp', (['log_proposal_density_np'], {}), '(log_proposal_density_np)\n', (10163, 10188), True, 'import numpy as np\n'), ((10435, 10457), 'numpy.exp', 'np.exp', (['log_density_np'], {}), '(log_density_np)\n', (10441, 10457), True, 'import numpy as np\n')]
|
import argparse
PROJROOTDIR = {'mac': '/Users/taehapark/SLAB/speedplusbaseline',
'linux': '/home/somrita/Documents/Satellite_Pose_Estimation/speedplusbaseline'}
DATAROOTDIR = {'mac': '/Users/taehapark/SLAB/speedplus/data/datasets',
'linux': '/home/somrita/Documents/Satellite_Pose_Estimation/dataset'}
parser = argparse.ArgumentParser('Configurations for SPEED+ Baseline Study')
# ------------------------------------------------------------------------------------------
# Basic directories and names
parser.add_argument('--seed', type=int, default=2021)
parser.add_argument('--projroot', type=str, default=PROJROOTDIR['linux'])
parser.add_argument('--dataroot', type=str, default=DATAROOTDIR['linux'])
parser.add_argument('--dataname', type=str, default='speedplus')
parser.add_argument('--savedir', type=str, default='checkpoints/synthetic/krn')
parser.add_argument('--resultfn', type=str, default='')
parser.add_argument('--logdir', type=str, default='log/synthetic/krn')
parser.add_argument('--pretrained', type=str, default='')
# ------------------------------------------------------------------------------------------
# Model config.
parser.add_argument('--model_name', type=str, default='krn')
parser.add_argument('--input_shape', nargs='+', type=int, default=(224, 224))
parser.add_argument('--num_keypoints', type=int, default=11) # KRN-specific
parser.add_argument('--num_classes', type=int, default=5000) # SPN-specific
parser.add_argument('--num_neighbors', type=int, default=5) # SPN-specific
parser.add_argument('--keypts_3d_model', type=str, default='src/utils/tangoPoints.mat')
parser.add_argument('--attitude_class', type=str, default='src/utils/attitudeClasses.mat')
# ------------------------------------------------------------------------------------------
# Training config.
parser.add_argument('--start_over', dest='auto_resume', action='store_false', default=True)
parser.add_argument('--randomize_texture', dest='randomize_texture', action='store_true', default=False)
parser.add_argument('--perform_dann', dest='dann', action='store_true', default=False)
parser.add_argument('--texture_alpha', type=float, default=0.5)
parser.add_argument('--texture_ratio', type=float, default=0.5)
parser.add_argument('--use_fp16', dest='fp16', action='store_true', default=False)
parser.add_argument('--batch_size', type=int, default=32)
parser.add_argument('--max_epochs', type=int, default=75)
parser.add_argument('--num_workers', type=int, default=8)
parser.add_argument('--test_epoch', type=int, default=-1)
parser.add_argument('--optimizer', type=str, default='rmsprop')
parser.add_argument('--lr', type=float, default=0.001)
parser.add_argument('--momentum', type=float, default=0.9)
parser.add_argument('--weight_decay', type=float, default=5e-5)
parser.add_argument('--lr_decay_alpha', type=float, default=0.96)
parser.add_argument('--lr_decay_step', type=int, default=1)
# ------------------------------------------------------------------------------------------
# Dataset-related inputs
parser.add_argument('--train_domain', type=str, default='synthetic')
parser.add_argument('--test_domain', type=str, default='lightbox')
parser.add_argument('--train_csv', type=str, default='train.csv')
parser.add_argument('--test_csv', type=str, default='lightbox.csv')
# ------------------------------------------------------------------------------------------
# Other miscellaneous settings
parser.add_argument('--gpu_id', type=int, default=0)
parser.add_argument('--no_cuda', dest='use_cuda', action='store_false', default=True)
# End
cfg = parser.parse_args()
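# Example invocation (illustrative only; the training entry point name is an assumption):
#   python train.py --model_name krn --train_domain synthetic --test_domain lightbox --batch_size 32
# This module parses the command line of whichever script imports it, exposing the result as `cfg`.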
|
[
"argparse.ArgumentParser"
] |
[((346, 413), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Configurations for SPEED+ Baseline Study"""'], {}), "('Configurations for SPEED+ Baseline Study')\n", (369, 413), False, 'import argparse\n')]
|
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from h1.api_client import ApiClient, Endpoint as _Endpoint
from h1.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from h1.model.event import Event
from h1.model.inline_response400 import InlineResponse400
from h1.model.insight_project_journal_create import InsightProjectJournalCreate
from h1.model.insight_project_journal_credential_patch import InsightProjectJournalCredentialPatch
from h1.model.insight_project_journal_transfer import InsightProjectJournalTransfer
from h1.model.insight_project_journal_update import InsightProjectJournalUpdate
from h1.model.journal import Journal
from h1.model.journal_credential import JournalCredential
from h1.model.resource_service import ResourceService
from h1.model.tag import Tag
from h1.model.tag_array import TagArray
class InsightProjectJournalApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __insight_project_journal_create(
self,
project_id,
location_id,
insight_project_journal_create,
**kwargs
):
"""Create insight/journal # noqa: E501
Create journal # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_create(project_id, location_id, insight_project_journal_create, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
insight_project_journal_create (InsightProjectJournalCreate):
Keyword Args:
x_idempotency_key (str): Idempotency key. [optional]
x_dry_run (str): Dry run. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Journal
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['insight_project_journal_create'] = \
insight_project_journal_create
return self.call_with_http_info(**kwargs)
self.insight_project_journal_create = _Endpoint(
settings={
'response_type': (Journal,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal',
'operation_id': 'insight_project_journal_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'insight_project_journal_create',
'x_idempotency_key',
'x_dry_run',
],
'required': [
'project_id',
'location_id',
'insight_project_journal_create',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'insight_project_journal_create':
(InsightProjectJournalCreate,),
'x_idempotency_key':
(str,),
'x_dry_run':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'x_idempotency_key': 'x-idempotency-key',
'x_dry_run': 'x-dry-run',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'insight_project_journal_create': 'body',
'x_idempotency_key': 'header',
'x_dry_run': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__insight_project_journal_create
)
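        # Illustrative usage sketch (comments only, not generated code). How the caller
        # obtains a configured ApiClient is assumed; the call pattern itself mirrors the
        # docstring above.
        #
        #   api = InsightProjectJournalApi(ApiClient())      # credentials configured elsewhere
        #   body = InsightProjectJournalCreate(...)          # payload fields per the HyperOne spec
        #   journal = api.insight_project_journal_create(project_id, location_id, body)
        #
        #   # Asynchronous variant: returns a thread-like handle, resolved with .get()
        #   thread = api.insight_project_journal_create(
        #       project_id, location_id, body, async_req=True)
        #   journal = thread.get()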
def __insight_project_journal_credential_create(
self,
project_id,
location_id,
journal_id,
journal_credential,
**kwargs
):
"""Create insight/journal.credential # noqa: E501
Create insight/journal.credential # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_credential_create(project_id, location_id, journal_id, journal_credential, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
journal_credential (JournalCredential):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
JournalCredential
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['journal_credential'] = \
journal_credential
return self.call_with_http_info(**kwargs)
self.insight_project_journal_credential_create = _Endpoint(
settings={
'response_type': (JournalCredential,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/credential',
'operation_id': 'insight_project_journal_credential_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'journal_credential',
],
'required': [
'project_id',
'location_id',
'journal_id',
'journal_credential',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'journal_credential':
(JournalCredential,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'journal_credential': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__insight_project_journal_credential_create
)
def __insight_project_journal_credential_delete(
self,
project_id,
location_id,
journal_id,
credential_id,
**kwargs
):
"""Delete insight/journal.credential # noqa: E501
Delete insight/journal.credential # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_credential_delete(project_id, location_id, journal_id, credential_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
credential_id (str): credentialId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Journal
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['credential_id'] = \
credential_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_credential_delete = _Endpoint(
settings={
'response_type': (Journal,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/credential/{credentialId}',
'operation_id': 'insight_project_journal_credential_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'credential_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
'credential_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'credential_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'credential_id': 'credentialId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'credential_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_credential_delete
)
def __insight_project_journal_credential_get(
self,
project_id,
location_id,
journal_id,
credential_id,
**kwargs
):
"""Get insight/journal.credential # noqa: E501
Get insight/journal.credential # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_credential_get(project_id, location_id, journal_id, credential_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
credential_id (str): credentialId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
JournalCredential
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['credential_id'] = \
credential_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_credential_get = _Endpoint(
settings={
'response_type': (JournalCredential,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/credential/{credentialId}',
'operation_id': 'insight_project_journal_credential_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'credential_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
'credential_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'credential_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'credential_id': 'credentialId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'credential_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_credential_get
)
def __insight_project_journal_credential_list(
self,
project_id,
location_id,
journal_id,
**kwargs
):
"""List insight/journal.credential # noqa: E501
List insight/journal.credential # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_credential_list(project_id, location_id, journal_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[JournalCredential]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_credential_list = _Endpoint(
settings={
'response_type': ([JournalCredential],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/credential',
'operation_id': 'insight_project_journal_credential_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_credential_list
)
def __insight_project_journal_credential_patch(
self,
project_id,
location_id,
journal_id,
credential_id,
insight_project_journal_credential_patch,
**kwargs
):
"""Update insight/journal.credential # noqa: E501
Update insight/journal.credential # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_credential_patch(project_id, location_id, journal_id, credential_id, insight_project_journal_credential_patch, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
credential_id (str): credentialId
insight_project_journal_credential_patch (InsightProjectJournalCredentialPatch):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
JournalCredential
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['credential_id'] = \
credential_id
kwargs['insight_project_journal_credential_patch'] = \
insight_project_journal_credential_patch
return self.call_with_http_info(**kwargs)
self.insight_project_journal_credential_patch = _Endpoint(
settings={
'response_type': (JournalCredential,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/credential/{credentialId}',
'operation_id': 'insight_project_journal_credential_patch',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'credential_id',
'insight_project_journal_credential_patch',
],
'required': [
'project_id',
'location_id',
'journal_id',
'credential_id',
'insight_project_journal_credential_patch',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'credential_id':
(str,),
'insight_project_journal_credential_patch':
(InsightProjectJournalCredentialPatch,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'credential_id': 'credentialId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'credential_id': 'path',
'insight_project_journal_credential_patch': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__insight_project_journal_credential_patch
)
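        # Illustrative sketch (comments only): partial update of a credential via the
        # PATCH endpoint above. The patch payload fields are assumptions; see the
        # InsightProjectJournalCredentialPatch model for the accepted attributes.
        #
        #   patch = InsightProjectJournalCredentialPatch(...)
        #   updated = api.insight_project_journal_credential_patch(
        #       project_id, location_id, journal_id, credential_id, patch)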
def __insight_project_journal_delete(
self,
project_id,
location_id,
journal_id,
**kwargs
):
"""Delete insight/journal # noqa: E501
Delete journal # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_delete(project_id, location_id, journal_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_delete = _Endpoint(
settings={
'response_type': None,
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}',
'operation_id': 'insight_project_journal_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_delete
)
def __insight_project_journal_event_get(
self,
project_id,
location_id,
journal_id,
event_id,
**kwargs
):
"""Get insight/journal.event # noqa: E501
Get insight/journal.event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_event_get(project_id, location_id, journal_id, event_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
event_id (str): eventId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Event
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['event_id'] = \
event_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_event_get = _Endpoint(
settings={
'response_type': (Event,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/event/{eventId}',
'operation_id': 'insight_project_journal_event_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'event_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
'event_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'event_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'event_id': 'eventId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'event_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_event_get
)
def __insight_project_journal_event_list(
self,
project_id,
location_id,
journal_id,
**kwargs
):
"""List insight/journal.event # noqa: E501
List insight/journal.event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_event_list(project_id, location_id, journal_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
Keyword Args:
limit (float): $limit. [optional] if omitted the server will use the default value of 100
skip (float): $skip. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Event]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_event_list = _Endpoint(
settings={
'response_type': ([Event],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/event',
'operation_id': 'insight_project_journal_event_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'limit',
'skip',
],
'required': [
'project_id',
'location_id',
'journal_id',
],
'nullable': [
],
'enum': [
],
'validation': [
'limit',
]
},
root_map={
'validations': {
('limit',): {
'inclusive_maximum': 1000,
'inclusive_minimum': 1,
},
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'limit':
(float,),
'skip':
(float,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'limit': '$limit',
'skip': '$skip',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'limit': 'query',
'skip': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_event_list
)
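        # Illustrative sketch (comments only): paging through journal events with the
        # `limit`/`skip` query parameters declared above. `limit` is validated to the
        # inclusive range 1..1000; the page size used here is an assumption.
        #
        #   skip = 0.0
        #   while True:
        #       page = api.insight_project_journal_event_list(
        #           project_id, location_id, journal_id, limit=100.0, skip=skip)
        #       if not page:
        #           break
        #       skip += 100.0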
def __insight_project_journal_get(
self,
project_id,
location_id,
journal_id,
**kwargs
):
"""Get insight/journal # noqa: E501
Returns a single journal # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_get(project_id, location_id, journal_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Journal
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_get = _Endpoint(
settings={
'response_type': (Journal,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}',
'operation_id': 'insight_project_journal_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_get
)
def __insight_project_journal_list(
self,
project_id,
location_id,
**kwargs
):
"""List insight/journal # noqa: E501
List journal # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_list(project_id, location_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
Keyword Args:
name (str): Filter by name. [optional]
tag_value (str): Filter by tag.value. [optional]
tag_key (str): Filter by tag.key. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Journal]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_list = _Endpoint(
settings={
'response_type': ([Journal],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal',
'operation_id': 'insight_project_journal_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'name',
'tag_value',
'tag_key',
],
'required': [
'project_id',
'location_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'name':
(str,),
'tag_value':
(str,),
'tag_key':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'name': 'name',
'tag_value': 'tag.value',
'tag_key': 'tag.key',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'name': 'query',
'tag_value': 'query',
'tag_key': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_list
)
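        # Illustrative sketch (comments only): listing journals filtered by the optional
        # `name` / `tag_key` / `tag_value` query parameters declared above. The tag key
        # and value strings below are assumptions.
        #
        #   journals = api.insight_project_journal_list(
        #       project_id, location_id, tag_key='env', tag_value='production')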
def __insight_project_journal_log_get(
self,
project_id,
location_id,
journal_id,
**kwargs
):
"""Get insight/journal.log # noqa: E501
websocket is also supported # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_log_get(project_id, location_id, journal_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
Keyword Args:
since (datetime): since. [optional]
until (datetime): until. [optional]
follow (bool): follow. [optional] if omitted the server will use the default value of False
tail (float): tail. [optional]
tag (TagArray): tag. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_log_get = _Endpoint(
settings={
'response_type': None,
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/log',
'operation_id': 'insight_project_journal_log_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'since',
'until',
'follow',
'tail',
'tag',
],
'required': [
'project_id',
'location_id',
'journal_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'since':
(datetime,),
'until':
(datetime,),
'follow':
(bool,),
'tail':
(float,),
'tag':
(TagArray,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'since': 'since',
'until': 'until',
'follow': 'follow',
'tail': 'tail',
'tag': 'tag',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'since': 'query',
'until': 'query',
'follow': 'query',
'tail': 'query',
'tag': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_log_get
)
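        # Illustrative sketch (comments only): reading the tail of a journal's log. The
        # declared response type is None, so the raw body is obtained by disabling
        # preloading (`_preload_content=False`), per the keyword arguments documented
        # above. The timestamp and tail size are assumptions.
        #
        #   resp = api.insight_project_journal_log_get(
        #       project_id, location_id, journal_id,
        #       since=datetime(2021, 1, 1), tail=200.0,
        #       _preload_content=False)
        #   print(resp.data.decode())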
def __insight_project_journal_service_get(
self,
project_id,
location_id,
journal_id,
service_id,
**kwargs
):
"""Get insight/journal.service # noqa: E501
Get insight/journal.service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_service_get(project_id, location_id, journal_id, service_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
service_id (str): serviceId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ResourceService
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['service_id'] = \
service_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_service_get = _Endpoint(
settings={
'response_type': (ResourceService,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/service/{serviceId}',
'operation_id': 'insight_project_journal_service_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'service_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
'service_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'service_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'service_id': 'serviceId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'service_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_service_get
)
def __insight_project_journal_service_list(
self,
project_id,
location_id,
journal_id,
**kwargs
):
"""List insight/journal.service # noqa: E501
List insight/journal.service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_service_list(project_id, location_id, journal_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[ResourceService]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_service_list = _Endpoint(
settings={
'response_type': ([ResourceService],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/service',
'operation_id': 'insight_project_journal_service_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_service_list
)
def __insight_project_journal_tag_create(
self,
project_id,
location_id,
journal_id,
tag,
**kwargs
):
"""Create insight/journal.tag # noqa: E501
Create insight/journal.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_tag_create(project_id, location_id, journal_id, tag, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
tag (Tag):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Tag
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['tag'] = \
tag
return self.call_with_http_info(**kwargs)
self.insight_project_journal_tag_create = _Endpoint(
settings={
'response_type': (Tag,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/tag',
'operation_id': 'insight_project_journal_tag_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'tag',
],
'required': [
'project_id',
'location_id',
'journal_id',
'tag',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'tag':
(Tag,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'tag': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__insight_project_journal_tag_create
)
def __insight_project_journal_tag_delete(
self,
project_id,
location_id,
journal_id,
tag_id,
**kwargs
):
"""Delete insight/journal.tag # noqa: E501
Delete insight/journal.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_tag_delete(project_id, location_id, journal_id, tag_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
tag_id (str): tagId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['tag_id'] = \
tag_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_tag_delete = _Endpoint(
settings={
'response_type': None,
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/tag/{tagId}',
'operation_id': 'insight_project_journal_tag_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'tag_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
'tag_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'tag_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'tag_id': 'tagId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'tag_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_tag_delete
)
def __insight_project_journal_tag_get(
self,
project_id,
location_id,
journal_id,
tag_id,
**kwargs
):
"""Get insight/journal.tag # noqa: E501
Get insight/journal.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_tag_get(project_id, location_id, journal_id, tag_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
tag_id (str): tagId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Tag
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['tag_id'] = \
tag_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_tag_get = _Endpoint(
settings={
'response_type': (Tag,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/tag/{tagId}',
'operation_id': 'insight_project_journal_tag_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'tag_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
'tag_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'tag_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'tag_id': 'tagId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'tag_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_tag_get
)
def __insight_project_journal_tag_list(
self,
project_id,
location_id,
journal_id,
**kwargs
):
"""List insight/journal.tag # noqa: E501
List insight/journal.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_tag_list(project_id, location_id, journal_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Tag]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
return self.call_with_http_info(**kwargs)
self.insight_project_journal_tag_list = _Endpoint(
settings={
'response_type': ([Tag],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/tag',
'operation_id': 'insight_project_journal_tag_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
],
'required': [
'project_id',
'location_id',
'journal_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__insight_project_journal_tag_list
)
def __insight_project_journal_tag_put(
self,
project_id,
location_id,
journal_id,
tag_array,
**kwargs
):
"""Replace insight/journal.tag # noqa: E501
Replace insight/journal.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_tag_put(project_id, location_id, journal_id, tag_array, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
tag_array (TagArray):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Tag]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['tag_array'] = \
tag_array
return self.call_with_http_info(**kwargs)
self.insight_project_journal_tag_put = _Endpoint(
settings={
'response_type': ([Tag],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/tag',
'operation_id': 'insight_project_journal_tag_put',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'tag_array',
],
'required': [
'project_id',
'location_id',
'journal_id',
'tag_array',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'tag_array':
(TagArray,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'tag_array': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__insight_project_journal_tag_put
)
def __insight_project_journal_transfer(
self,
project_id,
location_id,
journal_id,
insight_project_journal_transfer,
**kwargs
):
"""Transfer insight/journal # noqa: E501
action transfer # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_transfer(project_id, location_id, journal_id, insight_project_journal_transfer, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
insight_project_journal_transfer (InsightProjectJournalTransfer):
Keyword Args:
x_idempotency_key (str): Idempotency key. [optional]
x_dry_run (str): Dry run. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Journal
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['insight_project_journal_transfer'] = \
insight_project_journal_transfer
return self.call_with_http_info(**kwargs)
self.insight_project_journal_transfer = _Endpoint(
settings={
'response_type': (Journal,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}/actions/transfer',
'operation_id': 'insight_project_journal_transfer',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'insight_project_journal_transfer',
'x_idempotency_key',
'x_dry_run',
],
'required': [
'project_id',
'location_id',
'journal_id',
'insight_project_journal_transfer',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'insight_project_journal_transfer':
(InsightProjectJournalTransfer,),
'x_idempotency_key':
(str,),
'x_dry_run':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
'x_idempotency_key': 'x-idempotency-key',
'x_dry_run': 'x-dry-run',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'insight_project_journal_transfer': 'body',
'x_idempotency_key': 'header',
'x_dry_run': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__insight_project_journal_transfer
)
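        # Hedged usage note (illustrative, not generated code): the optional
        # 'x_idempotency_key' and 'x_dry_run' parameters registered above are
        # ordinary keyword arguments that are sent as request headers, e.g.:
        #   api.insight_project_journal_transfer(
        #       project_id, location_id, journal_id,
        #       insight_project_journal_transfer,
        #       x_idempotency_key="unique-request-key",  # hypothetical value
        #       x_dry_run="true",                        # validate without applying
        #   )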
def __insight_project_journal_update(
self,
project_id,
location_id,
journal_id,
insight_project_journal_update,
**kwargs
):
"""Update insight/journal # noqa: E501
Returns modified journal # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insight_project_journal_update(project_id, location_id, journal_id, insight_project_journal_update, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
journal_id (str): Journal Id
insight_project_journal_update (InsightProjectJournalUpdate):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Journal
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['journal_id'] = \
journal_id
kwargs['insight_project_journal_update'] = \
insight_project_journal_update
return self.call_with_http_info(**kwargs)
self.insight_project_journal_update = _Endpoint(
settings={
'response_type': (Journal,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/insight/{locationId}/project/{projectId}/journal/{journalId}',
'operation_id': 'insight_project_journal_update',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'journal_id',
'insight_project_journal_update',
],
'required': [
'project_id',
'location_id',
'journal_id',
'insight_project_journal_update',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'journal_id':
(str,),
'insight_project_journal_update':
(InsightProjectJournalUpdate,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'journal_id': 'journalId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'journal_id': 'path',
'insight_project_journal_update': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__insight_project_journal_update
)
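# ---------------------------------------------------------------------------
# Hedged usage sketch (illustrative only, not part of the generated client).
# It assumes the surrounding class is the insight/journal API class of the h1
# SDK and that the package exposes a Configuration object for bearer-token
# auth, as is typical for OpenAPI-generated Python clients; the identifiers
# below are placeholders, not values taken from this file.
#
#   from h1.api_client import ApiClient
#   from h1.configuration import Configuration        # assumed module path
#
#   configuration = Configuration()
#   configuration.access_token = "<bearer token>"     # placeholder credential
#   client = ApiClient(configuration)
#   api = InsightProjectJournalApi(client)             # assumed class name
#   tags = api.insight_project_journal_tag_list(
#       project_id="my-project",                       # hypothetical ids
#       location_id="pl-waw-1",
#       journal_id="my-journal",
#   )
#   # Asynchronous variant: pass async_req=True and call .get() on the thread.
# ---------------------------------------------------------------------------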
|
[
"h1.api_client.Endpoint",
"h1.api_client.ApiClient"
] |
|
from machine import Pin, ADC
from network import LoRa
import socket
from utime import sleep
# machine.deepsleep() and pycom.rgbled() are used below, so import both modules
import machine
import pycom
# Use a pin for a 'config' mode
configPin = Pin('P21', Pin.IN, Pin.PULL_UP)
# Create an ADC object
adc = ADC()
# vbatt pin:
vbatt = adc.channel(attn=1, pin='P16')
def battConversion():
adcVoltage = vbatt()
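    # Convert the raw 12-bit ADC count (0-4095) to battery volts; the *3 factor
    # assumes the board's resistor divider and 1.334 compensates for the ADC
    # attenuation setting (constants taken as-is from this sketch, not verified).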
voltage = adcVoltage*3*1.334/4095
return voltage
# Initialise LoRa in LoRa mode
# For Europe, use LoRa.EU868
lora = LoRa(mode=LoRa.LORA, region=LoRa.EU868)
# Create a raw LoRa socket
s = socket.socket(socket.AF_LORA, socket.SOCK_RAW)
# Check the Config pin:
configMode = not configPin()
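# The pin is pulled up, so configPin() reads 0 when the pin is jumpered to GND;
# inverting it makes configMode True when the jumper is present.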
if not configMode:
print('Reading Battery')
pycom.rgbled(0x0000FF)
message = 'Battery Status: {}'.format(battConversion())
print(message)
sleep(2)
print('Sending battery status estimate...')
pycom.rgbled(0xFF0000)
sleep(2)
s.setblocking(True)
# Send some data
s.send(message)
print('Message Sent!')
pycom.rgbled(0x00FF00)
sleep(2)
print('Going to sleep')
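    # Deep-sleep for 5 minutes (300,000 ms) before the next reading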
machine.deepsleep(300000)
# Otherwise, we are in 'config' so exit to REPL
print('Config Mode')
|
[
"utime.sleep",
"socket.socket",
"machine.Pin",
"network.LoRa",
"machine.ADC"
] |
[((137, 168), 'machine.Pin', 'Pin', (['"""P21"""', 'Pin.IN', 'Pin.PULL_UP'], {}), "('P21', Pin.IN, Pin.PULL_UP)\n", (140, 168), False, 'from machine import Pin, ADC\n'), ((199, 204), 'machine.ADC', 'ADC', ([], {}), '()\n', (202, 204), False, 'from machine import Pin, ADC\n'), ((433, 472), 'network.LoRa', 'LoRa', ([], {'mode': 'LoRa.LORA', 'region': 'LoRa.EU868'}), '(mode=LoRa.LORA, region=LoRa.EU868)\n', (437, 472), False, 'from network import LoRa\n'), ((505, 551), 'socket.socket', 'socket.socket', (['socket.AF_LORA', 'socket.SOCK_RAW'], {}), '(socket.AF_LORA, socket.SOCK_RAW)\n', (518, 551), False, 'import socket\n'), ((765, 773), 'utime.sleep', 'sleep', (['(2)'], {}), '(2)\n', (770, 773), False, 'from utime import sleep\n'), ((854, 862), 'utime.sleep', 'sleep', (['(2)'], {}), '(2)\n', (859, 862), False, 'from utime import sleep\n'), ((987, 995), 'utime.sleep', 'sleep', (['(2)'], {}), '(2)\n', (992, 995), False, 'from utime import sleep\n')]
|
"""
A Lake Winnipeg Basin Information Network (BIN) harvester for the SHARE project
Example API request: http://130.179.67.140/api/3/action/package_search?q= (problematic)
http://130.179.67.140/api/3/action/current_package_list_with_resources (currently using)
It oddly returns 5 more datasets than all searchable ones on LWBIN data hub.
Known issues:
1 -- Five datasets can be searched but cannot be accessed via LWBIN.
Clicking on a search result links to a redirected page like this:
http://172.16.58.3/user/login?came_from=http://130.179.67.140/dataset/mpca-surface-water-data-access-interactive-map
Within each dataset there are resources that contain urls to source pages. For future work, consider using these resource
urls as canonical urls.
2 -- Resource properties contained in the raw metadata of the datasets are not added to the normalized metadata at this
point.
3 -- Single name contributors can be used as filters or an invalid query will be returned. Has nothing to do with scrapi but the frontend.
"""
from __future__ import unicode_literals
import json
import logging
from dateutil.parser import parse
from scrapi import requests
from scrapi.base import JSONHarvester
from scrapi.linter.document import RawDocument
from scrapi.base.helpers import build_properties, datetime_formatter, parse_name
logger = logging.getLogger(__name__)
ORGANIZATIONS = (
"organization", "fund", "canada", "agriculture", "commitee", "international", "council", "office", "of",
"observation", "institute", "lwbin", "cocorahs", "usgs", "nsidc"
)
def is_organization(name):
"""Return a boolean to indicate if the name passed to the function is an organization
"""
words = name.split(' ')
return any(word.strip(";").lower() in ORGANIZATIONS for word in words)
def clean_authors(authors):
"""Cleam authors list.
"""
authors = authors.strip().replace('<span class="author-names">', '').replace('</span>', '')
authors = authors.split(',')
new_authors = []
for author in authors:
if is_organization(author):
new_authors.append(author)
else:
if ' and ' in author or ' <em>et al.</em>' in author:
split_name = author.replace(' <em>et al.</em>', '').split(' and ')
new_authors.extend(split_name)
else:
new_authors.append(author)
return new_authors
def process_contributors(authors, emails):
"""Process authors and add author emails
If multiple authors and one email, put email in a new author
"""
emails = emails.split(',')
authors = clean_authors(authors)
contributor_list = []
append_emails = len(authors) == 1 and len(emails) == 1 and not emails[0] == u'' # append the email to the author only when 1 record is observed
for i, author in enumerate(authors):
if is_organization(author):
contributor = {
'name': author
}
else:
contributor = parse_name(author)
if append_emails:
contributor['email'] = emails[i]
contributor_list.append(contributor)
if not append_emails and emails[0] != u'':
for email in emails:
contributor = {
'name': '',
'email': email
}
contributor_list.append(contributor)
return contributor_list
def process_licenses(license_title, license_url, license_id):
"""Process licenses to comply with the normalized schema
"""
if not license_url:
return []
else:
license = {
'uri': license_url,
'description': "{} ({})".format(license_title, license_id) or ""
}
return [license]
def construct_url(url, dataset_path, end_point):
"""
    :return: a url that directs back to the page on LWBIN Data Hub instead of the source page.
:param url: host url
:param dataset_path: parent path of all datasets
:param end_point: name of datasets
"""
return "/".join([url, dataset_path, end_point])
def process_object_uris(url, extras):
"""Extract doi from /extras, and return a list of object uris including /url and doi if it exists.
"""
doi = []
for d in extras:
if d['key'] == "DOI" or d['key'] == "DOI:":
doi.append(d['value'])
if doi == []:
return [url]
else:
        # list.extend() returns None, so concatenate instead of returning its result
        return [url] + doi
class LWBINHarvester(JSONHarvester):
short_name = 'lwbin'
long_name = 'Lake Winnipeg Basin Information Network'
url = 'http://130.179.67.140'
dataset_path = "dataset" # dataset base url for constructing urls that go back to LWBIN instead of source pages.
DEFAULT_ENCODING = 'UTF-8'
record_encoding = None
@property
def schema(self):
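        # Each entry maps a normalized field to a JSON path in the raw CKAN record;
        # when a tuple ends in a callable, the harvester applies it to the extracted
        # values (scrapi JSONHarvester convention, as used throughout below).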
return {
'title': ('/title', lambda x: x or ''),
'description': ('/notes'),
'providerUpdatedDateTime': ('/metadata_modified', datetime_formatter),
'uris': {
'canonicalUri': ('/name', lambda x: construct_url(self.url, self.dataset_path, x)), # Construct new urls directing to LWBIN
'objectUris': ('/url', '/extras', process_object_uris) # Default urls from the metadata directing to source pages
},
'contributors': ('/author', '/author_email', process_contributors),
'licenses': ('/license_title', '/license_url', '/license_id', process_licenses),
'tags': ('/tags', lambda x: [tag['name'].lower() for tag in (x or [])]),
'freeToRead': {
'startDate': ('/isopen', '/metadata_created', lambda x, y: parse(y).date().isoformat() if x else None)
},
'otherProperties': build_properties(
('maintainer', '/maintainer'),
('maintainerEmail', '/maintainer_email'),
('revisionTimestamp', ('/revision_timestamp', datetime_formatter)),
('id', '/id'),
('metadataCreated', ('/metadata_created', datetime_formatter)),
('state', '/state'),
('version', '/version'),
('creatorUserId', '/creator_user_id'),
('type', '/type'),
('numberOfResources', '/num_resources'),
('numberOfTags', '/num_tags'),
('name', '/name'),
('groups', '/groups'),
)
}
def harvest(self, start_date=None, end_date=None):
"""Returns a list of Rawdocuments (metadata)
Searching by time is not supported by LWBIN CKAN API. all datasets have to be scanned each time.
"""
base_url = 'http://172.16.58.3/api/3/action/current_package_list_with_resources'
records = requests.get(base_url).json()['result']
total = len(records) # Total number of documents
logger.info('{} documents to be harvested'.format(total))
return [
RawDocument({
'doc': json.dumps(record),
'source': self.short_name,
'docID': record['id'],
'filetype': 'json'
}) for record in records
]
|
[
"logging.getLogger",
"dateutil.parser.parse",
"scrapi.base.helpers.parse_name",
"json.dumps",
"scrapi.base.helpers.build_properties",
"scrapi.requests.get"
] |
[((1347, 1374), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1364, 1374), False, 'import logging\n'), ((3013, 3031), 'scrapi.base.helpers.parse_name', 'parse_name', (['author'], {}), '(author)\n', (3023, 3031), False, 'from scrapi.base.helpers import build_properties, datetime_formatter, parse_name\n'), ((5765, 6245), 'scrapi.base.helpers.build_properties', 'build_properties', (["('maintainer', '/maintainer')", "('maintainerEmail', '/maintainer_email')", "('revisionTimestamp', ('/revision_timestamp', datetime_formatter))", "('id', '/id')", "('metadataCreated', ('/metadata_created', datetime_formatter))", "('state', '/state')", "('version', '/version')", "('creatorUserId', '/creator_user_id')", "('type', '/type')", "('numberOfResources', '/num_resources')", "('numberOfTags', '/num_tags')", "('name', '/name')", "('groups', '/groups')"], {}), "(('maintainer', '/maintainer'), ('maintainerEmail',\n '/maintainer_email'), ('revisionTimestamp', ('/revision_timestamp',\n datetime_formatter)), ('id', '/id'), ('metadataCreated', (\n '/metadata_created', datetime_formatter)), ('state', '/state'), (\n 'version', '/version'), ('creatorUserId', '/creator_user_id'), ('type',\n '/type'), ('numberOfResources', '/num_resources'), ('numberOfTags',\n '/num_tags'), ('name', '/name'), ('groups', '/groups'))\n", (5781, 6245), False, 'from scrapi.base.helpers import build_properties, datetime_formatter, parse_name\n'), ((6788, 6810), 'scrapi.requests.get', 'requests.get', (['base_url'], {}), '(base_url)\n', (6800, 6810), False, 'from scrapi import requests\n'), ((7019, 7037), 'json.dumps', 'json.dumps', (['record'], {}), '(record)\n', (7029, 7037), False, 'import json\n'), ((5675, 5683), 'dateutil.parser.parse', 'parse', (['y'], {}), '(y)\n', (5680, 5683), False, 'from dateutil.parser import parse\n')]
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
'''Analysis file.'''
import sys
import os.path
import tensorflow as tf
from absl import app
from absl import flags
from absl import gfile
import cPickle as pickle
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pylab
import matplotlib.pyplot as plt
import numpy as np, h5py
import scipy.io as sio
from scipy import ndimage
import random
import re # regular expression matching
FLAGS = flags.FLAGS
flags.DEFINE_string('folder_name', 'experiment4', 'folder where to store all the data')
flags.DEFINE_string('save_location',
'/home/bhaishahster/',
'where to store logs and outputs?');
flags.DEFINE_string('data_location',
'/home/bhaishahster/data_breakdown/',
'where to take data from?')
flags.DEFINE_integer('n_b_in_c', 10, 'number of batches in one chunk of data')
flags.DEFINE_integer('np_randseed', 23, 'numpy RNG seed')
flags.DEFINE_integer('randseed', 65, 'python RNG seed')
flags.DEFINE_integer('ratio_SU', 2, 'ratio of subunits/cells')
flags.DEFINE_string('model_id', 'poisson', 'which model to fit')
FLAGS = flags.FLAGS
def main(argv):
print('\nCode started')
np.random.seed(FLAGS.np_randseed)
random.seed(FLAGS.randseed)
## Load data summary
filename = FLAGS.data_location + 'data_details.mat'
summary_file = gfile.Open(filename, 'r')
data_summary = sio.loadmat(summary_file)
cells = np.squeeze(data_summary['cells'])
if FLAGS.model_id == 'poisson' or FLAGS.model_id == 'logistic' or FLAGS.model_id == 'hinge':
cells_choose = (cells ==3287) | (cells ==3318 ) | (cells ==3155) | (cells ==3066)
if FLAGS.model_id == 'poisson_full':
cells_choose = np.array(np.ones(np.shape(cells)), dtype='bool')
n_cells = np.sum(cells_choose)
tot_spks = np.squeeze(data_summary['tot_spks'])
total_mask = np.squeeze(data_summary['totalMaskAccept_log']).T
tot_spks_chosen_cells = tot_spks[cells_choose]
chosen_mask = np.array(np.sum(total_mask[cells_choose,:],0)>0, dtype='bool')
print(np.shape(chosen_mask))
print(np.sum(chosen_mask))
stim_dim = np.sum(chosen_mask)
print('\ndataset summary loaded')
# use stim_dim, chosen_mask, cells_choose, tot_spks_chosen_cells, n_cells
# decide the number of subunits to fit
n_su = FLAGS.ratio_SU*n_cells
#batchsz = [100, 500, 1000, 100, 500, 1000, 100, 500, 1000, 1000, 1000, 5000, 10000, 5000, 10000]
#n_b_in_c = [10, 2, 1, 10, 2, 1, 10, 2, 1, 1, 1, 1, 1, 1, 1 ]
#step_sz = [0.0001, 0.0001, 0.0001, 0.01, 0.01, 0.01 , 1, 1, 1, 10, 100, 10, 10, 1, 1 ]
batchsz = [100, 500, 1000, 5000, 1000, 100, 500, 1000, 5000, 10000, 100, 500, 1000, 5000, 10000, 100, 500, 1000, 5000, 10000]
n_b_in_c = [10, 2, 1, 1, 1, 10, 2, 1, 1, 1, 10, 2, 1, 1, 1, 10, 2, 1, 1, 1 ]
step_sz = [0.1, 0.1, 0.1, 0.1, 0.1, 1 , 1, 1, 1, 1, 5, 5, 5, 5, 5, 10, 10, 10, 10, 10 ]
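  # Each (batchsz, n_b_in_c, step_sz) triple identifies one previously fitted model;
  # the loop below restores its latest checkpoint and plots the learned subunit
  # filters as one row of the summary figure.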
with tf.Session() as sess:
# Learn population model!
stim = tf.placeholder(tf.float32, shape=[None, stim_dim], name='stim')
resp = tf.placeholder(tf.float32, name='resp')
data_len = tf.placeholder(tf.float32, name='data_len')
# get filename
if FLAGS.model_id == 'poisson' or FLAGS.model_id == 'poisson_full':
w = tf.Variable(np.array(0.01 * np.random.randn(stim_dim, n_su), dtype='float32'))
a = tf.Variable(np.array(0.1 * np.random.rand(n_cells, 1, n_su), dtype='float32'))
if FLAGS.model_id == 'logistic' or FLAGS.model_id == 'hinge':
w = tf.Variable(np.array(0.01 * np.random.randn(stim_dim, n_su), dtype='float32'))
a = tf.Variable(np.array(0.01 * np.random.rand(n_su, n_cells), dtype='float32'))
b_init = np.random.randn(n_cells) #np.log((np.sum(response,0))/(response.shape[0]-np.sum(response,0)))
b = tf.Variable(b_init,dtype='float32')
plt.figure()
for icnt, ibatchsz in enumerate(batchsz):
in_b_in_c = n_b_in_c[icnt]
istep_sz = np.array(step_sz[icnt],dtype='double')
print(icnt)
if FLAGS.model_id == 'poisson':
short_filename = ('data_model=ASM_pop_batch_sz='+ str(ibatchsz) + '_n_b_in_c' + str(in_b_in_c) +
'_step_sz'+ str(istep_sz)+'_bg')
else:
short_filename = ('data_model='+ str(FLAGS.model_id) +'_batch_sz='+ str(ibatchsz) + '_n_b_in_c' + str(in_b_in_c) +
'_step_sz'+ str(istep_sz)+'_bg')
parent_folder = FLAGS.save_location + FLAGS.folder_name + '/'
save_location = parent_folder +short_filename + '/'
print(gfile.IsDirectory(save_location))
print(save_location)
save_filename = save_location + short_filename
#determine filelist
file_list = gfile.ListDirectory(save_location)
save_filename = save_location + short_filename
print('\nLoading: ', save_filename)
bin_files = []
meta_files = []
for file_n in file_list:
if re.search(short_filename + '.', file_n):
if re.search('.meta', file_n):
meta_files += [file_n]
else:
bin_files += [file_n]
#print(bin_files)
print(len(meta_files), len(bin_files), len(file_list))
# get latest iteration
iterations = np.array([])
for file_name in bin_files:
try:
iterations = np.append(iterations, int(file_name.split('/')[-1].split('-')[-1]))
except:
print('Could not load filename: ' + file_name)
iterations.sort()
print(iterations)
iter_plot = iterations[-1]
print(int(iter_plot))
# load tensorflow variables
saver_var = tf.train.Saver(tf.all_variables())
restore_file = save_filename + '-' + str(int(iter_plot))
saver_var.restore(sess, restore_file)
a_eval = a.eval()
print(np.exp(np.squeeze(a_eval)))
#print(np.shape(a_eval))
# get 2D region to plot
mask2D = np.reshape(chosen_mask, [40, 80])
nz_idx = np.nonzero(mask2D)
np.shape(nz_idx)
print(nz_idx)
ylim = np.array([np.min(nz_idx[0])-1, np.max(nz_idx[0])+1])
xlim = np.array([np.min(nz_idx[1])-1, np.max(nz_idx[1])+1])
w_eval = w.eval()
#plt.figure()
n_su = w_eval.shape[1]
for isu in np.arange(n_su):
xx = np.zeros((3200))
xx[chosen_mask] = w_eval[:, isu]
fig = plt.subplot(20, n_su, n_su * icnt + isu+1)
plt.imshow(np.reshape(xx, [40, 80]), interpolation='nearest', cmap='gray')
plt.ylim(ylim)
plt.xlim(xlim)
fig.axes.get_xaxis().set_visible(False)
fig.axes.get_yaxis().set_visible(False)
#if FLAGS.model_id == 'logistic' or FLAGS.model_id == 'hinge':
# plt.title(str(a_eval[isu, :]))
#else:
# plt.title(str(np.squeeze(np.exp(a_eval[:, 0, isu]))), fontsize=12)
if isu == 4:
plt.title('Iteration:' + str(int(iter_plot)) + ' batchSz:' + str(ibatchsz) + ' step size:' + str(istep_sz), fontsize=18)
plt.show()
plt.draw()
if __name__ == '__main__':
app.run()
|
[
"absl.gfile.Open",
"numpy.random.rand",
"scipy.io.loadmat",
"numpy.array",
"numpy.arange",
"re.search",
"numpy.reshape",
"tensorflow.Session",
"tensorflow.placeholder",
"absl.gfile.IsDirectory",
"absl.app.run",
"numpy.max",
"numpy.random.seed",
"numpy.min",
"matplotlib.pyplot.ylim",
"tensorflow.all_variables",
"tensorflow.Variable",
"matplotlib.use",
"numpy.squeeze",
"numpy.nonzero",
"numpy.shape",
"matplotlib.pyplot.draw",
"numpy.random.randn",
"absl.flags.DEFINE_string",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.show",
"absl.flags.DEFINE_integer",
"random.seed",
"numpy.sum",
"matplotlib.pyplot.figure",
"numpy.zeros",
"absl.gfile.ListDirectory",
"matplotlib.pyplot.subplot"
] |
[((836, 859), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (850, 859), False, 'import matplotlib\n'), ((1072, 1163), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""folder_name"""', '"""experiment4"""', '"""folder where to store all the data"""'], {}), "('folder_name', 'experiment4',\n 'folder where to store all the data')\n", (1091, 1163), False, 'from absl import flags\n'), ((1161, 1260), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""save_location"""', '"""/home/bhaishahster/"""', '"""where to store logs and outputs?"""'], {}), "('save_location', '/home/bhaishahster/',\n 'where to store logs and outputs?')\n", (1180, 1260), False, 'from absl import flags\n'), ((1299, 1405), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""data_location"""', '"""/home/bhaishahster/data_breakdown/"""', '"""where to take data from?"""'], {}), "('data_location', '/home/bhaishahster/data_breakdown/',\n 'where to take data from?')\n", (1318, 1405), False, 'from absl import flags\n'), ((1442, 1520), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""n_b_in_c"""', '(10)', '"""number of batches in one chunk of data"""'], {}), "('n_b_in_c', 10, 'number of batches in one chunk of data')\n", (1462, 1520), False, 'from absl import flags\n'), ((1521, 1578), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""np_randseed"""', '(23)', '"""numpy RNG seed"""'], {}), "('np_randseed', 23, 'numpy RNG seed')\n", (1541, 1578), False, 'from absl import flags\n'), ((1579, 1634), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""randseed"""', '(65)', '"""python RNG seed"""'], {}), "('randseed', 65, 'python RNG seed')\n", (1599, 1634), False, 'from absl import flags\n'), ((1635, 1697), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""ratio_SU"""', '(2)', '"""ratio of subunits/cells"""'], {}), "('ratio_SU', 2, 'ratio of subunits/cells')\n", (1655, 1697), False, 'from absl import flags\n'), ((1698, 1762), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""model_id"""', '"""poisson"""', '"""which model to fit"""'], {}), "('model_id', 'poisson', 'which model to fit')\n", (1717, 1762), False, 'from absl import flags\n'), ((1829, 1862), 'numpy.random.seed', 'np.random.seed', (['FLAGS.np_randseed'], {}), '(FLAGS.np_randseed)\n', (1843, 1862), True, 'import numpy as np, h5py\n'), ((1865, 1892), 'random.seed', 'random.seed', (['FLAGS.randseed'], {}), '(FLAGS.randseed)\n', (1876, 1892), False, 'import random\n'), ((1989, 2014), 'absl.gfile.Open', 'gfile.Open', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (1999, 2014), False, 'from absl import gfile\n'), ((2032, 2057), 'scipy.io.loadmat', 'sio.loadmat', (['summary_file'], {}), '(summary_file)\n', (2043, 2057), True, 'import scipy.io as sio\n'), ((2068, 2101), 'numpy.squeeze', 'np.squeeze', (["data_summary['cells']"], {}), "(data_summary['cells'])\n", (2078, 2101), True, 'import numpy as np, h5py\n'), ((2402, 2422), 'numpy.sum', 'np.sum', (['cells_choose'], {}), '(cells_choose)\n', (2408, 2422), True, 'import numpy as np, h5py\n'), ((2437, 2473), 'numpy.squeeze', 'np.squeeze', (["data_summary['tot_spks']"], {}), "(data_summary['tot_spks'])\n", (2447, 2473), True, 'import numpy as np, h5py\n'), ((2741, 2760), 'numpy.sum', 'np.sum', (['chosen_mask'], {}), '(chosen_mask)\n', (2747, 2760), True, 'import numpy as np, h5py\n'), ((7693, 7703), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7701, 7703), True, 'import matplotlib.pyplot as plt\n'), ((7706, 7716), 'matplotlib.pyplot.draw', 
'plt.draw', ([], {}), '()\n', (7714, 7716), True, 'import matplotlib.pyplot as plt\n'), ((7756, 7765), 'absl.app.run', 'app.run', ([], {}), '()\n', (7763, 7765), False, 'from absl import app\n'), ((2489, 2536), 'numpy.squeeze', 'np.squeeze', (["data_summary['totalMaskAccept_log']"], {}), "(data_summary['totalMaskAccept_log'])\n", (2499, 2536), True, 'import numpy as np, h5py\n'), ((2675, 2696), 'numpy.shape', 'np.shape', (['chosen_mask'], {}), '(chosen_mask)\n', (2683, 2696), True, 'import numpy as np, h5py\n'), ((2706, 2725), 'numpy.sum', 'np.sum', (['chosen_mask'], {}), '(chosen_mask)\n', (2712, 2725), True, 'import numpy as np, h5py\n'), ((3709, 3721), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (3719, 3721), True, 'import tensorflow as tf\n'), ((3772, 3835), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, stim_dim]', 'name': '"""stim"""'}), "(tf.float32, shape=[None, stim_dim], name='stim')\n", (3786, 3835), True, 'import tensorflow as tf\n'), ((3847, 3886), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'name': '"""resp"""'}), "(tf.float32, name='resp')\n", (3861, 3886), True, 'import tensorflow as tf\n'), ((3902, 3945), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'name': '"""data_len"""'}), "(tf.float32, name='data_len')\n", (3916, 3945), True, 'import tensorflow as tf\n'), ((4619, 4631), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4629, 4631), True, 'import matplotlib.pyplot as plt\n'), ((2613, 2651), 'numpy.sum', 'np.sum', (['total_mask[cells_choose, :]', '(0)'], {}), '(total_mask[cells_choose, :], 0)\n', (2619, 2651), True, 'import numpy as np, h5py\n'), ((4474, 4498), 'numpy.random.randn', 'np.random.randn', (['n_cells'], {}), '(n_cells)\n', (4489, 4498), True, 'import numpy as np, h5py\n'), ((4578, 4614), 'tensorflow.Variable', 'tf.Variable', (['b_init'], {'dtype': '"""float32"""'}), "(b_init, dtype='float32')\n", (4589, 4614), True, 'import tensorflow as tf\n'), ((4728, 4767), 'numpy.array', 'np.array', (['step_sz[icnt]'], {'dtype': '"""double"""'}), "(step_sz[icnt], dtype='double')\n", (4736, 4767), True, 'import numpy as np, h5py\n'), ((5443, 5477), 'absl.gfile.ListDirectory', 'gfile.ListDirectory', (['save_location'], {}), '(save_location)\n', (5462, 5477), False, 'from absl import gfile\n'), ((5955, 5967), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5963, 5967), True, 'import numpy as np, h5py\n'), ((6628, 6661), 'numpy.reshape', 'np.reshape', (['chosen_mask', '[40, 80]'], {}), '(chosen_mask, [40, 80])\n', (6638, 6661), True, 'import numpy as np, h5py\n'), ((6677, 6695), 'numpy.nonzero', 'np.nonzero', (['mask2D'], {}), '(mask2D)\n', (6687, 6695), True, 'import numpy as np, h5py\n'), ((6702, 6718), 'numpy.shape', 'np.shape', (['nz_idx'], {}), '(nz_idx)\n', (6710, 6718), True, 'import numpy as np, h5py\n'), ((6962, 6977), 'numpy.arange', 'np.arange', (['n_su'], {}), '(n_su)\n', (6971, 6977), True, 'import numpy as np, h5py\n'), ((2358, 2373), 'numpy.shape', 'np.shape', (['cells'], {}), '(cells)\n', (2366, 2373), True, 'import numpy as np, h5py\n'), ((5284, 5316), 'absl.gfile.IsDirectory', 'gfile.IsDirectory', (['save_location'], {}), '(save_location)\n', (5301, 5316), False, 'from absl import gfile\n'), ((5658, 5697), 're.search', 're.search', (["(short_filename + '.')", 'file_n'], {}), "(short_filename + '.', file_n)\n", (5667, 5697), False, 'import re\n'), ((6357, 6375), 'tensorflow.all_variables', 'tf.all_variables', ([], {}), '()\n', (6373, 6375), True, 'import tensorflow as 
tf\n'), ((6992, 7006), 'numpy.zeros', 'np.zeros', (['(3200)'], {}), '(3200)\n', (7000, 7006), True, 'import numpy as np, h5py\n'), ((7064, 7108), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(20)', 'n_su', '(n_su * icnt + isu + 1)'], {}), '(20, n_su, n_su * icnt + isu + 1)\n', (7075, 7108), True, 'import matplotlib.pyplot as plt\n'), ((7199, 7213), 'matplotlib.pyplot.ylim', 'plt.ylim', (['ylim'], {}), '(ylim)\n', (7207, 7213), True, 'import matplotlib.pyplot as plt\n'), ((7222, 7236), 'matplotlib.pyplot.xlim', 'plt.xlim', (['xlim'], {}), '(xlim)\n', (7230, 7236), True, 'import matplotlib.pyplot as plt\n'), ((5712, 5738), 're.search', 're.search', (['""".meta"""', 'file_n'], {}), "('.meta', file_n)\n", (5721, 5738), False, 'import re\n'), ((6530, 6548), 'numpy.squeeze', 'np.squeeze', (['a_eval'], {}), '(a_eval)\n', (6540, 6548), True, 'import numpy as np, h5py\n'), ((7127, 7151), 'numpy.reshape', 'np.reshape', (['xx', '[40, 80]'], {}), '(xx, [40, 80])\n', (7137, 7151), True, 'import numpy as np, h5py\n'), ((4076, 4107), 'numpy.random.randn', 'np.random.randn', (['stim_dim', 'n_su'], {}), '(stim_dim, n_su)\n', (4091, 4107), True, 'import numpy as np, h5py\n'), ((4164, 4196), 'numpy.random.rand', 'np.random.rand', (['n_cells', '(1)', 'n_su'], {}), '(n_cells, 1, n_su)\n', (4178, 4196), True, 'import numpy as np, h5py\n'), ((4320, 4351), 'numpy.random.randn', 'np.random.randn', (['stim_dim', 'n_su'], {}), '(stim_dim, n_su)\n', (4335, 4351), True, 'import numpy as np, h5py\n'), ((4409, 4438), 'numpy.random.rand', 'np.random.rand', (['n_su', 'n_cells'], {}), '(n_su, n_cells)\n', (4423, 4438), True, 'import numpy as np, h5py\n'), ((6762, 6779), 'numpy.min', 'np.min', (['nz_idx[0]'], {}), '(nz_idx[0])\n', (6768, 6779), True, 'import numpy as np, h5py\n'), ((6783, 6800), 'numpy.max', 'np.max', (['nz_idx[0]'], {}), '(nz_idx[0])\n', (6789, 6800), True, 'import numpy as np, h5py\n'), ((6828, 6845), 'numpy.min', 'np.min', (['nz_idx[1]'], {}), '(nz_idx[1])\n', (6834, 6845), True, 'import numpy as np, h5py\n'), ((6849, 6866), 'numpy.max', 'np.max', (['nz_idx[1]'], {}), '(nz_idx[1])\n', (6855, 6866), True, 'import numpy as np, h5py\n')]
|
import logging
import os.path as path
from typing import List, Optional, Tuple
from psychopy import core, visual
from bcipy.acquisition.marker_writer import NullMarkerWriter, MarkerWriter
from bcipy.helpers.task import SPACE_CHAR
from bcipy.helpers.stimuli import resize_image
from bcipy.helpers.system_utils import get_screen_resolution
from bcipy.helpers.triggers import TriggerCallback, _calibration_trigger
class RSVPDisplay(object):
"""RSVP Display Object for inquiry Presentation.
    Animates an inquiry in RSVP. Mode should be determined outside.
"""
def __init__(
self,
window: visual.Window,
static_clock,
experiment_clock: core.Clock,
marker_writer: Optional[MarkerWriter] = None,
task_color: List[str] = ['white'],
task_font: str = 'Times',
task_pos: Tuple[float, float] = (-.8, .9),
task_height: float = 0.2,
task_text: str = '1/100',
info_color: List[str] = ['white'],
info_text: List[str] = ['Information Text'],
info_font: List[str] = ['Times'],
info_pos=[(.8, .9)],
info_height=[0.2],
stim_font='Times',
stim_pos=(-.8, .9),
stim_height=0.2,
stim_inquiry: List[str] = ['a'] * 10,
stim_colors: List[str] = ['white'] * 10,
stim_timing: List[float] = [1] * 10,
is_txt_stim: bool = True,
static_time: float = .05,
trigger_type: str = 'image',
space_char: SPACE_CHAR = SPACE_CHAR):
"""Initialize RSVP window parameters and objects.
PARAMETERS:
----------
# Experiment
window(visual.Window): PsychoPy Window
        static_clock(core.StaticPeriod): static period used to time stimulus drawing between flips
experiment_clock(core.Clock): Clock used to timestamp experiment
marker_writer(MarkerWriter): object used to write triggers to
the daq stream.
# Task
task_color(list[string]): Color of the task string. Shares the
length of the task_text. If of length 1 the entire task
bar shares the same color.
task_font(string): Font of task string
task_pos(tuple): position of task string
task_height(float): height for task string
task_text(string): text of the task bar
# Info
info_text(list[string]): Text list for information texts
info_color(list[string]): Color of the information text string
info_font(list[string]): Font of the information text string
info_pos(list[tuple]): Position of the information text string
info_height(list[float]): Height of the information text string
# Stimuli
stim_height(float): height of the stimuli object
stim_pos(tuple): position of stimuli
stim_font(string): font of the stimuli
stim_inquiry(list[string]): list of elements to flash
stim_colors(list[string]): list of colors for stimuli
stim_timing(list[float]): timing for each letter flash
"""
self.window = window
self.refresh_rate = window.getActualFrameRate()
self.logger = logging.getLogger(__name__)
self.stimuli_inquiry = stim_inquiry
self.stimuli_colors = stim_colors
self.stimuli_timing = stim_timing
self.is_txt_stim = is_txt_stim
self.staticPeriod = static_clock
self.static_time = static_time
self.experiment_clock = experiment_clock
self.timing_clock = core.Clock()
# Used to handle writing the marker stimulus
self.marker_writer = marker_writer or NullMarkerWriter()
# Length of the stimuli (number of flashes)
self.stim_length = len(stim_inquiry)
# Informational Parameters
self.info_text = info_text
# Stim parameters
self.stimuli_font = stim_font
self.stimuli_height = stim_height
self.stimuli_pos = stim_pos
# Trigger Items
self.first_run = True
self.trigger_type = trigger_type
self.trigger_callback = TriggerCallback()
# Callback used on presentation of first stimulus.
self.first_stim_callback = lambda _sti: None
self.size_list_sti = []
self.space_char = space_char
self.task = visual.TextStim(win=self.window, color=task_color[0],
height=task_height,
text=task_text,
font=task_font, pos=task_pos,
wrapWidth=None, colorSpace='rgb',
opacity=1, depth=-6.0)
# Create multiple text objects based on input
self.text = []
for idx in range(len(self.info_text)):
self.text.append(visual.TextStim(
win=self.window,
color=info_color[idx],
height=info_height[idx],
text=self.info_text[idx],
font=info_font[idx],
pos=info_pos[idx],
wrapWidth=None, colorSpace='rgb',
opacity=1, depth=-6.0))
# Create Stimuli Object
if self.is_txt_stim:
self.sti = visual.TextStim(
win=self.window,
color='white',
height=self.stimuli_height,
text='+',
font=self.stimuli_font,
pos=self.stimuli_pos,
wrapWidth=None, colorSpace='rgb',
opacity=1, depth=-6.0)
else:
self.sti = visual.ImageStim(
win=self.window,
image=None,
mask=None,
pos=self.stimuli_pos,
ori=0.0)
def draw_static(self):
"""Draw static elements in a stimulus."""
self.task.draw()
for idx in range(len(self.text)):
self.text[idx].draw()
def schedule_to(self, ele_list=[], time_list=[], color_list=[]):
"""Schedule stimuli elements (works as a buffer).
Args:
ele_list(list[string]): list of elements of stimuli
time_list(list[float]): list of timings of stimuli
color_list(list[string]): colors of elements of stimuli
"""
self.stimuli_inquiry = ele_list
self.stimuli_timing = time_list
self.stimuli_colors = color_list
def update_task(self, text: str, color_list: List[str], pos: Tuple[float]):
"""Update Task Object.
PARAMETERS:
-----------
text: text for task
color_list: list of the colors for each char
pos: position of task
"""
self.task.text = text
self.task.color = color_list[0]
self.task.pos = pos
def do_inquiry(self):
"""Do inquiry.
        Animates an inquiry of flashing letters to achieve RSVP.
"""
# init an array for timing information
timing = []
if self.first_run:
# play a inquiry start sound to help orient triggers
first_stim_timing = _calibration_trigger(
self.experiment_clock,
trigger_type=self.trigger_type, display=self.window,
on_trigger=self.marker_writer.push_marker)
timing.append(first_stim_timing)
self.first_stim_time = first_stim_timing[-1]
self.first_run = False
# generate a inquiry (list of stimuli with meta information)
inquiry = self._generate_inquiry()
# do the inquiry
for idx in range(len(inquiry)):
self.is_first_stim = (idx == 0)
# set a static period to do all our stim setting.
# will warn if ISI value is violated.
self.staticPeriod.name = 'Stimulus Draw Period'
self.staticPeriod.start(self.stimuli_timing[idx])
# Reset the timing clock to start presenting
self.window.callOnFlip(
self.trigger_callback.callback,
self.experiment_clock,
inquiry[idx]['sti_label'])
self.window.callOnFlip(self.marker_writer.push_marker, inquiry[idx]['sti_label'])
if idx == 0 and callable(self.first_stim_callback):
self.first_stim_callback(inquiry[idx]['sti'])
# Draw stimulus for n frames
inquiry[idx]['sti'].draw()
self.draw_static()
self.window.flip()
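            # time_to_present is in frames; the flip above showed the first frame,
            # so wait out the remaining frames at the monitor refresh rate.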
core.wait((inquiry[idx]['time_to_present'] - 1) / self.refresh_rate)
# End static period
self.staticPeriod.complete()
# append timing information
if self.is_txt_stim:
timing.append(self.trigger_callback.timing)
else:
timing.append(self.trigger_callback.timing)
self.trigger_callback.reset()
# draw in static and flip once more
self.draw_static()
self.window.flip()
return timing
def _generate_inquiry(self):
"""Generate inquiry.
Generate stimuli for next RSVP inquiry.
"""
stim_info = []
for idx in range(len(self.stimuli_inquiry)):
current_stim = {}
# turn ms timing into frames! Much more accurate!
current_stim['time_to_present'] = int(self.stimuli_timing[idx] * self.refresh_rate)
# check if stimulus needs to use a non-default size
if self.size_list_sti:
this_stimuli_size = self.size_list_sti[idx]
else:
this_stimuli_size = self.stimuli_height
# Set the Stimuli attrs
if self.stimuli_inquiry[idx].endswith('.png'):
current_stim['sti'] = self.create_stimulus(mode='image', height_int=this_stimuli_size)
current_stim['sti'].image = self.stimuli_inquiry[idx]
current_stim['sti'].size = resize_image(
current_stim['sti'].image, current_stim['sti'].win.size, this_stimuli_size)
current_stim['sti_label'] = path.splitext(
path.basename(self.stimuli_inquiry[idx]))[0]
else:
# text stimulus
current_stim['sti'] = self.create_stimulus(mode='text', height_int=this_stimuli_size)
txt = self.stimuli_inquiry[idx]
# customize presentation of space char.
current_stim['sti'].text = txt if txt != SPACE_CHAR else self.space_char
current_stim['sti'].color = self.stimuli_colors[idx]
current_stim['sti_label'] = txt
# test whether the word will be too big for the screen
text_width = current_stim['sti'].boundingBox[0]
if text_width > self.window.size[0]:
monitor_width, monitor_height = get_screen_resolution()
text_height = current_stim['sti'].boundingBox[1]
# If we are in full-screen, text size in Psychopy norm units
# is monitor width/monitor height
if self.window.size[0] == monitor_width:
new_text_width = monitor_width / monitor_height
else:
# If not, text width is calculated relative to both
# monitor size and window size
new_text_width = (
self.window.size[1] / monitor_height) * (
monitor_width / monitor_height)
new_text_height = (text_height * new_text_width) / text_width
current_stim['sti'].height = new_text_height
stim_info.append(current_stim)
return stim_info
def update_task_state(self, text: str, color_list: List[str]) -> None:
"""Update task state.
Removes letters or appends to the right.
Args:
text(string): new text for task state
color_list(list[string]): list of colors for each
"""
task_state_text = visual.TextStim(
win=self.window, font=self.task.font, text=text)
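        # Rough left-alignment: treating boundingBox as a pixel width, the centre
        # (-1 + width / window_width) places the text near the window's left edge,
        # assuming the window uses 'norm' units (inference from how the value is used).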
x_task_position = task_state_text.boundingBox[0] / \
self.window.size[0] - 1
task_pos = (x_task_position, 1 - self.task.height)
self.update_task(text=text, color_list=color_list, pos=task_pos)
def wait_screen(self, message, color):
"""Wait Screen.
Args:
message(string): message to be displayed while waiting
"""
# Construct the wait message
wait_message = visual.TextStim(win=self.window, font=self.stimuli_font,
text=message,
height=.1,
color=color,
pos=(0, -.5),
wrapWidth=2,
colorSpace='rgb',
opacity=1, depth=-6.0)
# Try adding our BCI logo. Pass if not found.
try:
wait_logo = visual.ImageStim(
self.window,
image='bcipy/static/images/gui_images/bci_cas_logo.png',
pos=(0, .5),
mask=None,
ori=0.0)
wait_logo.size = resize_image(
'bcipy/static/images/gui_images/bci_cas_logo.png',
self.window.size, 1)
wait_logo.draw()
except Exception:
self.logger.debug('Cannot load logo image')
pass
# Draw and flip the screen.
wait_message.draw()
self.window.flip()
def create_stimulus(self, height_int: int, mode: str = 'text'):
"""Create Stimulus.
Returns a TextStim or ImageStim object.
Args:
height_int: The height of the stimulus
mode: "text" or "image", determines which to return
"""
if mode == 'text':
return visual.TextStim(
win=self.window,
color='white',
height=height_int,
text='+',
font=self.stimuli_font,
pos=self.stimuli_pos,
wrapWidth=None,
colorSpace='rgb',
opacity=1,
depth=-6.0)
if mode == 'image':
return visual.ImageStim(
win=self.window,
image=None,
mask=None,
units='',
pos=self.stimuli_pos,
size=(height_int, height_int),
ori=0.0)
|
[
"logging.getLogger",
"bcipy.helpers.triggers.TriggerCallback",
"psychopy.core.Clock",
"psychopy.core.wait",
"psychopy.visual.TextStim",
"os.path.basename",
"bcipy.helpers.system_utils.get_screen_resolution",
"bcipy.helpers.stimuli.resize_image",
"bcipy.acquisition.marker_writer.NullMarkerWriter",
"bcipy.helpers.triggers._calibration_trigger",
"psychopy.visual.ImageStim"
] |
[((3211, 3238), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (3228, 3238), False, 'import logging\n'), ((3565, 3577), 'psychopy.core.Clock', 'core.Clock', ([], {}), '()\n', (3575, 3577), False, 'from psychopy import core, visual\n'), ((4137, 4154), 'bcipy.helpers.triggers.TriggerCallback', 'TriggerCallback', ([], {}), '()\n', (4152, 4154), False, 'from bcipy.helpers.triggers import TriggerCallback, _calibration_trigger\n'), ((4359, 4543), 'psychopy.visual.TextStim', 'visual.TextStim', ([], {'win': 'self.window', 'color': 'task_color[0]', 'height': 'task_height', 'text': 'task_text', 'font': 'task_font', 'pos': 'task_pos', 'wrapWidth': 'None', 'colorSpace': '"""rgb"""', 'opacity': '(1)', 'depth': '(-6.0)'}), "(win=self.window, color=task_color[0], height=task_height,\n text=task_text, font=task_font, pos=task_pos, wrapWidth=None,\n colorSpace='rgb', opacity=1, depth=-6.0)\n", (4374, 4543), False, 'from psychopy import core, visual\n'), ((12188, 12252), 'psychopy.visual.TextStim', 'visual.TextStim', ([], {'win': 'self.window', 'font': 'self.task.font', 'text': 'text'}), '(win=self.window, font=self.task.font, text=text)\n', (12203, 12252), False, 'from psychopy import core, visual\n'), ((12719, 12891), 'psychopy.visual.TextStim', 'visual.TextStim', ([], {'win': 'self.window', 'font': 'self.stimuli_font', 'text': 'message', 'height': '(0.1)', 'color': 'color', 'pos': '(0, -0.5)', 'wrapWidth': '(2)', 'colorSpace': '"""rgb"""', 'opacity': '(1)', 'depth': '(-6.0)'}), "(win=self.window, font=self.stimuli_font, text=message,\n height=0.1, color=color, pos=(0, -0.5), wrapWidth=2, colorSpace='rgb',\n opacity=1, depth=-6.0)\n", (12734, 12891), False, 'from psychopy import core, visual\n'), ((3678, 3696), 'bcipy.acquisition.marker_writer.NullMarkerWriter', 'NullMarkerWriter', ([], {}), '()\n', (3694, 3696), False, 'from bcipy.acquisition.marker_writer import NullMarkerWriter, MarkerWriter\n'), ((5289, 5485), 'psychopy.visual.TextStim', 'visual.TextStim', ([], {'win': 'self.window', 'color': '"""white"""', 'height': 'self.stimuli_height', 'text': '"""+"""', 'font': 'self.stimuli_font', 'pos': 'self.stimuli_pos', 'wrapWidth': 'None', 'colorSpace': '"""rgb"""', 'opacity': '(1)', 'depth': '(-6.0)'}), "(win=self.window, color='white', height=self.stimuli_height,\n text='+', font=self.stimuli_font, pos=self.stimuli_pos, wrapWidth=None,\n colorSpace='rgb', opacity=1, depth=-6.0)\n", (5304, 5485), False, 'from psychopy import core, visual\n'), ((5644, 5736), 'psychopy.visual.ImageStim', 'visual.ImageStim', ([], {'win': 'self.window', 'image': 'None', 'mask': 'None', 'pos': 'self.stimuli_pos', 'ori': '(0.0)'}), '(win=self.window, image=None, mask=None, pos=self.\n stimuli_pos, ori=0.0)\n', (5660, 5736), False, 'from psychopy import core, visual\n'), ((7169, 7312), 'bcipy.helpers.triggers._calibration_trigger', '_calibration_trigger', (['self.experiment_clock'], {'trigger_type': 'self.trigger_type', 'display': 'self.window', 'on_trigger': 'self.marker_writer.push_marker'}), '(self.experiment_clock, trigger_type=self.trigger_type,\n display=self.window, on_trigger=self.marker_writer.push_marker)\n', (7189, 7312), False, 'from bcipy.helpers.triggers import TriggerCallback, _calibration_trigger\n'), ((8558, 8626), 'psychopy.core.wait', 'core.wait', (["((inquiry[idx]['time_to_present'] - 1) / self.refresh_rate)"], {}), "((inquiry[idx]['time_to_present'] - 1) / self.refresh_rate)\n", (8567, 8626), False, 'from psychopy import core, visual\n'), ((13247, 13377), 
'psychopy.visual.ImageStim', 'visual.ImageStim', (['self.window'], {'image': '"""bcipy/static/images/gui_images/bci_cas_logo.png"""', 'pos': '(0, 0.5)', 'mask': 'None', 'ori': '(0.0)'}), "(self.window, image=\n 'bcipy/static/images/gui_images/bci_cas_logo.png', pos=(0, 0.5), mask=\n None, ori=0.0)\n", (13263, 13377), False, 'from psychopy import core, visual\n'), ((13477, 13566), 'bcipy.helpers.stimuli.resize_image', 'resize_image', (['"""bcipy/static/images/gui_images/bci_cas_logo.png"""', 'self.window.size', '(1)'], {}), "('bcipy/static/images/gui_images/bci_cas_logo.png', self.window\n .size, 1)\n", (13489, 13566), False, 'from bcipy.helpers.stimuli import resize_image\n'), ((14153, 14340), 'psychopy.visual.TextStim', 'visual.TextStim', ([], {'win': 'self.window', 'color': '"""white"""', 'height': 'height_int', 'text': '"""+"""', 'font': 'self.stimuli_font', 'pos': 'self.stimuli_pos', 'wrapWidth': 'None', 'colorSpace': '"""rgb"""', 'opacity': '(1)', 'depth': '(-6.0)'}), "(win=self.window, color='white', height=height_int, text='+',\n font=self.stimuli_font, pos=self.stimuli_pos, wrapWidth=None,\n colorSpace='rgb', opacity=1, depth=-6.0)\n", (14168, 14340), False, 'from psychopy import core, visual\n'), ((14541, 14674), 'psychopy.visual.ImageStim', 'visual.ImageStim', ([], {'win': 'self.window', 'image': 'None', 'mask': 'None', 'units': '""""""', 'pos': 'self.stimuli_pos', 'size': '(height_int, height_int)', 'ori': '(0.0)'}), "(win=self.window, image=None, mask=None, units='', pos=self\n .stimuli_pos, size=(height_int, height_int), ori=0.0)\n", (14557, 14674), False, 'from psychopy import core, visual\n'), ((4870, 5082), 'psychopy.visual.TextStim', 'visual.TextStim', ([], {'win': 'self.window', 'color': 'info_color[idx]', 'height': 'info_height[idx]', 'text': 'self.info_text[idx]', 'font': 'info_font[idx]', 'pos': 'info_pos[idx]', 'wrapWidth': 'None', 'colorSpace': '"""rgb"""', 'opacity': '(1)', 'depth': '(-6.0)'}), "(win=self.window, color=info_color[idx], height=info_height[\n idx], text=self.info_text[idx], font=info_font[idx], pos=info_pos[idx],\n wrapWidth=None, colorSpace='rgb', opacity=1, depth=-6.0)\n", (4885, 5082), False, 'from psychopy import core, visual\n'), ((10013, 10105), 'bcipy.helpers.stimuli.resize_image', 'resize_image', (["current_stim['sti'].image", "current_stim['sti'].win.size", 'this_stimuli_size'], {}), "(current_stim['sti'].image, current_stim['sti'].win.size,\n this_stimuli_size)\n", (10025, 10105), False, 'from bcipy.helpers.stimuli import resize_image\n'), ((10950, 10973), 'bcipy.helpers.system_utils.get_screen_resolution', 'get_screen_resolution', ([], {}), '()\n', (10971, 10973), False, 'from bcipy.helpers.system_utils import get_screen_resolution\n'), ((10202, 10242), 'os.path.basename', 'path.basename', (['self.stimuli_inquiry[idx]'], {}), '(self.stimuli_inquiry[idx])\n', (10215, 10242), True, 'import os.path as path\n')]
|
# Copyright 2018 MassOpenCloud.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__all__ = [
'init',
'cleanup',
'set_defaults',
'add_extra_exmods',
'clear_extra_exmods',
'get_allowed_exmods',
'RequestContextSerializer',
'get_client',
'get_server',
'get_notifier',
'TRANSPORT_ALIASES',
]
import functools
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from oslo_service import periodic_task
from oslo_utils import importutils
from oslo_utils import timeutils
import nova.conf
import nova.context
import nova.exception
from nova.i18n import _
from nova import objects
profiler = importutils.try_import("osprofiler.profiler")
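# try_import returns None when osprofiler is not installed, keeping profiling optional.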
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
TRANSPORT = None
LEGACY_NOTIFIER = None
NOTIFICATION_TRANSPORT = None
NOTIFIER = None
ALLOWED_EXMODS = [
nova.exception.__name__,
]
EXTRA_EXMODS = []
# NOTE(markmc): The nova.openstack.common.rpc entries are for backwards compat
# with Havana rpc_backend configuration values. The nova.rpc entries are for
# compat with Essex values.
TRANSPORT_ALIASES = {
'nova.openstack.common.rpc.impl_kombu': 'rabbit',
'nova.openstack.common.rpc.impl_qpid': 'qpid',
'nova.openstack.common.rpc.impl_zmq': 'zmq',
'nova.rpc.impl_kombu': 'rabbit',
'nova.rpc.impl_qpid': 'qpid',
'nova.rpc.impl_zmq': 'zmq',
}
class RequestContextSerializer(messaging.Serializer):
"""Request context serializer and deserializer from Nova.rpc.
This is the original serializer from nova. Nothing is changed besides
the docstring.
"""
def __init__(self, base):
self._base = base
def serialize_entity(self, context, entity):
if not self._base:
return entity
return self._base.serialize_entity(context, entity)
def deserialize_entity(self, context, entity):
if not self._base:
return entity
return self._base.deserialize_entity(context, entity)
def serialize_context(self, context):
return context.to_dict()
def deserialize_context(self, context):
return nova.context.RequestContext.from_dict(context)
class ProfilerRequestContextSerializer(RequestContextSerializer):
"""Serializer and deserializer impl.
Serializer and deserializer impl based on Jaeger tracing metadata
    propagation. For usage, check out docs/how-to.md. This is the only impl that
is important.
"""
pass
|
[
"oslo_utils.importutils.try_import",
"oslo_log.log.getLogger"
] |
[((1216, 1261), 'oslo_utils.importutils.try_import', 'importutils.try_import', (['"""osprofiler.profiler"""'], {}), "('osprofiler.profiler')\n", (1238, 1261), False, 'from oslo_utils import importutils\n'), ((1293, 1320), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1310, 1320), True, 'from oslo_log import log as logging\n')]
|
# Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Transformer model components."""
from typing import Optional
import haiku as hk
import jax
import jax.numpy as jnp
import numpy as np
class CausalSelfAttention(hk.MultiHeadAttention):
"""Self attention with a causal mask applied."""
def __call__(
self,
query: jnp.ndarray,
key: Optional[jnp.ndarray] = None,
value: Optional[jnp.ndarray] = None,
mask: Optional[jnp.ndarray] = None,
) -> jnp.ndarray:
key = key if key is not None else query
value = value if value is not None else query
if query.ndim != 3:
raise ValueError('Expect queries of shape [B, T, D].')
seq_len = query.shape[1]
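    # Lower-triangular mask: position t may attend only to positions <= t.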
causal_mask = np.tril(np.ones((seq_len, seq_len)))
mask = mask * causal_mask if mask is not None else causal_mask
return super().__call__(query, key, value, mask)
class DenseBlock(hk.Module):
"""A 2-layer MLP which widens then narrows the input."""
def __init__(self,
init_scale: float,
widening_factor: int = 4,
name: Optional[str] = None):
super().__init__(name=name)
self._init_scale = init_scale
self._widening_factor = widening_factor
def __call__(self, x: jnp.ndarray) -> jnp.ndarray:
hiddens = x.shape[-1]
initializer = hk.initializers.VarianceScaling(self._init_scale)
x = hk.Linear(self._widening_factor * hiddens, w_init=initializer)(x)
x = jax.nn.gelu(x)
return hk.Linear(hiddens, w_init=initializer)(x)
class Transformer(hk.Module):
"""A transformer stack."""
def __init__(self,
num_heads: int,
num_layers: int,
dropout_rate: float,
name: Optional[str] = None):
super().__init__(name=name)
self._num_layers = num_layers
self._num_heads = num_heads
self._dropout_rate = dropout_rate
def __call__(self,
h: jnp.ndarray,
mask: Optional[jnp.ndarray],
is_training: bool) -> jnp.ndarray:
"""Connects the transformer.
Args:
h: Inputs, [B, T, D].
mask: Padding mask, [B, T].
is_training: Whether we're training or not.
Returns:
Array of shape [B, T, D].
"""
init_scale = 2. / self._num_layers
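    # Scale the residual-branch initialisation down with depth so activations stay
    # bounded as layers are stacked (a common heuristic for deep transformers).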
dropout_rate = self._dropout_rate if is_training else 0.
if mask is not None:
mask = mask[:, None, None, :]
# Note: names chosen to approximately match those used in the GPT-2 code;
# see https://github.com/openai/gpt-2/blob/master/src/model.py.
for i in range(self._num_layers):
h_norm = layer_norm(h, name=f'h{i}_ln_1')
h_attn = CausalSelfAttention(
num_heads=self._num_heads,
key_size=32,
w_init_scale=init_scale,
name=f'h{i}_attn')(h_norm, mask=mask)
h_attn = hk.dropout(hk.next_rng_key(), dropout_rate, h_attn)
h = h + h_attn
h_norm = layer_norm(h, name=f'h{i}_ln_2')
h_dense = DenseBlock(init_scale, name=f'h{i}_mlp')(h_norm)
h_dense = hk.dropout(hk.next_rng_key(), dropout_rate, h_dense)
h = h + h_dense
h = layer_norm(h, name='ln_f')
return h
def layer_norm(x: jnp.ndarray, name: Optional[str] = None) -> jnp.ndarray:
"""Apply a unique LayerNorm to x with default settings."""
return hk.LayerNorm(axis=-1,
create_scale=True,
create_offset=True,
name=name)(x)
|
[
"numpy.ones",
"jax.nn.gelu",
"haiku.initializers.VarianceScaling",
"haiku.next_rng_key",
"haiku.LayerNorm",
"haiku.Linear"
] |
[((1964, 2013), 'haiku.initializers.VarianceScaling', 'hk.initializers.VarianceScaling', (['self._init_scale'], {}), '(self._init_scale)\n', (1995, 2013), True, 'import haiku as hk\n'), ((2096, 2110), 'jax.nn.gelu', 'jax.nn.gelu', (['x'], {}), '(x)\n', (2107, 2110), False, 'import jax\n'), ((3945, 4016), 'haiku.LayerNorm', 'hk.LayerNorm', ([], {'axis': '(-1)', 'create_scale': '(True)', 'create_offset': '(True)', 'name': 'name'}), '(axis=-1, create_scale=True, create_offset=True, name=name)\n', (3957, 4016), True, 'import haiku as hk\n'), ((1375, 1402), 'numpy.ones', 'np.ones', (['(seq_len, seq_len)'], {}), '((seq_len, seq_len))\n', (1382, 1402), True, 'import numpy as np\n'), ((2022, 2084), 'haiku.Linear', 'hk.Linear', (['(self._widening_factor * hiddens)'], {'w_init': 'initializer'}), '(self._widening_factor * hiddens, w_init=initializer)\n', (2031, 2084), True, 'import haiku as hk\n'), ((2122, 2160), 'haiku.Linear', 'hk.Linear', (['hiddens'], {'w_init': 'initializer'}), '(hiddens, w_init=initializer)\n', (2131, 2160), True, 'import haiku as hk\n'), ((3483, 3500), 'haiku.next_rng_key', 'hk.next_rng_key', ([], {}), '()\n', (3498, 3500), True, 'import haiku as hk\n'), ((3685, 3702), 'haiku.next_rng_key', 'hk.next_rng_key', ([], {}), '()\n', (3700, 3702), True, 'import haiku as hk\n')]
|
#!/usr/bin/env python
import boto3
import cv2
import numpy
import os
import base64
import gspread
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from httplib2 import Http
from time import localtime, strftime, time, sleep
from oauth2client.service_account import ServiceAccountCredentials
from apiclient import discovery, errors
from apiclient.discovery import build
from oauth2client import client
from oauth2client import tools
from oauth2client import file
def compare_faces(
bucket,
key,
bucket_target,
key_target,
threshold=80,
region='us-east-1'):
'''
    Compare a source face with a target face using AWS Rekognition.
'''
rekognition = boto3.client('rekognition', region)
response = rekognition.compare_faces(
SourceImage={
'S3Object': {
'Bucket': bucket,
'Name': key,
}
},
TargetImage={
'S3Object': {
'Bucket': bucket_target,
'Name': key_target,
}
},
SimilarityThreshold=threshold,
)
return response['SourceImageFace'], response['FaceMatches']
def upload_log(text):
'''
Upload the Alert time to the google drive sheet
'''
scope = ['https://spreadsheets.google.com/feeds']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
'ProjectLog-41cafcffcf13.json', scope)
gc = gspread.authorize(credentials)
wks = gc.open('ISeeU_Log').sheet1
wks.append_row([text])
def send(service, user_id, message):
'''
Send the mime email package
'''
try:
message = (
service.users().messages().send(
userId=user_id,
body=message).execute())
print('Message Id: %s' % message['id'])
return message
except errors.HttpError as error:
print('An error occurred: %s' % error)
def create_email(sender, to, subject, message_text, pic):
'''
Create the email
Included information: Sender, Receiver, Subject, Text, Attached Image
'''
message = MIMEMultipart()
message['to'] = to
message['from'] = sender
message['Subject'] = subject
msg = MIMEText(message_text)
message.attach(msg)
fp = open(pic, 'rb')
msg = MIMEImage(fp.read(), _subtype='jpeg')
fp.close()
imagename = os.path.basename(pic)
msg.add_header('Content-Disposition', 'attachment', filename=imagename)
message.attach(msg)
return {'raw': base64.urlsafe_b64encode(message.as_string())}
def authenticate():
'''
    Use OAuth2 to get the credentials.
    This grants all permissions related to Gmail.
    client_secret.json is the secret key you get from Google.
    Reference: Gmail API Python quickstart
'''
SCOPES = 'https://mail.google.com'
store = file.Storage('credentials.json')
creds = store.get()
if not creds or creds.invalid:
flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
creds = tools.run_flow(flow, store)
service = discovery.build('gmail', 'v1', http=creds.authorize(Http()))
return service
def stranger_detected(pic):
'''
    Record the current date/time and use it as the code the user must reply
    with to trigger the alarm
'''
nowtime = strftime("%Y-%m-%d %H:%M:%S", localtime())
trigcode = strftime("%d%H%M%S", localtime())
# Upload log to Google drive
text = 'Stranger show up at ' + nowtime
upload_log(text)
# Information of email
# pic = 'guldan.jpg' # Attached Image
sender = "<EMAIL>"
to = "<EMAIL>" # User email address
subject = "Alert from ISeeU!"
text = text + '\nReply ' + trigcode + ' to trigger the alarm.'
# Sending email to user
service = authenticate()
message = create_email(sender, to, subject, text, pic)
send(service, 'me', message)
return service, subject, trigcode
def main():
while True:
print('No face detected...')
if os.path.isfile('face.jpg'):
print('Face found!')
bucket_name = 'ec500j1-project-iseeu'
source_name = ['sh.jpg'] # User input faces
target_name = 'face.jpg' # Temporary image
s3 = boto3.client('s3')
# Upload images to s3 server
for img in source_name:
s3.upload_file(img, bucket_name, img)
s3.upload_file(target_name, bucket_name, target_name)
while True:
try:
# Check if the images are successfully uploaded
for img in source_name:
boto3.resource('s3').Object(bucket_name, img).load()
boto3.resource('s3').Object(
bucket_name, target_name).load()
except BaseException:
continue
break
sources, matches = {}, {}
for img in source_name:
try:
sources[img], matches[img] = compare_faces(
bucket_name, img, bucket_name, target_name)
except Exception as e:
# If Rekognition failure
                    print('Rekognition error: ' + str(e))
os.remove('face.jpg')
if len(matches[img]) == 0:
# Send notification email
service, target, trigcode = stranger_detected(
'face.jpg')
user_id = 'me'
flag = False # Flag for trigger alert
st = time()
while time() - st < 120: # Listen for 2 minutes
                    '''
                    Check the mailbox for the user's reply every 30 seconds.
                    If the subject matches, check whether the trigcode matches.
                    If the trigcode also matches, set the flag to trigger the alarm.
                    '''
threads = service.users().threads().list(
userId=user_id).execute().get('threads', [])
for thread in threads:
tdata = service.users().threads().get(
userId=user_id, id=thread['id']).execute()
nmsgs = len(tdata['messages'])
msg = tdata['messages'][0]['payload']
subject = ''
for header in msg['headers']:
if header['name'] == 'Subject':
subject = header['value']
break
if subject == target:
if thread[u'snippet'][0:8] == trigcode:
# If user replies with trigcode
flag = True
break
if flag:
# If user replies with trigcode
break
nt = strftime('%Y-%m-%d %H:%M:%S', localtime())
print('Still listening: ' + nt)
sleep(30)
print('Alert!') # Emulated alert
else:
print('Not a stranger') # Do nothing
# Delete all images from s3 server
for img in source_name:
s3.delete_object(Bucket=bucket_name, Key=img)
s3.delete_object(Bucket=bucket_name, Key=target_name)
            os.remove('face.jpg')  # Delete temporary image
sleep(10)
if __name__ == '__main__':
main()
|
[
"boto3.client",
"gspread.authorize",
"oauth2client.client.flow_from_clientsecrets",
"time.sleep",
"os.path.isfile",
"oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name",
"os.remove",
"email.mime.multipart.MIMEMultipart",
"boto3.resource",
"os.path.basename",
"oauth2client.file.Storage",
"httplib2.Http",
"oauth2client.tools.run_flow",
"time.localtime",
"time.time",
"email.mime.text.MIMEText"
] |
[((784, 819), 'boto3.client', 'boto3.client', (['"""rekognition"""', 'region'], {}), "('rekognition', region)\n", (796, 819), False, 'import boto3\n'), ((1421, 1513), 'oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name', 'ServiceAccountCredentials.from_json_keyfile_name', (['"""ProjectLog-41cafcffcf13.json"""', 'scope'], {}), "('ProjectLog-41cafcffcf13.json'\n , scope)\n", (1469, 1513), False, 'from oauth2client.service_account import ServiceAccountCredentials\n'), ((1527, 1557), 'gspread.authorize', 'gspread.authorize', (['credentials'], {}), '(credentials)\n', (1544, 1557), False, 'import gspread\n'), ((2198, 2213), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', ([], {}), '()\n', (2211, 2213), False, 'from email.mime.multipart import MIMEMultipart\n'), ((2309, 2331), 'email.mime.text.MIMEText', 'MIMEText', (['message_text'], {}), '(message_text)\n', (2317, 2331), False, 'from email.mime.text import MIMEText\n'), ((2461, 2482), 'os.path.basename', 'os.path.basename', (['pic'], {}), '(pic)\n', (2477, 2482), False, 'import os\n'), ((2935, 2967), 'oauth2client.file.Storage', 'file.Storage', (['"""credentials.json"""'], {}), "('credentials.json')\n", (2947, 2967), False, 'from oauth2client import file\n'), ((3042, 3102), 'oauth2client.client.flow_from_clientsecrets', 'client.flow_from_clientsecrets', (['"""client_secret.json"""', 'SCOPES'], {}), "('client_secret.json', SCOPES)\n", (3072, 3102), False, 'from oauth2client import client\n'), ((3119, 3146), 'oauth2client.tools.run_flow', 'tools.run_flow', (['flow', 'store'], {}), '(flow, store)\n', (3133, 3146), False, 'from oauth2client import tools\n'), ((3418, 3429), 'time.localtime', 'localtime', ([], {}), '()\n', (3427, 3429), False, 'from time import localtime, strftime, time, sleep\n'), ((3467, 3478), 'time.localtime', 'localtime', ([], {}), '()\n', (3476, 3478), False, 'from time import localtime, strftime, time, sleep\n'), ((4081, 4107), 'os.path.isfile', 'os.path.isfile', (['"""face.jpg"""'], {}), "('face.jpg')\n", (4095, 4107), False, 'import os\n'), ((7760, 7769), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (7765, 7769), False, 'from time import localtime, strftime, time, sleep\n'), ((4323, 4341), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (4335, 4341), False, 'import boto3\n'), ((7703, 7724), 'os.remove', 'os.remove', (['"""face.jpg"""'], {}), "('face.jpg')\n", (7712, 7724), False, 'import os\n'), ((3213, 3219), 'httplib2.Http', 'Http', ([], {}), '()\n', (3217, 3219), False, 'from httplib2 import Http\n'), ((5688, 5694), 'time.time', 'time', ([], {}), '()\n', (5692, 5694), False, 'from time import localtime, strftime, time, sleep\n'), ((5354, 5375), 'os.remove', 'os.remove', (['"""face.jpg"""'], {}), "('face.jpg')\n", (5363, 5375), False, 'import os\n'), ((7334, 7343), 'time.sleep', 'sleep', (['(30)'], {}), '(30)\n', (7339, 7343), False, 'from time import localtime, strftime, time, sleep\n'), ((5721, 5727), 'time.time', 'time', ([], {}), '()\n', (5725, 5727), False, 'from time import localtime, strftime, time, sleep\n'), ((7241, 7252), 'time.localtime', 'localtime', ([], {}), '()\n', (7250, 7252), False, 'from time import localtime, strftime, time, sleep\n'), ((4794, 4814), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (4808, 4814), False, 'import boto3\n'), ((4721, 4741), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (4735, 4741), False, 'import boto3\n')]
|
# -*- coding: utf-8 -*-
"""Implicitly reference attributes of an object."""
from ast import Name, Assign, Load, Call, Lambda, With, Str, arg, \
Attribute, Subscript, Store, Del
from macropy.core.quotes import macros, q, u, name, ast_literal
from macropy.core.hquotes import macros, hq
from macropy.core.walkers import Walker
from .util import wrapwith, AutorefMarker
from .letdoutil import isdo, islet, ExpandedDoView, ExpandedLetView
from ..dynassign import dyn
from ..lazyutil import force1, mark_lazy
# with autoref(o):
# with autoref(scipy.loadmat("mydata.mat")): # evaluate once, assign to a gensym
# with autoref(scipy.loadmat("mydata.mat")) as o: # evaluate once, assign to given name
#
# We need something like::
#
# with autoref(o):
# x # --> (o.x if hasattr(o, "x") else x)
# x.a # --> (o.x.a if hasattr(o, "x") else x.a)
# x[s] # --> (o.x[s] if hasattr(o, "x") else x[s])
# o # --> o
# with autoref(p):
# x # --> (p.x if hasattr(p, "x") else (o.x if hasattr(o, "x") else x))
# x.a # --> (p.x.a if hasattr(p, "x") else (o.x.a if hasattr(o, "x") else x.a))
# x[s] # --> (p.x[s] if hasattr(p, "x") else (o.x[s] if hasattr(o, "x") else x[s]))
# o # --> (p.o if hasattr(p, "o") else o)
# o.x # --> (p.o.x if hasattr(p, "o") else o.x)
# o[s] # --> (p.o[s] if hasattr(p, "o") else o[s])
#
# One possible clean-ish implementation is::
#
# with AutorefMarker("o"): # no-op at runtime
# x # --> (lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x")))
# x.a # --> ((lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x")))).a
# x[s] # --> ((lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x"))))[s]
# o # --> o (can only occur if an asname is supplied)
# with AutorefMarker("p"):
# x # --> (lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))
# x.a # --> ((lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x"))).a
# x[s] # --> ((lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))[s]
# # when the inner autoref expands, it doesn't know about the outer one, so we will get this:
# o # --> (lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o")))
# o.x # --> ((lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o")))).x
# o[s] # --> ((lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o"))))[s]
# # the outer autoref needs the marker to know to skip this (instead of looking up o.p):
# p # --> p
#
# The lambda is needed, because the lexical-variable lookup for ``x`` must occur at the use site,
# and it can only be performed by Python itself. We could modify ``_autoref_resolve`` to take
# ``locals()`` and ``globals()`` as arguments and look also in the ``builtins`` module,
# but that way we get no access to the enclosing scopes (the "E" in LEGB).
#
# Recall the blocks expand from inside out.
#
# We must leave an AST marker in place of each autoref block, so that any outer autoref block (when it expands)
# understands that within that block, any read access to the name "p" is to be left alone.
#
# In ``_autoref_resolve``, we use a single args parameter to avoid dealing with ``*args``
# when analyzing the Call node, thus avoiding much special-case code for the AST differences
# between Python 3.4 and 3.5+.
#
# In reality, we also capture-and-assign the autoref'd expr into a gensym'd variable (instead of referring
# to ``o`` and ``p`` directly), so that arbitrary expressions can be autoref'd without giving them
# a name in user code.
@mark_lazy
def _autoref_resolve(args):
*objs, s = [force1(x) for x in args]
for o in objs:
if hasattr(o, s):
return True, force1(getattr(o, s))
return False, None
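# --- Illustrative sketch (added; not part of the original macro code) ---
# What one expanded lookup does at runtime, using a plain object. This assumes
# force1() passes ordinary (non-lazy) values through unchanged.
def _demo_autoref_resolve():
    class _Obj:
        x = 42
    hit, value = _autoref_resolve((_Obj(), "x"))       # -> (True, 42)
    miss, nothing = _autoref_resolve((_Obj(), "nope"))  # -> (False, None)
    return hit, value, miss, nothing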
def autoref(block_body, args, asname):
assert len(args) == 1, "expected exactly one argument, the expr to implicitly reference"
assert block_body, "expected at least one statement inside the 'with autoref' block"
gen_sym = dyn.gen_sym
o = asname.id if asname else gen_sym("_o") # Python itself guarantees asname to be a bare Name.
# with AutorefMarker("_o42"):
def isexpandedautorefblock(tree):
if not (type(tree) is With and len(tree.items) == 1):
return False
ctxmanager = tree.items[0].context_expr
return type(ctxmanager) is Call and \
type(ctxmanager.func) is Name and ctxmanager.func.id == "AutorefMarker" and \
len(ctxmanager.args) == 1 and type(ctxmanager.args[0]) is Str
def getreferent(tree):
return tree.items[0].context_expr.args[0].s
# (lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))
def isautoreference(tree):
return type(tree) is Call and \
len(tree.args) == 1 and type(tree.args[0]) is Call and \
type(tree.args[0].func) is Name and tree.args[0].func.id == "_autoref_resolve" and \
type(tree.func) is Lambda and len(tree.func.args.args) == 1 and \
tree.func.args.args[0].arg.startswith("_ar")
def get_resolver_list(tree): # (p, o, "x")
return tree.args[0].args[0].elts
def add_to_resolver_list(tree, objnode):
lst = get_resolver_list(tree)
lst.insert(-1, objnode)
# x --> the autoref code above.
def makeautoreference(tree):
assert type(tree) is Name and (type(tree.ctx) is Load or not tree.ctx)
newtree = hq[(lambda __ar_: __ar_[1] if __ar_[0] else ast_literal[tree])(_autoref_resolve((name[o], u[tree.id])))]
our_lambda_argname = gen_sym("_ar")
@Walker
def renametmp(tree, **kw):
if type(tree) is Name and tree.id == "__ar_":
tree.id = our_lambda_argname
elif type(tree) is arg and tree.arg == "__ar_":
tree.arg = our_lambda_argname
return tree
return renametmp.recurse(newtree)
@Walker
def transform(tree, *, referents, set_ctx, stop, **kw):
if type(tree) in (Attribute, Subscript, Name) and type(tree.ctx) in (Store, Del):
stop()
# skip autoref lookup for let/do envs
elif islet(tree):
view = ExpandedLetView(tree)
set_ctx(referents=referents + [view.body.args.args[0].arg]) # lambda e14: ...
elif isdo(tree):
view = ExpandedDoView(tree)
set_ctx(referents=referents + [view.body[0].args.args[0].arg]) # lambda e14: ...
elif isexpandedautorefblock(tree):
set_ctx(referents=referents + [getreferent(tree)])
elif isautoreference(tree): # generated by an inner already expanded autoref block
stop()
thename = get_resolver_list(tree)[-1].s
if thename in referents:
# remove autoref lookup for an outer referent, inserted early by an inner autoref block
# (that doesn't know that any outer block exists)
tree = q[name[thename]] # (lambda ...)(_autoref_resolve((p, "o"))) --> o
else:
add_to_resolver_list(tree, q[name[o]]) # _autoref_resolve((p, "x")) --> _autoref_resolve((p, o, "x"))
elif type(tree) is Call and type(tree.func) is Name and tree.func.id == "AutorefMarker": # nested autorefs
stop()
elif type(tree) is Name and (type(tree.ctx) is Load or not tree.ctx) and tree.id not in referents:
stop()
tree = makeautoreference(tree)
# Attribute works as-is, because a.b.c --> Attribute(Attribute(a, "b"), "c"), so Name "a" gets transformed.
# Subscript similarly, a[1][2] --> Subscript(Subscript(a, 1), 2), so Name "a" gets transformed.
return tree
# skip (by name) some common references inserted by other macros
always_skip = ['letter', 'dof', 'namelambda', 'curry', 'currycall', 'lazy', 'lazyrec', 'lazycall']
newbody = [Assign(targets=[q[name[o]]], value=args[0])]
for stmt in block_body:
newbody.append(transform.recurse(stmt, referents=always_skip + [o]))
return wrapwith(item=hq[AutorefMarker(u[o])],
body=newbody,
locref=block_body[0])
|
[
"ast.Assign"
] |
[((8203, 8246), 'ast.Assign', 'Assign', ([], {'targets': '[q[name[o]]]', 'value': 'args[0]'}), '(targets=[q[name[o]]], value=args[0])\n', (8209, 8246), False, 'from ast import Name, Assign, Load, Call, Lambda, With, Str, arg, Attribute, Subscript, Store, Del\n')]
|
# -*- coding: utf-8 -*-
"""
This is part of HashBruteStation software
Docs EN: http://hack4sec.pro/wiki/index.php/Hash_Brute_Station_en
Docs RU: http://hack4sec.pro/wiki/index.php/Hash_Brute_Station
License: MIT
Copyright (c) <NAME> <http://anton-kuzmin.ru> (ru) <http://anton-kuzmin.pro> (en)
Integration tests for HashlistsByAlgLoaderThread
"""
import sys
import os
import time
import pytest
sys.path.append('../../')
from libs.common import file_get_contents, md5
from classes.HashlistsByAlgLoaderThread import HashlistsByAlgLoaderThread
from CommonUnit import CommonUnit
class Test_HashlistsByAlgLoaderThread(CommonUnit):
""" Unit tests for HashlistsByAlgLoaderThread """
db = None
thrd = None
def setup(self):
""" Tests setup """
self._clean_db()
self.thrd = HashlistsByAlgLoaderThread()
self.thrd.catch_exceptions = False
def teardown(self):
""" Tests teardown """
if isinstance(self.thrd, HashlistsByAlgLoaderThread):
self.thrd.available = False
time.sleep(1)
del self.thrd
self._clean_db()
def test_get_common_hashlist_id_by_alg_get(self):
""" Test get_common_hashlist_id_by_alg_get() """
self._add_hashlist(have_salts=1, common_by_alg=3)
assert self.thrd.get_common_hashlist_id_by_alg(3) == 1
test_data = [
(
1,
{'hash': 'a', 'salt': '1', 'summ': md5('a:1')},
),
(
0,
{'hash': 'a', 'salt': '', 'summ': md5('a')},
),
]
@pytest.mark.parametrize("have_salt,_hash", test_data)
def test_get_common_hashlist_id_by_alg_create(self, have_salt, _hash):
"""
Test get_common_hashlist_id_by_alg_create()
        :param have_salt: does the hashlist have salts?
:param _hash: hash data row
:return:
"""
self._add_hashlist(have_salts=have_salt, common_by_alg=0)
self._add_hash(hash=_hash['hash'], salt=_hash['salt'], summ=_hash['summ'])
assert self.thrd.get_common_hashlist_id_by_alg(3) == 2
test_hashlist_data = {'id': 2, 'name': 'All-MD4', 'have_salts': have_salt, 'delimiter': self.thrd.DELIMITER,
'cracked': 0, 'uncracked': 0, 'errors': '', 'parsed': 0, 'status': 'ready',
'common_by_alg': 3}
hashlist_data = self.db.fetch_row("SELECT * FROM hashlists WHERE id = 2")
for field in test_hashlist_data:
assert hashlist_data[field] == test_hashlist_data[field]
def test_get_common_hashlist_id_by_alg_with_salt_create_one_salt_forget(self):
""" Test get_common_hashlist_id_by_alg_create() """
self._add_hashlist(have_salts=1, common_by_alg=0)
self._add_hash(hash='a', salt='b', summ='333')
self._add_hashlist(id=2, have_salts=0, common_by_alg=0)
self._add_hash(hashlist_id=2, hash='c', salt='d', summ='111')
assert self.thrd.get_common_hashlist_id_by_alg(3) == 3
test_hashlist_data = {'id': 3, 'name': 'All-MD4', 'have_salts': 1, 'delimiter': self.thrd.DELIMITER,
'cracked': 0, 'uncracked': 0, 'errors': '', 'parsed': 0, 'status': 'ready',
'common_by_alg': 3}
hashlist_data = self.db.fetch_row("SELECT * FROM hashlists WHERE id = 3")
for field in test_hashlist_data:
assert hashlist_data[field] == test_hashlist_data[field]
def test_get_current_work_hashlist(self):
""" Test get_current_work_hashlist() """
assert not self.thrd.get_current_work_hashlist()
self.db.insert("task_works", {'hashlist_id': 3, 'status': 'work', 'task_id': 1})
assert self.thrd.get_current_work_hashlist() == 3
def test_get_hashlist_status(self):
""" Test get_hashlist_status() """
self._add_hashlist(common_by_alg=1)
assert self.thrd.get_hashlist_status(1) == 'ready'
def test_is_alg_in_parse(self):
""" Test is_alg_in_parse() """
assert self.thrd.is_alg_in_parse(3) is False
self._add_hashlist(common_by_alg=1)
self.db.insert("task_works", {'hashlist_id': 1, 'status': 'waitoutparse', 'task_id': 1})
assert self.thrd.is_alg_in_parse(3) is True
assert self.thrd.is_alg_in_parse(4) is False
self._add_hashlist(id=2, alg_id=4, common_by_alg=1)
self.db.insert("task_works", {'hashlist_id': 2, 'status': 'outparsing', 'task_id': 1})
assert self.thrd.is_alg_in_parse(4) is True
def test_hashes_count_in_hashlist(self):
""" Test hashes_count_in_hashlist() """
assert self.thrd.hashes_count_in_hashlist(1) == 0
self._add_hash()
assert self.thrd.hashes_count_in_hashlist(1) == 1
def test_hashes_count_by_algs(self):
""" Test hashes_count_by_algs() """
assert self.thrd.hashes_count_by_algs() == {}
self._add_hashlist()
self._add_hash(summ='111')
self._add_hash(summ='222', hash='a', salt='b')
self._add_hashlist(id=2, alg_id=4)
self._add_hash(hashlist_id=2, summ='333')
assert self.thrd.hashes_count_by_algs() == {3: 2, 4: 1}
def test_is_alg_have_salts(self):
""" Test is_alg_have_salts() """
self._add_hashlist()
assert self.thrd.is_alg_have_salts(3) is False
self._add_hashlist(id=2, have_salts=1) # Forget salt bug
assert self.thrd.is_alg_have_salts(3) is True
def test_get_possible_hashlist_and_alg_simple(self):
""" Test get_possible_hashlist_and_alg_simple() """
self._add_hashlist()
self._add_hash(hash='a', summ='111')
self._add_hash(hash='b', summ='222')
assert self.thrd.get_possible_hashlist_and_alg() == {'hashlist_id': 2, 'alg_id': 3}
def test_get_possible_hashlist_and_alg_none_already(self):
""" Test get_possible_hashlist_and_alg_none_already() """
self._add_hashlist()
self._add_hash(hash='a', summ='111')
self._add_hash(hash='b', summ='222')
self._add_hashlist(id=2, common_by_alg=3)
self._add_hash(hashlist_id=2, hash='a', summ='111')
self._add_hash(hashlist_id=2, hash='b', summ='222')
assert self.thrd.get_possible_hashlist_and_alg() is None
def test_get_possible_hashlist_and_alg_none_in_parse(self):
""" Test get_possible_hashlist_and_alg_none_in_parse() """
self.db.insert("task_works", {'hashlist_id': 1, 'status': 'waitoutparse', 'task_id': 1})
self._add_hashlist()
self._add_hash(hash='a', summ='111')
self._add_hash(hash='b', summ='222')
assert self.thrd.get_possible_hashlist_and_alg() is None
self.db.update("task_works", {'status': 'outparsing'}, "id=1")
assert self.thrd.get_possible_hashlist_and_alg() is None
def test_get_possible_hashlist_and_alg_none_not_ready(self):
""" Test get_possible_hashlist_and_alg_none_not_ready() """
self._add_hashlist()
self._add_hash(hash='a', summ='111')
self._add_hash(hash='b', summ='222')
self._add_hashlist(id=2, status='wait', common_by_alg=3)
assert self.thrd.get_possible_hashlist_and_alg() is None
def test_get_possible_hashlist_and_alg_none_in_work(self):
""" Test get_possible_hashlist_and_alg_none_in_work() """
self._add_hashlist()
self._add_hash(hash='a', summ='111')
self._add_hash(hash='b', summ='222')
self._add_hashlist(id=2, common_by_alg=3)
self.db.insert("task_works", {'hashlist_id': 2, 'status': 'work', 'task_id': 1})
assert self.thrd.get_possible_hashlist_and_alg() is None
def test_clean_old_hashes(self):
""" Test clean_old_hashes() """
self._add_hashlist()
self._add_hash(hash='a', summ='111')
self._add_hash(hash='b', summ='222')
assert self.db.fetch_one("SELECT COUNT(*) FROM hashes WHERE hashlist_id = 1") == 2
self.thrd.clean_old_hashes(1)
assert self.db.fetch_one("SELECT COUNT(*) FROM hashes WHERE hashlist_id = 1") == 0
assert self.db.fetch_one("SELECT cracked+uncracked FROM hashlists WHERE id = 1") == 0
def test_put_all_hashes_of_alg_in_file(self):
""" Test put_all_hashes_of_alg_in_file() """
self._add_hashlist()
self._add_hash(hash='a', summ='111')
self._add_hash(summ='222')
self._add_hash(hash='b', summ='333')
path = self.thrd.put_all_hashes_of_alg_in_file(3)
assert os.path.exists(path)
assert file_get_contents(path) == 'a\nb\n'
self._add_hashlist(id=2, have_salts=1, alg_id=4)
self._add_hash(hashlist_id=2, hash='a', salt='b', summ='111')
self._add_hash(hashlist_id=2, summ='222')
self._add_hash(hashlist_id=2, hash='c', salt='d', summ='333')
path = self.thrd.put_all_hashes_of_alg_in_file(4)
assert os.path.exists(path)
assert file_get_contents(path) == 'a{0}b\nc{0}d\n'.format(self.thrd.DELIMITER)
|
[
"os.path.exists",
"libs.common.md5",
"libs.common.file_get_contents",
"time.sleep",
"pytest.mark.parametrize",
"classes.HashlistsByAlgLoaderThread.HashlistsByAlgLoaderThread",
"sys.path.append"
] |
[((398, 423), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (413, 423), False, 'import sys\n'), ((1570, 1623), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""have_salt,_hash"""', 'test_data'], {}), "('have_salt,_hash', test_data)\n", (1593, 1623), False, 'import pytest\n'), ((811, 839), 'classes.HashlistsByAlgLoaderThread.HashlistsByAlgLoaderThread', 'HashlistsByAlgLoaderThread', ([], {}), '()\n', (837, 839), False, 'from classes.HashlistsByAlgLoaderThread import HashlistsByAlgLoaderThread\n'), ((8537, 8557), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (8551, 8557), False, 'import os\n'), ((8932, 8952), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (8946, 8952), False, 'import os\n'), ((1053, 1066), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1063, 1066), False, 'import time\n'), ((8573, 8596), 'libs.common.file_get_contents', 'file_get_contents', (['path'], {}), '(path)\n', (8590, 8596), False, 'from libs.common import file_get_contents, md5\n'), ((8968, 8991), 'libs.common.file_get_contents', 'file_get_contents', (['path'], {}), '(path)\n', (8985, 8991), False, 'from libs.common import file_get_contents, md5\n'), ((1442, 1452), 'libs.common.md5', 'md5', (['"""a:1"""'], {}), "('a:1')\n", (1445, 1452), False, 'from libs.common import file_get_contents, md5\n'), ((1537, 1545), 'libs.common.md5', 'md5', (['"""a"""'], {}), "('a')\n", (1540, 1545), False, 'from libs.common import file_get_contents, md5\n')]
|
from __future__ import absolute_import, division, print_function
import json
import logging
import os
import time
import importlib
import multiprocessing
import cv2
import fire
import logzero
from logzero import logger
import numpy as np
from rmexp import config, cvutils, dbutils, gabriel_pb2, client
from rmexp.schema import models
logzero.formatter(logging.Formatter(
fmt='%(asctime)s.%(msecs)03d - %(levelname)s: %(message)s', datefmt='%H:%M:%S'))
logzero.loglevel(logging.DEBUG)
def work_loop(job_queue, app, busy_wait=None):
"""[summary]
Arguments:
job_queue {[type]} -- [description]
app {[type]} -- [description]
Keyword Arguments:
busy_wait {float} -- if not None, busy spin seconds instead of running actual app (default: {None})
"""
handler = importlib.import_module(app).Handler()
while True:
get_ts = time.time()
msg = job_queue.get()[0]
get_wait = time.time() - get_ts
if get_wait > 2e-3:
logger.warn("[pid {}] took {} ms to get a new request. Maybe waiting".format(
os.getpid(), int(1000 * get_wait)))
arrival_ts = time.time()
gabriel_msg = gabriel_pb2.Message()
gabriel_msg.ParseFromString(msg)
encoded_im, ts = gabriel_msg.data, gabriel_msg.timestamp
logger.debug("[pid {}] about to process frame {}".format(
os.getpid(), gabriel_msg.index))
cts = time.clock()
if not busy_wait:
# do real work
encoded_im_np = np.frombuffer(encoded_im, dtype=np.uint8)
img = cv2.imdecode(encoded_im_np, cv2.CV_LOAD_IMAGE_UNCHANGED)
result = handler.process(img)
else:
# busy wait fixed time
tic = time.time()
while True:
if time.time() - tic > busy_wait:
break
result = 'busy wait {}'.format(busy_wait)
finished_ts = time.time()
time_lapse = (finished_ts - ts) * 1000
cpu_proc_ms = round((time.clock() - cts) * 1000)
if gabriel_msg.reply:
reply = gabriel_pb2.Message()
reply.data = str(result)
reply.timestamp = gabriel_msg.timestamp
reply.index = gabriel_msg.index
reply.finished_ts = finished_ts
reply.arrival_ts = arrival_ts
reply.cpu_proc_ms = cpu_proc_ms
job_queue.put([reply.SerializeToString(), ])
logger.debug('[pid {}] takes {} ms (cpu: {} ms) for frame {}: {}.'.format(
os.getpid(), (time.time() - ts) * 1000, cpu_proc_ms, gabriel_msg.index, result))
class Sampler(object):
"""A Class to sample video stream. Designed to work with cam.read().
Sample once every sample_period calls
"""
def __init__(self, sample_period, sample_func=None):
super(Sampler, self).__init__()
self._sp = sample_period
assert(type(sample_period) is int and sample_period > 0)
self._sf = sample_func
self._cnt = 0
def sample(self):
while True:
self._cnt = (self._cnt + 1) % self._sp
if self._cnt == 0:
return self._sf()
self._sf()
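# --- Illustrative sketch (added; not part of the original module) ---
# Sampler calls sample_func on every tick but only returns a value on every
# sample_period-th call; the fake reader below stands in for cam.read().
def _demo_sampler():
    calls = {'n': 0}
    def fake_read():
        calls['n'] += 1
        return calls['n']
    s = Sampler(sample_period=3, sample_func=fake_read)
    return s.sample()  # -> 3, i.e. the value of the 3rd underlying call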
def process_and_time(img, app_handler):
ts = time.time()
result = app_handler.process(img)
time_lapse = int(round((time.time() - ts) * 1000))
return result, time_lapse
def store(
data,
session,
store_result,
store_latency,
store_profile,
**kwargs):
name, trace, idx, result, time_lapse = data
if store_result:
rec, _ = dbutils.get_or_create(
session,
models.SS,
name=name,
index=idx,
trace=trace)
rec.val = str(result)
if store_latency:
rec, _ = dbutils.get_or_create(
session,
models.LegoLatency,
name=name,
index=idx)
rec.val = int(time_lapse)
if store_profile:
rec = kwargs
rec.update(
{'trace': trace,
'index': idx,
'name': name,
'latency': time_lapse
}
)
dbutils.insert(
session,
models.ResourceLatency,
rec
)
def batch_process(video_uri,
app,
experiment_name,
trace=None,
store_result=False,
store_latency=False,
store_profile=False,
**kwargs):
"""Batch process a video. Able to store both the result and the frame processing latency.
Arguments:
video_uri {string} -- Video URI
        app {string} -- Application name
experiment_name {string} -- Experiment name
Keyword Arguments:
trace {string} -- Trace id
        store_result {bool} -- Whether to store the result into the database (default: {False})
        store_latency {bool} -- Whether to store the per-frame latency into the database (default: {False})
        store_profile {bool} -- Whether to store a resource/latency profile record (default: {False})
cpu {string} -- No of CPUs used. Used to populate profile database
memory {string} -- No of memory used. Used to populate profile database
num_worker {int} -- No of simultaneous workers. Used to populate profile database
"""
if trace is None:
trace = os.path.basename(os.path.dirname(video_uri))
app = importlib.import_module(app)
app_handler = app.Handler()
vc = client.VideoClient(
app.__name__, video_uri, None, loop=False, random_start=False)
idx = 1
with dbutils.session_scope() as session:
for img in vc.get_frame_generator():
cpu_time_ts = time.clock()
result, time_lapse = process_and_time(img, app_handler)
logger.debug("[pid: {}] processing frame {} from {}. {} ms".format(os.getpid(),
idx, video_uri, int(time_lapse)))
logger.debug(result)
store(
(experiment_name, trace, idx, result, time_lapse),
session,
store_result,
store_latency,
store_profile,
**kwargs
)
idx += 1
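# --- Illustrative usage note (added; not part of the original module) ---
# Because the module ends with fire.Fire(), batch_process() can be driven from
# the command line; the file name, trace path and app name below are hypothetical:
#   python runner.py batch_process --video_uri=/traces/lego/trace1/video.mp4 \
#       --app=lego --experiment_name=lego-baseline --store_latency=True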
def phash(video_uri):
cam = cv2.VideoCapture(video_uri)
has_frame = True
with dbutils.session_scope(dry_run=False) as sess:
trace_name = os.path.basename(os.path.dirname(video_uri))
idx = 1
while has_frame:
has_frame, img = cam.read()
if img is not None:
cur_hash = cvutils.phash(img)
sess.add(models.SS(
name='{}-f{}-phash'.format(trace_name, idx),
val=str(cur_hash),
trace=trace_name))
idx += 1
def phash_diff_adjacent_frame(video_uri, output_dir):
cam = cv2.VideoCapture(video_uri)
os.makedirs(output_dir)
has_frame = True
prev_hash = None
idx = 1
logger.debug('calculating phash diff for adjacent frames')
while has_frame:
has_frame, img = cam.read()
if img is not None:
cur_hash = cvutils.phash(img)
if prev_hash is not None:
diff = cur_hash - prev_hash
cv2.putText(img, 'diff={}'.format(
diff), (int(img.shape[1] / 3), img.shape[0] - 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), thickness=5)
cv2.imwrite(os.path.join(
output_dir, '{:010d}.jpg'.format(idx)), img)
logger.debug(diff)
prev_hash = cur_hash
idx += 1
if __name__ == "__main__":
fire.Fire()
|
[
"time.clock",
"fire.Fire",
"cv2.imdecode",
"logzero.logger.debug",
"rmexp.gabriel_pb2.Message",
"rmexp.dbutils.insert",
"rmexp.dbutils.get_or_create",
"os.getpid",
"numpy.frombuffer",
"importlib.import_module",
"os.path.dirname",
"time.time",
"rmexp.client.VideoClient",
"os.makedirs",
"logging.Formatter",
"rmexp.dbutils.session_scope",
"logzero.loglevel",
"cv2.VideoCapture",
"rmexp.cvutils.phash"
] |
[((460, 491), 'logzero.loglevel', 'logzero.loglevel', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (476, 491), False, 'import logzero\n'), ((356, 458), 'logging.Formatter', 'logging.Formatter', ([], {'fmt': '"""%(asctime)s.%(msecs)03d - %(levelname)s: %(message)s"""', 'datefmt': '"""%H:%M:%S"""'}), "(fmt=\n    '%(asctime)s.%(msecs)03d - %(levelname)s: %(message)s', datefmt='%H:%M:%S')\n", (373, 458), False, 'import logging\n'), ((3275, 3286), 'time.time', 'time.time', ([], {}), '()\n', (3284, 3286), False, 'import time\n'), ((5414, 5442), 'importlib.import_module', 'importlib.import_module', (['app'], {}), '(app)\n', (5437, 5442), False, 'import importlib\n'), ((5484, 5570), 'rmexp.client.VideoClient', 'client.VideoClient', (['app.__name__', 'video_uri', 'None'], {'loop': '(False)', 'random_start': '(False)'}), '(app.__name__, video_uri, None, loop=False, random_start=\n    False)\n', (5502, 5570), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((6320, 6347), 'cv2.VideoCapture', 'cv2.VideoCapture', (['video_uri'], {}), '(video_uri)\n', (6336, 6347), False, 'import cv2\n'), ((6915, 6942), 'cv2.VideoCapture', 'cv2.VideoCapture', (['video_uri'], {}), '(video_uri)\n', (6931, 6942), False, 'import cv2\n'), ((6947, 6970), 'os.makedirs', 'os.makedirs', (['output_dir'], {}), '(output_dir)\n', (6958, 6970), False, 'import os\n'), ((7029, 7087), 'logzero.logger.debug', 'logger.debug', (['"""calculating phash diff for adjacent frames"""'], {}), "('calculating phash diff for adjacent frames')\n", (7041, 7087), False, 'from logzero import logger\n'), ((7703, 7714), 'fire.Fire', 'fire.Fire', ([], {}), '()\n', (7712, 7714), False, 'import fire\n'), ((883, 894), 'time.time', 'time.time', ([], {}), '()\n', (892, 894), False, 'import time\n'), ((1160, 1171), 'time.time', 'time.time', ([], {}), '()\n', (1169, 1171), False, 'import time\n'), ((1195, 1216), 'rmexp.gabriel_pb2.Message', 'gabriel_pb2.Message', ([], {}), '()\n', (1214, 1216), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((1450, 1462), 'time.clock', 'time.clock', ([], {}), '()\n', (1460, 1462), False, 'import time\n'), ((1959, 1970), 'time.time', 'time.time', ([], {}), '()\n', (1968, 1970), False, 'import time\n'), ((3627, 3703), 'rmexp.dbutils.get_or_create', 'dbutils.get_or_create', (['session', 'models.SS'], {'name': 'name', 'index': 'idx', 'trace': 'trace'}), '(session, models.SS, name=name, index=idx, trace=trace)\n', (3648, 3703), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((3834, 3906), 'rmexp.dbutils.get_or_create', 'dbutils.get_or_create', (['session', 'models.LegoLatency'], {'name': 'name', 'index': 'idx'}), '(session, models.LegoLatency, name=name, index=idx)\n', (3855, 3906), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((4204, 4256), 'rmexp.dbutils.insert', 'dbutils.insert', (['session', 'models.ResourceLatency', 'rec'], {}), '(session, models.ResourceLatency, rec)\n', (4218, 4256), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((5597, 5620), 'rmexp.dbutils.session_scope', 'dbutils.session_scope', ([], {}), '()\n', (5618, 5620), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((6378, 6414), 'rmexp.dbutils.session_scope', 'dbutils.session_scope', ([], {'dry_run': '(False)'}), '(dry_run=False)\n', (6399, 6414), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((810, 838), 'importlib.import_module', 'importlib.import_module', (['app'], {}), '(app)\n', (833, 838), False, 'import importlib\n'), ((947, 958), 'time.time', 'time.time', ([], {}), '()\n', (956, 958), False, 'import time\n'), ((1544, 1585), 'numpy.frombuffer', 'np.frombuffer', (['encoded_im'], {'dtype': 'np.uint8'}), '(encoded_im, dtype=np.uint8)\n', (1557, 1585), True, 'import numpy as np\n'), ((1604, 1660), 'cv2.imdecode', 'cv2.imdecode', (['encoded_im_np', 'cv2.CV_LOAD_IMAGE_UNCHANGED'], {}), '(encoded_im_np, cv2.CV_LOAD_IMAGE_UNCHANGED)\n', (1616, 1660), False, 'import cv2\n'), ((1770, 1781), 'time.time', 'time.time', ([], {}), '()\n', (1779, 1781), False, 'import time\n'), ((2126, 2147), 'rmexp.gabriel_pb2.Message', 'gabriel_pb2.Message', ([], {}), '()\n', (2145, 2147), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((5375, 5401), 'os.path.dirname', 'os.path.dirname', (['video_uri'], {}), '(video_uri)\n', (5390, 5401), False, 'import os\n'), ((5704, 5716), 'time.clock', 'time.clock', ([], {}), '()\n', (5714, 5716), False, 'import time\n'), ((6002, 6022), 'logzero.logger.debug', 'logger.debug', (['result'], {}), '(result)\n', (6014, 6022), False, 'from logzero import logger\n'), ((6462, 6488), 'os.path.dirname', 'os.path.dirname', (['video_uri'], {}), '(video_uri)\n', (6477, 6488), False, 'import os\n'), ((7196, 7214), 'rmexp.cvutils.phash', 'cvutils.phash', (['img'], {}), '(img)\n', (7209, 7214), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((1402, 1413), 'os.getpid', 'os.getpid', ([], {}), '()\n', (1411, 1413), False, 'import os\n'), ((2564, 2575), 'os.getpid', 'os.getpid', ([], {}), '()\n', (2573, 2575), False, 'import os\n'), ((6630, 6648), 'rmexp.cvutils.phash', 'cvutils.phash', (['img'], {}), '(img)\n', (6643, 6648), False, 'from rmexp import config, cvutils, dbutils, gabriel_pb2, client\n'), ((7597, 7615), 'logzero.logger.debug', 'logger.debug', (['diff'], {}), '(diff)\n', (7609, 7615), False, 'from logzero import logger\n'), ((1102, 1113), 'os.getpid', 'os.getpid', ([], {}), '()\n', (1111, 1113), False, 'import os\n'), ((2047, 2059), 'time.clock', 'time.clock', ([], {}), '()\n', (2057, 2059), False, 'import time\n'), ((3353, 3364), 'time.time', 'time.time', ([], {}), '()\n', (3362, 3364), False, 'import time\n'), ((5864, 5875), 'os.getpid', 'os.getpid', ([], {}), '()\n', (5873, 5875), False, 'import os\n'), ((1825, 1836), 'time.time', 'time.time', ([], {}), '()\n', (1834, 1836), False, 'import time\n'), ((2578, 2589), 'time.time', 'time.time', ([], {}), '()\n', (2587, 2589), False, 'import time\n')]
|
import random
import sys
# usage: python3 words_gen.py > list.txt
N = int(sys.argv[1]) # how many words should be in the resulting list
with open("scripts/words.txt", "r") as f:
words = f.readlines()
for i in range(N):
print(words[random.randint(0, 466550 - 1)].rstrip())
|
[
"random.randint"
] |
[((251, 280), 'random.randint', 'random.randint', (['(0)', '(466550 - 1)'], {}), '(0, 466550 - 1)\n', (265, 280), False, 'import random\n')]
|
import unittest
from modules.Input import *
class CommandInputTest(unittest.TestCase):
def setUp(self):
self.field_a_locations = ["gOtOFieldAroW8",
" go to fieLDA RoW 18 ",
"gOTOfield A rOW88"]
self.field_b_locations = ["gOtOFieldBroW8",
" go to fieLDB RoW 18 ",
"gOTOfield B rOW88"]
self.charger_locations = ["gotocharGeR",
" goTo cHaRgeR ",
" go toCHARGER "]
self.planting_locations = ["plantPotaToes inFieLDA rOW7",
" plANt caRRottsin fieldBRow 88"]
def tearDown(self):
self.field_a_locations = []
self.field_b_locations = []
self.charger_locations = []
self.planting_locations = []
def test_field_a_rows(self):
self.assertEqual(user_input(self.field_a_locations[0])[0], "FAR8")
self.assertEqual(user_input(self.field_a_locations[0])[1], "N/A")
self.assertEqual(user_input(self.field_a_locations[1])[0], "FAR18")
self.assertEqual(user_input(self.field_a_locations[1])[1], "N/A")
self.assertEqual(user_input(self.field_a_locations[2])[0], "FAR88")
self.assertEqual(user_input(self.field_a_locations[2])[1], "N/A")
def test_field_b_rows(self):
self.assertEqual(user_input(self.field_b_locations[0])[0], "FBR8")
self.assertEqual(user_input(self.field_b_locations[0])[1], "N/A")
self.assertEqual(user_input(self.field_b_locations[1])[0], "FBR18")
self.assertEqual(user_input(self.field_b_locations[1])[1], "N/A")
self.assertEqual(user_input(self.field_b_locations[2])[0], "FBR88")
self.assertEqual(user_input(self.field_b_locations[2])[1], "N/A")
def test_charger(self):
self.assertEqual(user_input(self.charger_locations[0])[0], "Charger")
self.assertEqual(user_input(self.charger_locations[0])[1], "N/A")
self.assertEqual(user_input(self.charger_locations[1])[0], "Charger")
self.assertEqual(user_input(self.charger_locations[1])[1], "N/A")
self.assertEqual(user_input(self.charger_locations[2])[0], "Charger")
self.assertEqual(user_input(self.charger_locations[2])[1], "N/A")
def test_bad_input(self):
self.assertEqual(user_input("")[0], "Invalid task")
self.assertEqual(user_input("")[1], "N/A")
self.assertEqual(user_input("fg73f37g")[0], "Invalid task")
self.assertEqual(user_input("fg73f37g")[1], "N/A")
def test_planting_instructions(self):
self.assertEqual(user_input(self.planting_locations[0])[0], "FAR7")
self.assertEqual(user_input(self.planting_locations[0])[1], "PotaToes")
self.assertEqual(user_input(self.planting_locations[1])[0], "FBR88")
self.assertEqual(user_input(self.planting_locations[1])[1], "caRRotts")
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main"
] |
[((3021, 3036), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3034, 3036), False, 'import unittest\n')]
|
"""
-----------------------------------------------------------------------------
This source file is part of VPET - Virtual Production Editing Tools
http://vpet.research.animationsinstitut.de/
http://github.com/FilmakademieRnd/VPET
Copyright (c) 2021 <NAME>, Animationsinstitut R&D Lab
This project has been initiated in the scope of the EU funded project
Dreamspace under grant agreement no 610005 in the years 2014, 2015 and 2016.
http://dreamspaceproject.eu/
Post Dreamspace the project has been further developed on behalf of the
research and development activities of Animationsinstitut.
The VPET component Blender Scene Distribution is intended for research and development
purposes only. Commercial use of any kind is not permitted.
There is no support by Filmakademie. Since the Blender Scene Distribution is available
for free, Filmakademie shall only be liable for intent and gross negligence;
warranty is limited to malice. The Blender Scene Distribution may under no circumstances
be used for racist, sexual or any illegal purposes. In all non-commercial
productions, scientific publications, prototypical non-commercial software tools,
etc. using the Blender Scene Distribution Filmakademie has to be named as follows:
“VPET-Virtual Production Editing Tool by <NAME>,
Animationsinstitut (http://research.animationsinstitut.de)“.
In case a company or individual would like to use the Blender Scene Distribution in
a commercial surrounding or for commercial purposes, software based on these
components or any part thereof, the company/individual will have to contact
Filmakademie (research<at>filmakademie.de).
-----------------------------------------------------------------------------
"""
bl_info = {
"name" : "VPET Blender",
"author" : "<NAME>",
"description" : "",
"blender" : (2, 92, 2),
"version" : (0, 5, 0),
"location" : "VIEW3D",
"warning" : "",
"category" : "Animationsinstitut"
}
from typing import Set
import bpy
from .bl_op import DoDistribute
from .bl_op import StopDistribute
from .bl_op import SetupScene
from .bl_op import InstallZMQ
from .bl_panel import VPET_PT_Panel
from .tools import initialize
from .settings import VpetData
from .settings import VpetProperties
# imported classes to register
classes = (DoDistribute, StopDistribute, SetupScene, VPET_PT_Panel, VpetProperties, InstallZMQ)
## Register classes and VpetSettings
#
def register():
bpy.types.WindowManager.vpet_data = VpetData()
from bpy.utils import register_class
for cls in classes:
try:
register_class(cls)
print(f"Registering {cls.__name__}")
except Exception as e:
print(f"{cls.__name__} "+ str(e))
bpy.types.Scene.vpet_properties = bpy.props.PointerProperty(type=VpetProperties)
initialize()
print("Registered VPET Addon")
## Unregister for removal of Addon
#
def unregister():
del bpy.types.WindowManager.vpet_data
from bpy.utils import unregister_class
for cls in classes:
try:
unregister_class(cls)
except Exception as e:
print(f"{cls.__name__} "+ str(e))
print("Unregistered VPET Addon")
|
[
"bpy.props.PointerProperty",
"bpy.utils.register_class",
"bpy.utils.unregister_class"
] |
[((2745, 2791), 'bpy.props.PointerProperty', 'bpy.props.PointerProperty', ([], {'type': 'VpetProperties'}), '(type=VpetProperties)\n', (2770, 2791), False, 'import bpy\n'), ((2556, 2575), 'bpy.utils.register_class', 'register_class', (['cls'], {}), '(cls)\n', (2570, 2575), False, 'from bpy.utils import register_class\n'), ((3039, 3060), 'bpy.utils.unregister_class', 'unregister_class', (['cls'], {}), '(cls)\n', (3055, 3060), False, 'from bpy.utils import unregister_class\n')]
|
# Unlike the other datasets, CIFAR-10 uses ResNet and suffers from
# a variety of problems, including exploding gradients
import torch
import torch.nn as nn
from tqdm.notebook import tnrange, tqdm
# For loading model sanely
import os.path
import sys
# This here actually adds the path
sys.path.append("../../")
import models.resnet as resnet
# Define the `device` PyTorch will be running on, please hope it is CUDA
device = "cuda" if torch.cuda.is_available() else "cpu"
print("Notebook will use PyTorch Device: " + device.upper())
# Helps adjust learning rate for better results
def adjust_learning_rate(optimizer, epoch, learning_rate, long_training):
actual_learning_rate = learning_rate
if long_training:
first_update_threshold = 100
second_update_threshold = 150
else:
first_update_threshold = 20
second_update_threshold = 25
if epoch >= first_update_threshold:
actual_learning_rate = 0.01
if epoch >= second_update_threshold:
actual_learning_rate = 0.001
for param_group in optimizer.param_groups:
param_group["lr"] = actual_learning_rate
# This method creates a new model and also trains it
def standard_training(
trainSetLoader,
long_training=True,
load_if_available=False,
load_path="../models_data/CIFAR10/cifar10_standard"
):
# Number of epochs is decided by training length
if long_training:
epochs = 200
else:
epochs = 30
learning_rate = 0.1
# Network parameters
loss_function = nn.CrossEntropyLoss()
model = resnet.ResNet18()
model = model.to(device)
model = nn.DataParallel(model)
model.train()
# Consider using ADAM here as another gradient descent algorithm
optimizer = torch.optim.SGD(
model.parameters(), lr=learning_rate, momentum=0.9, weight_decay=0.0002
)
# If a trained model already exists, give up the training part
if load_if_available and os.path.isfile(load_path):
print("Found already trained model...")
model = torch.load(load_path)
print("... loaded!")
else:
print("Training the model...")
# Use a pretty progress bar to show updates
for epoch in tnrange(epochs, desc="Training Progress"):
# Print loss results
total_epoch_loss = 0
# Adjust the learning rate
adjust_learning_rate(optimizer, epoch, learning_rate, long_training)
for _, (images, labels) in enumerate(tqdm(trainSetLoader, desc="Batches")):
# Cast to proper tensors
images, labels = images.to(device), labels.to(device)
# Clean the gradients
optimizer.zero_grad()
# Predict
logits = model(images)
# Calculate loss
loss = loss_function(logits, labels)
# Gradient descent
loss.backward()
# Add total accumulated loss
total_epoch_loss += loss.item()
# Also clip the gradients (ReLU leads to vanishing or
# exploding gradients)
torch.nn.utils.clip_grad_norm_(model.parameters(), 10)
optimizer.step()
print("Loss at epoch {} is {}".format(epoch, total_epoch_loss))
print("... done!")
# Make sure the model is in eval mode before returning
model.eval()
return model
|
[
"torch.nn.CrossEntropyLoss",
"torch.load",
"models.resnet.ResNet18",
"tqdm.notebook.tnrange",
"torch.nn.DataParallel",
"torch.cuda.is_available",
"tqdm.notebook.tqdm",
"sys.path.append"
] |
[((287, 312), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (302, 312), False, 'import sys\n'), ((437, 462), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (460, 462), False, 'import torch\n'), ((1530, 1551), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1549, 1551), True, 'import torch.nn as nn\n'), ((1564, 1581), 'models.resnet.ResNet18', 'resnet.ResNet18', ([], {}), '()\n', (1579, 1581), True, 'import models.resnet as resnet\n'), ((1623, 1645), 'torch.nn.DataParallel', 'nn.DataParallel', (['model'], {}), '(model)\n', (1638, 1645), True, 'import torch.nn as nn\n'), ((2042, 2063), 'torch.load', 'torch.load', (['load_path'], {}), '(load_path)\n', (2052, 2063), False, 'import torch\n'), ((2217, 2258), 'tqdm.notebook.tnrange', 'tnrange', (['epochs'], {'desc': '"""Training Progress"""'}), "(epochs, desc='Training Progress')\n", (2224, 2258), False, 'from tqdm.notebook import tnrange, tqdm\n'), ((2497, 2533), 'tqdm.notebook.tqdm', 'tqdm', (['trainSetLoader'], {'desc': '"""Batches"""'}), "(trainSetLoader, desc='Batches')\n", (2501, 2533), False, 'from tqdm.notebook import tnrange, tqdm\n')]
|
import os
import numpy as np
from montepython.likelihood_class import Likelihood
import montepython.io_mp as io_mp
import warnings
import ccl_tools as tools
import pyccl as ccl
class covfefe(Likelihood):
# initialization routine
def __init__(self, path, data, command_line):
Likelihood.__init__(self, path, data, command_line)
self.nb = data.cosmo_arguments['n_bins']
self.cm = data.cosmo_arguments['cov']
n_sims = 20000
# Load Covariance matrix
fn = 'cov_{}_{}.npz'.format(self.cm,self.nb)
self.cov = np.load(os.path.join(self.data_directory, fn))['arr_0']
if self.cm=='sim':
factor = (n_sims-self.cov.shape[0]-2.)/(n_sims-1.)
else:
factor = 1.
self.icov = factor*np.linalg.inv(self.cov)
# Load ell bandpowers
self.ell_bp = np.load(os.path.join(self.data_directory, 'ell_bp.npz'))['lsims'].astype(int)
self.nl = len(self.ell_bp)
# Load photo_z
fn = 'z_{}.npz'.format(self.nb)
self.z = np.load(os.path.join(self.data_directory, fn))['arr_0']
fn = 'pz_{}.npz'.format(self.nb)
self.pz = np.load(os.path.join(self.data_directory, fn))['arr_0']
fn = 'bz_{}.npz'.format(self.nb)
self.bz = np.load(os.path.join(self.data_directory, fn))['arr_0']
# Load data
fn = 'data_{}.npz'.format(self.nb)
self.data = np.load(os.path.join(self.data_directory, fn))['arr_0']
# end of initialization
# compute likelihood
def loglkl(self, cosmo, data):
# Get theory Cls
cosmo_ccl = tools.get_cosmo_ccl(cosmo.pars)
tracers = tools.get_tracers_ccl(cosmo_ccl, self.z, self.pz, self.bz)
theory = tools.get_cls_ccl(cosmo_ccl, tracers, self.ell_bp)
# Get chi2
chi2 = (self.data-theory).dot(self.icov).dot(self.data-theory)
lkl = - 0.5 * chi2
return lkl
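# --- Illustrative sketch (added; not part of the original likelihood) ---
# The quantity computed above is a Gaussian log-likelihood,
# lkl = -0.5 * (d - t)^T C^{-1} (d - t), shown here on a tiny synthetic vector.
def _demo_gaussian_loglkl():
    d = np.array([1.0, 2.0, 3.0])   # "data" bandpowers
    t = np.array([1.1, 1.9, 3.2])   # "theory" bandpowers
    icov = np.linalg.inv(np.diag([0.1, 0.1, 0.1]))
    chi2 = (d - t).dot(icov).dot(d - t)
    return -0.5 * chi2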
|
[
"montepython.likelihood_class.Likelihood.__init__",
"ccl_tools.get_cls_ccl",
"os.path.join",
"ccl_tools.get_cosmo_ccl",
"numpy.linalg.inv",
"ccl_tools.get_tracers_ccl"
] |
[((297, 348), 'montepython.likelihood_class.Likelihood.__init__', 'Likelihood.__init__', (['self', 'path', 'data', 'command_line'], {}), '(self, path, data, command_line)\n', (316, 348), False, 'from montepython.likelihood_class import Likelihood\n'), ((1625, 1656), 'ccl_tools.get_cosmo_ccl', 'tools.get_cosmo_ccl', (['cosmo.pars'], {}), '(cosmo.pars)\n', (1644, 1656), True, 'import ccl_tools as tools\n'), ((1675, 1733), 'ccl_tools.get_tracers_ccl', 'tools.get_tracers_ccl', (['cosmo_ccl', 'self.z', 'self.pz', 'self.bz'], {}), '(cosmo_ccl, self.z, self.pz, self.bz)\n', (1696, 1733), True, 'import ccl_tools as tools\n'), ((1751, 1801), 'ccl_tools.get_cls_ccl', 'tools.get_cls_ccl', (['cosmo_ccl', 'tracers', 'self.ell_bp'], {}), '(cosmo_ccl, tracers, self.ell_bp)\n', (1768, 1801), True, 'import ccl_tools as tools\n'), ((785, 808), 'numpy.linalg.inv', 'np.linalg.inv', (['self.cov'], {}), '(self.cov)\n', (798, 808), True, 'import numpy as np\n'), ((582, 619), 'os.path.join', 'os.path.join', (['self.data_directory', 'fn'], {}), '(self.data_directory, fn)\n', (594, 619), False, 'import os\n'), ((1064, 1101), 'os.path.join', 'os.path.join', (['self.data_directory', 'fn'], {}), '(self.data_directory, fn)\n', (1076, 1101), False, 'import os\n'), ((1179, 1216), 'os.path.join', 'os.path.join', (['self.data_directory', 'fn'], {}), '(self.data_directory, fn)\n', (1191, 1216), False, 'import os\n'), ((1294, 1331), 'os.path.join', 'os.path.join', (['self.data_directory', 'fn'], {}), '(self.data_directory, fn)\n', (1306, 1331), False, 'import os\n'), ((1434, 1471), 'os.path.join', 'os.path.join', (['self.data_directory', 'fn'], {}), '(self.data_directory, fn)\n', (1446, 1471), False, 'import os\n'), ((870, 917), 'os.path.join', 'os.path.join', (['self.data_directory', '"""ell_bp.npz"""'], {}), "(self.data_directory, 'ell_bp.npz')\n", (882, 917), False, 'import os\n')]
|
from __future__ import division
import itertools
import json
import math
import os
import random
import shutil
import subprocess
import sys
durationA = str(5)
durationB = str(4)
durationC = str(1)
def main():
if len(sys.argv) > 1:
nbDepth = int(sys.argv[1])
if nbDepth < 2 :
nbDepth =2
else :
nbDepth =2
mainFolder = "depth"
if not os.path.exists(mainFolder):
subprocess.call(["mkdir", mainFolder])
generateDomain("depth", nbDepth)
#print "Every file has been written. Exiting"
def generateDomain(folderName, nbDepth):
domainFilename = folderName + "/" + folderName + "-flat" + str(nbDepth) + ".dom.anml"
printDomainToFile(domainFilename, nbDepth)
domainFilename = folderName + "/" + folderName + "-hier" + str(nbDepth) + ".dom.anml"
printDomainHierToFile(domainFilename, nbDepth)
def printDomainToFile(domainFilename, nbDepth):
with open(domainFilename, "w") as f:
for i in range(0, nbDepth):
f.write("predicate a" + str(i+1) +"();\n")
f.write("predicate b" + str(i+1) +"();\n")
f.write("predicate c" + str(i+1) +"();\n")
f.write("predicate d" + str(i+1) +"();\n")
f.write("predicate e" + str(i+1) +"();\n")
f.write("\naction An" + str(i+1) + " () {\n")
f.write("\tduration := " + durationA + ";\n")
if i > 0:
f.write("\t[start] {\n")
f.write("\t\tb"+ str(i) +" == true;\n")
f.write("\t\td"+ str(i) +" == true;\n")
f.write("\t\te"+ str(i) +" == true;\n")
f.write("\t};\n")
f.write("\t[start] a" + str(i+1) + " := true;\n")
f.write("\t[end] {\n")
f.write("\t\ta" + str(i+1) + " := false;\n")
f.write("\t\tb" + str(i+1) + " := true;\n")
f.write("\t\td" + str(i+1) + " := false;\n")
f.write("\t};\n")
f.write("};\n")
f.write("\naction Bn" + str(i+1) + " () {\n")
f.write("\tduration := " + durationB + ";\n")
f.write("\t[start] a" + str(i+1) + " == true;\n")
f.write("\t[start] c" + str(i+1) + " := true;\n")
f.write("\t[end] {\n")
f.write("\t\tc" + str(i+1) + " := false;\n")
f.write("\t\td" + str(i+1) + " := true;\n")
f.write("\t};\n")
f.write("};\n")
f.write("\naction Cn" + str(i+1) + " () {\n")
f.write("\tduration := " + durationC + ";\n")
f.write("\t[start] c" + str(i+1) + " == true;\n")
f.write("\t[end] {\n")
f.write("\t\tb" + str(i+1) + " := false;\n")
f.write("\t\te" + str(i+1) + " := true;\n")
f.write("\t};\n")
f.write("};\n")
######################## problem ###############
f.write("\n/*******Problem************/\n")
f.write("[all] contains{\n")
f.write("\tCn" + str(nbDepth) +"();\n")
f.write("};")
def printDomainHierToFile(domainFilename, nbDepth):
with open(domainFilename, "w") as f:
for i in range(0, nbDepth):
if i == 0:
f.write("\naction An" + str(i+1) + " () {\n")
f.write("\tmotivated;\n")
f.write("\tduration := " + durationA + ";\n")
f.write("};\n")
else:
f.write("\naction An" + str(i+1) + " () {\n")
f.write("\tmotivated;\n")
f.write("\tduration := " + durationA + ";\n")
f.write("\ta : ABC" + str(i) + "();\n")
f.write("\t end(a) < start;\n")
f.write("};\n")
f.write("\naction Bn" + str(i+1) + " () {\n")
f.write("\tduration := " + durationB + ";\n")
f.write("\tmotivated;\n")
f.write("};\n")
f.write("\naction Cn" + str(i+1) + " () {\n")
f.write("\tduration := " + durationC + ";\n")
f.write("\tmotivated;\n")
f.write("};\n")
f.write("\naction ABC" + str(i+1) + " () {\n")
f.write("\t[all] contains {\n")
f.write("\t\t b" + str(i+1) + " : An" + str(i+1) + "();\n")
f.write("\t\t d" + str(i+1) + " : Bn" + str(i+1) + "();\n")
f.write("\t\t e" + str(i+1) + " : Cn" + str(i+1) + "();\n")
f.write("\t};\n")
f.write("\tstart(b" + str(i+1) + ") < start(d" + str(i+1) + ");\n")
f.write("\tend(d" + str(i+1) + ") < end(b" + str(i+1) + ");\n")
f.write("\tstart(d" + str(i+1) + ") < start(e" + str(i+1) + ");\n")
f.write("\tend(e" + str(i+1) + ") < end(d" + str(i+1) + ");\n")
f.write("};\n")
#################### problem #############
f.write("\n/*******Problem************/\n")
f.write("[all] contains{\n")
f.write("\tCn" + str(nbDepth) +"();\n")
f.write("};")
if __name__ == "__main__":
main()
|
[
"os.path.exists",
"subprocess.call"
] |
[((394, 420), 'os.path.exists', 'os.path.exists', (['mainFolder'], {}), '(mainFolder)\n', (408, 420), False, 'import os\n'), ((430, 468), 'subprocess.call', 'subprocess.call', (["['mkdir', mainFolder]"], {}), "(['mkdir', mainFolder])\n", (445, 468), False, 'import subprocess\n')]
|
import decimal
from django import template
register = template.Library()
@register.simple_tag
def can_change_status(payment_request, user):
return payment_request.can_user_change_status(user)
@register.simple_tag
def can_delete(payment_request, user):
return payment_request.can_user_delete(user)
@register.simple_tag
def can_edit(payment_request, user):
return payment_request.can_user_edit(user)
@register.simple_tag
def percentage(value, total):
if not total:
return decimal.Decimal(0)
unrounded_total = (value / total) * 100
# round using Decimal since we're dealing with currency
rounded_total = unrounded_total.quantize(
decimal.Decimal('0.0'),
rounding=decimal.ROUND_DOWN,
)
return rounded_total
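# Example template usage (illustrative -- the {% load %} name depends on how this
# templatetag module is registered, and the variable names below are made up):
#   {% percentage paid_amount total_amount %}
#   {% can_edit payment_request request.user as user_can_edit %}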
|
[
"decimal.Decimal",
"django.template.Library"
] |
[((56, 74), 'django.template.Library', 'template.Library', ([], {}), '()\n', (72, 74), False, 'from django import template\n'), ((504, 522), 'decimal.Decimal', 'decimal.Decimal', (['(0)'], {}), '(0)\n', (519, 522), False, 'import decimal\n'), ((683, 705), 'decimal.Decimal', 'decimal.Decimal', (['"""0.0"""'], {}), "('0.0')\n", (698, 705), False, 'import decimal\n')]
|
""" pylabnet measurement and service classes for Swabian Instruments TimeTagger
which implements qudi's SlowCounter interface.
This file contains the pylabnet wrapper and service classes that allow qudi to
access a Swabian Instruments TT through the pylabnet network as a SlowCounter.
Steps:
- instantiate TimeTagger
- instantiate pylabnet-SlowCtrWrap (pass ref to TimeTagger as tagger)
- instantiate pylabnet-SlowCtrService and assign module to the created wrapper
- start pylabnet-server for SlowCtrService
- in qudi, instantiate SlowCtrClient as one of the hardware modules
"""
from pylabnet.network.core.service_base import ServiceBase
import TimeTagger as TT
import time
import copy
import pickle
class Wrap:
""" Measurement instance which implements qudi's SlowCounter interface.
"""
def __init__(self, tagger, channel_list, clock_frequency, buffer_size):
# References to the device and to TT.Counter measurement
self._tagger = tagger
self._counter = None
# Counter parameters
self._channel_list = channel_list
self._clock_frequency = clock_frequency
self._buffer_size = buffer_size
self._bin_width = 0
self._bin_width_sec = 0
def set_up_clock(self, clock_frequency=None, clock_channel=None):
"""
Sets sample clock frequency for the Counter measurement.
:param clock_frequency: (float) sample clock frequency. If not given,
configuration value is used
:param clock_channel: ignored (internal timebase is used to generate
sample clock signal)
:return: (int) operation status code: 0 - OK
-1 - Error
"""
# Use config value, if no clock_frequency is specified
if clock_frequency is None:
clock_frequency = self._clock_frequency
# Calculate final bin width
bin_width = int(1e12 / clock_frequency) # in picoseconds, for device
        bin_width_sec = bin_width * 1e-12  # in seconds, for software timing
# Set new values param to internal variables
self._bin_width = bin_width
self._bin_width_sec = bin_width_sec
return 0
def set_up_counter(self,
counter_channels=None,
sources=None,
clock_channel=None,
counter_buffer=None):
"""
Configures the actual counter with a given clock.
(list of int) [optional] list of channels
to count clicks on. If not given, config value is used.
:param counter_buffer: (int) [optional] size of the memory buffer.
If not given, config value is used.
:param counter_channels: ignored
        This argument should not be used. The Counter GUI initializes its set of plot curves
        self.curves during its on_activate() method. It basically calls
        counter_hardware.get_counter_channels() and uses this list to init self.curves.
        Only after that can the user click the "Start" button, which will call set_up_counter().
        And since the GUI has already initialized its set of curves, the set of channels must not be
        modified here! Modifying it would cause the GUI to fail.
:param sources: ignored
:param clock_channel: ignored
:return: (int) operation status code: 0 - OK
-1 - Error
"""
# Set counter channels
if counter_channels is not None:
channel_list = counter_channels
else:
channel_list = self._channel_list
# apply counter channel change
self.set_counter_channels(channel_list=channel_list)
# Set buffer size
if counter_buffer is not None:
buffer_size = counter_buffer
else:
buffer_size = self._buffer_size
# sanity check:
if not isinstance(buffer_size, int) or buffer_size <= 0:
# self.log.error('set_up_counter(): invalid parameter value counter_buffer = {}.'
# 'This parameter must be a positive integer.'
# ''.format(buffer_size))
return -1
# apply buffer size change
self._buffer_size = buffer_size
# Create instance of Counter measurement
try:
self._counter = TT.Counter(
tagger=self._tagger,
channels=self._channel_list,
binwidth=self._bin_width,
n_values=self._buffer_size
)
# handle initialization error (TT functions always produce NotImplementedError)
except NotImplementedError:
self._counter = None
# self.log.error('set_up_counter(): failed to instantiate TT.Counter measurement')
return -1
# Start Counter
# (TT.Counter measurement starts running immediately after instantiation,
# so it is necessary to erase all counts collected since instantiation)
self._counter.stop()
self._counter.clear()
self._counter.start()
return 0
def close_clock(self):
"""
Closes the clock.
:return: (int) error code: 0 - OK
-1 - Error
"""
# self._bin_width = 0
# self._bin_width_sec = 0
return 0
def close_counter(self):
"""
Closes the counter and cleans up afterwards.
:return: (int) error code: 0 - OK
-1 - Error
"""
# Try stopping and clearing TT.Counter measurement
try:
self._counter.stop()
self._counter.clear()
# Handle the case of exception in TT function call (NotImplementedError)
# and the case of self._ctr = None (AttributeError)
except (NotImplementedError, AttributeError):
pass
# Remove reference to the counter
# self._ctr = None
# Clear counter parameters
# self._buffer_size = []
# Do not clear channel list:
# Counter GUI inits its list of curves self.curves
# by calling counter_hardware.get_counter_channels() before
# calling counter_hardware.set_up_counter()
# If one clears _channel_list here, GUI will fail at the next
# "Start" button click after reloading.
#
# self._channel_list = []
return 0
def get_counter(self, samples=1):
"""
Returns the current counts per second of the counter.
:param samples: (int) [optional] number of samples to read in one go
(default is one sample)
:return: numpy.array((samples, uint32), dtype=np.uint32)
array of count rate [counts/second] arrays of length samples for each click channel
Empty array [] is returned in the case of error.
"""
# Sanity check: samples has valid value
if samples != 1:
if not isinstance(samples, int) or samples <= 0:
# self.log.error('get_counter(): invalid argument samples={0}. This argument must be a positive integer'
# ''.format(samples))
return []
# MORE SOPHISTICATED VERSION
# (WORKS TOO SLOWLY: PROBABLY BECAUSE OF SLOW INTEGER DIVISION OF LARGE INTEGERS)
#
# start_time = time.time()
# while time.time() - start_time < self._timeout:
# new_complete_bins = self._ctr.getCaptureDuration() // self._bin_width - self._last_read_bin
#
# self._overflow = new_complete_bins
# # self.log.error('new_complete_bins = {}'.format(new_complete_bins))
#
# if new_complete_bins < samples:
# time.sleep(self._bin_width_sec/2)
# continue
# elif new_complete_bins == samples:
# self._last_read_bin += new_complete_bins
# break
# else:
# # self.log.warn('Counter is overflowing. \n'
# # 'Software pulls data in too slowly and counter bins are too short, '
# # 'such that some bins are lost. \n'
# # 'Try reducing sampling rate or increasing oversampling')
# self._last_read_bin += new_complete_bins
# break
# Wait for specified number of samples (samples parameter) to be accumulated
#
# This approach is very naive and is more or less accurate for
# clock frequency below 50 Hz.
#
# For higher frequencies, the actual time sampling interval is determined
# by software delays (about 1 ms). Counter measurement overflows
# (most of the samples are over-written before software reads them in)
# but does not fail. The only problem here is that time axis on the count-trace
# graph is no longer accurate:
# the difference between consecutive tick labels is much smaller than the actual
# time interval between measured samples (about 1 ms)
time.sleep(samples * self._bin_width_sec)
# read-in most recent 'samples' samples
try:
count_array = self._counter.getData()[:, -samples:]
except NotImplementedError:
# self.log.error('get_counter() reading operation failed')
return []
except AttributeError:
# self.log.error('get_counter(): counter was not initialized')
return []
# Calculate count rate [count/sec]
count_rate_array = count_array / self._bin_width_sec
return count_rate_array
def get_counter_channels(self):
"""
Returns the list of click channel numbers.
:return: (list of int) list of click channel numbers
"""
return copy.deepcopy(self._channel_list)
def set_counter_channels(self, channel_list=None):
"""
Set click channel list.
Notice that this method only modifies internal variable _channel_list.
To apply the change to the counter, one has to call set_up_counter() again.
:param channel_list: (list of int) list of channels to count clicks on
:return: (list of int) actual list of click channels
"""
if channel_list is None:
return self.get_counter_channels()
# Sanity check:
all_channels = self._get_all_channels()
if not set(channel_list).issubset(set(all_channels)):
# self.log.error('set_counter_channels(): requested list of channels is invalid: '
# 'some channels are not present on the device.'
# 'requested list: {0} \n'
# 'available channels: {1}'
# ''.format(channel_list, all_channels))
return self.get_counter_channels()
# Apply changes to internal variable self._channel_list
self._channel_list = channel_list
# Sort channel numbers, such that channel order does not depend
# on order of numbers in the config file
self._channel_list.sort()
return self.get_counter_channels()
def _get_all_channels(self):
"""
Return list of all channels available on the device.
Positive/negative values correspond to rising/falling edge detection.
For example:
1 means 'rising edge on connector 1'
            -1 means 'falling edge on connector 1'
:return: (list of int) list of all available channel numbers,
including edge sign.
"""
try:
available_channel_tuple = list(
self._tagger.getChannelList(TT.TT_CHANNEL_RISING_AND_FALLING_EDGES)
)
# handle exception in the call (TT functions normally produce NotImplementedError)
except NotImplementedError:
# self.log.error('_get_all_channels(): communication with the device failed')
return []
# handle the case of self._tagger = None
except AttributeError:
# self.log.error('_get_all_channels(): _tagger is None. Initialize device first')
return []
return list(available_channel_tuple)
class Service(ServiceBase):
def exposed_set_up_clock(self, clock_frequency=None, clock_channel=None):
"""
Sets sample clock frequency for the Counter measurement.
:param clock_frequency: (float) sample clock frequency. If not given,
configuration value is used
:param clock_channel: ignored (internal timebase is used to generate
sample clock signal)
:return: (int) operation status code: 0 - OK
-1 - Error
"""
return self._module.set_up_clock(
clock_frequency=clock_frequency,
clock_channel=clock_channel
)
def exposed_set_up_counter(self, counter_channels=None, sources=None, clock_channel=None, counter_buffer=None):
"""
Configures the actual counter with a given clock.
(list of int) [optional] list of channels
to count clicks on. If not given, config value is used.
:param counter_buffer: (int) [optional] size of the memory buffer.
If not given, config value is used.
:param counter_channels: ignored
        This argument should not be used. The Counter GUI initializes its set of plot curves
        self.curves during its on_activate() method. It basically calls
        counter_hardware.get_counter_channels() and uses this list to init self.curves.
        Only after that can the user click the "Start" button, which will call set_up_counter().
        And since the GUI has already initialized its set of curves, the set of channels must not be
        modified here! Modifying it would cause the GUI to fail.
:param sources: ignored
:param clock_channel: ignored
:return: (int) operation status code: 0 - OK
-1 - Error
"""
return self._module.set_up_counter(
counter_channels=counter_channels,
sources=sources,
clock_channel=clock_channel,
counter_buffer=counter_buffer
)
def exposed_close_clock(self):
"""
Closes the clock.
:return: (int) error code: 0 - OK
-1 - Error
"""
return self._module.close_clock()
def exposed_close_counter(self):
"""
Closes the counter and cleans up afterwards.
:return: (int) error code: 0 - OK
-1 - Error
"""
        return self._module.close_counter()
def exposed_get_counter(self, samples=1):
"""
Returns the current counts per second of the counter.
:param samples: (int) [optional] number of samples to read in one go
(default is one sample)
:return: numpy.array((samples, uint32), dtype=np.uint32)
array of count rate [counts/second] arrays of length samples for each click channel
Empty array [] is returned in the case of error.
"""
res = self._module.get_counter(samples=samples)
return pickle.dumps(res)
def exposed_get_counter_channels(self):
"""
Returns the list of click channel numbers.
:return: (list of int) list of click channel numbers
"""
res = self._module.get_counter_channels()
return pickle.dumps(res)
|
[
"pickle.dumps",
"TimeTagger.Counter",
"time.sleep",
"copy.deepcopy"
] |
[((9264, 9305), 'time.sleep', 'time.sleep', (['(samples * self._bin_width_sec)'], {}), '(samples * self._bin_width_sec)\n', (9274, 9305), False, 'import time\n'), ((10017, 10050), 'copy.deepcopy', 'copy.deepcopy', (['self._channel_list'], {}), '(self._channel_list)\n', (10030, 10050), False, 'import copy\n'), ((15586, 15603), 'pickle.dumps', 'pickle.dumps', (['res'], {}), '(res)\n', (15598, 15603), False, 'import pickle\n'), ((15852, 15869), 'pickle.dumps', 'pickle.dumps', (['res'], {}), '(res)\n', (15864, 15869), False, 'import pickle\n'), ((4459, 4578), 'TimeTagger.Counter', 'TT.Counter', ([], {'tagger': 'self._tagger', 'channels': 'self._channel_list', 'binwidth': 'self._bin_width', 'n_values': 'self._buffer_size'}), '(tagger=self._tagger, channels=self._channel_list, binwidth=self.\n _bin_width, n_values=self._buffer_size)\n', (4469, 4578), True, 'import TimeTagger as TT\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Still needs to be refactored with cleaner code.
"""
# imports.
from dev0s.classes.config import *
from dev0s.classes import utils
from dev0s.classes.defaults.color import color, symbol
from dev0s.classes import console
from dev0s.classes.defaults.exceptions import Exceptions
# pip.
from datetime import datetime, timezone
import shutil, math
from PIL import Image as _Image_
"""
Notes.
All default files & formats must behave exactly the same as the native dict, bool, list etc.
There are lots of additions though. A dict and a Dictionary should be usable interchangeably, as if the user could not tell the difference (which would otherwise become apparent very quickly in some instances).
"""
# the format classes.
class Formats():
# variables.
digits = [0,1,2,3,4,5,6,7,8,9,]
str_digits = ["0","1","2","3","4","5","6","7","8","9"]
alphabet, capitalized_alphabet = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"], []
for i in alphabet: capitalized_alphabet.append(i.upper())
special_characters = ["±","§","!","@","€","#","£","$","¢","%","∞","^","&","ª","(",")","–","_","+","=","{","}","[","]",";",":","'",'"',"|","\\","//","?",">",".",",","<"]
# check & get format / instance.
def check(
nones=None,
booleans=None,
none_allowed_booleans=None,
strings=None,
none_allowed_strings=None,
integers=None,
none_allowed_integers=None,
bytes_=None,
none_allowed_bytes=None,
arrays=None,
none_allowed_arrays=None,
dictionaries=None,
none_allowed_dictionaries=None,
):
if nones != None:
for key,value in nones.items():
if value == None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [!null].")
if booleans != None:
for key,value in booleans.items():
if not isinstance(value, bool): raise ValueError(f"Invalid [{key}] format [{value}], required format is [bool].")
if none_allowed_booleans != None:
for key,value in none_allowed_booleans.items():
if not isinstance(value, bool) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [bool].")
if strings != None:
for key,value in strings.items():
if not isinstance(value, str): raise ValueError(f"Invalid [{key}] format [{value}], required format is [str].")
if none_allowed_strings != None:
for key,value in none_allowed_strings.items():
if not isinstance(value, str) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [str].")
if integers != None:
for key,value in integers.items():
if not isinstance(value, int): raise ValueError(f"Invalid [{key}] format [{value}], required format is [int].")
if none_allowed_integers != None:
for key,value in none_allowed_integers.items():
if not isinstance(value, int) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [int].")
if bytes_ != None:
for key,value in bytes_.items():
if not isinstance(value, bytes): raise ValueError(f"Invalid [{key}] format [{value}], required format is [bytes].")
if none_allowed_bytes != None:
for key,value in none_allowed_bytes.items():
if not isinstance(value, bytes) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [bytes].")
if arrays != None:
for key,value in arrays.items():
if not isinstance(value, list): raise ValueError(f"Invalid [{key}] format [{value}], required format is [list].")
if none_allowed_arrays != None:
for key,value in none_allowed_arrays.items():
if not isinstance(value, list) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [list].")
if dictionaries != None:
for key,value in dictionaries.items():
if not isinstance(value, dict): raise ValueError(f"Invalid [{key}] format [{value}], required format is [dict].")
if none_allowed_dictionaries != None:
for key,value in none_allowed_dictionaries.items():
if not isinstance(value, dict) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [dict].")
def get(value, serialize=False):
if value == None: return None
elif isinstance(value, bool):
if not serialize: return bool
else: return "bool"
elif isinstance(value, str):
if not serialize: return str
else: return "str"
elif isinstance(value, int):
if not serialize: return int
else: return "int"
elif isinstance(value, bytes):
if not serialize: return bytes
else: return "bytes"
elif isinstance(value, list):
if not serialize: return list
else: return "list"
elif isinstance(value, dict):
if not serialize: return dict
else: return "dict"
elif isinstance(value, Boolean) or value.__class__.__name__ == "Boolean":
if not serialize: return Boolean
else: return "Boolean"
elif isinstance(value, String) or value.__class__.__name__ == "String":
if not serialize: return String
else: return "String"
elif isinstance(value, Integer) or value.__class__.__name__ == "Integer":
if not serialize: return Integer
else: return "Integer"
elif isinstance(value, Bytes) or value.__class__.__name__ == "Bytes":
if not serialize: return Bytes
else: return "Bytes"
elif isinstance(value, Array) or value.__class__.__name__ == "Array":
if not serialize: return Array
else: return "Array"
elif isinstance(value, Dictionary) or value.__class__.__name__ == "Dictionary":
if not serialize: return Dictionary
else: return "Dictionary"
elif isinstance(value, FilePath) or value.__class__.__name__ == "FilePath":
if not serialize: return FilePath
else: return "FilePath"
elif isinstance(value, object):
if not serialize: return object
else: return "object"
else: raise ValueError(f"Unknown format [{value}].")
#
	# try to parse the variable to the expected format; when parsing fails it returns [default] if safe is enabled, otherwise it raises a ParseError.
def parse(
# the variable to parse (required) (#1).
variable,
# the expected format (required) (#2).
format=None,
# with safe disabled it throws a ParseError when the variable can't be parsed to the expected format.
safe=True,
# the default return value for when safe is enabled.
default=None,
):
if format in [bool, "bool", Boolean, "Boolean"]:
try:
return bool(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a bool from ({variable.__class__.__name__}) [{variable}].")
elif format in [int, "int"]:
try:
return int(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a int from ({variable.__class__.__name__}) [{variable}].")
elif format in [float, "float", Integer, "Integer"]:
try:
return float(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a float from ({variable.__class__.__name__}) [{variable}].")
elif format in [str, "str", String, "String"]:
try:
return str(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a str from ({variable.__class__.__name__}) [{variable}].")
elif format in [list, "list", Array, "Array"]:
if isinstance(variable, (list,Array)):
return variable
elif not isinstance(variable, (str, String)):
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse an array from ({variable.__class__.__name__}) [{variable}].")
try:
return ast.literal_eval(variable)
except:
try:
return json.loads(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse an array from ({variable.__class__.__name__}) [{variable}].")
elif format in [dict, "dict", Dictionary, "Dictionary"]:
if isinstance(variable, (dict,Dictionary)):
return variable
elif not isinstance(variable, (str, String)):
raise Exceptions.ParseError(f"Unable to parse a dict from ({variable.__class__.__name__}) [{variable}].")
try:
return ast.literal_eval(variable)
except:
try:
return json.loads(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a dict from ({variable.__class__.__name__}) [{variable}].")
else:
raise Exceptions.InvalidUsage(f"Specified format [{format}] is not a valid format option.")
#
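	# e.g. (illustrative): Formats.parse("[1, 2]", format=list) -> [1, 2];
	# Formats.parse("x", format=int, safe=True, default=0) -> 0.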
# initialize from default format to dev0s format.
def initialize(variable, file_paths=True):
if variable.__class__.__name__ in ["str","String"]:
if file_paths and "/" in variable and Files.exists(variable):
return FilePath(variable)
else:
return String(variable)
elif variable.__class__.__name__ in ["bool","Boolean"]:
return Boolean(variable)
elif variable.__class__.__name__ in ["int","float","Integer"]:
return Integer(variable)
elif variable.__class__.__name__ in ["dict","Dictionary"]:
return Dictionary(variable)
elif variable.__class__.__name__ in ["list","Array"]:
return Array(variable)
else:
return variable
#
# denitialize from dev0s formats to default format.
def denitialize(variable, file_paths=True):
if variable.__class__.__name__ in ["String"]:
return str(variable)
elif variable.__class__.__name__ in ["FilePath"]:
return str(variable)
elif variable.__class__.__name__ in ["Boolean"]:
return bool(variable)
elif variable.__class__.__name__ in ["Integer"]:
return variable.value
elif variable.__class__.__name__ in ["Dictionary", "ResponseObject", "OutputObject", "dict"]:
new = {}
for key,value in variable.items():
new[key] = Formats.denitialize(value, file_paths=file_paths)
return new
elif variable.__class__.__name__ in ["Array", "list"]:
new = []
for value in variable:
new.append(Formats.denitialize(value, file_paths=file_paths))
return new
else:
return variable
#
# the file path object class.
class FilePath(object):
def __init__(self, path, default=False, check=False, load=False):
# docs.
DOCS = {
"module":"FilePath",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
self.path = str(self.clean(path=str(path), raw=True))
if check == False and default == False and path != False:
if Files.directory(self.path) and self.path[len(self.path)-1] != '/': self.path += '/'
if check and os.path.exists(self.path) == False: raise FileNotFoundError(f"Path [{self.path}] does not exist.")
self.ownership = self.Ownership(path=self.path, load=load)
self.permission = self.Permission(path=self.path, load=load)
#
# - info:
def join(self, name=None, type="/"):
if type not in ["", "/"] and "." not in type:
type = "." + type
path = self.path
if path[len(path)-1] != "/": path += '/'
return FilePath("{}{}{}".format(path, name, type))
def name(self, path=None, remove_extension=False,):
if path == None: path = self.path
if path in [False, None]: return None
x = 1
if path[len(path)-1] == '/': x += 1
name = path.split('/')[len(path.split('/'))-x]
if remove_extension:
count = len(name.split("."))
if count > 1:
c, s = 0, None
for i in name.split("."):
if c < count-1:
if s == None: s = i
else: s += "."+i
c += 1
name = s
return name
def extension(self, name=None, path=None):
if path == None: path = self.path
# - check directory:
extension = None
if name == None and Files.directory(path): extension = 'dir'
else:
# - get extension:
try:
if name == None: name = self.name(path=path)
extension = name.split('.')[len(name.split('.'))-1]
except:
try:
name = self.name(path=path)
extension = name.split('.')[len(name.split('.'))-1]
except: extension = None
# - check image & video:
if extension in ["jpg", "png", "gif", "webp", "tiff", "psd", "raw", "bmp", "heig", "indd", "jpeg", "svg", "ai", "eps", "pdf"]: extension = "img"
elif extension in ["mp4", "m4a", "m4v", "f4v", "f4a", "m4b", "m4r", "f4b", "mov", "3gp", "3gp2", "3g2", "3gpp", "3gpp2", "h.263", "h.264", "hevc", "mpeg4", "theora", "3gp", "windows media 8", "quicktime", "mpeg-4", "vp8", "vp6", "mpeg1", "mpeg2", "mpeg-ts", "mpeg", "dnxhd", "xdcam", "dv", "dvcpro", "dvcprohd", "imx", "xdcam", "hd", "hd422"]: extension = "video"
return extension
def base(self,
# the path (leave None to use self.path) (param #1).
path=None,
# the dirs back.
back=1,
):
if path == None: path = self.path
return Files.base(path=path, back=back)
#
def basename(self, back=1, path=None):
if path == None: path = self.path
return self.name(path=self.base(back=back, path=path))
def size(self, format=str, mode="auto", path=None, options=["auto", "bytes", "kb", "mb", "gb", "tb"]):
def __size__(path):
total = 0
try:
# print("[+] Getting the size of", directory)
for entry in os.scandir(path):
if entry.is_file():
# if it's a file, use stat() function
total += entry.stat().st_size
elif entry.is_dir():
# if it's a directory, recursively call this function
total += __size__(entry.path)
except NotADirectoryError:
# if `directory` isn't a directory, get the file size then
return os.path.getsize(path)
except PermissionError:
# if for whatever reason we can't open the folder, return 0
return 0
return total
#
if path == None: path = self.path
if path != None: path = str(path)
return self.convert_bytes(__size__(path), format=format, mode=mode)
def space(self, format=str, mode="auto", path=None, options=["auto", "bytes", "kb", "mb", "gb", "tb"]):
if path == None: path = self.path
total, used, free = shutil.disk_usage(path)
total, used, free = self.convert_bytes(total, format=format, mode=mode), self.convert_bytes(used, format=format, mode=mode), self.convert_bytes(free, format=format, mode=mode)
return {
"total":total,
"used":used,
"free":free,
}
def convert_bytes(self, bytes:int, format=str, mode="auto", options=["auto", "bytes", "kb", "mb", "gb", "tb"]):
if format in [float, "float", "integer", "Integer", Integer]:
format = float
if (mode == "bytes" or mode == "bytes".upper()):
return float(bytes)
elif format in [int, "int", "integer", "Integer", Integer]:
format = int
if (mode == "bytes" or mode == "bytes".upper()):
return int(round(bytes,0))
if mode == "auto":
if int(bytes/1024**4) >= 10:
bytes = round(bytes/1024**4,2)
if format not in [int, float]:
bytes = '{:,} TB'.format(bytes)#.replace(',', '.')
elif int(bytes/1024**3) >= 10:
bytes = round(bytes/1024**3,2)
if format not in [int, float]:
bytes = '{:,} GB'.format(bytes)#.replace(',', '.')
elif int(bytes/1024**2) >= 10:
bytes = round(bytes/1024**2,2)
if format not in [int, float]:
bytes = '{:,} MB'.format(bytes)#.replace(',', '.')
elif int(bytes/1024) >= 10:
bytes = round(bytes/1024,2)
if format not in [int, float]:
bytes = '{:,} KB'.format(bytes)#.replace(',', '.')
else:
bytes = int(round(bytes,0))
if format not in [int, float]:
bytes = '{:,} Bytes'.format(bytes)#.replace(',', '.')
elif (mode == "bytes" or mode == "bytes".upper()):
bytes = int(round(bytes,0))
if format not in [int, float]:
bytes = '{:,} Bytes'.format(bytes)#.replace(',', '.')
elif mode == "kb" or mode == "kb".upper():
bytes = round(bytes/1024,2)
if format not in [int, float]:
bytes = '{:,} KB'.format(bytes)#.replace(',', '.')
elif mode == "mb" or mode == "mb".upper():
bytes = round(bytes/1024**2,2)
if format not in [int, float]:
bytes = '{:,} MB'.format(bytes)#.replace(',', '.')
elif mode == "gb" or mode == "gb".upper():
bytes = round(bytes/1024**3,2)
if format not in [int, float]:
bytes = '{:,} GB'.format(bytes)#.replace(',', '.')
elif mode == "tb" or mode == "tb".upper():
bytes = round(bytes/1024**4,2)
if format not in [int, float]:
bytes = '{:,} TB'.format(bytes)#.replace(',', '.')
else: raise Exceptions.InvalidUsage(f"Selected an invalid size format [{format}], options {options}.")
return bytes
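	# e.g. (illustrative): convert_bytes(10485760, mode="mb") returns "10.0 MB";
	# mode="auto" picks the largest unit for which the value is still >= 10.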
def exists(self,
# the path (leave None to use self.path) (#1).
path=None,
# root permission required.
sudo=False,
):
if path == None: path = self.path
path = gfp.clean(path=path, remove_double_slash=True, remove_last_slash=True)
path = str(path)
if not sudo:
return os.path.exists(str(path))
else:
try:
output = utils.__execute__(["sudo", "ls","-ld",path])
if "No such file or directory" in str(output):
return False
else: return True
except: return False
#
def mount(self,
# the path (leave None to use self.path) (#1).
path=None,
):
if path == None: path = self.path
path = gfp.clean(path=path, remove_double_slash=True, remove_last_slash=True)
return os.path.ismount(path)
#
def directory(self,
# the path (leave None to use self.path) (#1).
path=None,
):
if path == None: path = self.path
return Files.directory(path)
#
def mtime(self, format='%d-%m-%y %H:%M.%S', path=None):
if path == None: path = self.path
fname = pathlib.Path(path)
try: mtime = fname.stat().st_mtime
		except: mtime = fname.stat().st_ctime
if format in ['s', "seconds"]:
return mtime
else:
return Formats.Date().from_seconds(mtime, format=format)
def clean(self,
# the path (leave None to use self.path) (param #1).
path=None,
# the clean options.
remove_double_slash=True,
remove_first_slash=False,
remove_last_slash=False,
ensure_first_slash=False,
ensure_last_slash=False,
# return the path as a raw string.
raw=False,
):
if path == None: path = self.path
if not isinstance(path, (str, String)):
return path
path = str(path).replace("~",HOME)
while True:
if remove_double_slash and "//" in path: path = path.replace("//","/")
elif remove_first_slash and len(path) > 0 and path[0] == "/": path = path[1:]
elif remove_last_slash and len(path) > 0 and path[len(path)-1] == "/": path = path[:-1]
elif ensure_first_slash and len(path) > 0 and path[0] != "/": path = "/"+path
elif ensure_last_slash and len(path) > 0 and path[len(path)-1] != "/": path += "/"
else: break
if raw:
return path
else:
return FilePath(path)
def absolute(self,
# the path (leave None to use self.path) (param #1).
path=None,
):
if path == None: path = self.path
return FilePath(os.path.abspath(path))
# path to python module path.
def module(self, path=None):
if path == None: path = self.path
return gfp.clean(path=path, remove_double_slash=True, remove_last_slash=True, remove_first_slash=True).replace("/",".").replace(".py","").replace(".__init__", "").replace("__init__", "")
# serialize a requirements file.
def requirements(self, path=None, format="pip", include_version=True):
if format in ["pip3"]: format = "pip"
if format not in ["pip"]: raise ValueError(f"Invalid usage, format [{format}] is not a valid option, options: [pip].")
# pip requirements.
if format == "pip":
requirements = []
for i in Files.load(path).split("\n"):
if len(i) > 0 and i[0] != "#" and i not in [""," "]:
while True:
if len(i) > 0 and i[len(i)-1] in [" "]: i = i[:-1]
else: break
if " " not in i:
sid = None
for lid in ["==", ">=", "<="]:
if lid in i: sid = lid ; break
if sid != None:
if include_version:
requirements.append(i)
else:
requirements.append(i.split(sid)[0])
else:
requirements.append(i)
return requirements
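	# e.g. (illustrative): requirements("requirements.txt") could return
	# ["requests==2.31.0", "numpy"] when include_version is True.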
# - commands:
def delete(self,
# the path (leave None to use self.path) (param #1).
path=None,
# the options.
forced=False,
sudo=False,
silent=False,
):
if path == None: path = self.path
if silent: silent = ' 2> /dev/null'
else: silent = ""
if sudo: sudo = "sudo "
else: sudo = ""
options = " "
if forced:
options = " -f "
if Files.directory(path): options = " -fr "
elif Files.directory(path): options = " -r "
os.system(f"{sudo}rm{options}{path}{silent}")
def move(self,
# the to path (#1).
path=None,
# root permission required.
sudo=False,
# root permission required.
log_level=0,
):
return Files.move(
# the from & to path (#1 & #2).
self.path, path,
# root permission required.
sudo=sudo,
# root permission required.
log_level=log_level,
)
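		# note: the assignment below is never reached because of the return above.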
self.path = gfp.clean(path=path)
def copy(self,
# the to path (#1).
path=None,
# root permission required.
sudo=False,
# the active log level.
log_level=0,
# the exclude patterns.
exclude=[],
# update deleted files.
delete=True,
):
return Files.copy(
# the from & to path (#1 & #2).
self.path, path,
# root permission required.
sudo=sudo,
# the active log level.
log_level=log_level,
# the exclude patterns.
exclude=exclude,
# update deleted files.
delete=delete,)
def open(self, sudo=False):
if sudo: sudo = "sudo "
else: sudo = ""
if OS in ["macos"]:
os.system(f"{sudo}open {self.path}")
elif OS in ["linux"]:
os.system(f"{sudo}nautulis {self.path}")
else: raise Exceptions.InvalidOperatingSystem(f"Unsupported operating system [{OS}].")
def create(self,
# Option 1: (creating a directory)
# - boolean format:
directory=False,
# Option 2: (creating any file extension)
# - string format:
data="",
# Options:
# - integer format:
permission=None,
# - string format:
owner=None,
group=None,
# - boolean format:
sudo=False,
):
# - option 1:
if directory:
if sudo: os.system('sudo mkdir -p '+self.path)
else: os.system('mkdir -p '+self.path)
# - option 2:
elif data != None:
if sudo:
f = Files.File(path='/tmp/tmp_file', data=data)
f.save()
os.system(f"sudo mv {f.file_path.path} {self.path}")
else:
Files.File(path=self.path, data=data).save()
#with open
# - invalid option:
else: raise ValueError("Invalid option, either enable the [directory] boolean to create a directory, or specify [path] and [data] to create any file sort.")
# - default:
if owner != None or group != None: self.ownership.set(owner=owner, group=group, sudo=sudo)
if permission != None: self.permission.set(permission, sudo=sudo)
#
def check(self,
# Option 1: (creating a directory)
# - boolean format:
directory=False,
# Option 2: (creating any file extension)
# - string format:
data="",
# Options:
# - integer format:
permission=None,
# - string format:
owner=None,
group=None,
# - boolean format:
sudo=False,
silent=False,
recursive=False, # for directories only (for permission & ownership check)
):
# - option 1:
if not self.exists(sudo=sudo):
self.create(directory=directory, data=data, permission=permission, owner=owner, group=group, sudo=sudo)
else:
# - default:
self.ownership.check(owner=owner, group=group, sudo=sudo, silent=silent, recursive=recursive)
self.permission.check(permission=permission, sudo=sudo, silent=silent, recursive=recursive)
#
# support default str functions.
def split(self, path):
return Files.Array(self.path.split(str(path)))
def count(self, path):
return Formats.Integer(self.path.count(str(path)))
def replace(self, from_, to_):
return self.path.replace(str(from_), str(to_))
def lower(self, path):
return self.path.lower(str(path))
def upper(self, path):
return self.path.upper(str(path))
# support subscriptionable.
def __getitem__(self, index):
return self.path[Formats.denitialize(index)]
def __setitem__(self, index, value):
self.path[Formats.denitialize(index)] = str(value)
# support "+" & "-" .
def __add__(self, path):
if isinstance(path, str):
a=1
elif isinstance(path, self.__class__):
path = path.path
elif not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {path.__class__}.")
return self.path + path
def __sub__(self, path):
if isinstance(path, str):
a=1
elif isinstance(path, self.__class__):
path = path.path
elif not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {path.__class__}.")
return self.path.replace(path, "")
# support +.
def __concat__(self, path):
if isinstance(path, str):
a=1
elif isinstance(path, self.__class__):
path = path.path
elif not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {path.__class__}.")
return self.path + path
# support default iteration.
def __iter__(self):
return iter(self.path)
# support '>=' & '>' operator.
def __gt__(self, path):
if not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {path.__class__}.")
return len(self.path) > len(path.path)
def __ge__(self, path):
if not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {path.__class__}.")
return len(self.path) >= len(path.path)
# support '<=' & '<' operator.
def __lt__(self, path):
if not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {path.__class__}.")
return len(self.path) < len(path.path)
def __le__(self, path):
if not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {path.__class__}.")
return len(self.path) <= len(path.path)
# support '==' & '!=' operator.
def __eq__(self, path):
if isinstance(path, str):
return self.path == path
elif not isinstance(path, self.__class__):
return False
return self.path == path.path
def __ne__(self, path):
if isinstance(path, str):
return self.path != path
elif not isinstance(path, self.__class__):
return True
return self.path != path.path
# support 'in' operator.
def __contains__(self, path):
if isinstance(path, (list, Files.Array)):
for i in path:
if i in self.path:
return True
return False
else:
return path in self.path
#
# int representation.
def __repr__(self):
return str(self)
# str representation.
def __str__(self):
return str(self.path)
# int representation.
def __int__(self):
return int(self.path)
# float representation.
def __float__(self):
return float(self.path)
# bool representation.
def __bool__(self):
if self.path in [1.0, 1, "true", "True", "TRUE", True]:
return True
elif self.path in [0, 0.0, "false", "False", "FALSE", False]:
return False
else:
raise Exceptions.FormatError(f"Could not parse a bool from {self.__id__()}.")
# content count.
def __len__(self):
return len(self.path)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "FilePath"
#
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, path, load=False):
if isinstance(path, self.__class__):
path = path.path
self.path = gfp.clean(path=path)
self.ownership = self.Ownership(path=self.path, load=load)
self.permission = self.Permission(path=self.path, load=load)
return self
# return raw data.
def raw(self):
return self.path
# - objects:
class Ownership(object):
def __init__(self, path=None, load=False):
# docs.
DOCS = {
"module":"FilePath.Ownership",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
self.path = path
self.owner = None
self.group = None
if load:
				self.owner, self.group = self.get()
# - info:
def get(self, path=None):
if path == None: path = self.path
owner = pwd.getpwuid(os.stat(path).st_uid).pw_name
try:
group = grp.getgrgid(os.stat(path).st_gid).gr_name
except KeyError: # unknown group likely from different os / machine.
group = os.stat(path).st_gid
except Exception as e:
raise ValueError(f"Unable to retrieve the group of file {path}, error: {e}.")
return owner, group
def set(self,
# the permission (str) (#1).
owner=None,
# the group (str) (optional) (#2).
group=None,
# the path (optional) (overwrites self.path) (#3).
path=None,
# root permission required.
sudo=False,
# recursive.
recursive=False,
# silent.
silent=False,
):
if path == None: path = self.path
if group == None:
if OS in ["macos"]: group = "wheel"
elif OS in ["linux"]: group = "root"
else: raise ValueError("Unsupported operating system [{}].".format(OS))
silent_option = ""
if silent: silent_option = ' 2> /dev/null'
if recursive:
if sudo: os.system("sudo chown -R {} {} {}".format(owner+":"+group, path, silent_option))
else: os.system("chown -R {} {}".format(owner+":"+group, path))
else:
if sudo: os.system("sudo chown {} {} {}".format(owner+":"+group, path, silent_option))
else: os.system("chown {} {} {}".format(owner+":"+group, path, silent_option))
		def check(self, owner=None, group=None, sudo=False, silent=False, iterate=False, recursive=False, path=None): # combine [recursive] and [iterate] to walk & set all files in a directory and check them against the given ownership.
if path == None: path = self.path
if group == None:
if OS in ["macos"]: group = "wheel"
elif OS in ["linux"]: group = "root"
else: raise ValueError("Unsupported operating system [{}].".format(OS))
_owner_, _group_ = self.get(path=path)
if _owner_ != owner or _group_ != group:
self.set(owner=owner, group=group, sudo=sudo, silent=silent, recursive=recursive, path=path)
if recursive and iterate and Files.directory(self.path):
for dirpath, subdirs, files in os.walk(self.path):
for path in subdirs:
#print("DIRECTORY:",path)
#print("> FULL PATH NAME:",dirpath+"/"+path)
if path not in ["lost+found"]:
file_path = Formats.FilePath(dirpath+"/"+path)
file_path.ownership.check(owner=owner, group=group, sudo=sudo, silent=silent)
for path in files:
#print("FILE NAME:",path)
#print("> FULL PATH:",dirpath+"/"+path)
file_path = Formats.FilePath(dirpath+"/"+path)
file_path.ownership.check(owner=owner, group=group, sudo=sudo, silent=silent)
class Permission(object):
def __init__(self, path=None, load=False):
# docs.
DOCS = {
"module":"FilePath.Permission",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# defaults.
#self.__class__.__name__ = "Permission"
# init.
self.path = path
self.permission = None
if load: self.permission = self.get()
# - info:
def get(self, path=None):
if path == None: path = self.path
status = os.stat(path)
permission = oct(status.st_mode)[-3:]
return permission
def set(self,
# the permission (int) (#1).
permission=None,
# the path (optional) (overwrites self.path) (#2).
path=None,
# root permission required.
sudo=False,
# recursive.
recursive=False,
# silent.
silent=False,
):
if path == None: path = self.path
silent_option = ""
if silent: silent_option = ' 2> /dev/null'
if recursive:
if sudo: os.system("sudo chmod -R {} {} {}".format(permission, path, silent_option))
else: os.system("chmod -R {} {} {}".format(permission, path, silent_option))
else:
if sudo: os.system("sudo chmod {} {} {}".format(permission, path, silent_option))
else: os.system("chmod {} {} {}".format(permission, path, silent_option))
		def check(self, permission=None, sudo=False, silent=False, iterate=False, recursive=False, path=None): # combine [recursive] and [iterate] to walk & set all files in a directory and check them against the given permission.
if path == None: path = self.path
if self.get(path=path) != permission:
self.set(permission=permission, sudo=sudo, silent=silent, recursive=recursive, path=path)
if recursive and iterate and Files.directory(path):
for dirpath, subdirs, files in os.walk(path):
for path in subdirs:
#print("DIR NAME:",path)
#print("> FULL PATH:",dirpath+"/"+path)
if path not in ["lost+found"]:
file_path = Formats.FilePath(dirpath+"/"+path)
file_path.permission.check(permission=permission, sudo=sudo, silent=silent)
for path in files:
#print("FILE NAME:",path)
#print("> FULL PATH:",dirpath+"/"+path)
file_path = Formats.FilePath(dirpath+"/"+path)
file_path.permission.check(permission=permission, sudo=sudo, silent=silent)
#
# the string object class.
class String(object):
def __init__(self,
# the string's value (str) (#1).
string="",
# the path (str, FilePath) (param #2).
path=False,
# load the data on initialization.
load=False,
		# the default string (will be saved if the file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"String",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
self.string = str(string)
# path.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
		if default != None and not Files.exists(self.file_path.path): self.save(string=default)
if load: self.load()
#
def save(self, string=None, path=None, sudo=False):
if string == None: string = self.string
if path == None: path = self.file_path.path
utils.__check_memory_only__(path)
self.string = str(string)
return Files.save(path, str(string), format="str", sudo=sudo)
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
self.string = Files.load(self.file_path.path, format="str", sudo=sudo)
return self.string
def is_numerical(self):
for i in ["q", "w", "e", "r", "t", "y", "u", "i", "o", "p", "a", "s", "d", "f", "g", "h", "j", "k", "l", "z", "x", "c", "v", "b", "n", "m"]:
if i in self.string.lower(): return False
return True
def bash(self):
a = self.string.replace('(','\(').replace(')','\)').replace("'","\'").replace(" ","\ ").replace("$","\$").replace("!","\!").replace("?","\?").replace("@","\@").replace("$","\$").replace("%","\%").replace("^","\^").replace("&","\&").replace("*","\*").replace("'","\'").replace('"','\"')
return a
def identifier(self):
x = self.string.lower().replace(' ','-')
return x
def variable_format(self,
exceptions={
"smart_card":"smartcard",
"smart_cards":"smartcards" ,
"web_server":"webserver" ,
},
):
s, c = "", 0
for i in self.string:
try:
n = self.string[c+1]
except:
n = "none"
try:
p = self.string[c-1]
except:
p = "none"
if s != "" and i.lower() != i and str(n).lower() == str(n) and str(p).lower() == str(p):
s += "_"
s += i.lower()
c += 1
if s in list(exceptions.keys()):
return exceptions[s]
else:
return s
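	# e.g. (illustrative): String("HelloWorld").variable_format() -> "hello_world",
	# String("SmartCard").variable_format() -> "smartcard" (via the exceptions map).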
def class_format(self):
s, next_capital = "", False
for i in self.string:
if i == "_":
next_capital = True
elif next_capital:
s += i.upper()
else:
s += i
return s
def capitalized_scentence(self):
x = self.string.split(" ")
cap = [y.capitalize() for y in x]
return " ".join(cap)
def capitalized_word(self):
try:
new = self.string[0].upper()
c = 0
for i in self.string:
if c > 0: new += i
c += 1
return new
except IndexError: return self.string
def generate(self,
# the length of the generated string.
length=6,
# include digits.
digits=False,
# include capital letters.
capitalize=False,
# include special characters.
special=False,
):
charset = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]
if capitalize:
for i in ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]: charset.append(i.upper())
if digits: digits = ["1","2","3","4","5","6","7","8","9","0"]
else: digits = []
if special: special = ["!", "?", "&", "#","@", "*"]
else: special = []
s = ""
for i in range(length):
if len(digits) > 0 and random.randrange(1,101) <= 40:
s += digits[random.randrange(0, len(digits))]
elif len(special) > 0 and random.randrange(1,101) <= 10:
s += special[random.randrange(0, len(special))]
else:
s += charset[random.randrange(0, len(charset))]
return s
#
	# iterate a string (optionally backwards) to find the first occurrence of a character from the specified charset.
def first_occurence(self, charset=[" ", "\n"], reversed=False, string=None):
if string == None: string = self.string
if reversed:
c, space_newline_id = len(string)-1, ""
for _ in string:
char = string[c]
if char in charset:
a = 0
for i in charset:
if i == char: return i
c -= 1
return None
else:
c, space_newline_id = 0, ""
for _ in string:
char = string[c]
if char in charset:
a = 0
for i in charset:
if i == char: return i
c += 1
return None
# splice a string into before/after by a first occurence.
	# if include is True and both include_before and include_after are False, the slicer is included in the before part.
def before_after_first_occurence(self, slicer=" ", include=True, include_before=False, include_after=False, string=None):
if isinstance(slicer, list):
first = self.first_occurence(charset=slicer, string=string)
return self.before_after_first_occurence(slicer=first, include=include, include_before=include_before, include_after=include_after, string=string)
else:
if string == None: string = self.string
before, after, slice_count, slices, _last_ = "", "", string.count(slicer), 0, ""
for char in string:
if len(_last_) >= len(slicer): _last_ = _last_[1:]
_last_ += char
if _last_ == slicer:
slices += 1
if include:
if slices != slice_count or include_before:
before += char
elif include_after:
after += char
else:
before += char
elif slices > 0:
after += char
else:
before += char
return before, after
# splice a string into before/selected/after by a first occurence.
def before_selected_after_first_occurence(self, slicer=" ", string=None):
if string == None: string = self.string
before, selected, after, slice_count, open, _last_ = "", "", "", string.count(slicer), False, ""
selected_sliced_count = 0
for char in string:
if isinstance(slicer, str) and len(_last_) >= len(slicer): _last_ = _last_[1:]
elif isinstance(slicer, list) and len(_last_) >= len(slicer[selected_sliced_count]): _last_ = _last_[1:]
_last_ += char
if (isinstance(slicer, str) and _last_ == slicer) or (isinstance(slicer, list) and _last_ == slicer[selected_sliced_count]):
selected_sliced_count += 1
selected += char
if open: open = False
else: open = True
elif open:
after += char
else:
before += char
return before, selected, after
# splice a string into before/after by a last occurence.
	# if include is True and both include_before and include_after are False, the slicer is included in the before part.
def before_after_last_occurence(self, slicer=" ", include=True, include_before=False, include_after=False, string=None):
if string == None: string = self.string
before, after, slice_count, slices, _last_ = "", "", string.count(slicer), 0, ""
for char in string:
if len(_last_) >= len(slicer): _last_ = _last_[1:]
_last_ += char
if _last_ == slicer:
slices += 1
if include:
if slices != slice_count or include_before:
before += char
elif include_after:
after += char
else:
before += char
elif slices == slice_count:
after += char
else:
before += char
return before, after
# splice a string into before/selected/after by a last occurence.
def before_selected_after_last_occurence(self, slicer=" ", string=None):
if string == None: string = self.string
before, selected, after, slice_count, slices, _last_ = "", "", "", string.count(slicer), 0, ""
for char in string:
if len(_last_) >= len(slicer): _last_ = _last_[1:]
_last_ += char
if _last_ == slicer:
slices += 1
selected += char
elif slices == slice_count:
after += char
else:
before += char
return before, selected, after
	# get the first text between the 2 string identifiers [start, end] by depth.
# identifiers must be parameter number 1.
def between(self, identifiers=["{","}"], depth=1, include=True, string=None):
# vars.
if string == None: string = self.string
keep_last = [len(identifiers[0]), len(identifiers[1])]
last = ["", ""]
unadded = ""
s, open, opened, first_open = "", 0, False, False
# iterate.
for i in string:
# set last & unadded.
unadded += i
last[0] += i
last[1] += i
if len(last[0]) > keep_last[0]:
last[0] = str(String(last[0]).remove_first(1))
if len(last[1]) > keep_last[1]:
last[1] = str(String(last[1]).remove_first(1))
# check ids.
if last[0] == identifiers[0]:
open += 1
first_open = True
elif last[1] == identifiers[1]:
open -= 1
if open >= depth:
if include or open == depth:
if include and first_open:
s += identifiers[0]
unadded = ""
first_open = False
else:
s += unadded
unadded = ""
opened = True
if opened and open < depth:
if include:
s += unadded
unadded = ""
break
# remainders.
if unadded != "" and opened and open < depth:
if include:
s += unadded
unadded = ""
# handler.
return Formats.String(s)
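	# e.g. (illustrative): String("a {b {c} d} e").between(["{","}"]) -> "{b {c} d}"
	# (returned as a Formats.String; depth=1 and include=True are the defaults).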
#
	# get the text with between() & replace the matched slice with a new str.
def replace_between(self,
# the between identifiers (list) (#1).
identifiers=["{","}"],
# the new string (str) (#2).
to="",
# the identifiers depth.
depth=1,
# the optional string.
string=None,
):
update = False
if string == None:
update = True
string = self.string
sliced = self.between(identifiers, depth=depth, include=True, string=string)
string = string.replace(str(sliced), to)
if update:
self.string = string
return string
#
# increase version.
def increase_version(self):
# version 2.
#
path = "/tmp/increase_version"
Files.save(path, f"""version='{self.string}"""+"""' && echo $version | awk -F. -v OFS=. 'NF==1{print ++$NF}; NF>1{if(length($NF+1)>length($NF))$(NF-1)++; $NF=sprintf("%0*d", length($NF), ($NF+1)%(10^length($NF))); print}'""")
return subprocess.check_output([f"bash", path]).decode().replace("\n","")
# version 1.
#
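		# (note: this pure-Python fallback is unreachable due to the return above; kept for reference)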
old_version = self.string
base, _base_= [], old_version.split(".")
increase = True
for i in _base_:
base.append(int(i))
count = len(base)-1
for i in range(len(base)):
if increase:
if base[count] >= 9:
if count > 0:
base[count-1] += 1
base[count] = 0
increase = False
else:
base[count] += 1
break
else:
base[count] += 1
break
else:
if count > 0 and int(base[count]) >= 10:
base[count-1] += 1
base[count] = 0
increase = False
elif count == 0: break
count -= 1
version = ""
for i in base:
if version == "": version = str(i)
else: version += "."+str(i)
return version
# slice dict from string.
# get the first {} from the string by depth.
def slice_dict(self, depth=1):
return self.between(["{", "}"], depth=depth)
# slice array from string.
# get the first [] from the string by depth.
def slice_array(self, depth=1):
return self.between(["[", "]"], depth=depth)
# slice tuple from string.
# get the first () from the string by depth.
def slice_tuple(self, depth=1):
return self.between(["(", ")"], depth=depth)
# iterate chars.
# > for charcount, char in String.iterate_chars()
def iterate_chars(self):
charcount, items = 0, []
for char in self.string:
items.append([charcount, char])
charcount += 1
return items
def iterate_characters(self):
return self.iterate_chars()
# iterate lines.
# > for linecount, line in String.iterate_lines()
def iterate_lines(self):
linecount, items = 0, []
for line in self.string.split("\n"):
items.append([linecount, line])
linecount += 1
return items
# slice indent from string.
	# get the content between the \n{indent}
def indent(self, indent=4):
s = ""
for i in range(indent): s += " "
return s
def line_indent(self, line=""):
# get line indent.
line = line.replace(" ", " ")
if len(line) > 0 and " " in line:
line_indent = 0
for c in line:
if c in [" "]: line_indent += 1
else: break
else: line_indent = 0
return Formats.Integer(line_indent)
def slice_indent(self, indent=4, depth=1, string=None, remove_indent=True):
if string == None: string = self.string
string = string.replace(" ", " ")
s, open, opened, d = "", 0, False, 0
for line in string.split("\n"):
# get line indent.
if len(line) > 0 and " " in line:
line_indent = 0
for c in line:
if c in [" "]: line_indent += 1
else: break
else: line_indent = 0
# check indent match.
if (not opened and line_indent == indent) or (opened and line_indent >= indent):
if d >= depth:
if remove_indent:
s += line[indent:]+"\n"
else:
s += line+"\n"
opened = True
#elif len(line) > 0 and not opened and line_indent == indent:
# d += 1
elif len(line) > 0 and line_indent <= indent:
if opened:
break
else:
d += 1
return s
# get the first / last n characters of the string.
def first(self, count):
if isinstance(count, (int, float, Integer)):
count = int(count)
else:
count = len(count)
return self.string[:count]
def last(self, count):
if isinstance(count, (int, float, Integer)):
count = int(count)
else:
count = len(count)
if len(self.string) >= count:
return self.string[count:]
else:
return None
#
# remove first / last n characters of the string.
def remove_first(self, count):
if isinstance(count, (int, float, Integer)):
count = int(count)
else:
count = len(count)
removed = self.first(count)
self.string = self.string[count:]
return self.string
def remove_last(self, count):
if isinstance(count, (int, float, Integer)):
count = int(count)
else:
count = len(count)
removed = self.last(count)
self.string = self.string[:-count]
return self.string
#
# support default str functions.
def split(self, string):
if isinstance(string, (list, Array)):
if isinstance(string, Array): array = string.array
else: array = string
new, last, next_start = [], "", None
for i in self.string:
last += i
newslice = False
#l_next_start = None
for test in array:
if test in last:
if str(last[-len(test):]) == str(test):
#l_next_start = last[:-len(test)]
last = last[:-len(test)]
newslice = True
break
if newslice:
new.append(last)
last = ""
#if next_start == None: new.append(last)
#elif include:
# new.append(next_start+last)
# next_start = None
#if include and l_next_start != None:
# next_start = l_next_start
if last != "":
new.append(last)
return new
else:
return Files.Array(self.string.split(str(string)))
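	# Illustrative usage (a sketch): split on several separators at once by passing a list.
	#   String("a, b; c").split([", ", "; "])   # -> ["a", "b", "c"]
	#   String("a,b,c").split(",")              # -> Array(["a", "b", "c"])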
def count(self, string):
return Formats.Integer(self.string.count(str(string)))
def replace(self, from_, to_):
return self.string.replace(str(from_), str(to_))
	def lower(self, string=None):
		return self.string.lower()
	def upper(self, string=None):
		return self.string.upper()
# support "+" & "-" .
def __add__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {string.__class__}.")
return self.string + string
def __iadd__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {string.__class__}.")
self.string = self.string + string
return self
def __sub__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {string.__class__}.")
return self.string.replace(string, "")
def __isub__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {string.__class__}.")
self.string = self.string.replace(string, "")
return self
# support subscriptionable.
def __getitem__(self, index):
return self.string[Formats.denitialize(index)]
	def __setitem__(self, index, value):
		# strings are immutable, so rebuild the string with the new value at index.
		index = Formats.denitialize(index)
		self.string = self.string[:index] + str(value) + self.string[index+1:]
# support default iteration.
def __iter__(self):
return iter(self.string)
# support '>=' & '>' operator.
def __gt__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self.string) > len(string)
def __ge__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self.string) >= len(string)
# support '<=' & '<' operator.
def __lt__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self.string) < len(string)
def __le__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self.string) <= len(string)
# support '==' & '!=' operator.
def __eq__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
return False
return self.string == string
def __ne__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
return True
return self.string != string
# support +.
def __concat__(self, string):
if isinstance(string, (str)):
a=1
elif isinstance(string, self.__class__):
string = string.string
		elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not concat object {self.__class__} & {string.__class__}.")
return self.string + string
# support 'in' operator.
def __contains__(self, string):
if isinstance(string, (list, Files.Array)):
for i in string:
if str(i) in str(self.string):
return True
return False
else:
return str(string) in str(self.string)
#
# representation.
def __repr__(self):
return str(self)
# str representation.
def __str__(self):
return str(self.string)
# int representation.
def __int__(self):
return int(self.string)
# float representation.
def __float__(self):
return float(self.string)
# bool representation.
def __bool__(self):
return len(self.string) > 0
#if self.string in [1.0, 1, "true", "True", "TRUE", True]:
# return True
#elif self.string in [0, 0.0, "false", "False", "FALSE", False]:
# return False
#else:
# raise Exceptions.FormatError(f"Could not parse a bool from {self.__id__()}.")
# content count.
def __len__(self):
return len(self.string)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
	# object instance.
def instance(self):
return "String"
#
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, string):
if isinstance(string, (int, float)):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not assign object {self.__class__} & {string.__class__}.")
self.string = str(string)
return self
# return raw data.
def raw(self):
		return self.string
#
# the boolean object class.
class Boolean(object):
def __init__(self,
# the boolean's value (bool) (#1).
boolean=False,
# the path (str, FilePath) (param #2).
path=False,
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"Boolean",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(boolean, Formats.Boolean):
boolean = boolean.bool
# init.
self.bool = boolean
if self.bool in ["true", "True", "TRUE", True]: self.bool = True
else: self.bool = False
# path.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
		if default != None and not Files.exists(self.file_path.path): self.save(bool=default)
if load: self.load()
#
def save(self, bool=None, path=None, sudo=False):
		if bool == None: bool = self.bool
if path == None: path = self.file_path.path
utils.__check_memory_only__(path)
self.bool = bool
return Files.save(path, str(bool), format="str", sudo=sudo)
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
		# parse the loaded string back into a bool.
		self.bool = Files.load(self.file_path.path, format="str", sudo=sudo) in ["true", "True", "TRUE", True]
return self.bool
def string(self, true="True", false="False"):
if self.bool:
return true
else:
return false
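	# Illustrative usage (a sketch):
	#   Boolean("true").string(true="yes", false="no")   # -> "yes"
	#   Boolean("false").string()                        # -> "False"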
# native support.
def __index__(self):
return int(self)
# support '==' & '!=' operator.
def __eq__(self, boolean):
if isinstance(boolean, bool):
return self.bool == boolean
elif not isinstance(boolean, self.__class__):
return False
return self.bool == boolean.bool
def __ne__(self, boolean):
if isinstance(boolean, bool):
return self.bool != boolean
elif not isinstance(boolean, self.__class__):
return True
return self.bool != boolean.bool
# support default iteration.
def __iter__(self):
return iter(str(self.bool))
# support 'in' operator.
def __contains__(self, string):
return string in str(self.bool)
#
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(self.bool)
# int representation.
def __int__(self):
if self.bool:
return 1
else:
return 0
# float representation.
def __float__(self):
if self.bool:
return 1.0
else:
return 0.0
# bool representation.
def __bool__(self):
return self.bool
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Boolean"
#
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, boolean):
if isinstance(boolean, (int, float)):
a=1
		elif isinstance(boolean, self.__class__):
boolean = boolean.bool
elif not isinstance(boolean, self.__class__):
raise Exceptions.FormatError(f"Can not assign object {self.__class__} & {boolean.__class__}.")
self.bool = boolean
return self
# return raw data.
def raw(self):
return self.bool
#
# the integer object class.
class Integer(object):
def __init__(self,
# the integers value (int, float) (param #1).
value=0,
# the path (str, FilePath) (param #2).
path=False,
# the integer format (str) (param #3).
format="auto",
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"Integer",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(value, Formats.Integer):
if "." in str(value):
value = value.float
else:
value = value.int
# init.
if "." in str(value):
self.format = "float"
self.value = float(value)
else:
self.format = "int"
self.value = int(value)
self.int = int(value)
self.float = float(value)
# path.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
		if default != None and not Files.exists(self.file_path.path): self.save(data=default)
if load: self.load()
#
def save(self, data=None, path=None, sudo=False):
		if data == None: data = self.raw()
if path == None: path = self.file_path.path
utils.__check_memory_only__(path)
if data != self.raw():
self.assign(data)
return Files.save(path, str(data), format="str", sudo=sudo)
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
data = Files.load(self.file_path.path, format="str", sudo=sudo)
self.assign(data)
return data
def increase_version(self):
# version 1.
#
old_version = self.value
		base, _base_ = [], str(old_version).split(".")
increase = True
for i in _base_:
base.append(int(i))
count = len(base)-1
for i in range(len(base)):
if increase:
if base[count] >= 9:
if count > 0:
base[count-1] += 1
base[count] = 0
increase = False
else:
base[count] += 1
break
else:
base[count] += 1
break
else:
if count > 0 and int(base[count]) >= 10:
base[count-1] += 1
base[count] = 0
increase = False
elif count == 0: break
count -= 1
version = ""
for i in base:
if version == "": version = str(i)
else: version += "."+str(i)
return version
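	# Illustrative behaviour (a sketch; assumes self.value holds a dotted version such as "1.0.9"):
	#   "1.0.1" -> "1.0.2"
	#   "1.0.9" -> "1.1.0"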
def round(self, decimals):
"""
		Returns the value rounded to a specific number of decimal places.
"""
if not isinstance(decimals, int):
raise TypeError("decimal places must be an integer")
else: return round(self.value, decimals)
def round_down(self, decimals):
"""
Returns a value rounded down to a specific number of decimal places.
"""
if not isinstance(decimals, int):
raise TypeError("decimal places must be an integer")
elif decimals < 0:
raise ValueError("decimal places has to be 0 or more")
elif decimals == 0:
			return math.floor(self.value)
factor = 10 ** decimals
return math.floor(self.value * factor) / factor
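	# Illustrative usage (a sketch):
	#   Integer(3.14159).round(2)      # -> 3.14
	#   Integer(2.759).round_down(2)   # -> 2.75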
#
def generate(self, length=6):
return utils.generate.pincode(length=length)
#
# int format.
def __index__(self):
return self.value
# support "+, -, *, %, @, /, //, **" .
def __add__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value + value)
def __sub__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not sub object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value - value)
def __iadd__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {value.__class__}.")
self.value += value
return self
def __isub__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not sub object {self.__class__} & {value.__class__}.")
self.value -= value
return self
def __mod__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mod object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value % value)
def __mul__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value * value)
def __pow__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value ** value)
def __div__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value / value)
def __truediv__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value / value)
def __floordiv__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value // value)
def __concat__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value + value)
# support "+=" & "-=".
def __pos__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value + value)
def __matmul__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not matmul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value @ value)
# support //.
#def __floordiv__(a, b)
# return a // b.
# support default iteration.
def __iter__(self):
return iter(str(self.value))
# support '>=' & '>' operator.
def __gt__(self, integer):
if isinstance(integer, (int,float)):
integer = integer
elif not isinstance(integer, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {integer.__class__}.")
else:
integer = integer.value
return self.value > integer
def __ge__(self, integer):
if isinstance(integer, (int,float)):
integer = integer
elif not isinstance(integer, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {integer.__class__}.")
else:
integer = integer.value
return self.value >= integer
# support '<=' & '<' operator.
def __lt__(self, integer):
if isinstance(integer, (int,float)):
integer = integer
elif not isinstance(integer, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {integer.__class__}.")
else:
integer = integer.value
return self.value < integer
def __le__(self, integer):
if isinstance(integer, (int,float)):
integer = integer
elif not isinstance(integer, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {integer.__class__}.")
else:
integer = integer.value
return self.value <= integer
# support '==' & '!=' operator.
def __eq__(self, integer):
if isinstance(integer, (int,float)):
return self.value == integer
elif not isinstance(integer, self.__class__):
return False
return self.value == integer.value
def __ne__(self, integer):
if isinstance(integer, (int,float)):
return self.value != integer
elif not isinstance(integer, self.__class__):
return True
return self.value != integer.value
# support 'in' operator.
	def __contains__(self, integer):
		if isinstance(integer, (list, Files.Array)):
			for i in integer:
				if str(i) in str(self.value):
					return True
			return False
		else:
			return str(integer) in str(self.value)
#
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(self.value)
# int representation.
def __int__(self):
return self.int
# float representation.
def __float__(self):
if self.format == "float":
return self.float
else:
return float(self.int)
# bool representation.
def __bool__(self):
if self.value in [1.0, 1]:
return True
elif self.value in [0, 0.0]:
return False
else:
raise Exceptions.FormatError(f"Could not parse a bool from {self.__id__()}.")
# content count.
def __len__(self):
return len(str(self.value))
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Integer"
#
# support self assignment.
def assign(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not assign object {self.__class__} & {value.__class__}.")
self.value = value
return self
# return raw data.
def raw(self):
return self.value
#
# the date object class.
class Date(object):
def __init__(self,
#
# Leave all parameters None to initialize a Date() object with the current date.
# Pass another Date object, str repr or timestamp in seconds to initialize a Date object from that timestamp.
#
# the date parameter (str, int, Date) (optional) (#1).
date=None,
# the format for the date (leave None to parse the date format automatically) (str).
format=None,
):
# docs.
DOCS = {
"module":"Date",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# formats.
self.default_format = "%d-%m-%y %H:%M:%S" # is Date() str repr
self.seconds_format = '%S'
self.minute_format = '%M'
self.hour_format = '%H'
self.day_format = '%d'
self.day_name_format = '%A'
self.week_format = '%V'
self.month_format = '%m'
self.month_name_format = '%h'
self.year_format = '%Y'
self.date_format = '%d-%m-%y'
self.timestamp_format = '%d-%m-%y %H:%M'
self.shell_timestamp_format = '%d_%m_%y-%H_%M'
self.seconds_timestamp_format = '%d-%m-%y %H:%M:%S'
self.shell_seconds_timestamp_format = '%d_%m_%y-%H_%M_%S'
self.formats = [
self.shell_seconds_timestamp_format,
self.seconds_timestamp_format,
self.shell_timestamp_format,
self.timestamp_format,
self.date_format,
self.year_format,
self.seconds_format,
self.minute_format,
self.hour_format,
self.day_format,
self.day_name_format,
self.week_format,
self.month_format,
self.month_name_format,
]
# assign
if date == None:
self.initialize()
else:
self.assign(date, format=format)
#
def initialize(self,
#
# Leave all parameters None to initialize a Date() object with the current date.
#
# Initialize a future / previous date.
# option 1:
# specify the timestamp to initialize a previous / future date (format required).
timestamp=None,
# the timestamp format (leave None to parse).
format=None,
# options 2:
# initialize by seconds.
seconds=None,
# option 3:
# define the datetime object.
datetime_obj=None,
):
# defaults.
#self.__class__.__name__ = "Date"
# by datetime_obj
if datetime_obj != None:
seconds = time.mktime(datetime_obj.timetuple())
today = datetime.fromtimestamp(float(seconds))
# by timestamp & format.
elif timestamp != None:
if format == None:
format = self.parse_format(timestamp)
if format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from timestamp [{timestamp}]. Find out what the required format is and request a commit that updates the Date().parse_format() function with the required format (https://github.com/vandenberghinc/dev0s/).")
seconds = time.mktime(datetime.strptime(str(timestamp), str(format)).timetuple())
today = datetime.fromtimestamp(float(seconds))
# by seconds.
elif seconds != None:
today = datetime.fromtimestamp(float(seconds))
# by current.
else:
today = datetime.today()
# attributes.
self.seconds = str(today.strftime(self.seconds_format))
self.minute = str(today.strftime(self.minute_format))
self.hour = str(today.strftime(self.hour_format))
self.day = str(today.strftime(self.day_format))
self.day_name = str(today.strftime(self.day_name_format))
self.week = str(today.strftime(self.week_format))
self.month = str(today.strftime(self.month_format))
self.month_name = str(today.strftime(self.month_name_format))
self.year = str(today.strftime(self.year_format))
self.date = str(today.strftime(self.date_format))
self.timestamp = str(today.strftime(self.timestamp_format))
self.shell_timestamp = str(today.strftime(self.shell_timestamp_format))
self.seconds_timestamp = str(today.strftime(self.seconds_timestamp_format))
self.shell_seconds_timestamp = str(today.strftime(self.shell_seconds_timestamp_format))
self.time = self.hour + ":" + self.minute
return self
def compare(self, comparison=None, current=None, format=None):
if current == None: current = str(self)
if isinstance(comparison, Formats.Date):
comparison = str(comparison)
if isinstance(current, Formats.Date):
current = str(current)
if format == None:
comparison_format = self.parse_format(comparison)
if comparison_format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from comparison [{comparison}].")
else:
comparison_format = format
comparison = self.to_seconds(comparison, format=comparison_format)
if format == None:
current_format = self.parse_format(current)
if current_format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from current [{current}].")
else:
current_format = format
current = self.to_seconds(current, format=current_format)
		if comparison > current:
			return "future"
		elif comparison < current:
			return "past"
		elif comparison == current:
			return "present"
else:
raise ValueError(f"Unexpected error, comparison seconds: {comparison} current seconds: {current}.")
def increase(self, string=None, weeks=0, days=0, hours=0, minutes=0, seconds=0, format=None):
if string == None: string = str(self)
if isinstance(string, Formats.Date):
string = str(string)
if format == None:
format = self.parse_format(string)
if format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from string [{string}].")
seconds += 60*minutes
seconds += 3600*hours
seconds += 3600*24*days
seconds += 3600*24*7*weeks
s = self.to_seconds(string, format=format)
s += seconds
return self.from_seconds(s, format=format)
def decrease(self, string=None, weeks=0, days=0, hours=0, minutes=0, seconds=0, format=None):
if string == None: string = str(self)
if isinstance(string, Formats.Date):
string = str(string)
if format == None:
format = self.parse_format(string)
if format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from string [{string}].")
seconds += 60*minutes
seconds += 3600*hours
seconds += 3600*24*days
seconds += 3600*24*7*weeks
s = self.to_seconds(string, format=format)
s -= seconds
return self.from_seconds(s, format=format)
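	# Illustrative usage (a sketch; the default str repr is "%d-%m-%y %H:%M:%S"):
	#   Date("01-01-21 12:00:00").increase(days=1)    # -> "02-01-21 12:00:00"
	#   Date("01-01-21 12:00:00").decrease(hours=6)   # -> "01-01-21 06:00:00"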
def to_seconds(self, string=None, format=None):
if string == None: string = str(self)
if isinstance(string, Formats.Date):
string = str(string)
if format == None:
format = self.default_format
return time.mktime(datetime.strptime(str(string), str(format)).timetuple())
#
def from_seconds(self, seconds, format=None):
if isinstance(seconds, (str,String,Integer)):
seconds = float(seconds)
if format == None:
format = self.default_format
return Date(datetime.fromtimestamp(float(seconds)).strftime(format))
#
def convert(self, string=None, datetime_obj=None, input=None, output="%Y%m%d"):
if datetime_obj == None:
if string == None: string = str(self)
if isinstance(string, Formats.Date):
string = str(string)
if input == None:
input = self.parse_format(string)
datetime_obj = datetime.strptime(str(string), str(input))
return datetime_obj.strftime(str(output))
def parse_format(self, string):
if isinstance(string, Formats.Date):
return self.default_format
elif isinstance(string, (int,float,Integer)):
return self.seconds_format
formats = []
if "-" in str(string):
formats += [
self.shell_seconds_timestamp_format,
self.seconds_timestamp_format,
self.shell_timestamp_format,
self.timestamp_format,
self.date_format,
]
else:
formats += [
self.year_format,
self.seconds_format,
#self.minute_format,
#self.hour_format,
#self.day_format,
#self.day_name_format,
#self.week_format,
#self.month_format,
#self.month_name_format,
]
# plus some custom formats.
formats += [
"%d-%m-%y %H:%M.%S", # old default.
"%Y-%m-%d %H:%M:%S", # stock market
"%d-%m-%Y", # dd-mm-yyyy.
"%d-%m-%y %H:%M:%S", # dd-mm-yy hh:mm:ss.
"%d-%m-%Y %H:%M:%S", # dd-mm-yyyy hh:mm:ss.
"%Y-%m-%dT%H:%M:%SZ", # rfc-3339.
"%Y-%m-%d",
]
for format in formats:
try:
datetime.strptime(str(string), str(format))
return format
except Exception as e:
a=1
return None
def assign(self, string, format=None):
		if isinstance(string, Formats.Date):
			# copy the other Date by its timestamp (rebinding the local "self" would not mutate this instance).
			self.initialize(seconds=float(string))
			return self
else:
if format == None:
format = self.parse_format(string)
if format == None:
raise Exceptions.ParseError(f"Unable to parse a Date() object from string [{string}].")
if format == self.seconds_format:
self.initialize(seconds=float(string))
else:
self.initialize(timestamp=string, format=format)
return self
# normalize seconds to 10s or 1m etc.
def normalize_seconds(self, seconds:(int,float)):
if seconds < 0:
raise ValueError("Can not normalize negative seconds.")
if seconds < 0.01:
return f'{int(seconds*1000)}ms'
elif seconds <= 60:
return f'{int(seconds)}s'
elif seconds <= 60*60:
return f'{round(seconds/60,1)}m'
elif seconds <= 60*60*24:
return f'{round(seconds/(60*60),1)}h'
elif seconds <= 60*60*24*30:
return f'{round(seconds/(60*60*24),1)}d'
elif seconds <= 60*60*24*30*12:
return f'{round(seconds/(60*60*24*30),1)}m'
else:
return f'{round(seconds/(60*60*24*30*12),1)}y'
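	# Illustrative usage (a sketch):
	#   Date().normalize_seconds(0.005)   # -> "5ms"
	#   Date().normalize_seconds(90)      # -> "1.5m"
	#   Date().normalize_seconds(7200)    # -> "2.0h"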
# convert to datetime object.
def datetime(self, timestamp=None):
# set defaults.
if timestamp == None: timestamp = str(self)
# parse format.
seconds = isinstance(timestamp, (int, float))
# by timestamp & format.
if not seconds:
format = self.parse_format(timestamp)
if format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from timestamp [{timestamp}]. Find out what the required format is and request a commit that updates the Date().parse_format() function with the required format (https://github.com/vandenberghinc/dev0s/).")
seconds = time.mktime(datetime.strptime(str(timestamp), str(format)).timetuple())
return datetime.fromtimestamp(float(seconds))
# by seconds.
else:
return datetime.fromtimestamp(float(seconds))
# convert to rfc_3339 format.
def rfc_3339(self, timestamp=None):
# convert.
return self.datetime(timestamp=timestamp).isoformat('T') + "Z"
#
# convert to utc format.
def utc(self, timestamp=None):
# convert.
return self.datetime(timestamp=timestamp).replace(tzinfo=timezone.utc)
#
# support default iteration.
def __iter__(self):
		return iter([self.year, self.month, self.week, self.hour, self.minute, self.seconds])
# support '>=' & '>' operator.
def __gt__(self, date):
if not isinstance(date, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {date.__class__}.")
return float(self) > float(date)
def __ge__(self, date):
if not isinstance(date, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {date.__class__}.")
return float(self) >= float(date)
# support '<=' & '<' operator.
def __lt__(self, date):
if not isinstance(date, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {date.__class__}.")
return float(self) < float(date)
def __le__(self, date):
if not isinstance(date, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {date.__class__}.")
return float(self) <= float(date)
# support '==' & '!=' operator.
def __eq__(self, date):
if not isinstance(date, self.__class__):
return False
return float(self) == float(date)
def __ne__(self, date):
if not isinstance(date, self.__class__):
return True
return float(self) != float(date)
# support 'in' operator.
def __contains__(self, string):
if isinstance(string, (list, Files.Array)):
for i in string:
if i in str(self):
return True
return False
else:
return string in str(self)
# support "+", -, =-, =+" .
def __add__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
		elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {add.__class__}.")
return Date(self.to_seconds() + add)
def __iadd__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not iadd object {self.__class__} & {add.__class__}.")
self = Date(self.to_seconds() + add)
return self
def __sub__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not sub object {self.__class__} & {add.__class__}.")
return Date(self.to_seconds() - add)
def __isub__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not isub object {self.__class__} & {add.__class__}.")
self = Date(self.to_seconds() - add)
return self
# support +.
def __concat__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not sub object {self.__class__} & {add.__class__}.")
return Date(self.to_seconds() - add)
# representation.
def __repr__(self):
return str(self)
#
# int representation.
def __int__(self):
return int(self.to_seconds(self.seconds_timestamp, format=self.seconds_timestamp_format))
# float representation.
def __float__(self):
return float(self.to_seconds(self.seconds_timestamp, format=self.seconds_timestamp_format))
# str representation.
def __str__(self):
return str(self.seconds_timestamp)
# content count.
def __len__(self):
return len(self.seconds_timestamp)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Date"
#
#
# the files class.
class Files():
#
# functions.
def join(path=None, name=None, type=""):
if type not in ["", "/"] and "." not in type:
type = "." + type
path = str(path)
if os.path.exists(path) and Files.directory(path) and path[len(path)-1] != "/": path += '/'
return gfp.clean("{}{}{}".format(path, name, type), remove_double_slash=True, remove_last_slash=False)
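	# Illustrative usage (a sketch; the trailing slash is only appended when path is an existing directory):
	#   Files.join("/tmp/", "example", "txt")   # -> "/tmp/example.txt"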
def load(path, data="not to be used", format="str", raw=False, sudo=False): # keep data as second param to prevent save load errors.
# correct format.
if format in [str, String, "String", "string", "file"]: format = "str"
if format in [dict, Dictionary, "Dictionary", "dict", "array", "Array"]: format = "json"
if format in [bytes, Bytes, "Bytes"]: format = "bytes"
#format = str(format)
# match format.
path = str(path)
data = None
# sudo.
if sudo:
data = utils.__execute__(["sudo", "cat", path])
if "No such file or directory" in data: raise FileNotFoundError(f"File [{path}] does not exist.")
# proceed.
if format == "str":
if not sudo:
file = open(path,mode='rb')
data = file.read().decode()
file.close()
elif format == "json":
if not sudo:
try:
with open(path, 'r+') as json_file:
data = json.load(json_file)
except json.decoder.JSONDecodeError as e:
try:
data = ast.literal_eval(Files.load(path=path, format="str", raw=True, sudo=sudo))
except:
e = f"Unable to decode file [{path}] (sudo: {sudo}), error: {e}."
raise Exceptions.JSONDecodeError(e)
else:
try:
data = json.loads(data)
except json.decoder.JSONDecodeError as e:
try:
data = ast.literal_eval(Files.load(path=path, format="str", raw=True, sudo=sudo))
except:
e = f"Unable to decode file [{path}] (sudo: {sudo}), error: {e}."
raise Exceptions.JSONDecodeError(e)
elif format == "bytes":
if not sudo:
with open(path, "rb") as file:
data = file.read()
else:
data = data.encode()
else: raise ValueError(f"Unknown format {format}.")
if raw: return data
else: return Formats.initialize(data)
def save(
# the path (str) (#1).
path,
# the data (str, dict, list) (#2).
data,
# the file format, options: [str, bytes, json].
format="str",
# root permission required.
sudo=False,
# json options.
indent=4,
ensure_ascii=False,
# create backups.
backups=False,
# warning: safe True keeps infinitely trying to save the doc when an KeyboardInterrupt is raised by the user.
safe=True,
# system functions.
__loader__=None,
__checks__=True,
__keyboard_interrupt__=False,
__attempt__=1,
__real_path__=None,
):
if __checks__:
# correct format.
if format in [str, String, "String", "string", "file"]: format = "str"
if format in [dict, Dictionary, "Dictionary", "dict", "array"]: format = "json"
if format in [bytes, Bytes, "Bytes"]: format = "bytes"
#format = str(format)
# match format.
path = gfp.clean(str(path), remove_double_slash=True, remove_last_slash=False)
if sudo:
__real_path__ = str(path)
tmp_path = path = f"/tmp/{String().generate(length=12)}"
data = Formats.denitialize(data)
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
path = str(path)
if format == "str":
file = open(path, "w+")
file.write(data)
file.close()
elif format == "json":
if __checks__:
try:
test = json.dumps(data)
except:
raise Exceptions.JSONDecodeError(f"Unable to dump expected json data: {data}")
try:
with open(path, 'w+') as json_file:
json.dump(data, json_file, ensure_ascii=ensure_ascii, indent=indent)
except PermissionError:
with open(path, 'w') as json_file:
json.dump(data, json_file, ensure_ascii=ensure_ascii, indent=indent)
except KeyboardInterrupt as e:
if __loader__ == None:
__loader__ = console.Loader(f"&RED&Do not interrupt!&END& Saving file [{path}] (attempt: {__attempt__}).")
if __attempt__ >= 100:
__loader__.stop(success=False)
raise KeyboardInterrupt(e)
return Files.save(
path, data,
format=format,
sudo=sudo,
indent=indent,
ensure_ascii=ensure_ascii,
backups=False,
safe=safe,
__loader__=__loader__,
__checks__=False,
__keyboard_interrupt__=str(e),
__attempt__=__attempt__+1,
__real_path__=__real_path__,)
elif format == "bytes":
with open(path, "wb") as file:
file.write(data)
else: raise ValueError(f"Unknown format {format}.")
if sudo:
if Files.directory(path) and path[len(path)-1] != "/":
path += "/"
if __real_path__[len(__real_path__)-1] != "/": __real_path__ += "/"
os.system(f"sudo rsync -aq {gfp.clean(path)} {gfp.clean(__real_path__)} && rm -fr {tmp_path}")
#print(f"sudo mv {gfp.clean(path)} {gfp.clean(__real_path__}")
#os.system(f"sudo mv {gfp.clean(path)} {gfp.clean(__real_path__}")
# os.system(f"sudo rsync -aq {gfp.clean(path)} {gfp.clean(__real_path__} && rm -fr {tmp_path}")
#else:
# os.system(f"sudo rsync -ogq {gfp.clean(path)} {gfp.clean(__real_path__} && rm -fr {tmp_path}")
if __keyboard_interrupt__ != False:
if __loader__ != None:
__loader__.stop()
raise KeyboardInterrupt(__keyboard_interrupt__)
def delete(
# the path (param #1).
path=None,
# root permission required.
sudo=False,
# forced mode.
forced=False,
# hide logs.
silent=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
path = str(path)
return gfp.delete(path=path, forced=forced, sudo=sudo, silent=silent)
def chmod(
# the path (param #1).
path=None,
# the new permission.
permission=None,
# recursive for entire dir.
recursive=False,
# root permission required.
sudo=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
if permission == None: raise Exceptions.InvalidUsage("Define parameter: permission.")
path = str(path)
return gfp.permission.set(path=path, permission=permission, recursive=recursive, sudo=sudo)
def chown(
# the path (param #1).
path=None,
# the new owner.
owner=None,
# the new group (optional).
group=None,
# recursive for entire dir.
recursive=False,
# root permission required.
sudo=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
if owner == None: raise Exceptions.InvalidUsage("Define parameter: owner.")
path = str(path)
return gfp.ownership.set(path=path, owner=owner, group=group, recursive=recursive, sudo=sudo)
def exists(path=None, sudo=False):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
return gfp.exists(path=path, sudo=sudo)
#
def clean(
# the path (leave None to use self.path) (param #1).
path=None,
# the clean options.
remove_double_slash=True,
remove_first_slash=False,
remove_last_slash=False,
ensure_first_slash=False,
ensure_last_slash=False,
):
if path == None:
raise ValueError("Define parameter: path.")
path = str(path).replace("~",HOME)
while True:
if remove_double_slash and "//" in path: path = path.replace("//","/")
elif remove_first_slash and len(path) > 0 and path[0] == "/": path = path[1:]
elif remove_last_slash and len(path) > 0 and path[len(path)-1] == "/": path = path[:-1]
elif ensure_first_slash and len(path) > 0 and path[0] != "/": path = "/"+path
elif ensure_last_slash and len(path) > 0 and path[len(path)-1] != "/": path += "/"
else: break
return path
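	# Illustrative usage (a sketch):
	#   Files.clean("//tmp//dir/", remove_last_slash=True)   # -> "/tmp/dir"
	#   Files.clean("tmp/dir", ensure_first_slash=True)      # -> "/tmp/dir"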
def directory(
# the path (#1).
path=None,
# root permission required.
sudo=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
path = Files.clean(path=path, remove_double_slash=True, remove_last_slash=True)
path = str(path)
return os.path.isdir(path)
#
def mounted(
# the path (#1).
path=None,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
path = gfp.clean(path=path, remove_double_slash=True, remove_last_slash=True)
path = str(path)
return os.path.ismount(path)
#
def create(
# the path to the file (str) (required) (#1).
path=None,
# the data (str) (optional).
data=None,
# path is directory (bool).
directory=False,
# the owner (str) (optional).
owner=None,
# the group (str) (optional).
group=None,
# the permission (int) (optional).
permission=None,
# root permission required.
sudo=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
		elif Files.exists(path, sudo=sudo): raise Exceptions.DuplicateError(f"Path [{path}] already exists.")
sudo_str = Boolean(sudo).string(true="sudo ", false="")
if directory:
os.system(f"{sudo_str}mkdir -p {path}")
else:
if isinstance(data, (list, Array, dict, Dictionary)):
if isinstance(data, (Dictionary,Array)):
data = data.raw()
Files.save(path=path, data=data, format="json", sudo=sudo, )
else:
Files.save(path=path, data=str(data), sudo=sudo)
if not Files.exists(path, sudo=sudo):
raise ValueError(f"Unable to create {Boolean(directory).string(true='directory', false='file')} [{path}] (sudo: {sudo}).")
if permission != None:
Files.chmod(path=path, permission=permission, sudo=sudo)
if owner != None:
Files.chown(path=path, owner=owner, group=group, sudo=sudo)
def copy(
# the from & to path (#1 & #2).
from_, to_,
# root permission required.
sudo=False,
# the active log level.
log_level=0,
# the exclude patterns.
exclude=[],
# update deleted files.
delete=True,
):
if not Files.exists(from_, sudo=sudo):
raise FileNotFoundError(f"Specified copy path [{from_}] does not exist.")
directory = False
if Files.directory(from_, sudo=sudo):
directory = True
from_ += "/"
to_ += "/"
from_ = gfp.clean(from_)
to_ = gfp.clean(to_)
if not Files.exists(gfp.base(to_), sudo=sudo): Files.create(gfp.base(to_), sudo=sudo, directory=directory)
exclude_str = ""
for i in exclude: exclude_str += f" --exclude '{i}'"
os.system(f"{Boolean(sudo).string(true='sudo ', false='')}rsync -azt{Boolean(log_level >= 1).string(true='P',false='')} {from_} {to_} {Boolean(delete).string(true='--delete', false='')}{exclude_str}")
def move(
# the from & to path (#1 & #2).
from_, to_,
# root permission required.
sudo=False,
# root permission required.
log_level=0,
):
if not Files.exists(from_, sudo=sudo):
raise FileNotFoundError(f"Specified move path [{from_}] does not exist.")
directory = False
if Files.directory(from_, sudo=sudo):
directory = True
from_ += "/"
to_ += "/"
from_ = gfp.clean(from_)
to_ = gfp.clean(to_)
if not Files.exists(gfp.base(to_), sudo=sudo): Files.create(gfp.base(to_), sudo=sudo, directory=directory)
os.system(f"{Boolean(sudo).string(true='sudo ', false='')}mv {from_} {to_}")
def base(
# the path (str, FilePath) (#1).
path=None,
# the dirs back.
back=1,
):
if path == None: raise ValueError("Define parameter: path:str.")
path = str(path)
base = path.replace('//','/')
if base[len(base)-1] == '/': base = base[:-1]
if len(base.split("/")) <= 1: raise ValueError("Path [{}] has no base.".format(base))
startslash = True
if base[0] != "/":
startslash = False
base = base.split("/")
m, c, s = len(base), 0, ""
for i in base:
if c >= m-back: break
if c == 0:
s = f"/{i}/"
else:
s += f"{i}/"
c += 1
if startslash:
return s
else:
return s[1:]
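	# Illustrative usage (a sketch):
	#   Files.base("a/b/c")           # -> "a/b/"
	#   Files.base("a/b/c", back=2)   # -> "a/"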
#
#
# the file object class.
class File(object):
def __init__(self, path=None, data=None, load=False, default=None):
# docs.
DOCS = {
"module":"File",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(data, Files.File):
data = data.data
# init.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
self.data = data
if default != None and not os.path.exists(self.file_path.path):
self.save(data=default)
if load: self.load()
# can be filled with executing [self.x = x()]:
def load(self, default=None, sudo=False):
utils.__check_memory_only__(str(self.file_path.path))
if not os.path.exists(str(self.file_path.path)) and default != None:
self.save(data=default, sudo=sudo)
self.data = Files.load(self.file_path.path, format=str, sudo=sudo)
return self.data
def load_line(self, line_number, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(str(default), self.file_path.path, sudo=sudo)
data = Files.load(self.file_path.path, format=str, sudo=sudo)
return data.split('\n')[line_number]
def save(self, data=None, path=None, overwrite_duplicates=True, sudo=False):
if path == None: path = self.file_path.path
if data == None: data = self.data
utils.__check_memory_only__(path)
if overwrite_duplicates:
self.data = data
return Files.save(path, data, sudo=sudo)
else:
file_name, original_path = Formats.FilePath(path).name(), path
extension = file_name.split('.')[file_name.count('.')]
file_name_without_extension = file_name.replace(extension, '')
				index = 1
				while True:
					if not os.path.exists(path): break
					else:
						path = original_path.replace(file_name, file_name_without_extension+'-'+str(index)+extension)
						index += 1
self.data = data
return Files.save(path, data, sudo=sudo)
def check(self, default=None, save=True):
if default != None and isinstance(default, (str, String)):
if not self.fp.exists():
self.data = default
if save:
self.save(data=default)
# support default iteration.
def __iter__(self):
return iter(self.data)
# support '>=' & '>' operator.
def __gt__(self, string):
if not isinstance(string, str):
return len(self) > len(string)
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self) > len(string.data)
def __ge__(self, string):
if not isinstance(string, str):
return len(self) >= len(string)
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self) >= len(string.data)
# support '<=' & '<' operator.
def __lt__(self, string):
if not isinstance(string, str):
return len(self) < len(string)
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self) < len(string.data)
def __le__(self, string):
if not isinstance(string, str):
return len(self) <= len(string)
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self) <= len(string.data)
# support '==' & '!=' operator.
def __eq__(self, string):
if not isinstance(string, str):
return self.data == string
elif not isinstance(string, self.__class__):
return False
return self.data == string.data
def __ne__(self, string):
if not isinstance(string, str):
return self.data != string
elif not isinstance(string, self.__class__):
return True
return self.data != string.data
# support 'in' operator.
def __contains__(self, key):
if isinstance(key, (list, Files.Array)):
for i in key:
if i in self.data:
return True
return False
else:
return key in self.data
# str representation.
def __str__(self):
return str(self.data)
# content count.
def __len__(self):
return len(self.data)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "File"
#
# support self assignment.
def assign(self, data):
if isinstance(data, self.__class__):
data = data.data
self.data = data
return self
# return raw data.
def raw(self):
return self.data
#
# the array object class.
class Array(object):
def __init__(self,
# the array (param #1).
array=[],
# the path (param #2).
path=False,
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"Array",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(array, Files.Array):
array = array.array
elif not isinstance(array, list):
raise Exceptions.InstanceError(f"Parameter [{self.__class__.__name__}.array] must be a [Array] or [list], not [{array.__class__.__name__}].")
# initialize dictionary recursive.
#new = []
#for i in array: new.append(Formats.initialize(i))
#array = new
#if isinstance(array, Array):
# array = array.array
# init.
if path in [False, None]:
			self.file_path = self.fp = None # used in local memory (not physical)
self.__path__ = None
else:
self.file_path = self.fp = Formats.FilePath(path)
self.__path__ = self.file_path.path
self.array = array
if default != None and self.file_path != None and not os.path.exists(self.file_path.path):
self.save(array=default)
self.array = default
if load: self.load()
#
# save to file.
def save(self, array=None, path=None, ensure_ascii=False, indent=4, sudo=False):
		if array == None: array = self.array
if path == None: path = self.file_path.path
utils.__check_memory_only__(path)
self.array = array
return Files.save(path, Formats.denitialize(array), format="json", indent=indent, ensure_ascii=ensure_ascii, sudo=sudo)
# load from file.
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
self.array = Files.load(self.file_path.path, format="json", sudo=sudo)
return self.array
# convert to string.
def string(self, joiner=" ", sum_first=False):
string = ""
for x in self.array:
if sum_first and string == "": string = joiner + str(x)
elif string == '': string = str(x)
else: string += joiner + str(x)
return str(string)
# divide into several arrays.
def divide(self, into=2):
avg = len(self.array) / float(into)
out = []
last = 0.0
while last < len(self.array):
out.append(self.array[int(last):int(last + avg)])
last += avg
if len(out) > into:
while len(out) > into:
last = out.pop(len(out)-1)
out[len(out)-1] += last
return out
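	# Illustrative usage (a sketch):
	#   Array([1, 2, 3, 4]).divide(into=2)   # -> [[1, 2], [3, 4]]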
	# remove indexes or values.
def remove(self, indexes=[], values=[]):
array = self.array
for i in indexes:
try: array.pop(i)
except: a=1
if values != []:
new = []
for v in array:
if v not in values: new.append(v)
array = new
return Array(array, path=self.__path__)
# default list functions.
	def append(self, var):
		array = list(self.array)
		array.append(var)
		return Array(array, path=self.__path__)
	def pop(self, index):
		array = list(self.array)
		array.pop(index)
		return Array(array, path=self.__path__)
def count(self, item=None):
if item == None:
return Formats.Integer(len(self.array))
elif isinstance(item, (str, Formats.String)):
c = 0
for i in self:
if i == item: c += 1
return Formats.Integer(c)
elif isinstance(item, (list, Files.Array)):
c = 0
for x in self:
for y in item:
if x == y: c += 1
return Formats.Integer(c)
else: raise Exceptions.InstanceError("Parameter [item] must either be None, String or Array.")
# check.
def check(self, default=None, save=True):
if default != None and isinstance(default, (list, Array)):
if not self.fp.exists():
self.array = default
if save:
					self.save(array=default)
else:
for i in default:
if i not in self.array:
self.array.append(i)
if save:
self.save()
# clean content.
def clean(self,
# the string replacements.
# example:
# { "Hello":"hello" }
# [ ["Hello", "hello"] ]
replacements={},
# the first characters to remove (String & Array).
remove_first=[],
# the last characters to remove (String & Array).
remove_last=[],
# the first characters that are ensured (String & Array) (List: check is one of the list is ensured).
ensure_first=[],
# the last characters that are ensured (String & Array) (List: check is one of the list is ensured).
ensure_last=[],
# remove all values within the list from the array.
remove_values=[],
# update the self array.
update=True,
		# the array (leave None to use self.array).
array=None,
):
if array == None: array = list(self.array)
if isinstance(remove_first, (str, Formats.String)):
remove_first = [remove_first]
if isinstance(remove_last, (str, Formats.String)):
remove_last = [remove_last]
if isinstance(ensure_first, (str, Formats.String)):
ensure_first = [ensure_first]
if isinstance(ensure_last, (str, Formats.String)):
ensure_last = [ensure_last]
new = []
for item in list(array):
if item not in remove_values:
while True:
edits = False
for i in remove_first:
if len(item) >= len(i) and item[:len(i)] == i:
item = item[len(i):]
edits = True
					for i in remove_last:
						if len(item) >= len(i) and item[-len(i):] == i:
							item = item[:-len(i)]
							edits = True
					for i in ensure_first:
						if len(item) >= len(i) and item[:len(i)] != i:
							item = i+item
							edits = True
					for i in ensure_last:
						if len(item) >= len(i) and item[-len(i):] != i:
							item += i
							edits = True
for from_, to_ in replacements.items():
if isinstance(item, (str, Formats.String)) and from_ in item:
item = item.replace(from_, to_)
edits = True
if not edits: break
new.append(item)
return Array(new, path=self.__path__)
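	# Illustrative usage (a sketch): strip unwanted prefixes / suffixes from every item.
	#   Array(["-a", "b-"]).clean(remove_first=["-"], remove_last=["-"])   # -> ["a", "b"]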
# iterations.
def iterate(self, sorted=False, reversed=False, array=None):
if array == None: array = list(self.array)
return self.items(reversed=reversed, sorted=sorted, array=array)
# iterate items.
def items(self, sorted=False, reversed=False, array=None):
if array == None: array = list(self.array)
if sorted: array = self.sort(array=array)
if reversed: return self.reversed(array=array)
else: return Array(array, path=self.__path__)
	# reverse array.
def reversed(self, array=None):
if array == None: array = self.array
reversed_keys = []
c = len(array)-1
for _ in range(len(array)):
reversed_keys.append(array[c])
c -= 1
return Array(reversed_keys, path=self.__path__)
# sort array.
def sort(self, reversed=False, array=None):
if array == None: array = self.array
return Array(sorted(array, reverse=reversed), path=self.__path__)
# dump json string.
def json(self, sorted=False, reversed=False, indent=4, array=None, ):
#return json.dumps(Formats.denitialize(self), indent=indent)
if array == None: array = self.array
return json.dumps(self.serialize(json=False, sorted=sorted, reversed=reversed, array=array), indent=indent)
# serialize array.
def serialize(self, sorted=False, reversed=False, json=False, array=None):
if array == None: array = self.array
if isinstance(array, Files.Array):
array = array.array
if sorted:
			items = self.items(reversed=reversed, array=self.sort(array=array))
else:
items = self.items(reversed=reversed, array=array)
new = []
for value in items:
			if isinstance(value, (dict, Files.Dictionary)):
				value = Files.Dictionary().serialize(json=json, sorted=sorted, reversed=reversed, dictionary=value)
			elif isinstance(value, (list, Files.Array)):
				value = self.serialize(json=json, sorted=sorted, reversed=reversed, array=value)
			elif isinstance(value, (str, bool)) or value == None:
				# normalize bool / None values (checked before the generic object fallback so this branch is reachable).
				if value in [True, "True", "True".lower()]:
					if json:
						value = "true"
					else:
						value = True
				elif value in [False, "False", "False".lower()]:
					if json:
						value = "false"
					else:
						value = False
				elif value in [None, "None", "None".lower()]:
					if json:
						value = "null"
					else:
						value = None
			elif isinstance(value, object):
				value = str(value)
new.append(value)
return new
# randomize the content of the array always non recursive.
def randomize(self,
# optionally pass the array (leave None to use self.array).
array=None,
):
if array == None: array = list(self.array)
randomized = []
while len(array) > 0:
index = random.randrange(0, len(array))
item = array.pop(index)
randomized.append(item)
return Array(randomized, path=self.__path__)
#
# limit the content of the array.
def limit(self,
# limit to the number of samples.
limit:int,
# the index to start from.
start=0,
# optionally pass the array (leave None to use self.array).
array=None,
):
if array == None: array = list(self.array)
return Array(array[start:start+limit], path=self.__path__)
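	# Illustrative usage (a sketch):
	#   Array([1, 2, 3, 4, 5]).limit(2, start=1)   # -> [2, 3]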
# min of numerical array.
def min(self):
min = self.array[0]
for item in self.array:
if item < min:
min = item
return min
# max of numerical array.
def max(self):
max = self.array[0]
for item in self.array:
if item > max:
max = item
return max
# sum numerical array.
def sum(self):
return sum(self.array)
# mean of numerical array.
def mean(self, window=None):
return self.sum() / len(self.array)
#
# variance of numerical array.
def variance(self):
mean = self.mean()
deviations = []
for x in self.array:
deviations.append((x - mean) ** 2)
return sum(deviations) / len(self.array)
# standard deviation of numerical array.
def stdev(self):
return math.sqrt(self.variance())
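	# Illustrative usage (a sketch):
	#   Array([1, 2, 3]).mean()       # -> 2.0
	#   Array([1, 2, 3]).variance()   # -> 0.666...
	#   Array([1, 2, 3]).stdev()      # -> 0.8164...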
# copy.
def copy(self):
return Files.Array(self.array, path=self.__path__)
#
# support "+", -, =-, =+" .
def __add__(self, array):
if isinstance(array, list):
a=1
elif isinstance(array, self.__class__):
array = array.array
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {array.__class__}.")
return Array(self.array + array)
def __iadd__(self, array):
if isinstance(array, list):
a=1
elif isinstance(array, self.__class__):
array = array.array
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {array.__class__}.")
		self.array += array
		return self
def __sub__(self, array):
if isinstance(array, list):
a=1
elif isinstance(array, self.__class__):
array = array.array
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {array.__class__}.")
new = []
for i in self.array:
if i not in array:
new.append(i)
return Array(new)
def __isub__(self, array):
if isinstance(array, list):
a=1
elif isinstance(array, self.__class__):
array = array.array
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {array.__class__}.")
new = []
for i in self.array:
if i not in array:
new.append(i)
		self.array = new
		return self
# support +.
def __concat__(self, array):
if isinstance(array, list):
a=1
elif isinstance(array, self.__class__):
array = array.array
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {array.__class__}.")
return Array(self.array + array)
# support default iteration.
def __iter__(self):
return iter(self.array)
# support '>=' & '>' operator.
def __gt__(self, array):
if not isinstance(array, list):
return len(self.array) > len(array)
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {array.__class__}.")
return len(self.array) > len(array.array)
def __ge__(self, array):
if not isinstance(array, list):
return len(self.array) >= len(array)
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {array.__class__}.")
return len(self.array) >= len(array.array)
# support '<=' & '<' operator.
def __lt__(self, array):
if not isinstance(array, list):
return len(self.array) < len(array)
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {array.__class__}.")
return len(self.array) < len(array.array)
def __le__(self, array):
if not isinstance(array, list):
return len(self.array) <= len(array)
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {array.__class__}.")
return len(self.array) <= len(array.array)
# support '==' & '!=' operator.
def __eq__(self, array):
if not isinstance(array, list):
return self.array == array
elif not isinstance(array, self.__class__):
return False
return self.array == array.array
def __ne__(self, array):
if not isinstance(array, list):
return self.array != array
elif not isinstance(array, self.__class__):
return True
return self.array != array.array
# support 'in' operator.
def __contains__(self, key):
if isinstance(key, (list, Files.Array)):
for i in key:
if i in self.array:
return True
return False
else:
return key in self.array
# support '*' operator.
def __mul__(self, value):
if isinstance(value, int):
a=1
else:
raise Exceptions.FormatError(f"Can not mul object {self.__class__.__name__} & {value.__class__.__name__}.")
return Array(self.array * value)
# support '/' operator.
def __div__(self, value):
if isinstance(value, int):
a=1
else:
raise Exceptions.FormatError(f"Can not div object {self.__class__.__name__} & {value.__class__.__name__}.")
return Array(self.divide(into=value))
# support item assignment.
def __setitem__(self, index, value):
#if "/" in item
try:
self.array[Formats.denitialize(index)] = value
except IndexError:
self.array.append(value)
def __getitem__(self, index):
return self.array[Formats.denitialize(index)]
def __delitem__(self, index):
#if "/" in item
return self.array.pop(Formats.denitialize(index))
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(Formats.denitialize(self.array))
# content count.
def __len__(self):
return len(self.array)
# object id.
def __id__(self):
if len(self.array) > 10:
return f"({self.instance()}:[{self.array[0]}, {self.array[0]}, {self.array[0]}, ... {self.array[len(self.array)-3]}, {self.array[len(self.array)-2]}, {self.array[len(self.array)-1]}])"
else:
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Array"
#
# support self assignment.
def assign(self, array):
if isinstance(array, self.__class__):
array = array.array
self.array = array
# assign a new path.
def assign_path(self, path):
self.file_path = FilePath(path)
self.__path__ = path
# return raw data.
def raw(self):
return self.array
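	# Usage sketch (illustrative; assumes an in-memory array created with path=False):
	#   a = Array(["x", "y"], path=False)
	#   a += ["z"]                # __iadd__ extends the underlying list in place
	#   (a - ["y"]).raw()         # -> ["x", "z"]
	#   a.serialize(json=True)    # bool / None items become "true" / "false" / "null"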
#
# the dictionary object class.
class Dictionary(object):
def __init__(self,
# the dictionary (param #1).
dictionary={},
# the file path (param #2).
path=False,
# load the file path dictionary on init.
load=False,
# specify default to check & create the dict.
default=None,
):
# docs.
DOCS = {
"module":"Dictionary",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(dictionary, Files.Dictionary):
dictionary = dictionary.dictionary
elif not isinstance(dictionary, dict):
raise Exceptions.InstanceError(f"Parameter [{self.__class__.__name__}.dictionary] must be a [Dictionary] or [dict], not [{dictionary.__class__.__name__}].")
"""elif not isinstance(dictionary, dict):
max_attempts = 2
for attempt in range(max_attempts):
try:
if 1+attempt == 1:
dictionary = dictionary.dict()
break
elif 1+attempt == 2:
dictionary = dictionary.json()
break
except:
if 1+attempt >= max_attempts:
raise Exceptions.InstanceError(f"Parameter [{self.__class__.__name__}.dictionary] must be a [Dictionary] or [dict], not [{dictionary.__class__.__name__}].")
"""
# initialize dictionary recursive.
#for key in list(dictionary.keys()):
# dictionary[key] = Formats.initialize(dictionary[key])
#if isinstance(dictionary, Dictionary):
# dictionary = dictionary.dictionary
# arguments.
self.dictionary = dictionary
self.path = gfp.clean(path=path)
self.default = default
self.file_path = self.fp = None
self.__path__ = None
# checks.
if path not in [False, None]:
self.file_path = self.fp = Formats.FilePath(path)
self.__path__ = self.file_path.path
if self.default != None:
self.dictionary = self.check(default=self.default).dictionary
if self.file_path != None and not self.file_path.exists(): self.save()
if load: self.load()
#
# save to file.
def save(self, dictionary=None, path=None, ensure_ascii=False, indent=4, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if dictionary == None: dictionary = dict(self.dictionary)
if path == None: path = self.file_path.path
return Files.save(path, Formats.denitialize(dictionary), format="json", indent=indent, ensure_ascii=ensure_ascii, sudo=sudo)
# load from file.
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
self.dictionary = Files.load(self.file_path.path, format="json", sudo=sudo)
return self.dictionary
# load a single line from file.
def load_line(self, line_number, sudo=False):
utils.__check_memory_only__(self.file_path.path)
		data = Files.load(str(self.file_path.path), sudo=sudo)
return data.split('\n')[line_number]
# check the dictionary.
def check(self,
# Option 1:
key=None, # check a certain key, it appends if not present
value=None, # check a certain key, append the value if not present (no format check)
# Option 2:
default=None, # check based on a default dictionary, it appends it not present.
# Optionals:
dictionary=None, # overwrite the start dictionary, leave None to use self.dictionary.
save=False, # saves the output & and sets the output to self.dictionary.
):
# functions.
def __iterate_dict__(dictionary, default):
#print("\niterating new dictionary: [{}] & default [{}]\n".format(dictionary, default))
for identifier, item in default.items():
if isinstance(item, (dict,Dictionary)):
try: dictionary[str(identifier)] = __iterate_dict__(dictionary[str(identifier)], item)
except KeyError: dictionary[str(identifier)] = dict(item)
elif isinstance(item, (list,Array)):
if isinstance(item, (list)): item = list(item)
elif isinstance(item, (Array)): item = item.array
try: dictionary[str(identifier)]
except KeyError: dictionary[str(identifier)] = item
else:
try: dictionary[str(identifier)]
except KeyError: dictionary[str(identifier)] = item
return dictionary
# init.
if dictionary == None: dictionary = dict(self.dictionary)
if not isinstance(dictionary, (dict, Dictionary)):
raise Exceptions.InvalidUsage(f"<Dictionary.check> parameter [dicionary] requires to be a [dict, Dictionary] not [{dictionary.__class__.__name__}].")
# - option 1:
if key == None and value != None: raise ValueError("Define both parameters: [key & value].")
elif value == None and key != None: raise ValueError("Define both parameters: [key & value].")
if key != None and value != None:
try: dictionary[key]
except KeyError: dictionary[key] = value
return dictionary
# - option 2:
if default == None: default = self.default
if default == None: raise ValueError("Define both parameters: [key & value] or parameter [default].")
dictionary = __iterate_dict__(dictionary, default)
return Dictionary(dictionary, path=self.__path__)
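	# Usage sketch (illustrative; in-memory dictionary, values are hypothetical):
	#   d = Dictionary({"host": "localhost"}, path=False)
	#   d = d.check(default={"host": "localhost", "port": 8080})
	#   d["port"]  # -> 8080, missing keys are filled in from the default (recursively)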
#
# divide dictionary into multiple arrays.
def divide(self, into=2):
return_list = [dict() for idx in range(into)]
idx = 0
for k,v in self.dictionary.items():
return_list[idx][k] = v
if idx < into-1: # indexes start at 0
idx += 1
else:
idx = 0
return Array(return_list)
# append to dict.
def append(self,
# by default it only overwrites if a key does not exist and sums the key if it is a str / int.
#
# a dictionary to append.
dictionary,
# the overwrite formats (add "*" for all).
overwrite=[],
# the sum formats (add "*" for all).
sum=["int", "float"],
# the banned dictionary keys.
banned=[],
# do not use.
dictionary_=None,
):
if dictionary_ == None: dictionary_ = dict(self.dictionary)
if dictionary == dictionary_: return dictionary
if dictionary_ == {}: return dictionary
for key, value in dictionary.items():
if key not in banned:
if isinstance(value, (dict, Dictionary)):
found = True
try: dictionary_[key]
except: found = False
if found:
						dictionary_[key] = self.append(value, overwrite=overwrite, sum=sum, banned=banned, dictionary_=dictionary_[key])
else:
dictionary_[key] = value
else:
format = value.__class__.__name__
if "*" in sum or format in sum:
if format in ["str", "int", "float", "list", "Array"]:
try: dictionary_[key] += value
except KeyError: dictionary_[key] = value
else: # cant be summed.
dictionary_[key] = value
elif "*" in overwrite or format in overwrite:
dictionary_[key] = value
else:
try: dictionary_[key]
except KeyError: dictionary_[key] = value
return Dictionary(dictionary_, path=self.__path__)
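	# Usage sketch (illustrative): int / float values are summed by default,
	# other existing keys are kept unless their format is listed in [overwrite].
	#   d = Dictionary({"count": 1, "name": "a"}, path=False)
	#   d.append({"count": 2, "name": "b"}).raw()  # -> {"count": 3, "name": "a"}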
# edit.
def edit(self,
# the dictionary (leave None to use self.dictionary).
dictionary=None,
# the edits (dict).
# adds / replaces the current (except the exceptions).
edits={},
# the edits key Exceptions.
exceptions=[],
# the edits value Exceptions.
value_exceptions=[None],
# the instances to overwrite (list[str]) (missing stands for the keys that are missing in the dictionary).
overwite=["missing"],
# the instances to combine (list[str]) (dict is always recursive).
combine=["int", "float", "Integer", "list", "Array"],
# the log level.
log_level=-1,
):
def edit_dict(dictionary={}, edits={}):
c = 0
for key, value in edits.items():
found = True
try: dictionary[key]
except KeyError: found = False
# recursive.
if key not in exceptions and value not in value_exceptions and isinstance(value, (dict, Dictionary)):
if isinstance(value, (Dictionary)):
value = value.dictionary
if found:
dictionary[key], lc = edit_dict(dictionary=dictionary[key], edits=value)
c += lc
else:
if log_level >= 0:
print(f"Editing {alias} config {key}: {value}.")
dictionary[key] = value
c += 1
elif key not in exceptions and value not in value_exceptions and not found and "missing" in overwrite:
if log_level >= 0:
print(f"Editing {alias} config {key}: {value}.")
dictionary[key] = value
c += 1
elif key not in exceptions and value not in value_exceptions and found and value.__class__.__name__ in combine:
if log_level >= 0:
print(f"Editing {alias} config {key}: {value}.")
dictionary[key] = dictionary[key] + value
c += 1
return dictionary, c
# check specific.
if dictionary == None: dictionary = dict(self.dictionary)
dictionary, c = edit_dict(dictionary=dictionary, edits=edits)
return Dictionary(dictionary, path=self.__path__)
# unpack attribute(s).
def unpack(self,
# the key / keys / defaults parameter (#1).
# str instance:
# unpack the str key
# list instance:
# unpack all keys in the list.
# dict instance:
# unpack all keys from the dict & when not present return the key's value as default.
keys,
):
defaults_ = {}
if isinstance(keys, (dict, Files.Dictionary)):
if isinstance(keys, dict):
defaults_ = dict(keys)
keys = list(keys.keys())
else:
defaults_ = keys.dict()
keys = keys.keys()
elif isinstance(keys, str):
keys = [keys]
unpacked = []
for key in keys:
value, set = None, True
try:
value = self.dictionary[key]
except KeyError:
try:
value = defaults_[key]
except KeyError:
set = False
if not set:
raise Exceptions.UnpackError(f"Dictionary does not contain attribute [{key}].")
unpacked.append(value)
if len(unpacked) == 1:
return unpacked[0]
else:
return unpacked
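	# Usage sketch (illustrative; keys and defaults are hypothetical):
	#   d = Dictionary({"host": "localhost"}, path=False)
	#   host = d.unpack("host")
	#   host, port = d.unpack({"host": None, "port": 8080})  # dict values act as defaults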
# remove.
def remove(self, keys=[], values=[], update=True, save=False, dictionary=None):
if dictionary == None:
dictionary = dict(self.dictionary)
for i in list(keys):
try: del dictionary[i]
except: a=1
if values != []:
new = {}
for k,v in dictionary.items():
if v not in values: new[k] = v
dictionary = new
return Dictionary(dictionary, path=self.__path__)
# count keys or values.
def count(self, item=None, values=False):
if item == None:
return Formats.Integer(len(self.dictionary))
elif isinstance(item, (str, Formats.String)):
c, array = 0, []
if values:
array = self.values()
else:
array = self.keys()
for i in array:
if i == item: c += 1
return Formats.Integer(c)
elif isinstance(item, (list, Files.Array)):
c, array = 0, []
if values:
array = self.values()
else:
array = self.keys()
for x in array:
for y in item:
if x == y: c += 1
return Formats.Integer(c)
else: raise Exceptions.InstanceError(f"Parameter [item] must either be [None], [String] or [Array], not [{item.__class__}].")
# insert new keys & values.
def insert(self, dictionary={}, __dictionary__=None):
if __dictionary__ == None:
__dictionary__ = dict(self.dictionary)
for key,value in dictionary.items():
if isinstance(value, (dict, Dictionary)):
if key in __dictionary__:
__dictionary__[key] = self.insert(value, __dictionary__=__dictionary__[key])
else:
__dictionary__[key] = value
elif isinstance(value, (list, Array)):
if key in __dictionary__:
for i in value:
if i not in __dictionary__[key]: __dictionary__[key].append(i)
else:
__dictionary__[key] = value
else:
__dictionary__[key] = value
return Dictionary(__dictionary__, path=self.__path__)
# iterate keys and values.
def iterate(self, sorted=False, reversed=False, dictionary=None):
if dictionary == None: dictionary = self.dictionary
return self.items(reversed=reversed, sorted=sorted, dictionary=dictionary)
def items(self, sorted=False, reversed=False, dictionary=None):
if dictionary == None: dictionary = self.dictionary
if sorted: dictionary = self.sort(dictionary=dictionary)
if reversed: return self.reversed(dictionary=dictionary).items()
else: return dictionary.items()
# iterate keys.
def keys(self, dictionary=None):
if dictionary == None: dictionary = self.dictionary
return Array(list(dictionary.keys()))
# iterate values.
def values(self, dictionary=None):
if dictionary == None: dictionary = dict(self.dictionary)
values = []
for key, value in dictionary.items():
values.append(value)
return Array(values)
# reverse dictionary.
def reversed(self, dictionary=None):
if dictionary == None: dictionary = dict(self.dictionary)
keys = list(dictionary.keys())
reversed_keys = []
c = len(keys)-1
for _ in range(len(keys)):
reversed_keys.append(keys[c])
c -= 1
reversed_dict = {}
for key in reversed_keys:
reversed_dict[key] = dictionary[key]
return Dictionary(reversed_dict, path=self.__path__)
# sort ascending dictionary.
def sort(self,
# reverse ascending to descending.
reversed=False,
# sort the keys or sort the values.
sort="keys",
# system parameters.
dictionary=None,
):
if dictionary == None: dictionary = dict(self.dictionary)
if sort == "values":
new = {}
for key in sorted(dictionary, key=dictionary.get, reverse=reversed):
new[key] = dictionary[key]
elif sort == "keys":
new = {}
for key in sorted(dictionary, reverse=reversed):
new[key] = dictionary[key]
else: raise ValueError(f"Selected an invalid sort mode [{sort}].")
return Dictionary(new, path=self.__path__)
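	# Usage sketch (illustrative):
	#   d = Dictionary({"b": 2, "a": 1}, path=False)
	#   d.sort().raw()                               # -> {"a": 1, "b": 2} (sorted on keys)
	#   d.sort(sort="values", reversed=True).raw()   # -> {"b": 2, "a": 1}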
# dump json string.
def json(self, indent=4, dictionary=None, ):
if dictionary == None: dictionary = self.dictionary
return json.dumps(self.serialize(json=False, dictionary=dictionary), indent=indent)
# serialize dict.
def serialize(self, json=False, dictionary=None):
if dictionary == None: dictionary = dict(self.dictionary)
if isinstance(dictionary, Files.Dictionary):
dictionary = dictionary.dictionary
items = self.items(dictionary=dictionary)
dictionary = {}
for key, value in items:
if isinstance(value, (dict, Files.Dictionary)):
value = self.serialize(json=json, dictionary=value)
elif isinstance(value, (list, Files.Array)):
value = Files.Array(value).serialize(json=json)
			elif not isinstance(value, (str, bool, int, float, type(None))):
				# keep primitives intact so the bool / None handling below stays reachable.
				value = str(value)
elif isinstance(value, str) or isinstance(value, bool) or value == None:
if value in [True, "True", "True".lower()]:
if json:
value = "true"
else:
value = True
elif value in [False, "False", "False".lower()]:
if json:
value = "false"
else:
value = False
elif value in [None, "None", "None".lower()]:
if json:
value = "null"
else:
value = None
dictionary[key] = value
return dictionary
# copy.
def copy(self):
return Files.Dictionary(self.dictionary, path=self.__path__)
#
# system functions.
def __reverse_keys_and_values__(self, dictionary=None):
if dictionary == None: dictionary = self.dictionary
new = {}
for key,value in dictionary.items():
new[value] = key
return new
def __serialize_string__(self, string, banned_characters=["@"]):
c, s, l = 0, "", False
for char in string:
if char not in banned_characters:
# regular letter.
if char.lower() == char:
s += char.lower()
l = False
# capital letter.
else:
if c == 0:
s += char.lower()
else:
if l:
s += char.lower()
else:
s += "_"+char.lower()
l = True
c += 1
return s
def __serialize_dictionary__(self, response):
_response_ = {}
for key,value in response.items():
s_key = self.__serialize_string__(key)
if isinstance(value, dict):
_response_[s_key] = self.__serialize_dictionary__(value)
elif isinstance(value, str):
try: integer = int(value)
except: integer = False
if integer != False:
_response_[s_key] = integer
elif value in ["false", "False", "FALSE", "DISABLED"]:
_response_[s_key] = False
elif value in ["true", "True", "TRUE", "ENABLED"]:
_response_[s_key] = True
else:
_response_[s_key] = value
else:
_response_[s_key] = value
return _response_
# support "+", -, =-, =+" .
def __add__(self, dictionary):
if isinstance(dictionary, dict):
a=1
elif isinstance(dictionary, self.__class__):
dictionary = dictionary.dictionary
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {dictionary.__class__}.")
return self.append(dictionary=dictionary, overwrite=["*"], sum=[])
def __iadd__(self, dictionary):
if isinstance(dictionary, dict):
a=1
elif isinstance(dictionary, self.__class__):
dictionary = dictionary.dictionary
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {dictionary.__class__}.")
		self.dictionary = self.append(dictionary=dictionary, overwrite=["*"], sum=[]).dictionary
		return self
def __sub__(self, dictionary):
if isinstance(dictionary, dict):
keys = list(dictionary.keys())
elif isinstance(dictionary, list):
keys = dictionary
elif isinstance(dictionary, Files.Array):
keys = dictionary.array
elif isinstance(dictionary, self.__class__):
keys = dictionary.keys()
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {dictionary.__class__}.")
return self.remove(keys=keys)
def __isub__(self, dictionary):
if isinstance(dictionary, dict):
keys = list(dictionary.keys())
elif isinstance(dictionary, list):
keys = dictionary
elif isinstance(dictionary, Files.Array):
keys = dictionary.array
elif isinstance(dictionary, self.__class__):
keys = dictionary.keys()
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {dictionary.__class__}.")
		self.dictionary = self.remove(keys=keys, update=True).dictionary
		return self
# support +.
def __concat__(self, string):
if isinstance(dictionary, dict):
a=1
elif isinstance(dictionary, self.__class__):
dictionary = dictionary.dictionary
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {dictionary.__class__}.")
return self.append(dictionary=dictionary, sum=[], overwrite=["*"])
# support default iteration.
def __iter__(self):
return iter(self.dictionary)
# support '>=' & '>' operator.
def __gt__(self, dictionary):
if isinstance(dictionary, dict):
return len(self.dictionary) > len(dictionary)
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {dictionary.__class__}.")
return len(self.dictionary) > len(dictionary.dictionary)
def __ge__(self, dictionary):
if isinstance(dictionary, dict):
return len(self.dictionary) >= len(dictionary)
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {dictionary.__class__}.")
return len(self.dictionary) >= len(dictionary.dictionary)
# support '<=' & '<' operator.
def __lt__(self, dictionary):
if isinstance(dictionary, dict):
return len(self.dictionary) < len(dictionary)
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {dictionary.__class__}.")
return len(self.dictionary) < len(dictionary.dictionary)
def __le__(self, dictionary):
if isinstance(dictionary, dict):
return len(self.dictionary) <= len(dictionary)
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {dictionary.__class__}.")
return len(self.dictionary) <= len(dictionary.dictionary)
# support '==' & '!=' operator.
def __eq__(self, dictionary):
if isinstance(dictionary, dict):
return str(self.sort()) == str(Dictionary(dictionary).sort())
elif isinstance(dictionary, Dictionary):
return str(self.sort()) == str(dictionary.sort())
else:
try:
return str(self.sort()) == str(dictionary.sort())
except:
return False
def __ne__(self, dictionary):
if isinstance(dictionary, dict):
return str(self.sort()) != str(Dictionary(dictionary).sort())
elif isinstance(dictionary, Dictionary):
return str(self.sort()) != str(dictionary.sort())
else:
try:
return str(self.sort()) != str(dictionary.sort())
except:
return False
# support 'in' operator.
def __contains__(self, key):
keys = list(self.dictionary.keys())
if isinstance(key, (list, Files.Array)):
for i in key:
if i in keys:
return True
return False
else:
return key in keys
# support item assignment.
def __setitem__(self, key, value):
if isinstance(key, (int, Integer)):
key = self.keys()[key]
self.dictionary[Formats.denitialize(key)] = value
def __getitem__(self, key):
if isinstance(key, slice):
raise ValueError("Coming soon.")
elif isinstance(key, (int, Integer)):
key = self.keys()[key]
return self.dictionary[Formats.denitialize(key)]
#
def __delitem__(self, key):
if isinstance(key, (int, Integer)):
key = self.keys()[key]
del self.dictionary[Formats.denitialize(key)]
def __splitkey__(self, key):
if key in self:
return [key]
return gfp.clean(path=key, remove_last_slash=True, remove_double_slash=True, remove_first_slash=True).split("/")
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(Formats.denitialize(self.dictionary))
# content count.
def __len__(self):
return len(self.dictionary)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self, serialize=False):
return "Dictionary"
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, dictionary):
if isinstance(dictionary, self.__class__):
dictionary = dictionary.dictionary
self.dictionary = dictionary
# assign a new path.
def assign_path(self, path):
self.file_path = FilePath(path)
self.__path__ = path
# return raw data.
def raw(self):
return self.dictionary
#
#
# the directory object class.
class Directory(object):
def __init__(self,
# the dirs file path (param #1).
path=None,
# the hierarchy to check / create.
hierarchy={},
# load the content.
#load=False,
# load recursive.
#recursive=False,
):
# docs.
DOCS = {
"module":"Directory",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(path, Files.Directory):
path = path.fp.path
# init.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else:
if path[len(path)-1] != "/": path += "/"
self.file_path = self.fp = Formats.FilePath(path)
self.hierarchy = hierarchy
if self.hierarchy != {}:
self.check(hierarchy=hierarchy)
# load.
#self.content = {}
#if load:
# self.content = {}
# can be filled with executing [self.x = x()]:
# executable functions.
# actions.
def create(self, file_paths=[], path=None, sudo=False, owner=None, group=None, permission=None):
# - init:
if path == None: path = self.file_path.path
# - create dir:
if not os.path.exists(path):
if sudo: os.system('sudo mkdir -p '+path)
else: os.system('mkdir -p '+path)
# - copy files:
commands = []
for l_path in file_paths:
if sudo:
command = None
if Files.directory(l_path): command = 'sudo cp -r {0} {1} '.format(l_path, path+Formats.FilePath(l_path).name())
else: command = 'sudo cp {0} {1}'.format(l_path, path+Formats.FilePath(l_path).name())
commands.append(command)
else:
command = None
if Files.directory(l_path): command = 'cp -r {0} {1} '.format(l_path, path+Formats.FilePath(l_path).name())
else: command = 'cp {0} {1}'.format(l_path, path+Formats.FilePath(l_path).name())
commands.append(command)
if len(commands) > 0:
if sudo:
script = Files.ShellScript(
					data=Files.Array(array=commands, path=False).string(joiner=" \n "),
path='/tmp/shell_script-'+str(random.randrange(23984792,23427687323))+'.sh'
)
script.save()
script.setPermission(755)
script.execute(sudo=sudo)
script.delete()
else: os.system(Files.Array(array=commands,path=False).string(joiner=" \n "))
if owner != None or group!=None: self.file_path.ownership.set(owner=owner, group=group, sudo=sudo)
if permission != None: self.file_path.permission.set(permission=permission, sudo=sudo)
def delete(self, forced=False):
if forced: os.system('rm -fr {}'.format(self.file_path.path))
else: os.system('rm -r {}'.format(self.file_path.path))
def check(self,
# Required:
# - dictionary format:
hierarchy=None,
# Optionals:
# - string format:
owner=None,
group=None,
# - boolean format:
sudo=False,
# - integer format:
permission=None, # (octal format)
recursive=False, # for permission/ownership
silent=False,
):
format = {
"my_directory_name":{
# Required:
"path":"my_directory_name/",
# Optionals:
"permission":755,
"owner":"daanvandenbergh",
"group":None,
"sudo":False,
"directory":True,
"recursive":False, # for permission & ownership (directories).
"default_data":None, # makes it a file
"default":None, # makes it a dictionary
}
}
def checkPermissionOwnership(file_path, dictionary, silent=False, recursive=False):
if dictionary["permission"] != None and dictionary["permission"] != file_path.permission.permission:
#print("editing file [{}] permission [{}] to [{}]...".format(file_path.path, file_path.permission.permission, dictionary["permission"]))
file_path.permission.set(permission=dictionary["permission"], sudo=dictionary["sudo"], recursive=recursive, silent=silent)
if dictionary["owner"] != None and dictionary["owner"] != file_path.ownership.owner:
#print("editing file [{}] owner [{}] to [{}]...".format(file_path.path, file_path.ownership.owner, dictionary["owner"]))
file_path.ownership.set(owner=dictionary["owner"], group=file_path.ownership.group, sudo=dictionary["sudo"], recursive=recursive, silent=silent)
#print("file [{}] current group [{}] wanted group [{}]".format(file_path.path, file_path.ownership.group, dictionary["group"]))
if dictionary["group"] != None and dictionary["group"] != file_path.ownership.group:
#print("editing file [{}] group [{}] to [{}]...".format(file_path.path, file_path.ownership.group, dictionary["group"]))
file_path.ownership.set(owner=file_path.ownership.owner, group=dictionary["group"], sudo=dictionary["sudo"], recursive=recursive, silent=silent)
if hierarchy == None: hierarchy = self.hierarchy
#if owner == None: owner = self.owner
#if group == None: group = self.group
#if permission == None: permission = self.permission
file_path = Formats.FilePath(self.file_path.path)
if file_path.exists(sudo=sudo) == False:
file_path.create(
directory=True,
permission=permission,
group=group,
owner=owner,
sudo=sudo)
elif group != None or owner != None or permission != None:
file_path.permission.permission = file_path.permission.get()
_owner_,_group_ = file_path.ownership.get()
file_path.ownership.group = _group_
file_path.ownership.owner = _owner_
checkPermissionOwnership(file_path, {"sudo":sudo, "owner":owner, "group":group, "permission":permission}, recursive=recursive, silent=silent)
if hierarchy == None: raise ValueError("Define dictionary parameter: hierarchy")
for identifier, dictionary in hierarchy.items():
# - check:
try: dictionary["path"] = self.file_path.path + dictionary["path"]
except: raise ValueError("Invalid hierarchy item [{} : {}]. Specify the [path].".format(identifier, "?"))
try: dictionary["permission"]
except KeyError: dictionary["permission"] = None
try: dictionary["owner"]
except KeyError: dictionary["owner"] = None
try: dictionary["group"]
except KeyError: dictionary["group"] = None
try: dictionary["directory"]
except KeyError: dictionary["directory"] = False
try: dictionary["sudo"]
except KeyError: dictionary["sudo"] = False
try: dictionary["default_data"]
except KeyError: dictionary["default_data"] = None
try: dictionary["default"]
except KeyError: dictionary["default"] = None
try: dictionary["recursive"]
except KeyError: dictionary["recursive"] = False
# - directory:
if dictionary["directory"]:
file_path = Formats.FilePath(dictionary["path"])
if file_path.exists(sudo=dictionary["sudo"]) == False:
file_path.create(
directory=True,
permission=dictionary["permission"],
group=dictionary["group"],
owner=dictionary["owner"],
sudo=dictionary["sudo"],)
else:
file_path.permission.permission = file_path.permission.get()
_owner_,_group_ = file_path.ownership.get()
file_path.ownership.group = _group_
file_path.ownership.owner = _owner_
#if 'back_up_requests/requests' in file_path.path:
# print("file: {}, owner: {}, group: {}, permission: {}".format(file_path.path, file_path.ownership.owner, file_path.ownership.group, file_path.permission.permission))
checkPermissionOwnership(file_path, dictionary, silent=silent, recursive=dictionary["recursive"])
# - file:
elif dictionary["default_data"] != None:
file = Files.File(path=dictionary["path"])
if file.file_path.exists(sudo=dictionary["sudo"]) == False:
file.file_path.create(
data=dictionary["default_data"],
permission=dictionary["permission"],
group=dictionary["group"],
owner=dictionary["owner"],
sudo=dictionary["sudo"])
else:
					file.file_path.permission.permission = file.file_path.permission.get()
					_owner_,_group_ = file.file_path.ownership.get()
					file.file_path.ownership.group = _group_
					file.file_path.ownership.owner = _owner_
checkPermissionOwnership(file.file_path, dictionary, silent=silent)
# - dictionary:
elif dictionary["default"] != None:
file = Files.Dictionary(path=dictionary["path"])
if file.file_path.exists(sudo=dictionary["sudo"]) == False:
file.save(dictionary["default"])
file.file_path.permission.check(
permission=dictionary["permission"],
sudo=dictionary["sudo"])
file.file_path.ownership.check(
group=dictionary["group"],
owner=dictionary["owner"],
sudo=dictionary["sudo"])
else:
					file.file_path.permission.permission = file.file_path.permission.get()
					_owner_,_group_ = file.file_path.ownership.get()
					file.file_path.ownership.group = _group_
					file.file_path.ownership.owner = _owner_
checkPermissionOwnership(file.file_path, dictionary, silent=silent)
				file.check(default=dictionary["default"], save=True)
else:
raise ValueError("Invalid hierarchy item [{} : {}]. Either [directory] must be enabled, or [default_data / default] must be specified.".format(identifier, dictionary["path"]))
#
# load & save sub paths.
def load(self, path=None, format=str, default=None, sudo=False):
return Files.load(path=self.fullpath(path), format=format, sudo=sudo)
def save(self, path=None, data=None, format=str, sudo=False):
return Files.save(path=self.fullpath(path), data=data, format=format, sudo=sudo)
# returnable functions.
def paths(self,
# get recursively (bool).
recursive=False,
# get files only (bool).
files_only=False,
# get firs only (bool).
dirs_only=False,
# also get empty dirs (bool).
empty_dirs=True,
# the banned full paths (list).
banned=[],
# the banned names (list).
banned_names=[".DS_Store"],
# the banend base names (list).
banned_basenames=["__pycache__"],
# the allowed extensions (list).
extensions=["*"],
# the path (leave None to use self.path) (str, FilePath).
path=None,
):
		if dirs_only and files_only: raise ValueError("Both parameters dirs_only & files_only are True.")
if path == None: path = self.file_path.path
path = str(path)
if not Files.exists(path): return []
if isinstance(extensions, str): extensions = [extensions]
if len(banned) > 0:
l_banned = []
for i in banned:
l_banned.append(gfp.clean(f"{path}/{i}"))
banned = l_banned
paths = []
if recursive:
# does only work with recursive.
for root, dirs, files in os.walk(path):
if not dirs_only:
for name in files:
if name not in banned_names and ("*" in extensions or gfp.extension(name=name) in extensions ):
l_path = gfp.clean(path=f"{root}/{name}")
l_banned = False
for i in banned_basenames:
if f"/{i}/" in l_path: l_banned = True ; break
if l_path not in banned and not l_banned and l_path+"/" not in banned:
paths.append(l_path)
if not files_only:
for name in dirs:
if name not in banned_names and (dirs_only or "*" in extensions or "dir" in extensions ):
l_path = gfp.clean(path=f"{root}/{name}/")
l_banned = False
for i in banned_basenames:
if f"/{i}/" in l_path: l_banned = True ; break
if l_path not in banned and not l_banned and l_path+"/" not in banned:
paths.append(l_path)
if recursive:
paths += self.paths(recursive=recursive, path=l_path, dirs_only=dirs_only, files_only=files_only, banned=banned, banned_names=banned_names, empty_dirs=empty_dirs)
else:
for name in os.listdir(path):
l_path = gfp.clean(path=f"{path}/{name}")
if not dirs_only and not Files.directory(l_path):
if name not in banned_names and ("*" in extensions or gfp.extension(name=name) in extensions ):
l_banned = False
for i in banned_basenames:
if f"/{i}/" in l_path: l_banned = True ; break
if l_path not in banned and not l_banned and l_path+"/" not in banned:
paths.append(l_path)
if not files_only and Files.directory(l_path):
l_path += "/"
if name not in banned_names and (dirs_only or "*" in extensions or "dir" in extensions ):
l_banned = False
for i in banned_basenames:
if f"/{i}/" in l_path: l_banned = True ; break
if l_path not in banned and not l_banned and l_path+"/" not in banned:
paths.append(l_path)
return paths
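	# Usage sketch (illustrative; the directory path is hypothetical):
	#   dir = Directory("/tmp/project/")
	#   dir.paths(recursive=True, files_only=True, extensions=["py"])
	#   # -> all *.py paths below /tmp/project/, skipping __pycache__ and .DS_Store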
def names(self,
# get recursively (bool).
recursive=False,
# get files only (bool).
files_only=False,
# get firs only (bool).
dirs_only=False,
# also get empty dirs (bool).
empty_dirs=True,
# remove the extension names (bool).
remove_extensions=False,
# the banned full paths (list).
banned=[],
# the banned names (list).
banned_names=[".DS_Store"],
# the banend base names (list).
banned_basenames=["__pycache__"],
# the allowed extensions (list).
extensions=["*"],
# the path (leave None to use self.path) (str, FilePath).
path=None,
):
names = []
for _path_ in self.paths(dirs_only=dirs_only, files_only=files_only, empty_dirs=empty_dirs, recursive=recursive, path=path, banned=banned, banned_names=banned_names, extensions=extensions):
if remove_extensions:
name = gfp.name(path=_path_)
names.append(name[:-len(gfp.extension(name=name))])
else:
names.append(gfp.name(path=_path_))
return names
def oldest(self):
files = []
for i in os.listdir(self.file_path.path):
if i not in [".DS_Store"]:
path = f'{self.file_path.path}/{i}'.replace("//",'/')
files.append(path)
if len(files) == 0: return False
return min(files, key=os.path.getctime) # oldest is min (this is not a code error)
def newest(self):
files = []
for i in os.listdir(self.file_path.path):
if i not in [".DS_Store"]:
path = f'{self.file_path.path}/{i}'.replace("//",'/')
files.append(path)
if len(files) == 0: return False
return max(files, key=os.path.getctime) # newest is max (this is not a code error)
def random(self):
files = []
for i in os.listdir(self.file_path.path):
if i not in [".DS_Store"]:
path = f'{self.file_path.path}/{i}'.replace("//",'/')
files.append(path)
if len(files) == 0: return False
return files[random.randrange(0, len(files))]
def generate(self, length=24, type="/"):
path, paths = None, self.paths()
for x in range(1000):
path = self.join(utils.generate.shell_string(length=length), type)
if path not in paths:
break
if path == None: __error__("Failed to generate a new random path inside directory [{}].".format(self.file_path.path))
return path
def structured_join(self, name, type="", structure="alphabetical", create_base=False, sudo=False, owner=None, group=None, permission=None):
if type not in ["/", ""]:
type = "."+type
if structure == "alphabetical":
alphabetical = None
try: alphabetical = name[0].upper()
except: alphabetical = "SPECIAL"
			if str(alphabetical) not in ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z","0","1","2","3","4","5","6","7","8","9"]: alphabetical = "SPECIAL"
base = self.file_path.path + "/" + alphabetical + "/"
if create_base and os.path.exists(base) == False:
self.create(path=base, sudo=sudo, owner=owner, group=group, permission=permission)
alph_dir = base + name + type
return alph_dir
else: raise ValueError("Invalid usage, parameter structure [{}], valid options: {}".format(structure, ["alphabetical"]))
def contains(self, name=None, type="/", recursive=False):
return self.join(name, type) in self.paths(recursive=recursive)
#
def subpath(self, fullpath):
return self.fp.clean(path=fullpath.replace(self.fp.path, ""), remove_double_slash=True)
def fullpath(self, subpath):
return self.fp.clean(path=f"{self.fp.path}/{subpath}", remove_double_slash=True)
# set the icon.
def set_icon(self,
# the path to the .png / .jpg icon.
icon=None,
# the directory path (leave None to use self.fp.path).
path=None,
):
if icon == None: raise Exceptions.InvalidUsage("Define parameter: icon.")
if path == None: path = self.fp.path
if OS in ["osx", "macos"]:
utils.__execute_script__(f"""
#!/bin/bash
# settings.
icon="{icon}"
dest="{path}"
# check inputs
if [ ! -f $icon ]; then
echo "ERROR: File $1 does not exists"
exit 1
elif [[ ! $icon =~ .*\.(png|PNG|jpg|JPG) ]]; then
echo "ERROR: Icon must be a .png|.jpg file"
exit 1
elif [ -f $dest ]; then
folder=false
elif [ -d $dest ]; then
folder=true
else
			echo 'ERROR: File|Folder destination does not exist'
exit 1
fi
# create icns icon
sips -i $icon > /dev/null
DeRez -only icns $icon > /tmp/tmpicns.rsrc
# set icon
if [ "$folder" = true ]; then
Rez -append /tmp/tmpicns.rsrc -o $dest$'/Icon\r'
SetFile -a C $dest
SetFile -a V $dest$'/Icon\r'
else
Rez -append /tmp/tmpicns.rsrc -o $dest
SetFile -a C $dest
fi
# clean up
rm /tmp/tmpicns.rsrc
exit 0
""")
else:
raise OSError("Unsupported operating system.")
# index the content.
def index(self,
# the wanted options.
metrics=[],
options=["size", "mtime", "content", "name", "basename", "extension", "mount", "directory"],
# optional path (leave None to use self.path).
path=None,
):
def process(path):
info = {}
if "mtime" in metrics:
info["mtime"] = gfp.mtime(path=path, format="seconds")
if "size" in metrics:
info["size"] = gfp.size(path=path, format=int)
directory = None
if "directory" in metcics:
directory = info["directory"] = Files.directory(str(path))
if "content" in metrics:
if directory == None: raise Exceptions.InvalidUsage("Metric [directory] is required when obtaining metric [content].")
if not directory:
info["content"] = Files.load(path)
else:
info["content"] = None
if "mount" in metrics:
info["mount"] = os.path.ismount(str(path))
if "name" in metrics:
info["name"] = gfp.name(path=path)
if "extension" in metrics:
info["name"] = gfp.extension(path=path)
if "basename" in metrics:
info["basename"] = gfp.basename(path=path)
return info
#
if path == None: path = self.path
if metrics == []:
raise Exceptions.InvalidUsage(f'No metrics are specified, metric options: [{Array(options).string(joiner=" ")}].')
for i in metrics:
if i not in options:
raise Exceptions.InvalidUsage(f'Metric [{i}] is not a valid metric option, options: [{Array(options).string(joiner=" ")}].')
indexed, dir, ids = Dictionary(path=False, dictionary={}), Files.Directory(path=path), []
for _path_ in dir.paths(recursive=True, files_only=True, banned=[gfp.clean(f"{path}/Icon\r")], banned_names=[".DS_Store", "__pycache__"]):
if _path_ not in ids and "/__pycache__/" not in _path_ and "/.DS_Store" not in _path_:
indexed[_path_] = process(_path_)
ids.append(_path_)
for _path_ in dir.paths(recursive=True, dirs_only=True, banned=[gfp.clean(f"{path}/Icon\r")], banned_names=[".DS_Store", "__pycache__"]):
if _path_ not in ids and "/__pycache__/" not in _path_ and "/.DS_Store" not in _path_:
indexed[_path_] = process(_path_)
ids.append(_path_)
		return indexed.sort()
# open for desktop.
def open(self, path=None, sudo=False):
if path == None: path = self.fp.path
if sudo: sudo = "sudo "
else: sudo = ""
if OS in ["macos"]:
os.system(f"{sudo}open {path}")
elif OS in ["linux"]:
os.system(f"{sudo}nautulis {path}")
else: raise Exceptions.InvalidOperatingSystem(f"Unsupported operating system [{OS}].")
# return references of each file that includes one of the matches.
def find(self, matches:list, path=None, recursive=False, log_level=0):
if path == None: path = self.path
gfp = Formats.FilePath("")
c, references = 0, {}
for string in matches:
if not os.path.exists(path):
raise ValueError(f"Path {path} does not exist.")
elif not Files.directory(path):
raise ValueError(f"Path {path} is not a directory.")
for i_path in self.paths(recursive=recursive, files_only=True, banned_names=[".DS_Store", ".git"], path=path):
data = None
try:
data = Files.load(i_path)
except:
try:
data = f"{Files.load(i_path, format=bytes)}"
except: data = None
if data != None and string in data:
if log_level >= 0:
print("")
print(f"{i_path}:")
lines, linecount = data.split("\n"), 0
for _ in lines:
if string in lines[linecount]:
try: before = lines[linecount-1]
except: before = None
try: after = lines[linecount+1]
except: after = None
if log_level >= 0:
if before != None: print(" * "+before)
print(" * "+lines[linecount])
if after != None: print(" * "+after)
references[i_path] = lines[linecount]
linecount += 1
c += 1
if log_level >= 0 and c > 0: print("")
return references
# replace str within all files.
def replace(self, replacements:list, path=None, recursive=False, log_level=0):
if path == None: path = self.path
gfp = Formats.FilePath("")
c, updates = 0, []
for from_, to in replacements:
if not os.path.exists(path):
raise ValueError(f"Path {path} does not exist.")
elif not Files.directory(path):
raise ValueError(f"Path {path} is not a directory.")
for path in self.paths(recursive=recursive, banned_names=[".DS_Store", ".git"], path=path):
if not Files.directory(path):
try:
data = Files.load(path)
					except UnicodeDecodeError: continue
if from_ in data:
if log_level >= 0:
loader = console.Loader(f"Updating file {path}.")
Files.save(path, data.replace(from_, to))
if log_level >= 0:
loader.stop()
updates.append(path)
c += 1
return updates
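	# Usage sketch (illustrative; paths and match strings are hypothetical):
	#   dir = Directory("/tmp/project/")
	#   refs = dir.find(["TODO"], recursive=True, log_level=-1)        # {path: matching line}
	#   dir.replace([["TODO", "DONE"]], recursive=True, log_level=-1)  # -> list of updated paths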
# filepath shortcuts.
def join(self, name=None, type=""):
return self.file_path.join(name, type)
def name(self):
return self.file_path.name()
def base(self):
return self.file_path.base()
def basename(self):
return self.file_path.basename()
# support default iteration.
def __iter__(self):
return iter(self.paths())
# support '>=' & '>' operator.
def __gt__(self, directory):
if not isinstance(directory, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {directory.__class__}.")
return len(self.paths()) > len(directory.paths())
def __ge__(self, directory):
if not isinstance(directory, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {directory.__class__}.")
return len(self.paths()) >= len(directory.paths())
# support '<=' & '<' operator.
def __lt__(self, directory):
if not isinstance(directory, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {directory.__class__}.")
return len(self.paths()) < len(directory.paths())
def __le__(self, directory):
if not isinstance(directory, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {directory.__class__}.")
return len(self.paths()) <= len(directory.paths())
# support '==' & '!=' operator.
def __eq__(self, directory):
if not isinstance(directory, self.__class__):
return False
return len(self.paths()) == len(directory.paths())
def __ne__(self, directory):
if not isinstance(directory, self.__class__):
return True
return len(self.paths()) != len(directory.paths())
# support 'in' operator.
def __contains__(self, path):
paths = self.paths()
if isinstance(path, (list, Files.Array)):
for i in path:
if i in paths:
return True
return False
else:
return path in paths
# representation.
def __repr__(self):
return str(self)
#
# system functions.
def __str__(self):
return str(self.fp.path)
# content count.
def __len__(self):
return len(self.paths())
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Directory"
#
@property
def __name__(self):
return self.instance()
# return raw data.
def raw(self):
return self.fp.path
#
# the image object class.
class Image(object):
def __init__(self, path=None, image=None, load=False):
# docs.
DOCS = {
"module":"Image",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
self.image = image
if load: self.load()
#
def load(self, path=None):
if path == None: path = self.file_path.path
		self.image = _Image_.open(path)
def edit_pixel(self, pixel=[0, 0], new_pixel_tuple=None):
		pixels = self.image.load()
		pixels[pixel[0], pixel[1]] = new_pixel_tuple
self.image.save(self.file_path.path)
def convert(self,
# the input path (str, FilePath) (#1).
output=None,
# the input path (str, FilePath) (leave None to use self.fp.path)
input=None,
):
if input == None: input = self.fp.path
if output == None:
raise Exceptions.InvalidUsage("Define parameter: [output].")
img = _Image_.open(str(input))
img.save(str(output))
print(f"Successfully converted image {input} to {output}.")
def replace_pixels(self, input_path=None, output_path=None, input_hex=None, output_hex=None):
img = _Image_.open(input_path)
pixels = img.load()
input_rgb, output_rgb = input_hex, output_hex # self.hex_to_rgb(input_hex), self.hex_to_rgb(output_hex)
for i in range(img.size[0]):
for j in range(img.size[1]):
print(pixels[i,j], "VS", input_rgb)
if pixels[i,j] == input_rgb:
pixels[i,j] = output_rgb
img.save(output_path)
def replace_colors(self, input_path=None, output_path=None, hex=None):
img = _Image_.open(input_path)
pixels = img.load()
rgb = hex #self.hex_to_rgb(hex)
for i in range(img.size[0]):
for j in range(img.size[1]):
if pixels[i,j] != rgb and pixels[i,j] != (0, 0, 0, 0):
pixels[i,j] = rgb
img.save(output_path)
def rgb_to_hex(self, tuple):
return '#%02x%02x%02x' % tuple
def hex_to_rgb(self, _hex_):
return tuple(int(_hex_[i:i+2], 16) for i in (0, 2, 4))
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Image"
#
@property
def __name__(self):
return self.instance()
# return raw data.
def raw(self):
return self.fp.path
# suport eq.
def __eq__(self, var):
if var.__class__.__name__ in ["NoneType"]:
return False
else:
return str(var) == str(self)
def __ne__(self, var):
if var.__class__.__name__ in ["NoneType"]:
return True
else:
return str(var) != str(self)
# repr.
def __str__(self):
return str(self.fp)
def __repr__(self):
return str(self)
#
#
# the zip object class.
class Zip(object):
def __init__(self, path=None, check=False):
# docs.
DOCS = {
"module":"Zip",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
self.file_path = self.fp = Formats.FilePath(path, check=check)
#
def create(self,
# source can either be a string or an array.
source=None,
# remove the source file(s).
remove=False,
# sudo required to move/copy source files.
sudo=False,
):
# create tmp dir.
name = self.file_path.name().replace('.encrypted.zip','').replace("."+self.file_path.extension(),'')
tmp = Formats.FilePath(f'/tmp/zip-{utils.generate.shell_string(24)}')
tmp_content = Formats.FilePath(tmp.join(name, ""))
if tmp.exists(): tmp.delete(forced=True)
if os.path.exists(tmp.path):os.system(f"rm -fr {tmp.path}")
os.system(f"mkdir -p {tmp.path}")
if isinstance(source, str):
target = Formats.FilePath(source)
name = target.name().replace('.encrypted.zip','').replace("."+target.extension(),'')
if remove: target.move(tmp_content.path, sudo=sudo)
else: target.copy(tmp_content.path, sudo=sudo)
elif isinstance(source, list):
tmp_content.create(directory=True)
for path in source:
file_path = Formats.FilePath(path)
if remove: file_path.move("/"+tmp_content.join('/'+file_path.name(),"/"), sudo=sudo)
else: file_path.copy("/"+tmp_content.join('/'+file_path.name(),"/"), sudo=sudo)
else: raise ValueError("Parameter [source] must either be a str or list.")
# write out zip.
base = self.file_path.base()
format = self.file_path.extension()
archive_from = os.path.dirname(tmp_content.path)
archive_to = os.path.basename(tmp_content.path.strip(os.sep))
zip_path = shutil.make_archive(name, format, archive_from, archive_to)
os.system(f'mv {zip_path} {self.file_path.path}')
tmp.delete(forced=True, sudo=sudo)
#
def extract(self,
# the base extract directory.
base=None,
# remove the zip after extraction.
remove=False,
# if sudo required for removing file path.
sudo=False,):
# extract.
if base == None:
base = self.file_path.base()
with zipfile.ZipFile(self.file_path.path, 'r') as zip_ref:
zip_ref.extractall(base)
if remove: self.file_path.delete(forced=True, sudo=sudo)
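	# Usage sketch (illustrative; paths are hypothetical):
	#   zip = Zip("/tmp/backup.zip")
	#   zip.create(source="/tmp/project/")   # archive a file or directory into the zip
	#   zip.extract(base="/tmp/restored/")   # unpack into the given base directory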
#
# representation.
def __repr__(self):
return str(self)
#
# system functions.
def __str__(self):
return self.fp.path
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Zip"
#
@property
def __name__(self):
return self.instance()
# return raw data.
def raw(self):
return self.fp.path
#
#
# the bytes object class.
class Bytes(object):
def __init__(self,
# the bytes (param #1).
data=b"",
# the path (str, FilePath) (param #2).
path=False,
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"Bytes",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(data, Files.Bytes):
data = data.bytes
# bytes.
		self.bytes = data
# path.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
		if default != None and not Files.exists(self.file_path.path): self.save(bytes=default)
if load: self.load()
#
def load(self, sudo=False):
bytes = Files.load(self.file_path.path, format="bytes", sudo=sudo)
self.bytes = bytes
return bytes
def save(self, bytes=None, sudo=False):
if bytes == None: bytes = self.bytes
bytes = Formats.denitialize(bytes)
self.bytes = bytes
return Files.save(self.fp.path, bytes, format="bytes", sudo=sudo)
# suppor default iteration.
def __iter__(self):
return iter(self.bytes)
# support '==' & '!=' operator.
def __eq__(self, bytes_):
if isinstance(bytes_, bytes):
return self.bytes == bytes_
elif not isinstance(bytes_, self.__class__):
return False
return self.bytes == bytes_.bytes
def __ne__(self, bytes_):
if isinstance(bytes_, bytes):
return self.bytes != bytes_
elif not isinstance(bytes_, self.__class__):
return True
return self.bytes != bytes_.bytes
# support 'in' operator.
def __contains__(self, bytes_):
if isinstance(bytes_, (list, Files.Array)):
for i in bytes_:
if i == self.bytes:
return True
return False
else:
return bytes_ in self.bytes
#
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(self.bytes)
# content count.
def __len__(self):
return len(self.bytes)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Bytes"
#
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, b):
if isinstance(b, self.__class__):
b = b.bytes
self.bytes = b
return self
# return raw data.
def raw(self):
return self.bytes
#
#
#
#
# some default classes.
class Classes():
# the speed class.
class Speed():
# the mark function, returns a timestamp used for calculation.
def mark():
return time.time()
#
# calculate the difference between the marked timestamp & the current.
def calculate(
# the marked timestamp from Speed.mark.
stamp,
# the current timestamp (leave None to use Speed.mark)
current=None,
# round to decimals (Leave None to ignore).
decimals=None,
# normalize seconds.
normalize=False,
):
if current == None: current = Speed.mark()
diff = current - stamp
if decimals != None:
diff = round(diff, decimals)
if normalize:
diff = Speed.normalize_seconds(diff)
return diff
# normalize seconds to 10s or 1m etc.
def normalize_seconds(seconds:(int,float), decimals=1):
if seconds < 0:
raise ValueError("Can not normalize negative seconds.")
if seconds < 0.01:
return f'{int(seconds*1000)}ms'
elif seconds <= 60:
return f'{int(seconds)}s'
elif seconds <= 60*60:
return f'{round(seconds/60, decimals)}m'
elif seconds <= 60*60*24:
return f'{round(seconds/(60*60), decimals)}h'
elif seconds <= 60*60*24*30:
return f'{round(seconds/(60*60*24), decimals)}d'
elif seconds <= 60*60*24*30*12:
return f'{round(seconds/(60*60*24*30), decimals)}m'
else:
return f'{round(seconds/(60*60*24*30*12), decimals)}y'
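		# Usage sketch (illustrative):
		#   stamp = Speed.mark()
		#   ... some work ...
		#   Speed.calculate(stamp, decimals=2)   # -> elapsed seconds, e.g. 0.42
		#   Speed.normalize_seconds(90)          # -> "1.5m"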
# some default objects.
class Objects():
# the generate object class.
class Generate(object):
def __init__(self):
# docs.
DOCS = {
"module":"Generate",
"initialized":False,
"description":[],
"chapter": "Defaults", }
#
def int(self, length=6):
charset = Array(Formats.digits).string(joiner="")
return ''.join(random.choice(charset) for x in range(length))
#
def string(self, length=6, capitalize=True, digits=True):
charset = Array(Formats.alphabet).string(joiner="")
if capitalize: charset += Array(Formats.capitalized_alphabet).string(joiner="")
if digits: charset += Array(Formats.digits).string(joiner="")
return ''.join(random.choice(charset) for x in range(length))
#
# the interval object class.
class Interval(object):
def __init__(self,
# the sleep time.
sleeptime=1,
# the timeout.
timeout=60,
):
# docs.
DOCS = {
"module":"Interval",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# attributes.
self.sleeptime = sleeptime
self.timeout = timeout
#
def __int__(self):
return int(self.sleeptime)
def __iter__(self):
l = []
for _ in range(int(self.timeout/self.sleeptime)):
l.append(self)
return iter(l)
def sleep(self, chapters=1):
for _ in range(chapters):
time.sleep(int(self)/chapters)
#
#for interval in Interval(sleeptime=60, timeout=3600):
# ...
# interval.sleep()
#
# shortcuts.
FilePath = Formats.FilePath
String = Formats.String
Boolean = Formats.Boolean
Integer = Formats.Integer
Date = Formats.Date
File = Files.File
Directory = Files.Directory
Zip = Files.Zip
Image = Files.Image
Bytes = Files.Bytes
Dictionary = Files.Dictionary
Array = Files.Array
Speed = Classes.Speed
Generate = Objects.Generate
Interval = Objects.Interval
# initialized objects.
gfp = Formats.FilePath("") # is required (do not remove).
gd = gdate = Formats.Date()
#
|
[
"dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage",
"dev0s.classes.defaults.exceptions.Exceptions.FormatError",
"math.floor",
"dev0s.classes.defaults.exceptions.Exceptions.ParseError",
"datetime.datetime.today",
"dev0s.classes.defaults.exceptions.Exceptions.InstanceError",
"dev0s.classes.utils.generate.shell_string",
"dev0s.classes.utils.generate.pincode",
"shutil.disk_usage",
"dev0s.classes.defaults.exceptions.Exceptions.DuplicateError",
"dev0s.classes.utils.__execute__",
"dev0s.classes.defaults.exceptions.Exceptions.UnpackError",
"dev0s.classes.defaults.exceptions.Exceptions.InvalidOperatingSystem",
"dev0s.classes.utils.__execute_script__",
"PIL.Image.open",
"shutil.make_archive",
"math.ceil",
"dev0s.classes.defaults.exceptions.Exceptions.JSONDecodeError",
"dev0s.classes.utils.__check_memory_only__",
"dev0s.classes.console.Loader"
] |
[((14111, 14134), 'shutil.disk_usage', 'shutil.disk_usage', (['path'], {}), '(path)\n', (14128, 14134), False, 'import shutil, math\n'), ((34670, 34703), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['path'], {}), '(path)\n', (34697, 34703), False, 'from dev0s.classes import utils\n'), ((34845, 34893), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['self.file_path.path'], {}), '(self.file_path.path)\n', (34872, 34893), False, 'from dev0s.classes import utils\n'), ((55859, 55892), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['path'], {}), '(path)\n', (55886, 55892), False, 'from dev0s.classes import utils\n'), ((56023, 56071), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['self.file_path.path'], {}), '(self.file_path.path)\n', (56050, 56071), False, 'from dev0s.classes import utils\n'), ((59403, 59436), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['path'], {}), '(path)\n', (59430, 59436), False, 'from dev0s.classes import utils\n'), ((59595, 59643), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['self.file_path.path'], {}), '(self.file_path.path)\n', (59622, 59643), False, 'from dev0s.classes import utils\n'), ((61349, 61386), 'dev0s.classes.utils.generate.pincode', 'utils.generate.pincode', ([], {'length': 'length'}), '(length=length)\n', (61371, 61386), False, 'from dev0s.classes import utils\n'), ((84618, 84658), 'dev0s.classes.utils.__execute__', 'utils.__execute__', (["['sudo', 'cat', path]"], {}), "(['sudo', 'cat', path])\n", (84635, 84658), False, 'from dev0s.classes import utils\n'), ((86956, 87006), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: path."""'], {}), "('Define parameter: path.')\n", (86979, 87006), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((89201, 89251), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: path."""'], {}), "('Define parameter: path.')\n", (89224, 89251), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((89561, 89611), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: path."""'], {}), "('Define parameter: path.')\n", (89584, 89611), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((89643, 89699), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: permission."""'], {}), "('Define parameter: permission.')\n", (89666, 89699), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((90067, 90117), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: path."""'], {}), "('Define parameter: path.')\n", (90090, 90117), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((90144, 90195), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: owner."""'], {}), "('Define parameter: owner.')\n", (90167, 90195), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((90372, 90422), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: path."""'], {}), "('Define parameter: path.')\n", (90395, 90422), False, 'from dev0s.classes.defaults.exceptions 
import Exceptions\n'), ((91399, 91449), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: path."""'], {}), "('Define parameter: path.')\n", (91422, 91449), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((91660, 91710), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: path."""'], {}), "('Define parameter: path.')\n", (91683, 91710), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((92233, 92283), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: path."""'], {}), "('Define parameter: path.')\n", (92256, 92283), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((96279, 96327), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['self.file_path.path'], {}), '(self.file_path.path)\n', (96306, 96327), False, 'from dev0s.classes import utils\n'), ((96727, 96760), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['path'], {}), '(path)\n', (96754, 96760), False, 'from dev0s.classes import utils\n'), ((101415, 101448), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['path'], {}), '(path)\n', (101442, 101448), False, 'from dev0s.classes import utils\n'), ((101662, 101710), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['self.file_path.path'], {}), '(self.file_path.path)\n', (101689, 101710), False, 'from dev0s.classes import utils\n'), ((117595, 117643), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['self.file_path.path'], {}), '(self.file_path.path)\n', (117622, 117643), False, 'from dev0s.classes import utils\n'), ((117948, 117996), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['self.file_path.path'], {}), '(self.file_path.path)\n', (117975, 117996), False, 'from dev0s.classes import utils\n'), ((118290, 118338), 'dev0s.classes.utils.__check_memory_only__', 'utils.__check_memory_only__', (['self.file_path.path'], {}), '(self.file_path.path)\n', (118317, 118338), False, 'from dev0s.classes import utils\n'), ((163975, 163999), 'PIL.Image.open', '_Image_.open', (['input_path'], {}), '(input_path)\n', (163987, 163999), True, 'from PIL import Image as _Image_\n'), ((164409, 164433), 'PIL.Image.open', '_Image_.open', (['input_path'], {}), '(input_path)\n', (164421, 164433), True, 'from PIL import Image as _Image_\n'), ((167242, 167301), 'shutil.make_archive', 'shutil.make_archive', (['name', 'format', 'archive_from', 'archive_to'], {}), '(name, format, archive_from, archive_to)\n', (167261, 167301), False, 'import shutil, math\n'), ((25609, 25700), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {path.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {path.__class__}.')\n", (25631, 25700), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((25818, 25909), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {path.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {path.__class__}.')\n", (25840, 25909), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((26061, 26152), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 
'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {path.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {path.__class__}.')\n", (26083, 26152), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((26270, 26361), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {path.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {path.__class__}.')\n", (26292, 26361), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((61256, 61287), 'math.floor', 'math.floor', (['(self.value * factor)'], {}), '(self.value * factor)\n', (61266, 61287), False, 'import shutil, math\n'), ((80096, 80187), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {date.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {date.__class__}.')\n", (80118, 80187), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((80299, 80390), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {date.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {date.__class__}.')\n", (80321, 80390), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((80536, 80627), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {date.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {date.__class__}.')\n", (80558, 80627), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((80739, 80830), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {date.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {date.__class__}.')\n", (80761, 80830), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((92322, 92381), 'dev0s.classes.defaults.exceptions.Exceptions.DuplicateError', 'Exceptions.DuplicateError', (['f"""Path [{path}] already exists."""'], {}), "(f'Path [{path}] already exists.')\n", (92347, 92381), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((113789, 113900), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mul object {self.__class__.__name__} & {value.__class__.__name__}."""'], {}), "(\n f'Can not mul object {self.__class__.__name__} & {value.__class__.__name__}.'\n )\n", (113811, 113900), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((114041, 114152), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not div object {self.__class__.__name__} & {value.__class__.__name__}."""'], {}), "(\n f'Can not div object {self.__class__.__name__} & {value.__class__.__name__}.'\n )\n", (114063, 114152), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((119855, 120008), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['f"""<Dictionary.check> parameter [dicionary] requires to be a [dict, Dictionary] not [{dictionary.__class__.__name__}]."""'], {}), "(\n f'<Dictionary.check> parameter [dicionary] requires to be a [dict, Dictionary] not [{dictionary.__class__.__name__}].'\n )\n", (119878, 120008), False, 
'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((154360, 154410), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: icon."""'], {}), "('Define parameter: icon.')\n", (154383, 154410), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((154485, 155664), 'dev0s.classes.utils.__execute_script__', 'utils.__execute_script__', (['f\'\\n\\t\\t\\t\\t\\t#!/bin/bash\\n\\n\\t\\t\\t\\t\\t# settings.\\n\\t\\t\\t\\t\\ticon="{icon}"\\n\\t\\t\\t\\t\\tdest="{path}"\\n\\n\\t\\t\\t\\t\\t# check inputs\\n\\t\\t\\t\\t\\tif [ ! -f $icon ]; then \\n\\t\\t\\t\\t\\t\\techo "ERROR: File $1 does not exists"\\n\\t\\t\\t\\t\\t\\texit 1\\n\\t\\t\\t\\t\\telif [[ ! $icon =~ .*\\\\.(png|PNG|jpg|JPG) ]]; then\\n\\t\\t\\t\\t\\t\\techo "ERROR: Icon must be a .png|.jpg file"\\n\\t\\t\\t\\t\\t\\texit 1\\n\\t\\t\\t\\t\\telif [ -f $dest ]; then\\n\\t\\t\\t\\t\\t\\tfolder=false\\n\\t\\t\\t\\t\\telif [ -d $dest ]; then\\n\\t\\t\\t\\t\\t\\tfolder=true\\n\\t\\t\\t\\t\\telse\\n\\t\\t\\t\\t\\t\\techo \\\'ERROR: File|Folder destination does not exists\\\'\\n\\t\\t\\t\\t\\t\\texit 1\\n\\t\\t\\t\\t\\tfi\\n\\n\\t\\t\\t\\t\\t# create icns icon\\n\\t\\t\\t\\t\\tsips -i $icon > /dev/null\\n\\t\\t\\t\\t\\tDeRez -only icns $icon > /tmp/tmpicns.rsrc\\n\\n\\t\\t\\t\\t\\t# set icon\\n\\t\\t\\t\\t\\tif [ "$folder" = true ]; then\\n\\t\\t\\t\\t\\t\\tRez -append /tmp/tmpicns.rsrc -o $dest$\\\'/Icon\\r\\\'\\n\\t\\t\\t\\t\\t\\tSetFile -a C $dest\\n\\t\\t\\t\\t\\t\\tSetFile -a V $dest$\\\'/Icon\\r\\\'\\n\\t\\t\\t\\t\\telse\\n\\t\\t\\t\\t\\t\\tRez -append /tmp/tmpicns.rsrc -o $dest\\n\\t\\t\\t\\t\\t\\tSetFile -a C $dest\\n\\t\\t\\t\\t\\tfi\\n\\n\\t\\t\\t\\t\\t# clean up\\n\\t\\t\\t\\t\\trm /tmp/tmpicns.rsrc\\n\\t\\t\\t\\t\\texit 0\\n\\t\\t\\t\\t\\t\''], {}), '(\n f\'\\n\\t\\t\\t\\t\\t#!/bin/bash\\n\\n\\t\\t\\t\\t\\t# settings.\\n\\t\\t\\t\\t\\ticon="{icon}"\\n\\t\\t\\t\\t\\tdest="{path}"\\n\\n\\t\\t\\t\\t\\t# check inputs\\n\\t\\t\\t\\t\\tif [ ! -f $icon ]; then \\n\\t\\t\\t\\t\\t\\techo "ERROR: File $1 does not exists"\\n\\t\\t\\t\\t\\t\\texit 1\\n\\t\\t\\t\\t\\telif [[ ! 
$icon =~ .*\\\\.(png|PNG|jpg|JPG) ]]; then\\n\\t\\t\\t\\t\\t\\techo "ERROR: Icon must be a .png|.jpg file"\\n\\t\\t\\t\\t\\t\\texit 1\\n\\t\\t\\t\\t\\telif [ -f $dest ]; then\\n\\t\\t\\t\\t\\t\\tfolder=false\\n\\t\\t\\t\\t\\telif [ -d $dest ]; then\\n\\t\\t\\t\\t\\t\\tfolder=true\\n\\t\\t\\t\\t\\telse\\n\\t\\t\\t\\t\\t\\techo \\\'ERROR: File|Folder destination does not exists\\\'\\n\\t\\t\\t\\t\\t\\texit 1\\n\\t\\t\\t\\t\\tfi\\n\\n\\t\\t\\t\\t\\t# create icns icon\\n\\t\\t\\t\\t\\tsips -i $icon > /dev/null\\n\\t\\t\\t\\t\\tDeRez -only icns $icon > /tmp/tmpicns.rsrc\\n\\n\\t\\t\\t\\t\\t# set icon\\n\\t\\t\\t\\t\\tif [ "$folder" = true ]; then\\n\\t\\t\\t\\t\\t\\tRez -append /tmp/tmpicns.rsrc -o $dest$\\\'/Icon\\r\\\'\\n\\t\\t\\t\\t\\t\\tSetFile -a C $dest\\n\\t\\t\\t\\t\\t\\tSetFile -a V $dest$\\\'/Icon\\r\\\'\\n\\t\\t\\t\\t\\telse\\n\\t\\t\\t\\t\\t\\tRez -append /tmp/tmpicns.rsrc -o $dest\\n\\t\\t\\t\\t\\t\\tSetFile -a C $dest\\n\\t\\t\\t\\t\\tfi\\n\\n\\t\\t\\t\\t\\t# clean up\\n\\t\\t\\t\\t\\trm /tmp/tmpicns.rsrc\\n\\t\\t\\t\\t\\texit 0\\n\\t\\t\\t\\t\\t\'\n )\n', (154509, 155664), False, 'from dev0s.classes import utils\n'), ((160807, 160903), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {directory.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {directory.__class__}.')\n", (160829, 160903), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((161042, 161138), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {directory.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {directory.__class__}.')\n", (161064, 161138), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((161311, 161407), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {directory.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {directory.__class__}.')\n", (161333, 161407), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((161546, 161642), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {directory.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {directory.__class__}.')\n", (161568, 161642), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((163693, 163747), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Define parameter: [output]."""'], {}), "('Define parameter: [output].')\n", (163716, 163747), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((16990, 17036), 'dev0s.classes.utils.__execute__', 'utils.__execute__', (["['sudo', 'ls', '-ld', path]"], {}), "(['sudo', 'ls', '-ld', path])\n", (17007, 17036), False, 'from dev0s.classes import utils\n'), ((21826, 21900), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidOperatingSystem', 'Exceptions.InvalidOperatingSystem', (['f"""Unsupported operating system [{OS}]."""'], {}), "(f'Unsupported operating system [{OS}].')\n", (21859, 21900), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((66130, 66224), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {integer.__class__}."""'], {}), "(\n f'Can 
not compare object {self.__class__} & {integer.__class__}.')\n", (66152, 66224), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((66438, 66532), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {integer.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {integer.__class__}.')\n", (66460, 66532), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((66780, 66874), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {integer.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {integer.__class__}.')\n", (66802, 66874), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((67088, 67182), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {integer.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {integer.__class__}.')\n", (67110, 67182), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((73585, 73679), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse the date format from comparison [{comparison}]."""'], {}), "(\n f'Unable to parse the date format from comparison [{comparison}].')\n", (73606, 73679), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((73899, 73987), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse the date format from current [{current}]."""'], {}), "(\n f'Unable to parse the date format from current [{current}].')\n", (73920, 73987), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((74643, 74729), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse the date format from string [{string}]."""'], {}), "(\n f'Unable to parse the date format from string [{string}].')\n", (74664, 74729), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((75239, 75325), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse the date format from string [{string}]."""'], {}), "(\n f'Unable to parse the date format from string [{string}].')\n", (75260, 75325), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((77836, 77922), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse a Date() object from string [{string}]."""'], {}), "(\n f'Unable to parse a Date() object from string [{string}].')\n", (77857, 77922), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((79049, 79321), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse the date format from timestamp [{timestamp}]. Find out what the required format is and request a commit that updates the Date().parse_format() function with the required format (https://github.com/vandenberghinc/dev0s/)."""'], {}), "(\n f'Unable to parse the date format from timestamp [{timestamp}]. 
Find out what the required format is and request a commit that updates the Date().parse_format() function with the required format (https://github.com/vandenberghinc/dev0s/).'\n )\n", (79070, 79321), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((97756, 97849), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {string.__class__}.')\n", (97778, 97849), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((98041, 98134), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {string.__class__}.')\n", (98063, 98134), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((98359, 98452), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {string.__class__}.')\n", (98381, 98452), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((98644, 98737), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {string.__class__}.')\n", (98666, 98737), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((100462, 100607), 'dev0s.classes.defaults.exceptions.Exceptions.InstanceError', 'Exceptions.InstanceError', (['f"""Parameter [{self.__class__.__name__}.array] must be a [Array] or [list], not [{array.__class__.__name__}]."""'], {}), "(\n f'Parameter [{self.__class__.__name__}.array] must be a [Array] or [list], not [{array.__class__.__name__}].'\n )\n", (100486, 100607), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((111985, 112077), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {array.__class__}.')\n", (112007, 112077), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((112278, 112370), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {array.__class__}.')\n", (112300, 112370), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((112607, 112699), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {array.__class__}.')\n", (112629, 112699), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((112900, 112992), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {array.__class__}.')\n", (112922, 112992), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((116098, 116258), 
'dev0s.classes.defaults.exceptions.Exceptions.InstanceError', 'Exceptions.InstanceError', (['f"""Parameter [{self.__class__.__name__}.dictionary] must be a [Dictionary] or [dict], not [{dictionary.__class__.__name__}]."""'], {}), "(\n f'Parameter [{self.__class__.__name__}.dictionary] must be a [Dictionary] or [dict], not [{dictionary.__class__.__name__}].'\n )\n", (116122, 116258), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((125222, 125295), 'dev0s.classes.defaults.exceptions.Exceptions.UnpackError', 'Exceptions.UnpackError', (['f"""Dictionary does not contain attribute [{key}]."""'], {}), "(f'Dictionary does not contain attribute [{key}].')\n", (125244, 125295), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((134522, 134619), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {dictionary.__class__}.')\n", (134544, 134619), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((134856, 134953), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {dictionary.__class__}.')\n", (134878, 134953), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((135223, 135320), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {dictionary.__class__}.')\n", (135245, 135320), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((135557, 135654), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {dictionary.__class__}.')\n", (135579, 135654), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((152666, 152708), 'dev0s.classes.utils.generate.shell_string', 'utils.generate.shell_string', ([], {'length': 'length'}), '(length=length)\n', (152693, 152708), False, 'from dev0s.classes import utils\n'), ((157993, 158067), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidOperatingSystem', 'Exceptions.InvalidOperatingSystem', (['f"""Unsupported operating system [{OS}]."""'], {}), "(f'Unsupported operating system [{OS}].')\n", (158026, 158067), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((6640, 6749), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse a bool from ({variable.__class__.__name__}) [{variable}]."""'], {}), "(\n f'Unable to parse a bool from ({variable.__class__.__name__}) [{variable}].'\n )\n", (6661, 6749), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((24692, 24779), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {path.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {path.__class__}.')\n", (24714, 24779), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((24985, 25072), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object 
{self.__class__} & {path.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {path.__class__}.')\n", (25007, 25072), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((25307, 25394), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {path.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {path.__class__}.')\n", (25329, 25394), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((49539, 49628), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {string.__class__}.')\n", (49561, 49628), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((49853, 49942), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {string.__class__}.')\n", (49875, 49942), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((50188, 50277), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {string.__class__}.')\n", (50210, 50277), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((50513, 50602), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {string.__class__}.')\n", (50535, 50602), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((51179, 51272), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {string.__class__}.')\n", (51201, 51272), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((51505, 51598), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {string.__class__}.')\n", (51527, 51598), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((51865, 51958), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {string.__class__}.')\n", (51887, 51958), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((52191, 52284), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not compare object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not compare object {self.__class__} & {string.__class__}.')\n", (52213, 52284), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((53041, 53133), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not concat object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not concat object {self.__class__} & {string.__class__}.')\n", (53063, 53133), False, 'from 
dev0s.classes.defaults.exceptions import Exceptions\n'), ((54549, 54641), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not assign object {self.__class__} & {string.__class__}."""'], {}), "(\n f'Can not assign object {self.__class__} & {string.__class__}.')\n", (54571, 54641), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((57897, 57990), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not assign object {self.__class__} & {boolean.__class__}."""'], {}), "(\n f'Can not assign object {self.__class__} & {boolean.__class__}.')\n", (57919, 57990), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((61197, 61218), 'math.ceil', 'math.ceil', (['self.value'], {}), '(self.value)\n', (61206, 61218), False, 'import shutil, math\n'), ((61692, 61780), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {value.__class__}.')\n", (61714, 61780), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((62021, 62109), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not sub object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not sub object {self.__class__} & {value.__class__}.')\n", (62043, 62109), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((62351, 62439), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {value.__class__}.')\n", (62373, 62439), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((62673, 62761), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not sub object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not sub object {self.__class__} & {value.__class__}.')\n", (62695, 62761), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((62994, 63082), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mod object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not mod object {self.__class__} & {value.__class__}.')\n", (63016, 63082), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((63323, 63411), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mul object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not mul object {self.__class__} & {value.__class__}.')\n", (63345, 63411), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((63652, 63740), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mul object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not mul object {self.__class__} & {value.__class__}.')\n", (63674, 63740), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((63982, 64070), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mul object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not mul object {self.__class__} & {value.__class__}.')\n", (64004, 64070), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), 
((64315, 64403), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mul object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not mul object {self.__class__} & {value.__class__}.')\n", (64337, 64403), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((64649, 64737), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mul object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not mul object {self.__class__} & {value.__class__}.')\n", (64671, 64737), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((64982, 65070), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mul object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not mul object {self.__class__} & {value.__class__}.')\n", (65004, 65070), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((65336, 65424), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not mul object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not mul object {self.__class__} & {value.__class__}.')\n", (65358, 65424), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((65668, 65759), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not matmul object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not matmul object {self.__class__} & {value.__class__}.')\n", (65690, 65759), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((68973, 69064), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not assign object {self.__class__} & {value.__class__}."""'], {}), "(\n f'Can not assign object {self.__class__} & {value.__class__}.')\n", (68995, 69064), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((72234, 72250), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (72248, 72250), False, 'from datetime import datetime, timezone\n'), ((81610, 81696), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {add.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {add.__class__}.')\n", (81632, 81696), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((81939, 82026), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not iadd object {self.__class__} & {add.__class__}."""'], {}), "(\n f'Can not iadd object {self.__class__} & {add.__class__}.')\n", (81961, 82026), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((82283, 82369), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not sub object {self.__class__} & {add.__class__}."""'], {}), "(\n f'Can not sub object {self.__class__} & {add.__class__}.')\n", (82305, 82369), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((82612, 82699), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not isub object {self.__class__} & {add.__class__}."""'], {}), "(\n f'Can not isub object {self.__class__} & {add.__class__}.')\n", (82634, 82699), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((82974, 83060), 
'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not sub object {self.__class__} & {add.__class__}."""'], {}), "(\n f'Can not sub object {self.__class__} & {add.__class__}.')\n", (82996, 83060), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((103473, 103560), 'dev0s.classes.defaults.exceptions.Exceptions.InstanceError', 'Exceptions.InstanceError', (['"""Parameter [item] must either be None, String or Array."""'], {}), "(\n 'Parameter [item] must either be None, String or Array.')\n", (103497, 103560), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((110208, 110296), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {array.__class__}.')\n", (110230, 110296), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((110520, 110608), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {array.__class__}.')\n", (110542, 110608), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((110818, 110906), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {array.__class__}.')\n", (110840, 110906), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((111193, 111281), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {array.__class__}.')\n", (111215, 111281), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((111587, 111675), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {array.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {array.__class__}.')\n", (111609, 111675), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((126423, 126546), 'dev0s.classes.defaults.exceptions.Exceptions.InstanceError', 'Exceptions.InstanceError', (['f"""Parameter [item] must either be [None], [String] or [Array], not [{item.__class__}]."""'], {}), "(\n f'Parameter [item] must either be [None], [String] or [Array], not [{item.__class__}].'\n )\n", (126447, 126546), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((132224, 132317), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {dictionary.__class__}.')\n", (132246, 132317), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((132610, 132703), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {dictionary.__class__}.')\n", (132632, 132703), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((134064, 134157), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can 
not add object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {dictionary.__class__}.')\n", (134086, 134157), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((156110, 156205), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['"""Metric [directory] is required when obtaining metric [content]."""'], {}), "(\n 'Metric [directory] is required when obtaining metric [content].')\n", (156133, 156205), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((166131, 166162), 'dev0s.classes.utils.generate.shell_string', 'utils.generate.shell_string', (['(24)'], {}), '(24)\n', (166158, 166162), False, 'from dev0s.classes import utils\n'), ((6870, 6978), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse a int from ({variable.__class__.__name__}) [{variable}]."""'], {}), "(\n f'Unable to parse a int from ({variable.__class__.__name__}) [{variable}].'\n )\n", (6891, 6978), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((71701, 71973), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse the date format from timestamp [{timestamp}]. Find out what the required format is and request a commit that updates the Date().parse_format() function with the required format (https://github.com/vandenberghinc/dev0s/)."""'], {}), "(\n f'Unable to parse the date format from timestamp [{timestamp}]. Find out what the required format is and request a commit that updates the Date().parse_format() function with the required format (https://github.com/vandenberghinc/dev0s/).'\n )\n", (71722, 71973), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((87216, 87288), 'dev0s.classes.defaults.exceptions.Exceptions.JSONDecodeError', 'Exceptions.JSONDecodeError', (['f"""Unable to dump expected json data: {data}"""'], {}), "(f'Unable to dump expected json data: {data}')\n", (87242, 87288), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((87630, 87733), 'dev0s.classes.console.Loader', 'console.Loader', (['f"""&RED&Do not interrupt!&END& Saving file [{path}] (attempt: {__attempt__})."""'], {}), "(\n f'&RED&Do not interrupt!&END& Saving file [{path}] (attempt: {__attempt__}).'\n )\n", (87644, 87733), False, 'from dev0s.classes import console\n'), ((7125, 7235), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse a float from ({variable.__class__.__name__}) [{variable}]."""'], {}), "(\n f'Unable to parse a float from ({variable.__class__.__name__}) [{variable}].'\n )\n", (7146, 7235), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((8635, 8725), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['f"""Specified format [{format}] is not a valid format option."""'], {}), "(\n f'Specified format [{format}] is not a valid format option.')\n", (8658, 8725), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((133167, 133260), 'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {dictionary.__class__}.')\n", (133189, 133260), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((133666, 133759), 
'dev0s.classes.defaults.exceptions.Exceptions.FormatError', 'Exceptions.FormatError', (['f"""Can not add object {self.__class__} & {dictionary.__class__}."""'], {}), "(\n f'Can not add object {self.__class__} & {dictionary.__class__}.')\n", (133688, 133759), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((160142, 160182), 'dev0s.classes.console.Loader', 'console.Loader', (['f"""Updating file {path}."""'], {}), "(f'Updating file {path}.')\n", (160156, 160182), False, 'from dev0s.classes import console\n'), ((7374, 7482), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse a str from ({variable.__class__.__name__}) [{variable}]."""'], {}), "(\n f'Unable to parse a str from ({variable.__class__.__name__}) [{variable}].'\n )\n", (7395, 7482), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((16524, 16619), 'dev0s.classes.defaults.exceptions.Exceptions.InvalidUsage', 'Exceptions.InvalidUsage', (['f"""Selected an invalid size format [{format}], options {options}."""'], {}), "(\n f'Selected an invalid size format [{format}], options {options}.')\n", (16547, 16619), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((85260, 85289), 'dev0s.classes.defaults.exceptions.Exceptions.JSONDecodeError', 'Exceptions.JSONDecodeError', (['e'], {}), '(e)\n', (85286, 85289), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((85580, 85609), 'dev0s.classes.defaults.exceptions.Exceptions.JSONDecodeError', 'Exceptions.JSONDecodeError', (['e'], {}), '(e)\n', (85606, 85609), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((7687, 7798), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse an array from ({variable.__class__.__name__}) [{variable}]."""'], {}), "(\n f'Unable to parse an array from ({variable.__class__.__name__}) [{variable}].'\n )\n", (7708, 7798), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((8247, 8356), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse a dict from ({variable.__class__.__name__}) [{variable}]."""'], {}), "(\n f'Unable to parse a dict from ({variable.__class__.__name__}) [{variable}].'\n )\n", (8268, 8356), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((7960, 8071), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse an array from ({variable.__class__.__name__}) [{variable}]."""'], {}), "(\n f'Unable to parse an array from ({variable.__class__.__name__}) [{variable}].'\n )\n", (7981, 8071), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n'), ((8518, 8627), 'dev0s.classes.defaults.exceptions.Exceptions.ParseError', 'Exceptions.ParseError', (['f"""Unable to parse a dict from ({variable.__class__.__name__}) [{variable}]."""'], {}), "(\n f'Unable to parse a dict from ({variable.__class__.__name__}) [{variable}].'\n )\n", (8539, 8627), False, 'from dev0s.classes.defaults.exceptions import Exceptions\n')]
|
import pygame
from pygame.locals import *
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
def main():
pygame.init()
glutInit()
display = (800,600)
pygame.display.set_mode(display, DOUBLEBUF|OPENGL)
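    # set up a perspective projection and pull the camera back so the torus is in view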
gluPerspective(45, (display[0]/display[1]), 0.1, 50.0)
glTranslatef(0.0, 0.0, -5)
while True:
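        # quit cleanly when the window is closed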
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
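        # rotate the scene a little each frame, clear the buffers and redraw the wire torus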
glRotatef(1, 0, 1, 0)
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
glColor3f(0.0, 1.0, 0.0)
glutWireTorus(0.2, 0.8, 50, 50)
pygame.display.flip()
pygame.time.wait(10)
main()
|
[
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.time.wait",
"pygame.display.set_mode",
"pygame.display.flip"
] |
[((132, 145), 'pygame.init', 'pygame.init', ([], {}), '()\n', (143, 145), False, 'import pygame\n'), ((180, 232), 'pygame.display.set_mode', 'pygame.display.set_mode', (['display', '(DOUBLEBUF | OPENGL)'], {}), '(display, DOUBLEBUF | OPENGL)\n', (203, 232), False, 'import pygame\n'), ((346, 364), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (362, 364), False, 'import pygame\n'), ((567, 588), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (586, 588), False, 'import pygame\n'), ((591, 611), 'pygame.time.wait', 'pygame.time.wait', (['(10)'], {}), '(10)\n', (607, 611), False, 'import pygame\n'), ((403, 416), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (414, 416), False, 'import pygame\n')]
|
from django.db import models
# Create your models here.
##########################################################################
# Vote
class Vote(models.Model):
data=models.CharField(max_length=255)
##########################################################################
|
[
"django.db.models.CharField"
] |
[((171, 203), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (187, 203), False, 'from django.db import models\n')]
|
import argparse
import torch
def get_args():
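    # build and parse the command-line arguments for the RL experiment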
parser = argparse.ArgumentParser(description='RL')
parser.add_argument('--algo', default='a2c',
help='algorithm to use: a2c | ppo ')
parser.add_argument('--lr', type=float, default=7e-5,
                        help='learning rate (default: 7e-5)')
parser.add_argument('--eps', type=float, default=1e-5,
help='RMSprop optimizer epsilon (default: 1e-5)')
parser.add_argument('--alpha', type=float, default=0.99,
                        help='RMSprop optimizer alpha (default: 0.99)')
parser.add_argument('--gamma', type=float, default=0.99,
help='discount factor for rewards (default: 0.99)')
parser.add_argument('--max-grad-norm', type=float, default=0.5,
                        help='max norm of gradients (default: 0.5)')
parser.add_argument('--seed', type=int, default=1,
help='random seed (default: 1)')
parser.add_argument('--num-processes', type=int, default=1,
                        help='how many training CPU processes to use (default: 1)')
parser.add_argument('--num-steps', type=int, default=32,
                        help='number of forward steps in A2C (default: 32)')
parser.add_argument('--clip-param', type=float, default=0.2,
help='clip parameter (default: 0.2)')
parser.add_argument('--log-interval', type=int, default=50,
                        help='log interval, one log per n updates (default: 50)')
parser.add_argument('--num-frames', type=int, default=80000,
                        help='number of frames to train (default: 80000)')
parser.add_argument('--cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--obs_size', type=int, default=200,
help='observation vector size')
parser.add_argument('--cycle_len', type=int, default=500,
                        help='cycle length')
parser.add_argument('--debug', action='store_true', default=False,
help='whether to record the logfile')
parser.add_argument('--num_models', type=int, default=3,
help='number of the model to use')
parser.add_argument('--beta', type=float, default=1,
help='balance the accuracy and latency when calculate the reward')
parser.add_argument('--tau', type=float, default=2,
help='max waiting time for enqueue')
parser.add_argument('--max_latency', type=float, default=16,
help='accept latency for each request')
parser.add_argument('--policy', choices=['async', 'sync'], default='async', help='policy')
args = parser.parse_args()
print("cuda: %s" % str(args.cuda))
if args.cuda:
assert torch.cuda.is_available(), 'CUDA is not available in this machine!'
return args
if __name__ == '__main__':
get_args()
|
[
"torch.cuda.is_available",
"argparse.ArgumentParser"
] |
[((60, 101), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""RL"""'}), "(description='RL')\n", (83, 101), False, 'import argparse\n'), ((2889, 2914), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2912, 2914), False, 'import torch\n')]
|
# course: ICS3U1 2019
# exercise: Culminating Activity
# date: 2019-12-06
# student number: 340926187
# name: <NAME>
# description: Two players (Mr Chun & Mr Pileggi) running around the school
# collecting food for the food drive.
# sprite classes
import pygame
import random
import math
import os
from settings import *
class Player(pygame.sprite.Sprite):
"""
player class that contains all data and functions related to the player
"""
def __init__(self, game, x, y, playerNum):
"""
        initializes a player sprite when an instance is created in the given
        game, at the x and y parameters, and with the given player number
"""
self.playerNum = playerNum
self.groups = game.all_sprites, game.players
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
# image selection for each player
if self.playerNum == 1:
self.image = pygame.transform.rotate(self.game.player1_image, 90)
else:
self.image = pygame.transform.rotate(self.game.player2_image, 90)
self.rect = self.image.get_rect()
# setting the players base movement velocity
self.velX, self.velY = 0, 0
# setting the players position on the grid
self.x = x * tileSize - tileSize
self.y = y * tileSize - tileSize
# players starting score
self.score = 0
# if joysticks are connected, enable joystick controls for the player
self.joystick_count = pygame.joystick.get_count()
if self.joystick_count > 0:
self.joystick_enabled = True
else:
self.joystick_enabled = False
def get_keys(self):
"""
        checks for all keys pressed and changes the player's velocity on that
        axis to the player speed variable
"""
self.velX, self.velY = 0, 0
keys = pygame.key.get_pressed()
# player 1 controls
if self.playerNum == 1:
if keys[pygame.K_a]:
self.velX = -player_speed
if keys[pygame.K_d]:
self.velX = player_speed
if keys[pygame.K_w]:
self.velY = -player_speed
if keys[pygame.K_s]:
self.velY = player_speed
# player 2 controls
else:
if keys[pygame.K_LEFT]:
self.velX = -player_speed
if keys[pygame.K_RIGHT]:
self.velX = player_speed
if keys[pygame.K_UP]:
self.velY = -player_speed
if keys[pygame.K_DOWN]:
self.velY = player_speed
        # if moving diagonally, scale each axis by ~0.701 (about 1/sqrt(2)) so the overall speed stays the same
if self.velX > 0 and self.velY > 0:
self.velX = player_speed * 0.701
self.velY = player_speed * 0.701
elif self.velX < 0 and self.velY < 0:
self.velX = player_speed * -0.701
self.velY = player_speed * -0.701
def get_joystick_axis(self):
"""
changes the velocity of the character in the x and y based on joystick
input
"""
        # joystick controls for two separate controllers
if self.joystick_count == 2:
# joystick control for player 1
if self.playerNum == 1:
# joystick initialization
joystick = pygame.joystick.Joystick(1)
joystick.init()
# different joystick settings for Xbox controllers
                if joystick.get_name() in ('Xbox Wireless Controller', 'Controller (Xbox One For Windows)'):
# checks for axis movement and changes velX and velY
if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0:
self.velX += joystick.get_axis(0) * player_speed
self.velY += joystick.get_axis(1) * player_speed
else:
if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0:
self.velX += joystick.get_axis(1) * player_speed
self.velY -= joystick.get_axis(0) * player_speed
# joystick control for player 2
elif self.playerNum == 2:
# joystick initialization
joystick = pygame.joystick.Joystick(0)
joystick.init()
# Different joystick settings for Xbox controllers
                if joystick.get_name() in ('Xbox Wireless Controller', 'Controller (Xbox One For Windows)'):
# checks for axis movement and changes velX and velY
if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0:
self.velX += joystick.get_axis(0) * player_speed
self.velY += joystick.get_axis(1) * player_speed
else:
if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0:
self.velX += joystick.get_axis(1) * player_speed
self.velY -= joystick.get_axis(0) * player_speed
# joystick controls for a single controller
elif self.joystick_count == 1:
# joystick control for player 1
if self.playerNum == 1:
# joystick initialization
joystick = pygame.joystick.Joystick(0)
joystick.init()
# different joystick settings for Xbox controllers
                if joystick.get_name() in ('Xbox Wireless Controller', 'Controller (Xbox One For Windows)'):
# checks for axis movement and changes velX and velY
if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0:
self.velX += joystick.get_axis(0) * player_speed
self.velY += joystick.get_axis(1) * player_speed
else:
if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0:
self.velX += joystick.get_axis(1) * player_speed
self.velY -= joystick.get_axis(0) * player_speed
# joystick control for player 2
elif self.playerNum == 2:
# joystick initialization
joystick = pygame.joystick.Joystick(0)
joystick.init()
# different joystick settings for Xbox controllers
                if joystick.get_name() in ('Xbox Wireless Controller', 'Controller (Xbox One For Windows)'):
# checks for axis movement and changes velX and velY
if round(joystick.get_axis(4)) != 0 or round(joystick.get_axis(3)) != 0:
self.velX += joystick.get_axis(4) * player_speed
self.velY += joystick.get_axis(3) * player_speed
else:
if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0:
self.velX += joystick.get_axis(2) * player_speed
self.velY -= joystick.get_axis(3) * player_speed
def direction(self):
"""
rotates the player sprite based on the current direction and new
direction
"""
# player 1 rotation
if self.playerNum == 1:
if self.velX > 100:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player1_image, 45)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player1_image, -45)
else:
self.image = pygame.transform.rotate(self.game.player1_image, 0)
elif self.velX < -100:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player1_image, 135)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player1_image, -135)
else:
self.image = pygame.transform.rotate(self.game.player1_image, 180)
else:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player1_image, 90)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player1_image, -90)
# player 2 rotation
else:
if self.velX > 100:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player2_image, 45)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player2_image, -45)
else:
self.image = pygame.transform.rotate(self.game.player2_image, 0)
elif self.velX < -100:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player2_image, 135)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player2_image, -135)
else:
self.image = pygame.transform.rotate(self.game.player2_image, 180)
else:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player2_image, 90)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player2_image, -90)
def wall_collision(self, axis):
"""
        checks for player collision with all wall sprites on the given axis
        and prevents the player from moving into them
"""
if axis == 'x':
collides = pygame.sprite.spritecollide(self, self.game.walls, False)
if collides:
if self.velX > 0:
self.x = collides[0].rect.left - self.rect.width
if self.velX < 0:
self.x = collides[0].rect.right
self.velX = 0
self.rect.x = self.x
if axis == 'y':
collides = pygame.sprite.spritecollide(self, self.game.walls, False)
if collides:
if self.velY > 0:
self.y = collides[0].rect.top - self.rect.height
if self.velY < 0:
self.y = collides[0].rect.bottom
self.velY = 0
self.rect.y = self.y
def player_collision(self, axis):
"""
        checks for collision with the other player on the given axis and
        prevents the players from moving into each other
"""
# checks for player 1 collision to player 2
if self.playerNum == 1:
if axis == 'x':
if self.rect.colliderect(self.game.player2):
if self.velX > 0:
self.x = self.game.player2.rect.left - self.rect.width
if self.velX < 0:
self.x = self.game.player2.rect.right
self.velX = 0
self.rect.x = self.x
if axis == 'y':
if self.rect.colliderect(self.game.player2):
if self.velY > 0:
self.y = self.game.player2.rect.top - self.rect.height
if self.velY < 0:
self.y = self.game.player2.rect.bottom
self.velY = 0
self.rect.y = self.y
# checks for player 2 collision to player 1
else:
if axis == 'x':
if self.rect.colliderect(self.game.player1):
if self.velX > 0:
self.x = self.game.player1.rect.left - self.rect.width
if self.velX < 0:
self.x = self.game.player1.rect.right
self.velX = 0
self.rect.x = self.x
if axis == 'y':
if self.rect.colliderect(self.game.player1):
if self.velY > 0:
self.y = self.game.player1.rect.top - self.rect.height
if self.velY < 0:
self.y = self.game.player1.rect.bottom
self.velY = 0
self.rect.y = self.y
def food_collision(self):
"""
        checks for player collision with all food sprites, killing any food sprite it collides with and adding 1 to the player's score
"""
collides = pygame.sprite.spritecollide(self, self.game.food, True)
if collides:
self.score += 1
def update(self):
"""
updates the players position
"""
self.get_keys()
        if self.joystick_enabled:
self.get_joystick_axis()
self.direction()
self.x += self.velX * self.game.dt
self.y += self.velY * self.game.dt
self.rect.x = self.x
self.wall_collision('x')
self.player_collision('x')
self.rect.y = self.y
self.wall_collision('y')
self.player_collision('y')
self.food_collision()
class Wall(pygame.sprite.Sprite):
"""
class to contain all the data for wall sprites
"""
def __init__(self, game, x, y):
"""
        initializes a wall sprite when an instance is created in the game
        parameter, at the x and y parameters
"""
self.groups = game.all_sprites, game.walls
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.wall_image
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * tileSize
self.rect.y = y * tileSize
class Floor(pygame.sprite.Sprite):
"""
class to contain all the data for floor sprites
"""
def __init__(self, game, x, y):
"""
        initializes a floor sprite when an instance is created in the game
        parameter, at the x and y parameters
"""
self.groups = game.all_sprites, game.floor
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.floor_image
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * tileSize
self.rect.y = y * tileSize
class Food(pygame.sprite.Sprite):
"""
class to contain all the data for food sprites
"""
def __init__(self, game, x, y):
"""
        initializes a food sprite when an instance is created in the game
        parameter, at the x and y parameters
"""
self.groups = game.all_sprites, game.food
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
# picks random image for the sprite
self.image = pygame.image.load(os.path.join(food_folder, (random.choice(food_images)))).convert_alpha()
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * tileSize
self.rect.y = y * tileSize
# checks if the sprite is allowed to spawn in the x and y
self.spawnable = False
collided = pygame.sprite.spritecollide(self, self.game.floor, False)
for sprite in collided:
if self.x == sprite.x and self.y == sprite.y:
self.spawnable = True
        if not self.spawnable:
self.kill()
|
[
"pygame.joystick.get_count",
"random.choice",
"pygame.sprite.spritecollide",
"pygame.sprite.Sprite.__init__",
"pygame.joystick.Joystick",
"pygame.transform.rotate",
"pygame.key.get_pressed"
] |
[((821, 869), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self', 'self.groups'], {}), '(self, self.groups)\n', (850, 869), False, 'import pygame\n'), ((1572, 1599), 'pygame.joystick.get_count', 'pygame.joystick.get_count', ([], {}), '()\n', (1597, 1599), False, 'import pygame\n'), ((1953, 1977), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (1975, 1977), False, 'import pygame\n'), ((12547, 12602), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'self.game.food', '(True)'], {}), '(self, self.game.food, True)\n', (12574, 12602), False, 'import pygame\n'), ((13513, 13561), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self', 'self.groups'], {}), '(self, self.groups)\n', (13542, 13561), False, 'import pygame\n'), ((14117, 14165), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self', 'self.groups'], {}), '(self, self.groups)\n', (14146, 14165), False, 'import pygame\n'), ((14718, 14766), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self', 'self.groups'], {}), '(self, self.groups)\n', (14747, 14766), False, 'import pygame\n'), ((15217, 15274), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'self.game.floor', '(False)'], {}), '(self, self.game.floor, False)\n', (15244, 15274), False, 'import pygame\n'), ((995, 1047), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player1_image', '(90)'], {}), '(self.game.player1_image, 90)\n', (1018, 1047), False, 'import pygame\n'), ((1087, 1139), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(90)'], {}), '(self.game.player2_image, 90)\n', (1110, 1139), False, 'import pygame\n'), ((9742, 9799), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'self.game.walls', '(False)'], {}), '(self, self.game.walls, False)\n', (9769, 9799), False, 'import pygame\n'), ((10128, 10185), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'self.game.walls', '(False)'], {}), '(self, self.game.walls, False)\n', (10155, 10185), False, 'import pygame\n'), ((3404, 3431), 'pygame.joystick.Joystick', 'pygame.joystick.Joystick', (['(1)'], {}), '(1)\n', (3428, 3431), False, 'import pygame\n'), ((4362, 4389), 'pygame.joystick.Joystick', 'pygame.joystick.Joystick', (['(0)'], {}), '(0)\n', (4386, 4389), False, 'import pygame\n'), ((5408, 5435), 'pygame.joystick.Joystick', 'pygame.joystick.Joystick', (['(0)'], {}), '(0)\n', (5432, 5435), False, 'import pygame\n'), ((7475, 7527), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player1_image', '(45)'], {}), '(self.game.player1_image, 45)\n', (7498, 7527), False, 'import pygame\n'), ((8564, 8616), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(45)'], {}), '(self.game.player2_image, 45)\n', (8587, 8616), False, 'import pygame\n'), ((6366, 6393), 'pygame.joystick.Joystick', 'pygame.joystick.Joystick', (['(0)'], {}), '(0)\n', (6390, 6393), False, 'import pygame\n'), ((7599, 7652), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player1_image', '(-45)'], {}), '(self.game.player1_image, -45)\n', (7622, 7652), False, 'import pygame\n'), ((7708, 7759), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player1_image', '(0)'], {}), '(self.game.player1_image, 0)\n', (7731, 7759), False, 'import pygame\n'), ((7865, 7918), 'pygame.transform.rotate', 'pygame.transform.rotate', 
(['self.game.player1_image', '(135)'], {}), '(self.game.player1_image, 135)\n', (7888, 7918), False, 'import pygame\n'), ((8242, 8294), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player1_image', '(90)'], {}), '(self.game.player1_image, 90)\n', (8265, 8294), False, 'import pygame\n'), ((8688, 8741), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(-45)'], {}), '(self.game.player2_image, -45)\n', (8711, 8741), False, 'import pygame\n'), ((8797, 8848), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(0)'], {}), '(self.game.player2_image, 0)\n', (8820, 8848), False, 'import pygame\n'), ((8954, 9007), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(135)'], {}), '(self.game.player2_image, 135)\n', (8977, 9007), False, 'import pygame\n'), ((9331, 9383), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(90)'], {}), '(self.game.player2_image, 90)\n', (9354, 9383), False, 'import pygame\n'), ((14903, 14929), 'random.choice', 'random.choice', (['food_images'], {}), '(food_images)\n', (14916, 14929), False, 'import random\n'), ((7990, 8044), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player1_image', '(-135)'], {}), '(self.game.player1_image, -135)\n', (8013, 8044), False, 'import pygame\n'), ((8100, 8153), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player1_image', '(180)'], {}), '(self.game.player1_image, 180)\n', (8123, 8153), False, 'import pygame\n'), ((8366, 8419), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player1_image', '(-90)'], {}), '(self.game.player1_image, -90)\n', (8389, 8419), False, 'import pygame\n'), ((9079, 9133), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(-135)'], {}), '(self.game.player2_image, -135)\n', (9102, 9133), False, 'import pygame\n'), ((9189, 9242), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(180)'], {}), '(self.game.player2_image, 180)\n', (9212, 9242), False, 'import pygame\n'), ((9455, 9508), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.game.player2_image', '(-90)'], {}), '(self.game.player2_image, -90)\n', (9478, 9508), False, 'import pygame\n')]
|
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from math import inf
from typing import Iterator, Any, List, Dict, Type, Optional
EPSILON = 1e-4
class Error(Exception):
pass
class ConstraintError(Error):
pass
class Constraint(metaclass=ABCMeta):
@abstractmethod
def isvalid(self, schedule: "Schedule", chunk: "Chunk") -> bool:
pass
def geterror(self, schedule: "Schedule", chunk: "Chunk") -> str:
return "'{}' constraint is not met".format(type(self).__name__)
class Property(metaclass=ABCMeta):
@abstractmethod
def get(self, schedule: "Schedule") -> Any:
pass
class Operator(metaclass=ABCMeta):
@abstractmethod
def apply(self, schedule: "Schedule") -> Any:
pass
class Task:
def __init__(self, name: str):
self.name = name
self.constraints = OrderedDict()
def __contains__(self, constraint_cls: Type["Constraint"]) -> bool:
return constraint_cls in self.constraints
def __iter__(self) -> Iterator[Type["Constraint"]]:
return iter(self.constraints)
def __getitem__(self, constraint_cls: Type["Constraint"]) -> "Constraint":
return self.constraints[constraint_cls]
def __getattr__(self, attr: str):
for ctr in self.constraints.values():
if attr in ctr.__dict__:
return ctr.__dict__[attr]
raise AttributeError("'{}' task has no attribute '{}'".format(self.name, attr))
def set(self, constraint: "Constraint") -> "Task":
self.constraints[type(constraint)] = constraint
return self
class Chunk:
def __init__(self, task: "Task", start_time: float, proctimes: Dict[Any, float]):
self.task = task
self.start_time = start_time
self.proctimes = proctimes
def completion_time(self, node: Any) -> float:
if node in self.proctimes:
return self.start_time + self.proctimes[node]
else:
return inf
def isvalid(self, schedule: "Schedule") -> bool:
for ctr in self.task.constraints.values():
if not ctr.isvalid(schedule, self):
return False
return True
def append_to(self, schedule: "Schedule"):
for ctr in self.task.constraints.values():
if not ctr.isvalid(schedule, self):
raise ConstraintError(ctr.geterror(schedule, self))
if self.task in schedule.taskmap:
schedule.taskmap[self.task].append(self)
else:
schedule.taskmap[self.task] = [self]
for node in self.proctimes:
if node in schedule.nodemap:
schedule.nodemap[node].add(self)
else:
schedule.nodemap[node] = ChunkTree(node).add(self)
def remove_from(self, schedule: "Schedule"):
schedule.taskmap[self.task].remove(self)
for node in self.proctimes:
schedule.nodemap[node].remove(self)
class ChunkNode:
def __init__(self, chunk: "Chunk"):
self.chunk = chunk
self.height = 1
self.hi = -inf
self.left = None
self.right = None
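# ChunkTree is an interval tree built on an AVL tree ordered by chunk start time;
# each node caches 'hi', the maximum completion time in its subtree, so interval
# queries (at/over) can skip subtrees that cannot overlap the query range.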
class ChunkTree:
def __init__(self, node: Any):
self.node = node
self.root = None
def __iter__(self) -> Optional[Iterator["ChunkNode"]]:
return self._iter_from(self.root)
def _iter_from(self, root: Optional["ChunkNode"]) -> Optional[Iterator["ChunkNode"]]:
if root is None:
return None
else:
yield from self._iter_from(root.left)
yield root
yield from self._iter_from(root.right)
def at(self, time: float) -> List["ChunkNode"]:
nodes = []
self._at_from(self.root, time, nodes)
return nodes
def _at_from(self, root: Optional["ChunkNode"], time: float, nodes: List["ChunkNode"]):
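        # descend left only if the query time can fall under the left subtree's
        # cached 'hi'; then test this chunk's interval and always recurse right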
if root is not None:
if root.left is not None and time < root.left.hi:
self._at_from(root.left, time, nodes)
if root.chunk.start_time <= time < root.chunk.completion_time(self.node):
nodes.append(root)
self._at_from(root.right, time, nodes)
def over(self, lo: float, hi: float) -> List["ChunkNode"]:
nodes = []
self._over_from(self.root, lo, hi, nodes)
return nodes
def _over_from(self, root: Optional["ChunkNode"], lo: float, hi: float, nodes: List["ChunkNode"]):
if root is not None:
if root.left is not None and lo < root.left.hi:
self._over_from(root.left, lo, hi, nodes)
if lo < root.chunk.completion_time(self.node) and root.chunk.start_time < hi:
nodes.append(root)
self._over_from(root.right, lo, hi, nodes)
def add(self, chunk: "Chunk") -> "ChunkTree":
self.root = self._add_from(self.root, chunk)
return self
def _add_from(self, root: Optional["ChunkNode"], chunk: "Chunk") -> "ChunkNode":
if root is None:
treenode = ChunkNode(chunk)
treenode.hi = chunk.completion_time(self.node)
return treenode
else:
if chunk.start_time < root.chunk.start_time:
root.left = self._add_from(root.left, chunk)
else:
root.right = self._add_from(root.right, chunk)
root.height = 1 + max(self._height(root.left), self._height(root.right))
root.hi = max(self._hi(root), chunk.completion_time(self.node))
return self._rotate(root)
def remove(self, chunk: "Chunk") -> "ChunkTree":
self.root = self._remove_from(self.root, chunk)
return self
def _remove_from(self, root: Optional["ChunkNode"], chunk: "Chunk") -> Optional["ChunkNode"]:
if root is None:
return None
else:
if chunk.start_time < root.chunk.start_time:
root.left = self._remove_from(root.left, chunk)
elif chunk.start_time > root.chunk.start_time:
root.right = self._remove_from(root.right, chunk)
else:
if root.left is None:
return root.right
elif root.right is None:
return root.left
else:
successor = self._min_from(root.right)
root.chunk = successor.chunk
root.right = self._remove_from(root.right, successor.chunk)
root.height = 1 + max(self._height(root.left), self._height(root.right))
root.hi = max(root.chunk.completion_time(self.node), self._hi(root.left), self._hi(root.right))
return self._rotate(root)
def _rotate(self, root: "ChunkNode") -> "ChunkNode":
balance = self._balance(root)
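        # standard AVL rebalancing: left-left/left-right imbalances rotate right
        # (after rotating the left child if needed), right-right/right-left rotate left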
if balance > 1 and self._balance(root.left) >= 0:
return self._rotate_right(root)
elif balance > 1 and self._balance(root.left) < 0:
root.left = self._rotate_left(root.left)
return self._rotate_right(root)
elif balance < -1 and self._balance(root.right) <= 0:
return self._rotate_left(root)
elif balance < -1 and self._balance(root.right) > 0:
root.right = self._rotate_right(root.right)
return self._rotate_left(root)
else:
return root
def _rotate_left(self, root: "ChunkNode") -> "ChunkNode":
pivot = root.right
child = pivot.left
pivot.left = root
root.right = child
root.height = 1 + max(self._height(root.left), self._height(root.right))
root.hi = max(root.chunk.completion_time(self.node), self._hi(root.left), self._hi(root.right))
pivot.height = 1 + max(self._height(pivot.left), self._height(pivot.right))
pivot.hi = max(pivot.chunk.completion_time(self.node), self._hi(pivot.left), self._hi(pivot.right))
return pivot
def _rotate_right(self, root: "ChunkNode") -> "ChunkNode":
pivot = root.left
child = pivot.right
pivot.right = root
root.left = child
root.height = 1 + max(self._height(root.left), self._height(root.right))
root.hi = max(root.chunk.completion_time(self.node), self._hi(root.left), self._hi(root.right))
pivot.height = 1 + max(self._height(pivot.left), self._height(pivot.right))
pivot.hi = max(pivot.chunk.completion_time(self.node), self._hi(pivot.left), self._hi(pivot.right))
return pivot
def _balance(self, root: "ChunkNode") -> int:
if root is None:
return 0
else:
return self._height(root.left) - self._height(root.right)
def _height(self, root: "ChunkNode") -> int:
if root is None:
return 0
else:
return root.height
def _hi(self, root: Optional["ChunkNode"]) -> float:
if root is None:
return -inf
else:
return root.hi
def min(self) -> Optional["ChunkNode"]:
return self._min_from(self.root)
def _min_from(self, root: "ChunkNode") -> Optional["ChunkNode"]:
if root is None:
return None
else:
current = root
while current.left is not None:
current = current.left
return current
def max(self) -> Optional["ChunkNode"]:
return self._max_from(self.root)
def _max_from(self, root: "ChunkNode") -> Optional["ChunkNode"]:
if root is None:
return None
else:
current = root
while current.right is not None:
current = current.right
return current
class Schedule:
def __init__(self):
self.taskmap = {}
self.nodemap = {}
def tasks(self) -> Iterator["Task"]:
return iter(self.taskmap)
def hastask(self, task: "Task") -> bool:
return task in self.taskmap
def task(self, task: "Task") -> Optional[List["Chunk"]]:
if task in self.taskmap:
return self.taskmap[task]
else:
return None
def nodes(self) -> Iterator[Any]:
return iter(self.nodemap)
def hasnode(self, node: Any) -> bool:
return node in self.nodemap
def node(self, node: Any) -> Optional["ChunkTree"]:
if node in self.nodemap:
return self.nodemap[node]
else:
return None
# def copy(self):
# chunk_map = self.taskmap.copy()
#
# for tsk in chunk_map:
# chunk_map[tsk] = chunk_map[tsk].copy()
#
# return Schedule(chunk_map)
def get(self, prop: "Property") -> Any:
return prop.get(self)
def apply(self, operator: "Operator") -> Any:
return operator.apply(self)
|
[
"collections.OrderedDict"
] |
[((872, 885), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (883, 885), False, 'from collections import OrderedDict\n')]
|
input = """154, 159
172, 84
235, 204
181, 122
161, 337
305, 104
128, 298
176, 328
146, 71
210, 87
341, 195
50, 96
225, 151
86, 171
239, 68
79, 50
191, 284
200, 122
282, 240
224, 282
327, 74
158, 289
331, 244
154, 327
317, 110
272, 179
173, 175
187, 104
44, 194
202, 332
249, 197
244, 225
52, 127
299, 198
123, 198
349, 75
233, 72
284, 130
119, 150
172, 355
147, 314
58, 335
341, 348
236, 115
185, 270
173, 145
46, 288
214, 127
158, 293
237, 311"""
from collections import namedtuple
Point = namedtuple("Point", ["id", "x", "y"])
points = set()
for id, line in enumerate(input.splitlines()):
words = line.split(",")
x, y = [int(a) for a in words]
points.add(Point(id, x, y))
# get bounds
a_point = next(iter(points))
left_bound = a_point.x
right_bound = a_point.x
up_bound = a_point.y
down_bound = a_point.y
for p in points:
if p.x < left_bound:
left_bound = p.x
if p.x > right_bound:
right_bound = p.x
if p.y < up_bound:
up_bound = p.y
if p.y > down_bound:
down_bound = p.y
# Find closest points within the bounds
# Anything outside the bounds is uninteresting as it just leads off into infinite space
def distance(p, q):
return abs(p.x - q.x) + abs(p.y - q.y)
def find_closest(p, points):
closest_dist = None
closest = set()
for q in points:
dist = distance(p, q)
        if closest_dist is None or dist < closest_dist:
closest = {q.id}
closest_dist = dist
elif dist == closest_dist:
closest.add(q.id)
return closest
grid = [
[0] * (right_bound - left_bound + 1) for i in range(down_bound - up_bound + 1)
]
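# grid[y][x] stores the id of the single closest point, or -1 when two or
# more points tie for closest (tied cells belong to no point's area)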
for y in range(up_bound, down_bound + 1):
for x in range(left_bound, right_bound + 1):
closest_points = find_closest(Point(id=None, x=x, y=y), points)
if len(closest_points) > 1:
grid[y-up_bound][x-left_bound] = -1
elif len(closest_points) == 0:
print("wtf")
exit(1)
else:
grid[y - up_bound][x - left_bound] = closest_points.pop()
# We have our grid, we can remove any point ids that lie on the edge as they
# will continue off to infinity
candidate_ids = {p.id for p in points}
for y in [0, down_bound - up_bound]:
for x in [0, right_bound - left_bound]:
if grid[y][x] in candidate_ids:
candidate_ids.remove(grid[y][x])
# we have our contenders
# now find which has the smallest finite space
ids_to_count = {}
for y in range(0, down_bound - up_bound + 1):
for x in range(0, right_bound - left_bound + 1):
if grid[y][x] in candidate_ids:
if grid[y][x] not in ids_to_count:
ids_to_count[grid[y][x]] = 0
ids_to_count[grid[y][x]] += 1
print(max(ids_to_count.values()))
|
[
"collections.namedtuple"
] |
[((493, 530), 'collections.namedtuple', 'namedtuple', (['"""Point"""', "['id', 'x', 'y']"], {}), "('Point', ['id', 'x', 'y'])\n", (503, 530), False, 'from collections import namedtuple\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
import logging
logger = logging.getLogger(__name__)
import multiprocessing as mp
import tweetf0rm.handler
from tweetf0rm.redis_helper import CrawlerQueue
#MAX_QUEUE_SIZE = 32767
class CrawlerProcess(mp.Process):
def __init__(self, node_id, crawler_id, redis_config, handlers):
super(CrawlerProcess, self).__init__()
self.node_id = node_id
self.crawler_id = crawler_id
self.redis_config = redis_config
#self.queue = mp.Queue(maxsize=MAX_QUEUE_SIZE)
self.crawler_queue = CrawlerQueue(node_id, crawler_id, redis_config=redis_config)
self.crawler_queue.clear()
#self.lock = mp.Lock()
self.handlers = handlers
logger.debug("number of handlers attached: %d"%(len(handlers)))
def get_crawler_id(self):
return self.crawler_id
def enqueue(self, request):
#self.queue.put(request, block=True)
self.crawler_queue.put(request)
return True
def get_cmd(self):
#return self.queue.get(block=True)
return self.crawler_queue.get(block=True)
def get_queue_size(self):
        return self.crawler_queue.qsize()
def run(self):
pass
|
[
"logging.getLogger",
"tweetf0rm.redis_helper.CrawlerQueue"
] |
[((69, 96), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (86, 96), False, 'import logging\n'), ((534, 594), 'tweetf0rm.redis_helper.CrawlerQueue', 'CrawlerQueue', (['node_id', 'crawler_id'], {'redis_config': 'redis_config'}), '(node_id, crawler_id, redis_config=redis_config)\n', (546, 594), False, 'from tweetf0rm.redis_helper import CrawlerQueue\n')]
|
# Copyright (c) 2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
EC2.DhcpOptions
~~~~~~~~~~~~~~
AWS EC2 DhcpOptions interface
"""
# Boto
from botocore.exceptions import ClientError
# Cloudify
from cloudify_aws.common import decorators, utils
from cloudify_aws.ec2 import EC2Base
from cloudify_aws.common.constants import EXTERNAL_RESOURCE_ID
RESOURCE_TYPE = 'EC2 Dhcp Options'
DHCPOPTIONS = 'DhcpOptions'
DHCPOPTIONS_ID = 'DhcpOptionsId'
DHCPOPTIONS_IDS = 'DhcpOptionsIds'
VPC_ID = 'VpcId'
VPC_TYPE = 'cloudify.nodes.aws.ec2.Vpc'
VPC_TYPE_DEPRECATED = 'cloudify.aws.nodes.Vpc'
class EC2DHCPOptions(EC2Base):
"""
EC2 DhcpOptions interface
"""
def __init__(self, ctx_node, resource_id=None, client=None, logger=None):
EC2Base.__init__(self, ctx_node, resource_id, client, logger)
self.type_name = RESOURCE_TYPE
@property
def properties(self):
"""Gets the properties of an external resource"""
params = {DHCPOPTIONS_IDS: [self.resource_id]}
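        # describe_dhcp_options returns a dict with a 'DhcpOptions' list;
        # the first entry is the options set matching the resource id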
try:
resources = \
self.client.describe_dhcp_options(**params)
except ClientError:
pass
else:
return resources.get(DHCPOPTIONS)[0] if resources else None
def create(self, params):
"""
Create a new AWS EC2 DhcpOptions.
"""
return self.make_client_call('create_dhcp_options', params)
def delete(self, params=None):
"""
Deletes an existing AWS EC2 DhcpOptions.
"""
self.logger.debug('Deleting %s with parameters: %s'
% (self.type_name, params))
res = self.client.delete_dhcp_options(**params)
self.logger.debug('Response: %s' % res)
return res
def attach(self, params):
'''
Attach an AWS EC2 DhcpOptions to a VPC.
'''
self.logger.debug('Attaching %s with: %s'
% (self.type_name, params.get(VPC_ID, None)))
res = self.client.associate_dhcp_options(**params)
self.logger.debug('Response: %s' % res)
return res
def detach(self, params):
'''
        Detach an AWS EC2 DhcpOptions set from a VPC.
'''
self.logger.debug('Detaching %s from: %s'
% (self.type_name, params.get(VPC_ID, None)))
self.logger.debug('Attaching default %s'
% (self.type_name))
res = self.client.associate_dhcp_options(**params)
self.logger.debug('Response: %s' % res)
return res
@decorators.aws_resource(EC2DHCPOptions, resource_type=RESOURCE_TYPE)
def prepare(ctx, resource_config, **_):
"""Prepares an AWS EC2 DhcpOptions"""
# Save the parameters
ctx.instance.runtime_properties['resource_config'] = resource_config
@decorators.aws_resource(EC2DHCPOptions, RESOURCE_TYPE)
def create(ctx, iface, resource_config, **_):
"""Creates an AWS EC2 DhcpOptions"""
# Create a copy of the resource config for clean manipulation.
params = \
dict() if not resource_config else resource_config.copy()
# Actually create the resource
create_response = iface.create(params)[DHCPOPTIONS]
ctx.instance.runtime_properties['create_response'] = \
utils.JsonCleanuper(create_response).to_dict()
dhcp_options_id = create_response.get(DHCPOPTIONS_ID, '')
iface.update_resource_id(dhcp_options_id)
utils.update_resource_id(ctx.instance, dhcp_options_id)
@decorators.aws_resource(EC2DHCPOptions, RESOURCE_TYPE,
ignore_properties=True)
def delete(ctx, iface, resource_config, **_):
"""Deletes an AWS EC2 DhcpOptions"""
# Create a copy of the resource config for clean manipulation.
params = \
dict() if not resource_config else resource_config.copy()
dhcp_options_id = params.get(DHCPOPTIONS_ID)
if not dhcp_options_id:
params[DHCPOPTIONS_ID] = \
iface.resource_id or \
ctx.instance.runtime_properties.get(EXTERNAL_RESOURCE_ID)
iface.delete(params)
@decorators.aws_resource(EC2DHCPOptions, RESOURCE_TYPE)
def attach(ctx, iface, resource_config, **_):
'''Attaches an AWS EC2 DhcpOptions to a VPC'''
params = dict() if not resource_config else resource_config.copy()
dhcp_options_id = params.get(DHCPOPTIONS_ID)
if not dhcp_options_id:
dhcp_options_id = iface.resource_id
params.update({DHCPOPTIONS_ID: dhcp_options_id})
    params.pop('DhcpConfigurations', None)
vpc_id = params.get(VPC_ID)
if not vpc_id:
targ = \
utils.find_rel_by_node_type(ctx.instance, VPC_TYPE) or \
utils.find_rel_by_node_type(ctx.instance, VPC_TYPE_DEPRECATED)
# Attempt to use the VPC ID from parameters.
# Fallback to connected VPC.
params[VPC_ID] = \
vpc_id or \
targ.target.instance.runtime_properties.get(EXTERNAL_RESOURCE_ID)
ctx.instance.runtime_properties['vpc_id'] = vpc_id
# # Actually attach the resources
iface.attach(params)
@decorators.aws_resource(EC2DHCPOptions, RESOURCE_TYPE,
ignore_properties=True)
def detach(ctx, iface, resource_config, **_):
'''Detach an AWS EC2 DhcpOptions from a VPC'''
params = dict() if not resource_config else resource_config.copy()
params.update({DHCPOPTIONS_ID: 'default'})
vpc_id = params.get(VPC_ID) or ctx.instance.runtime_properties['vpc_id']
if not vpc_id:
targ = \
utils.find_rel_by_node_type(ctx.instance, VPC_TYPE) or \
utils.find_rel_by_node_type(ctx.instance, VPC_TYPE_DEPRECATED)
# Attempt to use the VPC ID from parameters.
# Fallback to connected VPC.
params[VPC_ID] = \
vpc_id or \
targ.target.instance.runtime_properties.get(EXTERNAL_RESOURCE_ID)
else:
params.update({VPC_ID: vpc_id})
iface.detach(params)
|
[
"cloudify_aws.common.utils.find_rel_by_node_type",
"cloudify_aws.common.utils.JsonCleanuper",
"cloudify_aws.common.decorators.aws_resource",
"cloudify_aws.common.utils.update_resource_id",
"cloudify_aws.ec2.EC2Base.__init__"
] |
[((3126, 3194), 'cloudify_aws.common.decorators.aws_resource', 'decorators.aws_resource', (['EC2DHCPOptions'], {'resource_type': 'RESOURCE_TYPE'}), '(EC2DHCPOptions, resource_type=RESOURCE_TYPE)\n', (3149, 3194), False, 'from cloudify_aws.common import decorators, utils\n'), ((3379, 3433), 'cloudify_aws.common.decorators.aws_resource', 'decorators.aws_resource', (['EC2DHCPOptions', 'RESOURCE_TYPE'], {}), '(EC2DHCPOptions, RESOURCE_TYPE)\n', (3402, 3433), False, 'from cloudify_aws.common import decorators, utils\n'), ((4047, 4125), 'cloudify_aws.common.decorators.aws_resource', 'decorators.aws_resource', (['EC2DHCPOptions', 'RESOURCE_TYPE'], {'ignore_properties': '(True)'}), '(EC2DHCPOptions, RESOURCE_TYPE, ignore_properties=True)\n', (4070, 4125), False, 'from cloudify_aws.common import decorators, utils\n'), ((4634, 4688), 'cloudify_aws.common.decorators.aws_resource', 'decorators.aws_resource', (['EC2DHCPOptions', 'RESOURCE_TYPE'], {}), '(EC2DHCPOptions, RESOURCE_TYPE)\n', (4657, 4688), False, 'from cloudify_aws.common import decorators, utils\n'), ((5626, 5704), 'cloudify_aws.common.decorators.aws_resource', 'decorators.aws_resource', (['EC2DHCPOptions', 'RESOURCE_TYPE'], {'ignore_properties': '(True)'}), '(EC2DHCPOptions, RESOURCE_TYPE, ignore_properties=True)\n', (5649, 5704), False, 'from cloudify_aws.common import decorators, utils\n'), ((3988, 4043), 'cloudify_aws.common.utils.update_resource_id', 'utils.update_resource_id', (['ctx.instance', 'dhcp_options_id'], {}), '(ctx.instance, dhcp_options_id)\n', (4012, 4043), False, 'from cloudify_aws.common import decorators, utils\n'), ((1312, 1373), 'cloudify_aws.ec2.EC2Base.__init__', 'EC2Base.__init__', (['self', 'ctx_node', 'resource_id', 'client', 'logger'], {}), '(self, ctx_node, resource_id, client, logger)\n', (1328, 1373), False, 'from cloudify_aws.ec2 import EC2Base\n'), ((3829, 3865), 'cloudify_aws.common.utils.JsonCleanuper', 'utils.JsonCleanuper', (['create_response'], {}), '(create_response)\n', (3848, 3865), False, 'from cloudify_aws.common import decorators, utils\n'), ((5151, 5202), 'cloudify_aws.common.utils.find_rel_by_node_type', 'utils.find_rel_by_node_type', (['ctx.instance', 'VPC_TYPE'], {}), '(ctx.instance, VPC_TYPE)\n', (5178, 5202), False, 'from cloudify_aws.common import decorators, utils\n'), ((5220, 5282), 'cloudify_aws.common.utils.find_rel_by_node_type', 'utils.find_rel_by_node_type', (['ctx.instance', 'VPC_TYPE_DEPRECATED'], {}), '(ctx.instance, VPC_TYPE_DEPRECATED)\n', (5247, 5282), False, 'from cloudify_aws.common import decorators, utils\n'), ((6072, 6123), 'cloudify_aws.common.utils.find_rel_by_node_type', 'utils.find_rel_by_node_type', (['ctx.instance', 'VPC_TYPE'], {}), '(ctx.instance, VPC_TYPE)\n', (6099, 6123), False, 'from cloudify_aws.common import decorators, utils\n'), ((6141, 6203), 'cloudify_aws.common.utils.find_rel_by_node_type', 'utils.find_rel_by_node_type', (['ctx.instance', 'VPC_TYPE_DEPRECATED'], {}), '(ctx.instance, VPC_TYPE_DEPRECATED)\n', (6168, 6203), False, 'from cloudify_aws.common import decorators, utils\n')]
|
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from urllib2 import urlopen # Py2.X
except ImportError:
from urllib.request import urlopen # Py3.X
import sqlite3 as db
from newrelic.api.time_trace import record_exception
from newrelic.api.transaction import (add_custom_parameter,
get_browser_timing_header, get_browser_timing_footer,
record_custom_event)
from newrelic.api.wsgi_application import wsgi_application
_custom_parameters = {
'user' : 'user-name',
'account' : 'account-name',
'product' : 'product-name',
'bytes' : b'bytes-value',
'string' : 'string-value',
'unicode' : u'unicode-value',
'integer' : 1,
'float' : 1.0,
'invalid-utf8' : b'\xe2',
'multibyte-utf8' : b'\xe2\x88\x9a',
'multibyte-unicode' : b'\xe2\x88\x9a'.decode('utf-8'),
'list' : [],
'tuple' : (),
'dict' : {},
}
_err_param = {
'err-param' : 'value'
}
def user_attributes_added():
"""Expected values when the custom parameters in this file are added as user
attributes
"""
user_attributes = _custom_parameters.copy()
user_attributes['list'] = '[]'
user_attributes['tuple'] = '()'
user_attributes['dict'] = '{}'
return user_attributes
def error_user_params_added():
return _err_param.copy()
@wsgi_application()
def fully_featured_app(environ, start_response):
status = '200 OK'
path = environ.get('PATH_INFO')
use_user_attrs = environ.get('record_attributes', 'TRUE') == 'TRUE'
if use_user_attrs:
for attr, val in _custom_parameters.items():
add_custom_parameter(attr, val)
if 'db' in environ and int(environ['db']) > 0:
connection = db.connect(":memory:")
for i in range(int(environ['db']) - 1):
connection.execute("create table test_db%d (a, b, c)" % i)
if 'external' in environ:
for i in range(int(environ['external'])):
r = urlopen('http://www.python.org')
r.read(10)
if 'err_message' in environ:
n_errors = int(environ.get('n_errors', 1))
for i in range(n_errors):
try:
# append number to stats engine to get unique errors, so they
# don't immediately get filtered out.
raise ValueError(environ['err_message'] + str(i))
except ValueError:
if use_user_attrs:
record_exception(params=_err_param)
else:
record_exception()
text = '<html><head>%s</head><body><p>RESPONSE</p>%s</body></html>'
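    # inject the browser (RUM) timing header and footer snippets into the page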
output = (text % (get_browser_timing_header(),
get_browser_timing_footer())).encode('UTF-8')
response_headers = [('Content-type', 'text/html; charset=utf-8'),
('Content-Length', str(len(output)))]
start_response(status, response_headers)
return [output]
@wsgi_application()
def simple_exceptional_app(environ, start_response):
start_response('500 :(',[])
raise ValueError('Transaction had bad value')
@wsgi_application()
def simple_app(environ, start_response):
status = '200 OK'
start_response(status, response_headers=[])
return []
@wsgi_application()
def simple_custom_event_app(environ, start_response):
params = {'snowman': u'\u2603', 'foo': 'bar'}
record_custom_event('SimpleAppEvent', params)
start_response(status='200 OK', response_headers=[])
return []
|
[
"newrelic.api.transaction.get_browser_timing_footer",
"sqlite3.connect",
"newrelic.api.time_trace.record_exception",
"newrelic.api.transaction.record_custom_event",
"newrelic.api.wsgi_application.wsgi_application",
"newrelic.api.transaction.add_custom_parameter",
"newrelic.api.transaction.get_browser_timing_header",
"urllib.request.urlopen"
] |
[((1894, 1912), 'newrelic.api.wsgi_application.wsgi_application', 'wsgi_application', ([], {}), '()\n', (1910, 1912), False, 'from newrelic.api.wsgi_application import wsgi_application\n'), ((3486, 3504), 'newrelic.api.wsgi_application.wsgi_application', 'wsgi_application', ([], {}), '()\n', (3502, 3504), False, 'from newrelic.api.wsgi_application import wsgi_application\n'), ((3644, 3662), 'newrelic.api.wsgi_application.wsgi_application', 'wsgi_application', ([], {}), '()\n', (3660, 3662), False, 'from newrelic.api.wsgi_application import wsgi_application\n'), ((3792, 3810), 'newrelic.api.wsgi_application.wsgi_application', 'wsgi_application', ([], {}), '()\n', (3808, 3810), False, 'from newrelic.api.wsgi_application import wsgi_application\n'), ((3920, 3965), 'newrelic.api.transaction.record_custom_event', 'record_custom_event', (['"""SimpleAppEvent"""', 'params'], {}), "('SimpleAppEvent', params)\n", (3939, 3965), False, 'from newrelic.api.transaction import add_custom_parameter, get_browser_timing_header, get_browser_timing_footer, record_custom_event\n'), ((2288, 2310), 'sqlite3.connect', 'db.connect', (['""":memory:"""'], {}), "(':memory:')\n", (2298, 2310), True, 'import sqlite3 as db\n'), ((2183, 2214), 'newrelic.api.transaction.add_custom_parameter', 'add_custom_parameter', (['attr', 'val'], {}), '(attr, val)\n', (2203, 2214), False, 'from newrelic.api.transaction import add_custom_parameter, get_browser_timing_header, get_browser_timing_footer, record_custom_event\n'), ((2527, 2559), 'urllib.request.urlopen', 'urlopen', (['"""http://www.python.org"""'], {}), "('http://www.python.org')\n", (2534, 2559), False, 'from urllib.request import urlopen\n'), ((3198, 3225), 'newrelic.api.transaction.get_browser_timing_header', 'get_browser_timing_header', ([], {}), '()\n', (3223, 3225), False, 'from newrelic.api.transaction import add_custom_parameter, get_browser_timing_header, get_browser_timing_footer, record_custom_event\n'), ((3239, 3266), 'newrelic.api.transaction.get_browser_timing_footer', 'get_browser_timing_footer', ([], {}), '()\n', (3264, 3266), False, 'from newrelic.api.transaction import add_custom_parameter, get_browser_timing_header, get_browser_timing_footer, record_custom_event\n'), ((3005, 3040), 'newrelic.api.time_trace.record_exception', 'record_exception', ([], {'params': '_err_param'}), '(params=_err_param)\n', (3021, 3040), False, 'from newrelic.api.time_trace import record_exception\n'), ((3083, 3101), 'newrelic.api.time_trace.record_exception', 'record_exception', ([], {}), '()\n', (3099, 3101), False, 'from newrelic.api.time_trace import record_exception\n')]
|
from spydrnet.ir import Port, Instance, InnerPin
from spydrnet_tmr.transformation.util import add_suffix_to_name
IN = Port.Direction.IN
OUT = Port.Direction.OUT
INOUT = Port.Direction.INOUT
def apply_nmr(ports_and_instances_to_replicate, degree, name_suffix='NMR', rename_original=True):
"""
Replicate the selected ports and instances to the n-th degree.
    :param ports_and_instances_to_replicate: the ports and instances to replicate
:param degree: number of total copies
:param name_suffix: string to append to each replicated element (e.g. 'TMR' or 'DWC')
    :param rename_original: whether to rename the original domain
:type rename_original: bool
:return: A map from an original element to its replicas
"""
nmr_agent = NMR.from_originals_degree_suffix_and_rename(ports_and_instances_to_replicate, degree, name_suffix,
rename_original)
replicas = nmr_agent.apply()
return replicas
class NMR:
@staticmethod
def from_originals_degree_suffix_and_rename(originals, degree, suffix, rename):
nmr_agent = NMR(originals, degree, suffix, rename)
return nmr_agent
def __init__(self, originals, degree, suffix, rename):
# Internal state
self._applied = False
self._wires_to_replicate = None
self._additional_ports_to_replicate = None
self._wiremap = None
self._replicas = dict()
# Inputs
for original in originals:
# if isinstance(original, HRef):
# original = original.item
if isinstance(original, (Port, Instance)):
self._replicas[original] = None
self.replication_degree = degree
self.name_suffix = suffix
self.rename_original = rename
def apply(self):
#self._validate_inputs()
self._identify_additional_wires_and_ports_to_replicate()
self._replicate_ports_and_instances()
self._replicate_wires()
self._connect_wires()
return self._replicas
def _identify_additional_wires_and_ports_to_replicate(self):
        src_pins, snk_pins = self._identify_src_and_snk_pins_that_will_be_replicated()
wires_to_replicate = self.identify_additional_wires_to_replicate(src_pins, snk_pins)
ports_to_replicate = self.identify_additional_ports_to_replicate(wires_to_replicate)
self._wires_to_replicate = wires_to_replicate
self._replicas.update((port, None) for port in ports_to_replicate)
@staticmethod
def identify_additional_ports_to_replicate(wires_to_replicate):
ports_to_replicate = set()
inner_pins = set()
outer_pins = set()
for wire in wires_to_replicate:
for pin in wire.pins:
if isinstance(pin, InnerPin):
inner_pins.add(pin)
else:
outer_pins.add(pin)
for outer_pin in outer_pins:
inner_pin = outer_pin.inner_pin
if inner_pin in inner_pins:
port = inner_pin.port
ports_to_replicate.add(port)
for pin in port.pins:
inner_pins.discard(pin)
return ports_to_replicate
@staticmethod
def identify_additional_wires_to_replicate(src_pins, snk_pins):
wires_to_replicate = set()
wires_found = set()
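        # iterative depth-first search from each replicated source pin; when a
        # replicated sink pin is reached, every wire on the current search path
        # is marked for replication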
for src_pin in src_pins:
wire = src_pin.wire
if not wire or wire in wires_found:
continue
wires_found.add(wire)
search_stack = [(wire, False)]
while search_stack:
wire, visited = search_stack.pop()
if visited:
continue
search_stack.append((wire, True))
for pin in wire.pins:
if pin in snk_pins:
for path_member, part_of_path in reversed(search_stack):
if part_of_path is True:
if path_member not in wires_to_replicate:
wires_to_replicate.add(path_member)
else:
break
elif pin not in src_pins:
other_wires = pin.get_wires(selection='OUTSIDE' if isinstance(pin, InnerPin) else 'INSIDE')
for other_wire in other_wires:
if other_wire not in wires_found:
wires_found.add(other_wire)
search_stack.append((other_wire, False))
return wires_to_replicate
    def _identify_src_and_snk_pins_that_will_be_replicated(self):
src_pins = set()
snk_pins = set()
for original in self._replicas.keys():
if isinstance(original, Port):
direction = original.direction
if direction in {IN, INOUT}:
src_pins.update(original.get_pins(selection='INSIDE'))
snk_pins.update(original.get_pins(selection='OUTSIDE'))
if direction in {OUT, INOUT}:
src_pins.update(original.get_pins(selection='OUTSIDE'))
snk_pins.update(original.get_pins(selection='INSIDE'))
else:
reference = original.reference
for port in reference.ports:
direction = port.direction
if direction in {IN, INOUT}:
snk_pins.update(map(original.pins.get, port.pins))
if direction in {OUT, INOUT}:
src_pins.update(map(original.pins.get, port.pins))
return src_pins, snk_pins
def _replicate_ports_and_instances(self):
for original in self._replicas.keys():
if isinstance(original, Port):
self._replicate_port(original)
else:
self._replicate_instance(original)
self._reorder_ports_for_readability()
self._reorder_instances_for_readability()
def _replicate_port(self, port):
replicas = list()
for ii in range(1, self.replication_degree):
port_clone = port.clone()
add_suffix_to_name(port_clone, self.name_suffix + '_' + str(ii))
replicas.append(port_clone)
port.definition.add_port(port_clone)
if self.rename_original:
add_suffix_to_name(port, self.name_suffix + '_' + '0')
self._replicas[port] = replicas
def _replicate_instance(self, inst):
replicas = list()
for ii in range(1, self.replication_degree):
inst_clone = inst.clone()
add_suffix_to_name(inst_clone, self.name_suffix + '_' + str(ii))
replicas.append(inst_clone)
inst.parent.add_child(inst_clone)
if self.rename_original:
add_suffix_to_name(inst, self.name_suffix + '_' + '0')
self._replicas[inst] = replicas
def _reorder_ports_for_readability(self):
reordered_definitions = set()
for original in self._replicas.keys():
if isinstance(original, Port):
definition = original.definition
if definition not in reordered_definitions:
reordered_definitions.add(definition)
new_order = list()
def_ports = definition.ports
def_ports_len = len(def_ports)
for def_port in def_ports:
new_order.append(def_port)
if def_port in self._replicas:
new_order += self._replicas[def_port]
if len(new_order) == def_ports_len:
break
definition.ports = new_order
def _reorder_instances_for_readability(self):
reordered_definitions = set()
for original in self._replicas:
if isinstance(original, Instance):
definition = original.parent
if definition not in reordered_definitions:
reordered_definitions.add(definition)
new_order = list()
def_children = definition.children
def_children_len = len(def_children)
for def_child in def_children:
new_order.append(def_child)
if def_child in self._replicas:
new_order += self._replicas[def_child]
if len(new_order) == def_children_len:
break
definition.children = new_order
def _replicate_wires(self):
self._wiremap = dict()
replicated_cables = set()
for wire in self._wires_to_replicate:
cable = wire.cable
if cable not in replicated_cables:
replicated_cables.add(cable)
for ii in range(1, self.replication_degree):
cable_clone = cable.clone()
add_suffix_to_name(cable_clone, self.name_suffix + '_' + str(ii))
for wire_index, cable_wire in enumerate(cable.wires):
if cable_wire in self._wires_to_replicate:
if cable_wire not in self._wiremap:
self._wiremap[cable_wire] = list()
self._wiremap[cable_wire].append(cable_clone.wires[wire_index])
cable.definition.add_cable(cable_clone)
if self.rename_original:
add_suffix_to_name(cable, self.name_suffix + '_' + '0')
        self._reorder_cables_for_readability()
    def _reorder_cables_for_readability(self):
reordered_definitions = set()
for wire in self._wiremap:
definition = wire.cable.definition
if definition not in reordered_definitions:
reordered_definitions.add(definition)
new_order = list()
visited_cables = set()
def_cables = definition.cables
for def_cable in def_cables:
if def_cable in visited_cables:
continue
visited_cables.add(def_cable)
new_order.append(def_cable)
for wire in def_cable.wires:
if wire in self._wiremap:
other_cables = list(other_wire.cable for other_wire in self._wiremap[wire])
for other_cable in other_cables:
if other_cable not in visited_cables:
visited_cables.add(other_cable)
new_order.append(other_cable)
definition.cables = new_order
def _connect_wires(self):
self._connect_replicated_wires()
self._connect_non_replicated_wires_to_replicated_pins()
def _connect_replicated_wires(self):
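        # connect each replicated wire's i-th clone to the corresponding pin on
        # the i-th replica of every replicated port or instance it touches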
for wire, other_wires in self._wiremap.items():
for pin in wire.pins:
if isinstance(pin, InnerPin):
port = pin.port
if port in self._replicas:
other_ports = self._replicas[port]
pin_index = port.pins.index(pin)
for ii in range(self.replication_degree - 1):
other_wires[ii].connect_pin(other_ports[ii].pins[pin_index])
else:
inner_pin = pin.inner_pin
instance = pin.instance
if instance in self._replicas:
other_instances = self._replicas[instance]
for ii in range(self.replication_degree - 1):
other_wires[ii].connect_pin(other_instances[ii].pins[inner_pin])
else: # TODO: if move this outside of the if does it do what we would expect?
port = inner_pin.port
if port in self._replicas:
other_ports = self._replicas[port]
pin_index = port.pins.index(inner_pin)
for ii in range(self.replication_degree - 1):
other_wires[ii].connect_pin(instance.pins[other_ports[ii].pins[pin_index]])
def _connect_non_replicated_wires_to_replicated_pins(self):
pinmap = dict()
for original in self._replicas:
if isinstance(original, Instance):
inst = original
other_instances = self._replicas[inst]
for pin in inst.pins:
if pin.inner_pin.port.direction in {IN, INOUT}:
wire = pin.wire
if wire and wire not in self._wiremap:
inner_pin = pin.inner_pin
pinmap[pin] = list()
for ii in range(self.replication_degree - 1):
other_pin = other_instances[ii].pins[inner_pin]
pinmap[pin].append(other_pin)
wire.connect_pin(other_pin)
elif isinstance(original, Port):
port = original
other_ports = self._replicas[port]
for pin in port.pins:
if port.direction in {OUT, INOUT}:
wire = pin.wire
if wire and wire not in self._wiremap:
pin_index = pin.port.pins.index(pin)
pinmap[pin] = list()
for ii in range(self.replication_degree - 1):
other_pin = other_ports[ii].pins[pin_index]
pinmap[pin].append(other_pin)
wire.connect_pin(other_pin)
        self._reorder_pins_for_readability(pinmap)
@staticmethod
    def _reorder_pins_for_readability(pinmap):
reordered_wires = set()
for pin in pinmap:
wire = pin.wire
if wire not in reordered_wires:
reordered_wires.add(wire)
new_order = list()
wire_pins = wire.pins
wire_pins_len = len(wire_pins)
for wire_pin in wire_pins:
new_order.append(wire_pin)
if wire_pin in pinmap:
new_order += pinmap[wire_pin]
if len(new_order) == wire_pins_len:
break
wire.pins = new_order
|
[
"spydrnet_tmr.transformation.util.add_suffix_to_name"
] |
[((6451, 6505), 'spydrnet_tmr.transformation.util.add_suffix_to_name', 'add_suffix_to_name', (['port', "(self.name_suffix + '_' + '0')"], {}), "(port, self.name_suffix + '_' + '0')\n", (6469, 6505), False, 'from spydrnet_tmr.transformation.util import add_suffix_to_name\n'), ((6913, 6967), 'spydrnet_tmr.transformation.util.add_suffix_to_name', 'add_suffix_to_name', (['inst', "(self.name_suffix + '_' + '0')"], {}), "(inst, self.name_suffix + '_' + '0')\n", (6931, 6967), False, 'from spydrnet_tmr.transformation.util import add_suffix_to_name\n'), ((9665, 9720), 'spydrnet_tmr.transformation.util.add_suffix_to_name', 'add_suffix_to_name', (['cable', "(self.name_suffix + '_' + '0')"], {}), "(cable, self.name_suffix + '_' + '0')\n", (9683, 9720), False, 'from spydrnet_tmr.transformation.util import add_suffix_to_name\n')]
|
# Copyright 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import, division, print_function, unicode_literals
import shlex
import tests.utils
class ShellTest(tests.utils.TestCase):
includes = [("@fbcode_macros//build_defs:shell.bzl", "shell")]
@tests.utils.with_project()
def test_split_works_like_shlex_split(self, root):
test_strings = [
r"",
r"FOO BAR",
" foo \t\nbar\n baz",
r'foo -D"bar"',
r'foo -D"\"something quoted\"" last\ string',
r'foo -D"\n contains backslash still" ',
r"""foo -D'something something \"dark side\"'""",
r"""-DFOO -D"\ B'A'R=\"something here\""'something" else' -D\ BAZ -D\\some""",
r'''-DFOO -DBAR="baz \"\\\"lots of quotes\\\"\""''',
]
commands = ["shell.split(%r)" % s.encode("ascii") for s in test_strings]
expected = [shlex.split(s) for s in test_strings]
result = root.runUnitTests(self.includes, commands)
self.assertSuccess(result)
self.assertEqual(
expected, [[x.encode("utf-8") for x in line] for line in result.debug_lines]
)
|
[
"shlex.split"
] |
[((1176, 1190), 'shlex.split', 'shlex.split', (['s'], {}), '(s)\n', (1187, 1190), False, 'import shlex\n')]
|
import openpnm as op
import scipy as sp
import pytest
class SubdomainTest:
def setup_class(self):
ws = op.Workspace()
ws.settings['local_data'] = True
self.net = op.network.Cubic(shape=[3, 3, 3])
self.geo = op.geometry.GenericGeometry(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.geo['pore.diameter'] = sp.rand(self.net.Np)
self.geo.add_model(propname='pore.volume',
model=op.models.geometry.pore_volume.sphere)
self.geo['throat.diameter'] = sp.rand(self.net.Nt)
self.geo.add_model(propname='throat.area',
model=op.models.geometry.throat_area.cylinder)
self.geo.regenerate_models()
self.phase1 = op.phases.GenericPhase(network=self.net)
self.phase2 = op.phases.GenericPhase(network=self.net)
self.phys1 = op.physics.GenericPhysics(network=self.net,
geometry=self.geo,
phase=self.phase1)
self.phys1['pore.blah'] = 1.0
self.phys2 = op.physics.GenericPhysics(network=self.net,
geometry=self.geo,
phase=self.phase2)
self.phys2['pore.blah'] = 2.0
def teardown_class(self):
ws = op.Workspace()
ws.clear()
def test_drop_locations_from_geom_successively_with_single_geometry(self):
assert self.geo.Np == 27
assert self.geo.Nt == 54
self.geo._drop_locations(pores=[0, 1, 2], throats=[0, 1, 2])
assert self.geo.Np == 24
assert self.geo.Nt == 51
self.geo._drop_locations(pores=[3, 4], throats=[3, 4])
assert self.geo.Np == 22
assert self.geo.Nt == 49
self.geo._add_locations(pores=[0, 1, 2, 3, 4], throats=[0, 1, 2, 3, 4])
assert self.geo.Np == 27
assert self.geo.Nt == 54
def test_drop_locations_from_physics_successively_with_two_physics(self):
assert self.phys1.Np == 27
assert self.phys1.Nt == 54
self.phys1._drop_locations(pores=[0, 1], throats=[0, 1])
assert self.phys1.Np == 25
assert self.phys1.Nt == 52
self.phys1._drop_locations(pores=[3, 4], throats=[3, 4])
assert self.phys1.Np == 23
assert self.phys1.Nt == 50
self.phys1._add_locations(pores=[0, 1, 3, 4], throats=[0, 1, 3, 4])
assert self.phys1.Np == 27
assert self.phys1.Nt == 54
def test_drop_locations_all_but_not_complete(self):
assert self.phys1.Np == 27
assert self.phys1.Nt == 54
assert 'pore.'+self.phys1.name in self.phase1.keys()
assert 'throat.'+self.phys1.name in self.phase1.keys()
self.phys1._drop_locations(pores=self.net.Ps)
assert 'pore.'+self.phys1.name in self.phase1.keys()
assert self.phase1.num_pores(self.phys1.name) == 0
assert 'throat.'+self.phys1.name in self.phase1.keys()
self.phys1._drop_locations(throats=self.net.Ts)
assert 'throat.'+self.phys1.name in self.phase1.keys()
assert self.phase1.num_throats(self.phys1.name) == 0
self.phys1._add_locations(pores=self.net.Ps, throats=self.net.Ts)
    def test_writing_subdict_names_across_subdomains(self):
ws = op.Workspace()
proj = ws.new_project()
pn = op.network.Cubic(shape=[10, 10, 10], spacing=1e-4, project=proj)
Ps = pn['pore.coords'][:, 0] < pn['pore.coords'][:, 0].mean()
Ts = pn.find_neighbor_throats(pores=Ps, mode='xnor')
geo1 = op.geometry.StickAndBall(network=pn, pores=Ps, throats=Ts)
Ps = pn['pore.coords'][:, 0] >= pn['pore.coords'][:, 0].mean()
Ts = pn.find_neighbor_throats(pores=Ps, mode='or')
geo2 = op.geometry.StickAndBall(network=pn, pores=Ps, throats=Ts)
pn['pore.foo'] = 1
# Can't create a subdict below foo
with pytest.raises(Exception):
pn['pore.foo.bar'] = 1
# Can create a subdict directly
pn['pore.baz.bar'] = 2
# Can't create a new item already used as subdict
with pytest.raises(Exception):
pn['pore.baz'] = 2
# Also works on subdomains
geo1['pore.blah'] = 1
with pytest.raises(Exception):
geo1['pore.blah.boo'] = 1
geo1['pore.bee.bop'] = 1
with pytest.raises(Exception):
geo1['pore.bee'] = 1
# Now start looking across objects
with pytest.raises(Exception):
geo1['pore.foo'] = 1 # Already exists on pn
with pytest.raises(Exception):
geo1['pore.foo.bar'] = 1 # pore.foo already exists on pn
with pytest.raises(Exception):
geo1['pore.baz'] = 1 # pore.baz.bar already exists on pn
# Now start looking across objects
geo2['pore.blah'] = 1
geo2['pore.bee.bop'] = 1
with pytest.raises(Exception):
geo1['pore.bee'] = 1
with pytest.raises(Exception):
pn['pore.bee'] = 1
with pytest.raises(Exception):
pn['pore.bee.bop'] = 1
if __name__ == '__main__':
t = SubdomainTest()
self = t
t.setup_class()
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
t.__getattribute__(item)()
|
[
"openpnm.Workspace",
"openpnm.geometry.GenericGeometry",
"openpnm.network.Cubic",
"openpnm.geometry.StickAndBall",
"pytest.raises",
"openpnm.phases.GenericPhase",
"openpnm.physics.GenericPhysics",
"scipy.rand"
] |
[((118, 132), 'openpnm.Workspace', 'op.Workspace', ([], {}), '()\n', (130, 132), True, 'import openpnm as op\n'), ((193, 226), 'openpnm.network.Cubic', 'op.network.Cubic', ([], {'shape': '[3, 3, 3]'}), '(shape=[3, 3, 3])\n', (209, 226), True, 'import openpnm as op\n'), ((246, 336), 'openpnm.geometry.GenericGeometry', 'op.geometry.GenericGeometry', ([], {'network': 'self.net', 'pores': 'self.net.Ps', 'throats': 'self.net.Ts'}), '(network=self.net, pores=self.net.Ps, throats=\n self.net.Ts)\n', (273, 336), True, 'import openpnm as op\n'), ((462, 482), 'scipy.rand', 'sp.rand', (['self.net.Np'], {}), '(self.net.Np)\n', (469, 482), True, 'import scipy as sp\n'), ((644, 664), 'scipy.rand', 'sp.rand', (['self.net.Nt'], {}), '(self.net.Nt)\n', (651, 664), True, 'import scipy as sp\n'), ((849, 889), 'openpnm.phases.GenericPhase', 'op.phases.GenericPhase', ([], {'network': 'self.net'}), '(network=self.net)\n', (871, 889), True, 'import openpnm as op\n'), ((912, 952), 'openpnm.phases.GenericPhase', 'op.phases.GenericPhase', ([], {'network': 'self.net'}), '(network=self.net)\n', (934, 952), True, 'import openpnm as op\n'), ((974, 1060), 'openpnm.physics.GenericPhysics', 'op.physics.GenericPhysics', ([], {'network': 'self.net', 'geometry': 'self.geo', 'phase': 'self.phase1'}), '(network=self.net, geometry=self.geo, phase=self.\n phase1)\n', (999, 1060), True, 'import openpnm as op\n'), ((1209, 1295), 'openpnm.physics.GenericPhysics', 'op.physics.GenericPhysics', ([], {'network': 'self.net', 'geometry': 'self.geo', 'phase': 'self.phase2'}), '(network=self.net, geometry=self.geo, phase=self.\n phase2)\n', (1234, 1295), True, 'import openpnm as op\n'), ((1467, 1481), 'openpnm.Workspace', 'op.Workspace', ([], {}), '()\n', (1479, 1481), True, 'import openpnm as op\n'), ((3439, 3453), 'openpnm.Workspace', 'op.Workspace', ([], {}), '()\n', (3451, 3453), True, 'import openpnm as op\n'), ((3500, 3566), 'openpnm.network.Cubic', 'op.network.Cubic', ([], {'shape': '[10, 10, 10]', 'spacing': '(0.0001)', 'project': 'proj'}), '(shape=[10, 10, 10], spacing=0.0001, project=proj)\n', (3516, 3566), True, 'import openpnm as op\n'), ((3711, 3769), 'openpnm.geometry.StickAndBall', 'op.geometry.StickAndBall', ([], {'network': 'pn', 'pores': 'Ps', 'throats': 'Ts'}), '(network=pn, pores=Ps, throats=Ts)\n', (3735, 3769), True, 'import openpnm as op\n'), ((3916, 3974), 'openpnm.geometry.StickAndBall', 'op.geometry.StickAndBall', ([], {'network': 'pn', 'pores': 'Ps', 'throats': 'Ts'}), '(network=pn, pores=Ps, throats=Ts)\n', (3940, 3974), True, 'import openpnm as op\n'), ((4059, 4083), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4072, 4083), False, 'import pytest\n'), ((4262, 4286), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4275, 4286), False, 'import pytest\n'), ((4398, 4422), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4411, 4422), False, 'import pytest\n'), ((4508, 4532), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4521, 4532), False, 'import pytest\n'), ((4624, 4648), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4637, 4648), False, 'import pytest\n'), ((4720, 4744), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4733, 4744), False, 'import pytest\n'), ((4829, 4853), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4842, 4853), False, 'import pytest\n'), ((5045, 5069), 'pytest.raises', 'pytest.raises', (['Exception'], {}), 
'(Exception)\n', (5058, 5069), False, 'import pytest\n'), ((5118, 5142), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (5131, 5142), False, 'import pytest\n'), ((5189, 5213), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (5202, 5213), False, 'import pytest\n')]
|
import pandas as pd
from shapely.geometry import Point
import geopandas as gpd
import math
import osmnx
import requests
from io import BytesIO
from zipfile import ZipFile
def read_poi_csv(input_file, col_id='id', col_name='name', col_lon='lon', col_lat='lat', col_kwds='kwds', col_sep=';',
kwds_sep=',', source_crs='EPSG:4326', target_crs='EPSG:4326', keep_other_cols=False):
"""Creates a POI GeoDataFrame from an input CSV file.
Args:
input_file (string): Path to the input csv file.
col_id (string): Name of the column containing the POI id (default: `id`).
col_name (string): Name of the column containing the POI name (default: `name`).
col_lon (string): Name of the column containing the POI longitude (default: `lon`).
col_lat (string): Name of the column containing the POI latitude (default: `lat`).
col_kwds (string): Name of the column containing the POI keywords (default: `kwds`).
col_sep (string): Column delimiter (default: `;`).
kwds_sep (string): Keywords delimiter (default: `,`).
source_crs (string): Coordinate Reference System of input data (default: `EPSG:4326`).
target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
keep_other_cols (bool): Whether to keep the rest of the columns in the csv file (default: `False`).
Returns:
A POI GeoDataFrame with columns `id`, `name` and `kwds`.
"""
def lon_lat_to_point(row, c_lon, c_lat):
try:
x_lon = float(row[c_lon])
y_lat = float(row[c_lat])
if math.isnan(x_lon) is False and math.isnan(y_lat) is False:
return Point(x_lon, y_lat)
else:
return float('NaN')
except:
return float('NaN')
pois = pd.read_csv(input_file, delimiter=col_sep, error_bad_lines=False)
init_poi_size = pois.index.size
columns = list(pois)
subset_cols = []
# Columns to Check for N/A, Nulls
if keep_other_cols:
subset_cols.extend(columns)
else:
subset_cols = [col_id, col_lon, col_lat]
if col_name in columns:
subset_cols.append(col_name)
if col_kwds in columns:
subset_cols.append(col_kwds)
# Geometry Column(Uncleaned)
pois['geometry'] = pois.apply(lambda row: lon_lat_to_point(row, col_lon, col_lat), axis=1)
subset_cols.append('geometry')
# Drop Columns Not in subset Columns.
drop_columns = set(columns) - set(subset_cols)
pois.drop(drop_columns, inplace=True, axis=1)
# Drop all N/A, Null rows from DataFrame.
pois.dropna(inplace=True)
if init_poi_size - pois.index.size > 0:
print("Skipped", (init_poi_size - pois.index.size), "rows due to errors.")
if col_kwds in columns:
pois[col_kwds] = pois[col_kwds].map(lambda s: s.split(kwds_sep))
source_crs = {'init': source_crs}
target_crs = {'init': target_crs}
pois = gpd.GeoDataFrame(pois, crs=source_crs, geometry=pois['geometry']).to_crs(target_crs).drop(columns=[col_lon,
col_lat])
print('Loaded ' + str(len(pois.index)) + ' POIs.')
return pois
def import_osmnx(bound, target_crs='EPSG:4326'):
"""Creates a POI GeoDataFrame from POIs retrieved by OSMNX (https://github.com/gboeing/osmnx).
Args:
bound (polygon): A polygon to be used as filter.
target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
Returns:
A POI GeoDataFrame with columns `id`, `name` and `kwds`.
"""
# retrieve pois
pois = osmnx.pois.pois_from_polygon(bound)
if len(pois.index) > 0:
# filter pois
pois = pois[pois.amenity.notnull()]
pois_filter = pois.element_type == 'node'
pois = pois[pois_filter]
# restructure gdf
subset_cols = ['osmid', 'amenity', 'name', 'geometry']
columns = list(pois)
drop_columns = set(columns) - set(subset_cols)
pois.drop(drop_columns, inplace=True, axis=1)
pois = pois.reset_index(drop=True)
pois = pois.rename(columns={'osmid': 'id', 'amenity': 'kwds'})
pois['kwds'] = pois['kwds'].map(lambda s: [s])
if target_crs != 'EPSG:4326':
target_crs = {'init': target_crs}
pois = pois.to_crs(target_crs)
print('Loaded ' + str(len(pois.index)) + ' POIs.')
return pois
def import_osmwrangle(osmwrangle_file, target_crs='EPSG:4326', bound=None):
"""Creates a POI GeoDataFrame from a file produced by OSMWrangle (https://github.com/SLIPO-EU/OSMWrangle).
Args:
osmwrangle_file (string): Path or URL to the input csv file.
target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
bound (polygon): A polygon to be used as filter.
Returns:
A POI GeoDataFrame with columns `id`, `name` and `kwds`.
"""
def lon_lat_to_point(row, c_lon, c_lat):
x_lon = float(row[c_lon])
y_lat = float(row[c_lat])
if math.isnan(x_lon) is False and math.isnan(y_lat) is False:
return Point(x_lon, y_lat)
else:
return float('NaN')
col_sep = '|'
col_id = 'ID'
col_lon = 'LON'
col_lat = 'LAT'
col_name = 'NAME'
col_cat = 'CATEGORY'
col_subcat = 'SUBCATEGORY'
source_crs = {'init': 'EPSG:4326'}
# Load the file
if osmwrangle_file.startswith('http') and osmwrangle_file.endswith('.zip'):
response = requests.get(osmwrangle_file)
zip_file = ZipFile(BytesIO(response.content))
with zip_file.open(zip_file.namelist()[0]) as csvfile:
pois = pd.read_csv(csvfile, delimiter=col_sep, error_bad_lines=False)
else:
pois = pd.read_csv(osmwrangle_file, delimiter=col_sep, error_bad_lines=False)
init_poi_size = pois.index.size
columns = list(pois)
subset_cols = [col_id, col_name, 'kwds', col_lon, col_lat]
# Geometry Column(Uncleaned)
pois['geometry'] = pois.apply(lambda row: lon_lat_to_point(row, col_lon, col_lat), axis=1)
subset_cols.append('geometry')
pois['kwds'] = pois[col_cat] + ',' + pois[col_subcat]
pois['kwds'] = pois['kwds'].map(lambda s: s.split(','))
# Drop Columns Not in subset Columns.
drop_columns = set(columns) - set(subset_cols)
pois.drop(drop_columns, inplace=True, axis=1)
# Drop all N/A, Null rows from DataFrame.
pois.dropna(inplace=True)
if init_poi_size - pois.index.size > 0:
print("Skipped", (init_poi_size - pois.index.size), "rows due to errors.")
pois = pois.rename(columns={col_id: 'id', col_name: 'name'})
pois = gpd.GeoDataFrame(pois, crs=source_crs, geometry=pois['geometry']).drop(columns=[col_lon, col_lat])
# Check whether location filter should be applied
if bound is not None:
spatial_filter = pois.geometry.intersects(bound)
pois = pois[spatial_filter]
if target_crs != 'EPSG:4326':
target_crs = {'init': target_crs}
pois = pois.to_crs(target_crs)
print('Loaded ' + str(len(pois.index)) + ' POIs.')
return pois
def retrieve_osm_loc(name, buffer_dist=0):
"""Retrieves a polygon from an OSM location.
Args:
name (string): Name of the location to be resolved.
buffer_dist (numeric): Buffer distance in meters.
Returns:
A polygon.
"""
geom = osmnx.core.gdf_from_place(name, buffer_dist=buffer_dist)
if len(geom.index) > 0:
geom = geom.iloc[0].geometry
else:
geom = None
return geom
def to_geojson(gdf, output_file):
"""Exports a GeoDataFrame to a GeoJSON file.
Args:
gdf (GeoDataFrame): The GeoDataFrame object to be exported.
output_file (string): Path to the output file.
"""
gdf.to_file(output_file, driver='GeoJSON')
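# --- Illustrative usage (added sketch, not part of the original module) ---
# A minimal example of how the helpers above could be combined; the file names
# and the place name below are hypothetical placeholders.
if __name__ == '__main__':
    # Load POIs from a semicolon-delimited CSV, keeping the default CRS.
    pois = read_poi_csv('pois.csv', col_sep=';')
    # Resolve a place name to a polygon and fetch OSM amenities inside it.
    bound = retrieve_osm_loc('Athens, Greece', buffer_dist=500)
    if bound is not None:
        osm_pois = import_osmnx(bound)
    # Export the CSV-based POIs to a GeoJSON file.
    to_geojson(pois, 'pois.geojson')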
|
[
"osmnx.pois.pois_from_polygon",
"pandas.read_csv",
"io.BytesIO",
"requests.get",
"shapely.geometry.Point",
"osmnx.core.gdf_from_place",
"geopandas.GeoDataFrame",
"math.isnan"
] |
[((1861, 1926), 'pandas.read_csv', 'pd.read_csv', (['input_file'], {'delimiter': 'col_sep', 'error_bad_lines': '(False)'}), '(input_file, delimiter=col_sep, error_bad_lines=False)\n', (1872, 1926), True, 'import pandas as pd\n'), ((3769, 3804), 'osmnx.pois.pois_from_polygon', 'osmnx.pois.pois_from_polygon', (['bound'], {}), '(bound)\n', (3797, 3804), False, 'import osmnx\n'), ((7580, 7636), 'osmnx.core.gdf_from_place', 'osmnx.core.gdf_from_place', (['name'], {'buffer_dist': 'buffer_dist'}), '(name, buffer_dist=buffer_dist)\n', (7605, 7636), False, 'import osmnx\n'), ((5680, 5709), 'requests.get', 'requests.get', (['osmwrangle_file'], {}), '(osmwrangle_file)\n', (5692, 5709), False, 'import requests\n'), ((5934, 6004), 'pandas.read_csv', 'pd.read_csv', (['osmwrangle_file'], {'delimiter': 'col_sep', 'error_bad_lines': '(False)'}), '(osmwrangle_file, delimiter=col_sep, error_bad_lines=False)\n', (5945, 6004), True, 'import pandas as pd\n'), ((5300, 5319), 'shapely.geometry.Point', 'Point', (['x_lon', 'y_lat'], {}), '(x_lon, y_lat)\n', (5305, 5319), False, 'from shapely.geometry import Point\n'), ((5737, 5762), 'io.BytesIO', 'BytesIO', (['response.content'], {}), '(response.content)\n', (5744, 5762), False, 'from io import BytesIO\n'), ((5846, 5908), 'pandas.read_csv', 'pd.read_csv', (['csvfile'], {'delimiter': 'col_sep', 'error_bad_lines': '(False)'}), '(csvfile, delimiter=col_sep, error_bad_lines=False)\n', (5857, 5908), True, 'import pandas as pd\n'), ((6840, 6905), 'geopandas.GeoDataFrame', 'gpd.GeoDataFrame', (['pois'], {'crs': 'source_crs', 'geometry': "pois['geometry']"}), "(pois, crs=source_crs, geometry=pois['geometry'])\n", (6856, 6905), True, 'import geopandas as gpd\n'), ((1727, 1746), 'shapely.geometry.Point', 'Point', (['x_lon', 'y_lat'], {}), '(x_lon, y_lat)\n', (1732, 1746), False, 'from shapely.geometry import Point\n'), ((5222, 5239), 'math.isnan', 'math.isnan', (['x_lon'], {}), '(x_lon)\n', (5232, 5239), False, 'import math\n'), ((5253, 5270), 'math.isnan', 'math.isnan', (['y_lat'], {}), '(y_lat)\n', (5263, 5270), False, 'import math\n'), ((1645, 1662), 'math.isnan', 'math.isnan', (['x_lon'], {}), '(x_lon)\n', (1655, 1662), False, 'import math\n'), ((1676, 1693), 'math.isnan', 'math.isnan', (['y_lat'], {}), '(y_lat)\n', (1686, 1693), False, 'import math\n'), ((3016, 3081), 'geopandas.GeoDataFrame', 'gpd.GeoDataFrame', (['pois'], {'crs': 'source_crs', 'geometry': "pois['geometry']"}), "(pois, crs=source_crs, geometry=pois['geometry'])\n", (3032, 3081), True, 'import geopandas as gpd\n')]
|
import re
import random
from collections import defaultdict
import src.settings as var
from src.utilities import *
from src import debuglog, errlog, plog
from src.decorators import cmd, event_listener
from src.messages import messages
from src.events import Event
KILLS = {} # type: Dict[str, List[str]]
@cmd("kill", chan=False, pm=True, playing=True, phases=("night",))
def wolf_kill(cli, nick, chan, rest):
"""Kills one or more players as a wolf."""
role = get_role(nick)
# eventually cub will listen on targeted_command and block kills that way
if role not in var.WOLF_ROLES - {"wolf cub"}:
return
if nick in var.SILENCED:
pm(cli, nick, messages["silenced"])
return
if var.DISEASED_WOLVES:
pm(cli, nick, messages["ill_wolves"])
return
# eventually crow will listen on targeted_command and block kills that way
# (or more likely, that restriction will be lifted and crow can do both)
if role == "werecrow" and var.OBSERVED.get(nick):
pm(cli, nick, messages["werecrow_transformed_nokill"])
return
pieces = re.split(" +", rest)
victims = []
orig = []
num_kills = 1
if var.ANGRY_WOLVES:
num_kills = 2
i = 0
extra = 0
while i < num_kills + extra:
try:
victim = pieces[i]
except IndexError:
break
if victim.lower() == "and":
extra += 1
i += 1
victim = pieces[i]
victim = get_victim(cli, nick, victim, False)
if not victim:
return
if victim == nick:
pm(cli, nick, messages["no_suicide"])
return
if in_wolflist(nick, victim):
pm(cli, nick, messages["wolf_no_target_wolf"])
return
orig.append(victim)
evt = Event("targeted_command", {"target": victim, "misdirection": True, "exchange": True})
evt.dispatch(cli, var, "kill", nick, victim, frozenset({"detrimental"}))
if evt.prevent_default:
return
victim = evt.data["target"]
victims.append(victim)
i += 1
if len(set(victims)) < len(victims):
pm(cli, nick, messages["wolf_must_target_multiple"])
return
KILLS[nick] = victims
if len(orig) > 1:
# need to expand this eventually
msg = messages["wolf_target_multiple"].format(orig[0], orig[1])
pm(cli, nick, messages["player"].format(msg))
debuglog("{0} ({1}) KILL: {2} ({3}) and {4} ({5})".format(nick, role, victims[0], get_role(victims[0]), victims[1], get_role(victims[1])))
else:
msg = messages["wolf_target"].format(orig[0])
pm(cli, nick, messages["player"].format(msg))
if num_kills > 1:
pm(cli, nick, messages["wolf_target_second"])
debuglog("{0} ({1}) KILL: {2} ({3})".format(nick, role, victims[0], get_role(victims[0])))
if in_wolflist(nick, nick):
relay_wolfchat_command(cli, nick, messages["wolfchat"].format(nick, msg), var.WOLF_ROLES, is_wolf_command=True, is_kill_command=True)
chk_nightdone(cli)
@cmd("retract", "r", chan=False, pm=True, playing=True, phases=("night",))
def wolf_retract(cli, nick, chan, rest):
"""Removes a wolf's kill selection."""
if nick not in KILLS:
return
del KILLS[nick]
pm(cli, nick, messages["retracted_kill"])
relay_wolfchat_command(cli, nick, messages["wolfchat_retracted_kill"].format(nick), var.WOLF_ROLES, is_wolf_command=True, is_kill_command=True)
@event_listener("del_player")
def on_del_player(evt, cli, var, nick, nickrole, nicktpls, death_triggers):
for a,b in list(KILLS.items()):
for n in b:
if n == nick:
KILLS[a].remove(nick)
if a == nick or len(KILLS[a]) == 0:
del KILLS[a]
@event_listener("rename_player")
def on_rename(evt, cli, var, prefix, nick):
kvp = []
for a,b in KILLS.items():
nl = []
for n in b:
if n == prefix:
n = nick
nl.append(n)
if a == prefix:
a = nick
kvp.append((a,nl))
KILLS.update(kvp)
if prefix in KILLS:
del KILLS[prefix]
@event_listener("night_acted")
def on_acted(evt, cli, var, nick, sender):
if nick in KILLS:
evt.data["acted"] = True
@event_listener("transition_day", priority=1)
def on_transition_day(evt, cli, var):
# figure out wolf target
found = defaultdict(int)
# split off into event + wolfcub.py
num_kills = 1
if var.ANGRY_WOLVES:
num_kills = 2
for v in KILLS.values():
for p in v:
if p:
# kill target starting with ! is invalid
# right now nothing does this, but monster eventually will
if p[0] == "!":
continue
found[p] += 1
for i in range(num_kills):
maxc = 0
dups = []
for v, c in found.items():
if c > maxc:
maxc = c
dups = [v]
elif c == maxc:
dups.append(v)
if maxc and dups:
victim = random.choice(dups)
evt.data["victims"].append(victim)
evt.data["bywolves"].add(victim)
evt.data["onlybywolves"].add(victim)
# special key to let us know to randomly select a wolf in case of retribution totem
evt.data["killers"][victim].append("@wolves")
del found[victim]
# this should be moved to an event in kill, where monster prefixes their nick with !
# and fallen angel subsequently removes the ! prefix
if len(var.ROLES["fallen angel"]) == 0:
for monster in var.ROLES["monster"]:
                    if monster in evt.data["victims"]:
evt.data["victims"].remove(monster)
evt.data["bywolves"].discard(monster)
evt.data["onlybywolves"].discard(monster)
@event_listener("exchange_roles")
def on_exchange(evt, cli, var, actor, nick, actor_role, nick_role):
if actor in KILLS:
del KILLS[actor]
if nick in KILLS:
del KILLS[nick]
@event_listener("chk_nightdone", priority=3)
def on_chk_nightdone(evt, cli, var):
if not var.DISEASED_WOLVES:
evt.data["actedcount"] += len(KILLS)
# eventually wolf cub will remove itself from nightroles in wolfcub.py
evt.data["nightroles"].extend(list_players(var.WOLF_ROLES - {"wolf cub"}))
@event_listener("chk_nightdone", priority=20)
def on_chk_nightdone2(evt, cli, var):
if not evt.prevent_default and not var.DISEASED_WOLVES:
# flatten KILLS
kills = set()
for ls in KILLS.values():
kills.update(ls)
# check if wolves are actually agreeing
# allow len(kills) == 0 through as that means that crow was dumb and observed instead
if not var.ANGRY_WOLVES and len(kills) > 1:
evt.data["actedcount"] -= 1
elif var.ANGRY_WOLVES and (len(kills) == 1 or len(kills) > 2):
evt.data["actedcount"] -= 1
@event_listener("transition_night_end", priority=2)
def on_transition_night_end(evt, cli, var):
ps = list_players()
wolves = list_players(var.WOLFCHAT_ROLES)
# roles in wolfchat (including those that can only listen in but not speak)
wcroles = var.WOLFCHAT_ROLES
# roles allowed to talk in wolfchat
talkroles = var.WOLFCHAT_ROLES
# condition imposed on talking in wolfchat (only during day/night, or None if talking is disabled)
wccond = ""
if var.RESTRICT_WOLFCHAT & var.RW_DISABLE_NIGHT:
if var.RESTRICT_WOLFCHAT & var.RW_DISABLE_DAY:
wccond = None
else:
wccond = " during day"
elif var.RESTRICT_WOLFCHAT & var.RW_DISABLE_DAY:
wccond = " during night"
if var.RESTRICT_WOLFCHAT & var.RW_REM_NON_WOLVES:
if var.RESTRICT_WOLFCHAT & var.RW_TRAITOR_NON_WOLF:
wcroles = var.WOLF_ROLES
talkroles = var.WOLF_ROLES
else:
wcroles = var.WOLF_ROLES | {"traitor"}
talkroles = var.WOLF_ROLES | {"traitor"}
elif var.RESTRICT_WOLFCHAT & var.RW_WOLVES_ONLY_CHAT:
if var.RESTRICT_WOLFCHAT & var.RW_TRAITOR_NON_WOLF:
talkroles = var.WOLF_ROLES
else:
talkroles = var.WOLF_ROLES | {"traitor"}
for wolf in wolves:
# should make the cursed information an event that cursedvillager can then add to
# (e.g. an event to change what prefixes are sent with the role message, and a
# 2nd event to change information in parens in player list)
normal_notify = wolf in var.PLAYERS and not is_user_simple(wolf)
role = get_role(wolf)
cursed = "cursed " if wolf in var.ROLES["cursed villager"] and role in wcroles else ""
if normal_notify:
msg = "{0}_notify".format(role.replace(" ", "_"))
cmsg = "cursed_" + msg
try:
if cursed:
try:
pm(cli, wolf, messages[cmsg])
except KeyError:
pm(cli, wolf, messages[msg].format(cursed))
else:
pm(cli, wolf, messages[msg].format(cursed))
except KeyError:
# catchall in case we forgot something above
an = 'n' if role.startswith(("a", "e", "i", "o", "u")) else ""
pm(cli, wolf, messages["undefined_role_notify"].format(an, role))
if len(wolves) > 1 and wccond is not None and role in talkroles:
pm(cli, wolf, messages["wolfchat_notify"].format(wccond))
else:
an = "n" if cursed == "" and role.startswith(("a", "e", "i", "o", "u")) else ""
pm(cli, wolf, messages["wolf_simple"].format(an, cursed, role)) # !simple
pl = ps[:]
random.shuffle(pl)
pl.remove(wolf) # remove self from list
if role in wcroles:
for i, player in enumerate(pl):
prole = get_role(player)
if prole in wcroles:
cursed = ""
if player in var.ROLES["cursed villager"]:
cursed = "cursed "
pl[i] = "\u0002{0}\u0002 ({1}{2})".format(player, cursed, prole)
elif player in var.ROLES["cursed villager"]:
pl[i] = player + " (cursed)"
elif role == "warlock":
for i, player in enumerate(pl):
if player in var.ROLES["cursed villager"]:
pl[i] = player + " (cursed)"
pm(cli, wolf, "Players: " + ", ".join(pl))
if role in var.WOLF_ROLES - {"wolf cub"} and var.DISEASED_WOLVES:
pm(cli, wolf, messages["ill_wolves"])
# TODO: split the following out into their own files (mystic, cub and alpha)
if role == "wolf mystic":
# if adding this info to !myrole, you will need to save off this count so that they can't get updated info until the next night
# # of special villagers = # of players - # of villagers - # of wolves - # of neutrals
numvills = len(ps) - len(list_players(var.WOLFTEAM_ROLES)) - len(list_players(("villager", "vengeful ghost", "time lord", "amnesiac", "lycan"))) - len(list_players(var.TRUE_NEUTRAL_ROLES))
pm(cli, wolf, messages["wolf_mystic_info"].format("are" if numvills != 1 else "is", numvills, "s" if numvills != 1 else ""))
if not var.DISEASED_WOLVES and var.ANGRY_WOLVES and role in var.WOLF_ROLES - {"wolf cub"}:
pm(cli, wolf, messages["angry_wolves"])
if var.ALPHA_ENABLED and role == "alpha wolf" and wolf not in var.ALPHA_WOLVES:
pm(cli, wolf, messages["wolf_bite"])
@event_listener("begin_day")
def on_begin_day(evt, cli, var):
KILLS.clear()
@event_listener("reset")
def on_reset(evt, var):
KILLS.clear()
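# --- Illustrative sketch (added, not part of the original module) ---
# The tally in on_transition_day() above reduces to: count how many wolves
# picked each target, then take the most-voted target, breaking ties at
# random. A self-contained version of that idea with made-up sample data:
#
#   from collections import defaultdict
#   import random
#
#   kills = {"wolf1": ["alice"], "wolf2": ["alice"], "wolf3": ["bob"]}
#   found = defaultdict(int)
#   for targets in kills.values():
#       for target in targets:
#           found[target] += 1
#   top = max(found.values())
#   victim = random.choice([p for p, c in found.items() if c == top])  # "alice"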
# vim: set sw=4 expandtab:
|
[
"re.split",
"random.choice",
"random.shuffle",
"src.events.Event",
"src.decorators.event_listener",
"src.settings.OBSERVED.get",
"collections.defaultdict",
"src.decorators.cmd"
] |
[((308, 373), 'src.decorators.cmd', 'cmd', (['"""kill"""'], {'chan': '(False)', 'pm': '(True)', 'playing': '(True)', 'phases': "('night',)"}), "('kill', chan=False, pm=True, playing=True, phases=('night',))\n", (311, 373), False, 'from src.decorators import cmd, event_listener\n'), ((3121, 3194), 'src.decorators.cmd', 'cmd', (['"""retract"""', '"""r"""'], {'chan': '(False)', 'pm': '(True)', 'playing': '(True)', 'phases': "('night',)"}), "('retract', 'r', chan=False, pm=True, playing=True, phases=('night',))\n", (3124, 3194), False, 'from src.decorators import cmd, event_listener\n'), ((3536, 3564), 'src.decorators.event_listener', 'event_listener', (['"""del_player"""'], {}), "('del_player')\n", (3550, 3564), False, 'from src.decorators import cmd, event_listener\n'), ((3832, 3863), 'src.decorators.event_listener', 'event_listener', (['"""rename_player"""'], {}), "('rename_player')\n", (3846, 3863), False, 'from src.decorators import cmd, event_listener\n'), ((4211, 4240), 'src.decorators.event_listener', 'event_listener', (['"""night_acted"""'], {}), "('night_acted')\n", (4225, 4240), False, 'from src.decorators import cmd, event_listener\n'), ((4341, 4385), 'src.decorators.event_listener', 'event_listener', (['"""transition_day"""'], {'priority': '(1)'}), "('transition_day', priority=1)\n", (4355, 4385), False, 'from src.decorators import cmd, event_listener\n'), ((5943, 5975), 'src.decorators.event_listener', 'event_listener', (['"""exchange_roles"""'], {}), "('exchange_roles')\n", (5957, 5975), False, 'from src.decorators import cmd, event_listener\n'), ((6140, 6183), 'src.decorators.event_listener', 'event_listener', (['"""chk_nightdone"""'], {'priority': '(3)'}), "('chk_nightdone', priority=3)\n", (6154, 6183), False, 'from src.decorators import cmd, event_listener\n'), ((6462, 6506), 'src.decorators.event_listener', 'event_listener', (['"""chk_nightdone"""'], {'priority': '(20)'}), "('chk_nightdone', priority=20)\n", (6476, 6506), False, 'from src.decorators import cmd, event_listener\n'), ((7061, 7111), 'src.decorators.event_listener', 'event_listener', (['"""transition_night_end"""'], {'priority': '(2)'}), "('transition_night_end', priority=2)\n", (7075, 7111), False, 'from src.decorators import cmd, event_listener\n'), ((11763, 11790), 'src.decorators.event_listener', 'event_listener', (['"""begin_day"""'], {}), "('begin_day')\n", (11777, 11790), False, 'from src.decorators import cmd, event_listener\n'), ((11844, 11867), 'src.decorators.event_listener', 'event_listener', (['"""reset"""'], {}), "('reset')\n", (11858, 11867), False, 'from src.decorators import cmd, event_listener\n'), ((1107, 1127), 're.split', 're.split', (['""" +"""', 'rest'], {}), "(' +', rest)\n", (1115, 1127), False, 'import re\n'), ((4465, 4481), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (4476, 4481), False, 'from collections import defaultdict\n'), ((991, 1013), 'src.settings.OBSERVED.get', 'var.OBSERVED.get', (['nick'], {}), '(nick)\n', (1007, 1013), True, 'import src.settings as var\n'), ((1839, 1928), 'src.events.Event', 'Event', (['"""targeted_command"""', "{'target': victim, 'misdirection': True, 'exchange': True}"], {}), "('targeted_command', {'target': victim, 'misdirection': True,\n 'exchange': True})\n", (1844, 1928), False, 'from src.events import Event\n'), ((9866, 9884), 'random.shuffle', 'random.shuffle', (['pl'], {}), '(pl)\n', (9880, 9884), False, 'import random\n'), ((5161, 5180), 'random.choice', 'random.choice', (['dups'], {}), '(dups)\n', (5174, 5180), False, 
'import random\n')]
|
import shutil
from pathlib import Path
from tempfile import mkdtemp
import pytest
from click.testing import CliRunner
import ape
# NOTE: Ensure that we don't use local paths for these
ape.config.DATA_FOLDER = Path(mkdtemp()).resolve()
ape.config.PROJECT_FOLDER = Path(mkdtemp()).resolve()
@pytest.fixture(scope="session")
def config():
yield ape.config
@pytest.fixture(scope="session")
def data_folder(config):
yield config.DATA_FOLDER
@pytest.fixture(scope="session")
def plugin_manager():
yield ape.networks.plugin_manager
@pytest.fixture(scope="session")
def accounts():
yield ape.accounts
@pytest.fixture(scope="session")
def compilers():
yield ape.compilers
@pytest.fixture(scope="session")
def networks():
yield ape.networks
@pytest.fixture(scope="session")
def chain():
yield ape.chain
@pytest.fixture(scope="session")
def project_folder(config):
yield config.PROJECT_FOLDER
@pytest.fixture(scope="session")
def project(config):
yield ape.Project(config.PROJECT_FOLDER)
@pytest.fixture
def keyparams():
# NOTE: password is 'a'
return {
"address": "7e5f4552091a69125d5dfcb7b8c2659029395bdf",
"crypto": {
"cipher": "aes-128-ctr",
"cipherparams": {"iv": "7bc492fb5dca4fe80fd47645b2aad0ff"},
"ciphertext": "43beb65018a35c31494f642ec535315897634b021d7ec5bb8e0e2172387e2812",
"kdf": "scrypt",
"kdfparams": {
"dklen": 32,
"n": 262144,
"r": 1,
"p": 8,
"salt": "<PASSWORD>",
},
"mac": "6a1d520975a031e11fc16cff610f5ae7476bcae4f2f598bc59ccffeae33b1caa",
},
"id": "ee<PASSWORD>",
"version": 3,
}
@pytest.fixture
def temp_accounts_path(config):
path = Path(config.DATA_FOLDER) / "accounts"
path.mkdir(exist_ok=True, parents=True)
yield path
if path.exists():
shutil.rmtree(path)
@pytest.fixture
def runner(project):
yield CliRunner()
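# --- Illustrative usage (added sketch, not part of the original conftest) ---
# Hypothetical tests that would consume the session fixtures defined above.
#
#   def test_data_folder_exists(data_folder):
#       assert data_folder.exists()
#
#   def test_runner_is_ready(runner):
#       assert runner is not None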
|
[
"pathlib.Path",
"ape.Project",
"click.testing.CliRunner",
"tempfile.mkdtemp",
"shutil.rmtree",
"pytest.fixture"
] |
[((295, 326), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (309, 326), False, 'import pytest\n'), ((365, 396), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (379, 396), False, 'import pytest\n'), ((454, 485), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (468, 485), False, 'import pytest\n'), ((549, 580), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (563, 580), False, 'import pytest\n'), ((623, 654), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (637, 654), False, 'import pytest\n'), ((699, 730), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (713, 730), False, 'import pytest\n'), ((773, 804), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (787, 804), False, 'import pytest\n'), ((841, 872), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (855, 872), False, 'import pytest\n'), ((936, 967), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (950, 967), False, 'import pytest\n'), ((999, 1033), 'ape.Project', 'ape.Project', (['config.PROJECT_FOLDER'], {}), '(config.PROJECT_FOLDER)\n', (1010, 1033), False, 'import ape\n'), ((1828, 1852), 'pathlib.Path', 'Path', (['config.DATA_FOLDER'], {}), '(config.DATA_FOLDER)\n', (1832, 1852), False, 'from pathlib import Path\n'), ((1957, 1976), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (1970, 1976), False, 'import shutil\n'), ((2026, 2037), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (2035, 2037), False, 'from click.testing import CliRunner\n'), ((217, 226), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (224, 226), False, 'from tempfile import mkdtemp\n'), ((271, 280), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (278, 280), False, 'from tempfile import mkdtemp\n')]
|
#pylint: disable=logging-fstring-interpolation
#Standart library imports
import shutil
import os
import time
from typing import Tuple
from pathlib import Path
import re
from shutil import copyfile
import wget
# Local imports
from selenium_driver_updater.util.logger import logger
from selenium_driver_updater.util.exceptions import DriverVersionInvalidException
from selenium_driver_updater.driver_base import DriverBase
class PhantomJS(DriverBase):
"Class for working with Selenium phantomjs binary"
_repo_name = 'ariya/phantomjs'
_tmp_folder_path = 'tmp'
def __init__(self, **kwargs):
kwargs.update(repo_name=PhantomJS._repo_name)
DriverBase.__init__(self, **kwargs)
self.system_name = ''
#assign of specific os
specific_system = str(kwargs.get('system_name', ''))
specific_system = specific_system.replace('linux64', 'linux-x86_64')
specific_system = specific_system.replace('linux32', 'linux-i686').replace('macos', 'macosx')
if specific_system:
self.system_name = "phantomjs-{}-" + f"{specific_system}"
if 'win' in specific_system:
self.system_name = "phantomjs-{}-windows"
if 'linux' in specific_system:
self.system_name = self.system_name + '.tar.bz2'
else:
self.system_name = self.system_name + '.zip'
self.phantomjs_path = self.driver_path
def _get_latest_version_phantomjs(self) -> str:
"""Gets latest phantomjs version
Returns:
str
latest_version (str) : Latest version of phantomjs.
"""
latest_version : str = ''
repo_name = PhantomJS._repo_name
latest_version = self.github_viewer.get_latest_release_tag_by_repo_name(repo_name=repo_name)
logger.info(f'Latest version of phantomjs: {latest_version}')
return latest_version
def _compare_current_version_and_latest_version_phantomjs(self) -> Tuple[bool, str, str]:
"""Compares current version of phantomjs to latest version
Returns:
Tuple of bool, str and str
is_driver_up_to_date (bool) : It true the driver is up to date. Defaults to False.
current_version (str) : Current version of the driver.
latest_version (str) : Latest version of the driver.
"""
is_driver_up_to_date : bool = False
current_version : str = ''
latest_version : str = ''
current_version = super()._get_current_version_driver()
if not current_version:
return is_driver_up_to_date, current_version, latest_version
latest_version = self._get_latest_version_phantomjs()
if current_version == latest_version:
is_driver_up_to_date = True
message = ('Your existing phantomjs is up to date.'
f'current_version: {current_version} latest_version: {latest_version}')
logger.info(message)
return is_driver_up_to_date, current_version, latest_version
def _check_if_phantomjs_is_up_to_date(self) -> str:
"""Сhecks for the latest version, downloads or updates phantomjs binary
Returns:
str
driver_path (str) : Path where phantomjs was downloaded or updated.
"""
driver_path : str = ''
if self.check_driver_is_up_to_date and not self.system_name:
is_driver_up_to_date, current_version, latest_version = self._compare_current_version_and_latest_version_phantomjs()
if is_driver_up_to_date:
return self.phantomjs_path
driver_path = self._download_driver()
if self.check_driver_is_up_to_date and not self.system_name:
is_driver_up_to_date, current_version, latest_version = self._compare_current_version_and_latest_version_phantomjs()
if not is_driver_up_to_date:
message = ('Problem with updating phantomjs'
f'current_version: {current_version} latest_version: {latest_version}')
logger.error(message)
message = 'Trying to download previous latest version of phantomjs'
logger.info(message)
driver_path = self._download_driver(previous_version=True)
return driver_path
def __rename_driver(self, archive_folder_path : str, archive_driver_path : str) -> None:
"""Renames phantomjs if it was given
Args:
archive_folder_path (str) : Path to the main folder
archive_driver_path (str) : Path to the phantomjs archive
"""
renamed_driver_path : str = ''
new_path = archive_folder_path + os.path.sep + self.filename if not archive_folder_path.endswith(os.path.sep) else archive_folder_path + self.filename
if Path(new_path).exists():
Path(new_path).unlink()
os.rename(archive_driver_path, new_path)
renamed_driver_path = self.path + self.filename
if Path(renamed_driver_path).exists():
Path(renamed_driver_path).unlink()
copyfile(new_path, renamed_driver_path)
def main(self) -> str:
"""Main function, checks for the latest version, downloads or updates phantomjs binary or
downloads specific version of phantomjs.
Returns:
str
driver_path (str) : Path where phantomjs was downloaded or updated.
"""
driver_path : str = ''
if not self.version:
driver_path = self._check_if_phantomjs_is_up_to_date()
else:
driver_path = self._download_driver(version=self.version)
return driver_path
def _get_latest_previous_version_phantomjs_via_requests(self) -> str:
"""Gets previous latest phantomjs version
Returns:
str
latest_version_previous (str) : Latest previous version of phantomjs.
"""
latest_previous_version : str = ''
all_versions = []
url = self.setting["PhantomJS"]["LinkAllReleases"]
json_data = self.requests_getter.get_result_by_request(url=url, is_json=True)
values = json_data.get('values')
for value in values:
value_name = value.get('name')
if not 'beta' in value_name:
find_string = re.findall(self.setting["Program"]["wedriverVersionPattern"], value_name)
version = find_string[0] if len(find_string) > 0 else ''
all_versions.append(version)
all_versions = list(set(all_versions))
all_versions.sort(key=lambda s: list(map(int, s.split('.'))))
latest_previous_version = all_versions[len(all_versions)-2]
logger.info(f'Latest previous version of phantomjs: {latest_previous_version}')
return latest_previous_version
def _check_if_version_is_valid(self, url : str) -> None:
"""Checks the specified version for existence.
Args:
            url (str) : Full download url of phantomjs.
"""
archive_name : str = url.split("/")[len(url.split("/"))-1]
url_releases : str = self.setting["PhantomJS"]["LinkAllReleases"]
is_found : bool = False
while is_found is False:
json_data = self.requests_getter.get_result_by_request(url=url_releases, is_json=True)
for data in json_data.get('values'):
if data.get('name') == archive_name:
is_found = True
break
url_releases = json_data.get('next')
if not url_releases:
break
if not is_found:
message = f'Wrong version or system_name was specified. archive_name: {archive_name} url: {url}'
raise DriverVersionInvalidException(message)
def _download_driver(self, version : str = '', previous_version : bool = False) -> str:
"""Function to download, delete or upgrade current phantomjs
Args:
version (str) : Specific phantomjs version to download. Defaults to empty string.
            previous_version (bool) : If True, the latest previous version of phantomjs will be downloaded. Defaults to False.
Returns:
str
driver_path (str) : Path to unzipped driver.
"""
url : str = ''
latest_version : str = ''
latest_previous_version : str = ''
driver_path : str = ''
if self.upgrade:
super()._delete_current_driver_for_current_os()
if version:
logger.info(f'Started download phantomjs specific_version: {version}')
url = self.setting["PhantomJS"]["LinkLastReleaseFile"].format(version)
elif previous_version:
latest_previous_version = self._get_latest_previous_version_phantomjs_via_requests()
logger.info(f'Started download phantomjs latest_previous_version: {latest_previous_version}')
url = self.setting["PhantomJS"]["LinkLastReleaseFile"].format(latest_previous_version)
else:
latest_version = self._get_latest_version_phantomjs()
logger.info(f'Started download phantomjs latest_version: {latest_version}')
url = self.setting["PhantomJS"]["LinkLastReleaseFile"].format(latest_version)
if self.system_name:
url = url.replace(url.split("/")[-1], '')
version = [value for key,value in locals().items() if 'version' in key and value][0]
url = url + self.system_name.format(version)
            logger.info(f'Started downloading phantomjs for specific system: {self.system_name}')
if any([version, self.system_name ,latest_previous_version]):
self._check_if_version_is_valid(url=url)
archive_name = url.split("/")[-1]
out_path = self.path + archive_name
if Path(out_path).exists():
Path(out_path).unlink()
logger.info(f'Started download phantomjs by url: {url}')
if self.info_messages:
archive_path = wget.download(url=url, out=out_path)
else:
archive_path = wget.download(url=url, out=out_path, bar=None)
time.sleep(2)
logger.info(f'PhantomJS was downloaded to path: {archive_path}')
out_path = self.path
parameters = dict(archive_path=archive_path, out_path=out_path)
self.extractor.extract_and_detect_archive_format(**parameters)
platform : str = self.setting["PhantomJS"]["LastReleasePlatform"]
archive_path_folder = self.path + url.split("/")[-1].replace('.zip', '').replace(".tar.bz2", '') + os.path.sep
archive_path_folder_bin = archive_path_folder + 'bin' + os.path.sep
driver_archive_path = archive_path_folder_bin + platform
if not self.filename:
copyfile(driver_archive_path, self.path + platform)
else:
parameters = dict(archive_folder_path=archive_path_folder_bin, archive_driver_path=driver_archive_path)
self.__rename_driver(**parameters)
if Path(archive_path_folder).exists():
shutil.rmtree(archive_path_folder)
driver_path = self.phantomjs_path
logger.info(f'PhantomJS was successfully unpacked by path: {driver_path}')
if self.chmod:
super()._chmod_driver()
return driver_path
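# --- Illustrative usage (added sketch, not part of the original module) ---
# The constructor keyword arguments are handled by DriverBase, which is not
# shown here, so 'path' and 'filename' below are assumptions.
#
#   driver = PhantomJS(path='drivers/', filename='phantomjs',
#                      upgrade=True, chmod=True,
#                      check_driver_is_up_to_date=True, info_messages=True)
#   binary_path = driver.main()  # download/update and return the binary path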
|
[
"selenium_driver_updater.util.logger.logger.info",
"wget.download",
"pathlib.Path",
"selenium_driver_updater.util.exceptions.DriverVersionInvalidException",
"os.rename",
"time.sleep",
"shutil.rmtree",
"shutil.copyfile",
"selenium_driver_updater.util.logger.logger.error",
"selenium_driver_updater.driver_base.DriverBase.__init__",
"re.findall"
] |
[((672, 707), 'selenium_driver_updater.driver_base.DriverBase.__init__', 'DriverBase.__init__', (['self'], {}), '(self, **kwargs)\n', (691, 707), False, 'from selenium_driver_updater.driver_base import DriverBase\n'), ((1842, 1903), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""Latest version of phantomjs: {latest_version}"""'], {}), "(f'Latest version of phantomjs: {latest_version}')\n", (1853, 1903), False, 'from selenium_driver_updater.util.logger import logger\n'), ((4991, 5031), 'os.rename', 'os.rename', (['archive_driver_path', 'new_path'], {}), '(archive_driver_path, new_path)\n', (5000, 5031), False, 'import os\n'), ((5192, 5231), 'shutil.copyfile', 'copyfile', (['new_path', 'renamed_driver_path'], {}), '(new_path, renamed_driver_path)\n', (5200, 5231), False, 'from shutil import copyfile\n'), ((6831, 6910), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""Latest previous version of phantomjs: {latest_previous_version}"""'], {}), "(f'Latest previous version of phantomjs: {latest_previous_version}')\n", (6842, 6910), False, 'from selenium_driver_updater.util.logger import logger\n'), ((10085, 10141), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""Started download phantomjs by url: {url}"""'], {}), "(f'Started download phantomjs by url: {url}')\n", (10096, 10141), False, 'from selenium_driver_updater.util.logger import logger\n'), ((10334, 10347), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (10344, 10347), False, 'import time\n'), ((10357, 10421), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""PhantomJS was downloaded to path: {archive_path}"""'], {}), "(f'PhantomJS was downloaded to path: {archive_path}')\n", (10368, 10421), False, 'from selenium_driver_updater.util.logger import logger\n'), ((11356, 11430), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""PhantomJS was successfully unpacked by path: {driver_path}"""'], {}), "(f'PhantomJS was successfully unpacked by path: {driver_path}')\n", (11367, 11430), False, 'from selenium_driver_updater.util.logger import logger\n'), ((3010, 3030), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['message'], {}), '(message)\n', (3021, 3030), False, 'from selenium_driver_updater.util.logger import logger\n'), ((7886, 7924), 'selenium_driver_updater.util.exceptions.DriverVersionInvalidException', 'DriverVersionInvalidException', (['message'], {}), '(message)\n', (7915, 7924), False, 'from selenium_driver_updater.util.exceptions import DriverVersionInvalidException\n'), ((8699, 8769), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""Started download phantomjs specific_version: {version}"""'], {}), "(f'Started download phantomjs specific_version: {version}')\n", (8710, 8769), False, 'from selenium_driver_updater.util.logger import logger\n'), ((9703, 9795), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""Started downloading geckodriver for specific system: {self.system_name}"""'], {}), "(\n f'Started downloading geckodriver for specific system: {self.system_name}')\n", (9714, 9795), False, 'from selenium_driver_updater.util.logger import logger\n'), ((10201, 10237), 'wget.download', 'wget.download', ([], {'url': 'url', 'out': 'out_path'}), '(url=url, out=out_path)\n', (10214, 10237), False, 'import wget\n'), ((10279, 10325), 'wget.download', 'wget.download', ([], {'url': 'url', 'out': 'out_path', 'bar': 'None'}), '(url=url, out=out_path, 
bar=None)\n', (10292, 10325), False, 'import wget\n'), ((10978, 11029), 'shutil.copyfile', 'copyfile', (['driver_archive_path', '(self.path + platform)'], {}), '(driver_archive_path, self.path + platform)\n', (10986, 11029), False, 'from shutil import copyfile\n'), ((11269, 11303), 'shutil.rmtree', 'shutil.rmtree', (['archive_path_folder'], {}), '(archive_path_folder)\n', (11282, 11303), False, 'import shutil\n'), ((4150, 4171), 'selenium_driver_updater.util.logger.logger.error', 'logger.error', (['message'], {}), '(message)\n', (4162, 4171), False, 'from selenium_driver_updater.util.logger import logger\n'), ((4272, 4292), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['message'], {}), '(message)\n', (4283, 4292), False, 'from selenium_driver_updater.util.logger import logger\n'), ((4921, 4935), 'pathlib.Path', 'Path', (['new_path'], {}), '(new_path)\n', (4925, 4935), False, 'from pathlib import Path\n'), ((5100, 5125), 'pathlib.Path', 'Path', (['renamed_driver_path'], {}), '(renamed_driver_path)\n', (5104, 5125), False, 'from pathlib import Path\n'), ((6442, 6515), 're.findall', 're.findall', (["self.setting['Program']['wedriverVersionPattern']", 'value_name'], {}), "(self.setting['Program']['wedriverVersionPattern'], value_name)\n", (6452, 6515), False, 'import re\n'), ((8997, 9100), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""Started download phantomjs latest_previous_version: {latest_previous_version}"""'], {}), "(\n f'Started download phantomjs latest_previous_version: {latest_previous_version}'\n )\n", (9008, 9100), False, 'from selenium_driver_updater.util.logger import logger\n'), ((9285, 9360), 'selenium_driver_updater.util.logger.logger.info', 'logger.info', (['f"""Started download phantomjs latest_version: {latest_version}"""'], {}), "(f'Started download phantomjs latest_version: {latest_version}')\n", (9296, 9360), False, 'from selenium_driver_updater.util.logger import logger\n'), ((10015, 10029), 'pathlib.Path', 'Path', (['out_path'], {}), '(out_path)\n', (10019, 10029), False, 'from pathlib import Path\n'), ((11221, 11246), 'pathlib.Path', 'Path', (['archive_path_folder'], {}), '(archive_path_folder)\n', (11225, 11246), False, 'from pathlib import Path\n'), ((4958, 4972), 'pathlib.Path', 'Path', (['new_path'], {}), '(new_path)\n', (4962, 4972), False, 'from pathlib import Path\n'), ((5148, 5173), 'pathlib.Path', 'Path', (['renamed_driver_path'], {}), '(renamed_driver_path)\n', (5152, 5173), False, 'from pathlib import Path\n'), ((10052, 10066), 'pathlib.Path', 'Path', (['out_path'], {}), '(out_path)\n', (10056, 10066), False, 'from pathlib import Path\n')]
|
from django.contrib import admin
from .models import Distribution
admin.site.register(Distribution)
# Register your models here.
|
[
"django.contrib.admin.site.register"
] |
[((67, 100), 'django.contrib.admin.site.register', 'admin.site.register', (['Distribution'], {}), '(Distribution)\n', (86, 100), False, 'from django.contrib import admin\n')]
|
from django.contrib.auth import authenticate, login
from django.shortcuts import render, redirect
from cart.models import Cart
from django.views import View
from .forms import LoginForm, RegistrationForm, CreateCompanyForm
from customer.models import Customer, ShippingAddress
from src.utils.mixins import CustomerMixin
from checkout.models import ApplyOrganization
class LoginView(CustomerMixin, View):
def get(self, request, *args, **kwargs):
if request.user.is_authenticated:
return redirect('catalog')
form = LoginForm()
return render(request, 'customer/login.html', {'form': form})
def post(self, request, *args, **kwargs):
form = LoginForm(request.POST or None)
if form.is_valid():
email = form.cleaned_data['email']
password = form.cleaned_data['password']
user = authenticate(request, email=email, password=password)
if user:
login(request, user)
return redirect('catalog')
return render(request, 'customer/login.html', {'form': form})
class RegistrationView(View):
def get(self, request, *args, **kwargs):
if request.user.is_authenticated:
return redirect('catalog')
form = RegistrationForm()
return render(request, 'customer/register.html', {'form': form})
def post(self, request, *args, **kwargs):
form = RegistrationForm(request.POST or None, request.FILES or None)
if form.is_valid():
new_user = form.save(commit=False)
customer = Customer.objects.create(user=new_user, status="Unrecognized")
customer.save()
cart = Cart.objects.create(customer=customer)
cart.save()
address = ShippingAddress.objects.create(customer=customer)
address.save()
new_user.set_password(form.cleaned_data['<PASSWORD>'])
new_user.save()
return redirect('login')
return render(request, 'customer/register.html', {'form': form})
class CreateCompany(View):
def get(self, request, *args, **kwargs):
if request.user.is_authenticated and request.user.STATUS_AUTH == "Recognized":
form = CreateCompanyForm()
return render(request, 'customer/create_company.html', {'form': form})
return redirect('catalog')
def post(self, request, *args, **kwargs):
if request.user.is_authenticated and request.user.STATUS_AUTH == "Recognized":
form = CreateCompanyForm(request.POST or None, request.FILES or None)
if form.is_valid():
new_company = form.save(commit=False)
new_company.STATUS_COMPANY = "No verify"
new_company.user = request.user
new_company.save()
return redirect('catalog')
return render(request, 'customer/register.html', {'form': form})
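# --- Illustrative URL wiring (added sketch, not part of the original module) ---
# One possible urls.py for these class-based views. The views above redirect to
# the URL names 'login' and 'catalog'; the remaining names and paths are
# assumptions.
#
#   from django.urls import path
#   from .views import LoginView, RegistrationView, CreateCompany
#
#   urlpatterns = [
#       path('login/', LoginView.as_view(), name='login'),
#       path('register/', RegistrationView.as_view(), name='register'),
#       path('company/create/', CreateCompany.as_view(), name='create_company'),
#   ]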
|
[
"django.shortcuts.render",
"django.contrib.auth.authenticate",
"django.contrib.auth.login",
"django.shortcuts.redirect",
"cart.models.Cart.objects.create",
"customer.models.Customer.objects.create",
"customer.models.ShippingAddress.objects.create"
] |
[((574, 628), 'django.shortcuts.render', 'render', (['request', '"""customer/login.html"""', "{'form': form}"], {}), "(request, 'customer/login.html', {'form': form})\n", (580, 628), False, 'from django.shortcuts import render, redirect\n'), ((1040, 1094), 'django.shortcuts.render', 'render', (['request', '"""customer/login.html"""', "{'form': form}"], {}), "(request, 'customer/login.html', {'form': form})\n", (1046, 1094), False, 'from django.shortcuts import render, redirect\n'), ((1302, 1359), 'django.shortcuts.render', 'render', (['request', '"""customer/register.html"""', "{'form': form}"], {}), "(request, 'customer/register.html', {'form': form})\n", (1308, 1359), False, 'from django.shortcuts import render, redirect\n'), ((2000, 2057), 'django.shortcuts.render', 'render', (['request', '"""customer/register.html"""', "{'form': form}"], {}), "(request, 'customer/register.html', {'form': form})\n", (2006, 2057), False, 'from django.shortcuts import render, redirect\n'), ((2356, 2375), 'django.shortcuts.redirect', 'redirect', (['"""catalog"""'], {}), "('catalog')\n", (2364, 2375), False, 'from django.shortcuts import render, redirect\n'), ((512, 531), 'django.shortcuts.redirect', 'redirect', (['"""catalog"""'], {}), "('catalog')\n", (520, 531), False, 'from django.shortcuts import render, redirect\n'), ((870, 923), 'django.contrib.auth.authenticate', 'authenticate', (['request'], {'email': 'email', 'password': 'password'}), '(request, email=email, password=password)\n', (882, 923), False, 'from django.contrib.auth import authenticate, login\n'), ((1233, 1252), 'django.shortcuts.redirect', 'redirect', (['"""catalog"""'], {}), "('catalog')\n", (1241, 1252), False, 'from django.shortcuts import render, redirect\n'), ((1582, 1643), 'customer.models.Customer.objects.create', 'Customer.objects.create', ([], {'user': 'new_user', 'status': '"""Unrecognized"""'}), "(user=new_user, status='Unrecognized')\n", (1605, 1643), False, 'from customer.models import Customer, ShippingAddress\n'), ((1691, 1729), 'cart.models.Cart.objects.create', 'Cart.objects.create', ([], {'customer': 'customer'}), '(customer=customer)\n', (1710, 1729), False, 'from cart.models import Cart\n'), ((1776, 1825), 'customer.models.ShippingAddress.objects.create', 'ShippingAddress.objects.create', ([], {'customer': 'customer'}), '(customer=customer)\n', (1806, 1825), False, 'from customer.models import Customer, ShippingAddress\n'), ((1967, 1984), 'django.shortcuts.redirect', 'redirect', (['"""login"""'], {}), "('login')\n", (1975, 1984), False, 'from django.shortcuts import render, redirect\n'), ((2277, 2340), 'django.shortcuts.render', 'render', (['request', '"""customer/create_company.html"""', "{'form': form}"], {}), "(request, 'customer/create_company.html', {'form': form})\n", (2283, 2340), False, 'from django.shortcuts import render, redirect\n'), ((2880, 2937), 'django.shortcuts.render', 'render', (['request', '"""customer/register.html"""', "{'form': form}"], {}), "(request, 'customer/register.html', {'form': form})\n", (2886, 2937), False, 'from django.shortcuts import render, redirect\n'), ((961, 981), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (966, 981), False, 'from django.contrib.auth import authenticate, login\n'), ((1005, 1024), 'django.shortcuts.redirect', 'redirect', (['"""catalog"""'], {}), "('catalog')\n", (1013, 1024), False, 'from django.shortcuts import render, redirect\n'), ((2841, 2860), 'django.shortcuts.redirect', 'redirect', (['"""catalog"""'], {}), 
"('catalog')\n", (2849, 2860), False, 'from django.shortcuts import render, redirect\n')]
|
from flask import Flask, render_template, request
# from .recommendation import *
# import pickle
import pandas as pd
import numpy as np
# import keras
# from keras.models import load_model
import pickle
def create_app():
# initializes our app
APP = Flask(__name__)
@APP.route('/')
def form():
return render_template('base.html')
@APP.route('/data/', methods=['GET', 'POST'])
def data():
if request.method == 'POST':
# Get form data
name = request.form.get('name')
blurb = request.form.get('blurb', 'default')
country = request.form.get('country', 'default')
backers_count = request.form.get('backers_count', 'default')
prediction = preprocessDataAndPredict(name, blurb, country,
backers_count)
# print(prediction[0])
            return render_template('data.html', prediction=prediction[0])
        # Added fallback (assumption): re-render the input form on non-POST
        # requests so the view always returns a response.
        return render_template('base.html')
def preprocessDataAndPredict(name, blurb, country, backers_count):
# test_data = (blurb)
test_data = (name, blurb, country, backers_count)
# print(test_data)
test_data = np.array(test_data)
dftest = pd.DataFrame(test_data).T
dftest.columns = ['name', 'blurb', 'country', 'backers_count']
print(dftest)
print(dftest.shape)
# test_data = test_data.reshape(1, -1)
# print(test_data)
#file = open("model.pkl", "wb")
model = pickle.load(
open('model_knn', 'rb'))
# model = pickle.load(
# open('Kickstarter2/kickstarter/kick_model(1)', 'rb'))
prediction = model.predict(dftest)
# print(prediction)
return prediction
# return prediction
return APP
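# --- Illustrative entry point (added sketch, not part of the original module) ---
# A minimal way to run the factory with Flask's built-in development server.
#
#   app = create_app()
#   if __name__ == '__main__':
#       app.run(debug=True)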
|
[
"flask.render_template",
"flask.Flask",
"flask.request.form.get",
"numpy.array",
"pandas.DataFrame"
] |
[((259, 274), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (264, 274), False, 'from flask import Flask, render_template, request\n'), ((327, 355), 'flask.render_template', 'render_template', (['"""base.html"""'], {}), "('base.html')\n", (342, 355), False, 'from flask import Flask, render_template, request\n'), ((1163, 1182), 'numpy.array', 'np.array', (['test_data'], {}), '(test_data)\n', (1171, 1182), True, 'import numpy as np\n'), ((507, 531), 'flask.request.form.get', 'request.form.get', (['"""name"""'], {}), "('name')\n", (523, 531), False, 'from flask import Flask, render_template, request\n'), ((552, 588), 'flask.request.form.get', 'request.form.get', (['"""blurb"""', '"""default"""'], {}), "('blurb', 'default')\n", (568, 588), False, 'from flask import Flask, render_template, request\n'), ((611, 649), 'flask.request.form.get', 'request.form.get', (['"""country"""', '"""default"""'], {}), "('country', 'default')\n", (627, 649), False, 'from flask import Flask, render_template, request\n'), ((678, 722), 'flask.request.form.get', 'request.form.get', (['"""backers_count"""', '"""default"""'], {}), "('backers_count', 'default')\n", (694, 722), False, 'from flask import Flask, render_template, request\n'), ((899, 953), 'flask.render_template', 'render_template', (['"""data.html"""'], {'prediction': 'prediction[0]'}), "('data.html', prediction=prediction[0])\n", (914, 953), False, 'from flask import Flask, render_template, request\n'), ((1200, 1223), 'pandas.DataFrame', 'pd.DataFrame', (['test_data'], {}), '(test_data)\n', (1212, 1223), True, 'import pandas as pd\n')]
|
# Copyright 2018, <NAME> LLC
# License: Apache License Version 2.0
# -------------------------------------------------------------------------
# registration.py - updates the database to say who is building something
# and what the current settings are, which is used by the file serving
# code to see if it is ok to serve up files in the buildroot. But also
# for record keeping.
# --------------------------------------------------------------------------
from datetime import datetime
import random
import fcntl
import subprocess
import os
from django.utils import timezone
from django.conf import settings
from vespene.common.logger import Logger
from vespene.models.worker import Worker
LOG = Logger()
WORKER_ID_FILE = "/etc/vespene/worker_id"
# =============================================================================
class RegistrationManager(object):
def __init__(self, builder, build):
self.builder = builder
self.build = build
self.project = self.build.project
def create_worker_id(self):
wid = ''.join(random.SystemRandom().choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50))
fd = open(WORKER_ID_FILE, "w+")
fd.write(wid)
fd.close()
return wid
def get_worker_id(self, fd):
return fd.readlines()[0].strip()
def get_worker_record(self, worker_id):
qs = Worker.objects.filter(worker_uid=worker_id)
if not qs.exists():
return None
return qs.first()
# worker_pool = models.ForeignKey('WorkerPool', null=False, on_delete=models.SET_NULL)
# hostname = models.CharField(max_length=1024, null=True)
# port = models.IntField(null=False, default=8080)
# working_dir = models.CharField(max_length=1024, null=True)
# first_checkin = models.DateTimeField(null=True, blank=True)
# last_checkin = models.DateTimeField(null=True, blank=True)
# fileserving_enabled = models.BooleanField(null=False, default=False)
def get_hostname(self):
if settings.FILESERVING_HOSTNAME:
return settings.FILESERVING_HOSTNAME
return self.guess_hostname()
def guess_hostname(self):
return subprocess.check_output("hostname").decode('utf-8').strip()
def get_port(self):
if settings.FILESERVING_PORT:
return settings.FILESERVING_PORT
else:
return 8000
def get_build_root(self):
return settings.BUILD_ROOT
def get_fileserving_enabled(self):
return settings.FILESERVING_ENABLED
def create_worker_record(self, worker_id):
now = datetime.now(tz=timezone.utc)
obj = Worker(
worker_uid = worker_id,
hostname = self.get_hostname(),
port = self.get_port(),
build_root = self.get_build_root(),
first_checkin = now,
last_checkin = now,
fileserving_enabled = self.get_fileserving_enabled()
)
obj.save()
return obj
def update_worker_record(self, worker):
now = datetime.now(tz=timezone.utc)
worker.hostname = self.get_hostname()
worker.port = self.get_port()
worker.build_root = self.get_build_root()
worker.last_checkin = now
worker.fileserving_enabled = self.get_fileserving_enabled()
worker.save()
return worker
def go(self):
"""
        Registers or updates this worker's record and attaches it to the current build.
"""
if not os.path.exists(WORKER_ID_FILE):
worker_id = self.create_worker_id()
fd = open(WORKER_ID_FILE, "r")
fcntl.flock(fd, fcntl.LOCK_EX)
worker_id = self.get_worker_id(fd)
worker_record = self.get_worker_record(worker_id)
if not worker_record:
worker_record = self.create_worker_record(worker_id)
else:
worker_record = self.update_worker_record(worker_record)
self.build.worker = worker_record
self.build.save()
fcntl.flock(fd, fcntl.LOCK_UN)
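# --- Illustrative usage (added sketch, not part of the original module) ---
# builder and build are assumed to be the objects Vespene's build pipeline
# already holds when it invokes this step.
#
#   manager = RegistrationManager(builder, build)
#   manager.go()  # registers/updates the worker record and attaches it to the build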
|
[
"subprocess.check_output",
"os.path.exists",
"fcntl.flock",
"vespene.models.worker.Worker.objects.filter",
"datetime.datetime.now",
"vespene.common.logger.Logger",
"random.SystemRandom"
] |
[((712, 720), 'vespene.common.logger.Logger', 'Logger', ([], {}), '()\n', (718, 720), False, 'from vespene.common.logger import Logger\n'), ((1420, 1463), 'vespene.models.worker.Worker.objects.filter', 'Worker.objects.filter', ([], {'worker_uid': 'worker_id'}), '(worker_uid=worker_id)\n', (1441, 1463), False, 'from vespene.models.worker import Worker\n'), ((2643, 2672), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (2655, 2672), False, 'from datetime import datetime\n'), ((3096, 3125), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (3108, 3125), False, 'from datetime import datetime\n'), ((3664, 3694), 'fcntl.flock', 'fcntl.flock', (['fd', 'fcntl.LOCK_EX'], {}), '(fd, fcntl.LOCK_EX)\n', (3675, 3694), False, 'import fcntl\n'), ((4052, 4082), 'fcntl.flock', 'fcntl.flock', (['fd', 'fcntl.LOCK_UN'], {}), '(fd, fcntl.LOCK_UN)\n', (4063, 4082), False, 'import fcntl\n'), ((3528, 3558), 'os.path.exists', 'os.path.exists', (['WORKER_ID_FILE'], {}), '(WORKER_ID_FILE)\n', (3542, 3558), False, 'import os\n'), ((1084, 1105), 'random.SystemRandom', 'random.SystemRandom', ([], {}), '()\n', (1103, 1105), False, 'import random\n'), ((2225, 2260), 'subprocess.check_output', 'subprocess.check_output', (['"""hostname"""'], {}), "('hostname')\n", (2248, 2260), False, 'import subprocess\n')]
|
import os
import json
import logging
from collections import defaultdict
import numpy as np
import networkx as nx
import torch
from torch.utils.data import DataLoader
from torch.optim import Adam
from tqdm import tqdm
from milieu.util.util import place_on_cpu, place_on_gpu
from milieu.paper.methods.method import DPPMethod
class MilieuMethod(DPPMethod):
""" GCN method class
"""
def __init__(self, network, diseases, params):
super().__init__(network, diseases, params)
self.dir = params["dir"]
self.adjacency = self.network.adj_matrix
self.diseases = diseases
self.params = params
print(self.params)
if self.params.get("load", False):
self.load_method()
else:
self.train_method(diseases)
self.curr_fold = None
def load_method(self):
"""
"""
logging.info("Loading Params...")
with open(os.path.join(self.dir, "params.json")) as f:
params = json.load(f)["process_params"]["method_params"]
params.update(self.params)
self.params = params
logging.info("Loading Models...")
self.folds_to_models = {}
for model_file in os.listdir(os.path.join(self.dir, "models")):
split = parse.parse("model_{}.tar", model_file)[0]
self.folds_to_models[split] = os.path.join(self.dir,
"models",
model_file)
def train_method(self, diseases):
"""
"""
logging.info("Training Models...")
folds_to_diseases = defaultdict(set)
for disease in diseases.values():
if disease.split == "none":
continue
folds_to_diseases[disease.split].add(disease)
self.folds_to_models = {}
if not(os.path.exists(os.path.join(self.dir, "models"))):
os.mkdir(os.path.join(self.dir, "models"))
for test_fold in folds_to_diseases.keys():
logging.info("Training model for test {}".format(test_fold))
val_fold = str((int(test_fold) - 1) % len(folds_to_diseases))
test_dataset = DiseaseDataset([disease
for disease in folds_to_diseases[test_fold]],
self.network)
val_dataset = DiseaseDataset([disease
for disease in folds_to_diseases[val_fold]],
self.network)
train_dataset = DiseaseDataset([disease
for fold, diseases in folds_to_diseases.items()
if fold != test_fold and fold != val_fold
for disease in diseases],
self.network)
# ensure no data leakage
assert(not set.intersection(*[test_dataset.get_ids(),
train_dataset.get_ids()]))
assert(not set.intersection(*[val_dataset.get_ids(),
train_dataset.get_ids()]))
model = self.train_model(train_dataset, val_dataset)
path = os.path.join(self.dir, "models/model_{}.tar".format(test_fold))
torch.save(model.state_dict(), path)
self.folds_to_models[test_fold] = path
def train_model(self, train_dataset, val_dataset):
""" Trains the underlying model
"""
train_dl = DataLoader(train_dataset,
batch_size=self.params["batch_size"],
shuffle=True,
num_workers=self.params["num_workers"],
pin_memory=self.params["cuda"])
dev_dl = DataLoader(val_dataset,
batch_size=self.params["batch_size"],
shuffle=True,
num_workers=self.params["num_workers"],
pin_memory=self.params["cuda"])
if self.params["model_class"] == "LCIEmbModule":
model = LCIEmbModule(self.params["model_args"], self.network)
else:
model = LCIModule(self.params, self.adjacency)
if self.params["cuda"]:
model = model.cuda()
optimizer = Adam(model.parameters(), lr=self.params["learning_rate"],
weight_decay=self.params["weight_decay"])
logging.info("Starting training for {} epoch(s)".format(self.params["num_epochs"]))
model.train()
train_and_evaluate(
model,
train_dl,
dev_dl,
optimizer,
bce_loss,
metrics,
self.params,
self.dir
)
model.eval()
return model.cpu()
def compute_scores(self, train_pos, disease):
""" Compute the scores predicted by GCN.
Args:
"""
val_pos = None
# Adjacency: Get sparse representation of ppi_adj
N, _ = self.adjacency.shape
X = torch.zeros(1, N)
X[0, train_pos] = 1
if self.params["cuda"]:
X = X.cuda()
if disease.split != self.curr_fold:
if self.params["model_class"] == "LCIEmbModule":
model = LCIEmbModule(self.params["model_args"], self.network)
else:
model = LCIModule(self.params, self.adjacency)
model.load_state_dict(torch.load(self.folds_to_models[disease.split]))
model.eval()
model.cuda()
self.curr_model = model
self.curr_fold = disease.split
Y = self.curr_model(X)
scores = Y.cpu().detach().numpy().squeeze()
return scores
|
[
"torch.load",
"os.path.join",
"collections.defaultdict",
"torch.utils.data.DataLoader",
"json.load",
"logging.info",
"torch.zeros"
] |
[((901, 934), 'logging.info', 'logging.info', (['"""Loading Params..."""'], {}), "('Loading Params...')\n", (913, 934), False, 'import logging\n'), ((1148, 1181), 'logging.info', 'logging.info', (['"""Loading Models..."""'], {}), "('Loading Models...')\n", (1160, 1181), False, 'import logging\n'), ((1622, 1656), 'logging.info', 'logging.info', (['"""Training Models..."""'], {}), "('Training Models...')\n", (1634, 1656), False, 'import logging\n'), ((1685, 1701), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (1696, 1701), False, 'from collections import defaultdict\n'), ((3696, 3855), 'torch.utils.data.DataLoader', 'DataLoader', (['train_dataset'], {'batch_size': "self.params['batch_size']", 'shuffle': '(True)', 'num_workers': "self.params['num_workers']", 'pin_memory': "self.params['cuda']"}), "(train_dataset, batch_size=self.params['batch_size'], shuffle=\n True, num_workers=self.params['num_workers'], pin_memory=self.params[\n 'cuda'])\n", (3706, 3855), False, 'from torch.utils.data import DataLoader\n'), ((3990, 4141), 'torch.utils.data.DataLoader', 'DataLoader', (['val_dataset'], {'batch_size': "self.params['batch_size']", 'shuffle': '(True)', 'num_workers': "self.params['num_workers']", 'pin_memory': "self.params['cuda']"}), "(val_dataset, batch_size=self.params['batch_size'], shuffle=True,\n num_workers=self.params['num_workers'], pin_memory=self.params['cuda'])\n", (4000, 4141), False, 'from torch.utils.data import DataLoader\n'), ((5321, 5338), 'torch.zeros', 'torch.zeros', (['(1)', 'N'], {}), '(1, N)\n', (5332, 5338), False, 'import torch\n'), ((1253, 1285), 'os.path.join', 'os.path.join', (['self.dir', '"""models"""'], {}), "(self.dir, 'models')\n", (1265, 1285), False, 'import os\n'), ((1393, 1437), 'os.path.join', 'os.path.join', (['self.dir', '"""models"""', 'model_file'], {}), "(self.dir, 'models', model_file)\n", (1405, 1437), False, 'import os\n'), ((953, 990), 'os.path.join', 'os.path.join', (['self.dir', '"""params.json"""'], {}), "(self.dir, 'params.json')\n", (965, 990), False, 'import os\n'), ((1940, 1972), 'os.path.join', 'os.path.join', (['self.dir', '"""models"""'], {}), "(self.dir, 'models')\n", (1952, 1972), False, 'import os\n'), ((1997, 2029), 'os.path.join', 'os.path.join', (['self.dir', '"""models"""'], {}), "(self.dir, 'models')\n", (2009, 2029), False, 'import os\n'), ((5735, 5782), 'torch.load', 'torch.load', (['self.folds_to_models[disease.split]'], {}), '(self.folds_to_models[disease.split])\n', (5745, 5782), False, 'import torch\n'), ((1019, 1031), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1028, 1031), False, 'import json\n')]
|
import os
import uuid
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
def upload_avatar_to(instance, filename):
filename, ext = os.path.splitext(filename)
return os.path.join(
'avatar_images',
'avatar_{uuid}_{filename}{ext}'.format(
uuid=uuid.uuid4(), filename=filename, ext=ext)
)
class UserProfile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='wagtail_userprofile'
)
submitted_notifications = models.BooleanField(
verbose_name=_('submitted notifications'),
default=True,
help_text=_("Receive notification when a page is submitted for moderation")
)
approved_notifications = models.BooleanField(
verbose_name=_('approved notifications'),
default=True,
help_text=_("Receive notification when your page edit is approved")
)
rejected_notifications = models.BooleanField(
verbose_name=_('rejected notifications'),
default=True,
help_text=_("Receive notification when your page edit is rejected")
)
preferred_language = models.CharField(
verbose_name=_('preferred language'),
max_length=10,
help_text=_("Select language for the admin"),
default=''
)
current_time_zone = models.CharField(
verbose_name=_('current time zone'),
max_length=40,
help_text=_("Select your current time zone"),
default=''
)
avatar = models.ImageField(
verbose_name=_('profile picture'),
upload_to=upload_avatar_to,
blank=True,
)
@classmethod
def get_for_user(cls, user):
return cls.objects.get_or_create(user=user)[0]
def get_preferred_language(self):
return self.preferred_language or settings.LANGUAGE_CODE
def get_current_time_zone(self):
return self.current_time_zone or settings.TIME_ZONE
def __str__(self):
return self.user.get_username()
class Meta:
verbose_name = _('user profile')
verbose_name_plural = _('user profiles')
|
[
"django.db.models.OneToOneField",
"django.utils.translation.ugettext_lazy",
"os.path.splitext",
"uuid.uuid4"
] |
[((205, 231), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (221, 231), False, 'import os\n'), ((441, 553), 'django.db.models.OneToOneField', 'models.OneToOneField', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE', 'related_name': '"""wagtail_userprofile"""'}), "(settings.AUTH_USER_MODEL, on_delete=models.CASCADE,\n related_name='wagtail_userprofile')\n", (461, 553), False, 'from django.db import models\n'), ((2121, 2138), 'django.utils.translation.ugettext_lazy', '_', (['"""user profile"""'], {}), "('user profile')\n", (2122, 2138), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2169, 2187), 'django.utils.translation.ugettext_lazy', '_', (['"""user profiles"""'], {}), "('user profiles')\n", (2170, 2187), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((637, 665), 'django.utils.translation.ugettext_lazy', '_', (['"""submitted notifications"""'], {}), "('submitted notifications')\n", (638, 665), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((707, 772), 'django.utils.translation.ugettext_lazy', '_', (['"""Receive notification when a page is submitted for moderation"""'], {}), "('Receive notification when a page is submitted for moderation')\n", (708, 772), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((851, 878), 'django.utils.translation.ugettext_lazy', '_', (['"""approved notifications"""'], {}), "('approved notifications')\n", (852, 878), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((920, 977), 'django.utils.translation.ugettext_lazy', '_', (['"""Receive notification when your page edit is approved"""'], {}), "('Receive notification when your page edit is approved')\n", (921, 977), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1056, 1083), 'django.utils.translation.ugettext_lazy', '_', (['"""rejected notifications"""'], {}), "('rejected notifications')\n", (1057, 1083), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1125, 1182), 'django.utils.translation.ugettext_lazy', '_', (['"""Receive notification when your page edit is rejected"""'], {}), "('Receive notification when your page edit is rejected')\n", (1126, 1182), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1254, 1277), 'django.utils.translation.ugettext_lazy', '_', (['"""preferred language"""'], {}), "('preferred language')\n", (1255, 1277), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1320, 1354), 'django.utils.translation.ugettext_lazy', '_', (['"""Select language for the admin"""'], {}), "('Select language for the admin')\n", (1321, 1354), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1445, 1467), 'django.utils.translation.ugettext_lazy', '_', (['"""current time zone"""'], {}), "('current time zone')\n", (1446, 1467), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1510, 1544), 'django.utils.translation.ugettext_lazy', '_', (['"""Select your current time zone"""'], {}), "('Select your current time zone')\n", (1511, 1544), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1625, 1645), 'django.utils.translation.ugettext_lazy', '_', (['"""profile picture"""'], {}), "('profile picture')\n", (1626, 1645), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((347, 359), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (357, 359), False, 'import uuid\n')]
|
__author__ = '<NAME>, <EMAIL>'
import uinput
def Simulator():
def __init__(self):
pass
def test1(self):
device = uinput.Device([uinput.KEY_E, uinput.KEY_H, uinput.KEY_L, uinput.KEY_O])
device.emit_click(uinput.KEY_H)
|
[
"uinput.Device"
] |
[((122, 193), 'uinput.Device', 'uinput.Device', (['[uinput.KEY_E, uinput.KEY_H, uinput.KEY_L, uinput.KEY_O]'], {}), '([uinput.KEY_E, uinput.KEY_H, uinput.KEY_L, uinput.KEY_O])\n', (135, 193), False, 'import uinput\n')]
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for catapult.
See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
import re
import sys
_EXCLUDED_PATHS = (
r'(.*[\\/])?\.git[\\/].*',
r'.+\.png$',
r'.+\.svg$',
r'.+\.skp$',
r'.+\.gypi$',
r'.+\.gyp$',
r'.+\.gn$',
r'.*\.gitignore$',
r'.*codereview.settings$',
r'.*AUTHOR$',
r'^CONTRIBUTORS\.md$',
r'.*LICENSE$',
r'.*OWNERS$',
r'.*README\.md$',
r'^dashboard[\\/]dashboard[\\/]api[\\/]examples[\\/].*.js',
r'^dashboard[\\/]dashboard[\\/]templates[\\/].*',
r'^experimental[\\/]heatmap[\\/].*',
r'^experimental[\\/]trace_on_tap[\\/]third_party[\\/].*',
r'^perf_insights[\\/]test_data[\\/].*',
r'^perf_insights[\\/]third_party[\\/].*',
r'^telemetry[\\/]third_party[\\/].*',
r'^third_party[\\/].*',
r'^tracing[\\/]\.allow-devtools-save$',
r'^tracing[\\/]bower\.json$',
r'^tracing[\\/]\.bowerrc$',
r'^tracing[\\/]tracing_examples[\\/]string_convert\.js$',
r'^tracing[\\/]test_data[\\/].*',
r'^tracing[\\/]third_party[\\/].*',
r'^py_vulcanize[\\/]third_party[\\/].*',
r'^common/py_vulcanize[\\/].*', # TODO(hjd): Remove after fixing long lines.
)
_GITHUB_BUG_ID_RE = re.compile(r'#[1-9]\d*')
_MONORAIL_BUG_ID_RE = re.compile(r'[1-9]\d*')
_MONORAIL_PROJECT_NAMES = frozenset({'chromium', 'v8', 'angleproject'})
def CheckChangeLogBug(input_api, output_api):
if not input_api.change.issue:
# If there is no change issue, there won't be a bug yet. Skip the check.
return []
# Show a presubmit message if there is no Bug line or an empty Bug line.
if not input_api.change.BugsFromDescription():
return [output_api.PresubmitNotifyResult(
'If this change has associated bugs on GitHub or Monorail, add a '
'"Bug: <bug>(, <bug>)*" line to the patch description where <bug> can '
'be one of the following: catapult:#NNNN, ' +
', '.join('%s:NNNNNN' % n for n in _MONORAIL_PROJECT_NAMES) + '.')]
# Check that each bug in the BUG= line has the correct format.
error_messages = []
catapult_bug_provided = False
for index, bug in enumerate(input_api.change.BugsFromDescription()):
# Check if the bug can be split into a repository name and a bug ID (e.g.
# 'catapult:#1234' -> 'catapult' and '#1234').
bug_parts = bug.split(':')
if len(bug_parts) != 2:
error_messages.append('Invalid bug "%s". Bugs should be provided in the '
'"<project-name>:<bug-id>" format.' % bug)
continue
project_name, bug_id = bug_parts
if project_name == 'catapult':
if not _GITHUB_BUG_ID_RE.match(bug_id):
error_messages.append('Invalid bug "%s". Bugs in the Catapult '
'repository should be provided in the '
'"catapult:#NNNN" format.' % bug)
catapult_bug_provided = True
elif project_name in _MONORAIL_PROJECT_NAMES:
if not _MONORAIL_BUG_ID_RE.match(bug_id):
error_messages.append('Invalid bug "%s". Bugs in the Monorail %s '
'project should be provided in the '
'"%s:NNNNNN" format.' % (bug, project_name,
project_name))
else:
error_messages.append('Invalid bug "%s". Unknown repository "%s".' % (
bug, project_name))
return map(output_api.PresubmitError, error_messages)
def CheckChange(input_api, output_api):
results = []
try:
sys.path += [input_api.PresubmitLocalPath()]
from catapult_build import bin_checks
from catapult_build import html_checks
from catapult_build import js_checks
from catapult_build import repo_checks
results += input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS)
results += input_api.RunTests(
input_api.canned_checks.CheckVPythonSpec(input_api, output_api))
results += CheckChangeLogBug(input_api, output_api)
results += js_checks.RunChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS)
results += html_checks.RunChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS)
results += repo_checks.RunChecks(input_api, output_api)
results += bin_checks.RunChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS)
finally:
sys.path.remove(input_api.PresubmitLocalPath())
return results
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api)
|
[
"re.compile",
"catapult_build.bin_checks.RunChecks",
"catapult_build.repo_checks.RunChecks",
"catapult_build.js_checks.RunChecks",
"catapult_build.html_checks.RunChecks"
] |
[((1468, 1492), 're.compile', 're.compile', (['"""#[1-9]\\\\d*"""'], {}), "('#[1-9]\\\\d*')\n", (1478, 1492), False, 'import re\n'), ((1515, 1538), 're.compile', 're.compile', (['"""[1-9]\\\\d*"""'], {}), "('[1-9]\\\\d*')\n", (1525, 1538), False, 'import re\n'), ((4285, 4359), 'catapult_build.js_checks.RunChecks', 'js_checks.RunChecks', (['input_api', 'output_api'], {'excluded_paths': '_EXCLUDED_PATHS'}), '(input_api, output_api, excluded_paths=_EXCLUDED_PATHS)\n', (4304, 4359), False, 'from catapult_build import js_checks\n'), ((4384, 4460), 'catapult_build.html_checks.RunChecks', 'html_checks.RunChecks', (['input_api', 'output_api'], {'excluded_paths': '_EXCLUDED_PATHS'}), '(input_api, output_api, excluded_paths=_EXCLUDED_PATHS)\n', (4405, 4460), False, 'from catapult_build import html_checks\n'), ((4485, 4529), 'catapult_build.repo_checks.RunChecks', 'repo_checks.RunChecks', (['input_api', 'output_api'], {}), '(input_api, output_api)\n', (4506, 4529), False, 'from catapult_build import repo_checks\n'), ((4545, 4620), 'catapult_build.bin_checks.RunChecks', 'bin_checks.RunChecks', (['input_api', 'output_api'], {'excluded_paths': '_EXCLUDED_PATHS'}), '(input_api, output_api, excluded_paths=_EXCLUDED_PATHS)\n', (4565, 4620), False, 'from catapult_build import bin_checks\n')]
|