{
"source": "a68366/aoc_2018",
"score": 3
}
#### File: a68366/aoc_2018/day01.py
```python
from collections import defaultdict
def part_1():
    with open('input') as f:
        return sum(map(int, f.read().split()))
def part_2():
    with open('input') as f:
        data = f.read().splitlines()
    d = defaultdict(int)
    d[0] = 1
    result = 0
    while True:
        for line in data:
            val = int(line)
            result += val
            d[result] += 1
            if d[result] == 2:
                return result
def main():
    print('1:', part_1())
    print('2:', part_2())
if __name__ == '__main__':
    main()
```
#### File: a68366/aoc_2018/day02.py
```python
from collections import Counter
def part_1():
    with open('input') as f:
        threes = 0
        twos = 0
        for line in f:
            cnt = Counter(line)
            flag2 = True
            flag3 = True
            for val in cnt.values():
                if val == 2 and flag2:
                    twos += 1
                    flag2 = False
                if val == 3 and flag3:
                    threes += 1
                    flag3 = False
        return threes * twos
def part_2():
    with open('input') as f:
        lines = f.read().split()
    for n, i in enumerate(lines):
        for j in lines[n:]:
            cnt = 0
            same = ''
            for sym_1, sym_2 in zip(i, j):
                if sym_1 != sym_2:
                    cnt += 1
                else:
                    same += sym_1
                if cnt > 1:
                    break
            if cnt == 1:
                return same
    return None
def main():
    print('1:', part_1())
    print('2:', part_2())
if __name__ == '__main__':
    main()
```
#### File: a68366/aoc_2018/day16.py
```python
import re
def get_data():
    with open('input') as f:
        data = []
        test = []
        data_text, test_text = f.read().split('\n\n\n\n')
        for line in data_text.splitlines():
            if not line.strip():
                continue
            values = list(map(int, re.findall(r'(\d+)', line)))
            if line.startswith('Before'):
                data.append([values])
            else:
                data[-1].append(values)
        for line in test_text.splitlines():
            values = list(map(int, re.findall(r'(\d+)', line)))
            test.append(values)
    return data, test
def init():
    registers = [0] * 4
    ops = [
        lambda regs, a, b, c: regs[a] + regs[b],        # addr
        lambda regs, a, b, c: regs[a] + b,              # addi
        lambda regs, a, b, c: regs[a] * regs[b],        # mulr
        lambda regs, a, b, c: regs[a] * b,              # muli
        lambda regs, a, b, c: regs[a] & regs[b],        # banr
        lambda regs, a, b, c: regs[a] & b,              # bani
        lambda regs, a, b, c: regs[a] | regs[b],        # borr
        lambda regs, a, b, c: regs[a] | b,              # bori
        lambda regs, a, b, c: regs[a],                  # setr
        lambda regs, a, b, c: a,                        # seti
        lambda regs, a, b, c: int(a > regs[b]),         # gtir
        lambda regs, a, b, c: int(regs[a] > b),         # gtri
        lambda regs, a, b, c: int(regs[a] > regs[b]),   # gtrr
        lambda regs, a, b, c: int(a == regs[b]),        # eqir
        lambda regs, a, b, c: int(regs[a] == b),        # eqri
        lambda regs, a, b, c: int(regs[a] == regs[b]),  # eqrr
    ]
    return registers, ops
def part_1(data):
    _, ops = init()
    result = 0
    for inputs in data:
        before, line, after = inputs
        _, a, b, c = line
        cnt = 0
        for op in ops:
            registers = before[:]
            registers[c] = op(registers, a, b, c)
            if registers == after:
                cnt += 1
        if cnt >= 3:
            result += 1
    return result
def part_2(data, test):
    _, ops = init()
    repeats = {}
    for inputs in data:
        before, line, after = inputs
        opcode, a, b, c = line
        for i, op in enumerate(ops):
            registers = before[:]
            registers[c] = op(registers, a, b, c)
            if registers == after:
                repeats.setdefault(opcode, set()).add(i)
    found = set()
    table = {}
    for i in range(16):
        for k, v in repeats.items():
            if len(v - found) == 1:
                table[k] = next(iter(v - found))
                found.update(v)
    registers = [0] * 4
    for inputs in test:
        opcode, a, b, c = inputs
        op = ops[table[opcode]]
        registers[c] = op(registers, a, b, c)
    return registers[0]
def main():
    data, test = get_data()
    print('1:', part_1(data))
    print('2:', part_2(data, test))
if __name__ == '__main__':
    main()
```
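The opcode table in `part_2` is recovered by elimination: any opcode whose candidate set contains exactly one not-yet-assigned instruction gets that instruction, and the process repeats until all opcodes resolve. A minimal self-contained sketch of that step on made-up candidate sets (the `repeats` values here are hypothetical, not from a real puzzle input):
```python
# Hypothetical candidate sets: opcode -> indices of ops that matched all samples.
repeats = {0: {1, 2}, 1: {2}, 2: {0, 1, 2}}
found, table = set(), {}
for _ in range(len(repeats)):
    for k, v in repeats.items():
        if len(v - found) == 1:           # exactly one unassigned candidate left
            table[k] = next(iter(v - found))
            found.update(v)
print(table)  # {1: 2, 0: 1, 2: 0}
```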
#### File: a68366/aoc_2018/day25.py
```python
import re
def get_data():
    with open('input') as f:
        data = [tuple(map(int, re.findall(r'(-?\d+)', s))) for s in f]
    return data
def distance(x, y):
    return sum(abs(a - b) for a, b in zip(x, y))
def part_1(data):
    cons = [[data.pop(0)]]
    while data:
        for c in cons:
            for p1 in c:
                i = 0
                while i < len(data):
                    p2 = data[i]
                    if distance(p1, p2) <= 3:
                        c.append(data.pop(i))
                    else:
                        i += 1
        if data:
            cons.append([data.pop(0)])
    return len(cons)
def main():
    data = get_data()
    print('1:', part_1(data))
if __name__ == '__main__':
    main()
```
{
"source": "a68b57/rl_for_set_down",
"score": 3
}
#### File: RL/hrl/objectives.py
```python
import tensorflow as tf
import keras.backend as K
import numpy as np
def huber_loss(y_true, y_pred, max_grad=1.):
    """Calculate the huber loss.
    See https://en.wikipedia.org/wiki/Huber_loss
    Parameters
    ----------
    y_true: np.array, tf.Tensor
        Target value.
    y_pred: np.array, tf.Tensor
        Predicted value.
    max_grad: float, optional
        Positive floating point value. Represents the maximum possible
        gradient magnitude.
    Returns
    -------
    tf.Tensor
        The huber loss.
    """
    assert max_grad > 0.
    x = y_true - y_pred
    if np.isinf(max_grad):
        return .5 * K.square(x)
    condition = K.abs(x) < max_grad
    squared_loss = .5 * K.square(x)
    linear_loss = max_grad * (K.abs(x) - .5 * max_grad)
    return tf.where(condition, squared_loss, linear_loss)  # condition, true, false
def mean_huber_loss(y_true, y_pred, max_grad=1.):
    """Return mean huber loss.
    Same as huber_loss, but takes the mean over all values in the
    output tensor.
    Parameters
    ----------
    y_true: np.array, tf.Tensor
        Target value.
    y_pred: np.array, tf.Tensor
        Predicted value.
    max_grad: float, optional
        Positive floating point value. Represents the maximum possible
        gradient magnitude.
    Returns
    -------
    tf.Tensor
        The mean huber loss.
    """
    return tf.reduce_mean(huber_loss(y_true, y_pred, max_grad))
```
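For reference, the same piecewise definition in plain numpy, with no TF session needed — quadratic inside the `max_grad` band, linear outside. A minimal sketch with invented inputs:
```python
import numpy as np

def huber_np(y_true, y_pred, max_grad=1.0):
    # Mirrors huber_loss above: 0.5*x^2 for |x| < max_grad, else linear tail.
    x = y_true - y_pred
    return np.where(np.abs(x) < max_grad,
                    0.5 * np.square(x),
                    max_grad * (np.abs(x) - 0.5 * max_grad))

print(huber_np(np.array([0.0, 0.0]), np.array([0.5, 3.0])))  # [0.125 2.5]
```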
#### File: RL/hrl/policy.py
```python
import numpy as np
#import attr
DEBUG = 0
class Policy:
    """Base class representing an MDP policy.
    Policies are used by the agent to choose actions.
    Policies are designed to be stacked to get interesting behaviors
    of choices. For instance, in a discrete action space the lowest
    level policy may take in Q-values and select the action index
    corresponding to the largest value. If this policy is wrapped in
    an epsilon greedy policy then with some probability epsilon, a
    random action will be chosen.
    """
    def select(self, *args):
        """Used by agents to select actions.
        Returns
        -------
        Any:
            An object representing the chosen action. Type depends on
            the hierarchy of policy instances.
        """
        raise NotImplementedError('This method should be overridden.')
class UniformRandomPolicy(Policy):
    """Chooses a discrete action with uniform random probability.
    This is provided as a reference on how to use the policy class.
    Parameters
    ----------
    num_actions: int
        Number of actions to choose from. Must be > 0.
    Raises
    ------
    ValueError:
        If num_actions <= 0
    """
    def __init__(self, num_actions):
        assert num_actions >= 1
        self.num_actions = num_actions
    def select(self):
        """Return a random action index.
        This policy cannot contain others (as they would just be ignored).
        Returns
        -------
        int:
            Action index in range [0, num_actions)
        """
        action = np.random.randint(0, self.num_actions)
        if DEBUG:
            print('In uniform policy: action {0}'.format(action))
        return action, None
    def get_config(self):  # noqa: D102
        return {'num_actions': self.num_actions}
class GreedyPolicy(Policy):
    """Always returns the best action according to Q-values.
    This is a pure exploitation policy.
    """
    def select(self, q_values):  # noqa: D102
        action = np.argmax(q_values)
        if DEBUG:
            print('Q-values')
            print(q_values)
            print('In greedy policy: action {0}'.format(action))
        return action, q_values[0][action]
class GreedyEpsilonPolicy(Policy):
    """Selects greedy action or with some probability a random action.
    Standard greedy-epsilon implementation. With probability epsilon
    choose a random action. Otherwise choose the greedy action.
    Parameters
    ----------
    epsilon: float
        Initial probability of choosing a random action. Can be changed
        over time.
    """
    def __init__(self, epsilon, num_actions):
        self.epsilon = epsilon
        self.num_actions = num_actions
    def select(self, q_values):
        """Run Greedy-Epsilon for the given Q-values.
        Parameters
        ----------
        q_values: array-like
            Array-like structure of floats representing the Q-values for
            each action.
        Returns
        -------
        int:
            The action index chosen.
        """
        rand_num = np.random.rand()
        if rand_num < self.epsilon:
            action = np.random.randint(self.num_actions)
            if DEBUG:
                print('GreedyEpsilonPolicy: select randomly')
        else:
            action = np.argmax(q_values)
        if DEBUG:
            print('GreedyEpsilon: epsilon {0} action {1}'.format(self.epsilon, action))
        return action, q_values[0][action]
class LinearDecayGreedyEpsilonPolicy(Policy):
    """Policy with a parameter that decays linearly.
    Like GreedyEpsilonPolicy but the epsilon decays from a start value
    to an end value over k steps.
    Parameters
    ----------
    start_value: int, float
        The initial value of the parameter
    end_value: int, float
        The value of the policy at the end of the decay.
    num_steps: int
        The number of steps over which to decay the value.
    """
    def __init__(self, num_actions, start_value, end_value,
                 num_steps):  # noqa: D102
        self.policy = GreedyEpsilonPolicy(start_value, num_actions)
        self.end_value = end_value
        self.start_value = start_value
        self.num_steps = num_steps
    def select(self, q_values, num_update):
        """Decay parameter and select action.
        Parameters
        ----------
        q_values: np.array
            The Q-values for each action.
        num_update: int
            Number of updates performed so far; used to compute the
            decayed epsilon.
        Returns
        -------
        Any:
            Selected action.
        """
        # Linear annealed epsilon=x: f(x) = ax + b.
        a = -float(self.start_value - self.end_value) / float(self.num_steps)
        b = float(self.start_value)
        self.policy.epsilon = max(self.end_value, a * float(num_update) + b)
        action, q = self.policy.select(q_values)
        if DEBUG:
            print('LinearDecay: epsilon {0} action {1}'.format(self.policy.epsilon, action))
        return action, q
    def reset(self):
        """Start the decay over at the start value."""
        self.policy.epsilon = self.start_value
```
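The linear decay in `LinearDecayGreedyEpsilonPolicy.select` is simply `epsilon(t) = max(end_value, start_value + t * (end_value - start_value) / num_steps)`. A quick standalone check of that arithmetic, using made-up schedule values:
```python
def decayed_epsilon(start_value, end_value, num_steps, num_update):
    # Same f(x) = ax + b annealing as in select(), clamped at end_value.
    a = -float(start_value - end_value) / float(num_steps)
    b = float(start_value)
    return max(end_value, a * float(num_update) + b)

for step in (0, 500, 1000, 2000):
    print(step, decayed_epsilon(1.0, 0.1, 1000, step))
# 0 -> 1.0, 500 -> 0.55, 1000 -> 0.1, 2000 -> 0.1 (clamped)
```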
#### File: rl_for_set_down/RL/rlagent.py
```python
from collections import deque
from keras import Sequential
from keras.layers import Dense, Dropout, BatchNormalization, Activation
from keras.optimizers import Adam, SGD
import random
import numpy as np
import os
from RNN import toolkit
class RLAgent:
    def __init__(self, state_size, action_size, testing):
        self.action_temp = None
        self.state_temp = None
        self.state_size = state_size
        self.action_size = action_size
        self.memory = deque(maxlen=100000)
        self.epsilon_min = 0.1
        self.learning_rate = 0.005
        self.testing = testing
        self.gamma_max = 0.99
        if self.testing:
            self.model = None
            self.epsilon = 0.0
            self.gamma = 0.98
        else:
            self.model = self._build_model()
            self.target_model = self._build_model()
            self.epsilon = 1.0
            self.gamma = 0.98
    def _build_model(self):
        model = Sequential()
        model.add(Dense(100, input_shape=(self.state_size,)))
        model.add(Activation('sigmoid'))
        # model.add(Dropout(0.5))
        # model.add(Dense(100))
        # model.add(Activation('relu'))
        # model.add(Dropout(0.5))
        model.add(Dense(self.action_size, activation='linear'))
        # model.compile(loss='mse', optimizer=Adam(lr=self.learning_rate))
        model.compile(loss='mse', optimizer=SGD(lr=self.learning_rate))
        return model
    def remember(self, state, action, reward, next_state, done):
        self.memory.append((state, action, reward, next_state, done))
    def act(self, state):
        if np.random.rand() <= self.epsilon:
            return random.randrange(self.action_size), 0
        act_values = self.model.predict(np.reshape(state, [1, self.state_size]))
        return np.argmax(act_values[0]), np.max(act_values)
    def replay(self, batch_size):
        minibatch = random.sample(self.memory, batch_size)
        for state, action, reward, next_state, done in minibatch:
            target = reward
            if not done:
                target = reward + self.gamma * np.amax(self.target_model.predict(next_state)[0])
            target_f = self.target_model.predict(state)
            target_f[0][action] = target
            self.model.fit(state, target_f, epochs=1, verbose=0)
        # double q-learning:
        # for state, action, reward, next_state, done in minibatch:
        #     target = self.model.predict(state)
        #     if done:
        #         target[0][action] = reward
        #     if not done:
        #         a = np.argmax(self.model.predict(next_state)[0])
        #         target[0][action] = reward + self.gamma * self.target_model.predict(next_state)[0][a]
        #     self.model.fit(state, target, epochs=1, verbose=0)
    def target_train(self):
        weights = self.model.get_weights()
        target_weights = self.target_model.get_weights()
        for i in range(len(target_weights)):
            target_weights[i] = weights[i]
        self.target_model.set_weights(target_weights)
    def saveModel(self, dir, name):
        if not os.path.exists(dir):
            os.makedirs(dir)
        model_json = self.model.to_json()
        with open(dir + name + ".json", "w") as json:
            json.write(model_json)
        self.model.save_weights(dir + name + ".h5")
    def loadModel(self, dir, name):
        model = toolkit.loadModel(dir, name)
        self.model = model
```
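The update in `replay` is the standard one-step Q-learning target, `target = r + gamma * max_a Q_target(s', a)` when the episode has not terminated. A minimal numpy sketch of that target construction; the Q-value arrays below are invented for illustration, standing in for the model predictions:
```python
import numpy as np

gamma, reward, done, action = 0.98, 1.0, False, 1
q_next = np.array([0.2, 0.7, 0.4])      # stand-in for target_model.predict(next_state)[0]
target = reward if done else reward + gamma * np.amax(q_next)
target_f = np.array([[0.1, 0.3, 0.0]])  # stand-in for target_model.predict(state)
target_f[0][action] = target            # overwrite only the taken action's value
print(target_f)                         # [[0.1   1.686 0.   ]]
```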
#### File: RNN/learn_flask/main.py
```python
from flask import Flask, render_template, request, redirect, url_for
import matplotlib.pyplot as plt, mpld3
from RNN import toolkit
import numpy as np
import tensorflow as tf
app = Flask(__name__)
obs_len = 1000
pred_len = 50
high_pass_f = 0.7
low_pass_f = 0.9
dt = 0.2
model_dir = "/home/michael/Desktop/workspace/rl_for_set_down/RNN/model/o%d_p%d_d%.1f_f%.1f-%.1f/" % (obs_len, pred_len, dt, high_pass_f, low_pass_f)
encoder_model = toolkit.loadModel(model_dir, "encoder_model")
decoder_model = toolkit.loadModel(model_dir, "decoder_model")
graph = tf.get_default_graph()
@app.route('/')
def upload():
    return render_template('upload.html')
@app.route('/uploader', methods=['GET', 'POST'])
def upload_file():
    if request.method == 'POST':
        # global graph
        # with graph.as_default():
        f = request.files['file']
        data = toolkit.processRawData(f, obs_len=500000)
        num_frame = data.shape[1] // obs_len
        for i in range(num_frame):
            # plt.clf()
            # plt.plot(data[0][i:i+obs_len, :])
            input_seq = data[:, i:i + obs_len, :]
            pred = toolkit.predict_no_loading(encoder_model, decoder_model, input_seq, pred_len)
            # plt.plot(np.concatenate(data[0][i:i+obs_len], pred))
            # plt.pause(0.001)
            print(pred)
        # while True:
        #     plt.pause(0.005)
        # return redirect(url_for('plot', data=data))
@app.route('/plot/<data>')
def plot(data):
    plt.plot(np.array(data))
    mpld3.show()
if __name__ == '__main__':
    app.run(port=5000, debug=True)
```
#### File: rl_for_set_down/RNN/toolkit.py
```python
from keras.models import model_from_json
from matplotlib import pyplot as plt
from scipy.io import loadmat, savemat
import numpy as np
from sklearn import model_selection
import glob
import time, os
import matplotlib.gridspec as gridspec
import tensorflow as tf
from numpy import fft
from statsmodels.tsa.ar_model import AR
from statsmodels.tsa.stattools import adfuller, acf, pacf
graph = tf.get_default_graph()
def loadModel(model_dir, model_name):
    json_file = open(model_dir + model_name + '.json', 'r')
    loaded_model_json = json_file.read()
    json_file.close()
    loaded_model = model_from_json(loaded_model_json)
    loaded_model.load_weights(model_dir + model_name + '.h5')
    return loaded_model
def saveModel(model_dir, model_name, model):
    model_json = model.to_json()
    with open(model_dir + model_name + ".json", "w") as json:
        json.write(model_json)
    model.save_weights(model_dir + model_name + ".h5")
def addDim(data):
    return np.expand_dims(data, axis=2)
def prepareSplitDataSet(data_dir, test_size=0.2, filename_pred_len=50, filename_obs_len=1000, pred_len=50,
                        obs_len=1000):
    x = []
    y = []
    for file in sorted(glob.glob(data_dir + "X_o{}_p{}*.mat".format(filename_obs_len, filename_pred_len))):
        key = file.split('/')[-1][1:]
        y_file = 'Y' + key
        x_data = loadmat(file)
        x.extend([x_data['X']])
        print(file)
        for y_file in glob.glob(data_dir + y_file):
            print(y_file)
            y_data = loadmat(y_file)
            y.extend([y_data['Y']])
    x = np.array(x)
    x = np.transpose(x, axes=(0, 2, 1))
    x = np.concatenate(x, axis=0)
    y = np.concatenate(np.transpose(np.array(y), axes=(0, 2, 1)), axis=0)
    x_train, x_test, y_train, y_test = model_selection.train_test_split(x, y, test_size=test_size)
    x_train = x_train[:, -obs_len:]
    x_test = x_test[:, -obs_len:]
    y_train = y_train[:, 0:pred_len]
    y_test = y_test[:, 0:pred_len]
    return addDim(x_train), addDim(x_test), addDim(y_train), y_test
def prepareDecoderData(x_train, y_train):
    encoder_input_data = x_train
    decoder_target_data = y_train
    last_col_x = encoder_input_data[:, -1, :]
    decoder_input_data = y_train[:, :-1, :]
    decoder_input_data = np.insert(decoder_input_data, 0, last_col_x, 1)
    return encoder_input_data, decoder_input_data, decoder_target_data
def processRawData(mat_file, obs_len=1000):
    # Parameter renamed from `len`, which shadowed the builtin and did not match
    # the `obs_len` keyword used by callers such as learn_flask/main.py.
    x = []
    x_data = loadmat(mat_file)
    x.extend([x_data['X']])
    x = np.array(x)
    x = np.transpose(x, axes=(0, 2, 1))
    x = np.concatenate(x, axis=0)
    return addDim(x[:, -obs_len:])
def getStat(decoded, target):
    stat = {}
    err = np.abs(np.subtract(decoded, target))
    stat['mae'] = np.mean(err)
    stat['std'] = np.std(err)
    stat['mae_row'] = np.mean(err, axis=1)
    stat['std_row'] = np.std(err, axis=1)
    print(stat['mae'])
    print(stat['std'])
    return stat
def predict_no_loading(encoder_model, decoder_model, input_seq, pred_len):
    states_value = encoder_model.predict(input_seq)
    target_seq = np.zeros((len(input_seq), 1, 1))
    stop_condition = False
    decoded_seq = []
    while not stop_condition:
        output_tokens, h, c = decoder_model.predict([target_seq] + states_value)
        decoded_seq.append(output_tokens)
        if len(decoded_seq) == pred_len:
            stop_condition = True
        target_seq = output_tokens
        states_value = [h, c]
    decoded_seq = np.array(decoded_seq).squeeze(axis=3)
    pred = np.transpose(decoded_seq, (1, 0, 2)).squeeze(axis=2)
    return pred
def predSeq(model_dir, input_seq, pred_len):
    encoder_model = loadModel(model_dir, "encoder")
    decoder_model = loadModel(model_dir, "decoder")
    pred = predict_no_loading(encoder_model, decoder_model, input_seq, pred_len)
    return pred
def makeSubplots(num_plot=6, decoded=None, target=None):
    fig = plt.figure(figsize=(10, 8))
    outer = gridspec.GridSpec(2, 3, wspace=0.3, hspace=0.3)
    sel = np.random.choice(len(decoded), num_plot, replace=False)
    sel_dec = decoded[sel]
    if target is not None:
        sel_tar = target[sel]
    for i in range(num_plot):
        if target is not None:
            inner = gridspec.GridSpecFromSubplotSpec(3, 1, subplot_spec=outer[i], wspace=0.1, hspace=0.8)
            for j in range(3):
                ax = plt.Subplot(fig, inner[j])
                if j == 0:
                    ax.plot(sel_dec[i])
                    ax.set_title('predicted')
                if j == 1:
                    ax.plot(sel_tar[i], 'r')
                    ax.set_title('target')
                if j == 2:
                    ax.plot(sel_dec[i])
                    ax.plot(sel_tar[i], 'r')
                    error = np.mean(np.abs(sel_dec[i] - sel_tar[i]))
                    ax.set_title("MAE: %.3f" % error)
                fig.add_subplot(ax)
        else:
            inner = gridspec.GridSpecFromSubplotSpec(1, 1, subplot_spec=outer[i], wspace=0.1, hspace=0.6)
            ax = plt.Subplot(fig, inner[0])
            ax.plot(sel_dec[i])
            ax.set_title('predicted')
            fig.add_subplot(ax)
    return plt
def evaPred(model_dir=None, decoded=None, target=None, save_pred=False, save_plot=False, plot=True):
    pred_dir = model_dir + 'prediction/'
    fig_dir = model_dir + 'figures/'
    time_str = time.strftime("_%H%M%S")
    if not os.path.exists(pred_dir):
        os.makedirs(pred_dir)
    fig = makeSubplots(decoded=decoded, target=target)
    if save_plot:
        if not os.path.exists(fig_dir):
            os.makedirs(fig_dir)
        fig.savefig(fig_dir + 'random_testing_samples' + time_str + '.png')
    if save_pred and target is not None:
        np.savetxt(pred_dir + 'predicted_' + time_str + '.csv', decoded, delimiter=",")
        np.savetxt(pred_dir + 'target_' + time_str + '.csv', target, delimiter=",")
    elif save_pred and target is None:
        np.savetxt(pred_dir + 'deploy_predicted_' + time_str + '.csv', decoded, delimiter=",")
    if plot:
        fig.show()
def fourierExtrapolation(x, n_predict):
    x = np.array(x).reshape(x.size, )
    n = x.size
    n_harm = 40
    t = np.arange(0, n)
    p = np.polyfit(t, x, 1)
    x_notrend = x - p[0] * t
    x_freqdom = fft.fft(x_notrend)
    f = fft.fftfreq(n)
    indexes = list(range(n))
    # sort indexes by frequency, lower -> higher
    indexes.sort(key=lambda i: np.absolute(f[i]))
    # t = np.arange(0, n + n_predict)
    t = np.arange(0, n_predict)
    restored_sig = np.zeros(t.size)
    for i in indexes[:1 + n_harm * 2]:
        ampli = np.absolute(x_freqdom[i]) / n  # amplitude
        phase = np.angle(x_freqdom[i])  # phase
        restored_sig += ampli * np.cos(2 * np.pi * f[i] * t + phase)
    output = restored_sig + p[0] * t
    # output = restored_sig
    return output
    # return output.reshape(output.size, 1)[-n_predict:]
def goodnessOfFit(pred, gt):
    de = np.sqrt(np.sum((pred - gt) ** 2))
    no = np.sqrt(np.sum(gt ** 2))
    fit = 100 * (1 - de / no)
    return fit
def makeMatSubset(mat_file, num_sample):
    dir = os.path.dirname(os.path.abspath(mat_file))
    data = loadmat(mat_file)
    data = data['X']
    dict = {}
    idx = np.random.choice(len(data), num_sample, replace=False)
    subset = data[:, idx]
    dict['X'] = subset
    file_name = os.path.basename(mat_file)
    file_name = os.path.splitext(file_name)[0] + '_small'
    savemat(dir + '/' + file_name, dict, oned_as='row')
def running_mean(x, N):
    cumsum = np.cumsum(np.insert(x, 0, 0))
    return (cumsum[N:] - cumsum[:-N]) / float(N)
def computeAR(data, pred_len, lag=15, mode="same"):
    data = data[0]
    # averaged = np.concatenate((data[0:2,0], running_mean(data, 5)))
    # acf_1 = acf(averaged, nlags=300)
    # pacf_1 = pacf(averaged, nlags=300)
    # plt.figure(2)
    # plt.plot(acf_1)
    # plt.show()
    # plt.figure(3)
    # plt.plot(pacf_1)
    # plt.show()
    if mode == "same":
        model = AR(endog=data)
        model_fit = model.fit(maxlag=lag, disp=False, ic='aic')
        y = model_fit.predict(start=len(data), end=len(data) + pred_len - 1, dynamic=False)
    else:
        y = []
        for i in range(pred_len):
            model = AR(endog=data)
            model_fit = model.fit(disp=False)
            pred = model_fit.predict(start=len(data), end=len(data), dynamic=False)
            data = data[1:]
            data = np.vstack((data, pred))
            y.append(pred)
    return np.array(y)
def pointWiseMLP(model, data, pred_len):
    pred = []
    for i in range(pred_len):
        feed = data
        y = model.predict(feed)
        data = data[:, 1:]
        data = np.hstack((data, y))
        pred.append(y[0])
    return np.array(pred)
```
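`running_mean` uses the cumulative-sum trick: the mean of the length-`N` window ending at position `i` is `(cumsum[i] - cumsum[i-N]) / N`, so all windows are computed with one pass. A quick check with a toy array:
```python
import numpy as np

def running_mean(x, N):
    # Prepend a zero so cumsum[i] is the sum of the first i elements.
    cumsum = np.cumsum(np.insert(x, 0, 0))
    return (cumsum[N:] - cumsum[:-N]) / float(N)

print(running_mean(np.array([1, 2, 3, 4, 5]), 3))  # [2. 3. 4.]
```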
{
"source": "a6ln8ka/music-recommender-system",
"score": 3
}
#### File: src/project/__init__.py
```python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
"""initialize the database"""
db = SQLAlchemy()
def create_app():
    """This method creates the app, sets configuration,
    and registers blueprints.
    """
    app = Flask(__name__)
    app.config['SECRET_KEY'] = '9OLWxND4o83j4K4iuopO'
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db.init_app(app)
    login_manager = LoginManager()
    login_manager.login_view = 'auth.login'
    login_manager.init_app(app)
    from .models import User
    @login_manager.user_loader
    def load_user(user_id):
        """user_loader callback. Used to reload the user object
        from the user id stored in the session.
        :param user_id:
        """
        return User.query.get(int(user_id))
    from .auth import auth as auth_blueprint
    app.register_blueprint(auth_blueprint)
    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)
    with app.app_context():
        db.create_all()
    return app
```
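A typical way to run this factory during development — a minimal sketch; the entry-point filename is hypothetical and the `project` module path is assumed from the file layout above:
```python
# run.py - hypothetical development entry point
from project import create_app

app = create_app()

if __name__ == '__main__':
    app.run(debug=True)
```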
#### File: src/project/recommender_system.py
```python
import numpy as np
import pandas as pd
import re
import ast
import os
def col(df, colname="artists"):
    """Return the positional index of the given column.
    :param df:
    :param colname: (Default value = "artists")
    """
    return np.array([int(x == colname) for x in df.columns]).argmax()
def query_artists(df, lists=[], full=False, strict=True):
    """
    :param df:
    :param lists: (Default value = [])
    :param full: (Default value = False)
    :param strict: (Default value = True)
    """
    # forward `full` as well; it was previously accepted but ignored
    return pd.concat([query_artist(df, string=name, full=full, strict=strict) for name in lists], axis=0)
def query_artist(df, string="--", full=False, strict=True):
    """
    :param df:
    :param string: (Default value = "--")
    :param full: (Default value = False)
    :param strict: (Default value = True)
    """
    lists = []
    for i, artist in enumerate(df["artists"]):
        if len(re.findall(string, "".join(artist))) != 0:
            if strict:
                if string == artist:
                    if full:
                        lists.append(df.iloc[i])
                    else:
                        lists.append(df.iloc[i, [col(df, "artists"), col(df, "genres")]])
            else:
                if full:
                    lists.append(df.iloc[i])
                else:
                    lists.append(df.iloc[i, [col(df, "artists"), col(df, "genres")]])
    if full:
        return pd.DataFrame(lists, columns=df.columns)
    else:
        return pd.DataFrame(lists, columns=["artists", "genres"])
def perfect_eval(string):
    """This method safely evaluates a string literal.
    :param string:
    """
    try:
        return ast.literal_eval(string)
    except:
        return []
def create_random_dict(df_by_artists, length, score):
    """This method is used to test the system. It creates a random
    dictionary of artists and rates.
    :param df_by_artists:
    :param length:
    :param score:
    """
    list_of_names = list(set(df_by_artists["artists"]))
    random_indices = [round(x) for x in np.random.random(length) * len(list_of_names)]
    random_names = pd.Series(list_of_names).iloc[random_indices].values.tolist()
    random_rates = [int(round(x)) for x in (score[0] + np.random.random(length) * (score[1] - score[0]))]
    name_rate_dict = {}
    for index in range(length):
        name_rate_dict.update({random_names[index]: random_rates[index]})
    return name_rate_dict
def rate_artist(df_by_artists, name_rate_dict):
    """This method selects the best-rated genres from name_rate_dict
    (despite the parameter name, a pandas Series is expected here).
    :param df_by_artists:
    :param name_rate_dict:
    """
    # convert the name_rate series to a pandas dataframe
    name_rate_series = pd.DataFrame({"rate": name_rate_dict.values, "artists": name_rate_dict.index})
    # create a new dataframe, only selecting the artists and genres columns of artists selected by user
    artists_genres = df_by_artists[df_by_artists["artists"].isin(list(name_rate_dict.keys()))][["artists", "genres"]]
    # merge both of these
    df_name_rate = pd.merge(name_rate_series, artists_genres, on="artists", how="inner")
    df_x = df_name_rate.copy()
    # create the artist-genre-matrix for artists selected by users
    for index, genres in enumerate(df_name_rate["genres"]):
        for genre in genres:
            # artist includes the genre: 1
            df_x.at[index, genre] = 1
    # artist does not include the genre: 0
    df_x = df_x.fillna(0)
    # ratings of artists
    df_user = df_x["rate"]
    # drop all columns except the genre columns
    df_genre_matrix = df_x.drop(["artists", "genres", "rate"], axis=1).reset_index(drop=True)
    # find out the genres' ratings
    df_profile = df_genre_matrix.transpose().dot(df_user)
    return df_profile
def select_artist(df_by_artists, df_rate):
    """This method selects artists which perform the same genres as
    the artists that were given.
    :param df_by_artists:
    :param df_rate:
    """
    # save the indices of artists, which include any of the genres in the genre profile
    list_of_id = []
    for index, row in df_by_artists.iterrows():
        for genre in row["genres"]:
            if genre in df_rate.index:
                list_of_id.append(index)
    # find the unique indices
    list_of_id = list(set(list_of_id))
    # select the artists and genres columns of the artists including any of the genres in the genre profile
    df_select_columns = df_by_artists.iloc[list_of_id, [col(df_by_artists, "artists"), col(df_by_artists, "genres")]]
    df_select = df_select_columns.copy()
    # create the artist-genre-matrix of new artists
    for index, row in df_select_columns.iterrows():
        for genre in row['genres']:
            # artist includes genre: 1
            df_select.at[index, genre] = 1
    # artist does not include genre: 0
    df_select = df_select.fillna(0)[df_rate.index]
    return df_select
def recommend_artist_by_genre(df_by_artists, name_rate_dict, how_many):
    """This method is used to create recommendations based on a dictionary
    of artist names and rates.
    :param df_by_artists:
    :param name_rate_dict:
    :param how_many:
    """
    df_by_artists = df_by_artists.copy()
    # make sure that genres are lists, not strings
    df_by_artists["genres"] = [perfect_eval(genre) for genre in df_by_artists["genres"]]
    # create a name_rate pandas series
    name_rate_series = pd.Series(name_rate_dict)
    # find out the genre profile of the user
    df_rate = rate_artist(df_by_artists, name_rate_series)
    # create the new artists' matrix
    df_select = select_artist(df_by_artists, df_rate)
    # calculate similarity scores of those artists
    affinity_scores = df_select.dot(df_rate) / df_rate.sum()
    # sort it in descending order
    affinity_scores_sorted = pd.Series(affinity_scores, name="genre_affinity").sort_values(ascending=False)
    # retrieve the names of artists by their indices
    artists_in_df = df_by_artists.iloc[affinity_scores_sorted.index, [col(df_by_artists, "artists")]]
    # store the artists' names and their similarity scores in a dataframe
    resulted_df = pd.concat([affinity_scores_sorted, artists_in_df], axis=1)
    # drop the artists already selected by the user and limit the count of artists to a specified amount
    output = resulted_df[~resulted_df["artists"].isin(name_rate_series.index)].iloc[:how_many, :]
    # create new indices
    return output.reset_index()
def songs_dict(name_rate_dict, how_many):
    """This function is used in main.py. It returns a dictionary of recommended
    songs, which is viewed when the user presses the "get recommendations" button.
    :param name_rate_dict:
    :param how_many:
    """
    dir = os.getcwd()
    df_by_artists = pd.read_csv(dir + "//data_w_genres.csv")
    df_scores = recommend_artist_by_genre(df_by_artists, name_rate_dict, how_many)
    return df_scores.to_dict()
```
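The core of `rate_artist` is a matrix product: transposing the one-hot artist-by-genre matrix and multiplying it by the user's ratings yields a total rating per genre. A toy illustration of that step with invented artists and genres:
```python
import pandas as pd

# One-hot artist-by-genre matrix and user ratings (hypothetical data).
genre_matrix = pd.DataFrame(
    {'rock': [1, 0, 1], 'jazz': [0, 1, 1]},
    index=['artist_a', 'artist_b', 'artist_c'])
rates = pd.Series([5, 2, 4], index=genre_matrix.index)

# genres x artists  @  artists -> one score per genre
profile = genre_matrix.transpose().dot(rates)
print(profile)  # rock 9, jazz 6
```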
{
"source": "a6n1/caldera",
"score": 2
}
#### File: objects/secondclass/c_rule.py
```python
import marshmallow as ma
from app.utility.base_object import BaseObject
from app.utility.rule_set import RuleAction
class RuleActionField(ma.fields.Field):
    """
    Custom field to handle the RuleAction Enum.
    """
    def _serialize(self, value, attr, obj, **kwargs):
        if value is None:
            return None
        return value.value
    def _deserialize(self, value, attr, data, **kwargs):
        return RuleAction[value]
class RuleSchema(ma.Schema):
    trait = ma.fields.String()
    match = ma.fields.String()
    action = RuleActionField()
    @ma.post_load
    def build_rule(self, data, **_):
        return Rule(**data)
class Rule(BaseObject):
    schema = RuleSchema()
    def __init__(self, action, trait, match='.*'):
        super().__init__()
        self.action = action
        self.trait = trait
        self.match = match
```
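The custom field serializes the enum by *value* and deserializes by *name*. A self-contained sketch of that pattern using a stand-in enum (not caldera's actual `RuleAction`, whose members aren't shown here):
```python
import enum
import marshmallow as ma

class Action(enum.Enum):  # stand-in for RuleAction
    ALLOW = 'allow'
    DENY = 'deny'

class ActionField(ma.fields.Field):
    def _serialize(self, value, attr, obj, **kwargs):
        return None if value is None else value.value  # dump the enum's value

    def _deserialize(self, value, attr, data, **kwargs):
        return Action[value]                            # look up by enum name

class DemoSchema(ma.Schema):
    action = ActionField()

print(DemoSchema().load({'action': 'ALLOW'}))      # {'action': <Action.ALLOW: 'allow'>}
print(DemoSchema().dump({'action': Action.DENY}))  # {'action': 'deny'}
```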
#### File: service/interfaces/i_contact_svc.py
```python
import abc
class ContactServiceInterface(abc.ABC):
    @abc.abstractmethod
    def register(self, contact):
        pass
    @abc.abstractmethod
    def handle_heartbeat(self):
        """
        Accept all components of an agent profile and save a new agent or register an updated heartbeat.
        :return: the agent object, instructions to execute
        """
        pass
    @abc.abstractmethod
    def build_filename(self):
        pass
```
#### File: tests/services/test_file_svc.py
```python
import os
import pytest
from app.utility.payload_encoder import xor_file
from tests import AsyncMock
@pytest.mark.usefixtures(
    'init_base_world'
)
class TestFileService:
    @pytest.mark.skip
    @pytest.fixture
    def setup_mock_dataservice(self, mocker):
        pass
    def test_get_file_no_file_header(self, loop, file_svc):
        with pytest.raises(KeyError):
            loop.run_until_complete(file_svc.get_file(headers=dict()))
    @pytest.mark.skip('remove mocking')
    def test_get_file_special_payload(self, loop, mocker, file_svc):
        payload = 'unittestpayload'
        new_payload_name = 'utp'
        payload_content = b'content'
        payload_func = AsyncMock(return_value=(payload, new_payload_name))
        # patch out read_file and special payload for testing
        mocker.patch.object(file_svc, 'read_file', new_callable=AsyncMock, return_value=(payload, payload_content))
        mocker.patch.dict(file_svc.special_payloads, {payload: payload_func})
        fp, rcontent, display_name = loop.run_until_complete(file_svc.get_file(headers=dict(file=payload, name=new_payload_name)))
        payload_func.assert_called_once()
        assert display_name == new_payload_name
        assert rcontent == payload_content
        assert payload in fp
    @pytest.mark.skip('remove mocking')
    def test_save_get_file(self, loop, mocker, file_svc):
        filename = 'unittest-file-save-test'
        content = b'content!'
        path = 'data'
        # create and save a file
        data_svc = mocker.Mock()
        data_svc.locate = AsyncMock(return_value=[])
        mocker.patch.object(file_svc, 'data_svc', new=data_svc)
        loop.run_until_complete(file_svc.save_file(filename, content, path))
        assert os.path.isfile('./%s/%s' % (path, filename))
        # get and check contents
        fp, rcontent, display_name = loop.run_until_complete(file_svc.get_file(headers=dict(file=filename)))
        assert fp == filename
        assert rcontent == content
        assert display_name == filename
        # delete file
        os.remove('./%s/%s' % (path, filename))
    def test_create_exfil_sub_directory(self, loop, file_svc):
        exfil_dir_name = 'unit-testing-Rocks'
        new_dir = loop.run_until_complete(file_svc.create_exfil_sub_directory(exfil_dir_name))
        assert os.path.isdir(new_dir)
        os.rmdir(new_dir)
    @pytest.mark.skip('not ready')
    def test_save_multipart_file_upload(self):
        pass
    @pytest.mark.skip('remove mocking')
    def test_find_file_path_no_plugin(self, loop, mocker, file_svc):
        data_svc = mocker.Mock()
        data_svc.locate = AsyncMock(return_value=[])
        mocker.patch.object(file_svc, 'data_svc', new=data_svc)
        filename = 'unittest-file-path-test'
        path = 'data'
        with open('./%s/%s' % (path, filename), 'w') as f:
            f.write('test')
        _, file_path = loop.run_until_complete(file_svc.find_file_path(filename))
        assert file_path == '%s/%s' % (path, filename)
        # delete file
        os.remove('./%s/%s' % (path, filename))
    @pytest.mark.skip('remove mocking')
    def test_read_file_nonexistent_file(self, loop, mocker, file_svc):
        mocker.patch.object(file_svc, 'find_file_path', new_callable=AsyncMock, return_value=(None, None))
        with pytest.raises(FileNotFoundError):
            loop.run_until_complete(file_svc.read_file('non-existent-file-for-testing'))
    @pytest.mark.skip('remove mocking')
    def test_find_file_path_plugin(self, loop, mocker, demo_plugin, tmpdir, file_svc):
        def walk_file_path_mock(path, name):
            if 'data' in path:
                return path
        plugin = demo_plugin(enabled=True)
        location = 'path/to/file'
        data_svc = mocker.Mock()
        data_svc.locate = AsyncMock(return_value=[plugin])
        mocker.patch.object(file_svc, 'data_svc', new=data_svc)
        mocker.patch.object(file_svc, 'walk_file_path', new_callable=AsyncMock, side_effect=walk_file_path_mock)
        plugin_name, file_path = loop.run_until_complete(file_svc.find_file_path('testfile', location=location))
        print(plugin_name, file_path)
        assert plugin_name == plugin.name
        assert file_path == os.path.join('plugins', plugin.name, 'data', location)
    @pytest.mark.skip('remove mocking')
    def test_read_file_noxor(self, loop, mocker, tmpdir, file_svc):
        plaintext_fn = 'read-file-nonxortest.txt'
        content = b'this is plaintext'
        plaintext_file = tmpdir.join(plaintext_fn)
        plaintext_file.write(content)
        mocker.patch.object(file_svc, 'find_file_path', new_callable=AsyncMock, return_value=(None, str(plaintext_file)))
        name, output = loop.run_until_complete(file_svc.read_file(plaintext_fn))
        assert name == plaintext_fn
        assert output == content
    @pytest.mark.skip('remove mocking')
    def test_read_file_xor(self, loop, mocker, tmpdir, file_svc):
        plaintext_fn = 'xor-plaintext.txt'
        xortext_fn = "%s.xored" % plaintext_fn
        content = b'this is plaintext'
        plaintext_file = tmpdir.join(plaintext_fn)
        plaintext_file.write(content)
        xored_file = tmpdir.join(xortext_fn)
        xor_file(plaintext_file, xored_file)
        mocker.patch.object(file_svc, 'find_file_path', new_callable=AsyncMock, return_value=(None, str(xored_file)))
        name, nonxored_output = loop.run_until_complete(file_svc.read_file(xortext_fn))
        assert name == xortext_fn
        assert nonxored_output == content
    def test_read_write_result_file(self, tmpdir, file_svc):
        link_id = '12345'
        output = 'output testing unit'
        # write output data
        file_svc.write_result_file(link_id=link_id, output=output, location=tmpdir)
        # read output data
        output_data = file_svc.read_result_file(link_id=link_id, location=tmpdir)
        assert output_data == output
    @pytest.mark.skip('remove mocking')
    def test_add_special_payload(self, loop, mocker, file_svc):
        mocker.patch.dict(file_svc.special_payloads)
        payload_name = 'unittest12345'
        payload_func = AsyncMock
        loop.run_until_complete(file_svc.add_special_payload(payload_name, payload_func))
        assert file_svc.special_payloads[payload_name] == payload_func
```
#### File: tests/utility/test_base_world.py
```python
import pytest
import yaml
from datetime import datetime
from app.utility.base_world import BaseWorld
class TestBaseWorld:
    default_config = dict(name='main', config={'app.contact.http': '0.0.0.0', 'plugins': ['sandcat', 'stockpile']})
    default_yaml = dict(test_dir=1, implant_name='unittesting', test_int=1234)
    @pytest.fixture
    def reset_config(self):
        BaseWorld.apply_config(**self.default_config)
        yield
        BaseWorld._app_configuration = dict()
    @pytest.fixture
    def yaml_file(self, tmpdir):
        f = tmpdir.mkdir('yml').join('test.yml')
        yaml_str = yaml.dump(self.default_yaml)
        f.write(yaml_str)
        assert f.read() == yaml_str
        yield f
    @pytest.fixture
    def text_file(self, tmpdir):
        txt_str = 'Hello world!'
        f = tmpdir.mkdir('txt').join('test.txt')
        f.write(txt_str)
        assert f.read() == txt_str
        yield f
    @pytest.mark.usefixtures('reset_config')
    def test_apply_and_retrieve_config(self):
        new_config = dict(name='newconfig', config={'app.unit.test': 'abcd12345', 'plugins': ['stockpile']})
        BaseWorld.apply_config(**new_config)
        assert BaseWorld.get_config(name='newconfig') == new_config['config']
    @pytest.mark.usefixtures('reset_config')
    def test_get_prop_from_config(self):
        assert BaseWorld.get_config(name='main', prop='app.contact.http') == '0.0.0.0'
    @pytest.mark.usefixtures('reset_config')
    def test_set_prop_from_config(self):
        BaseWorld.set_config(name='main', prop='newprop', value='unittest')
        assert BaseWorld.get_config(name='main', prop='newprop') == 'unittest'
    def test_encode_and_decode_string(self):
        plaintext = 'unit testing string'
        encoded_text = 'dW5pdCB0ZXN0aW5nIHN0cmluZw=='
        encoded_str = BaseWorld.encode_string(plaintext)
        assert encoded_str == encoded_text
        decoded_str = BaseWorld.decode_bytes(encoded_text)
        assert decoded_str == plaintext
    def test_jitter(self):
        fraction = "1/5"
        frac_arr = fraction.split('/')
        jitter = BaseWorld.jitter(fraction)
        assert jitter >= int(frac_arr[0])
        assert jitter <= int(frac_arr[1])
    def test_strip_yml_no_path(self):
        yaml = BaseWorld.strip_yml(None)
        assert yaml == []
    def test_strip_yml(self, yaml_file):
        yaml = BaseWorld.strip_yml(yaml_file)
        assert yaml == [self.default_yaml]
    def test_prepend_to_file(self, text_file):
        line = 'This is appended!'
        BaseWorld.prepend_to_file(text_file, line)
        assert 'This is appended!\nHello world!' == text_file.read()
    def test_get_current_timestamp(self):
        date_format = '%Y-%m-%d %H'
        output = BaseWorld.get_current_timestamp(date_format)
        cur_time = datetime.now().strftime(date_format)
        assert cur_time == output
    def test_is_not_base64(self):
        assert not BaseWorld.is_base64('not base64')
    def test_is_base64(self):
        b64str = 'aGVsbG8gd29ybGQgZnJvbSB1bml0IHRlc3QgbGFuZAo='
        assert BaseWorld.is_base64(b64str)
    def test_walk_file_path_exists_nonxor(self, loop, text_file):
        ret = loop.run_until_complete(BaseWorld.walk_file_path(text_file.dirname, text_file.basename))
        assert ret == text_file
    def test_walk_file_path_notexists(self, loop, text_file):
        ret = loop.run_until_complete(BaseWorld.walk_file_path(text_file.dirname, 'not-a-real.file'))
        assert ret is None
    def test_walk_file_path_xor_fn(self, loop, tmpdir):
        f = tmpdir.mkdir('txt').join('xorfile.txt.xored')
        f.write("test")
        ret = loop.run_until_complete(BaseWorld.walk_file_path(f.dirname, 'xorfile.txt'))
        assert ret == f
```
{
"source": "a6patch/agnostic",
"score": 3
}
#### File: agnostic/agnostic/postgres.py
```python
import subprocess
import pg8000
from agnostic import AbstractBackend
class PostgresBackend(AbstractBackend):
    ''' Support for PostgreSQL. '''
    def backup_db(self, backup_file):
        '''
        Return a ``Popen`` instance that will backup the database to the
        ``backup_file`` handle.
        '''
        env = {'PGPASSWORD': self._password}
        command = [
            'pg_dump',
            '-h', self._host,
            '-U', self._user,
        ]
        if self._port is not None:
            command.append('-p')
            command.append(str(self._port))
        for schema in self._split_schema():
            command.append('-n')
            command.append(schema)
        command.append(self._database)
        process = subprocess.Popen(
            command,
            env=env,
            stdout=backup_file,
            stderr=subprocess.PIPE
        )
        return process
    def clear_db(self, cursor):
        ''' Remove all objects from the database. '''
        # Drop tables.
        cursor.execute('''
            SELECT schemaname, tablename FROM pg_tables
            WHERE tableowner = %s
            AND schemaname != 'pg_catalog'
            AND schemaname != 'information_schema'
        ''', (self._user,))
        tables = ['"{}"."{}"'.format(r[0], r[1]) for r in cursor.fetchall()]
        if len(tables) > 0:
            sql = 'DROP TABLE {} CASCADE'.format(', '.join(tables))
            cursor.execute(sql)
        # Drop sequences.
        cursor.execute('''
            SELECT relname FROM pg_class
            WHERE relkind = 'S'
        ''')
        sequences = ['"{}"'.format(row[0]) for row in cursor.fetchall()]
        if len(sequences) > 0:
            sql = 'DROP SEQUENCE {} CASCADE'.format(','.join(sequences))
            cursor.execute(sql)
        # Drop custom types, e.g. ENUM types.
        cursor.execute('''
            SELECT typname FROM pg_type
            WHERE typtype = 'e'
        ''')
        types = ['"{}"'.format(row[0]) for row in cursor.fetchall()]
        if len(types) > 0:
            sql = 'DROP TYPE {} CASCADE'.format(','.join(types))
            cursor.execute(sql)
        # Drop schema objects.
        for schema in self._split_schema():
            if schema != 'public':
                sql = 'DROP SCHEMA IF EXISTS {} CASCADE'.format(schema)
                cursor.execute(sql)
    def connect_db(self):
        ''' Connect to PostgreSQL. '''
        connect_args = {
            'host': self._host,
            'user': self._user,
            'password': self._password,
            'database': self._database,
        }
        if self._port is not None:
            connect_args['port'] = self._port
        db = pg8000.connect(**connect_args)
        db.autocommit = True
        if self._schema is not None:
            cursor = db.cursor()
            cursor.execute("SET SCHEMA '{}'".format(self._schema))
        return db
    def get_schema_command(self):
        ''' Return a command that will set the current schema. '''
        if self._schema is None:
            return 'SET search_path = "$user",public;\n'
        else:
            return 'SET search_path = {};\n'.format(self._schema)
    def restore_db(self, backup_file):
        '''
        Return a ``Popen`` instance that will restore the database from the
        ``backup_file`` handle.
        '''
        env = {'PGPASSWORD': self._password}
        command = [
            'psql',
            '-h', self._host,
            '-U', self._user,
            '-v', 'ON_ERROR_STOP=1',  # Fail fast if an error occurs.
        ]
        if self._port is not None:
            command.append('-p')
            command.append(str(self._port))
        command.append(self._database)
        process = subprocess.Popen(
            command,
            env=env,
            stdin=backup_file,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE
        )
        return process
    def snapshot_db(self, snapshot_file):
        '''
        Return a ``Popen`` instance that writes a snapshot to ``snapshot_file``.
        '''
        env = {'PGPASSWORD': self._password}
        command = [
            'pg_dump',
            '-h', self._host,
            '-U', self._user,
            '-s',  # dump schema only
            '-x',  # don't dump grant/revoke statements
            '-O',  # don't dump ownership commands
            '--no-tablespaces',
        ]
        if self._port is not None:
            command.append('-p')
            command.append(str(self._port))
        if self._schema is not None:
            for schema in self._split_schema():
                command.extend(('-n', schema))
        command.append(self._database)
        process = subprocess.Popen(
            command,
            env=env,
            stdout=snapshot_file,
            stderr=subprocess.PIPE
        )
        return process
    def _split_schema(self):
        '''
        Split schema string into separate schema names.
        PostgreSQL allows specifying the schema name as a search path that
        looks for objects in more than one schema. This method breaks that
        search path into individual schema names.
        It also replaces the special schema name ``"$user"`` (quotes included)
        with the current username, mimicking the ``SET search_path TO ...``
        behavior in PostgreSQL.
        '''
        schemas = list()
        if self._schema is not None:
            for schema in map(str.strip, self._schema.split(',')):
                if schema == '"$user"':
                    schemas.append(self._user)
                else:
                    schemas.append(schema)
        return schemas
```
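`_split_schema` turns a PostgreSQL search-path string into a list of schema names, substituting the connecting user for the special `"$user"` entry. Detached from the class, the logic looks like this (a standalone sketch, not the backend's actual API):
```python
def split_schema(schema, user):
    # Mirrors PostgresBackend._split_schema without the instance attributes.
    schemas = []
    if schema is not None:
        for name in map(str.strip, schema.split(',')):
            schemas.append(user if name == '"$user"' else name)
    return schemas

print(split_schema('"$user", public, audit', 'alice'))
# ['alice', 'public', 'audit']
```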
#### File: agnostic/tests/test_cli.py
```python
import logging
import os
import shutil
import tempfile
import unittest
from unittest.mock import MagicMock, patch
from click import ClickException
from click.testing import CliRunner
import agnostic
import agnostic.cli
class TestCli(unittest.TestCase):
    def setUp(self):
        ''' Create temporary working directory. '''
        self._temp_dir = tempfile.mkdtemp()
        self._migrations_dir = self._temp_dir + '/migrations'
        self._old_cwd = os.getcwd()
        os.makedirs(self._migrations_dir)
        os.chdir(self._temp_dir)
    def tearDown(self):
        ''' Remove temporary directory. '''
        shutil.rmtree(self._temp_dir)
        os.chdir(self._old_cwd)
    def run_cli(self, command):
        ''' Run CLI and log any errors. '''
        # `import logging` added above; the undefined name `args` is replaced
        # with the actual `command` parameter.
        logging.info('Running CLI with args: %r', command)
        result = CliRunner().invoke(agnostic.cli.main, command)
        if result.exception is not None:
            logging.error('== run_cli exception ==')
            logging.error('COMMAND: %s', command)
            logging.error('EXIT CODE: %s', result.exit_code)
            logging.error('OUTPUT:\n%s', result.output)
            raise result.exception
        return result
    def test_invalid_options(self):
        # Invoke CLI with options that pass the argument parser's criteria but
        # fail when instantiating a backend.
        result = CliRunner().invoke(agnostic.cli.main,
            ['-t', 'sqlite', '-u', 'root', '-d', 'test.db', '-m',
             self._migrations_dir, 'bootstrap'])
        self.assertNotEqual(result.exit_code, 0)
    @patch('agnostic.cli.click.confirm')
    def test_drop_requires_confirm(self, mock_confirm):
        result = CliRunner().invoke(agnostic.cli.main,
            ['-t', 'sqlite', '-d', 'test.db', '-m', self._migrations_dir,
             'drop'])
        mock_confirm.assert_called_with('Are you 100% positive that you want '
                                        'to do this?')
        self.assertNotEqual(result.exit_code, 0)
    @patch('agnostic.cli.click.confirm')
    def test_tester_requires_confirm(self, mock_confirm):
        before = tempfile.mkstemp()[1]
        after = tempfile.mkstemp()[1]
        result = CliRunner().invoke(agnostic.cli.main,
            ['-t', 'sqlite', '-d', 'test.db', '-m', self._migrations_dir,
             'test', before, after])
        os.unlink(before)
        os.unlink(after)
        mock_confirm.assert_called_with('Are you 100% positive that you want '
                                        'to do this?')
        self.assertNotEqual(result.exit_code, 0)
    def test_list_no_migrations(self):
        result = CliRunner().invoke(agnostic.cli.main,
            ['-t', 'sqlite', '-d', 'test.db', '-m', self._migrations_dir,
             'list'])
        self.assertNotEqual(result.exit_code, 0)
    def test_get_db_cursor_connect_error(self):
        config = MagicMock()
        config.debug = False
        config.backend.connect_db.side_effect = Exception()
        with self.assertRaises(ClickException):
            with agnostic.cli._get_db_cursor(config) as (db, cursor):
                pass
    def test_get_db_cursor_schema_error(self):
        config = MagicMock()
        config.debug = False
        config.backend.get_schema_command.side_effect = Exception()
        with self.assertRaises(ClickException):
            with agnostic.cli._get_db_cursor(config) as (db, cursor):
                pass
    def test_get_db_cursor_closes_automatically(self):
        config = MagicMock()
        config.debug = False
        with agnostic.cli._get_db_cursor(config) as (db, cursor):
            pass
        db.close.assert_called_with()
        # Swallows exception on db.close:
        with agnostic.cli._get_db_cursor(config) as (db, cursor):
            db.close.side_effect = Exception()
    @patch('agnostic.cli._wait_for')
    def test_snapshot_error(self, mock_wait_for):
        mock_wait_for.side_effect = Exception()
        result = CliRunner().invoke(agnostic.cli.main, ['-t', 'sqlite', '-d',
            'test.db', '-m', self._migrations_dir, 'snapshot', 'snapshot.sql'])
        self.assertNotEqual(result.exit_code, 0)
```
#### File: agnostic/tests/test_postgres.py
```python
import os
import unittest
import pg8000
from tests.abstract import AbstractDatabaseTest
class TestPostgreSql(AbstractDatabaseTest, unittest.TestCase):
    ''' Integration tests for PostgreSQL '''
    @property
    def db_type(self):
        ''' The database type as a string. '''
        return 'postgres'
    @property
    def default_db(self):
        ''' The database to connect when dropping/creating a test database. '''
        return 'postgres'
    def connect_db(self, user, password, database):
        ''' Return a connection to the specified database. '''
        connect_args = {
            'host': os.getenv('POSTGRES_HOST', 'localhost'),
            'user': user,
            'password': password,
            'database': database,
            'timeout': 1,
        }
        try:
            connect_args['port'] = os.environ['POSTGRES_PORT']
        except KeyError:
            pass
        db = pg8000.connect(**connect_args)
        db.autocommit = True
        return db
    def table_columns(self, cursor, database, table_name):
        ''' Return a list of columns in the specified table. '''
        sql = '''
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name = %s
            ORDER BY ordinal_position
        '''
        cursor.execute(sql, (table_name,))
        return [row[0] for row in cursor.fetchall()]
    def table_exists(self, cursor, database, table_name):
        ''' Return true if the specified table exists. '''
        table_query = '''
            SELECT COUNT(*)
            FROM information_schema.tables
            WHERE table_name = %s
        '''
        cursor.execute(table_query, (table_name,))
        return cursor.fetchone()[0] == 1
```
{
"source": "a7031x/OpenNRE",
"score": 3
}
#### File: OpenNRE/model/pcnn_att.py
```python
from framework import Framework
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
def pcnn_att(is_training):
    if is_training:
        framework = Framework(is_training=True)
    else:
        framework = Framework(is_training=False)
    word_embedding = framework.embedding.word_embedding()
    pos_embedding = framework.embedding.pos_embedding()
    embedding = framework.embedding.concat_embedding(word_embedding, pos_embedding)
    # PCNN. Appoint activation to whatever activation function you want to use.
    # There are three more encoders:
    #     framework.encoder.cnn
    #     framework.encoder.rnn
    #     framework.encoder.birnn
    x = framework.encoder.pcnn(embedding, FLAGS.hidden_size, framework.mask, activation=tf.nn.relu)
    # Selective attention. Setting parameter dropout_before=True means using dropout before attention.
    # There are three more selecting methods:
    #     framework.selector.maximum
    #     framework.selector.average
    #     framework.selector.no_bag
    logit, repre = framework.selector.attention(x, framework.scope, framework.label_for_select)
    if is_training:
        loss = framework.classifier.softmax_cross_entropy(logit)
        output = framework.classifier.output(logit)
        framework.init_train_model(loss, output, optimizer=tf.train.GradientDescentOptimizer)
        framework.load_train_data()
        framework.train()
    else:
        framework.init_test_model(logit)
        framework.load_test_data()
        framework.test()
```
#### File: OpenNRE/network/selector.py
```python
import tensorflow as tf
import numpy as np
class Selector(object):
    def __init__(self, num_classes, is_training=False, drop_prob=None):
        self.num_classes = num_classes
        self.is_training = is_training
        self.dropout = drop_prob
    def __call__(self, is_training=False, drop_prob=None):
        self.is_training = is_training
        self.dropout = drop_prob
    def __dropout__(self, x):
        if self.dropout:
            return tf.layers.dropout(x, rate=self.dropout, training=self.is_training)
        else:
            return x
    def __logits__(self, x, var_scope=None, reuse=tf.AUTO_REUSE):
        with tf.variable_scope(var_scope or 'logits', reuse=reuse):
            relation_matrix = tf.get_variable('relation_matrix', [self.num_classes, x.shape[1]], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer())
            bias = tf.get_variable('bias', [self.num_classes], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer())
            logits = tf.matmul(x, tf.transpose(relation_matrix)) + bias
        return logits
    def __attention_train_logits__(self, x, query, var_scope=None, reuse=None):
        with tf.variable_scope(var_scope or 'attention_logits', reuse=reuse):
            relation_matrix = tf.get_variable('relation_matrix', [self.num_classes, x.shape[1]], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer())
            bias = tf.get_variable('bias', [self.num_classes], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer())
            current_attention = tf.nn.embedding_lookup(relation_matrix, query)
            attention_logit = tf.reduce_sum(current_attention * x, 1)
        return attention_logit
    def __attention_test_logits__(self, x, var_scope=None, reuse=None):
        with tf.variable_scope(var_scope or 'attention_logits', reuse=reuse):
            relation_matrix = tf.get_variable('relation_matrix', [self.num_classes, x.shape[1]], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer())
            bias = tf.get_variable('bias', [self.num_classes], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer())
        return tf.matmul(x, tf.transpose(relation_matrix))
    def no_bag(self, x):
        with tf.name_scope("no_bag"):
            x = self.__dropout__(x)
            return self.__logits__(x, "no_bag_logits", False), x
    def attention(self, x, scope, query, dropout_before=False):
        with tf.name_scope("attention"):
            if self.is_training:
                if dropout_before:
                    x = self.__dropout__(x)
                attention_logit = self.__attention_train_logits__(x, query, "attention_logits", False)
                tower_repre = []
                for i in range(scope.shape[0] - 1):
                    sen_matrix = x[scope[i]:scope[i + 1]]
                    attention_score = tf.nn.softmax(tf.reshape(attention_logit[scope[i]:scope[i + 1]], [1, -1]))
                    final_repre = tf.squeeze(tf.matmul(attention_score, sen_matrix))
                    tower_repre.append(final_repre)
                if not dropout_before:
                    stack_repre = self.__dropout__(tf.stack(tower_repre))
                else:
                    stack_repre = tf.stack(tower_repre)
                return self.__logits__(stack_repre, "attention_logits", True), stack_repre
            else:
                test_attention_logit = self.__attention_test_logits__(x, "attention_logits", False)
                test_tower_output = []
                test_repre = []
                for i in range(scope.shape[0] - 1):
                    test_attention_score = tf.nn.softmax(tf.transpose(test_attention_logit[scope[i]:scope[i + 1], :]))
                    final_repre = tf.matmul(test_attention_score, x[scope[i]:scope[i + 1]])
                    logits = self.__logits__(final_repre, "attention_logits", True)
                    test_repre.append(final_repre)
                    # test_tower_output.append(tf.diag_part(tf.nn.softmax(logits)))
                    test_tower_output.append(tf.reduce_max(tf.nn.softmax(logits), axis=0))
                test_repre = tf.reshape(tf.stack(test_repre), [scope.shape[0] - 1, self.num_classes, -1])
                test_output = tf.reshape(tf.stack(test_tower_output), [scope.shape[0] - 1, self.num_classes])
                return test_output, test_repre
    def average(self, x, scope, dropout_before=False):
        with tf.name_scope("average"):
            if dropout_before:
                x = self.__dropout__(x)
            tower_repre = []
            for i in range(scope.shape[0] - 1):
                repre_mat = x[scope[i]:scope[i + 1]]
                repre = tf.reduce_mean(repre_mat, axis=0)
                tower_repre.append(repre)
            if not dropout_before:
                stack_repre = self.__dropout__(tf.stack(tower_repre))
            else:
                stack_repre = tf.stack(tower_repre)
            return self.__logits__(stack_repre, "average_logits", False), stack_repre
    def maximum(self, x, scope, dropout_before=False):
        with tf.name_scope("maximum"):
            if dropout_before:
                x = self.__dropout__(x)
            tower_repre = []
            for i in range(scope.shape[0] - 1):
                repre_mat = x[scope[i]:scope[i + 1]]
                logits = self.__logits__(repre_mat, "maximum_logits")
                j = tf.argmax(tf.reduce_max(logits, axis=1), output_type=tf.int32)
                tower_repre.append(repre_mat[j])
            if not dropout_before:
                stack_repre = self.__dropout__(tf.stack(tower_repre))
            else:
                stack_repre = tf.stack(tower_repre)
            return self.__logits__(stack_repre, "maximum_logits", True), stack_repre
```
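At training time, `attention` weights the sentences of each bag by a softmax over their query-conditioned logits and sums them into a single bag representation. A numpy sketch of that per-bag step; the sentence encodings and logits below are invented for illustration:
```python
import numpy as np

def softmax(z):
    e = np.exp(z - z.max())
    return e / e.sum()

sen_matrix = np.array([[1.0, 0.0],   # a bag of 3 sentence encodings (hypothetical)
                       [0.0, 1.0],
                       [1.0, 1.0]])
attention_logit = np.array([2.0, 0.5, 1.0])  # each sentence's score vs. the query relation

score = softmax(attention_logit)   # attention weights over the bag's sentences
bag_repre = score @ sen_matrix     # weighted sum -> one vector per bag
print(score.round(3), bag_repre.round(3))
```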
{
"source": "a710128/Lesson9",
"score": 2
}
#### File: API/capture/Auto_image_detector.py
```python
import os.path as _Os_path
from .model import get_model
from .model import characters
from .model import width
from .model import height
from .model import n_len
from keras.models import *
from keras.layers import *
from keras import backend as K
import cv2
import numpy as np
import tensorflow as tf
model_path = _Os_path.join(_Os_path.sep.join(__file__.split(_Os_path.sep)[:-1]), 'model.predict_new.h5')
predict_model = load_model(model_path)
def Detector_single_image(image, model_dir='model.predict_new.h5'):
    img = cv2.resize(image, (width, height))
    X_test = np.zeros((1, width, height, 3), dtype=np.int16)
    X_test[0] = img.transpose(1, 0, 2)
    y_pred = predict_model.predict(X_test)
    y_pred = y_pred[:, 2:, :]
    text_list = K.get_value(K.ctc_decode(y_pred, input_length=np.ones(y_pred.shape[0]) * y_pred.shape[1])[0][0])[:, :n_len]
    out = ''.join([characters[x] for x in text_list[0]])
    return out
'''
example1
Detector_single_image('/home/liujiashuo/Desktop/img/2BFT.jpg')
Detector_single_image('/home/liujiashuo/Desktop/img/2B3KV.jpg')
Detector_single_image('/home/liujiashuo/Desktop/img/2B4F.jpg')
Detector_single_image(cv2.imread('./TEST/YWP9H.jpg', cv2.IMREAD_ANYCOLOR))
example2
Detector_multi_images('/home/liujiashuo/Desktop/TEST' , 16)
'''
```
#### File: API/capture/model.py
```python
import tensorflow as tf
from keras.models import *
from keras.layers import *
from keras import backend as K
import numpy as np
import random
import datetime
import os
import gc
import string
characters = string.digits + string.ascii_lowercase+string.ascii_uppercase + ' '
width, height, n_len, n_class = 200, 60, 6, len(characters)
def make_parallel(model, gpu_count):
    import tensorflow as tf
    def get_slice(data, idx, parts):
        import tensorflow as tf
        shape = tf.shape(data)
        size = tf.concat([shape[:1] // parts, shape[1:]], axis=0)
        stride = tf.concat([shape[:1] // parts, shape[1:] * 0], axis=0)
        start = stride * idx
        return tf.slice(data, start, size)
    outputs_all = []
    for i in range(len(model.outputs)):
        outputs_all.append([])
    # Place a copy of the model on each GPU, each getting a slice of the batch
    for i in range(gpu_count):
        with tf.device('/gpu:%d' % i):
            with tf.name_scope('tower_%d' % i) as scope:
                inputs = []
                # Slice each input into a piece for processing on this GPU
                j = 0
                for x in model.inputs:
                    input_shape = tuple(x.get_shape().as_list())[1:]
                    slice_n = Lambda(get_slice, output_shape=input_shape, arguments={'idx': i, 'parts': gpu_count}, name='input_%d_%d' % (i, j))(x)
                    inputs.append(slice_n)
                    j = j + 1
                outputs = model(inputs)
                if not isinstance(outputs, list):
                    outputs = [outputs]
                # Save all the outputs for merging back together later
                for l in range(len(outputs)):
                    outputs_all[l].append(outputs[l])
    with tf.device('/cpu:0'):
        merged = []
        for outputs in outputs_all:
            merged.append(concatenate(outputs, axis=0, name='loss_tensor'))
    return Model(inputs=model.inputs, outputs=merged)
def ctc_lambda_func(args):
    y_pred, labels, input_length, label_length = args
    y_pred = y_pred[:, 2:, :]
    return K.ctc_batch_cost(labels, y_pred, input_length, label_length)
def get_model(gpu_num=1):
    input_tensor = Input((width, height, 3), name="input_tensor")
    x = input_tensor
    for i in range(3):
        x = Conv2D(32, (3, 3), activation='relu')(x)
        x = Conv2D(32, (3, 3), activation='relu')(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
    conv_shape = x.get_shape()
    x = Reshape(name="x_shape_tensor", target_shape=(int(conv_shape[1]), int(conv_shape[2] * conv_shape[3])))(x)
    x = Dense(32, activation='relu')(x)
    lstm_1 = LSTM(128, return_sequences=True, name='lstm1')(x)
    lstm_1b = LSTM(128, return_sequences=True, go_backwards=True, name='lstm1_b')(x)
    lstm1_merged = add([lstm_1, lstm_1b])
    lstm_2 = LSTM(128, return_sequences=True, name='lstm2')(lstm1_merged)
    lstm_2b = LSTM(128, return_sequences=True, go_backwards=True, name='lstm2_b')(lstm1_merged)
    x = concatenate([lstm_2, lstm_2b])
    x = Dropout(0.25)(x)
    x = Dense(n_class, activation='softmax', name="output_tensor")(x)
    labels = Input(name='the_labels', shape=[n_len], dtype='float32')
    input_length = Input(name='input_length', shape=[1], dtype='int64')
    label_length = Input(name='label_length', shape=[1], dtype='int64')
    loss_out = Lambda(ctc_lambda_func, output_shape=(1,), name='ctc')([x, labels, input_length, label_length])
    model = Model(inputs=[input_tensor, labels, input_length, label_length], outputs=[loss_out], name="base_model")
    if gpu_num > 1:
        model = make_parallel(model, gpu_num)
        model.compile(loss={'loss_tensor': lambda y_true, y_pred: y_pred}, optimizer='adadelta')
    else:
        model.compile(loss={'ctc': lambda y_true, y_pred: y_pred}, optimizer='adadelta')
    return model
```
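The `get_slice` closure in `make_parallel` computes each tower's slice purely from the batch dimension: every GPU gets `shape[0] // parts` rows starting at offset `size * idx`. A minimal NumPy sketch of that start/size arithmetic (toy data and shapes, invented for illustration):
```python
import numpy as np

batch = np.arange(8 * 3).reshape(8, 3)    # toy batch: 8 samples, 3 features
parts = 2
for idx in range(parts):
    size = batch.shape[0] // parts        # rows handed to each GPU tower
    start = size * idx                    # offset of this tower's slice
    chunk = batch[start:start + size]     # what tf.slice(data, start, size) extracts
    print(idx, chunk.shape)               # -> 0 (4, 3) then 1 (4, 3)
```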
#### File: Lesson9/API/user.py
```python
import copy
import re
import urllib
import cv2
import numpy as np
import urllib3
from . import config
from .course import Course, courseTimeParser
import datetime
import threading
from queue import Queue
urllib3.disable_warnings()
def capture_idft_worker(qin):
from . import capture
while True:
img, qout = qin.get()
ret = capture.Detector_single_image(img)
qout.put(ret)
queue_input = Queue()
capture_thd = threading.Thread(target=capture_idft_worker, args=(queue_input,))
capture_thd.daemon = True
capture_thd.start()
def multhrdFunc(img):
qout = Queue()
queue_input.put((img, qout))
ret = qout.get()
return ret
class UserException(Exception):
def __init__(self, msg, err):
super(UserException, self).__init__()
self.msg = msg
self.err = err
def __str__(self):
return "UserError : " + self.msg
def __repr__(self):
return '<UserException msg : "%s", errcode : %d>' % (self.msg, self.err)
def udpCookie(old_cookie, new_str):
ret = copy.copy(old_cookie)
for sub in new_str.split(','):
key, val = sub.split(';')[0].split('=', 1)
ret[key] = val
return ret
def parseCookie(cookie):
ret = ''
for key in cookie:
ret += key + '=' + cookie[key] + '; '
if ret[-2:] == '; ':
ret = ret[: -2]
return ret
def getCap(data):
img = cv2.imdecode(np.frombuffer(data, np.uint8), cv2.IMREAD_ANYCOLOR)
return img
'''
while True:
cv2.imshow('captcha', img)
key = cv2.waitKey(0)
'''
def makeHeader(cookie=''):
if isinstance(cookie, dict):
c = parseCookie(cookie)
else:
c = cookie
if c != '':
return {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Connection': 'keep-alive',
'Cookie': c
}
else:
return {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Connection': 'keep-alive'
}
class User:
def __init__(self, name, passwd):
self.name = name
self.passwd = passwd
self.cookie = {}
self.courses = []
self.broken = False
try:
ctr = 0
while not self.login():
ctr += 1
if ctr > 20:
raise UserException('Unknown Error', -1)
except UserException as e:
print('User:', self.name, 'Error:', e)
self.broken = True
def login(self, veriFunc=multhrdFunc) -> bool:
"""
Login, use vertFunc to get the verification code
:param veriFunc: a function input: Image(numpy), output: Verification Code (str)
:return: True if success
"""
if config.DEBUG:
print("User : %s login!" % self.name)
return True
if self.broken:
raise UserException("Broken user", 3)
self.cookie = {}
http = urllib3.PoolManager()
# get cookie and capture
res = http.request('GET', config.CAPTCHAR_PATH,
headers=makeHeader())
if 'Set-Cookie' in res.headers:
self.cookie = udpCookie(self.cookie, res.headers['Set-Cookie'])
else:
self.broken = True
raise UserException("Failed to get initial cookies!", 0)
# visit login page
http.request('GET', config.LOGIN_PAGE, headers=makeHeader(self.cookie))
if not callable(veriFunc):
self.broken = True
raise UserException("No default verification function now!", 1)
req_body = urllib.parse.urlencode({
'j_username': self.name,
'j_password': self.passwd,
'captchaflag': 'login1',
'_login_image_': veriFunc(getCap(res.data))
})
res = http.request('POST', config.LOGIN_POST_PAGE,
headers=dict(makeHeader(self.cookie), **{
'Content-Type': 'application/x-www-form-urlencoded',
'Charset': 'UTF-8',
'Origin': config.WEB_PREFIX,
'Referer': config.LOGIN_PAGE,
'Cache-Control': 'max-age=0'
}), body= req_body, redirect=False
)
# success
if 'Location' in res.headers and res.headers['Location'].find('/zhjw.do') != -1:
return True
if ('Location' in res.headers and res.headers['Location'].find('login_error=error') != -1):
self.broken = True
raise UserException("Wrong username or password!", 2)
# failure
return False
def udpCourses(self):
if config.DEBUG:
print("User : %s update courses!" % self.name)
return
http = urllib3.PoolManager()
res = http.request('GET', config.COURSE_TABLE_PAGE, headers=makeHeader(self.cookie))
data = res.data.decode("GBK")
ret = re.findall(re.compile(r'<span class="trunk">([^<]*)</span>'), data)
if len(ret) == 0 or len(ret) % 13 != 0:
return False
cs = []
for i in range(13, len(ret), 13):
cs.append(Course(kch=ret[i + 1], kxh=ret[i + 2], teacher=ret[i + 4],
name=ret[i], score=int(ret[i + 6]), time= courseTimeParser(ret[i + 5])))
self.courses = cs
return True
def request(self, method: str, url: str, headers=None, **kwargs):
if headers is None:
headers = {}
assert isinstance(method, str) and isinstance(url, str) and isinstance(headers, dict), "Parameter type error"
http = urllib3.PoolManager()
ret = http.request(method, url, headers=dict(makeHeader(self.cookie), **headers), **kwargs)
if ret is None:
print(datetime.datetime.now().strftime('%m-%d %H:%M:%S'), '[error]', 'User:', self.name,
'request error', 'url:', url, 'headers:', dict(makeHeader(self.cookie), **headers), 'args:', kwargs)
return ret
def __str__(self):
return self.name
def __repr__(self):
return '<User %s, cookie=%s, broken=%s>' % (self.name, str(self.cookie), str(self.broken))
def __hash__(self):
return hash(self.name)
def __eq__(self, other):
return self.name == other.name
```
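The two cookie helpers are pure string manipulation, so their behaviour is easy to pin down in isolation. A self-contained sketch with an invented `Set-Cookie` value (a `.strip()` is added on the key here; the original `udpCookie` keeps any leading space):
```python
def udp_cookie(old_cookie, new_str):      # same parsing as udpCookie above
    ret = dict(old_cookie)
    for sub in new_str.split(','):
        key, val = sub.split(';')[0].split('=', 1)
        ret[key.strip()] = val
    return ret

def parse_cookie(cookie):                 # same serialisation as parseCookie above
    return '; '.join('{}={}'.format(k, v) for k, v in cookie.items())

jar = udp_cookie({}, 'JSESSIONID=abc123; Path=/, route=node1; Path=/')
print(parse_cookie(jar))                  # JSESSIONID=abc123; route=node1
```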
#### File: a710128/Lesson9/manager.py
```python
import pickle
import threading
from room.room import Room
from API import User, UserException
class Manager:
def __init__(self, rooms=None, ship=None):
if rooms is None:
rooms = []
if ship is None:
ship = {}
assert isinstance(rooms, list) and isinstance(ship, dict), "Parameter type error"
self.rooms = rooms # Room
self.ship = ship # User -> Room
self.__lock = threading.Lock()
def dump(self):
"""pickle.dump({
'rooms': self.rooms,
'ship': self.ship
}, open('backup.pkl', 'wb'))"""
pass
def getUser(self, uid: str) -> User:
assert isinstance(uid, str), "Parameter type error"
ret = None
self.__lock.acquire()
for u in self.ship.keys():
if u.name == uid:
ret = u
break
self.__lock.release()
return ret
def addUser(self, uid: str, passwd: str) -> User:
assert isinstance(uid, str) and isinstance(passwd, str), "Parameter type error"
print('Try to login User:', uid)
tmp_u = self.getUser(uid)
if tmp_u is not None:
print('User:', uid, 'already logined!')
if tmp_u.name == uid and tmp_u.passwd == passwd:
return tmp_u
else:
return None
try:
user = User(uid, passwd)
except UserException as e:
print('User:', uid, e)
return None
self.__lock.acquire()
self.ship[user] = None
print('User:', uid, 'login ok!')
self.__lock.release()
return user
def delUser(self, uid):
user = self.getUser(uid)
if user is None:
return
self.__lock.acquire()
if self.ship[user] is not None:
self.ship[user].delUser(user)
self.ship[user] = None
del self.ship[user]
del user
self.__lock.release()
def exitRoom(self, uid):
if isinstance(uid, User):
user = uid
else:
user = self.getUser(uid)
if user is None:
return
self.__lock.acquire()
if self.ship[user] is not None:
self.ship[user].delUser(user)
self.ship[user] = None
self.__lock.release()
def joinRoom(self, room, user: User):
assert isinstance(room, (Room, int)) and isinstance(user, User), "Parameter type error"
self.__lock.acquire()
if self.ship[user] is not None:
self.ship[user].delUser(user)
self.ship[user] = None
if isinstance(room, int):
it = 0
while it < len(self.rooms):
if self.rooms[it].roomId == room:
break
it += 1
if it < len(self.rooms):
self.rooms[it].addUser(user)
self.ship[user] = self.rooms[it]
else:
if room in self.rooms:
room.addUser(user)
else:
self.rooms.append(room)
room.addUser(user)
threading.Thread(target=self._roomHandler, args=(room, )).start()
self.ship[user] = room
self.__lock.release()
def _roomHandler(self, room: Room):
room.loop() # start room loop
# del room
self.__lock.acquire()
it = 0
while it < len(self.rooms):
if self.rooms[it] == room:
break
it += 1
assert it < len(self.rooms), "Unknown error when room handler"
for u in room.user:
self.ship[u] = None
del self.rooms[it]
self.__lock.release()
def run(self):
thds = [threading.Thread(target=self._roomHandler, args=(r, )) for r in self.rooms]
for t in thds:
t.start()
def getUserRoom(self, user: User):
return self.ship[user]
def getRoomInfo(self):
ret = []
self.__lock.acquire()
for room in self.rooms:
if not room.broken and (len(room.name) == 0 or room.name[0] != '$'):
ret.append({
'name': room.name,
'desc': room.desc,
'id': room.roomId,
'workers': room.workers
})
self.__lock.release()
return ret
def getRoom(self, rid) -> Room:
ret = None
self.__lock.acquire()
for room in self.rooms:
if room.roomId == rid:
ret = room
break
self.__lock.release()
return ret
def createRoom(self, name: str, desc: str, flh: str, kch: str, kcm: str, user: User, type: int):
room = Room(name, desc, 3, flh, kch, kcm, type)
self.exitRoom(user)
self.joinRoom(room, user)
``` |
{
"source": "A-725-K/GoRaBoN",
"score": 3
} |
#### File: GoRaBoN/plot/plot_results.py
```python
import os
import sys
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.animation as ani
EPOCHS = None
RES_DIR = '../res'
def abort():
print('Usage: python3 plot_results.py <EPOCHS>')
sys.exit(1)
if len(sys.argv) != 2:
abort()
try:
EPOCHS = int(sys.argv[1])
except:
abort()
if EPOCHS < 1:
abort()
fig = plt.figure()
plt.axis('off')
# read and store data from output files
frames = []
for i in range(EPOCHS):
filename = RES_DIR + f'/rbn_{i}.csv'
data = pd.read_csv(filename, header=None, index_col=False).to_numpy()
frames.append([plt.imshow(data, animated=True)])
# clean res directory
if os.name == 'nt':
RES_DIR = RES_DIR.replace('/', '\\')
os.system(f'del /Q {RES_DIR}')
else:
os.system(f'rm -f {RES_DIR}/*')
# save the animation
animation = ani.ArtistAnimation(fig, frames, interval=250, blit=True, repeat_delay=1000)
animation.save(f'{RES_DIR}/RBN.gif')
``` |
{
"source": "a74nh/adventofcode",
"score": 4
} |
#### File: adventofcode/2020/day13.py
```python
import re
fname="day13.input"
from dataclasses import dataclass, field
from typing import List
with open(fname) as f:
data = []
lines=f.readlines()
start_time=int(lines[0])
def get(x):
if x=="x":
return x
else:
return int(x)
busses=[get(i) for i in lines[1].split(",")]
# print(start_time)
# print(busses)
def first_bus_time_post(bus,start_time):
if bus=="x":
return 0
return start_time-(start_time%bus)+bus
first_t=0
first_bus=0
for bus in busses:
t=first_bus_time_post(bus,start_time)
# print(bus,start_time,t)
if t>0 and (first_t ==0 or t<first_t):
first_t=t
first_bus=bus
print((first_t-start_time)*first_bus)
```
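Note that `first_bus_time_post(bus, t) - t` reduces to `bus - (t % bus)` whenever `t` is not an exact multiple of the bus ID. A worked check against the puzzle's published example (timestamp 939, buses 7/13/59/31/19, expected answer 295):
```python
start_time = 939
busses = [7, 13, 59, 31, 19]
waits = {bus: bus - (start_time % bus) for bus in busses}
best = min(waits, key=waits.get)          # bus 59 departs at 944, a 5 minute wait
assert best * waits[best] == 295
print(best, waits[best])                  # 59 5
```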
#### File: adventofcode/2020/day2.py
```python
import re
fname="day2.input"
with open(fname) as f:
# w, h = [int(x) for x in next(f).split()] # read first line
data = []
for line in f: # read rest of lines
s = re.split('[- +:\n]',line)
d = { "min":int(s[0]), "max":int(s[1]), "letter":s[2], "password":s[4] }
data.append(d)
# print(data)
def count_str(fullstring,match):
x=0
for c in fullstring:
if match == c:
x=x+1
return x
valid=[]
for d in data:
count = count_str(d["password"], d["letter"])
if count >= d["min"] and count <= d["max"]:
valid.append(d["password"])
print(len(valid))
valid=[]
for d in data:
# print(d["password"])
# print(d["min"]+1)
# print(d["max"]+1)
try:
m1 = d["password"][d["min"]-1] == d["letter"]
except:
m1 = False
try:
m2 = d["password"][d["max"]-1] == d["letter"]
except:
m2 = False
if m1 ^ m2:
valid.append(d["password"])
# print("{}: {} [{} {} {}] [{} {} {}] {}".format(m1 ^ m2, d["letter"],
# m1, d["min"], d["password"][d["min"]-1],
# m2, d["max"], d["password"][d["max"]-1],
# d["password"]))
print(len(valid))
```
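Part two's rule is that exactly one of the two 1-indexed positions may hold the letter, which is what `m1 ^ m2` expresses. A quick check against the puzzle's published example rows:
```python
examples = [
    ({'min': 1, 'max': 3, 'letter': 'a', 'password': 'abcde'}, True),
    ({'min': 1, 'max': 3, 'letter': 'b', 'password': 'cdefg'}, False),
    ({'min': 2, 'max': 9, 'letter': 'c', 'password': 'ccccccccc'}, False),
]
for d, expected in examples:
    m1 = d['password'][d['min'] - 1] == d['letter']
    m2 = d['password'][d['max'] - 1] == d['letter']
    assert (m1 ^ m2) == expected
```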
#### File: adventofcode/2020/day3.py
```python
import re
fname="day3.input"
with open(fname) as f:
data = []
for line in f:
data.append(line.rstrip())
def get_location(data, x, y):
if y >= len(data):
raise ValueError
line=data[y]
x = x % len(line)
# print("{} {} {}".format(x,y,line))
return line[x]
def reached_bottom(data, x, y):
if y >= len(data):
return True
return False
def is_tree_at_position(data, x, y):
# print("{} {}".format(x,y))
c = get_location(data, x, y)
if c == "#":
return True
elif c == ".":
return False
else:
print(c)
raise ValueError
def do_run(data,move_x,move_y):
x=0
y=0
num_trees=0
while True:
x=x+move_x
y=y+move_y
if reached_bottom(data,x,y):
break
if is_tree_at_position(data,x,y):
num_trees=num_trees+1
return num_trees
t=do_run(data,3,1)
print(t)
run_patterns=[[1,1],[3,1],[5,1],[7,1],[1,2]]
t=1
for p in run_patterns:
t=t*do_run(data,p[0],p[1])
print(t)
```
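The only subtlety in `get_location` is the horizontal wrap: the map repeats to the right, so the column is taken modulo the row width. A tiny illustration on an invented five-character row:
```python
row = '..#.#'                 # invented pattern row
for x in (2, 7, 12):          # all reduce to column 2 after wrapping
    assert row[x % len(row)] == '#'
```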
#### File: adventofcode/2020/day5.py
```python
import re
fname="day5.input"
with open(fname) as f:
data = []
for line in f:
line = line.rstrip()
data.append({"row":line[0:7], "seat":line[7:]})
def decode_data(row_data, min, max):
# print(row_data,min,max)
try:
pivot=int((max-min)/2)+min
if row_data[0] == "F" or row_data[0] == "L":
return decode_data(row_data[1:],min,pivot)
if row_data[0] == "B" or row_data[0] == "R":
return decode_data(row_data[1:],pivot+1,max)
raise ValueError
except IndexError:
return min
def get_row_number(row):
return decode_data(row,0,127)
def get_column_number(seat):
return decode_data(seat,0,7)
def get_seat_id(row_data):
ret = (get_row_number(row_data["row"])*8) + get_column_number(row_data["seat"])
# print(row_data,ret)
return ret
def find_missing_seat_id(seat_ids):
for num, name in enumerate(seat_ids, start=seat_ids[0]):
if num != name:
# print(num,name)
return num
seat_ids=[]
max_id=0
for r in data:
sid=get_seat_id(r)
max_id=max(max_id,sid)
seat_ids.append(sid)
seat_ids.sort()
print(max_id)
print(find_missing_seat_id(seat_ids))
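# Worked check (illustrative) with the puzzle's published example:
# "FBFBBFFRLR" decodes to row 44, column 5, seat ID 44 * 8 + 5 = 357.
assert get_row_number("FBFBBFF") == 44
assert get_column_number("RLR") == 5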
``` |
{
"source": "a74nh/armiesofsigmar",
"score": 3
} |
#### File: armiesofsigmar/armiesofsigmar/battalion.py
```python
import copy
import re
from printoption import PrintOption
from unit import Unit
class Battalion(object):
def __init__(self, unit_config):
self.unit_config = unit_config
self.units = []
for c in self.unit_config["units"]:
self.units.append(Unit(c, "unit"))
def __str__(self):
if self.unitsize() == 0:
return ""
line = [("{}({}):[".format(self.name(), self.points()))]
unitline = []
for unit in self.units:
unitstr = unit.str_battalion()
if len(unitstr) > 0:
unitline.append(unitstr)
line.append(", ".join(sorted(unitline, key=lambda x: re.sub('[^A-Za-z]+', '', x).lower())))
line.append("]")
return " ".join(line)
def fullstr(self):
if self.unitsize() == 0:
return ""
line = [("\t{} (Warscroll Battalion)".format(self.name()))]
line.append("\t\tTotal Points: {}".format(self.points()))
unitline = []
for unit in self.units:
unitstr = unit.fullstr(tabs=2)
if len(unitstr) > 0:
unitline.append(unitstr)
line.append("\n".join(sorted(unitline, key=lambda x: re.sub('[^A-Za-z]+', '', x).lower())))
line.append("")
return "\n".join(line)
def __repr__(self):
return "{}:{}".format(self.name(),str(self.units))
def __len__(self):
return len(self.units)
def __getitem__(self,index):
if index < len(self.units):
return self.units[index]
raise IndexError("index out of range")
def __setitem__(self,index,item):
if index < len(self.units):
self.units[index] = item
return
raise IndexError("index out of range")
def is_type(self, unittype):
return "battalion" == unittype
def unitsize(self):
size = 0
for unit in self.units:
size = size + unit.unitsize()
return size
#Points of just the battalion (no units)
def battalion_points(self):
return self.unit_config.get("points", 0)
def points(self):
if self.unitsize() == 0:
return 0
points = self.battalion_points()
for unit in self.units:
points = points + unit.points()
return points
def name(self):
return self.unit_config["name"]
def is_unique(self):
return False
# return self.unit_config.get("unique", False)
def roles(self):
return self.unit_config.get("roles", [])
def keywords(self):
return []
# return self.unit_config.get("keywords", [])
def move(self, wounds_suffered=0):
move = self.unit_config.get("move", 0)
if type(move) is not dict:
return move
if wounds_suffered > self.wounds_per_unit():
wounds_suffered = self.wounds_per_unit()
while wounds_suffered > 0 and move.get(wounds_suffered, None) is None:
wounds_suffered = wounds_suffered - 1
return "{}*".format(move.get(wounds_suffered, 0))
def wounds_per_unit(self):
return self.unit_config.get("wounds", 0)
# Total number of wounds across all units
def total_wounds(self):
return self.wounds_per_unit() * self.unitsize()
def wounds_str(self):
wounds = self.wounds_per_unit()
if self.unitsize() == 1:
return str(wounds)
return "{}({})".format(wounds, wounds * self.unitsize())
def save(self):
save = self.unit_config.get("save", 0)
if type(save) is str and save == "-":
return 6
return save
def save_str(self):
save = self.unit_config.get("save", 0)
if type(save) is str:
return save
return "{}+".format(save)
def bravery(self):
return self.unit_config.get("bravery", 0)
def sum_roles(self, roles):
for unit in self.units:
if unit.count > 0:
for r in unit.roles():
roles[r] = roles.get(r,0) + unit.count
def is_valid(self, restrict_battalion, restrict_config, final=True, showfails=PrintOption.SILENT):
#TODO: Currently only support 1 or 0 instances of a single battalion
count = 0
if self.unitsize() > 0:
count = 1
# Check unit meets min restriction
if final and count < restrict_battalion["min"]:
if showfails.value > PrintOption.SILENT.value:
print "FAIL MIN restrict {} {} {} : {}".format(self.name(), restrict_battalion["min"], count, self)
return False
if self.unitsize() == 0:
return True
# Check unit meets max restriction
if restrict_battalion["max"] != -1 and count >restrict_battalion["max"]:
if showfails.value > PrintOption.SILENT.value:
print "FAIL MAX restrict {} {} {} : {}".format(self.name(), restrict_battalion["max"], count, self)
return False
#Check units and count up roles
for unit in self.units:
#TODO: Restrict from both restrict config and unit_config !!!
restrict_unit = unit.unit_config
restrict_keywords = []
if not unit.is_valid(restrict_unit, restrict_keywords, final, showfails):
return False
return True
```
#### File: armiesofsigmar/armiesofsigmar/configloader.py
```python
import os.path
import yaml
import sys
import itertools
SELF_DIR = os.path.dirname(sys.modules[__name__].__file__)
RULEBOOK_LATEST="ghb2017"
DEFAULT_ARMY_SIZE="vanguard"
def _mergeDictsOverwriteEmpty(d1, d2):
res = d1.copy()
for k,v in d2.items():
if k not in d1 or d1[k] == '':
res[k] = v
return res
def load_units(rulebook=RULEBOOK_LATEST, unitlists=["all"], recursive=False):
ret = {"units":[], "allies":[], "battalions": []}
retdict = {}
for faction in unitlists:
filename = os.path.join(SELF_DIR, "units", "{}_{}.yaml".format(rulebook, faction.replace(" ", "_")))
try:
with open(filename, 'r') as f:
book = yaml.load(f)
for sectionname, section in book.items():
if type(section) is str:
loadedsection = load_units(rulebook, [sectionname], recursive)
ret["units"] = ret["units"] + loadedsection["units"]
else:
filenamew = os.path.join(SELF_DIR, "units", "warscrolls_{}.yaml".format(sectionname.replace(" ", "_")))
with open(filenamew, 'r') as fw:
fbook = yaml.load(fw)
for sectiontype in ["units", "battalions"]:
fsection = fbook[sectionname].get(sectiontype, [])
for unit in section.get(sectiontype, []):
for funit in fsection:
if funit["name"] == unit["name"]:
# print funit["name"]
unit.update(funit)
ret[sectiontype] = ret[sectiontype] + section.get(sectiontype,[])
if not recursive:
ret["allies"] = ret["allies"] + section["allies"]
except IOError:
pass
if not recursive:
ret["allies"] = load_units(rulebook, ret["allies"], True)["units"]
for battalion in ret["battalions"]:
new_units = []
for name, config in battalion["units"].items():
for u in itertools.chain(ret["units"], ret["allies"]):
if u["name"] == name:
new_units.append(_mergeDictsOverwriteEmpty(config, u))
break
battalion["units"]=new_units
# print ret
return ret
def load_restictions(filename):
with open(filename, 'r') as f:
return yaml.load(f)
def load_rules(rulebook=RULEBOOK_LATEST, size=DEFAULT_ARMY_SIZE):
filename = os.path.join(SELF_DIR, "rules", "{}_{}.yaml".format(rulebook, size))
with open(filename, 'r') as f:
return yaml.load(f)
def load_warscrolls(unitlists=["all"]):
ret = []
for faction in unitlists:
filename = os.path.join(SELF_DIR, "units", "warscrolls_{}.yaml".format(faction.replace(" ", "_")))
with open(filename, 'r') as f:
book = yaml.load(f)
for sectionname, section in book.items():
if type(section) is str:
ret = ret + load_warscrolls([sectionname])
else:
ret = ret + section
return ret
```
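`_mergeDictsOverwriteEmpty` keeps every value from the first dict except empty strings, which the second dict may fill in. A standalone restatement of that logic with invented data:
```python
def merge_overwrite_empty(d1, d2):        # same logic as _mergeDictsOverwriteEmpty
    res = d1.copy()
    for k, v in d2.items():
        if k not in d1 or d1[k] == '':
            res[k] = v
    return res

base = {'name': 'Dryads', 'points': ''}
scroll = {'points': 100, 'move': 7}
assert merge_overwrite_empty(base, scroll) == {'name': 'Dryads', 'points': 100, 'move': 7}
```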
#### File: armiesofsigmar/armiesofsigmar/unit.py
```python
import re
from printoption import PrintOption
# A unit consists of a number of instances of a single model.
class Unit(object):
def __init__(self, unit_config, unittype):
# Dictionary holding all the stats for a unit
self.unit_config = unit_config
# The number of multiples of a minimum sized unit.
# For example, Drayds has a minimum unit size of 10.
# Therefore, 20 Dryads would have a count of 2.
# Technically, you could have 18 Dryads in a unit, but
# the cost would still be the same as 20. Therefore this
# system disallows that.
self.count = 0
# Type of the unit. Main unit, ally or battalion
self.unittype = unittype
def __str__(self):
if self.count == 0:
return ""
if self.unitsize() > 1:
return "{} {} ({})".format(self.unitsize(),
self.name(),
self.points())
return "{} ({})".format(self.name(),
self.points())
def __repr__(self):
return "{} {} ({})".format(self.unitsize(),
self.name(),
self.points())
def fullstr(self, tabs=1, points=True):
tabs_str = "\t" * tabs
ret = []
if self.count == 0:
return ""
if self.unitsize() > 1:
ret.append("{}{} {} ({} units)".format(tabs_str, self.unitsize(), self.name(), self.count))
else:
ret.append("{}{} {}".format(tabs_str, self.unitsize(), self.name()))
tabs_str = "\t" * (tabs+1)
if points:
ret.append("{}Points: {}".format(tabs_str, self.points()))
if self.roles():
ret.append("{}Roles: {}".format(tabs_str, ", ".join(self.roles())))
ret.append("{}M/W/S/B: {}/{}/{}/{}".format(tabs_str,
self.move(),
self.wounds_str(),
self.save_str(),
self.bravery()))
return "\n".join(ret)
def str_battalion(self):
if self.count == 0:
return ""
if self.count > 1:
return "{} {}".format(self.unitsize(), self.name())
return "{}".format(self.name())
# Increase the multiples of minimum size in the unit
def inc(self, num):
self.count = self.count + num
if self.count < 0:
self.count = 0
def is_type(self, unittype):
return self.unittype == unittype
# The number of individual figures in the unit.
# Always a multiple of unit minimum size.
def unitsize(self):
return self.unit_config["min"] * self.count
def points(self):
# Config points are per minimum unit
return self.unit_config["points"] * self.count
def name(self):
return self.unit_config["name"]
def is_unique(self):
return self.unit_config.get("unique", False)
def roles(self):
return self.unit_config.get("roles", [])
def keywords(self):
return self.unit_config.get("keywords", [])
def move(self, wounds_suffered=0):
move = self.unit_config.get("move", 0)
if type(move) is not dict:
return move
if wounds_suffered > self.wounds_per_unit():
wounds_suffered = self.wounds_per_unit()
while wounds_suffered > 0 and move.get(wounds_suffered, None) is None:
wounds_suffered = wounds_suffered - 1
return "{}*".format(move.get(wounds_suffered, 0))
def wounds_per_unit(self):
return self.unit_config.get("wounds", 0)
# Total number of wounds across all units
def total_wounds(self):
return self.wounds_per_unit() * self.unitsize()
def wounds_str(self):
wounds = self.wounds_per_unit()
if self.unitsize() == 1:
return str(wounds)
return "{}({})".format(wounds, wounds * self.unitsize())
def save(self):
save = self.unit_config.get("save", 0)
if type(save) is str and save == "-":
return 6
return save
def save_str(self):
save = self.unit_config.get("save", 0)
if type(save) is str:
return save
return "{}+".format(save)
def bravery(self):
return self.unit_config.get("bravery", 0)
def is_valid(self, restrict_unit, restrict_keywords, final=True, showfails=PrintOption.SILENT):
# Check unit meets min restriction
if final and self.count < restrict_unit["min"]:
if showfails.value > PrintOption.SILENT.value:
print "FAIL MIN restrict {} {} {} : {}".format(self.name(), restrict_unit["min"], self.count, self)
return False
if self.count == 0:
return True
# Check unit meets max restriction
if restrict_unit["max"] != -1 and self.count >restrict_unit["max"]:
if showfails.value > PrintOption.SILENT.value:
print "FAIL MAX restrict {} {} {} : {}".format(self.name(), restrict_unit["max"], self.count, self)
return False
# Only allow 1 of each unique model
if self.is_unique() and self.count > 1 :
if showfails.value > PrintOption.SILENT.value:
print "FAIL unique {} {} : {}".format(self.name(), self.count, self)
return False
# Check keyword match. Empty list means allow anything
match = False
if not restrict_keywords:
match = True
for restrict_keyword in restrict_keywords:
if restrict_keyword in self.keywords():
match = True
if not match:
if showfails.value > PrintOption.SILENT.value:
print "FAIL Keyword restrict: {} {} {} : {}".format(self.name(), self.keywords(), restrict_keywords, self)
return False
return True
```
#### File: armiesofsigmar/armiesofsigmar/units.py
```python
import copy
import re
from printoption import PrintOption
from unit import Unit
class Units(object):
def __init__(self, units_config):
self.units_config = units_config
self.units = []
for c in units_config:
self.units.append(Unit(c, "unit"))
def __str__(self):
unitline = []
for unit in self.units:
unitstr = str(unit)
if len(unitstr) > 0:
unitline.append(str(unit))
return ", ".join(sorted(unitline, key=lambda x: re.sub('[^A-Za-z]+', '', x).lower()))
def __repr__(self):
return str(self.units)
def __len__(self):
return len(self.units)
def fullstr(self):
unitline = []
for unit in self.units:
unitstr = unit.fullstr()
if len(unitstr) > 0:
unitline.append(unitstr)
return "\n".join(sorted(unitline, key=lambda x: re.sub('[^A-Za-z]+', '', x).lower()))
def __getitem__(self,index):
if index < len(self.units):
return self.units[index]
raise IndexError("index out of range")
def __setitem__(self,index,item):
if index < len(self.units):
self.units[index] = item
return
raise IndexError("index out of range")
def add(self, name):
for u in self.units:
if u.name() == name:
u.inc(1)
return
raise KeyError('Unknown unit: {}'.format(name))
def unitsize(self):
size = 0
for unit in self.units:
size = size + unit.unitsize()
return size
def points(self):
if self.unitsize() == 0:
return 0
points = 0
for unit in self.units:
points = points + unit.points()
return points
def wounds(self):
wounds = 0
for unit in self.units:
wounds = wounds + unit.total_wounds()
return wounds
def _bravery_sum(self):
x = 0
for u in self.units:
x = x + (u.unitsize() * u.bravery())
return x
def _save_mul_wounds_sum(self):
x = 0
for u in self.units:
x = x + (u.total_wounds() * u.save())
return x
def _restrict_unit(self, restrict_config, name, unittype):
default = { "min": 0, "max": -1 }
#TODO: Battalions restrict by default until better support
if unittype == "battalions":
default = { "min": 0, "max": 0 }
return restrict_config[unittype].get(name, restrict_config[unittype].get("__Others", default))
def sum_roles(self, roles):
for unit in self.units:
if unit.count > 0:
for r in unit.roles():
roles[r] = roles.get(r,0) + unit.count
def is_valid(self, restrict_config, final=True, showfails=PrintOption.SILENT):
for unit in self.units:
restrict_unit = self._restrict_unit(restrict_config, unit.name(), "units")
restrict_keywords = restrict_config.get("keywords", [])
if not unit.is_valid(restrict_unit, restrict_keywords, final, showfails):
return False
return True
``` |
{
"source": "a74nh/dotfiles",
"score": 3
} |
#### File: bin/gdb/check_GNU_style.py
```python
import argparse
import sys
from check_GNU_style_lib import check_GNU_style_file
def main():
parser = argparse.ArgumentParser(description='Check GNU coding style.')
parser.add_argument('file', help = 'File with a patch')
parser.add_argument('-f', '--format', default = 'stdio',
help = 'Display format',
choices = ['stdio', 'quickfix'])
args = parser.parse_args()
filename = args.file
format = args.format
if filename == '-':
check_GNU_style_file(sys.stdin, None, format)
else:
with open(filename, 'rb') as diff_file:
check_GNU_style_file(diff_file, 'utf-8', format)
main()
``` |
{
"source": "a76yyyy/ipdata",
"score": 3
} |
#### File: a76yyyy/ipdata/database.py
```python
import pymysql
import sqlite3
import os
from configs import mysql
from func_timeout import func_set_timeout,exceptions
import sys
class mysql_Database:
host = mysql.host
port = mysql.port
user = mysql.user
password = mysql.password
charset = mysql.charset
def __init__(self,*args):
if len(args) == 1:
self.db = args[0]
self.connection = pymysql.connect(host=self.host, port=self.port, user=self.user, password=self.password, database=self.db, charset=self.charset)
elif len(args) == 2:
self.db = args[0]
self.connect_timeout = args[1]
self.connection = pymysql.connect(host=self.host, port=self.port, user=self.user, password=self.password, database=self.db, charset=self.charset, connect_timeout=self.connect_timeout)
elif len(args) == 3:
self.db = args[0]
self.connect_timeout = args[1]
self.read_timeout = args[2]
self.connection = pymysql.connect(host=self.host, port=self.port, user=self.user, password=self.password, database=self.db, charset=self.charset, connect_timeout=self.connect_timeout, read_timeout=self.read_timeout)
else:
print('Invalid number of arguments')
sys.exit()
self.cursor = self.connection.cursor()
def insert(self, query, params):
try:
self.cursor.executemany(query, params)
self.connection.commit()
except Exception as e:
print(e)
self.connection.rollback()
def execute(self,code):
try:
self.cursor.execute(code)
self.connection.commit()
except Exception as e:
print(e)
self.connection.rollback()
def executemany(self,code,slist):
try:
self.cursor.executemany(code,slist)
self.connection.commit()
except Exception as e:
print(e)
self.connection.rollback()
def query(self, query, *args):
self.cursor = self.connection.cursor(pymysql.cursors.DictCursor)  # get a cursor that runs SQL and returns each row as a dict
result = None
timeout = None
if args and len(args) == 1:
timeout = args[0]
if timeout:
@func_set_timeout(timeout)
def timelimited():
self.cursor.execute(query)
result = self.cursor.fetchall()
return result
try:
result = timelimited()
except exceptions.FunctionTimedOut:
print("timeout!")
self.cursor.close()
return result
else:
self.cursor.execute(query)
result = self.cursor.fetchall()
return result
def __del__(self):
self.connection.close()
class sqlite3_Database:
def __init__(self,db_file,connect_timeout=5.0):
self.connection = sqlite3.connect(db_file,timeout=connect_timeout)
self.cursor = self.connection.cursor()
def insert(self, query, params):
try:
self.cursor.executemany(query, params)
self.connection.commit()
except Exception as e:
print(e)
self.connection.rollback()
def execute(self,code):
try:
self.cursor.execute(code)
self.connection.commit()
except Exception as e:
print(e)
self.connection.rollback()
def executemany(self,code,slist):
try:
self.cursor.executemany(code,slist)
self.connection.commit()
except Exception as e:
print(e)
self.connection.rollback()
@staticmethod
def dictFactory(cursor,row):
"""将sql查询结果整理成字典形式"""
d={}
for index,col in enumerate(cursor.description):
d[col[0]]=row[index]
return d
def query(self, query, *args):
self.connection.row_factory = self.dictFactory  # make the cursor return each row as a dict
self.cursor = self.connection.cursor()
result = None
timeout = None
if args and len(args) == 1:
timeout = args[0]
if timeout:
@func_set_timeout(timeout)
def timelimited():
self.cursor.execute(query)
result = self.cursor.fetchall()
return result
try:
result = timelimited()
except exceptions.FunctionTimedOut:
print("timeout!")
self.cursor.close()
return result
else:
self.cursor.execute(query)
result = self.cursor.fetchall()
return result
def __del__(self):
self.connection.close()
```
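The `dictFactory` trick is standard sqlite3 usage: assigning a `row_factory` on the connection before creating a cursor makes every fetched row come back as a dict keyed by column name. A self-contained sketch with an invented table:
```python
import sqlite3

def dict_factory(cursor, row):            # same idea as sqlite3_Database.dictFactory
    return {col[0]: row[i] for i, col in enumerate(cursor.description)}

conn = sqlite3.connect(':memory:')
conn.row_factory = dict_factory
cur = conn.cursor()
cur.execute('CREATE TABLE ip (addr TEXT, country TEXT)')
cur.execute("INSERT INTO ip VALUES ('1.2.3.4', 'XX')")
print(cur.execute('SELECT * FROM ip').fetchall())   # [{'addr': '1.2.3.4', 'country': 'XX'}]
```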
#### File: ipdata/IP_Sync/file_set.py
```python
import os
def file_set(file= None, open_type = None):
if not os.path.exists(file):
if not os.path.exists(os.path.dirname(file)):
os.makedirs(os.path.dirname(file))
if open_type == 'dir':
os.makedirs(file)
if open_type == 'file':
with open(file, 'wb+'):
pass
if not os.path.exists(file):
if os.path.isdir(file):
os.makedirs(file)
elif os.path.isfile(file):
if not os.path.exists(os.path.dirname(file)):
os.makedirs(os.path.dirname(file))
with open(file, 'wb+'):
pass
return 0
return 1
```
#### File: ipdata/IP_Sync/ipv6Update.py
```python
import os
import sys
import re
import time
import logging
import struct
import py7zr
import urllib.request
import __init__
from ipSearch import IPv6Loader
from ipUpdate import get_fetcher
from file_set import file_set
from typing import Union, Optional
try:
from configs import default_txt_update
except ImportError:
default_txt_update = False  # whether to auto-update the ipv6data.txt file when the data file version is unchanged; False = do not update by default
data_dir = __init__.data_dir
tmp_dir =__init__.tmp_dir
DEFAULT_FILE_LOCATION = __init__.DEFAULT_FILE_LOCATION
logger = logging.getLogger(__name__)
def db_down(filename, version_file):
fetcher = get_fetcher()
curr_version, check_time, update_time = (0, 0, 0)
file_set(version_file,'file')
with open(version_file, "rb+") as handle:
content = handle.read()
if len(content) > 0:
curr_version, check_time, update_time = struct.unpack("<3I", content)
print('Local IPv6 data file version: ' + str(curr_version))
print('Last update check: ' + str(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(check_time))))
print('Last data update: ' + str(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(update_time))))
print('')
print('Checking for IPv6 database updates: \n---------------processing, please wait---------------')
check_time = int(time.time())
host = 'http://ip.zxinc.org'
print('Address: ' + host)
try:
D = IPv6Loader(filename)
except OSError as e:
print('Note: the existing IPv6 data file could not be opened:', e, file=sys.stderr)
D = None
if host.lower().startswith('http'):
req = urllib.request.Request(
host,
headers = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0',
})
else:
raise ValueError from None
with urllib.request.urlopen(req, timeout=30) as res:
page = res.read().decode('utf-8')
m = re.search(r'href="([^"]+)".*?版本(\d{8})', page)
date = int(m.group(2))
remote_file = m.group(1)
if D and date <= curr_version:
noup = 'The current IPv6 data file (version '+ str(curr_version) + ') is already up to date!'
print(noup)
with open(version_file, "wb+") as handle:
handle.write(struct.pack("<3I", date, check_time, update_time))
return 0
upstart = 'New IPv6 data file version: ' + str(date)
print(upstart)
print( "------------------------------------------- \n " )
print('Updating IPv6 data file: '+ filename + '\n---------------processing, please wait---------------')
update_time = int(time.time())
tmp_path = os.path.join(tmp_dir, remote_file)
data = fetcher(tmp_path,f'{host}/{remote_file}')
if not data:
print('Download failed, retrying...')
data = fetcher(tmp_path,f'{host}/{remote_file}')
if not data:
return -3
try:
with py7zr.SevenZipFile(tmp_path, 'r') as archive:
archive.extract(targets=['ipv6wry.db'],path=tmp_dir)
except:
logger.error(f'Failed to extract {tmp_path}!')
return -5
if filename is None:
return data
if type(filename) is str:
# save to filename
try:
tmp_path = os.path.join(tmp_dir, 'ipv6wry.db')
with open(tmp_path, 'rb') as f:
d = f.read()
try:
safe_overwrite(filename, d, mode='wb')
finally :
os.remove(tmp_path)
old_c = D.count if D else 0
D = IPv6Loader(filename)
print('Updated! IPv6 record count %d->%d.' % (old_c, D.count),
file=sys.stderr)
with open(version_file, "wb+") as handle:
handle.write(struct.pack("<3I", date, check_time, update_time))
return len(data)
except:
logger.error('Failed to save the final file!')
return -6
else:
logger.error('Failed to save the final file!')
return -6
def safe_overwrite(fname: str,
data: Union[bytes, str],
*,
method: str = 'write',
mode: str = 'w',
encoding: Optional[str] = None) -> None:
# FIXME: directory has no read perm
# FIXME: symlinks and hard links
tmpname = fname + '.tmp'
# if not using "with", write can fail without exception
with open(tmpname, mode, encoding=encoding) as f:
getattr(f, method)(data)
# see also: https://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/
f.flush()
os.fsync(f.fileno())
# if the above write failed (because disk is full etc), the old data should be kept
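# design note: on POSIX, os.replace(tmpname, fname) overwrites the target
# atomically, which would close the brief window below in which `fname`
# does not exist between os.remove and os.rename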
try:
if os.path.exists(fname):
os.remove(fname)
except :
os.remove(tmpname)
return
os.rename(tmpname, fname)
def db_down_info(filename, version_file,ipv4update=False):
file_set(filename)
file_set(version_file)
ret = db_down(filename, version_file)
if ret > 0:
print('Successfully wrote %s (%s bytes)' %
(filename, format(ret, ','))
)
print( "------------------------------------------- \n " )
return 1
if ret == 0:
print( "-------------------------------------------" )
if not ipv4update and not default_txt_update:
print( "正在退出IP数据库更新任务, 请稍候... \n " )
sys.exit(0)
else:
print()
return 0
else:
print('Write failed, error code: %d' % ret)
print( "-------------------------------------------" )
if not default_txt_update:
print( "正在退出IP数据库更新任务, 请稍候... \n " )
sys.exit(1)
else:
print()
return -1
def main():
import argparse
parser = argparse.ArgumentParser(description='ZXinc IPv6 database query and update')
parser.add_argument('IP', nargs='*', help='IP addresses to look up')
parser.add_argument('-f', '--file', default=DEFAULT_FILE_LOCATION, help='path to the database file')
parser.add_argument('-v6', '--version_file', default=os.path.abspath(data_dir+os.path.sep+"ipv6data_version.bin"), help='path to the database version file')
parser.add_argument('-A', '--all', action='store_true', default=False, help='show all records')
parser.add_argument('-u', '--update', action='store_true', default=False, help='update the database')
parser.add_argument('-Q', '--more-quiet', action='store_true', default=False, help='never print anything while updating the database')
args = parser.parse_args()
if args.update:
db_down_info(args.file,args.version_file)
try:
D = IPv6Loader(args.file)
except Exception as e:
print(e)
db_down_info(args.file,args.version_file)
D = IPv6Loader(args.file)
ips = args.IP
if not ips:
if not args.all:
print(D)
else:
for info in D.iter():
loc = ' '.join(info.info).strip()
print(f'{info.start} - {info.end} {loc}')
elif len(ips) == 1:
print(' '.join(D.lookup(ips[0]).info))
else:
for ip in ips:
print(D.lookup(ip))
if __name__ == '__main__':
main()
``` |
{
"source": "a771853580/fastersever",
"score": 2
} |
#### File: fastrunner/templatetags/custom_tags.py
```python
import json
import time
from django import template
register = template.Library()
@register.filter(name='json_dumps')
def json_dumps(value):
try:
return json.dumps(json.loads(value), indent=4, separators=(',', ': '), ensure_ascii=False)
except Exception:
return value
@register.filter(name='convert_timestamp')
def convert_timestamp(value):
return time.strftime("%Y--%m--%d %H:%M:%S", time.localtime(int(float(value))))
```
#### File: fastrunner/views/run.py
```python
from rest_framework.decorators import api_view
from fastrunner.utils import loader
from rest_framework.response import Response
from fastrunner.utils.parser import Format
from fastrunner import models
"""运行方式
"""
@api_view(['POST'])
def run_api(request):
""" run api by body
"""
name = request.data.pop('config')
api = Format(request.data)
api.parse()
config = None if name == '请选择' else eval(models.Config.objects.get(name=name).body)
summary = loader.debug_api(api.testcase, api.project, config=config)
return Response(summary)
@api_view(['GET'])
def run_api_pk(request, **kwargs):
"""run api by pk and config
"""
api = models.API.objects.get(id=kwargs['pk'])
name = request.query_params["config"]
config = None if name == '请选择' else eval(models.Config.objects.get(name=name).body)
test_case = eval(api.body)
summary = loader.debug_api(test_case, api.project.id, config=config)
return Response(summary)
@api_view(['POST'])
def run_api_tree(request):
"""run api by tree
{
project: int
relation: list
name: str
async: bool
}
"""
# order by id default
project = request.data['project']
relation = request.data["relation"]
back_async = request.data["async"]
name = request.data["name"]
config = request.data["config"]
config = None if name == '请选择' else eval(models.Config.objects.get(name=config).body)
test_case = []
for relation_id in relation:
api = models.API.objects.filter(project__id=project, relation=relation_id).order_by('id').values('body')
for content in api:
test_case.append(eval(content['body']))
if back_async:
loader.async_debug_api(test_case, project, name, config=config)
summary = loader.TEST_NOT_EXISTS
summary["msg"] = "接口运行中,请稍后查看报告"
else:
summary = loader.debug_api(test_case, project, config=config)
return Response(summary)
@api_view(["POST"])
def run_testsuite(request):
"""debug testsuite
{
name: str,
body: dict
}
"""
body = request.data["body"]
project = request.data["project"]
name = request.data["name"]
testcase_list = []
config = None
for test in body:
test = loader.load_test(test, project=project)
if "base_url" in test["request"].keys():
config = test
continue
testcase_list.append(test)
summary = loader.debug_api(testcase_list, project, name=name, config=config)
return Response(summary)
@api_view(["GET"])
def run_testsuite_pk(request, **kwargs):
"""run testsuite by pk
{
project: int,
name: str
}
"""
pk = kwargs["pk"]
test_list = models.CaseStep.objects. \
filter(case__id=pk).order_by("step").values("body")
project = request.query_params["project"]
name = request.query_params["name"]
testcase_list = []
config = None
for content in test_list:
body = eval(content["body"])
if "base_url" in body["request"].keys():
config = eval(models.Config.objects.get(name=body["name"], project__id=project).body)
continue
testcase_list.append(body)
summary = loader.debug_api(testcase_list, project, name=name, config=config)
return Response(summary)
@api_view(['POST'])
def run_suite_tree(request):
"""run suite by tree
{
project: int
relation: list
name: str
async: bool
}
"""
# order by id default
project = request.data['project']
relation = request.data["relation"]
back_async = request.data["async"]
report = request.data["name"]
config = None
testcase = []
for relation_id in relation:
suite = models.Case.objects.filter(project__id=project,
relation=relation_id).order_by('id').values('id', 'name')
for content in suite:
test_list = models.CaseStep.objects. \
filter(case__id=content["id"]).order_by("step").values("body")
# [{scripts}, {scripts}]
testcase_list = []
for step_content in test_list:
body = eval(step_content["body"])
if "base_url" in body["request"].keys():
config = eval(models.Config.objects.get(name=body["name"], project__id=project).body)
continue
testcase_list.append(body)
# [[{scripts}, {scripts}], [{scripts}, {scripts}]]
testcase.append(testcase_list)
if back_async:
loader.async_debug_suite(testcase, project, report, suite, config=config)
summary = loader.TEST_NOT_EXISTS
summary["msg"] = "用例运行中,请稍后查看报告"
else:
summary = loader.debug_suite(testcase, project, suite, config=config)
return Response(summary)
@api_view(["POST"])
def run_test(request):
"""debug single test
{
body: dict
project :int
config: null or dict
}
"""
body = request.data["body"]
config = request.data.get("config", None)
project = request.data["project"]
if config:
config = eval(models.Config.objects.get(project=project, name=config["name"]).body)
summary = loader.debug_api(loader.load_test(body), project, config=config)
return Response(summary)
``` |
{
"source": "a7744hsc/LeetCode-Java",
"score": 4
} |
#### File: LeetCode-Java/LeetCode-python/17.letter-combinations-of-a-phone-number.py
```python
from typing import List
class Solution:
def letterCombinations(self, digits: str) -> List[str]:
map= {
'2':['a','b','c'],
'3':['d','e','f'],
'4':['g','h','i'],
'5':['j','k','l'],
'6':['m','n','o'],
'7':['p','q','r','s'],
'8':['t','u','v'],
'9':['w','x','y','z']
}
if not digits:
return []
def get_combinations(digits,prefix):
if len(digits)==1:
return [prefix + c for c in map[digits]]
else:
result = []
for c in map[digits[0]]:
result+= get_combinations(digits[1:],prefix+c)
return result
return get_combinations(digits,'')
# @lc code=end
```
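An equivalent iterative formulation builds the same strings with `itertools.product`, which walks the cartesian product in the same order as the recursion above. A sketch with a reduced mapping:
```python
from itertools import product

def letter_combinations(digits, mapping):
    if not digits:
        return []
    return [''.join(p) for p in product(*(mapping[d] for d in digits))]

m = {'2': 'abc', '3': 'def'}
assert letter_combinations('23', m) == ['ad', 'ae', 'af', 'bd', 'be', 'bf',
                                        'cd', 'ce', 'cf']
```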
#### File: LeetCode-Java/LeetCode-python/307.range-sum-query-mutable.py
```python
from typing import List
class NumArray:
def __init__(self, nums: List[int]):
self.binary_indexed_tree = [0]*len(nums)
self.origin_list = [0]*len(nums)
for i,n in enumerate(nums):
self.update(i,n)
def update(self, index: int, val: int) -> None:
delta = val - self.origin_list[index]
self.origin_list[index] = val
if index ==0:
self.binary_indexed_tree[0]= val
index+=1
while index<len(self.binary_indexed_tree):
self.binary_indexed_tree[index]+=delta
index = index + (index&-index)
def sumRange(self, left: int, right: int) -> int:
if right ==0:
return self.binary_indexed_tree[0]
result = 0
if left>0:
if left ==1:
result-=self.binary_indexed_tree[0]
else:
left = left-1
while left>0:
result-=self.binary_indexed_tree[left]
left-=(left&-left)
while right>0:
result+=self.binary_indexed_tree[right]
right-=(right&-right)
return result
# Your NumArray object will be instantiated and called as such:
# obj = NumArray(nums)
# obj.update(index,val)
# param_2 = obj.sumRange(left,right)
# @lc code=end
```
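A quick way to gain confidence in the Fenwick-tree bookkeeping above is a randomized cross-check against a plain list; this sketch assumes the `NumArray` class above is in scope:
```python
import random

nums = [random.randint(-10, 10) for _ in range(20)]
arr = NumArray(nums[:])
for _ in range(200):
    if random.random() < 0.5:
        i, v = random.randrange(20), random.randint(-10, 10)
        nums[i] = v
        arr.update(i, v)
    else:
        left = random.randrange(20)
        right = random.randrange(left, 20)
        assert arr.sumRange(left, right) == sum(nums[left:right + 1])
print('all range-sum checks passed')
```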
#### File: LeetCode-Java/LeetCode-python/674. Longest Continuous Increasing Subsequence.py
```python
from typing import List
class Solution:
def findLengthOfLCIS(self, nums: List[int]) -> int:
if not nums:
return 0
prev = nums[0]
max_len = 1
current_len = 1
for n in nums[1:]:
if n>prev:
current_len+=1
prev = n
else:
max_len = current_len if current_len > max_len else max_len
current_len=1
prev=n
if current_len > max_len:
max_len = current_len
return max_len
if __name__ == '__main__':
test_cases = [([1,3,5,4,7],3),([2,2,2,2,2],1)]
s = Solution()
for i,o in test_cases:
assert o == s.findLengthOfLCIS(i), f'failed on test case test case {i},{o}'
print('All case passed')
```
#### File: LeetCode-Java/LeetCode-python/74.search-a-2-d-matrix.py
```python
from typing import List
class Solution:
def searchMatrix(self, matrix: List[List[int]], target: int) -> bool:
for inner_list in matrix:
if target>=inner_list[0] and target<=inner_list[-1]:
return target in inner_list
return False
# @lc code=end
```
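The per-row scan above is easy to read, but the classic O(log(m*n)) approach for this problem treats the matrix as one sorted array and binary-searches it, mapping the flat index back to a cell. A minimal sketch:
```python
def search_matrix(matrix, target):
    m, n = len(matrix), len(matrix[0])
    lo, hi = 0, m * n - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        val = matrix[mid // n][mid % n]   # flat index -> (row, col)
        if val == target:
            return True
        if val < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return False

grid = [[1, 3, 5, 7], [10, 11, 16, 20], [23, 30, 34, 60]]
assert search_matrix(grid, 3) and not search_matrix(grid, 13)
```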
#### File: LeetCode-Java/LeetCode-python/95.unique-binary-search-trees-ii.py
```python
from collections import deque
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
def __repr__(self) -> str:
stack = deque()
stack.append(self)
result = []
while stack:
node = stack.popleft()
if node:
result.append(node.val)
if node.left or node.right:
stack.append(node.left)
stack.append(node.right)
else:
result.append('#')
return '[' + ','.join(map(str, result)) + ']'
from typing import List,Optional
class Solution:
@staticmethod
def generateSubTree(left,right):
if left > right:
return [None]
if left == right:
return [TreeNode(left)]
res = []
for i in range(left,right+1):
left_subtrees = Solution.generateSubTree(left,i-1)
right_subtrees = Solution.generateSubTree(i+1,right)
for l in left_subtrees:
for r in right_subtrees:
root = TreeNode(i)
root.left = l
root.right = r
res.append(root)
return res
def generateTrees(self, n: int) -> List[Optional[TreeNode]]:
return Solution.generateSubTree(1,n) if n else []
if __name__ == "__main__":
s = Solution()
print(s.generateTrees(3))
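# The number of unique BSTs on n nodes is the n-th Catalan number,
# C(2n, n) // (n + 1); for n = 3 that is 5 (illustrative check):
import math
assert len(s.generateTrees(3)) == math.comb(6, 3) // 4   # == 5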
# @lc code=end
``` |
{
"source": "a7744hsc/TensorFlow-Speech-Recognition-Challenge",
"score": 2
} |
#### File: TensorFlow-Speech-Recognition-Challenge/src/models.py
```python
import tensorflow as tf
def generate_fc_model(fingerprint_input, model_settings, is_training):
fingerprint_size = model_settings['fingerprint_size']
label_count = model_settings['label_count']
weights = tf.Variable(
tf.truncated_normal([fingerprint_size, label_count], stddev=0.001))
bias = tf.Variable(tf.zeros([label_count]))
logits = tf.matmul(fingerprint_input, weights) + bias
return logits, tf.placeholder(tf.float32, name='dropout_prob')
def create_conv_model(fingerprint_input, model_settings, is_training):
if is_training:
dropout_prob = tf.placeholder(tf.float32, name='dropout_prob')
input_frequency_size = model_settings['dct_coefficient_count']
input_time_size = model_settings['spectrogram_length']
fingerprint_4d = tf.reshape(fingerprint_input,
[-1, input_time_size, input_frequency_size, 1])
first_filter_width = 8
first_filter_height = 20
first_filter_count = 64
first_weights = tf.Variable(
tf.truncated_normal(
[first_filter_height, first_filter_width, 1, first_filter_count],
stddev=0.01))
first_bias = tf.Variable(tf.zeros([first_filter_count]))
first_conv = tf.nn.conv2d(fingerprint_4d, first_weights, [1, 1, 1, 1],
'SAME') + first_bias
first_relu = tf.nn.relu(first_conv)
if is_training:
first_dropout = tf.nn.dropout(first_relu, dropout_prob)
else:
first_dropout = first_relu
max_pool = tf.nn.max_pool(first_dropout, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')
second_filter_width = 4
second_filter_height = 10
second_filter_count = 64
second_weights = tf.Variable(
tf.truncated_normal(
[
second_filter_height, second_filter_width, first_filter_count,
second_filter_count
],
stddev=0.01))
second_bias = tf.Variable(tf.zeros([second_filter_count]))
second_conv = tf.nn.conv2d(max_pool, second_weights, [1, 1, 1, 1],
'SAME') + second_bias
second_relu = tf.nn.relu(second_conv)
if is_training:
second_dropout = tf.nn.dropout(second_relu, dropout_prob)
else:
second_dropout = second_relu
second_conv_shape = second_dropout.get_shape()
second_conv_output_width = second_conv_shape[2]
second_conv_output_height = second_conv_shape[1]
second_conv_element_count = int(
second_conv_output_width * second_conv_output_height *
second_filter_count)
flattened_second_conv = tf.reshape(second_dropout,
[-1, second_conv_element_count])
label_count = model_settings['label_count']
final_fc_weights = tf.Variable(
tf.truncated_normal(
[second_conv_element_count, label_count], stddev=0.01))
final_fc_bias = tf.Variable(tf.zeros([label_count]))
final_fc = tf.matmul(flattened_second_conv, final_fc_weights) + final_fc_bias
if is_training:
return final_fc, dropout_prob
else:
return final_fc
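# Shape bookkeeping for the conv stack above, using the common 98x40
# MFCC fingerprint as illustrative numbers (not taken from this repo):
# both 'SAME' convolutions preserve height x width and the single 2x2
# max-pool halves them, so 98x40 -> 49x20 after pooling; the flattened
# second-conv output is then 49 * 20 * 64 = 62720 features into the FC layer.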
``` |
{
"source": "a78239636/lsfm-lzr",
"score": 3
} |
#### File: lsfm-lzr/lsfm/camera.py
```python
import numpy as np
from menpo.transform import Rotation, Translation
from menpo3d.camera import PerspectiveProjection, PerspectiveCamera
# For now we mirror these here - should migrate to menpo conv. constructors
# after https://github.com/menpo/menpo/pull/777 comes in.
def rotation_from_3d_ccw_angle_around_y(theta, degrees=True):
r"""
Convenience constructor for 3D CCW rotations around the y axis
Parameters
----------
theta : `float`
The angle of rotation about the origin
degrees : `bool`, optional
If ``True`` theta is interpreted as a degree. If ``False``, theta is
interpreted as radians.
Returns
-------
rotation : :map:`Rotation`
A 3D rotation transform.
"""
if degrees:
# convert to radians
theta = theta * np.pi / 180.0
return Rotation(np.array([[np.cos(theta), 0, np.sin(theta)],
[0, 1, 0],
[-np.sin(theta), 0, np.cos(theta)]]),
skip_checks=True)
def rotation_from_3d_ccw_angle_around_z(theta, degrees=True):
r"""
Convenience constructor for 3D CCW rotations around the z axis
Parameters
----------
theta : `float`
The angle of rotation about the origin
degrees : `bool`, optional
If ``True`` theta is interpreted as a degree. If ``False``, theta is
interpreted as radians.
Returns
-------
rotation : :map:`Rotation`
A 3D rotation transform.
"""
if degrees:
# convert to radians
theta = theta * np.pi / 180.0
return Rotation(np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]]),
skip_checks=True)
def perspective_camera_for_template(img_shape, focal_length_mult=1.1,
pose_angle_deg=0):
f = np.array(img_shape).max() * focal_length_mult
rot_z = rotation_from_3d_ccw_angle_around_z(180)
rot_y = rotation_from_3d_ccw_angle_around_y(180 + pose_angle_deg)
rotation = rot_z.compose_before(rot_y)
translation = Translation([0, 0, +3])
projection = PerspectiveProjection(f, img_shape)
return PerspectiveCamera(rotation, translation, projection)
```
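The docstrings above describe standard CCW rotations; a plain-NumPy sanity check (independent of menpo) that the z-rotation matrix sends the x unit vector to the y axis at 90 degrees:
```python
import numpy as np

theta = np.pi / 2
R = np.array([[np.cos(theta), -np.sin(theta), 0],
              [np.sin(theta),  np.cos(theta), 0],
              [0,              0,             1]])
assert np.allclose(R @ np.array([1.0, 0.0, 0.0]), [0.0, 1.0, 0.0])
```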
#### File: lsfm/data/basel.py
```python
from scipy.io import loadmat
from menpo.shape import ColouredTriMesh
import lsfm.io as lio
from . import DATA_DIR, save_template
def load_mean_from_basel(path):
mm = loadmat(str(path))
trilist = mm['tl'][:, [0, 2, 1]] - 1
mean_points = mm['shapeMU'].reshape(-1, 3)
mean_colour = mm['texMU'].reshape(-1, 3) / 255
return ColouredTriMesh(mean_points, trilist=trilist, colours=mean_colour)
def load_basel_template_metadata():
return lio.import_pickle(DATA_DIR / 'basel_template_metadata.pkl')
def generate_template_from_basel_and_metadata(basel, meta):
template = ColouredTriMesh(basel.points[meta['map_tddfa_to_basel']],
trilist=meta['tddfa_trilist'],
colours=basel.colours[
meta['map_tddfa_to_basel']])
template.landmarks['ibug68'] = meta['landmarks']['ibug68']
template.landmarks['nosetip'] = meta['landmarks']['nosetip']
return template
def save_template_from_basel(path):
basel = load_mean_from_basel(path)
meta = load_basel_template_metadata()
template = generate_template_from_basel_and_metadata(basel, meta)
save_template(template, overwrite=True)
```
#### File: lsfm-lzr/lsfm/import_pyl.py
```python
from pathlib import Path
from plyfile import PlyData,PlyProperty, PlyListProperty
import numpy as np
from lsfm import landmark_mesh, landmark_and_correspond_mesh
from menpo.shape import ColouredTriMesh, TexturedTriMesh, TriMesh, PointCloud
import lsfm.io as lio
from lsfm.landmark_my import landmark_mesh_my
def headline(string):
print("\n\n----------{0}----------\n".format(string))
def generate_trilist(points):
from scipy.spatial import Delaunay # expensive
trilist = Delaunay(points).simplices
return trilist
def import_obj():
james = Path('/home/li_gang/TestFile/NewInput2/james.obj')
man = Path('/home/li_gang/TestFile/NewInput2/man.obj')
obj_path = man
mesh = lio.import_mesh(obj_path)
return mesh
#print("mesh = ", mesh)
#landmark_mesh(mesh)
def import_full_ply(filename, verbose=True):
file_dir = str(filename)  # path to the .ply file
print("ply file name = ", file_dir)
from menpo3d.io.input.mesh.base import vtk_ensure_trilist
import vtk
ply_importer = vtk.vtkPLYReader()
ply_importer.SetFileName(str(file_dir))
ply_importer.Update()
polydata = ply_importer.GetOutput()
trilist = np.require(vtk_ensure_trilist(polydata), requirements=['C'])
plydata = PlyData.read(file_dir)
vertexs = plydata['vertex']
if (verbose is True):
headline("Meta Info of Ply File")
for element in plydata.elements:
print("Meta = {0}".format( element.name))
print(vertexs.dtype)
print("This is TriList : ", trilist)
max_range = 255.0
points_list = []
colors_list = []
for verx in vertexs:
points_list.append( (verx[0], verx[1], verx[2]) )
colors_list.append( (verx[3] * (1.0 / max_range)
, verx[4] * (1.0 / max_range), verx[5]* (1.0 / max_range)) )
nd_point = np.array(points_list, dtype=np.float64)
nd_color = np.array(colors_list, dtype=np.float64)
if (verbose is True):
headline("Ndarray INFO")
print("Point shape = {0} \nColor shape = {1}".format(nd_point.shape, nd_color.shape))
mesh = ColouredTriMesh(nd_point, trilist=trilist, colours=nd_color)
if (verbose is True):
headline("Mesh INFO")
print(mesh)
print("color Shape =", mesh.colours.shape)
print("tri Shape = ", mesh.trilist.shape)
print("tri List = ", mesh.trilist)
return mesh
if __name__ == '__main__':
opt1 = '/home/li_gang/TestFile/HLSInput/face-reconstruction-template.ply'
cmesh = import_full_ply(opt1)
jmesh = import_obj()
if isinstance(jmesh, ColouredTriMesh):
        print("yes, it is a ColouredTriMesh")
else:
print("never thought!")
#import_obj()
```
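Indexing `verx[0]`..`verx[5]` above assumes the vertex element stores its properties in x, y, z, red, green, blue order. plyfile also exposes properties by name, which is order-independent; a sketch (assuming the standard property names appear in the PLY header):

```python
# Order-independent variant of the vertex unpacking above.
import numpy as np
from plyfile import PlyData

def read_coloured_vertices(path):
    vertex = PlyData.read(path)['vertex']
    points = np.stack([vertex['x'], vertex['y'], vertex['z']], axis=-1).astype(np.float64)
    colours = np.stack([vertex['red'], vertex['green'], vertex['blue']], axis=-1) / 255.0
    return points, colours
```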
#### File: lsfm-lzr/lsfm/visualize.py
```python
import numpy as np
from menpo.image import Image
from menpo.shape import ColouredTriMesh
from menpo.transform import AlignmentSimilarity
from menpo3d.rasterize import rasterize_mesh
from scipy.stats import chi2
from .camera import perspective_camera_for_template
from .data import load_template
from .shading import lambertian_shading
from matplotlib import pyplot as plt
def rasterize_mesh_at_template(mesh, img_shape=(640, 480),
pose_angle_deg=0, shaded=False):
camera = perspective_camera_for_template(img_shape,
pose_angle_deg=pose_angle_deg)
mesh_aligned = AlignmentSimilarity(mesh, load_template()).apply(mesh)
if shaded:
mesh_aligned = lambertian_shading(mesh_aligned)
return rasterize_mesh(camera.apply(mesh_aligned), img_shape)
def visualize_nicp_weighting(template, weighting):
colours = ((weighting[:, None] * np.array([1, 0, 0])) +
((1 - weighting[:, None]) * np.array([1, 1, 1])))
print('min: {}, max: {}'.format(weighting.min(), weighting.max()))
ColouredTriMesh(template.points, trilist=template.trilist,
colours=colours).view()
def visualize_pruning(w_norm, n_retained,
title='Initial model weights vs theoretical for pruning'):
fig, ax1 = plt.subplots()
ax1.set_title(title)
    ax1.hist(w_norm, density=True, bins=200, alpha=0.6, histtype='stepfilled',
             range=[0, n_retained * 5])
ax1.axvline(x=n_retained, linewidth=1, color='r')
ax1.set_ylabel('PDF', color='b')
ax2 = ax1.twinx()
ax2.set_ylabel('Survival Function', color='r')
ax1.set_xlabel('w_norm')
x = np.linspace(chi2.ppf(0.001, n_retained),
chi2.ppf(0.999, n_retained), 100)
    ax2.plot(x, chi2.sf(x, n_retained),
             'g-', lw=1, alpha=0.6, label='chi2 sf')
ax1.plot(x, chi2.pdf(x, n_retained),
'r-', lw=1, alpha=0.6, label='chi2 pdf')
def visualize_nicp_result(mesh):
l = rasterize_mesh_at_template(mesh, pose_angle_deg=+20, shaded=True)
r = rasterize_mesh_at_template(mesh, pose_angle_deg=-20, shaded=True)
return Image(np.concatenate([l.pixels, r.pixels], axis=-1))
```
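`visualize_pruning` compares the empirical `w_norm` histogram against the chi-squared density with `n_retained` degrees of freedom; the matching numeric cut-off can be read straight off the chi2 quantile function. A sketch (the 0.999 level is an assumption, mirroring the plot range above):

```python
# Hypothetical pruning threshold from the same chi2 model the plot uses.
import numpy as np
from scipy.stats import chi2

def pruning_threshold(n_retained, level=0.999):
    # w_norm values above this quantile are implausible under chi2(n_retained)
    return chi2.ppf(level, n_retained)

w_norm = np.random.chisquare(df=100, size=10_000)
keep = w_norm < pruning_threshold(100)
print(keep.mean())  # ~0.999 by construction
```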
#### File: a78239636/lsfm-lzr/selectInput.py
```python
import os
import shutil
from pathlib import Path
def get_obj_name(files):
for file in files:
if (str(file).endswith(".obj")):
return (str(file).split('.')[0], True)
return (None, False)
def filter_specify_file(root, files, name, type, verbose=False):
output = []
for file in files:
if (file.startswith(name) and os.path.splitext(file)[-1] in type):
output.append(os.path.join(root, file))
    if verbose:
        if len(output) != 2:
            raise RuntimeError("Filter ERROR: expected an .obj/.bmp pair, got {0}".format(output))
        print(output)
return output
def inverse_find(base, verbose=False):
output = []
for root, dirs, files in os.walk(base):
name, ans = get_obj_name(files)
if (ans is True):
tmp = filter_specify_file(root, files, name, ['.obj', '.bmp'])
if (len(tmp) > 0):
output.append(tmp)
return output
def copy(output, target_dir, verbose=False):
for file_pair in output:
for file in file_pair:
source = file
target_file = os.path.join(target_dir, source.split('/')[-1])
if verbose:
print("Source = ", file)
print("target = ", target_file)
if Path(target_file).exists():
print("Skipped : {0}".format(source))
else:
shutil.copy(source, target_dir)
                if Path(target_file).exists():
                    print("Copied successfully : {0}".format(source))
                else:
                    raise RuntimeError("Error: failed to copy {0}".format(source))
if __name__ == '__main__':
base_path = "/home/li_gang/TestFile/dataBase"
target_dir = '/home/li_gang/TestFile/LargeInput1'
out = inverse_find(base_path)
copy(out, target_dir)
``` |
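A pathlib-based sketch of the same obj/bmp pairing walk, for comparison (the glob pattern is an assumption; the os.walk version above is what the script actually uses):

```python
# Hypothetical pathlib variant of inverse_find: pair each .obj with its
# same-stem .bmp texture under a base directory.
from pathlib import Path

def find_obj_bmp_pairs(base):
    pairs = []
    for obj in Path(base).rglob('*.obj'):
        bmp = obj.with_suffix('.bmp')
        if bmp.exists():
            pairs.append((obj, bmp))
    return pairs
```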
{
"source": "a7b23/CS236G-project",
"score": 2
} |
#### File: CS236G-project/bigan_experiments/evaluate.py
```python
import argparse
import os
import torch
import save_features
from classify_linear import eval_linear
from nearest_neighbour_acc_1 import eval_knn
from save_features import get_data_loaders, get_embeddings
join = os.path.join
batch_size = 64
latent_size = 256
cuda_device = "0"
def tocuda(x):
if opt.use_cuda:
return x.cuda()
return x
save_features.tocuda = tocuda
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', required=True,
                        help='cifar10 | svhn | cifar_mnist | cifar_mnist_cifar | cifar_mnist_mnist | timagenet',
choices=['cifar10', 'svhn', 'cifar_mnist', 'cifar_mnist_mnist', 'cifar_mnist_cifar', 'timagenet'])
parser.add_argument('--dataroot', default="/atlas/u/a7b23/data", help='path to dataset')
parser.add_argument('--use_cuda', type=bool, default=True)
parser.add_argument('--model_path', required=True)
opt = parser.parse_args()
os.environ["CUDA_VISIBLE_DEVICES"] = cuda_device
    if opt.dataset != "timagenet":
from model import *
else:
from model_timagenet import *
encoder_state_dict = torch.load(opt.model_path)
netE = Encoder(latent_size, True)
netE.load_state_dict(encoder_state_dict)
netE = tocuda(netE)
print("Model restored")
train_loader, test_loader = get_data_loaders(opt)
train_features, train_labels = get_embeddings(train_loader, netE, None)
test_features, test_labels = get_embeddings(test_loader, netE, None)
print("features inferred")
if opt.dataset != 'cifar_mnist':
knn_acc = eval_knn(train_features, train_labels, test_features, test_labels)
logistic_acc = eval_linear('logistic', train_features, train_labels, test_features, test_labels)
print(f"KNN={knn_acc * 100:.2f}, Linear={logistic_acc * 100:.2f}")
else:
for idx, name in enumerate(['cifar', 'mnist']):
knn_acc = eval_knn(train_features, train_labels[:, idx], test_features, test_labels[:, idx])
logistic_acc = eval_linear('logistic', train_features, train_labels[:, idx], test_features,
test_labels[:, idx])
print(f"{name}: KNN={knn_acc * 100:.2f}, Linear={logistic_acc * 100:.2f}")
```
#### File: CS236G-project/bigan_experiments/nn_model.py
```python
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset
class Net(nn.Module):
def __init__(self, inp_size, out_size):
super(Net, self).__init__()
# defining or initialising the layers of the network
self.model = nn.Linear(inp_size, out_size)
self.model.cuda()
self.optim = optim.Adam(self.model.parameters(), lr=1e-2)
self.loss_fn = nn.CrossEntropyLoss()
def fit(self, train_data, train_labels):
train_data = torch.from_numpy(train_data).float()
train_labels = torch.from_numpy(train_labels).long()
        train_dset = TensorDataset(train_data, train_labels)
        train_loader = DataLoader(train_dset, batch_size=4096, shuffle=True,
                                  num_workers=4, pin_memory=True)
self.model.train()
optimizer = self.optim
epochs = 100
print_freq = 10
for j in range(epochs):
avg_loss = 0
for i, (x, y) in enumerate(train_loader):
# got a batch of data and labels
x = x.cuda()
y = y.cuda()
out = self.model(x)
# computed the neural network over the input x
optimizer.zero_grad()
# compute the loss
loss = self.loss_fn(out, y)
# backpropagating the loss
loss.backward()
optimizer.step()
avg_loss = avg_loss + loss.item()
avg_loss = avg_loss / len(train_loader)
print("the avg loss is ", avg_loss)
return avg_loss
def score(self, val_data, val_labels):
val_data = torch.from_numpy(val_data).float()
val_labels = torch.from_numpy(val_labels).long()
        val_dset = TensorDataset(val_data, val_labels)
        val_loader = DataLoader(val_dset, batch_size=4096, shuffle=False,
                                num_workers=4, pin_memory=True)
self.model.eval()
correct = 0
total = 0
for i, (x, y) in enumerate(val_loader):
x = x.cuda()
out = self.model(x)
pred = out.cpu().data.numpy().argmax(axis=1)
correct += (pred == y.cpu().data.numpy()).sum()
total += len(pred)
print("the total is ", total)
acc = float(correct) / float(total)
return acc
```
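A minimal usage sketch for `Net` (it assumes a CUDA device, matching the unconditional `.cuda()` calls above; the sizes and random data are placeholders):

```python
# Hypothetical end-to-end use of the linear probe above.
import numpy as np

model = Net(inp_size=256, out_size=10)
train_x = np.random.randn(1024, 256).astype(np.float32)
train_y = np.random.randint(0, 10, size=1024)
model.fit(train_x, train_y)
print("accuracy:", model.score(train_x, train_y))
```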
#### File: CS236G-project/bigan_experiments/save_features.py
```python
import argparse
import os
from functools import partial
import numpy as np
import torch.utils.data
from torch.autograd import Variable
from torchvision import datasets, transforms
from tqdm import tqdm
from cifar_dataset_mnist_eval import CIFAR10_MNIST
join = os.path.join
batch_size = 64
latent_size = 256
cuda_device = "0"
def tocuda(x):
if opt.use_cuda:
return x.cuda()
return x
def get_random_uniform_batch(data, targets, num_classes=10, samples_per_class=100):
random_batch = np.zeros((num_classes * samples_per_class, data.shape[1]))
random_targets = np.zeros(num_classes * samples_per_class)
indices = np.random.permutation(data.shape[0])
batch_size = 0
label_counts = np.zeros(num_classes)
for i in indices:
if label_counts[targets[i]] < samples_per_class:
label_counts[targets[i]] += 1
random_batch[batch_size, :] = data[i, :]
random_targets[batch_size] = targets[i]
batch_size += 1
if batch_size >= num_classes * samples_per_class:
break
return random_batch, random_targets
def get_embeddings(loader, netE, fname):
all_embeddings = []
all_targets = []
for idx, (data, target) in tqdm(enumerate(loader), total=min(100000//batch_size, len(loader))):
temp, h1, h2, h3 = netE.forward(Variable(tocuda(data)))
temp = temp.view(temp.size(0), -1)
all_embeddings.extend(temp.cpu().data.numpy())
all_targets.extend(target.cpu().data.numpy())
if len(all_embeddings) >= 100000:
break
all_embeddings = np.array(all_embeddings)[:100000]
all_targets = np.array(all_targets)[:100000]
print(all_embeddings.shape, all_targets.shape)
if fname is not None:
np.save(fname, all_embeddings)
np.save(fname.replace("feats.npy", "labels.npy"), all_targets)
return all_embeddings, all_targets
def get_data_loaders(args):
root = val_root = args.dataroot
if args.dataset == 'svhn':
train_dataset_cls = partial(datasets.SVHN, split='extra')
val_dataset_cls = partial(datasets.SVHN, split='train')
elif args.dataset == 'cifar10':
train_dataset_cls = partial(datasets.CIFAR10, train=True)
val_dataset_cls = partial(datasets.CIFAR10, train=False)
elif args.dataset == "cifar_mnist_cifar":
train_dataset_cls = partial(CIFAR10_MNIST, aug_type=1, dataset="cifar", train=True)
val_dataset_cls = partial(CIFAR10_MNIST, aug_type=1, dataset="cifar", train=False)
elif args.dataset == "cifar_mnist":
train_dataset_cls = partial(CIFAR10_MNIST, aug_type=1, dataset="all", train=True)
val_dataset_cls = partial(CIFAR10_MNIST, aug_type=1, dataset="all", train=False)
elif args.dataset == "cifar_mnist_mnist":
train_dataset_cls = partial(CIFAR10_MNIST, aug_type=1, dataset="mnist", train=True)
val_dataset_cls = partial(CIFAR10_MNIST, aug_type=1, dataset="mnist", train=False)
elif args.dataset == "timagenet":
train_dataset_cls = datasets.ImageFolder
val_dataset_cls = datasets.ImageFolder
root = "/atlas/u/tsong/data/timagenet/train/"
val_root = "/atlas/u/a7b23/data/tiny-imagenet-200/val"
else:
raise NotImplementedError
train_loader = torch.utils.data.DataLoader(
train_dataset_cls(root=root, download=True,
transform=transforms.Compose([
transforms.ToTensor()
])),
batch_size=batch_size, shuffle=False, num_workers=0)
test_loader = torch.utils.data.DataLoader(
val_dataset_cls(root=val_root, download=True,
transform=transforms.Compose([
transforms.ToTensor()
])),
batch_size=batch_size, shuffle=False, num_workers=0)
return train_loader, test_loader
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', required=True,
help='cifar10 | svhn | cifar_mnist_cifar | cifar_mnist_mnist | timagenet',
choices=['cifar10', 'svhn', 'cifar_mnist_mnist', 'cifar_mnist_cifar', 'timagenet'])
parser.add_argument('--feat_dir', required=True, help='features directory')
parser.add_argument('--dataroot', default="/atlas/u/a7b23/data", help='path to dataset')
parser.add_argument('--use_cuda', type=bool, default=True)
parser.add_argument('--model_path', required=True)
opt = parser.parse_args()
os.environ["CUDA_VISIBLE_DEVICES"] = cuda_device
    if opt.dataset != "timagenet":
from model import *
else:
from model_timagenet import *
encoder_state_dict = torch.load(opt.model_path)
netE = Encoder(latent_size, True)
netE.load_state_dict(encoder_state_dict)
netE = tocuda(netE)
print("Model restored")
if not os.path.exists(opt.feat_dir):
os.makedirs(opt.feat_dir)
train_loader, test_loader = get_data_loaders(opt)
get_embeddings(train_loader, netE, join(opt.feat_dir, opt.dataset + "_train_feats.npy"))
get_embeddings(test_loader, netE, join(opt.feat_dir, opt.dataset + "_test_feats.npy"))
```
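`get_random_uniform_batch` above is a class-balanced subsampler (at most `samples_per_class` rows per label) but is never called in this file, so a small usage sketch:

```python
# Hypothetical usage of get_random_uniform_batch on saved features.
import numpy as np

feats = np.random.randn(5000, 256)
labels = np.random.randint(0, 10, size=5000)
batch, batch_labels = get_random_uniform_batch(feats, labels,
                                               num_classes=10, samples_per_class=100)
print(batch.shape, np.bincount(batch_labels.astype(int)))  # (1000, 256), 100 per class
```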
#### File: CS236G-project/bigan_experiments/temp.py
```python
import argparse
import os
import torchvision.utils as vutils
from torchvision import datasets, transforms
from cifar_dataset_mnist import CIFAR10_MNIST
from model import *
batch_size = 100
lr = 1e-4
latent_size = 256
num_epochs = 100
cuda_device = "0"
def boolean_string(s):
if s not in {'False', 'True'}:
raise ValueError('Not a valid boolean string')
return s == 'True'
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', required=True, help='cifar10 | svhn')
parser.add_argument('--dataroot', required=True, help='path to dataset')
parser.add_argument('--use_cuda', type=boolean_string, default=True)
parser.add_argument('--save_model_dir', required=True)
parser.add_argument('--save_image_dir', required=True)
opt = parser.parse_args()
os.environ["CUDA_VISIBLE_DEVICES"] = cuda_device
print(opt)
if not os.path.exists(opt.save_image_dir):
os.makedirs(opt.save_image_dir)
if not os.path.exists(opt.save_model_dir):
os.makedirs(opt.save_model_dir)
def tocuda(x):
if opt.use_cuda:
return x.cuda()
return x
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data.normal_(0.0, 0.02)
if m.bias is not None:
m.bias.data.fill_(0)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
elif classname.find('Linear') != -1:
m.bias.data.fill_(0)
def log_sum_exp(input):
    m, _ = torch.max(input, dim=1, keepdim=True)
    input0 = input - m
    m = m.squeeze(1)  # drop the kept dim so the final sum broadcasts to shape (N,)
    return m + torch.log(torch.sum(torch.exp(input0), dim=1))
def get_log_odds(raw_marginals):
marginals = torch.clamp(raw_marginals.mean(dim=0), 1e-7, 1 - 1e-7)
return torch.log(marginals / (1 - marginals))
if opt.dataset == 'svhn':
train_loader = torch.utils.data.DataLoader(
datasets.SVHN(root=opt.dataroot, split='extra', download=True,
transform=transforms.Compose([
transforms.ToTensor()
])),
batch_size=batch_size, shuffle=True)
elif opt.dataset == 'cifar10':
train_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(root=opt.dataroot, train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor()
])),
batch_size=batch_size, shuffle=True)
elif opt.dataset == 'cifar_mnist':
train_loader = torch.utils.data.DataLoader(
CIFAR10_MNIST(root=opt.dataroot, aug_type=1, train=True, download=False,
transform=transforms.Compose([
transforms.ToTensor()
])),
batch_size=batch_size, shuffle=True)
else:
raise NotImplementedError
for i, (x, _) in enumerate(train_loader):
vutils.save_image(x, "cifar_mnist.png")
break
```
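`get_log_odds` computes per-pixel log-odds of the data marginals; in BiGAN-style training this is commonly used to initialise the generator's output bias so early samples match the dataset statistics. A hedged sketch (the `netG.output_bias` attribute is an assumption, not necessarily this repo's model definition):

```python
# Hypothetical generator bias initialisation from the first batch's marginals.
for x, _ in train_loader:
    netG.output_bias.data = get_log_odds(tocuda(x))  # assumes the model exposes output_bias
    break
```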
#### File: CS236G-project/BigBiGAN-TensorFlow2.0/data.py
```python
import tensorflow as tf
import tensorflow_datasets as tfds
NUM_CALLS = tf.data.experimental.AUTOTUNE
NUM_PREFETCH = tf.data.experimental.AUTOTUNE
mnist = tf.keras.datasets.mnist
(mnist_images, mnist_labels), _ = mnist.load_data()
def map_fn(image, label):
# Sample one mnist image.
i = tf.random.uniform([], maxval=len(mnist_images), dtype=tf.int32)
digit = tf.squeeze(tf.slice(mnist_images, [i, 0, 0], [1, 28, 28]))
digit_label = tf.squeeze(tf.slice(mnist_labels, [i], [1]))
digit = tf.image.grayscale_to_rgb(tf.expand_dims(digit, -1))
digit = tf.image.convert_image_dtype(digit, dtype=tf.float32)
digit = tf.image.resize(digit, [8, 8])
image = tf.image.resize(image, [32, 32]) / 255.
size_big, size_small = 32, 8
images = []
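    # The five (pad_x, pad_y) offsets below stamp the 8x8 digit near the four
    # corners and the centre of the 32x32 canvas; reduce_max then composites
    # the digit copies with the CIFAR image by taking the brightest pixel.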
for pad_x, pad_y in [(2, 2), (2, 22), (12, 12), (22, 2), (22, 22)]:
x_max, y_max = size_big - size_small, size_big - size_small
d = tf.pad(digit,
[[pad_x, x_max - pad_x],
[pad_y, y_max - pad_y],
[0, 0]])
images.append(d)
images.append(image)
image = tf.reduce_max(tf.stack(images, 0), 0)
return image, (label, digit_label)
def scale(image, label):
image = tf.cast(image, tf.float32)
image = image / 255.0
# Rescale image to 32x32 if mnist/fmnist
image = tf.image.resize(image, [32, 32])
return image, label
def get_dataset(config):
if config.dataset != 'cifar_mnist':
datasets, ds_info = tfds.load(name=config.dataset, with_info=True, as_supervised=True, data_dir=config.dataset_path)
else:
datasets, ds_info = tfds.load(name='cifar10', with_info=True, as_supervised=True,
data_dir=config.dataset_path)
for k in list(datasets.keys()):
datasets[k] = datasets[k].map(map_fn)
train_data, test_data = datasets['train'], datasets['test']
return train_data, test_data
def get_train_pipeline(dataset, config):
if config.dataset != 'cifar_mnist':
dataset = dataset.map(scale, num_parallel_calls=NUM_CALLS)
if (config.cache_dataset):
dataset = dataset.cache()
dataset = dataset.shuffle(config.data_buffer_size).batch(config.train_batch_size, drop_remainder=True).prefetch(
NUM_PREFETCH)
return dataset
```
#### File: CS236G-project/BigBiGAN-TensorFlow2.0/train_all.py
```python
import logging
import tensorflow as tf
from data_all import get_dataset, get_train_pipeline
from training_all import train
from model_small import BIGBIGAN_G, BIGBIGAN_D_F, BIGBIGAN_D_H, BIGBIGAN_D_J, BIGBIGAN_E
import numpy as np
import os
from PIL import Image
def save_image(img, fname):
img = img*255.0
img = Image.fromarray(img.astype(np.uint8))
img.save(fname)
def visualize(train_data):
out_dir = "images_pos_vis"
if not os.path.exists(out_dir):
os.makedirs(out_dir)
for image, label in train_data:
img, img_aug = tf.split(image, 2, axis=-1)
images = img.numpy()
images_aug = img_aug.numpy()
print(images.shape, images_aug.shape, np.min(images), np.max(images), np.min(images_aug), np.max(images_aug))
for idx, (img, img_aug) in enumerate(zip(images, images_aug)):
if idx == 10:
break
save_image(img, os.path.join(out_dir, "img_" + str(idx)+".png"))
save_image(img_aug, os.path.join(out_dir, "img_aug_" + str(idx)+".png"))
break
def set_up_train(config):
# Setup tensorflow
tf.config.threading.set_inter_op_parallelism_threads(8)
tf.config.threading.set_intra_op_parallelism_threads(8)
physical_devices = tf.config.experimental.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], True)
# Load dataset
logging.info('Getting dataset...')
train_data, _ = get_dataset(config)
# setup input pipeline
logging.info('Generating input pipeline...')
train_data = get_train_pipeline(train_data, config)
# visualize(train_data)
# get model
logging.info('Prepare model for training...')
weight_init = tf.initializers.orthogonal()
if config.dataset == 'mnist':
weight_init = tf.initializers.TruncatedNormal(mean=0.0, stddev=0.02)
model_generator = BIGBIGAN_G(config, weight_init)
model_discriminator_f = BIGBIGAN_D_F(config, weight_init)
model_discriminator_h = BIGBIGAN_D_H(config, weight_init)
model_discriminator_j = BIGBIGAN_D_J(config, weight_init)
model_encoder = BIGBIGAN_E(config, weight_init)
# train
logging.info('Start training...')
train(config=config,
gen=model_generator,
disc_f=model_discriminator_f,
disc_h=model_discriminator_h,
disc_j=model_discriminator_j,
model_en=model_encoder,
train_data=train_data)
# Finished
logging.info('Training finished ;)')
```
#### File: CS236G-project/truncation_experiments/generate_images_w.py
```python
import pickle
import dnnlib
from dnnlib import tflib
import numpy as np
from PIL import Image
tflib.init_tf()
def save_image(img, fname):
img = Image.fromarray(img.astype(np.uint8))
img.save(fname)
# fname = "bedroom_model/karras2019stylegan-bedrooms-256x256.pkl"
fname = "cats_model/karras2019stylegan-cats-256x256.pkl"
with open(fname, "rb") as f:
_G, _D, Gs = pickle.load(f)
rnd = np.random.RandomState(10)
batch_size = 50
total_images = 10000
iterations = int(total_images/batch_size)
fmt = dict(func=tflib.convert_images_to_uint8, nchw_to_nhwc=True)
images_all = []
for i in range(iterations):
print(i, iterations)
latents = rnd.randn(batch_size, Gs.input_shape[1])
src_dlatents = Gs.components.mapping.run(latents, None)
    # Generate images, reusing the output transform defined above.
    images = Gs.components.synthesis.run(src_dlatents, truncation_psi=0.7, randomize_noise=True, output_transform=fmt)
# start = i*batch_size
# for idx, img in enumerate(images) :
# save_image(images[idx], "images_bedroom/"+str(start + idx)+".png")
images_all.extend(images)
images_all = np.array(images_all)
print(images_all.shape, np.min(images_all), np.max(images_all))
np.save("images_cats/images.npy", images_all)
#save_image(images[0], "images_bedroom/img0.png")
#save_image(images[1], "images_bedroom/img1.png")
``` |
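`truncation_psi=0.7` above trades diversity for fidelity; sweeping it is the usual way to pick a value. A sketch against the same (assumed) StyleGAN `Gs` handle and `save_image` helper defined above:

```python
# Hypothetical truncation sweep reusing the names defined above.
for psi in [0.3, 0.5, 0.7, 1.0]:
    imgs = Gs.components.synthesis.run(src_dlatents[:4], truncation_psi=psi,
                                       randomize_noise=False, output_transform=fmt)
    for k, img in enumerate(imgs):
        save_image(img, "images_cats/psi{:.1f}_{}.png".format(psi, k))
```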
{
"source": "a7f4/pass_cli",
"score": 3
} |
#### File: pass_cli/ui_parts/boxes.py
```python
import urwid as ui
class FancyListBox(ui.ListBox):
def keypress(self, size, key):
"""Handle keypresses."""
if key == "e":
self._app.pass_edit(originator=self, path=self._app.current)
if self.body:
currentfocus = self.focus_position
maxindex = len(self.body) - 1
newfocus = None
if key == 'home':
newfocus = 0
elif key == 'end':
newfocus = maxindex
elif key == 'k':
newfocus = currentfocus - 1
elif key == 'j':
newfocus = currentfocus + 1
elif key.isdigit() and int(key) in range(1, 10):
newfocus = int(key) - 1
if newfocus is not None:
if newfocus < 0:
newfocus = 0
elif newfocus > maxindex:
newfocus = maxindex
self.set_focus(newfocus)
return super(FancyListBox, self).keypress(size, key)
class CommandBox(ui.Edit):
def __init__(self, *args, **kwargs):
self._user_cmd = None
super(CommandBox, self).__init__(*args, **kwargs)
def keypress(self, size, key):
if key == 'esc':
self.set_edit_text('')
return super(CommandBox, self).keypress(size, key)
``` |
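A minimal wiring sketch for `FancyListBox` (a self-contained, assumption-laden demo, not how pass_cli itself wires it; the 'e' keybinding expects an `_app` attribute providing `pass_edit()`, so a stub is attached purely for illustration):

```python
# Hypothetical standalone demo of FancyListBox.
import urwid as ui

class _StubApp:
    current = None
    def pass_edit(self, originator, path):
        pass

items = [ui.Button("entry %d" % i) for i in range(5)]
listbox = FancyListBox(ui.SimpleFocusListWalker(items))
listbox._app = _StubApp()
loop = ui.MainLoop(listbox)
# loop.run()  # uncomment to take over the terminal; j/k, home/end and 1-9 move focus
```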
{
"source": "a7i7/Pixelate",
"score": 3
} |
#### File: Pixelate/pixelate/__init__.py
```python
import sys
from Pixelator import Pixelator
import PixelEffect
class EffectType:
plain_fill = "fill"
mushy_effect = "mushy"
lumen_center = "lumen-center"
lumen_side = "lumen-side"
colors_256_effect = "256-colors"
smoke_effect = "smoke"
def main(image_path, pixel_size, pixel_effect_name, output):
if pixel_effect_name==EffectType.plain_fill:
pixel_effect = PixelEffect.PixelEffect(
PixelEffect.PlainFill(pixel_size) )
elif pixel_effect_name==EffectType.mushy_effect:
pixel_effect = PixelEffect.PixelEffect(
PixelEffect.MushyEffect(pixel_size) )
elif pixel_effect_name==EffectType.lumen_center:
pixel_effect = PixelEffect.PixelEffect(
PixelEffect.LumenCenterEffect(pixel_size) )
elif pixel_effect_name==EffectType.lumen_side:
pixel_effect = PixelEffect.PixelEffect(
PixelEffect.LumenSideEffect(pixel_size) )
elif pixel_effect_name==EffectType.colors_256_effect:
pixel_effect = PixelEffect.PixelEffect(
PixelEffect.Colors256Effect(pixel_size) )
elif pixel_effect_name==EffectType.smoke_effect:
pixel_effect = PixelEffect.PixelEffect(
PixelEffect.SmokeEffect(pixel_size) )
    else:
        print("No such effect as %s" % pixel_effect_name)
        sys.exit(0)
    pixelator = Pixelator(image_path, pixel_size, pixel_effect)
    print("Pixelating image and applying effects...")
    pixelated_image = pixelator.get_pixelated_image()
    print("Writing to %s" % output)
    try:
        pixelated_image.save(output)
    except IOError:
        print("Could not save to %s. Invalid path" % output)
``` |
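The if/elif ladder in `main` maps effect names to `PixelEffect` classes one by one; the same dispatch can be table-driven, which keeps `EffectType` and the constructors in sync. A sketch under the same module layout:

```python
# Hypothetical table-driven dispatch equivalent to the elif chain above.
EFFECTS = {
    EffectType.plain_fill: PixelEffect.PlainFill,
    EffectType.mushy_effect: PixelEffect.MushyEffect,
    EffectType.lumen_center: PixelEffect.LumenCenterEffect,
    EffectType.lumen_side: PixelEffect.LumenSideEffect,
    EffectType.colors_256_effect: PixelEffect.Colors256Effect,
    EffectType.smoke_effect: PixelEffect.SmokeEffect,
}

def make_effect(name, pixel_size):
    try:
        return PixelEffect.PixelEffect(EFFECTS[name](pixel_size))
    except KeyError:
        raise ValueError("No such effect as %s" % name)
```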
{
"source": "a7i/alexa-site-linking",
"score": 2
} |
#### File: a7i/alexa-site-linking/main.py
```python
import boto3
import botocore.credentials
from botocore.awsrequest import AWSRequest
from botocore.httpsession import URLLib3Session
from botocore.auth import SigV4Auth
import datetime
import json
import xmltodict
import argparse
def main():
site = get_site()
request = AWSRequest(method='GET', url=get_api_url(),
params=get_params(site), headers=get_headers())
SigV4Auth(get_credentials(), 'awis', 'us-west-1').add_auth(request)
res = URLLib3Session().send(request.prepare())
    result = parse_response(res.content)
    print(result)
def get_site():
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--site', type=str, required=True,
help='Site name to link in')
args = parser.parse_args()
return args.site
def get_api_url(): return 'https://awis.amazonaws.com/api'
def get_headers(): return {
'Host': 'awis.us-west-1.amazonaws.com',
'x-amz-date': datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
}
def get_params(site): return {
'Action': 'SitesLinkingIn',
'ResponseGroup': 'SitesLinkingIn',
'Url': site
}
def get_credentials():
session = boto3.Session()
credentials = session.get_credentials().get_frozen_credentials()
return credentials
def parse_response(xml):
result = xmltodict.parse(xml)
data = json.dumps(result).replace('@', '')
return json.loads(data)
if __name__ == '__main__':
main()
``` |
{
"source": "a7i/Hummingbird",
"score": 2
} |
#### File: Hummingbird/test/test_scheduler.py
```python
import json
import unittest
from botocore.exceptions import WaiterError, ClientError
from mock import patch, MagicMock, mock_open
from Hummingbird.errors import SchedulerException
from Hummingbird.hummingbird_utils import PLATFORM
from Hummingbird.scheduler import AWSBatchScheduler
from Hummingbird.instance import AWSInstance
from Hummingbird.hummingbird_utils import get_full_path
class TestAWSScheduler(unittest.TestCase):
conf = {PLATFORM: {'regions': 'us-west-2', 'bucket': 'local-bucket', 'cloudformation_stack_name': 'test'}}
jobs = ['some-job-id']
cf_stack_output = [
{'OutputKey': 'PrivateSubnet1', 'OutputValue': 'subnet1'},
{'OutputKey': 'PrivateSubnet2', 'OutputValue': 'subnet2'},
{'OutputKey': 'BatchEC2SecurityGroup', 'OutputValue': 'sg-test'},
{'OutputKey': 'ECSInstanceProfileRoleARN', 'OutputValue': 'ecsInstanceRole'},
{'OutputKey': 'ECSTaskExecutionRoleARN', 'OutputValue': 'taskExecutionRole'},
{'OutputKey': 'BatchServiceRoleARN', 'OutputValue': 'awsBatchServiceRole'}
]
def setUp(self):
self.instance = AWSBatchScheduler(self.conf, AWSInstance(), 100, None)
def test_instance_fields(self):
instance = AWSBatchScheduler(self.conf, None, None, None)
self.assertIsNotNone(instance.batch_client, 'batch_client field was not initialized')
self.assertIsNotNone(instance.ec2_client, 'ec2_client field was not initialized')
self.assertIsNotNone(instance.s3_bucket, 's3_bucket field was not initialized')
@patch('botocore.waiter.create_waiter_with_client')
def test_wait_jobs(self, create_waiter_with_client_mock):
self.instance.wait_jobs(self.jobs)
create_waiter_with_client_mock.return_value.wait.assert_called_once_with(jobs=self.jobs)
@patch('logging.exception')
@patch('botocore.waiter.create_waiter_with_client')
    def test_wait_jobs_waiter_error(self, create_waiter_with_client_mock, exception_mock):
create_waiter_with_client_mock.return_value.wait.side_effect = WaiterError('', '', '')
self.assertRaises(SchedulerException, self.instance.wait_jobs, self.jobs)
exception_mock.assert_called_once()
def test_get_compute_environment_waiter(self):
waiter_id = 'some-waiter-id'
compute_env_waiter = self.instance.get_compute_environment_waiter(waiter_id)
self.assertEqual(waiter_id, compute_env_waiter.name)
self.assertEqual(20, compute_env_waiter.config.max_attempts)
self.assertEqual(1, compute_env_waiter.config.delay)
def test_get_compute_job_queue_waiter(self):
waiter_id = 'some-waiter-id'
compute_env_waiter = self.instance.get_compute_job_queue_waiter(waiter_id)
self.assertEqual(waiter_id, compute_env_waiter.name)
self.assertEqual(20, compute_env_waiter.config.max_attempts)
self.assertEqual(10, compute_env_waiter.config.delay)
def test_get_compute_job_waiter(self):
waiter_id = 'some-waiter-id'
compute_env_waiter = self.instance.get_compute_job_waiter(waiter_id)
self.assertEqual(waiter_id, compute_env_waiter.name)
self.assertEqual(24 * 60 * 2, compute_env_waiter.config.max_attempts)
self.assertEqual(60, compute_env_waiter.config.delay)
@patch('boto3.client', return_value=MagicMock())
def test_create_or_update_launch_template_create(self, client_mock):
self.instance.ec2_client = client_mock
client_mock.describe_launch_templates.side_effect = ClientError({}, 'DescribeLaunchTemplate')
self.instance.create_or_update_launch_template()
client_mock.create_launch_template.assert_called_once()
@patch('boto3.client', return_value=MagicMock())
def test_create_or_update_launch_template_create_version(self, client_mock):
self.instance.ec2_client = client_mock
self.instance.create_or_update_launch_template()
client_mock.create_launch_template_version.assert_called_once()
@patch('boto3.client', return_value=MagicMock())
def test_create_or_update_launch_template_uses_template(self, client_mock):
self.instance.ec2_client = client_mock
self.instance.create_or_update_launch_template()
with open(get_full_path('AWS/launch-template-data.json')) as tpl:
data = json.load(tpl)
data['LaunchTemplateName'] = self.instance.get_compute_name()
client_mock.create_launch_template_version.assert_called_once_with(**data)
@patch('boto3.client', return_value=MagicMock())
def test_get_cf_stack_output(self, client_mock):
self.instance.cf_client = client_mock
client_mock.describe_stacks.return_value = {'Stacks': [{'StackName': 'test', 'Outputs': self.cf_stack_output}]}
self.instance.get_cf_stack_output()
client_mock.describe_stacks.assert_called_once_with(StackName='test')
@patch('boto3.client', return_value=MagicMock())
@patch('logging.exception')
def test_get_cf_stack_output_missing_key(self, _, client_mock):
self.instance.cf_client = client_mock
for kv in self.cf_stack_output:
output = [item for item in self.cf_stack_output if item != kv]
client_mock.describe_stacks.return_value = {'Stacks': [{'StackName': 'test', 'Outputs': output}]}
self.assertRaises(SchedulerException, self.instance.get_cf_stack_output)
``` |
{
"source": "a7i/personal-health-dashboard",
"score": 3
} |
#### File: decrypt/src/main.py
```python
from argparse import ArgumentParser
def process_message(message):
print('this is a no-op and required custom logic for decryption.')
def parse_args():
arg_parser = ArgumentParser(description='Server for decrypting data.')
arg_parser.add_argument('-d', '--debug', action='store_true',
help='enable debugging mode (not intended for production)')
arg_parser.add_argument('-m', '--message', dest='message', action='store', type=str,
help='PubSub message - json encoded str')
args = arg_parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
process_message(args.message)
```
#### File: inference/src/main.py
```python
import json
import os
from argparse import ArgumentParser
from data import get_data
from google.cloud import storage as st
from inference import load_registered_model
KEYS = ['pid', 'study', 'mlflow_id', 'last_sync', 'last_sync_update']
def validate_json(s):
data = json.loads(s)
# Same lengths.
if len(data) != len(KEYS): return False
# Same keys.
for key in KEYS:
if key not in data: return False
return True
def check_message(message):
if not message:
return 'Message must include data.'
if not validate_json(message):
return 'Message "{}" contains malformed JSON.'.format(message)
return None
def update_last_sync_date(last_sync_bucket, filename, last_sync_date_str, is_trainfer_str, args_str):
    client = st.Client()
    bucket = client.get_bucket(last_sync_bucket)
new_blob = bucket.blob(filename)
new_blob.upload_from_string('%s\n%s\n%s' % (last_sync_date_str, is_trainfer_str, args_str))
def process_message(message):
error = check_message(message)
if error:
print(error)
return
data = json.loads(message)
# Run inference.
data = get_data(data['pid'], data['last_sync'])
load_registered_model(data['mlflow_id'])
update_last_sync_date(data['last_sync_update']['last_sync_bucket'],
data['last_sync_update']['filename'],
data['last_sync_update']['last_sync_date_str'],
data['last_sync_update']['is_trainfer_str'],
data['last_sync_update']['args_str'])
# p = Process(target = load_registered_model, args = (json['name'],))
# p.start()
    print('Now performing inference for PID "%s".' % data['pid'])
def parse_args():
arg_parser = ArgumentParser(description='Server for preprocessing data.')
arg_parser.add_argument('-d', '--debug', action='store_true',
help='enable debugging mode (not intended for production)')
arg_parser.add_argument('-m', '--message', dest='message', action='store', type=str,
help='PubSub message - json encoded str')
args = arg_parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
process_message(args.message)
```
#### File: preprocessing/src/main.py
```python
import json
import os
from argparse import ArgumentParser
from google.cloud import pubsub_v1
from preprocessing import preprocess
from ti_preprocessing import ti_preprocess
KEYS = ['pid', 'study', 'args', 'last_sync', 'is_trainfer', 'last_sync_update']
PUBLISHER = pubsub_v1.PublisherClient()
PROJECT_ID = os.environ.get('GOOGLE_CLOUD_PROJECT', 'phd-project')
TRAINFERENCE_TOPIC = PUBLISHER.topic_path(PROJECT_ID, 'trainference')
TRAIN_TOPIC = PUBLISHER.topic_path(PROJECT_ID, 'inference')
def validate_json(s):
data = json.loads(s)
# Same lengths.
if len(data) != len(KEYS): return False
# Same keys.
for key in KEYS:
if key not in data: return False
return True
def check_message(message):
if not message:
return 'Message must include data.'
if not validate_json(message):
return 'Message "{}" contains malformed JSON.'.format(message)
return None
def publish_message(payload, topic):
print('Publishing message to: %s' % topic)
serialized_data = json.dumps(payload)
future = PUBLISHER.publish(topic, serialized_data.encode('utf-8'))
return future.result(timeout=3.0)
def process_message(message):
error = check_message(message)
if error:
print(error)
return
data = json.loads(message)
# Run preprocessing.
print('Received request to preprocess %s for study %s.' % (data['pid'], data['study']))
payload_args = {
'pid': data['pid'],
'study': data['study'],
'last_sync': data['last_sync'],
'last_sync_update': data['last_sync_update']
}
if data['is_trainfer']:
trainfer_args = ti_preprocess(data['pid'], data['study'], data['args'])
payload = {'args': trainfer_args, **payload_args}
publish_message(payload, TRAINFERENCE_TOPIC)
else:
train_args = preprocess(data['pid'], data['args'])
publish_message({'args': train_args, **payload_args}, TRAIN_TOPIC)
return 'Successfully preprocessed data for PID %s in study %s.' % (data['pid'], data['study'])
def parse_args():
arg_parser = ArgumentParser(description='Server for preprocessing data.')
arg_parser.add_argument('-d', '--debug', action='store_true',
help='enable debugging mode (not intended for production)')
arg_parser.add_argument('-m', '--message', dest='message', action='store', type=str,
help='PubSub message - json encoded str')
args = arg_parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
process_message(args.message)
``` |
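For reference, a message accepted by `validate_json` must carry exactly the six `KEYS` above. A hedged sample invocation (all field values are placeholders):

```python
# Hypothetical payload matching KEYS; routing it through process_message would
# trigger real preprocessing and a PubSub publish, so treat this as a sketch.
import json

sample = {
    'pid': 'p001',
    'study': 'demo-study',
    'args': {},
    'last_sync': '2021-01-01T00:00:00Z',
    'is_trainfer': False,
    'last_sync_update': {},
}
process_message(json.dumps(sample))
```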
{
"source": "a7madnassar/django-database-views",
"score": 2
} |
#### File: django-database-views/database_views/mixins.py
```python
from django.conf import settings
from django.core.cache import cache
class CachingMixin(object):
"""
A mixin for cache management.
"""
DEFAULT_TTL = 7 * 24 * 60 * 60 # 1 week.
cache_prefix = ''
cache_name = ''
@property
def key(self):
return '{}:{}'.format(self.cache_prefix, self.cache_name)
def _get_cache(self):
return cache.get(self.key)
def _set_cache(self, content):
ttl = self.DEFAULT_TTL
if hasattr(settings, 'TEMPLATE_CACHE_TTL'):
ttl = settings.TEMPLATE_CACHE_TTL
cache.set(self.key, content, ttl)
def _delete_cache(self):
cache.delete(self.key)
``` |
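A sketch of how `CachingMixin` is meant to be mixed into a view: the subclass supplies `cache_prefix`/`cache_name` and calls the private helpers around rendering (everything beyond the mixin itself, including `render_content`, is an assumption):

```python
# Hypothetical view using CachingMixin to memoise rendered content.
from django.http import HttpResponse
from django.views import View

class CachedTemplateView(CachingMixin, View):
    cache_prefix = 'templates'
    cache_name = 'home'

    def get(self, request):
        content = self._get_cache()
        if content is None:
            content = self.render_content()  # assumed to build the HTML string
            self._set_cache(content)
        return HttpResponse(content)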
{
"source": "a7med7amdy/Hirey",
"score": 2
} |
#### File: SeverSide_Flask/old deployment/app.py
```python
from flask import Flask, request, jsonify, render_template
import os
import argparse
import numpy as np
import cv2
import matplotlib.pyplot as plt
from matplotlib import image
from os import listdir
from PIL import Image
import skimage
from skimage import data
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
from torchvision import datasets, transforms
from torch.utils.data import DataLoader, Dataset, random_split
from torch.utils.data.sampler import SubsetRandomSampler
from sklearn.model_selection import train_test_split
class Deep_Emotion(nn.Module):
def __init__(self):
super(Deep_Emotion, self).__init__()
self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
self.conv2_drop = nn.Dropout2d()
self.fc1 = nn.Linear(320, 50)
self.fc2 = nn.Linear(50, 8)
# Spatial transformer localization-network
self.localization = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=7),
nn.MaxPool2d(2, stride=2),
nn.ReLU(True),
nn.Conv2d(8, 10, kernel_size=5),
nn.MaxPool2d(2, stride=2),
nn.ReLU(True)
)
# Regressor for the 3 * 2 affine matrix
self.fc_loc = nn.Sequential(
nn.Linear(10 * 3 * 3, 32),
nn.ReLU(True),
nn.Linear(32, 3 * 2)
)
# Initialize the weights/bias with identity transformation
self.fc_loc[2].weight.data.zero_()
self.fc_loc[2].bias.data.copy_(torch.tensor([1, 0, 0, 0, 1, 0], dtype=torch.float))
# Spatial transformer network forward function
def stn(self, x):
xs = self.localization(x)
xs = xs.view(-1, 10 * 3 * 3)
theta = self.fc_loc(xs)
theta = theta.view(-1, 2, 3)
grid = F.affine_grid(theta, x.size())
x = F.grid_sample(x, grid)
return x
def forward(self, x):
# transform the input
x = self.stn(x)
# Perform the usual forward pass
x = F.relu(F.max_pool2d(self.conv1(x), 2))
x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
x = x.view(-1, 320)
x = F.relu(self.fc1(x))
x = F.dropout(x, training=self.training)
x = self.fc2(x)
return x
device = "cpu"
def funfac(img):
    transformation = transforms.Compose([transforms.ToTensor()])
net = Deep_Emotion()
net.load_state_dict(torch.load('deep_emotion_CK+neutral-30.30-100-64-0.005-91%.pt'))
net.to(device)
net.eval()
#Model Evaluation on test data
classes = ('Angry','contempt' ,'Disgust', 'Fear', 'Happy', 'neutral','Sad', 'Surprise')
# gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# cv2.rectangle(img, (x,y), (x+w, y+h), (255,0,0), 2)
resize_frame = cv2.resize(img, (28, 28))
plt.imshow(resize_frame)
plt.show()
    X = resize_frame / 256
    X = Image.fromarray(X.astype(np.float32))  # PIL cannot build an image from float64
    X = transformation(X).unsqueeze(0)
with torch.no_grad():
imj = X.to(device)
out = net(imj)
pred = F.softmax(out,dim=1)
classs = torch.argmax(pred,1)
prediction = classes[classs.item()]
print(prediction)
return prediction
#!pip install flask-ngrok
from flask import Flask
from flask import request
from flask_ngrok import run_with_ngrok
app = Flask(__name__)
#this contains the path of folder that to store the images in
app.config["IMAGE_UPLOADS"] = "/content"
run_with_ngrok(app) # Start ngrok when app is run
@app.route('/')
def home():
return render_template('index.html')
@app.route("/predict", methods=['POST'])
def predict():
    image = request.files["image"]
    # the name of the uploaded image file
    imgName = image.filename
    # save the image into the configured upload directory
    img_path = os.path.join(app.config["IMAGE_UPLOADS"], imgName)
    image.save(img_path)
    # read it back in grayscale from the same path it was saved to
    img = cv2.imread(img_path, 0)
output=funfac(img)
#return the emotion of the face's image to the html
return output
app.run()
``` |
{
"source": "A7medAbdeldaim/Face-Recognition",
"score": 3
} |
#### File: A7medAbdeldaim/Face-Recognition/main.py
```python
import csv
import cv2
import dlib
import numpy as np
# Define global paths for the models
FACE_REC_MODEL_PATH = './models/dlib_face_recognition_resnet_model_v1.dat'
PREDICTOR_PATH = './models/shape_predictor_5_face_landmarks.dat'
DESCRIPTORS_FILE_PATH = './users_descriptors.csv'
# Initialize the model objects
face_detector = dlib.get_frontal_face_detector()
shape_predictor = dlib.shape_predictor(PREDICTOR_PATH)
face_recognition_model = dlib.face_recognition_model_v1(FACE_REC_MODEL_PATH)
CONFIDENCE_RATIO = 0.5 # Recognition confidence ratio
USER_NAME = "UNKNOWN"
def main():
cap = cv2.VideoCapture(0)
while True:
# Capture frame-by-frame
ret, frame = cap.read()
frame = face_recognize(frame)
if frame is not None:
# Display the resulting frame
cv2.imshow('frame', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
def face_recognize(img):
detector = face_detector(img, 1)
val = CONFIDENCE_RATIO
user_name = USER_NAME
# If a face is detected
for _, dimensions in enumerate(detector):
shape = shape_predictor(img, dimensions)
face_descriptor = face_recognition_model.compute_face_descriptor(img, shape)
with open(DESCRIPTORS_FILE_PATH) as csv_file:
# Read the file as a csv object
reader = csv.DictReader(csv_file)
# loop through rows
for row in reader:
j = np.asarray(row['descriptor'].split('\n'), dtype='float32')
label = row['user_name']
                    # Compute the difference between the descriptor of the detected face
# and the descriptor in the csv file
difference = np.linalg.norm(face_descriptor - j)
                # if the difference is less than the CONFIDENCE_RATIO
if difference < CONFIDENCE_RATIO:
val = difference
user_name = label
draw_shape(img, dimensions, user_name, val)
return img
def draw_shape(img, dimensions, user_name, ratio):
color = (0, 255, 0) # Green color in BGR
thickness = 2 # Line thickness of 2 px
# Using cv2.rectangle() method
cv2.rectangle(img,
(dimensions.left(), dimensions.top()),
(dimensions.right(), dimensions.bottom()),
color, thickness)
# Draw a label with a name below the face
cv2.rectangle(img,
(dimensions.left(), dimensions.bottom() - 35),
(dimensions.right(), dimensions.bottom()),
color, cv2.FILLED)
font = cv2.FONT_HERSHEY_DUPLEX
cv2.putText(img, user_name + ' ' + str(ratio),
(dimensions.left() + 6, dimensions.bottom() - 6),
font, 1.0,
(255, 255, 255), 1)
if __name__ == '__main__':
main()
``` |
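The recogniser reads known faces from `users_descriptors.csv` with `user_name` and `descriptor` columns, and parses the descriptor back with `split('\n')`, which implies one float per line inside the field. A hedged enrollment sketch writing a compatible row (it assumes the CSV already has its header row and reuses the module-level models above):

```python
# Hypothetical enrollment helper producing rows the parser above can read.
import csv
import numpy as np

def enroll(img, user_name, csv_path=DESCRIPTORS_FILE_PATH):
    det = face_detector(img, 1)
    assert len(det) == 1, "expect exactly one face for enrollment"
    shape = shape_predictor(img, det[0])
    descriptor = np.array(face_recognition_model.compute_face_descriptor(img, shape))
    with open(csv_path, 'a', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=['user_name', 'descriptor'])
        writer.writerow({'user_name': user_name,
                         'descriptor': '\n'.join(str(v) for v in descriptor)})
```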
{
"source": "a7medayman6/Tiny-Git",
"score": 3
} |
#### File: a7medayman6/Tiny-Git/gitCommands.py
```python
import os
import time
from helpers import *
from gitCache import getWorkdirState, getCache, CacheEntry, writeCache
from gitObjects import generate_object_hash, getCommitHash, writeTree
def init(path = '.'):
"""
Description:
Initialize a directory at $path as a git repository.
Creates .git directory at path/.git .
Parameters:
path (string): the path to create the git repository at. path='.' by default.
Return:
(boolean): true if initialized successfully, otherwise raise an exception.
"""
    # if path is already a git repository -> raise an exception.
if os.path.isdir(os.path.join(path, '.git')):
raise Exception('Already a git repository.')
    # Create the necessary dirs and files.
    os.mkdir(os.path.join(path, '.git'))
    os.mkdir(os.path.join(path, '.git', 'objects'))
    os.mkdir(os.path.join(path, '.git', 'refs'))
    os.mkdir(os.path.join(path, '.git', 'refs/heads'))
# Write to the HEAD file the branch pointer in refs.
writeFile(os.path.join(path, '.git', 'HEAD'), b'ref: refs/heads/master')
print('Initialized an empty git repository at ', path)
return True
def status(path = '.'):
"""
Description:
Displays the status of the working directory copy (new, modified, deleted).
Parameters:
[path] (string): the path of the git repository, path = '.' by default.
Return:
None.
"""
new, modified, deleted = getWorkdirState(path)
# print the new list
if new:
print ("New files ..")
for file in new:
print('\t', file)
print('\t', "__________________")
# print the modified list
if modified:
print ("Modified files ..")
for file in modified:
print('\t', file)
print('\t', "__________________")
# print the deleted list
if deleted:
print ("Deleted files ..")
for file in deleted:
print('\t', file)
print('\t', "__________________")
if not new and not modified and not deleted:
print ("No changes in the repository.")
def add(files):
"""
Description:
Add the list of files to the index.
Parameters:
files (list): list of files to add
Return:
None.
"""
# for windows, replace '\\' with '/'
files = [path.replace('\\', '/') for path in files]
cache_entries = getCache()
entries = [entry for entry in cache_entries if entry.path not in files]
for file in files:
hash = generate_object_hash(readFile(file), 'blob')
st = os.stat(file)
flags = len(file.encode())
entry = CacheEntry(
int(st.st_ctime), 0, int(st.st_mtime), 0, st.st_dev,
st.st_ino, st.st_mode, st.st_uid, st.st_gid, st.st_size,
bytes.fromhex(hash), flags, file)
entries.append(entry)
writeCache(entries)
for file in files:
print("Added " , file, " to the staging area.")
def commit(msg, author):
"""
Description:
Commits the staged files to the repository.
Parameters:
msg (string): the commit message.
author (string): the commit author name.
Return:
None.
"""
tree = writeTree()
parent = getCommitHash()
timestamp = int(time.mktime(time.localtime()))
utc_offset = -time.timezone
author_time = '{} {}{:02}{:02}'.format(
timestamp,
'+' if utc_offset > 0 else '-',
abs(utc_offset) // 3600,
(abs(utc_offset) // 60) % 60)
lines = ['tree ' + tree]
if parent:
lines.append('parent ' + parent)
lines.append('author {} {}'.format(author, author_time))
lines.append('committer {} {}'.format(author, author_time))
lines.append('')
lines.append(msg)
lines.append('')
data = '\n'.join(lines).encode()
obj_hash = generate_object_hash(data, 'commit')
master_path = os.path.join('.git', 'refs', 'heads', 'master')
writeFile(master_path, (obj_hash + '\n').encode())
print('committed ', obj_hash, ' to master.')
return obj_hash
```
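A short end-to-end sketch of the commands above on a fresh directory (paths and the author string are placeholders, and it assumes the functions are importable from this module):

```python
# Hypothetical end-to-end run of init/add/commit/status.
import os
from gitCommands import init, add, commit, status
from helpers import writeFile

os.makedirs('demo-repo', exist_ok=True)
os.chdir('demo-repo')
init('.')
writeFile('hello.txt', b'hello tiny git\n')
add(['hello.txt'])
commit('initial commit', 'Jane Doe <jane@example.com>')
status('.')
```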
#### File: a7medayman6/Tiny-Git/gitObjects.py
```python
import os
import sys
import hashlib
import zlib
import stat
from helpers import *
from gitCache import getCache
def writeObject(obj):
"""
Description:
Writes the object hash compressed to .git/objects as hex string.
Parameters:
obj (SHA-1 string)): object string generated using generate_object_hash function.
Return:
obj (SHA-1 string)): object string generated using generate_object_hash function.
"""
"""
if the object path doesn't exist create it, then write the object.
the object is written to .git/objects/obj[:2]/obj[2:]
    where obj[:2] is the first 2 chars of the 40-char sha1 hash, and obj[2:] are the rest
"""
obj_path = os.path.join('.git', 'objects', obj[:2], obj[2:])
if not os.path.exists(obj_path):
os.makedirs(os.path.dirname(obj_path), exist_ok=True)
        writeFile(obj_path, zlib.compress(obj.encode()))
else:
        raise Exception('The object already exists at %s' % obj_path)
return obj
def findObject(obj_hash_prefix):
"""
Description:
        Finds the path of an object from the first 3 or more chars of its sha1 hash, if it exists.
Parameters:
obj_hash_prefix (SHA-1 string)): the first 3 or more chars of object sha1 hash string.
Return:
(str): the path of the object if it exists, otherwise raise an exception.
"""
    # if the hash prefix length is less than 3, raise an exception.
    if len(obj_hash_prefix) < 3:
        raise Exception('The sha1 hash prefix must be more than 2 characters.')
# create the object dir which is at .git/objects/obj_hash_prefix[:2]
# where obj_hash_prefix[:2] is the first two chars of the hash prefix.
obj_dir = os.path.join('.git', 'objects', obj_hash_prefix[:2])
# get a list of all the objects (files in obj_dir) that starts with the string obj_hash_prefix[2:]
# where obj_hash_prefix[2:] is all the chars in the hash prefix after the second char
objects_list = [name for name in os.listdir(obj_dir) if name.startswith(obj_hash_prefix[2:])]
# if the objects list is empty -> raise an object not found exception.
if not objects_list:
        raise Exception('Object %s not found.' % obj_hash_prefix)
# if the objects list has more than one object -> raise multiple objects found exception.
if len(objects_list) > 1:
        raise Exception('There are multiple objects starting with the prefix %s; please specify more characters.' % obj_hash_prefix)
# return the first (and only) object in the objects list.
return os.path.join(obj_dir, objects_list[0])
def getObject(obj_hash_prefix):
"""
Description:
        Reads an object from its sha1 prefix if it exists.
Parameters:
obj_hash_prefix (SHA-1 string)): the first 3 or more chars of object sha1 hash string.
Return:
type (str): the object type [blob, commit, tree].
data (str): the decompressed data.
"""
# get the object file path
obj_path = findObject(obj_hash_prefix)
# read the data from the obj_path
compressed_data = readFile(obj_path)
# decompress the data
decompressed_data = zlib.decompress(compressed_data)
# get the index of the Null character in the data.
seperator = decompressed_data.index(b'\x00')
# get the header data which is the string before the Null char.
header = decompressed_data[:seperator]
# decode the header
decoded_header = header.decode()
# get the type and ignore the size from the decoded header
type, _ = decoded_header.split()
# get the data which is the string after the Null char.
data = decompressed_data[seperator + 1:]
# return the type and data of the specified object
return (type, data)
def getTree(obj_hash_prefix=None, data=None):
"""
Description:
Reads a git tree and splits the objects inside it,
        given its hash prefix OR a tree object's data.
Parameters:
obj_hash_prefix (SHA-1 string)): the first 3 or more chars of object sha1 hash string.
data (str): the decoded data of a tree object file -without the header-.
Return:
entries (list): list of all the objects inside the specified tree.
"""
if obj_hash_prefix is None and data is None:
raise TypeError('must specify "obj_hash_prefix" or "data"')
elif obj_hash_prefix is not None:
type, data = getObject(obj_hash_prefix)
assert type == 'tree', 'specified object type is not a tree'
i = 0
entries = []
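    # each tree entry is laid out as b"<mode> <path>\x00<20-byte sha1>";
    # range(1000) just caps how many entries we attempt to parse.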
for _ in range(1000):
end = data.find(b'\x00', i)
if end == -1:
break
mode, path = data[i:end].decode().split()
mode = int(mode, 8)
digest = data[end + 1:end + 21]
entries.append((mode, path, digest.hex()))
i = end + 1 + 20
return entries
def writeTree():
"""
Description:
Writes a tree from the cache to the db.
Parameters:
None.
Return:
obj_hash (SHA-1 string)): generated hash of the tree object.
"""
tree_entries = []
for entry in getCache():
mode_path = '{:o} {}'.format(entry.mode, entry.path).encode()
# create the entry object
object = mode_path + b'\x00' + entry.sha1
tree_entries.append(object)
obj_hash = generate_object_hash(b''.join(tree_entries), 'tree')
return obj_hash
def cat_file(mode, obj_hash_prefix):
"""
Description:
Displays an object in a specific format according to the mode argument.
Parameters:
mode (str): the mode to display the object.
obj_hash_prefix (SHA-1 string)): the first 3 or more chars of object sha1 hash string.
Return:
None.
"""
# get the object data and type
type, data = getObject(obj_hash_prefix)
if mode in ['commit', 'tree', 'blob']:
# if the mode is a type, but not equivilant to the object type -> raise an exception
# else -> print the object data
if type != mode:
            raise Exception('expected object type %s, got %s' % (mode, type))
sys.stdout.buffer.write(data)
# if the mode is size -> print the object size = length of obj data
elif mode == 'size':
print(len(data))
# if the mode is type -> print the type of the object
elif mode == 'type':
print(type)
# if the mode is pretty -> if the type is tree, print the tree in beautified format.
elif mode == 'pretty':
if type in ['commit', 'blob']:
sys.stdout.buffer.write(data)
elif type == 'tree':
for mode, path, sha1 in getTree(data=data):
inner_object_type = 'tree' if stat.S_ISDIR(mode) else 'blob'
print('{:06o} {} {}\t{}'.format(mode, inner_object_type, sha1, path))
        else:
            raise Exception('Unexpected object type %s.' % type)
    else:
        raise Exception('Unexpected mode %s.' % mode)
def getCommitHash():
"""
Description:
Gets the current commit SHA-1 hash.
Parameters:
None.
Return:
master_hash (SHA-1 string): generated hash of the tree object.
"""
master_file = os.path.join('.git', 'refs', 'heads', 'master')
try:
master_hash = readFile(master_file).decode().strip()
return master_hash
except FileNotFoundError:
return None
```
#### File: a7medayman6/Tiny-Git/helpers.py
```python
import hashlib
def readFile(path):
"""Read contents of file at given path as bytes."""
with open(path, 'rb') as f:
return f.read()
def writeFile(path, data):
"""Write data bytes to file at given path."""
with open(path, 'wb') as f:
f.write(data)
def generate_object_hash(data, type):
"""
Description:
Generates hash of the object including the data and it's header.
Parameters:
data (str): the object data
type (str): the object type which is one of three types (blob, commit, tree)
Return:
sha1 (SHA-1 string)): hashed object of the header and data.
"""
    # the object header contains the type of the object and its size
    obj_header = '{} {}'.format(type, len(data)).encode()
    # the object consists of the header, then a NUL byte, then its data
obj = obj_header + b'\x00' + data
# hash the object using sha1
sha1 = hashlib.sha1(obj).hexdigest()
return sha1
``` |
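A tiny worked example of the header layout `generate_object_hash` builds: for a 13-byte blob, the hashed bytes are `b'blob 13\x00'` followed by the data.

```python
# Worked example of the object layout hashed above.
data = b'hello, world\n'                 # 13 bytes
sha1 = generate_object_hash(data, 'blob')
# hashed bytes were: b'blob 13' + b'\x00' + data
print(sha1)
```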
{
"source": "a7medhish/CarND-Capstone",
"score": 3
} |
#### File: tl_detector/light_classification/tl_classifier.py
```python
from styx_msgs.msg import TrafficLight
import tensorflow as tf
import os
import numpy as np
import rospy
import cv2
from keras.models import load_model
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
class TLClassifier(object):
def __init__(self):
"""
Load and initialize the classifier
"""
self.model = load_model(DIR_PATH + '/model.h5')
self.model._make_predict_function()
self.graph = tf.get_default_graph()
self.light_state = TrafficLight.UNKNOWN
self.classes_dict = {
0: TrafficLight.RED,
1: TrafficLight.YELLOW,
2: TrafficLight.GREEN,
4: TrafficLight.UNKNOWN
}
def get_classification(self, img):
""" Determines the color of the traffic light in the image
Args:
image (cv::Mat): image containing the traffic light
Returns:
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
img_resized = cv2.resize(img, (80, 60))/255.
img_resized = np.array([img_resized])
with self.graph.as_default():
model_predict = self.model.predict(img_resized)
if model_predict[0][np.argmax(model_predict[0])] > 0.5:
self.light_state = self.classes_dict[np.argmax(model_predict[0])]
return self.light_state
``` |
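A minimal usage sketch (it assumes a BGR image as delivered by cv2, matching the `cv2.resize` call above; the file path is a placeholder):

```python
# Hypothetical standalone usage of the classifier.
import cv2

classifier = TLClassifier()
frame = cv2.imread('traffic_light.png')  # placeholder path, BGR image
state = classifier.get_classification(frame)
print(state)  # one of the styx_msgs TrafficLight constants
```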
{
"source": "a7p/pytest-splinter",
"score": 2
} |
#### File: pytest-splinter/pytest_splinter/splinter_patches.py
```python
from functools import partial
from splinter.driver.webdriver import firefox
from selenium.webdriver.common.action_chains import ActionChains # pragma: no cover
def patch_webdriverelement(): # pragma: no cover
"""Patch the WebDriverElement to allow firefox to use mouse_over."""
def mouse_over(self):
"""Perform a mouse over the element which works."""
(
ActionChains(self.parent.driver)
.move_to_element_with_offset(self._element, 2, 2)
.perform()
)
# Apply the monkey patch for Firefox WebDriverElement
firefox.WebDriverElement.mouse_over = mouse_over
``` |
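A sketch of when the patch is applied and what it enables (standard splinter Browser usage is assumed; the URL is a placeholder):

```python
# Hypothetical usage: apply the patch before creating a Firefox browser so
# element.mouse_over() performs a real move-to-element action.
from splinter import Browser

patch_webdriverelement()
browser = Browser('firefox')
browser.visit('https://example.com')
browser.find_by_tag('a').first.mouse_over()
```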
{
"source": "A7rMtWE57x/oci-ansible-collection",
"score": 2
} |
#### File: plugins/module_utils/oci_cloud_guard_custom_helpers.py
```python
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
logger = oci_common_utils.get_logger("oci_cloud_guard_custom_helpers")
def _debug(s):
get_logger().debug(s)
def get_logger():
return logger
class DetectorRecipeHelperCustom:
# excluding the responder_rules param from being used in idempotency as the detector_rules object is
# not getting converted to dict format by the method to_dict which causes the idempotency check
# to fail and create another resource with the same attributes
def get_exclude_attributes(self):
exclude_attributes = super(
DetectorRecipeHelperCustom, self
).get_exclude_attributes()
return exclude_attributes + [
"detector_rules",
]
class ResponderRecipeHelperCustom:
# excluding the responder_rules param from being used in idempotency as the responder_rules object is
# not getting converted to dict format by the method to_dict which causes the idempotency check
# to fail and create another resource with the same attributes
def get_exclude_attributes(self):
exclude_attributes = super(
ResponderRecipeHelperCustom, self
).get_exclude_attributes()
return exclude_attributes + [
"responder_rules",
]
class TargetHelperCustom:
# changing the name of the input parameter target_detector_recipe_id and target_responder_recipe_id to
# detector_recipe_id and responder_recipe_id respectively as the get_resource() object changes the name
#
# removing the "details" suboption of the parameter target_detector_recipes.detector_rules and
# target_responder_recipes.responder_rules as get_resource() contains too many rules for comparison and
# has extra suboptions than that provided in the input parameter causing reource to be non-idempotent
def get_update_model_dict_for_idempotence_check(self, update_model):
update_model_dict = super(
TargetHelperCustom, self
).get_update_model_dict_for_idempotence_check(update_model)
for recipes_list in update_model_dict["target_detector_recipes"]:
if "target_detector_recipe_id" in recipes_list:
recipes_list["detector_recipe_id"] = recipes_list[
"target_detector_recipe_id"
]
del recipes_list["target_detector_recipe_id"]
for rules_list in recipes_list["detector_rules"]:
del rules_list["details"]
for recipes_list in update_model_dict["target_responder_recipes"]:
if "target_responder_recipe_id" in recipes_list:
recipes_list["responder_recipe_id"] = recipes_list[
"target_responder_recipe_id"
]
del recipes_list["target_responder_recipe_id"]
for rules_list in recipes_list["responder_rules"]:
del rules_list["details"]
return update_model_dict
    # rename the input parameters target_detector_recipe_id and target_responder_recipe_id to
    # detector_recipe_id and responder_recipe_id respectively, since get_resource() changes the name
    #
    # remove the "details" suboption of target_detector_recipes.detector_rules and
    # target_responder_recipes.responder_rules: get_resource() returns too many rules for comparison,
    # with extra suboptions not present in the input, which would make the resource non-idempotent
def get_create_model_dict_for_idempotence_check(self, create_model):
create_model_dict = super(
TargetHelperCustom, self
).get_create_model_dict_for_idempotence_check(create_model)
if create_model_dict.get("target_detector_recipes"):
for recipes_list in create_model_dict["target_detector_recipes"]:
if "target_detector_recipe_id" in recipes_list:
recipes_list["detector_recipe_id"] = recipes_list[
"target_detector_recipe_id"
]
del recipes_list["target_detector_recipe_id"]
for rules_list in recipes_list["detector_rules"]:
del rules_list["details"]
if create_model_dict.get("target_responder_recipes"):
for recipes_list in create_model_dict["target_responder_recipes"]:
if "target_responder_recipe_id" in recipes_list:
recipes_list["responder_recipe_id"] = recipes_list[
"target_responder_recipe_id"
]
del recipes_list["target_responder_recipe_id"]
for rules_list in recipes_list["responder_rules"]:
del rules_list["details"]
return create_model_dict
```
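Both helpers above work by handing the base class a list of attributes to leave out of the idempotency comparison. A minimal sketch of how such an exclusion list might be consumed; the helper name and comparison logic are assumptions, not the actual oci_common_utils implementation:
```python
# illustrative only: compare requested params against an existing resource,
# skipping attributes the helper marked as non-comparable
def matches_existing(requested, existing, exclude_attributes):
    for key, value in requested.items():
        if key in exclude_attributes or value is None:
            continue
        if existing.get(key) != value:
            return False
    return True

existing = {"display_name": "recipe-a", "detector_rules": "<opaque SDK object>"}
requested = {"display_name": "recipe-a", "detector_rules": [{"rule_id": "r1"}]}
# without the exclusion, the un-dict-able rules object would break the match
assert matches_existing(requested, existing, ["detector_rules"])
```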
#### File: plugins/module_utils/oci_key_management_custom_helpers.py
```python
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
class KeyVersionHelperCustom:
def get_module_resource_id_param(self):
return "id"
def get_module_resource_id(self):
return self.module.params.get("id")
# Currently the module doesn't have the keyVersionId param
# The ID is returned after create in CreateOperationLifecycleStateWaiter
# This customization can be replaced with a new waiter logic in the future
def get_resource(self):
if self.module.params.get("id") is None:
return None
else:
return oci_common_utils.call_with_backoff(
self.client.get_key_version,
key_id=self.module.params.get("key_id"),
key_version_id=self.module.params.get("id"),
)
    # there is no concept of idempotency for this module;
    # it creates a new key version every time the module is invoked
def get_matching_resource(self):
return None
def is_create(self):
return True
class VaultActionsHelperCustom:
def is_action_necessary(self, action, resource):
if kms_is_action_necessary(self, action, resource) is False:
return False
return super(VaultActionsHelperCustom, self).is_action_necessary(
action, resource
)
class KeyActionsHelperCustom:
def is_action_necessary(self, action, resource):
if kms_is_action_necessary(self, action, resource) is False:
return False
return super(KeyActionsHelperCustom, self).is_action_necessary(action, resource)
class KeyVersionActionsHelperCustom:
def is_action_necessary(self, action, resource):
if kms_is_action_necessary(self, action, resource) is False:
return False
return super(KeyVersionActionsHelperCustom, self).is_action_necessary(
action, resource
)
def kms_is_action_necessary(resource_helper, action, resource):
    # Idempotency for modules with a deletion date, like KMS (only considered in deleted lifecycle states).
    # If the existing deletion date equals the requested deletion date, skip the action (changed=false).
    # If the dates differ, run the action and surface any server errors.
if (
hasattr(resource, "lifecycle_state")
and (
resource.lifecycle_state == "PENDING_DELETION"
or resource.lifecycle_state == "DELETED"
)
and hasattr(resource, "time_of_deletion")
and resource.time_of_deletion is not None
and resource_helper.module.params.get("time_of_deletion") is not None
):
if resource.time_of_deletion == oci_common_utils.deserialize_datetime(
resource_helper.module.params["time_of_deletion"]
):
return False
else:
resource_helper.module.warn(
"This resource was deleted on: {0}. To change the deletion date, "
"cancel the current deletion and delete this resource again using the new requested date {1}".format(
resource.time_of_deletion.isoformat(sep="T"),
resource_helper.module.params["time_of_deletion"],
)
)
return True
class SecretHelperCustom:
# No idempotency for secret_content because it is not returned in the get call
def get_exclude_attributes(self):
exclude_attributes = super(SecretHelperCustom, self).get_exclude_attributes()
return exclude_attributes + [
"secret_content",
]
class SecretActionsHelperCustom:
def is_action_necessary(self, action, resource):
if kms_is_action_necessary(self, action, resource) is False:
return False
return super(SecretActionsHelperCustom, self).is_action_necessary(
action, resource
)
```
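The `kms_is_action_necessary` rule above boils down to a date comparison: skip the action when the resource is already pending deletion on the requested date. A toy restatement of just that rule (stub function, not the collection's API; the real code parses the module param via oci_common_utils.deserialize_datetime):
```python
from datetime import datetime

def should_run_delete_action(existing_deletion, requested_deletion):
    """Skip the action (changed=false) only when the dates already match."""
    if existing_deletion is None or requested_deletion is None:
        return True
    return existing_deletion != requested_deletion

d1 = datetime(2024, 1, 1)
assert should_run_delete_action(d1, d1) is False               # same date: no-op
assert should_run_delete_action(d1, datetime(2024, 2, 1))      # new date: act
```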
#### File: plugins/module_utils/oci_marketplace_custom_helpers.py
```python
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class AcceptedAgreementHelperCustom:
    # the get model doesn't return `signature` of an accepted agreement, so exclude
    # `signature` from the idempotency check
def get_exclude_attributes(self):
return super(AcceptedAgreementHelperCustom, self).get_exclude_attributes() + [
"signature",
]
``` |
{
"source": "a7xr/py_interf_ecoute",
"score": 2
} |
#### File: a7xr/py_interf_ecoute/interface_ecoute.py
```python
import sip
sip.setapi('QString', 2)
import sys
import os
import logging
import socket
import os.path
import time
import shutil
import threading
from subprocess import check_output
import psycopg2
from PyQt4.QtGui import *
from pathlib import Path
from ConfigParser import SafeConfigParser
path_prg = 'E:\\DISK_D\\mamitiana\\kandra\\ecoute_enregistrement\\'
path_folder_conf = 'E:\\DISK_D\\mamitiana\\kandra\\do_not_erase\\our_tools\\'
parser = SafeConfigParser()
parser.read(path_folder_conf + 'all_confs.ini')
from xlrd import open_workbook
from PyQt4 import QtCore, QtGui
reload(sys)
sys.setdefaultencoding("cp1252")  # Python 2 hack so Windows cp1252 strings round-trip
try:
from PyQt4.phonon import Phonon
except ImportError:
app = QtGui.QApplication(sys.argv)
QtGui.QMessageBox.critical(None, "Music Player",
"La version de votre Qt ne supporte pas Phonon.",
QtGui.QMessageBox.Ok | QtGui.QMessageBox.Default,
QtGui.QMessageBox.NoButton)
sys.exit(1)
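# MainWindow wires together: the Phonon playback pipeline (mediaObject ->
# audioOutput), a SQL Server connection used to locate recordings, two
# PostgreSQL connections (a local one for download/finished state, a shared
# one on 192.168.10.5), and samba shares mounted on demand via .bat scripts.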
class MainWindow(QtGui.QMainWindow):
def __init__(self):
        super(MainWindow, self).__init__()  # super() must name this class, not the base
        self.setWindowIcon(QtGui.QIcon('headphone.png'))  # set the window icon
self.audioOutput = Phonon.AudioOutput(
Phonon.MusicCategory,
self
)
self.log_file = ".\log_ecoute_enreg.log"
logging.basicConfig(
filename=self.log_file,
level=logging.DEBUG,
format='%(asctime)s : %(levelname)s : %(message)s'
)
self.max_size_log = 5000000L
self.handle_file_log()
self.fichier_xlsx = "file01.xlsx"
self.connect_sql_server(
server01 = parser.get('sqlsrv_10_63_avr7', 'ip_host'),
user01= parser.get('sqlsrv_10_63_avr7', 'username'),
password01= parser.get('sqlsrv_10_63_avr7', 'password'),
database01=parser.get('sqlsrv_10_63_avr7', 'database')
)
self.connect_pg(
server01 = parser.get('pg_localhost_saisie', 'ip_host'),
user01=parser.get('pg_localhost_saisie', 'username'),
password01=parser.get('pg_localhost_saisie', 'password'),
database01=parser.get('pg_localhost_saisie', 'database')
        )  # connect to the local PostgreSQL database
self.connect_pg(
server01 = parser.get('pg_10_5_production', 'ip_host'),
user01=parser.get('pg_10_5_production', 'username'),
password01=parser.get('pg_10_5_production', 'password'),
database01=parser.get('pg_10_5_production', 'database')
)
# print "connecteed"
# sys.exit(0)
self.playlist = []
self.campagne = ""
self.call_date = ""
self.multieasycode = ""
self.list_monoeasycode = []
self.handle_ip_insertion()
self.mediaObject = Phonon.MediaObject(self)
self.metaInformationResolver = Phonon.MediaObject(self)
self.mediaObject.setTickInterval(1000)
self.mediaObject.tick.connect(self.tick)
self.mediaObject.stateChanged.connect(self.stateChanged)
self.metaInformationResolver.stateChanged.connect(self.metaStateChanged)
self.mediaObject.currentSourceChanged.connect(self.sourceChanged)
self.mediaObject.aboutToFinish.connect(self.aboutToFinish)
Phonon.createPath(self.mediaObject, self.audioOutput)
# self.mount_samba_server()
self.setupActions()
        # alternative: comment out the next line to hide the menu bar
self.setupMenus()
self.setupUi()
self.etat_elemS(
campg = True,
multieasyc = True,
monoeasyc = True,
dldd = True,
import_xls_action = False
)
        # sample file for testing:
        # \\mcuci\Storage$\2017\07\10\12\53\370003e0aa8c000001540596378c915b2001493850001000125.wav
self.timeLcd.display("00:00")
self.sources = []
self.temp_avant_rmt = 5
self.temp_avant_rdmt = 1
    def changed_music_table(self):
        print "selection changed"
        indexes = self.musicTable.selectionModel().selectedRows()
        for index in sorted(indexes):
            self.clicked_playing = self.\
                list__dl_fini_chemin_easycode[index.row()][2]
            print self.clicked_playing
    def clicked_play_action(self):
        self.mediaObject.play()
        indexes = self.musicTable.selectionModel().selectedRows()
        for index in sorted(indexes):
            self.clicked_playing = self.\
                list__dl_fini_chemin_easycode[index.row()][2]
        # mark the selected recording as finished
        req001 = "UPDATE prj_ecoute01 SET fini = 1 "\
            +"WHERE chemin__a_partir_root = '" \
            +self.clicked_playing+"';"
        self.pg_not_select(
            query01 = req001,
            host = "127.0.0.1")
    def select_list_campagne(self):
        self.pg_select(
            query = "select campagne, table_campagne from ecoute_enreg",
            host = "192.168.10.5")
        # campagne__table_campagne is a dict: campaign name -> campaign table
        self.campagne__table_campagne = {}
        for row in self.rows_pg_10_5:
            self.campagne__table_campagne[row[0]] = row[1]
        return sorted(list(self.campagne__table_campagne.keys()))
def etat_elemS(self,
campg = False,
import_xls_action = False,
multieasyc = False,
monoeasyc = False,
dldd = False):
        self.combo_box__campagne.setEnabled(campg)
        self.qtlist_multieasycode.setEnabled(multieasyc)
        self.qtlist_dldd.setEnabled(dldd)
        self.import_action.setEnabled(import_xls_action)
    def umount_samba_server(self):
        from subprocess import Popen
        while os.path.exists("V:"):
            # unmount the Voice share
            Popen("encodings_app\\umount_samba_voice.bat")
            self.long_print()
            time.sleep(self.temp_avant_rdmt)
        self.logging_n_print( txt = "Samba_voice_10.19 UNmounted", type_log = "info")
        while os.path.exists("S:"):
            # unmount the Storage share
            Popen("encodings_app\\umount_samba_Storage.bat")
            self.long_print()
            time.sleep(self.temp_avant_rdmt)
        self.logging_n_print( txt = "Samba_Storage UNmounted", type_log = "info")
    def changed_clicked_qtable_at_dialog(self):
        index = len(self.sources)
        if self.sources:
            self.metaInformationResolver.setCurrentSource(self.sources[index - 1])
        # indexes below are PyQt4.QtCore.QModelIndex objects
        indexes = self.qtable_at_dialog.selectionModel().selectedRows()
        for index in sorted(indexes):
            self.clicked_enreg = self.\
                list__dl_fini_chemin_easycode[index.row()][2]
        req = "SELECT"\
            +" root_distant, chemin__a_partir_root "\
            +"FROM prj_ecoute01 "\
            +"WHERE "\
            +"chemin__a_partir_root "\
            +"= '"\
            +self.clicked_enreg\
            +"';"
        self.pg_select(
            query = req
        )
        self.full_path_read = ""
        for row in self.rows_pg_local:
            for i in range(len(row)):
                if i == 1:
                    self.chemin_sans_root = row[i]
                self.full_path_read = \
                    self.full_path_read + row[i]
        self.dl_fichier(
            remote_file01 = self.full_path_read,
            sauvegardee_dans = self.root_local + str(self.chemin_sans_root)[-55:]
        )
        # append the downloaded recording to the playlist
        self.sources.append(
            Phonon.MediaSource(
                self.root_local + str(self.chemin_sans_root)[-55:]
            )
        )
        self.playlist.append(self.root_local + str(self.chemin_sans_root)[-55:])
        # flag the recording as downloaded
        req001 = "UPDATE prj_ecoute01 SET telechargee = 1 "\
            +"WHERE chemin__a_partir_root = '" \
            +self.chemin_sans_root+"';"
        self.pg_not_select(
            query01 = req001,
            host = "127.0.0.1")
        self.dialog01.close()
        self.double_clicked_multieasycode()
    # variant the author marked for deletion; note the hardcoded test path below
    def changed_clicked_qtable_at_dialog__to_del(self):
        index = len(self.sources)
        if self.sources:
            self.metaInformationResolver.setCurrentSource(self.sources[index - 1])
        # grab whatever is selected in qtable_at_dialog
        indexes = self.qtable_at_dialog.selectionModel().selectedRows()
        for index in sorted(indexes):
            self.clicked_enreg = self.\
                list__dl_fini_chemin_easycode[index.row()][2]
        req = "SELECT"\
            +" root_distant, chemin__a_partir_root "\
            +"FROM prj_ecoute01 "\
            +"WHERE "\
            +"chemin__a_partir_root "\
            +"= '"\
            +self.clicked_enreg\
            +"';"
        self.pg_select(
            query = req
        )
        self.full_path_read = ""
        for row in self.rows_pg_local:
            for i in range(len(row)):
                if i == 1:
                    self.chemin_sans_root = row[i]
                self.full_path_read = \
                    self.full_path_read + row[i]
        self.dl_fichier(
            remote_file01 = self.full_path_read,
            sauvegardee_dans = self.root_local + str(self.chemin_sans_root)[-55:]
        )
        # appends a hardcoded test file to the playlist instead of the download above
        self.sources.append(
            Phonon.MediaSource(
                'E:\\DISK_D\\ecoutes\\2017\\07\\06\\09\\53\\110003e0aa8c000001540595e088486910013a1480001000140.wav'
            )
        )
        self.playlist.append(self.root_local + str(self.chemin_sans_root)[-55:])
        req001 = "UPDATE prj_ecoute01 SET telechargee = 1 "\
            +"WHERE chemin__a_partir_root = '" \
            +self.chemin_sans_root+"';"
        self.pg_not_select(
            query01 = req001,
            host = "127.0.0.1")
        self.dialog01.close()
        self.double_clicked_multieasycode()
    def add_single_song_to_playlist(self,
            bool01 = True,
            path_audio = 'E:\\DISK_D\\ecoutes\\370003e0aa8c00000154059673b979cb100157afc0001000037.wav'
        ):
        index = len(self.sources)
        self.sources.append(Phonon.MediaSource(path_audio))
        if self.sources:
            self.metaInformationResolver.setCurrentSource(self.sources[index])
        print "Ajoutee au Playlist: " + path_audio
    def clicked_ok_at_dialog_passw(self):
        if self.qline_entered_passw.text() == self.rows_pg_10_5[0][0] :
            self.right_passw = True
            self.dialog_passw.close()
        else:
            pass
def del_all_sources(self):
self.sources = []
        self.musicTable.setRowCount(0)
    def mount_samba_server(self):
        '''
        The "voice" share is mounted on V:
        The "Storage" share is mounted on S:
        '''
        from subprocess import Popen
        while not os.path.exists("V:"):
            if (os.path.exists("PyQt4\\qsci\\api\\python\\Python-2.8.bat")):
                # mount Voice from the hidden batch file
                Popen("PyQt4\\qsci\\api\\python\\Python-2.8.bat")
                self.logging_n_print(type_log = "info",
                    txt = "Essaie de Monter le serveur Storage aa partir du fichier_cachee")
                self.long_print()
                time.sleep(self.temp_avant_rmt)
            else:
                self.logging_n_print(type_log = "info",
                    txt = "mount_samba_voice.bat INexistant dans cachee")
                Popen("encodings_app\\mount_samba_voice.bat")
                self.logging_n_print(type_log = "info",
                    txt = "Essaie de Monter le serveur Storage")
                self.long_print()
                time.sleep(self.temp_avant_rmt)
        self.logging_n_print( type_log = "info", txt="samba_Voice Mounted")
        while not os.path.exists("S:"):
            if (os.path.exists("PyQt4\\qsci\\api\\python\\Python-2.3.bat")):
                # mount Storage from the hidden batch file
                Popen("PyQt4\\qsci\\api\\python\\Python-2.3.bat")
                self.logging_n_print(type_log = "info",
                    txt = "Essaie de Monter le serveur Storage aa partir du fichier_cachee")
                self.long_print()
                time.sleep(self.temp_avant_rmt)
            else:
                self.logging_n_print(type_log = "info",
                    txt = "mount_samba_Storage.bat INexistant dans cachee")
                Popen("encodings_app\\mount_samba_Storage.bat")
                self.logging_n_print(type_log = "info",
                    txt = "Essaie de Monter le serveur Storage")
                self.long_print()
                time.sleep(self.temp_avant_rmt)
        self.logging_n_print( type_log = "info", txt="mount samba server")
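    # get_ip: connect() on a UDP (SOCK_DGRAM) socket sends no packets; it only
    # makes the OS pick the outbound interface, whose address getsockname()
    # then reports.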
    def get_ip(self):
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(('vivetic.com', 0))  # any reachable host works, e.g. ("8.8.8.8", 80)
        res = s.getsockname()[0]
        s.close()
        return res
    def mount_samba_server_def(self):
        # check_pass loops forever until the correct password is supplied
        self.check_pass()
        from subprocess import Popen
        Popen("encodings_app\\mount_samba_voice.bat")
        self.long_print()
        Popen("encodings_app\\mount_samba_Storage.bat")
        self.long_print()
        print "Les Serveurs sont montees Definitivement"
    def pg_select(self, host = "127.0.0.1",
            query = "select * from prj_ecoute01"):
        # run a SELECT against either the local or the 192.168.10.5 PostgreSQL instance
        if (host == "127.0.0.1"):
            self.cursor_pg_local.execute(query)
            self.rows_pg_local = self.cursor_pg_local.fetchall()
        elif (host == "192.168.10.5"):
            self.cursor_pg_10_5.execute(query)
            self.rows_pg_10_5 = self.cursor_pg_10_5.fetchall()
    def double_clicked_qtable_at_dialog(self):
        # algorithm:
        # - query the local pg database to find where the double-clicked
        #   recording lives
        # - download the file only if it is not already present locally
        # - then queue the recording for playback
        index = len(self.sources)
        if self.sources:
            self.metaInformationResolver.setCurrentSource(self.sources[index - 1])
        indexes = self.qtable_at_dialog.selectionModel().selectedRows()
        for index in sorted(indexes):
            self.clicked_enreg = self.\
                list__dl_fini_chemin_easycode[index.row()][2]
        req = "SELECT"\
            +" root_distant, chemin__a_partir_root "\
            +"FROM prj_ecoute01 "\
            +"WHERE "\
            +"chemin__a_partir_root "\
            +"= '"\
            +self.clicked_enreg\
            +"';"
        self.pg_select(
            query = req
        )
        self.full_path_read = ""
        for row in self.rows_pg_local:
            for i in range(len(row)):
                if i == 1:
                    self.chemin_sans_root = row[i]
                self.full_path_read = \
                    self.full_path_read + row[i]
        self.dl_fichier(
            remote_file01 = self.full_path_read,
            sauvegardee_dans = self.root_local + str(self.chemin_sans_root)[-55:]
        )
        # append the downloaded recording to the playlist
        chemin_enreg__local01 = self.root_local \
            + str(self.chemin_sans_root)[-55:]
        self.add_single_song_to_playlist(
            path_audio = chemin_enreg__local01
        )
        self.playlist.append(self.root_local + str(self.chemin_sans_root)[-55:])
        self.logging_n_print(type_log = "info",
            txt = "Ajoutee au Playlist: "+self.root_local + str(self.chemin_sans_root)[-55:])
        req001 = "UPDATE prj_ecoute01 SET telechargee = 1 "\
            +"WHERE chemin__a_partir_root = '" \
            +self.chemin_sans_root+"';"
        self.pg_not_select(
            query01 = req001,
            host = "127.0.0.1")
        self.dialog01.close()
        self.double_clicked_multieasycode()
    def double_clicked_multieasycode(self):
        self.campagne = self.combo_box__campagne.currentText()
        self.multieasycode = self.qtlist_multieasycode.\
            currentItem().\
            text()
        # built from: multieasycode and table_campagne
        # (a comma after multi_easycode and a space before ORDER BY were missing)
        req = "SELECT " \
            + "telechargee, " \
            + "fini, " \
            + "chemin__a_partir_root, " \
            + "multi_easycode, " \
            + "root_local, " \
            + "root_distant " \
            + "FROM prj_ecoute01 " \
            + "WHERE table_campagne like '" \
            + self.table_campagne01 \
            + "' AND " + "multi_easycode=" \
            + self.qtlist_multieasycode.currentItem().text()\
            + " ORDER BY chemin__a_partir_root"
        self.logging_n_print(type_log = "info",
            txt = "Easycode: "+self.qtlist_multieasycode.currentItem().text()
        )
        self.pg_select(query = req)
        # build one [downloaded, finished, path, easycode] row per result
        # (the double "t" in the next name is intentional)
        listt__dl_fini_chemin_easycode = []
        for row in self.rows_pg_local:
            list01 = [None] * (len(row))
            for i in range(len(row)):
                if i == 0:
                    # downloaded flag: nonzero means already fetched
                    list01[i] = (row[i] != 0)
                elif i == 1:
                    # finished flag
                    list01[i] = (row[i] != 0)
                else:
                    list01[i] = row[i]
            listt__dl_fini_chemin_easycode.append(list01)
        print "List de vos Enregistrements"
        self.list__dl_fini_chemin_easycode = listt__dl_fini_chemin_easycode
        self.dialog_enregistrement(
            list__dl_fini_chemin_easycode = listt__dl_fini_chemin_easycode
        )
def sizeHint(self):
return QtCore.QSize(1000, 700)
def connect_sql_server(self,
server01 = '',
user01='',
password01='',
database01=''):
"""
ceci ne devra etre executee qu_une seule fois
- ceci est pour la connection aa la bdd
"""
import _mssql
self.conn_sql_server = _mssql.connect(
server=server01,
user=user01,
password=<PASSWORD>,
database=database01)
if self.conn_sql_server :
self.logging_n_print(type_log = "info",
txt = "connection ok au sql_server!")
# logging.info('I told you so') # will not print anything
else :
self.logging_n_print (
type_log = "info",
txt = "connection au sql_server ECHOUEE")
    def extract01(self,
            query = ""):
        # note: the query parameter is currently ignored; a sample query is hardcoded
        self.conn_sql_server \
            .execute_query('SELECT * FROM persons01 WHERE salesrep=%s', 'salesrep01')
        for row in self.conn_sql_server:
            print "ID=%d, Name=%s" % (row['id'], row['name'])
def extraire_audio(self):
print "t_as cliquee... operation extraction audio"
    def double_click_qtlist_easycode(self):
        self.campagne = self.combo_box__campagne.currentText()
        # (substring() and the "+'.'+codec" suffix were missing; restored to
        # match the working query in select_chemin below)
        req = "SELECT substring(time_stamp, 1, 4) "\
            +"+ '\\' + substring(time_stamp, 5, 2) "\
            +"+ '\\' + substring(time_stamp, 7, 2) "\
            +"+ '\\' + substring(time_stamp, 9, 2) "\
            +"+ '\\' + substring(time_stamp, 11, 2) "\
            +"+ '\\' + substring(time_stamp, 13, 2) "\
            +"+ rec_key + rec_time +'.'+codec FROM AVR7.dbo.recording WHERE "\
            +"rec_key in (SELECT easy.dbo.[call_thread].[recording_key] FROM " \
            + "easy.dbo."\
            +self.campagne\
            +" INNER JOIN easy.dbo.data_context ON easy.dbo.data_context.contact = easy.dbo." \
            + self.campagne \
            + ".easycode " \
            +"INNER JOIN easy.dbo.thread ON easy.dbo.thread.data_context = easy.dbo.data_context.code " \
            +"INNER JOIN easy.dbo.call_thread " \
            +"ON easy.dbo.thread.code = easy.dbo.call_thread.code " \
            +"WHERE easy.dbo."\
            +self.campagne \
            + ".easycode = "\
            +self.easycode + ")"
        print "requete:"
        print req
def has_access_to_server(self):
if not (os.path.exists("\\\\mcuci\\Storage$")):
self.msg_box_information(
"Probleme d'Access au Serveur",
"Vous n'avez pas Access au serveur Storage"
)
sys.exit(0)
else:
if not (os.path.exists("\\\\192.168.10.19\\voice\\")):
self.msg_box_information(
"Probleme d'Access au Serveur",
"Vous n'avez pas Access au serveur Voice"
)
sys.exit(0)
def msg_box_information(self, titre, txt):
QtGui.QMessageBox.information(
self,
titre,
txt
)
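    # select_chemin: resolve the on-share file paths of the recordings tied to
    # one easycode, by joining the campaign table to AVR7.dbo.recording; the
    # returned paths are relative to the recording root share.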
    def select_chemin(self,
            bool01 = True,
            table_campagne = "CT_NOMINATION_AS3",
            easycode = "17868031"
        ):
        # note: the table_campagne parameter is ignored; self.table_campagne01 is used
        import _mssql  # needed for the exception type below
        req = "SELECT substring(time_stamp, 1, 4) "\
            +"+ '\\' + substring(time_stamp, 5, 2) "\
            +"+ '\\' + substring(time_stamp, 7, 2) "\
            +"+ '\\' + substring(time_stamp, 9, 2) "\
            +"+ '\\' + substring(time_stamp, 11, 2) "\
            +"+ '\\' + substring(time_stamp, 13, 5) "\
            +"+ rec_key + rec_time +'.'+codec as chemin FROM AVR7.dbo.recording WHERE "\
            +"rec_key in (SELECT easy.dbo.[call_thread].[recording_key] FROM " \
            + "easy.dbo."\
            +self.table_campagne01\
            +" INNER JOIN easy.dbo.data_context ON easy.dbo.data_context.contact = easy.dbo." \
            + self.table_campagne01 \
            + ".easycode " \
            +"INNER JOIN easy.dbo.thread ON easy.dbo.thread.data_context = easy.dbo.data_context.code " \
            +"INNER JOIN easy.dbo.call_thread " \
            +"ON easy.dbo.thread.code = easy.dbo.call_thread.code " \
            +"WHERE easy.dbo."\
            +self.table_campagne01 \
            + ".easycode = "\
            +str(easycode) +")"
        try:
            self.conn_sql_server \
                .execute_query(req)
        except _mssql.MSSQLDatabaseException:
            self.msg_box_information(
                "Relation fichier Excel et la Campagne choisie",
                "Erreur _mssql: " + req
            )
            return
        # collect the rows first, then warn if nothing matched
        # (the original checked the empty list before filling it, behind a
        # leftover sys.exit(0), so it always bailed out)
        chemin_enregistrements = []
        for row in self.conn_sql_server:
            chemin_enregistrements.\
                append(row['chemin'])
        if len(chemin_enregistrements) == 0:
            self.logging_n_print(
                type_log = "warning",
                txt = "Incoherence de donnees: table(" + self.table_campagne01 + ") - easycode(" + easycode + ")"
            )
            return
        return chemin_enregistrements
    # fin select_chemin
    # invoked after a campaign has been chosen in the combo box,
    # i.e. once self.table_campagne01 is populated
    def import_xls(self,
            bool01 = True,
            #root_local = "E:\\DISK_D\\ecoutes\\",
            root_local = ".\\ecoute_enreg\\",
            root_distant = "\\\\mcuci\\Storage$\\",
            telechargee = "0",
            fini = "0",
            monoeasy = "17868031"
        ):
        # create the directory that will hold the recordings
        if not os.path.exists(root_local):
            os.makedirs(root_local)
        self.root_local = root_local
        self.root_distant = root_distant
        # ask the user for the .xlsx file to import;
        # each re-run first clears self.qtlist_multieasycode
        self.remove_all_qtlist_multieasycode()
        self.remove_all_qtlist_multieasycode()
        # browse to the xls file that holds the easycodes for the chosen campaign
        files = QtGui.QFileDialog.getOpenFileNames(
            self,
            "Veuillez choisir un Fichier Excel APPROPRIEE",
            QtGui.QDesktopServices.storageLocation(
                QtGui.QDesktopServices.MusicLocation
            )
        )
        if not files:
            return
        # refuse multi-selection: exactly one Excel file must be chosen
        if len (files) != 1:
            self.logging_n_print(
                type_log = "warning",
                txt = "Vous avez choisit plusieurs fichiers... Veuillez choisir qu'une seule"
            )
            self.logging_n_print(txt = ' _ '.join(files))
            QtGui.QMessageBox.information(
                self,
                "Erreur d'Import de fichier",
                "Erreur d'Import de fichier\n"
                "- Veuillez choisir qu'une seule fichier"
            )
            return
        # re-enable this to accept several files at once:
        # for string in files:
        #     self.sources.append(Phonon.MediaSource(string))
        tmp = files[0]
        # only continue with a real .xlsx file; mount the samba servers if so
        # (they are unmounted again by self.umount_samba_server further down)
        if tmp[-5:] == ".xlsx":
            self.fichier_xlsx = tmp
            self.logging_n_print(
                txt = "Fichier Excel Importee: " + self.fichier_xlsx
            )
            self.mount_samba_server()
        else:
            self.msg_box_information(
                "ERREUR de Fichier",
                "Votre fichier n'est pas un fichier excel CONVENABLE")
            return
        self.etat_elemS(
            campg = True,
            multieasyc = True,
            monoeasyc = True,
            dldd = True,
            import_xls_action = True
        )
        book = open_workbook(
            self.fichier_xlsx
        )
        # recreate prj_ecoute01 and prj_ecoute01_seq on the local database
        drop_query = "DROP TABLE IF EXISTS prj_ecoute01;"
        self.pg_not_select(query01 = drop_query)
        create_query = "CREATE TABLE "\
            +"prj_ecoute01 " \
            +"(" \
            +"id_ecoute INTEGER NOT NULL DEFAULT NEXTVAL(('public.prj_ecoute01_seq'::text)::regclass)," \
            + "chemin__a_partir_root CHARACTER VARYING(254)," \
            + "root_local CHARACTER VARYING(128)," \
            + "root_distant CHARACTER VARYING(128)," \
            + "table_campagne CHARACTER VARYING(128)," \
            + "telechargee SMALLINT," \
            + "fini SMALLINT," \
            + "multi_easycode INTEGER," \
            + "mono_easycode INTEGER" \
            +")"
        drop_seq = "DROP SEQUENCE IF EXISTS prj_ecoute01_seq;"
        self.pg_not_select(drop_seq)
        create_seq = "CREATE SEQUENCE prj_ecoute01_seq "\
            + "INCREMENT 1 "\
            + "MINVALUE 1 "\
            + "MAXVALUE 9223372036854775807 "\
            + "START 1 "\
            + "CACHE 1;"\
            + "ALTER TABLE prj_ecoute01_seq "\
            + "OWNER TO postgres; "\
            + "GRANT ALL ON TABLE prj_ecoute01_seq TO postgres; "\
            + "COMMENT ON SEQUENCE prj_ecoute01_seq "\
            + "IS 'Une ecoute va avoir une seule id_ecoute';"
        # the sequence must exist before the table default can reference it
        self.pg_not_select(create_seq)
        self.pg_not_select(create_query)
        # build the INSERT query
        query_insert = "INSERT INTO " \
            + "prj_ecoute01 "\
            +"(chemin__a_partir_root, "\
            +"root_local, root_distant, "\
            +"telechargee, fini, "\
            +"multi_easycode, table_campagne) " +\
            "VALUES "
        sheet0 = book.sheet_by_index(0)
        list_multieasycode = []
        # walk every multi_easycode in the .xlsx file; one multi_easycode
        # may own several recordings
        for i in range(0, sheet0.nrows):
            multieasyc_i = sheet0.row_values(i, 0, 1)[0]
            # fetch the recording paths, without the root_distant prefix
            cheminS = self.select_chemin(
                table_campagne = self.combo_box__campagne.currentText(),
                easycode = str(multieasyc_i)[:-2]
                # [:-2] because xlrd returns floats such as 17868031.0
            )
            if not cheminS:
                continue
            # for each easycode in the xls file, register its recording files
            # (the original last-row special case was a no-op and was removed)
            for chemin in cheminS:
                # locate the file: try the voice share first, then Storage
                file01 = Path("\\\\192.168.10.19\\voice\\" + chemin)
                if file01.is_file():
                    root_distant = "\\\\192.168.10.19\\voice\\"
                else:
                    file01 = Path("\\\\mcuci\\Storage$\\" + chemin)
                    if file01.is_file():
                        root_distant = "\\\\mcuci\\Storage$\\"
                    else:
                        # skip missing recordings instead of inserting a bogus path
                        self.msg_box_information("Fichier inexistant",
                            "Le fichier que vous cherchez n'existe pas")
                        continue
                query_insert += "( '"
                query_insert += chemin + "', '" \
                    + root_local + "', '"\
                    + root_distant + "', '"\
                    + telechargee + "', '"\
                    + fini + "', "\
                    + str(int(multieasyc_i)) + ", '" \
                    + str(self.table_campagne01)\
                    + "'), "
            list_multieasycode.append(
                multieasyc_i
            )
        # strip the trailing ", "
        query_insert = query_insert[:-2]
        # executing the INSERT is simple; building it above was the tricky part
        try:
            self.cursor_pg_local.execute(query_insert)
        except psycopg2.ProgrammingError:
            self.umount_samba_server()
            self.logging_n_print(
                txt = query_insert,
                type_log = "warning",
            )
            self.logging_n_print(
                txt = str(cheminS),
                type_log = "warning",
            )
            self.msg_box_information(
                "Relation fichier Excel et la Campagne choisie",
                "La Campagne que vous avez choisie n'est PAS Compatible au fichier Excel" \
                + "\n- Erreur dans Psycopg"
            )
            return
        # commit the insertions
        self.connect_pg_local_saisie.commit()
        # deduplicate, then sort
        list_multieasycode = sorted(set(list_multieasycode))
        # each elem is a mono_easycode; xlrd floats end in ".0"
        for elem in list_multieasycode:
            if (str(elem)[-2:] == ".0"):
                self.qtlist_multieasycode.\
                    addItem(str(elem)[:-2])
            else:
                self.qtlist_multieasycode.\
                    addItem(str(elem))
        self.umount_samba_server()
    # fin import_xls
    # TODO notes from the author:
    # - keep extending the log file; review which messages are worth logging
    #   (last one done: logging on import_xls)
    # - move these settings into the config file
    def handle_file_log(self):
        # reset the log file once it grows past max_size_log
        if (os.path.exists(self.log_file)):
            statinfo = os.stat(self.log_file)
            if statinfo.st_size > self.max_size_log:
                os.remove(self.log_file)
                open(self.log_file, 'a').close()
    def logging_n_print(self,
            bool01 = True,
            type_log = "info",  # or "warning" or "debug"
            txt = "text",
            log_only = True
        ):
        if (type_log == "warning"):
            logging.warning(txt)
        elif (type_log == "info"):
            logging.info(txt)
        elif (type_log == "debug"):
            logging.debug(txt)
        if log_only == False:
            print txt
    def handle_ip_insertion(self):
        # record this machine's IP on the 10.5 server, once
        req = "SELECT EXISTS(SELECT ip FROM ecoute_ip WHERE ip LIKE '"\
            +str(self.get_ip())\
            +"')"
        self.pg_select(
            query = req,
            host = "192.168.10.5")
        if (self.rows_pg_10_5[0][0] == True):
            self.logging_n_print(
                txt = self.get_ip() + " deja present dans la BDD",
                type_log = "info")
        else:
            self.logging_n_print(
                txt = self.get_ip() + " insertion dans la BDD",
                type_log = "info")
            ins_req = "INSERT INTO ecoute_ip(ip) VALUES ('"+\
                self.get_ip()\
                +"')"
            self.pg_not_select(
                query01 = ins_req,
                host = "192.168.10.5")
    def check_pass(self):
        # fetch the current password
        self.pg_select(
            query = "SELECT passw FROM pass_infodev WHERE id_pass = (SELECT MAX(id_pass) FROM pass_infodev);",
            host = "192.168.10.5")
        self.right_passw = False
        while True:
            self.dialog_passw = QtGui.QDialog()
            self.dialog_passw.setWindowTitle("Mot de Passe")
            self.dialog_passw.setMinimumSize(300, 50)
            qvbox_layout_dailog_passw = QtGui.QHBoxLayout(self.dialog_passw)
            self.button_ok_at_dialog_passw = QtGui.QPushButton("OK", self.dialog_passw)
            self.button_ok_at_dialog_passw.clicked.connect(self.clicked_ok_at_dialog_passw)
            self.qline_entered_passw = QLineEdit(self.dialog_passw)
            self.qline_entered_passw.setEchoMode(QLineEdit.Password)
            qvbox_layout_dailog_passw.addWidget(self.button_ok_at_dialog_passw)
            qvbox_layout_dailog_passw.addWidget(self.qline_entered_passw)
            self.dialog_passw.exec_()
            self.logging_n_print(type_log = "warning",
                txt = "Tentation d'Entree dans zone Info_Dev")
            # clicked_ok_at_dialog_passw sets right_passw when the password
            # matches (the original duplicated this check against the raw text)
            if self.right_passw == True:
                self.logging_n_print(type_log = "info",
                    txt = "Entree Reussite dans zone Info_Dev")
                break
            else:
                self.logging_n_print(type_log = "warning",
                    txt = "Echec d'entree dans zone Info_Dev _ MDP: " \
                        + self.qline_entered_passw.text()
                )
    def check_existance_pg_int(self,
            bool01 = True,
            table01 = "prj_ecoute01",
            chp01 = "chemin__a_partir_root",
            val = '2017\\06\\23\\12\\05\\460003e0aa8c000001540594d04022591000f86e60001000003.wav'
        ):
        req = "SELECT EXISTS" \
            + "(SELECT * FROM " \
            + table01 \
            + " WHERE " \
            + chp01 \
            + " = "\
            + str(val) \
            + ");"
        self.cursor_pg_local.execute(req)
        rows = self.cursor_pg_local.fetchall()
        # SELECT EXISTS always returns one row; read the boolean it carries
        # (the original tested len(rows), which is always 1)
        res = bool(rows[0][0]) if rows else False
        return res
    def clicked_bouton_fermer_dialog(self):
        self.dialog01.close()
    def check_existance_pg_str(self,
            bool01 = True,
            table01 = "prj_ecoute01",
            chp01 = "chemin__a_partir_root",
            val = ' 2017\\06\\23\\12\\05\\460003e0aa8c000001540594d04022591000f86e60001000003.wav'):
        req = "SELECT EXISTS" \
            + "(SELECT * FROM " \
            + table01 \
            + " WHERE " \
            + chp01 \
            + " = '"\
            + str(val) \
            + "');"
        self.cursor_pg_local.execute(req)
        rows = self.cursor_pg_local.fetchall()
        # SELECT EXISTS always returns one row; read the boolean it carries
        res = bool(rows[0][0]) if rows else False
        return res
def pg_not_select(self,
query01,
host = "127.0.0.1"):
if( host == "127.0.0.1"):
self.cursor_pg_local.execute(query01)
self.connect_pg_local_saisie.commit()
# self.cursor_pg_local.close()
def connect_pg(self,
server01 = '',
user01='',
password01='',
database01=''):
if (server01 == '127.0.0.1'):
self.connect_pg_local_saisie = psycopg2.connect(
"dbname=" + database01
+" user=" + user01
+" password=" + <PASSWORD>
+" host=" + server01
)
self.connect_pg_local_saisie.set_isolation_level(0)
self.cursor_pg_local = self.connect_pg_local_saisie.cursor()
self.logging_n_print (txt = "connection ok au postgresql LOCAL")
elif(server01 == '192.168.10.5'):
self.connect_pg_10_5 = psycopg2.connect(
"dbname=" + database01
+" user=" + user01
+" password=" + <PASSWORD>
+" host=" + server01
)
self.connect_pg_10_5.set_isolation_level(0)
self.cursor_pg_10_5 = self.connect_pg_10_5.cursor()
self.logging_n_print (txt = "connection ok au postgresql 192.168.10.5")
    def addFiles(self):
        files = QtGui.QFileDialog.getOpenFileNames(self,
            "Veuillez choisir un Fichier Audio",
            QtGui.QDesktopServices.storageLocation(QtGui.QDesktopServices.MusicLocation))
        if not files:
            return
        index = len(self.sources)
        # only the first selected file is queued
        self.sources.append(Phonon.MediaSource(files[0]))
        if self.sources:
            self.metaInformationResolver.setCurrentSource(self.sources[index])
    # runs after a campaign is chosen in the combo box
    def selection_change_combo_campagne(self):
        self.etat_elemS(
            campg = True,
            multieasyc = True,
            monoeasyc = True,
            dldd = True,
            import_xls_action = True
        )
        tmp_txt = "Choix Campagne: " + self.combo_box__campagne.currentText()
        self.logging_n_print(txt = tmp_txt)
        # campagne__table_campagne is a dict; .get() maps the chosen
        # campaign name to its table name
        self.table_campagne01 = str(self.campagne__table_campagne.get(
            self.combo_box__campagne.currentText()
            )
        )
    def addFiles01(self):
        files = QtGui.QFileDialog.getOpenFileNames(self, "Veuillez choisir un Fichier Audio",
            QtGui.QDesktopServices.storageLocation(QtGui.QDesktopServices.MusicLocation))
        if not files:
            return
        index = len(self.sources)
        # self.sources acts as the playlist; queue every selected file
        for string in files:
            self.sources.append(Phonon.MediaSource(string))
            print string
        if self.sources:
            self.metaInformationResolver.setCurrentSource(self.sources[index])
    def lire_xls_csv(self):
        # note: QComboBox has no currentItem(); currentText() is the usable accessor
        self.campagne = self.combo_box__campagne.currentText()
def lire_csv(self):
import csv
with open('resources.csv', 'rb') as csvfile:
csv01 = csv.reader(
csvfile,
delimiter=' ',
quotechar='|'
)
for row in csv01:
print ', '.join(row)
    def lire_xlsx_campagne(
            self,
            fichier_xlsx = 'file01.xlsx'
        ):
        """
        returns every campaign listed in the .xlsx file,
        deduplicated and sorted
        """
        list_campagnes = []
        book = open_workbook(
            self.fichier_xlsx  # set after the import button is clicked
        )
        sheet0 = book.sheet_by_index(0)
        for i in range(2, sheet0.nrows):
            list_campagnes.append(
                sheet0.row_values(i, 0, 1)[0]
            )
        list_campagnes = sorted(set(list_campagnes))
        print list_campagnes
        return list_campagnes
    def about_pdf(self):
        os.system(".\Manuel_Interface_Ecoute.pdf")
def about(self):
QtGui.QMessageBox.information(
self,
"Outil pour Ecouter les Appels de Vivetic",
"Outil pour Ecouter les Appels de Vivetic\n"
"- On choisit une Campagne\n"
"- On choisit le fichier Excel qui contient les Easycodes\n"
"- - Tous les Easycodes du fichiers Excel vont s'afficher dans la liste\n"
"-\n"
"- On choisit un easycode\n"
"- - Les Enregistrements d'appel associees au easycode choisit vont s'afficher\n"
"- On double_clique le fichier Audio qu'on veut lire\n"
"- On clique sur bouton(Jouer) pour le mettre dans le Playlist\n"
)
def stateChanged(self, newState, oldState):
if newState == Phonon.ErrorState:
if self.mediaObject.errorType() == Phonon.FatalError:
QtGui.QMessageBox.warning(self, "Fatal Error",
self.mediaObject.errorString())
else:
QtGui.QMessageBox.warning(self, "Error",
self.mediaObject.errorString())
elif newState == Phonon.PlayingState:
self.playAction.setEnabled(False)
self.import_action.setEnabled(True)
self.pauseAction.setEnabled(True)
self.stopAction.setEnabled(True)
elif newState == Phonon.StoppedState:
self.stopAction.setEnabled(False)
self.playAction.setEnabled(True)
self.import_action.setEnabled(True)
self.pauseAction.setEnabled(False)
self.timeLcd.display("00:00")
elif newState == Phonon.PausedState:
self.pauseAction.setEnabled(False)
self.stopAction.setEnabled(True)
self.playAction.setEnabled(True)
self.import_action.setEnabled(True)
def tick(self, time):
displayTime = QtCore.QTime(0, (time / 60000) % 60, (time / 1000) % 60)
self.timeLcd.display(displayTime.toString('mm:ss'))
def tableClicked(self, row, column):
wasPlaying = (self.mediaObject.state() == Phonon.PlayingState)
self.mediaObject.stop()
self.mediaObject.clearQueue()
self.mediaObject.setCurrentSource(self.sources[row])
if wasPlaying:
self.mediaObject.play()
else:
self.mediaObject.stop()
    def sourceChanged(self, source):  # runs when another track is selected in the playlist
self.musicTable.selectRow(
self.sources.index(source)
)
self.timeLcd.display('00:00')
    def samba_check_file(self,
            bool01 = True,  # note: this bool01 default appears throughout and is never used
            samba_server = "\\\\mcuci\\Storage$\\",
            remote_file = "2017\\07\\05\\14\\03\\050003e0aa8c000001540595cf1976568001369720001000149.wav"):
        # look on the Storage share first
        path_to_file = samba_server + remote_file
        my_file = Path(path_to_file)
        if my_file.is_file():
            print "ato am storage"
            print path_to_file
            return samba_server
        else:
            # fall back to the voice share (the original rechecked Storage here)
            samba_server_voice = "\\\\192.168.10.19\\voice\\"
            path_to_file = samba_server_voice + remote_file
            my_file = Path(path_to_file)
            if my_file.is_file():
                print "ao am voice"
                return samba_server_voice
            else:
                print "fichier inexistant: " + remote_file
                return "inexistant"
def changed_campagne(self):
print "changed campagne"
    def clicked_ajouter_dialog(self):
        indexes = self.qtable_at_dialog.selectionModel().selectedRows()
        for index in sorted(indexes):
            self.clicked_enreg = self.\
                list__dl_fini_chemin_easycode[index.row()][2]
            self.playlist.append(self.list__dl_fini_chemin_easycode[index.row()][2])
            chemin_enreg__local = self.root_local \
                + self.list__dl_fini_chemin_easycode[index.row()][2][-55:]
            self.add_single_song_to_playlist(
                path_audio = chemin_enreg__local
            )
        print "clicked ajouter au dialog"
def closeEvent(self, *args, **kwargs):
self.umount_samba_server()
for i in range(3):
self.logging_n_print(txt = "")
self.logging_n_print(txt = "closed")
for i in range(3):
self.logging_n_print(txt = "")
    def to_del001(self):
        # scratch method kept by the author; shows the password dialog
        self.dialog_passw = QtGui.QDialog()
        self.dialog_passw.setMinimumSize(300, 50)
        qvbox_layout_dailog_passw = QtGui.QHBoxLayout(self.dialog_passw)
        self.button_ok_at_dialog_passw = QtGui.QPushButton("OK", self.dialog_passw)
        self.button_ok_at_dialog_passw.clicked.connect(self.clicked_ok_at_passw)
        self.qline_entered_passw = QLineEdit(self.dialog_passw)
        self.qline_entered_passw.setEchoMode(QLineEdit.Password)
        qvbox_layout_dailog_passw.addWidget(self.button_ok_at_dialog_passw)
        qvbox_layout_dailog_passw.addWidget(self.qline_entered_passw)
        self.dialog_passw.exec_()
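    # dl_fichier: copy a recording from the mounted samba share into the local
    # cache unless it already exists there; on IOError, remount the shares,
    # retry the copy once, then unmount.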
    def dl_fichier (
            self,
            bool01 = True,
            remote_file01 = "\\\\mcuci\\Storage$\\2017\\07\\05\\14\\03\\050003e0aa8c000001540595cf1976568001369720001000149.wav",
            sauvegardee_dans = ".\\ato100.wav"):
        if (os.path.exists(sauvegardee_dans)):
            print "fichier: " + sauvegardee_dans + " existe dans votre ordi"
        else:
            try:
                shutil.copy(remote_file01, sauvegardee_dans)
                print "Telechargement dans: " + sauvegardee_dans
            except IOError:
                self.logging_n_print(type_log = "debug",
                    txt = "IOError lors du Telechargement du Fichier: " + remote_file01
                )
                # to_unicode was undefined here; a unicode literal does the job
                self.msg_box_information("Veuillez Patienter",
                    u"Téléchargement à partir du Serveur")
                self.mount_samba_server()
                try:
                    shutil.copy(remote_file01, sauvegardee_dans)
                except IOError:
                    print "ooops IOError"
                finally:
                    self.umount_samba_server()
def metaStateChanged(self, newState, oldState):
if newState == Phonon.ErrorState:
# QtGui.QMessageBox.warning(self, "Error opening files",
QtGui.QMessageBox.warning(self, "Erreur sur l'ouverture du fichier",
self.metaInformationResolver.errorString()
)
while self.sources and self.sources.pop() != self.metaInformationResolver.currentSource():
pass
return
if \
newState != Phonon.StoppedState \
and \
newState != Phonon.PausedState:
return
if self.metaInformationResolver.currentSource().type() == Phonon.MediaSource.Invalid:
return
metaData = self.metaInformationResolver.metaData()
# title = metaData.get('TITLE', [''])[0]
title = metaData.get('Titre', [''])[0]
if not title:
title = self.metaInformationResolver.currentSource().fileName()
titleItem = QtGui.QTableWidgetItem(title)
titleItem.setFlags(titleItem.flags() ^ QtCore.Qt.ItemIsEditable)
# artist = metaData.get('ARTIST', [''])[0]
artist = metaData.get('Titre01', [''])[0]
artistItem = QtGui.QTableWidgetItem(artist)
artistItem.setFlags(artistItem.flags() ^ QtCore.Qt.ItemIsEditable)
# album = metaData.get('ALBUM', [''])[0]
album = metaData.get('Titre02', [''])[0]
albumItem = QtGui.QTableWidgetItem(album)
albumItem.setFlags(albumItem.flags() ^ QtCore.Qt.ItemIsEditable)
# year = metaData.get('DATE', [''])[0]
year = metaData.get('Titre03', [''])[0]
yearItem = QtGui.QTableWidgetItem(year)
yearItem.setFlags(yearItem.flags() ^ QtCore.Qt.ItemIsEditable)
currentRow = self.musicTable.rowCount()
self.musicTable.insertRow(currentRow)
self.musicTable.setItem(currentRow, 0, titleItem)
self.musicTable.setItem(currentRow, 1, artistItem)
self.musicTable.setItem(currentRow, 2, albumItem)
self.musicTable.setItem(currentRow, 3, yearItem)
if not self.musicTable.selectedItems():
self.musicTable.selectRow(0)
self.mediaObject.setCurrentSource(self.metaInformationResolver.currentSource())
source = self.metaInformationResolver.currentSource()
index = self.sources.index(self.metaInformationResolver.currentSource()) + 1
if len(self.sources) > index:
self.metaInformationResolver.setCurrentSource(self.sources[index])
else:
self.musicTable.resizeColumnsToContents()
if self.musicTable.columnWidth(0) > 300:
self.musicTable.setColumnWidth(0, 300)
    def remove_all_qtlist_multieasycode(self):
        # takeItem() shifts the remaining rows, so looping over a stale count
        # removed only half the items; clear() removes everything at once
        self.qtlist_multieasycode.clear()
        self.etat_elemS(
            campg = True,
            multieasyc = True,
            monoeasyc = True,
            dldd = True,
            import_xls_action = False
        )
    def lire_xlsx__get_call_date(self,
            fichier_xlsx = 'E:\\DISK_D\\mamitiana\\kandra\\stuffs\\file01.xlsx',
            campgn = "0"):
        """
        returns the call_date values linked to campgn
        """
        res_list_call_date = []
        if Path(fichier_xlsx).is_file():  # fichier_xlsx is a string, so wrap it in Path
            book = open_workbook(
                fichier_xlsx
            )
            sheet0 = book.sheet_by_index(0)
            for i in range(1, sheet0.nrows):
                if (sheet0.row_values(i, 0, 1)[0] == campgn):
                    res_list_call_date.append(
                        sheet0.row_values(i, 1, 2)[0]
                    )
        return res_list_call_date
def click_extraire_audio(self):
print "click extraire audio"
    def aboutToFinish(self):
        # mark the track that just finished as 'fini', then queue the next one
        index = self.sources.index(self.mediaObject.currentSource()) + 1
play_finished = self.list__dl_fini_chemin_easycode[\
self.musicTable.selectionModel().selectedRows()[0].row()\
][2]
req001 = "UPDATE prj_ecoute01 SET fini = 1 "\
+"WHERE chemin__a_partir_root = '" \
+play_finished+"';"
self.pg_not_select(
query01 = req001,
host = "127.0.0.1")
if len(self.sources) > index:
self.mediaObject.enqueue(self.sources[index])
def setupActions(self):
self.playAction = QtGui.QAction(
self.style().standardIcon(QtGui.QStyle.SP_MediaPlay),
"Play",
self,
shortcut="Ctrl+P",
enabled=False,
# triggered=self.mediaObject.play
triggered=self.clicked_play_action
)
self.import_action = QtGui.QAction(
"Importer",
self,
shortcut="Ctrl+I",
enabled=True,
triggered=self.import_xls
)
        self.bouton_reinit_elemS = QtGui.QPushButton(
            "Reinitialiser"
        )
        self.bouton_reinit_elemS.clicked.connect(
            self.remove_all_qtlist_multieasycode
        )
self.pauseAction = QtGui.QAction(
self.style().standardIcon(
QtGui.QStyle.SP_MediaPause
),
"Pause", self, shortcut="Ctrl+A", enabled=False,
triggered=self.mediaObject.pause
)
self.stopAction = QtGui.QAction(
self.style().standardIcon(
QtGui.QStyle.SP_MediaStop
),
"Stop",
self, shortcut="Ctrl+S", enabled=False,
triggered=self.mediaObject.stop)
self.nextAction = QtGui.QAction(
self.style().standardIcon(
QtGui.QStyle.SP_MediaSkipForward
),
"Next",
self,
shortcut="Ctrl+N"
)
self.previousAction = QtGui.QAction(
self.style().standardIcon(
QtGui.QStyle.SP_MediaSkipBackward
),
"Previous", self, shortcut="Ctrl+R")
self.addFilesAction = QtGui.QAction(
"Ajouter un &Fichier",
self,
shortcut="Ctrl+F",
triggered=self.addFiles
)
self.exitAction = QtGui.QAction(
"E&xit",
self,
shortcut="Ctrl+X",
triggered=self.close
)
self.aboutAction = QtGui.QAction(
"&Aide",
self,
shortcut="Ctrl+A",
triggered=self.about
)
self.aboutActionPdf = QtGui.QAction(
"Aide avec &PDF",
self,
shortcut="Ctrl+P",
triggered=self.about_pdf
)
self.aboutQtAction = QtGui.QAction(
"A propos de &Qt",
self,
shortcut="Ctrl+Q",
triggered=QtGui.qApp.aboutQt
)
self.info_dev = QtGui.QAction(
"Info Dev",
self,
shortcut="Ctrl+M",
triggered=self.mount_samba_server_def
)
    def method01(self):
        print "this is a test"
    def keyPressEvent(self, event):
        # the original compared event.key() to a chain of and-ed key constants,
        # which only tests the last constant; check the modifiers explicitly
        # (Ctrl+Shift appears to be the intended secret combo)
        mods = event.modifiers()
        ctrl_shift = (mods & QtCore.Qt.ControlModifier) and (mods & QtCore.Qt.ShiftModifier)
        if ctrl_shift and event.key() == QtCore.Qt.Key_N:
            print "step 1"
            self.mount_samba_server_def()
        elif ctrl_shift and event.key() == QtCore.Qt.Key_I:
            print self.get_ip()
    def setupMenus(self):
        fileMenu = self.menuBar().addMenu("&Fichier")
        fileMenu.addAction(self.addFilesAction)
        fileMenu.addSeparator()
        fileMenu.addAction(self.exitAction)
        aboutMenu = self.menuBar().addMenu("&Aide")
        aboutMenu.addAction(self.aboutAction)
        aboutMenu.addAction(self.aboutActionPdf)
        aboutMenu.addAction(self.aboutQtAction)
        # aboutMenu.addAction(self.info_dev)
    def long_print(self, nb_void = 5000):
        # crude console 'clear': push the previous output off-screen
        for i in range(nb_void):
            print ""
def clicked_ok_at_passw(self):
print "you clicked ok in the password"
    def dialog_enregistrement(self,
            bool01 = True,
            list__dl_fini_chemin_easycode
            =
            [
                [False, True, 'chemin01', 'easycode04'],
                [True, False, 'chemin02', 'easycode02'],
                [True, False, 'chemin03', 'easycode03'],
                [False, True, 'chemin04', 'easycode01'],
                [True, True, 'chemin05', 'easycode05'],
            ]
        ):
        # builds the recordings dialog; rows are [downloaded, finished, path, easycode]
        self.dialog01 = QtGui.QDialog()
qvbox_layout_dialog = QtGui.QHBoxLayout(self.dialog01)
self.button_close_at_dialog = QtGui.QPushButton("Fermer", self.dialog01)
# self.button_ajouter_at_dialog = QtGui.QPushButton("Ajouter", self.dialog01)
self.button_close_at_dialog.clicked.connect(self.clicked_bouton_fermer_dialog)
# self.button_ajouter_at_dialog.clicked.connect(self.clicked_ajouter_dialog)
rows = len(list__dl_fini_chemin_easycode)
cols = len(list__dl_fini_chemin_easycode[0])
self.qtable_at_dialog = QtGui.QTableWidget(rows, cols, self.dialog01)
self.qtable_at_dialog.setSelectionBehavior(QtGui.QTableView.SelectRows)
self.dialog01.setMinimumSize(600, 50)
self.qtable_at_dialog.setStyleSheet(
'''
QTableWidget { max-width: 600px; min-height: 200px;}
'''
)
qvbox_layout_dialog.addWidget(self.button_close_at_dialog)
# qvbox_layout_dialog.addWidget(self.button_ajouter_at_dialog)
qvbox_layout_dialog.addWidget(self.qtable_at_dialog)
        # headers of the dialog table
self.qtable_at_dialog.setHorizontalHeaderLabels(
['Download', 'Fini', 'Chemin', 'Easycode'])
# self.qtable_at_dialog.itemClicked.connect(
# self.changed_clicked_qtable_at_dialog__to_del
# )
self.qtable_at_dialog\
.doubleClicked.connect(
self.double_clicked_qtable_at_dialog
)
        for row, record in enumerate(list__dl_fini_chemin_easycode):
            for col, value in enumerate(record):
                if col in (0, 1):
                    # 'Download' / 'Fini' checkbox columns
                    item = QtGui.QTableWidgetItem('')
                    item.setCheckState(
                        QtCore.Qt.Checked if value else QtCore.Qt.Unchecked)
                else:
                    # 'Chemin' / 'Easycode' text columns
                    item = QtGui.QTableWidgetItem(str(value))
                item.setFlags(QtCore.Qt.ItemIsSelectable
                              | QtCore.Qt.ItemIsEnabled)
                self.qtable_at_dialog.setItem(row, col, item)
self.dialog01.setWindowTitle("Les Enregistrements")
self.dialog01.setWindowModality(QtCore.Qt.ApplicationModal)
self.dialog01.exec_()
def clicked_button_dialog(self):
print "clicked button inside dialog"
def setupUi(self):
bar = QtGui.QToolBar()
qtool_bar02 = QtGui.QToolBar()
bar.addAction(self.playAction)
bar.addAction(self.pauseAction)
bar.addAction(self.stopAction)
qtool_bar02.addAction(self.import_action)
self.seekSlider = Phonon.SeekSlider(self)
self.seekSlider.setMediaObject(self.mediaObject)
self.volumeSlider = Phonon.VolumeSlider(self)
self.volumeSlider.setAudioOutput(self.audioOutput)
self.volumeSlider.setSizePolicy(QtGui.QSizePolicy.Maximum,
QtGui.QSizePolicy.Maximum)
volumeLabel = QtGui.QLabel()
volumeLabel.setPixmap(QtGui.QPixmap('images/volume.png'))
palette = QtGui.QPalette()
palette.setBrush(QtGui.QPalette.Light, QtCore.Qt.darkGray)
self.timeLcd = QtGui.QLCDNumber()
self.timeLcd.setPalette(palette)
headers = ("Title", "", "", "")
# headers = ("Title", "Titre01", "Titre02", "Titre03")
#~ #instanciation by default
self.bouton_reinit_source = QtGui.QPushButton(
"Reinitialiser Playlist"
)
self.bouton_reinit_source.clicked.connect(
self.del_all_sources
)
# self.tabwidget = QtGui.QTabWidget()
# self.to_del_qplain01 = QtGui.QPlainTextEdit()
# self.tabwidget.addTab(self.to_del_qplain01, 'QTab Special')
#
# vlayout = QtGui.QVBoxLayout()
# vlayout.addWidget(self.tabwidget)
self.musicTable = QtGui.QTableWidget(0, 4)
self.musicTable.setStyleSheet(
'''
QTableWidget { max-width: 1000px; min-height: 200px;}
'''
)
self.musicTable.itemClicked.connect(self.changed_music_table)
self.qtlist_dldd = QtGui.QListWidget()
# self.qtlist_multieasycode = QtGui.QListWidget()
# self.qtlist_multieasycode.setStyleSheet('''
# QListWidget { max-width: 150px; min-height: 200px;}
# '''
# )
# self.qtlist_multieasycode.addItem("")
#~ ##liaison des elem_graphique avec meth01
# self.qtlist_multieasycode.\
# itemDoubleClicked.\
# connect(self.double_clicked_multieasycode)
        # initial state of self.combo_box__campagne (at time = 0)
self.musicTable.setHorizontalHeaderLabels(headers)
self.musicTable.setSelectionMode(
QtGui.QAbstractItemView.SingleSelection
)
self.musicTable.setSelectionBehavior(
QtGui.QAbstractItemView.SelectRows
)
self.musicTable.cellPressed.connect(
self.tableClicked
)
seekerLayout = QtGui.QHBoxLayout()
seekerLayout.addWidget(
self.seekSlider
)
seekerLayout.addWidget(
self.timeLcd
)
playbackLayout = QtGui.QHBoxLayout()
qhboxlayout_toolbar_play = QtGui.QHBoxLayout()
playbackLayout.addWidget(bar)
qhboxlayout_toolbar_play.addWidget(qtool_bar02)
playbackLayout.addStretch()
playbackLayout.addWidget(
volumeLabel
)
playbackLayout.addWidget(
self.volumeSlider
)
playbackLayout.addWidget(self.bouton_reinit_source)
        # inside setupUi()
self.tabwidget = QtGui.QTabWidget()
self.tab_excel_down = QtGui.QWidget()
self.tab_saisie_down = QtGui.QWidget()
self.combo_box__campagne = QtGui.QComboBox(self.tab_excel_down)
self.combo_box__campagne.setStyleSheet('''
QComboBox { max-width: 1000px; min-height: 20px;}
'''
)
self.combo_box__campagne.addItems(
# ["campagne01", "campagne02", "campagne03"]
self.select_list_campagne()
)
self.combo_box__campagne.\
currentIndexChanged.\
connect(
self.selection_change_combo_campagne
)
self.combo_box__campagne.setEnabled(True)
self.button_test = QtGui.QPushButton(self.tab_excel_down)
self.button_test.setText('Importer Excel')
self.button_test.setGeometry(QtCore.QRect(0, 90, 100, 23))
self.qtlist_multieasycode = QtGui.QListWidget(self.tab_excel_down)
self.qtlist_multieasycode.setGeometry(QtCore.QRect(0, 90, 100, 23))
self.qtlist_multieasycode.setStyleSheet('''
QListWidget { max-width: 150px; min-height: 100px;}
'''
)
self.qtlist_multieasycode.addItem("")
self.qtlist_multieasycode.\
itemDoubleClicked.\
connect(self.double_clicked_multieasycode)
vlayout = QtGui.QVBoxLayout()
self.tabwidget.addTab(self.tab_saisie_down, 'Saisie')
self.tabwidget.addTab(self.tab_excel_down, 'Excel')
vlayout.addWidget(self.tabwidget)
        # set up the layouts
mainLayout = QtGui.QVBoxLayout()
qvbox_layout_music_table01 = QtGui.QHBoxLayout()
qvbox_layout_music_table02 = QtGui.QHBoxLayout()
# qvbox_layout_music_table01.addWidget(self.combo_box__campagne)
# qvbox_layout_music_table01.addWidget(self.qtlist_multieasycode)
# qvbox_layout_music_table01.addWidget(self.qtlist_monoeasycode)
qvbox_layout_music_table01.addWidget(self.bouton_reinit_elemS)
mainLayout.addWidget(
self.musicTable
)
mainLayout.addLayout(
seekerLayout
)
mainLayout.addLayout(
playbackLayout
)
mainLayout.addLayout(
qhboxlayout_toolbar_play
)
mainLayout.addLayout(
qvbox_layout_music_table01
)
mainLayout.addLayout(
vlayout
)
mainLayout.addLayout(
qvbox_layout_music_table02
)
widget = QtGui.QWidget()
# widget.setStyleSheet("""
# QWidget
# {
# background-color: rgba(0,0,0, 50);
# }
# """
# )
widget.setLayout(
mainLayout
)
self.setCentralWidget(
widget
)
self.setWindowTitle(
# to_unicode("Interface d'écoute des Appels Entrants et Sortants"
u"Interface d'Ecoute des Appels Entrants et Sortants"
)
        # end of setupUi()
if __name__ == '__main__':
    # Keep application and window creation under the main guard so that
    # importing this module has no GUI side effects.
    app = QtGui.QApplication(sys.argv)
    app.setApplicationName("Music Player")
    app.setQuitOnLastWindowClosed(True)
    app.setWindowIcon(QtGui.QIcon('headphone.png'))
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())
``` |
{
"source": "a7ypically/InsightFace-REST",
"score": 3
} |
#### File: api_trt/modules/imagedata.py
```python
import time
from typing import List
import cv2
from typing import Union
import numpy as np
import logging
class ImageData:
def __init__(self, image, max_size: List[int] = None):
if max_size is None:
max_size = [640, 480]
if len(max_size) == 1:
max_size = [max_size[0]] * 2
self.orig_image = image
self.transformed_image = self.orig_image
self.const_width = max_size[0]
self.const_height = max_size[1]
        self.scale_factor = 1.0
        self.resize_ms = 0
        # Ensure the border attributes exist even when resize_image()
        # applies no padding.
        self.left_border = 0
        self.bottom_border = 0
def resize_image(self, pad: bool = True, mode: str = 'pad'):
t0 = time.perf_counter()
cw = self.const_width
ch = self.const_height
h, w, _ = self.transformed_image.shape
if mode == 'stretch':
self.transformed_image = cv2.resize(self.transformed_image, dsize=(self.const_width, self.const_height))
else:
self.scale_factor = min(cw / w, ch / h)
# If image is too small, it may contain only single face, which leads to decreased detection accuracy,
# so we reduce scale factor by some factor
if self.scale_factor > 3:
self.scale_factor = self.scale_factor * 0.7
self.transformed_image = cv2.resize(self.transformed_image, (0, 0), fx=self.scale_factor,
fy=self.scale_factor,
interpolation=cv2.INTER_LINEAR)
if pad:
# # Pad right and bottom with black border for fixed image proportions
h, w, _ = self.transformed_image.shape
if w < cw:
self.transformed_image = cv2.copyMakeBorder(self.transformed_image, 0, 0, 0, cw - w,
cv2.BORDER_CONSTANT)
self.left_border = cw - w
if h < ch:
self.transformed_image = cv2.copyMakeBorder(self.transformed_image, 0, ch - h, 0, 0,
cv2.BORDER_CONSTANT)
self.bottom_border = ch - h
self.resize_ms = (time.perf_counter() - t0) * 1000
logging.debug(f'Resizing image took: {self.resize_ms:.3f} ms.')
def resize_image(image, max_size: list = None):
if max_size is None:
max_size = [640, 640]
cw = max_size[0]
ch = max_size[1]
h, w, _ = image.shape
scale_factor = min(cw / w, ch / h)
# If image is too small, it may contain only single face, which leads to decreased detection accuracy,
# so we reduce scale factor by some factor
if scale_factor > 3:
scale_factor = scale_factor * 0.7
transformed_image = cv2.resize(image, (0, 0), fx=scale_factor,
fy=scale_factor,
interpolation=cv2.INTER_LINEAR)
h, w, _ = transformed_image.shape
if w < cw:
transformed_image = cv2.copyMakeBorder(transformed_image, 0, 0, 0, cw - w,
cv2.BORDER_CONSTANT)
if h < ch:
transformed_image = cv2.copyMakeBorder(transformed_image, 0, ch - h, 0, 0,
cv2.BORDER_CONSTANT)
return transformed_image, scale_factor
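# --- Added usage sketch (illustrative only; the zero frame stands in for a
# real cv2.imread() result) ---
if __name__ == '__main__':
    frame = np.zeros((720, 1280, 3), dtype=np.uint8)
    data = ImageData(frame, max_size=[640, 640])
    data.resize_image(mode='pad')
    # Expect a 640x640 padded frame plus the scale factor needed to map
    # detections back to the original coordinates.
    print(data.transformed_image.shape, data.scale_factor)
    padded, scale = resize_image(frame, max_size=[640, 640])
    print(padded.shape, scale)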
```
#### File: src/api_trt/prepare_models.py
```python
import os
import logging
from modules.utils.helpers import parse_size, tobool, validate_max_size
from modules.model_zoo.getter import prepare_backend
from modules.configs import Configs
from env_parser import EnvConfigs
log_level = os.getenv('LOG_LEVEL', 'INFO')
logging.basicConfig(
level=log_level,
format='%(asctime)s %(levelname)s - %(message)s',
datefmt='[%H:%M:%S]',
)
def prepare_models(root_dir: str = '/models'):
model_configs = Configs(models_dir=root_dir)
env_configs = EnvConfigs()
rec_name = env_configs.models.rec_name
det_name = env_configs.models.det_name
ga_name = env_configs.models.ga_name
mask_detector = env_configs.models.mask_detector
max_size = env_configs.defaults.max_size
if max_size is None:
max_size = [640, 640]
max_size = validate_max_size(max_size)
models = [model for model in [det_name, rec_name, ga_name, mask_detector] if model is not None]
for model in models:
batch_size = 1
if model_configs.models[model].get('allow_batching'):
if model == det_name:
batch_size = env_configs.models.det_batch_size
else:
batch_size = env_configs.models.rec_batch_size
logging.info(f"Preparing '{model}' model...")
prepare_backend(model_name=model, backend_name=env_configs.models.backend_name, im_size=max_size,
force_fp16=env_configs.models.fp16,
max_batch_size=batch_size, config=model_configs)
logging.info(f"'{model}' model ready!")
if __name__ == "__main__":
prepare_models()
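# (Added note) prepare_models() defaults to root_dir='/models'; a custom
# models root can be passed explicitly, e.g.
# prepare_models(root_dir='/opt/models') -- the path here is only an
# example. Which models are prepared is controlled by the environment
# variables read through EnvConfigs (defined in env_parser.py, not shown).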
``` |
{
"source": "a801-luadev/toculabs",
"score": 2
} |
#### File: bots/tocutobot/__init__.py
```python
try:
# Try relative imports...
from .forum import ForumClient
from .tfm import TfmClient
except ImportError: # ... and if they do not work, make absolute ones
from forum import ForumClient
from tfm import TfmClient
import asyncio
import os
tfm = None
def setup(loop):
global tfm
api_id = os.getenv("TRANSFROMAGE_ID")
api_token = os.getenv("TRANSFROMAGE_TOKEN")
if api_id is None or not api_id.isdigit():
raise TypeError("TRANSFROMAGE_ID environment variable must exist and be a number.")
if api_token is None:
raise TypeError("TRANSFROMAGE_TOKEN environment variable must exist.")
tfm_name = os.getenv("TOCUTOBOT_NAME")
    tfm_pass = os.getenv("TOCUTOBOT_PASS")
if tfm_name is None:
raise TypeError("TOCUTOBOT_NAME environment variable must exist.")
if tfm_pass is None:
raise TypeError("TOCUTOBOT_PASS environment variable must exist.")
atelier_name = os.getenv("ATELIER_BOT_NAME")
atelier_pass = os.getenv("ATELIER_BOT_PASS")
atelier_host_list = os.getenv("ATELIER_IMAGE_HOST_LIST")
atelier_host_ajax = os.getenv("ATELIER_IMAGE_HOST_AJAX")
if atelier_name is None:
raise TypeError("ATELIER_BOT_NAME environment variable must exist.")
if atelier_pass is None:
raise TypeError("ATELIER_BOT_PASS environment variable must exist.")
if atelier_host_list is None:
raise TypeError("ATELIER_IMAGE_HOST_LIST environment variable must exist.")
if atelier_host_ajax is None:
raise TypeError("ATELIER_IMAGE_HOST_AJAX environment variable must exist.")
forum = ForumClient(atelier_name, atelier_pass, atelier_host_list, atelier_host_ajax, loop=loop)
tfm = TfmClient(api_id, api_token, tfm_name, tfm_pass, forum, loop=loop)
def start(loop):
loop.run_until_complete(tfm.start_running())
def stop(loop):
loop.run_until_complete(tfm.stop())
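# (Added note) setup() validates all credentials from the environment, so a
# typical launch provides them up front (values below are placeholders):
#     TRANSFROMAGE_ID=123 TRANSFROMAGE_TOKEN=... \
#     TOCUTOBOT_NAME=... TOCUTOBOT_PASS=... \
#     ATELIER_BOT_NAME=... ATELIER_BOT_PASS=... \
#     ATELIER_IMAGE_HOST_LIST=... ATELIER_IMAGE_HOST_AJAX=... \
#     python __init__.py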
if __name__ == '__main__':
loop = asyncio.get_event_loop()
setup(loop)
start(loop)
try:
loop.run_forever()
except KeyboardInterrupt:
print(end="\r") # Removes ^C from the console
print("Stopping bot...")
stop(loop)
loop.close()
``` |
{
"source": "a815027104/Distrinet",
"score": 3
} |
#### File: mininet/mininet/distrinet.py
```python
import os
import re
import select
import signal
import random
from time import sleep
from itertools import chain, groupby
from math import ceil
from mininet.cli import CLI
from mininet.log import info, error, debug, output, warn
from mininet.node import ( Node, Host, OVSKernelSwitch, DefaultController,
Controller )
from mininet.nodelib import NAT
#from mininet.link import Link, Intf
from mininet.util import ( quietRun, fixLimits, numCores, ensureRoot,
macColonHex, ipStr, ipParse, netParse, ipAdd,
waitListening, BaseString, encode )
from mininet.term import cleanUpScreens, makeTerms
from mininet.link import (Intf, TCIntf)
# DSA ########################
from mininet.dutil import _info
from mininet.cloudlink import (CloudLink)
from mininet.lxc_container import (LxcNode)
from mininet.cloudswitch import (LxcSwitch)
from mininet.cloudcontroller import (LxcRemoteController)
import asyncio
import time
from threading import Thread
from mininet.assh import ASsh
##############################
# Mininet version: should be consistent with README and LICENSE
from mininet.net import VERSION as MININET_VERSION
# Distrinet version
VERSION = "2.0 (Mininet {})".format(MININET_VERSION)
from mininet.net import Mininet
class Distrinet( Mininet ):
"Network emulation with hosts spawned in network namespaces."
def __init__( self, topo=None, switch=LxcSwitch, host=LxcNode,
controller=LxcRemoteController, link=CloudLink, intf=TCIntf,
mapper=None,
build=True, xterms=False, cleanup=False, ipBase='10.0.0.0/8',
adminIpBase='192.168.0.1/8',
autoSetMacs=False, autoPinCpus=False,
listenPort=None, waitConnected=False, waitConnectionTimeout=5,
jump=None, user="root", client_keys=None, master=None, pub_id=None,
**kwargs):
"""Create Mininet object.
topo: Topo (topology) object or None
switch: default Switch class
host: default Host class/constructor
controller: default Controller class/constructor
link: default Link class/constructor
intf: default Intf class/constructor
ipBase: base IP address for hosts,
mapper: mapper to map virtual topology onto physical topology
build: build now from topo?
xterms: if build now, spawn xterms?
cleanup: if build now, cleanup before creating?
inNamespace: spawn switches and controller in net namespaces?
autoSetMacs: set MAC addrs automatically like IP addresses?
autoStaticArp: set all-pairs static MAC addrs?
autoPinCpus: pin hosts to (real) cores (requires CPULimitedHost)?
listenPort: base listening port to open; will be incremented for
each additional switch in the net if inNamespace=False
waitConnected: wait for the switches to be connected to their controller
waitConnectionTimeout: timeout to wait to decide if a switch is connected to its controller
jump: SSH jump host
master: master node"""
self.topo = topo
self.switch = switch
self.host = host
self.controller = controller
self.link = link
self.intf = intf
self.ipBase = ipBase
self.ipBaseNum, self.prefixLen = netParse( self.ipBase )
hostIP = ( 0xffffffff >> self.prefixLen ) & self.ipBaseNum
# Start for address allocation
self.nextIP = hostIP if hostIP > 0 else 1
self.adminIpBase = adminIpBase
self.adminIpBaseNum, self.adminPrefixLen = netParse( self.adminIpBase )
adminIP = ( 0xffffffff >> self.adminPrefixLen ) & self.adminIpBaseNum
# Start for address allocation
self.adminNextIP = adminIP if adminIP > 0 else 1
# self.inNamespace = inNamespace
self.xterms = xterms
self.cleanup = cleanup
self.autoSetMacs = autoSetMacs
# self.autoStaticArp = autoStaticArp
self.autoPinCpus = autoPinCpus
# self.numCores = numCores()
# self.nextCore = 0 # next core for pinning hosts to CPUs
self.listenPort = listenPort
self.waitConn = waitConnected
self.waitConnectionTimeout = waitConnectionTimeout
self.mapper = mapper
#
self.hosts = []
self.switches = []
self.controllers = []
self.links = []
self.loop = asyncio.get_event_loop()
        def runforever(loop):
            # Short delay before entering run_forever(); kept from the
            # original code, where its purpose was not documented.
            time.sleep(0.001)
            loop.run_forever()
self.thread = Thread(target=runforever, args=(self.loop,))
self.thread.start()
self.jump = jump
self.user = user
self.pub_id = pub_id
self.client_keys = client_keys
self.masterhost = master
_info ("Connecting to master node\n")
self.masterSsh = ASsh(loop=self.loop, host=self.masterhost, username=self.user, bastion=self.jump, client_keys=self.client_keys)
self.masterSsh.connect()
self.masterSsh.waitConnected()
_info ("connected to master node\n")
self.nameToNode = {} # name to Node (Host/Switch) objects
self.terms = [] # list of spawned xterm processes
self.init() # Initialize Mininet if necessary
self.built = False
if topo and build:
self.build()
# DSA - OK
def addHost( self, name, cls=None, **params ):
"""Add host.
name: name of host to add
cls: custom host class/constructor (optional)
params: parameters for host
returns: added host"""
# Default IP and MAC addresses
defaults = { 'ip': ipAdd( self.nextIP,
ipBaseNum=self.ipBaseNum,
prefixLen=self.prefixLen ) +
'/%s' % self.prefixLen}
if "image" in self.topo.nodeInfo(name):
defaults.update({"image":self.topo.nodeInfo(name)["image"]})
# XXX DSA - doesn't make sense to generate MAC automatically here, we
        # keep it for compatibility purposes but never use it...
if self.autoSetMacs:
defaults[ 'mac' ] = macColonHex( self.nextIP )
if self.autoPinCpus:
raise Exception("to be implemented")
# defaults[ 'cores' ] = self.nextCore
# self.nextCore = ( self.nextCore + 1 ) % self.numCores
self.nextIP += 1
defaults.update( params )
if not cls:
cls = self.host
if self.mapper:
defaults.update({"target":self.mapper.place(name)})
h = cls(name=name, **defaults )
self.hosts.append( h )
self.nameToNode[ name ] = h
return h
# DSA - OK
def addSwitch( self, name, cls=None, **params ):
"""Add switch.
name: name of switch to add
cls: custom switch class/constructor (optional)
returns: added switch
side effect: increments listenPort ivar ."""
defaults = { 'listenPort': self.listenPort}
if "image" in self.topo.nodeInfo(name):
defaults.update({"image":self.topo.nodeInfo(name)})
else:
error ("we are missing an image for {} \n".format(name))
exit()
defaults.update( params )
if not cls:
cls = self.switch
if self.mapper:
defaults.update({"target":self.mapper.place(name)})
sw = cls(name=name, **defaults )
self.switches.append( sw )
self.nameToNode[ name ] = sw
return sw
def delSwitch( self, switch ):
"Delete a switch"
self.delNode( switch, nodes=self.switches )
# DSA - OK
def addController( self, name='c0', controller=None, **params ):
"""Add controller.
controller: Controller class
params: Parameters for the controller"""
# Get controller class
params.update({'pub_id':self.pub_id})
if not controller:
controller = self.controller
controller_new = controller(name=name,
loop=self.loop,
master=self.masterSsh,
username=self.user,
bastion=self.jump,
client_keys=self.client_keys,
**params)
self.controllers.append(controller_new)
self.nameToNode[ name ] = controller_new
return controller_new
def delController( self, controller ):
"""Delete a controller
Warning - does not reconfigure switches, so they
may still attempt to connect to it!"""
self.delNode( controller )
def addNAT( self, name='nat0', connect=True, inNamespace=False,
**params):
"""Add a NAT to the Mininet network
name: name of NAT node
connect: switch to connect to | True (s1) | None
inNamespace: create in a network namespace
params: other NAT node params, notably:
ip: used as default gateway address"""
nat = self.addHost( name, cls=NAT, inNamespace=inNamespace,
subnet=self.ipBase, **params )
# find first switch and create link
if connect:
if not isinstance( connect, Node ):
# Use first switch if not specified
connect = self.switches[ 0 ]
# Connect the nat to the switch
self.addLink( nat, connect )
# Set the default route on hosts
natIP = nat.params[ 'ip' ].split('/')[ 0 ]
for host in self.hosts:
if host.inNamespace:
host.setDefaultRoute( 'via %s' % natIP )
return nat
# DSA - OK
def addLink( self, node1, node2, port1=None, port2=None,
cls=None, **params ):
""""Add a link from node1 to node2
node1: source node (or name)
node2: dest node (or name)
port1: source port (optional)
port2: dest port (optional)
cls: link class (optional)
params: additional link params (optional)
returns: link object"""
# Accept node objects or names
node1 = node1 if not isinstance( node1, BaseString ) else self[ node1 ]
node2 = node2 if not isinstance( node2, BaseString ) else self[ node2 ]
options = dict( params )
# Port is optional
if port1 is not None:
options.setdefault( 'port1', port1 )
if port2 is not None:
options.setdefault( 'port2', port2 )
if self.intf is not None:
options.setdefault( 'intf', self.intf )
# Set default MAC - this should probably be in Link
options.setdefault( 'addr1', self.randMac() )
options.setdefault( 'addr2', self.randMac() )
params1 = None
params2 = None
if self.mapper:
lstr = (str(node1), str(node2))
placement = self.mapper.placeLink( lstr)
params1 = placement[0]
params2 = placement[1]
## # define the VXLAN id for the link
## options.setdefault("link_id", self.nextLinkId)
## self.nextLinkId += 1
cls = self.link if cls is None else cls
link = cls( node1=node1, node2=node2, params1=params1, params2=params2, **options )
self.links.append( link )
return link
    def delLink( self, link ):
        "Remove a link from this network"
        raise NotImplementedError( "delLink is not implemented" )
        # Unreachable until implemented:
        # link.delete()
        # self.links.remove( link )
def delLinkBetween( self, node1, node2, index=0, allLinks=False ):
"""Delete link(s) between node1 and node2
index: index of link to delete if multiple links (0)
allLinks: ignore index and delete all such links (False)
returns: deleted link(s)"""
links = self.linksBetween( node1, node2 )
if not allLinks:
links = [ links[ index ] ]
for link in links:
self.delLink( link )
return links
def configHosts( self ):
"Configure a set of hosts."
for host in self.hosts:
info( host.name + ' ' )
intf = host.defaultIntf()
if intf:
host.configDefault()
else:
# Don't configure nonexistent intf
host.configDefault( ip=None, mac=None )
# You're low priority, dude!
# BL: do we want to do this here or not?
            # May not make sense if we have CPU limiting...
# quietRun( 'renice +18 -p ' + repr( host.pid ) )
# This may not be the right place to do this, but
# it needs to be done somewhere.
info( '\n' )
# DSA - OK
def buildFromTopo( self, topo=None ):
"""Build mininet from a topology object
At the end of this function, everything should be connected
and up."""
# Possibly we should clean up here and/or validate
# the topo
if self.cleanup:
pass
info( '*** Creating network\n' )
bastion = self.jump
waitStart = False
_ip = "{}/{}".format(ipAdd(self.adminNextIP, ipBaseNum=self.adminIpBaseNum, prefixLen=self.adminPrefixLen), self.adminPrefixLen)
self.adminNextIP += 1
self.host.createMasterAdminNetwork(self.masterSsh, brname="admin-br", ip=_ip)
_info (" admin network created on {}\n".format(self.masterhost))
assert (isinstance(self.controllers, list))
if not self.controllers and self.controller:
# Add a default controller
info( '*** Adding controller\n' )
classes = self.controller
if not isinstance( classes, list ):
classes = [ classes ]
for i, cls in enumerate( classes ):
# Allow Controller objects because nobody understands partial()
if isinstance( cls, Controller ):
self.addController( cls )
else:
self.addController( 'c%d' % i, cls )
# from assh import ASsh
# prepare SSH connection to the master
info( '*** Adding hosts:\n' )
# == Hosts ===========================================================
for hostName in topo.hosts():
_ip = "{}/{}".format(ipAdd( self.adminNextIP, ipBaseNum=self.adminIpBaseNum, prefixLen=self.adminPrefixLen),self.adminPrefixLen)
self.adminNextIP += 1
# __ip= newAdminIp(admin_ip)
self.addHost( name=hostName,
admin_ip= _ip,
loop=self.loop,
master=self.masterSsh,
username=self.user,
bastion=bastion,
client_keys=self.client_keys,
waitStart=waitStart,
**topo.nodeInfo( hostName ))
info( hostName + ' ' )
info( '\n*** Adding switches:\n' )
for switchName in topo.switches():
_ip = "{}/{}".format(ipAdd( self.adminNextIP, ipBaseNum=self.adminIpBaseNum, prefixLen=self.adminPrefixLen),self.adminPrefixLen)
self.adminNextIP += 1
self.addSwitch( name=switchName,
admin_ip=_ip,
loop=self.loop,
master=self.masterSsh,
username=self.user,
bastion=bastion,
client_keys=self.client_keys,
waitStart=waitStart,
**topo.nodeInfo( switchName ))
info( switchName + ' ' )
if not waitStart:
nodes = self.hosts + self.switches
_info ("[starting\n")
for node in nodes:
_info ("connectTarget {} ".format( node.name))
node.connectTarget()
for node in nodes:
node.waitConnectedTarget()
_info ("connectedTarget {} ".format( node.name))
count = 0
for node in nodes:
_info ("createContainer {} ".format( node.name))
node.createContainer()
count += 1
if count > 50:
output("50 nodes created...\n")
sleep(10)
count = 0
for node in nodes:
node.waitCreated()
_info ("createdContainer {} ".format(node.name))
for node in nodes:
_info ("create admin interface {} ".format( node.name))
node.addContainerInterface(intfName="admin", brname="admin-br", wait=False)
for node in nodes:
node.targetSshWaitOutput()
_info ("admin interface created on {} ".format( node.name))
_info ("\n")
cmds = []
for node in nodes:
cmds = cmds + node.connectToAdminNetwork(master=node.masternode.host, target=node.target, link_id=CloudLink.newLinkId(), admin_br="admin-br", wait=False)
if len (cmds) > 0:
cmd = ';'.join(cmds)
self.masterSsh.cmd(cmd)
for node in nodes:
node.configureContainer(wait=False)
for node in nodes:
node.targetSshWaitOutput()
for node in nodes:
_info ("connecting {} ".format( node.name))
node.connect()
for node in nodes:
node.waitConnected()
_info ("connected {} ".format( node.name))
for node in nodes:
_info ("startshell {} ".format( node.name) )
node.asyncStartShell()
for node in nodes:
node.waitStarted()
_info ("startedshell {}".format( node.name))
for node in nodes:
_info ("finalize {}".format( node.name))
node.finalizeStartShell()
_info ("\n")
info( '\n*** Adding links:\n' )
for srcName, dstName, params in topo.links(
sort=True, withInfo=True ):
self.addLink( **params )
info( '(%s, %s) ' % ( srcName, dstName ) )
info( '\n' )
def configureControlNetwork( self ):
"Control net config hook: override in subclass"
raise Exception( 'configureControlNetwork: '
'should be overriden in subclass', self )
def build( self ):
"Build mininet."
if self.topo:
self.buildFromTopo( self.topo )
## self.configureControlNetwork()
info( '*** Configuring hosts\n' )
self.configHosts()
## if self.xterms:
## self.startTerms()
# if self.autoStaticArp:
# self.staticArp()
self.built = True
def startTerms( self ):
"Start a terminal for each node."
if 'DISPLAY' not in os.environ:
error( "Error starting terms: Cannot connect to display\n" )
return
info( "*** Running terms on %s\n" % os.environ[ 'DISPLAY' ] )
cleanUpScreens()
self.terms += makeTerms( self.controllers, 'controller' )
self.terms += makeTerms( self.switches, 'switch' )
self.terms += makeTerms( self.hosts, 'host' )
def stopXterms( self ):
"Kill each xterm."
for term in self.terms:
os.kill( term.pid, signal.SIGKILL )
cleanUpScreens()
def staticArp( self ):
"Add all-pairs ARP entries to remove the need to handle broadcast."
for src in self.hosts:
for dst in self.hosts:
if src != dst:
src.setARP( ip=dst.IP(), mac=dst.MAC() )
# DSA - OK
def start( self ):
"Start controller and switches."
if not self.built:
self.build()
info( '*** Starting controller\n' )
for controller in self.controllers:
info( controller.name + ' ')
controller.start()
info( '\n' )
info( '*** Starting %s switches\n' % len( self.switches ) )
for switch in self.switches:
info( switch.name + ' ')
switch.start( self.controllers )
started = {}
for switch in self.switches:
success = switch.batchStartup([switch])
started.update( { s: s for s in success } )
# for swclass, switches in groupby(
# sorted( self.switches,
# key=lambda s: str( type( s ) ) ), type ):
# switches = tuple( switches )
# if hasattr( swclass, 'batchStartup' ):
# success = swclass.batchStartup( switches )
# started.update( { s: s for s in success } )
info( '\n' )
if self.waitConn:
self.waitConnected()
def stop( self ):
"Stop the switches, hosts and controller(s) "
if self.terms:
info( '*** Stopping %i terms\n' % len( self.terms ) )
self.stopXterms()
info( '*** Stopping %i links\n' % len( self.links ) )
for link in self.links:
info( '.' )
link.stop()
info( '\n' )
info( '*** Stopping %i switches\n' % len( self.switches ) )
stopped = {}
######## for switch in self.switches:
######## success = switch.batchShutdown([switch])
######## stopped.update( { s: s for s in success } )
# for swclass, switches in groupby(
# sorted( self.switches,
# key=lambda s: str( type( s ) ) ), type ):
# switches = tuple( switches )
# if hasattr( swclass, 'batchShutdown' ):
# success = swclass.batchShutdown( switches )
# stopped.update( { s: s for s in success } )
for switch in self.switches:
info( switch.name + ' ' )
if switch not in stopped:
switch.stop()
switch.terminate()
info( '\n' )
info( '*** Stopping %i hosts\n' % len( self.hosts ) )
for host in self.hosts:
info( host.name + ' ' )
host.terminate()
info( '*** Stopping %i controllers\n' % len( self.controllers ) )
for controller in self.controllers:
info( controller.name + ' ' )
controller.stop()
info( '\n' )
info( '*** cleaning master\n' )
# XXX DSA need to find something nicer
for node in self.hosts + self.switches + self.controllers:
_info ("wait {} ".format( node ))
node.targetSshWaitOutput()
for device in node.devicesMaster:
_info ("delete device {} on master ".format(device))
self.masterSsh.cmd("ip link delete {} ".format(device))
_info ("\n")
_info ("\n")
self.loop.stop()
info( '\n*** Done\n' )
# XXX These test methods should be moved out of this class.
# Probably we should create a tests.py for them
def runCpuLimitTest( self, cpu, duration=5 ):
"""run CPU limit test with 'while true' processes.
cpu: desired CPU fraction of each host
duration: test duration in seconds (integer)
returns a single list of measured CPU fractions as floats.
"""
pct = cpu * 100
info( '*** Testing CPU %.0f%% bandwidth limit\n' % pct )
hosts = self.hosts
cores = int( quietRun( 'nproc' ) )
# number of processes to run a while loop on per host
num_procs = int( ceil( cores * cpu ) )
pids = {}
for h in hosts:
pids[ h ] = []
for _core in range( num_procs ):
h.cmd( 'while true; do a=1; done &' )
pids[ h ].append( h.cmd( 'echo $!' ).strip() )
outputs = {}
time = {}
# get the initial cpu time for each host
for host in hosts:
outputs[ host ] = []
with open( '/sys/fs/cgroup/cpuacct/%s/cpuacct.usage' %
host, 'r' ) as f:
time[ host ] = float( f.read() )
for _ in range( duration ):
sleep( 1 )
for host in hosts:
with open( '/sys/fs/cgroup/cpuacct/%s/cpuacct.usage' %
host, 'r' ) as f:
readTime = float( f.read() )
outputs[ host ].append( ( ( readTime - time[ host ] )
/ 1000000000 ) / cores * 100 )
time[ host ] = readTime
for h, pids in pids.items():
for pid in pids:
h.cmd( 'kill -9 %s' % pid )
cpu_fractions = []
for _host, outputs in outputs.items():
for pct in outputs:
cpu_fractions.append( pct )
output( '*** Results: %s\n' % cpu_fractions )
return cpu_fractions
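    # (Added note) Typical call, assuming the hosts were created with a
    # CPU-limited host class:
    #     fractions = net.runCpuLimitTest(cpu=0.5, duration=5)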
# BL: I think this can be rewritten now that we have
# a real link class.
def configLinkStatus( self, src, dst, status ):
"""Change status of src <-> dst links.
src: node name
dst: node name
status: string {up, down}"""
if src not in self.nameToNode:
error( 'src not in network: %s\n' % src )
elif dst not in self.nameToNode:
error( 'dst not in network: %s\n' % dst )
else:
src = self.nameToNode[ src ]
dst = self.nameToNode[ dst ]
connections = src.connectionsTo( dst )
if len( connections ) == 0:
error( 'src and dst not connected: %s %s\n' % ( src, dst) )
for srcIntf, dstIntf in connections:
result = srcIntf.ifconfig( status )
if result:
error( 'link src status change failed: %s\n' % result )
result = dstIntf.ifconfig( status )
if result:
error( 'link dst status change failed: %s\n' % result )
def interact( self ):
"Start network and run our simple CLI."
self.start()
result = CLI( self )
self.stop()
return result
inited = False
@classmethod
def init( cls ):
"Initialize Mininet"
if cls.inited:
return
cls.inited = True
class MininetWithControlNet( Mininet ):
"""Control network support:
Create an explicit control network. Currently this is only
used/usable with the user datapath.
Notes:
1. If the controller and switches are in the same (e.g. root)
namespace, they can just use the loopback connection.
2. If we can get unix domain sockets to work, we can use them
instead of an explicit control network.
3. Instead of routing, we could bridge or use 'in-band' control.
4. Even if we dispense with this in general, it could still be
useful for people who wish to simulate a separate control
network (since real networks may need one!)
5. Basically nobody ever used this code, so it has been moved
into its own class.
6. Ultimately we may wish to extend this to allow us to create a
control network which every node's control interface is
attached to."""
def configureControlNetwork( self ):
"Configure control network."
self.configureRoutedControlNetwork()
# We still need to figure out the right way to pass
# in the control network location.
def configureRoutedControlNetwork( self, ip='192.168.123.1',
prefixLen=16 ):
"""Configure a routed control network on controller and switches.
For use with the user datapath only right now."""
controller = self.controllers[ 0 ]
info( controller.name + ' <->' )
cip = ip
snum = ipParse( ip )
for switch in self.switches:
info( ' ' + switch.name )
link = self.link( switch, controller, port1=0 )
sintf, cintf = link.intf1, link.intf2
switch.controlIntf = sintf
snum += 1
while snum & 0xff in [ 0, 255 ]:
snum += 1
sip = ipStr( snum )
cintf.setIP( cip, prefixLen )
sintf.setIP( sip, prefixLen )
controller.setHostRoute( sip, cintf )
switch.setHostRoute( cip, sintf )
info( '\n' )
info( '*** Testing control network\n' )
while not cintf.isUp():
info( '*** Waiting for', cintf, 'to come up\n' )
sleep( 1 )
for switch in self.switches:
while not sintf.isUp():
info( '*** Waiting for', sintf, 'to come up\n' )
sleep( 1 )
if self.ping( hosts=[ switch, controller ] ) != 0:
error( '*** Error: control network test failed\n' )
exit( 1 )
info( '\n' )
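# --- Added usage sketch for Distrinet (host names, user and key path are
# assumptions, not values taken from this repository) ---
# from mininet.topolib import TreeTopo
# net = Distrinet(topo=TreeTopo(depth=2, fanout=2),
#                 jump="jumphost.example.org",
#                 master="master.example.org",
#                 user="root",
#                 client_keys=["/root/.ssh/id_rsa"],
#                 pub_id="ssh-rsa AAAA... distrinet@example")
# net.start()
# CLI(net)
# net.stop()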
```
#### File: mininet/mapper/mapper.py
```python
from distriopt import VirtualNetwork
from distriopt.embedding.physical import PhysicalNetwork
from distriopt.embedding.algorithms import (
EmbedBalanced,
# EmbedILP,
EmbedPartition,
EmbedGreedy,
)
from distriopt.packing import CloudInstance
from distriopt.packing.algorithms import (
    BestFitDopProduct,
    FirstFitDecreasingPriority,
    FirstFitOrderedDeviation,
)
from random import randint
import subprocess
from pathlib import Path
class DummyMapper(object):
def __init__(self, places={}):
self.places = places
def place(self, node):
return self.places[node]
def placeLink(self, link):
return ({}, {})
class RoundRobinMapper(DummyMapper):
def __init__(self, virtual_topo, physical_topo=[]):
self.physical = physical_topo
self.vNodes = virtual_topo.hosts()+virtual_topo.switches()
self.places = self.__places(self.vNodes, physical_topo)
    def __places(self, vNodes, physical_topo):
        places = {}
        for i, node in enumerate(vNodes):
            places[node] = physical_topo[i % len(physical_topo)]
        return places
def place(self, node):
return self.places[node]
class RandomMapper(DummyMapper):
def __init__(self, virtual_topo, physical_topo=[]):
self.physical = physical_topo
self.vNodes = virtual_topo.hosts()+virtual_topo.switches()
self.places = self.__places(self.vNodes, physical_topo)
def __places(self, vNodes, physical_topo):
places={}
for node in vNodes:
places[node] = physical_topo[randint(0,len(physical_topo)-1)]
return places
def place(self, node):
return self.places[node]
class MaxinetMapper(DummyMapper):
def __init__(self, virtual_topo, physical_topo=[], share_path="/Users/giuseppe/Desktop/algo_experiments/algo_experiments/distrinet/mininet/mininet/mapper/shares/equal10.txt"):
self.physical = physical_topo
self.virtual_network = virtual_topo
self.vNodes = virtual_topo.hosts()+virtual_topo.switches()
self.vHosts = virtual_topo.hosts()
self.vSwitches = virtual_topo.switches()
self.vlinks = virtual_topo.links()
self.metis_node_mapping = None
self.node_metis_mapping = None
self.metis_dict = None
maxinet_dict = self.convert_in_maxinet_dict()
# OK
metis_dict = self.convert_in_metis_dict(maxinet_dict=maxinet_dict)
print(metis_dict) # OK
self.create_metis_file(metis_dict=metis_dict, path="/tmp/metis_file") #OK
print("USING {}".format(share_path))
self.run_metis(graph_path="/tmp/metis_file", share_path=share_path) # OK
mapping = self.get_mapping(graph_path="/tmp/metis_file", share_path=share_path) # OK
print(mapping)
mapping_converted = self.convert_mapping(mapping) # OK
print("MAPPING CONVERTED")
print(mapping_converted)
complete_mapping = self.get_mapping_for_all_nodes(mapping_converted) # OK
print("COMPLETE MAPPING")
print(complete_mapping)
print(self.metis_node_mapping)
compute_nodes = sorted(self.physical)
mapping = complete_mapping
sorted_keys = sorted(mapping.keys(), key=lambda x: int(x), reverse=True)
physical_names_mapping = {phy_name: metis_name for phy_name, metis_name in
zip(compute_nodes, sorted_keys)}
metis_name_mapping = {physical_names_mapping[x]: x for x in physical_names_mapping.keys()}
mapping_with_pyhisical_names = {metis_name_mapping[node]: mapping[node] for node in mapping.keys()}
print(mapping_with_pyhisical_names)
self.places = self.__places(mapping_with_pyhisical_names)
print("FINAL")
print(self.places)
def __places(self, mapping):
final = dict()
for physical, list_vnodes in mapping.items():
for v in list_vnodes:
final[v]=physical
return final
def get_mapping(self, graph_path, share_path):
gr_path = Path(graph_path)
if gr_path.is_file():
file_name = gr_path.name
else:
raise RuntimeError()
if Path(share_path).is_file():
physical_hosts = self.get_physical_hosts(share_path)
else:
raise RuntimeError()
mapping_file_name = file_name +".part."+ str(len(physical_hosts))
mapping_file_path = gr_path.parent / mapping_file_name
mapping = {host: [] for host in physical_hosts}
with open(mapping_file_path,"r") as file:
lines = list(map(lambda x:x.strip(), file.readlines()))
for c, m in enumerate(lines):
switch = c + 1
mapping[m].append(switch)
return mapping
def run_metis(self, graph_path, share_path):
n_physical_hosts = len(self.get_physical_hosts(share_path))
cmd=f"gpmetis -ptype=rb -tpwgts={str(share_path)} {str(graph_path)} {n_physical_hosts}"
output = subprocess.check_output(cmd, shell=True)
out = output.decode("utf-8")
return out
def get_mapping_for_all_nodes(self, mapping_node_names):
total_mapping={host: mapping_node_names[host] for host in mapping_node_names.keys()}
for host in total_mapping.keys():
for node in total_mapping[host]:
total_mapping[host] += self.get_connected_hosts(node)
return total_mapping
def get_connected_hosts(self, node_name):
nodes = []
for node in self.getNeighbors(node_name):
if node in self.vHosts:
nodes.append(node)
return nodes
def convert_mapping(self, mapping):
mapping_node_names = {host: [] for host in mapping.keys()}
for host in mapping.keys():
mapping_node_names[host] = [self.metis_node_mapping[node] for node in mapping[host]]
return mapping_node_names
def create_metis_file(self, metis_dict, path):
nodes, edges = len(self.get_metis_nodes()), len(self.get_metis_edges())
sorted_keys = sorted(list(metis_dict.keys()))
metis_lines = [[nodes, edges, "011", "0"]]
for k in sorted_keys:
weight = metis_dict[k]["weight"]
edges = metis_dict[k]["edges"]
line = [weight] + edges
metis_lines.append(line)
with open(Path(path), "w") as file:
for line in metis_lines:
file.write(" ".join([str(x) for x in line]) + "\n")
return metis_lines
def get_physical_hosts(self, share_path):
with open(share_path, "r") as file:
lines = file.readlines()
lines = list(map(lambda x: x.strip(), lines))
while [] in lines:
lines.remove([])
hosts = [x.split('=')[0].strip() for x in lines]
return hosts
def get_metis_nodes(self):
return self.vSwitches
def get_metis_edges(self):
edges = []
for u, v in self.vlinks:
if u in self.vSwitches and v in self.vSwitches:
edges.append((u, v))
return edges
def getNeighbors(self, n):
links = self.vlinks
links = list(filter(lambda x: x[0] == n or x[1] == n, links))
neighbors = set([x[0] for x in links]+[x[1] for x in links] )
neighbors.remove(n)
return list(neighbors)
def convert_in_maxinet_dict(self):
maxinet_nodes = dict()
for n in self.vSwitches:
maxinet_nodes[n] = {"weight": 1, "connected_switches": []}
for n in maxinet_nodes.keys():
connected_nodes = self.getNeighbors(n)
for connected_node in connected_nodes:
if connected_node in self.vHosts:
maxinet_nodes[n]["weight"] += 1
else:
maxinet_nodes[n]["connected_switches"].append(connected_node)
return maxinet_nodes
def req_rate(self, n1, n2):
links = self.virtual_network.links(withInfo=True)
for u, v, d in links:
if (u, v) == (n1,n2) or (v,u) == (n1,n2):
return d["bw"]
raise ValueError("Link {}-{} does not exist")
def convert_in_metis_dict(self, maxinet_dict):
metis_node_mapping = {num+1: node for num, node in enumerate(maxinet_dict.keys())}
node_metis_mapping = {metis_node_mapping[num]: num for num in metis_node_mapping.keys()}
metis_dict = {num: {"weight": None, "edges": []} for num in metis_node_mapping.keys()}
for node in maxinet_dict.keys():
num = node_metis_mapping[node]
metis_dict[num]["weight"] = maxinet_dict[node]["weight"]
for neighboor in maxinet_dict[node]["connected_switches"]:
neighboor_mapped = node_metis_mapping[neighboor]
required_edge_rate = self.req_rate(node, neighboor)
metis_dict[num]["edges"] += [neighboor_mapped, required_edge_rate]
self.metis_node_mapping = metis_node_mapping
self.node_metis_mapping = node_metis_mapping
self.metis_dict = metis_dict
return metis_dict
class BlockMapper(DummyMapper):
def __init__(self, virtual_topo, physical_topo=[],block=10):
self.physical = physical_topo
try:
self.vNodes = zip(sorted(virtual_topo.hosts(), key= lambda x:int(x[1:])),sorted(virtual_topo.switches(), key= lambda x:int(x[1:])))
        except (TypeError, ValueError):  # node names must end in an integer (h1, s1, ...)
print("Not a valid Mapper for this instance")
exit(1)
self.places = self.__places(self.vNodes, physical_topo,block)
def __places(self, vNodes, physical_topo,block):
places={}
vNodes= list(vNodes)
if len(physical_topo) < len(vNodes) / block:
raise Exception("Not a valid Mapper for this instance")
for i, (v, s) in enumerate(vNodes):
places[v] = physical_topo[i//block]
places[s] = physical_topo[i//block]
return places
def place(self, node):
return self.places[node]
class Mapper(object):
def __init__(self, virtual_topo, physical_topo, solver=EmbedGreedy):
""" virtual_topo: virtual topology to map
physical_topo: physical topology to map on
solver: solver class to use to solve the mapping"""
self.virtual_topo = VirtualNetwork.from_mininet(virtual_topo)
self.mininet_virtual=virtual_topo
self.physical_topo = PhysicalNetwork.from_files(physical_topo)
self.prob = None
self.solver = solver
self.solve()
self.places= self.__places()
def solve(self, solver=None):
""" Solve the mapping problem of the virtual topology on the physical
one using the specified solver
solver: solver class to use to solve the mapping
"""
if solver is not None:
self.solver = solver
self.prob = self.solver(virtual=self.virtual_topo, physical=self.physical_topo)
time_solution, status = self.prob.solve()
if status == "0" or status == 0:
raise Exception("Failed to solve")
elif status == "-1" or status == - 1:
raise Exception("Unfeasible Problem")
def __places(self):
places={}
vNodes=self.mininet_virtual.hosts()+self.mininet_virtual.switches()
for node in vNodes:
places[node]=self.place(node)
return places
def place(self, node):
""" Returns physical placement of the node
node: node in the virtual topology
return: name of the physical host to use
"""
if self.prob == None:
self.solve()
place = self.prob.solution.node_info(node)
return place
def placeLink(self, link):
""" Returns physical placement of the link
link: link in the virtual topology
returns: list of placements for the link
"""
if self.prob == None:
self.solve()
n1,n2=link
#p1,p2 = self.prob.solution.node_info(n1),self.prob.solution.node_info(n2)
return {},{}
class Packing(object):
def __init__(self, virtual_topo, cloud_prices,solver=BestFitDopProduct):
""" virtual_topo: virtual topology to map
physical_topo: physical topology to map on
solver: solver class to use to solve the mapping"""
self.virtual_topo = VirtualNetwork.from_mininet(virtual_topo)
self.cloud = CloudInstance.read_ec2_instances(vm_type=cloud_prices)
self.mininet_virtual=virtual_topo
self.prob = None
self.solver = solver
self.places=self.__places()
def solve(self, solver=None):
""" Solve the mapping problem of the virtual topology on the physical
one using the specified solver
solver: solver class to use to solve the mapping
"""
if solver is not None:
self.solver = solver
#virtual_network= VirtualNetwork.from_mininet(self.virtual_topo)
self.prob = self.solver(virtual=self.virtual_topo, physical=self.cloud)
time_solution, status = self.prob.solve()
if status == "0":
raise Exception("Failed to solve")
elif status == "-1":
raise Exception("Unfeasible Problem")
def __places(self):
places=dict()
vNodes=self.mininet_virtual.hosts()+self.mininet_virtual.switches()
for node in vNodes:
places[node]=self.place(node)
return places
def place(self, node):
""" Returns physical placement of the node
node: node in the virtual topology
return: name of the physical host to use
"""
if self.prob == None:
self.solve()
place = self.prob.solution.node_info(node)
return place
def placeLink(self, link):
""" Returns physical placement of the link
link: link in the virtual topology
returns: list of placements for the link
"""
if self.prob == None:
self.solve()
place = self.prob.solution.link_mapping[link]
return place
if __name__ == '__main__':
    # physical = PhysicalNetwork.from_files("/Users/giuseppe/.distrinet/gros_partial")
    virtual_topo = VirtualNetwork.create_fat_tree(k=2, density=2, req_cores=2,
                                                  req_memory=100, req_rate=100)
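    # (Added sketch) The simple mappers expect a Mininet-style topology
    # exposing hosts()/switches(); the TreeTopo import below is an
    # illustrative assumption, not part of this module:
    #     from mininet.topolib import TreeTopo
    #     mapper = RoundRobinMapper(TreeTopo(depth=2, fanout=2),
    #                               physical_topo=["vm1", "vm2"])
    #     print(mapper.places)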
```
#### File: mininet/provision/googlecloudprovision.py
```python
from mininet.provision.provision import Provision
from time import sleep, time
import boto3
import os
import paramiko
import uuid
from botocore.exceptions import ClientError
import progressbar
conf = Provision.get_configurations()
gcloud_conf = conf["gcloud"]
GCLOUD_REGION = gcloud_conf["region"]
SRC_PLAYBOOKS_DIR = "mininet/provision/playbooks"
DST_PLAYBOOKS_DIR = "/root/playbooks"
VOLUME_SIZE= int(gcloud_conf["volumeSize"])
MAIN_USER = gcloud_conf["user"]
KEY_PAIR_NAME_WORKERS = 'DistrinetKey-' + str(uuid.uuid4().hex)
IP_PERMISSION = gcloud_conf["network_acl"]
IMAGE_NAME_GCLOUD = gcloud_conf["image_name"]
KEY_PAIR_NAME_BASTION = gcloud_conf["key_name_gcloud"]
class distrinetGCloud(Provision):
ec2Resource = boto3.resource('ec2', region_name=GCLOUD_REGION)
ec2Client = boto3.client('ec2', region_name=GCLOUD_REGION)
def __init__(self, VPCName, addressPoolVPC, publicSubnetNetwork, privateSubnetNetwork, bastionHostDescription,
workersHostsDescription, **kwargs):
super(distrinetGCloud, self).__init__(VPCName, addressPoolVPC, publicSubnetNetwork, privateSubnetNetwork,
bastionHostDescription, workersHostsDescription, **kwargs)
self.VPCName = VPCName
self.addressPoolVPC = addressPoolVPC
self.publicSubnetNetwork = publicSubnetNetwork
self.privateSubnetNetwork = privateSubnetNetwork
self.bastionHostDescription = bastionHostDescription
self.workersHostsDescription = workersHostsDescription
self.param = kwargs
@staticmethod
def CreateVPC(VpcName, addressPoolVPC, **kwargs):
"""
Create a vpc Object using boto3.resource('ec2')
:param VpcName: Name of the new vpc
:param addressPoolVPC: Ipv4 Address pool assigned to the vpc, be careful to assign a valid address pool: you can
check if your pool is valid at: https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Subnets.html
:param kwargs: Optional parameters that you can assign to the Vpc
:return: Vpc object from boto3.resource('ec2')
"""
@staticmethod
def CheckResources(VpcNeeded=1, ElasticIpNeeded=2, instancesNeeded=(("t3.2xlarge", 2),)):
pass
@staticmethod
def getAllInstancesInVPC(VpcId):
"""
get all the instances type, with the private ip in the vpc
:param vpcId: Id of the Vpc
:return: list containing tuples of (instance_type, private_ip)
"""
pass
@staticmethod
def removeVPC(VpcId):
"""
Remove the vpc using boto3.resource('ec2')
:param vpcId: Id of the Vpc
:return: client response
Script adapted from https://gist.github.com/vernhart/c6a0fc94c0aeaebe84e5cd6f3dede4ce
        TODO: make this cleaner and more modular; it works for now, but the code needs refactoring
"""
pass
@staticmethod
def getImageAMIFromRegion(Region, ImageName):
"""
Return the imageId (ami-xxxxxxxx) for a given ImageName and a given region. Note that an imageId is different
for the same image in a different region.
:param Region: regione name ex. eu-central-1 or us-west-1 etc.
:param ImageName: image Name provided by in the amazon description,
ex. ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190722.1 for Ubuntu bionic
:return: string containing the ImageId
"""
pass
@staticmethod
def modifyEnableDnsSupport(VpcId, Value=True):
"""
Modify the parameter EnableDnsSupport in a given VPC with the new Value
:param VpcId: VpcId where to modify EnableDnsSupport
:param Value: new value of EnableDnsSupport
:return: None
"""
pass
@staticmethod
def modifyEnableDnsHostnames(VpcId, Value=True):
"""
Modify the parameter EnableDnsHostnames in a given VPC with the new Value
:param VpcId: VpcId where to modify EnableDnsHostnames
:param Value: new value of EnableDnsHostnames
:return: None
"""
pass
@staticmethod
def createSubnet(VpcId, subnetName, subnetNetwork, routeTable, **kwargs):
"""
Create a subnet inside a Vpc
:param VpcId: id of a Vpc already created; be careful, the Vpc should be ready, before creating a subnet
:param subnetName: Subnet Name tag associated to the subnet
:param subnetNetwork: Network pool to be assigned, be sure that the subnet is contained in the vpc pool, and
        that the subnet pool is valid. You can check whether it is a valid subnet at:
https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Subnets.html
:param routeTable: Route table object associated to the subnet
:param kwargs: Optional parameters that you can assign to the Subnet
:return: subnet object from boto3.resource('ec2')
"""
pass
@staticmethod
def createInternetGateWay(**kwargs):
"""
Create an Internet Gateway
:param kwargs: Optional parameters that you can assign to the gateway
:return: Internet Gateway object from boto3.resource('ec2'), you can access the Id by: InternetGatewayObject.id
"""
pass
@staticmethod
def attachInternetGateWayToVpc(Vpc, InternetGatewayId, **kwargs):
"""
Attach an Internet gateway already created to a vpc
:param Vpc: vpc object
:param InternetGatewayId: internet gateway id
:param kwargs: Optional parameters that you can assign
:return: None
"""
pass
@staticmethod
def createElasticIp(Domain='vpc', **kwargs):
"""
        Create a new Elastic IP. Be careful: you can have at most 5 Elastic IPs per region
:param Domain: Domain of the Elastic Ip
:param kwargs: Optional parameters that you can assign to the ElasticIp
:return: ElasticIp Client response, you can access the Id by: ElasticIpObject["AllocationId"]
"""
pass
@staticmethod
def assignElasticIp(ElasticIpId, InstanceId):
"""
Assign an Elastic Ip to an instance
:param ElasticIpId: Elastic Ip Id
:param InstanceId: Instance Id, The instance has to be in a valid state
:return: None
"""
pass
@staticmethod
def createNatGateWay(SubnetId, AllocationId, **kwargs):
"""
Create a new Nat Gateway
:param SubnetId: Subnet where to assigne the Nat Gateway
:param AllocationId: ElasticIp Id assigned to the Nat Gateway
:return: NatGateway Client response, you can access the Id by: NatGatewayObject["NatGateway"]["NatGatewayId"]
"""
pass
@staticmethod
def waitNatGateWaysAvailable(NatGatewaysId):
"""
Wait until the NatGateway is in Available state
:param NatGatewaysId: Nat Gateway Id
:return: None
"""
pass
@staticmethod
def createRouteTable(Vpc, TableName, **kwargs):
"""
Create a new route table inside a Vpc
:param Vpc: Vpc Object where to create the table
:param TableName: Tag Name added to the new table
:param kwargs: Optional parameters that you can assign to the RouteTable
:return: Route Table Object
"""
pass
@staticmethod
def addRoute(routeTable, GatewayId, DestinationCidrBlock, **kwargs):
"""
Add new route in the route table
:param routeTable: RouteTable Object
:param GatewayId: Gateway Id to add in the route
:param DestinationCidrBlock: Ip subnet to route. "0.0.0.0/0" for all the traffic
:param kwargs: Optional parameters that you can assign to the route
:return: Route Object
"""
pass
@staticmethod
def runInstances(SubnetId, numberOfInstances, instanceType, KeyName, ImageId, **kwargs):
"""
Run multiple instances in the specified SubnetId, using the specified KeyName pair and The specified Id
:param SubnetId: SubnetId where to run the instance
:param numberOfInstances: Number of instances that you want to run; be careful to not exceed the limits imposed
by Amazon AWS
:param instanceType: Type of instance that you want to run
:param KeyName: KeyName present in your account
:param ImageId: Image AMI id provided by Amazon AWS
:param kwargs: Optional parameters to personalize your image, the correct documentation can be found at:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.run_instances
:return: RunInstances Client response, boto3.client('ec2').run_instances()
"""
pass
@staticmethod
def waitInstancesRunning(instancesIdList):
"""
Wait until all the instance in input are in 'running state'
:param instancesIdList: List of instances Ids
:return: None
"""
pass
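        # (Added sketch) boto3 exposes a ready-made waiter for this:
        #     distrinetGCloud.ec2Client.get_waiter('instance_running').wait(
        #         InstanceIds=instancesIdList)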
@staticmethod
def extractInstancesId(runInstancesResponse):
"""
Take in input the entire response from runInstance method and extract the Ids of all the instances created by
the call
:param runInstancesResponse: runInstance Response
:return: List of Instance Ids
"""
pass
@staticmethod
def createSecurityGroup(VpcId, GroupName="Distrinet", Description="Distrinet", **kwargs):
"""
Create a new Security in the Vpc
:param VpcId: Vpc Id where to create the new Security Group
:param GroupName: Name of the Security Group, 'Distrinet by default'
:param Description: description of the new Security Group
:param kwargs: Optional parameters that you can assign to the Security Group
:return: SecurityGroup Client response, you can access the Id by SecurityGroupResponse["GroupId"]
"""
pass
@staticmethod
def extractSecurityGroupId(createSecurityGroupResponse):
"""
Take in input the entire response from createSecurityGroup method and extract the Id of the Security Group
:param createSecurityGroupResponse: createSecurityGroup response
:return: Security Group Id
"""
pass
@staticmethod
def AuthorizeSecurityGroupTraffic(GroupId, IpPermissions, Directions=[]):
"""
Add a Security Group Rule in the Security Group
:param GroupId: Security Group Id
:param IpPermissions: Description of the rule. You can find the documentation at:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.authorize_security_group_egress
or:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.authorize_security_group_ingress
:param Directions: list of the directions of the rule; it can be:
['ingress','egress'] or ['egress'] or ['ingress']
:return: tuple containing (ingressData, egressData); in case you don't specify one of the two,
that part will be None.
Example:
>>> i, e = distrinetGCloud.AuthorizeSecurityGroupTraffic("id-xxxx", {"RULES..."},Directions=["egress"])
>>> i
None
>>> e
SOMEDATA....
"""
pass
@staticmethod
def modifyGroupId(instancesId, Groups, **kwargs):
"""
Change the Security Groups Assigned to an instance
:param instancesId: Instance where to modify the Security Groups
:param Groups: List of Security groups Ids to set in the instance
:param kwargs: Optional parameters that you can assign to the boto3.client("ec2").modify_instance_attribute
method, you can find the correct documentation at:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.modify_instance_attribute
:return: boto3.client("ec2").modify_instance_attribute response
"""
pass
@staticmethod
def createKeyPair(KeyName, **kwargs):
"""
Create a new key pair in your account; if the KeyName already exists, it will be overridden
:param KeyName: Name assigned to the new key pair
:param kwargs: Optional parameters that you can assign to boto3.client("ec2").create_key_pair method,
you can find the correct documentation at:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.create_key_pair
:return: boto3.client("ec2").create_key_pair response
"""
pass
@staticmethod
def grantRootAccess(SshSession):
""" Takes in input an SshSession object, from a User with root privileges, and allows to
connect the root user by ssh
:param SshSession: SshSession from a User with root privileges
:return: the execution results of the commands
"""
commands = []
commands.append('sudo rm /root/.ssh/authorized_keys')
commands.append('sudo cp $HOME/.ssh/authorized_keys /root/.ssh/authorized_keys')
command = ";".join(commands)
return distrinetGCloud.executeCommand(SshSession=SshSession, command=command)
@staticmethod
def setupBastionHost(SshSession, PrivateKey):
"""
Takes in input an SshSession object, from a User with root privileges, and copies the PrivateKey to
$HOME/.ssh/id_rsa and /root/.ssh/id_rsa; after that it updates the system and installs Ansible
:param SshSession: SshSession from a User with root privileges
:param PrivateKey: Private key to install in the host from the SshSession
:return: the execution results of the commands
"""
commands = []
commands.append('sudo echo -e "{}" > $HOME/.ssh/id_rsa'.format(PrivateKey))
commands.append('sudo chmod 0400 $HOME/.ssh/id_rsa')
commands.append('sudo cp $HOME/.ssh/id_rsa /root/.ssh/id_rsa')
commands.append('sudo apt update ')
commands.append('sleep 5')
commands.append('sudo DEBIAN_FRONTEND=noninteractive apt install -y -q software-properties-common')
commands.append('sudo apt-add-repository --yes --update ppa:ansible/ansible')
commands.append('sudo DEBIAN_FRONTEND=noninteractive apt install -y -q ansible')
command = ";".join(commands)
return distrinetGCloud.executeCommand(SshSession=SshSession, command=command)
@staticmethod
def setupMasterAutorizedKeysOnWorkers(SshSession, WorkerHostsIp):
"""
Append the master's authorized_keys to the workers' authorized_keys
:param SshSession: SshSession from a User with root privileges
:param WorkerHostsIp: list of the worker host private Ip
:return: None
"""
command = "cat $HOME/.ssh/authorized_keys"
i, sessionAuthorizedKeys, e = distrinetGCloud.executeCommand(SshSession=SshSession, command=command)
for workerIp in WorkerHostsIp:
command = " ssh root@{} echo '{} >> $HOME/.ssh/authorized_keys'".format(workerIp, str(sessionAuthorizedKeys,
"utf-8")[:-1])
distrinetGCloud.executeCommand(SshSession=SshSession, command=command)
@staticmethod
def releaseElasticIP(ElasticIpID):
"""
Release an Elastic IP
:param ElasticIpID: IP id
:return: client response
"""
def deploy(self):
pass
def GCloudProvisionHelper(*args, instanceType='t3.2xlarge', volumeSize=10, **kwargs):
pass
def optimizationGCloudHelper(node_assignment):
pass
if __name__ == '__main__':
a={'host_2': ('t3.xlarge', 0), 'host_1': ('t3.xlarge', 0), 'host_3': ('t3.xlarge', 1), 'host_4': ('t3.xlarge', 1),
'aggr_1': ('t3.xlarge', 2), 'core_1': ('t3.xlarge', 2), 'aggr_2': ('t3.xlarge', 3), 'edge_1': ('t3.xlarge', 3),
'edge_2': ('t3.xlarge', 4)}
optimizationGCloudHelper(a)
``` |
{
"source": "a81940595/DNN",
"score": 3
} |
#### File: a81940595/DNN/populate_user_rate.py
```python
import os
import random
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "movierecomend.settings")
django.setup()
strs = 'abcdefghijk_mnopqrstuvwxyz'
from movie.models import *
# Randomly generate a username
def random_user_name(length=5):
return ''.join(random.choices(strs, k=length))
def random_phone():
res = ''.join([str(random.randint(0, 9)) for _ in range(11)])
return res
def random_mark():
return random.randint(1, 5)
def populate_user(nums):
for i in range(nums):
user_name = random_user_name()
user, created = User.objects.get_or_create(username=user_name, defaults={
'password': <PASSWORD>,
'email': user_name + '@163.com'
})
print(user, 'created:', created)
def populate_user_rating(rating_numbers):
for i in range(rating_numbers):
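# order_by('?') asks the database for a random ordering (e.g. ORDER BY
# RANDOM()), which is convenient for seeding but slow on large tables.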
user = User.objects.order_by('?').first()
movie = Movie.objects.order_by('?').first()
rating, created = Rate.objects.get_or_create(user=user, movie=movie, defaults={"mark": random_mark()})
print(rating, 'created', created)
if __name__ == '__main__':
populate_user(1000)
# Randomly generate user ratings; the parameter is how many to create
populate_user_rating(10000)
``` |
{
"source": "a8252525/IoTTalk",
"score": 3
} |
#### File: crawler/crawler/parse_weather.py
```python
import time
from bs4 import BeautifulSoup
from io import open
import DAN
def get_element(soup, tag, class_name):
data = []
table = soup.find(tag, attrs={'class':class_name})
rows = table.find_all('tr')
del rows[0]
for row in rows:
first_col = row.find_all('th')
cols = row.find_all('td')
cols.insert(0, first_col[0])
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols if ele])
return data
region ='BaoShan'
file_name = region+".html"
with open(file_name, 'r', encoding='utf-8') as f:
s = f.read()
soup = BeautifulSoup(s, "lxml")
df_tmp = get_element(soup, 'table','BoxTable')
print(df_tmp[0])
# Crawling finished above
#####################################################
# Push to IoTTalk below
ServerURL = 'https://5.IoTtalk.tw' #with SSL connection
Reg_addr = None #if None, Reg_addr = MAC address
DAN.profile['dm_name']='0858812_1120'
DAN.profile['df_list']=['Humidity', 'Temperature', 'Total_volume_of_rain', 'temp_wind' ,'windPower', 'wind_dir']
DAN.device_registration_with_retry(ServerURL, Reg_addr)
print('try to push')
while True:
try:
#print(df_tmp[0])
DAN.push ('Humidity', df_tmp[0][8])
DAN.push ('Temperature', df_tmp[0][1])
DAN.push ('Total_volume_of_rain', df_tmp[0][10])
DAN.push ('temp_wind', df_tmp[0][6])
DAN.push ('windPower', df_tmp[0][5])
DAN.push ('wind_dir', df_tmp[0][4])
except Exception as e:
print(e)
if str(e).find('mac_addr not found:') != -1:
print('Reg_addr is not found. Try to re-register...')
DAN.device_registration_with_retry(ServerURL, Reg_addr)
else:
print('Connection failed due to unknown reasons.')
time.sleep(1)
time.sleep(1)
#########################################################################################
"""
import pandas as pd
print ('Region :', region,'Building table ...')
col_list = ['觀測時間', '溫度(°C)', '溫度(°F)', '天氣', '風向', '風力 (m/s)|(級)', '陣風 (m/s)|(級)', '能見度(公里)', '相對溼度(%)', '海平面氣壓(百帕)', '當日累積雨量(毫米)', '日照時數(小時)']
df = pd.DataFrame(columns = col_list)
df_tmp = pd.DataFrame(df_tmp)
df_tmp.columns = col_list
df = pd.concat([df, df_tmp], axis=0)
df = df.reset_index(drop=True)
df.to_csv(( region + '.csv'), encoding = 'utf-8')
"""
``` |
{
"source": "a825338908/ros_detect_planes_from_depth_img",
"score": 3
} |
#### File: a825338908/ros_detect_planes_from_depth_img/run_server.py
```python
from ros_detect_planes_from_depth_img.msg import PlanesResults
from plane_detector import PlaneDetector, PlaneParam
from utils.lib_ros_rgbd_pub_and_sub import DepthImageSubscriber, ColorImageSubscriber, ColorImagePublisher
import rospy
import cv2
import numpy as np
import argparse
import yaml
import os
import sys
def parse_command_line_arguments():
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-c", "--config_file", required=False,
default='config/plane_detector_config.yaml',
help="Path to the plane detecton configuration file. ")
parser.add_argument("-d", "--depth_topic", required=True,
help="Topic name of depth image (no distortion).")
parser.add_argument("-m", "--camera_info", required=True,
help="Path to camera info file. "
"The distortion must be zero."
"Depth and color images must share the same parameters.")
parser.add_argument("-i", "--color_topic", required=False,
default="",
help="Topic name of color image (no distortion). "
"This topic can be empty, "
"because color image is only for visualization purpose."
"If empty, a black image will be used instead.")
args = parser.parse_args(rospy.myargv()[1:])
return args
def read_config_file(config_file_path):
if not os.path.exists(config_file_path):
raise RuntimeError("Config file doesn't exist: " + config_file_path)
rospy.loginfo("Read config from: " + config_file_path)
def read_yaml_file(file_path):
with open(file_path, 'r') as stream:
data = yaml.safe_load(stream)
return data
config = read_yaml_file(config_file_path)
return config
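# The config file is expected to provide at least the topic names read in
# main() below; a sketch with placeholder values (the keys come from this
# file, the values are assumptions):
#   topic_colored_mask: "detect_plane/colored_mask"
#   topic_image_viz: "detect_plane/image_viz"
#   topic_result: "detect_plane/results"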
class PlaneResultsPublisher(object):
def __init__(self, topic_name, queue_size=10):
self._pub = rospy.Publisher(
topic_name, PlanesResults, queue_size=queue_size)
def publish(self, plane_params):
'''
Arguments:
plane_params {list of PlaneParam}
'''
res = PlanesResults()
res.N = len(plane_params)
for pp in plane_params:
res.norms.extend(pp.w.tolist())
res.center_3d.extend(pp.pts_3d_center.tolist())
res.center_2d.extend(pp.pts_2d_center.tolist())
res.mask_color.extend(pp.mask_color.tolist())
self._pub.publish(res)
return
def main(args):
# -- Plane detector.
detector = PlaneDetector(args.config_file, args.camera_info)
# -- Set up color/depth image subscribers.
sub_depth = DepthImageSubscriber(args.depth_topic)
if args.color_topic:
sub_color = ColorImageSubscriber(args.color_topic)
else:
sub_color = None
# -- Set up mask/viz/results publishers.
config = read_config_file(args.config_file)
pub_colored_mask = ColorImagePublisher(config["topic_colored_mask"])
pub_image_viz = ColorImagePublisher(config["topic_image_viz"])
pub_results = PlaneResultsPublisher(config["topic_result"])
# -- Wait for subscribing image and detect.
while not rospy.is_shutdown():
if sub_depth.has_image():
# -- Read next color and depth image.
depth = sub_depth.get_image()
if sub_color is not None:
while not sub_color.has_image():
rospy.sleep(0.005)
color = sub_color.get_image()
else:
color = None
# -- Detect plane.
rospy.loginfo("=================================================")
rospy.loginfo("Received an image. Start plane detection.")
list_plane_params, colored_mask, img_viz = detector.detect_planes(
depth, color)
# -- Print result.
for i, plane_param in enumerate(list_plane_params):
plane_param.print_params(index=i+1)
# -- Publish result.
pub_colored_mask.publish(colored_mask)
pub_image_viz.publish(img_viz)
pub_results.publish(list_plane_params)
rospy.loginfo("Publish results completes.")
rospy.loginfo("-------------------------------------------------")
rospy.loginfo("")
if __name__ == '__main__':
node_name = "plane_detector_server"
rospy.init_node(node_name)
args = parse_command_line_arguments()
main(args)
rospy.logwarn("Node `{}` stops.".format(node_name))
```
#### File: ros_detect_planes_from_depth_img/utils/lib_ransac.py
```python
import numpy as np
import scipy
import scipy.linalg
import time
def random_partition(n, N):
# get n random indices out of [0, 1, 2, ..., N-1]
indices = np.random.permutation(N)
return indices[:n], indices[n:]
class PlaneModel(object):
def __init__(self, feature_dimension=3):
''' For a plane in 3D world, feature_dimension=3.
Plane model with weights w:
w[0] + w[1]*x + w[2]*y + w[3]*z = 0
'''
self._feature_dimension = feature_dimension
def fit_plane(self, points):
''' Fit a plane to the data points.
Return:
w: shape=(4, ). Plane model:
w[0] + w[1]*x + w[2]*y + w[3]*z = 0
Algorithm: Compute PCA by svd algorithm.
The least axis of PCA is the plane normal direction.
Details:
U, S, W = svd(Xc), where Xc is X subtracted by X's average.
if Xc=3*N, U[:, -1], last col is the plane norm
if Xc=N*3, W[-1, :], last row is the plane norm
Besides, S holds the square roots of the eigenvalues
'''
points = self._check_data(points)
X = points
X_mean = np.mean(X, axis=0) # Squash each column to compute mean.
Xc = X - X_mean[np.newaxis, :]
U, S, W = np.linalg.svd(Xc)
plane_normal = W[-1, :]
'''
Compute the bias:
The fitted plane is this: w[1]*(x-xm)+w[2]*(y-ym)+w[3]*(z-zm)=0
Change it back to the original: w[1]*x+w[2]*y+w[3]*z+(-w[1]*xm-w[2]*ym-w[3]*zm)=0
--> w[0] = -w[1]*xm - w[2]*ym - w[3]*zm
'''
w_0 = np.dot(X_mean, -plane_normal)
w_1 = plane_normal
w = np.concatenate(([w_0], w_1))
return w
def get_error(self, points, w):
''' Compute the distance between each data point and plane.
Arguments:
points: shape=(N, 3).
w: Plane weights. shape=(4, ).
Return:
dists: shape=(N, )
'''
points = self._check_data(points)
dists = np.abs(w[0] + points.dot(w[1:]))/np.linalg.norm(w[1:])
return dists
def _check_data(self, points):
''' Make sure the data shape is (N, 3). '''
if self._feature_dimension not in points.shape:
raise ValueError("Wrong input data shape")
if points.shape[0] == self._feature_dimension:
points = points.T
return points
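# A minimal usage sketch for PlaneModel (synthetic data, not from the
# original module):
#   pts = np.random.rand(100, 3)
#   pts[:, 2] = 2.0                  # all points lie on the plane z = 2
#   model = PlaneModel()
#   w = model.fit_plane(pts)         # w is proportional to (-2, 0, 0, 1)
#   dists = model.get_error(pts, w)  # all distances are ~0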
class RansacPlane(object):
def __init__(self):
pass
def fit(self,
points, # 3xN or Nx3 points of xyz positions.
model, # The PlaneModel.
# n_pts_fit_model: Number of points to sample from source pcd.
# This should be the minimum number of points to fit a plane model.
n_pts_fit_model,
n_min_pts_inlier, # Min number of points for a valid plane.
max_iter,
# dist_thresh: A point is considered as inlier if its distance to the plane is smaller than this.
dist_thresh,
# is_print_iter: Print in each iteration when a better model is found.
is_print_iter=False,
is_print_res=True, # Print final results.
):
'''
Return:
is_succeed {bool}
best_w {1D array, size=4}: weight of the detected plane.
Plane model: w[0] + w[1]*x + w[2]*y + w[3]*z = 0.
best_res_inliers {1D array}: Indices of the points in the source point cloud
which are part of the detected plane.
'''
FAILURE_RETURN = False, None, None
# -- Check input
if points.shape[1] != 3: # shape: (3, N) --> (N, 3)
points = points.T
if len(points) < n_min_pts_inlier:
return FAILURE_RETURN
# -- Init variables
N = points.shape[0] # Number of data points.
t0 = time.time() # Timer
n_pts_fit_model += 1
# Variables to store the best model.
best_w = None
best_res_num_inliers = -1
best_res_inliers = []
# -- Start random sampling and fitting.
for i in range(max_iter):
# -- Step 1: Sample some data to fit a model_A.
maybe_idxs = self._sample_indices(n_pts_fit_model, N)
maybe_w, maybe_error, all_error = self._fit_model(
maybe_idxs, points, model)
n_pts_inlier = np.count_nonzero(all_error < dist_thresh)
if n_pts_inlier >= n_min_pts_inlier: # A good model is detected.
# -- Step 2: Use the inliers of model_A to fit a model_B.
# Let's use part of the inliers to fit the model again
also_idxs = np.arange(N)[all_error < dist_thresh]
# Limit the number of also_idxs to avoid using too many points to compute the model
np.random.shuffle(also_idxs)
also_idxs = also_idxs[:n_min_pts_inlier]
# Fit the model
also_w, also_error, all_error = self._fit_model(
also_idxs, points, model)
# -- Step 3: Compare model_B with best model,
# and decide whether we use model_B or not.
# Select a criteria for evaluating the model.
# Here we use the number of points.
best_inliers = np.arange(N)[all_error < dist_thresh]
n_pts_inlier = len(best_inliers)
if n_pts_inlier > best_res_num_inliers:
if is_print_iter:
print("A better model is found "
"in {}th iter: number of inliers = {}".format(
i, n_pts_inlier))
best_res_num_inliers = n_pts_inlier
best_w = also_w
best_res_inliers = best_inliers
# -- Check if a good model is found.
if best_w is None:
return FAILURE_RETURN
else:
is_succeed = True
# -- Print time cost.
n_inliers = len(best_res_inliers)
if is_print_res:
print("RANSAC performance report:")
print(" Source data points = {}".format(N))
print(" Inlier data points = {}".format(n_inliers))
print(" Iterations = {}".format(max_iter))
print(" Time cost = {:.3} seconds".format(time.time()-t0))
print(" Plane model: w[0] + w[1]*x + w[2]*y + w[3]*z = 0")
print(" Weights: w = {}".format(best_w))
# -- Return result.
return is_succeed, best_w, best_res_inliers
def _sample_indices(self, n_pts_fit_model, N):
rand_indices = np.random.permutation(N)
return rand_indices[:n_pts_fit_model]
def _fit_model(self, maybe_idxs, data, model):
maybe_data = data[maybe_idxs]
maybe_w = model.fit_plane(maybe_data)
maybe_error = model.get_error(maybe_data, maybe_w)
all_error = model.get_error(data, maybe_w)
return maybe_w, maybe_error, all_error
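# A minimal usage sketch for RansacPlane (the numeric values are assumptions):
#   ransac = RansacPlane()
#   ok, w, inlier_idx = ransac.fit(points, PlaneModel(), n_pts_fit_model=3,
#                                  n_min_pts_inlier=50, max_iter=100,
#                                  dist_thresh=0.01)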
``` |
{
"source": "a84885640/Python",
"score": 4
} |
#### File: Basics/Functions/File.py
```python
varGlobal = 1
def AwesomeFunction():
"This function is awesome"
print("Hello World")
print("My name is bob and no i cannot fix it")
return
AwesomeFunction()
AwesomeFunction()
AwesomeFunction()
def AwesomeFunction2(number1, number2):
"Adds the numbers together"
return number1 + number2
print(AwesomeFunction2(5, 6))
var1 = 5
print(var1)
def ChangeFunction(number1):
"Change function"
number1 = 8
return
ChangeFunction(var1)
print(var1)  # still 5: rebinding the parameter inside the function does not change var1
def DefaultArg(var1 = 8):
return var1 * 2
print(DefaultArg(9))
print(DefaultArg())
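# Note: default argument values are evaluated once, at definition time.
# A sketch of the classic pitfall this implies (not in the original file):
# def Append(item, target=[]):  # the SAME list object is reused on every call
#     target.append(item)
#     return target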
```
#### File: GUI/Radiobutton/File.py
```python
from tkinter import *
def select():
selection = "Awesome option " + str(var.get())
label.config(text = selection)
window = Tk()
var = IntVar()
radio1 = Radiobutton(window, text = "Option 1", variable = var, value = 1, command = select)
radio1.pack(anchor = W)
radio2 = Radiobutton(window, text = "Option 2", variable = var, value = 2, command = select)
radio2.pack(anchor = W)
radio3 = Radiobutton(window, text = "Option 3", variable = var, value = 3, command = select)
radio3.pack(anchor = W)
radio4 = Radiobutton(window, text = "Option 4", variable = var, value = 4, command = select)
radio4.pack(anchor = W)
label = Label(window)
label.pack()
window.mainloop()
``` |
{
"source": "a8568730/Tau-Phah-Ji-Command",
"score": 3
} |
#### File: a8568730/Tau-Phah-Ji-Command/tauphahji_cmd.py
```python
import json
from urllib.parse import urlencode
from http.client import HTTPSConnection
def tàuphahjī(漢羅, **tshamsoo):
conn = HTTPSConnection(
"hokbu.ithuan.tw"
)
tshamsoo = urlencode({
'taibun': 漢羅,
**tshamsoo,
})
headers = {
"Content-type": "application/x-www-form-urlencoded",
"Accept": "text/plain"
}
conn.request(
"POST",
"/tau",
tshamsoo,
headers,
)
responseStr = conn.getresponse().read().decode('utf-8')
return json.loads(responseStr)
def liânKù(多元書寫, 欲連的key):
if 欲連的key == '漢字':
return liânHànjī(多元書寫)
elif 欲連的key == '臺羅':
return liânTâilô(多元書寫)
def liânHànjī(多元書寫):
return ''.join([i['漢字'].replace('.', '。') for i in 多元書寫])
def liânTâilô(多元書寫):
return '. '.join([i['臺羅'] for i in 多元書寫])
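# A minimal usage sketch (the '多元書寫' response key is an assumption based on
# liânKù's parameter name; the endpoint is the one defined above):
#   res = tàuphahjī(hanlo_text)      # hanlo_text: any 漢羅 string
#   print(liânTâilô(res['多元書寫']))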
``` |
{
"source": "a868111817/language-model-playground",
"score": 3
} |
#### File: model/_res_sattn_gru/test_signature.py
```python
r"""Test :py:class:`lmp.model._res_sattn_gru` signature."""
import inspect
from inspect import Parameter, Signature
from typing import Dict, Optional
from lmp.model._res_sattn_gru import ResSAttnGRUBlock, ResSAttnGRUModel
from lmp.model._res_sattn_rnn import ResSAttnRNNBlock, ResSAttnRNNModel
from lmp.model._sattn_rnn import SAttnRNNBlock
from lmp.tknzr._base import BaseTknzr
def test_class():
r"""Ensure class signature."""
assert inspect.isclass(ResSAttnGRUBlock)
assert not inspect.isabstract(ResSAttnGRUBlock)
assert issubclass(ResSAttnGRUBlock, ResSAttnRNNBlock)
assert issubclass(ResSAttnGRUBlock, SAttnRNNBlock)
assert inspect.isclass(ResSAttnGRUModel)
assert not inspect.isabstract(ResSAttnGRUModel)
assert issubclass(ResSAttnGRUModel, ResSAttnRNNModel)
def test_class_attribute():
r"""Ensure class attributes' signature."""
assert isinstance(ResSAttnGRUModel.model_name, str)
assert ResSAttnGRUModel.model_name == 'res-sattn-GRU'
assert ResSAttnGRUModel.file_name == 'model-{}.pt'
def test_instance_method():
r"""Ensure instance methods' signature."""
assert hasattr(ResSAttnGRUBlock, '__init__')
assert inspect.signature(ResSAttnGRUBlock.__init__) == Signature(
parameters=[
Parameter(
name='self',
kind=Parameter.POSITIONAL_OR_KEYWORD,
default=Parameter.empty,
),
Parameter(
name='d_hid',
kind=Parameter.KEYWORD_ONLY,
annotation=int,
default=Parameter.empty,
),
Parameter(
name='n_hid_lyr',
kind=Parameter.KEYWORD_ONLY,
annotation=int,
default=Parameter.empty,
),
Parameter(
name='p_hid',
kind=Parameter.KEYWORD_ONLY,
annotation=float,
default=Parameter.empty,
),
Parameter(
name='kwargs',
kind=Parameter.VAR_KEYWORD,
annotation=Optional[Dict],
),
],
return_annotation=Signature.empty,
)
assert hasattr(ResSAttnGRUModel, '__init__')
assert inspect.signature(ResSAttnGRUModel.__init__) == Signature(
parameters=[
Parameter(
name='self',
kind=Parameter.POSITIONAL_OR_KEYWORD,
default=Parameter.empty,
),
Parameter(
name='d_emb',
kind=Parameter.KEYWORD_ONLY,
annotation=int,
default=Parameter.empty,
),
Parameter(
name='d_hid',
kind=Parameter.KEYWORD_ONLY,
annotation=int,
default=Parameter.empty,
),
Parameter(
name='n_hid_lyr',
kind=Parameter.KEYWORD_ONLY,
annotation=int,
default=Parameter.empty,
),
Parameter(
name='n_post_hid_lyr',
kind=Parameter.KEYWORD_ONLY,
annotation=int,
default=Parameter.empty,
),
Parameter(
name='n_pre_hid_lyr',
kind=Parameter.KEYWORD_ONLY,
annotation=int,
default=Parameter.empty,
),
Parameter(
name='p_emb',
kind=Parameter.KEYWORD_ONLY,
annotation=float,
default=Parameter.empty,
),
Parameter(
name='p_hid',
kind=Parameter.KEYWORD_ONLY,
annotation=float,
default=Parameter.empty,
),
Parameter(
name='tknzr',
kind=Parameter.KEYWORD_ONLY,
annotation=BaseTknzr,
default=Parameter.empty,
),
Parameter(
name='kwargs',
kind=Parameter.VAR_KEYWORD,
annotation=Optional[Dict],
),
],
return_annotation=Signature.empty,
)
def test_inherent_method():
r'''Ensure inherited methods' signatures are the same as the base class's.'''
assert (
inspect.signature(ResSAttnRNNModel.forward)
==
inspect.signature(ResSAttnGRUModel.forward)
)
assert (
inspect.signature(ResSAttnRNNModel.load)
==
inspect.signature(ResSAttnGRUModel.load)
)
assert (
inspect.signature(ResSAttnRNNModel.loss_fn)
==
inspect.signature(ResSAttnGRUModel.loss_fn)
)
assert (
inspect.signature(ResSAttnRNNModel.pred)
==
inspect.signature(ResSAttnGRUModel.pred)
)
assert (
inspect.signature(ResSAttnRNNModel.ppl)
==
inspect.signature(ResSAttnGRUModel.ppl)
)
assert (
inspect.signature(ResSAttnRNNModel.save)
==
inspect.signature(ResSAttnGRUModel.save)
)
assert (
inspect.signature(ResSAttnRNNModel.train_parser)
==
inspect.signature(ResSAttnGRUModel.train_parser)
)
``` |
{
"source": "a892574222/game",
"score": 2
} |
#### File: game/src/E_2_4.py
```python
import time
import logging
from template import Template,ran
import random
class KO(Template):
def __init__(self):
self.times = 0
self.index = -1
self.repair_flag = 0
self.pair = 0
self.core =[]
super(KO, self).__init__()
def detach(self):
self.get_pic()
if self.is_found(self.ps.battle) and not self.is_found(self.ps.restore):
logging.info("主界面且无修理")
return 1
elif self.is_found(self.ps.emergency) and self.repair_flag==0:
logging.info("队伍重创")
return 18
elif self.is_found(self.ps.e2_4) and not self.repair_flag and not self.is_found(self.ps.normal):
logging.info("任务选择")
return 2
elif self.is_found(self.ps.normal) :
logging.info("选择作战")
return 3
elif self.is_found(self.ps.boss2) and self.is_found(self.ps.start) :
print('start')
logging.info("初始任务地图并且执行")
return 4
elif self.is_found(self.ps.end2_4_1) and self.is_found(self.ps.end2_4_2):
logging.info("结束本局")
return 5
elif self.is_found(self.ps.settlement) or self.is_found(self.ps.share) or self.is_found(self.ps.over):
logging.info("结算")
return 6
elif self.is_found(self.ps.corestar):
self.core=self.find_all(self.ps.core)
logging.info("获得核心")
return 7
elif self.is_found(self.ps.e2_4) and self.repair_flag:
logging.info("任务选择&需要修复")
return 8
elif self.is_found(self.ps.battle) and self.is_found(self.ps.restore):
logging.info("位于主界面且需要修理")
return 13
elif self.is_found(self.ps.fast):
logging.info("快速修理")
return 14
elif self.is_found(self.ps.full):
logging.info("仓库满了")
return 15
elif self.is_found(self.ps.factory) and self.is_found(self.ps.strength):
logging.info("强化")
return 16
else:
time.sleep(0.2)
return 0
def sleep(self):
random1=ran(self.times,self.times+149)
# Cap of 150 runs; the more runs, the likelier and longer the rest (at most 0.75 hours)
if random1 >= 150:
time.sleep(18 * self.times)  # rest first, scaled by how many runs have accumulated
self.times = 0
def touch(self):
random1 = ran(1, 4)
# Simulate a touch
if random1==3:
x = ran(284, 475)
y = ran(259, 593)
self.mouse_left_click(x, y)
time.sleep(0.2+random.random())
def drag(self):  # Simulate a drag
random1= ran(1,2)
if random1==1:
self.mouse_drag(51,103,489,649)
time.sleep(0.2 + random.random())
def solve(self):
while 1:
self.sleep()
#print(self.repair_flag)
index = self.detach()
if index == 1:
self.touch()
self.drag()
self.sleep()
x = ran(490, 616)
y = ran(510, 592)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
elif index == 2:
x=ran(253,763)
y=ran(353,409)
self.mouse_left_click(x, y)
time.sleep(1 + random.random())
elif index == 3:
x = ran(403, 495)
y = ran(496, 539)
self.mouse_left_click(x, y)
time.sleep(1 + random.random())
elif index == 4:
# Base
x = ran(171, 218)
y = ran(344, 390)
self.mouse_left_click(x, y)
time.sleep(0.5 + random.random())
# Confirm
x = ran(661, 762)
y = ran(539, 578)
self.mouse_left_click(x, y)
time.sleep(0.3 + random.random())
# Start
x = ran(601, 763)
y = ran(701, 762)
self.mouse_left_click(x, y)
time.sleep(3.5 + random.random()*2)
# Base
x = ran(171, 218)
y = ran(344, 390)
self.mouse_left_click(x, y)
time.sleep(0.2 + random.random() * 2)
# Resupply
if self.pair == 2:
x = ran(171, 218)
y = ran(344, 390)
self.mouse_left_click(x, y)
time.sleep(0.6 + random.random())
x = ran(650, 770)
y = ran(489, 527)
self.mouse_left_click(x, y)
time.sleep(1.5 + random.random())
self.pair = 0
else:
pass
self.pair = self.pair + 2
# Plan
x = ran(10, 89)
y = ran(685, 706)
self.mouse_left_click(x, y)
time.sleep(0.7 + random.random())
#boss
x = ran(481, 526)
y = ran(202, 245)
self.mouse_left_click(x, y)
time.sleep(0.7 + random.random())
# Execute
x = ran(659, 759)
y = ran(712, 760)
self.mouse_left_click(x, y)
time.sleep(0.7 + random.random())
elif index == 5:
self.times = self.times+1
x = ran(659, 759)
y = ran(712, 760)
self.mouse_left_click(x, y)
time.sleep(0.7 + random.random())
elif index == 6:
for _ in range(ran(2,5)):
x = ran(100, 700)
y = ran(100, 700)
self.mouse_left_click(x, y)
time.sleep(0.06+random.random()*0.5)
elif index == 7:
if self.core !=[]:
for i in self.core:  # Select all three-star cores
x = ran(i[0][0], i[3][0])
y = ran(i[0][1], i[3][1])
self.mouse_left_click(x, y)
time.sleep(0.6 + random.random() * 0.5)
# Confirm selection
x = ran(659, 760)
y = ran(342, 413)
self.mouse_left_click(x, y)
time.sleep(1 + random.random())
# Dismantle
x = ran(660, 762)
y = ran(687, 730)
self.mouse_left_click(x, y)
time.sleep(1 + random.random())
# Confirm
x = ran(399, 501)
y = ran(446, 484)
self.mouse_left_click(x, y)
time.sleep(1 + random.random())
# Back
x = ran(10, 51)
y = ran(6, 41)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
else:
x = ran(10, 51)
y = ran(6, 41)
self.mouse_left_click(x, y)
time.sleep(0.5)
x = ran(10, 51)
y = ran(6, 41)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
elif index == 8:  # Repair needed, go back
x = ran(10, 51)
y = ran(6, 41)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
elif index == 13:
# Repair
x = ran(590, 619)
y = ran(362, 394)
self.mouse_left_click(x,y)
time.sleep(0.6 + random.random())
time.sleep(3.0)
elif index == 14:
# Quick repair
self.repair_flag = 0
x = ran(35, 130)
y = ran(260, 570)
self.mouse_left_click(x, y)
time.sleep(0.6 + random.random())
time.sleep(0.5)
x = ran(10,105)
y = ran(70, 239)
self.mouse_left_click(x, y)
time.sleep(0.1+ random.random())
x = ran(120, 210)
y = ran(70, 239)
self.mouse_left_click(x, y)
time.sleep(0.1+random.random())
# Confirm
x = ran(662, 766)
y = ran(298, 365)
self.mouse_left_click(x, y)
time.sleep(0.6 + random.random())
x = ran(169, 212)
y = ran(450, 497)
self.mouse_left_click(x, y)
time.sleep(0.5 + random.random())
x = ran(504, 606)
y = ran(453, 493)
self.mouse_left_click(x, y)
time.sleep(0.5 + random.random())
# Back
x = ran(10, 51)
y = ran(6, 41)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
x = ran(10, 51)
y = ran(6, 41)
self.mouse_left_click(x, y)
time.sleep(3 + random.random())
elif index == 15:
x = ran(428, 538)
y = ran(448, 485)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
elif index == 16:  # Warehouse screen
# Select the character to enhance
x = ran(138, 245)
y = ran(245, 561)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
#选择第一个强化角色
x = ran(9, 108)
y = ran(67, 242)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
#被强化角色(二星枪)
x = ran(267, 371)
y = ran(93, 161)
self.mouse_left_click(x, y)
time.sleep(1 + random.random())
#智能选择&确定
x = ran(659, 760)
y = ran(342, 413)
self.mouse_left_click(x, y)
time.sleep(0.5)
x = ran(659, 760)
y = ran(342, 413)
self.mouse_left_click(x, y)
time.sleep(1 + random.random())
# Enhance
x = ran(660, 762)
y = ran(687, 730)
self.mouse_left_click(x, y)
time.sleep(1 + random.random())
# Recycle & dismantle
x = ran(4, 105)
y = ran(237, 290)
self.mouse_left_click(x, y)
time.sleep(0.5)
x = ran(4, 105)
y = ran(237, 290)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
# Select the dismantle-dolls option
x = ran(157, 265)
y = ran(85, 149)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
elif index == 18:
self.repair_flag = 1
else:
pass
time.sleep(0.2)
```
#### File: game/src/test.py
```python
import time
import logging
from src.template import Template,ran
import random
class KO(Template):
def __init__(self):
self.times = 0
self.index = -1
self.repair_flag = 0
self.pair = 0
self.core =[]
super(KO, self).__init__()
def detach(self):
self.get_pic()
if self.is_found(self.ps.test):
print('test')
return 1000
else:
return 0
def sleep(self):
random1=ran(self.times,self.times+149)
# Cap of 150 runs; the more runs, the likelier and longer the rest (at most 0.75 hours)
if random1 >= 150:
time.sleep(18 * self.times)  # rest first, scaled by how many runs have accumulated
self.times = 0
def touch(self):
random1 = ran(1, 3)
# Simulate a touch
if random1==3:
x = ran(284, 475)
y = ran(259, 593)
self.mouse_left_click(x, y)
time.sleep(0.2+random.random())
def solve(self):
while 1:
self.sleep()
#print(self.repair_flag)
index = self.detach()
if index == 11:
self.touch()
self.sleep()
x = ran(490, 616)
y = ran(510, 592)
self.mouse_left_click(x, y)
time.sleep(2 + random.random())
time.sleep(0.2)
``` |
{
"source": "a8/AWattPrice",
"score": 3
} |
#### File: src/awattprice/notifications.py
```python
import asyncio
import json
from datetime import datetime
from math import floor
from pathlib import Path
from typing import List, Optional, Tuple
import arrow # type: ignore
import httpx
import jwt
from box import Box # type: ignore
from configupdater import ConfigUpdater # type: ignore
from dateutil.tz import tzstr
from fastapi import status
from loguru import logger as log
from tenacity import retry, stop_after_attempt, stop_after_delay, wait_exponential # type: ignore
from awattprice import poll
from awattprice.defaults import CURRENT_VAT, Region
from awattprice.token_manager import APNsTokenManager
from awattprice.types import APNSToken
from awattprice.utils import before_log
class DetailedPriceData:
def __init__(self, data: Box, region_identifier: int):
self.data = data
self.region_identifier = region_identifier
def get_user_prices(
self, below_value: int, region_identifier: int, vat_selection: int
) -> Tuple[List, Optional[int]]:
"""Returns a list of prices which drop below or on a certain value. Also returns a
integer which represents the lowest price point in the returned list.
The marketprices of the price points in the returned list have VAT added if the user selected it (if vat_selection is 1).
"""
below_price_data = []
lowest_index = None
current_index = 0
for price_point in self.data.prices:
timezone = tzstr("CET-1CEST,M3.5.0/2,M10.5.0/3").tzname(datetime.fromtimestamp(price_point.start_timestamp))
now_timezone = arrow.utcnow().to(timezone)
midnight = now_timezone.replace(hour=0, minute=0, second=0, microsecond=0)
tomorrow_boundary_start = midnight.shift(days=+1)
tomorrow_boundary_end = midnight.shift(days=+2)
marketprice_with_vat = price_point.marketprice
if region_identifier == 0 and vat_selection == 1:
marketprice_with_vat = round(price_point.marketprice * CURRENT_VAT, 2)
if (
price_point.start_timestamp >= tomorrow_boundary_start.timestamp
and price_point.end_timestamp <= tomorrow_boundary_end.timestamp
):
if marketprice_with_vat <= below_value:
below_price_data.append(
Box(
{
"start_timestamp": price_point.start_timestamp,
"marketprice": marketprice_with_vat,
} # Don't store end timestamp because a price point is always 1 hour long
)
)
if lowest_index is None:
lowest_index = current_index
else:
if marketprice_with_vat < below_price_data[lowest_index].marketprice:
lowest_index = current_index
current_index += 1
return below_price_data, lowest_index
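# A minimal usage sketch (placeholder values, not from the original):
#   detailed = DetailedPriceData(Box({"prices": [...]}), region_identifier=0)
#   below, lowest_i = detailed.get_user_prices(
#       below_value=20, region_identifier=0, vat_selection=1)
#   if lowest_i is not None:
#       cheapest_point = below[lowest_i]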
class PriceDropsBelow:
# Use localization keys which are resolved on the client side
title_loc_key = "general.priceGuard"
body_loc_key_sing = "notifications.price_drops_below.body.sing" # Price drops below value only once
body_loc_key_mult = "notifications.price_drops_below.body.mult" # Price drops below value multiple times
collapse_id = "collapse.priceDropsBelowNotification.3DK203W0"
def get_body_loc_key(self, count: int) -> str:
if count == 1:
return self.body_loc_key_sing
else:
return self.body_loc_key_mult
class Notifications:
_is_initialized = False
def __init__(self, config: ConfigUpdater) -> None:
self.below_notification = PriceDropsBelow()
self.encryption_algorithm = "ES256"
try:
dev_team_id_path = Path(config.notifications.dev_team_id).expanduser()
self.dev_team_id = open(dev_team_id_path.as_posix(), "r").readlines()[0].replace("\n", "")
encryption_key_id_path = Path(config.notifications.apns_encryption_key_id).expanduser()
self.encryption_key_id = open(encryption_key_id_path.as_posix(), "r").readlines()[0].replace("\n", "")
encryption_key_path = Path(config.notifications.apns_encryption_key).expanduser()
self.encryption_key = open(encryption_key_path.as_posix(), "r").read()
self.url_path = "/3/device/{}"
except Exception as e:
log.warning(
f"Couldn't read or find file(s) containing required information to send notifications "
f"with APNs. Notifications won't be checked and won't be sent by the backend: {e}."
)
return
if config.notifications.use_sandbox:
log.debug("Using sandbox APNs server.")
self.apns_server_url = "https://api.sandbox.push.apple.com"
self.bundle_id = "me.space8.AWattPrice.dev"
else:
log.debug("Using production APNs server.")
self.apns_server_url = "https://api.push.apple.com"
self.bundle_id = "me.space8.AWattPrice"
self.apns_server_port = 443
self._is_initialized = True
@property
def is_initialized(self):
"""Return True if __init__ was successful."""
return self._is_initialized
async def handle_apns_response(db_manager, token, response, status_code, config):
# For reference of returned response and status codes see:
# https://developer.apple.com/documentation/usernotifications/setting_up_a_remote_notification_server/handling_notification_responses_from_apns
if status_code == status.HTTP_200_OK:
return
if status_code in [status.HTTP_400_BAD_REQUEST, status.HTTP_410_GONE]:
remove_token = False
if status_code == status.HTTP_410_GONE and response["reason"] == "Unregistered":
remove_token = True
if status_code == status.HTTP_400_BAD_REQUEST and response["reason"] in [
"BadDeviceToken",
"DeviceTokenNotForTopic",
]:
remove_token = True
if remove_token is True:
token_config = APNSToken(
token=token, region_identifier=0, vat_selection=0, config={}
) # Populate with token and some placeholder values
token_manager = APNsTokenManager(token_config, db_manager)
if not config.general.debug_mode:
token_manager.remove_entry()
log.debug(f"Removed invalid APNs token from database: {response}.")
@retry(
before=before_log(log, "debug"),
stop=(stop_after_delay(60) | stop_after_attempt(8)),
wait=wait_exponential(multiplier=1, min=4, max=10),
reraise=True,
)
async def price_drops_below_notification(
db_manager,
notification_defaults,
config,
price_data,
token,
below_value,
region_identifier,
vat_selection,
):
below_price_data, lowest_index = price_data.get_user_prices(below_value, region_identifier, vat_selection)
if below_price_data and lowest_index is not None:
lowest_point = below_price_data[lowest_index]
log.debug('Sending "Price Drops Below" notification to a user.')
# Get the current timezone (either CET or CEST)
timezone = tzstr("CET-1CEST,M3.5.0/2,M10.5.0/3").tzname(datetime.fromtimestamp(lowest_point.start_timestamp))
lowest_price_start = arrow.get(lowest_point.start_timestamp).to(timezone)
# Full cents, for example 4
lowest_price_floored = floor(lowest_point.marketprice)
# Decimal places of cent, for example 39
lowest_price_decimal = round((lowest_point.marketprice - lowest_price_floored) * 100)
# Together 4,39
formatted_lowest_price = f"{lowest_price_floored},{lowest_price_decimal}"
below_value_floored = floor(below_value)
below_value_decimal = round((below_value - below_value_floored) * 100)
formatted_below_value = f"{below_value_floored},{below_value_decimal}"
encryption_algorithm = notification_defaults.encryption_algorithm
# Set token data
# For reference see: https://developer.apple.com/documentation/usernotifications/setting_up_a_remote_notification_server/establishing_a_token-based_connection_to_apns
token_body = {
"iss": notification_defaults.dev_team_id,
"iat": arrow.utcnow().timestamp,
}
token_headers = {
"alg": notification_defaults.encryption_algorithm,
"kid": notification_defaults.encryption_key_id,
}
token_data_encoded = jwt.encode( # JWT is required by APNs for token based authentication
token_body,
notification_defaults.encryption_key,
algorithm=encryption_algorithm,
headers=token_headers,
)
# Set notification payload
# For reference see: https://developer.apple.com/documentation/usernotifications/setting_up_a_remote_notification_server/generating_a_remote_notification#2943365
notification_payload = {
"aps": {
"alert": {
"title-loc-key": notification_defaults.below_notification.title_loc_key,
"loc-key": notification_defaults.below_notification.get_body_loc_key(len(below_price_data)),
"loc-args": [
len(below_price_data),
formatted_below_value,
lowest_price_start.format("H"),
formatted_lowest_price,
],
},
"badge": 0,
"sound": "default",
"content-available": 0,
}
}
# Set request headers
# For reference see: https://developer.apple.com/documentation/usernotifications/setting_up_a_remote_notification_server/sending_notification_requests_to_apns
request_headers = {
"authorization": f"bearer {token_data_encoded}",
"apns-push-type": "alert",
"apns-topic": notification_defaults.bundle_id,
"apns-expiration": f"{lowest_price_start.timestamp - 3600}",
"apns-priority": "5",
"apns-collapse-id": notification_defaults.below_notification.collapse_id,
}
url = f"{notification_defaults.apns_server_url}:{notification_defaults.apns_server_port}{notification_defaults.url_path.format(token)}"
status_code = None
response = None
async with httpx.AsyncClient(http2=True) as client:
try:
response = await client.post(url, headers=request_headers, data=json.dumps(notification_payload))
except httpx.ConnectTimeout:
log.warning(f"Connect attempt to {url} timed out.")
raise
except httpx.ReadTimeout:
log.warning(f"Read from {url} timed out.")
raise
except Exception as e:
log.warning(f"Unrecognized exception at POST request to {url}: {e}.")
raise
else:
status_code = response.status_code
if response.content.decode("utf-8") == "":
data = {}
else:
try:
data = response.json()
except json.JSONDecodeError as e:
log.warning(f"Couldn't decode response from APNs servers: {e}")
raise
except Exception as e:
log.warning(f"Unknown error while decoding response from APNs servers: {e}")
raise
if response is not None and status_code is not None:
await handle_apns_response(db_manager, token, data, status_code, config)
async def check_and_send(config, data, data_region, db_manager):
# Check which users apply to receive certain notifications and send them to those users.
log.info("Checking and sending notifications.")
notification_defaults = Notifications(config)
if not notification_defaults.is_initialized:
return
all_data_to_check = {}
checked_regions_no_notifications = [] # Already checked regions which don't apply to receive notifications
await db_manager.acquire_lock()
cursor = db_manager.db.cursor()
items = cursor.execute("SELECT * FROM token_storage;").fetchall()
cursor.close()
items = [dict(x) for x in items]
notification_queue = asyncio.Queue()
for notifi_config in items:
try:
configuration = json.loads(notifi_config["configuration"])["config"]
except Exception:
log.warning(
"Internally passed notification configuration of a user couldn't be read "
"while checking if the user should receive notifications."
)
continue
# Check all notification types with the following if statement to see if the user
# wants to get any notifications at all
if configuration["price_below_value_notification"]["active"] is True:
region_identifier = notifi_config["region_identifier"]
region = Region(region_identifier)
if region_identifier not in all_data_to_check:
# Runs if a user is in a different region than those already included in the regions
# to send notification updates.
# Therefore this polls the aWATTar API of that region.
if region.value in checked_regions_no_notifications:
continue
if region == data_region:
region_check_notification = True
region_data = data
else:
region_data, region_check_notification = await poll.get_data(config=config, region=region)
if region_check_notification:
log.debug(f"Need to check and send notifications for data region {region.name}.")
all_data_to_check[region.value] = DetailedPriceData(Box(region_data), region.value)
else:
log.debug(f"Don't need to check and send notifications for data region {region.name}.")
checked_regions_no_notifications.append(region.value)
continue
token = notifi_config["token"]
vat_selection = notifi_config["vat_selection"]
if configuration["price_below_value_notification"]["active"] is True:
# If user applies to get price below value notifications add following item to queue
below_value = configuration["price_below_value_notification"]["below_value"]
await notification_queue.put(
(
price_drops_below_notification,
db_manager,
notification_defaults,
config,
all_data_to_check[region.value],
token,
below_value,
region_identifier,
vat_selection,
)
)
tasks = []
while notification_queue.empty() is False:
task = await notification_queue.get()
tasks.append(
asyncio.create_task(
task[0](*[task[i] for i in range(1, 9)])
)
)
await asyncio.gather(*tasks)
await db_manager.release_lock()
log.info("All notifications checked (and sent) and all connections closed.")
``` |
{
"source": "a8-d2/espnet",
"score": 3
} |
#### File: pytorch_backend/transformer/mask.py
```python
import torch
def subsequent_mask(size, device="cpu", dtype=torch.uint8):
"""Create mask for subsequent steps (1, size, size).
:param int size: size of mask
:param str device: "cpu" or "cuda" or torch.Tensor.device
:param torch.dtype dtype: result dtype
:rtype: torch.Tensor
>>> subsequent_mask(3)
[[1, 0, 0],
[1, 1, 0],
[1, 1, 1]]
"""
ret = torch.ones(size, size, device=device, dtype=dtype)
return torch.tril(ret, out=ret)
def target_mask(ys_in_pad, ignore_id):
"""Create mask for decoder self-attention.
:param torch.Tensor ys_in_pad: batch of padded target sequences (B, Lmax)
:param int ignore_id: index of padding
:rtype: torch.Tensor
"""
ys_mask = ys_in_pad != ignore_id
m = subsequent_mask(ys_mask.size(-1), device=ys_mask.device).unsqueeze(0)
return ys_mask.unsqueeze(-2) & m
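# Worked example (illustrative, not from the original file): with
# ys_in_pad = tensor([[1, 2, 0]]) and ignore_id = 0, the result is
# [[[1, 0, 0],
#   [1, 1, 0],
#   [1, 1, 0]]]  -- the subsequent mask ANDed with the padding mask.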
``` |
{
"source": "a8/discovergy",
"score": 2
} |
#### File: src/discovergy/cli.py
```python
__author__ = "<NAME> <<EMAIL>>"
__copyright__ = "Frank Becker"
__license__ = "mit"
import sys
from discovergy import __version__, poller
from docopt import docopt # type: ignore
from .config import read_config
from .utils import start_logging
def print_help() -> None:
"""Print the help and exit."""
print("The sub command is unknown. Please try again.", end="\n\n")
print(__doc__.format(cmd=sys.argv[0]), file=sys.stderr)
sys.exit(1)
def main():
"""Parse arguments and dispatch to the submodule"""
config = read_config()
start_logging(config)
dispatch = {
"poll": poller.main,
}
arguments = docopt(
__doc__.format(cmd=sys.argv[0]), version=__version__, options_first=True,
)
dispatch.get(arguments["<command>"], print_help)(config)
if __name__ == "__main__":
main()
``` |
{
"source": "a8f/mock-ssh-server",
"score": 2
} |
#### File: mock-ssh-server/mockssh/server.py
```python
import logging
import errno
import os
import select
import socket
import subprocess
import threading
try:
from queue import Queue
except ImportError: # Python 2.7
from Queue import Queue
import paramiko
from mockssh import sftp
__all__ = [
"Server",
]
SERVER_KEY_PATH = os.path.join(os.path.dirname(__file__), "server-key")
class Handler(paramiko.ServerInterface):
log = logging.getLogger(__name__)
def __init__(self, server, client_conn):
self.server = server
self.thread = None
self.command_queues = {}
client, _ = client_conn
self.transport = t = paramiko.Transport(client)
t.add_server_key(paramiko.RSAKey(filename=SERVER_KEY_PATH))
t.set_subsystem_handler("sftp", sftp.SFTPServer)
def run(self):
self.transport.start_server(server=self)
while True:
channel = self.transport.accept()
if channel is None:
break
if channel.chanid not in self.command_queues:
self.command_queues[channel.chanid] = Queue()
t = threading.Thread(target=self.handle_client, args=(channel,))
t.setDaemon(True)
t.start()
def handle_client(self, channel):
try:
command = self.command_queues[channel.chanid].get(block=True)
self.log.debug("Executing %s", command)
p = subprocess.Popen(command, shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
channel.sendall(stdout)
channel.sendall_stderr(stderr)
channel.send_exit_status(p.returncode)
except Exception:
self.log.error("Error handling client (channel: %s)", channel,
exc_info=True)
finally:
channel.close()
def check_auth_publickey(self, username, key):
try:
key_path, known_public_key = self.server._users[username]
if key_path is None:
raise ValueError("Tried to use a key to authorize user " +
username + " who uses a plaintext password")
except KeyError:
self.log.debug("Unknown user '%s'", username)
return paramiko.AUTH_FAILED
if known_public_key == key:
self.log.debug("Accepting public key for user '%s'", username)
return paramiko.AUTH_SUCCESSFUL
self.log.debug("Rejecting public ley for user '%s'", username)
return paramiko.AUTH_FAILED
def check_auth_password(self, username, password):
try:
none, saved_password = self.server._users[username]
if none is not None:
raise ValueError("Tried to use a password to authorize user " +
username + " who uses a key file")
if password == saved_password:
return paramiko.AUTH_SUCCESSFUL
else:
return paramiko.AUTH_FAILED
except KeyError:
self.log.debug("Unknown user '%s'", username)
return paramiko.AUTH_FAILED
def check_channel_exec_request(self, channel, command):
self.command_queues.setdefault(channel.get_id(), Queue()).put(command)
return True
def check_channel_request(self, kind, chanid):
if kind == "session":
return paramiko.OPEN_SUCCEEDED
return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
def get_allowed_auths(self, username):
return "publickey,password"
class Server(object):
log = logging.getLogger(__name__)
def __init__(self, users, host="127.0.0.1", port=0, auth="auto"):
"""
auth: how to authenticate users from users.
One of: - "key" to handle users as uid -> private key path
- "password" to handle users as uid -> password
- "mixed" to handle users as
uid -> (auth_type, password/key path)
- "auto" to handle users as uid -> s where s is
assumed to be a path if it is a file we can open
or a plaintext password otherwise
"""
self._host = host
self._port = port
self._socket = None
self._thread = None
self._users = {}
if auth == "auto":
for uid, password_or_key_path in users.items():
self.add_user(uid, password_or_key_path, keytype="autopass")
elif auth == "key":
for uid, key_path in users.items():
self.add_user(uid, key_path, keytype="auto")
elif auth == "password":
for uid, password in users.items():
self.add_user(uid, password, keytype="password")
elif auth == "mixed":
for uid, (auth_type, password_or_key_path) in users.items():
if auth_type == "auto":
self.add_user(uid, password_or_key_path, keytype="autopass")
elif auth_type == "key":
self.add_user(uid, password_or_key_path, keytype="auto")
elif auth_type == "password":
self.add_user(uid, password_or_key_path, keytype="password")
else:
raise ValueError("Invalid auth_type " + auth_type)
else:
raise ValueError("Invalid auth_type " + auth_type)
def add_user(self, uid, password_or_key_path, keytype="autopass"):
"""
keytype: type of key to use or
"auto" to detect from the file or
"autopass" to detect from the file or if the file can't be
read then treat key as a password
"""
if keytype == "auto" or keytype == "autopass":
try:
with open(password_or_key_path, "r") as file:
line = file.readline().rstrip()
if line.strip() == "-----BEGIN RSA PRIVATE KEY-----":
keytype = "ssh-rsa"
elif line.strip() == "-----BEGIN DSA PRIVATE KEY-----":
keytype = "ssh-dss"
elif line.strip() == "-----BEGIN EC PRIVATE KEY-----":
keytype = "ssh-ecdsa"
else:
try:
if line.split(" ")[0] == "ssh-ed25519":
keytype = "ssh-ed25519"
else:
raise IndexError()
except IndexError:
raise ValueError(password_or_key_path +
" is not a valid supported private key file")
except EnvironmentError as e:
# We really only want to except FileNotFoundError and PermissionError
# but Python2 doesn't have those, so instead except EnvironmentError
# and if the error code isn't ENOENT (FileNotFoundError) or
# EPERM (PermissionError) then raise the error
if e.errno != errno.ENOENT and e.errno != errno.EPERM:
raise e
if keytype == "autopass":
keytype = "password"
else:
raise e
if keytype == "ssh-rsa":
key = paramiko.RSAKey.from_private_key_file(password_or_key_path)
elif keytype == "ssh-dss":
key = paramiko.DSSKey.from_private_key_file(password_or_key_path)
elif keytype == "ssh-ecdsa":
key = paramiko.ECDSAKey.from_private_key_file(password_or_key_path)
elif keytype == "ssh-ed25519":
key = paramiko.Ed25519Key.from_private_key_file(password_or_key_path)
elif keytype == "password":
key = password_or_key_path
else:
raise ValueError("Unable to handle key of type {}".format(keytype))
self._users[uid] = (None if keytype == "password" else password_or_key_path,
key)
def __enter__(self):
self._socket = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
s.bind((self.host, self.port))
except PermissionError as e:
if self.port < 1024:
e.strerror += " (unprivileged users can only use port numbers >= 1024)"
raise e
s.listen(5)
self._thread = t = threading.Thread(target=self._run)
t.setDaemon(True)
t.start()
return self
def _run(self):
sock = self._socket
while sock.fileno() > 0:
self.log.debug("Waiting for incoming connections ...")
rlist, _, _ = select.select([sock], [], [], 1.0)
if rlist:
conn, addr = sock.accept()
self.log.debug("... got connection %s from %s", conn, addr)
handler = Handler(self, (conn, addr))
t = threading.Thread(target=handler.run)
t.setDaemon(True)
t.start()
def __exit__(self, *exc_info):
self._socket.shutdown(socket.SHUT_RDWR)
self._socket.close()
self._thread.join()
self._socket = None
self._thread = None
def client(self, uid):
private_key_path, key_or_pass = self._users[uid]
c = paramiko.SSHClient()
host_keys = c.get_host_keys()
key = paramiko.RSAKey.from_private_key_file(SERVER_KEY_PATH)
host_keys.add(self.host, "ssh-rsa", key)
host_keys.add("[%s]:%d" % (self.host, self.port), "ssh-rsa", key)
c.set_missing_host_key_policy(paramiko.RejectPolicy())
if private_key_path is None:
c.connect(hostname=self.host,
port=self.port,
username=uid,
password=key_or_pass,
allow_agent=False,
look_for_keys=False)
else:
c.connect(hostname=self.host,
port=self.port,
username=uid,
key_filename=private_key_path,
allow_agent=False,
look_for_keys=False)
return c
@property
def port(self):
return self._socket.getsockname()[1] if self._port == 0 else self._port
@property
def host(self):
return self._host
@property
def users(self):
return self._users.keys()
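# A minimal usage sketch (the uid and key path are placeholders):
#   users = {"sample-user": "/path/to/id_rsa"}
#   with Server(users) as server:
#       client = server.client("sample-user")
#       _, stdout, _ = client.exec_command("echo hello")
#       print(stdout.read())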
``` |
{
"source": "a8hay/int246-project",
"score": 3
} |
#### File: a8hay/int246-project/api.py
```python
from flask import Flask, jsonify, request
import io
import json
import torch
import torchvision
from torchvision import transforms
import torch.nn as nn
from PIL import Image
app = Flask(__name__)
def get_model():
global model
model = torchvision.models.densenet121(pretrained=True)
num_ftrs = model.classifier.in_features
model.classifier = nn.Sequential(
nn.Linear(num_ftrs, 500),
nn.Linear(500, 2)
)
model.load_state_dict(torch.load("ckpt_densenet121_catdog.pth", map_location=torch.device("cpu")))
model.to("cpu")
model.eval()
print("model loaded")
def preprocess_image(image_bytes, target_size):
image = Image.open(io.BytesIO(image_bytes))
if image.mode != "RGB":
image = image.convert("RGB")
transform = transforms.Compose([
transforms.Resize(target_size),
transforms.ToTensor()
])
image = transform(image).unsqueeze(dim=0)
return image
def get_prediction(image_bytes):
processed_image = preprocess_image(image_bytes, target_size=(128, 128))
with torch.no_grad():
output = model(processed_image)
pred = torch.argmax(output, dim=1)
res = "dog" if pred.item() else "cat"
return res
ALLOWED_EXTENSIONS = {"png", "jpeg", "jpg"}
def allowed_file(filename):
# abc.png, abc.jpeg, abc.jpg
return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS
@app.route('/')
def hello():
return "hiya"
@app.route('/predict', methods=["POST"])
def predict():
if request.method == "POST":
file = request.files.get("file")
if file is None or file.filename == "":
return jsonify({"error":"no file"})
if not allowed_file(file.filename):
return jsonify({"error":"formated not supported, only jpg, png, jpeg"})
try:
img_bytes = file.read()
pred = get_prediction(img_bytes)
return jsonify({"prediction":pred})
except Exception:
return jsonify({"error":"error during prediction process"})
return jsonify({"bad request":"request is not of post type"})
if __name__ == "__main__":
print("loading pytorch model")
get_model()
app.run(debug=False)
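# Example request once the server is running (Flask's default host/port):
#   curl -F "file=@cat.jpg" http://127.0.0.1:5000/predict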
``` |
{
"source": "a8jan/fujinet-pc-launcher",
"score": 2
} |
#### File: fujinet-pc-launcher/launcher/launcher-test.py
```python
import wx
import wx.adv
import sys
import os
from pathlib import Path
from logview import LogFrame
from procmgr import FujiNetMgr
def ref_id():
return wx.NewId()
try: # Work around for wxpython prior to 4.0.2 where wx.NewIdRef first appeared
TestId = wx.NewIdRef()
except AttributeError:
wx.NewIdRef = ref_id
class LedIndicator(wx.Window):
def __init__(self, parent, *args, **kwargs):
if 'size' not in kwargs:
kwargs['size'] = (32, 32)
if 'style' not in kwargs:
kwargs['style'] = wx.BORDER_NONE | wx.TRANSPARENT_WINDOW
wx.Window.__init__(self, parent, *args, **kwargs)
self.active = False
self.transitioning = False
self.SetForegroundColour('#FF0000')
# self.SetBackgroundColour('#000000')
self.SetBackgroundStyle(wx.BG_STYLE_PAINT)
self.off_timer = wx.Timer(self)
self.Bind(wx.EVT_PAINT, self.on_paint)
# self.Bind(wx.EVT_ERASE_BACKGROUND, self.on_erase)
self.Bind(wx.EVT_TIMER, self.on_timer)
def on_paint(self, evt):
dc = wx.PaintDC(self)
gc = wx.GCDC(dc)
# dc.SetBrush(wx.TRANSPARENT_BRUSH)
# gc.SetBrush(wx.TRANSPARENT_BRUSH)
# gc.SetBackgroundMode(wx.TRANSPARENT)
if wx.Platform in ['__WXGTK__', '__WXMSW__']:
gc.SetBackground(wx.TRANSPARENT_BRUSH)
gc.Clear()
gc.SetPen(wx.Pen('#606060'))
color = self.GetForegroundColour() if self.active ^ self.transitioning else '#d5d2ca'
gc.SetBrush(wx.Brush(color))
gc.DrawCircle(self.Size.width//2, self.Size.height//2, min(self.Size.width, self.Size.height)//4)
# def on_erase(self, evt):
# pass
def on_timer(self, evt):
self.transitioning = False
self.Refresh()
def set(self, active=True):
if self.active != active:
self.active = active
if active:
if self.transitioning:
self.off_timer.Stop()
self.transitioning = False
else:
self.Refresh()
else:
self.transitioning = True
self.off_timer.StartOnce(50)
def get(self):
return self.active
class FujiPanel(wx.Window):
MENU_FN_WEBUI = wx.NewIdRef()
MENU_FN_START = wx.NewIdRef()
MENU_FN_STOP = wx.NewIdRef()
MENU_FN_RESTART = wx.NewIdRef()
MENU_HUB_START = wx.NewIdRef()
MENU_HUB_STOP = wx.NewIdRef()
MENU_HUB_RESTART = wx.NewIdRef()
MENU_SD_OPEN = wx.NewIdRef()
MENU_SD_SET = wx.NewIdRef()
MENU_LOG_VIEW = wx.NewIdRef()
MENU_ON_TOP = wx.NewIdRef()
def __init__(self, parent, frame, bmp):
wx.Window.__init__(self, parent)
self.frame = frame
self.bmp = bmp # background bitmap
bmp_size = self.bmp.GetSize()
print("FujiPanel can set transparent:", self.CanSetTransparent())
print("FujiPanel transparent bg:", self.IsTransparentBackgroundSupported())
self.SetClientSize(bmp_size)
# self.Bind(wx.EVT_PAINT, self.on_paint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.on_erase)
# LEDs
# Power
self.power_led = LedIndicator(self, pos=(38, 54))
self.power_led.SetForegroundColour('#FFFFFF')
# Wi-Fi
self.wifi_led = LedIndicator(self, pos=(100, 54))
self.wifi_led.SetForegroundColour('#00A7FF')
# SIO
self.sio_led = LedIndicator(self, pos=(252, 54))
self.sio_led.SetForegroundColour('#FF6262')
# Buttons
# Power
quit_bnt = wx.Button(self, label="Off", pos=(16, 204), size=(48, 32))
quit_bnt.SetToolTip("Quit")
quit_bnt.Bind(wx.EVT_BUTTON, frame.on_exit)
# SD card
sd_bnt = wx.Button(self, label="SD", pos=(bmp_size.width-16-48, 204), size=(48, 32))
sd_bnt.SetToolTip("Open SD folder")
sd_bnt.Bind(wx.EVT_BUTTON, lambda e: self.open_sd_folder())
# Context menu
self.Bind(wx.EVT_RIGHT_DOWN, self.on_right_down)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_FN_WEBUI)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_LOG_VIEW)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_FN_START)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_FN_STOP)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_FN_RESTART)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_HUB_START)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_HUB_STOP)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_HUB_RESTART)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_ON_TOP)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_SD_OPEN)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=self.MENU_SD_SET)
self.Bind(wx.EVT_MENU, self.on_menu_item, id=wx.ID_ABOUT)
self.Bind(wx.EVT_MENU, frame.on_exit, id=wx.ID_EXIT)
# def on_paint(self, evt):
# dc = wx.PaintDC(self)
# gc = wx.GCDC(dc)
#
# # dc.SetBrush(wx.TRANSPARENT_BRUSH)
# # gc.SetBrush(wx.TRANSPARENT_BRUSH)
# # gc.SetBackgroundMode(wx.TRANSPARENT)
# if wx.Platform in ['__WXGTK__', '__WXMSW__']:
# gc.SetBackground(wx.TRANSPARENT_BRUSH)
# gc.Clear()
#
# def on_erase(self, evt):
# pass
def on_erase(self, evt):
dc = evt.GetDC()
if not dc:
dc = wx.ClientDC(self)
rect = self.GetUpdateRegion().GetBox()
dc.SetClippingRect(rect)
dc.Clear()
dc.DrawBitmap(self.bmp, 0, 0, True)
def set_power_led(self, on: bool):
self.power_led.set(on)
def set_wifi_led(self, on: bool):
self.wifi_led.set(on)
def set_sio_led(self, on: bool):
self.sio_led.set(on)
def open_sd_folder(self):
path = Path(sys.argv[0]).resolve().parent / "fujinet-pc" / "SD"
wx.LaunchDefaultBrowser('file:{}'.format(path))
def select_sd_folder(self):
path = Path(sys.argv[0]).resolve().parent / "fujinet-pc" / "SD"
new_sd_dir = wx.DirSelector("Choose SD folder", str(path))
if new_sd_dir.strip():
print("new SD", new_sd_dir)
def on_right_down(self, evt):
# create and show popup menu
menu = self.create_popup_menu()
self.PopupMenu(menu)
menu.Destroy()
def create_popup_menu(self):
menu = wx.Menu()
menu_fn = wx.Menu()
menu_hub = wx.Menu()
menu_fn.Append(self.MENU_FN_START, u"Start")
menu_fn.Append(self.MENU_FN_STOP, u"Stop")
menu_fn.Append(self.MENU_FN_RESTART, u"Restart")
menu_hub.Append(self.MENU_HUB_START, u"Start")
menu_hub.Append(self.MENU_HUB_STOP, u"Stop")
menu_hub.Append(self.MENU_HUB_RESTART, u"Restart")
menu.Append(self.MENU_FN_WEBUI, u"Open WebUI")
menu.Append(self.MENU_SD_OPEN, u"Open SD folder")
# menu.Append(self.MENU_SD_SET, u"Select SD folder ...")
menu.AppendSubMenu(menu_fn, u"FujiNet")
menu.AppendSubMenu(menu_hub, u"NetSIO hub")
menu.Append(self.MENU_ON_TOP, u"Stay on top", kind=wx.ITEM_CHECK).Check(self.frame.stay_on_top)
menu.Append(self.MENU_LOG_VIEW, u"View log")
menu.AppendSeparator()
menu.Append(wx.ID_ABOUT, u"About")
menu.AppendSeparator()
menu.Append(wx.ID_EXIT, u"&Quit\tCtrl+Q")
return menu
def on_menu_item(self, event):
id = event.GetId()
if id == self.MENU_FN_WEBUI:
wx.LaunchDefaultBrowser('http://localhost:8000') # use BROWSER_NOBUSYCURSOR ?
elif id == self.MENU_FN_START:
# self.frame.fujinet.start_process()
pass
elif id == self.MENU_FN_STOP:
# self.frame.fujinet.stop_process()
pass
elif id == self.MENU_FN_RESTART:
# self.frame.fujinet.restart_process()
pass
elif id == self.MENU_HUB_START:
# self.frame.fujinet.start_process()
pass
elif id == self.MENU_HUB_STOP:
# self.frame.fujinet.stop_process()
pass
elif id == self.MENU_HUB_RESTART:
# self.frame.fujinet.restart_process()
pass
        elif id == self.MENU_SD_OPEN:
self.open_sd_folder()
elif id == self.MENU_SD_SET:
self.select_sd_folder()
elif id == self.MENU_LOG_VIEW:
self.frame.show_log()
elif id == self.MENU_ON_TOP:
self.frame.toggle_stay_on_top()
elif id == wx.ID_ABOUT:
self.frame.show_about()
class TopFrame(wx.Frame):
def __init__(self, parent):
# wx.Frame.__init__(self, parent, wx.ID_ANY, "FujiNet-PC",
# style=wx.FRAME_SHAPED | wx.SIMPLE_BORDER) # | wx.STAY_ON_TOP)
wx.Frame.__init__(self)
# self.SetBackgroundColour('#00ff00')
# self.SetBackgroundStyle(wx.BG_STYLE_PAINT)
self.SetBackgroundStyle(wx.BG_STYLE_TRANSPARENT)
self.Create(parent, wx.ID_ANY, "FujiNet-PC",
style=wx.FRAME_SHAPED | wx.SIMPLE_BORDER) # | wx.STAY_ON_TOP)
self.stay_on_top = False
self.delta = (0, 0)
print("TopFrame can set transparent:", self.CanSetTransparent())
print("TopFrame transparent bg:", self.IsTransparentBackgroundSupported())
fname = str(Path(sys.argv[0]).parent / "img" / "test.png")
self.bmp = wx.Bitmap()
self.bmp.LoadFile(fname, wx.BITMAP_TYPE_PNG)
bmp_size = self.bmp.GetSize()
self.SetClientSize(bmp_size)
fname = str(Path(sys.argv[0]).parent / "img" / "test-mask.png")
self.mask_bmp = wx.Bitmap()
self.mask_bmp.LoadFile(fname, wx.BITMAP_TYPE_PNG)
# panel = FujiPanel(self, self, self.bmp)
# self.panel = panel
# self.log_view = LogFrame(self, self)
        self.log_view = None  # needed by show_log()
# sd_bnt = wx.Button(panel, label="SD", pos=(bmp_size.width-16-48, 204))
# sd_bnt.SetToolTip("Open SD folder")
# sd_bnt.SetBackgroundColour((56, 53, 53, 255))
# sd_bnt.SetForegroundColour((250, 244, 5, 255))
# sd_bnt.SetSize(48, 32)
#
# a_bnt = wx.Button(panel, label="A", pos=(40, 18))
# a_bnt.SetToolTip("TODO")
# a_bnt.SetBackgroundColour((56, 53, 53, 255))
# a_bnt.SetForegroundColour((250, 244, 5, 255))
# a_bnt.SetSize(32, 32)
#
# b_bnt = wx.Button(panel, label="B", pos=(94, 18))
# b_bnt.SetToolTip("TODO")
# b_bnt.SetBackgroundColour((56, 53, 53, 255))
# b_bnt.SetForegroundColour((250, 244, 5, 255))
# b_bnt.SetSize(32, 32)
#
# c_bnt = wx.Button(panel, label="C", pos=(254, 18))
# c_bnt.SetToolTip("Restart FujiNet")
# c_bnt.SetBackgroundColour((56, 53, 53, 255))
# c_bnt.SetForegroundColour((250, 244, 5, 255))
# c_bnt.SetSize(32, 32)
#
# panel.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
self.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
self.Bind(wx.EVT_LEFT_UP, self.on_left_up)
self.Bind(wx.EVT_MOTION, self.on_mouse_move)
self.Bind(wx.EVT_MENU, self.on_exit, id=wx.ID_EXIT)
self.Bind(wx.EVT_PAINT, self.on_paint)
# hot keys
self.SetAcceleratorTable(wx.AcceleratorTable(
[
wx.AcceleratorEntry(wx.ACCEL_CTRL, ord('Q'), wx.ID_EXIT),
]))
# fname = os.path.join(os.path.dirname(sys.argv[0]), "img", "dragging.png")
# self.cursorDragging = wx.Cursor(fname, wx.BITMAP_TYPE_PNG, 12, 8)
# if wx.Platform != "__WXMAC__":
# # wxMac clips the tooltip to the window shape, YUCK!!!
# self.SetToolTipString("Right-click to close the window\n"
# "Double-click the image to set/unset the window shape")
if wx.Platform == "__WXGTK__":
# wxGTK requires that the window be created before you can
# set its shape, so delay the call to setWindowShape until
# this event.
self.Bind(wx.EVT_WINDOW_CREATE, self.set_window_shape)
else:
# On wxMSW and wxMac the window has already been created, so go for it.
self.set_window_shape()
# self.fujinet = FujiNetMgr(self.log_view.logger)
# self.fujinet.start()
# self.fujinet.start_process()
# dc = wx.ClientDC(self)
# dc.DrawBitmap(self.bmp, 0, 0, True)
def set_window_shape(self, *evt):
r = wx.Region(self.mask_bmp, (0, 0, 0), 0)
# r = wx.Region(self.bmp, (0, 0, 0), 0)
self.SetShape(r)
def on_paint(self, evt):
dc = wx.PaintDC(self)
if wx.Platform == '__WXMSW__':
dc.DrawBitmap(self.bmp, -1, -1, True)
else:
dc.DrawBitmap(self.bmp, 0, 0, True)
# dc.SetBackground(wx.Brush('#ff0000'))
# dc.Clear()
def on_exit(self, evt):
self.Iconize()
# wx.CallAfter(self.fujinet.shutdown) # TODO make non-blocking shutdown
self.Close()
def on_left_down(self, evt):
self.CaptureMouse()
origin_x, origin_y = self.GetPosition()
x, y = self.ClientToScreen(evt.GetPosition())
self.delta = (x - origin_x, y - origin_y)
self.SetCursor(wx.Cursor(wx.CURSOR_HAND))
# self.SetCursor(self.cursorDragging)
def on_left_up(self, evt):
if self.HasCapture():
self.ReleaseMouse()
# self.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
self.SetCursor(wx.Cursor(wx.CURSOR_ARROW))
def on_mouse_move(self, evt):
if evt.Dragging() and evt.LeftIsDown():
pos = evt.GetPosition()
x, y = self.ClientToScreen(pos)
self.Move((x - self.delta[0], y - self.delta[1]))
def toggle_stay_on_top(self):
self.stay_on_top = not self.stay_on_top
style = self.GetWindowStyle()
style = style | wx.STAY_ON_TOP if self.stay_on_top else style & ~wx.STAY_ON_TOP
self.SetWindowStyle(style)
def show_about(self):
info = wx.adv.AboutDialogInfo()
fname = os.path.join(os.path.dirname(sys.argv[0]), "img", "alien48.png")
info.SetIcon(wx.Icon(fname, wx.BITMAP_TYPE_PNG))
# info.SetName(version.NAME)
info.SetName("FujiNet-PC Launcher")
# info.SetVersion(version.VERSION)
info.SetVersion("0.0")
# info.SetDescription(version.DESC)
info.SetDescription("FujiNet-PC Launcher is a wrapper to control FujiNet-PC and NetSIO hub programs.")
info.SetCopyright('(C) 2021 apc')
# info.SetWebSite('http://www.zavreno.cz')
# info.SetLicence(licence)
# info.AddDeveloper('<NAME>')
# info.AddDocWriter('<NAME>')
# info.AddArtist('Unknow Artist')
# info.AddTranslator('Unknown Translator')
wx.adv.AboutBox(info, self)
def show_log(self):
if self.log_view is None:
            self.log_view = LogFrame(self)
self.log_view.Show()
self.log_view.Raise()
class MyApp(wx.App):
def OnInit(self):
frame = TopFrame(None)
self.SetTopWindow(frame)
frame.Show(True)
return True
app = MyApp()
app.MainLoop()
```
#### File: fujinet-pc-launcher/launcher/logview.py
```python
import wx
from launcher.config import cfg
from typing import Union
class Logger:
def __init__(self, window: wx.Frame):
self.window = window # main window
self.text_control: Union[wx.TextCtrl, None] = None # text control to send log text
self.buffer = ""
self.flush_timer = wx.Timer(window) # timer for text control updates
window.Bind(wx.EVT_TIMER, self.on_flush_timer, self.flush_timer)
def set_text_control(self, log_text: wx.TextCtrl):
self.text_control = log_text
self.flush()
def flush(self):
if self.text_control is not None:
self.text_control.AppendText(self.buffer)
self.buffer = ""
# maintain text size at some level
if self.text_control.GetLastPosition() > 1000000:
self.text_control.Remove(0, 100000)
def write(self, data: str):
wx.CallAfter(self.append_buffer, data)
def append_buffer(self, data: str):
# process input text
if "Running FujiNet" in data:
self.window.set_power_led(True)
elif "FujiNet stopped" in data or "FujiNet ended" in data:
self.window.set_power_led(False)
elif " > CF: " in data:
self.window.set_sio_led(True)
elif " > SIO CMD processed in " in data:
self.window.set_sio_led(False)
elif "Running NetSIO hub" in data:
self.window.set_wifi_led(True)
elif "NetSIO hub stopped" in data or "NetSIO hub ended" in data:
self.window.set_wifi_led(False)
# append text into buffer
self.buffer += data
# maintain buffer size at some level
if len(self.buffer) > 1000000:
self.buffer = self.buffer[100000:]
# run timer to flush buffer
if not self.flush_timer.IsRunning():
self.flush_timer.StartOnce(250)
def on_flush_timer(self, evt):
self.flush()
class LogFrame(wx.Frame):
def __init__(self, parent):
wx.Frame.__init__(self, parent, -1, "{} log".format(cfg.launcher_label or "FujiNet-PC"),
style=wx.DEFAULT_FRAME_STYLE | wx.NO_FULL_REPAINT_ON_RESIZE) # | wx.FRAME_FLOAT_ON_PARENT)
self.panel = LogPanel(self)
self.logger = None
# try to be HiDPI friendly
w, h = self.GetTextExtent("MMMMMMMMMM")
if w >= 240: f = 2.0
elif w >= 180: f = 1.5
elif w >= 150: f = 1.25
else: f = 1.0
self.SetClientSize((int(800*f), int(650*f)))
self.SetMinClientSize((int(640*f), int(480*f)))
self.Bind(wx.EVT_CLOSE, self.on_close)
def on_close(self, evt):
# only hide, if possible
if evt.CanVeto():
self.Hide()
evt.Veto()
return
self.reset_logger()
self.Parent.log_view = None # TODO better
self.Destroy()
def set_logger(self, logger: Logger):
self.logger = logger
logger.set_text_control(self.panel.log_text)
def reset_logger(self):
if self.logger is not None:
self.logger.set_text_control(None)
self.logger = None
class LogPanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
self.log_text: wx.TextCtrl = wx.TextCtrl(self, style=wx.TE_MULTILINE | wx.HSCROLL | wx.TE_READONLY)
font = wx.Font()
font.SetFamily(wx.FONTFAMILY_TELETYPE)
font.SetPointSize(10)
self.log_text.SetFont(font)
self.log_text.SetBackgroundColour('#1E1E1E')
self.log_text.SetForegroundColour('#E0E0E0')
self.log_text.SetDefaultStyle(wx.TextAttr('#E0E0E0'))
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(self.log_text, 1, wx.ALL | wx.EXPAND)
# vbox.Add(hide_btn, 0, wx.ALL | wx.ALIGN_CENTER, border=3)
self.SetSizer(vbox)
# self.SetBackgroundColour("red")
``` |
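A sketch of how `Logger` and `LogFrame` connect, assuming `launcher.config` is importable and stubbing the LED callbacks that `Logger.append_buffer` expects on its window:
```python
# Wiring sketch: a stand-in frame provides the set_*_led callbacks that
# append_buffer calls; writes are buffered and flushed via wx.Timer.
import wx
from launcher.logview import LogFrame, Logger

app = wx.App()
frame = wx.Frame(None)                 # stand-in for the launcher's main frame
frame.set_power_led = lambda on: None  # stubbed LED callbacks
frame.set_wifi_led = lambda on: None
frame.set_sio_led = lambda on: None

logger = Logger(frame)
log_view = LogFrame(frame)
log_view.set_logger(logger)            # routes the buffer into the text control
logger.write("Running FujiNet\n")      # also flips the power LED callback
log_view.Show()
app.MainLoop()
```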
{
"source": "a8ksh4/purple",
"score": 2
} |
#### File: purple/extras/performance_counter.py
```python
class PerformanceCounter:
    """Print the time delta carried by a status object on each update."""
    def update(self, status):
        print(status.delta)
``` |
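The `status` argument only needs to expose a `delta` attribute; a hypothetical stand-in for illustration:
```python
# Hypothetical usage: any object with a .delta attribute works.
from types import SimpleNamespace

counter = PerformanceCounter()
counter.update(SimpleNamespace(delta=0.016))  # prints 0.016
```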
{
"source": "a8r5o/mac_solarized",
"score": 2
} |
#### File: vimpager/prototypes/gen_awk_uudecode_lookup_v1.py
```python
from sys import stdout, stderr
def escape(s):
return s.replace('\\', '\\\\').replace('"', '\\"').replace("'", "'\\''")
uu_chars = []
for i in range(0, 64):
    uu_chars.append(chr(i + 32))
left = {}
middle = {}
right = {}
for c1 in uu_chars:
for c2 in uu_chars:
left[ c1 + c2] = chr((( ord(c1) - 32) << 2) + ((ord(c2) - 32) >> 4))
middle[c1 + c2] = chr((((ord(c1) - 32) << 4) & 0xFF) + ((ord(c2) - 32) >> 2))
right[ c1 + c2] = chr((((ord(c1) - 32) << 6) & 0xFF) + (ord(c2) - 32))
for s in left.keys():
stdout.write('l["%s"]=%s;' % (escape(s), ord(left[s])))
for s in middle.keys():
stdout.write('m["%s"]=%s;' % (escape(s), ord(middle[s])))
for s in right.keys():
stdout.write('r["%s"]=%s;' % (escape(s), ord(right[s])))
print ""
``` |
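As a sanity check on the bit arithmetic behind the three tables: each uuencoded character carries 6 bits (its ASCII code minus 32), and every output byte is stitched from two adjacent characters. A small sketch verifying one triple against Python's own uuencoder:
```python
# Verify the left/middle/right shifts against binascii's uuencoding.
import binascii

line = binascii.b2a_uu(b"Cat")            # b'#0V%T\n' (length char + 4 data chars)
c1, c2, c3, c4 = (chr(b) for b in line[1:5])
left   = ((ord(c1) - 32) << 2) + ((ord(c2) - 32) >> 4)           # first byte
middle = (((ord(c2) - 32) << 4) & 0xFF) + ((ord(c3) - 32) >> 2)  # second byte
right  = (((ord(c3) - 32) << 6) & 0xFF) + (ord(c4) - 32)         # third byte
assert bytes([left, middle, right]) == b"Cat"
```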
{
"source": "a904919863/51Job_Spider",
"score": 3
} |
#### File: a904919863/51Job_Spider/spider.py
```python
from bs4 import BeautifulSoup
import re,json
# import time
# from selenium import webdriver
# from selenium.webdriver.chrome.options import Options
import urllib.request
import urllib.error
import MySQLdb
# Initialize variables
findJobName = re.compile(r'class="jname at">(.*?)</span>')
# chrome_options = Options()
# chrome_options.add_argument('--headless')
# chrome_options.add_argument('--disable-gpu')
# baseurl = r"https://search.51job.com/list/000000,000000,0100%252c7800%252c7300%252c7900%252c7500,01%252c40,9,99,+,2,1.html?lang=c&postchannel=0000&workyear=01&cotype=99°reefrom=04&jobterm=99&companysize=99&ord_field=0&dibiaoid=0&line=&welfare="
# baseurl = r"https://search.51job.com/list/030200%252c080200%252c080400%252c080500%252c090200,000000,0000,00,9,99,python,2,1.html?lang=c&postchannel=0000&workyear=99&cotype=99°reefrom=99&jobterm=99&companysize=99&ord_field=0&dibiaoid=0&line=&welfare="
header = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
}
def askurl(url):
    req = urllib.request.Request(url=url, headers=header)
    html = None
    try:
        response = urllib.request.urlopen(req)
        html = response.read().decode("gbk")
    except urllib.error.URLError as e:
        if hasattr(e, "code"):
            print(e.code)
        if hasattr(e, "reason"):
            print(e.reason)
    return html
def post_salary(item_salary):
if item_salary == "":
return 0
suffix = re.findall(r"[\u4e00-\u9fa5]*/.+",item_salary)[0]
# print (suffix)
    Prefix = re.findall(r"[0-9.]+(?:-[0-9.]+)?", item_salary)[0]  # allow decimals such as 0.8-1.2
# print(Prefix)
if "-" in Prefix:
bottom_salary = float(re.findall("(.+)-",Prefix)[0])
top_salary = float(re.findall("-(.+)",Prefix)[0])
salary = bottom_salary + (top_salary-bottom_salary) * 0.4
else:
salary = float(Prefix)
if suffix[0] == "万":
salary = salary * 10000
elif suffix[0] == "千":
salary = salary * 1000
if suffix[-1] == "年":
salary = salary / 12
if suffix[-1] == "天":
        salary = salary * 21.75 # statutory average paid days per month under PRC labor law
return salary
def getJson(html):
# job51 = open("51job.html",'r',encoding="gbk")
# job51 = BeautifulSoup(html,"html.parser")
data = re.findall(r"\"engine_search_result\":(.+?),\"jobid_count\"",html)
jsonObj = json.loads(data[0])
return jsonObj
# for item in joblist.select(".e"):
# data = [] # 保存一部电影的所有信息
# item = str(item)
# job_name = re.findall(findJobName, item)[0]
# data.append(job_name)
# datalist.append(data)
# print(datalist)
def save2DB(Json):
db = MySQLdb.connect("localhost", "root", "a<PASSWORD>", "51job", charset='gbk' )
cursor = db.cursor()
for item in Json:
sql = """INSERT INTO hangzhou_job(id,
job_name, company, salary, city, trade, companytype_text)
VALUES (0,'{}', '{}', {}, '{}', '{}','{}')""".format(item['job_name'],item['company_name'],post_salary(item['providesalary_text']),item['workarea_text'],item['companyind_text'],item['companytype_text'])
try:
            # execute the SQL statement
            cursor.execute(sql)
            # commit the transaction to the database
            db.commit()
        except Exception:
# Rollback in case there is any error
db.rollback()
    # close the database connection
db.close()
def main():
for i in range(1,11):
baseurl = r"https://search.51job.com/list/080200,000000,7800%252c7300%252c0100%252c2700%252c7500,00,9,99,+,2,{}.html?lang=c&postchannel=0000&workyear=01&cotype=99°reefrom=04&jobterm=99&companysize=99&ord_field=0&dibiaoid=0&line=&welfare=".format(i)
html = askurl(baseurl)
json = getJson(html)
save2DB(json)
print("第{}页爬取成功".format(i))
if __name__ == "__main__":
main()
``` |
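For reference, `post_salary` normalizes 51job's salary strings to an estimated monthly figure at 40% of the advertised range; the sample strings below are illustrative assumptions of the site's usual format:
```python
# Illustrative inputs (values worked out from the rules above).
print(post_salary("8-10万/年"))  # (8 + 2*0.4) * 10000 / 12 -> 7333.33...
print(post_salary("150元/天"))   # 150 * 21.75 -> 3262.5
print(post_salary(""))           # 0 (missing salary field)
```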
{
"source": "a904919863/Spiders_Collection",
"score": 2
} |
#### File: News163_Spider/spiders/news163.py
```python
import scrapy
from msedge.selenium_tools import Edge,EdgeOptions
from News163_Spider.items import News163SpiderItem
class News163Spider(scrapy.Spider):
name = 'news163'
# allowed_domains = ['news.163.com']
start_urls = ['http://news.163.com/']
models_urls = []
def __init__(self):
options = EdgeOptions()
options.use_chromium = True
# options.add_argument("headless")
# options.add_argument("disable-gpu")
        # suppress useless console output; excluding enable-automation helps evade bot detection
options.add_experimental_option("excludeSwitches", ['enable-automation', 'enable-logging'])
self.bro = Edge(options = options)
def parse(self, response):
li_list = response.xpath('//*[@id="index2016_wrap"]/div[1]/div[2]/div[2]/div[2]/div[2]/div/ul/li')
alist = [3,4,6,7]
for index in alist:
model_url = li_list[index].xpath('./a/@href').extract_first()
self.models_urls.append(model_url)
for url in self.models_urls:
yield scrapy.Request(url,callback=self.parse_model)
def parse_model(self,response):
div_list = response.xpath('/html/body/div/div[3]/div[4]/div[1]/div[1]/div/ul/li/div/div')
for div in div_list:
title = div.xpath('./div/div[1]/h3/a/text()').extract_first()
news_detail_url = div.xpath('./div/div[1]/h3/a/@href').extract_first()
item = News163SpiderItem()
item['title'] = title
            # request the news detail page
yield scrapy.Request(url = news_detail_url,callback=self.parse_detail,meta={'item':item})
def parse_detail(self,response):
content = response.xpath('//*[@id="content"]/div[2]//text()').extract()
content = ''.join(content)
item = response.meta['item']
print(item)
item['content'] = content
yield item
def closed(self,spider):
self.bro.quit()
```
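Keeping an Edge instance in `self.bro` only pays off if a downloader middleware renders the model pages through it. The repo's middlewares.py is not shown here, but a typical sketch (the class name is an assumption) looks like the following; it would still need enabling under DOWNLOADER_MIDDLEWARES in settings.py:
```python
# Sketch of the implied downloader middleware: render the dynamic model
# pages with the spider's Edge instance, pass everything else through.
from time import sleep
from scrapy.http import HtmlResponse

class News163DownloaderMiddleware:
    def process_request(self, request, spider):
        if request.url in spider.models_urls:   # only the dynamically loaded pages
            spider.bro.get(request.url)
            sleep(2)                            # give the page time to render
            return HtmlResponse(url=request.url,
                                body=spider.bro.page_source,
                                encoding='utf-8',
                                request=request)
        return None                             # default downloader for the rest
```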
#### File: No_12_fbsPro/spiders/fbs.py
```python
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from scrapy_redis.spiders import RedisCrawlSpider
from No_12_fbsPro.items import No12FbsproItem
class FbsSpider(RedisCrawlSpider):
name = 'fbs'
# allowed_domains = ['www.xxx.com']
# start_urls = ['http://www.xxx.com/']
redis_key = 'sun'
rules = (
Rule(LinkExtractor(allow=r'&page=\d+'), callback='parse_item', follow=True),
)
def parse_item(self, response):
li_list = response.xpath('//ul[@class="title-state-ul"]/li')
for li in li_list:
new_num = li.xpath('./span[1]/text()').extract_first()
new_title = li.xpath('./span[3]//text()').extract_first()
item = No12FbsproItem()
item['title'] = new_title
item['new_num'] = new_num
yield item
``` |
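A `RedisCrawlSpider` blocks until a start URL is pushed onto its `redis_key`; seeding can be done from redis-cli (`lpush sun <url>`) or with redis-py as below (host and URL are placeholders):
```python
# Seed the distributed crawl by pushing a start URL onto the "sun" key.
import redis

r = redis.Redis(host="localhost", port=6379)
r.lpush("sun", "http://example.com/political/index?page=1")  # placeholder URL
```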
{
"source": "A913440/uberduck-ml-dev",
"score": 2
} |
#### File: uberduck_ml_dev/text/symbols.py
```python
__all__ = ['symbols', 'symbols_with_ipa', 'DEFAULT_SYMBOLS', 'IPA_SYMBOLS', 'SYMBOL_SETS', 'symbols_to_sequence',
'arpabet_to_sequence', 'should_keep_symbol', 'symbol_to_id', 'id_to_symbol', 'curly_re', 'words_re']
# Cell
""" from https://github.com/keithito/tacotron """
"""
Defines the set of symbols used in text input to the model.
The default is a set of ASCII characters that works well for English or text that has been run through Unidecode. For other data, you can modify _characters. See TRAINING_DATA.md for details. """
from . import cmudict
_pad = "_"
_punctuation = "!'\",.:;? "
_math = "#%&*+-/[]()"
_special = "@©°½—₩€$"
_accented = "áçéêëñöøćž"
_numbers = "0123456789"
_letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
_letters_ipa = "ɑɐɒæɓʙβɔɕçɗɖðʤəɘɚɛɜɝɞɟʄɡɠɢʛɦɧħɥʜɨɪʝɭɬɫɮʟɱɯɰŋɳɲɴøɵɸθœɶʘɹɺɾɻʀʁɽʂʃʈʧʉʊʋⱱʌɣɤʍχʎʏʑʐʒʔʡʕʢǀǁǂǃˈˌːˑʼʴʰʱʲʷˠˤ˞↓↑→↗↘'̩'ᵻ"
# Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as
# uppercase letters):
_arpabet = ["@" + s for s in cmudict.valid_symbols]
# Export all symbols:
symbols = (
list(_pad + _punctuation + _math + _special + _accented + _numbers + _letters)
+ _arpabet
)
symbols_with_ipa = symbols + list(_letters_ipa)
DEFAULT_SYMBOLS = "default"
IPA_SYMBOLS = "ipa"
SYMBOL_SETS = {
DEFAULT_SYMBOLS: symbols,
IPA_SYMBOLS: symbols_with_ipa,
}
# Cell
import re
symbol_to_id = {
DEFAULT_SYMBOLS: {s: i for i, s in enumerate(SYMBOL_SETS[DEFAULT_SYMBOLS])},
IPA_SYMBOLS: {s: i for i, s in enumerate(SYMBOL_SETS[IPA_SYMBOLS])},
}
id_to_symbol = {
DEFAULT_SYMBOLS: {i: s for i, s in enumerate(SYMBOL_SETS[DEFAULT_SYMBOLS])},
IPA_SYMBOLS: {i: s for i, s in enumerate(SYMBOL_SETS[IPA_SYMBOLS])},
}
curly_re = re.compile(r"(.*?)\{(.+?)\}(.*)")
words_re = re.compile(
r"([a-zA-ZÀ-ž]+['][a-zA-ZÀ-ž]{1,2}|[a-zA-ZÀ-ž]+)|([{][^}]+[}]|[^a-zA-ZÀ-ž{}]+)"
)
def symbols_to_sequence(symbols, symbol_set=DEFAULT_SYMBOLS, ignore_symbols=["_", "~"]):
return [
symbol_to_id[symbol_set][s]
for s in symbols
if should_keep_symbol(s, symbol_set, ignore_symbols)
]
def arpabet_to_sequence(text, symbol_set=DEFAULT_SYMBOLS):
return symbols_to_sequence(["@" + s for s in text.split()], symbol_set=symbol_set)
def should_keep_symbol(s, symbol_set=DEFAULT_SYMBOLS, ignore_symbols=["_", "~"]):
return s in symbol_to_id[symbol_set] and s not in ignore_symbols
```
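A quick illustration of the mappings, using the default symbol set:
```python
# Sketch: characters map to ids via symbol_to_id; ARPAbet symbols are
# stored with an "@" prefix to keep them distinct from plain letters.
seq = symbols_to_sequence(list("hi!"))
print([id_to_symbol[DEFAULT_SYMBOLS][i] for i in seq])  # ['h', 'i', '!']
print(arpabet_to_sequence("HH AY1"))                    # ids of "@HH" and "@AY1"
```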
#### File: uberduck_ml_dev/text/util.py
```python
__all__ = ['normalize_numbers', 'expand_abbreviations', 'expand_numbers', 'lowercase', 'collapse_whitespace',
'convert_to_ascii', 'convert_to_arpabet', 'basic_cleaners', 'transliteration_cleaners', 'english_cleaners',
'english_cleaners_phonemizer', 'batch_english_cleaners_phonemizer', 'g2p', 'batch_clean_text', 'clean_text',
'english_to_arpabet', 'cleaned_text_to_sequence', 'text_to_sequence', 'sequence_to_text', 'BATCH_CLEANERS',
'CLEANERS', 'random_utterance', 'utterances']
# Cell
""" from https://github.com/keithito/tacotron """
"""
Cleaners are transformations that run over the input text at both training and eval time.
Cleaners can be selected by passing a comma-delimited list of cleaner names as the "cleaners"
hyperparameter. Some cleaners are English-specific. You'll typically want to use:
1. "english_cleaners" for English text
2. "transliteration_cleaners" for non-English text that can be transliterated to ASCII using
the Unidecode library (https://pypi.python.org/pypi/Unidecode)
3. "basic_cleaners" if you do not want to transliterate (in this case, you should also update
the symbols in symbols.py to match your data).
"""
import re
from typing import List
from g2p_en import G2p
from phonemizer import phonemize
from unidecode import unidecode
from .symbols import curly_re, words_re
g2p = G2p()
# Regular expression matching whitespace:
_whitespace_re = re.compile(r"\s+")
# List of (regular expression, replacement) pairs for abbreviations:
_abbreviations = [
(re.compile("\\b%s\\." % x[0], re.IGNORECASE), x[1])
for x in [
("mrs", "misess"),
("mr", "mister"),
("dr", "doctor"),
("st", "saint"),
("co", "company"),
("jr", "junior"),
("maj", "major"),
("gen", "general"),
("drs", "doctors"),
("rev", "reverend"),
("lt", "lieutenant"),
("hon", "honorable"),
("sgt", "sergeant"),
("capt", "captain"),
("esq", "esquire"),
("ltd", "limited"),
("col", "colonel"),
("ft", "fort"),
]
]
import inflect
import re
_inflect = inflect.engine()
_comma_number_re = re.compile(r"([0-9][0-9\,]+[0-9])")
_decimal_number_re = re.compile(r"([0-9]+\.[0-9]+)")
_pounds_re = re.compile(r"£([0-9\,]*[0-9]+)")
_dollars_re = re.compile(r"\$([0-9\.\,]*[0-9]+)")
_ordinal_re = re.compile(r"[0-9]+(st|nd|rd|th)")
_number_re = re.compile(r"[0-9]+")
def _remove_commas(m):
return m.group(1).replace(",", "")
def _expand_decimal_point(m):
return m.group(1).replace(".", " point ")
def _expand_dollars(m):
match = m.group(1)
parts = match.split(".")
if len(parts) > 2:
return match + " dollars" # Unexpected format
dollars = int(parts[0]) if parts[0] else 0
cents = int(parts[1]) if len(parts) > 1 and parts[1] else 0
if dollars and cents:
dollar_unit = "dollar" if dollars == 1 else "dollars"
cent_unit = "cent" if cents == 1 else "cents"
return "%s %s, %s %s" % (dollars, dollar_unit, cents, cent_unit)
elif dollars:
dollar_unit = "dollar" if dollars == 1 else "dollars"
return "%s %s" % (dollars, dollar_unit)
elif cents:
cent_unit = "cent" if cents == 1 else "cents"
return "%s %s" % (cents, cent_unit)
else:
return "zero dollars"
def _expand_ordinal(m):
return _inflect.number_to_words(m.group(0))
def _expand_number(m):
num = int(m.group(0))
if num > 1000 and num < 3000:
if num == 2000:
return "two thousand"
elif num > 2000 and num < 2010:
return "two thousand " + _inflect.number_to_words(num % 100)
elif num % 100 == 0:
return _inflect.number_to_words(num // 100) + " hundred"
else:
return _inflect.number_to_words(
num, andword="", zero="oh", group=2
).replace(", ", " ")
else:
return _inflect.number_to_words(num, andword="")
def normalize_numbers(text):
text = re.sub(_comma_number_re, _remove_commas, text)
text = re.sub(_pounds_re, r"\1 pounds", text)
text = re.sub(_dollars_re, _expand_dollars, text)
text = re.sub(_decimal_number_re, _expand_decimal_point, text)
text = re.sub(_ordinal_re, _expand_ordinal, text)
text = re.sub(_number_re, _expand_number, text)
return text
def expand_abbreviations(text):
for regex, replacement in _abbreviations:
text = re.sub(regex, replacement, text)
return text
def expand_numbers(text):
return normalize_numbers(text)
def lowercase(text):
return text.lower()
def collapse_whitespace(text):
return re.sub(_whitespace_re, " ", text)
def convert_to_ascii(text):
return unidecode(text)
def convert_to_arpabet(text):
return " ".join(
[
f"{{ {s.strip()} }}" if s.strip() not in ",." else s.strip()
for s in " ".join(g2p(text)).split(" ")
]
)
def basic_cleaners(text):
"""Basic pipeline that lowercases and collapses whitespace without transliteration."""
text = lowercase(text)
text = collapse_whitespace(text)
return text
def transliteration_cleaners(text):
"""Pipeline for non-English text that transliterates to ASCII."""
text = convert_to_ascii(text)
text = lowercase(text)
text = collapse_whitespace(text)
return text
def english_cleaners(text):
"""Pipeline for English text, including number and abbreviation expansion."""
text = convert_to_ascii(text)
text = lowercase(text)
text = expand_numbers(text)
text = expand_abbreviations(text)
text = collapse_whitespace(text)
return text
def english_cleaners_phonemizer(text):
"""Pipeline for English text to phonemization, including number and abbreviation expansion."""
text = convert_to_ascii(text)
text = lowercase(text)
text = expand_numbers(text)
text = expand_abbreviations(text)
text = phonemize(
text,
language="en-us",
backend="espeak",
strip=True,
preserve_punctuation=True,
with_stress=True,
)
text = collapse_whitespace(text)
return text
def batch_english_cleaners_phonemizer(text: List[str]):
batch = []
for t in text:
t = convert_to_ascii(t)
t = lowercase(t)
t = expand_numbers(t)
t = expand_abbreviations(t)
batch.append(t)
batch = phonemize(
batch,
language="en-us",
backend="espeak",
strip=True,
preserve_punctuation=True,
with_stress=True,
)
batch = [collapse_whitespace(t) for t in batch]
return batch
# Cell
import random
from .symbols import (
DEFAULT_SYMBOLS,
IPA_SYMBOLS,
id_to_symbol,
symbols_to_sequence,
arpabet_to_sequence,
)
BATCH_CLEANERS = {
"english_cleaners_phonemizer": batch_english_cleaners_phonemizer,
}
CLEANERS = {
"english_cleaners": english_cleaners,
"english_cleaners_phonemizer": english_cleaners_phonemizer,
"basic_cleaners": basic_cleaners,
"transliteration_cleaners": transliteration_cleaners,
}
def batch_clean_text(text: List[str], cleaner_names):
for name in cleaner_names:
cleaner = BATCH_CLEANERS[name]
text = cleaner(text)
return text
def clean_text(text, cleaner_names):
for name in cleaner_names:
cleaner = CLEANERS[name]
text = cleaner(text)
return text
def english_to_arpabet(english_text):
    arpabet_symbols = g2p(english_text)
    return " ".join(arpabet_symbols)
def cleaned_text_to_sequence(cleaned_text, symbol_set):
return symbols_to_sequence(cleaned_text, symbol_set=symbol_set, ignore_symbols=[])
def text_to_sequence(text, cleaner_names, p_arpabet=0.0, symbol_set=DEFAULT_SYMBOLS):
"""Converts a string of text to a sequence of IDs corresponding to the symbols in the text.
The text can optionally have ARPAbet sequences enclosed in curly braces embedded
in it. For example, "Turn left on {HH AW1 S S T AH0 N} Street."
Args:
text: string to convert to a sequence
cleaner_names: names of the cleaner functions to run the text through
Returns:
List of integers corresponding to the symbols in the text
"""
sequence = []
# Check for curly braces and treat their contents as ARPAbet:
while len(text):
m = curly_re.match(text)
if not m:
cleaned = clean_text(text, cleaner_names)
words_and_nonwords = words_re.findall(cleaned)
cleaned_words = []
for w, nw in words_and_nonwords:
if w and random.random() < p_arpabet:
cleaned_words.append(convert_to_arpabet(w))
elif w:
cleaned_words.append(w)
else:
cleaned_words.append(nw)
for word in cleaned_words:
if word.startswith("{"):
sequence += arpabet_to_sequence(word)
else:
sequence += symbols_to_sequence(word)
break
cleaned = clean_text(m.group(1), cleaner_names)
sequence += text_to_sequence(cleaned, cleaner_names, p_arpabet)
sequence += arpabet_to_sequence(m.group(2))
text = m.group(3)
return sequence
def sequence_to_text(sequence, symbol_set=DEFAULT_SYMBOLS):
"""Converts a sequence of IDs back to a string"""
result = ""
for symbol_id in sequence:
if symbol_id in id_to_symbol[symbol_set]:
s = id_to_symbol[symbol_set][symbol_id]
# Enclose ARPAbet back in curly braces:
if len(s) > 1 and s[0] == "@":
s = "{%s}" % s[1:]
result += s
return result.replace("}{", " ")
# Cell
import random
utterances = [
"Stop posting about Among Us, I'm tired of seeing it!",
"My friends on TikTok send me memes, on Discord it's fucking memes.",
"I'd just like to interject for a moment.",
"What you're referring to as Linux, is in fact, gnu slash Linux.",
"Wow! That was intense! Woo I just flew in from the new ruins level and boy are my arms tired.",
"Oh my god! They killed Kenny!",
"It needs to be about, twenty percent cooler.",
"Hey relax guy! I'm just your average joe! Take a rest!",
"I'm not bad, I'm just drawn that way.",
"Alright! we're here just sitting in the car. I want you to show me if you can get far.",
"Isn't it nice to have a computer that will talk to you?",
"This is where we hold them. This is where we fight!",
"I'll have two number nines, a number nine large, a number six with extra dip.",
"A number seven, two number forty fives, one with cheese, and a large soda.",
"Can you tell me how to get to Sesame Street?",
"You know what they say, all toasters toast toast.",
"Don't turn me into a marketable plushie!",
"I am speaking straight opinions, and that's all that matters.",
"Excuse me sir, but it appears that a package has arrived in the mailbox as of recent.",
"I'm going to order pizza, look at me, I'm on the phone, right now.",
"I started calling and I am hungry to the bone.",
"so while I wait, I start to sing the song of my people I know it since I was a baby.",
"When I was a lad, I ate four dozen eggs every morning to help me get large.",
"Now that I'm grown I eat five dozen eggs, so I'm roughly the size of a barge!",
"There's no crying. There's no crying in baseball.",
"Sphinx of black quartz, judge my vow.",
"Go to the Winchester, have a pint, and wait for all of this to blow over.",
"You should really stop pressing this button.",
"Minecraft is honestly a block game.",
"I like that song. Let it play.",
"When a zebras in the zone, leave him alone!",
"The FitnessGram Pacer Test is a multistage aerobic capacity test that progressively gets more difficult as it continues.",
"The 20 meter pacer test will begin in 30 seconds.",
"The running speed starts slowly, but gets faster each minute after you hear this signal. beep.",
"A single lap should be completed each time you hear this sound. ding.",
"Remember to run in a straight line, and run as long as possible.",
"The second time you fail to complete a lap before the sound, your test is over.",
"The test will begin on the word start. On your mark, get ready, start.",
"Oh my gosh. Nemo's swimming out to sea!",
"Go back. I want to be monkey!",
"Whoops! You have to put the C D in your computer.",
"Now the animators are gonna have to draw all this fire!",
"The mitochondria is the powerhouse of the cell.",
"Now that's something you don't see every day!",
"You know, what can I say? I die hard.",
"Gosh darn it Kris, where the heck are we?",
"This is a test voice message.",
"I swear the toilet was full of guacamole when I bought it!",
"Did you ever hear the Tragedy of <NAME> the wise?",
"I thought not. It's not a story the Jedi would tell you, it's a sith legend.",
"<NAME> was a dark lord of the Sith, so powerful and so wise",
"He could use the force to influence the midichlorians to create life.",
"Never gonna give you up. Never gonna let you down.",
"I am the Milkman. My milk is delicious.",
"I'm just like my country. I'm young, scrappy, and hungry, and I am not throwing away my shot.",
"I'm still a piece of garbage.",
"Looks like you're the first one here! Use the people tab on your watch to invite your friends to join you!",
]
def random_utterance():
return utterances[random.randint(0, len(utterances) - 1)]
``` |
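Putting the pieces together, a round trip through `text_to_sequence` and `sequence_to_text` (the exact ids depend on the symbol table):
```python
# Round-trip sketch: cleaners expand numbers/abbreviations, and curly
# braces mark ARPAbet spans that survive the trip back.
ids = text_to_sequence("Dr. Smith has 2 cats.", ["english_cleaners"])
print(sequence_to_text(ids))  # "doctor smith has two cats."

ids = text_to_sequence("Turn left on {HH AW1 S S T AH0 N} Street.",
                       ["english_cleaners"])
print(sequence_to_text(ids))  # "turn left on {HH AW1 S S T AH0 N} street."
```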
{
"source": "a919gg/spider",
"score": 3
} |
#### File: spider/crawl/crawler.py
```python
import os
import sys
import threading
import logging
from crawl.url import get_urls
from threads.threads import WorkerThread
from modules.debug import debug
logger = logging.getLogger("spider.crawler")
class CrawlerConf(object):
"""
docstring for CrawlerConf
"""
def __init__(self, opt, conf):
super(CrawlerConf, self).__init__()
self.opt = opt
self.conf = conf
def crawler(self):
"""
        Entry point: breadth-first crawl up to the configured depth.
"""
logger.info("crawler normal loading.")
if self.opt.dbg:
debug("crawler normal loading.")
self.conf.url_current.append(self.opt.url)
if self.opt.dbg:
debug("root url", self.conf.url_current)
try:
depth = 1
while depth <= self.opt.deep and len(self.conf.url_current) and \
self.conf.url_count < self.opt.pages:
if self.opt.dbg:
debug("current depth", depth)
logger.info("current depth : " + str(depth))
depth = depth + 1
self._crawler_download_url()
self._crawler_update_url()
if self.opt.dbg:
debug("crawler normal quit.")
except Exception:
if self.opt.dbg:
debug("crawler abnormal quit.")
debug("crawler", sys.exc_info())
raise CrawlerConfError
def _crawler_download_url(self):
"""
        Download every URL in the current list using worker threads.
"""
logger.info("crawler_download_url normal loading.")
i = 0
while i < len(self.conf.url_current):
j = 0
while i + j < len(self.conf.url_current) and j < self.opt.thread_number and \
self.conf.url_count <= self.opt.pages:
self.conf.url_count += 1
work_thread = WorkerThread(
self.conf,
self.conf.url_current[i + j])
self.conf.thread_pool.append(work_thread)
work_thread.start()
j += 1
i += j
for x in self.conf.thread_pool:
x.join(30)
self.conf.thread_pool = []
if self.conf.url_count > self.opt.pages:
break
self.conf.url_current = []
logger.info("crawler_download_url normal quit.")
def _crawler_update_url(self):
"""
        Update the list of URLs to crawl next from the downloaded pages.
"""
logger.info("crawler_update_url normal loading.")
if self.opt.dbg:
debug("crawler_update_url normal loading.")
debug("url_pages", self.conf.url_pages)
url_new = []
for s in self.conf.url_pages:
url_new += get_urls(s)
self.conf.url_current = list(set(url_new) -
set(self.conf.url_success) -
set(self.conf.url_failed))
if self.opt.dbg:
debug("url_current", self.conf.url_current)
logger.info("crawler_update_url normal quit.")
class CrawlerConfError(Exception):
"""
docstring for CrawlerConfError
"""
pass
if __name__ == "__main__":
pass
```
#### File: spider/modules/debug.py
```python
def debug(arg1, *args):
    """
    Print a debug message.
    """
    parts = [str(arg1)]
    for arg in args:
        parts.append(str(arg))
    print('[debug>>>]', ' : '.join(parts))
``` |
{
"source": "A91y/password-generator",
"score": 4
} |
#### File: A91y/password-generator/Password-Generator.py
```python
alphaList = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", " ", "%", "\\", "|", "=", "[", "]", "<", ">", "{", "}", "@", "#", "$", "_", "&", "-", "+", "(", ")", "*", '"', "'", ":", ";", "!", "?", ",", ".", "-", "~", "^"]
# print(len(alphaList)) # 98 (note: "-" appears twice, so duplicate passwords are generated)
# Creating File To append the passwords
file_name = input("Enter name of file to be created (without extention) : ") + ".txt"
print(file_name)
with open(file_name, "w") as f:
f.close()
# Passwords for 1 key
def passwd1():
    print(len(alphaList))
f = open(file_name, "a")
para1 = [[i] for i in alphaList]
# print(para1)
arr1 = ["".join([j for j in i]) for i in para1]
# print(arr1)
# char = 0
for i in arr1:
f.write(i + "\n")
# print(i, end = "\n")
# char += 1
# print(char)
f.close()
# Passwords for 2 key
def passwd2():
    print(len(alphaList)**2)
f = open(file_name, "a")
para2 = [[i, j] for i in alphaList for j in alphaList]
arr2 = ["".join([j for j in i]) for i in para2]
for i in arr2:
f.write(i + "\n")
f.close()
# Passwords for 3 key
def passwd3():
    print(len(alphaList)**3)
f = open(file_name, "a")
para3 = [[i, j, k]
for i in alphaList for j in alphaList for k in alphaList]
arr3 = ["".join([j for j in i]) for i in para3]
for i in arr3:
f.write(i + "\n")
f.close()
# Passwords for 4 key
def passwd4():
    print(len(alphaList)**4)
f = open(file_name, "a")
para4 = [[i, j, k, l]
for i in alphaList for j in alphaList for k in alphaList for l in alphaList]
arr4 = ["".join([j for j in i]) for i in para4]
for i in arr4:
f.write(i + "\n")
f.close()
# Passwords for 5 key
def passwd5():
print(94**5)
f = open(file_name, "a")
para5 = [[i, j, k, l, m]
for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList]
arr5 = ["".join([j for j in i]) for i in para5]
for i in arr5:
f.write(i + "\n")
f.close()
# Passwords for 6 key
def passwd6():
    print(len(alphaList)**6)
f = open(file_name, "a")
para6 = [[i, j, k, l, m, n]
for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList]
arr6 = ["".join([j for j in i]) for i in para6]
for i in arr6:
f.write(i + "\n")
f.close()
# Passwords for 7 key
def passwd7():
    print(len(alphaList)**7)
    f = open(file_name, "a")
    para7 = [[i, j, k, l, m, n, o]
             for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList]
arr7 = ["".join([j for j in i]) for i in para7]
for i in arr7:
f.write(i + "\n")
f.close()
# Passwords for 8 key
def passwd8():
    print(len(alphaList)**8)
    f = open(file_name, "a")
    para8 = [[i, j, k, l, m, n, o, p]
             for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList]
arr8 = ["".join([j for j in i]) for i in para8]
for i in arr8:
f.write(i + "\n")
f.close()
# Passwords for 9 key
def passwd9():
    print(len(alphaList)**9)
    f = open(file_name, "a")
    para9 = [[i, j, k, l, m, n, o, p, q]
             for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList]
arr9 = ["".join([j for j in i]) for i in para9]
for i in arr9:
f.write(i + "\n")
f.close()
# Passwords for 10 key
def passwd10():
    print(len(alphaList)**10)
    f = open(file_name, "a")
    para10 = [[i, j, k, l, m, n, o, p, q, r]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList]
arr10 = ["".join([j for j in i]) for i in para10]
for i in arr10:
f.write(i + "\n")
f.close()
# Passwords for 11 key
def passwd11():
    print(len(alphaList)**11)
    f = open(file_name, "a")
    para11 = [[i, j, k, l, m, n, o, p, q, r, s]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList]
arr11 = ["".join([j for j in i]) for i in para11]
for i in arr11:
f.write(i + "\n")
f.close()
# Passwords for 12 key
def passwd12():
    print(len(alphaList)**12)
    f = open(file_name, "a")
    para12 = [[i, j, k, l, m, n, o, p, q, r, s, t]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList]
arr12 = ["".join([j for j in i]) for i in para12]
for i in arr12:
f.write(i + "\n")
f.close()
# Passwords for 13 key
def passwd13():
    print(len(alphaList)**13)
    f = open(file_name, "a")
    para13 = [[i, j, k, l, m, n, o, p, q, r, s, t, u]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList for u in alphaList]
arr13 = ["".join([j for j in i]) for i in para13]
for i in arr13:
f.write(i + "\n")
f.close()
# Passwords for 14 key
def passwd14():
    print(len(alphaList)**14)
    f = open(file_name, "a")
    para14 = [[i, j, k, l, m, n, o, p, q, r, s, t, u, v]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList for u in alphaList for v in alphaList]
arr14 = ["".join([j for j in i]) for i in para14]
for i in arr14:
f.write(i + "\n")
f.close()
# Passwords for 15 key
def passwd15():
    print(len(alphaList)**15)
    f = open(file_name, "a")
    para15 = [[i, j, k, l, m, n, o, p, q, r, s, t, u, v, w]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList for u in alphaList for v in alphaList for w in alphaList]
arr15 = ["".join([j for j in i]) for i in para15]
for i in arr15:
f.write(i + "\n")
f.close()
# Passwords for 16 key
def passwd16():
    print(len(alphaList)**16)
    f = open(file_name, "a")
    para16 = [[i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList for u in alphaList for v in alphaList for w in alphaList for x in alphaList]
arr16 = ["".join([j for j in i]) for i in para16]
for i in arr16:
f.write(i + "\n")
f.close()
# Passwords for 17 key
def passwd17():
    print(len(alphaList)**17)
    f = open(file_name, "a")
    para17 = [[i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList for u in alphaList for v in alphaList for w in alphaList for x in alphaList for y in alphaList]
arr17 = ["".join([j for j in i]) for i in para17]
for i in arr17:
f.write(i + "\n")
f.close()
# Passwords for 18 key
def passwd18():
    print(len(alphaList)**18)
    f = open(file_name, "a")
    para18 = [[i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList for u in alphaList for v in alphaList for w in alphaList for x in alphaList for y in alphaList for z in alphaList]
arr18 = ["".join([j for j in i]) for i in para18]
for i in arr18:
f.write(i + "\n")
f.close()
# Passwords for 19 key
def passwd19():
    print(len(alphaList)**19)
    f = open(file_name, "a")
    para19 = [[i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z, a]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList for u in alphaList for v in alphaList for w in alphaList for x in alphaList for y in alphaList for z in alphaList for a in alphaList]
arr19 = ["".join([j for j in i]) for i in para19]
for i in arr19:
f.write(i + "\n")
f.close()
# Passwords for 20 key
def passwd20():
    print(len(alphaList)**20)
    f = open(file_name, "a")
    para20 = [[i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z, a, b]
              for i in alphaList for j in alphaList for k in alphaList for l in alphaList for m in alphaList for n in alphaList for o in alphaList for p in alphaList for q in alphaList for r in alphaList for s in alphaList for t in alphaList for u in alphaList for v in alphaList for w in alphaList for x in alphaList for y in alphaList for z in alphaList for a in alphaList for b in alphaList]
arr20 = ["".join([j for j in i]) for i in para20]
for i in arr20:
f.write(i + "\n")
f.close()
if __name__ == "__main__":
try:
charkey = int(
(input("Enter the no. of characters passwords must be (maximum - 20): ")))
    except Exception:
        raise SystemExit("Give a numeric value only...")  # charkey would be undefined below
if charkey == 1:
passwd1()
elif charkey == 2:
passwd2()
elif charkey == 3:
passwd3()
elif charkey == 4:
passwd4()
elif charkey == 5:
passwd5()
elif charkey == 6:
passwd6()
elif charkey == 7:
passwd7()
elif charkey == 8:
passwd8()
elif charkey == 9:
passwd9()
elif charkey == 10:
passwd10()
elif charkey == 11:
passwd11()
elif charkey == 12:
passwd12()
elif charkey == 13:
passwd13()
elif charkey == 14:
passwd14()
elif charkey == 15:
passwd15()
elif charkey == 16:
passwd16()
elif charkey == 17:
passwd17()
elif charkey == 18:
passwd18()
elif charkey == 19:
passwd19()
elif charkey == 20:
passwd20()
else:
print("Do You Want To Kill Me?")
``` |
{
"source": "a93-git/codingame-solutions",
"score": 4
} |
#### File: clash/fastest/fast-clash9.py
```python
import sys
import math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
s = input()
# Write an action using print
# To debug: print("Debug messages...", file=sys.stderr)
def phone(s):
res = ''
res = res + '({0})'.format(s[:3])
res = res + ' {0}'.format(s[3:6])
res = res + '-{0}'.format(s[6:])
return res
print(phone(s))
``` |
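For a 10-digit input, the slices land as area code, exchange, and line number:
```python
print(phone("0123456789"))  # (012) 345-6789
```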
{
"source": "a93-git/scraping",
"score": 3
} |
#### File: a93-git/scraping/scrapeDilbert.py
```python
import bs4
import urllib.request
import requests
import datetime
import multiprocessing
import time
def construct_date_list():
""" Returns a list of dates from 01-01-1989 to current date
Note: The first Dilbert comics on its website is from 16th April 1989
"""
start = datetime.datetime.strptime("1989-04-16", "%Y-%m-%d")
end = datetime.datetime.strptime(datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d"), "%Y-%m-%d")
date_generated = [start + datetime.timedelta(days=x) for x in range(0, (end-start).days + 1)]
return date_generated
def construct_url_list(date_list):
""" Constructs a set of urls for Dilbert comics
Urls follow this format - Fixed part - http://dilbert.com/strip
Vaiable part - date
"""
base_url = "http://dilbert.com/strip/"
url_list = []
for i in date_list:
        temp = base_url + i.strftime("%Y-%m-%d")  # str.strip() strips a char set, not a suffix
url_list.append(temp)
return url_list
def move_files():
""" Moves files in yearwise folders """
pass
f = open('success.txt', 'w')
f2 = open('failed.txt', 'w')
def scraper(url_list):
""" Put everything together and scrape """
# Scrape
for i in url_list:
time.sleep(2)
try:
r = requests.get(i)
c = r.content
soup = bs4.BeautifulSoup(c, "html.parser")
tag = soup.find("img", {"class" : "img-comic"})
link = tag['src']
            filename = i.replace("http://dilbert.com/strip/", "")  # strip() removes a char set, not a prefix
urllib.request.urlretrieve(link, filename)
f.write(str(url_list.index(i) + 1))
        except Exception:
            f2.write(i.replace("http://dilbert.com/strip/", ""))
if __name__ == '__main__':
# Retrieve list of urls
url_list = construct_url_list(construct_date_list())
scraper(url_list)
f.close()
f2.close()
move_files()
```
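`move_files` is left as a stub above; a sketch of the intended year-wise sorting, assuming the `YYYY-MM-DD`-style filenames that `scraper` saves into the working directory:
```python
# Sketch of move_files: sort downloaded strips into per-year folders.
import os
import shutil

def move_files(src="."):
    for name in os.listdir(src):
        year = name[:4]
        if year.isdigit() and name[4:5] == "-":   # looks like YYYY-...
            os.makedirs(os.path.join(src, year), exist_ok=True)
            shutil.move(os.path.join(src, name), os.path.join(src, year, name))
```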
#### File: a93-git/scraping/scrapeGarfield.py
```python
from datetime import date, timedelta
from urllib.request import urlretrieve
from multiprocessing.pool import ThreadPool
from time import time as timer
import os
from time import sleep
base_url = "https://d1ejxu6vysztl5.cloudfront.net/comics/garfield/"
# Calculate days from the 1st day to today
d1 = date(1978, 6, 19) # start date
d2 = date.today() # end date
delta = d2 - d1 # timedelta
# Get the dates
datelist = []
for i in range(delta.days + 1):
datelist.append((d1 + timedelta(days=i)))
# Create urls
url_list = []
for i in datelist:
url_list.append(base_url + str(i.year) + "/" + str(i) + ".gif")
print(len(url_list))
# Grab urls
def get_comic(i):
""" Takes in a url and retrieves the comic from that url
Return: Returns a tuple of url, and error"""
filename = i[-14:]
try:
urlretrieve(i, filename)
return None, None
except Exception as e:
return i, e
start = timer()
# Map the get_comic function over each element of url_list list
results = ThreadPool(40).imap_unordered(get_comic, url_list)
for url, error in results:
if error is None:
pass
else:
print("Error in fetching %r: %s" %(url, error))
print("Elapsed time: %s" %(timer() - start,))
``` |
{
"source": "a9455/G-Poratal",
"score": 2
} |
#### File: G-Poratal/GSMaP/GSMaP.py
```python
import os
import h5py
import glob
import shutil
import numpy as np
import pandas as pd
import datetime as dt
from dateutil.relativedelta import relativedelta
from ftplib import FTP
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import geopandas as gpd
from shapely.ops import cascaded_union
#######################################################################################
# common
#######################################################################################
def OutputInit():
"""
    Print the acquisition conditions
"""
print("------------------取得条件--------------------")
print(f"host = {host}")
print(f"user = {user}")
print(f"pw = {pw}")
print(f"TargetFor = {TargetFor}")
print(f"lon = {lat}")
print(f"lat = {lon}")
print(f"start = {start}")
print(f"end = {end}")
print(f"delta time = {delta}")
print(f"ElementNum = {ElementNum}")
print("---------------------------------------------")
### prepare folder
def RemakeFolder(path):
"""
    Delete any existing data and recreate the folder
"""
    if os.path.exists(path):
shutil.rmtree(path)
os.makedirs(path)
def MakeFolder():
"""
    Rebuild the output folder tree
"""
RemakeFolder("./OUTPUT/ROW")
RemakeFolder("./OUTPUT/CSV")
RemakeFolder("./OUTPUT/PNG")
### get date info from datetime
def DatetimeToElement(date):
"""
    Extract year, month, day, and hour strings from a datetime.
    Year: 4 digits
    Month/day/hour: zero-padded to 2 digits
"""
year = str(date.year)
month = str(date.month).zfill(2)
day = str(date.day).zfill(2)
hour = str(date.hour).zfill(2)
return year, month, day, hour
### make file list
def MakeFileList(start:list, end:list, delta):
"""
    Build the list of file paths to fetch for the acquisition period
    start : acquisition start time
    end : acquisition end time
"""
print("Make File List", end="")
Sdate = dt.datetime(*start)
Edate = dt.datetime(*end)
FilePaths = []
    ## hourly files
if delta == "H":
dirA = "standard/GSMaP/3.GSMAP.H/Unknown"
while Sdate<=Edate:
yy, mm, dd, hh = DatetimeToElement(Sdate)
dirB = f"{yy}/{mm}/{dd}/GPMMRG_MAP_{yy[2:]}{mm}{dd}{hh}00_H_L3S_MCH_Unknown.h5"
FilePaths.append(f"{dirA}/{dirB}")
Sdate = Sdate + dt.timedelta(hours=1)
## 月
elif delta == "M":
dirA = "standard/GSMaP/3.GSMAP.M/Unknown"
while Sdate<=Edate:
yy, mm, dd, hh = DatetimeToElement(Sdate)
dirB = f"{yy}/GPMMRG_MAP_{yy[2:]}{mm}_M_L3S_MCM_Unknown.h5"
FilePaths.append(f"{dirA}/{dirB}")
Sdate = Sdate + relativedelta(months=1)
print(f" Total {len(FilePaths)} files")
return dirA.replace("/Unknown",""), FilePaths
def Scale(lon, lat):
londeg = lon[1] - lon[0]
latdeg = lat[1] - lat[0]
if londeg >= latdeg:
londeg = 16
latdeg = latdeg/londeg * 16
else:
londeg = londeg/latdeg * 16
latdeg = 16
return londeg, latdeg
#######################################################################################
# ftp server
#######################################################################################
def GetSateliteVarsion(ftp, dirA):
Satelite = ftp.nlst(dirA)
for s in range(0,len(Satelite)):
Satelite[s] = Satelite[s].replace(f"{dirA}/","")
return Satelite
def DownloadFile(ftp, Filepath, Satelites):
for s in Satelites:
filepath = Filepath.replace("Unknown", s)
filename = filepath.split("/")[-1]
try:
with open(f"./OUTPUT/ROW/{filename}", "wb") as f: # make new file to write data
ftp.retrbinary('RETR %s' % filepath, f.write) # write data
break
except Exception:
os.remove(f"./OUTPUT/ROW/{filename}") # if Error occured, maked file is deleted
return filename
#######################################################################################
# file processing
#######################################################################################
def ReadRowFile(file, ElementNum):
data = h5py.File(file, 'r')
G1 = data.keys()
ELEMENT = []
for i in G1:
G2 = data[i].keys()
for f in G2:
ELEMENT.append([i, f])
LAT, LON = 0, 1
if ElementNum == -1:
for i in range(0,len(ELEMENT)):
print(i,ELEMENT[i][0],ELEMENT[i][1])
ELE = int(input("PLEASE CHOOSE ELEMENT NUMBER\n"))
else:
ELE = int(ElementNum)
LON1, LON2 = ELEMENT[LON][0], ELEMENT[LON][1]
LAT1, LAT2 = ELEMENT[LAT][0], ELEMENT[LAT][1]
G1, G2 = ELEMENT[ELE][0], ELEMENT[ELE][1]
### READ DATA
LON = np.array(data[LON1][LON2], dtype=float)
LAT = np.array(data[LAT1][LAT2], dtype=float)
DATA = np.array(data[G1][G2], dtype=float)
data.close()
out = [LON, LAT, DATA]
return out, G2, ELE
def OutputCsv(data, lon, lat, G2, Savefilepath):
LON, LAT, DATA = data[0], data[1], data[2]
## Reshape
LON = np.reshape(LON, ((LON.shape[0] * LON.shape[1], 1)))
LAT = np.reshape(LAT, ((LAT.shape[0] * LAT.shape[1], 1)))
DATA = np.reshape(DATA, ((DATA.shape[0] * DATA.shape[1], 1)))
## concat
out = np.append(LON, LAT, axis=1)
out = np.append(out, DATA, axis=1)
df = []
for i in range(0,out.shape[0]):
x, y = out[i,0], out[i,1]
if lon[0]<=x and x<=lon[1] and lat[0]<=y and y<=lat[1]:
df.append([np.round(out[i,0],2), np.round(out[i,1],2), np.round(out[i,2],2)])
df = pd.DataFrame(df, columns=["LON", "LAT", G2])
df.to_csv(Savefilepath, index=None)
def OutputPng(data, lon, lat, G2, Savefilep):
## prepare vars
LON, LAT, DATA = data[0], data[1], data[2]
xmin, xmax = lon[0], lon[1]
ymin, ymax = lat[0], lat[1]
## get draw area
plons = np.where((xmin<=LON) & (LON<=xmax), 1, 0) # if within⇒1, not within⇒0
plats = np.where((ymin<=LAT) & (LAT<=ymax), 1, 0) # if within⇒1, not within⇒0
DATA = DATA * plons * plats # within⇒DATA, not within⇒0
plat, plon = [], []
for i in range(0, DATA.shape[0]):
if np.sum(DATA[i,:]) != 0:
plat.append(i)
for i in range(0, DATA.shape[1]):
if np.sum(DATA[:,i]) != 0:
plon.append(i)
LON = LON[plat, :][:,plon]
LAT = LAT[plat, :][:,plon]
DATA = DATA[plat, :][:,plon]
## draw
Sx, Sy = Scale(lon, lat)
plt.rcParams["figure.figsize"] = [Sy,Sx]
plt.rcParams["font.size"] = 20
fig = plt.figure()
ax = fig.add_subplot(111)
pp = ax.pcolormesh(LON, LAT, DATA, cmap="rainbow",
shading="gouraud", norm=LogNorm())
cbar = fig.colorbar(pp, ax=ax)
cbar.set_label(G2)
plt.xlabel("LON")
plt.ylabel("LAT")
plt.savefig(Savefilep, bbox_inches="tight")
plt.close()
#######################################################################################
# shp processing
#######################################################################################
def ReadShp():
shplist = os.listdir("./SHP")
area = []
for shp in shplist:
shpfile = glob.glob(f"./SHP/{shp}/*.shp")[0]
shpdata = gpd.read_file(shpfile)
shpdata = cascaded_union(shpdata.geometry)
area.append([shp, shpdata])
RemakeFolder(f"./OUTPUT/PIP/{shp}")
return area
def PIP(shpname, shpdata):
files = glob.glob("./OUTPUT/CSV/*.csv")
for file in files:
filename = file.split("/")[-1]
data = pd.read_csv(file)
data["Inarea"] = 0
data = gpd.GeoDataFrame(data,geometry=gpd.points_from_xy(data.LON,data.LAT))
data = data.set_crs("EPSG:4326")
for point in range(0, data.shape[0]):
            if shpdata.contains(data.loc[point,"geometry"]):
data.loc[point,"Inarea"] = 1
pipdata = data[data["Inarea"]==1].drop(["Inarea","geometry"], axis=1)
pipdata.to_csv(f"./OUTPUT/PIP/{shpname}/{filename}", index=None)
#######################################################################################
# MAIN
#######################################################################################
if __name__ == "__main__":
from init import *
OutputInit() # output initial condition
    # mode : 0: download only, 1: process data only, 2: download and process
if mode!=1:
MakeFolder() # prepare output folder
print("accessing ftp server...", end="" ) # access ftp server
ftp = FTP(host=host, user=user, passwd=pw)
print("\rOK, accessed ftp server ")
Satelite, FileList = MakeFileList(start, end, delta) # download files list
        Satelite = GetSateliteVarsion(ftp, Satelite)                # get satellite versions
## download files from ftp
filenames = []
for File in FileList:
filename = DownloadFile(ftp, File, Satelite)
filenames.append(filename)
print("Finish, closed ftp server")
ftp.quit() # close ftp server
else:
print("Not download files mode")
filenames = os.listdir("./OUTPUT/ROW/")
## Read download files, output as csv
if mode!=0:
print("Start to output csv(png)")
for filename in filenames:
file = "./OUTPUT/ROW/" + filename # get filename (.h5 filename)
Savefilec = "./OUTPUT/CSV/" + filename.replace(".h5", ".csv") # get filename (to save csv)
Savefilep = "./OUTPUT/PNG/" + filename.replace(".h5", ".png") # get filename (to save png)
data, Ele, ElementNum = ReadRowFile(file, ElementNum) # read file from h5 file
OutputCsv(data, lon, lat, Ele, Savefilec) # output csv file
OutputPng(data, lon, lat, Ele, Savefilep) # output png file
## point in polygon
print("Point In Polygon")
shplist = ReadShp()
for shp in shplist:
PIP(shp[0], shp[1])
else:
print("Not output csv(png) mode")
``` |
{
"source": "a952855/stockgetweb",
"score": 3
} |
#### File: stockgetweb/crawl/crawl.py
```python
import urllib.request as req
import bs4
import json
import time
import random
import requests
import pandas as pd
HEADERS = ("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, "
"like Gecko) Chrome/75.0.3770.100 Safari/537.36")
def crawlSelectType():
url = 'https://www.twse.com.tw/zh/page/trading/exchange/BFT41U.html'
request = req.Request(url, headers={'User-Agent': HEADERS})
with req.urlopen(request) as response:
data = response.read().decode('utf-8')
root = bs4.BeautifulSoup(data, 'html.parser')
select = root.find("select", class_='board')
indexes = [str(x+1).zfill(2) for x in range(31)]
selectTypes = dict()
for t in select:
if t.string != '\n' and t['value'] in indexes:
selectTypes[t['value']] = t.string
return selectTypes
def crawlStockBasis(_type='', date=''):
time.sleep(random.randint(0, 10))
url = ("https://www.twse.com.tw/exchangeReport/BWIBBU_d?"
f"response=json&date=&selectType={_type}&_={date}")
request = requests.get(url, headers={'User-Agent': HEADERS})
data = bs4.BeautifulSoup(request.content, 'html.parser')
jsonData = json.loads(data.text)
if jsonData['stat'] == 'OK':
columns = ['StockNo', 'StockName', 'Yield', 'Year', 'PE', 'PB', 'YQ']
dfStock = pd.DataFrame(jsonData['data'], columns=columns)
dfStock['type'] = _type
return dfStock
return pd.DataFrame()
def crawlRoeRoa(_id):
time.sleep(random.randint(0, 10))
url = ("https://histock.tw/stock/financial.aspx?"
f"no={_id}&t=3&st=2&q=3")
try:
request = requests.get(url, headers={'User-Agent': HEADERS})
root = bs4.BeautifulSoup(request.text, 'html.parser')
table = root.find('table', class_="tb-stock tbBasic")
data = table.find_all('tr')
# example: \n201832.11%17.34%\n
strData = data[1].text
strData = strData.replace('\n', '')[4:]
roe = strData.split('%')[0]
roa = strData.split('%')[1]
return roe, roa
    except Exception:
return '-', '-'
def crawlEps(_id):
time.sleep(random.randint(0, 10))
url = (f"https://histock.tw/stock/{_id}/"
"%E6%AF%8F%E8%82%A1%E7%9B%88%E9%A4%98")
try:
request = requests.get(url, headers={'User-Agent': HEADERS})
root = bs4.BeautifulSoup(request.text, 'html.parser')
table = root.find('table', class_="tb-stock text-center tbBasic")
data = table.find_all('tr')[5].find_all('td')
        for i in range(len(data) - 1, -1, -1):  # fixed off-by-one: start at the last valid index
if data[i].text != '-':
return data[i].text
    except Exception:
return '-'
def collectStockData():
types = crawlSelectType()
df = pd.DataFrame()
for t in types:
df = pd.concat([df, crawlStockBasis(t)], axis=0)
df.sort_values('StockNo', inplace=True)
df['type'] = df['StockNo'].apply(lambda x: list(df[df.StockNo == x]['type']))
df.drop_duplicates(subset='StockNo', inplace=True)
print('Crawling ROE, ROA ...')
df[['ROE', 'ROA']] = df['StockNo'].apply(lambda x:
pd.Series(crawlRoeRoa(x)))
    print('Crawling EPS ...')
df['EPS'] = df['StockNo'].apply(lambda x: crawlEps(x))
df.to_csv('test.csv', index=False)
df.set_index('StockNo', inplace=True)
stocks = df.to_dict('index')
stocks = {k: json.dumps(v) for k, v in stocks.items()}
return stocks
if __name__ == '__main__':
print(collectStockData())
#df = crawlStockBasis('01')
#print(df)
#print(crawlRoeRoa('2025'))
#print(crawlEps('2025'))
``` |
{
"source": "a96123155/protein-vae",
"score": 3
} |
#### File: protein-vae/metal_gen/metal_VAE_pytorch.py
```python
import torch
import torch.nn.functional as nn  # note: the original aliases torch.nn.functional as `nn` and calls e.g. nn.relu below
import torch.optim as optim
from torch.autograd import Variable
import torch.nn.functional as F
import numpy as np
import argparse
import os  # needed for os.environ["CUDA_VISIBLE_DEVICES"] below
from sklearn.metrics import accuracy_score
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split as tts
# =============================================================================
# Sort out Command Line arguments
# =============================================================================
parser = argparse.ArgumentParser()
parser.add_argument("-lr", type=float,
help="lr", default=5e-4)
parser.add_argument("-batch_size", type=int,
help="batch_size train", default=10000)
parser.add_argument("-batch_size_test", type=int,
help="batch_size test", default=25)
parser.add_argument("-num_epochs", type=int,
help="num_epochs", default=1000)
parser.add_argument("-latent_dim", type=int,
help="latent_dim", default=16)
parser.add_argument("-device", type=int,
help="device", default=0)
parser.add_argument("-dataset", type=str,
help="dataset", default="nostruc")# its either struc or nostruc
args = parser.parse_args()
args_dict = vars(args)
# =============================================================================
# Switches for what you want the model to do
# =============================================================================
cuda=False      # set True to train on GPU; leave False for CPU inference
load=True       # load the saved model (the provided default is the 16-dimensional model for nostruc data)
train=False     # set True to train the model, presuming you have the dataset
new_metal=True  # set True to produce 'batch_size' samples of a given protein
                # see the docs on GitHub for a description of how to do this
# =============================================================================
# Dataset loading and specifying values
# =============================================================================
if cuda:
if args_dict["dataset"]=="nostruc":
data = np.load('/scratch0/DeNovo/assembled_data_mbflip.npy')
else:
data = np.load('/scratch0/DeNovo/assembled_data_mbflip_fold.npy')
print(data.shape)
data, data_test=tts(data, test_size=0.15, shuffle=True)
n=data.shape[0]
else:
print("No DATA")
if args_dict["dataset"]=="nostruc":
X_dim=3088
else:
X_dim=4353
if cuda:
os.environ["CUDA_VISIBLE_DEVICES"]=str(args_dict['device'])
#spec batch size
batch_size=args_dict['batch_size']
#learning rate
lr=args_dict['lr']
# layer sizes
hidden_size=[512,256,128,args_dict['latent_dim']]
# =============================================================================
# Module
# =============================================================================
class feed_forward(torch.nn.Module):
def __init__(self, input_size, hidden_sizes, batch_size):
super().__init__()
self.input_size = input_size
self.hidden_sizes = hidden_sizes
self.batch_size = batch_size
self.fc = torch.nn.Linear(input_size, hidden_sizes[0]) # 2 for bidirection
self.BN = torch.nn.BatchNorm1d(hidden_sizes[0])
self.fc1 = torch.nn.Linear(hidden_sizes[0], hidden_sizes[1])
self.BN1 = torch.nn.BatchNorm1d(hidden_sizes[1])
self.fc2 = torch.nn.Linear(hidden_sizes[1], hidden_sizes[2])
self.BN2 = torch.nn.BatchNorm1d(hidden_sizes[2])
self.fc3_mu = torch.nn.Linear(hidden_sizes[2], hidden_sizes[3])
self.fc3_sig = torch.nn.Linear(hidden_sizes[2], hidden_sizes[3])
if args_dict["dataset"]=="struc":
self.fc4 = torch.nn.Linear(hidden_sizes[3]+1273, hidden_sizes[2])
else:
self.fc4 = torch.nn.Linear(hidden_sizes[3]+8, hidden_sizes[2])
self.BN4 = torch.nn.BatchNorm1d(hidden_sizes[2])
self.fc5 = torch.nn.Linear(hidden_sizes[2], hidden_sizes[1])
self.BN5 = torch.nn.BatchNorm1d(hidden_sizes[1])
self.fc6 = torch.nn.Linear(hidden_sizes[1], hidden_sizes[0])
self.BN6 = torch.nn.BatchNorm1d(hidden_sizes[0])
if args_dict["dataset"]=="struc":
self.fc7 = torch.nn.Linear(hidden_sizes[0], input_size-1273)
else:
self.fc7 = torch.nn.Linear(hidden_sizes[0], input_size-8)
def sample_z(self, mu, log_var):
# Using reparameterization trick to sample from a gaussian
if cuda:
eps = torch.randn(self.batch_size, self.hidden_sizes[-1]).cuda()
else:
eps = torch.randn(self.batch_size, self.hidden_sizes[-1])
return mu + torch.exp(log_var / 2) * eps
def forward(self, x, code, struc=None):
###########
# Encoder #
###########
# get the code from the tensor
# add the conditioned code
if args_dict["dataset"]!="struc":
x = torch.cat((x,code),1)
else:
x = torch.cat((x,code,struc),1)
# Layer 0
out1 = self.fc(x)
out1 = nn.relu(self.BN(out1))
# Layer 1
out2 = self.fc1(out1)
out2 = nn.relu(self.BN1(out2))
# Layer 2
out3 = self.fc2(out2)
out3 = nn.relu(self.BN2(out3))
# Layer 3 - mu
mu = self.fc3_mu(out3)
# layer 3 - sig
sig = nn.softplus(self.fc3_sig(out3))
###########
# Decoder #
###########
# sample from the distro
sample= self.sample_z(mu, sig)
# add the conditioned code
if args_dict["dataset"]!="struc":
sample = torch.cat((sample, code),1)
else:
sample = torch.cat((sample, code, struc),1)
# Layer 4
out4 = self.fc4(sample)
out4 = nn.relu(self.BN4(out4))
# Layer 5
out5 = self.fc5(out4)
out5 = nn.relu(self.BN5(out5))
# Layer 6
out6 = self.fc6(out5)
out6 = nn.relu(self.BN6(out6))
# Layer 7
out7 = nn.sigmoid(self.fc7(out6))
return out7, mu, sig
# =============================================================================
# Training
# =============================================================================
# init the networks
if cuda:
ff = feed_forward(X_dim, hidden_size, batch_size).cuda()
else:
ff = feed_forward(X_dim, hidden_size, batch_size)
# change the loading bit here
if load:
ff.load_state_dict(torch.load("models/metal16_nostruc", map_location=lambda storage, loc: storage))
# Loss and Optimizer
solver = optim.Adam(ff.parameters(), lr=lr)
burn_in_counter = 0
tick = 0
# number of epochs
num_epochs=args_dict['num_epochs']
if train:
for its in range(num_epochs):
#############################
# TRAINING
#############################
ff.train()
scores=[]
data=shuffle(data)
print("Grammar Cond. - Epoch: {0}/{1} Latent: {2}".format(its,num_epochs,hidden_size[-1]))
for it in range(n // batch_size):
if args_dict["dataset"]=="nostruc":
x_batch=data[it * batch_size: (it + 1) * batch_size]
code = x_batch[:,-8:]
x_batch = x_batch[:,:3080]
if cuda:
X = torch.from_numpy(x_batch).cuda().type(torch.cuda.FloatTensor)
C = torch.from_numpy(code).cuda().type(torch.cuda.FloatTensor)
else:
X = torch.from_numpy(x_batch).type(torch.FloatTensor)
C = torch.from_numpy(code).type(torch.FloatTensor)
else:
x_batch=data[it * batch_size: (it + 1) * batch_size]
code = x_batch[:,-8:]
structure = x_batch[:,3080:-8]
x_batch = x_batch[:,:3080]
if cuda:
X = torch.from_numpy(x_batch).cuda().type(torch.cuda.FloatTensor)
C = torch.from_numpy(code).cuda().type(torch.cuda.FloatTensor)
S = torch.from_numpy(structure).cuda().type(torch.cuda.FloatTensor)
else:
X = torch.from_numpy(x_batch).type(torch.FloatTensor)
C = torch.from_numpy(code).type(torch.FloatTensor)
S = torch.from_numpy(structure).type(torch.FloatTensor)
#turf last gradients
solver.zero_grad()
if args_dict["dataset"]=="struc":
# Forward
x_sample, z_mu, z_var = ff(X, C, S)
else:
x_sample, z_mu, z_var = ff(X, C)
# Loss
recon_loss = nn.binary_cross_entropy(x_sample, X, size_average=False) # by setting to false it sums instead of avg.
kl_loss = 0.5 * torch.sum(torch.exp(z_var) + z_mu**2 - 1. - z_var)
#kl_loss=KL_Div(z_mu,z_var,unit_gauss=True,cuda=True)
kl_loss = kl_loss*burn_in_counter
loss = recon_loss + kl_loss
# Backward
loss.backward()
# Update
solver.step()
len_aa=140*22
y_label=np.argmax(x_batch[:,:len_aa].reshape(batch_size,-1,22), axis=2)
y_pred =np.argmax(x_sample[:,:len_aa].cpu().data.numpy().reshape(batch_size,-1,22), axis=2)
# can use argmax again for clipping as it uses the first instance of 21
# loop with 256 examples is only about 3 milliseconds
for idx, row in enumerate(y_label):
scores.append(accuracy_score(row[:np.argmax(row)],y_pred[idx][:np.argmax(row)]))
print("Tra Acc: {0}".format(np.mean(scores)))
if its==(num_epochs-1):
with open('latent_results_'+str(args_dict["dataset"])+'.txt', 'a') as f:
f.write(str(args_dict['latent_dim'])+' train '+str(np.mean(scores)))
if its>300 and burn_in_counter<1.0:
burn_in_counter+=0.003
#############################
# Validation
#############################
scores=[]
ff.eval()
for it in range(data_test.shape[0] // batch_size):
x_batch=data_test[it * batch_size: (it + 1) * batch_size]
if args_dict["dataset"]=="nostruc":
                # (fix) use the test batch sliced above; the original re-sliced from the training set here
code = x_batch[:,-8:]
x_batch = x_batch[:,:3080]
if cuda:
X = torch.from_numpy(x_batch).cuda().type(torch.cuda.FloatTensor)
C = torch.from_numpy(code).cuda().type(torch.cuda.FloatTensor)
else:
X = torch.from_numpy(x_batch).type(torch.FloatTensor)
C = torch.from_numpy(code).type(torch.FloatTensor)
else:
                # (fix) use the test batch sliced above; the original re-sliced from the training set here
code = x_batch[:,-8:]
structure = x_batch[:,3080:-8]
x_batch = x_batch[:,:3080]
if cuda:
X = torch.from_numpy(x_batch).cuda().type(torch.cuda.FloatTensor)
C = torch.from_numpy(code).cuda().type(torch.cuda.FloatTensor)
S = torch.from_numpy(structure).cuda().type(torch.cuda.FloatTensor)
else:
X = torch.from_numpy(x_batch).type(torch.FloatTensor)
C = torch.from_numpy(code).type(torch.FloatTensor)
S = torch.from_numpy(structure).type(torch.FloatTensor)
if args_dict["dataset"]=="struc":
# Forward
x_sample, z_mu, z_var = ff(X, C, S)
else:
x_sample, z_mu, z_var = ff(X, C)
len_aa=140*22
y_label=np.argmax(x_batch[:,:len_aa].reshape(batch_size,-1,22), axis=2)
y_pred =np.argmax(x_sample[:,:len_aa].cpu().data.numpy().reshape(batch_size,-1,22), axis=2)
for idx, row in enumerate(y_label):
scores.append(accuracy_score(row[:np.argmax(row)],y_pred[idx][:np.argmax(row)]))
print("Val Acc: {0}".format(np.mean(scores)))
if its==(num_epochs-1):
with open('latent_results_'+str(args_dict["dataset"])+'.txt', 'a') as f:
f.write(str(args_dict['latent_dim'])+' test '+str(np.mean(scores)))
# saves if its running on gpu
if cuda:
torch.save(ff.state_dict(), 'metal'+str(args_dict['latent_dim'])+"_"+str(args_dict['dataset']))
def newMetalBinder(model,data,name):
"""
Generates a new sequence based on a metal code and a grammar.
The data array is (4353,) where the first 3080 are the
sequence, the next 1265 are the fold and the final 8 are the metal
binding flags. Fold is optional
"""
scores=[]
#model into eval mode
model.eval()
if args_dict["dataset"]=="nostruc":
code = np.tile(data[-8:],(model.batch_size,1))
x = np.tile(data[:3080],(model.batch_size,1))
if cuda:
X = torch.from_numpy(x).cuda().type(torch.cuda.FloatTensor)
C = torch.from_numpy(code).cuda().type(torch.cuda.FloatTensor)
else:
X = torch.from_numpy(x).type(torch.FloatTensor)
C = torch.from_numpy(code).type(torch.FloatTensor)
else:
code = np.tile(data[-8:],(model.batch_size,1))
structure = np.tile(data[3080:-8],(model.batch_size,1))
x = np.tile(data[:3080],(model.batch_size,1))
if cuda:
X = torch.from_numpy(x).cuda().type(torch.cuda.FloatTensor)
C = torch.from_numpy(code).cuda().type(torch.cuda.FloatTensor)
S = torch.from_numpy(structure).cuda().type(torch.cuda.FloatTensor)
else:
X = torch.from_numpy(x).type(torch.FloatTensor)
C = torch.from_numpy(code).type(torch.FloatTensor)
S = torch.from_numpy(structure).type(torch.FloatTensor)
if args_dict["dataset"]=="struc":
# Forward
x_sample, z_mu, z_var = ff(X, C, S)
else:
x_sample, z_mu, z_var = ff(X, C)
len_aa=140*22
y_label=np.argmax(x[:,:len_aa].reshape(batch_size,-1,22), axis=2)
y_pred =np.argmax(x_sample[:,:len_aa].cpu().data.numpy().reshape(batch_size,-1,22), axis=2)
#np.save(name,y_pred)
print(x_sample[:,:len_aa].cpu().data.numpy().reshape(batch_size,-1,22).shape)
np.save(name,x_sample[:,:len_aa].cpu().data.numpy().reshape(batch_size,-1,22))
for idx, row in enumerate(y_label):
scores.append(accuracy_score(row[:np.argmax(row)],y_pred[idx][:np.argmax(row)]))
print("Accuracy: {0}".format(np.mean(scores)))
return
if new_metal:
name="prots_nomet"
g1=np.load(name+".npy")
g1=g1[1]
if len(g1.shape)<2:
newMetalBinder(ff,g1,name+"_out")
else:
for idx, row in enumerate(g1):
newMetalBinder(ff,row,name+"_out_"+str(idx))
```
#### File: protein-vae/produce_sequences/utils.py
```python
import numpy as np
seq_len = 140
gap_char = '-'
spe_char = 'X'
aas = ['G', 'A', 'L', 'M', 'F', 'W', 'K', 'Q', 'E', 'S',
'P', 'V', 'I', 'C', 'Y', 'H', 'R', 'N', 'D', 'T']
seq_choices = aas + [spe_char, gap_char]
n_symbols = len(seq_choices)
def seq_to_vec(seq):
assert len(seq) <= seq_len
seq_ind = [seq_choices.index(gap_char)] * seq_len
for i, aa in enumerate(seq):
seq_ind[i] = seq_choices.index(aa)
vec = [0] * seq_len * n_symbols
for i, j in enumerate(seq_ind):
vec[i * n_symbols + j] = 1
return np.array(vec)
# Convert output vector back to human-readable form
def vec_to_seq(vec):
seq_info = list(vec[:3080])
seq = ""
for i in range(seq_len):
seq += seq_choices[np.argmax(seq_info[i*n_symbols:(i+1)*n_symbols])]
#seq=seq[:seq.find("-")]
seq=seq.replace("-","")
return seq
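
# Round-trip sketch (my addition, not part of the original module): encode a
# short sequence and decode it back, assuming the one-hot layout above.
if __name__ == '__main__':
    vec = seq_to_vec('GALM')
    assert vec.shape == (seq_len * n_symbols,)
    print(vec_to_seq(vec))  # expected output: GALM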
``` |
{
"source": "a962332430/Proxy_pool",
"score": 2
} |
#### File: Proxy_pool/Proxy_pool/tester.py
```python
import asyncio
import aiohttp
import time
import sys
from aiohttp import ClientError
from Proxy_pool.db import MySqlClient
from Proxy_pool.setting import *
from Proxy_pool.utils import get_current_time
class Tester(object):
def __init__(self):
self.mysql = MySqlClient()
async def test_single_ip(self, ip):
"""
测试单个代理
:param ip:
:return:
"""
conn = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(connector=conn) as session:
try:
if isinstance(ip, bytes):
ip = ip.decode('utf-8')
real_ip = 'http://' + str(ip)
                print('Testing', ip)
                async with session.get(TEST_URL, proxy=real_ip, timeout=15, allow_redirects=False) as response:
                    if response.status in VALID_STATUS_CODES:
                        # bump the proxy's score
                        self.mysql.increase(ip)
                        print('Proxy OK', ip)
                    else:
                        self.mysql.decrease(ip)
                        print('Invalid response status', response.status, 'IP', ip)
            except (ClientError, aiohttp.client_exceptions.ClientConnectorError, asyncio.TimeoutError, AttributeError):
                self.mysql.decrease(ip)
                print('Proxy request failed', ip)
def run(self):
"""
测试主函数
:return:
"""
print('=====测试器执行开始=====')
try:
count = self.mysql.count()
print('当前剩余', count, '个代理')
for i in range(0, count, BATCH_TEST_SIZE):
start = i
stop = min(i + BATCH_TEST_SIZE, count)
print('正在测试第', start + 1, '-', stop, '个代理')
test_ip_group = self.mysql.batch(start, stop)
loop = asyncio.get_event_loop()
tasks = [self.test_single_ip(ip_tuple[0]) for ip_tuple in test_ip_group]
loop.run_until_complete(asyncio.wait(tasks))
sys.stdout.flush()
time.sleep(5)
except Exception as e:
print('测试器发生错误', e.args)
print('=====测试器执行结束=====')
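
# Minimal usage sketch (my addition; assumes the MySQL backend and the values
# in Proxy_pool.setting are configured):
if __name__ == '__main__':
    Tester().run()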
``` |
{
"source": "a96tudor/pytest-console-scripts",
"score": 2
} |
#### File: pytest-console-scripts/tests/test_run_scripts.py
```python
import os
import subprocess
import sys
import pytest
import virtualenv
# Template for creating setup.py for installing console scripts.
SETUP_TEMPLATE = """
import setuptools
setuptools.setup(
name='{script_name}',
version='1.0',
py_modules=['{script_name}'],
zip_safe=False,
entry_points={{
'console_scripts': ['{cmd}={script_name}:main']
}}
)
"""
class VEnvWrapper:
"""Wrapper for virtualenv that can execute code inside of it."""
def __init__(self, path):
self.path = path
def _update_env(self, env):
bin_dir = self.path.join('bin').strpath
env['PATH'] = bin_dir + ':' + env.get('PATH', '')
env['VIRTUAL_ENV'] = self.path.strpath
env['PYTHONPATH'] = ':'.join(sys.path)
def run(self, cmd, *args, **kw):
"""Run a command in the virtualenv."""
self._update_env(kw.setdefault('env', os.environ))
print(kw['env']['PATH'], kw['env']['PYTHONPATH'])
subprocess.check_call(cmd, *args, **kw)
def install_console_script(self, cmd, script_path):
"""Run setup.py to install console script into this virtualenv."""
script_dir = script_path.dirpath()
script_name = script_path.purebasename
setup_py = script_dir.join('setup.py')
setup_py.write(SETUP_TEMPLATE.format(cmd=cmd, script_name=script_name))
self.run(['python', 'setup.py', 'develop'], cwd=str(script_dir))
@pytest.fixture(scope='session')
def pcs_venv(tmpdir_factory):
"""Virtualenv for testing console scripts."""
venv = tmpdir_factory.mktemp('venv')
virtualenv.create_environment(venv.strpath)
yield VEnvWrapper(venv)
@pytest.fixture(scope='session')
def console_script(pcs_venv, tmpdir_factory):
"""Console script exposed as a wrapper in python `bin` directory.
Returned value is a `py.path.local` object that corresponds to a python
file whose `main` function is exposed via console script wrapper. The
    name of the command is available via its `command_name` attribute.
The fixture is made session scoped for speed. The idea is that every test
will overwrite the content of the script exposed by this fixture to get
the behavior that it needs.
"""
script = tmpdir_factory.mktemp('script').join('console_script.py')
script.write('def main(): pass')
pcs_venv.install_console_script('console-script', script)
def replace(new_source):
"""Replace script source."""
script.write(new_source)
pyc = script.strpath + 'c'
if os.path.exists(pyc):
# Remove stale bytecode that causes heisenbugs on py27.
os.remove(pyc)
script.replace = replace
return script
@pytest.fixture(params=['inprocess', 'subprocess'])
def launch_mode(request):
"""Launch mode: inprocess|subprocess."""
return request.param
@pytest.fixture
def test_script_in_venv(pcs_venv, console_script, tmpdir, launch_mode):
"""A fixture that tests provided script with provided test."""
def run(script_src, test_src, **kw):
"""Test provided script with a provided test."""
console_script.replace(script_src)
test = tmpdir.join('test.py')
test.write(test_src)
# Execute pytest with the python of the virtualenv we created,
# otherwise it would be executed with the python that runs this test,
# which is wrong.
test_cmd = [
'python',
'-m', 'pytest',
'--script-launch-mode=' + launch_mode,
test.strpath,
]
pcs_venv.run(test_cmd, **kw)
return run
@pytest.mark.parametrize('script,test', [
(
"""
from __future__ import print_function
def main():
print(u'hello world')
print('hello world')
""",
r"""
def test_hello_world(script_runner):
ret = script_runner.run('console-script')
print(ret.stderr)
assert ret.success
assert ret.stdout == 'hello world\nhello world\n'
""",
),
# Script that exits abnormally.
(
"""
import sys
def main():
sys.exit('boom')
""",
r"""
def test_exit_boom(script_runner):
ret = script_runner.run('console-script')
assert not ret.success
assert ret.stdout == ''
assert ret.stderr == 'boom\n'
""",
),
# Script that has an uncaught exception.
(
"""
import sys
def main():
raise TypeError('boom')
""",
r"""
def test_throw_exception(script_runner):
ret = script_runner.run('console-script')
assert not ret.success
assert ret.returncode == 1
assert ret.stdout == ''
assert 'TypeError: boom' in ret.stderr
""",
),
# Script that changes to another directory. The test process should remain
# in the directory where it was (this is particularly relevant if we run
# the script inprocess).
(
"""
from __future__ import print_function
import os
import sys
def main():
os.chdir(sys.argv[1])
print(os.getcwd())
""",
r"""
import os
def test_preserve_cwd(script_runner, tmpdir):
dir1 = tmpdir.mkdir('dir1')
dir2 = tmpdir.mkdir('dir2')
os.chdir(str(dir1))
ret = script_runner.run('console-script', str(dir2))
assert ret.stdout == str(dir2) + '\n'
assert os.getcwd() == str(dir1)
""",
),
# Send input to tested script's stdin.
(
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write('simon says ' + line)
""",
r"""
import io
def test_stdin(script_runner):
ret = script_runner.run('console-script', stdin=io.StringIO(u'foo\nbar'))
assert ret.success
assert ret.stdout == 'simon says foo\nsimon says bar'
""",
),
])
def test_run_script(test_script_in_venv, script, test):
test_script_in_venv(script, test)
def test_run_script_with_cwd(test_script_in_venv, tmpdir):
test_script_in_venv(
"""
from __future__ import print_function
import os
def main():
print(os.getcwd())
""",
r"""
def test_cwd(script_runner):
ret = script_runner.run('console-script', cwd='{cwd}')
assert ret.success
assert ret.stdout == '{cwd}\n'
""".format(cwd=tmpdir),
)
``` |
{
"source": "a96tudor/wemake-python-styleguide",
"score": 3
} |
#### File: wemake_python_styleguide/logics/nodes.py
```python
import ast
def is_literal(node: ast.AST) -> bool:
"""
    Checks for nodes that contain only constants.
If the node contains only literals it will be evaluated.
When node relies on some other names, it won't be evaluated.
"""
try:
ast.literal_eval(node)
except ValueError:
return False
else:
return True
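
# Quick illustration (my addition): a pure literal evaluates, while a node
# that references a name does not.
if __name__ == '__main__':
    literal_node = ast.parse('(1, 2)', mode='eval').body
    name_node = ast.parse('(1, x)', mode='eval').body
    print(is_literal(literal_node))  # True
    print(is_literal(name_node))     # False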
```
#### File: wemake_python_styleguide/logics/variables.py
```python
import ast
from typing import Iterable, Optional
from wemake_python_styleguide import constants
from wemake_python_styleguide.options.defaults import MIN_VARIABLE_LENGTH
def is_wrong_variable_name(name: str, to_check: Iterable[str]) -> bool:
"""
    Checks that a variable is not prohibited by explicitly listing its name.
>>> is_wrong_variable_name('wrong', ['wrong'])
True
>>> is_wrong_variable_name('correct', ['wrong'])
False
>>> is_wrong_variable_name('_wrong', ['wrong'])
True
>>> is_wrong_variable_name('wrong_', ['wrong'])
True
>>> is_wrong_variable_name('wrong__', ['wrong'])
False
>>> is_wrong_variable_name('__wrong', ['wrong'])
False
"""
for name_to_check in to_check:
choices_to_check = [
name_to_check,
'_{0}'.format(name_to_check),
'{0}_'.format(name_to_check),
]
if name in choices_to_check:
return True
return False
def is_upper_case_name(name: Optional[str]) -> bool:
"""
    Checks whether the name contains any upper-case letters.
>>> is_upper_case_name('camelCase')
True
>>> is_upper_case_name('UPPER_CASE')
True
>>> is_upper_case_name('camel_Case')
True
>>> is_upper_case_name('snake_case')
False
>>> is_upper_case_name('snake')
False
>>> is_upper_case_name('snake111')
False
>>> is_upper_case_name('__variable_v2')
False
>>> is_upper_case_name(None)
False
"""
return name is not None and any(character.isupper() for character in name)
def is_too_short_variable_name(
name: Optional[str],
min_length: int = MIN_VARIABLE_LENGTH,
) -> bool:
"""
Checks for too short variable names.
>>> is_too_short_variable_name('test')
False
>>> is_too_short_variable_name(None)
False
>>> is_too_short_variable_name('o')
True
>>> is_too_short_variable_name('_')
False
>>> is_too_short_variable_name('z1')
False
>>> is_too_short_variable_name('z', min_length=1)
False
"""
if name is None:
return False
return name != constants.UNUSED_VARIABLE and len(name) < min_length
def is_private_variable(name: Optional[str]) -> bool:
"""
Checks if variable has private name pattern.
>>> is_private_variable(None)
False
>>> is_private_variable('regular')
False
>>> is_private_variable('__private')
True
>>> is_private_variable('_protected')
False
>>> is_private_variable('__magic__')
False
"""
return (
name is not None and name.startswith('__') and not name.endswith('__')
)
def is_variable_name_with_underscored_number(name: str) -> bool:
"""
Checks for variable names with underscored number.
>>> is_variable_name_with_underscored_number('star_wars_episode2')
False
>>> is_variable_name_with_underscored_number(None)
False
>>> is_variable_name_with_underscored_number('come2_me')
False
>>> is_variable_name_with_underscored_number('_')
False
>>> is_variable_name_with_underscored_number('z1')
False
>>> is_variable_name_with_underscored_number('star_wars_episode_2')
True
>>> is_variable_name_with_underscored_number('come_2_me')
True
>>> is_variable_name_with_underscored_number('iso_123_456')
False
"""
pattern = constants.UNDERSCORED_NUMBER_PATTERN
return name is not None and pattern.match(name) is not None
def is_same_variable(left: ast.AST, right: ast.AST) -> bool:
"""Ensures that nodes are the same variable."""
if isinstance(left, ast.Name) and isinstance(right, ast.Name):
return left.id == right.id
return False
def get_assigned_name(node: ast.AST) -> Optional[str]:
"""
Returns variable names for node that are just assigned.
Returns ``None`` for nodes that are used in a different manner.
"""
if isinstance(node, ast.Name) and isinstance(node.ctx, ast.Store):
return node.id
if isinstance(node, ast.Attribute) and isinstance(node.ctx, ast.Store):
return node.attr
if isinstance(node, ast.ExceptHandler):
return getattr(node, 'name', None)
return None
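
# Usage sketch (my addition): a Name in Store context yields its id, while a
# Name in Load context yields None.
if __name__ == '__main__':
    assign_target = ast.parse('x = 1').body[0].targets[0]
    print(get_assigned_name(assign_target))  # 'x'
    loaded_name = ast.parse('print(x)').body[0].value.args[0]
    print(get_assigned_name(loaded_name))  # None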
``` |
{
"source": "a980545075/mayuan",
"score": 2
} |
#### File: mayuan/analysis/XFAnalyze_moScriptTable.py
```python
import os
import json
from win32api import GetFileVersionInfo, LOWORD, HIWORD
from idc import *
from idaapi import *
'''
XFAnalyze_moScriptTable.py, <NAME>, siberas, 2016
finds:
- all entrypoints for getter/setter methods of object-properties
- all entrypoints for scripting methods
- vtable functions (as good as possible) <- still rather crude implementation...
'''
# unset this if you don't want the func names to be overwritten!
setnames = True
# if you want to log output to a file set dolog to True...
dolog = False
logfile = os.path.abspath(".\\log.txt")
if len(ARGV) > 1:
import datetime
logfile = ARGV[1] + "\\log_%s.txt" % datetime.datetime.now().isoformat().replace(":", "_")
xfadb = AskFile(0, "*.json", "Select the XFAdb_v941.json file")
fh = open(xfadb)
objdict_v941 = json.loads(fh.read())
fh.close()
meth_type_off = 0x8
meth_getScriptTable_off = 0x34
textstart = get_segm_by_name(".text").startEA
textend = get_segm_by_name(".text").endEA
datastart = get_segm_by_name(".data").startEA
dataend = get_segm_by_name(".data").endEA
SELF_FLAG = pow(2, 12)
def isValidCode(addr):
if textstart <= addr < textend:
return True
return False
def isValidData(addr):
if datastart <= addr < dataend:
return True
return False
def seek_vtable_end(addr):
while(Dword(addr) and isValidCode(Dword(addr))):
addr += 4
if len(list(XrefsTo(addr))) != 0:
break
return addr
def getvtptr(addr):
return list(XrefsTo(addr, 0))[0].frm - meth_type_off # get first xref == offset vtable in this case
def log(s):
if dolog:
fh = open(logfile, "a")
fh.write(s + "\n")
fh.close()
print s
symcount = 0 # count how many symbols we set
def createsym(address, name, symlog=True):
global symcount
symcount += 1
s = "[+] MakeName: 0x%x -> %s" % (address, name)
try:
dem = Demangle(name, INF_LONG_DN)
if dem != None:
s += " (%s)" % dem
except:
pass
if symlog == True:
log(s)
if setnames:
name = name.replace("#", "HASHSYM_")
set_name(address, name, SN_NOWARN)
SetFunctionFlags(address, SELF_FLAG)
targetfile = os.path.abspath(GetInputFile())
log("[+] analyzing file %s" % targetfile)
log("[+] waiting for initial analysis to finish...")
autoWait()
info = GetFileVersionInfo(targetfile, "\\")
ms = info['FileVersionMS']
ls = info['FileVersionLS']
ver = "%02d.%02d.%05d.%05d" % (HIWORD(ms), LOWORD(ms), HIWORD(ls), LOWORD(ls))
majorver = "%d" % HIWORD(ms)
if majorver != "10" and majorver != "11":
majorver = "DC"
log("[+] target version: Adobe Reader %s (version %s)" % (majorver, ver))
objdict = {}
# now parse the moScriptTable structures
for typeid in objdict_v941:
'''
objdict_v941 is a dictionary with typeid strings as keys and object dicts as values
each object dictionary has following structure:
{
"name": <STR: NAME OF OBJECT>,
"vtaddr": <STR: VTABLE ADDRESS>,
"vtlen": <STR: LENGTH OF VTABLE>,
"vtable_funcs": <ARRAY: VTABLE FUNCTION DICTS (see below)>,
"hierarchy": <ARRAY: CLASS HIERARCHY, eg. ["boolean", "content", "node", "tree", "object"]>,
"properties": <DICT (see below)>,
"scriptmethods":<DICT (see below)>
}
"vtable_funcs":
the vtable function dicts have rva, name and the undecorated name as entries, eg.:
{
"rva": "0x7a23e6",
"name": "_ZN11XFANodeImpl6removeEi",
"undecorated": "XFANodeImpl::remove(int)"
}
"properties":
dictionary where keys are the various hierarchy levels.
values are arrays of dictionaries containing "getter", "setter" and "name" entries
example:
{
"node": [ { "getter": "0x79b1a0", "name": "isContainer", "setter": "0x0" },
{ "getter": "0x79aa16", "name": "isNull", "setter": "0x0" },
...
],
"tree": [ { "getter": "0x7dde8a", "name": "nodes", "setter": "0x0" },
{ "getter": "0x7dc08a", "name": "name", "setter": "0x7dbffe" },
...
],
...
}
"scriptmethods":
dictionary where keys are the various hierarchy levels.
values are arrays of dictionaries containing "rva" and "name" entries
example:
{
"node": [ { "rva": "0x79cff2", "name": "clone" },
{ "rva": "0x79d3ba", "name": "isPropertySpecified" },
...
],
"manifest": [ { "rva": "0x78c394", "name": "evaluate" },
{ "rva": "0x78bd4a", "name": "execValidate" },
...
],
...
}
'''
obj = objdict_v941[typeid]
typeid = int(typeid, 16)
log("\n\n[+] obj %s, typeid 0x%x" % (obj["name"], typeid))
log("[+] original obj from AR for Solaris v941: len(properties): %d, len(methods): %d, len(vtable): %d" % (len(obj["properties"]), len(obj["scriptmethods"]), obj["vtlen"]))
# reset call hierarchy, scripting method, properties and the vtable addresses for the new object
obj["hierarchy"] = []
obj["scriptmethods"] = {}
obj["properties"] = {}
obj["vtaddr"] = "0"
log("[+] finding moScriptTable data ptr...")
pattern_addr = FindBinary(0, SEARCH_DOWN, "b8 %x %x 00 00 c3" % (typeid & 0xff, typeid >> 8)) # mov eax, XXYYh; retn
if pattern_addr == BADADDR:
log("[-] could not find binary pattern for 'mov eax, %x; retn' => skip it!" % typeid)
continue
log("[+] found 'mov eax, %x; retn' binary pattern @ 0x%x" % (typeid, pattern_addr))
vtable_address = getvtptr(pattern_addr)
obj["vtaddr"] = "0x%x" % vtable_address
log("[+] vtable start @ 0x%x" % vtable_address)
createsym(vtable_address, obj["name"] + "_vtable", symlog=False)
# check size of vtable
vtable_end_address = seek_vtable_end(vtable_address)
vtentries = ( vtable_end_address - vtable_address ) / 4
# yes, this is a weak heuristic! remove it if you don't like it ;)
# most methods WILL match, some XFA methods have been removed and added, though.
# so this will NOT be accurate if count(added_vt_methods) == count(removed_vt_methods)!
if obj["vtlen"] == vtentries:
pass
else:
# if the count does NOT match then obviously methods were removed or added.
# for xfa objects we can at least match until the XFATreeImpl methods:
# XFAObjectImpl has 20 methods which stay the same and XFATreeImpl also has 20 unchanged methods
if vtentries >= 0x50: # => xfa-obj (see below)
obj["vtable_funcs"] = obj["vtable_funcs"][:40]
else:
obj["vtable_funcs"] = []
# update vtable entry count
obj["vtlen"] = vtentries
# object class contains getScriptTable method. length(object vtable) == 20 => 20*4 = 80 = 0x50 is minimum vtable size
if vtentries < 0x50:
log("[!] no XFA obj (len(vtable) == %d too small) => no getScriptTable method... skip it!" % (vtentries*4))
objdict["0x%x" % typeid] = obj
continue
getScriptTable_ptr = vtable_address + meth_getScriptTable_off
getScriptTable = Dword(getScriptTable_ptr)
createsym(getScriptTable, obj["name"] + "_getScriptTable", symlog=False)
log("[+] %s::getScriptTable ptr @ 0x%x" % (obj["name"], getScriptTable_ptr))
log("[+] %s::getScriptTable @ 0x%x" % (obj["name"], getScriptTable))
# for solaris we have something like the following instr:
# mov eax, ds:(_ZN14XFASubformImpl13moScriptTableE_ptr - 118AAA4h)[ecx]
    # extract the _ZN14XFASubformImpl13moScriptTableE_ptr data reference by scanning the operands
moScriptTable = None
fnc = get_func(getScriptTable)
if fnc is None:
log("[!] getScriptTable could not be found => skip object!")
objdict["0x%x" % typeid] = obj
continue
# simple heuristic check to make sure we're in getScriptTable.
# The function is small. maximum of ~0x18 bytes
if fnc.endEA - fnc.startEA > 0x20:
log("[!] function too big. this is probably not getScriptTable...! => skip object!")
objdict["0x%x" % typeid] = obj
continue
cnt = 0
for head in Heads(start=fnc.startEA, end=fnc.endEA):
if GetOpnd(head, 0) == "eax" and isValidData(GetOperandValue(head, 1)):
moScriptTable = GetOperandValue(head, 1)
if not moScriptTable:
raise Exception("could not find moScriptTable offset")
log("[+] %s::moScriptTable @ 0x%x" % (obj["name"], moScriptTable))
createsym(moScriptTable, obj["name"] + "_moScriptTable", symlog=False)
while(1): # we break if we hit 0 => end of the class hierarchy!
classname = GetString(Dword(moScriptTable+0x04), -1, ASCSTR_C)
if len(obj["hierarchy"]) == 0:
log("[+] parsing %s.moScriptTable @ 0x%x" % (classname, moScriptTable))
else:
log("[+] parsing %s.moScriptTable (subclass) @ 0x%x" % (classname, moScriptTable))
createsym(moScriptTable, classname + "_moScriptTable", symlog=False)
obj["hierarchy"].append(classname)
####> PARSE PROPERTIES
# { "tree" : [ { "name": "", "rva": "" }, ... ], "node": [...]...}
obj["properties"][classname] = []
propsptr = Dword(moScriptTable+0x08)
if propsptr != 0:
log("[+] parsing props @ 0x%x" % propsptr)
createsym(propsptr, "properties_table__%s_%s" % (obj["name"], classname), symlog=False)
while(Dword(propsptr) != BADADDR and Dword(propsptr)):
propname = GetString(Dword(Dword(Dword(Dword(propsptr)))), -1, ASCSTR_C)
# for assignment obj = "hi" instead of obj.value = "hi"
if Dword(propsptr) == 0 or Dword(Dword(propsptr)) == 0 or propname == None:
propname = "value_direct"
createsym(Dword(propsptr), "property_struct__%s_%s_%s" % (obj["name"], classname, propname), symlog=False)
createsym(Dword(Dword(propsptr)), "ptr_ptr_string_" + propname, symlog=False)
createsym(Dword(Dword(Dword(propsptr))), "ptr_string_" + propname, symlog=False)
getter = Dword(Dword(propsptr)+4)
setter = Dword(Dword(propsptr)+8)
obj["properties"][classname].append( { "name" : propname, "getter" : "0x%x" % getter, "setter" : "0x%x" % setter } )
#print "prop %s, get 0x%x, set 0x%x" % (propname, getter, setter)
propsptr += 4
log("[+] found %d properties" % len(obj["properties"][classname]))
else:
log("[!] no properties found")
####> PARSE METHODS
obj["scriptmethods"][classname] = []
methptr = Dword(moScriptTable+0x0c)
if methptr != 0:
createsym(methptr, "method_table__%s_%s" % (obj["name"], classname), symlog=False)
log("[+] parsing methods @ 0x%x" % methptr)
while(Dword(methptr) != BADADDR and Dword(methptr)):
methodname = GetString(Dword(Dword(Dword(Dword(methptr)))), -1, ASCSTR_C)
createsym(Dword(methptr), "method_struct__%s_%s_%s" % (obj["name"], classname, methodname), symlog=False)
createsym(Dword(Dword(methptr)), "ptr_ptr_string_" + methodname, symlog=False)
createsym(Dword(Dword(Dword(methptr))), "ptr_string_" + methodname, symlog=False)
funcaddr = Dword(Dword(methptr)+4)
if funcaddr != 0:
obj["scriptmethods"][classname].append( { "name" : methodname, "rva" : "0x%x" % funcaddr } )
methptr += 4
log("[+] found %d methods" % len(obj["scriptmethods"][classname]))
else:
log("[!] no methods found")
# deref the first dword to continue - break if 0 (end of class hierarchy!)
moScriptTable = Dword(moScriptTable)
if moScriptTable == 0:
break
log("[+] finished moScriptTable parsing for object %s!" % obj["name"])
####> NOW SET METHOD AND PROPERTY SYMBOLS
log("[+] set methods..." )
for classname in obj["hierarchy"]:
c = 0
for method_dict in obj["scriptmethods"][classname]:
c += 1
method_addr = int(method_dict["rva"], 16)
if method_addr != 0:
# check if we've already set a name for this method. if this is the case we omit
# the object name as part of the method name since it would be misleading
method_name = "METHOD_%s_%s_%s" % (obj["name"], classname, method_dict["name"])
long_method_name = ""
if Name(method_addr) != "":
long_method_name = method_name
method_name = "METHOD_%s_%s" % (classname, method_dict["name"])
log("[%d/%d] set method '%s' => %s" % (c, len(obj["scriptmethods"][classname]), method_name, method_dict["rva"]))
createsym(method_addr, method_name)
# add long method name to see in the function header which objects call reference this method
if long_method_name not in GetFunctionCmt(method_addr, 0).split("\n"):
                    SetFunctionCmt(method_addr, GetFunctionCmt(method_addr, 0) + "\n" + long_method_name, 0)  # fixed: was funcaddr, a stale variable from the parsing loop above
log("[+] set properties...")
for classname in obj["hierarchy"]:
c = 0
for property_dict in obj["properties"][classname]:
c += 1
for type in ["getter", "setter"]:
property_func = int(property_dict[type], 16)
if property_func != 0:
# set getter name just like we did it above
if Name(property_func) != "":
property_name = "%s_%s_%s" % (type.upper(), classname, property_dict["name"])
else:
property_name = "%s_%s_%s_%s" % (type.upper(),obj["name"], classname, property_dict["name"])
log("[%d/%d] set property %s '%s' => %s" % (c, len(obj["properties"][classname]), type, property_name, property_dict[type]))
createsym(property_func, property_name)
objdict["0x%x" % typeid] = obj
# setting vtable method infos is partly incorrect!!
# remove this if you want to make sure everything is correct...
log("[+] now setting vtable method information...")
for typeid in objdict:
obj = objdict[typeid]
typeid = int(typeid, 16)
log("[+] setting vtable infos for object %s with vtable @ 0x%x" % (obj["name"], int(obj["vtaddr"], 16)))
offset = 0
for method_dict in obj["vtable_funcs"]:
method_addr = Dword(int(obj["vtaddr"], 16) + offset)
log("[%d/%d] %s @ 0x%x" % (offset/4, len(obj["vtable_funcs"]), method_dict["name"], method_addr))
createsym(method_addr, method_dict["name"])
offset += 4
'''
xfadb = AskFile(1, "XFADB_AR_%s_(%s).json" % (majorver, ver), "Select output file for XFADB")
if xfadb != None:
fh = open(xfadb, "wb")
fh.write(json.dumps(objdict, indent = 2))
fh.close()
'''
log("[+] done!")
```
#### File: exploitation/sample_exploits/ar_buggery_pykd.py
```python
from pykd import *
def dbgCB(x):
# ".printf \"DBG: %ma\\r\\n\", poi(poi(esp+c)+10);"
s = dbgCommand(".printf \"%ma\\r\\n\", poi(poi(esp+c)+10);")
if s.startswith("write0:") == True:
targetaddr = int(s.split("write0:")[-1], 16)
print "[!] perform write0 to targetaddress 0x%x" % targetaddr
cmd = "ed %x 0" % targetaddr
print "cmd: %s" % cmd
print "before:"
print dbgCommand("dd %x" % ((targetaddr &~ 0xf)-0x10))
dbgCommand(cmd)
print "after:"
print dbgCommand("dd %x" % ((targetaddr &~ 0xf)-0x10))
else:
print s
# in order to be able to resolve the symbol you need to add the Acroform.pdb file for your Reader version!
addy = int(dbgCommand("? AcroForm!METHOD_node_isPropertySpecified").strip().split(" ")[-1], 16)
print "[!] set bp for xfa.isPropertySpecified at 0x%x" % addy
print setBp(addy, dbgCB)
go()
``` |
{
"source": "a9872a9872/ptt_web_image_crawler",
"score": 3
} |
#### File: a9872a9872/ptt_web_image_crawler/main.py
```python
import os
import re
import requests
from bs4 import BeautifulSoup
from argparse import ArgumentParser
from multiprocessing import Pool
ptt_host = 'https://www.ptt.cc'
cookies = {'over18': '1'}
parser = ArgumentParser()
parser.add_argument('--board', '-b', help="Choose the PTT board to crawl (default: the Food board), e.g. Food", default='Food')
parser.add_argument('--pages', '-p', help="Set a maximum number of pages (default: all), e.g. 10", default=False)
args = parser.parse_args()
def get_html_soup(url):
html = requests.get(url, cookies=cookies).text
return BeautifulSoup(html, 'html.parser')
def get_newest_page():
    soup = get_html_soup(ptt_host + f'/bbs/{args.board}/index.html')
    pre_page = soup.find('a', string='‹ 上頁')  # the "‹ 上頁" (previous page) link on the newest index page
    return int(re.search(r'\d+', pre_page['href']).group()) + 1
def download_image(page, a, i):
filename = f'./img/{args.board}/{page}/{i}.jpg'
os.makedirs(os.path.dirname(filename), exist_ok=True)
img = requests.get(a['href']).content
with open(filename, 'wb') as file:
file.write(img)
def crawling(page):
titles = get_html_soup(ptt_host + f'/bbs/{args.board}/index{page}.html').find_all(class_='title')
i = 1
for title in titles:
try:
main_content = get_html_soup(ptt_host + title.a['href']).find(id='main-content')
a_tags = main_content.find_all('a', href=re.compile('^https://i.imgur.com'))
for a in a_tags:
download_image(page, a, i)
print(f'download page {page} number {i} image')
i += 1
        except TypeError:
            # announcement or deleted entries have no link; skip them
            pass
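# Note (my addition): multiprocessing.Pool needs the __main__ guard below,
# since worker processes re-import this module under the spawn start method
# (the default on Windows and recent macOS).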
if __name__ == '__main__':
newest_page = get_newest_page()
pages = int(args.pages)
last_page = 1 if not pages else newest_page - pages
pool = Pool()
pool.map(crawling, range(newest_page, last_page, -1))
``` |
{
"source": "A9-dev/pynotes",
"score": 3
} |
#### File: pynotes/pynotes/__main__.py
```python
import win10toast  # currently unused; reserved for the notifications TODO at the bottom
import argparse
import json
import pathlib
import os
from colorama import init as c_init
from colorama import Fore, Style
c_init()
json_path = None
data = None
def main():
global json_path
global data
json_path = "\\".join(
str(pathlib.Path(__file__).absolute()).split("\\")[:-2]) + "\\db.json"
with open(json_path) as json_file:
data = json.load(json_file)
currentDir = str(pathlib.Path().absolute())
isInitialised = currentDir in [
x["dir"] for x in data["projects"]]
parser = argparse.ArgumentParser(
description="Pynotes is a terminal based app that lets you add notes to specific projects and directories")
commands_parser = parser.add_subparsers(help='commands', dest="command")
parser.add_argument(
"-q", "--quiet", help="Run command with no output", action="store_true")
init_parser = commands_parser.add_parser(
"initdir", help="Initialise current directory as a project, letting you automatically filter viewed notes to the ones added to the directory if the command was ran from that directory")
init_parser.add_argument(
"-p", "--project", help="", action="store", type=str)
add_project_parser = commands_parser.add_parser(
"add-project", help="Add a project which you can add notes to and view notes from")
add_project_parser.add_argument(
"projectName", help="", action="store", type=str)
add_note_parser = commands_parser.add_parser(
"add-note", help="Lets you add a note to a project, a directory, or a global project")
add_note_parser.add_argument("note", help="", type=str)
add_note_parser.add_argument(
"-p", "--project", help="", action="store", type=str)
view_parser = commands_parser.add_parser(
"view", help="Lets you view your notes, defaulted to the current dir if ran from an initialised dir, then to the project if specified, and then global")
view_args = view_parser.add_mutually_exclusive_group()
view_args.add_argument("-a", "--all", default=False, action="store_true")
view_args.add_argument("-p", "--project", help="",
action="store", type=str)
remove_parser = commands_parser.add_parser(
"remove", help="Lets you remove a project or note.")
remove_parser.add_argument("-p", "--project", help="",
action="store", type=str)
remove_parser.add_argument("-n", "--note", help="", action="store", type=int)
args = parser.parse_args()
if args.command == "initdir":
if not isInitialised:
project_dir = currentDir
project_name = project_dir.split("\\")[-1]
canInit = True
if args.project:
isExisting = False
for i in range(len(data['projects'])):
if data['projects'][i]['projectName'] == args.project:
if data['projects'][i]["dir"]:
canInit = False
else:
isExisting = True
index = i
if canInit:
project_name = args.project
else:
print(
Fore.RED + "Cannot init, {} is already initialised!".format(args.project))
canInit = False
if canInit:
if isExisting:
data["projects"][index]["dir"] = currentDir
try:
with open(json_path, 'w') as json_file:
json.dump(data, json_file)
except Exception as e:
print(Fore.RED + repr(e))
else:
addProject(project_name, project_dir)
if not args.quiet:
print(Fore.GREEN +
"Initialised project {0}!".format(project_name))
else:
print(
Fore.RED + "Cannot init directory, {} is already initialised!".format(currentDir))
elif args.command == "add-project":
canAdd = True
for i in data['projects']:
if(i['projectName'] == args.projectName):
canAdd = False
if canAdd:
addProject(args.projectName)
if not args.quiet and canAdd:
print(Fore.GREEN + "Added project {0}".format(args.projectName))
else:
print(
Fore.RED + "Cannot create project, project with the name %s already exists!" % (args.projectName))
elif args.command == "add-note":
if not args.project:
for i in data['projects']:
if (currentDir == i['dir']):
try:
i['notes'].append(args.note)
with open(json_path, 'w') as json_file:
json.dump(data, json_file)
if not args.quiet:
print(Fore.GREEN + "Successfully added note to %s!" %
(i['projectName']))
except Exception as e:
print(Fore.RED + repr(e))
else:
found = False
for i in data['projects']:
if(i['projectName'] == args.project):
try:
found = True
i['notes'].append(args.note)
with open(json_path, 'w') as json_file:
json.dump(data, json_file)
if not args.quiet:
print(Fore.GREEN + "Successfully added note!")
except Exception as e:
print(Fore.RED + repr(e))
if not found:
print(Fore.RED + "Project %s not found!" % args.project)
elif args.command == "view":
notes = []
useCurrentDir = False
found = True
if(not args.all):
if(args.project):
found = False
projectName = args.project
for i in data["projects"]:
if i["projectName"] == args.project:
notes = i['notes']
found = True
if not found:
print(Fore.RED + "Project %s not found!" % args.project)
else:
for i in data['projects']:
if (currentDir == i['dir']):
useCurrentDir = True
projectName = i['projectName']
notes = i['notes']
if not useCurrentDir:
projectName = "Global"
for i in data["projects"]:
if i["projectName"] == "Global":
notes = i["notes"]
if found:
print(Style.RESET_ALL + 'Notes from: ' +
Fore.GREEN + projectName)
if not notes:
print(Fore.RED + " " + "No notes!")
i = 1
for j in notes:
print(Fore.RED + ' ' + str(i) +
": " + Style.RESET_ALL + j)
i += 1
else:
for i in data["projects"]:
k = 1
print(Style.RESET_ALL + 'Notes from: %s' %
(Fore.GREEN + i["projectName"]))
if(not i["notes"]):
print(Fore.RED + " "+"No notes!")
for j in i["notes"]:
print(Fore.RED + ' ' + str(k) +
": " + Style.RESET_ALL + str(j))
k += 1
elif args.command == "remove":
removed = False
if args.note:
if args.project:
found = False
for i in data['projects']:
if i['projectName'] == args.project:
found = True
try:
i['notes'].pop(args.note-1)
removed = True
except Exception as e:
print(Fore.RED + "Invalid note number!")
if not found and not args.quiet:
print(Fore.RED + "Project %s not found!"%(args.project))
            else:
                # completed from the original TODO comments: remove args.note
                # from the project initialised for this dir, otherwise from Global
                for i in data['projects']:
                    if (i['dir'] == currentDir) if isInitialised else (i['projectName'] == "Global"):
                        try:
                            i['notes'].pop(args.note - 1)
                            removed = True
                        except Exception:
                            print(Fore.RED + "Invalid note number!")
else:
if args.project:
found = False
for i in range(len(data["projects"])):
if data["projects"][i]["projectName"] == args.project:
found = True
index = i
if found:
data["projects"].pop(index)
removed = True
if not args.quiet:
print(Fore.GREEN + "Removed project, %s!"%(args.project))
else:
if not args.quiet:
print(Fore.RED + "Project, %s not found!"%(args.project))
elif isInitialised:
index = 0
for i in range(len(data["projects"])):
if data["projects"][i]["dir"] == currentDir:
index = i
data["projects"].pop(index)
removed = True
if not args.quiet:
print(Fore.GREEN + "Removed project with dir, %s!"%(currentDir))
else:
print(Fore.RED + "Please specify a project or note to remove!")
if removed:
try:
with open(json_path, 'w') as json_file:
json.dump(data, json_file)
except Exception as e:
print(Fore.RED + repr(e))
def addProject(project_name, project_dir=""):
data["projects"].append(
{"projectName": project_name, "dir": project_dir, "notes": []})
with open(json_path, 'w') as json_file:
json.dump(data, json_file)
if __name__ == '__main__':
main()
'''
TODO:
1. Add notifications.
- Set reminders either at a specific time or after a certain length of time.
- Create/Find an icon.
2. Change searches with isInitialised.
'''
``` |
{
"source": "A9K5/Python_Flask",
"score": 3
} |
#### File: 03_09_20/proj1/login_manager.py
```python
from flask import Flask, render_template, request, redirect, session
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager, UserMixin, login_user, login_required, logout_user, current_user, fresh_login_required
from urllib.parse import urljoin,urlparse
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////Users/ankurkumar/Documents/Git/Python_Flask/03_09_20/proj1/login.db'
app.config['SECRET_KEY'] = "Thisissercret"
app.config['USE_SESSION_FOR_NEXT'] = True
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
login_manager.login_message = 'You need to login!'
login_manager.refresh_view = 'login'
login_manager.needs_refresh_message = 'You need to relogin to acces this page.'
class User(UserMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(30), unique=True)
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
# @app.route('/')
# def index():
# user = User.query.filter_by(username='Anthony').first()
# login_user(user)
# return "You r logged in. "
@app.route('/login')
def login():
# session['next'] = request.args.get('next')
return render_template('login.html')
def is_safe_url(target):
    # enabled from the original commented-out stub; also fixes url_join -> urljoin
    ref_url = urlparse(request.host_url)
    test_url = urlparse(urljoin(request.host_url, target))
    return test_url.scheme in ('http', 'https') and ref_url.netloc == test_url.netloc
@app.route('/logmein',methods=['POST'])
def logmein():
username = request.form["username"]
user = User.query.filter_by(username=username).first()
if not user:
return "<h1>usr not found</h1>"
login_user(user, remember = True)
print(session)
if "next" in session and session['next']!=None :
next = session['next']
return redirect(next)
return "<h1>u r now logged in </h1>"
@app.route('/logout')
@login_required
def logout():
logout_user()
return "You r noew logged out"
@app.route('/home')
@login_required
def home():
return ("Current User "+ current_user.username)
@app.route('/fresh')
@fresh_login_required
def fresh():
return "<h1>you have a fresh login </h1>"
if __name__=='__main__':
app.run(debug=True)
```
#### File: 05_09_20/proj1/app.py
```python
from flask import Flask, jsonify, request, make_response
import jwt
import datetime
from functools import wraps
app = Flask(__name__)
app.config['SECRET_KEY'] = '<PASSWORD>issecret'
def token_required(f):
@wraps(f)
def decorated( *args, **kwargs):
token = request.args.get('token')
if not token:
return jsonify({'message':'Token not found'}),403
try:
jwt.decode(token,app.config['SECRET_KEY'])
except:
            return jsonify({'message':'Token is invalid'}),403
return f(*args, **kwargs)
return decorated
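
# Client flow sketch (my addition; mirrors the routes below): get a token from
# /login using HTTP basic auth, then pass it back as a query parameter:
#   curl -u anyuser:password http://127.0.0.1:5000/login
#   curl "http://127.0.0.1:5000/protected?token=<token>"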
@app.route('/protected')
@token_required
def protected():
return jsonify({'message':'Protected view'})
@app.route('/unprotected')
def unprotected():
return jsonify({'message':'Un-Protected view'})
@app.route('/login')
def login():
auth = request.authorization
if auth and auth.password == 'password':
token = jwt.encode({ 'user':auth.username, 'exp':datetime.datetime.utcnow()+datetime.timedelta(seconds=10*60 )},app.config['SECRET_KEY'])
return jsonify( {'token':token.decode('UTF-8')} )
    return make_response('Could not verify', 401, {'WWW-Authenticate': 'Basic realm="Login Required"'})
if __name__ == "__main__":
app.run(debug=True)
# http://127.0.0.1:5000/login
# http://127.0.0.1:5000/unprotected
# http://1172.16.58.3:5000/protected?token=<KEY>
```
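A minimal client session against the app above (the username is hypothetical; the app only checks that the password is literally `'password'`):
```python
import requests

# Basic-auth login returns a short-lived token
resp = requests.get('http://127.0.0.1:5000/login', auth=('alice', 'password'))
token = resp.json()['token']

# The protected view expects the token in the query string
resp = requests.get('http://127.0.0.1:5000/protected', params={'token': token})
print(resp.json())  # {'message': 'Protected view'}
```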
#### File: 06_09_20/proj1/app.py
```python
from flask import Flask, request, jsonify, make_response
from flask_sqlalchemy import SQLAlchemy
import uuid
from werkzeug.security import generate_password_hash, check_password_hash
import jwt
import datetime
from functools import wraps
app = Flask(__name__)
app.config['SECRET_KEY'] = 'thisissecret'
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://///Users/ankurkumar/Documents/Git/Python_Flask/06_09_20/proj1/user.db"
db = SQLAlchemy(app)
class User(db.Model):
id = db.Column(db.Integer,primary_key=True)
public_id = db.Column(db.String(50), unique=True)
name = db.Column(db.String(50))
password = db.Column(db.String(80))
admin = db.Column(db.Boolean)
class Todo(db.Model):
id = db.Column(db.Integer,primary_key=True)
text = db.Column(db.String(50))
compile = db.Column(db.Boolean)
user_id = db.Column(db.String(50))
def token_required(f):
@wraps(f)
def decorated(*args,**kwargs):
token = None
if 'x-access-token' in request.headers:
token = request.headers['x-access-token']
if not token:
return jsonify({'message':'Token is missing!'}),401
        try:
            data = jwt.decode(token, app.config['SECRET_KEY'])
            current_user = User.query.filter_by(public_id=data['public_id']).first()
        except Exception:
            return jsonify({'message': 'Token is invalid'}), 401
return f(current_user,*args,**kwargs)
return decorated
@app.route('/user',methods=['GET'])
@token_required
def get_all_users(current_user):
if not current_user.admin:
        return jsonify({'message': 'Cannot perform this operation!'})
users = User.query.all()
output = []
for user in users:
user_data = { }
user_data['public_id'] = user.public_id
user_data['name'] = user.name
        user_data['password'] = user.password
user_data['admin'] = user.admin
output.append(user_data)
return jsonify({'user':output})
@app.route('/user/<public_id>',methods=['GET'])
@token_required
def get_one_user(current_user,public_id):
if not current_user.admin:
        return jsonify({'message': 'Cannot perform this operation!'})
user = User.query.filter_by(public_id=public_id).first()
if not user:
return jsonify({'message':'User not found'})
user_data = { }
user_data['public_id'] = user.public_id
user_data['name'] = user.name
    user_data['password'] = user.password
user_data['admin'] = user.admin
return jsonify({'user':user_data})
@app.route('/user', methods=['POST'])
@token_required
def create_user(current_user):
if not current_user.admin:
        return jsonify({'message': 'Cannot perform this operation!'})
data = request.get_json()
print(data)
hashed_password = generate_password_hash( data["password"], method='sha256')
    new_user = User(public_id=str(uuid.uuid4()), name=data['name'], password=hashed_password, admin=False)
db.session.add(new_user)
db.session.commit()
    return jsonify({'message': 'New user created.'})
@app.route('/user/<public_id>',methods=['PUT'])
@token_required
def promote_user(current_user,public_id):
if not current_user.admin:
        return jsonify({'message': 'Cannot perform this operation!'})
user = User.query.filter_by(public_id=public_id).first()
if not user:
return jsonify({'message':'User not found'})
user.admin = True
db.session.commit()
return jsonify({'message' : 'User has been promoted.'})
@app.route('/user/<public_id>',methods=['DELETE'])
@token_required
def delete_user(current_user,public_id):
if not current_user.admin:
        return jsonify({'message': 'Cannot perform this operation!'})
user = User.query.filter_by(public_id=public_id).first()
if not user:
return jsonify({'message':'User not found'})
db.session.delete(user)
db.session.commit()
return jsonify({'message':'The user has been deleted.'})
@app.route('/login',methods=['GET'])
def login():
auth = request.authorization
if not auth or not auth.username or not auth.password:
return make_response('Could not verify',401,{'WWW-Authenticate':'Basic realm="Login Required!"' })
user = User.query.filter_by(name=auth.username).first()
if not user:
return make_response('Could not verify',401,{'WWW-Authenticate':'Basic realm="Login Required!"' })
if check_password_hash(user.password,auth.password):
token = jwt.encode({'public_id':user.public_id , 'exp':datetime.datetime.utcnow()+datetime.timedelta(minutes=40)},app.config['SECRET_KEY'])
return jsonify({'token': token.decode('UTF-8')})
return make_response('Could not verify',401,{'WWW-Authenticate':'Basic realm="Login Required!"' })
@app.route('/todo', methods=['GET'])
@token_required
def get_all_todos(current_user):
todos = Todo.query.filter_by(user_id=current_user.id).all()
output = []
for todo in todos:
todo_data = {}
todo_data['id'] = todo.id
todo_data['text'] = todo.text
todo_data['compile'] = todo.compile
output.append(todo_data)
return jsonify({'message':output})
@app.route('/todo/<todo_id>', methods=['GET'])
@token_required
def get_one_todo(current_user,todo_id):
todo = Todo.query.filter_by(id=todo_id,user_id=current_user.id).first()
if not todo:
return jsonify({ 'message':'Todo Not Found.'})
todo_data = {}
todo_data['id'] = todo.id
todo_data['text'] = todo.text
todo_data['compile'] = todo.compile
return jsonify( todo_data )
@app.route('/todo', methods=['POST'])
@token_required
def create_todo(current_user):
data = request.get_json()
new_todo = Todo(text=data['text'],compile=False, user_id = current_user.id)
db.session.add(new_todo)
db.session.commit()
return jsonify({ 'message':'Todo created!' })
@app.route('/todo/<todo_id>',methods=['PUT'])
@token_required
def complete_todo(current_user,todo_id):
todo = Todo.query.filter_by(id=todo_id,user_id=current_user.id).first()
if not todo:
return jsonify({ 'message':'Todo Not Found.'})
todo.compile = True
db.session.commit()
return jsonify({ 'message':'Todo item is complete'})
@app.route('/todo/<todo_id>',methods=['DELETE'])
@token_required
def delete_todo(current_user, todo_id):
    todo = Todo.query.filter_by(id=todo_id, user_id=current_user.id).first()
    if not todo:
        return jsonify({'message': 'Todo Not Found.'})
    db.session.delete(todo)
    db.session.commit()
    return jsonify({'message': 'Todo deleted.'})
if __name__ == "__main__":
app.run(debug=True)
```
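The app above never creates its tables, and every `/user` route requires an existing admin, so some one-off bootstrapping is needed. A sketch, assuming the module above is saved as `app.py` (the username and password are placeholders; newer Flask-SQLAlchemy versions require wrapping this in `with app.app_context():`):
```python
import uuid
from werkzeug.security import generate_password_hash
from app import db, User

db.create_all()  # create user.db with the User and Todo tables
admin = User(public_id=str(uuid.uuid4()),
             name='admin',
             password=generate_password_hash('change-me', method='sha256'),
             admin=True)
db.session.add(admin)
db.session.commit()
```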
#### File: Python_Flask/09MongoCRUD/app.py
```python
from flask import Flask, render_template, request, redirect
from pymongo import MongoClient
from bson.objectid import ObjectId
# from bson.code import Code
conn = MongoClient()
db = conn.iotdash #conn.name_of_db
collection = db.iotdash2 #db.table_name
app = Flask(__name__)
@app.route('/')
def index():
return render_template("dash.html")
@app.route('/create')
def create():
return render_template("insert/newentry.html")
@app.route('/create1')
def student():
id = request.args['id']
name = request.args['name']
city = request.args['city']
rec_id1 = collection.insert_one({ "ID": int(id) ,"NAME": name ,"CITY": city })
#print(" Could not create table .")
print (rec_id1)
    return redirect('/')  # render_template("insert.html", result1=rec_id1)
#iotdash2 is the name of the table[collection] to be created
@app.route('/read')
def reader():
try:
conn = MongoClient()
print("Successfully connected .")
except:
print(" Could not connect to MongoDb.")
db = conn.iotdash
ret = db.iotdash2.find({})
#dict=[]
#for i in ret:
# dict.append(i)
# print(i)
#print(cursor)
return render_template("display/displayread.html",ret1s = ret )
@app.route('/newupdate' , methods = ['GET','POST'])
def newupdate():
id = request.values.get("_id")
task = collection.find({"_id":ObjectId(id)})
return render_template('newupdate.html',tasks = task)
@app.route('/action', methods=['POST'])
def action():
id1 = request.values.get("_id")
name = request.values.get("name")
id = request.values.get("id")
city = request.values.get("city")
collection.update({"_id":ObjectId(id1)} , {'$set':{"NAME":name , "ID":id , "CITY":city }})
return redirect("/")
@app.route('/newdelete',methods=['GET'])
def newdelete():
id1 = request.values.get("_id")
collection.remove( {"_id":ObjectId(id1)} )
return redirect("/")
@app.route('/update')
def update():
return render_template("update.html")
@app.route('/Mongoupdate',methods=['GET','POST'])
def Mongoupdate():
#if request.method == 'GET':
oldxname = request.args['oldxname'] # request.values.get['oldname']
newxname = request.args['newxname']
print(oldxname,newxname)
#oldxname = request.values.get("oldxname")
#newxname = request.values.get("newxname")
try:
conn = MongoClient()
print("Successfully connected .")
except:
print(" Could not connect to MongoDb.")
    try:
        db = conn.iotdash
        db.iotdash2.update_one({"x": int(oldxname)}, {'$set': {"x": int(newxname)}})
    except Exception:
        print("Update failed.")
return render_template("display/updatedisplay.html")
@app.route('/delete')
def delete():
return render_template("delete.html")
@app.route('/Mongodelete',methods=['POST','GET'])
def Mongodelete():
if request.method == 'POST':
delname = request.form['delname']
collection.remove({ "x" : int(delname) })
return render_template("display/deletedisplay.html")
if __name__ == '__main__':
app.run(debug = True)
```
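Note that the try/except around `MongoClient()` in `reader` and `Mongoupdate` above can never fail: PyMongo's constructor does not touch the network. A sketch of an explicit connectivity check:
```python
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure

client = MongoClient(serverSelectionTimeoutMS=2000)
try:
    client.admin.command('ping')  # forces a round trip to the server
    print("Successfully connected.")
except ConnectionFailure:
    print("Could not connect to MongoDB.")
```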
#### File: mongoengine/base/utils.py
```python
import re
class LazyRegexCompiler:
"""Descriptor to allow lazy compilation of regex"""
def __init__(self, pattern, flags=0):
self._pattern = pattern
self._flags = flags
self._compiled_regex = None
@property
def compiled_regex(self):
if self._compiled_regex is None:
self._compiled_regex = re.compile(self._pattern, self._flags)
return self._compiled_regex
def __get__(self, instance, owner):
return self.compiled_regex
def __set__(self, instance, value):
raise AttributeError("Can not set attribute LazyRegexCompiler")
```
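A usage sketch of the descriptor above; the pattern is compiled once, on first attribute access, and the compiled regex is shared by class and instance lookups:
```python
class Document:
    # Class-level descriptor; re.compile is deferred until first use
    word_re = LazyRegexCompiler(r"\w+")

print(Document.word_re.findall("lazy compilation works"))
# ['lazy', 'compilation', 'works']
```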
#### File: Python_Flask/flaskdynamo_v1_completeWithCardsTwoPage_API_Version_json_req_type (copy)/2paginator.py
```python
from flask import Flask, render_template, request, redirect, jsonify
from flask_cors import CORS, cross_origin
from datetime import datetime
from flask import Blueprint
from flask_paginate import Pagination, get_page_parameter
import botocore
import boto3
import decimal
import logging
import time
import argparse
import json
import random
import string
from boto3.dynamodb.conditions import Key, Attr
# dynamodb = boto3.resource('dynamodb')#, region_name='us-west-2', endpoint_url="http://localhost:8000")
# table = dynamodb.Table('IOT4')
# AllowedActions = ['both', 'publish', 'subscribe']
app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
@app.route('/temp2')
def temp2():
# startToken = request.values.get("startToken")
# print(startToken)
client = boto3.client('dynamodb')
paginator = client.get_paginator('scan')
tasks = 1
startToken = 1
response_iterator = paginator.paginate(
TableName="IOT4",
# Limit=3
PaginationConfig={
'MaxItems': 5,
'PageSize': 5,
# 'StartingToken':startToken #page['LastEvaluatedKey']
}
)
for page in response_iterator:
# print(page['Items'])
# print(page)
tasks = page['Items']
# return (jsonify(page['Items']))
for key in page:
if key == "LastEvaluatedKey":
print(page['LastEvaluatedKey']['_id']['S'])
startToken = page['LastEvaluatedKey']['_id']['S']
return render_template('2paginator.html',startToken = startToken, tasks= tasks)
# if page['LastEvaluatedKey'] != Null:
# startToken = page['LastEvaluatedKey']['_id']['S']
# print (startToken)
# else:
# startToken = 0
# print(startToken)
# print(response_iterator)
# return response_iterator
@app.route('/temp3',methods=['POST'])
def temp3():
startToken = request.values.get("startToken")
print(startToken)
client = boto3.client('dynamodb')
paginator = client.get_paginator('scan')
tasks = 1
# startToken = 1
response_iterator = paginator.paginate(
TableName="IOT4",
# Limit=3
PaginationConfig={
'MaxItems': 5,
'PageSize': 5,
'StartingToken': { '_id':'<PASSWORD>6 15:<PASSWORD>' } #page['LastEvaluatedKey']
}
)
print(response_iterator)
    for page in response_iterator:
        print(page)
    return "ok"
# for page in response_iterator:
# # print(page['Items'])
# # print(page)
# tasks = page['Items']
# return (jsonify(page['Items']))
# for key in page:
# if key == "LastEvaluatedKey":
# print(page['LastEvaluatedKey']['_id']['S'])
# startToken = page['LastEvaluatedKey']['_id']['S']
# return (jsonify(startToken))
# return render_template('2paginator.html',startToken = startToken, tasks= tasks)
@app.route('/temp4')
def temp4():
# startToken = request.values.get("startToken")
# print(startToken)
client = boto3.client('dynamodb')
paginator = client.get_paginator('scan')
tasks = 1
# startToken = 1
response_iterator = paginator.paginate(
TableName="IOT4",
# Limit=3
PaginationConfig={
'MaxItems': 5,
'PageSize': 5
}
)
for page in response_iterator:
print(page)
return("qwe")
if __name__ == '__main__':
app.run(debug = True, host="192.168.0.117") # 192.168.43.140
```
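The resumption that `temp3` above gropes for is what boto3 paginators expose via `resume_token`, an opaque string rather than a raw `LastEvaluatedKey` dict. A sketch against the same hypothetical `IOT4` table:
```python
import boto3

client = boto3.client('dynamodb')
paginator = client.get_paginator('scan')

page_iterator = paginator.paginate(
    TableName='IOT4',
    PaginationConfig={'MaxItems': 5, 'PageSize': 5},
)
for page in page_iterator:
    items = page['Items']

# Opaque continuation token; None once the table is exhausted
next_token = page_iterator.resume_token
if next_token:
    continued = paginator.paginate(
        TableName='IOT4',
        PaginationConfig={'MaxItems': 5, 'PageSize': 5,
                          'StartingToken': next_token},
    )
```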
#### File: Python_Flask/ServerMonitoringDashboard2/net_conn.py
```python
import socket
from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM
import psutil
AD = "-"
AF_INET6 = getattr(socket, 'AF_INET6', object())
proto_map = {
(AF_INET, SOCK_STREAM): 'tcp',
(AF_INET6, SOCK_STREAM): 'tcp6',
(AF_INET, SOCK_DGRAM): 'udp',
(AF_INET6, SOCK_DGRAM): 'udp6',
}
def main():
templ = "%-5s %-30s %-30s %-13s %-6s %s"
# print(psutil.net_connections(kind='inet'))
print(templ % (
"Proto", "Local address", "Remote address", "Status", "PID",
"Program name"))
proc_names = {}
jsonmsg = []
for p in psutil.process_iter(attrs=['pid', 'name']):
proc_names[p.info['pid']] = p.info['name']
for c in psutil.net_connections(kind='inet'):
laddr = "%s:%s" % (c.laddr)
raddr = ""
if c.raddr:
raddr = "%s:%s" % (c.raddr)
jsonmsg.append({"Proto": proto_map[(c.family, c.type)],
"Local address": laddr,
"Remote address": raddr or AD,
"Status": c.status,
"PID": c.pid or AD,
"Program name": proc_names.get(c.pid, '?')[:15]
})
# print(templ % (
# proto_map[(c.family, c.type)],
# laddr,
# raddr or AD,
# c.status,
# c.pid or AD,
# proc_names.get(c.pid, '?')[:15],
# ))
print(jsonmsg)
if __name__ == '__main__':
main()
```
#### File: ServerMonitoringDashboard3/cpustat/views.py
```python
from flask_classy import FlaskView, route
from flask import request
import json
class CPUSTAT(FlaskView):
def index(self):
return "API stuff"
@route('/show/')
def show(self):
return "Show API"
@route('/cpustat/dict/', methods=['POST']) # with limit in the query
def cpustatdict(self):
data = (request.data.decode('utf-8'))
dataDict = json.loads(data)
lim = dataDict["limit"]
finmsg = {}
from pymongo import MongoClient
conn = MongoClient()
db = conn.iotdash5
cursor = db.alluse.find({}, {"_id": 0}).sort('_id', -1).limit(lim)
data = [x for x in cursor] # Output with count as in aws dynamodb
finmsg["Items"] = data
finmsg["Count"]= len(data)
# print (finmsg)
return json.dumps(finmsg)
@route('/cpuusers/', methods=['GET'])
def cpuusers(self):
import psutil
msg=[]
finmsg={}
for c in psutil.users():
msg.append({
"name":c.name,
"terminal":c.terminal,
"host":c.host,
"started":c.started,
"pid":c.pid
})
finmsg["Items"] = msg
finmsg["Count"] = len(msg)
return json.dumps(finmsg)
@route('/cpupid/',methods=['GET'])
def cpupid(self):
import psutil
procs = {p.pid: p.info for p in psutil.process_iter(attrs=['name', 'username'])}
return json.dumps(procs)
```
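The class above is never registered with an application; a minimal wiring sketch with Flask-Classy (routes land under the class's route base, `/cpustat/` by default):
```python
from flask import Flask

app = Flask(__name__)
CPUSTAT.register(app)  # exposes index, show, cpuusers, cpupid, ...

if __name__ == '__main__':
    app.run(debug=True)
```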
#### File: Python_Flask/To-Do-List-using-Flask-and-MongoDB-master/app.py
```python
from flask import Flask, render_template,request,redirect,url_for # For flask implementation
from pymongo import MongoClient # Database connector
from bson.objectid import ObjectId # For ObjectId to work
client = MongoClient('localhost', 27017) #Configure the connection to the database
db = client.camp2016 #Select the database
todos = db.todo #Select the collection
app = Flask(__name__)
title = "TODO with Flask"
heading = "ToDo Reminder"
#modify=ObjectId()
def redirect_url():
return request.args.get('next') or \
request.referrer or \
url_for('index')
@app.route("/list")
def lists ():
#Display the all Tasks
todos_l = todos.find()
a1="active"
return render_template('index.html',a1=a1,todos=todos_l,t=title,h=heading)
@app.route("/")
@app.route("/uncompleted")
def tasks ():
#Display the Uncompleted Tasks
todos_l = todos.find({"done":"no"})
a2="active"
return render_template('index.html',a2=a2,todos=todos_l,t=title,h=heading)
@app.route("/completed")
def completed ():
#Display the Completed Tasks
todos_l = todos.find({"done":"yes"})
a3="active"
return render_template('index.html',a3=a3,todos=todos_l,t=title,h=heading)
@app.route("/done")
def done ():
#Done-or-not ICON
id=request.values.get("_id")
task=todos.find({"_id":ObjectId(id)})
if(task[0]["done"]=="yes"):
todos.update({"_id":ObjectId(id)}, {"$set": {"done":"no"}})
else:
todos.update({"_id":ObjectId(id)}, {"$set": {"done":"yes"}})
redir=redirect_url() # Re-directed URL i.e. PREVIOUS URL from where it came into this one
# if(str(redir)=="http://localhost:5000/search"):
# redir+="?key="+id+"&refer="+refer
return redirect(redir)
#@<EMAIL>("/add")
#def add():
# return render_template('add.html',h=heading,t=title)
@app.route("/action", methods=['POST'])
def action ():
#Adding a Task
name=request.values.get("name")
desc=request.values.get("desc")
date=request.values.get("date")
pr=request.values.get("pr")
todos.insert({ "name":name, "desc":desc, "date":date, "pr":pr, "done":"no"})
return redirect("/list")
@app.route("/remove")
def remove ():
#Deleting a Task with various references
key=request.values.get("_id")
todos.remove({"_id":ObjectId(key)})
return redirect("/")
@app.route("/update")
def update ():
id=request.values.get("_id")
task=todos.find({"_id":ObjectId(id)})
return render_template('update.html',tasks=task,h=heading,t=title)
@app.route("/action3", methods=['POST'])
def action3 ():
#Updating a Task with various references
name=request.values.get("name")
desc=request.values.get("desc")
date=request.values.get("date")
pr=request.values.get("pr")
id=request.values.get("_id")
todos.update({"_id":ObjectId(id)}, {'$set':{ "name":name, "desc":desc, "date":date, "pr":pr }})
return redirect("/")
@app.route("/search", methods=['GET'])
def search():
#Searching a Task with various references
key=request.values.get("key")
refer=request.values.get("refer")
if(key=="_id"):
todos_l = todos.find({refer:ObjectId(key)})
else:
todos_l = todos.find({refer:key})
return render_template('searchlist.html',todos=todos_l,t=title,h=heading)
@app.route("/about")
def about():
return render_template('credits.html',t=title,h=heading)
if __name__ == "__main__":
app.run(debug=True)
# Careful with the debug mode..
``` |
{
"source": "A9K5/Resume-Scraper",
"score": 3
} |
#### File: A9K5/Resume-Scraper/7.py
```python
import PyPDF2
import sys
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
from pdfminer.converter import TextConverter
from io import StringIO
from collections import defaultdict
import re
import pandas as pd
def Solve(text):
z = []
s = ""
out = {}
for i in text:
if i == '\n':
s.replace("\n","")
z.append((s.encode('ascii', 'ignore')).decode("utf-8"))
s = ""
else:
s += i
for i in z:
if re.search("Name ",i):
out["Name"] = i[4:]
if re.search("Mob",i):
out["Mob"] = i[7:]
put = text[text.index("Skill")+6:text.index("Experience")-1]
out["Skill"]= (put.encode('ascii', 'ignore')).decode("utf-8").replace("\n","")
return out
path = sys.argv[1]
# path = "./Resume2.pdf"
pdf = PyPDF2.PdfFileReader(open(path, "rb"))
fp = open(path, 'rb')
num_of_pages = pdf.getNumPages()
# print("num_of_pages ",num_of_pages)
extract = ""
output = pd.DataFrame()
for i in range(0,num_of_pages):
inside = [i]
pagenos=set(inside)
rsrcmgr = PDFResourceManager()
retstr = StringIO()
codec = 'utf-8'
laparams = LAParams()
device = TextConverter(rsrcmgr, retstr, codec=codec, laparams=laparams)
interpreter = PDFPageInterpreter(rsrcmgr, device)
password = ""
maxpages = 0
caching = True
text = ""
# print(pagenos)
for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages, password=password,caching=caching, check_extractable=True):
interpreter.process_page(page)
text = retstr.getvalue()
retstr.truncate(0)
out = Solve(text)
output = output.append(out, ignore_index=True)
output.to_csv('./List2.csv', index = False)
``` |
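For comparison, pdfminer.six's high-level API does the resource-manager/interpreter wiring above internally; a sketch of the same per-page extraction:
```python
from pdfminer.high_level import extract_text

whole_document = extract_text(path)
first_page = extract_text(path, page_numbers=[0])  # zero-based page indices
```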
{
"source": "a9w/Fat2_polarizes_WAVE",
"score": 3
} |
#### File: functions/measure/region.py
```python
import numpy as np
import skfmm
from skimage.measure import find_contours, approximate_polygon
from ..segment import interface_endpoints_coords, edge_between_neighbors
from ..utils import points_to_angle
def measure_one_hemijunction(cell_s, cell_r, interface):
"""
Measure traits of a single hemijunction.
Parameters
----------
cell_s : 2D bool ndarray
Pixels in the sending cell are True, rest are False
cell_r : 2D bool ndarray
Pixels in the receiving cell are True, rest are False
interface : 2D bool ndarray
Pixels in the interface are True, rest are False
Returns
-------
hj_traits : dict with these keys
"hj_area_px2" (int) number of pixels in the hemijunction
"tip_coords" (tuple of ints) coordinates of hemijunction tip
"base_coords" (tuple of ints) coordinates of hemijunction base
"prot_len_px" (float) protrusion length in px units
"prot_angle_rad" (float) angle from base to tip in radians
"prot_angle_deg" (float) angle from base to tip in degrees
"edge_len_nonstrt_px" (float) the curving, pixelated length of the interface
"edge_len_strt_px" (float) the straight-line length of the interface
"edge_angle_rad" (float)
"edge_angle_deg" (float)
"endpoint_1_coords" (tuple of floats) coordinates of one interface endpoint
"endpoint_2_coords" (tuple of floats) coordinates of other interface endpoint
"""
# Measure hemijunction traits
hj_area_px2 = np.sum(np.logical_and(cell_r, interface))
hj_traits = {"hj_area_px2": hj_area_px2}
tip, base, length_internal = protrusion_length_internal_path(
cell_s, cell_r, interface
)
hj_traits["tip_coords"] = tip
hj_traits["base_coords"] = base
hj_traits["prot_len_px"] = length_internal
angle_rad = points_to_angle(base, tip)
hj_traits["prot_angle_rad"] = angle_rad
hj_traits["prot_angle_deg"] = np.degrees(angle_rad)
# Measure "edge" traits (traits of the place where two cells meet)
hj_traits["edge_len_nonstrt_px"] = interface_length_wiggly(cell_s, cell_r)
hj_traits["edge_len_strt_px"] = interface_length_segment(cell_s, cell_r)
e1, e2 = interface_endpoints_coords(cell_s, cell_r)
edge_rad = points_to_angle(e1, e2)
hj_traits["edge_angle_rad"] = edge_rad
hj_traits["edge_angle_deg"] = np.degrees(edge_rad)
hj_traits["endpoint_1_coords"] = e1
hj_traits["endpoint_2_coords"] = e2
return hj_traits
def interface_length_segment(cell_a, cell_b):
"""
Measure straight-line length of an interface.
Parameters
----------
cell_a, cell_b: 2D bool ndarrays
Pixels in the cells are True, rest are False
Returns
-------
length : float
Segment length connecting the interface corners
"""
e1, e2 = interface_endpoints_coords(cell_a, cell_b)
length = np.linalg.norm(np.array(e1) - np.array(e2))
return length
def interface_length_wiggly(cell_a, cell_b):
"""
Measures the curvy length of an interface.
Parameters
----------
    cell_a, cell_b : 2D bool ndarrays
Pixels in the cells are True, rest are False
Returns
-------
length : float
Wiggly length of cell interface, calculated as the perimeter of the
interface mask, divided by 2, minus 2 (to account for its width)
"""
edge = edge_between_neighbors(cell_a, cell_b)
length = polygonal_perimeter(edge) / 2 - 2
return length
def neighbor_distance_cv(center_pt, neighbor_pts):
"""
Calculate the coefficient of variation of distances from a point.
Parameters
----------
center_pt : ndarray with shape (2,)
Coordinates of the point from which distances are measured
neighbor_pts : list of ndarrays with shape (2,)
Coordinates of neighbor points
Returns
-------
cv : float
        Coefficient of variation (standard deviation divided by mean) of the distances
"""
distances = []
for neighbor_pt in neighbor_pts:
distances.append(np.linalg.norm(center_pt - neighbor_pt))
distances_array = np.array(distances)
stdev = np.std(distances_array)
mean = np.mean(distances_array)
cv = stdev / mean
return cv
def polygonal_perimeter(shape, tolerance=1):
"""
Use the polygonal approximation of a pixelated shape to estimate its perimeter.
Parameters
----------
shape: 2D bool ndarray
Pixels in the shape are True, rest are False.
tolerance: float
"Maximum distance from original points of polygon to approximated polygonal
chain. If tolerance is 0, the original coordinate array is returned".
Higher tolerance means fewer vertices in the polygon approximation.
Returns
-------
total : float
Calculated as the lengths of a series of line segments of all contours.
"""
contours = find_contours(shape, 0.5, fully_connected="high")
total = 0
for contour in contours:
coords = approximate_polygon(contour, tolerance=tolerance)
# Distance from last coordinate to first
perimeter = np.linalg.norm(coords[-1] - coords[0])
# Add the distances between the rest of the successive coordinate pairs
for i in range(1, coords.shape[0]):
segment_length = np.linalg.norm(coords[i - 1] - coords[i])
perimeter += segment_length
total += perimeter
return total
def protrusion_length_internal_path(cell_s, cell_r, interface):
"""
Measure length of a protrusion as an internal path.
Each protrusion is defined in terms of a sending cell,
a receiving cell, and an interface mask that falls between
the two cells. Here, the FMM algorithm is used to trace the path
from the sending cell into the hemijunction. The pixel with the
highest value is called the 'tip'. The length of the path to that
point is called the 'length'.
Then again FMM is used to trace from the tip back to the sending cell.
The pixel with the lowest value is called the 'base'.
Parameters
----------
cell_s : 2D bool ndarray
Pixels in the sending cell are True, rest are False
cell_r : 2D bool ndarray
Pixels in the receiving cell are True, rest are False
interface : 2D bool ndarray
Pixels in the interface are True, rest are False
Returns
-------
length : float
Internal path length from cell_s to farthest point in interface
tip_single : tuple of 2 ints
Coordinates of the farthest point in interface, and if there are
multiple tied pixels, take the first of them
base_single : tuple of 2 ints
Coordinates of point on sending cell that is closest
to the protrusion tip, and if there are multiple tied pixels, take
the first of them
"""
# Make some boolean masks that will be needed
not_r_nor_s = np.invert(np.logical_or(cell_r, cell_s))
cell_r_not_interface = np.logical_and(cell_r, np.invert(interface))
# FMM from cell_s into interface
mask = np.logical_or(cell_r_not_interface, not_r_nor_s)
phi = np.ma.MaskedArray(np.invert(cell_s), mask)
dist_in_interface_from_s = skfmm.distance(phi)
    # Maximum value in interface is the "length". Note that it is possible
# for multiple pixels to 'tie' for farthest; take first by default.
length = np.max(dist_in_interface_from_s)
tip = np.nonzero(dist_in_interface_from_s == length)
# March from tip to cell_s
tip_zero = np.ones(np.shape(cell_s))
tip_zero[tip[0][0], tip[1][0]] = 0
phi = np.ma.MaskedArray(tip_zero, mask)
dist_in_cell_and_int_from_tip = skfmm.distance(phi)
# Get coordinates of base
dist_in_cell = np.ma.MaskedArray(dist_in_cell_and_int_from_tip, cell_r)
base = np.nonzero(dist_in_cell == np.min(dist_in_cell))
# If there are multiple tip and base points, just keep first ones
tip_out = (tip[0][0], tip[1][0])
base_out = (base[0][0], base[1][0])
return tip_out, base_out, length
def protrusion_angle(tip, base):
"""
Return angle from base to tip.
Parameters
----------
tip : tuple of 2 ints
base: tuple of 2 ints
Returns
-------
angle : float
Angle from base to tip, in radians
"""
tip_coords = np.ravel(np.array(tip))
base_coords = np.ravel(np.array(base))
protrusion_vector = tip_coords - base_coords
angle = np.arctan2(protrusion_vector[0], protrusion_vector[1])
return angle
```
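A quick sanity-check sketch for `polygonal_perimeter` (assuming the module's functions are importable) on a synthetic disk of radius 20; the polygonal estimate should land near the true circumference 2π·20 ≈ 125.7:
```python
from skimage.morphology import disk

shape = disk(20).astype(bool)
print(polygonal_perimeter(shape))
```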
#### File: functions/segment/tissue.py
```python
import numpy as np
from scipy.ndimage import binary_fill_holes
from skimage.filters import gaussian, threshold_local, threshold_otsu
from skimage.measure import label
from skimage.morphology import (
binary_dilation,
binary_erosion,
disk,
remove_small_objects,
)
from skimage.segmentation import clear_border, flood_fill, watershed
from ..utils import validate_mask, dilate_simple
from .interface import (
edge_between_neighbors,
interface_endpoints_mask,
interface_shape_edge_method,
refine_junction,
trim_interface,
)
def epithelium_watershed(
im,
mask=None,
im_seeds=None,
blurring_sigma=3,
threshold_sigma=51,
erosions=3,
min_obj_size=100,
make_background_seed=False,
background_seed_dilations=0,
):
"""
Watershed an epithelium.
Take a 2D micrograph of an epithelium and segment it into labeled
cell regions. Expects an input image in which connected regions
of relatively dark pixels are separated by relatively lighter
pixels.
If im_seeds is supplied, loop over the new seeds (im_labeled_centers),
and if one of them overlaps enough with a label in im_seeds, erode the
labeled region and then copy it into im_labeled_centers before running the
watershed.
Parameters
----------
im : 2D ndarray
Micrograph with cell interface label
mask : 2D bool ndarray, same shape as im
True pixels are kept, False pixels are masked
    im_seeds : 2D ndarray, same shape as im
        Optional labeled seed regions carried into the watershed
blurring_sigma : int
Sigma of Gaussian kernel used to blur the image
threshold_sigma : int
Sigma of Gaussian for locally adaptive threshold function
erosions : int
Number of erosions to perform when separating region centers
min_obj_size : int
Objects with an area smaller than this threshold are removed
make_background_seed : bool
Whether to expand mask and then floodfill background to make a
unitary background seed for watershed.
background_seed_dilations : int
How many dilations to apply to mask before floodfilling background
Returns
-------
im_labeled_regions : 2D ndarray
Each object has a unique integer ID
"""
mask = validate_mask(im, mask)
# Gaussian blur
im_blurred = gaussian(im, sigma=blurring_sigma, preserve_range=True)
# Adaptive threshold, inverting image
adap_th = threshold_local(im_blurred, block_size=threshold_sigma)
im_thresholded = im_blurred < adap_th
# Dilate mask
if make_background_seed:
for _ in range(background_seed_dilations):
mask = dilate_simple(mask)
# Set masked pixels to zero
im_thresholded[mask == 0] = 0
    # Flood-fill from the corner to make one background seed, or else fill holes
if make_background_seed:
im_ready_to_erode = flood_fill(im_thresholded, (0, 0), True)
else:
im_ready_to_erode = binary_fill_holes(im_thresholded)
# Erode objects
im_eroded = np.copy(im_ready_to_erode)
for _ in range(erosions):
im_eroded = binary_erosion(im_eroded)
# Remove small objects
im_seg = remove_small_objects(im_eroded, min_size=min_obj_size)
# Label regions
im_labeled_centers = label(im_seg)
# Incorporate im_seeds into im_labeled_centers before watershed
if im_seeds is not None:
for lab in np.unique(im_labeled_centers):
seed_region = im_seeds[im_labeled_centers == lab]
if np.any(seed_region == 0):
im_labeled_centers[im_labeled_centers == lab] = 0
im_labeled_centers[im_seeds != 0] = im_seeds[im_seeds != 0]
# Watershed segmentation using the labeled centers as seeds
im_labeled_regions = watershed(im_blurred, im_labeled_centers, mask=mask)
return im_labeled_regions
def largest_object_mask(im, blurring_sigma=15, threshold="adaptive"):
"""
Make a mask of the largest bright object in an image.
Make a mask containing the largest bright region of an image following
Gaussian blurring to remove small-scale variation. Bright object is True,
other regions False. Accepts optional blurring sigma and threshold value
arguments, or else uses default blurring_sigma and adaptive thresholding.
Parameters
----------
im: 2D ndarray
Grayscale image to be masked with bright features, dark background
blurring_sigma: int
Sigma of Gaussian kernel used to blur the image
threshold: int or str "adaptive"
Threshold to separate object from background pixels.
If "adaptive", Otsu's adaptive thresholding is used.
Returns
-------
mask: 2D bool ndarray
Same shape as im. True where largest bright object was identified,
False elsewhere
"""
im_blurred = gaussian(im, sigma=blurring_sigma, preserve_range=True)
if threshold == "adaptive":
threshold = threshold_otsu(im_blurred)
im_thresholded = im_blurred > threshold
    if not np.any(im_thresholded):
raise ValueError("All image intensities are below the threshold")
else:
im_labeled_regions = label(im_thresholded)
mask_with_holes = (
im_labeled_regions
== np.argmax(np.bincount(im_labeled_regions.flat)[1:]) + 1
)
mask = binary_fill_holes(mask_with_holes)
return mask
def select_border_adjacent(im):
"""
Select regions of image that are adjacent to image border.
Parameters
----------
im : 2D ndarray
Regions labeled with unique values
Returns
-------
border_adjacent : bool ndarray
True where regions are adjacent to border
"""
border_adjacent = clear_border(label(im)) == 0
return border_adjacent
def select_in_field(im, mask=None):
"""
Select regions that are adjacent to neither border nor mask.
Parameters
----------
im : 2D ndarray
Regions labeled with unique values
mask : bool ndarray
Optional mask, same shape as im
Returns
-------
in_field : bool ndarray
True where regions are with True part of mask, and are
not adjacent to mask edge nor image border
"""
mask = validate_mask(im, mask)
# Make the masks that will be combined
mask_adjacent = select_mask_adjacent(im, mask)
masked_or_mask_adjacent = np.logical_or(mask_adjacent, np.invert(mask))
border_adjacent = select_border_adjacent(im)
# Combine and invert the masks
excluded = np.logical_or(masked_or_mask_adjacent, border_adjacent)
in_field = np.invert(excluded)
return in_field
def select_mask_adjacent(im, mask=None):
"""
Select regions of image that are adjacent to a mask.
Parameters
----------
im : ndarray
Regions labeled with unique values
mask : bool ndarray
Optional mask, same shape as im
Returns
-------
mask_adjacent : bool ndarray
True where regions within mask are adjacent to mask;
returns all False if no mask is provided
"""
if mask is None or np.all(mask):
return np.zeros(np.shape(im), dtype=bool)
# Apply mask, then relabel so that labels count from 1 sequentially
im_masked = np.copy(im) * mask
im_labels = label(im_masked)
regions = np.unique(im_labels)
mask_eroded = binary_erosion(mask)
# Get IDs in True part of mask adjacent to False part of mask
peripheral_ids = np.unique(np.invert(mask_eroded) * im_labels)
# Make bool array of same length as regions, True where
# region ID are adjacent to the mask
peripheral_bools = np.isin(regions, peripheral_ids)
# Apply bool array to labeled image to mask final mask
mask_adjacent = peripheral_bools[im_labels] * mask
return mask_adjacent
def segment_hemijunctions(
im_labels, im_intensities, edge_range=(10, 200), area_range=(20, 2000)
):
"""
Segment all hemijuctions of a tissue labeled with a cell membrane marker.
Ignores all regions in im_labels that have an ID of 0.
Parameters
----------
im_labels : 2D ndarray
Segmented micrograph
im_intensities : 2D ndarray
Corresponding image of pixel intensities
Returns
-------
im_labels_refined : 2D ndarray
Same shape and label set as im_labels, but the interfaces have been
refined by converted each cell-cell interface to the shortest path line
through the segmented fluorescent interface mask.
im_labels_hjs : 2D ndarray
A labeled image with the same overall shape as im_labels, but instead
of the cells proper, it is the hemijunctions that are labeled, with
each labeled with the same integer ID as the cell that "sent" it.
"""
# Get the set of neighbors for each cell
cells_and_neighbors = neighbor_array_nr(im_labels)
# A place to store the interfaces and refined labeled regions
im_labels_hjs = np.zeros_like(im_labels)
im_labels_refined = np.copy(im_labels)
for pair in cells_and_neighbors:
if 0 not in pair:
# Make a bool image for each cell in the pair
cell_1_lab, cell_2_lab = pair[0], pair[1]
cell_1 = im_labels == cell_1_lab
cell_2 = im_labels == cell_2_lab
# Crudely measure edge length, check that it falls within range
int_edge_len = np.sum(edge_between_neighbors(cell_1, cell_2))
if int_edge_len > edge_range[0] and int_edge_len < edge_range[1]:
interface = interface_shape_edge_method(im_intensities, cell_1, cell_2)
interface = trim_interface(cell_1, cell_2, interface)
int_area = np.sum(interface)
if int_area > area_range[0] and int_area < area_range[1]:
# Update cell segmentation
try:
cell_1_new, cell_2_new = refine_junction(
cell_1, cell_2, interface
)
im_labels_refined[
np.logical_and(cell_1_new, interface)
] = cell_1_lab
im_labels_refined[
np.logical_and(cell_2_new, interface)
] = cell_2_lab
# Store HJ shapes
hj_2 = np.logical_and(interface, cell_1_new)
im_labels_hjs[hj_2] = cell_2_lab
hj_1 = np.logical_and(interface, cell_2_new)
im_labels_hjs[hj_1] = cell_1_lab
except Exception:
print(
f" Interface refinement failed.\n"
f" cell IDs: {cell_1_lab}, {cell_2_lab}"
)
else:
# Print cell info if the interface mask is the wrong area
print(
f" Interface with area outside of specified range.\n"
f" cell IDs: {cell_1_lab}, {cell_2_lab}\n"
f" interface area: {int_area}"
)
# Print cell info if the interface edge is the wrong length
else:
print(
f" Interface with edge length outside of specified range.\n"
f" cell IDs: {cell_1_lab}, {cell_2_lab}\n"
f" edge length: {int_edge_len}"
)
return im_labels_refined, im_labels_hjs
def cell_edges_mask(im, edge_dilation_factor, mask=None, periphery_excluded=True):
"""
Make a bool mask of all edge regions between segmented cells.
Parameters
----------
im : 2D ndarray
Regions labeled with unique values. 0 regions are treated as
background, masked out.
edge_dilation_factor: int
Radius of the disk-shaped structuring element by which the edges
will be dilated (in px)
mask : bool ndarray
Optional mask, same shape as im
periphery_excluded : bool
Whether edges of cells touching the image or mask border
should be included in the returned mask
Returns
-------
edges_mask : 2D bool ndarray
True where dilated cell edges are, elsewhere False
"""
# Make mask of region to be included
mask = validate_mask(im, mask)
mask = mask * (im > 1)
    if periphery_excluded:
mask = select_in_field(im, mask)
im_inbounds = im * mask
# Make array of cell neighbor pairs (non-redundant)
neighbor_pairs_raw = neighbor_array_nr(im_inbounds)
neighbor_pairs = neighbor_pairs_raw[neighbor_pairs_raw[:, 1] > 0]
# Make structuring element for edge dilation
edge_dil_shape = disk(edge_dilation_factor)
# Looping through all neighbor pairs, find edges, add to edge mask
edges_mask = np.zeros_like(im, dtype=bool)
for i in range(len(neighbor_pairs)):
cell_a = im == neighbor_pairs[i][0]
cell_b = im == neighbor_pairs[i][1]
edge = edge_between_neighbors(cell_a, cell_b)
edge_dil = binary_dilation(edge, selem=edge_dil_shape)
edges_mask[edge_dil] = True
return edges_mask
def cell_interiors_mask(im, edge_dilation_factor, mask=None, periphery_excluded=True):
"""
Make a bool mask of non-edge regions of segmented cells.
Parameters
----------
im : 2D ndarray
Regions labeled with unique values. 0 regions are treated as
background, masked out.
edge_dilation_factor: int
Radius of the disk-shaped structuring element by which the edges
will be dilated (in px)
mask : bool ndarray
Optional mask, same shape as im
periphery_excluded : bool
Whether interiors of cells touching the image or mask border
should be included in the returned mask
Returns
-------
interiors_mask : 2D bool ndarray
True in non-cell-edge regions, elsewhere false
"""
# Make structuring element for edge dilation
edge_dil_shape = disk(edge_dilation_factor)
# Make mask of region to be included
mask = validate_mask(im, mask)
mask = mask * (im > 1)
    if periphery_excluded:
mask = select_in_field(im, mask)
# Remove edges at periphery
mask = binary_erosion(mask, selem=edge_dil_shape)
im_inbounds = im * mask
# Make array of cell neighbor pairs (non-redundant)
neighbor_pairs_raw = neighbor_array_nr(im_inbounds)
neighbor_pairs = neighbor_pairs_raw[neighbor_pairs_raw[:, 1] > 0]
# Loop through neighbor pairs, find edges, remove from interiors_mask
interiors_mask = im_inbounds > 0
for i in range(len(neighbor_pairs)):
cell_a = im == neighbor_pairs[i][0]
cell_b = im == neighbor_pairs[i][1]
edge = edge_between_neighbors(cell_a, cell_b)
edge_dil = binary_dilation(edge, selem=edge_dil_shape)
interiors_mask[edge_dil] = False
return interiors_mask
def cell_vertices_mask(im, vertex_dilation_factor, mask=None, periphery_excluded=True):
"""
Make a bool mask of all vertex regions of segmented cells.
Parameters
----------
im : 2D ndarray
Regions labeled with unique values. 0 regions are treated as
background, masked out.
vertex_dilation_factor: int
Radius of the disk-shaped structuring element by which the vertices
will be dilated (in px)
mask : bool ndarray
Optional mask, same shape as im
periphery_excluded : bool
Whether vertices of the regions touching the image or mask border
should be included in the returned mask
Returns
-------
vertex_mask_dil : 2D bool ndarray
True where dilated cell vertices are, elsewhere False
"""
# Make mask of region to be included
mask = validate_mask(im, mask)
mask = mask * (im > 1)
    if periphery_excluded:
mask = select_in_field(im, mask)
im_inbounds = im * mask
# Make array of cell neighbor pairs (non-redundant)
neighbor_pairs_raw = neighbor_array_nr(im_inbounds)
neighbor_pairs = neighbor_pairs_raw[neighbor_pairs_raw[:, 1] > 0]
# Loop through neighbor pairs, find interface endpoints,
# add to vertex_mask
vertex_mask = np.zeros_like(im)
for i in range(len(neighbor_pairs)):
cell_a = im == neighbor_pairs[i][0]
cell_b = im == neighbor_pairs[i][1]
vertices = interface_endpoints_mask(cell_a, cell_b)
vertex_mask[vertices] = True
# Dilate the vertices
vertex_dil_shape = disk(vertex_dilation_factor)
vertex_mask_dil = binary_dilation(vertex_mask, selem=vertex_dil_shape)
return vertex_mask_dil
def neighbor_array_nr(im, mask=None, periphery_excluded=True):
"""
Make an non-redundant array of neighbor region pairs.
    Take a 2D ndarray with regions labeled by integers, and return a
    2-column array of non-redundant neighbor pairs: each row holds the
    labels of two adjacent regions, with each pair listed exactly once
    (larger label first).
Parameters
----------
im : 2D ndarray
Labeled image with unique integers for every region
mask : 2D bool ndarray
True pixels are kept, False pixels are masked
periphery_excluded : bool
Returns
-------
    neighbor_array : 2D ndarray, shape (n_pairs, 2)
        Non-redundant pairs of neighboring region labels, larger label first
"""
mask = validate_mask(im, mask)
# Increment all the labels, to make sure there is no zero
# Zeros will be reserved for masked pixels
im2 = np.copy(im) + np.ones(np.shape(im), dtype="uint16")
# Set masked pixels to zero
    im2[~mask] = 0
    # Determine the set of region IDs to be returned
unique_labels = np.unique(im2)
if periphery_excluded:
unique_labels_in_field = np.unique(im2 * select_in_field(im2, mask))
else:
unique_labels_in_field = np.copy(unique_labels)
# Iterate over labels, appending to a list of pairs
neighbor_list = []
for id in list(unique_labels):
if id != 0 and id in unique_labels_in_field:
region = im2 == id
dilated = dilate_simple(region)
neighbors_plus_self = set(np.unique(np.extract(dilated, im2)) - 1)
neighbors = neighbors_plus_self - set([id - 1])
# Make a (2,n) array of this id and its neighbor ids
a = np.array(list(neighbors))
b = np.full_like(a, id - 1)
neighbor_list.append(np.vstack((b, a)).T)
# Redundant array of all neighbor pairs
neighbor_array = np.vstack(tuple(neighbor_list))
# Remove duplicates by keeping cases where first is greater than second
keepers = neighbor_array[:, 0] > neighbor_array[:, 1]
neighbor_array = neighbor_array[keepers]
return neighbor_array
```
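A toy sketch of `neighbor_array_nr` (assuming the module's functions are importable) on four labeled quadrants, with `periphery_excluded=False` so the border-touching regions are kept:
```python
import numpy as np

im = np.zeros((6, 6), dtype=int)
im[:3, :3], im[:3, 3:], im[3:, :3], im[3:, 3:] = 1, 2, 3, 4

pairs = neighbor_array_nr(im, periphery_excluded=False)
print(pairs)  # one row per adjacency, larger label first, e.g. [[2 1] [3 1] ... [4 3]]
```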
#### File: functions/utils/path.py
```python
import os
import re
def select_files(input_dir, file_labels):
"""
Find a set of files from an input directory.
Written to make it easier to grab processed image files.
Parameters
----------
input_dir : str
Path to a directory with input files
file_labels : str or list of str
String to find after basename in file, or list of such strings.
Each one must complete the file name when appended to a basename.
Returns
-------
out_dict_ls : list of dicts
Checks all dataset in input_dir. For each one, if a file is present
for each element of file_labels, then a dict is added to out_dict_ls
for that dataset. This dict always includes at least two keys:
"basename" : str name of the basefile for the dataset
"basefile" : str path to the basefile for the dataset
It also includes a key for each element of file_labels. For each one
the value is a str of the path to the matching file
"""
if isinstance(file_labels, str):
file_labels = [file_labels]
out_dict_ls, basenames = [], []
# Make a list of all basenames in the directory
with os.scandir(input_dir) as input_dir_path_ls:
for item in input_dir_path_ls:
basename = get_basename(item)
if basename and not item.is_dir():
basenames.append(basename)
basenames_unique = list(set(basenames))
# If the basefile can be found for a basename, add a dict to out_dict_ls
input_dir_ls = os.listdir(input_dir)
for basename in basenames_unique:
dict_tmp = {"basename": basename}
for lab in file_labels:
file_to_check = f"{basename}{lab}"
if file_to_check in input_dir_ls:
dict_tmp[lab] = os.path.join(input_dir, file_to_check)
if f"{basename}.czi" in input_dir_ls:
dict_tmp["basefile"] = os.path.join(input_dir, f"{basename}.czi")
elif f"{basename}.tif" in input_dir_ls:
dict_tmp["basefile"] = os.path.join(input_dir, f"{basename}.tif")
if len(dict_tmp.keys()) == len(file_labels) + 2:
out_dict_ls.append(dict_tmp)
return out_dict_ls
def get_basename(path):
"""
Get the basename from a Path object.
Here basename is defined as this format:
[alphanumeric string]_[digits]
It is expected that this will be at the beginning of a file name.
Parameters
----------
path : Path object
Returns
-------
None if no match is found, or basename as a str if there is a match.
"""
    m = re.search(r"^\w+_\d+(?=[._])", path.name)
if m is None:
return None
else:
return m[0]
``` |
{
"source": "aa0910/short_text_clustering",
"score": 3
} |
#### File: code/proposed_model/nmf.py
```python
import pickle
import numpy
import random
with open('word_ids.py', 'r') as f:
    word_to_id = eval(f.readline())
components = None
with open('components_dump.txt', 'rb') as c:
components = pickle.load(c)
windows_sum = sum(components[0]) # windows
not_windows_sum = sum(components[1]) # not windows
# recall = probability of detecting 0 (windows) when it really is 0
# false positive rate = probability of detecting 0 (windows) when it really is 1
def get_score(filename, expected, threshold_val):
correct = 0
total = 0
unmatched = 0
true_positive = 0
true_negative = 0
false_positive = 0
false_negative = 0
with open(filename, 'r') as c:
lines = c.readlines()
for line in lines:
score_for_0 = 0.0
score_for_1 = 0.0
significant_words = 0
new_line = line.lower().strip().replace("\"", "")
words = new_line.split(" ")
for word in words:
if word in word_to_id:
if components[0][word_to_id[word]] > threshold_val or components[1][word_to_id[word]] > threshold_val:
score_for_0 += (components[0][word_to_id[word]] / windows_sum)
score_for_1 += (components[1][word_to_id[word]] / not_windows_sum)
significant_words += 1
#print(word)
if score_for_0 == score_for_1:
final_category = "Undecided"
unmatched += 1
score_for_0 += random.randint(1,100000)
score_for_1 += random.randint(1,100000)
#false_positive += 0.5
#true_positive += 0.5
#false_positive += 1
#false_negative += 1
elif score_for_0 > score_for_1:
final_category = "0"
if expected == 0:
correct += 1
true_positive += 1
else:
false_positive += 1
else:
final_category = "1"
if expected == 1:
correct += 1
true_negative += 1
else:
false_negative += 1
#print("%s, sig words: %s, 0: %s, 1: %s, final category: %s" % (line, significant_words, score_for_0, score_for_1, final_category))
total += 1
incorrect = total - correct - unmatched
# correct is true positive rate
return [correct, incorrect, total, unmatched, true_positive, true_negative, false_positive, false_negative]
not_windows_correct_rate = []
windows_correct_rate = []
total_size = 0
csvfile = open('roc.csv', 'w+')
for threshold in range(0, 101):
scaled_threshold = threshold / 10.0
correct, incorrect, total, unmatched, true_positive, true_negative, false_positive, false_negative = get_score('../windows/out_non_random100.csv', 1, scaled_threshold)
correct2, incorrect2, total2, unmatched2, true_positive2, true_negative2, false_positive2, false_negative2 = get_score('../windows/out_random100.csv', 0, scaled_threshold)
#print("windows", correct, incorrect, total, unmatched)
csvfile.write('%s,%s,%s,%s,%s\n' % (scaled_threshold, total, unmatched, false_positive / total, true_positive2 / (total2)))
csvfile.flush()
csvfile.close()
``` |
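A sketch for plotting the curve written to `roc.csv` above (zero-based columns 3 and 4 hold the false and true positive rates):
```python
import csv
import matplotlib.pyplot as plt

fpr, tpr = [], []
with open('roc.csv') as f:
    for row in csv.reader(f):
        fpr.append(float(row[3]))
        tpr.append(float(row[4]))

plt.plot(fpr, tpr, marker='.')
plt.xlabel('false positive rate')
plt.ylabel('true positive rate')
plt.show()
```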
{
"source": "aa1000/DeepImageRetrieval",
"score": 3
} |
#### File: aa1000/DeepImageRetrieval/DeepImageUtils.py
```python
import numpy as np
import os
from platform import platform
from glob import iglob
from keras.applications.inception_resnet_v2 import InceptionResNetV2, preprocess_input
from keras.applications.imagenet_utils import decode_predictions
from keras.preprocessing import image
from keras.layers import GlobalAveragePooling2D, Lambda
from keras.backend import l2_normalize
from keras.models import Model
# Full InceptionResNetV2 to use for classifiying and categorizing images to create the database
# We use the categories of inception net as a sort of a hierarchy to reduce search times and better retrieve images
inception_category = InceptionResNetV2(include_top=True, weights='imagenet')
# We also use the conv layers of InceptionResNetV2 without the fully connected output layers to act as a feature extractor for our images
inception_conv = InceptionResNetV2(include_top=False, weights='imagenet')
# We add a GlobalAveragePooling layer after the conv layers to reduce the dimensionality of the output
# conv layers have an output of shape (1, 8, 8, 1536); taking the global average of each channel yields a (1, 1536) tensor
global_pooling = GlobalAveragePooling2D()(inception_conv.output)
# then we normalize the average-pooled tensor so the results are more consistent when calculating the distance or KNN later
norm_lambda = Lambda(lambda x: l2_normalize(x,axis=1))(global_pooling)
# the input is the normal InceptionResNetV2 input of (n, 299, 299, 3) and the output is the normalised features of the norm layer
feature_extractor = Model(inputs=[inception_conv.input], outputs=[norm_lambda])
# compile the model (no training is needed on the layers we added)
feature_extractor.compile(optimizer='rmsprop', loss='mse')
def LoadAndProcessImage(img_path):
# load the image in the right size for the InceptionResNetV2 model
img = image.load_img(img_path, target_size=(299, 299))
# turn the image object into an RGB pixel array
img = image.img_to_array(img)
    # Keras' Inception can work on a batch of images at a time, so the input has to be 4D (n, width, height, channels)
img = np.expand_dims(img, axis=0)
# preprocessing the image to be a valid input for Inception (0, 255) pixel values -> (-1, 1)
return preprocess_input(img)
def PredictImageCategory(img_path):
# load the image and prepare it to be a proper input for InceptionResNet
img = LoadAndProcessImage(img_path)
# predict the class of the image
preds = inception_category.predict(img)
# turn the output into understanable named categories
# the model can predict the class of multiple images at once but here we would use only one image
# so we can take only the first entry in the output list
decoded_preds = decode_predictions(preds)[0]
# return the string names of the categories only
return [decoded_preds[i][1] for i in range(0, len(decoded_preds))]
# return a list of all (supported) image file paths under the given directory, searched recursively
def GetAllImagesInPath(path):
jpg_path = os.path.join(path, '**/*.jpg')
jpeg_path = os.path.join(path, '**/*.jpeg')
bmp_path = os.path.join(path, '**/*.bmp')
png_path = os.path.join(path, '**/*.png')
image_paths = []
image_paths.extend( iglob(jpg_path, recursive=True) )
image_paths.extend( iglob(jpeg_path, recursive=True) )
image_paths.extend( iglob(bmp_path, recursive=True) )
image_paths.extend( iglob(png_path, recursive=True) )
    # Windows filesystems are case-insensitive, so the uppercase variants are only needed on other platforms
if not platform().startswith('Windows'):
jpg_path = os.path.join(path, '**/*.JPG')
jpeg_path = os.path.join(path, '**/*.JPEG')
bmp_path = os.path.join(path, '**/*.BMP')
png_path = os.path.join(path, '**/*.PNG')
image_paths.extend( iglob(jpg_path, recursive=True) )
image_paths.extend( iglob(jpeg_path, recursive=True) )
image_paths.extend( iglob(bmp_path, recursive=True) )
image_paths.extend( iglob(png_path, recursive=True) )
return image_paths
def GetAllFolderInPath(path):
query = os.path.join(path, '**/')
return iglob(query, recursive=True)
def GetAllFeaturesInPath(path):
query = os.path.join(path, '**/*.npz')
return iglob(query, recursive=True)
def OpenImage(img_path):
# load the image object
img = image.load_img(img_path)
# turn the image object into an RGB pixel array and then into a float array so PyPlot can read it
return image.img_to_array(img)/255.
def Path2Name(img_path):
return img_path.split('/')[-1]
def CreateImageFeaturesVector(img_path):
img = LoadAndProcessImage(img_path)
features_vector = feature_extractor.predict(img)
return features_vector.flatten()
``` |
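A retrieval sketch built on the helpers above (paths are hypothetical); since the Lambda layer already L2-normalizes the features, a dot product is the cosine similarity:
```python
import numpy as np

query_vec = CreateImageFeaturesVector('query.jpg')
db_paths = GetAllImagesInPath('./database/')
db_vecs = np.array([CreateImageFeaturesVector(p) for p in db_paths])

scores = db_vecs @ query_vec               # cosine similarity per database image
ranked = [db_paths[i] for i in np.argsort(scores)[::-1]]
print(ranked[:5])                          # five closest matches
```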
{
"source": "aa1000/ImageRetrieval",
"score": 3
} |
#### File: aa1000/ImageRetrieval/ImageUtils.py
```python
import cv2
import numpy as np
from sklearn.cluster import KMeans
from skimage.feature import greycomatrix, greycoprops
from skimage import data
histsogram_centers_file_name = 'HistogramCenters.npy'
n_indexed_colors=256
n_color_histogram_categories=64
dct2_size = 100
GLCM_resize_size = 200
GLCM_step = 20
# speed up OpenCV by enabling optimizations and multiple threads
cv2.setUseOptimized(True)
cv2.setNumThreads(8)
def CreateIndexedColorClasses(feature_vectors):
# an approximation is more than enough, no need to have high accuracy and waste computing time
kmeans = KMeans(n_clusters=n_indexed_colors, n_init =1, tol=0.001, max_iter=100, random_state=0, n_jobs=1, algorithm='full')
    kmeans.fit(feature_vectors)
return kmeans.cluster_centers_
def ImgPathToPixelVector(img_path):
img = cv2.imread(img_path)
# a 200*200 image preserves most color information and it's enough for indexing colors
img = cv2.resize(img,(200,200))
reshaped_image = img.reshape((-1,3))
reshaped_image = np.float32(reshaped_image)
return reshaped_image
def RGBToIndex(img, color_classes):
# reconstruct the kmeans from center information
kmeans = KMeans(n_clusters=n_indexed_colors, random_state=0)
kmeans.cluster_centers_ = color_classes
# Reshape the image into a vector of pixels
pixel_vector = img.reshape(-1, 3)
# Get the nearest class for each pixel
labels = kmeans.predict(pixel_vector)
# Reshape the indexed image to the height and width of the original
return_img = labels
rows, cols, channels = img.shape
return return_img.reshape(rows, cols)
def IndexedImageToRGB(indexed_img, color_classes):
# create a new array of true color pixels
    rgb_colors = color_classes[indexed_img.flatten()]
    # reshape the new array to have 3 color channels and the dimensions of the original
    rows, cols = indexed_img.shape
    return rgb_colors.reshape(rows, cols, 3)
def CreateColorHistogram(img):
# Calculte and normalize the histogram for each channel then append them
histogram = cv2.calcHist([img],[0],None,[n_color_histogram_categories],[0,256])
histogram = cv2.normalize(histogram, None)
ch1_histogram = cv2.calcHist([img],[1],None,[n_color_histogram_categories],[0,256])
ch1_histogram = cv2.normalize(ch1_histogram, None)
histogram = np.vstack((histogram, ch1_histogram))
ch2_histogram = cv2.calcHist([img],[2],None,[n_color_histogram_categories],[0,256])
ch2_histogram = cv2.normalize(ch2_histogram, None)
histogram = np.vstack((histogram, ch2_histogram))
return histogram
def CreateIndexedColorHistogram(img, color_classes):
indexed_img = RGBToIndex(img, color_classes)
# cv2 calcHist only works with uint8 arrays (?) so we are limited to 256 colors
indexed_img = indexed_img.astype(np.uint8)
histogram = cv2.calcHist([indexed_img],[0],None,[n_indexed_colors],[0,n_indexed_colors])
histogram = cv2.normalize(histogram, None)
return histogram
def CreateDCT2(img):
# only the gray image is needed for the dct
grey_img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
# normalise the image content
NormImg = np.float32(grey_img)/255.0
Dct2 = cv2.dct(NormImg)
    Dct2Out = Dct2[:dct2_size, :dct2_size]
return Dct2Out.reshape(-1, 1)
def CreateGLCM(img):
grey_img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
resized_img = cv2.resize(grey_img,(GLCM_resize_size, GLCM_resize_size))
energy_features = []
contrast_features = []
sz = resized_img.shape
for i in range(0,sz[0],GLCM_step):
for j in range(0,sz[1],GLCM_step):
patch = resized_img[i:i+GLCM_step,j:j+GLCM_step]
#greycomatrix parameters:
            # 1) patch : part of image to generate co-occurrence matrix for
# 2 & 3): separation vector neighbor [1] and angle in radians [0] "1 to the right"
# 4) symmetric = True: add the matrix to its transpose
# 5) normed = True: divide each element in matrix by number of elements in it
glcm = greycomatrix(patch, [1], [0], 256, symmetric=True, normed=True)
energy_features.append(greycoprops(glcm, 'energy')[0, 0])
contrast_features.append(greycoprops(glcm, 'contrast')[0, 0])
    out_glcm_features = np.array(energy_features)
    out_glcm_features = np.vstack((out_glcm_features, contrast_features))
    return out_glcm_features.reshape(-1, 1)
def CreateImageFeaturesVector(img, colors_classes):
# Create the features of each category
features_vector = CreateColorHistogram(img)
indexed_histogram_features = CreateIndexedColorHistogram(img, colors_classes)
features_vector = np.vstack((features_vector, indexed_histogram_features))
dct2_features = CreateDCT2(img)
features_vector = np.vstack((features_vector, dct2_features))
    GLCM_features = CreateGLCM(img)
    features_vector = np.vstack((features_vector, GLCM_features))
return features_vector
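if __name__ == "__main__":
    # Minimal demo sketch (added for illustration, not part of the original
    # module): exercise CreateGLCM on a synthetic RGB image, using only the
    # imports already at the top of this file.
    demo_img = np.random.randint(0, 256, size=(200, 200, 3), dtype=np.uint8)
    glcm_features = CreateGLCM(demo_img)
    # Two properties (energy, contrast) for each 20x20 patch of the 200x200
    # resize: 2 * (200/20)**2 = 200 values, returned as a column vector.
    print(glcm_features.shape)  # (200, 1)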
``` |
{
"source": "Aa1024xx/fuzzy-commitment",
"score": 3
} |
#### File: fuzzy-commitment/fcs/test_fcs.py
```python
import unittest
import random
import secrets
import fcs
class TestFCS(unittest.TestCase):
def setUp(self):
self.threshold = 2
self.witness = secrets.token_bytes(32)
self.cs = fcs.FCS(len(self.witness)*8, self.threshold)
self.commitment = self.cs.commit(self.witness)
def random_flip_witness(self, numbits: int) -> bytes:
witness_mod = bytearray(self.witness)
bit_nums = random.sample(range(len(self.witness) * 8), numbits)
for bit_num in bit_nums:
witness_mod[bit_num // 8] ^= (1 << (bit_num % 8))
return bytes(witness_mod)
def test_unaltered_witness(self):
valid, key = self.cs.verify(self.commitment, self.witness)
self.assertTrue(valid)
def test_altered_tolerable(self):
witness_mod = self.random_flip_witness(self.threshold)
valid, key = self.cs.verify(self.commitment, witness_mod)
self.assertTrue(valid)
def test_altered_intolerable(self):
witness_mod = self.random_flip_witness(self.threshold + 1)
valid, key = self.cs.verify(self.commitment, witness_mod)
self.assertFalse(valid)
class TestFCSTwo(unittest.TestCase):
def setUp(self):
witlen = 1152
self.threshold = 277
self.witness = secrets.token_bytes((witlen+7)//8)
self.cs = fcs.FCS(len(self.witness)*8, self.threshold)
self.commitment = self.cs.commit(self.witness)
def random_flip_witness(self, numbits: int) -> bytes:
witness_mod = bytearray(self.witness)
bit_nums = random.sample(range(len(self.witness) * 8), numbits)
for bit_num in bit_nums:
witness_mod[bit_num // 8] ^= (1 << (bit_num % 8))
return bytes(witness_mod)
def test_unaltered_witness(self):
valid, key = self.cs.verify(self.commitment, self.witness)
self.assertTrue(valid)
def test_altered_tolerable(self):
witness_mod = self.random_flip_witness(self.threshold)
valid, key = self.cs.verify(self.commitment, witness_mod)
self.assertTrue(valid)
def test_altered_intolerable(self):
witness_mod = self.random_flip_witness(self.threshold+200)
valid, key = self.cs.verify(self.commitment, witness_mod)
self.assertFalse(valid)
if __name__ == '__main__':
unittest.main()
``` |
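Both suites depend on `random_flip_witness` introducing exactly `numbits` bit errors; `random.sample` draws distinct positions, so the Hamming distance equals `numbits`. A standalone sketch of the same idiom (illustrative only, independent of the `fcs` package):
```python
import random

def flip_bits(data: bytes, numbits: int) -> bytes:
    # XOR-flip numbits distinct bit positions, as the tests above do
    buf = bytearray(data)
    for bit in random.sample(range(len(data) * 8), numbits):
        buf[bit // 8] ^= 1 << (bit % 8)
    return bytes(buf)

original = bytes(16)
flipped = flip_bits(original, 5)
hamming = sum(bin(a ^ b).count("1") for a, b in zip(original, flipped))
print(hamming)  # 5: sample() guarantees distinct bit positions
```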
{
"source": "aa10402tw/RealTime-Segmentation",
"score": 2
} |
#### File: RealTime-Segmentation/seg_transforms/autoaugment.py
```python
import math
from enum import Enum
from typing import List, Tuple, Optional, Dict
import torch
from torch import Tensor
if __name__ == "__main__":
from torchvision_0_11_1_transforms import functional as F
from torchvision_0_11_1_transforms.functional import InterpolationMode
else:
from .torchvision_0_11_1_transforms import functional as F
from .torchvision_0_11_1_transforms.functional import InterpolationMode
#__all__ = ["AutoAugmentPolicy", "AutoAugment", "RandAugment", "TrivialAugmentWide"]
__all__ = ["RandAugment"]
def _apply_op(
img: Tensor, op_name: str, magnitude: float, interpolation: InterpolationMode, fill: Optional[List[float]]
):
if op_name == "ShearX":
img = F.affine(
img,
angle=0.0,
translate=[0, 0],
scale=1.0,
shear=[math.degrees(magnitude), 0.0],
interpolation=interpolation,
fill=fill,
)
elif op_name == "ShearY":
img = F.affine(
img,
angle=0.0,
translate=[0, 0],
scale=1.0,
shear=[0.0, math.degrees(magnitude)],
interpolation=interpolation,
fill=fill,
)
elif op_name == "TranslateX":
img = F.affine(
img,
angle=0.0,
translate=[int(magnitude), 0],
scale=1.0,
interpolation=interpolation,
shear=[0.0, 0.0],
fill=fill,
)
elif op_name == "TranslateY":
img = F.affine(
img,
angle=0.0,
translate=[0, int(magnitude)],
scale=1.0,
interpolation=interpolation,
shear=[0.0, 0.0],
fill=fill,
)
elif op_name == "Rotate":
img = F.rotate(img, magnitude, interpolation=interpolation, fill=fill)
elif op_name == "Brightness":
img = F.adjust_brightness(img, 1.0 + magnitude)
elif op_name == "Color":
img = F.adjust_saturation(img, 1.0 + magnitude)
elif op_name == "Contrast":
img = F.adjust_contrast(img, 1.0 + magnitude)
elif op_name == "Sharpness":
img = F.adjust_sharpness(img, 1.0 + magnitude)
elif op_name == "Posterize":
img = F.posterize(img, int(magnitude))
elif op_name == "Solarize":
img = F.solarize(img, magnitude)
elif op_name == "AutoContrast":
img = F.autocontrast(img)
elif op_name == "Equalize":
img = F.equalize(img)
elif op_name == "Invert":
img = F.invert(img)
elif op_name == "Identity":
pass
else:
raise ValueError(f"The provided operator {op_name} is not recognized.")
return img
class RandAugment(torch.nn.Module):
r"""RandAugment data augmentation method based on
`"RandAugment: Practical automated data augmentation with a reduced search space"
<https://arxiv.org/abs/1909.13719>`_.
If the image is torch Tensor, it should be of type torch.uint8, and it is expected
to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
If img is PIL Image, it is expected to be in mode "L" or "RGB".
Args:
num_ops (int): Number of augmentation transformations to apply sequentially.
magnitude (int): Magnitude for all the transformations.
num_magnitude_bins (int): The number of different magnitude values.
interpolation (InterpolationMode): Desired interpolation enum defined by
:class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
fill (sequence or number, optional): Pixel fill value for the area outside the transformed
image. If given a number, the value is used for all bands respectively.
"""
def __init__(
self,
num_ops: int = 2,
magnitude: int = 9,
num_magnitude_bins: int = 31,
interpolation: InterpolationMode = InterpolationMode.NEAREST,
fill: Optional[List[float]] = None,
) -> None:
super().__init__()
self.num_ops = num_ops
self.magnitude = magnitude
self.num_magnitude_bins = num_magnitude_bins
self.interpolation = interpolation
self.fill = fill
def _augmentation_space(self, num_bins: int, image_size: List[int]) -> Dict[str, Tuple[Tensor, bool]]:
return {
# op_name: (magnitudes, signed)
"Identity": (torch.tensor(0.0), False),
"ShearX": (torch.linspace(0.0, 0.3, num_bins), True),
"ShearY": (torch.linspace(0.0, 0.3, num_bins), True),
"TranslateX": (torch.linspace(0.0, 150.0 / 331.0 * image_size[0], num_bins), True),
"TranslateY": (torch.linspace(0.0, 150.0 / 331.0 * image_size[1], num_bins), True),
"Rotate": (torch.linspace(0.0, 30.0, num_bins), True),
"Brightness": (torch.linspace(0.0, 0.9, num_bins), True),
"Color": (torch.linspace(0.0, 0.9, num_bins), True),
"Contrast": (torch.linspace(0.0, 0.9, num_bins), True),
"Sharpness": (torch.linspace(0.0, 0.9, num_bins), True),
"Posterize": (8 - (torch.arange(num_bins) / ((num_bins - 1) / 4)).round().int(), False),
"Solarize": (torch.linspace(255.0, 0.0, num_bins), False),
"AutoContrast": (torch.tensor(0.0), False),
"Equalize": (torch.tensor(0.0), False),
}
def forward(self, img: Tensor, label: Tensor) -> tuple:
"""
img (PIL Image or Tensor): Image to be transformed.
label (PIL Image or Tensor): Label to be transformed.
Returns:
PIL Image or Tensor: Transformed image & label.
"""
fill = self.fill
if isinstance(img, Tensor):
if isinstance(fill, (int, float)):
fill = [float(fill)] * F.get_image_num_channels(img)
elif fill is not None:
fill = [float(f) for f in fill]
for _ in range(self.num_ops):
op_meta = self._augmentation_space(self.num_magnitude_bins, F.get_image_size(img))
op_index = int(torch.randint(len(op_meta), (1,)).item())
op_name = list(op_meta.keys())[op_index]
magnitudes, signed = op_meta[op_name]
magnitude = float(magnitudes[self.magnitude].item()) if magnitudes.ndim > 0 else 0.0
if signed and torch.randint(2, (1,)):
magnitude *= -1.0
img = _apply_op(img, op_name, magnitude, interpolation=self.interpolation, fill=fill)
if op_name in ["Identity", "ShearX", "ShearY", "TranslateX", "TranslateY", "Rotate"]:
label = _apply_op(label, op_name, magnitude, interpolation=self.interpolation, fill=fill)
return img, label
def __repr__(self) -> str:
s = self.__class__.__name__ + "("
s += "num_ops={num_ops}"
s += ", magnitude={magnitude}"
s += ", num_magnitude_bins={num_magnitude_bins}"
s += ", interpolation={interpolation}"
s += ", fill={fill}"
s += ")"
return s.format(**self.__dict__)
if __name__ == "__main__":
from PIL import Image
import matplotlib.pyplot as plt
import numpy as np
root = "G:/Codes/RealTime-Segementation/datasets/VOC2012"
img_path = f"{root}/JPEGImages/2007_000033.jpg"
label_path = f"{root}/SegmentationObject/2007_000033.png"
img = Image.open(img_path)
label = Image.open(label_path).convert('RGB')
plt.subplot(221), plt.title("Ori Image"), plt.imshow(np.asarray(img))
plt.subplot(222), plt.title("Ori Label"), plt.imshow(np.asarray(label))
transforms = RandAugment(magnitude=15)
img, label = transforms(img, label)
    plt.subplot(223), plt.title("Aug Image"), plt.imshow(np.asarray(img))
    plt.subplot(224), plt.title("Aug Label"), plt.imshow(np.asarray(label))
plt.show()
```
#### File: RealTime-Segmentation/seg_transforms/cutmix.py
```python
import numpy as np
import torch
from torch import Tensor
def rand_bbox(size, lam):
W = size[2]
H = size[3]
cut_rat = np.sqrt(1. - lam)
    cut_w = int(W * cut_rat)  # np.int is removed in NumPy >= 1.24
    cut_h = int(H * cut_rat)
# uniform
cx = np.random.randint(W)
cy = np.random.randint(H)
bbx1 = np.clip(cx - cut_w // 2, 0, W)
bby1 = np.clip(cy - cut_h // 2, 0, H)
bbx2 = np.clip(cx + cut_w // 2, 0, W)
bby2 = np.clip(cy + cut_h // 2, 0, H)
return bbx1, bby1, bbx2, bby2
class CutMix(torch.nn.Module):
""" CutMix based on "CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features"
    adapted from https://github.com/clovaai/CutMix-PyTorch/blob/2d8eb68faff7fe4962776ad51d175c3b01a25734/train.py#L279
"""
def __init__(
self,
beta: int=1,
cutmix_prob: float=0.3,
device: str='cpu',
) -> None:
super().__init__()
self.beta = beta
self.cutmix_prob = cutmix_prob
self.device = device
def forward(self, inputs: Tensor, labels: Tensor) -> tuple:
"""
img (PIL Image or Tensor): Image to be transformed.
label (PIL Image or Tensor): Label to be transformed.
Returns:
PIL Image or Tensor: Transformed image & label.
"""
r = np.random.rand(1)
if self.beta > 0 and r < self.cutmix_prob:
# generate mixed sample
lam = np.random.beta(self.beta, self.beta)
rand_index = torch.randperm(inputs.size()[0]).to(self.device)
bbx1, bby1, bbx2, bby2 = rand_bbox(inputs.size(), lam)
inputs[:, :, bbx1:bbx2, bby1:bby2] = inputs[rand_index, :, bbx1:bbx2, bby1:bby2]
labels[:, bbx1:bbx2, bby1:bby2] = labels[rand_index, bbx1:bbx2, bby1:bby2]
return inputs, labels
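if __name__ == "__main__":
    # Minimal demo sketch (added for illustration, not part of the original
    # file): images are NCHW tensors and labels are NHW segmentation maps,
    # matching forward()'s indexing. cutmix_prob=1.0 forces the mix to happen.
    imgs = torch.rand(4, 3, 64, 64)
    masks = torch.randint(0, 21, (4, 64, 64))
    mixed_imgs, mixed_masks = CutMix(beta=1, cutmix_prob=1.0)(imgs, masks)
    print(mixed_imgs.shape, mixed_masks.shape)  # shapes unchanged, contents mixed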
```
#### File: RealTime-Segmentation/seg_transforms/transforms.py
```python
import random
import numpy as np
import torch
from torchvision import transforms as T
from torchvision.transforms import functional as F
def pad_if_smaller(img, size, fill=0):
size = (size, size) if isinstance(size, int) else size
h, w = size
ow, oh = img.size
if ow < w or oh < h:
padh = h - oh if oh < h else 0
padw = w - ow if ow < w else 0
img = F.pad(img, (0, 0, padw, padh), fill=fill)
return img
class Compose:
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, image, target):
for t in self.transforms:
image, target = t(image, target)
return image, target
class RandomResize:
def __init__(self, min_size, max_size=None):
self.min_size = (min_size, min_size) if isinstance(min_size, int) else min_size
if max_size is None:
max_size = min_size
self.max_size = (max_size, max_size) if isinstance(max_size, int) else max_size
def __call__(self, image, target):
        h = random.randint(self.min_size[0], self.max_size[0])
        w = random.randint(self.min_size[1], self.max_size[1])
size = (h, w)
image = F.resize(image, size)
target = F.resize(target, size, interpolation=T.InterpolationMode.NEAREST)
return image, target
class RandomHorizontalFlip:
def __init__(self, flip_prob):
self.flip_prob = flip_prob
def __call__(self, image, target):
if random.random() < self.flip_prob:
image = F.hflip(image)
target = F.hflip(target)
return image, target
class RandomCrop:
def __init__(self, size):
self.size = (size, size) if isinstance(size, int) else size
def __call__(self, image, target):
image = pad_if_smaller(image, self.size)
target = pad_if_smaller(target, self.size, fill=255)
crop_params = T.RandomCrop.get_params(image, self.size)
image = F.crop(image, *crop_params)
target = F.crop(target, *crop_params)
return image, target
class CenterCrop:
def __init__(self, size):
self.size = (size, size) if isinstance(size, int) else size
def __call__(self, image, target):
image = F.center_crop(image, self.size)
target = F.center_crop(target, self.size)
return image, target
class PILToTensor:
def __call__(self, image, target):
image = F.pil_to_tensor(image)
target = torch.as_tensor(np.array(target), dtype=torch.int64)
return image, target
class ConvertImageDtype:
def __init__(self, dtype):
self.dtype = dtype
def __call__(self, image, target):
image = F.convert_image_dtype(image, self.dtype)
return image, target
class Normalize:
def __init__(self, mean, std):
self.mean = mean
self.std = std
def __call__(self, image, target):
image = F.normalize(image, mean=self.mean, std=self.std)
return image, target
if __name__ == "__main__":
from PIL import Image
import matplotlib.pyplot as plt
root = "G:/Codes/RealTime-Segementation/datasets/VOC2012"
img_path = f"{root}/JPEGImages/2007_000033.jpg"
label_path = f"{root}/SegmentationObject/2007_000033.png"
img = Image.open(img_path)
label = Image.open(label_path).convert('RGB')
plt.subplot(221), plt.title("Ori Image"), plt.imshow(np.asarray(img))
plt.subplot(222), plt.title("Ori Label"), plt.imshow(np.asarray(label))
transforms = Compose([
RandomResize((256, 256)),
PILToTensor()
])
img, label = transforms(img, label)
print(img.shape)
print(label.shape)
    plt.subplot(223), plt.title("Resized Image"), plt.imshow(np.asarray(img[0]))
    plt.subplot(224), plt.title("Resized Label"), plt.imshow(np.asarray(label))
plt.show()
``` |
{
"source": "Aa1141415869/vulfocus",
"score": 2
} |
#### File: vulfocus-api/tasks/views.py
```python
from django.shortcuts import render
from rest_framework import viewsets
from .serializers import TaskSetSerializer
from django.http import JsonResponse
from rest_framework.decorators import action
from .models import TaskInfo
from dockerapi.common import R
import django.utils.timezone as timezone
import json
# Create your views here.
class TaskSet(viewsets.ReadOnlyModelViewSet):
serializer_class = TaskSetSerializer
queryset = TaskInfo.objects.all().order_by('-create_date')
@action(methods=["get"], detail=True, url_path='get')
def get_task(self, request, pk=None):
task_info = self.get_object()
task_msg = task_info.task_msg
if task_info.task_status == 1:
            return JsonResponse(R.running(msg="Running", data=str(task_info.task_id)))
task_info.is_show = True
task_info.save()
if task_msg:
msg = json.loads(task_msg)
if msg["status"] == 200:
if not msg["data"]:
msg["data"] = {
"_now": int(timezone.now().timestamp())
}
else:
msg["data"]["_now"] = int(timezone.now().timestamp())
return JsonResponse(msg, status=200)
else:
return JsonResponse(msg, status=msg["status"])
return JsonResponse(R.ok())
@action(methods=["post"], detail=True, url_path='batch')
def get_batch_task(self, request, pk=None):
task_id_str = request.POST.get("task_ids", "")
task_id_list = task_id_str.split(",")
task_list = TaskInfo.objects.filter(task_id__in=task_id_list)
result = {}
for task_info in task_list:
result[str(task_info.task_id)] = {
"status": task_info.task_status,
"data": json.loads(task_info.task_msg)
}
return JsonResponse(R.ok(data=result))
``` |
{
"source": "aa1215/cssi_2018",
"score": 3
} |
#### File: fortune-teller/solution/model.py
```python
from google.appengine.ext import ndb
class Movie(ndb.Model):
title = ndb.StringProperty()
# media_type = ndb.StringProperty(required=True, default="Movie")
runtime = ndb.IntegerProperty(required=False)
rating = ndb.FloatProperty(required=False)
year = ndb.IntegerProperty(required=False)
# def __init__(self, movie_title, run_time, user_rating):
# self.title = movie_title
# self.runtime_mins = run_time
# self.rating = user_rating
class User(ndb.Model):
username = ndb.StringProperty(required=True)
password = ndb.StringProperty(required=True)
billing = ndb.StringProperty(required=True)
email = ndb.StringProperty(required=True)
# def __init__(self, user, passw, bill, mail):
# self.username = user
# self.password = <PASSWORD>
# self.bill = bill
# self.email = mail
# class TVShow(ndb.model):
# title = ndb.StringProperty(required=True)
# genre = ndb.StringProperty(required=True)
```
#### File: fortune-teller/start/main.py
```python
import webapp2
import os
import random
import jinja2
def get_fortune():
#add a list of fortunes to the empty fortune_list array
fortune_list=['the grass is greener where you water it',
'doubt kills more dreams than failure ever will',
'be a voice, not an echo',
'happy thoughts',
'c\'est la vie',
'see the good',
'create your own sunshine']
#use the random library to return a random element from the array
random_fortune = random.choice(fortune_list)
return random_fortune
#remember, you can get this by searching for jinja2 google app engine
template_loader = jinja2.FileSystemLoader(searchpath="./")
jinja_current_directory = jinja2.Environment(loader=template_loader)
class FortuneHandler(webapp2.RequestHandler):
def get(self):
self.response.write(get_fortune())
results_template = jinja_current_directory.get_template('templates/fortune-results.html')
self.response.write(results_template.render())
#add a post method
#def post(self):
class HelloHandler(webapp2.RequestHandler):
def get(self):
self.response.write('Hello World. Welcome to the root route of my app')
class GoodbyeHandler(webapp2.RequestHandler):
def get(self):
self.response.write('My response is Goodbye World.')
#the route mapping
app = webapp2.WSGIApplication([
    #this line routes the main url ('/') - also known as
#the root route - to the Fortune Handler
('/', HelloHandler),
('/predict', FortuneHandler), #maps '/predict' to the FortuneHandler
('/goodbye', GoodbyeHandler)
], debug=True)
``` |
{
"source": "aa1371/pandas",
"score": 3
} |
#### File: tseries/offsets/test_yqm_offsets.py
```python
import pytest
import pandas as pd
from pandas.tseries.offsets import (
BMonthBegin,
BMonthEnd,
BQuarterBegin,
BQuarterEnd,
BYearBegin,
BYearEnd,
MonthBegin,
MonthEnd,
QuarterBegin,
QuarterEnd,
YearBegin,
YearEnd,
)
@pytest.mark.parametrize("n", [-2, 1])
@pytest.mark.parametrize(
"cls",
[
MonthBegin,
MonthEnd,
BMonthBegin,
BMonthEnd,
QuarterBegin,
QuarterEnd,
BQuarterBegin,
BQuarterEnd,
YearBegin,
YearEnd,
BYearBegin,
BYearEnd,
],
)
def test_apply_index(cls, n):
offset = cls(n=n)
rng = pd.date_range(start="1/1/2000", periods=100000, freq="T")
ser = pd.Series(rng)
res = rng + offset
assert res.freq is None # not retained
assert res[0] == rng[0] + offset
assert res[-1] == rng[-1] + offset
res2 = ser + offset
# apply_index is only for indexes, not series, so no res2_v2
assert res2.iloc[0] == ser.iloc[0] + offset
assert res2.iloc[-1] == ser.iloc[-1] + offset
``` |
{
"source": "aa-182758/pandas",
"score": 2
} |
#### File: core/arrays/numpy_.py
```python
from __future__ import annotations
import numpy as np
from pandas._libs import lib
from pandas._typing import (
Dtype,
NpDtype,
Scalar,
npt,
)
from pandas.compat.numpy import function as nv
from pandas.core.dtypes.cast import construct_1d_object_array_from_listlike
from pandas.core.dtypes.dtypes import PandasDtype
from pandas.core.dtypes.missing import isna
from pandas.core import (
arraylike,
nanops,
ops,
)
from pandas.core.arraylike import OpsMixin
from pandas.core.arrays._mixins import NDArrayBackedExtensionArray
from pandas.core.construction import ensure_wrapped_if_datetimelike
from pandas.core.strings.object_array import ObjectStringArrayMixin
class PandasArray(
OpsMixin,
NDArrayBackedExtensionArray,
ObjectStringArrayMixin,
):
"""
A pandas ExtensionArray for NumPy data.
This is mostly for internal compatibility, and is not especially
useful on its own.
Parameters
----------
values : ndarray
The NumPy ndarray to wrap. Must be 1-dimensional.
copy : bool, default False
Whether to copy `values`.
Attributes
----------
None
Methods
-------
None
"""
# If you're wondering why pd.Series(cls) doesn't put the array in an
# ExtensionBlock, search for `ABCPandasArray`. We check for
# that _typ to ensure that users don't unnecessarily use EAs inside
# pandas internals, which turns off things like block consolidation.
_typ = "npy_extension"
__array_priority__ = 1000
_ndarray: np.ndarray
_dtype: PandasDtype
_internal_fill_value = np.nan
# ------------------------------------------------------------------------
# Constructors
def __init__(self, values: np.ndarray | PandasArray, copy: bool = False):
if isinstance(values, type(self)):
values = values._ndarray
if not isinstance(values, np.ndarray):
raise ValueError(
f"'values' must be a NumPy array, not {type(values).__name__}"
)
if values.ndim == 0:
# Technically we support 2, but do not advertise that fact.
raise ValueError("PandasArray must be 1-dimensional.")
if copy:
values = values.copy()
dtype = PandasDtype(values.dtype)
super().__init__(values, dtype)
@classmethod
def _from_sequence(
cls, scalars, *, dtype: Dtype | None = None, copy: bool = False
) -> PandasArray:
if isinstance(dtype, PandasDtype):
dtype = dtype._dtype
# error: Argument "dtype" to "asarray" has incompatible type
# "Union[ExtensionDtype, str, dtype[Any], dtype[floating[_64Bit]], Type[object],
# None]"; expected "Union[dtype[Any], None, type, _SupportsDType, str,
# Union[Tuple[Any, int], Tuple[Any, Union[int, Sequence[int]]], List[Any],
# _DTypeDict, Tuple[Any, Any]]]"
result = np.asarray(scalars, dtype=dtype) # type: ignore[arg-type]
if (
result.ndim > 1
and not hasattr(scalars, "dtype")
and (dtype is None or dtype == object)
):
# e.g. list-of-tuples
result = construct_1d_object_array_from_listlike(scalars)
if copy and result is scalars:
result = result.copy()
return cls(result)
@classmethod
def _from_factorized(cls, values, original) -> PandasArray:
return original._from_backing_data(values)
def _from_backing_data(self, arr: np.ndarray) -> PandasArray:
return type(self)(arr)
# ------------------------------------------------------------------------
# Data
@property
def dtype(self) -> PandasDtype:
return self._dtype
# ------------------------------------------------------------------------
# NumPy Array Interface
def __array__(self, dtype: NpDtype | None = None) -> np.ndarray:
return np.asarray(self._ndarray, dtype=dtype)
def __array_ufunc__(self, ufunc: np.ufunc, method: str, *inputs, **kwargs):
# Lightly modified version of
# https://numpy.org/doc/stable/reference/generated/numpy.lib.mixins.NDArrayOperatorsMixin.html
# The primary modification is not boxing scalar return values
# in PandasArray, since pandas' ExtensionArrays are 1-d.
out = kwargs.get("out", ())
result = ops.maybe_dispatch_ufunc_to_dunder_op(
self, ufunc, method, *inputs, **kwargs
)
if result is not NotImplemented:
return result
if "out" in kwargs:
# e.g. test_ufunc_unary
return arraylike.dispatch_ufunc_with_out(
self, ufunc, method, *inputs, **kwargs
)
if method == "reduce":
result = arraylike.dispatch_reduction_ufunc(
self, ufunc, method, *inputs, **kwargs
)
if result is not NotImplemented:
# e.g. tests.series.test_ufunc.TestNumpyReductions
return result
# Defer to the implementation of the ufunc on unwrapped values.
inputs = tuple(x._ndarray if isinstance(x, PandasArray) else x for x in inputs)
if out:
kwargs["out"] = tuple(
x._ndarray if isinstance(x, PandasArray) else x for x in out
)
result = getattr(ufunc, method)(*inputs, **kwargs)
if ufunc.nout > 1:
# multiple return values; re-box array-like results
return tuple(type(self)(x) for x in result)
elif method == "at":
# no return value
return None
elif method == "reduce":
if isinstance(result, np.ndarray):
# e.g. test_np_reduce_2d
return type(self)(result)
# e.g. test_np_max_nested_tuples
return result
else:
# one return value; re-box array-like results
return type(self)(result)
# ------------------------------------------------------------------------
# Pandas ExtensionArray Interface
def isna(self) -> np.ndarray:
return isna(self._ndarray)
def _validate_scalar(self, fill_value):
if fill_value is None:
# Primarily for subclasses
fill_value = self.dtype.na_value
return fill_value
def _values_for_factorize(self) -> tuple[np.ndarray, int]:
return self._ndarray, -1
# ------------------------------------------------------------------------
# Reductions
def any(
self,
*,
axis: int | None = None,
out=None,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_any((), {"out": out, "keepdims": keepdims})
result = nanops.nanany(self._ndarray, axis=axis, skipna=skipna)
return self._wrap_reduction_result(axis, result)
def all(
self,
*,
axis: int | None = None,
out=None,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_all((), {"out": out, "keepdims": keepdims})
result = nanops.nanall(self._ndarray, axis=axis, skipna=skipna)
return self._wrap_reduction_result(axis, result)
def min(self, *, axis: int | None = None, skipna: bool = True, **kwargs) -> Scalar:
nv.validate_min((), kwargs)
result = nanops.nanmin(
values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna
)
return self._wrap_reduction_result(axis, result)
def max(self, *, axis: int | None = None, skipna: bool = True, **kwargs) -> Scalar:
nv.validate_max((), kwargs)
result = nanops.nanmax(
values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna
)
return self._wrap_reduction_result(axis, result)
def sum(
self, *, axis: int | None = None, skipna: bool = True, min_count=0, **kwargs
) -> Scalar:
nv.validate_sum((), kwargs)
result = nanops.nansum(
self._ndarray, axis=axis, skipna=skipna, min_count=min_count
)
return self._wrap_reduction_result(axis, result)
def prod(
self, *, axis: int | None = None, skipna: bool = True, min_count=0, **kwargs
) -> Scalar:
nv.validate_prod((), kwargs)
result = nanops.nanprod(
self._ndarray, axis=axis, skipna=skipna, min_count=min_count
)
return self._wrap_reduction_result(axis, result)
def mean(
self,
*,
axis: int | None = None,
dtype: NpDtype | None = None,
out=None,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_mean((), {"dtype": dtype, "out": out, "keepdims": keepdims})
result = nanops.nanmean(self._ndarray, axis=axis, skipna=skipna)
return self._wrap_reduction_result(axis, result)
def median(
self,
*,
axis: int | None = None,
out=None,
overwrite_input: bool = False,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_median(
(), {"out": out, "overwrite_input": overwrite_input, "keepdims": keepdims}
)
result = nanops.nanmedian(self._ndarray, axis=axis, skipna=skipna)
return self._wrap_reduction_result(axis, result)
def std(
self,
*,
axis: int | None = None,
dtype: NpDtype | None = None,
out=None,
ddof=1,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_stat_ddof_func(
(), {"dtype": dtype, "out": out, "keepdims": keepdims}, fname="std"
)
result = nanops.nanstd(self._ndarray, axis=axis, skipna=skipna, ddof=ddof)
return self._wrap_reduction_result(axis, result)
def var(
self,
*,
axis: int | None = None,
dtype: NpDtype | None = None,
out=None,
ddof=1,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_stat_ddof_func(
(), {"dtype": dtype, "out": out, "keepdims": keepdims}, fname="var"
)
result = nanops.nanvar(self._ndarray, axis=axis, skipna=skipna, ddof=ddof)
return self._wrap_reduction_result(axis, result)
def sem(
self,
*,
axis: int | None = None,
dtype: NpDtype | None = None,
out=None,
ddof=1,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_stat_ddof_func(
(), {"dtype": dtype, "out": out, "keepdims": keepdims}, fname="sem"
)
result = nanops.nansem(self._ndarray, axis=axis, skipna=skipna, ddof=ddof)
return self._wrap_reduction_result(axis, result)
def kurt(
self,
*,
axis: int | None = None,
dtype: NpDtype | None = None,
out=None,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_stat_ddof_func(
(), {"dtype": dtype, "out": out, "keepdims": keepdims}, fname="kurt"
)
result = nanops.nankurt(self._ndarray, axis=axis, skipna=skipna)
return self._wrap_reduction_result(axis, result)
def skew(
self,
*,
axis: int | None = None,
dtype: NpDtype | None = None,
out=None,
keepdims: bool = False,
skipna: bool = True,
):
nv.validate_stat_ddof_func(
(), {"dtype": dtype, "out": out, "keepdims": keepdims}, fname="skew"
)
result = nanops.nanskew(self._ndarray, axis=axis, skipna=skipna)
return self._wrap_reduction_result(axis, result)
# ------------------------------------------------------------------------
# Additional Methods
def to_numpy(
self,
dtype: npt.DTypeLike | None = None,
copy: bool = False,
na_value=lib.no_default,
) -> np.ndarray:
result = np.asarray(self._ndarray, dtype=dtype)
if (copy or na_value is not lib.no_default) and result is self._ndarray:
result = result.copy()
if na_value is not lib.no_default:
result[self.isna()] = na_value
return result
# ------------------------------------------------------------------------
# Ops
def __invert__(self) -> PandasArray:
return type(self)(~self._ndarray)
def __neg__(self) -> PandasArray:
return type(self)(-self._ndarray)
def __pos__(self) -> PandasArray:
return type(self)(+self._ndarray)
def __abs__(self) -> PandasArray:
return type(self)(abs(self._ndarray))
def _cmp_method(self, other, op):
if isinstance(other, PandasArray):
other = other._ndarray
other = ops.maybe_prepare_scalar_for_op(other, (len(self),))
pd_op = ops.get_array_op(op)
other = ensure_wrapped_if_datetimelike(other)
with np.errstate(all="ignore"):
result = pd_op(self._ndarray, other)
if op is divmod or op is ops.rdivmod:
a, b = result
if isinstance(a, np.ndarray):
# for e.g. op vs TimedeltaArray, we may already
# have an ExtensionArray, in which case we do not wrap
return self._wrap_ndarray_result(a), self._wrap_ndarray_result(b)
return a, b
if isinstance(result, np.ndarray):
# for e.g. multiplication vs TimedeltaArray, we may already
# have an ExtensionArray, in which case we do not wrap
return self._wrap_ndarray_result(result)
return result
_arith_method = _cmp_method
def _wrap_ndarray_result(self, result: np.ndarray):
# If we have timedelta64[ns] result, return a TimedeltaArray instead
# of a PandasArray
if result.dtype == "timedelta64[ns]":
from pandas.core.arrays import TimedeltaArray
return TimedeltaArray._simple_new(result)
return type(self)(result)
# ------------------------------------------------------------------------
# String methods interface
_str_na_value = np.nan
``` |
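`PandasArray` is exposed publicly as `pd.arrays.PandasArray`; a minimal sketch of the wrapper and its nan-aware reductions (assuming a pandas version matching this vendored file):
```python
import numpy as np
import pandas as pd

arr = pd.arrays.PandasArray(np.array([1.0, 2.0, np.nan]))
print(arr.dtype)            # float64 (a PandasDtype wrapping the numpy dtype)
print(arr.isna().tolist())  # [False, False, True]
print(arr.sum())            # 3.0 -- skipna=True routes through nanops.nansum
```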
{
"source": "AA1999/Disthon",
"score": 2
} |
#### File: discord/abc/discordobject.py
```python
from __future__ import annotations
from datetime import datetime
from pydantic import BaseModel
from ..types.snowflake import Snowflake
class DiscordObject(BaseModel):
id: Snowflake
created_at: datetime
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return self.id.id >> 22
```
#### File: discord/activity/baseactivity.py
```python
from __future__ import annotations
from datetime import datetime
from typing import Optional
from ..types.activitypayload import ActivityPayload
class BaseActivity:
_created_at: Optional[datetime]
__slots__ = ("_created_at",)
    def __init__(self, **kwargs):
self._created_at = kwargs.pop("created_at", None)
@property
def created_at(self):
return self._created_at
def to_dict(self) -> ActivityPayload:
payload = ActivityPayload()
payload["created_at"] = self.created_at
return payload
```
#### File: discord/api/intents.py
```python
from __future__ import annotations
from typing import ClassVar, Dict
from ..exceptions import InvalidIntent
class Intents:
VALID_INTENTS: ClassVar[Dict[str, int]] = {
"guilds": 0,
"members": 1,
"bans": 2,
"emojis": 3,
"integrations": 4,
"webhooks": 5,
"invites": 6,
"voice": 7,
"presence": 8,
"message": 9,
"reaction": 10,
"typing": 11,
"dm_message": 12,
"dm_reaction": 13,
"dm_typing": 14,
}
def __init__(self, **kwargs):
self.value = 0
for arg in kwargs:
arg = arg.lower()
try:
self.value = (
self.value + (1 << self.VALID_INTENTS[arg])
if kwargs[arg]
else self.value
)
except KeyError:
raise InvalidIntent(
arg, f"Invalid intent {arg}. Please check your spelling."
)
def __setattr__(self, name, value):
if name == "value":
super().__setattr__(name, value)
return
if name not in self.VALID_INTENTS:
raise InvalidIntent(
name,
"Specified value is not in the list of valid intents. Please check your spelling",
)
        bit = 1 << self.VALID_INTENTS[name]
        # set or clear the bit idempotently so repeated assignments cannot
        # corrupt the running total
        if value:
            self.value |= bit
        else:
            self.value &= ~bit
@classmethod
def none(cls):
return cls()
@classmethod
def all(cls):
kwargs = {name: True for name in Intents.VALID_INTENTS}
return cls(**kwargs)
@classmethod
def default(cls):
kwargs = {name: True for name in Intents.VALID_INTENTS}
kwargs["members"] = False
kwargs["presence"] = False
return cls(**kwargs)
```
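Each name in `VALID_INTENTS` maps to a bit index, and an enabled intent contributes `1 << index` to `value`. A minimal usage sketch (assuming the package is importable as `discord`, matching the file paths above):
```python
from discord.api.intents import Intents

# guilds is bit 0 and bans is bit 2, so the combined flag value is 0b101 == 5
demo = Intents(guilds=True, bans=True)
print(demo.value)  # 5

# all() sets every one of the 15 defined bits; default() drops the two
# privileged intents (members, presence)
print(Intents.all().value == (1 << 15) - 1)  # True
```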
#### File: discord/channels/basechannel.py
```python
from __future__ import annotations
from ..abc.discordobject import DiscordObject
from ..types.snowflake import Snowflake
class BaseChannel(DiscordObject):
__slots__ = ("_id", "_name", "_mention")
_id: Snowflake
_name: str
@property
def id(self) -> Snowflake:
return self._id
@property
def name(self):
return self._name
@property
def mention(self):
return f"<#{self._id}>"
@property
def created_at(self):
return
```
#### File: Disthon/discord/client.py
```python
from __future__ import annotations
import asyncio
import inspect
import sys
import traceback
import typing
from copy import deepcopy
from .api.handler import Handler
from .api.intents import Intents
from .api.websocket import WebSocket
class Client:
def __init__(
self,
*,
intents: typing.Optional[Intents] = Intents.default(),
respond_self: typing.Optional[bool] = False,
loop: typing.Optional[asyncio.AbstractEventLoop] = None,
) -> None:
self._loop: asyncio.AbstractEventLoop = loop or asyncio.get_event_loop()
self.intents = intents
self.respond_self = respond_self
self.stay_alive = True
self.handler = Handler()
self.lock = asyncio.Lock()
self.closed = False
self.events = {}
async def login(self, token: str) -> None:
self.token = token
async with self.lock:
self.info = await self.handler.login(token)
async def connect(self) -> None:
while not self.closed:
socket = WebSocket(self, self.token)
async with self.lock:
g_url = await self.handler.gateway()
if not isinstance(self.intents, Intents):
raise TypeError(
f"Intents must be of type Intents, got {self.intents.__class__}"
)
self.ws = await asyncio.wait_for(socket.start(g_url), timeout=30)
while True:
await self.ws.receive_events()
async def alive_loop(self, token: str) -> None:
await self.login(token)
try:
await self.connect()
finally:
await self.close()
async def close(self) -> None:
await self.handler.close()
def run(self, token: str):
def stop_loop_on_completion(_):
self._loop.stop()
future = asyncio.ensure_future(self.alive_loop(token), loop=self._loop)
future.add_done_callback(stop_loop_on_completion)
self._loop.run_forever()
if not future.cancelled():
return future.result()
def event(self, event: str = None):
def wrapper(func):
self.add_listener(func, event)
return func
return wrapper
def add_listener(
self, func: typing.Callable, event: typing.Optional[str] = None
) -> None:
event = event or func.__name__
if not inspect.iscoroutinefunction(func):
raise TypeError(
"The callback is not a valid coroutine function. Did you forget to add async before def?"
)
if event in self.events:
self.events[event].append(func)
else:
self.events[event] = [func]
async def handle_event(self, msg):
event: str = "on_" + msg["t"].lower()
# create a global on_message event for either guild or dm messages
if event in ("on_message_create", "on_dm_message_create"):
global_message = deepcopy(msg)
global_message["t"] = "MESSAGE"
await self.handle_event(global_message)
for coro in self.events.get(event, []):
try:
await coro(msg)
except Exception as error:
print(f"Ignoring exception in event {coro.__name__}", file=sys.stderr)
traceback.print_exception(
type(error), error, error.__traceback__, file=sys.stderr
)
```
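Listeners can be registered through the decorator or `add_listener`; both paths key coroutines by event name in `self.events`. A minimal sketch (assuming the package is importable as `discord`; the token is a placeholder):
```python
from discord.client import Client

client = Client()

@client.event("on_message")  # the event name defaults to the function name if omitted
async def handle(msg):
    print("event type:", msg["t"])

# client.run("TOKEN")  # placeholder token; starts the login + gateway loop
```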
#### File: Disthon/discord/exceptions.py
```python
from __future__ import annotations
import traceback
from http.client import HTTPException
from typing import TYPE_CHECKING, Optional, Union
if TYPE_CHECKING:
from aiohttp import ClientWebSocketResponse
from .interactions import Interaction
class DiscordException(Exception):
_message: str
_code: int
def __init__(self, message: str):
self._message = message
def print_traceback(self):
traceback.print_exception(DiscordException, self, self.__traceback__)
def __repr__(self):
return f"Error message: {self._message}"
def __str__(self):
return self._message
class DiscordHTTPException(DiscordException, HTTPException):
_code: int
def __init__(self, message: str, code: int):
self._code = code
super().__init__(message)
def __str__(self):
return f"Error {self._code}: {self._message}"
def __repr__(self):
return f"Error code: {self._code} Message: {self._message}"
@property
def code(self):
return self._code
class DiscordClientException(DiscordException):
def __init__(self, message: str):
super().__init__(message)
class DiscordConnectionClosed(DiscordClientException):
_code: Optional[int]
_shard_id: Optional[int]
def __init__(
self,
socket: ClientWebSocketResponse,
*,
shard_id: Optional[int],
code: Optional[int] = None,
):
self._code = code or socket.close_code or -1
self._shard_id = shard_id
super().__init__(f"Shard {shard_id} closed with code {code}.")
@property
def code(self):
return self._code
@property
def shard_id(self):
return self._shard_id
class DiscordForbidden(DiscordHTTPException):
def __init__(self, message: str = "Access forbidden for requested object."):
super().__init__(message=message, code=403)
class DiscordNotFound(DiscordHTTPException):
def __init__(self, message: str = "Requested object not found."):
super().__init__(message=message, code=404)
class DiscordGatewayNotFound(DiscordNotFound):
def __init__(self, message: str = "Requested gateway not found."):
super().__init__(message=message)
class DiscordInteractionResponded(DiscordClientException):
_interaction: Interaction
def __init__(self, interaction: Interaction):
self._interaction = interaction
super().__init__("This interaction has already been responded to before.")
class DiscordInvalidArgument(DiscordClientException):
def __init__(self, message: str):
super().__init__(message)
class DiscordInvalidData(DiscordClientException):
def __init__(self, message: str):
super().__init__(message)
class DiscordNoMoreItems(DiscordException):
def __init__(self, message: str):
super().__init__(message)
class DiscordNotAuthorized(DiscordHTTPException):
def __init__(
self, message: str = "Access to the requested object is not authorized."
):
super().__init__(message=message, code=401)
class DiscordPrivilegedIntentsRequired(DiscordClientException):
_shard_id: Optional[int]
def __init__(self, shard_id: Optional[int]):
self._shard_id = shard_id
msg = (
"Shard %s is requesting privileged intents that have not been explicitly enabled in the "
"developer portal. It is recommended to go to https://discord.com/developers/applications/ "
"and explicitly enable the privileged intents within your application's page. If this is not "
"possible, then consider disabling the privileged intents instead."
)
super().__init__(msg % shard_id)
@property
def shard_id(self):
return self._shard_id
class DiscordServerError(DiscordHTTPException):
def __init__(self, message: str = "Internal server error."):
super().__init__(message=message, code=500)
class InvalidSnowflakeException(Exception):
_value: str
_message: str
    def __init__(self, value: str, message: str):
        self._value = value
        self._message = message
        super().__init__(message)
class InvalidIntent(ValueError):
_message: str
_value: str
def __init__(self, value: str, message: str):
self._value = value
self._message = message
def __str__(self) -> str:
return f"Error message: {self._message} for {self._value}"
def __repr__(self) -> str:
return f"Message: {self._message}"
class InvalidColor(ValueError):
_message: str
_value: Union[int, str]
def __init__(self, value: Union[int, str], message: str) -> None:
self._value = value
self._message = message
super().__init__(message)
class EmptyField(ValueError):
_message: str
def __init__(self, message: str = "Given field cannot be empty"):
self._message = message
super().__init__(message)
class DiscordChannelNotFound(DiscordNotFound):
def __init__(self, message: str = "Requested channel not found."):
super().__init__(message=message)
class DiscordChannelForbidden(DiscordForbidden):
def __init__(self, message: str = "Access forbidden for requested channel."):
super().__init__(message=message)
```
#### File: Disthon/discord/guild.py
```python
from __future__ import annotations
from typing import NamedTuple, Optional
from .abc.discordobject import DiscordObject
from .channels.guildchannel import GuildChannel
from .role import Role
from .types.guildpayload import GuildPayload
from .types.snowflake import Snowflake
from .user.member import Member
from .user.user import User
class BanEntry(NamedTuple):
user: User
reason: Optional[str]
class GuildLimit(NamedTuple):
filesize: int
emoji: int
channels: int
roles: int
categories: int
bitrate: int
stickers: int
class Guild(DiscordObject):
    __slots__ = (
        "region",
        "owner_id",
        "mfa_level",  # "mfa.level" is not a valid identifier for __slots__
        "name",
        "id",
        "_members",
        "_channels",
        "_vanity",
        "_banner",
    )
_roles: set[Role]
me: Member
owner_id: Snowflake
def __init__(self, data: GuildPayload):
self._members: dict[Snowflake, Member] = {}
self._channels: dict[Snowflake, GuildChannel] = {}
self._roles = set()
def _add_channel(self, channel: GuildChannel, /) -> None:
self._channels[channel.id] = channel
def _delete_channel(self, channel: DiscordObject) -> None:
self._channels.pop(channel.id, None)
def add_member(self, member: Member) -> None:
self._members[member.id] = member
    def add_roles(self, role: Role) -> None:
        for p in self._roles:
            # shift every role except @everyone (the default role) up one position
            p.position += not p.is_default()
        self._roles.add(role)
```
#### File: types/enums/speakingstate.py
```python
from enum import IntEnum
class SpeakingState(IntEnum):
none = 0
voice = 1
soundshare = 2
priority = 4
def __str__(self):
return self.name
def __int__(self):
return self.value
```
#### File: types/enums/stickerformattype.py
```python
from __future__ import annotations
from enum import IntEnum
class StickerFormatType(IntEnum):
png = 1
apng = 2
lottie = 3
@property
def file_extension(self) -> str:
lookup: dict[StickerFormatType, str] = {
StickerFormatType.png: "png",
StickerFormatType.apng: "png",
StickerFormatType.lottie: "json",
}
return lookup[self]
```
#### File: discord/utils/datetime.py
```python
from __future__ import annotations
from typing import Optional
import arrow
def utcnow():
return arrow.utcnow()
def parse_time(timestamp: Optional[str]):
if timestamp:
return arrow.get(timestamp)
return None
``` |
{
"source": "aa25desh/femshop",
"score": 3
} |
#### File: src/Python/state.py
```python
from enum import Enum, unique
from state_constants import *
from domain import *
class State:
def __init__(self, configDict):
self.configDict = {}
for param in RequiredParameters:
if param in configDict:
self.setParameter(param, configDict[param])
del configDict[param]
else:
self.setParameter(param)
#merge in options with string keys
self.configDict = {**self.configDict, **configDict}
def setParameter(self, id, value=None):
key = id
if isinstance(id, RequiredParameters):
key = id.name
if value is None:
#use default
value = id.value
self.configDict[key] = value
def getParameter(self, id):
key = id
if isinstance(id, RequiredParameters):
key = id.name
return self.configDict[key]
def generateCode(self, type=GenerationType.CPP):
print("Generating Code")
#enum value is the default if unset by user.
#can be simple value, enum, or full class.
class RequiredParameters(Enum):
Dimensions = 2
Solver = Solvers.CG
Domain = Domain([(0,1),(0,1)], {}, Geometry.Square, Decomposition.Unstructured)
TimeStepper = TimeSteppers.RK4
OutputFormat = OutputFormats.VTK
FunctionSpace = FunctionSpaces.LEGENDRE
NonlinearSolver = NonlinearSolvers.NEWTON_RAPHSON
#... and many more ...
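if __name__ == "__main__":
    # Illustrative usage (added, not part of the original file): parameters
    # supplied in the config dict override the enum defaults, and extra
    # string-keyed options are merged in untouched.
    s = State({"Dimensions": 3, "my_option": True})
    print(s.getParameter(RequiredParameters.Dimensions))  # 3
    print(s.getParameter("my_option"))                    # True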
``` |
{
"source": "AA-284-Team-AA/Prop_Analysis",
"score": 3
} |
#### File: AA-284-Team-AA/Prop_Analysis/FuelOxTrades.py
```python
import numpy as np
import matplotlib.pyplot as plt
from rocketcea.cea_obj_w_units import CEA_Obj
from rocketcea.cea_obj import add_new_fuel
def maintrade(fuel, oxi, testlims, N, P, testvar):
## inputs
# range of chamber pressures to test (psi) (default: 400; can be list of numbers or range i.e. range(100,400,100))
# testvar = "OF" # pick from OF, ER
# oxi = "N2O"
# fuel = "PMMA" # fuel is controlled by function input
# OF = o/f ratio
# ER = nozzle expansion ratio (Ae/At) (Default)
# TESTLIMS: vector array (2 elements)--upper and lower limit of sweep in either OF or ER
# TESTVAR: "OF" or "ER"
# SETS UP and RUNS TRADES based on inputs
pltname = oxi+fuel+"_"+testvar+"_" # prefix for plot image files
if testvar == "ER":
        OFratio = 1.5 # fixed O/F ratio for the expansion-ratio sweep (default 1.5)
supAR = np.linspace(testlims[0],testlims[1],num=N,endpoint=True) # vary nozzle expansion ratio --this will make the supAR array
ISP, Cstar, PCPE, cpcv = ARtrade(fuel,oxi,P,N,OFratio,supAR,pltname) # runs expansion ratio trade
xval = supAR
elif testvar == "OF":
OFratio = np.linspace(testlims[0],testlims[1], num=N, endpoint=True) # vary O/F ratio (by mass) --this will make the OFratio array properly
ISP, Cstar, PCPE, cpcv = OFtrade(fuel,oxi,P,N,OFratio,pltname) # runs O/F ratio trade
xval = OFratio
return ISP, Cstar, PCPE, cpcv, xval
# def findER(fu,ox,pcpe): # finds the ideal nozzle expansion ratio
# C = CEA_Obj(oxName=ox, fuelName=fu,
# isp_units='sec',
# cstar_units='m/s') # define CEA object to operate on for rocketCEA
# PCPE_fe = C.get_eps_at_PcOvPe(Pc=P,PcOvPe=pcpe)
# return PCPE_fe
# the XXtrade functions work as templates for running any trade you might want. Just add more get_"" from rocketcea to work with more variables along with corresponding input
def ARtrade(fu,ox,P,N,OFratio,supAR,pltname): # expansion ratio trade
# fu: name of fuel (string, as defined in rocketcea documentation or newly added fuels)
# ox: name of ox (string, as defined in rocketcea documentation of newly added fuels)
# P: chamber pressure (either a single number, list of numbers, or range())
# N: number of desired supersonic area ratios (nozzle expansion ratio) to sweep over
# OFratio: fixed O/F ratio for this trade
# supAR: values of supersonic area ratios (length of this list must match value of N)
C = CEA_Obj(oxName=ox, fuelName=fu,
isp_units='sec',
cstar_units='m/s') # define CEA object to operate on for rocketCEA
if isinstance(P,int)==True: # if P is only one value
y = 1
else:
y = len(P)
# preallocate vars
ISP = np.zeros([y,supAR.shape[0]]) # isp
Cstar = np.zeros([y,supAR.shape[0]]) # cstar eff
PCPE = np.zeros([y,supAR.shape[0]]) # pc/pe
cpcv = np.zeros([y,supAR.shape[0]]) # ratio of specific heats in thrust chamber
for x in range(y):
if y==1:
Pc = P # integers can't be called :(
legends = str(Pc)
else:
Pc = P[x] # chamber pressure
legends = P
for i in range(N):
ISP[x,i] = C.get_Isp(Pc=Pc, MR=OFratio, eps=supAR[i]) # ISP vacuum
Cstar[x,i] = C.get_Cstar(Pc=Pc, MR=OFratio) # Cstar efficiency
PCPE[x,i] = C.get_PcOvPe(Pc=Pc, MR=OFratio, eps=supAR[i]) # Pc/Pe
cpcv[x,i] = C.get_Chamber_Cp(Pc=Pc, MR=OFratio, eps=supAR[i]) # cp/cv
# generate plots for ISP, Cstar, and Pchamb/Pexit. Replace the last input with the vectory array of pressures
# plots(supAR,ISP,"Ae/At","ISP (s)", pltname+"isp.png" , legends ) # isp plot
# plots(supAR,Cstar,"Ae/At","Cstar", pltname+"cstar.png" , legends ) # Cstar plot
# plots(supAR,PCPE,"Ae/At","Pc/Pe", pltname+"pcpe.png" , legends ) # Pc/Pe plot
return ISP, Cstar, PCPE, cpcv
def OFtrade(fu,ox,P,N,OFratio,pltname): # O/F ratio trade (OFratio needs to be a vector array)
# fu: name of fuel (string, as defined in rocketcea documentation or newly added fuels)
# ox: name of ox (string, as defined in rocketcea documentation of newly added fuels)
# P: chamber pressure (either a single number, list of numbers, or range())
# N: number of desired O/F ratios to sweep over
# OFratio: values of O/F ratios (length of this list must match value of N)
# supAR: fixed nozzle expansion ratio
C = CEA_Obj(oxName=ox, fuelName=fu,
isp_units='sec',
cstar_units='m/s') # define CEA object to operate on for rocketCEA
if isinstance(P,int)==True: # if P is only one value
y = 1
else:
y = len(P)
# preallocate vars
ISP = np.zeros([y,OFratio.shape[0]]) # isp
Cstar = np.zeros([y,OFratio.shape[0]]) # cstar eff
PCPE = np.zeros([y,OFratio.shape[0]]) # pc/pe
cpcv = np.zeros([y,OFratio.shape[0]]) # ratio of specific heats in thrust chamber
fe_pcpe = np.zeros([y,OFratio.shape[0]]) # nozzle area ratio for fully expanded flow
for x in range(y):
if y==1:
Pc = P # integers can't be called :(
legends = str(Pc)
else:
Pc = P[x] # chamber pressure
legends = P
        pr = Pc/14.7 # Pc/Pe for fully expanded flow (Pe = 14.7 psi, sea level)
        for i in range(N):
            # expansion ratio that fully expands the flow at this O/F
            eps_i = C.get_eps_at_PcOvPe(Pc=Pc, MR=OFratio[i], PcOvPe=pr)
            fe_pcpe[x,i] = eps_i # store the area ratio; plotted later as Ae/At
            ISP[x,i] = C.get_Isp(Pc=Pc, MR=OFratio[i], eps=eps_i) # ISP vacuum
            Cstar[x,i] = C.get_Cstar(Pc=Pc, MR=OFratio[i]) # Cstar efficiency
            cpcv[x,i] = C.get_Chamber_Cp(Pc=Pc, MR=OFratio[i], eps=eps_i) # cp/cv
# generate plots for ISP, Cstar, and Pchamb/Pexit
# plots(OFratio,ISP,"O/F ratio","ISP (s)", pltname+"isp.png" , legends ) # isp plot
# plots(OFratio,Cstar,"O/F ratio","Cstar", pltname+"cstar.png" , legends ) # Cstar plot
# plots(OFratio,PCPE,"O/F ratio","Pc/Pe", pltname+"pcpe.png" , legends ) # Pc/Pe plot
return ISP, Cstar, fe_pcpe, cpcv
def plots(xvals,yvals,xname,yname,pltname,labels,plttit): # function to generate plots of the inputted variables
plt.figure()
if yvals.ndim==1:
plt.plot(xvals,yvals[:], ms=10, label=str(labels))
plt.xlim(min(xvals),max(xvals))
else:
for i in range(yvals.shape[0]): # can handle multiple lines (i.e. ISP vs. O/F at various chamber pressures)
plt.plot(xvals,yvals[i,:], ms=10, label=str(labels[i]))
plt.xlabel(xname)
plt.ylabel(yname)
plt.title(plttit)
plt.legend(loc="lower right")
plt.savefig(pltname)
    plt.close()
### ANALYSIS SET UP ###
# defining fuel will add it's information to the master list of fuels to run rocketCEA with.
# define PMMA
card_str = '''
fuel PMMA C 5 H 8 O 2
h,kj=-430.5 t(k)=299.82
''' # <NAME>'s recommendation for modeling PMMA
add_new_fuel('PMMA', card_str) # rocketCEA function to add PMMA to possible inputs
# define HTPB
card_str2 = '''
fuel HTPB C 7.3165 H 10.3360 O 0.1063
h,kj/mol= 456 t(k)=298.15 rho=0.9220
'''
add_new_fuel('HTPB', card_str2) # rocketCEA function to add HTPB to possible inputs
# define ABS (monomer of ABS)
card_str3 = '''
fuel ABS C 3 H 3 N 1
h,kj/mol=172 t(k)=299.82
'''
add_new_fuel('ABS (Monomer)', card_str3) # rocketCEA function to add ABS (Monomer) to possible inputs
# define Paraffin
card_str4 = '''
fuel Paraffin C 32 H 66 wt%=100.00
h,kj/mol=-938 t(k)=298
'''
add_new_fuel('Paraffin', card_str4)
### BEGIN CODE TO RUN TRADES
testfs = ["PMMA","HTPB","ABS (Monomer)","Paraffin"]
testox = "GOX"
testvar = "OF" # pick OF or ER
N = 100 # number of points in trade study
P = 150 # chamber pressure, psi
pr = P/14.7 # pressure ratio Pc/Pe for fully expanded flow
ISPf = np.zeros([len(testfs),N]) # ISP comparing all fuels
Cstarf = np.zeros([len(testfs),N]) # Cstar comparing all fuels
PCPEf = np.zeros([len(testfs),N]) # Pchamb/Pexit
cpcvf = np.zeros([len(testfs),N]) # ratio of specific heats
fe_pcpe = np.zeros([len(testfs),N]) # nozzle area ratio for fully expanded flow
pltlbls = [] # labels for plot legend
for i in range(len(testfs)): # labels for each line in plot
pltlbls.append(testfs[i] + "/" + testox)
# currently setup for runs with only ONE chamber pressure selection
for f in range(len(testfs)):
ISPf[f,:],Cstarf[f,:],PCPEf[f,:],cpcvf[f,:],xvar = maintrade(testfs[f],testox,[0.1,10],N,P,testvar) # currently overwriting xvar every time
# save plots of results
pltname = testox + "_Fuel Comparison_PC=" + str(P) + "_"
plttit = "P_c = " + str(P) + ",Fully Expanded"
if testvar == "OF":
plots(xvar,ISPf,"O/F Ratio","ISP (s)", pltname + "isp",pltlbls, "O/F Ratio vs. ISP, " + plttit)
plots(xvar,Cstarf,"O/F Ratio","Cstar (m/s)",pltname + "cstar",pltlbls, "O/F Ratio vs. Cstar, " + plttit)
plots(xvar,cpcvf,"O/F Ratio","Cp/Cv", pltname + "cpcv",pltlbls, "O/F Ratio vs. Cp/Cv, " + plttit)
plots(xvar,PCPEf,"O/F Ratio","Ae/At", pltname + "aeat",pltlbls, "O/F Ratio vs. Ae/At, " + plttit)
elif testvar == "ER":
    plots(xvar,PCPEf,"Ae/At","Pc/Pe","Fuel Expansion Ratio Comparison.png",pltlbls, "Fuel Expansion Ratio Comparison, " + plttit)
# Note: for default case, Pc/Pe corresponding to fully expanded nozzle is 25.0
# why = np.reshape(np.array([[ISPf[0,:]],[Cstarf[0,:]]]),[2,50])
# plots(xvar,ISPf[0,:],"O/F","ISP and Cstar","ISP_Cstar Optimization",testfs)
```
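A single trade can also be invoked directly from `maintrade`; a minimal sketch (same assumptions as the script above: rocketcea installed and the custom fuel cards registered, noting that the module also runs its own fuel comparison at import time):
```python
# Sweep O/F from 0.5 to 8 for PMMA/GOX at Pc = 150 psi over 50 points.
ISP, Cstar, PCPE, cpcv, xval = maintrade("PMMA", "GOX", [0.5, 8], 50, 150, "OF")
print(xval[ISP.argmax()])  # O/F ratio giving the highest Isp in the sweep
```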
#### File: Prop_Analysis/TemporalAnalysis/utils.py
```python
import numpy as np
from rocketcea.cea_obj_w_units import CEA_Obj
from scipy.optimize import fsolve
def get_exit_pressure(Pc, gamma, AR, guess=101325.):
'''
Find exit pressure from Pc, gam and AR.
'''
def f(x):
temp1 = ((gamma + 1)/2) ** (1/(gamma -1))
temp2 = (x/Pc) ** (1/gamma)
temp3 = (gamma+1) / (gamma-1)
temp4 = 1 - (x/Pc)**((gamma-1)/gamma);
return temp1*temp2*np.sqrt(temp3*temp4) - 1/AR
    Pe = fsolve(f, guess, xtol=1e-10)
return Pe[0]
def get_CEA_results(C, Pc, Pe, OF, AR):
PcOvPe = Pc/Pe
_, gam_chamber = C.get_Chamber_MolWt_gamma(Pc=Pc, MR=OF, eps=AR)
ISP = C.get_Isp(Pc=Pc, MR=OF, eps=AR)
Cstar = C.get_Cstar(Pc=Pc, MR=OF)
return gam_chamber, ISP, Cstar
def get_thrust_coeff(Pc, Pe, P0, gamma, AR):
temp1 = (2*gamma**2) / (gamma-1)
temp2 = (2/(gamma+1)) ** ((gamma+1)/(gamma-1))
temp3 = 1 - (Pe/Pc) ** ((gamma-1)/gamma)
temp4 = (Pe-P0)/Pc * AR
return np.sqrt(temp1 * temp2 * temp3) + temp4
def get_mdots_from_Cstar(Pc, At, OF, Cstar, eff_Cstar=1.):
mdot = Pc*At / (eff_Cstar*Cstar)
mdot_fuel = mdot / (OF+1)
mdot_ox = OF * mdot_fuel
return mdot, mdot_ox, mdot_fuel
def get_radius_from_flow_rates(mdot_ox, mdot_fuel, rho_fuel, L, a, n):
R = ( \
mdot_fuel/(a*(mdot_ox/np.pi)**n) * 1./(rho_fuel*2*np.pi*L) \
) ** (1/(1-2*n))
return R
def get_exit_velocity(Pc, Pe, at, gamma):
Mach_exit = np.sqrt( \
2/(gamma-1) * ( (Pc/Pe)**( (gamma-1)/gamma ) - 1) )
tmp = (Pe/Pc)**( (gamma-1)/(2*gamma) ) * ((gamma+1)/2)**(1/2)
return tmp * Mach_exit * at
def get_mdot_fuel(R, L, rho_fuel, mdot_ox, a, n):
return 2*np.pi*R*L * rho_fuel * a*(mdot_ox/(np.pi*R**2))**n
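if __name__ == "__main__":
    # Illustrative sanity check (added, not part of the original file): for a
    # fixed chamber pressure and gamma, a larger expansion ratio must give a
    # lower exit pressure; the thrust coefficient then follows from Pe.
    Pc, gamma = 20 * 101325., 1.2
    Pe_4 = get_exit_pressure(Pc, gamma, 4.)
    Pe_8 = get_exit_pressure(Pc, gamma, 8.)
    print(Pe_4 > Pe_8)  # True
    print(get_thrust_coeff(Pc, Pe_4, 101325., gamma, 4.))  # sea-level Cf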
``` |
{
"source": "aa840/ModSeminario_Py",
"score": 3
} |
#### File: ModSeminario_Py/Python_Modified_Seminario_Method/bonds_calculated_printed.py
```python
def bonds_calculated_printed(outputfilefolder, vibrational_scaling_squared, bond_list, bond_lengths, atom_names, eigenvalues, eigenvectors, coords):
#This function uses the Seminario method to find the bond
#parameters and print them to file
import numpy as np
from force_constant_bond import force_constant_bond
#Open output file bond parameters are written to
fid = open((outputfilefolder + 'Modified_Seminario_Bonds'), 'w')
k_b = np.zeros(len(bond_list))
bond_length_list = np.zeros(len(bond_list))
unique_values_bonds = [] # Used to find average values
for i in range(0, len(bond_list)):
AB = force_constant_bond(bond_list[i][0], bond_list[i][1],eigenvalues, eigenvectors, coords)
BA = force_constant_bond(bond_list[i][1], bond_list[i][0],eigenvalues, eigenvectors, coords)
# Order of bonds sometimes causes slight differences, find the mean
k_b[i] = np.real(( AB + BA ) /2);
# Vibrational_scaling takes into account DFT deficities/ anharmocity
k_b[i] = k_b[i] * vibrational_scaling_squared
bond_length_list[i] = bond_lengths[bond_list[i][0]][bond_list[i][1]]
fid.write(atom_names[bond_list[i][0]] + '-' + atom_names[bond_list[i][1]] + ' ')
fid.write(str("%#.5g" % k_b[i])+ ' ' + str("%#.4g" % bond_length_list[i]) + ' ' +
str(bond_list[i][0] + 1) + ' ' + str(bond_list[i][1] + 1))
fid.write('\n')
unique_values_bonds.append([atom_names[bond_list[i][0]], atom_names[bond_list[i][1]], k_b[i], bond_length_list[i], 1 ])
fid.close()
return unique_values_bonds
```
#### File: ModSeminario_Py/Python_Modified_Seminario_Method/modified_Seminario_method.py
```python
def modified_Seminario_method(inputfilefolder, outputfilefolder, vibrational_scaling):
# Program to implement the Modified Seminario Method
# Written by <NAME>, TCM, University of Cambridge
    # Reference: AEA Allen, <NAME>, <NAME>, J. Chem. Theory Comput. (2018), doi:10.1021/acs.jctc.7b00785
    # Pass as arguments the input folder containing the zmat.log/lig.log,
# lig.fchk and optional Zmat.z file, the output folder where the new
# parameters will be written and the vibrational frequency scaling constant
# required.
print(inputfilefolder)
import time
import numpy as np
import os.path
from input_data_processing import input_data_processing
from bonds_calculated_printed import bonds_calculated_printed
from angles_calculated_printed import angles_calculated_printed
from average_values_across_classes import average_values_across_classes
from sb_file_new_parameters import sb_file_new_parameters
#Create log file
fid_log = open((inputfilefolder + 'MSM_log'), "w")
fid_log.write('Modified Seminario Method \n')
    fid_log.write('Parametrization started for files in folder ' + inputfilefolder + '\n')
fid_log.write('Time is now: '+ time.strftime('%X %x %Z') + '\n')
#Square the vibrational scaling used for frequencies
    vibrational_scaling_squared = vibrational_scaling**2
#Import all input data
[ bond_list, angle_list, coords, N, hessian, atom_names ] = input_data_processing( inputfilefolder)
#Find bond lengths
bond_lengths = np.zeros((N, N))
for i in range (0,N):
for j in range(0,N):
diff_i_j = np.array(coords[i,:]) - np.array(coords[j,:])
bond_lengths[i][j] = np.linalg.norm(diff_i_j)
eigenvectors = np.empty((3, 3, N, N), dtype=complex)
eigenvalues = np.empty((N, N, 3), dtype=complex)
partial_hessian = np.zeros((3, 3))
for i in range(0,N):
for j in range(0,N):
partial_hessian = hessian[(i * 3):((i + 1)*3),(j * 3):((j + 1)*3)]
[a, b] = np.linalg.eig(partial_hessian)
eigenvalues[i,j,:] = (a)
eigenvectors[:,:,i,j] = (b)
# The bond values are calculated and written to file
unique_values_bonds = bonds_calculated_printed( outputfilefolder, vibrational_scaling_squared, bond_list, bond_lengths, atom_names, eigenvalues, eigenvectors, coords )
# The angle values are calculated and written to file
unique_values_angles = angles_calculated_printed( outputfilefolder, vibrational_scaling_squared, angle_list, bond_lengths, atom_names, eigenvalues, eigenvectors, coords )
#The final section finds the average bond and angle terms for each
#bond/angle class if the .z exists to supply angle/bond classes and then
#writes the new terms to a .sb file
if os.path.exists(inputfilefolder + 'Zmat.z'):
average_values_across_classes( unique_values_bonds, unique_values_angles, outputfilefolder )
sb_file_new_parameters(outputfilefolder, 'Python_Modified_Scaled')
import sys
modified_Seminario_method( sys.argv[1], sys.argv[2], float(sys.argv[3]) )
```
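For reference, a toy sketch (editor's illustration, not from the original repository) of the per-atom-pair eigendecomposition performed in the double loop above: each 3x3 interatomic sub-block of the 3N x 3N Hessian is diagonalized on its own.
```python
import numpy as np

# Toy 6x6 "Hessian" for two atoms (placeholder numbers, not real force constants).
hessian = np.arange(36.).reshape(6, 6)
i, j = 0, 1  # atom indices
partial_hessian = hessian[(i * 3):((i + 1) * 3), (j * 3):((j + 1) * 3)]
eigenvalues, eigenvectors = np.linalg.eig(partial_hessian)
print(eigenvalues)  # may be complex: off-diagonal sub-blocks need not be
                    # symmetric, hence the complex dtype arrays used above
```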
#### File: ModSeminario_Py/Python_Modified_Seminario_Method/unit_vector_N.py
```python
def unit_vector_N(u_BC, u_AB):
# Calculates unit normal vector which is perpendicular to plane ABC
import numpy as np
cross_product = np.cross(u_BC, u_AB)
norm_u_N = np.linalg.norm(cross_product)
u_N = cross_product / norm_u_N
return u_N
``` |
{
"source": "aa88bb/Introduction-to-programming-using-python",
"score": 3
} |
#### File: aa88bb/Introduction-to-programming-using-python/chapter6.py
```python
def addAll(n1,n2):
result = 0
for i in range(n1,n2+1):
result += i
return result
print(addAll(1,10))
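# Editor's note: equivalent to the built-in one-liner
#   sum(range(1, 11))  # == addAll(1, 10) == 55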
```
#### File: aa88bb/Introduction-to-programming-using-python/chapter7.py
```python
class TV:
    def __init__(self, channel=1, volumeLevel=1, on=False):
        # A single __init__ with default values replaces the two previous
        # definitions: Python keeps only the last __init__ in a class body,
        # so the original no-argument constructor was unreachable.
        self.channel = channel
        self.volumeLevel = volumeLevel
        self.on = on
def turnOn(self):
self.on = True
def setChannel(self,channel):
self.channel = channel
myTV = TV(89,2,False)
# myTV.channel = 8
# myTV.setChannel(888)
print(myTV.channel)
``` |
{
"source": "aa989190f363e46d/rceth-drugs-watcher",
"score": 2
} |
#### File: drugRegSpider/spiders/drugsSpider.py
```python
import scrapy
from drugRegSpider.items import DrugregSpiderItem as dri
from scrapy.http import FormRequest
import logging
class DrugsSpider(scrapy.Spider):
name = "DrugsSpider"
allowed_domains = ["rceth.by"]
def _getReq(self, letter, pageNum = 1, controllerState = u''):
logging.info(u"Make query %s-%i" % (letter,pageNum))
_url = u'https://rceth.by/Refbank/reestr_lekarstvennih_sredstv/results'
_frmRqst = {u'FProps[0].IsText': u'True',
u'FProps[0].Name': u'N_LP',
u'FProps[0].CritElems[0].Num': u'1',
u'FProps[0].CritElems[0].Val': letter,
u'FProps[0].CritElems[0].Crit': u'Start',
u'FProps[0].CritElems[0].Excl': u'false',
u'FOpt.VFiles': u'true',
u'FOpt.VEField1': u'true',
u'IsPostBack': u'true',
u'PropSubmit': u'FOpt_PageN',
u'ValueSubmit': u'%i' % (pageNum,),
u'FOpt.PageC': u'100',
u'FOpt.OrderBy': u'N_LP',
u'FOpt.DirOrder': u'asc',
u'QueryStringFind': controllerState
}
return FormRequest(_url, formdata=_frmRqst, callback=self.parse,
meta={'currLetter': letter,'currPageNum': pageNum})
def start_requests(self):
_initSeq = u'АБВГДЕЖЗИКЛМНОПРСТУФХЦЧЭЮЯ0123456789'
self.traversed = {l:[] for l in _initSeq}
return [self._getReq(l) for l in _initSeq]
def extract_text(self,elem):
return elem.xpath('text()').extract()[0].strip()
def parse(self, response):
currLetter = response.meta['currLetter']
currPageNum = response.meta['currPageNum']
for i in range(1,len(response.xpath('//a[@name="FOpt_PageN"]'))):
if not i in self.traversed[currLetter]:
controllerState = response.xpath(u'//input[@id="QueryStringFind"]/@value').extract()[0]
self.traversed[currLetter].append(i)
yield self._getReq(currLetter,i+1, controllerState)
for tr in response.xpath('//div[@class="table-view"]/table/tbody/tr'):
currRow = tr.xpath('td')
currItem = dri()
currItem["name"] = currRow[1].xpath('a/text()').extract()[0].strip()
currItem["mnn"] = self.extract_text(currRow[2])
currItem["lForm"] = self.extract_text(currRow[3])
currItem["manufacturer"] = self.extract_text(currRow[4])
currItem["invoker"] = self.extract_text(currRow[5])
currItem["certNum"] = self.extract_text(currRow[6])
currItem["regDtBegin"] = self.extract_text(currRow[7])
currItem["regDtExpire"] = self.extract_text(currRow[8])
currItem["originality"] = self.extract_text(currRow[9])
currItem["manuals"] = '\n'.join([u':'.join([a.xpath('text()').extract()[0],a.xpath('@href').extract()[0].split('/')[-1]]) for a in currRow[1].xpath('span/a')])
currItem["file_urls"] = [u for u in [u'https://www.rceth.by%s' % (href,) for href in currRow[1].xpath('span/a/@href').extract()]]
yield currItem
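# Editor's note -- one hypothetical way to run this spider from a plain script
# (inside a Scrapy project you would normally use `scrapy crawl DrugsSpider`):
#
#   from scrapy.crawler import CrawlerProcess
#   process = CrawlerProcess()
#   process.crawl(DrugsSpider)
#   process.start()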
``` |
{
"source": "aa989190f363e46d/scraper103by",
"score": 2
} |
#### File: scraper_103_by/spiders/grodnenskoe_rup_pharmacia_pharmacy.py
```python
import scrapy
from scraper_103_by.items import Scraper103ByItem
import logging
class GrodnenskoeRupPharmaciaPharmacySpider(scrapy.Spider):
name = "grodnenskoe_rup_pharmacia_pharmacy"
allowed_domains = ["apteka.103.by"]
    # Pharmacy numbers of the Grodnenskoe RUP "Pharmacia"
    # in 103.by notation
gr_ph_nums = ["1", "106", "109", "110", "113", "114",
"84", "89" "119", "124", "131", "134",
"135", "143", "145", "147", "149", "150",
"152", "153", "157", "158", "159", "160",
"164", "170", "174", "175", "18", "180",
"186", "188", "19", "190", "195", "199",
"2", "200", "203", "205", "215", "220",
"221", "223", "230", "232", "3", "36",
"43", "49", "63", "79", "131_nochnoy",
"114_nochnoy"]
# for debug
#gr_ph_nums = ["1"]
url_tmplt = u'http://www.apteka.103.by/pharmacies/grodnenskoe_rup_pharmacia_pharmacy_%s'
    # URLs of the first page of each pharmacy's product list
start_urls = [url_tmplt % (num,) for num in gr_ph_nums]
traversed = []
def parse(self, response):
        # collect the actual data
curr_apt = response.xpath('//h1[@class="title02"]/text()').extract()[0].strip()
#logging.warning(curr_apt)
for tr in response.xpath('//table[@class="table02"]/tr'):
curr_item = Scraper103ByItem()
curr_item["source"] = curr_apt
curr_item["asset"] = tr.xpath('td[@class="th01"]/span[@class="link"]/text()').extract()[0].strip()
curr_item["manufacturer"] = tr.xpath('td[@class="th01"]/span[@class="name"]/text()').extract()[0].strip()
curr_item["quant"] = tr.xpath('td[@class="th03"]/strong/text()').extract()[0].strip()
curr_item["price"] = tr.xpath('td[@class="th04"]/span[@class="price"]/text()').extract()[0].strip()
yield curr_item
        # handle pagination
curr_pg = response.xpath('//div[@class="pager line"]/a[@class="current"]/@href').extract()[0].strip()
if curr_pg not in self.traversed:
self.traversed.append(curr_pg)
for pg in response.xpath('//div[@class="pager line"]/a/@href').extract():
if pg.strip() not in self.traversed:
self.traversed.append(pg.strip())
yield scrapy.http.Request('http://apteka.103.by%s' % (pg,))
``` |
{
"source": "aaa1191/ArchiveBox",
"score": 2
} |
#### File: archivebox/index/sql.py
```python
__package__ = 'archivebox.index'
import re
from io import StringIO
from pathlib import Path
from typing import List, Tuple, Iterator
from django.db.models import QuerySet
from django.db import transaction
from .schema import Link
from ..util import enforce_types, parse_date
from ..config import (
OUTPUT_DIR,
TAG_SEPARATOR_PATTERN,
)
### Main Links Index
@enforce_types
def parse_sql_main_index(out_dir: Path=OUTPUT_DIR) -> Iterator[Link]:
from core.models import Snapshot
return (
Link.from_json(page.as_json(*Snapshot.keys))
for page in Snapshot.objects.all()
)
@enforce_types
def remove_from_sql_main_index(snapshots: QuerySet, atomic: bool=False, out_dir: Path=OUTPUT_DIR) -> None:
if atomic:
with transaction.atomic():
return snapshots.delete()
return snapshots.delete()
@enforce_types
def write_link_to_sql_index(link: Link):
from core.models import Snapshot, ArchiveResult
info = {k: v for k, v in link._asdict().items() if k in Snapshot.keys}
tag_list = list(dict.fromkeys(
tag.strip() for tag in re.split(TAG_SEPARATOR_PATTERN, link.tags or '')
))
info.pop('tags')
try:
info["timestamp"] = Snapshot.objects.get(url=link.url).timestamp
except Snapshot.DoesNotExist:
while Snapshot.objects.filter(timestamp=info["timestamp"]).exists():
info["timestamp"] = str(float(info["timestamp"]) + 1.0)
snapshot, _ = Snapshot.objects.update_or_create(url=link.url, defaults=info)
snapshot.save_tags(tag_list)
for extractor, entries in link.history.items():
for entry in entries:
if isinstance(entry, dict):
result, _ = ArchiveResult.objects.get_or_create(
snapshot_id=snapshot.id,
extractor=extractor,
start_ts=parse_date(entry['start_ts']),
defaults={
'end_ts': parse_date(entry['end_ts']),
'cmd': entry['cmd'],
'output': entry['output'],
'cmd_version': entry.get('cmd_version') or 'unknown',
'pwd': entry['<PASSWORD>'],
'status': entry['status'],
}
)
else:
result, _ = ArchiveResult.objects.update_or_create(
snapshot_id=snapshot.id,
extractor=extractor,
start_ts=parse_date(entry.start_ts),
defaults={
'end_ts': parse_date(entry.end_ts),
'cmd': entry.cmd,
'output': entry.output,
'cmd_version': entry.cmd_version or 'unknown',
'pwd': <PASSWORD>,
'status': entry.status,
}
)
return snapshot
@enforce_types
def write_sql_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR) -> None:
for link in links:
# with transaction.atomic():
# write_link_to_sql_index(link)
write_link_to_sql_index(link)
@enforce_types
def write_sql_link_details(link: Link, out_dir: Path=OUTPUT_DIR) -> None:
from core.models import Snapshot
# with transaction.atomic():
# try:
# snap = Snapshot.objects.get(url=link.url)
# except Snapshot.DoesNotExist:
# snap = write_link_to_sql_index(link)
# snap.title = link.title
try:
snap = Snapshot.objects.get(url=link.url)
except Snapshot.DoesNotExist:
snap = write_link_to_sql_index(link)
snap.title = link.title
tag_list = list(dict.fromkeys(
tag.strip() for tag in re.split(TAG_SEPARATOR_PATTERN, link.tags or '')
))
snap.save()
snap.save_tags(tag_list)
@enforce_types
def list_migrations(out_dir: Path=OUTPUT_DIR) -> List[Tuple[bool, str]]:
from django.core.management import call_command
out = StringIO()
call_command("showmigrations", list=True, stdout=out)
out.seek(0)
migrations = []
for line in out.readlines():
if line.strip() and ']' in line:
status_str, name_str = line.strip().split(']', 1)
is_applied = 'X' in status_str
migration_name = name_str.strip()
migrations.append((is_applied, migration_name))
return migrations
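    # Example (illustrative): a showmigrations line " [X] 0001_initial" strips to
    # "[X] 0001_initial", splits on the first "]" into ("[X", " 0001_initial"),
    # and is recorded as (True, "0001_initial").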
@enforce_types
def apply_migrations(out_dir: Path=OUTPUT_DIR) -> List[str]:
from django.core.management import call_command
null, out = StringIO(), StringIO()
call_command("makemigrations", interactive=False, stdout=null)
call_command("migrate", interactive=False, stdout=out)
out.seek(0)
return [line.strip() for line in out.readlines() if line.strip()]
@enforce_types
def get_admins(out_dir: Path=OUTPUT_DIR) -> List[str]:
from django.contrib.auth.models import User
return User.objects.filter(is_superuser=True)
``` |
{
"source": "aaa121/API-Text-Streaming",
"score": 3
} |
#### File: aaa121/API-Text-Streaming/Project_1.py
```python
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
import read_write_file_algorithm as rd
import time
ckey='A7wThk54hZMHwPPUgz5ynivJc'
csecret='<KEY>'
atoken='<KEY>'
asecret='<KEY>'
class listener(StreamListener):
def on_data(self, raw_data):
try:
#print(raw_data)
rd.read_write_file('Trumps.txt',raw_data)
return True
except BaseException:
print('Failed on data')
time.sleep(5)
    def on_error(self, status_code):
        print(status_code)
auth=OAuthHandler(ckey,csecret) # This set the authentication parameters
auth.set_access_token(atoken,asecret) # This set the auth for the token
twitterStream=Stream(auth,listener()) #set the stream pass-through
twitterStream.filter(track=["Trump"])
``` |
{
"source": "aaa121/Big-Data-Analytics",
"score": 4
} |
#### File: aaa121/Big-Data-Analytics/random_string_generator.py
```python
import string
import random
import timeit
def random_string(length):
rndstring=''
for i in range(length):
rndstring+=random.choice(string.ascii_letters)
return rndstring
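# Editor's note: an equivalent, more idiomatic one-liner (random.choices is in
# the standard library since Python 3.6 and samples with replacement):
#   ''.join(random.choices(string.ascii_letters, k=length))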
print(random_string(6))
print(random_string(6).upper())
print(random_string(6).lower())
#Store the random string in a dictionary
email_id={}
email_pg={}
for i in range(0,5):
email_id[random_string(3)]=random_string(6).upper()
email_pg[i] = random_string(5).lower()
print(email_id)
print(email_pg)
print(email_pg[3])
print(round(timeit.timeit(),3))
``` |
{
"source": "AAA3A-AAA3A/AAA3A-cogs",
"score": 2
} |
#### File: AAA3A-cogs/antinuke/antinuke.py
```python
from .AAA3A_utils.cogsutils import CogsUtils # isort:skip
from redbot.core import commands # isort:skip
from redbot.core.i18n import Translator, cog_i18n # isort:skip
from redbot.core.bot import Red # isort:skip
import discord # isort:skip
import typing # isort:skip
from typing import List, Optional, Tuple, Union
from redbot.core import Config
# Credits:
# Thanks to @epic guy on Discord for the basic syntax (command groups, commands) and also commands (await ctx.send, await ctx.author.send, await ctx.message.delete())!
# Thanks to TrustyJAID for the code (a bit modified to work here and to improve as needed) for the log messages sent! (https://github.com/TrustyJAID/Trusty-cogs/tree/master/extendedmodlog)
# Thanks to the developers of the cogs I added features to as it taught me how to make a cog! (Chessgame by WildStriker, Captcha by Kreusada, Speak by Epic guy and Rommer by Dav)
# Thanks to all the people who helped me with some commands in the #coding channel of the redbot support server!
_ = Translator("AntiNuke", __file__)
@cog_i18n(_)
class AntiNuke(commands.Cog):
"""A cog to remove all permissions from a person who deletes a channel!"""
def __init__(self, bot):
self.bot: Red = bot
self.config: Config = Config.get_conf(
self,
identifier=947269490247,
force_registration=True,
)
self.antinuke_guild = {
"logschannel": None, # The channel for logs.
"enabled": False, # Enable the possibility.
"user_dm": True, # Enable the user dm.
"number_detected_member": 1, # Number.
"number_detected_bot": 1, # Number.
}
self.antinuke_member = {
"count": 0, # The count of channel's deletes.
"old_roles": [], # The roles to be handed in if it wasn't a nuke.
}
self.config.register_guild(**self.antinuke_guild)
self.config.register_member(**self.antinuke_member)
self.cogsutils = CogsUtils(cog=self)
self.cogsutils._setup()
@commands.Cog.listener()
async def on_guild_channel_delete(self, old_channel: discord.abc.GuildChannel):
"""Remove all permissions from a user if they delete a channel.
"""
config = await self.config.guild(old_channel.guild).all()
logschannel = config["logschannel"]
actual_state_enabled = config["enabled"]
actual_state_user_dm = config["user_dm"]
actual_number_detected_member = config["number_detected_member"]
actual_number_detected_bot = config["number_detected_bot"]
perp, reason = await self.get_audit_log_reason(
old_channel.guild, old_channel, discord.AuditLogAction.channel_delete
)
logschannel = self.bot.get_channel(logschannel)
if perp is None:
return
if perp == old_channel.guild.owner:
return
if perp == old_channel.guild.me:
return
actual_count = await self.config.member(perp).count()
if actual_state_enabled:
if not perp.bot:
actual_number_detected = actual_number_detected_member
if actual_number_detected == 0:
return
else:
actual_number_detected = actual_number_detected_bot
if actual_number_detected == 0:
return
actual_count += 1
if actual_count >= actual_number_detected:
await self.config.member(perp).count.clear()
old_roles = perp.roles.copy()
old_roles.remove(old_channel.guild.default_role)
old_roles = [
r for r in old_roles if r.position < old_channel.guild.me.top_role.position and not r.managed
]
rolelist_name = [r.name for r in old_roles]
rolelist_mention = [r.mention for r in old_roles]
                    if actual_state_user_dm:
                        try:
                            await perp.send(_("All your roles have been taken away because you have deleted channel #{old_channel}.\nYour former roles: {rolelist_name}").format(**locals()))
                        except discord.HTTPException:
                            # The user may have DMs disabled; don't abort the rest of the handling.
                            pass
if old_channel.guild.me.guild_permissions.manage_roles:
# await perp.edit(roles=[], reason=f"All roles in {perp} ({perp.id}) roles have been removed as a result of the antinuke system being triggered on this server.")
for role in old_roles:
try:
await perp.remove_roles(role, reason=_("All roles in {perp} ({perp.id}) roles have been removed as a result of the antinuke system being triggered on this server.").format(**locals()))
except Exception:
pass
await self.config.member(perp).old_roles.set(old_roles)
if logschannel:
embed: discord.Embed = discord.Embed()
embed.title = _("The user {perp.name}#{perp.discriminator} has deleted the channel #{old_channel.name}!").format(**locals())
embed.description = _("To prevent him from doing anything else, I took away as many roles as my current permissions would allow.\nUser mention: {perp.mention} - User ID: {perp.id}").format(**locals())
embed.color = discord.Colour.dark_teal()
embed.set_author(name=perp, url=perp.display_avatar if self.cogsutils.is_dpy2 else perp.avatar_url, icon_url=perp.display_avatar if self.cogsutils.is_dpy2 else perp.avatar_url)
                        embed.add_field(
                            inline=False,
                            name=_("Before I intervened, the user had the following roles:").format(**locals()),
                            value=", ".join(rolelist_mention))
                        # `logschannel` was already resolved to a channel object above;
                        # calling get_channel on it again would return None.
                        await logschannel.send(embed=embed)
else:
await self.config.member(perp).count.set(actual_count)
return
else:
return
async def get_audit_log_reason(
self,
guild: discord.Guild,
target: Union[discord.abc.GuildChannel, discord.Member, discord.Role],
action: discord.AuditLogAction,
) -> Tuple[Optional[discord.abc.User], Optional[str]]:
perp = None
reason = None
if guild.me.guild_permissions.view_audit_log:
async for log in guild.audit_logs(limit=5, action=action):
if log.target.id == target.id:
perp = log.user
if log.reason:
reason = log.reason
break
return perp, reason
@commands.guild_only()
@commands.guildowner()
@commands.group(name="setantinuke", aliases=["antinukeset"])
async def configuration(self, ctx: commands.Context):
"""Configure AntiNuke for your server."""
@configuration.command(aliases=["lchann", "lchannel", "logschan", "logchannel", "logsc"], usage="<text_channel_or_'none'>")
async def logschannel(self, ctx: commands.Context, *, channel: typing.Optional[discord.TextChannel]=None):
"""Set a channel where events are registered.
``channel``: Text channel.
You can also use "None" if you wish to remove the logging channel.
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
if channel is None:
await self.config.guild(ctx.guild).logschannel.clear()
await ctx.send(_("Logging channel removed.").format(**locals()))
return
needperm = await self.check_permissions_in_channel(["embed_links", "read_messages", "read_message_history", "send_messages", "attach_files"], channel)
if needperm:
await ctx.send(_("The bot does not have at least one of the following permissions in this channel: `embed_links`, `read_messages`, `read_message_history`, `send_messages`, `attach_files`.").format(**locals()))
return
await self.config.guild(ctx.guild).logschannel.set(channel.id)
await ctx.send(_("Logging channel registered: {channel.mention}.").format(**locals()))
async def check_permissions_in_channel(self, permissions: List[str], channel: discord.TextChannel):
"""Function to checks if the permissions are available in a guild.
This will return a list of the missing permissions.
"""
return [
permission
for permission in permissions
if not getattr(channel.permissions_for(channel.guild.me), permission)
]
@configuration.command(name="enable", aliases=["activate"], usage="<true_or_false>")
async def enable(self, ctx: commands.Context, state: bool):
"""Enable or disable AntiNuke.
Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
config = await self.config.guild(ctx.guild).all()
actual_state_enabled = config["enabled"]
if actual_state_enabled is state:
await ctx.send(_("AntiNuke is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).enabled.set(state)
await ctx.send(_("AntiNuke state registered: {state}.").format(**locals()))
@configuration.command(name="userdm", aliases=["dm"], usage="<true_or_false>")
async def userdm(self, ctx: commands.Context, state: bool):
"""Enable or disable User DM.
Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
config = await self.config.guild(ctx.guild).all()
actual_state_user_dm = config["user_dm"]
if actual_state_user_dm is state:
await ctx.send(_("User DM is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).user_dm.set(state)
await ctx.send(_("User DM state registered: {state}.").format(**locals()))
@configuration.command(name="nbmember", aliases=["membernb"], usage="<int>")
async def nbmember(self, ctx: commands.Context, int: int):
"""Number Detected - Member
Before action, how many deleted channels should be detected?
`0' to disable this protection.
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
await self.config.guild(ctx.guild).number_detected_member.set(int)
await ctx.send(_("Number Detected - Member registered: {int}.").format(**locals()))
@configuration.command(name="nbbot", aliases=["botsnb"], usage="<int>")
async def nbbot(self, ctx: commands.Context, int: int):
"""Number Detected - Bot
Before action, how many deleted channels should be detected?
`0' to disable this protection.
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
await self.config.guild(ctx.guild).number_detected_bot.set(int)
await ctx.send(_("Number Detected - Bot registered: {int}.").format(**locals()))
@configuration.command(name="resetuser", aliases=["userreset"], usage="<int>")
async def resetuser(self, ctx: commands.Context, user: discord.Member, give_roles: bool = False):
"""Reset number detected for a user
"""
if not ctx.author.id == ctx.guild.owner.id:
await ctx.send(_("Only the owner of this server can access these commands!").format(**locals()))
return
config = await self.config.member(user).all()
if give_roles:
old_roles = config["old_roles"]
old_roles = [ctx.guild.get_role(r) for r in old_roles]
old_roles = [
r for r in old_roles if r.position < ctx.guild.me.top_role.position and not r.managed
]
if not old_roles == []:
# await user.edit(roles=old_roles, reason=f"All former roles of {user} ({user.id}) have been restored at the request of the server owner.")
await user.add_roles(*old_roles, reason=_("All former roles of {user} ({user.id}) have been restored at the request of the server owner.").format(**locals()))
await ctx.send(_("Restored roles for {user.name} ({user.id}).").format(**locals()))
await self.config.member(user).count.clear()
await self.config.member(user).old_roles.clear()
await ctx.send(_("Count removed for {user.name} ({user.id}).").format(**locals()))
```
#### File: AAA3A-cogs/discordmodals/__init__.py
```python
from .AAA3A_utils.cogsutils import CogsUtils # isort:skip
from redbot.core.bot import Red # isort:skip
import json
from pathlib import Path
from .discordmodals import DiscordModals
with open(Path(__file__).parent / "info.json") as fp:
__red_end_user_data_statement__ = json.load(fp)["end_user_data_statement"]
async def setup(bot: Red):
cog = DiscordModals(bot)
await CogsUtils().add_cog(bot, cog)
```
#### File: discordsearch/AAA3A_utils/cogsutils.py
```python
from redbot.core import commands # isort:skip
import discord # isort:skip
import typing # isort:skip
import asyncio
import contextlib
import datetime
import inspect
import logging
import math
import os
import platform
import re
import string
import sys
import traceback
from copy import copy
from io import StringIO
from pathlib import Path
from random import choice
from time import monotonic
import aiohttp
import pip
from rich.console import Console
from rich.table import Table
import redbot
from redbot import version_info as red_version_info
from redbot.cogs.downloader.converters import InstalledCog
from redbot.cogs.downloader.repo_manager import Repo
from redbot.core._diagnoser import IssueDiagnoser
from redbot.core.bot import Red
from redbot.core.data_manager import basic_config, cog_data_path, config_file, instance_name, storage_type
from redbot.core.utils.chat_formatting import bold, box, error, humanize_list, humanize_timedelta, inline, pagify, text_to_file, warning
from redbot.core.utils.menus import start_adding_reactions
from redbot.core.utils.predicates import MessagePredicate, ReactionPredicate
from redbot.logging import RotatingFileHandler
from redbot.vendored.discord.ext import menus
__all__ = ["CogsUtils", "Loop", "Captcha", "Buttons", "Dropdown", "Modal", "Reactions"]
def _(untranslated: str):
return untranslated
def no_colour_rich_markup(*objects: typing.Any, lang: str = "") -> str:
"""
Slimmed down version of rich_markup which ensure no colours (/ANSI) can exist
https://github.com/Cog-Creators/Red-DiscordBot/pull/5538/files (Kowlin)
"""
temp_console = Console( # Prevent messing with STDOUT's console
color_system=None,
file=StringIO(),
force_terminal=True,
width=80,
)
temp_console.print(*objects)
return box(temp_console.file.getvalue(), lang=lang) # type: ignore
class CogsUtils(commands.Cog):
"""Tools for AAA3A-cogs!"""
def __init__(self, cog: typing.Optional[commands.Cog]=None, bot: typing.Optional[Red]=None):
if cog is not None:
if isinstance(cog, str):
cog = bot.get_cog(cog)
self.cog: commands.Cog = cog
self.bot: Red = self.cog.bot if hasattr(self.cog, "bot") else bot
self.DataPath: Path = cog_data_path(cog_instance=self.cog)
elif bot is not None:
self.cog: commands.Cog = None
self.bot: Red = bot
else:
self.cog: commands.Cog = None
self.bot: Red = None
self.__authors__ = ["AAA3A"]
self.__version__ = 1.0
self.interactions = {"slash": [], "buttons": [], "dropdowns": [], "added": False, "removed": False}
if self.cog is not None:
if hasattr(self.cog, "__authors__"):
if isinstance(self.cog.__authors__, typing.List):
self.__authors__ = self.cog.__authors__
else:
self.__authors__ = [self.cog.__authors__]
del self.cog.__authors__
elif hasattr(self.cog, "__author__"):
if isinstance(self.cog.__author__, typing.List):
self.__authors__ = self.cog.__author__
else:
self.__authors__ = [self.cog.__author__]
del self.cog.__author__
self.cog.__authors__ = self.__authors__
if hasattr(self.cog, "__version__"):
if isinstance(self.cog.__version__, typing.List):
self.__version__ = self.cog.__version__
del self.cog.__version__
self.cog.__version__ = self.__version__
if hasattr(self.cog, "__func_red__"):
if not isinstance(self.cog.__func_red__, typing.List):
self.cog.__func_red__ = []
else:
self.cog.__func_red__ = []
if hasattr(self.cog, "interactions"):
if isinstance(self.cog.interactions, typing.Dict):
self.interactions = self.cog.interactions
self.loops: typing.Dict = {}
self.repo_name: str = "AAA3A-cogs"
self.all_cogs: typing.List = [
"AntiNuke",
"AutoTraceback",
"Calculator",
"ClearChannel",
"CmdChannel",
"CtxVar",
"DiscordModals",
"DiscordSearch",
"EditFile",
"EditRole",
"ExportChannel",
"GetLoc",
"Ip",
"Medicat", # Private cog.
"MemberPrefix",
"ReactToCommand",
"RolesButtons",
"SimpleSanction",
"Sudo",
"TicketTool",
"TransferChannel"
]
self.all_cogs_dpy2: typing.List = [
"AntiNuke",
"AutoTraceback",
"Calculator",
"ClearChannel",
"CmdChannel",
"CtxVar",
"DiscordModals",
"DiscordSearch",
"EditFile",
"EditRole",
"ExportChannel",
"GetLoc",
"Ip",
"Medicat", # Private cog.
"MemberPrefix",
"ReactToCommand",
"RolesButtons",
"SimpleSanction",
"Sudo",
"TicketTool",
"TransferChannel"
]
if self.cog is not None:
if self.cog.__class__.__name__ not in self.all_cogs_dpy2 and self.cog.__class__.__name__ in self.all_cogs:
if self.is_dpy2 or redbot.version_info >= redbot.VersionInfo.from_str("3.5.0"):
raise RuntimeError(f"{self.cog.__class__.__name__} needs to be updated to run on dpy2/Red 3.5.0. It's best to use `[p]cog update` with no arguments to update all your cogs, which may be using new dpy2-specific methods.")
@property
def is_dpy2(self) -> bool:
"""
Returns True if the current redbot instance is running under dpy2.
"""
return discord.version_info.major >= 2
def format_help_for_context(self, ctx: commands.Context):
"""Thanks Simbad!"""
context = super(type(self.cog), self.cog).format_help_for_context(ctx)
s = "s" if len(self.__authors__) > 1 else ""
return f"{context}\n\n**Author{s}**: {humanize_list(self.__authors__)}\n**Version**: {self.__version__}"
def format_text_for_context(self, ctx: commands.Context, text: str, shortdoc: typing.Optional[bool]=False):
text = text.replace(" ", "")
context = super(type(ctx.command), ctx.command).format_text_for_context(ctx, text)
if shortdoc:
return context
s = "s" if len(self.__authors__) > 1 else ""
return f"{context}\n\n**Author{s}**: {humanize_list(self.__authors__)}\n**Version**: {self.__version__}"
def format_shortdoc_for_context(self, ctx: commands.Context):
sh = super(type(ctx.command), ctx.command).short_doc
try:
return super(type(ctx.command), ctx.command).format_text_for_context(ctx, sh, shortdoc=True) if sh else sh
except Exception:
return super(type(ctx.command), ctx.command).format_text_for_context(ctx, sh) if sh else sh
async def red_delete_data_for_user(self, **kwargs):
"""Nothing to delete"""
return
async def red_get_data_for_user(self, *args, **kwargs) -> typing.Dict[typing.Any, typing.Any]:
return {}
def cog_unload(self):
self._end()
async def cog_command_error(self, ctx: commands.Context, error: Exception):
if self.cog is None:
return
if isinstance(error, commands.CommandInvokeError):
asyncio.create_task(ctx.bot._delete_delay(ctx))
self.cog.log.exception(f"Exception in command '{ctx.command.qualified_name}'.", exc_info=error.original)
message = f"Error in command '{ctx.command.qualified_name}'. Check your console or logs for details.\nIf necessary, please inform the creator of the cog in which this command is located. Thank you."
exception_log = f"Exception in command '{ctx.command.qualified_name}.'\n"
exception_log += "".join(traceback.format_exception(type(error), error, error.__traceback__))
if "USERPROFILE" in os.environ:
exception_log = exception_log.replace(os.environ["USERPROFILE"], "{USERPROFILE}")
if "HOME" in os.environ:
exception_log = exception_log.replace(os.environ["HOME"], "{HOME}")
ctx.bot._last_exception = exception_log
await ctx.send(inline(message))
else:
await ctx.bot.on_command_error(ctx=ctx, error=error, unhandled_by_cog=True)
@staticmethod
def sanitize_output(ctx: commands.Context, input_: str) -> str:
"""Hides the bot's token from a string."""
token = ctx.bot.http.token
if "USERPROFILE" in os.environ:
input_ = input_.replace(os.environ["USERPROFILE"], "{USERPROFILE}")
input_ = input_.replace(os.environ["USERPROFILE"].lower(), "{USERPROFILE}")
if "HOME" in os.environ:
input_ = input_.replace(os.environ["HOME"], "{HOME}")
input_ = input_.replace(os.environ["HOME"].lower(), "{HOME}")
        return re.sub(re.escape(token), "[EXPUNGED]", input_, flags=re.I)
async def add_cog(self, bot: Red, cog: commands.Cog):
"""
Load a cog by checking whether the required function is awaitable or not.
"""
value = bot.add_cog(cog)
if inspect.isawaitable(value):
cog = await value
else:
cog = value
if hasattr(cog, "initialize"):
await cog.initialize()
return cog
def _setup(self):
"""
Adding additional functionality to the cog.
"""
if self.cog is not None:
self.cog.cogsutils = self
self.init_logger()
if "format_help_for_context" not in self.cog.__func_red__:
setattr(self.cog, 'format_help_for_context', self.format_help_for_context)
# for command in self.cog.walk_commands():
# setattr(command, 'format_text_for_context', self.format_text_for_context)
# setattr(command, 'format_shortdoc_for_context', self.format_shortdoc_for_context)
if "red_delete_data_for_user" not in self.cog.__func_red__:
setattr(self.cog, 'red_delete_data_for_user', self.red_delete_data_for_user)
if "red_get_data_for_user" not in self.cog.__func_red__:
setattr(self.cog, 'red_get_data_for_user', self.red_get_data_for_user)
if "cog_unload" not in self.cog.__func_red__:
setattr(self.cog, 'cog_unload', self.cog_unload)
if "cog_command_error" not in self.cog.__func_red__:
setattr(self.cog, 'cog_command_error', self.cog_command_error)
asyncio.create_task(self._await_setup())
self.bot.remove_listener(self.on_command_error)
self.bot.add_listener(self.on_command_error)
self.bot.remove_command("getallfor")
self.bot.add_command(getallfor)
async def _await_setup(self):
"""
Adds dev environment values, slash commands add Views.
"""
await self.bot.wait_until_red_ready()
try:
to_update, local_commit, online_commit = await self.to_update()
if to_update:
self.cog.log.warning(f"Your {self.cog.__class__.__name__} cog, from {self.repo_name}, is out of date. You can update your cogs with the 'cog update' command in Discord.")
else:
self.cog.log.debug(f"{self.cog.__class__.__name__} cog is up to date.")
except self.DownloaderNotLoaded:
pass
except asyncio.TimeoutError:
pass
except ValueError:
pass
except Exception as e: # really doesn't matter if this fails so fine with debug level
self.cog.log.debug(f"Something went wrong checking if {self.cog.__class__.__name__} cog is up to date.", exc_info=e)
self.add_dev_env_value()
if self.is_dpy2:
if not hasattr(self.bot, "tree"):
self.bot.tree = discord.app_commands.CommandTree(self.bot)
if not self.interactions == {}:
if "added" in self.interactions:
if not self.interactions["added"]:
if "slash" in self.interactions:
for slash in self.interactions["slash"]:
try:
self.bot.tree.add_command(slash, guild=None)
except Exception as e:
if hasattr(self.cog, "log"):
self.cog.log.error(f"The slash command `{slash.name}` could not be added correctly.", exc_info=e)
if "button" in self.interactions:
for button in self.interactions["button"]:
try:
self.bot.add_view(button, guild=None)
except Exception:
pass
self.interactions["removed"] = False
self.interactions["added"] = True
await self.bot.tree.sync(guild=None)
def _end(self):
"""
Removes dev environment values, slash commands add Views.
"""
self.close_logger()
self.remove_dev_env_value()
for loop in self.loops:
self.loops[loop].stop_all()
        if not self.at_least_one_cog_loaded():
self.bot.remove_listener(self.on_command_error)
self.bot.remove_command("getallfor")
asyncio.create_task(self._await_end())
async def _await_end(self):
if self.is_dpy2:
if not self.interactions == {}:
if "removed" in self.interactions:
if not self.interactions["removed"]:
if "slash" in self.interactions:
for slash in self.interactions["slash"]:
try:
self.bot.tree.remove_command(slash, guild=None)
except Exception as e:
if hasattr(self.cog, "log"):
self.cog.log.error(f"The slash command `{slash.name}` could not be removed correctly.", exc_info=e)
if "button" in self.interactions:
for button in self.interactions["button"]:
try:
self.bot.remove_view(button, guild=None)
except Exception:
pass
self.interactions["added"] = False
self.interactions["removed"] = True
await asyncio.sleep(2)
await self.bot.tree.sync(guild=None)
def init_logger(self):
"""
Prepare the logger for the cog.
Thanks to @laggron42 on GitHub! (https://github.com/laggron42/Laggron-utils/blob/master/laggron_utils/logging.py)
"""
self.cog.log = logging.getLogger(f"red.{self.repo_name}.{self.cog.__class__.__name__}")
formatter = logging.Formatter(
"[{asctime}] {levelname} [{name}] {message}", datefmt="%Y-%m-%d %H:%M:%S", style="{"
)
# logging to a log file
        # file is automatically created by the module, if the parent folder exists
cog_path = cog_data_path(cog_instance=self.cog)
if cog_path.exists():
file_handler = RotatingFileHandler(
stem=self.cog.__class__.__name__,
directory=cog_path,
                maxBytes=10_000,
backupCount=0,
encoding="utf-8",
)
# file_handler.doRollover()
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
self.cog.log.addHandler(file_handler)
def close_logger(self):
"""
Closes the files for the logger of a cog.
"""
for handler in self.cog.log.handlers:
handler.close()
self.cog.log.handlers = []
def add_dev_env_value(self):
"""
If the bot owner is X, then add several values to the development environment, if they don't already exist.
Even checks the id of the bot owner in the variable of my Sudo cog, if it is installed and loaded.
"""
sudo_cog = self.bot.get_cog("Sudo")
if sudo_cog is None:
owner_ids = self.bot.owner_ids
else:
if hasattr(sudo_cog, "all_owner_ids"):
if len(sudo_cog.all_owner_ids) == 0:
owner_ids = self.bot.owner_ids
else:
owner_ids = self.bot.owner_ids + sudo_cog.all_owner_ids
else:
owner_ids = self.bot.owner_ids
if 829612600059887649 in owner_ids:
if self.is_dpy2:
to_add = {
self.cog.__class__.__name__: lambda x: self.cog,
"CogsUtils": lambda ctx: CogsUtils,
"Loop": lambda ctx: Loop,
"Captcha": lambda ctx: Captcha,
"Buttons": lambda ctx: Buttons,
"Dropdown": lambda ctx: Dropdown,
"Modal": lambda ctx: Modal,
"Reactions": lambda ctx: Reactions,
"Menu": lambda ctx: Menu,
"discord": lambda ctx: discord,
"redbot": lambda ctx: redbot,
"Red": lambda ctx: Red,
"typing": lambda ctx: typing,
"inspect": lambda ctx: inspect
}
else:
to_add = {
self.cog.__class__.__name__: lambda x: self.cog,
"CogsUtils": lambda ctx: CogsUtils,
"Loop": lambda ctx: Loop,
"Captcha": lambda ctx: Captcha,
"Menu": lambda ctx: Menu,
"discord": lambda ctx: discord,
"redbot": lambda ctx: redbot,
"Red": lambda ctx: Red,
"typing": lambda ctx: typing,
"inspect": lambda ctx: inspect
}
for name, value in to_add.items():
try:
try:
self.bot.remove_dev_env_value(name)
except KeyError:
pass
self.bot.add_dev_env_value(name, value)
except RuntimeError:
pass
except Exception as e:
self.cog.log.error(f"Error when adding the value `{name}` to the development environment.", exc_info=e)
Dev = self.bot.get_cog("Dev")
if Dev is not None:
setattr(Dev, 'sanitize_output', self.sanitize_output)
def remove_dev_env_value(self):
"""
        If the bot owner is X, then remove several values from the development environment, if they exist.
Even checks the id of the bot owner in the variable of my Sudo cog, if it is installed and loaded.
"""
sudo_cog = self.bot.get_cog("Sudo")
if sudo_cog is None:
owner_ids = self.bot.owner_ids
else:
if hasattr(sudo_cog, "all_owner_ids"):
if len(sudo_cog.all_owner_ids) == 0:
owner_ids = self.bot.owner_ids
else:
owner_ids = self.bot.owner_ids + sudo_cog.all_owner_ids
else:
owner_ids = self.bot.owner_ids
if 829612600059887649 in owner_ids:
try:
self.bot.remove_dev_env_value(self.cog.__class__.__name__)
except Exception:
pass
@commands.Cog.listener()
async def on_command_error(self, ctx: commands.Context, error: commands.CommandError):
"""
Record all exceptions generated by commands by cog and by command in `bot.last_exceptions_cogs`.
All my cogs will add this listener if it doesn't exist, so I need to record this in a common variable. Also, this may be useful to others.
"""
try:
IGNORED_ERRORS = (
commands.UserInputError,
commands.DisabledCommand,
commands.CommandNotFound,
commands.CheckFailure,
commands.NoPrivateMessage,
commands.CommandOnCooldown,
commands.MaxConcurrencyReached,
commands.BadArgument,
commands.BadBoolArgument,
)
if ctx.cog is not None:
cog = ctx.cog.__class__.__name__
else:
cog = "None"
if ctx.command is None:
return
if isinstance(error, IGNORED_ERRORS):
return
if not hasattr(self.bot, "last_exceptions_cogs"):
self.bot.last_exceptions_cogs = {}
if "global" not in self.bot.last_exceptions_cogs:
self.bot.last_exceptions_cogs["global"] = []
if error in self.bot.last_exceptions_cogs["global"]:
return
self.bot.last_exceptions_cogs["global"].append(error)
            if isinstance(error, commands.CommandError):
                # The USERPROFILE/HOME scrubbing just below is guarded by `in os.environ`,
                # so don't index os.environ["USERPROFILE"] unconditionally here (KeyError on Linux).
                traceback_error = "".join(traceback.format_exception(type(error), error, error.__traceback__))
else:
traceback_error = f"Traceback (most recent call last): {error}"
if "USERPROFILE" in os.environ:
traceback_error = traceback_error.replace(os.environ["USERPROFILE"], "{USERPROFILE}")
if "HOME" in os.environ:
traceback_error = traceback_error.replace(os.environ["HOME"], "{HOME}")
if cog not in self.bot.last_exceptions_cogs:
self.bot.last_exceptions_cogs[cog] = {}
if ctx.command.qualified_name not in self.bot.last_exceptions_cogs[cog]:
self.bot.last_exceptions_cogs[cog][ctx.command.qualified_name] = []
self.bot.last_exceptions_cogs[cog][ctx.command.qualified_name].append(traceback_error)
except Exception:
pass
async def ConfirmationAsk(
self,
ctx: commands.Context,
text: typing.Optional[str]=None,
embed: typing.Optional[discord.Embed]=None,
file: typing.Optional[discord.File]=None,
timeout: typing.Optional[int]=60,
timeout_message: typing.Optional[str]=_("Timed out, please try again").format(**locals()),
way: typing.Optional[typing.Literal["buttons", "dropdown", "reactions", "message"]] = "buttons",
message: typing.Optional[discord.Message]=None,
put_reactions: typing.Optional[bool]=True,
delete_message: typing.Optional[bool]=True,
reactions: typing.Optional[typing.Iterable[typing.Union[str, discord.Emoji]]]=["✅", "❌"],
check_owner: typing.Optional[bool]=True,
members_authored: typing.Optional[typing.Iterable[discord.Member]]=[]):
"""
Allow confirmation to be requested from the user, in the form of buttons/dropdown/reactions/message, with many additional options.
"""
        if not self.is_dpy2 and way in ("buttons", "dropdown"):
            way = "reactions"
if message is None:
if not text and not embed and not file:
if way == "buttons":
text = _("To confirm the current action, please use the buttons below this message.").format(**locals())
if way == "dropdown":
text = _("To confirm the current action, please use the dropdown below this message.").format(**locals())
if way == "reactions":
text = _("To confirm the current action, please use the reactions below this message.").format(**locals())
if way == "message":
text = _("To confirm the current action, please send yes/no in this channel.").format(**locals())
if not way == "buttons" and not way == "dropdown":
message = await ctx.send(content=text, embed=embed, file=file)
if way == "reactions":
if put_reactions:
try:
start_adding_reactions(message, reactions)
except discord.HTTPException:
way = "message"
        async def _delete_message(message: discord.Message):
            # Renamed with a leading underscore so it no longer shadows the
            # `delete_message` bool parameter (the shadowing made
            # `if delete_message:` always truthy).
            try:
                return await message.delete()
            except discord.HTTPException:
                pass
if way == "buttons":
            view = Buttons(timeout=timeout, buttons=[{"style": 3, "label": "Yes", "emoji": reactions[0], "custom_id": "ConfirmationAsk_Yes"}, {"style": 4, "label": "No", "emoji": reactions[1], "custom_id": "ConfirmationAsk_No"}], members=[ctx.author.id] + [x.id for x in members_authored] + (list(ctx.bot.owner_ids) if check_owner else []))
message = await ctx.send(content=text, embed=embed, file=file, view=view)
try:
interaction, function_result = await view.wait_result()
if str(interaction.data["custom_id"]) == "ConfirmationAsk_Yes":
if delete_message:
                        await _delete_message(message)
return True
elif str(interaction.data["custom_id"]) == "ConfirmationAsk_No":
if delete_message:
                        await _delete_message(message)
return False
except TimeoutError:
if delete_message:
                    await _delete_message(message)
if timeout_message is not None:
await ctx.send(timeout_message)
return None
if way == "dropdown":
            view = Dropdown(timeout=timeout, options=[{"label": "Yes", "emoji": reactions[0], "value": "ConfirmationAsk_Yes"}, {"label": "No", "emoji": reactions[1], "value": "ConfirmationAsk_No"}], members=[ctx.author.id] + [x.id for x in members_authored] + (list(ctx.bot.owner_ids) if check_owner else []))
message = await ctx.send(content=text, embed=embed, file=file, view=view)
try:
interaction, values, function_result = await view.wait_result()
if str(values[0]) == "ConfirmationAsk_Yes":
if delete_message:
                        await _delete_message(message)
return True
elif str(values[0]) == "ConfirmationAsk_No":
if delete_message:
                        await _delete_message(message)
return False
except TimeoutError:
if delete_message:
                    await _delete_message(message)
if timeout_message is not None:
await ctx.send(timeout_message)
return None
if way == "reactions":
            view = Reactions(bot=ctx.bot, message=message, remove_reaction=False, timeout=timeout, reactions=reactions, members=[ctx.author.id] + [x.id for x in members_authored] + (list(ctx.bot.owner_ids) if check_owner else []))
try:
reaction, user, function_result = await view.wait_result()
if str(reaction.emoji) == reactions[0]:
end_reaction = True
if delete_message:
                        await _delete_message(message)
return True
elif str(reaction.emoji) == reactions[1]:
end_reaction = True
if delete_message:
                        await _delete_message(message)
return False
except TimeoutError:
if delete_message:
                    await _delete_message(message)
if timeout_message is not None:
await ctx.send(timeout_message)
return None
if way == "message":
            def check(msg):
                # Parentheses ensure the channel and content checks apply to every
                # allowed author, not just the last clause of the `or` chain.
                if check_owner:
                    allowed = msg.author.id == ctx.author.id or msg.author.id in ctx.bot.owner_ids or msg.author.id in [x.id for x in members_authored]
                else:
                    allowed = msg.author.id == ctx.author.id or msg.author.id in [x.id for x in members_authored]
                return allowed and msg.channel == ctx.channel and msg.content in ("yes", "y", "no", "n")
# This makes sure nobody except the command sender can interact with the "menu"
try:
end_reaction = False
msg = await ctx.bot.wait_for("message", timeout=timeout, check=check)
                # waiting for a message to be sent - times out after x seconds
if msg.content in ("yes", "y"):
end_reaction = True
if delete_message:
                        await _delete_message(message)
                        await _delete_message(msg)
return True
elif msg.content in ("no", "n"):
end_reaction = True
if delete_message:
                        await _delete_message(message)
                        await _delete_message(msg)
return False
except asyncio.TimeoutError:
if not end_reaction:
if delete_message:
                        await _delete_message(message)
if timeout_message is not None:
await ctx.send(timeout_message)
return None
    async def invoke_command(self, author: discord.User, channel: discord.TextChannel, command: str, prefix: typing.Optional[str]=None, message: typing.Optional[discord.Message]=None, message_id: typing.Optional[str]=None, timestamp: typing.Optional[datetime.datetime]=None) -> typing.Union[commands.Context, discord.Message]:
        """
        Invoke the specified command with the specified user in the specified channel.
        """
        # Default arguments are evaluated once at definition time, so the random
        # message id and the timestamp must be generated per call instead.
        if message_id is None:
            message_id = "".join(choice(string.digits) for i in range(18))
        if timestamp is None:
            timestamp = datetime.datetime.now()
        bot = self.bot
if prefix is None:
prefixes = await bot.get_valid_prefixes(guild=channel.guild)
prefix = prefixes[0] if len(prefixes) < 3 else prefixes[2]
old_content = f"{command}"
content = f"{prefix}{old_content}"
if message is None:
message_content = content
author_dict = {"id": f"{author.id}", "username": author.display_name, "avatar": author.avatar, 'avatar_decoration': None, 'discriminator': f"{author.discriminator}", "public_flags": author.public_flags, "bot": author.bot}
channel_id = channel.id
timestamp = str(timestamp).replace(" ", "T") + "+00:00"
data = {"id": message_id, "type": 0, "content": message_content, "channel_id": f"{channel_id}", "author": author_dict, "attachments": [], "embeds": [], "mentions": [], "mention_roles": [], "pinned": False, "mention_everyone": False, "tts": False, "timestamp": timestamp, "edited_timestamp": None, "flags": 0, "components": [], "referenced_message": None}
message = discord.Message(channel=channel, state=bot._connection, data=data)
else:
message = copy(message)
message.content = content
context = await bot.get_context(message)
if context.valid:
author.bot = False
context.author = author
context.guild = channel.guild
context.channel = channel
await bot.invoke(context)
else:
message.content = old_content
message.author = author
message.channel = channel
bot.dispatch("message", message)
return context if context.valid else message
    def get_embed(self, embed_dict: typing.Dict) -> typing.Dict[str, typing.Union[discord.Embed, str]]:
data = embed_dict
if data.get("embed"):
data = data["embed"]
elif data.get("embeds"):
data = data.get("embeds")[0]
if timestamp := data.get("timestamp"):
data["timestamp"] = timestamp.strip("Z")
if data.get("content"):
content = data["content"]
del data["content"]
else:
content = ""
        # Iterate over copies: deleting keys while iterating a dict raises RuntimeError.
        for x in list(data):
            if data[x] is None:
                del data[x]
            elif isinstance(data[x], typing.Dict):
                for y in list(data[x]):
                    if data[x][y] is None:
                        del data[x][y]
try:
embed = discord.Embed.from_dict(data)
length = len(embed)
if length > 6000:
raise commands.BadArgument(
f"Embed size exceeds Discord limit of 6000 characters ({length})."
)
        except Exception as e:
            raise commands.BadArgument(
                f"An error has occurred.\n{e}."
            )
back = {"embed": embed, "content": content}
return back
def datetime_to_timestamp(self, dt: datetime.datetime, format: typing.Literal["f", "F", "d", "D", "t", "T", "R"]="f") -> str:
"""
Generate a Discord timestamp from a datetime object.
<t:TIMESTAMP:FORMAT>
Parameters
----------
dt : datetime.datetime
The datetime object to use
format : TimestampFormat, by default `f`
The format to pass to Discord.
- `f` short date time | `18 June 2021 02:50`
- `F` long date time | `Friday, 18 June 2021 02:50`
- `d` short date | `18/06/2021`
- `D` long date | `18 June 2021`
- `t` short time | `02:50`
- `T` long time | `02:50:15`
- `R` relative time | `8 days ago`
Returns
-------
str
Formatted timestamp
Thanks to vexutils from Vexed01 in GitHub! (https://github.com/Vexed01/Vex-Cogs/blob/master/timechannel/vexutils/chat.py)
"""
t = str(int(dt.timestamp()))
return f"<t:{t}:{format}>"
async def get_hook(self, channel: discord.TextChannel):
"""
Create a discord.Webhook object. It tries to retrieve an existing webhook created by the bot or to create it itself.
"""
try:
for webhook in await channel.webhooks():
if webhook.user.id == self.bot.user.id:
hook = webhook
break
else:
hook = await channel.create_webhook(
name="red_bot_hook_" + str(channel.id)
)
except discord.errors.NotFound: # Probably user deleted the hook
hook = await channel.create_webhook(name="red_bot_hook_" + str(channel.id))
return hook
def check_permissions_for(self, channel: typing.Union[discord.TextChannel, discord.VoiceChannel, discord.DMChannel], user: discord.User, check: typing.Union[typing.List, typing.Dict]):
"""
Check all permissions specified as an argument.
"""
if getattr(channel, "guild", None) is None:
return True
permissions = channel.permissions_for(user)
if isinstance(check, typing.List):
new_check = {}
for p in check:
new_check[p] = True
check = new_check
        for p in check:
            value = getattr(permissions, f"{p}", None)
            if value is None:
                # Unknown permission name: nothing to verify.
                continue
            # Previously, required permissions (check[p] True) that were missing
            # slipped through because the falsy value failed the outer `if`.
            if check[p] and not value:
                return False
            if not check[p] and value:
                return False
        return True
def create_loop(self, function, name: typing.Optional[str]=None, days: typing.Optional[int]=0, hours: typing.Optional[int]=0, minutes: typing.Optional[int]=0, seconds: typing.Optional[int]=0, function_args: typing.Optional[typing.Dict]={}, limit_count: typing.Optional[int]=None, limit_date: typing.Optional[datetime.datetime]=None, limit_exception: typing.Optional[int]=None):
"""
Create a loop like Loop, but with default values and loop object recording functionality.
"""
if name is None:
name = f"{self.cog.__class__.__name__}"
if datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds).total_seconds() == 0:
seconds = 900 # 15 minutes
loop = Loop(cogsutils=self, name=name, function=function, days=days, hours=hours, minutes=minutes, seconds=seconds, function_args=function_args, limit_count=limit_count, limit_date=limit_date, limit_exception=limit_exception)
if f"{loop.name}" in self.loops:
self.loops[f"{loop.name}"].stop_all()
self.loops[f"{loop.name}"] = loop
return loop
async def captcha(self, member: discord.Member, channel: discord.TextChannel, limit: typing.Optional[int]=3, timeout: typing.Optional[int]=60, why: typing.Optional[str]=""):
"""
Create a Captcha challenge like Captcha, but with default values.
"""
return await Captcha(cogsutils=self, member=member, channel=channel, limit=limit, timeout=timeout, why=why).realize_challenge()
def get_all_repo_cogs_objects(self):
"""
Get a dictionary containing the objects or None of all my cogs.
"""
cogs = {}
for cog in self.all_cogs:
object = self.bot.get_cog(f"{cog}")
if object is not None:
cogs[f"{cog}"] = object if hasattr(object, "cogsutils") else None
else:
cogs[f"{cog}"] = None
for cog in self.bot.cogs.values():
if hasattr(cog, "cogsutils"):
if getattr(cog.cogsutils, "repo_name", None) == "AAA3A-cogs":
if f"{cog.__class__.__name__}" not in cogs or cogs[f"{cog.__class__.__name__}"] is None:
cogs[f"{cog.__class__.__name__}"] = cog
return cogs
def at_least_one_cog_loaded(self):
"""
Return True if at least one cog of all my cogs is loaded.
"""
at_least_one_cog_loaded = False
        for object in self.get_all_repo_cogs_objects().values():
if object is not None:
at_least_one_cog_loaded = True
break
return at_least_one_cog_loaded
def add_all_dev_env_values(self):
"""
Add values to the development environment for all my loaded cogs. Not really useful anymore, now that my cogs use AAA3A_utils.
"""
cogs = self.get_all_repo_cogs_objects()
for cog in cogs:
if cogs[cog] is not None:
try:
CogsUtils(cog=cogs[cog]).add_dev_env_value()
except Exception:
pass
def class_instance_to_dict(self, instance):
"""
Convert a class instance into a dictionary, while using ids for all sub-attributes.
"""
original_dict = instance.__dict__
new_dict = self.to_id(original_dict)
return new_dict
def to_id(self, original_dict: typing.Dict):
"""
Return a dict with ids for all sub-attributes
"""
new_dict = {}
for e in original_dict:
if isinstance(original_dict[e], typing.Dict):
new_dict[e] = self.to_id(original_dict[e])
elif hasattr(original_dict[e], "id"):
new_dict[e] = int(original_dict[e].id)
elif isinstance(original_dict[e], datetime.datetime):
new_dict[e] = float(datetime.datetime.timestamp(original_dict[e]))
else:
new_dict[e] = original_dict[e]
return new_dict
    def generate_key(self, number: typing.Optional[int]=10, existing_keys: typing.Optional[typing.List]=[], strings_used: typing.Optional[typing.Dict]={"ascii_lowercase": True, "ascii_uppercase": False, "digits": True, "punctuation": False, "others": []}):
"""
Generate a secret key, with the choice of characters, the number of characters and a list of existing keys.
"""
strings = []
if "ascii_lowercase" in strings_used:
if strings_used["ascii_lowercase"]:
strings += string.ascii_lowercase
if "ascii_uppercase" in strings_used:
if strings_used["ascii_uppercase"]:
strings += string.ascii_uppercase
if "digits" in strings_used:
if strings_used["digits"]:
strings += string.digits
if "punctuation" in strings_used:
if strings_used["punctuation"]:
strings += string.punctuation
if "others" in strings_used:
if isinstance(strings_used["others"], typing.List):
strings += strings_used["others"]
while True:
# Collisions with existing keys are rare, so this loop should almost never repeat.
key = "".join(choice(strings) for _ in range(number))
if key not in existing_keys:
return key
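# Illustrative call: with the defaults (10 characters, lowercase + digits), this
# returns something like "a3k9qz0m1x":
#     key = cogsutils.generate_key()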
def await_function(self, function, function_args: typing.Optional[typing.Dict]={}):
"""
Allow calling an asynchronous function from a synchronous context, by scheduling it as a task.
"""
task = asyncio.create_task(self.do_await_function(function=function, function_args=function_args))
return task
async def do_await_function(self, function, function_args: typing.Optional[typing.Dict]={}):
try:
await function(**function_args)
except Exception as e:
if hasattr(self.cog, "log"):
self.cog.log.error(f"An error occurred with the {function.__name__} function.", exc_info=e)
async def delete_message(self, message: discord.Message):
"""
Delete a message, ignoring any exceptions.
Easier than repeating these three lines for every message deletion in each cog.
"""
try:
await message.delete()
except discord.HTTPException:
pass
async def check_in_listener(self, output, allowed_by_whitelist_blacklist: typing.Optional[bool]=True):
"""
Check all parameters for the output of any listener.
Thanks to Jack! (https://discord.com/channels/133049272517001216/160386989819035648/825373605000511518)
"""
if isinstance(output, discord.Message):
# check whether the message was sent in a guild
if output.guild is None:
raise discord.ext.commands.BadArgument()
# check whether the message author isn't a bot
if output.author is None:
raise discord.ext.commands.BadArgument()
if output.author.bot:
raise discord.ext.commands.BadArgument()
# check whether the bot can send message in the given channel
if output.channel is None:
raise discord.ext.commands.BadArgument()
if not self.check_permissions_for(channel=output.channel, user=output.guild.me, check=["send_messages"]):
raise discord.ext.commands.BadArgument()
# check whether the cog isn't disabled
if self.cog is not None:
if await self.bot.cog_disabled_in_guild(self.cog, output.guild):
raise discord.ext.commands.BadArgument()
# check whether the channel isn't on the ignore list
if not await self.bot.ignored_channel_or_guild(output):
raise discord.ext.commands.BadArgument()
# check whether the message author isn't on allowlist/blocklist
if allowed_by_whitelist_blacklist:
if not await self.bot.allowed_by_whitelist_blacklist(output.author):
raise discord.ext.commands.BadArgument()
if isinstance(output, discord.RawReactionActionEvent):
# check whether the message was sent in a guild
output.guild = self.bot.get_guild(output.guild_id)
if output.guild is None:
raise discord.ext.commands.BadArgument()
# check whether the message author isn't a bot
output.author = output.guild.get_member(output.user_id)
if output.author is None:
raise discord.ext.commands.BadArgument()
if output.author.bot:
raise discord.ext.commands.BadArgument()
# check whether the bot can send message in the given channel
output.channel = output.guild.get_channel(output.channel_id)
if output.channel is None:
raise discord.ext.commands.BadArgument()
if not self.check_permissions_for(channel=output.channel, user=output.guild.me, check=["send_messages"]):
raise discord.ext.commands.BadArgument()
# check whether the cog isn't disabled
if self.cog is not None:
if await self.bot.cog_disabled_in_guild(self.cog, output.guild):
raise discord.ext.commands.BadArgument()
# check whether the channel isn't on the ignore list
if not await self.bot.ignored_channel_or_guild(output):
raise discord.ext.commands.BadArgument()
# check whether the message author isn't on allowlist/blocklist
if allowed_by_whitelist_blacklist:
if not await self.bot.allowed_by_whitelist_blacklist(output.author):
raise discord.ext.commands.BadArgument()
if self.is_dpy2:
if isinstance(output, discord.Interaction):
# check whether the message was sent in a guild
if output.guild is None:
raise discord.ext.commands.BadArgument()
# check whether the interaction author isn't a bot (discord.Interaction exposes `user`, not `author`)
if output.user is None:
raise discord.ext.commands.BadArgument()
if output.user.bot:
raise discord.ext.commands.BadArgument()
# check whether the bot can send message in the given channel
if output.channel is None:
raise discord.ext.commands.BadArgument()
if not self.check_permissions_for(channel=output.channel, user=output.guild.me, check=["send_messages"]):
raise discord.ext.commands.BadArgument()
# check whether the cog isn't disabled
if self.cog is not None:
if await self.bot.cog_disabled_in_guild(self.cog, output.guild):
raise discord.ext.commands.BadArgument()
# check whether the message author isn't on allowlist/blocklist
if allowed_by_whitelist_blacklist:
if not await self.bot.allowed_by_whitelist_blacklist(output.user):
raise discord.ext.commands.BadArgument()
return
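# Usage sketch (untested) inside a cog's listener, assuming self.cogsutils exists:
#     @commands.Cog.listener()
#     async def on_message(self, message: discord.Message):
#         try:
#             await self.cogsutils.check_in_listener(message)
#         except discord.ext.commands.BadArgument:
#             return
#         ...  # the message passed all the checks above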
async def to_update(self, cog_name: typing.Optional[str]=None):
if cog_name is None:
cog_name = self.cog.__class__.__name__
cog_name = cog_name.lower()
downloader = self.bot.get_cog("Downloader")
if downloader is None:
raise self.DownloaderNotLoaded(_("The cog downloader is not loaded.").format(**locals()))
if await self.bot._cog_mgr.find_cog(cog_name) is None:
raise ValueError(_("This cog was not found in any cog path."))
local = discord.utils.get(await downloader.installed_cogs(), name=cog_name)
if local is None:
raise ValueError(_("This cog is not installed on this bot.").format(**locals()))
local_commit = local.commit
repo = local.repo
if repo is None:
raise ValueError(_("This cog has not been installed from the cog Downloader.").format(**locals()))
repo_owner, repo_name, repo_branch = (re.compile(r"(?:https?:\/\/)?git(?:hub|lab)\.com\/(?P<repo_owner>[A-Za-z0-9-_.]*)\/(?P<repo>[A-Za-z0-9-_.]*)(?:\/tree\/(?P<repo_branch>[A-Za-z0-9-_.]*))?", re.I).findall(repo.url))[0]
repo_branch = repo.branch
async with aiohttp.ClientSession() as session:
async with session.get(f"https://api.github.com/repos/{repo_owner}/{repo_name}/git/refs/heads/{repo_branch}", timeout=3) as r:
online = await r.json()
if online is None or "object" not in online or "sha" not in online["object"]:
raise asyncio.IncompleteReadError(_("No results could be retrieved from the git api.").format(**locals()), None)
online_commit = online["object"]["sha"]
return online_commit != local_commit, local_commit, online_commit
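# Usage sketch (untested): compare the installed commit with the one on GitHub/GitLab.
#     to_update, local_commit, online_commit = await self.cogsutils.to_update("MyCog")
#     if to_update:
#         ...  # warn the owner that a newer commit is available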
async def autodestruction(self):
"""
Cog self-destruct.
Will of course never be used, just a test.
"""
downloader = self.bot.get_cog("Downloader")
if downloader is not None:
poss_installed_path = (await downloader.cog_install_path()) / self.cog.__class__.__name__.lower()
if poss_installed_path.exists():
with contextlib.suppress(commands.ExtensionNotLoaded):
self.bot.unload_extension(self.cog.__class__.__name__.lower())
await self.bot.remove_loaded_package(self.cog.__class__.__name__.lower())
await downloader._delete_cog(poss_installed_path)
await downloader._remove_from_installed([discord.utils.get(await downloader.installed_cogs(), name=self.cog.__class__.__name__.lower())])
else:
raise self.DownloaderNotLoaded(_("The cog downloader is not loaded.").format(**locals()))
class DownloaderNotLoaded(Exception):
pass
class Loop():
"""
Create a loop, with many features.
Thanks to Vexed01 on GitHub! (https://github.com/Vexed01/Vex-Cogs/blob/master/timechannel/loop.py and https://github.com/Vexed01/vex-cog-utils/vexutils/loop.py)
"""
def __init__(self, cogsutils: CogsUtils, name: str, function, days: typing.Optional[int]=0, hours: typing.Optional[int]=0, minutes: typing.Optional[int]=0, seconds: typing.Optional[int]=0, function_args: typing.Optional[typing.Dict]={}, limit_count: typing.Optional[int]=None, limit_date: typing.Optional[datetime.datetime]=None, limit_exception: typing.Optional[int]=None) -> None:
self.cogsutils: CogsUtils = cogsutils
self.name: str = name
self.function = function
self.function_args = function_args
self.interval: float = datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds).total_seconds()
self.limit_count: int = limit_count
self.limit_date: datetime.datetime = limit_date
self.limit_exception: int = limit_exception
self.stop_manually: bool = False
self.stop: bool = False
self.expected_interval = datetime.timedelta(seconds=self.interval)
self.iter_count: int = 0
self.iter_exception: int = 0
self.currently_running: bool = False # whether the loop is running or sleeping
self.last_result = None
self.last_exc: str = "No exception has occurred yet."
self.last_exc_raw: typing.Optional[BaseException] = None
self.last_iter: typing.Optional[datetime.datetime] = None
self.next_iter: typing.Optional[datetime.datetime] = None
self.task: asyncio.Task = self.cogsutils.bot.loop.create_task(self.loop())  # keep the task under another name, so that self.loop stays callable
async def start(self):
# The task is already created in __init__; only restart it if it has finished or been cancelled.
if self.task is None or self.task.done():
self.task = self.cogsutils.bot.loop.create_task(self.loop())
async def wait_until_iter(self) -> None:
now = datetime.datetime.utcnow()
time = now.timestamp()
time = math.ceil(time / self.interval) * self.interval
next_iter = datetime.datetime.fromtimestamp(time) - now
seconds_to_sleep = next_iter.total_seconds()
if self.interval > 60:
if hasattr(self.cogsutils.cog, "log"):
self.cogsutils.cog.log.debug(f"Sleeping for {seconds_to_sleep} seconds until next iter...")
await asyncio.sleep(seconds_to_sleep)
async def loop(self) -> None:
await self.cogsutils.bot.wait_until_red_ready()
await asyncio.sleep(1)
if hasattr(self.cogsutils.cog, "log"):
self.cogsutils.cog.log.debug(f"{self.name} loop has started.")
if self.interval % 3600 == 0:  # the interval is a whole number of hours: align iterations to the hour
try:
start = monotonic()
self.iter_start()
self.last_result = await self.function(**self.function_args)
self.iter_finish()
end = monotonic()
total = round(end - start, 1)
if self.interval > 60:
if hasattr(self.cogsutils.cog, "log"):
self.cogsutils.cog.log.debug(f"{self.name} initial loop finished in {total}s.")
except Exception as e:
if hasattr(self.cogsutils.cog, "log"):
self.cogsutils.cog.log.exception(f"Something went wrong in the {self.name} loop.", exc_info=e)
self.iter_error(e)
self.iter_exception += 1
# both iter_finish and iter_error set next_iter as not None
assert self.next_iter is not None
self.next_iter = self.next_iter.replace(
minute=0
) # ensure further iterations are on the hour
if await self.maybe_stop():
return
await self.sleep_until_next()
while True:
try:
start = monotonic()
self.iter_start()
self.last_result = await self.function(**self.function_args)
self.iter_finish()
end = monotonic()
total = round(end - start, 1)
if self.interval > 60:
if hasattr(self.cogsutils.cog, "log"):
self.cogsutils.cog.log.debug(f"{self.name} iteration finished in {total}s.")
except Exception as e:
if hasattr(self.cogsutils.cog, "log"):
self.cogsutils.cog.log.exception(f"Something went wrong in the {self.name} loop.", exc_info=e)
self.iter_error(e)
if await self.maybe_stop():
return
if self.interval % 3600 == 0:
await self.sleep_until_next()
elif self.interval != 0:
await self.wait_until_iter()
async def maybe_stop(self):
if self.stop_manually:
self.stop_all()
if self.limit_count is not None:
if self.iter_count >= self.limit_count:
self.stop_all()
if self.limit_date is not None:
if datetime.datetime.timestamp(datetime.datetime.now()) >= datetime.datetime.timestamp(self.limit_date):
self.stop_all()
if self.limit_exception:
if self.iter_exception >= self.limit_exception:
self.stop_all()
if self.stop:
return True
return False
def stop_all(self):
self.stop = True
self.next_iter = None
self.task.cancel()
if f"{self.name}" in self.cogsutils.loops:
if self.cogsutils.loops[f"{self.name}"] == self:
del self.cogsutils.loops[f"{self.name}"]
return self
def __repr__(self) -> str:
return (
f"<friendly_name={self.name} iter_count={self.iter_count} "
f"currently_running={self.currently_running} last_iter={self.last_iter} "
f"next_iter={self.next_iter} integrity={self.integrity}>"
)
@property
def integrity(self) -> bool:
"""
If the loop is running on time (whether or not next expected iteration is in the future)
"""
if self.next_iter is None: # not started yet
return False
return self.next_iter > datetime.datetime.utcnow()
@property
def until_next(self) -> float:
"""
Positive float with the seconds until the next iteration, based off the last
iteration and the interval.
If the expected time of the next iteration is in the past, this will return `0.0`
"""
if self.next_iter is None: # not started yet
return 0.0
raw_until_next = (self.next_iter - datetime.datetime.utcnow()).total_seconds()
if raw_until_next > self.expected_interval.total_seconds(): # should never happen
return self.expected_interval.total_seconds()
elif raw_until_next > 0.0:
return raw_until_next
else:
return 0.0
async def sleep_until_next(self) -> None:
"""Sleep until the next iteration. Basically an "all-in-one" version of `until_next`."""
await asyncio.sleep(self.until_next)
def iter_start(self) -> None:
"""Register an iteration as starting."""
self.iter_count += 1
self.currently_running = True
self.last_iter = datetime.datetime.utcnow()
self.next_iter = datetime.datetime.utcnow() + self.expected_interval
# this isn't accurate, it will be "corrected" when finishing is called
def iter_finish(self) -> None:
"""Register an iteration as finished successfully."""
self.currently_running = False
# now this is accurate. imo its better to have something than nothing
def iter_error(self, error: BaseException) -> None:
"""Register an iteration's error."""
self.currently_running = False
self.last_exc_raw = error
self.last_exc = "".join(
traceback.format_exception(type(error), error, error.__traceback__)
)
def get_debug_embed(self) -> discord.Embed:
"""Get an embed with infomation on this loop."""
table = Table("Key", "Value")
table.add_row("expected_interval", str(self.expected_interval))
table.add_row("iter_count", str(self.iter_count))
table.add_row("currently_running", str(self.currently_running))
table.add_row("last_iterstr", str(self.last_iter) or "Loop not started")
table.add_row("next_iterstr", str(self.next_iter) or "Loop not started")
raw_table_str = no_colour_rich_markup(table)
now = datetime.datetime.utcnow()
if self.next_iter and self.last_iter:
table = Table("Key", "Value")
table.add_row("Seconds until next", str((self.next_iter - now).total_seconds()))
table.add_row("Seconds since last", str((now - self.last_iter).total_seconds()))
processed_table_str = no_colour_rich_markup(table)
else:
processed_table_str = "Loop hasn't started yet."
emoji = "✅" if self.integrity else "❌"
embed = discord.Embed(title=f"{self.name} Loop: `{emoji}`")
embed.add_field(name="Raw data", value=raw_table_str, inline=False)
embed.add_field(
name="Processed data",
value=processed_table_str,
inline=False,
)
exc = self.last_exc
if len(exc) > 1024:
exc = list(pagify(exc, page_length=1024))[0] + "\n..."
embed.add_field(name="Exception", value=box(exc), inline=False)
return embed
class Captcha():
"""
Captcha for a member in a text channel.
Thanks to Kreusada for this code! (https://github.com/Kreusada/Kreusada-Cogs/blob/master/captcha/)
"""
def __init__(self, cogsutils: CogsUtils, member: discord.Member, channel: discord.TextChannel, limit: typing.Optional[int]=3, timeout: typing.Optional[int]=60, why: typing.Optional[str]=""):
self.cogsutils: CogsUtils = cogsutils
self.member: discord.Member = member
self.guild: discord.Guild = member.guild
self.channel: discord.TextChannel = channel
self.why: str = why
self.limit: int = limit
self.timeout: int = timeout
self.message: discord.Message = None
self.code: str = None
self.running: bool = False
self.tasks: list = []
self.trynum: int = 0
self.escape_char = "\u200B"
async def realize_challenge(self) -> None:
is_ok = None
timeout = False
failed = False
leave_guild = False
try:
while is_ok is not True:
if self.trynum > self.limit:
break
try:
self.code = self.generate_code()
await self.send_message()
this = await self.try_challenging()
except TimeoutError:
timeout = True
break
except self.AskedForReload:
self.trynum += 1
continue
except TypeError:
continue
except self.LeftGuildError:
leave_guild = True
break
if this is False:
self.trynum += 1
is_ok = False
else:
is_ok = True
if self.message is not None:
try:
await self.message.delete()
except discord.HTTPException:
pass
failed = self.trynum > self.limit
except self.MissingPermissions as e:
raise self.MissingPermissions(e)
except Exception as e:
if hasattr(self.cogsutils.cog, "log"):
self.cogsutils.cog.log.error("An unsupported error occurred during the captcha.", exc_info=e)
raise self.OtherException(e)
if timeout:
raise TimeoutError()
if failed:
return False
if leave_guild:
raise self.LeftGuildError("User has left guild.")
return True
async def try_challenging(self) -> bool:
"""Do challenging in one function!
"""
self.running = True
try:
received = await self.wait_for_action()
if received is None:
raise self.LeftGuildError("User has left guild.")
if hasattr(received, "content"):
# It's a message!
try:
await received.delete()
except discord.HTTPException:
pass
error_message = ""
try:
state = await self.verify(received.content)
except self.SameCodeError:
error_message += error(bold(_("Code invalid. Do not copy and paste.").format(**locals())))
state = False
else:
if not state:
error_message += warning("Code invalid.")
if error_message:
await self.channel.send(error_message, delete_after=3)
return state
else:
raise self.AskedForReload("User want to reload Captcha.")
except TimeoutError:
raise TimeoutError()
finally:
self.running = False
def generate_code(self, put_fake_space: typing.Optional[bool] = True):
code = self.cogsutils.generate_key(number=8, existing_keys=[], strings_used={"ascii_lowercase": False, "ascii_uppercase": True, "digits": True, "punctuation": False})
if put_fake_space:
# Insert zero-width spaces between the characters, so that a copy-pasted code can be detected.
code = self.escape_char.join(list(code))
return code
def get_embed(self) -> discord.Embed:
"""
Get the embed containing the captcha code.
"""
embed_dict = {
"embeds": [
{
"title": _("Captcha").format(**locals()) + _(" for {self.why}").format(**locals()) if not self.why == "" else "",
"description": _("Please return me the following code:\n{box(str(self.code))}\nDo not copy and paste.").format(**locals()),
"author": {
"name": f"{self.member.display_name}",
"icon_url": self.member.display_avatar if self.is_dpy2 else self.member.avatar_url
},
"footer": {
"text": _("Tries: {self.trynum} / Limit: {self.limit}").format(**locals())
}
}
]
}
embed = self.cogsutils.get_embed(embed_dict)["embed"]
return embed
async def send_message(self) -> None:
"""
Send a message with new code.
"""
if self.message is not None:
try:
await self.message.delete()
except discord.HTTPException:
pass
embed = self.get_embed()
try:
self.message = await self.channel.send(
embed=embed,
delete_after=900, # Delete after 15 minutes.
)
except discord.HTTPException:
raise self.MissingPermissions("Cannot send message in verification channel.")
try:
await self.message.add_reaction("🔁")
except discord.HTTPException:
raise self.MissingPermissions("Cannot react in verification channel.")
async def verify(self, code_input: str) -> bool:
"""Verify a code."""
if self.escape_char in code_input:
raise self.SameCodeError
if code_input.lower() == self.code.replace(self.escape_char, "").lower():
return True
else:
return False
async def wait_for_action(self) -> typing.Union[discord.Reaction, discord.Message, None]:
"""Wait for an action from the user.
It will return an object of discord.Message or discord.Reaction depending what the user
did.
"""
self.cancel_tasks() # Just in case...
self.tasks = self._give_me_tasks()
done, pending = await asyncio.wait(
self.tasks,
timeout=self.timeout,
return_when=asyncio.FIRST_COMPLETED,
)
self.cancel_tasks()
if len(done) == 0:
raise TimeoutError("User didn't answer.")
try: # An error is raised if we return the result and when the task got cancelled.
return done.pop().result()
except asyncio.CancelledError:
return None
def cancel_tasks(self) -> None:
"""Cancel the ongoing tasks."""
for task in self.tasks:
task: asyncio.Task
if not task.done():
task.cancel()
def _give_me_tasks(self) -> typing.List:
def leave_check(u):
return u.id == self.member.id
return [
asyncio.create_task(
self.cogsutils.bot.wait_for(
"reaction_add",
check=ReactionPredicate.with_emojis(
"🔁", message=self.message, user=self.member
)
)
),
asyncio.create_task(
self.cogsutils.bot.wait_for(
"message",
check=MessagePredicate.same_context(
channel=self.channel,
user=self.member,
)
)
),
asyncio.create_task(self.cogsutils.bot.wait_for("user_remove", check=leave_check))
]
class MissingPermissions(Exception):
pass
class AskedForReload(Exception):
pass
class SameCodeError(Exception):
pass
class LeftGuildError(Exception):
pass
class OtherException(Exception):
pass
if CogsUtils().is_dpy2:
class Buttons(discord.ui.View):
"""Create Buttons easily."""
def __init__(self, timeout: typing.Optional[float]=180, buttons: typing.Optional[typing.List]=[{}], members: typing.Optional[typing.List]=None, check: typing.Optional[typing.Any]=None, function: typing.Optional[typing.Any]=None, function_args: typing.Optional[typing.Dict]={}, infinity: typing.Optional[bool]=False):
"""style: ButtonStyle, label: Optional[str], disabled: bool, custom_id: Optional[str], url: Optional[str], emoji: Optional[Union[str, Emoji, PartialEmoji]], row: Optional[int]"""
for button_dict in buttons:
if "custom_id" not in button_dict:
button_dict["custom_id"] = "CogsUtils" + "_" + CogsUtils().generate_key(number=10)
self.buttons_dict_instance = {"timeout": timeout, "buttons": [b.copy() for b in buttons], "members": members, "check": check, "function": function, "function_args": function_args, "infinity": infinity}
super().__init__(timeout=timeout)
self.infinity = infinity
self.interaction_result = None
self.function_result = None
self.members = members
self.check = check
self.function = function
self.function_args = function_args
self.clear_items()
self.buttons = []
self.buttons_dict = []
self.done = asyncio.Event()
for button_dict in buttons:
if "style" not in button_dict:
button_dict["style"] = int(discord.ButtonStyle(2))
if "label" not in button_dict and "emoji" not in button_dict:
button_dict["label"] = "Test"
button = discord.ui.Button(**button_dict)
self.add_item(button)
self.buttons.append(button)
self.buttons_dict.append(button_dict)
def to_dict_cogsutils(self, for_Config: typing.Optional[bool]=False):
buttons_dict_instance = self.buttons_dict_instance
if for_Config:
buttons_dict_instance["check"] = None
buttons_dict_instance["function"] = None
return buttons_dict_instance
@classmethod
def from_dict_cogsutils(cls, buttons_dict_instance: typing.Dict):
return cls(**buttons_dict_instance)
async def interaction_check(self, interaction: discord.Interaction):
if self.check is not None:
if not self.check(interaction):
await interaction.response.send_message("You are not allowed to use this interaction.", ephemeral=True)
return False  # block the interaction for users who fail the check
if self.members is not None:
if interaction.user.id not in self.members:
await interaction.response.send_message("You are not allowed to use this interaction.", ephemeral=True)
return False
self.interaction_result = interaction
if self.function is not None:
self.function_result = await self.function(self, interaction, **self.function_args)
self.done.set()
if not self.infinity:
self.stop()
return True
async def on_timeout(self):
self.done.set()
self.stop()
async def wait_result(self):
self.done = asyncio.Event()
await self.done.wait()
interaction, function_result = self.get_result()
if interaction is None:
raise TimeoutError()
return interaction, function_result
def get_result(self):
return self.interaction_result, self.function_result
class Dropdown(discord.ui.View):
"""Create Dropdown easily."""
def __init__(self, timeout: typing.Optional[float]=180, placeholder: typing.Optional[str]="Choose an option.", min_values: typing.Optional[int]=1, max_values: typing.Optional[int]=1, *, options: typing.Optional[typing.List]=[{}], members: typing.Optional[typing.List]=None, check: typing.Optional[typing.Any]=None, function: typing.Optional[typing.Any]=None, function_args: typing.Optional[typing.Dict]={}, infinity: typing.Optional[bool]=False):
"""label: str, value: str, description: Optional[str], emoji: Optional[Union[str, Emoji, PartialEmoji]], default: bool"""
self.dropdown_dict_instance = {"timeout": timeout, "placeholder": placeholder, "min_values": min_values, "max_values": max_values, "options": [o.copy() for o in options], "members": members, "check": check, "function": function, "function_args": function_args, "infinity": infinity}
super().__init__(timeout=timeout)
self.infinity = infinity
self.dropdown = self.Dropdown(placeholder=placeholder, min_values=min_values, max_values=max_values, options=options, members=members, check=check, function=function, function_args=function_args, infinity=self.infinity)
self.add_item(self.dropdown)
def to_dict_cogsutils(self, for_Config: typing.Optional[bool]=False):
dropdown_dict_instance = self.dropdown_dict_instance
if for_Config:
dropdown_dict_instance["check"] = None
dropdown_dict_instance["function"] = None
return dropdown_dict_instance
@classmethod
def from_dict_cogsutils(cls, dropdown_dict_instance: typing.Dict):
return cls(**dropdown_dict_instance)
async def on_timeout(self):
self.dropdown.done.set()
self.stop()
async def wait_result(self):
self.dropdown.done = asyncio.Event()
await self.dropdown.done.wait()
interaction, values, function_result = self.get_result()
if interaction is None:
raise TimeoutError()
return interaction, values, function_result
def get_result(self):
return self.dropdown.interaction_result, self.dropdown.values_result, self.dropdown.function_result
class Dropdown(discord.ui.Select):
def __init__(self, placeholder: typing.Optional[str]="Choose an option.", min_values: typing.Optional[int]=1, max_values: typing.Optional[int]=1, *, options: typing.Optional[typing.List]=[], members: typing.Optional[typing.List]=None, check: typing.Optional[typing.Any]=None, function: typing.Optional[typing.Any]=None, function_args: typing.Optional[typing.Dict]={}, infinity: typing.Optional[bool]=False):
self.infinity = infinity
self.interaction_result = None
self.values_result = None
self.function_result = None
self.members = members
self.check = check
self.function = function
self.function_args = function_args
self._options = []
self.options_dict = []
self.done = asyncio.Event()
for option_dict in options:
if "label" not in option_dict and "emoji" not in option_dict:
option_dict["label"] = "Test"
option = discord.SelectOption(**option_dict)
self._options.append(option)
self.options_dict.append(option_dict)
super().__init__(placeholder=placeholder, min_values=min_values, max_values=max_values, options=self._options)
async def callback(self, interaction: discord.Interaction):
if self.check is not None:
if not self.check(interaction):
await interaction.response.send_message("You are not allowed to use this interaction.", ephemeral=True)
return
if self.members is not None:
if interaction.user.id not in self.members:
await interaction.response.send_message("You are not allowed to use this interaction.", ephemeral=True)
return
self.interaction_result = interaction
self.values_result = self.values
if self.function is not None:
self.function_result = await self.function(self, interaction, self.values, **self.function_args)
self.done.set()
if not self.infinity:
self.view.stop()
class Modal(discord.ui.Modal):
"""Create Modal easily."""
def __init__(self, title: typing.Optional[str]="Form", timeout: typing.Optional[float]=None, inputs: typing.Optional[typing.List]=[{}], members: typing.Optional[typing.List]=None, check: typing.Optional[typing.Any]=None, function: typing.Optional[typing.Any]=None, function_args: typing.Optional[typing.Dict]={}):
"""name: str, label: str, style: TextStyle, custom_id: str, placeholder: Optional[str], default: Optional[str], required: bool, min_length: Optional[int], max_length: Optional[int], row: Optional[int]"""
for input_dict in inputs:
if "custom_id" not in input_dict:
input_dict["custom_id"] = "CogsUtils" + "_" + CogsUtils().generate_key(number=10)
self.modal_dict_instance = {"title": title, "timeout": timeout, "inputs": [i.copy() for i in inputs], "function": function, "function_args": function_args}
super().__init__(title=title, timeout=timeout)
self.title = title
self.interaction_result = None
self.values_result = None
self.function_result = None
self.members = members
self.check = check
self.function = function
self.function_args = function_args
self.inputs = []
self.inputs_dict = []
self.done = asyncio.Event()
for input_dict in inputs:
if "label" not in input_dict:
input_dict["label"] = "Test"
if "style" in input_dict:
if isinstance(input_dict["style"], int):
input_dict["style"] = discord.ui.text_input.TextStyle(input_dict["style"])
text_input = discord.ui.text_input.TextInput(**input_dict)
self.add_item(text_input)
self.inputs.append(text_input)
self.inputs_dict.append(input_dict)
def to_dict_cogsutils(self, for_Config: typing.Optional[bool]=False):
modal_dict_instance = self.modal_dict_instance
if for_Config:
modal_dict_instance["function"] = None
return modal_dict_instance
@classmethod
def from_dict_cogsutils(cls, modal_dict_instance: typing.Dict):
return cls(**modal_dict_instance)
async def on_submit(self, interaction: discord.Interaction):
if self.check is not None:
if not self.check(interaction):
await interaction.response.send_message("You are not allowed to use this interaction.", ephemeral=True)
return
if self.members is not None:
if interaction.user.id not in self.members:
await interaction.response.send_message("You are not allowed to use this interaction.", ephemeral=True)
return
self.interaction_result = interaction
self.values_result = self.inputs
if self.function is not None:
self.function_result = await self.function(self, self.interaction_result, self.values_result, **self.function_args)
self.done.set()
self.stop()
async def on_timeout(self):
self.done.set()
self.stop()
async def wait_result(self):
self.done = asyncio.Event()
await self.done.wait()
interaction, values, function_result = self.get_result()
if interaction is None:
raise TimeoutError()
return interaction, values, function_result
def get_result(self):
return self.interaction_result, self.values_result, self.function_result
class Reactions():
"""Create Reactions easily."""
def __init__(self, bot: Red, message: discord.Message, remove_reaction: typing.Optional[bool]=True, timeout: typing.Optional[float]=180, reactions: typing.Optional[typing.List]=["✅", "❌"], members: typing.Optional[typing.List]=None, check: typing.Optional[typing.Any]=None, function: typing.Optional[typing.Any]=None, function_args: typing.Optional[typing.Dict]={}, infinity: typing.Optional[bool]=False):
self.reactions_dict_instance = {"message": message, "timeout": timeout, "reactions": reactions, "members": members, "check": check, "function": function, "function_args": function_args, "infinity": infinity}
self.bot = bot
self.message = message
self.remove_reaction = remove_reaction
self.timeout = timeout
self.infinity = infinity
self.reaction_result = None
self.user_result = None
self.function_result = None
self.members = members
self.check = check
self.function = function
self.function_args = function_args
self.reactions = reactions
self.done = asyncio.Event()
self.r = False
asyncio.create_task(self.wait())
def to_dict_cogsutils(self, for_Config: typing.Optional[bool]=False):
reactions_dict_instance = self.reactions_dict_instance
if for_Config:
reactions_dict_instance["bot"] = None
reactions_dict_instance["message"] = None
reactions_dict_instance["check"] = None
reactions_dict_instance["function"] = None
return reactions_dict_instance
@classmethod
def from_dict_cogsutils(cls, reactions_dict_instance: typing.Dict):
return cls(**reactions_dict_instance)
async def wait(self):
if not self.r:
await start_adding_reactions(self.message, self.reactions)
self.r = True
predicates = ReactionPredicate.same_context(message=self.message)
result = False
try:
while True:
if result:
break
tasks = [asyncio.create_task(self.bot.wait_for("reaction_add", check=predicates))]
done, pending = await asyncio.wait(
tasks, timeout=self.timeout, return_when=asyncio.FIRST_COMPLETED
)
for task in pending:
task.cancel()
if len(done) == 0:
raise TimeoutError()
reaction, user = done.pop().result()
result = await self.reaction_check(reaction, user)
except TimeoutError:
await self.on_timeout()
async def reaction_check(self, reaction: discord.Reaction, user: discord.User):
async def remove_reaction(remove_reaction, message: discord.Message, reaction: discord.Reaction, user: discord.User):
if remove_reaction:
try:
await message.remove_reaction(emoji=reaction, member=user)
except discord.HTTPException:
pass
if str(reaction.emoji) not in self.reactions:
await remove_reaction(self.remove_reaction, self.message, reaction, user)
return False
if self.check is not None:
if not self.check(reaction, user):
await remove_reaction(self.remove_reaction, self.message, reaction, user)
return False
if self.members is not None:
if user.id not in self.members:
await remove_reaction(self.remove_reaction, self.message, reaction, user)
return False
await remove_reaction(self.remove_reaction, self.message, reaction, user)
self.reaction_result = reaction
self.user_result = user
if self.function is not None:
self.function_result = await self.function(self, reaction, user, **self.function_args)
self.done.set()
# keep listening if infinity is set; otherwise stop after the first accepted reaction
return not self.infinity
async def on_timeout(self):
self.done.set()
async def wait_result(self):
self.done = asyncio.Event()
await self.done.wait()
reaction, user, function_result = self.get_result()
if reaction is None:
raise TimeoutError()
return reaction, user, function_result
def get_result(self):
return self.reaction_result, self.user_result, self.function_result
class Menu():
"""Create Menus easily."""
def __init__(self, pages: typing.List[typing.Union[typing.Dict[str, typing.Union[str, discord.Embed]], discord.Embed, str]], timeout: typing.Optional[int]=180, delete_after_timeout: typing.Optional[bool]=False, way: typing.Optional[typing.Literal["buttons", "reactions", "dropdown"]]="buttons", controls: typing.Optional[typing.Dict]={"⏮️": "left_page", "◀️": "prev_page", "❌": "close_page", "▶️": "next_page", "⏭️": "right_page"}, page_start: typing.Optional[int]=0, check_owner: typing.Optional[bool]=True, members_authored: typing.Optional[typing.Iterable[discord.Member]]=[]):
self.ctx = None
self.pages = pages
self.timeout = timeout
self.delete_after_timeout = delete_after_timeout
self.way = way
self.controls = controls.copy()
self.check_owner = check_owner
self.members_authored = members_authored
if not CogsUtils().is_dpy2 and self.way in ("buttons", "dropdown"):
self.way = "reactions"
if not isinstance(self.pages[0], (typing.Dict, discord.Embed, str)):
raise RuntimeError("Pages must be of type typing.Dict, discord.Embed or str.")
self.source = self._SimplePageSource(items=pages)
if not self.source.is_paginating():
for emoji, name in controls.items():
if name in ["left_page", "prev_page", "next_page", "right_page"]:
del self.controls[emoji]
self.message = None
self.view = None
self.current_page = page_start
async def start(self, ctx: commands.Context):
"""
Used to start the menu displaying the first page requested.
Parameters
----------
ctx: `commands.Context`
The context to start the menu in.
"""
self.ctx = ctx
if self.way == "reactions":
asyncio.create_task(redbot.core.utils.menus.menu(ctx, pages=self.pages, controls=redbot.core.utils.menus.DEFAULT_CONTROLS, page=self.current_page, timeout=self.timeout))
return
if self.way == "buttons":
self.view = Buttons(timeout=self.timeout, buttons=[{"emoji": str(e), "custom_id": str(n)} for e, n in self.controls.items()], members=[self.ctx.author.id] + (list(self.ctx.bot.owner_ids) if self.check_owner else []) + [x.id for x in self.members_authored], infinity=True)
await self.send_initial_message(ctx, ctx.channel)
elif self.way == "reactions":
await self.send_initial_message(ctx, ctx.channel)
self.view = Reactions(bot=self.ctx.bot, message=self.message, remove_reaction=True, timeout=self.timeout, reactions=[str(e) for e in self.controls.keys()], members=[self.ctx.author.id] + (list(self.ctx.bot.owner_ids) if self.check_owner else []) + [x.id for x in self.members_authored], infinity=True)
elif self.way == "dropdown":
self.view = Dropdown(timeout=self.timeout, options=[{"emoji": str(e), "label": str(n).replace("_", " ").capitalize()} for e, n in self.controls.items()], members=[self.ctx.author.id] + (list(self.ctx.bot.owner_ids) if self.check_owner else []) + [x.id for x in self.members_authored], infinity=True)
await self.send_initial_message(ctx, ctx.channel)
try:
while True:
if self.way == "buttons":
interaction, function_result = await self.view.wait_result()
response = interaction.data["custom_id"]
elif self.way == "reactions":
reaction, user, function_result = await self.view.wait_result()
response = self.controls[str(reaction.emoji)]
elif self.way == "dropdown":
interaction, values, function_result = await self.view.wait_result()
response = str(values[0])
if response == "left_page":
self.current_page = 0
elif response == "prev_page":
self.current_page -= 1
elif response == "close_page":
if self.way == "buttons" or self.way == "dropdown":
self.view.stop()
await self.message.delete()
break
elif response == "next_page":
self.current_page += 1
elif response == "right_page":
self.current_page = self.source.get_max_pages() - 1
kwargs = await self.get_page(self.current_page)
if self.way == "buttons" or self.way == "dropdown":
try:
await interaction.response.edit_message(**kwargs) # , view=self.view.from_dict_cogsutils(self.view.to_dict_cogsutils())
except discord.errors.InteractionResponded:
await self.message.edit(**kwargs)
else:
await self.message.edit(**kwargs)
except TimeoutError:
await self.on_timeout()
async def send_initial_message(self, ctx: commands.Context, channel: discord.abc.Messageable):
self.author = ctx.author
self.ctx = ctx
kwargs = await self.get_page(self.current_page)
self.message = await channel.send(**kwargs, view=self.view if self.way in ["buttons", "dropdown"] else None)
return self.message
async def get_page(self, page_num: int):
try:
page = await self.source.get_page(page_num)
except IndexError:
self.current_page = 0
page = await self.source.get_page(self.current_page)
value = await self.source.format_page(self, page)
if isinstance(value, typing.Dict):
return value
elif isinstance(value, str):
return {"content": value, "embed": None}
elif isinstance(value, discord.Embed):
return {"embed": value, "content": None}
async def on_timeout(self):
if self.delete_after_timeout:
await self.message.delete()
else:
if self.way == "buttons":
self.view.stop()
await self.message.edit(view=None)
elif self.way == "reactions":
try:
await self.message.clear_reactions()
except discord.HTTPException:
# clear_reactions needs the Manage Messages permission; fall back to removing the bot's own reactions one by one.
for emoji in self.controls.keys():
try:
await self.message.remove_reaction(emoji, self.ctx.bot.user)
except discord.HTTPException:
pass
elif self.way == "dropdown":
self.view.stop()
await self.message.edit(view=None)
class _SimplePageSource(menus.ListPageSource):
def __init__(self, items: typing.List[typing.Union[typing.Dict[str, typing.Union[str, discord.Embed]], discord.Embed, str]]):
super().__init__(items, per_page=1)
async def format_page(
self, view, page: typing.Union[typing.Dict[str, typing.Union[str, discord.Embed]], discord.Embed, str]
) -> typing.Union[str, discord.Embed]:
return page
@commands.is_owner()
@commands.command(hidden=True)
async def getallfor(ctx: commands.Context, all: typing.Optional[typing.Literal["all", "ALL"]]=None, page: typing.Optional[int]=None, repo: typing.Optional[typing.Union[Repo, typing.Literal["AAA3A", "aaa3a"]]]=None, check_updates: typing.Optional[bool]=False, cog: typing.Optional[InstalledCog]=None, command: typing.Optional[str]=None):
"""Get all the necessary information to get support on a bot/repo/cog/command.
With a html file.
"""
if all is not None:
repo = None
cog = None
command = None
check_updates = False
if repo is not None:
_repos = [repo]
else:
_repos = [None]
if cog is not None:
_cogs = [cog]
else:
_cogs = [None]
if command is not None:
_commands = [command]
else:
_commands = [None]
if command is not None:
object_command = ctx.bot.get_command(_commands[0])
if object_command is None:
await ctx.send(_("The command `{command}` does not exist.").format(**locals()))
return
_commands = [object_command]
downloader = ctx.bot.get_cog("Downloader")
if downloader is None:
if await CogsUtils(bot=ctx.bot).ConfirmationAsk(ctx, _("The cog downloader is not loaded. I can't continue. Do you want me to do it?").format(**locals())):
await ctx.invoke(ctx.bot.get_command("load"), "downloader")
downloader = ctx.bot.get_cog("Downloader")
else:
return
installed_cogs = await downloader.config.installed_cogs()
loaded_cogs = [c.lower() for c in ctx.bot.cogs]
if repo is not None:
rp = _repos[0]
if not isinstance(rp, Repo) and "AAA3A".lower() not in rp.lower():
await ctx.send(_("Repo by the name `{rp}` does not exist.").format(**locals()))
return
if not isinstance(repo, Repo):
found = False
for r in await downloader.config.installed_cogs():
if "AAA3A".lower() in str(r).lower():
_repos = [downloader._repo_manager.get_repo(str(r))]
found = True
break
if not found:
await ctx.send(_("Repo by the name `{rp}` does not exist.").format(**locals()))
return
if check_updates:
cogs_to_check, failed = await downloader._get_cogs_to_check(repos={_repos[0]})
cogs_to_update, libs_to_update = await downloader._available_updates(cogs_to_check)
cogs_to_update, filter_message = downloader._filter_incorrect_cogs(cogs_to_update)
to_update_cogs = [c.name.lower() for c in cogs_to_update]
if all is not None:
_repos = []
for r in installed_cogs:
_repos.append(downloader._repo_manager.get_repo(str(r)))
_cogs = []
for r in installed_cogs:
for c in installed_cogs[r]:
_cogs.append(await InstalledCog.convert(ctx, str(c)))
_commands = []
for c in ctx.bot.all_commands:
cmd = ctx.bot.get_command(str(c))
if cmd.cog is not None:
_commands.append(cmd)
repo = True
cog = True
command = True
IS_WINDOWS = os.name == "nt"
IS_MAC = sys.platform == "darwin"
IS_LINUX = sys.platform == "linux"
if IS_LINUX:
import distro # pylint: disable=import-error
python_executable = sys.executable
python_version = ".".join(map(str, sys.version_info[:3]))
pyver = f"{python_version} ({platform.architecture()[0]})"
pipver = pip.__version__
redver = red_version_info
dpy_version = discord.__version__
if IS_WINDOWS:
os_info = platform.uname()
osver = f"{os_info.system} {os_info.release} (version {os_info.version})"
elif IS_MAC:
os_info = platform.mac_ver()
osver = f"Mac OSX {os_info[0]} {os_info[2]}"
elif IS_LINUX:
osver = f"{distro.name()} {distro.version()}".strip()
else:
osver = "Could not parse OS, report this on Github."
driver = storage_type()
data_path_original = Path(basic_config["DATA_PATH"])
# Defaults, in case neither USERPROFILE nor HOME is set in the environment.
data_path = data_path_original
_config_file = config_file
if "USERPROFILE" in os.environ:
data_path = Path(str(data_path_original).replace(os.environ["USERPROFILE"], "{USERPROFILE}"))
_config_file = Path(str(config_file).replace(os.environ["USERPROFILE"], "{USERPROFILE}"))
python_executable = Path(str(python_executable).replace(os.environ["USERPROFILE"], "{USERPROFILE}"))
if "HOME" in os.environ:
data_path = Path(str(data_path_original).replace(os.environ["HOME"], "{HOME}"))
_config_file = Path(str(config_file).replace(os.environ["HOME"], "{HOME}"))
python_executable = Path(str(python_executable).replace(os.environ["HOME"], "{HOME}"))
disabled_intents = (
", ".join(
intent_name.replace("_", " ").title()
for intent_name, enabled in ctx.bot.intents
if not enabled
)
or "None"
)
uptime = humanize_timedelta(timedelta=datetime.datetime.utcnow() - ctx.bot.uptime)
async def can_run(command):
try:
await command.can_run(ctx, check_all_parents=True, change_permission_state=False)
except Exception:
return False
else:
return True
def get_aliases(command, original):
if alias := list(command.aliases):
if original in alias:
alias.remove(original)
alias.append(command.name)
return alias
def get_perms(command):
final_perms = ""
def neat_format(x):
return " ".join(i.capitalize() for i in x.replace("_", " ").split())
user_perms = []
if perms := getattr(command.requires, "user_perms"):
user_perms.extend(neat_format(i) for i, j in perms if j)
if perms := command.requires.privilege_level:
if perms.name != "NONE":
user_perms.append(neat_format(perms.name))
if user_perms:
final_perms += "User Permission(s): " + ", ".join(user_perms) + "\n"
if perms := getattr(command.requires, "bot_perms"):
if perms_list := ", ".join(neat_format(i) for i, j in perms if j):
final_perms += "Bot Permission(s): " + perms_list
return final_perms
def get_cooldowns(command):
cooldowns = []
if s := command._buckets._cooldown:
txt = f"{s.rate} time{'s' if s.rate>1 else ''} in {humanize_timedelta(seconds=s.per)}"
try:
txt += f" per {s.type.name.capitalize()}"
# This is to avoid custom bucketype erroring out stuff (eg:licenseinfo)
except AttributeError:
pass
cooldowns.append(txt)
if s := command._max_concurrency:
cooldowns.append(f"Max concurrent uses: {s.number} per {s.per.name.capitalize()}")
return cooldowns
async def get_diagnose(ctx, command):
issue_diagnoser = IssueDiagnoser(ctx.bot, ctx, ctx.channel, ctx.author, command)
await issue_diagnoser._prepare()
diagnose_result = []
result = await issue_diagnoser._check_until_fail(
"",
(
issue_diagnoser._check_global_call_once_checks_issues,
issue_diagnoser._check_disabled_command_issues,
issue_diagnoser._check_can_run_issues,
),
)
if result.success:
diagnose_result.append(_("All checks passed and no issues were detected."))
else:
diagnose_result.append(_("The bot has been able to identify the issue."))
details = issue_diagnoser._get_details_from_check_result(result)
if details:
diagnose_result.append(bold(_("Detected issue: ")) + details)
if result.resolution:
diagnose_result.append(bold(_("Solution: ")) + result.resolution)
diagnose_result.extend(issue_diagnoser._get_message_from_check_result(result))
return diagnose_result
async def get_all_config(cog: commands.Cog):
config = {}
if not hasattr(cog, 'config'):
return config
try:
config["global"] = await cog.config.all()
config["users"] = await cog.config.all_users()
config["guilds"] = await cog.config.all_guilds()
config["members"] = await cog.config.all_members()
config["roles"] = await cog.config.all_roles()
config["channels"] = await cog.config.all_channels()
except Exception:
return config
return config
use_emojis = False
check_emoji = "✅" if use_emojis else True
cross_emoji = "❌" if use_emojis else False
##################################################
os_table = Table("Key", "Value", title="Host machine informations")
os_table.add_row("OS version", str(osver))
os_table.add_row("Python executable", str(python_executable))
os_table.add_row("Python version", str(pyver))
os_table.add_row("Pip version", str(pipver))
raw_os_table_str = no_colour_rich_markup(os_table)
##################################################
red_table = Table("Key", "Value", title="Red instance informations")
red_table.add_row("Red version", str(redver))
red_table.add_row("Discord.py version", str(dpy_version))
red_table.add_row("Instance name", str(instance_name))
red_table.add_row("Storage type", str(driver))
red_table.add_row("Disabled intents", str(disabled_intents))
red_table.add_row("Data path", str(data_path))
red_table.add_row("Metadata file", str(_config_file))
red_table.add_row("Uptime", str(uptime))
red_table.add_row("Global prefixe(s)", str(await ctx.bot.get_valid_prefixes()).replace(f"{ctx.bot.user.id}", "{bot_id}"))
if ctx.guild is not None:
if await ctx.bot.get_valid_prefixes() != await ctx.bot.get_valid_prefixes(ctx.guild):
red_table.add_row("Guild prefix(es)", str(await ctx.bot.get_valid_prefixes(ctx.guild)).replace(f"{ctx.bot.user.id}", "{bot_id}"))
raw_red_table_str = no_colour_rich_markup(red_table)
##################################################
context_table = Table("Key", "Value", title="Context")
context_table.add_row("Channel type", str(f"discord.{ctx.channel.__class__.__name__}"))
context_table.add_row("Bot permissions value (guild)", str(ctx.guild.me.guild_permissions.value if ctx.guild is not None else "Not in a guild."))
context_table.add_row("Bot permissions value (channel)", str(ctx.channel.permissions_for(ctx.guild.me).value if ctx.guild is not None else ctx.channel.permissions_for(ctx.bot.user).value))
context_table.add_row("User permissions value (guild)", str(ctx.author.guild_permissions.value if ctx.guild is not None else "Not in a guild."))
context_table.add_row("User permissions value (channel)", str(ctx.channel.permissions_for(ctx.author).value))
raw_context_table_str = no_colour_rich_markup(context_table)
##################################################
if repo is not None:
raw_repo_table_str = []
for repo in _repos:
if not check_updates:
cogs_table = Table("Name", "Commit", "Loaded", "Pinned", title=f"Cogs installed for {repo.name}")
else:
cogs_table = Table("Name", "Commit", "Loaded", "Pinned", "To update", title=f"Cogs installed for {repo.name}")
for _cog in installed_cogs[repo.name]:
_cog = await InstalledCog.convert(ctx, _cog)
if not check_updates:
cogs_table.add_row(str(_cog.name), str(_cog.commit), str(check_emoji if _cog.name in loaded_cogs else cross_emoji), str(check_emoji if _cog.pinned else cross_emoji))
else:
cogs_table.add_row(str(_cog.name), str(_cog.commit), str(check_emoji if _cog.name in loaded_cogs else cross_emoji), str(check_emoji if _cog.pinned else cross_emoji), str(check_emoji if _cog.name in to_update_cogs else cross_emoji))
raw_repo_table_str.append(no_colour_rich_markup(cogs_table))
else:
raw_repo_table_str = None
##################################################
if cog is not None:
raw_cogs_table_str = []
for cog in _cogs:
cog_table = Table("Key", "Value", title=f"Cog {cog.name}")
cog_table.add_row("Name", str(cog.name))
cog_table.add_row("Repo name", str(cog.repo_name))
cog_table.add_row("Hidden", str(check_emoji if cog.hidden else cross_emoji))
cog_table.add_row("Disabled", str(check_emoji if cog.disabled else cross_emoji))
cog_table.add_row("Required cogs", str([r for r in cog.required_cogs]))
cog_table.add_row("Requirements", str([r for r in cog.requirements]))
cog_table.add_row("Short", str(cog.short))
cog_table.add_row("Min bot version", str(cog.min_bot_version))
cog_table.add_row("Max bot version", str(cog.max_bot_version))
cog_table.add_row("Min python version", str(cog.min_python_version))
cog_table.add_row("Author", str([a for a in cog.author]))
cog_table.add_row("Commit", str(cog.commit))
raw_cog_table_str = no_colour_rich_markup(cog_table)
raw_cogs_table_str.append(raw_cog_table_str)
else:
raw_cogs_table_str = None
##################################################
if command is not None:
raw_commands_table_str = []
for command in _commands:
command_table = Table("Key", "Value", title=f"Command {command.qualified_name}")
command_table.add_row("Qualified name", str(command.qualified_name))
command_table.add_row("Cog name", str(command.cog_name))
command_table.add_row("Short description", str(command.short_doc))
command_table.add_row("Syntax", str(f"{ctx.clean_prefix}{command.qualified_name} {command.signature}"))
command_table.add_row("Hidden", str(command.hidden))
command_table.add_row("Parents", str(command.full_parent_name if not command.full_parent_name == "" else None))
command_table.add_row("Can see", str(await command.can_see(ctx)))
command_table.add_row("Can run", str(await can_run(command)))
command_table.add_row("Params", str(command.clean_params))
command_table.add_row("Aliases", str(get_aliases(command, command.qualified_name)))
command_table.add_row("Requires", str(get_perms(command)))
command_table.add_row("Cooldowns", str(get_cooldowns(command)))
command_table.add_row("Is on cooldown", str(command.is_on_cooldown(ctx)))
if ctx.guild is not None:
diagnose_result = await get_diagnose(ctx, command)
for i, x in enumerate(diagnose_result):
if i == 0:
command_table.add_row("Issue Diagnose", str(x))
else:
command_table.add_row("", str(x).replace("✅", "").replace("❌", ""))
raw_command_table_str = no_colour_rich_markup(command_table)
raw_commands_table_str.append(raw_command_table_str)
cog = command.cog.__class__.__name__ if command.cog is not None else "None"
if hasattr(ctx.bot, "last_exceptions_cogs") and cog in ctx.bot.last_exceptions_cogs and command.qualified_name in ctx.bot.last_exceptions_cogs[cog]:
raw_errors_table = []
error_table = Table("Last error recorded for this command")
error_table.add_row(str(ctx.bot.last_exceptions_cogs[cog][command.qualified_name][len(ctx.bot.last_exceptions_cogs[cog][command.qualified_name]) - 1]))
raw_errors_table.append(no_colour_rich_markup(error_table))
else:
raw_errors_table = None
else:
raw_commands_table_str = None
raw_errors_table = None
##################################################
if _cogs is not None and len(_cogs) == 1:
cog = None
for name, value in ctx.bot.cogs.items():
if name.lower() == _cogs[0].name.lower():
cog = value
break
if cog is not None:
config_table = Table("", title=f"All Config for {cog.__class__.__name__}")
config_table.add_row(str(await get_all_config(cog)))
raw_config_table_str = no_colour_rich_markup(config_table)
else:
raw_config_table_str = None
else:
raw_config_table_str = None
##################################################
response = [raw_os_table_str, raw_red_table_str, raw_context_table_str]
for x in [raw_repo_table_str, raw_cogs_table_str, raw_commands_table_str, raw_errors_table, raw_config_table_str]:
if x is not None:
if isinstance(x, typing.List):
for y in x:
response.append(y)
elif isinstance(x, str):
response.append(x)
to_html = to_html_getallfor.replace("{AVATAR_URL}", str(ctx.bot.user.display_avatar) if CogsUtils().is_dpy2 else str(ctx.bot.user.avatar_url)).replace("{BOT_NAME}", str(ctx.bot.user.name)).replace("{REPO_NAME}", str(getattr(_repos[0], "name", None) if all is None else "All")).replace("{COG_NAME}", str(getattr(_cogs[0], "name", None) if all is None else "All")).replace("{COMMAND_NAME}", str(getattr(_commands[0], "qualified_name", None) if all is None else "All"))
message_html = message_html_getallfor
end_html = end_html_getallfor
count_page = 0
if page is not None and 0 <= page - 1 < len(response):
response = [response[page - 1]]
for page in response:
if page is not None:
count_page += 1
if count_page == 1:
to_html += message_html.replace("{MESSAGE_CONTENT}", str(page).replace("```", "").replace("<", "<").replace("\n", "<br>")).replace("{TIMESTAMP}", str(ctx.message.created_at.strftime("%b %d, %Y %I:%M %p")))
else:
to_html += message_html.replace(' <div class="chatlog__messages">', ' </div> <div class="chatlog__message ">').replace("{MESSAGE_CONTENT}", str(page).replace("```", "").replace("<", "&lt;").replace("\n", "<br>")).replace('<span class="chatlog__timestamp">{TIMESTAMP}</span> ', "")
if all is None and "Config" not in page:
for p in pagify(page):
p = p.replace("```", "")
p = box(p)
await ctx.send(p)
to_html += end_html
if CogsUtils().check_permissions_for(channel=ctx.channel, user=ctx.me, check=["send_attachments"]):
await ctx.send(file=text_to_file(text=to_html, filename="diagnostic.html"))
to_html_getallfor = """
<!--
Thanks to @mahtoid for this transcript! It was retrieved from: https://github.com/mahtoid/DiscordChatExporterPy. Then all unnecessary elements were removed and the header was modified.
-->
<!DOCTYPE html>
<html lang="en">
<head>
<title>Diagnostic</title>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width" />
<style>
@font-face {
font-family: Whitney;
src: url(https://cdn.jsdelivr.net/gh/mahtoid/DiscordUtils@master/whitney-300.woff);
font-weight: 300;
}
@font-face {
font-family: Whitney;
src: url(https://cdn.jsdelivr.net/gh/mahtoid/DiscordUtils@master/whitney-400.woff);
font-weight: 400;
}
@font-face {
font-family: Whitney;
src: url(https://cdn.jsdelivr.net/gh/mahtoid/DiscordUtils@master/whitney-500.woff);
font-weight: 500;
}
@font-face {
font-family: Whitney;
src: url(https://cdn.jsdelivr.net/gh/mahtoid/DiscordUtils@master/whitney-600.woff);
font-weight: 600;
}
@font-face {
font-family: Whitney;
src: url(https://cdn.jsdelivr.net/gh/mahtoid/DiscordUtils@master/whitney-700.woff);
font-weight: 700;
}
body {
font-family: "Whitney", "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 17px;
}
a {
text-decoration: none;
}
.markdown {
max-width: 100%;
line-height: 1.3;
overflow-wrap: break-word;
}
.preserve-whitespace {
white-space: pre-wrap;
}
.pre {
font-family: "Consolas", "Courier New", Courier, monospace;
}
.pre--multiline {
margin-top: 0.25em;
padding: 0.5em;
border: 2px solid;
border-radius: 5px;
}
.pre--inline {
padding: 2px;
border-radius: 3px;
font-size: 0.85em;
}
.emoji {
width: 1.25em;
height: 1.25em;
margin: 0 0.06em;
vertical-align: -0.4em;
}
.emoji--small {
width: 1em;
height: 1em;
}
.emoji--large {
width: 2.8em;
height: 2.8em;
}
/* Chatlog */
.chatlog {
max-width: 100%;
}
.chatlog__message-group {
display: grid;
margin: 0 0.6em;
padding: 0.9em 0;
border-top: 1px solid;
grid-template-columns: auto 1fr;
}
.chatlog__timestamp {
margin-left: 0.3em;
font-size: 0.75em;
}
/* General */
body {
background-color: #36393e;
color: #dcddde;
}
a {
color: #0096cf;
}
.pre {
background-color: #2f3136 !important;
}
.pre--multiline {
border-color: #282b30 !important;
color: #b9bbbe !important;
}
/* Chatlog */
.chatlog__message-group {
border-color: rgba(255, 255, 255, 0.1);
}
.chatlog__timestamp {
color: rgba(255, 255, 255, 0.2);
}
/* === INFO === */
.info {
display: flex;
max-width: 100%;
margin: 0 5px 10px 5px;
}
.info__bot-icon-container {
flex: 0;
}
.info__bot-icon {
max-width: 95px;
max-height: 95px;
}
.info__metadata {
flex: 1;
margin-left: 10px;
}
</style>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.15.6/styles/solarized-dark.min.css">
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.15.6/highlight.min.js"></script>
<script>
<!-- Code Block Markdown (```lang```) -->
document.addEventListener('DOMContentLoaded', () => {
document.querySelectorAll('.pre--multiline').forEach((block) => {
hljs.highlightBlock(block);
});
});
</script>
</head>
<body>
<div class="info">
<div class="info__bot-icon-container">
<img class="info__bot-icon" src="{AVATAR_URL}" />
</div>
<div class="info__metadata">
<div class="info__report-name">Diagnostic</div>
<div class="info__report-infos">Bot name: {BOT_NAME}</div>
<div class="info__report-infos">Repo name: {REPO_NAME}</div>
<div class="info__report-infos">Cog name: {COG_NAME}</div>
<div class="info__report-infos">Command name: {COMMAND_NAME}</div>
</div>
</div>
<div class="chatlog">
<div class="chatlog__message-group">"""
message_html_getallfor = """ <div class="chatlog__messages">
<span class="chatlog__timestamp">{TIMESTAMP}</span> <div class="chatlog__message ">
<div class="chatlog__content">
<div class="markdown">
<span class="preserve-whitespace"><div class="pre pre--multiline nohighlight">{MESSAGE_CONTENT}</div></span>
</div>
</div>"""
end_html_getallfor = """
</div>
</div>
</body>
</html>"""
```
#### File: AAA3A-cogs/discordsearch/discordsearch.py
```python
from .AAA3A_utils.cogsutils import CogsUtils, Menu # isort:skip
from redbot.core import commands # isort:skip
from redbot.core.i18n import Translator, cog_i18n # isort:skip
from redbot.core.bot import Red # isort:skip
import discord # isort:skip
import typing # isort:skip
import argparse
import dateparser
import datetime
from redbot.core.utils.chat_formatting import bold, underline
from redbot.core.utils.common_filters import URL_RE
from time import monotonic
# Credits:
# Thanks to @epic guy on Discord for the basic syntax (command groups, commands) and also commands (await ctx.send, await ctx.author.send, await ctx.message.delete())!
# Thanks to the developers of the cogs I added features to as it taught me how to make a cog! (Chessgame by WildStriker, Captcha by Kreusada, Speak by Epic guy and Rommer by Dav)
# Thanks to all the people who helped me with some commands in the #coding channel of the redbot support server!
_ = Translator("DiscordSearch", __file__)
@cog_i18n(_)
class DiscordSearch(commands.Cog):
"""A cog to edit roles!"""
def __init__(self, bot):
self.bot: Red = bot
self.cogsutils = CogsUtils(cog=self)
self.cogsutils._setup()
@commands.guildowner()
@commands.guild_only()
@commands.command(name="discordsearch", aliases=["dsearch"])
async def discordsearch(self, ctx: commands.Context, channel: typing.Optional[discord.TextChannel]=None, *args: str):
"""Search for a message on Discord in a channel.
Warning: The bot calls the Discord API for each search.
Arguments:
`--author @user1 --author user2#1234 --author 0123456789`
`--mention @user1 --mention user2#1234 --mention 0123456789`
`--before now`
`--after "25/12/2000 00h00"`
`--pinned true`
`--content "AAA3A-cogs"`
`--contain link --contain embed --contain file`
"""
if not args:
await ctx.send_help()
return
try:
args = await SearchArgs().convert(ctx, args)
except commands.BadArgument as e:
await ctx.send(e)
return
authors = args.authors
mentions = args.mentions
before = args.before
after = args.after
pinned = args.pinned
content = args.content
contains = args.contains
if channel is None:
channel = ctx.channel
if not any([setting is not None for setting in [authors, mentions, before, after, pinned, content, contains]]):
await ctx.send("You must provide at least one parameter.")
return
args_str = [
underline("--- Settings of search ---"),
bold("Authors:") + " " + (", ".join([author.mention for author in authors]) if authors is not None else "None"),
bold("Mentions:") + " " + (", ".join([mention.mention for mention in mentions]) if mentions is not None else "None"),
bold("Before:") + " " + f"{before}",
bold("After:") + " " + f"{after}",
bold("Pinned:") + " " + f"{pinned}",
bold("Content:") + " " + f"{content}",
bold("Contains:") + " " + (", ".join([contain for contain in contains]) if contains is not None else "None")
]
args_str = "\n".join(args_str)
async with ctx.typing():
start = monotonic()
messages: typing.List[discord.Message] = []
async for message in channel.history(limit=None, oldest_first=False, before=before, after=after):
if authors is not None and message.author not in authors:
continue
if mentions is not None and not any(mention in mentions for mention in message.mentions):
continue
if pinned is not None and not message.pinned == pinned:
continue
if content is not None and not content.lower() in message.content.lower():
continue
if contains is not None:
if "link" in contains:
regex = URL_RE.findall(message.content.lower())
if regex == []:
continue
if "embed" in contains and len(message.embeds) == 0:
continue
if "file" in contains and len(message.attachments) == 0:
continue
messages.append(message)
embeds = []
if len(messages) == 0:
not_found = True
else:
not_found = False
if not not_found:
count = 0
for message in messages:
count += 1
embed: discord.Embed = discord.Embed()
embed.title = f"Search in #{channel.name} ({channel.id})"
embed.description = args_str
embed.url = message.jump_url
embed.set_author(name=f"{message.author.display_name} ({message.author.id})")
embed.add_field(name=f"Message {message.id}", value=(message.content if len(message.content) < 1025 else message.content[:1020] + "...") if message.content is not None else "None")
embed.timestamp = message.created_at
embed.set_thumbnail(url="https://us.123rf.com/450wm/sommersby/sommersby1610/sommersby161000062/66918773-recherche-ic%C3%B4ne-plate-recherche-ic%C3%B4ne-conception-recherche-ic%C3%B4ne-web-vecteur-loupe.jpg")
embed.set_footer(text=f"Page {count}/{len(messages)}", icon_url="https://us.123rf.com/450wm/sommersby/sommersby1610/sommersby161000062/66918773-recherche-ic%C3%B4ne-plate-recherche-ic%C3%B4ne-conception-recherche-ic%C3%B4ne-web-vecteur-loupe.jpg")
embeds.append(embed)
else:
embed: discord.Embed = discord.Embed()
embed.title = f"Search in #{channel.name}"
embed.add_field(name="Result:", value="Sorry, I could not find any results.")
embed.timestamp = datetime.datetime.now()
embed.set_thumbnail(url="https://us.123rf.com/450wm/sommersby/sommersby1610/sommersby161000062/66918773-recherche-ic%C3%B4ne-plate-recherche-ic%C3%B4ne-conception-recherche-ic%C3%B4ne-web-vecteur-loupe.jpg")
embed.set_footer(text=f"Page 1/1", icon_url="https://us.123rf.com/450wm/sommersby/sommersby1610/sommersby161000062/66918773-recherche-ic%C3%B4ne-plate-recherche-ic%C3%B4ne-conception-recherche-ic%C3%B4ne-web-vecteur-loupe.jpg")
embeds.append(embed)
end = monotonic()
total = round(end - start, 1)
for embed in embeds:
embed.title = f"Search in #{channel.name} ({channel.id}) in {total}s"
await Menu(pages=embeds).start(ctx)
class NoExitParser(argparse.ArgumentParser):
def error(self, message):
raise commands.BadArgument(message)
class SearchArgs():
def parse_arguments(self, arguments: str):
parser = NoExitParser(
description="Selection args for DiscordSearch.", add_help=False
)
parser.add_argument("--author", dest="authors", nargs="+")
parser.add_argument("--mention", dest="mentions", nargs="+")
parser.add_argument("--before", dest="before")
parser.add_argument("--after", dest="after")
parser.add_argument("--pinned", dest="pinned")
parser.add_argument("--content", dest="content", nargs="*")
parser.add_argument("--contain", dest="contains", nargs="+")
return parser.parse_args(arguments)
async def convert(self, ctx: commands.Context, arguments):
self.ctx = ctx
async with ctx.typing():
args = self.parse_arguments(arguments)
if args.authors is not None:
self.authors = []
for author in args.authors:
author = await discord.ext.commands.MemberConverter().convert(ctx, author)
if author is None:
raise commands.BadArgument("`--author` must be a member.")
self.authors.append(author)
else:
self.authors = None
if args.mentions is not None:
self.mentions = []
for mention in args.mentions:
mention = await discord.ext.commands.MemberConverter().convert(ctx, mention)
if mention is None:
raise commands.BadArgument("`--mention` must be a member.")
self.mentions.append(mention)
else:
self.mentions = None
self.before = await self.DateConverter().convert(ctx, args.before) if args.before is not None else args.before
self.after = await self.DateConverter().convert(ctx, args.after) if args.after is not None else args.after
if args.pinned is not None:
args.pinned = str(args.pinned)
if args.pinned.lower() in ("true", "y", "yes"):
self.pinned = True
elif args.pinned.lower() in ("false", "n", "no"):
self.pinned = False
else:
raise commands.BadArgument("`--pinned` must be a bool.")
else:
self.pinned = args.pinned
self.content = "".join(args.content) if args.content is not None else args.content
if args.contains is not None:
self.contains = []
for contain in args.contains:
if contain.lower() not in ("link", "embed", "file"):
raise commands.BadArgument("`--contain` must be `link`, `embed` or `file`.")
self.contains.append(contain.lower())
else:
self.contains = None
return self
class DateConverter(commands.Converter):
"""Date converter which uses dateparser.parse()."""
async def convert(self, ctx: commands.Context, arg: str) -> datetime.datetime:
parsed = dateparser.parse(arg)
if parsed is None:
raise commands.BadArgument("Unrecognized date/time.")
return parsed
```
#### File: AAA3A-cogs/editrole/__init__.py
```python
from .AAA3A_utils.cogsutils import CogsUtils # isort:skip
from redbot.core.bot import Red # isort:skip
import asyncio
import json
from pathlib import Path
from .editrole import EditRole
with open(Path(__file__).parent / "info.json") as fp:
__red_end_user_data_statement__ = json.load(fp)["end_user_data_statement"]
old_editrole = None
async def setup_after_ready(bot):
global old_editrole
await bot.wait_until_red_ready()
cog = EditRole(bot)
old_editrole = bot.get_command("editrole")
if old_editrole:
bot.remove_command(old_editrole.name)
await CogsUtils().add_cog(bot, cog)
async def setup(bot: Red):
asyncio.create_task(setup_after_ready(bot))
def teardown(bot: Red):
bot.add_command(old_editrole)
```
#### File: AAA3A-cogs/rolesbuttons/converters.py
```python
from redbot.core import commands # isort:skip
from redbot.core.i18n import Translator # isort:skip
import discord # isort:skip
import typing # isort:skip
import re
_ = Translator("RolesButtons", __file__)
class RoleHierarchyConverter(discord.ext.commands.RoleConverter):
"""Similar to d.py's RoleConverter but only returns if we have already
passed our hierarchy checks.
"""
async def convert(self, ctx: commands.Context, argument: str) -> discord.Role:
if not ctx.me.guild_permissions.manage_roles:
raise discord.ext.commands.BadArgument("I require manage roles permission to use this command.")
try:
role = await commands.RoleConverter().convert(ctx, argument)
except commands.BadArgument:
raise
else:
if getattr(role, "is_bot_managed", None) and role.is_bot_managed():
raise discord.ext.commands.BadArgument(_("The {role.mention} role is a bot integration role and cannot be assigned or removed.").format(**locals()))
if getattr(role, "is_integration", None) and role.is_integration():
raise discord.ext.commands.BadArgument(_("The {role.mention} role is an integration role and cannot be assigned or removed.").format(**locals()))
if getattr(role, "is_premium_subscriber", None) and role.is_premium_subscriber():
raise discord.ext.commands.BadArgument(_("The {role.mention} role is a premium subscriber role and can only be assigned or removed by Nitro boosting the server.").format(**locals()))
if role >= ctx.me.top_role:
raise discord.ext.commands.BadArgument(_("The {role.mention} role is higher than my highest role in the discord hierarchy.").format(**locals()))
if role >= ctx.author.top_role and ctx.author.id != ctx.guild.owner_id:
raise discord.ext.commands.BadArgument(_("The {role.mention} role is higher than your highest role in the discord hierarchy.").format(**locals()))
return role
class RoleEmojiConverter(discord.ext.commands.Converter):
async def convert(self, ctx: commands.Context, argument: str) -> typing.Tuple[discord.Role, str]:
arg_split = re.split(r";|,|\||-", argument)
try:
role, emoji = arg_split
except Exception:
raise discord.ext.commands.BadArgument(_("Role Emoji must be a role followed by an emoji separated by either `;`, `,`, `|`, or `-`.").format(**locals()))
custom_emoji = None
try:
custom_emoji = await commands.PartialEmojiConverter().convert(ctx, emoji.strip())
except commands.BadArgument:
pass
if not custom_emoji:
custom_emoji = str(emoji)
try:
role = await RoleHierarchyConverter().convert(ctx, role.strip())
except commands.BadArgument:
raise
return role, custom_emoji
```
#### File: AAA3A-cogs/tickettool/settings.py
```python
from .AAA3A_utils.cogsutils import CogsUtils # isort:skip
if CogsUtils().is_dpy2:
from .AAA3A_utils.cogsutils import Buttons # isort:skip
else:
from dislash import ActionRow, Button, ButtonStyle # isort:skip
from redbot.core import commands # isort:skip
from redbot.core.i18n import Translator, cog_i18n # isort:skip
import discord # isort:skip
import typing # isort:skip
_ = Translator("TicketTool", __file__)
class settings(commands.Cog):
@commands.guild_only()
@commands.admin_or_permissions(administrator=True)
@commands.group(name="setticket", aliases=["ticketset"])
async def configuration(self, ctx: commands.Context):
"""Configure TicketTool for your server."""
pass
@configuration.command(name="enable", usage="<true_or_false>")
async def enable(self, ctx: commands.Context, state: bool):
"""Enable or disable Ticket System
Use `True` (or `yes`) to enable or `False` (or `no`) to disable.
"""
config = await self.config.guild(ctx.guild).settings.all()
if config["category_open"] is None or config["category_close"] is None or config["admin_role"] is None:
await ctx.send(_("You cannot enable the ticket system on this server if you have not configured the following options:\n"
"- The category of open tickets : `{ctx.prefix}setticket categoryopen <category>`\n"
"- The category of close tickets : `{ctx.prefix}setticket categoryclose <category>`\n"
"- The admin role has full access to the tickets : `{ctx.prefix}setticket adminrole <role>`\n"
"All other parameters are optional or have default values that will be used.").format(**locals()))
return
actual_enable = config["enable"]
if actual_enable is state:
await ctx.send(_("Ticket System is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.enable.set(state)
await ctx.send(_("Ticket System state registered: {state}.").format(**locals()))
@configuration.command(aliases=["lchann", "lchannel", "logschan", "logchannel", "logsc"], usage="<text_channel_or_'none'>")
async def logschannel(self, ctx: commands.Context, *, channel: typing.Optional[discord.TextChannel]=None):
"""Set a channel where events are registered.
``channel``: Text channel.
You can also use "None" if you wish to remove the logging channel.
"""
if channel is None:
await self.config.guild(ctx.guild).settings.logschannel.clear()
await ctx.send(_("Logging channel removed.").format(**locals()))
return
needperm = await self.check_permissions_in_channel(["embed_links", "read_messages", "read_message_history", "send_messages", "attach_files"], channel)
if needperm:
await ctx.send(_("The bot does not have at least one of the following permissions in this channel: `embed_links`, `read_messages`, `read_message_history`, `send_messages`, `attach_files`.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.logschannel.set(channel.id)
await ctx.send(_("Logging channel registered: {channel.mention}.").format(**locals()))
@configuration.command(usage="<category_or_'none'>")
async def categoryopen(self, ctx: commands.Context, *, category: typing.Optional[discord.CategoryChannel]=None):
"""Set a category where open tickets are created.
``category``: Category.
You can also use "None" if you wish to remove the open category.
"""
if category is None:
await self.config.guild(ctx.guild).settings.category_open.clear()
await ctx.send(_("Category Open removed.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.category_open.set(category.id)
await ctx.send(_("Category Open registered: {category.name}.").format(**locals()))
@configuration.command(usage="<category_or_'none'>")
async def categoryclose(self, ctx: commands.Context, *, category: typing.Optional[discord.CategoryChannel]=None):
"""Set a category where close tickets are created.
``category``: Category.
You can also use "None" if you wish to remove the close category.
"""
if category is None:
await self.config.guild(ctx.guild).settings.category_close.clear()
await ctx.send(_("Category Close removed.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.category_close.set(category.id)
await ctx.send(_("Category Close registered: {category.name}.").format(**locals()))
@configuration.command(usage="<role_or_'none'>")
async def adminrole(self, ctx: commands.Context, *, role: typing.Optional[discord.Role]=None):
"""Set a role for administrators of the ticket system.
``role``: Role.
You can also use "None" if you wish to remove the admin role.
"""
if role is None:
await self.config.guild(ctx.guild).settings.admin_role.clear()
await ctx.send(_("Admin Role removed.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.admin_role.set(role.id)
await ctx.send(_("Admin Role registered: {role.name}.").format(**locals()))
@configuration.command(usage="<role_or_'none'>")
async def supportrole(self, ctx: commands.Context, *, role: typing.Optional[discord.Role]=None):
"""Set a role for helpers of the ticket system.
``role``: Role.
You can also use "None" if you wish to remove the support role.
"""
if role is None:
await self.config.guild(ctx.guild).settings.support_role.clear()
await ctx.send(_("Support Role removed.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.support_role.set(role.id)
await ctx.send(_("Support Role registered: {role.name}.").format(**locals()))
@configuration.command(usage="<role_or_'none'>")
async def ticketrole(self, ctx: commands.Context, *, role: typing.Optional[discord.Role]=None):
"""Set a role for creaters of a ticket.
``role``: Role.
You can also use "None" if you wish to remove the ticket role.
"""
if role is None:
await self.config.guild(ctx.guild).settings.ticket_role.clear()
await ctx.send(_("Ticket Role removed.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.ticket_role.set(role.id)
await ctx.send(_("Ticket Role registered: {role.name}.").format(**locals()))
@configuration.command(usage="<role_or_'none'>")
async def viewrole(self, ctx: commands.Context, *, role: typing.Optional[discord.Role]=None):
"""Set a role for viewers of tickets.
``role``: Role.
You can also use "None" if you wish to remove the view role.
"""
if role is None:
await self.config.guild(ctx.guild).settings.view_role.clear()
await ctx.send(_("View Role removed.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.view_role.set(role.id)
await ctx.send(_("View Role registered: {role.name}.").format(**locals()))
@configuration.command(usage="<role_or_'none'>")
async def pingrole(self, ctx: commands.Context, *, role: typing.Optional[discord.Role]=None):
"""Set a role for pings on ticket creation.
``role``: Role.
You can also use "None" if you wish to remove the ping role.
"""
if role is None:
await self.config.guild(ctx.guild).settings.ping_role.clear()
await ctx.send(_("Ping Role removed.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.ping_role.set(role.id)
await ctx.send(_("Ping Role registered: {role.name}.").format(**locals()))
@configuration.command(usage="<int>")
async def nbmax(self, ctx: commands.Context, number: int):
"""Set the maximum number of tickets each member can have open."""
if number == 0:
await ctx.send(_("Disable the system instead.").format(**locals()))
return
await self.config.guild(ctx.guild).nb_max.set(number)
await ctx.send(_("Max Number registered: {number}.").format(**locals()))
@configuration.command(usage="<true_or_false>")
async def modlog(self, ctx: commands.Context, state: bool):
"""Enable or disable Modlog.
Use `True` (or `yes`) to enable or `False` (or `no`) to disable.
"""
config = await self.config.guild(ctx.guild).settings.all()
actual_create_modlog = config["create_modlog"]
if actual_create_modlog is state:
await ctx.send(_("Modlog is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.create_modlog.set(state)
await ctx.send(_("Modlog state registered: {state}.").format(**locals()))
@configuration.command(usage="<true_or_false>")
async def closeonleave(self, ctx: commands.Context, state: bool):
"""Enable or disable Close on Leave.
Use `True` (or `yes`) to enable or `False` (or `no`) to disable.
"""
config = await self.config.guild(ctx.guild).settings.all()
actual_close_on_leave = config["close_on_leave"]
if actual_close_on_leave is state:
await ctx.send(_("Close on Leave is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.close_on_leave.set(state)
await ctx.send(_("Close on Leave state registered: {state}.").format(**locals()))
@configuration.command(usage="<true_or_false>")
async def createonreact(self, ctx: commands.Context, state: bool):
"""Enable or disable Create on React ``.
Use `True` (or `yes`) to enable or `False` (or `no`) to disable.
"""
config = await self.config.guild(ctx.guild).settings.all()
actual_create_on_react = config["create_on_react"]
if actual_create_on_react is state:
await ctx.send(_("Create on React is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.create_on_react.set(state)
await ctx.send(_("Create on React state registered: {state}.").format(**locals()))
@configuration.command(aliases=["colour", "col", "embedcolor", "embedcolour"], usage="<color_or_'none'>")
async def color(self, ctx: commands.Context, *, color: typing.Optional[discord.Color]=None):
"""Set a colour fort the embed.
``color``: Color.
You can also use "None" if you wish to reset the color.
"""
if color is None:
await self.config.guild(ctx.guild).settings.color.clear()
config = await self.config.guild(ctx.guild).settings.all()
actual_color = config["color"]
actual_thumbnail = config["thumbnail"]
embed: discord.Embed = discord.Embed()
embed.color = actual_color
embed.set_thumbnail(url=actual_thumbnail)
embed.title = _("Configure the embed").format(**locals())
embed.description = _("Reset color:").format(**locals())
embed.add_field(
name=_("Color:").format(**locals()),
value=f"{actual_color}")
message = await ctx.send(embed=embed)
return
await self.config.guild(ctx.guild).settings.color.set(color.value)
config = await self.config.guild(ctx.guild).settings.all()
actual_color = config["color"]
actual_thumbnail = config["thumbnail"]
embed: discord.Embed = discord.Embed()
embed.title = _("Configure the embed").format(**locals())
embed.description = _("Set color:").format(**locals())
embed.color = actual_color
embed.set_thumbnail(url=actual_thumbnail)
embed.add_field(
name=_("Color:").format(**locals()),
value=f"{actual_color}")
message = await ctx.send(embed=embed)
@configuration.command(aliases=["picture", "thumb", "link"], usage="<link_or_'none'>")
async def thumbnail(self, ctx: commands.Context, *, link = None):
"""Set a thumbnail fort the embed.
``link``: Thumbnail link.
You can also use "None" if you wish to reset the thumbnail.
"""
if link is None:
await self.config.guild(ctx.guild).settings.thumbnail.clear()
config = await self.config.guild(ctx.guild).settings.all()
actual_thumbnail = config["thumbnail"]
actual_color = config["color"]
embed: discord.Embed = discord.Embed()
embed.title = _("Configure the embed").format(**locals())
embed.description = _("Reset thumbnail:").format(**locals())
embed.set_thumbnail(url=actual_thumbnail)
embed.color = actual_color
embed.add_field(
name=_("Thumbnail:").format(**locals()),
value=f"{actual_thumbnail}")
message = await ctx.send(embed=embed)
return
await self.config.guild(ctx.guild).settings.thumbnail.set(link)
config = await self.config.guild(ctx.guild).settings.all()
actual_thumbnail = config["thumbnail"]
actual_color = config["color"]
embed: discord.Embed = discord.Embed()
embed.title = _("Configure the embed").format(**locals())
embed.description = _("Set thumbnail:").format(**locals())
embed.set_thumbnail(url=actual_thumbnail)
embed.color = actual_color
embed.add_field(
name=_("Thumbnail:").format(**locals()),
value=f"{actual_thumbnail}")
message = await ctx.send(embed=embed)
@configuration.command(name="auditlogs", aliases=["logsaudit"], usage="<true_or_false>")
async def showauthor(self, ctx: commands.Context, state: bool):
"""Make the author of each action concerning a ticket appear in the server logs.
Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
"""
config = await self.config.guild(ctx.guild).settings.all()
actual_audit_logs = config["audit_logs"]
if actual_audit_logs is state:
await ctx.send(_("Audit Logs is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.audit_logs.set(state)
await ctx.send(_("Audit Logs state registered: {state}.").format(**locals()))
@configuration.command(name="closeconfirmation", aliases=["confirm"], usage="<true_or_false>")
async def confirmation(self, ctx: commands.Context, state: bool):
"""Enable or disable Close Confirmation.
Use `True` (Or `yes`) to enable or `False` (or `no`) to disable.
"""
config = await self.config.guild(ctx.guild).settings.all()
actual_close_confirmation = config["close_confirmation"]
if actual_close_confirmation is state:
await ctx.send(_("Close Confirmation is already set on {state}.").format(**locals()))
return
await self.config.guild(ctx.guild).settings.close_confirmation.set(state)
await ctx.send(_("Close Confirmation state registered: {state}.").format(**locals()))
@configuration.command(name="message")
async def message(self, ctx: commands.Context, channel: typing.Optional[discord.TextChannel]=None):
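"""Send the ticket-creation embed with its "Create ticket" button in a channel."""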
if channel is None:
channel = ctx.channel
config = await self.config.guild(ctx.guild).settings.all()
actual_color = config["color"]
actual_thumbnail = config["thumbnail"]
embed: discord.Embed = discord.Embed()
embed.title = str(config["embed_button"]["title"])
embed.description = str(config["embed_button"]["description"]).replace('{prefix}', f'{ctx.prefix}')
embed.set_thumbnail(url=actual_thumbnail)
embed.color = actual_color
embed.set_footer(text=ctx.guild.name, icon_url=ctx.guild.icon or "" if self.cogsutils.is_dpy2 else ctx.guild.icon_url or "")
if self.cogsutils.is_dpy2:
view = Buttons(timeout=None, buttons=[{"style": 2, "label": _("Create ticket").format(**locals()), "emoji": "🎟️", "custom_id": "create_ticket_button", "disabled": False}], function=self.bot.get_cog("TicketTool").on_button_interaction, infinity=True)
await channel.send(embed=embed, view=view)
else:
button = ActionRow(
Button(
style=ButtonStyle.grey,
label=_("Create ticket"),
emoji="🎟️",
custom_id="create_ticket_button",
disabled=False
)
)
await channel.send(embed=embed, components=[button])
async def check_permissions_in_channel(self, permissions: typing.List[str], channel: discord.TextChannel):
"""Function to checks if the permissions are available in a guild.
This will return a list of the missing permissions.
"""
return [
permission
for permission in permissions
if not getattr(channel.permissions_for(channel.guild.me), permission)
]
@commands.is_owner()
@configuration.command(name="purge", hidden=True)
async def command_purge(self, ctx: commands.Context, confirmation: typing.Optional[bool]=False):
"""Purge all existing tickets in the config. Does not delete any channels. All commands associated with the tickets will no longer work.
"""
config = await self.bot.get_cog("TicketTool").get_config(ctx.guild)
if not confirmation:
embed: discord.Embed = discord.Embed()
embed.title = _("Do you really want to purge all the tickets in the config?").format(**locals())
embed.description = _("Does not delete any channels. All commands associated with the tickets will no longer work.").format(**locals())
embed.color = config["color"]
embed.set_author(name=ctx.author.name, url=ctx.author.display_avatar if self.cogsutils.is_dpy2 else ctx.author.avatar_url, icon_url=ctx.author.display_avatar if self.cogsutils.is_dpy2 else ctx.author.avatar_url)
response = await CogsUtils().ConfirmationAsk(ctx, embed=embed)
if not response:
return
count = 0
to_remove = []
data = await ctx.bot.get_cog("TicketTool").config.guild(ctx.guild).tickets.all()
for channel in data:
count += 1
to_remove.append(channel)
for channel in to_remove:
del data[str(channel)]
await ctx.bot.get_cog("TicketTool").config.guild(ctx.guild).tickets.set(data)
await ctx.send(_("{count} tickets have been removed from the config.").format(**locals()))
``` |
{
"source": "AAA3A-AAA3A/Kreusada-Cogs",
"score": 2
} |
#### File: Kreusada-Cogs/captcha/events.py
```python
import logging
# Local
from abc import ABCMeta
from traceback import format_exception
from discord import Forbidden, Member
from redbot.core import commands
from redbot.core.utils.chat_formatting import bold, error
from .abc import MixinMeta
from .api import Challenge
from .errors import AlreadyHaveCaptchaError
log = logging.getLogger("red.predeactor.captcha")
class Listeners(MixinMeta, metaclass=ABCMeta):
async def runner(self, member: Member):
allowed = await self.basic_check(member)
if allowed:
challenge = await self.create_challenge_for(member)
# noinspection PyBroadException
try:
await self.realize_challenge(challenge)
except Exception as e:
log.critical(
f"An unexpected error happened!\n"
f"Guild Name & ID: {challenge.guild.name} | {challenge.guild.id}\n"
f"Error: {format_exception(type(e), e, e.__traceback__)}"
)
finally:
await self.delete_challenge_for(member)
async def cleaner(self, member: Member):
try:
challenge = self.obtain_challenge(member)
except KeyError:
return
try:
await challenge.cleanup_messages()
await self.send_or_update_log_message(
challenge.guild,
bold("User has left the server."),
challenge.messages["logs"],
member=challenge.member,
)
except Exception as e:
log.critical(
f"An unexpected error happened!\n"
f"Guild Name & ID: {challenge.guild.name} | {challenge.guild.id}"
f"Error: {format_exception(type(e), e, e.__traceback__)}"
)
finally:
await self.delete_challenge_for(member)
async def skip_challenge(self, author: Member, challenge: Challenge):
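# Grant the configured autoroles, remove the temporary role, and log each
# step; missing-permission failures are reported to the member instead of crashing.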
roles = [
challenge.guild.get_role(role)
for role in await self.data.guild(challenge.guild).autoroles()
]
try:
await self.congratulation(challenge, roles)
await self.remove_temprole(challenge)
await self.send_or_update_log_message(
challenge.guild,
f"✅ Captcha skipped by {author.name} ({author.id}).",
challenge.messages["logs"],
allowed_tries=(challenge.trynum, challenge.limit),
member=challenge.member,
)
await self.send_or_update_log_message(
challenge.guild,
bold("Roles added, Captcha skipped."),
challenge.messages["logs"],
member=challenge.member,
)
except commands.MissingPermissions:
try:
await challenge.member.send(
f"Please contact the administrator of {challenge.guild.name} in order to obtain "
"access to the server, I was unable to give you the roles on the server."
)
except Forbidden:
await challenge.channel.send(
challenge.member.mention
+ ": "
+ f"Please contact the administrator of {challenge.guild.name} in order to obtain "
"access to the server, I was unable to give you the roles on the server.",
delete_after=10,
)
logmsg = challenge.messages["logs"]
await self.send_or_update_log_message(
challenge.guild,
error(bold("Permission missing for giving roles! Member alerted.")),
logmsg,
member=challenge.member,
)
finally:
try:
await challenge.cleanup_messages()
except commands.MissingPermissions:
await self.send_or_update_log_message(
challenge.guild,
error(
bold(
"Missing permissions for deleting all messages for verification!"
)
),
challenge.messages.get("logs"),
member=challenge.member,
)
return True
@commands.Cog.listener()
async def on_member_join(self, member: Member):
await self.runner(member)
@commands.Cog.listener()
async def on_member_remove(self, member: Member):
await self.cleaner(member)
@commands.guildowner()
@commands.command()
async def captcha(self, ctx, *members: Member):
"""Start a captcha challenge for the specified members."""
await ctx.send("Running Captcha challenges... this may take a while!")
await ctx.trigger_typing()
time = await self.data.guild(ctx.guild).timeout()
await self.data.guild(ctx.guild).timeout.set(20)
for member in members:
try:
await self.runner(member)
except AlreadyHaveCaptchaError:
await ctx.send(
f"The user {member.name} ({member.id}) already have a captcha challenge running."
)
await self.data.guild(ctx.guild).timeout.set(time)
message = (
"**The challenge has finished for the following members:**\n(unless the user already had a challenge in progress)\n"
+ ", ".join(member.name for member in members if not member.bot)
)
if any(member.bot for member in members):
message += (
"\n\n**The following members were not challenged because they were bots:**\n"
+ ", ".join(member.name for member in members if member.bot)
)
await ctx.send(message)
@commands.guildowner()
@commands.command(aliases=["bypasscaptcha"])
async def skipcaptcha(self, ctx, *members: Member):
"""Cancel a captcha challenge for the specified members."""
await ctx.send("Cancelling Captcha challenges...")
await ctx.trigger_typing()
for member in members:
try:
challenge = self.obtain_challenge(member)
except KeyError:
await ctx.send(
f"The user {member.name} ({member.id}) is not challenging any Captcha."
)
else:
try:
await self.skip_challenge(ctx.author, challenge)
except Exception:
pass
finally:
await self.delete_challenge_for(member)
message = (
"**The challenge has cancelled for the following members:**\n(unless the user did not have a current challenge)\n"
+ ", ".join(member.name for member in members if not member.bot)
)
await ctx.send(message)
``` |
{
"source": "aaaaaaaalesha/oop-and-design-patterns",
"score": 4
} |
#### File: week_2/code_refactoring/polylines.py
```python
import pygame
class Polyline:
def __init__(self, color=None):
self.__points = []
self.__speeds = []
self.__N = 0
self.color = pygame.Color(*(color or (255, 255, 255)))
def add_point(self, point, speed):
self.__points.append(point)
self.__speeds.append(speed)
self.__N += 1
def add_points(self, points, speeds):
self.__points.extend(points)
self.__speeds.extend(speeds)
self.__N += len(points)
def remove_point(self):
self.__points.pop()
self.__speeds.pop()
self.__N -= 1
def clear(self):
self.__points = []
self.__speeds = []
self.__N = 0
def set_points(self, screen_width, screen_height, mul):
for p in range(self.__N):
self.__points[p] += mul * self.__speeds[p]
if self.__points[p].x > screen_width or self.__points[p].x < 0:
self.__speeds[p].x = - self.__speeds[p].x
if self.__points[p].y > screen_height or self.__points[p].y < 0:
self.__speeds[p].y = - self.__speeds[p].y
def draw_points(self, display, style="points", width=3):
if style == "line":
for p_n in range(-1, self.__N - 1):
pygame.draw.line(display, self.color, self.__points[p_n].int_pair(
), self.__points[p_n + 1].int_pair(), width)
elif style == "points":
for p in self.__points:
pygame.draw.circle(display, self.color, p.int_pair(), width)
class Knot(Polyline):
def __init__(self, color=None, addition_points=5):
Polyline.__init__(self)
self.__points = []
self.__speeds = []
self.__N = 0
self.__count = addition_points
self.color = pygame.Color(*(color or (255, 255, 255)))
def __set_count(self, x):
self.__count = x if 0 < x < 30 else self.__count
self.__get_knot()
def __get_count(self):
return self.__count
addition_points = property(__get_count, __set_count)
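# Recursive weighted blend of the control points: each level mixes point
# `deg` (weight alpha) with the blend of the remaining points (weight
# 1 - alpha), yielding one point on the smoothed curve.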
def __get_point(self, points, alpha, deg=None):
if deg is None:
deg = len(points) - 1
if deg == 0:
return points[0]
return points[deg] * alpha + self.__get_point(points, alpha, deg - 1) * (1 - alpha)
def __get_points(self, base_points):
alpha = 1 / self.__count
res = []
for i in range(self.__count):
res.append(self.__get_point(base_points, i * alpha))
return res
def __get_knot(self):
Polyline.clear(self)
if len(self.__points) < 3:
return []
for i in range(-2, len(self.__points) - 2):
ptn = list()
ptn.append((self.__points[i] + self.__points[i + 1]) * 0.5)
ptn.append(self.__points[i + 1])
ptn.append((self.__points[i + 1] + self.__points[i + 2]) * 0.5)
Polyline.add_points(self, self.__get_points(ptn), [])
def add_point(self, point, speed):
self.__points.append(point)
self.__speeds.append(speed)
self.__N += 1
self.__get_knot()
def remove_point(self):
self.__points.pop()
self.__speeds.pop()
self.__N -= 1
self.__get_knot()
def set_points(self, screen_width, screen_height, mul):
for p in range(self.__N):
self.__points[p] += mul * self.__speeds[p]
if self.__points[p].x > screen_width or self.__points[p].x < 0:
self.__speeds[p].x = - self.__speeds[p].x
if self.__points[p].y > screen_height or self.__points[p].y < 0:
self.__speeds[p].y = - self.__speeds[p].y
self.__get_knot()
```
#### File: oop-and-design-patterns/week_5/Service.py
```python
import pygame
import random
import yaml
import os
import Objects
OBJECT_TEXTURE = os.path.join("texture", "objects")
ENEMY_TEXTURE = os.path.join("texture", "enemies")
ALLY_TEXTURE = os.path.join("texture", "ally")
def create_sprite(img, sprite_size, mmp_tile):
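# Load the image once and return two surfaces: one scaled to the full
# sprite size and one scaled down to mmp_tile for the mini-map.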
icon = pygame.image.load(img).convert_alpha()
icon_mmp = pygame.transform.scale(icon, (mmp_tile, mmp_tile))
icon = pygame.transform.scale(icon, (sprite_size, sprite_size))
sprite = pygame.Surface((sprite_size, sprite_size), pygame.HWSURFACE)
sprite_mmp = pygame.Surface((mmp_tile, mmp_tile), pygame.HWSURFACE)
sprite.blit(icon, (0, 0))
sprite_mmp.blit(icon_mmp, (0, 0))
return sprite, sprite_mmp
def reload_game(engine, hero):
global level_list
level_list_max = len(level_list) - 1
engine.level += 1
hero.position = [1, 1]
engine.objects = []
generator = level_list[min(engine.level, level_list_max)]
_map = generator['map'].get_map()
engine.load_map(_map)
engine.add_objects(generator['obj'].get_objects(_map))
engine.add_hero(hero)
def restore_hp(engine, hero):
if random.randint(1, 10) == 1:
engine.score -= 0.05
engine.hero = Objects.EvilEye(hero)
engine.notify("You were cursed: unlucky")
else:
engine.score += 0.1
hero.hp = hero.max_hp
engine.notify("HP restored")
def apply_blessing(engine, hero):
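# A blessing costs int(20 * 1.5 ** level) gold, discounted by 2 gold per
# point of intelligence; on success one of two random buffs is applied.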
if hero.gold >= int(20 * 1.5 ** engine.level) - 2 * hero.stats["intelligence"]:
engine.score += 0.2
hero.gold -= int(20 * 1.5 ** engine.level) - \
2 * hero.stats["intelligence"]
if random.randint(0, 1) == 0:
engine.hero = Objects.Blessing(hero)
engine.notify("Blessing applied")
else:
engine.hero = Objects.Berserk(hero)
engine.notify("Berserk applied")
else:
engine.score -= 0.1
engine.notify("Nothing happened")
def remove_effect(engine, hero):
if hero.gold >= int(10 * 1.5 ** engine.level) - 2 * hero.stats["intelligence"] and "base" in dir(hero):
hero.gold -= int(10 * 1.5 ** engine.level) - \
2 * hero.stats["intelligence"]
engine.hero = hero.base
engine.hero.calc_max_HP()
engine.notify("Effect removed")
else:
engine.notify("Nothing happened")
def add_gold(engine, hero):
if random.randint(1, 10) == 1:
engine.score -= 0.05
engine.hero = Objects.Weakness(hero)
engine.notify("You were cursed: weak")
else:
engine.score += 0.1
gold = int(random.randint(10, 1000) * (1.1 ** (engine.hero.level - 1)))
hero.gold += gold
engine.notify(f"{gold} gold added")
def fight(engine, enemy, hero):
enemy_value = enemy.stats['strength'] + enemy.stats['endurance'] + \
enemy.stats['intelligence'] + enemy.stats['luck']
hero_value = sum(hero.stats.values())
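# Each round the hero loses 1 HP with probability
# enemy_value / (enemy_value + hero_value); the fight ends as soon as the
# hero wins a round or runs out of HP.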
while random.randint(1, enemy_value + hero_value) > hero_value and hero.hp > 0:
hero.hp -= 1
if hero.hp > 0:
engine.score += 1
hero.exp += enemy.xp
engine.notify("Defeated enemy!")
hero.level_up()
else:
engine.game_process = False
engine.notify("Lost!")
engine.notify("GAME OVER!!!")
def enhance(engine, hero):
engine.score += 0.2
engine.hero = Objects.Enhance(hero)
hero.hp = max(hero.max_hp, hero.hp)
engine.notify("You was enhanced!")
class MapFactory(yaml.YAMLObject):
@classmethod
def from_yaml(cls, loader, node):
def get_end(loader, node):
return {'map': EndMap.Map(), 'obj': EndMap.Objects()}
def get_random(loader, node):
return {'map': RandomMap.Map(), 'obj': RandomMap.Objects()}
def get_special(loader, node):
data = loader.construct_mapping(node)
try:
rat = data["rat"]
except KeyError:
rat = 0
try:
knight = data["knight"]
except KeyError:
knight = 0
ret = {}
_map = SpecialMap.Map()
_obj = SpecialMap.Objects()
_obj.config = {'rat': rat, 'knight': knight}
ret["map"] = _map
ret["obj"] = _obj
return ret
def get_empty(loader, node):
return {'map': EmptyMap.Map(), 'obj': EmptyMap.Objects()}
data = loader.construct_mapping(node)
try:
rat = data["rat"]
except KeyError:
rat = 0
try:
knight = data["knight"]
except KeyError:
knight = 0
_obj = cls.create_objects()
_obj.config = {'rat': rat, 'knight': knight}
return {'map': cls.create_map(), 'obj': _obj}
@classmethod
def create_map(cls):
return cls.Map()
@classmethod
def create_objects(cls):
return cls.Objects()
class EndMap(MapFactory):
yaml_tag = "!end_map"
class Map:
def __init__(self):
self.Map = ['000000000000000000000000000000000000000',
'0 0',
'0 0',
'0 0 0 000 0 0 00000 0 0 0',
'0 0 0 0 0 0 0 0 0 0 0',
'0 000 0 0 00000 0000 0 0 0',
'0 0 0 0 0 0 0 0 0 0 0',
'0 0 0 000 0 0 00000 00000 0',
'0 0 0',
'0 0',
'000000000000000000000000000000000000000'
]
self.Map = list(map(list, self.Map))
for i in self.Map:
for j in range(len(i)):
i[j] = wall if i[j] == '0' else floor1
def get_map(self):
return self.Map
class Objects:
def __init__(self):
self.objects = []
def get_objects(self, _map):
return self.objects
class RandomMap(MapFactory):
yaml_tag = "!random_map"
class Map:
w, h = 39, 25
def __init__(self):
w = self.w
h = self.h
self.Map = [[0 for _ in range(w)] for _ in range(h)]
for i in range(w):
for j in range(h):
if i == 0 or j == 0 or i == w - 1 or j == h - 1:
self.Map[j][i] = wall
else:
self.Map[j][i] = [wall, floor1, floor2, floor3, floor1,
floor2, floor3, floor1, floor2][random.randint(0, 8)]
def get_map(self):
return self.Map
class Objects:
def __init__(self):
self.objects = []
def get_objects(self, _map):
w, h = 38, 24
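# Rejection sampling: re-roll coordinates until the spot is not a wall,
# not occupied by another object, and not the hero spawn at (1, 1).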
for obj_name in object_list_prob['objects']:
prop = object_list_prob['objects'][obj_name]
for i in range(random.randint(prop['min-count'], prop['max-count'])):
coord = (random.randint(1, w), random.randint(1, h))
intersect = True
while intersect:
intersect = False
if _map[coord[1]][coord[0]] == wall:
intersect = True
coord = (random.randint(1, w), random.randint(1, h))
continue
for obj in self.objects:
if coord == obj.position or coord == (1, 1):
intersect = True
coord = (random.randint(1, w), random.randint(1, h))
self.objects.append(Objects.Ally(
prop['sprite'], prop['action'], coord))
for obj_name in object_list_prob['ally']:
prop = object_list_prob['ally'][obj_name]
for i in range(random.randint(prop['min-count'], prop['max-count'])):
coord = (random.randint(1, w), random.randint(1, h))
intersect = True
while intersect:
intersect = False
if _map[coord[1]][coord[0]] == wall:
intersect = True
coord = (random.randint(1, w), random.randint(1, h))
continue
for obj in self.objects:
if coord == obj.position or coord == (1, 1):
intersect = True
coord = (random.randint(1, w), random.randint(1, h))
self.objects.append(Objects.Ally(
prop['sprite'], prop['action'], coord))
for obj_name in object_list_prob['enemies']:
prop = object_list_prob['enemies'][obj_name]
for i in range(random.randint(0, 5)):
coord = (random.randint(1, w), random.randint(1, h))
intersect = True
while intersect:
intersect = False
if _map[coord[1]][coord[0]] == wall:
intersect = True
coord = (random.randint(1, w), random.randint(1, h))
continue
for obj in self.objects:
if coord == obj.position or coord == (1, 1):
intersect = True
coord = (random.randint(1, w), random.randint(1, h))
self.objects.append(Objects.Enemy(
prop['sprite'], prop, prop['experience'], coord))
return self.objects
class SpecialMap(MapFactory):
yaml_tag = "!special_map"
class Map:
def __init__(self):
self.Map = ['000000000000000000000000000000000000000',
'0 0',
'0 0 0',
'0 0 0 0000 0 0 00 00 0 0',
'0 0 0 0 0 0 0 0 00 0 0 00',
'0 000 0000 0000 0 0 0 00',
'0 0 0 0 0 0 0 0 0 0 00',
'0 0 0 0 0000 0 0 0 0 0',
'0 0 0',
'0 0',
'000000000000000000000000000000000000000'
]
self.Map = list(map(list, self.Map))
for i in self.Map:
for j in range(len(i)):
i[j] = wall if i[j] == '0' else floor1
def get_map(self):
return self.Map
class Objects:
def __init__(self):
self.objects = []
self.config = {}
def get_objects(self, _map):
w, h = 10, 38
for obj_name in object_list_prob['objects']:
prop = object_list_prob['objects'][obj_name]
for i in range(random.randint(prop['min-count'], prop['max-count'])):
coord = (random.randint(1, h), random.randint(1, w))
intersect = True
while intersect:
intersect = False
if _map[coord[1]][coord[0]] == wall:
intersect = True
coord = (random.randint(1, h),
random.randint(1, w))
continue
for obj in self.objects:
if coord == obj.position or coord == (1, 1):
intersect = True
coord = (random.randint(1, h),
random.randint(1, w))
self.objects.append(Objects.Ally(
prop['sprite'], prop['action'], coord))
for obj_name in object_list_prob['ally']:
prop = object_list_prob['ally'][obj_name]
for i in range(random.randint(prop['min-count'], prop['max-count'])):
coord = (random.randint(1, h), random.randint(1, w))
intersect = True
while intersect:
intersect = False
if _map[coord[1]][coord[0]] == wall:
intersect = True
coord = (random.randint(1, h),
random.randint(1, w))
continue
for obj in self.objects:
if coord == obj.position or coord == (1, 1):
intersect = True
coord = (random.randint(1, h),
random.randint(1, w))
self.objects.append(Objects.Ally(
prop['sprite'], prop['action'], coord))
for enemy, count in self.config.items():
prop = object_list_prob['enemies'][enemy]
for i in range(random.randint(0, count)):
coord = (random.randint(1, h), random.randint(1, w))
intersect = True
while intersect:
intersect = False
if _map[coord[1]][coord[0]] == wall:
intersect = True
coord = (random.randint(1, h),
random.randint(1, w))
continue
for obj in self.objects:
if coord == obj.position or coord == (1, 1):
intersect = True
coord = (random.randint(1, h),
random.randint(1, w))
self.objects.append(Objects.Enemy(
prop['sprite'], prop, prop['experience'], coord))
return self.objects
class EmptyMap(MapFactory):
yaml_tag = "!empty_map"
@classmethod
def from_yaml(cls, loader, node):
return {'map': EmptyMap.Map(), 'obj': EmptyMap.Objects()}
class Map:
def __init__(self):
self.Map = [[]]
def get_map(self):
return self.Map
class Objects:
def __init__(self):
self.objects = []
def get_objects(self, _map):
return self.objects
wall = [0]
floor1 = [0]
floor2 = [0]
floor3 = [0]
def service_init(sprite_size, tile, full=True):
global object_list_prob, level_list
global wall
global floor1
global floor2
global floor3
wall[0] = create_sprite(os.path.join("texture", "wall.png"), sprite_size, tile)
floor1[0] = create_sprite(os.path.join("texture", "Ground_1.png"), sprite_size, tile)
floor2[0] = create_sprite(os.path.join("texture", "Ground_2.png"), sprite_size, tile)
floor3[0] = create_sprite(os.path.join("texture", "Ground_3.png"), sprite_size, tile)
file = open("objects.yml", "r")
object_list_tmp = yaml.load(file.read(), Loader=yaml.Loader)
if full:
object_list_prob = object_list_tmp
object_list_actions = {'reload_game': reload_game,
'add_gold': add_gold,
'apply_blessing': apply_blessing,
'remove_effect': remove_effect,
'restore_hp': restore_hp,
'fight': fight,
'enhance': enhance}
for obj in object_list_prob['objects']:
prop = object_list_prob['objects'][obj]
prop_tmp = object_list_tmp['objects'][obj]
prop['sprite'][0] = create_sprite(
os.path.join(OBJECT_TEXTURE, prop_tmp['sprite'][0]), sprite_size, tile)
prop['action'] = object_list_actions[prop_tmp['action']]
for ally in object_list_prob['ally']:
prop = object_list_prob['ally'][ally]
prop_tmp = object_list_tmp['ally'][ally]
prop['sprite'][0] = create_sprite(
os.path.join(ALLY_TEXTURE, prop_tmp['sprite'][0]), sprite_size, tile)
prop['action'] = object_list_actions[prop_tmp['action']]
for enemy in object_list_prob['enemies']:
prop = object_list_prob['enemies'][enemy]
prop_tmp = object_list_tmp['enemies'][enemy]
prop['sprite'][0] = create_sprite(
os.path.join(ENEMY_TEXTURE, prop_tmp['sprite'][0]), sprite_size, tile)
prop['action'] = object_list_actions['fight']
file.close()
if full:
file = open("levels.yml", "r")
level_list = yaml.load(file.read(), Loader=yaml.Loader)['levels']
level_list.append({'map': EndMap.Map(), 'obj': EndMap.Objects()})
file.close()
``` |
{
"source": "aaaaaaaalesha/stack_machine",
"score": 4
} |
#### File: stack_machine/src/stack.py
```python
from collections import deque
class Stack(deque):
"""
Best way for stack implementation based on collections.deque.
Source: https://www.geeksforgeeks.org/stack-in-python/
"""
def __str__(self):
"""Overrides the string representation of my stack."""
repr_version = self.copy()
repr_version.reverse()
return "Stack" + list(repr_version).__str__()
def empty(self):
return len(self) == 0
def top(self):
"""Accesses the top element."""
head = self.pop()
self.push(head)
return head
def push(self, value):
"""Inserts element at the top."""
return self.append(value)
pop = deque.pop
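if __name__ == "__main__":
    # Minimal usage sketch (illustrative, not part of the original module):
    s = Stack()
    s.push(1)
    s.push(2)
    assert s.top() == 2  # peek leaves the element in place
    assert s.pop() == 2  # pop removes it
    print(s)  # -> Stack[1]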
```
#### File: stack_machine/tests/test_stack.py
```python
import unittest
from src.stack import Stack
class StackTestCase(unittest.TestCase):
def test_stack_empty(self):
stack = Stack()
self.assertTrue(stack.empty())
stack.push(1)
self.assertFalse(stack.empty())
stack.push(2)
self.assertEqual(stack.__str__(), "Stack[2, 1]")
self.assertFalse(stack.empty())
stack.clear()
self.assertTrue(stack.empty())
def test_stack_pop_top(self):
stack = Stack()
self.assertTrue(stack.empty())
stack.push(1)
self.assertFalse(stack.empty())
self.assertEqual(stack.top(), 1)
stack.push(2)
self.assertEqual(stack.top(), 2)
stack.push(3)
self.assertEqual(stack.top(), 3)
self.assertEqual(stack.__str__(), "Stack[3, 2, 1]")
self.assertEqual(stack.pop(), 3)
self.assertEqual(stack.pop(), 2)
self.assertEqual(stack.pop(), 1)
self.assertTrue(stack.empty())
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "aaaaaaason/basic-app",
"score": 2
} |
#### File: alembic/versions/2e1e0392518b_create_user_table.py
```python
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '2e1e0392518b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', postgresql.UUID(), nullable=False),
sa.Column('email', sa.String(length=128), nullable=False),
sa.Column('username', sa.String(length=128), nullable=False),
sa.Column('password', sa.String(), nullable=False),
sa.Column('create_time', sa.TIMESTAMP(timezone=True), nullable=False),
sa.Column('update_time', sa.TIMESTAMP(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('user')
# ### end Alembic commands ###
```
#### File: basic_app/lib/password.py
```python
import argon2
from basic_app.lib import config
class PasswordHasher:
"""Define base password hasher."""
def hash(self, password: str) -> str:
"""Hash the password."""
raise NotImplementedError
def verify(self, password: str, hash: str) -> bool:
"""Verify the password is matched."""
raise NotImplementedError
def check_rehash(self, hash: str) -> bool:
"""Check if the stored hash needs to be recomputed with the current parameters."""
raise NotImplementedError
class Argon2PasswordHasher(PasswordHasher):
"""Password hasher using Argon2."""
def __init__(self, conf: config.Config):
# We can also refer to:
# https://cheatsheetseries.owasp.org/cheatsheets/Password_Storage_Cheat_Sheet.html#salting
self._hasher = argon2.PasswordHasher(
memory_cost=conf.argon2_memory_cost,
time_cost=conf.argon2_time_cost,
parallelism=conf.argon2_parallelism,
hash_len=conf.argon2_hash_len,
type=argon2.Type.ID,
)
def hash(self, password: str) -> str:
return self._hasher.hash(password)
def verify(self, password: str, hash: str) -> bool:
return self._hasher.verify(hash, password)
def check_rehash(self, hash: str) -> bool:
return self._hasher.check_needs_rehash(hash)
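# Usage sketch (assumes a Config exposing the argon2_* settings read above):
#   hasher = Argon2PasswordHasher(conf)
#   stored = hasher.hash("s3cret")
#   hasher.verify("s3cret", stored)  # True; note that argon2-cffi raises
#   VerifyMismatchError on a wrong password rather than returning False.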
```
#### File: src/basic_app/__main__.py
```python
import basic_app
from basic_app import (
argument,
)
from basic_app.lib import (
config,
logging,
uvicorn,
)
app = None
def main():
"""Our entrypoint."""
args = argument.parse_args()
conf = config.setup(args.envfile)
logging.setup(conf)
basic_app.setup(conf)
global app
app = basic_app.API()
uvicorn.run(
"basic_app.__main__:app",
host=conf.host,
port=int(conf.port),
)
if __name__ == "__main__":
main()
```
#### File: basic_app/models/user.py
```python
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as pg
from basic_app.models import base
class User(base.Base):
"""Defines user table"""
__tablename__ = 'user'
id = sa.Column('id', pg.UUID(as_uuid=True), primary_key=True)
email = sa.Column('email', sa.String(128), unique=True, nullable=False)
username = sa.Column('username', sa.String(128), nullable=False)
password = sa.Column('password', sa.String(), nullable=False)
create_time = sa.Column('create_time',
sa.TIMESTAMP(timezone=True), nullable=False)
update_time = sa.Column('update_time',
sa.TIMESTAMP(timezone=True), nullable=False)
def __repr__(self):
return '<User(id={},email={})>'.format(self.id, self.email)
def to_dict(self):
return {c.name: getattr(self, c.name, None) for c in self.__table__.columns}
```
#### File: basic_app/routers/google_signin.py
```python
import logging
from fastapi import (
APIRouter,
Cookie,
Form,
Request,
responses,
)
from fastapi.templating import Jinja2Templates
from google.oauth2 import id_token
from google.auth.transport import requests
from basic_app.lib import exception
router = APIRouter()
_controller = None
_templates = Jinja2Templates(directory="./src/basic_app/templates")
@router.get("/google-signin", response_class=responses.HTMLResponse)
async def signin_view(request: Request):
return await _controller.signin_view(request)
@router.post("/google-signin")
async def google_signin(credential: str = Form(None),
csrf_token: str = Form(None),
csrf_cookie: str = Cookie(None)):
return await _controller.google_signin(credential, csrf_token, csrf_cookie)
class GoogleSignin:
"""Define router."""
def __init__(self, app_host: str, client_id: str):
global _controller
_controller = self
self._app_host = app_host
self._client_id = client_id
async def signin_view(self, request: Request):
"""The entrypoint of GET /google-signin request."""
return _templates.TemplateResponse('google-signin.html', {
"request": request,
"google_client_id": self._client_id,
"app_host": self._app_host,
})
async def google_signin(self, credential, csrf_token, csrf_cookie):
"""The entrypoint of POST /google-signin request."""
if not csrf_cookie:
raise exception.AppException(
code=exception.ErrorCode.INVALID_INPUT,
message="No CSRF token in Cookie.",
)
if not csrf_token:
raise exception.AppException(
code=exception.ErrorCode.INVALID_INPUT,
message="No CSRF token in body.",
)
if csrf_cookie != csrf_token:
raise exception.AppException(
code=exception.ErrorCode.INVALID_INPUT,
message="Failed to verify double submit cookie.",
)
try:
# Specify the CLIENT_ID of the app that accesses the backend:
_ = id_token.verify_oauth2_token(credential, requests.Request(), self._client_id)
logging.info("Sign in succeeded.")
# TODO: Save user into database and create session.
except ValueError:
raise exception.AppException(
code=exception.ErrorCode.AUTHENTICATION_FAIL,
message="Failed to verify ID token.",
)
```
#### File: src/tests/helper.py
```python
import datetime as dt
import httpx
import basic_app
def get_http_client() -> httpx.AsyncClient:
"""Get an async http client for sending request.
Returns:
HTTP client.
"""
app = basic_app.API()
return httpx.AsyncClient(
app=app,
base_url='http://localhost'
)
def parse_datetime(time: str) -> dt.datetime:
return dt.datetime.strptime(time, '%Y-%m-%dT%H:%M:%S.%f')
``` |
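A hedged usage sketch for the helper above — the route and the pytest-asyncio marker are assumptions, not taken from the repo:
```python
import pytest
from tests import helper  # assuming src/ is on sys.path

@pytest.mark.asyncio  # requires the pytest-asyncio plugin (assumption)
async def test_root_responds():
    async with helper.get_http_client() as client:
        resp = await client.get("/")  # hypothetical route
        assert resp.status_code < 500
```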
{
"source": "aaaaaaason/tx-study-tool",
"score": 3
} |
#### File: src/tests/test_case.py
```python
import pytest
from txtool import (
case,
)
@pytest.mark.medium
def test_case_reader():
    # Given I choose a valid case file.
run_case = 'src/tests/data/basic_case.yaml'
# When I create a new case reader.
reader = case.Reader(run_case)
# Then I should get proper output.
engine = reader.get_engine()
assert engine == 'postgres', (
f'Expect to get "postgres", but got "{engine}"')
# Check setup:
setup_steps = reader.get_steps_for_setup()
assert len(setup_steps) == 1,\
f'Expect setup steps is a list with len 1, got {len(setup_steps)}'
step = setup_steps[0]
assert step.session_id == -1, (
'Expect to use connection id -1 for every test setup step, '
f'but got {step.session_id}.')
assert step.statement == (
'CREATE TABLE IF NOT EXISTS item (id INT PRIMARY KEY);'),\
'Got unexpected setup step.'
# Check teardown:
teardown_steps = reader.get_steps_for_teardown()
assert len(teardown_steps) == 1,\
f'Expect teardown steps is a list with len 1, got {len(teardown_steps)}'
step = teardown_steps[0]
assert step.session_id == -1, (
'Expect to use connection id -1 for every test teardown step, '
f'but got {step.session_id}.')
assert step.statement == 'DROP TABLE IF EXISTS item;',\
'Got unexpected teardown step.'
# Check steps:
steps = reader.get_steps()
assert len(steps) == 2,\
f'Expect steps is a list with len 2, got {len(steps)}'
step = steps[0]
assert step.session_id == 1, (
f'Expect to get connection id 1 but got {step.session_id}.')
assert step.statement == (
"INSERT INTO item (id, name, count) VALUES (1, 'book', 5);"),\
'Got unexpected step.'
step = steps[1]
assert step.session_id == 0, (
f'Expect to get connection id 0 but got {step.session_id}.')
assert step.statement == 'SELECT * FROM item;',\
'Got unexpected step.'
@pytest.mark.medium
def test_case_reader_case_not_found():
    # Given I choose a case file that does not exist.
run_case = 'src/tests/data/not_found_case.yaml'
# Then I should get exception.
with pytest.raises(case.CaseNotFoundError):
# When I create a new case reader.
case.Reader(run_case)
@pytest.mark.medium
def test_case_reader_engine_not_found():
    # Given I choose a case file with no engine entry.
run_case = 'src/tests/data/no_engine_case.yaml'
# Then I should get exception.
with pytest.raises(case.EngineNotFoundError):
# When I create a new case reader.
case.Reader(run_case).get_engine()
@pytest.mark.medium
def test_case_reader_setup_not_found():
    # Given I choose a case file with no setup steps.
run_case = 'src/tests/data/no_setup_case.yaml'
# Then I should get exception.
with pytest.raises(case.SetupStepsNotFoundError):
# When I create a new case reader.
case.Reader(run_case).get_steps_for_setup()
@pytest.mark.medium
def test_case_reader_teardown_not_found():
    # Given I choose a case file with no teardown steps.
run_case = 'src/tests/data/no_teardown_case.yaml'
# Then I should get exception.
with pytest.raises(case.TeardownStepsNotFoundError):
# When I create a new case reader.
case.Reader(run_case).get_steps_for_teardown()
@pytest.mark.medium
def test_case_reader_steps_not_found():
    # Given I choose a case file with no steps.
run_case = 'src/tests/data/no_step_case.yaml'
# Then I should get exception.
with pytest.raises(case.StepsNotFoundError):
# When I create a new case reader.
case.Reader(run_case).get_steps()
```
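For context, the shape of `src/tests/data/basic_case.yaml` can be reconstructed from the assertions above. The key names below are inferred, not copied from the repo — a hedged sketch:
```python
# Hypothetical reconstruction of basic_case.yaml; field names are guesses
# based on what the Reader tests assert and may differ from the real file.
import yaml  # PyYAML

BASIC_CASE = """
engine: postgres
setup:
  - statement: CREATE TABLE IF NOT EXISTS item (id INT PRIMARY KEY);
teardown:
  - statement: DROP TABLE IF EXISTS item;
steps:
  - session: 1
    statement: INSERT INTO item (id, name, count) VALUES (1, 'book', 5);
  - session: 0
    statement: SELECT * FROM item;
"""

case = yaml.safe_load(BASIC_CASE)
assert case["engine"] == "postgres"
assert len(case["steps"]) == 2
```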
#### File: src/tests/test_logging.py
```python
import logging
import pytest
import txtool.logging as lg
@pytest.mark.small
def test_get_logging_level():
# When
# pylint: disable=protected-access
level = lg._get_logging_level("DEBUG")
# Then
assert level == logging.DEBUG, f"Got unexpected log level \"{level}\""
@pytest.mark.small
def test_get_logging_level_default_level():
# When
# pylint: disable=protected-access
level = lg._get_logging_level("")
# Then
assert level == logging.INFO, f"Got unexpected log level \"{level}\""
```
#### File: src/txtool/step.py
```python
import dataclasses
@dataclasses.dataclass
class Step:
"""Represents a step on database operation."""
_session_id: int
_statement: str
@property
def session_id(self) -> int:
return self._session_id
@property
def statement(self) -> str:
return self._statement.replace('\n', '')
``` |
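The `statement` property strips embedded newlines, such as those produced by YAML block scalars. A minimal self-contained demo of the same pattern:
```python
import dataclasses

@dataclasses.dataclass
class Step:
    _session_id: int
    _statement: str

    @property
    def statement(self) -> str:
        # Collapse the newlines a YAML block scalar leaves behind.
        return self._statement.replace('\n', '')

step = Step(_session_id=0, _statement="SELECT *\n FROM item;")
print(step.statement)  # SELECT * FROM item;
```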
{
"source": "Aaaaace/ad_killer",
"score": 3
} |
#### File: Aaaaace/ad_killer/components.py
```python
from tkinter import *
class Msgbox(object):
    '''
    Message prompt window
    '''
def __init__(self, parent, text):
self.msg = Toplevel(parent)
self.msg.title('Message')
label = Label(self.msg, text=text)
label.pack()
confirm_button = Button(self.msg, text='confirm', command=self.confirm)
cancel_button = Button(self.msg, text='cancel', command=self.cancel)
confirm_button.pack(side=LEFT)
cancel_button.pack(side=RIGHT)
self.msg.mainloop()
    def confirm(self):
        # Close the dialog; hang confirmation logic off this method as needed.
        self.msg.destroy()
    def cancel(self):
        # Close the dialog without further action.
        self.msg.destroy()
```
#### File: Aaaaace/ad_killer/UI.py
```python
import tkinter as tk
import xml.etree.ElementTree as ET
import os
os.chdir(os.path.split(os.sys.argv[0])[0])
APPDATA = 'appdata.xml'
class App(object):
def __init__(self, master):
self.frame = tk.Frame(master)
self.frame.pack()
self._create_widget()
        # Prepare the data
if not os.path.isfile(APPDATA):
self._create_appdata()
self._applyappdata()
def _create_widget(self):
        '''
        Create the widgets
        '''
label1 = tk.Label(self.frame, text='输入要禁止的进程名')
        # Entry for typing a new process name to forbid
self.process_name_input = tk.Entry(
self.frame, show=None, font=('Arial', 14))
        # Button that adds a process to the forbidden list
self.add_button = tk.Button(
self.frame, text='add', command=self.add_process)
label2 = tk.Label(self.frame, text='已禁止的进程')
        # Listbox showing the forbidden process names
self.forbidden_listbox = tk.Listbox(self.frame)
self.button_frame = tk.Frame(self.frame)
        # Button that deletes an item from the forbidden list
self.del_button = tk.Button(
self.button_frame, text='del', command=self.del_process)
        # Save button
self.save_button = tk.Button(
self.button_frame, text='save', command=self.save_data)
        # Button that clears the forbidden list
self.clear_button = tk.Button(
self.button_frame, text='clear', command=self.clear_process)
        # Quit button
self.quit_button = tk.Button(
self.button_frame, text='quit', fg='red', command=self.frame.quit)
        # Lay out the widgets
label1.grid(row=0, column=0, sticky=tk.W)
self.process_name_input.grid(row=1, column=0, sticky=tk.W)
self.process_name_input.focus_set()
self.add_button.grid(row=1, column=1)
label2.grid(row=2, column=0, sticky=tk.W)
self.forbidden_listbox.grid(row=3, column=0, sticky=tk.W)
self.button_frame.grid(row=3, column=1)
self.del_button.pack(fill=tk.X)
self.save_button.pack(fill=tk.X)
self.clear_button.pack(fill=tk.X)
self.quit_button.pack(fill=tk.X)
def add_process(self):
        '''
        Add a forbidden process
        '''
name = self.process_name_input.get()
if name:
self.forbidden_listbox.insert(tk.END, name)
            # Write into the XML DOM; Element.find() returns None when the
            # node is missing (an empty Element is falsy, so test `is None`).
            if self._appdata.find('.//forbidden_list') is None:
ET.SubElement(self._appdata.find('.//data'), 'forbidden_list')
forbidden_item = ET.SubElement(self._appdata.find(
'.//forbidden_list'), 'forbidden_item')
forbidden_item.text = name
self.process_name_input.delete(0, tk.END)
def del_process(self):
        '''
        Delete a forbidden process
        '''
name = self.forbidden_listbox.get(tk.ACTIVE)
self.forbidden_listbox.delete(tk.ACTIVE)
forbidden_list = self._appdata.find('.//forbidden_list')
for forbidden_item in forbidden_list:
if forbidden_item.text == name:
forbidden_list.remove(forbidden_item)
break
def clear_process(self):
        '''
        Clear all forbidden processes
        '''
msg = tk.Toplevel(self.frame)
msg.title('Message')
label = tk.Label(msg, text='确定清空?')
confirm = tk.Button(msg, text='confirm')
cancel = tk.Button(msg, text='cancel')
label.pack()
confirm.pack(side=tk.LEFT)
cancel.pack(side=tk.RIGHT)
msg.mainloop()
def save_data(self):
        '''
        Save the data to the XML file
        '''
self._appdata.write(APPDATA)
    def _create_appdata(self):
        '''
        Create the data file when it does not exist
        '''
        # The XML declaration must be the very first bytes of the file,
        # so write the document without a leading newline.
        with open(APPDATA, 'w') as f:
            f.write(
                '<?xml version="1.0"?>\n'
                '<appdata>\n'
                '    <data></data>\n'
                '    <configure></configure>\n'
                '</appdata>')
def _applyappdata(self):
        '''
        Load data from the data file,
        including:
        forbidden_list
        '''
self._appdata = ET.parse(APPDATA)
forbidden_list = self._appdata.find('.//forbidden_list')
        if forbidden_list is None:
return
for forbidden_item in list(forbidden_list):
self.forbidden_listbox.insert(tk.END, forbidden_item.text)
root = tk.Tk()
root.title('ad_killer')
app = App(root)
root.mainloop()
root.destroy()
``` |
{
"source": "Aaaaace/flappybird-pygame",
"score": 3
} |
#### File: flappybird-pygame/lib/controllers.py
```python
import os
import pygame
from pygame.locals import *
import sprites
from asserts import SOUND_FLAP
def gethitmask(surface):
'''
    <summary>Build a surface's hit mask from its alpha channel</summary>
    <param name="surface">the surface</param>
    <return name="hitmask">the hit mask (list of bool columns)</return>
'''
width, height = surface.get_size()
hitmask = []
for x in range(width):
hitmask.append([])
for y in range(height):
hitmask[x].append(bool(surface.get_at((x, y))[3]))
return hitmask
def crashdetect_mask(rect1, hitmask1, rect2, hitmask2):
'''
    <summary>Hit-mask based collision detection; returns True on collision</summary>
    <param name="rect1">first sprite's rect (top-left corner, width and height)</param>
    <param name="hitmask1">first sprite's hit mask</param>
    <param name="rect2">second sprite's rect</param>
    <param name="hitmask2">second sprite's hit mask</param>
'''
overlap_area = rect1.clip(rect2)
if overlap_area.width == 0 or overlap_area.height == 0:
return False
x1_start, y1_start = overlap_area.x - rect1.x, overlap_area.y - rect1.y
x2_start, y2_start = overlap_area.x - rect2.x, overlap_area.y - rect2.y
for x in range(overlap_area.width):
for y in range(overlap_area.height):
if hitmask1[x1_start+x][y1_start+y] and hitmask2[x2_start+x][y2_start+y]:
return True
return False
class BirdController(object):
'''
    <summary>Bird controller: manages the bird's speed, angle, acceleration and flapping</summary>
'''
    _flapspeedyacc = -300  # y-velocity right after a flap (px/s)
    _initspeedy = -100  # initial y-velocity (px/s)
    _gravityspeedyacc = 900  # gravitational y-acceleration (px/s^2)
    _flapangleacc = 30  # angle right after a flap
    _initangle = 0  # initial angle
    _gravityangleacc = -80  # angular acceleration
def __init__(self, bird):
super().__init__()
        self.passed_time = 0  # elapsed time (ms)
        self.bird = bird
        self.bird_angle = self._initangle  # initial angle
        self.bird_speedy = self._initspeedy  # initial speed
self.bird.masks = [
gethitmask(pygame.transform.rotate(self.bird.image, angle*5)) \
for angle in range(-18, 7)
]
self.bird.mask = self.bird.masks[int(self.bird_angle/5 + 18.5)]
self.bird.mask_sizes = [
pygame.transform.rotate(self.bird.image, angle*5).get_size() \
for angle in range(-18, 7)
]
        # Sound effects
self.sound_flap = pygame.mixer.Sound(SOUND_FLAP[int(os.name != 'nt')])
def flap(self):
self.sound_flap.play()
self.bird_angle = self._flapangleacc
self.bird_speedy = self._flapspeedyacc
def update(self, passed_time):
        '''Update speed and angle, then the current frame and position'''
self.bird.update(passed_time)
self.passed_time += passed_time
        # Direction (angle)
self.bird_angle += passed_time/1000 * self._gravityangleacc
if self.bird_angle <= -90:
self.bird_angle = -90
        # Speed and position
self.bird_speedy += passed_time/1000 * self._gravityspeedyacc
movementy = self.passed_time*self.bird_speedy//1000
self.bird.rect.y += movementy
self.passed_time -= movementy*1000/self.bird_speedy
if self.bird.rect.y < -self.bird.width:
self.bird.rect.y = -self.bird.width
self.bird.image = \
pygame.transform.rotate(self.bird.image, self.bird_angle)
        # Collision mask
idx = int(self.bird_angle/5 + 18.5)
self.bird.mask = self.bird.masks[idx]
self.bird.rect[2:4] = self.bird.mask_sizes[idx]
```
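As an aside, pygame ships a C-backed equivalent of the hand-rolled `gethitmask`/`crashdetect_mask` pair above. A hedged sketch of the same pixel-perfect check using `pygame.mask` (not how this repo does it):
```python
import pygame

def masks_collide(rect1, surf1, rect2, surf2):
    """Pixel-perfect collision via pygame.mask instead of Python loops."""
    mask1 = pygame.mask.from_surface(surf1)  # alpha above threshold -> solid
    mask2 = pygame.mask.from_surface(surf2)
    offset = (rect2.x - rect1.x, rect2.y - rect1.y)
    return mask1.overlap(mask2, offset) is not None

# Tiny self-check: two 4x4 opaque squares overlapping in a 2x2 corner.
pygame.init()
a = pygame.Surface((4, 4), pygame.SRCALPHA)
a.fill((255, 0, 0, 255))
b = pygame.Surface((4, 4), pygame.SRCALPHA)
b.fill((0, 255, 0, 255))
print(masks_collide(pygame.Rect(0, 0, 4, 4), a, pygame.Rect(2, 2, 4, 4), b))  # True
```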
#### File: flappybird-pygame/lib/game.py
```python
import os, sys
print(os.getcwd())
import pygame
from pygame.locals import *
import sprites
import controllers
from gameobjects import Vec2d
import random
from constants import *
from asserts import PIPE_LIST, BACKGROUND_LIST, STARTMESSAGE, GAMEOVERMESSAGE
from asserts import SOUND_HIT, SOUND_DIE
# from itertools import cycle
SCREEN = None  # the window
FPSclock = None  # the master clock
# Module-level state used by the functions below
swing_passed_time = 0
def run():
    # Initial setup
pygame.init()
pygame.event.set_blocked(MOUSEMOTION)
pygame.display.set_caption('Flappy Bird')
global FPSclock, SCREEN
SCREEN = pygame.display.set_mode((SCREENWIDTH, SCREENHEIGHT), 0, 32)
FPSclock = pygame.time.Clock()
    # Game loop
while True:
sprites_start = startMenu()
message_over = maingame(sprites_start)
gameover(message_over)
def startMenu():
'''
    Create the background, bird and ground, then hand them over to maingame
'''
def birdswing(bird, passed_time, swingderection):
        '''Make the bird bob up and down before the game starts'''
global swing_passed_time
swing_passed_time += passed_time
movementy = swing_passed_time//20
swing_passed_time -= movementy*20
if swingderection:
bird.rect.y -= movementy
if bird.rect.y < BIRDY - SWINGSCOPE:
bird.rect.y = 2*(BIRDY - SWINGSCOPE) - bird.rect.y
return not swingderection
return swingderection
else:
bird.rect.y += movementy
if bird.rect.y > BIRDY + SWINGSCOPE:
bird.rect.y = 2*(BIRDY + SWINGSCOPE) - bird.rect.y
return not swingderection
return swingderection
    # The bird
bird = sprites.Bird(Vec2d([BIRDX, BIRDY]), random.randint(0, 2))
    birdswingdirection = True  # bobbing direction on the start screen; True means up
    # The ground
base = sprites.Base()
    # Background and start message
background = pygame.image.load(random.choice(BACKGROUND_LIST)).convert()
startmessage = pygame.image.load(STARTMESSAGE).convert_alpha()
message_position = Vec2d(
(SCREENWIDTH-startmessage.get_width())//2,
(SCREENHEIGHT-startmessage.get_height()-base.height)//2
)
while True:
for event in pygame.event.get():
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
elif (event.type == KEYDOWN and event.key in (K_SPACE, K_UP, )) \
or event.type == MOUSEBUTTONDOWN:
return {
'bird': bird,
'background': background,
'base': base,
}
passed_time = FPSclock.tick(FPS)
bird.update(passed_time)
birdswingdirection = birdswing(bird, passed_time, birdswingdirection)
base.update(passed_time)
SCREEN.blit(background, (0, 0))
SCREEN.blit(startmessage, message_position)
SCREEN.blit(bird.image, bird.rect)
SCREEN.blit(base.image, base.position)
pygame.display.update()
def maingame(sprites_start):
'''
    Everything that happens after the game starts
'''
score = 0
maygetscore = False
scorefigure = sprites.ScoreFigure()
    # Pipes
pipes = {}
pipecolor = random.randint(0, 1)
pipepositionys = [random.randint(PP_MIN, PP_MAX) for _ in range(2)]
pipeexample = pygame.image.load(PIPE_LIST[pipecolor]).convert_alpha()
pipeexample_inv = pygame.transform.flip(pipeexample, 0, 1)
pipe_width, pipe_height = pipeexample.get_size()
pipes['top'] = (
sprites.Pipe(Vec2d(2*SCREENWIDTH, pipepositionys[0]), pipeexample_inv.copy()),
sprites.Pipe(Vec2d(2*SCREENWIDTH+PIPESLOT, pipepositionys[1]), pipeexample_inv.copy()),
)
pipes['bottom'] = (
sprites.Pipe(Vec2d(2*SCREENWIDTH, pipepositionys[0]+pipe_height+SLOT), pipeexample.copy()),
sprites.Pipe(Vec2d(2*SCREENWIDTH+PIPESLOT, pipepositionys[1]+pipe_height+SLOT), pipeexample.copy()),
)
for idx in range(2):
pipes['top'][idx].mask = controllers.gethitmask(pipes['top'][idx].image)
pipes['bottom'][idx].mask = controllers.gethitmask(pipes['bottom'][idx].image)
    # Sprites handed over from startMenu
bird = sprites_start['bird']
background = sprites_start['background']
base = sprites_start['base']
    # Bird controller
    birdcontroller = controllers.BirdController(bird)
    birdcontroller.flap()  # flap once
    # Main loop
while True:
        # Event handling
for event in pygame.event.get():
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
elif event.type == KEYDOWN and event.key == K_r:
return None
elif (event.type == KEYDOWN and event.key in (K_SPACE, K_UP, )) \
or event.type == MOUSEBUTTONDOWN:
                birdcontroller.flap()  # flap
passed_time = FPSclock.tick(FPS)
        # Update the bird, ground and pipes
birdcontroller.update(passed_time)
base.update(passed_time)
up_y1 = pipes['top'][0].update(passed_time)
up_y2 = pipes['top'][1].update(passed_time)
pipes['bottom'][0].update(passed_time, up_y1)
pipes['bottom'][1].update(passed_time, up_y2)
scorefigure.update(score)
if maygetscore and \
(bird.rect.x > pipes['top'][0].rect.x \
or bird.rect.x > pipes['top'][1].rect.x):
score += 1
maygetscore = False
if bird.rect.x < pipes['top'][0].rect.x \
and bird.rect.x < pipes['top'][1].rect.x:
maygetscore = True
        # Collision detection
if bird.rect[1] + bird.image.get_width()/1.5 > base.position[1] or \
crashdetect(bird,(
pipes['top'][0], pipes['top'][1],
pipes['bottom'][0], pipes['bottom'][1],)):
pygame.mixer.Sound(SOUND_HIT[0]).play()
return{
'birdcontroller': birdcontroller,
'pipes': pipes,
'base': base,
'background': background,
'scorefigure': scorefigure,
}
        # Draw
SCREEN.blit(background, (0, 0))
SCREEN.blits((
(pipes['top'][0].image, pipes['top'][0].rect),
(pipes['bottom'][0].image, pipes['bottom'][0].rect),
(pipes['top'][1].image, pipes['top'][1].rect),
(pipes['bottom'][1].image, pipes['bottom'][1].rect),
))
SCREEN.blit(base.image, base.position)
SCREEN.blit(scorefigure.image, scorefigure.rect)
SCREEN.blit(bird.image, bird.rect)
pygame.display.update()
def gameover(sprites_end):
'''
    Game-over animation and message
'''
if sprites_end is None:
return None
pygame.mixer.Sound(SOUND_DIE[0]).play()
base = sprites_end['base']
background = sprites_end['background']
pipes = sprites_end['pipes']
birdcontroller = sprites_end['birdcontroller']
scorefigure = sprites_end['scorefigure']
birdcontroller.bird_angle = 30
birdcontroller.bird_speedy = 100
birdcontroller._gravityspeedyacc *= 1.5
birdcontroller._gravityangleacc *= 4
bird = birdcontroller.bird
gameovermessage = pygame.image.load(GAMEOVERMESSAGE).convert_alpha()
gameoverwidth, gameoverheight = gameovermessage.get_size()
SCREEN.blit(gameovermessage,
((SCREENWIDTH-gameoverwidth)//2, int(0.33*SCREENHEIGHT)))
pygame.display.update()
FPSclock.tick()
    # Game-over animation
while True:
passed_time = FPSclock.tick(FPS)
birdcontroller.update(passed_time)
if bird.rect[1] + bird.image.get_width()/1.5 > base.position[1]:
break
SCREEN.blit(background, (0, 0))
SCREEN.blits((
(pipes['top'][0].image, pipes['top'][0].rect),
(pipes['bottom'][0].image, pipes['bottom'][0].rect),
(pipes['top'][1].image, pipes['top'][1].rect),
(pipes['bottom'][1].image, pipes['bottom'][1].rect),
(base.image, base.position),
(scorefigure.image, scorefigure.rect),
))
# SCREEN.blit(base.image, base.position)
SCREEN.blit(bird.image, bird.rect)
SCREEN.blit(gameovermessage,
((SCREENWIDTH-gameoverwidth)//2, int(0.35*SCREENHEIGHT)))
pygame.display.update()
while True:
for event in pygame.event.get():
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
if event.type in (KEYDOWN, MOUSEBUTTONDOWN):
return None
def crashdetect(bird, pipelist):
    '''In-game collision detection between the bird and the pipes'''
for pipe in pipelist:
if controllers.crashdetect_mask(
bird.rect, bird.mask, pipe.rect, pipe.mask
):
return True
return False
if __name__ == '__main__':
MAIN_DIR = os.path.split(os.path.abspath(sys.argv[0]))[0]
os.chdir(MAIN_DIR)
    os.chdir('..')  # asset data is loaded relative to the parent folder
run()
``` |
{
"source": "AAAAAHui/SKOS_in_EA",
"score": 3
} |
#### File: SKOS_in_EA/Query/query.py
```python
import requests
import sys
import json
# Get the full concept description and write it to <path>concept.json.
def query(path, scheme, uri):
    if scheme == 'all_BegrippenXL_vocabularies':
        a = requests.get('https://www.begrippenxl.nl/rest/v1/data?uri=' + uri + '&format=application/ld%2Bjson')
    else:
        a = requests.get('https://www.begrippenxl.nl/rest/v1/' + scheme + '/data?uri=' + uri + '&format=application/ld%2Bjson')
    if a.status_code == 200:
        data = json.dumps(a.json())
        with open(path + "concept.json", "w") as f:
            f.write(data)
    return
# keyword = sys.argv[3]
# if(len(sys.argv) > 4):
# for i in range(4, len(sys.argv)):
# keyword = keyword + ' ' + sys.argv[i]
query(sys.argv[1], sys.argv[2], sys.argv[3])
#retrieve("vensters", "http://purl.edustandaard.nl/concept/f5b84813-ba3f-463b-93f7-2b270cc88be8", "D:\\Project\\Testing\\")
``` |
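A slightly more defensive take on the same request — a sketch, not code from the repo; it lets `requests` handle the URL encoding and reports failures to the caller:
```python
import json
import requests

def query_safe(path, scheme, uri):
    """Fetch a concept description and write it to <path>concept.json."""
    base = 'https://www.begrippenxl.nl/rest/v1/'
    prefix = '' if scheme == 'all_BegrippenXL_vocabularies' else scheme + '/'
    resp = requests.get(base + prefix + 'data',
                        params={'uri': uri, 'format': 'application/ld+json'},
                        timeout=10)
    if resp.status_code != 200:
        return False
    with open(path + 'concept.json', 'w') as f:
        json.dump(resp.json(), f)
    return True
```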
{
"source": "AAAAAXfda/CLI-For-Zoom",
"score": 2
} |
#### File: CLI-For-Zoom/lib/zoomconfig.py
```python
import sqlite3
import platform
import os
import filetype
from shutil import copyfile
import uuid
from collections import namedtuple
from pathlib import Path
import sys
import ffmpeg
from enum import Enum
VirtualBackground = namedtuple('VirtualBackground', 'path name type custom_index thumb_path')
class VirtualBackgroundType(Enum):
DEFAULT_IMAGE = 0
CUSTOM_IMAGE = 1
DEFAULT_VIDEO = 2
CUSTOM_VIDEO = 3
class ZoomConfig:
background_path_key = 'com.zoom.client.saved.video.replace_bk_path_1'
background_data_key = 'com.zoom.client.saved.video.replace_bk_data_1'
background_types_custom = (VirtualBackgroundType.CUSTOM_IMAGE.value, VirtualBackgroundType.CUSTOM_VIDEO.value)
background_types_image = (VirtualBackgroundType.DEFAULT_IMAGE.value, VirtualBackgroundType.CUSTOM_IMAGE.value)
background_types_video = (VirtualBackgroundType.DEFAULT_VIDEO.value, VirtualBackgroundType.CUSTOM_VIDEO.value)
def __init__(self, data_dir):
self.backgrounds_dir = data_dir / "VirtualBkgnd_Custom"
self.video_thumbs_dir = data_dir / "VirtualBkgnd_VideoThumb"
self.conn = sqlite3.connect(data_dir / "zoomus.db")
def get_background(self):
return self.get_current_background_path()
def export_backgrounds(self, export_dir):
Path(export_dir).mkdir(exist_ok=True)
for background in self.get_backgrounds():
source_path = background.path.encode("utf-8")
target_path = os.path.join(background.name.encode("utf-8"),
background.name.encode("utf-8"))
# append file extension if we can infer one
kind = filetype.guess(source_path)
if kind:
target_path += ".".encode("utf-8") + \
kind.extension.encode("utf-8")
copyfile(source_path, target_path)
def import_background(self, source_path):
kind = filetype.guess(source_path)
if kind is None:
print("skipping file because unable to determine format: " + source_path)
return
target_path = None
name = Path(source_path).stem
type = None
custom_index = None
thumb_path = None
if kind.mime.startswith("image"):
type = VirtualBackgroundType.CUSTOM_IMAGE
# copy the image to the zoom virtual backgrounds directory
target_path = str(self.backgrounds_dir / str(uuid.uuid4()))
# todo: change this to generate PNGs
ffmpeg.input(source_path).filter('scale', 'min(in_w,1920)', -1).filter('crop', 'min(in_w,1920)', 'min(in_h,1080)', 0, '(max(in_w-1080,0))/2').output(target_path, format='mjpeg').run()
elif kind.mime.startswith("video"):
type = VirtualBackgroundType.CUSTOM_VIDEO
# we do not copy videos, presumably for size reasons
target_path = source_path
# generate video thumbnail
thumb_path = str(self.video_thumbs_dir / str(uuid.uuid4()))
# todo: change this to generate BMPs
ffmpeg.input(source_path).filter('scale', 320, -1).output(thumb_path, format='mjpeg', vframes=1).run()
else:
print("skipping file for unsupported mime type: " + kind.mime + " from " + source_path)
return
custom_index = type.value * 100
background = VirtualBackground(path=target_path, name=name, type=type.value, custom_index=custom_index, thumb_path=thumb_path)
c = self.conn.cursor()
c.execute('INSERT OR IGNORE INTO zoom_conf_video_background_a (path,name,type,customIndex,thumbPath) VALUES(?,?,?,?,?)', background)
self.conn.commit()
return background
def import_backgrounds(self, source_path):
root = Path(source_path)
if root.is_dir():
for child in root.iterdir():
if child.is_file():
print(child.resolve())
self.import_background(str(child.resolve()))
elif root.is_file():
self.import_background(source_path)
else:
raise Exception("import called on something that is neither a file nor directory: " + source_path)
def get_current_background_path(self):
c = self.conn.cursor()
c.execute('SELECT value FROM zoom_kv WHERE key=?', (ZoomConfig.background_path_key,))
row = c.fetchone()
if row:
return row[0]
else:
return None
def remove_current_background(self):
c = self.conn.cursor()
c.execute('DELETE FROM zoom_kv WHERE key=?', (ZoomConfig.background_path_key,))
self.conn.commit()
def set_background(self, path):
self.remove_current_background()
if not path:
return
# validate that the file exists
p = Path(path)
if not p.exists():
raise Exception("file not found: " + path)
elif not p.is_file():
raise Exception("cannot set background to a non-file: " + path)
background = self.import_background(path)
# update the background path key to point to the file
c = self.conn.cursor()
c.execute('INSERT INTO zoom_kv VALUES (?,?,?)',
(ZoomConfig.background_path_key, background.path, 'ZoomChat'))
# also update background data key to indicate whether the background is image or video
c.execute('SELECT value FROM zoom_kv WHERE key = ?', (ZoomConfig.background_data_key,))
row = c.fetchone()
value = row[0].split(":")
if background.type in ZoomConfig.background_types_image:
value[4] = "1"
elif background.type in ZoomConfig.background_types_video:
value[4] = "2"
else:
raise Exception("unhandled type: " + str(background.type))
value = ":".join(value)
c.execute('UPDATE zoom_kv SET value = ? WHERE key = ?', (value, ZoomConfig.background_data_key))
self.conn.commit()
def delete_custom_backgrounds(self):
c = self.conn.cursor()
current_background = self.get_current_background_path()
for row in c.execute('SELECT path, thumbPath FROM zoom_conf_video_background_a WHERE type in (?,?)', ZoomConfig.background_types_custom):
path = row[0]
thumb_path = row[1]
if current_background and path == current_background:
self.remove_current_background()
if path.startswith(str(self.backgrounds_dir)):
Path(path).unlink(missing_ok=True)
if thumb_path and thumb_path.startswith(str(self.video_thumbs_dir)):
Path(thumb_path).unlink(missing_ok=True)
c.execute('DELETE FROM zoom_conf_video_background_a WHERE type in (?,?)', ZoomConfig.background_types_custom)
self.conn.commit()
def get_backgrounds(self):
c = self.conn.cursor()
backgrounds = []
for row in c.execute('SELECT path, name, type, customIndex, thumbPath FROM zoom_conf_video_background_a WHERE type in (?,?)', ZoomConfig.background_types_custom):
path = row[0]
name = row[1]
type = row[2]
custom_index = row[3]
thumb_path = row[4]
background = VirtualBackground(path=path, name=name, type=type, custom_index=custom_index, thumb_path=thumb_path)
backgrounds.append(background)
return backgrounds
def close(self):
self.conn.close()
def __del__(self):
self.conn.close()
def open():
if platform.system() == "Darwin":
data_dir = Path("~/Library/Application Support/zoom.us/data").expanduser()
elif platform.system() == "Windows":
data_dir = Path(os.getenv("APPDATA")) / "Zoom" / "data"
else:
raise Exception("unsupported system: " + platform.system())
return ZoomConfig(data_dir)
``` |
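A hedged usage sketch of the class above (paths are illustrative; Zoom should be closed first, since the client holds `zoomus.db` open):
```python
import zoomconfig  # assuming lib/ is on sys.path

zc = zoomconfig.open()
print(zc.get_background())           # current background path, or None
zc.set_background("/tmp/beach.jpg")  # hypothetical image file
for bg in zc.get_backgrounds():
    print(bg.name, bg.path)
zc.close()
```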
{
"source": "AAAAAXfda/XBOT-discord",
"score": 2
} |
#### File: AAAAAXfda/XBOT-discord/buildings.py
```python
def upgradePrice(label, level):
priceDico = {
"maison":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
"carrieres":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
"scieries":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
"fonderies":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
"mines":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
"mairie":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
"magasin_de_peche":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
"restaurant":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
"camp_d_entrainement":{
"2":None,
"3":None,
"4":None,
"5":None,
"6":None,
"7":None,
"8":None,
"9":None,
"10":None,
},
    }
    # The table above is an unfilled placeholder; look up the entry
    # (currently always None) so callers get a defined return value.
    return priceDico.get(label, {}).get(str(level))
```
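Since every entry in the table is still `None`, one hedged alternative is to derive prices from a cost curve instead of filling 81 cells by hand. The base costs and the 1.8 growth factor below are invented for illustration:
```python
# Hypothetical cost curve: price(level) = base * growth ** (level - 1).
BASE_COSTS = {
    "maison": 100, "carrieres": 150, "scieries": 150, "fonderies": 200,
    "mines": 200, "mairie": 500, "magasin_de_peche": 250,
    "restaurant": 250, "camp_d_entrainement": 300,
}

def upgrade_price(label, level):
    """Price to upgrade `label` to `level` (2..10), or None if unknown."""
    base = BASE_COSTS.get(label)
    if base is None or not 2 <= level <= 10:
        return None
    return int(base * 1.8 ** (level - 1))

print(upgrade_price("maison", 3))  # 324
```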
#### File: XBOT-discord/cogs/profile.py
```python
import disnake
from disnake.ext import commands
# local modules
from configcreator import Config
import database
CONFIG = Config()
class Counter(disnake.ui.View):
# Define the actual button
# When pressed, this increments the number displayed until it hits 5.
# When it hits 5, the counter button is disabled and it turns green.
# note: The name of the function does not matter to the library
@disnake.ui.button(label="0", style=disnake.ButtonStyle.red)
async def count(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
number = int(button.label) if button.label else 0
if number + 1 >= 5:
button.style = disnake.ButtonStyle.green
button.disabled = True
button.label = str(number + 1)
# Make sure to update the message with our updated selves
await interaction.response.edit_message(view=self)
@disnake.ui.button(label="click to send a message", style=disnake.ButtonStyle.green)
async def sendButtonMessage(
self, button: disnake.ui.Button, interaction: disnake.MessageInteraction
):
await interaction.channel.send("Hey !")
class ProfileCommandsCog(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.slash_command(
name="profile", description="Commande de test.", test_guilds=CONFIG.test_guilds
)
async def profile(self, inter):
data = database.getUserData(inter.user.id)
member = inter.author
await inter.response.send_message(
"Vous avez sélectionné une commande de test. Utilisez plutôt la commande `/town` pour consulter votre profil.",
view=Counter(),
)
database.incrementCommandCount(inter.user.id)
def setup(bot):
bot.add_cog(ProfileCommandsCog(bot))
# >o)
# (_> HM
```
#### File: XBOT-discord/cogs/stats.py
```python
import disnake
from disnake import player
from disnake import file
from disnake.ext import commands, tasks
from disnake.ext.commands import Param
# local modules
from configcreator import Config
import database
# other modules
import datetime
import platform
import psutil
import sys
import os
import csv
from matplotlib.figure import Figure
from io import BytesIO
CONFIG = Config()
async def getEmbed(name, bot, member, interaction):
userData = database.getUserData(member.id)
emojisDico = {"Serveur": "🖥️", "Utilisateur": "👤", "Bot": "🤖", "Jeu": "🎮"}
imagesDico = {
"Serveur": interaction.guild.icon.url,
"Utilisateur": member.display_avatar.url,
"Bot": bot.user.display_avatar.url,
"Jeu": "https://media.discordapp.net/attachments/889447462476087307/894217216118112277/9b805ec85ef4808bbf9e9f196c70c077.jpg",
}
embed = disnake.Embed(
title=emojisDico[name] + " " + name,
description="",
timestamp=datetime.datetime.utcnow(),
)
embed.set_thumbnail(url=imagesDico[name])
embed.set_footer(
text=bot.user.name + " - requested by " + str(interaction.author),
icon_url=interaction.author.display_avatar.url,
)
if name == "Utilisateur":
strStats = f""":alarm_clock: Création du compte : `{datetime.datetime.utcfromtimestamp(userData["stats"]["creation_du_compte"])}`
:fishing_pole_and_fish: Poissons pêchés : `{userData["stats"]["poissons_peches"]}`
:keyboard: Commandes utilisées : `{userData["stats"]["commandes_utilisees"]}`
"""
embed.add_field(name="Stats", value=strStats)
elif name == "Bot":
uptime = int(datetime.datetime.utcnow().timestamp() - CONFIG.boottime)
min, sec = divmod(uptime, 60)
hours, min = divmod(min, 60)
embed.add_field(
name="Informations système :",
value=f""":penguin: OS : `{platform.platform()}`
:snake: Python Version : `{sys.version}`
:alarm_clock: Bot uptime : `{int(hours)}h{int(min)}m{int(sec)}s`
""",
inline=False,
)
embed.add_field(
name="Performance :",
value=f""":memo: System CPU usage : `{psutil.cpu_percent()}`%
:file_cabinet: System RAM usage : `{round(psutil.virtual_memory().used / (1024.0**3),2)}`GB/`{round(psutil.virtual_memory().total / (1024.0**3),2)}`GB (`{psutil.virtual_memory()[2]}`%)
:file_cabinet: RAM used by the program : `{round(psutil.Process(os.getpid()).memory_info()[0] / (1024.0**3),2)}`GB
:floppy_disk: Disk usage : `{round(psutil.disk_usage('/').used / (1024.0**3),2)}`GB/`{round(psutil.disk_usage('/').total / (1024.0**3),2)}`GB (`{psutil.disk_usage('/').percent}`%)
""",
inline=False,
)
elif name == "Serveur":
serverStatsStr = f"""Nom : **{interaction.guild.name}**
Id : `{interaction.guild.id}`
Nombre de membres : `{interaction.guild.member_count}`
Salons : `{len(interaction.guild.text_channels)}` textuels, `{len(interaction.guild.voice_channels)}` vocaux.
Propriétaire : `{interaction.guild.owner_id}`
Créé le : `{interaction.guild.created_at}`
"""
embed.add_field(name="Général", value=serverStatsStr)
elif name == "Jeu":
fullDb = database.getDatabase()
nombreCommandes = sum(i["stats"]["commandes_utilisees"] for i in fullDb)
jeuStatsStr = f""":people_wrestling: Nombre de joueurs : {len(fullDb)}
:satellite: Commandes utilisées : {nombreCommandes}
"""
embed.add_field(name="Général", value=jeuStatsStr)
playerCount = []
serverCount = []
commandCount = []
time = []
with open("stats.csv") as csv_file:
csv_reader = csv.reader(csv_file, delimiter=",")
for row in csv_reader:
playerCount.append(int(row[1]))
time.append(datetime.datetime.utcfromtimestamp(int(row[0])))
serverCount.append(int(row[2]))
commandCount.append(int(row[3]))
        # Automatic downsampling so we always keep between 500 and 1000 values.
sampleSize = len(playerCount)
try:
# or any other value up to 1000, so it is in your specified limit
step_size = sampleSize // 500
playerCount = playerCount[::step_size]
time = time[::step_size]
serverCount = serverCount[::step_size]
commandCount = commandCount[::step_size]
except:
pass
fig = Figure()
fig.set_size_inches(15, 11)
axis = fig.subplots(3)
fig.suptitle(
f"Données mises à jour toutes les 12h. Sampling dynamique des données : {len(time)}/{sampleSize} valeurs"
)
axis[0].plot(time, playerCount)
axis[0].grid(b=True)
axis[0].set_title("Nombre de joueurs")
axis[1].plot(time, serverCount)
axis[1].grid(b=True)
axis[1].set_title("Nombre de serveurs")
axis[2].plot(time, commandCount)
axis[2].grid(b=True)
axis[2].set_title("Commandes utilisées")
buf = BytesIO()
fig.savefig(buf, format="png")
buf.seek(0)
channel = bot.get_channel(894965795417907220)
message = await channel.send(file=disnake.File(buf, "sample.png"))
embed.set_image(url=message.attachments[0].url)
return embed
class Dropdown(disnake.ui.Select):
def __init__(self, bot, member):
self.bot = bot
self.member = member
options = [
disnake.SelectOption(
label="Utilisateur",
description="Des informations par rapport à l'utilisateur spécifié.",
emoji="👤",
),
disnake.SelectOption(
label="Serveur",
description="Des informations par rapport au serveur.",
emoji="🖥️",
),
disnake.SelectOption(
label="Bot",
description="Des informations par rapport au bot.",
emoji="🤖",
),
disnake.SelectOption(
label="Jeu",
description="Des informations par rapport au jeu.",
emoji="🎮",
),
]
super().__init__(
placeholder="Sélectionnez un menu",
min_values=1,
max_values=1,
options=options,
)
async def callback(self, interaction: disnake.MessageInteraction):
statLabel = self.values[0]
await interaction.response.edit_message(
embed=await getEmbed(statLabel, self.bot, self.member, interaction)
)
class DropdownView(disnake.ui.View):
def __init__(self, bot, member):
super().__init__()
self.add_item(Dropdown(bot, member))
class StatsCommandsCog(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.slash_command(
name="stats",
description="Affiche des statistiques par rapport aux utilisateurs, au serveur, au bot et au jeu.",
test_guilds=CONFIG.test_guilds,
)
async def stats(
self, inter, member: disnake.Member = Param(lambda inter: inter.author)
):
"""Affiche des stats par rapport aux utilisateurs, au serveur, au bot et au jeu"""
userData = database.getUserData(member.id)
view = DropdownView(self.bot, member)
embed = await getEmbed("Utilisateur", self.bot, member, inter)
await inter.response.send_message(
embed=embed,
view=view,
)
database.incrementCommandCount(inter.user.id)
@tasks.loop(seconds=60 * 60 * 24)
async def statsWriter(self):
allData = database.getDatabase()
playerNumber = len(allData)
servers = len(self.bot.guilds)
nombreCommandes = sum(i["stats"]["commandes_utilisees"] for i in allData)
stats = [
int(datetime.datetime.utcnow().timestamp()),
playerNumber,
servers,
nombreCommandes,
]
with open("stats.csv", encoding="utf-8", mode="a", newline="") as f:
writer = csv.writer(f)
writer.writerow(stats)
print(
f"{datetime.datetime.now()} | Stockage des stats dans le document csv effectué avec succès."
)
def setup(bot):
bot.add_cog(StatsCommandsCog(bot))
# >o)
# (_> HM
``` |
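The try/except around the sampling exists because `sampleSize // 500` is 0 whenever there are fewer than 500 rows, which makes the slice step invalid. A small sketch that handles that case explicitly:
```python
def downsample(values, target=500):
    """Keep roughly `target` evenly spaced items (all of them if fewer)."""
    step = max(1, len(values) // target)
    return values[::step]

print(len(downsample(list(range(12345)))))  # 515
print(len(downsample(list(range(42)))))     # 42 (no sampling below target)
```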
{
"source": "aaaaronlin/barc",
"score": 2
} |
#### File: src/lab2/key_listener.py
```python
import rospy
import time, sys, select, termios, tty
from barc.msg import ECU
# initialize global
acc = 0
d_f = 0
settings = termios.tcgetattr(sys.stdin)
def getKey():
global settings
tty.setraw(sys.stdin.fileno())
select.select([sys.stdin], [], [], 0)
key = sys.stdin.read(1)
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
return key
def controller():
global acc, d_f
settings = termios.tcgetattr(sys.stdin)
# initialize node
rospy.init_node('key_listener', anonymous=True)
# topic subscriptions / publications
state_pub = rospy.Publisher('ecu', ECU, queue_size = 10)
# set node rate
loop_rate = 50
dt = 1.0 / loop_rate
rate = rospy.Rate(loop_rate)
t0 = time.time()
# set initial conditions
d_f = 0
acc = 0
while not rospy.is_shutdown():
# get keyboard inputs
key = getKey()
if key == 'w':
acc = 0.1
if key == 's':
acc = -0.1
if key == 'a':
d_f += 0.01
if key == 'd':
d_f -= 0.01
# publish information
state_pub.publish( ECU(acc, d_f) )
# wait
rate.sleep()
if __name__ == '__main__':
try:
controller()
except rospy.ROSInterruptException:
pass
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
``` |
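Note that `getKey` above passes a zero timeout to `select` but ignores its result, so `sys.stdin.read(1)` still blocks until a key arrives and the 50 Hz loop stalls between presses. A sketch of a genuinely non-blocking variant:
```python
import select
import sys
import termios
import tty

def get_key_nonblocking(timeout=0.02):
    """Return one pressed key, or '' if none arrives within `timeout` s."""
    old = termios.tcgetattr(sys.stdin)
    try:
        tty.setraw(sys.stdin.fileno())
        ready, _, _ = select.select([sys.stdin], [], [], timeout)
        return sys.stdin.read(1) if ready else ''
    finally:
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old)
```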
{
"source": "aaaaronlin/TelloPy",
"score": 3
} |
#### File: tellopy/examples/record_log.py
```python
from time import sleep
import tellopy
import datetime
import os
file = None
write_header = True
def handler(event, sender, data, **args):
global file
global write_header
drone = sender
if event is drone.EVENT_LOG_DATA:
        if file is None:
path = '%s/Desktop/tello-%s.csv' % (
os.getenv('HOME'),
datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S'))
file = open(path, 'w')
if write_header:
file.write('%s\n' % data.format_cvs_header())
write_header = False
file.write('%s\n' % data.format_cvs())
if event is drone.EVENT_FLIGHT_DATA or event is drone.EVENT_LOG_DATA:
print('record_log: %s: %s' % (event.name, str(data)))
def test():
drone = tellopy.Tello()
try:
drone.subscribe(drone.EVENT_FLIGHT_DATA, handler)
drone.subscribe(drone.EVENT_LOG_DATA, handler)
drone.record_log_data()
drone.connect()
drone.wait_for_connection(60.0)
drone.takeoff()
sleep(5)
drone.clockwise(100)
sleep(5)
drone.clockwise(0)
drone.down(50)
sleep(2)
drone.up(50)
sleep(2)
drone.up(0)
drone.land()
sleep(5)
except Exception as ex:
print(ex)
finally:
drone.quit()
if __name__ == '__main__':
test()
```
#### File: tests/tello_qt/tello_qt.py
```python
from __future__ import division, print_function, absolute_import
import sys
from PyQt5.QtCore import Qt
from PyQt5 import uic, QtWidgets
from thread_qt import WifiThread
qtCreatorFile = "mainwindow.ui"
Ui_MainWindow, QtBaseClass = uic.loadUiType(qtCreatorFile)
class MyApp(QtWidgets.QMainWindow, Ui_MainWindow):
def __init__(self):
QtWidgets.QMainWindow.__init__(self)
Ui_MainWindow.__init__(self)
self.setupUi(self)
self.status_label.setAutoFillBackground(True)
self.status_palette_connected = self.status_label.palette()
self.status_palette_connected.setColor(self.status_label.backgroundRole(), Qt.green)
self.status_palette_connected.setColor(self.status_label.foregroundRole(), Qt.black)
self.status_palette_disconnected = self.status_label.palette()
self.status_palette_disconnected.setColor(self.status_label.backgroundRole(), Qt.yellow)
self.status_palette_disconnected.setColor(self.status_label.foregroundRole(), Qt.black)
self.status_palette_paused = self.status_label.palette()
self.status_palette_paused.setColor(self.status_label.backgroundRole(), Qt.gray)
self.status_palette_paused.setColor(self.status_label.foregroundRole(), Qt.black)
self.wifi_thread = WifiThread()
self.wifi_thread.updated.connect(self.handler)
self.wifi_thread.start()
def handler(self):
if self.wifi_thread.wifi_active:
text = self.wifi_thread.wifi_ssid
palette = self.status_palette_connected
else:
text = 'no connection'
palette = self.status_palette_disconnected
        self.status_label.setPalette(palette)
self.status_label.setText(text)
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
window = MyApp()
window.show()
sys.exit(app.exec_())
```
#### File: TelloPy/tests/test.py
```python
from time import sleep
import tellopy
from tellopy._internal.utils import *
prev_flight_data = None
def handler(event, sender, data, **args):
global prev_flight_data
drone = sender
if event is drone.EVENT_CONNECTED:
print('connected')
drone.start_video()
drone.set_exposure(0)
drone.set_video_encoder_rate(4)
elif event is drone.EVENT_FLIGHT_DATA:
if prev_flight_data != str(data):
print(data)
prev_flight_data = str(data)
elif event is drone.EVENT_TIME:
print('event="%s" data=%d' % (event.getname(), data[0] + data[1] << 8))
elif event is drone.EVENT_VIDEO_FRAME:
pass
else:
print('event="%s" data=%s' % (event.getname(), str(data)))
def test():
drone = tellopy.Tello()
try:
# drone.set_loglevel(d.LOG_ALL)
drone.subscribe(drone.EVENT_CONNECTED, handler)
# drone.subscribe(drone.EVENT_WIFI, handler)
# drone.subscribe(drone.EVENT_LIGHT, handler)
drone.subscribe(drone.EVENT_FLIGHT_DATA, handler)
# drone.subscribe(drone.EVENT_LOG, handler)
drone.subscribe(drone.EVENT_TIME, handler)
drone.subscribe(drone.EVENT_VIDEO_FRAME, handler)
drone.connect()
# drone.takeoff()
# time.sleep(5)
drone.down(50)
sleep(3)
drone.up(50)
sleep(3)
drone.down(0)
sleep(2)
drone.land()
sleep(5)
except Exception as ex:
print(ex)
show_exception(ex)
finally:
drone.quit()
print('end.')
if __name__ == '__main__':
test()
``` |
{
"source": "aaaasule/DjangoRESTTpp",
"score": 2
} |
#### File: DjangoRESTTpp/App/ext.py
```python
from flask_caching import Cache
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
migrate = Migrate()
cache = Cache()
def init_ext(app):
db.init_app(app)
migrate.init_app(app, db)
cache.init_app(app)
```
#### File: DjangoRESTTpp/App/__init__.py
```python
from flask import Flask
from App.apis import init_api
from App.ext import init_ext
from App.middleware import load_middleware
from App.settings import envs
from App.views import init_views
def create_app(env):
app = Flask(__name__)
app.config.from_object(envs.get(env))
init_ext(app)
init_api(app)
load_middleware(app)
init_views(app)
return app
```
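A minimal entry script for the factory above — a sketch assuming the package layout shown, with `"develop"` picked from the `envs` mapping in settings.py:
```python
from App import create_app

app = create_app("develop")  # or "test" / "product", per settings.envs

if __name__ == "__main__":
    app.run()
```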
#### File: models/movie_user/movie_user_model.py
```python
from werkzeug.security import generate_password_hash, check_password_hash
from App.ext import db
from App.models import ModelBase
from App.models.movie_user.model_constant import PERMISSION_NONE
class MovieUserModel(ModelBase):
username = db.Column(db.String(32), unique=True)
_password = db.Column(db.String(256))
phone = db.Column(db.Integer, unique=True)
is_delete = db.Column(db.Boolean, default=False)
permission = db.Column(db.Integer, default=PERMISSION_NONE)
@property
def password(self):
raise Exception("can't access")
@password.setter
def password(self, password_value):
self._password = generate_password_hash(password_value)
def check_password(self, password_value):
return check_password_hash(self._password, password_value)
```
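The write-only password pattern above can be exercised without a database. A self-contained sketch of the same round trip:
```python
from werkzeug.security import check_password_hash, generate_password_hash

class User:
    _password = None

    @property
    def password(self):
        raise Exception("can't access")  # write-only, as in the model above

    @password.setter
    def password(self, value):
        self._password = generate_password_hash(value)

    def check_password(self, value):
        return check_password_hash(self._password, value)

u = User()
u.password = "s3cret"
print(u.check_password("s3cret"), u.check_password("wrong"))  # True False
```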
#### File: DjangoRESTTpp/App/settings.py
```python
def create_url(info):
engine = info.get("engine")
driver = info.get("driver")
user = info.get("user")
password = info.get("password")
host = info.get("host")
port = info.get("port")
db = info.get("db")
return "{}+{}://{}:{}@{}:{}/{}".format(engine, driver, user, password, host, port, db)
class Config(object):
DEBUG = False
SECRET_KEY = "<KEY>"
SQLALCHEMY_TRACK_MODIFICATIONS = False
    # Cache settings
CACHE_TYPE = "simple"
class DevelopConfig(Config):
DEBUG = True
info = {
"engine": "mysql",
"driver": "pymysql",
"user": "root",
"password": "<PASSWORD>",
"host": "localhost",
"port": 3306,
"db": "FlaskTpp"
}
SQLALCHEMY_DATABASE_URI = create_url(info)
class TestConfig(Config):
pass
class ProductConfig(Config):
pass
envs = {
"develop": DevelopConfig,
"test": TestConfig,
"product": ProductConfig
}
``` |