id (stringlengths 2-8) | text (stringlengths 16-264k) | dataset_id (stringclasses: 1 value)
---|---|---
1608600
|
# -*- coding: utf-8 -*-
#############################################################################
# #
# <NAME> GMBH #
# STUTTGART #
# #
# Alle Rechte vorbehalten - All rights reserved #
# #
#############################################################################
# ____ ____ _____ ______ __ __ #
# / __ ) / __ \/ ___/ / ____// / / / #
# / __ |/ / / /\__ \ / / / /_/ / #
# / /_/ // /_/ /___/ // /___ / __ / #
# /_____/ \____//____/ \____//_/ /_/ #
# #
#############################################################################
# This script implements the web service for performing the CAN-Configuration
# test.
# Base path for the service: /hyapi/service/
# Service endpoints:
#   /data-stream-gui   : Demos data streaming on a simple web page with a counter
#   /data-stream-cli   : Streams data from a text file
#   /data-stream-cli-2 : Streams data from an A2L file
###############################################################################
from flask import current_app as app
from flask import make_response,Blueprint
from flask import request
from flask import Flask, Response
import subprocess,os,shutil
import uuid
import json
import time
from datetime import datetime
###############################################################################
dataStream_bp = Blueprint('dataStream_bp', __name__,
template_folder='templates',
static_folder='static',
url_prefix='/hyapi/service')
"""
Unique identifier generated once for this service instance.
"""
REQUEST_ID = str(uuid.uuid4())
"""
Import the service configuration, falling back to the local module when the
package layout is unavailable.
"""
try:
from application import HyApi_constants as CONSTANTS
print("INFO: GEN_UML: Imported configuration from package successfully.")
except ImportError:
import HyApi_constants as CONSTANTS
print("DEBUG: GEN_UML: Running in Safe Mode: Imported alternative configuration.")
from . import DS_constants as Const
DEMO_TXT_FILE = Const.DEMO_TXT_FILE
DEMO_A2L_FILE = Const.DEMO_A2L_FILE
################################ Init Databases ##############################
try:
from application import Data_Controller as dc
print("INFO: CAN_TEST: DB Controller initialized successfully.")
except ImportError:
import Data_Controller as dc
print("DEBUG: CAN_TEST: Safe Mode: Imported alternative DBC.")
API_KEY_DB = dc.getApiKeysFromDb()
REQ_HISTORY = dc.getRequestHistory()
FILES_ON_SERVER = dc.getFilesOnServer()
def isInitSuccessful():
    if API_KEY_DB is False:
        return False
    if REQ_HISTORY is False:
        return False
    return True
"""
Returns timeStamp at the time of function call
"""
def getTimeStamp():
    return datetime.now().strftime("%Y-%m-%d %H:%M:%S")
"""
Function to create a db entry for the current file upload
writes the entry to the json db defined in the script
return : Boolean - indicating the status of write to json operation
"""
def createRequestDetails(fileName,fileID,md5Checksum,mStatus,mResponse,requestID):
    try:
        ext = fileName.rsplit(".",1)[1].upper()
    except IndexError:
        print("ERROR: CM_Validation: Could not extract file's extension.")
        ext = "-"
#create a dictionary with details of the uploaded file
requestDetails = {
"FileName" :fileName,
"FileType" :ext,
"FileID" :fileID,
"RequestID" :requestID,
"MD5" :md5Checksum,
"Status" :mStatus,
"HttpResponse" :mResponse,
"timeStamp" :getTimeStamp()
}
if dc.updateHistory(requestDetails,requestID):
print("INFO: CM_Validation: History log updated.")
else:
print("ERROR: CM_Validation: Request could not be added to history log.")
return requestDetails
"""
apikeyInfoFunc
authenticates the apiKey passed in the request.
Passes user value directly to the function configured with the endpoint.
param : accessID : apiKey
returns: user corresponding to the apiKey.
"""
def isAccessIdAuthentic(accessID):
API_KEY_DB = dc.getApiKeysFromDb()
if API_KEY_DB != False:
if accessID in API_KEY_DB.keys():
customerDataDict = API_KEY_DB[accessID]
print("INFO: CM_Validation: Access ID authenticated. Requester identified as: "+ customerDataDict["customer"])
return True
else:
print("ERROR: CM_Validation: Invalid AccessID, request denied.")
return False
def getAccessId():
# get access-id and signature from the request.
# if none provided set it to empty string.
# an error will be raised subsequently.
try:
accessID = request.args.get('access-id')
print("INFO: CM_Validation: Access-id received: " + str(accessID))
except:
accessID = ""
print("ERROR: CM_Validation: Access-id not available in the request.")
return accessID
def stream_template(template_name, **context):
# http://flask.pocoo.org/docs/patterns/streaming/#streaming-from-templates
app.update_template_context(context)
t = app.jinja_env.get_template(template_name)
rv = t.stream(context)
# uncomment if you don't need immediate reaction
##rv.enable_buffering(5)
return rv
@dataStream_bp.route('/data-stream-gui')
def index():
def g():
for i in range(0,10000):
time.sleep(.1) # an artificial delay
yield i
return Response(stream_template('index.html', data=g()))
@dataStream_bp.route('/data-stream-cli')
def streamTxtFile():
def generate():
with open(DEMO_TXT_FILE) as file:
for i in file:
time.sleep(.1) # an artificial delay
yield i
return Response(generate(), mimetype='text/csv')
@dataStream_bp.route('/data-stream-cli-2')
def streamA2LFile():
def generate():
with open(DEMO_A2L_FILE) as file:
for i in file:
time.sleep(.1) # an artificial delay
yield i
return Response(generate(), mimetype='text/csv')
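# ---------------------------------------------------------------------------
# A minimal consumption sketch (not part of the original service). It assumes
# the blueprint is registered on an app listening at http://localhost:5000,
# which is a hypothetical address, and shows how a client reads the chunked
# response line by line instead of buffering the whole body.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import requests
    url = "http://localhost:5000/hyapi/service/data-stream-cli"
    with requests.get(url, stream=True) as resp:
        # iter_lines yields each streamed line as soon as it arrives
        for line in resp.iter_lines(decode_unicode=True):
            if line:
                print(line)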
|
StarcoderdataPython
|
3558498
|
import sys
from threading import Thread
sys.path.append('/Users/rodrigobresan/Documents/dev/github/anti_spoofing/spoopy/spoopy')
import os
import cv2
from tools.file_utils import file_helper
def extract_rbd_saliency_folder(folder_path, output_root):
frames = file_helper.get_frames_from_folder(folder_path)
threads = []
for frame in frames:
path_frame = os.path.join(folder_path, frame)
output_path = os.path.join(output_root, frame)
thread_item = Thread(target=extract_rbd_saliency, args=(path_frame, output_path))
threads.append(thread_item)
thread_item.start()
for thread in threads:
thread.join()
def extract_rbd_saliency(file_path, output_path):
print('Before extracting for frame: ', file_path)
#rbd = saliency.get_saliency_rbd(file_path).astype('uint8')
#cv2.imwrite(output_path, rbd)
print('Done extracting saliency')
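# A sketch of an alternative dispatch (not in the original file): the folder
# helper above starts one thread per frame, which can explode for large
# folders. ThreadPoolExecutor bounds the number of concurrent workers; the
# max_workers value here is an arbitrary assumption.
from concurrent.futures import ThreadPoolExecutor

def extract_rbd_saliency_folder_pooled(folder_path, output_root, max_workers=8):
    frames = file_helper.get_frames_from_folder(folder_path)
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        for frame in frames:
            pool.submit(extract_rbd_saliency,
                        os.path.join(folder_path, frame),
                        os.path.join(output_root, frame))
    # leaving the with-block waits for all submitted jobs to finish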
if __name__ == '__main__':
extract_rbd_saliency_folder('/Users/rodrigobresan/Documents/dev/github/anti_spoofing/spoopy/spoopy/static/results/cbsr_2/cbsr_test/fake/24_HR_3/raw',
'/Users/rodrigobresan/Documents/dev/github/anti_spoofing/spoopy/spoopy/static/results/cbsr_2/cbsr_test/fake/24_HR_3/saliency_aligned')
|
StarcoderdataPython
|
3256644
|
"""Module containing the tests for the default scenario."""
# Standard Python Libraries
import os
# Third-Party Libraries
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ["MOLECULE_INVENTORY_FILE"]
).get_hosts("all")
@pytest.mark.parametrize(
"directory", [{"path": "/var/cyhy/cyhy-mailer", "mode": "0o755"}]
)
def test_packages(host, directory):
"""Test that the appropriate directories were created."""
assert host.file(directory["path"]).exists
assert host.file(directory["path"]).is_directory
assert oct(host.file(directory["path"]).mode) == directory["mode"]
@pytest.mark.parametrize("f", ["/var/cyhy/cyhy-mailer/docker-compose.yml"])
def test_command(host, f):
"""Test that appropriate files exist."""
assert host.file(f).exists
assert host.file(f).is_file
|
StarcoderdataPython
|
1697971
|
<reponame>seanandrews/DSHARP_CPDs
import os, sys, time
import numpy as np
from gen_mdl import gen_mdl
names = ['dx5_incl2', 'dx5_PA5', 'incl2_PA5', 'incl2_PA5_dx5',
'zr3_dx5', 'zr3_dy5', 'zr3_incl2', 'zr3_PA5']
pars = [ [37., 110., 150., 0.0, 1., 15., 0.5, 0.005, 0.000], # dx5mas/i+2
[35., 115., 150., 0.0, 1., 15., 0.5, 0.005, 0.000], # dy5mas/PA+5
[37., 115., 150., 0.0, 1., 15., 0.5, 0.000, 0.000], # i+2/PA+5
[37., 115., 150., 0.0, 1., 15., 0.5, 0.005, 0.000], # i+2/PA+5/dx5
[35., 110., 150., 0.3, 1., 15., 0.5, 0.005, 0.000], # zr3/dx5
[35., 110., 150., 0.3, 1., 15., 0.5, 0.000, 0.005], # zr3/dy5
[37., 110., 150., 0.3, 1., 15., 0.5, 0.000, 0.000], # zr3/i+2
[35., 115., 150., 0.3, 1., 15., 0.5, 0.000, 0.000] ] # zr3/PA+5
for i in range(len(names)):
bar = gen_mdl(pars[i], names[i])
|
StarcoderdataPython
|
53469
|
<reponame>voreille/plc_seg<filename>src/models/layers.py
import tensorflow as tf
class ResidualLayer2D(tf.keras.layers.Layer):
def __init__(self, *args, activation='relu', **kwargs):
super().__init__()
self.filters = args[0]
self.conv = tf.keras.layers.Conv2D(*args,
**kwargs,
activation=activation)
self.activation = activation
self.bn_1 = tf.keras.layers.BatchNormalization()
self.bn_2 = None
self.proj = None
def build(self, input_shape):
self.c_in = input_shape[1]
if input_shape[1] != self.filters:
self.proj = tf.keras.layers.Conv2D(self.filters,
1,
activation=self.activation)
self.bn_2 = tf.keras.layers.BatchNormalization()
    def call(self, x, training=None):
        if self.proj:
            return self.bn_1(self.conv(x), training=training) + self.bn_2(self.proj(x), training=training)
        else:
            return self.bn_1(self.conv(x), training=training) + x
class ResidualLayer(tf.keras.layers.Layer):
def __init__(self, *args, activation='relu', **kwargs):
super().__init__()
self.filters = args[0]
self.conv = tf.keras.layers.Conv3D(*args,
**kwargs,
activation=activation)
self.activation = activation
self.bn_1 = tf.keras.layers.BatchNormalization()
self.bn_2 = None
self.proj = None
def build(self, input_shape):
self.c_in = input_shape[1]
if input_shape[1] != self.filters:
self.proj = tf.keras.layers.Conv3D(self.filters,
1,
activation=self.activation)
self.bn_2 = tf.keras.layers.BatchNormalization()
def call(self, x, training=None):
if self.proj:
return self.bn_1(self.conv(x), training=training) + self.bn_2(self.proj(x), training=training)
else:
return self.bn_1(self.conv(x), training=training) + x
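# Note: this second ResidualLayer2D definition rebinds the name and shadows
# the first one above, so only this version is visible to importers.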
class ResidualLayer2D(tf.keras.layers.Layer):
def __init__(self, *args, activation='relu', **kwargs):
super().__init__()
self.filters = args[0]
self.conv = tf.keras.layers.Conv2D(*args,
**kwargs,
activation=activation)
self.activation = activation
self.bn_1 = tf.keras.layers.BatchNormalization()
self.bn_2 = None
self.proj = None
def build(self, input_shape):
self.c_in = input_shape[1]
if input_shape[1] != self.filters:
self.proj = tf.keras.layers.Conv2D(self.filters,
1,
activation=self.activation)
self.bn_2 = tf.keras.layers.BatchNormalization()
def call(self, x, training=None):
if self.proj:
return self.bn_1(self.conv(x), training=training) + self.bn_2(
self.proj(x), training=training)
else:
return self.bn_1(self.conv(x), training=training) + x
class SENormLayer(tf.keras.layers.Layer):
pass
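# A minimal smoke-test sketch (illustrative assumption: channels-last NHWC
# input, so build() compares the spatial axis input_shape[1] against the
# filter count; here 32 != 16 activates the projection branch).
if __name__ == "__main__":
    layer = ResidualLayer2D(16, 3, padding="same")
    x = tf.random.normal((2, 32, 32, 8))  # batch of 2, 32x32 maps, 8 channels
    y = layer(x, training=False)
    print(y.shape)  # expected: (2, 32, 32, 16)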
|
StarcoderdataPython
|
6479096
|
'''
Difference between Python's built-in __init__ and __new__ methods
'''
class display(object):
def __init__(self, *args, **kwargs):
print("init")
def __new__(cls, *args, **kwargs):
print("new")
a=display()
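# Note: as written above, only "new" is printed and `a` is None, because this
# __new__ never returns an instance, so Python never calls __init__.
# A small corrected variant showing the full construction protocol:
class display2(object):
    def __init__(self, *args, **kwargs):
        print("init")  # runs second, on the instance returned by __new__
    def __new__(cls, *args, **kwargs):
        print("new")   # runs first and must return the new instance
        return super().__new__(cls)
b = display2()  # prints "new" then "init"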
|
StarcoderdataPython
|
6501619
|
##
##
try:
import http.client as httpcl
except ImportError:
import httplib as httpcl
from dynamicserialize import DynamicSerializationManager
from dynamicserialize.dstypes.com.raytheon.uf.common.serialization.comm.response import ServerErrorResponse
from dynamicserialize.dstypes.com.raytheon.uf.common.serialization import SerializableExceptionWrapper
#
# Provides a Python-based interface for executing Thrift requests.
#
#
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 09/20/10 dgilling Initial Creation.
#
#
#
class ThriftClient:
# How to call this constructor:
# 1. Pass in all arguments separately (e.g.,
# ThriftClient.ThriftClient("localhost", 9581, "/services"))
# will return a Thrift client pointed at http://localhost:9581/services.
# 2. Pass in all arguments through the host string (e.g.,
# ThriftClient.ThriftClient("localhost:9581/services"))
# will return a Thrift client pointed at http://localhost:9581/services.
# 3. Pass in host/port arguments through the host string (e.g.,
# ThriftClient.ThriftClient("localhost:9581", "/services"))
# will return a Thrift client pointed at http://localhost:9581/services.
def __init__(self, host, port=9581, uri="/services"):
hostParts = host.split("/", 1)
if (len(hostParts) > 1):
hostString = hostParts[0]
self.__uri = "/" + hostParts[1]
self.__httpConn = httpcl.HTTPConnection(hostString)
else:
if (port is None):
self.__httpConn = httpcl.HTTPConnection(host)
else:
self.__httpConn = httpcl.HTTPConnection(host, port)
self.__uri = uri
self.__dsm = DynamicSerializationManager.DynamicSerializationManager()
def sendRequest(self, request, uri="/thrift"):
message = self.__dsm.serializeObject(request)
self.__httpConn.connect()
self.__httpConn.request("POST", self.__uri + uri, message)
response = self.__httpConn.getresponse()
if (response.status != 200):
raise ThriftRequestException("Unable to post request to server")
rval = self.__dsm.deserializeBytes(response.read())
self.__httpConn.close()
        # let's verify we have an instance of ServerErrorResponse.
        # IF we do, throw an exception up to the caller along
        # with the original Java stack trace.
        # ELSE: we have a valid response and pass it back
try:
forceError = rval.getException()
raise ThriftRequestException(forceError)
except AttributeError:
pass
return rval
class ThriftRequestException(Exception):
def __init__(self, value):
self.parameter = value
def __str__(self):
return repr(self.parameter)
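if __name__ == "__main__":
    # Usage sketch ("localhost" is a placeholder host): constructing the
    # client does not open a socket; the HTTP connection is only made inside
    # sendRequest(). Both documented forms below resolve to the same
    # endpoint, http://localhost:9581/services.
    client_a = ThriftClient("localhost", 9581, "/services")
    client_b = ThriftClient("localhost:9581/services")
    # response = client_b.sendRequest(some_dynamicserialize_request)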
|
StarcoderdataPython
|
334
|
<filename>lib/galaxy/tool_util/deps/container_resolvers/__init__.py<gh_stars>1-10
"""The module defines the abstract interface for resolving container images for tool execution."""
from abc import (
ABCMeta,
abstractmethod,
abstractproperty,
)
import six
from galaxy.util.dictifiable import Dictifiable
@six.python_2_unicode_compatible
@six.add_metaclass(ABCMeta)
class ContainerResolver(Dictifiable):
"""Description of a technique for resolving container images for tool execution."""
# Keys for dictification.
dict_collection_visible_keys = ['resolver_type', 'can_uninstall_dependencies']
can_uninstall_dependencies = False
def __init__(self, app_info=None, **kwds):
"""Default initializer for ``ContainerResolver`` subclasses."""
self.app_info = app_info
self.resolver_kwds = kwds
def _get_config_option(self, key, default=None):
"""Look in resolver-specific settings for option and then fallback to
global settings.
"""
if self.app_info and hasattr(self.app_info, key):
return getattr(self.app_info, key)
else:
return default
@abstractmethod
def resolve(self, enabled_container_types, tool_info, **kwds):
"""Find a container matching all supplied requirements for tool.
The supplied argument is a :class:`galaxy.tool_util.deps.containers.ToolInfo` description
of the tool and its requirements.
"""
@abstractproperty
def resolver_type(self):
"""Short label for the type of container resolution."""
def _container_type_enabled(self, container_description, enabled_container_types):
"""Return a boolean indicating if the specified container type is enabled."""
return container_description.type in enabled_container_types
def __str__(self):
return "%s[]" % self.__class__.__name__
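# A minimal concrete sketch (not part of Galaxy itself): the smallest class
# that satisfies the abstract interface above. It declines to resolve any
# container, which is a valid, if useless, implementation of the contract.
class NullContainerResolver(ContainerResolver):
    resolver_type = "null"

    def resolve(self, enabled_container_types, tool_info, **kwds):
        """Never find a container."""
        return None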
|
StarcoderdataPython
|
3309684
|
<gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-08-28 23:35
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('rememberTheCheese', '0014_auto_20160826_1818'),
]
operations = [
migrations.AlterField(
model_name='subtask',
name='deadline',
field=models.DateTimeField(default=datetime.datetime(2016, 8, 31, 23, 35, 49, 874765, tzinfo=utc), verbose_name='deadline'),
),
migrations.AlterField(
model_name='task',
name='deadline',
field=models.DateTimeField(default=datetime.datetime(2016, 8, 31, 23, 35, 49, 605022, tzinfo=utc), verbose_name='deadline'),
),
]
|
StarcoderdataPython
|
11218088
|
from text_parser import Scam_parser
from model import Gated_Transformer_XL
import config_text as config
from utils import shuffle_ragged_2d, inputs_to_labels
import numpy as np
import tensorflow as tf
import argparse
import os
import pathlib
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-np', '--npy_dir', type=str, default='npy_text',
help='Directory where the npy files are stored')
arg_parser.add_argument('-ch', '--checkpoint_dir', type=str, default='checkpoints_text',
help='Directory where the saved weights will be stored')
arg_parser.add_argument('-p', '--checkpoint_period', type=int, default=1,
help='Number of epochs between saved checkpoints')
arg_parser.add_argument('-n', '--n_files', type=int, default=None,
help='Number of dataset files to take into account (default: all)')
arg_parser.add_argument('-w', '--weights', type=str,
default=None, help='Path to saved model weights')
arg_parser.add_argument('-o', '--optimizer', type=str,
default=None, help='Path to saved optimizer weights')
args = arg_parser.parse_args()
assert pathlib.Path(args.npy_dir).is_dir()
if pathlib.Path(args.checkpoint_dir).exists():
assert pathlib.Path(args.checkpoint_dir).is_dir()
else:
pathlib.Path(args.checkpoint_dir).mkdir(parents=True, exist_ok=True)
assert isinstance(args.checkpoint_period, int)
assert args.checkpoint_period > 0
    if args.n_files is not None:
        assert isinstance(args.n_files, int)
        assert args.n_files > 0
    if args.weights is not None:
        assert pathlib.Path(args.weights).is_file()
        assert args.optimizer is not None
        assert pathlib.Path(args.optimizer).is_file()
# ============================================================
# ============================================================
tf.config.experimental_run_functions_eagerly(False)
scam_parser = Scam_parser.build_from_config(config)
print('Loading dataset...')
dataset = scam_parser.get_tf_dataset(file_directory=args.npy_dir,
batch_size=config.batch_size,
n_samples=args.n_files)
batches_per_epoch = tf.data.experimental.cardinality(dataset).numpy()
assert batches_per_epoch > 0
print(f'Loaded dataset with {batches_per_epoch} batches per epoch')
loss_metric = tf.keras.metrics.Mean(name='loss')
acc_metric = tf.keras.metrics.SparseCategoricalAccuracy(name='acc')
model, optimizer = Gated_Transformer_XL.build_from_config(
config, args.weights)
@tf.function
def first_train_step(inputs, labels):
with tf.GradientTape() as tape:
logits, mem_list = model(inputs=inputs,
mem_list=None,
next_mem_len=None,
training=True)
loss, pad_mask = model.get_loss(logits=logits, labels=labels)
gradients = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
outputs = tf.nn.softmax(logits, axis=-1)
# outputs -> (batch_size, seq_len, num_classes)
non_padded_labels = tf.boolean_mask(labels, pad_mask)
non_padded_outputs = tf.boolean_mask(outputs, pad_mask)
loss_metric(loss)
acc_metric(non_padded_labels, non_padded_outputs)
return mem_list
@tf.function
def train_step(inputs, labels, mem_list):
with tf.GradientTape() as tape:
logits, next_mem_list, attention_weight_list, attention_loss_list = model(
inputs=inputs,
mem_list=mem_list,
next_mem_len=mem_len,
training=True
)
attention_loss = 4 * tf.math.reduce_mean(attention_loss_list)
loss, pad_mask = model.get_loss(
logits=logits,
labels=labels,
attention_loss=attention_loss
)
gradients = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
outputs = tf.nn.softmax(logits, axis=-1)
# outputs -> (batch_size, seq_len, n_classes)
non_padded_labels = tf.boolean_mask(labels, pad_mask)
non_padded_outputs = tf.boolean_mask(outputs, pad_mask)
loss_metric(loss)
acc_metric(non_padded_labels, non_padded_outputs)
return next_mem_list
# =====================================================================================
# =====================================================================================
# =====================================================================================
# ============================== TRAINING LOOP ====================================
# =====================================================================================
# =====================================================================================
# =====================================================================================
n_epochs = config.n_epochs
pad_idx = config.pad_idx
seq_len = config.seq_len
mem_len = config.mem_len
max_segs_per_batch = config.max_segs_per_batch
# =======================================
for epoch in range(1, n_epochs + 1):
print(f"\nEpoch {epoch}/{n_epochs}")
progress_bar = tf.keras.utils.Progbar(
batches_per_epoch, stateful_metrics=['acc', 'loss'])
n_skipped = 0
loss_metric.reset_states()
acc_metric.reset_states()
for batch_ragged in dataset:
batch = shuffle_ragged_2d(batch_ragged, pad_idx, 2)[0]
# batch -> (batch_size, max_len)
batch_labels = inputs_to_labels(batch, pad_idx)
# batch_labels -> (batch_size, max_len)
max_len = batch.shape[1]
if max_len < seq_len + 10:
n_skipped += 1
continue
# ======================================================================================
# train on random slices of the batch
# ======================================================================================
segs_per_batch = min(max_segs_per_batch, max_len // seq_len)
mem_list = None
start = 0
for _ in range(segs_per_batch):
seg = batch[:, start: start + seq_len]
# seg -> (batch_size, seq_len)
seg_labels = batch_labels[:, start: start + seq_len]
# seg_labels -> (batch_size, seq_len)
# ============================
# training takes place here
# ============================
mem_list = train_step(inputs=seg,
labels=seg_labels,
mem_list=mem_list)
start += seq_len
# training for this batch is over
values = [('acc', acc_metric.result()),
('loss', loss_metric.result())]
progress_bar.add(1, values=values)
print(f'\nSkipped {n_skipped} segments')
if epoch % args.checkpoint_period == 0:
checkpoint_path = os.path.join(
args.checkpoint_dir, f'checkpoint{epoch}.h5')
model.save_weights(checkpoint_path)
optimizer_path = os.path.join(
args.checkpoint_dir, f'optimizer{epoch}.npy')
np.save(optimizer_path, optimizer.get_weights())
print(checkpoint_path)
print(optimizer_path)
# ======================================
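    # A toy, standalone illustration (not invoked above) of the segment walk
    # used in the batch loop: a long batch is consumed in consecutive seq_len
    # windows while the memory handle returned by train_step is threaded from
    # one segment into the next.
    def _segment_walk_demo(demo_max_len=10, demo_seq_len=3, demo_max_segs=4):
        segs = min(demo_max_segs, demo_max_len // demo_seq_len)
        start, mem = 0, None
        for _ in range(segs):
            print(f"segment [{start}:{start + demo_seq_len}] carries mem={mem}")
            mem = f"mem@{start + demo_seq_len}"  # stand-in for mem_list
            start += demo_seq_len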
|
StarcoderdataPython
|
6630199
|
<filename>akshare/pro/client.py<gh_stars>1000+
# -*- coding:utf-8 -*-
#!/usr/bin/env python
"""
Date: 2019/11/10 22:52
Desc: Data API source code
"""
from functools import partial
from urllib import parse
import pandas as pd
import requests
class DataApi:
__token = ""
__http_url = "https://api.qhkch.com"
def __init__(self, token, timeout=10):
"""
        Initializer
        :param token: API token, used for user authentication
        :type token: str
        :param timeout: timeout setting
        :type timeout: int
"""
self.__token = token
self.__timeout = timeout
def query(self, api_name, fields="", **kwargs):
"""
        :param api_name: name of the API endpoint to call
        :type api_name: str
        :param fields: the fields to retrieve
        :type fields: str
        :param kwargs: additional required parameters
        :type kwargs: key-value pairs
        :return: the requested data
:rtype: dict or pandas.DataFrame
"""
headers = {
"X-Token": self.__token,
}
url = parse.urljoin(self.__http_url, "/".join([api_name, *kwargs.values()]))
res = requests.get(url, headers=headers, timeout=self.__timeout)
if res.status_code != 200:
            raise Exception("Connection error: please check whether your token has expired and whether your parameters are correct")
data_json = res.json()
if fields == "":
try:
return pd.DataFrame(data_json)
except ValueError as e:
result_df = pd.DataFrame.from_dict(data_json, orient="index", columns=[api_name])
return result_df
        else:  # extra handling for this endpoint
if api_name == "variety_all_positions":
big_df = pd.DataFrame()
for item in data_json[fields].keys():
temp_df = pd.DataFrame(data_json[fields][item])
temp_df["code"] = item
                    big_df = pd.concat([big_df, temp_df], ignore_index=True)
big_df.reset_index(inplace=True, drop=True)
return big_df
else:
return pd.DataFrame(data_json[fields])
def __getattr__(self, name):
return partial(self.query, name)
if __name__ == '__main__':
pass
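    # Usage sketch ("your-token" and the endpoint name are placeholders, not
    # verified against the real API): attribute access is routed through
    # __getattr__ into query(), so the attribute name becomes the endpoint.
    api = DataApi(token="your-token")
    # df = api.some_endpoint(fields="", code="rb2010")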
|
StarcoderdataPython
|
6592723
|
import pyqt_designer_plugin_entry_points
print("(pyqt_designer_plugin_entry_points hook)")
globals().update(**pyqt_designer_plugin_entry_points.find_widgets())
|
StarcoderdataPython
|
5109606
|
import warnings
warnings.simplefilter(action="ignore", category=RuntimeWarning)
warnings.simplefilter(action="ignore", category=PendingDeprecationWarning)
import pytest
import os
from tempfile import NamedTemporaryFile, mkdtemp
from schicexplorer import scHicAdjustMatrix
ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test-data/")
import cooler
import numpy.testing as nt
from hicmatrix import HiCMatrix as hm
def test_adjust_matrices_keep():
outfile = NamedTemporaryFile(suffix='.scool', delete=False)
outfile.close()
chromosomes_to_keep = "chr1 chr2 chr3 chr4 chr5 chr6 chr7 chr8 chr9 chr10 chr11 chr12 chr13 chr14 chr15 chr16 chr17 chr18 chr19 chrX"
args = "--matrix {} --outFileName {} --action {} --chromosomes {} -t {}".format(ROOT + 'test_matrix.scool',
outfile.name, 'keep', chromosomes_to_keep, 1).split()
scHicAdjustMatrix.main(args)
test_data_matrix = ROOT + 'scHicAdjustMatrix/test_matrix_adjusted.scool'
matrices_list_test_data = cooler.fileops.list_scool_cells(test_data_matrix)
matrices_list_created = cooler.fileops.list_scool_cells(outfile.name)
matrices_list_test_data = sorted(matrices_list_test_data)
matrices_list_created = sorted(matrices_list_created)
chromosomes_to_keep = sorted(chromosomes_to_keep.split(' '))
for test_matrix, created_matrix in zip(matrices_list_test_data, matrices_list_created):
test = hm.hiCMatrix(test_data_matrix + '::' + test_matrix)
created = hm.hiCMatrix(outfile.name + '::' + created_matrix)
nt.assert_almost_equal(test.matrix.data, created.matrix.data, decimal=5)
nt.assert_equal(test.cut_intervals, created.cut_intervals)
chromosomes_list_test = sorted(cooler.Cooler(test_data_matrix + '::' + test_matrix).chromnames)
chromosomes_list_created = sorted(cooler.Cooler(outfile.name + '::' + created_matrix).chromnames)
assert chromosomes_list_test == chromosomes_list_created
assert chromosomes_list_created == chromosomes_to_keep
chromosomes_list_test_original = sorted(cooler.Cooler(ROOT + 'test_matrix.scool' + '::' + test_matrix).chromnames)
assert chromosomes_list_created != chromosomes_list_test_original
os.unlink(outfile.name)
def test_adjust_matrices_remove():
outfile = NamedTemporaryFile(suffix='.scool', delete=False)
outfile.close()
chromosomes_to_remove = "chr1 chr2"
args = "--matrix {} --outFileName {} --action {} --chromosomes {} -t {}".format(ROOT + 'test_matrix.scool',
outfile.name, 'remove', chromosomes_to_remove, 2).split()
scHicAdjustMatrix.main(args)
test_data_matrix = ROOT + 'scHicAdjustMatrix/test_matrix_adjusted_remove.scool'
matrices_list_test_data = cooler.fileops.list_scool_cells(test_data_matrix)
matrices_list_created = cooler.fileops.list_scool_cells(outfile.name)
matrices_list_test_data = sorted(matrices_list_test_data)
matrices_list_created = sorted(matrices_list_created)
chromosomes_to_remove = sorted(chromosomes_to_remove.split(' '))
for test_matrix, created_matrix in zip(matrices_list_test_data, matrices_list_created):
test = hm.hiCMatrix(test_data_matrix + '::' + test_matrix)
created = hm.hiCMatrix(outfile.name + '::' + created_matrix)
nt.assert_almost_equal(test.matrix.data, created.matrix.data, decimal=5)
nt.assert_equal(test.cut_intervals, created.cut_intervals)
chromosomes_list_test = sorted(cooler.Cooler(test_data_matrix + '::' + test_matrix).chromnames)
chromosomes_list_created = sorted(cooler.Cooler(outfile.name + '::' + created_matrix).chromnames)
assert chromosomes_list_test == chromosomes_list_created
assert chromosomes_to_remove[0] not in chromosomes_list_created
assert chromosomes_to_remove[1] not in chromosomes_list_created
chromosomes_list_test_original = sorted(cooler.Cooler(ROOT + 'test_matrix.scool' + '::' + test_matrix).chromnames)
assert chromosomes_list_created != chromosomes_list_test_original
os.unlink(outfile.name)
def test_version():
args = "--version".split()
with pytest.raises(SystemExit) as pytest_wrapped_e:
scHicAdjustMatrix.main(args)
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 0
def test_help():
args = "--help".split()
with pytest.raises(SystemExit) as pytest_wrapped_e:
scHicAdjustMatrix.main(args)
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 0
|
StarcoderdataPython
|
3333153
|
<reponame>etinaude/python-sorting-algorithms<filename>sort.py
'''
each algorithm is in a function which takes only an unsorted array as a parameter and returns the sorted array
there are notes before each algorithm explaining it
n = number of elements to sort
d = number of digits in the largest element
r = range of elements (largest - smallest)
k = size of key
the best, average and worst cases show the trend of how each algorithm will perform when increasing these values.
I will continue improving this and adding new algorithms
'''
from random import seed
from random import random
from random import randint
import sys
import pickle
import math
import os
import time
import threading
import concurrent.futures
try:
import praw
import reddit_auth
except ImportError:
    print("Please ensure that PRAW is installed and that a file named reddit_auth.py exists; read the fake news sort for more info")
array = []
og = []
# initialize
def srt():
global og
size = 10
rng = 100
for _ in range(size):
num = randint(1, rng)
array.append(num)
og.append(num)
# store data
def store(data):
with open('outfile', 'wb') as fp:
pickle.dump(data, fp)
def bubble(array):
'''
Overview:
        swap consecutive numbers until it's sorted
Best:
n
Average:
n^2
Worst:
n^2
Stable:
Yes
    Comparison:
Yes
Uses:
Don't use it unless you are teaching the basics of sorting
'''
# big loop
for _ in range(len(array)-1):
# small loop
for j in range(len(array)-1):
# check and perform swap
if array[j] > array[j+1]:
temp = array[j]
array[j] = array[j+1]
array[j+1] = temp
return(array)
def counting(array):
'''
Overview:
        count the number of items placed before the place of each item
Best:
n+r
Average:
n+r
Worst:
n+r
Stable:
Yes
    Comparison:
No
Uses:
        Very good sort for integers, especially if the data set has a small range
'''
total = [0]
count = [0]
out = [0]
big = array[0]
for i in array:
if i > big:
big = i
out.append(0)
rng = big
for i in range(rng):
count.append(0)
# count frequency
for i in range(len(array)):
count[array[i]] += 1
total[0] = count[0]
# add frequency
for i in range(1, rng):
total.append(count[i]+total[i-1])
total[0] = 0
# insert into final array
for i in array:
out[total[i-1]] = i
total[i-1] += 1
return(out[:-1])
def quick(array):
'''
Overview:
Pivot items around
Best:
n*log(n)
Average:
n*log(n)
Worst:
n^2
Stable:
no
    Comparison:
yes
Uses:
good general algorithm, slower than merge sort on average but uses less space (usually)
Notes:
I need to improve this, it is implemented through a merge technique but uses quick sort to split
'''
low = []
high = []
    # end recursion
if len(array) <= 1:
return array
else:
# set pivot
pivot = array[0]
        # separate items
for i in array[1:]:
if i < pivot:
low.append(i)
else:
high.append(i)
# quick sort low items
low = quick(low)
# quick sort high items
high = quick(high)
array = low + [pivot]+high
    # return the fully sorted array
return array
def radix(array):
'''
Overview:
Sort based on each digit of an integer
Best:
n*(k/d)
Average:
n*(k/d)
Worst:
n*(k/d)
Stable:
Yes
    Comparison:
No
Uses:
        Very good sort for integers, especially if the numbers have few digits
    Notes:
        this is an LSD radix; I might add an MSD version later
'''
big = array[0]
for i in array:
if i > big:
big = i
rng = big
# find the number of iterations
num = math.ceil(math.log10(rng))
# loop for each digit
for i in range(num, 0, -1):
buckets = [[], [], [], [], [], [], [], [], [], []]
        # separate into similar digit arrays
for j in array:
stringz = str(j)
# format the number correctly
if num > len(stringz):
stringz = (num-len(stringz))*"0" + stringz
buckets[int(stringz[i-1:i])].append(j)
array = []
        # add numbers back into the original array
for k in buckets:
array.extend(k)
return(array)
def insertion(array):
'''
Overview:
Insert each number into its correct place
Best:
n
Average:
n^2
Worst:
n^2
Stable:
Yes
    Comparison:
yes
Uses:
        not great, but easy to code; use block sort instead if you need it to be stable and are short on memory
'''
out = [array[0]]
    # loop through the original array
for i in array[1:]:
j = 0
# give where to slot the item
while out[j] < i:
j += 1
if len(out) <= j:
break
if len(out) <= j:
out.append(i)
else:
out.insert(j, i)
return(out)
def select(array):
'''
Overview:
Select the smallest item one by one
Best:
n^2
Average:
n^2
Worst:
n^2
Stable:
No
    Comparison:
Yes
Uses:
        Don't; it's similar to insertion sort but worse, since its best case is worse and it's not stable
'''
out = []
# loop until the start array is empty
while len(array) > 0:
minimum = array[0]
        # find the smallest item and add it to the end array
for i in array:
if i < minimum:
minimum = i
out.append(minimum)
array.remove(minimum)
return(out)
def merge(array):
'''
Overview:
splits into smaller and smaller groups and merges them back together in order
    Best:
        n*log(n)
    Average:
        n*log(n)
    Worst:
        n*log(n)
Stable:
yes
    Comparison:
yes
Uses:
Very good general sorting algorithm
notes:
one of the most commonly used sorting algorithms
'''
out = []
if len(array) <= 1:
return array
else:
start = merge(array[len(array)//2:])
end = merge(array[:len(array)//2])
while len(start)+len(end) >= 1:
if len(start) == 0:
out.extend(end)
break
if len(end) == 0:
out.extend(start)
break
if start[0] < end[0]:
out.append(start[0])
start = start[1:]
else:
out.append(end[0])
end = end[1:]
return out
def heap(array):
'''
Overview:
        sorts by creating a max heap continuously and removing the root
    Best:
        n*log(n)
    Average:
        n*log(n)
    Worst:
        n*log(n)
Stable:
no
    Comparison:
yes
Uses:
good general sorting algorithm
'''
out = []
    # create initial heap
for i in range(int(len(array)/2) - 1, -1, -1):
array = heapify(array, i)
    # remove sorted elements
for i in range(int(len(array))-1, 0, -1):
out.append(array[0])
array = array[1:]
array = heapify(array)
out.append(array[0])
return out[::-1]
def pancake(array):
'''
Overview:
        put your spatula under the largest one and flip the stack of pancakes so the largest one is on top, then flip the unsorted ones upside down so the largest is at the bottom
Best:
        (15/14)n
    Average:
        roughly 1.5n (exact bound unknown)
Worst:
(18/11)n
Stable:
No
    Comparison:
yes
Uses:
DNA sorting in a "bacterial computer"
notes:
        based on flipping pancakes, famous paper by <NAME>; E. coli can be used to flip DNA and sort it!
'''
length = len(array)
while length > 0:
large = 0
for i in range(length):
if array[i] > array[large]:
large = i
array = array[:large+1][::-1]+array[large+1:]
array = array[:length][::-1]+array[length:]
length -= 1
return(array)
# ignore: helper needed by heap sort
def heapify(array, i=0):
large = i
left = 2 * i + 1
right = 2 * i + 2
# test if children need to be swapped
if left < len(array) and array[i] < array[left]:
large = left
if right < len(array) and array[large] < array[right]:
large = right
# swap root
if large != i:
temp = array[i]
array[i] = array[large]
array[large] = temp
heapify(array, large)
return array
def bucket(array):
'''
Overview:
splits array into 10 buckets then sorts each
Best:
n*log(n)
Average:
n*log(n)
Worst:
n*log(n)
Stable:
no
    Comparison:
no
Uses:
        handles larger inputs than plain quick sort
Notes:
in this case it uses quick sort to sort each bucket but it can use any sort
'''
large = array[0]
for i in array:
if i > large:
large = i
large = round(math.log10(large))
temp = [[] for i in range(11)]
for i in array:
temp[(i) // (10 ** (large-1))].append(i)
array = []
for i in temp:
i = quick(i)
array.extend(i)
return array
def pigeonhole(array):
'''
Overview:
creates k pigeon holes, puts each item into a pigeon hole
Best:
n+k
Average:
n+k
Worst:
n+k
Stable:
yes
    Comparison:
no
Uses:
        many numbers, small range, e.g. 10000 numbers between 0 and 100
'''
large = array[0]
for i in array:
if large < i:
large = i
temp = [[] for i in range(large+2)]
for i in array:
temp[i].append(i)
array = []
for i in temp:
array.extend(i)
return array
#------Esoteric algorithms----#
"""
funny, absurd, useless, ridiculous algorithms, some aren't even technically algorithms.
"""
def bogo(array):
'''
Overview:
Pick random orders in the hope that one will work
Best:
n
Average:
n*n!
Worst:
FOREVER!!!
Stable:
No
    Comparison:
        kind of
    Uses:
        MEMES! It's closer to shuffling than a good sorting algorithm
    notes:
        It can go on forever so don't use it unless you are trying to show what not to do
'''
# WARNING this can take a LONG time only run with fewer than 10 items to sort
cont = True
# loop until the array is sorted
    while cont:
out = []
cont = False
# loop till the starting array has no items in it
while len(array) > 0:
# move a random item to the end array
item = randint(0, len(array)-1)
out.append(array[item])
array.remove(array[item])
# test to see if the array is sorted
for i in range(len(out)-1):
if out[i] > out[i+1]:
cont = True
break
array = []+out
return(out)
def intelligentDesign(array):
'''
Overview:
read the output or follow the link below
Best:
1
Average:
1
Worst:
1
Stable:
Yes
    Comparison:
No
Uses:
        religion
notes:
https://www.dangermouse.net/esoteric/intelligentdesignsort.html
'''
    lines = ["WOW! there's only a ", round(
        1/math.factorial(len(array)), 5)*100, "% chance that the array would show up in this order,\n"]
    lines.append(
        "that's WAAAY too small to be a coincidence; therefore a much more intelligent creature wanted it to be that way\n")
lines.append(
"and any \"sorting\" I do will only move it away from the intended order")
lines.append("BEHOLD! the perfect order:\n")
lines.append(array)
out = ""
for i in lines:
out += str(i)
return out
# aka solar bitflip sort
def miracle(array):
'''
Overview:
        hope a miracle or a solar flare flips the bits of data into order until it's sorted (only run on non-ECC memory)
Best:
n
Average:
n*n!
Worst:
forever
Stable:
Yes
    Comparison:
No
Uses:
pain
notes:
https://codoholicconfessions.wordpress.com/2017/05/21/strangest-sorting-algorithms/
'''
    while True:
        flipped = False
        for i in range(len(array) - 1):
            if array[i] > array[i+1]:
                flipped = True
                break
        if not flipped:
            return array
# threatening sort DO NOT USE NO MATTER WHAT EVER!
def threat(array):
'''
_____ ____ _ _ ____ _______ _____ _ _ _ _ _ _
| __ \ / __ \ | \ | |/ __ \__ __| | __ \| | | | \ | | | || |
| | | | | | | | \| | | | | | | | |__) | | | | \| | | || |
| | | | | | | | . ` | | | | | | | _ /| | | | . ` | | || |
| |__| | |__| | | |\ | |__| | | | | | \ \| |__| | |\ | |_||_|
|_____/ \____/ |_| \_|\____/ |_| |_| \_\\____/|_| \_| ( )( )
Overview:
deletes files till the user says the array is sorted (soon it deletes all files)
Best:
1
Average:
1
Worst:
1
Stable:
NO IN ANY SENSE OF THE WORD
    Comparison:
        No
    Uses:
        threats
    notes:
        based on one of 2 "stalin sorts", aka threatening sort; since I couldn't murder anyone I resorted to deleting files
also untested (obviously)
'''
print("\n\n\nIT IS HIGHLY RECOMMENDED YOU DONT USE THIS IT WILL DELETE YOUR FILES DO NOT CONTINUE")
print("type \"yes\" to continue")
response = input()
if response == "yes":
print("is this array sorted?")
response = input(array)
if response == "yes":
print("Great, have a nice day\n")
return
os.remove("../*")
print("WRONG! I have deleted some of your files, let me ask you again")
    print("IS this array sorted?")
    response = input()
    if response == "yes":
print("I knew it, bye\n")
return
os.remove("../../../../../../*")
print("how have you even gotten this far? you are an idiot for running this")
else:
print("Thank you for not making a terrible choice, have a nice day\n")
return ""
def stalin(array):
'''
Overview:
        eliminates items which are not in the correct order
Best:
n
Average:
n
Worst:
n
Stable:
Yes
    Comparison:
yes
Uses:
memes
notes:
only get about log(n)? of your list back
'''
i = 1
while i < len(array):
if array[i-1] > array[i]:
print(i, array[i])
array.pop(i)
i -= 1
i += 1
return(array)
def sassy(array):
'''
Overview:
is sassy
Best:
1
Average:
1
Worst:
1
Stable:
no
    Comparison:
no
Uses:
sass
'''
return "sort it your own damn self!"
def totally_original_sort(array):
    """eh, I'll look up what Python uses later (it's Timsort)"""
    return sorted(array)
def meme(array):
'''
Overview:
Selection sort based on proximity to the numbers 69 and 420
Best:
n^2
Average:
n^2
Worst:
n^2
Stable:
No
    Comparison:
Yes
Uses:
always use this
'''
out = []
prox = [0]*len(array)
for i in range(len(array)):
if abs(array[i]-420) < abs(array[i]-69):
prox[i] = abs(array[i]-420)
else:
prox[i] = abs(array[i]-69)
while len(array) > 0:
minimum = 0
        # find the smallest item and add it to the end array
for i in range(len(prox)):
if prox[i] < prox[minimum]:
minimum = i
out.append(array[minimum])
array.pop(minimum)
prox.pop(minimum)
return(out)
newarray = []
def sleep_sort(array):
'''
Overview:
new thread starts and sleeps for each item/10 add them into an array in the order they come back
Best:
n
Average:
n
Worst:
n
Stable:
Not even close in any sense of the word
    Comparison:
Nope
Notes:
        Proof that O(n) is not always better or faster. If the divisor changes it may cause the system to become unstable
'''
start = time.perf_counter()
thread_array = []
for i in array:
thread_array.append(threading.Thread(target=threads, args=[i]))
array = []
for i in thread_array:
i.start()
for i in thread_array:
i.join()
end = time.perf_counter()
print(end-start)
return newarray
def threads(item):
time.sleep(item/10)
newarray.append(item)
return item
def fake_news(array):
'''
Overview:
posts a question on reddit and takes the first response as the correctly sorted array with minimal checking
Best:
?
Average:
?
Worst:
?
Stable:
not at all
    Comparison:
sometimes I guess
Notes:
lets see how good reddit is at sorting
'''
'''
please create a reddit script using this page https://old.reddit.com/prefs/apps/
create a file named reddit_auth.py which contains the following class
replace variables such as $ID with the details from the reddit script which was created.
class secret:
client_id = "$ID"
client_secret = "$SECRET"
user_agent = "my user agent"
username = "$USERNAME"
        password = "<PASSWORD>"
'''
reddit = praw.Reddit(client_id=reddit_auth.secret.client_id,
client_secret=reddit_auth.secret.client_secret,
user_agent=reddit_auth.secret.user_agent,
username=reddit_auth.secret.username,
password=reddit_auth.secret.password)
print("Submitting as:", reddit.user.me())
sub = reddit.subreddit("HelpSort")
post = sub.submit("Please help me sort this array", str(array))
id = post.id
comment = post.comments
# for i in sub.new(limit=1):
# post = i
# id = i.id
# comment = post.comments
waiting = True
delay = 1
start_time = time.time()
    # the default value of sad is true, the same goes for this variable
sad = True
    # while no valid response has been given
while sad:
# while the comments are empty
while waiting:
comment = reddit.submission(id).comments
# check if there is a comment
if list(comment) != []:
print(comment[0].body)
waiting = False
else:
print("No reply yet :(")
time.sleep(delay)
# add one second to the time to delay before next check, to reduce the number of failed checks
if delay < 1800:
delay = delay+1
# time to give up
if time.time()-start_time > 7200:
return []
# take the first comment
comment = comment[0]
# check if each element is in the array
for i in array:
if comment.body.find(str(i)) == -1:
comment.delete()
sad = True
waiting = True
print("SAD")
break
else:
sad = False
    # Yes I know there are better ways of writing this but "not sad" reads better
if not(sad):
print(comment.body)
return []
#------not yet implemented----#
def sudo_bogo(array):
pass
# coming soon
def tim(array):
pass
# coming soon
def Bozosort():
# NO, just dont use this
pass
def abacus():
# https://www.dangermouse.net/esoteric/abacussort.html
pass
def jinglesort():
# https://www.youtube.com/watch?v=kbzIbvWsDb0
pass
srt()
t0 = time.time()
result = fake_news(array)
t1 = time.time()
'''
with open("test", "w") as test:
for i in result:
test.write(str(i)+"\n")
og = sorted(og)
print(og, "sorted")
with open("sorted", "w") as sorted_file:
for i in og:
sorted_file.write(str(i)+"\n")
# '''
'''
print(array, "unsorted")
print(result, "implemented")
# '''
print(t1-t0, "time")
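# A small verification sketch (added illustration): check a subset of the
# deterministic algorithms against Python's built-in sorted(). Each call gets
# its own copy of the trial array because several of these functions mutate
# their input.
for algo in (bubble, quick, merge, insertion, select, pancake, pigeonhole):
    trial = [randint(1, 100) for _ in range(10)]
    assert algo(list(trial)) == sorted(trial), algo.__name__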
|
StarcoderdataPython
|
1822046
|
<filename>turbopotato/media.py
from collections import namedtuple
from copy import copy
import logging
import os
from pathlib import Path, PurePosixPath
from typing import List, Union
import PyInquirer
from turbopotato.arguments import args
from turbopotato.exceptions import NoMediaFiles
from turbopotato.media_defs import clean_path_part
from turbopotato.media_defs import MediaNameParse
from turbopotato.media_defs import MediaType
from turbopotato.media_defs import QueryResult
from turbopotato.parser import parse
from turbopotato.query import DBQuery
from turbopotato.query import TMDBQuery
from turbopotato.query import TVDBQuery
from turbopotato.torrents import torrents
from turbopotato.transit import send_file
logger = logging.getLogger('media')
MEDIA_ROOT = PurePosixPath("/volume1/Media/")
DOCUMENTARY_SINGLES_PATH = MEDIA_ROOT / "Documentaries (Singles)"
DOCUMENTARY_SERIES_PATH = MEDIA_ROOT / "Documentaries (Series)"
COMEDY_PATH = MEDIA_ROOT / "Comedy"
MOVIES_PATH = MEDIA_ROOT / "Movies"
TV_SHOWS_PATH = MEDIA_ROOT / "TV Shows"
FileGroup = namedtuple('FileGroup', 'success files name')
class File:
def __init__(self, filepath: Path = None):
self.filepath = filepath
self.original_torrent = None
self.torrent_hash = None
self.success = False
self.skip = False
self.failure_reason = ''
self._parts: MediaNameParse = None
self.query: DBQuery = None
self._chosen_one: QueryResult = None
@property
def parts(self):
return self._parts
@parts.setter
def parts(self, v: MediaNameParse):
self.chosen_one = None
self._parts = v
@property
def chosen_one(self) -> Union[QueryResult, None]:
if self._chosen_one:
return self._chosen_one
if self.query and len(self.query.exact_matches) == 1:
self._chosen_one = self.query.exact_matches[0]
if self._chosen_one is None:
max_score_list = [r for r in self.query.fuzzy_matches
if r.fuzzy_match_score == max([r.fuzzy_match_score for r in self.query.fuzzy_matches])]
if len(max_score_list) == 1:
self._chosen_one = max_score_list[0]
return self._chosen_one
@chosen_one.setter
def chosen_one(self, v: QueryResult):
self._chosen_one = v
@property
def destination_directory(self) -> Union[PurePosixPath, None]:
if not self.chosen_one:
return None
if self.chosen_one.media_type is MediaType.MOVIE:
if self.chosen_one.title and self.chosen_one.year:
top_directory = f'{self.chosen_one.title} ({self.chosen_one.year})'
if self.chosen_one.is_comedy():
root = COMEDY_PATH
elif self.chosen_one.is_documentary():
root = DOCUMENTARY_SINGLES_PATH
else:
root = MOVIES_PATH
return PurePosixPath(root, clean_path_part(top_directory))
elif self.chosen_one.media_type is MediaType.SERIES:
if self.chosen_one.title and self.chosen_one.season != '':
show_directory = self.chosen_one.title
season_directory = f'Season {self.chosen_one.season}'
if self.chosen_one.is_documentary():
root = DOCUMENTARY_SERIES_PATH
else:
root = TV_SHOWS_PATH
return PurePosixPath(root, clean_path_part(show_directory), clean_path_part(season_directory))
return None
@property
def destination_filename(self) -> Union[str, None]:
if not self.chosen_one:
return None
if self.chosen_one.media_type is MediaType.MOVIE:
return clean_path_part(self.filepath.name)
else:
if all(getattr(self.chosen_one, a) != '' for a in ('title', 'season', 'episode', 'episode_name')):
return '%s - S%02dE%02d - %s%s' % (
clean_path_part(self.chosen_one.title),
int(clean_path_part(self.chosen_one.season)),
int(clean_path_part(self.chosen_one.episode)),
clean_path_part(self.chosen_one.episode_name),
clean_path_part(self.filepath.suffix)
)
return None
def identify_media(self):
query_precedence = (TMDBQuery(), TVDBQuery())
if self.parts.media_type is MediaType.SERIES:
query_precedence = tuple(reversed(query_precedence))
self.query = query_precedence[0].query(parts=self.parts)
if not self.query.is_matches:
self.query = query_precedence[1].query(parts=self.parts)
class Media:
def __init__(self):
self.files: Union[List[File], None] = list(map(File, args.files)) if args.files else None
if args.torrents:
self._find_torrent_for_each_file()
if not self.files:
raise NoMediaFiles
else:
logger.debug('Files to process:')
for file_group in self.get_file_groups():
logger.debug(f' {file_group.name}')
for file in file_group.files:
logger.debug(f' {file.filepath.name}')
def __iter__(self):
return iter(self.files)
def _find_torrent_for_each_file(self):
files_copy = copy(self.files)
self.files = list()
for file in files_copy:
            # traverse the filepath parts backwards trying to find the torrent by name.
            # as long as a torrent's name isn't changed, it will match the name of the file or one of its parent dirs
# torrent = next(filter(None, map(torrents.get_torrent, reversed(file.filepath.parts))), None)
torrent = None
for count in range(len(file.filepath.parts)-1, 0, -1):
if torrent := torrents.get_torrent_by_filepath('/'.join(file.filepath.parts[count:])):
break
if torrent is None:
logger.warning(f'Torrent not found. Skipping "{file.filepath}"')
continue
if torrents.is_transiting(torrent):
logger.warning(f'Torrent "({torrent.name})" is already transiting. Skipping "{file.filepath}"')
continue
if torrent.category == 'skip upload' and not args.interactive:
logger.warning(f'Torrent category is "{torrent.category}", Skipping "{file.filepath}"')
continue
logger.debug(f'Using torrent "{torrent.name}" for "{file.filepath}"')
file.torrent_hash = torrent.hash
file.original_torrent = torrent
self.files.append(file)
def get_file_groups(self) -> List[FileGroup]:
file_groups = list()
if args.torrents:
for torrent_hash in set(f.torrent_hash for f in self.files):
files = [f for f in self.files if f.torrent_hash == torrent_hash]
file_groups.append(
FileGroup(
success=all(f.success for f in files),
files=files,
name=files[0].original_torrent.name
)
)
else:
file_groups.append(
FileGroup(
success=all(f.success for f in self.files),
files=self.files,
name=Path(os.path.commonprefix([str(f.filepath) for f in self.files])).name
)
)
return file_groups
def set_transiting(self):
if args.torrents:
for torrent in list({file.original_torrent.hash: file.original_torrent for file in self.files}.values()):
logger.debug(f'Setting category to "transiting" for "{torrent.name}"')
torrents.wrap_api_call(func=torrents.qbt_client.torrents_set_category,
hashes=torrent.hash,
category='transiting')
def update_torrents(self):
"""
follow rules to appropriately update category.
if torrent is still marked transiting, restore back to original state.
this is primarily to ensure torrents are not left in a transiting state when wrapping things up.
"""
torrents_root_dir = '/home/user/torrents/'
delete_categories = ('errored delete after upload', 'delete after upload')
skip_update_categories = ('skip update after upload',)
if args.torrents:
update_torrents = not args.skip_torrent_updates
if update_torrents and args.ask_for_torrent_updates:
update_torrents = PyInquirer.prompt(questions={'type': 'confirm',
'name': 'update',
'message': 'Update torrents?'}).get('update', False)
if update_torrents:
for file_group in self.get_file_groups():
category = None
location = None
torrent = file_group.files[0].original_torrent
if torrent.category not in skip_update_categories:
if file_group.success:
if args.force_torrent_deletion or torrent.category in delete_categories:
logger.info(f'Deleting {torrent.name}')
torrents.wrap_api_call(torrents.qbt_client.torrents_delete,
delete_files=True,
hashes=torrent.hash)
else:
category = 'uploaded'
location = '1completed'
elif torrent.category in delete_categories:
category = 'errored delete after upload'
location = '2errored'
elif not torrent.category:
category = 'errored'
location = '2errored'
if location:
logger.info(f'Moving "{torrent.name}" to "{location}" directory')
torrents.wrap_api_call(torrents.qbt_client.torrents_set_location,
location=torrents_root_dir + location,
hashes=torrent.hash)
if category:
logger.info(f'Setting category to "{category}" for "{torrent.name}"')
torrents.wrap_api_call(torrents.qbt_client.torrents_set_category,
category=category,
hashes=torrent.hash)
# one last roll through to ensure torrents are not left as 'transiting'
for torrent in list({file.original_torrent.hash: file.original_torrent for file in self.files}.values()):
if torrents.is_transiting(torrent_hash=torrent.hash):
logger.info(f'Resetting category back to "{torrent.category}" for "{torrent.name}"')
torrents.wrap_api_call(func=torrents.qbt_client.torrents_set_category,
hashes=torrent.hash,
category=torrent.category or '')
def parse_filenames(self):
for file in self.files:
try:
file.parts = parse(filepath=file.filepath)
except Exception as e:
file.failure_reason = f'Error during filename parsing: {e}'
logger.exception(f'Error during filename parsing. Filename: {file.filepath.name}. Error: {e}')
logger.debug(f'Parsed {file.filepath.name}: {file.parts}')
if file.parts.parent_parts:
logger.debug(f'Parsed parent {file.filepath.parent}: {file.parts.parent_parts}')
def identify_media(self):
for file in self.files:
logger.info(f'')
logger.info(f'>>> Starting identification for {file.filepath.name}...')
file.identify_media()
logger.info(f'<<< Finished identification for {file.filepath.name}.')
def transit(self):
for file in self.files:
logger.info(f'')
logger.info(f'>>> Starting transit for {file.filepath.name}...')
if not file.chosen_one or file.skip:
logger.warning(f'Cannot transit. Chosen one: {file.chosen_one}. Skip file: {file.skip}.')
continue
dest_dir = file.destination_directory
dest_filename = file.destination_filename
if not dest_dir or not dest_filename:
                file.failure_reason = 'Insufficient information to construct destination filepath.'
logger.error(file.failure_reason)
continue
try:
send_file(local_filepath=file.filepath, remote_filepath=dest_dir/dest_filename)
logger.info('File successfully transited')
file.success = True
except Exception as e:
file.failure_reason = f'Failed to transmit file. Error: {e}'
logger.exception(file.failure_reason)
logger.info(f'<<< Finished transit for {file.filepath.name}.')
|
StarcoderdataPython
|
225333
|
<reponame>certara-ShengnanHuang/machine-learning
import random
from typing import Tuple
__all__ = ['train_test_split_file']
def train_test_split_file(input_path: str,
output_path_train: str,
output_path_test: str,
test_size: float=0.1,
random_state: int=1234,
encoding: str='utf-8') -> Tuple[int, int]:
"""
Perform train and test split on a text file without reading the
whole file into memory.
Parameters
----------
input_path : str
Path to the original full text file.
output_path_train : str
Path of the train split.
output_path_test : str
Path of the test split.
test_size : float, 0.0 ~ 1.0, default 0.1
Size of the test split.
random_state : int, default 1234
Seed for the random split.
encoding : str, default 'utf-8'
Encoding for reading and writing the file.
Returns
-------
count_train, count_test : int
Number of record in the training and test set.
"""
random.seed(random_state)
# accumulate the number of records in the training and test set
count_train = 0
count_test = 0
train_range = 1 - test_size
with open(input_path, encoding=encoding) as f_in, \
open(output_path_train, 'w', encoding=encoding) as f_train, \
open(output_path_test, 'w', encoding=encoding) as f_test:
for line in f_in:
random_num = random.random()
if random_num < train_range:
f_train.write(line)
count_train += 1
else:
f_test.write(line)
count_test += 1
return count_train, count_test
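if __name__ == '__main__':
    # Usage sketch: the file names below are placeholders. Roughly 10% of the
    # lines of corpus.txt land in test.txt, and the split is reproducible
    # because random_state fixes the seed.
    n_train, n_test = train_test_split_file(
        input_path='corpus.txt',
        output_path_train='train.txt',
        output_path_test='test.txt',
        test_size=0.1)
    print(n_train, n_test)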
|
StarcoderdataPython
|
234770
|
<gh_stars>0
from django.shortcuts import render
from django.http import HttpResponse
from apps.producto.models import producto
from apps.producto.forms import ProductoForm
from apps.carrito.forms import agregarCarritoForm
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from apps.carrito.models import carrito, carrito_producto
from apps.cliente.models import cliente
def index(request):
return render(request, 'producto/inicio.html')
@staff_member_required
def crearProducto(request):
    if request.method == 'POST':
        form = ProductoForm(request.POST)
        if form.is_valid():
            form.save()
        return consultarProducto(request)
else:
form = ProductoForm()
return render(request, 'Producto/crearProducto.html', {'form' : form})
def consultarProducto(request):
productos = producto.objects.filter(stock__gt = 0)
id_productos = []
for i in productos:
id_productos.append(i.id_producto)
contexto = {'productos':productos, 'id_productos':id_productos}
return render(request, 'producto/consultarProducto.html', contexto)
@staff_member_required
def editarProducto(request, id_prod):
    Producto = producto.objects.get(id_producto=id_prod)
    if request.method == 'POST':
        form = ProductoForm(request.POST, instance=Producto)
        if form.is_valid():
            form.save()
        return consultarProducto(request)
    else:
        form = ProductoForm(instance=Producto)
        return render(request, 'producto/editarProducto.html', {'form': form})
@staff_member_required
def eliminarProducto(request, id_prod):
    Producto = producto.objects.get(id_producto=id_prod)
    if request.method == 'POST':
        Producto.delete()
        return consultarProducto(request)
    else:
        return render(request, 'producto/eliminarProducto.html', {'producto': Producto})
@login_required
def addProducto(request):
    id_producto = request.POST['id_producto']
    user = request.user
    id_cliente = cliente.objects.get(id_user=user.id).id_cliente
    new_product = carrito_producto(
        id_carrito_carrito=carrito.objects.get(id_cliente_id=id_cliente),
        id_producto_producto=producto.objects.get(id_producto=id_producto),
        cantidad=1)
    new_product.save()
return HttpResponse('')

# datadict/datadict.py

import pandas as pd
import numpy as np
import warnings
import os
import functools
import pickle
from os import path
from pandas.api.types import is_numeric_dtype
from typing import Dict
class DataDict:
"""
This class provides functionality for mapping the columns of different data frames into a consistent namespace,
ensuring the columns to comply with the data type specified in the data dictionary and describing the data.
The data dictionary consists at least of the following columns:
* `Data Set`: Used when mapping in combination with `Field` to rename to the column to `Name`.
* `Field`: Column name of the data frame to map to `Name`.
* `Name`: Column name that is unique throughout the data dictionary.
* `Description`: Description of the column name. This can be used to provide additional information when displaying the data frame.
* `Type`: Type the column should be cast to.
* `Format`: Format to use when values need to be converted to a string representation. The format string has to be a Python format string such as `{:.0f}%`
The data dictionary can either be loaded from a CSV file or from a data frame.
"""
_data_dict_file: str
_data_dict_updated: float = None
_data_dict: pd.DataFrame
_formats: dict
_names: list
auto_reload: bool
column_names = ['Data Set', 'Field', 'Name', 'Description', 'Type', 'Format']
supported_types = ['float', 'float32', 'float64', 'int', 'int32', 'int64', 'object', 'str', 'bool', 'datetime64', 'timedelta', 'category']
stats = {'sum': 'Total', 'mean': 'Average'}
meta: object
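    # Note: the decorator below deliberately reuses the name of the
    # `auto_reload` attribute declared above. Instances set the attribute in
    # __init__; this class-level name is only used to decorate methods while
    # the class body is being executed.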
def auto_reload(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
if self.auto_reload:
self.__load()
return func(self, *args, **kwargs)
return wrapper
    def __aggr(self, series: pd.Series):
        # Look up the optional 'Default Aggregation' entry for this column and
        # evaluate it against the series (e.g. 'sum()'); return None if no
        # unique aggregation is defined or the evaluation fails.
        funcs = self._data_dict[self._data_dict['Name'] == series.name]['Default Aggregation'].values
        try:
            return eval('series.' + funcs[0]) if len(funcs) == 1 and not funcs[0].isspace() else None
        except Exception:
            return None
@property
def data_dict(self) -> pd.DataFrame:
"""
Data dictionary as a data frame.
"""
return self._data_dict
@property
def formats(self) -> Dict[str, str]:
"""
Dictionary that maps the columns to names to their format strings.
"""
return self._formats
def __init__(self, data_dict_file: str = None, auto_reload: bool = True, data_dict: pd.DataFrame = None):
"""
Creates the data dictionary and validates it. It can either be initialised from a CSV file or a data frame.
Args:
data_dict_file: The data dictionary file in CSV format to use to initialise the data dictionary.
auto_reload: Whether the data dictionary should automatically check for changes in the data dictionary file.
data_dict: The data dictionary as a data frame to use to initialise the data dictionary instead of the data dictionary file.
"""
if data_dict_file is not None and data_dict is not None:
raise ValueError('Parameters data_dict_file and data_dict can\'t be assigned at the same time.')
self._data_dict_file = data_dict_file
self.auto_reload = auto_reload
self.__set_data_dict(data_dict)
self.__load()
def __load(self) -> None:
"""
Loads the data dictionary from the CSV file specified during initialisation and validates it.
"""
if self._data_dict_file is None:
return
if not path.exists(self._data_dict_file):
raise ValueError(f'The data dictionary file {self._data_dict_file} does not exist.')
if self._data_dict_updated is not None and os.path.getmtime(self._data_dict_file) == self._data_dict_updated:
return
data_dict = pd.read_csv(self._data_dict_file)
self._data_dict_updated = os.path.getmtime(self._data_dict_file)
self.__set_data_dict(data_dict)
def __set_data_dict(self, data_dict: pd.DataFrame) -> None:
"""
        Sets a new data dictionary data frame and validates it.
Args:
data_dict: Specifies the data dictionary.
"""
DataDict.validate(data_dict)
self._data_dict = data_dict
if data_dict is not None:
formats = self._data_dict[['Name', 'Format']].dropna(subset=['Format'])
self._formats = pd.Series(formats['Format'].values, index=formats['Name']).to_dict()
self._names = list(self._data_dict['Name'].values)
@staticmethod
def validate(data_dict: pd.DataFrame) -> None:
"""
Validates the given data dictionary and raises a `ValueError` if the validation fails.
Args:
data_dict: The data dictionary to validate.
Returns:
Raises:
ValueError: If the given data dictionary is not valid.
"""
if data_dict is None:
return
data_dict = data_dict.copy()
# Check that all expected columns exist.
if not set(data_dict.columns) >= set(DataDict.column_names):
raise ValueError(f'The data dictionary must at least include the following column names: {DataDict.column_names}')
# Check that all types are supported Python types.
if not set(data_dict['Type'].values) <= set(DataDict.supported_types):
raise ValueError(
f'The Type column of the data dictionary contains the following unsupported types {set(data_dict["Type"].values) - set(DataDict.supported_types)}. Only the following types are supported: {DataDict.supported_types}')
# Check that names are unique.
if any(data_dict['Name'].duplicated()):
raise ValueError(f'The Name column contains the following duplicates: {data_dict["Name"][data_dict["Name"].duplicated()].values}. The names must be unique.')
# Check that dataset and field combination is unique.
data_dict = data_dict.replace('', np.nan)
data_dict['Field ID'] = data_dict['Data Set'] + '.' + data_dict['Field']
        if any(data_dict['Field ID'][data_dict['Field ID'].notnull()].duplicated()):
raise ValueError(f'The combination of columns Data Set and Field contains the following duplicates: {data_dict["Field ID"][data_dict["Field ID"].duplicated()].values}. The combination must be unique.')
@staticmethod
def __str_to_bool(value: str) -> object:
"""
        Converts the given string to a bool if the argument is a string; otherwise it returns the value untouched. `yes`, `true`, `1` are considered `True`, the rest is considered `False`.
Args:
value: The value to convert to a bool.
Returns:
The converted bool if the value is a string. Otherwise the value passed in the argument.
"""
if pd.isnull(value):
return None
if not isinstance(value, str):
return value
return value.lower() in ['yes', 'true', '1']
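    # Examples: __str_to_bool('Yes') -> True, __str_to_bool('0') -> False,
    # __str_to_bool(None) -> None, __str_to_bool(3) -> 3 (non-str passthrough).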
def df(self, data_set: str = None, any_data_set: bool = False) -> pd.DataFrame:
"""
Gets the data set with the given name as a data frame.
Args:
data_set: The data set to filter by. If this value matches a value in the `Data Set` column of the data dictionary, the matching rows are returned.
If `data_set` is not specified, the entries with empty `Data Set` are returned.
any_data_set: Whether to return all data sets in the data frame.
Returns:
            The data set as a data frame, indexed by the `Field` column.
"""
if any_data_set and data_set is not None:
raise ValueError('Either data_set can be provided or any_data_set can be True but not both.')
if data_set is None:
data_set = ''
return self._data_dict[(self._data_dict['Data Set'] == data_set) | any_data_set].set_index('Field')
@auto_reload
def remap(self, df: pd.DataFrame, data_set: str = None, ensure_cols: bool = False, strip_cols: bool = False) -> pd.DataFrame:
"""
        Renames the columns in the given data frame based on the `Data Set` and `Field` attributes in the data dictionary to `Name`
        if such a mapping is found, and converts the column data to `Type`. It also reorders the columns based on the order of the data dictionary entries.
Args:
df: The data frame to remap.
data_set: The data set to use. If this value matches a value in the `Data Set` column of the data dictionary, then the corresponding names in the `Field`
                column are used to rename the columns of the given data frame to the `Name` column name. If `data_set` is not specified, the values in the `Field` column
that have entries with empty `Data Set` are used.
ensure_cols: Ensures all columns in the data_set are present. If the source data frame does not contain them, empty ones are created. This parameter can
only be true if data_set is specified. This is useful when the data frame to be remapped may not have all the columns if it is empty.
strip_cols: Whether to remove all columns that are not in the data set. In any case, it will leave the index untouched.
Returns:
The remapped data frame.
"""
if df is None:
raise ValueError('Parameter df not provided.')
if (data_set is None or data_set == '') and ensure_cols:
raise ValueError('Parameter data_set cannot be None or empty if ensure_cols is True.')
dd = self.df(data_set)
types_map = dd['Type'].to_dict()
types_map = {col: typ for (col, typ) in types_map.items() if col in df.columns} # Remove mapping for columns that are not present in data frame.
# Map values of str columns.
str_cols = [col for (col, typ) in types_map.items() if typ == 'str']
df[str_cols] = df[str_cols].apply(lambda col: col.map(lambda val: val if isinstance(val, str) and val != '' else None))
# Ensure that nan is represented as None so that column type conversion does not result in object types if nan is present.
df = df.replace('', np.nan)
# Map values of bool columns.
bool_cols = [col for (col, typ) in types_map.items() if typ == 'bool']
df[bool_cols] = df[bool_cols].apply(lambda col: col.map(lambda val: self.__str_to_bool(val)))
        # Treat bool and str separately because all non-empty strings would otherwise be converted to True.
# Map values of non-bool, non-str columns using data type.
no_bool_str_types_map = {col: typ for (col, typ) in types_map.items() if typ not in ['bool', 'str']}
df = df.astype(no_bool_str_types_map, errors='ignore')
columns_map = dd['Name'].to_dict()
df = df.rename(columns=columns_map)
df = self.reorder(df)
if ensure_cols:
df = self.ensure_cols(df, data_set=data_set)
if strip_cols:
df = self.strip_cols(df, data_set=data_set)
return df
@auto_reload
def reorder(self, df: pd.DataFrame) -> pd.DataFrame:
"""
Reorders the given data frame based on the order of the matching entries in the data dictionary.
Args:
df: The data frame whose columns need to be reordered.
Returns:
The reordered data frame.
"""
return df[[x for x in self._names if x in list(df.columns.values)]
+ [x for x in list(df.columns.values) if x not in self._names]]
@auto_reload
def ensure_cols(self, df: pd.DataFrame, cols: list = None, data_set: str = None) -> pd.DataFrame:
"""
        Ensures that the columns from the given data set or the given column names are present in the resulting data frame. Missing columns are added at the end.
Args:
df: The data frame to add the missing columns (if any) to.
data_set: The name of data set to use. If this value matches a value in the `Data Set` column of the data dictionary,
                then the `Name` column is used to identify missing columns. If `data_set` is not specified, the values in the `Name` column
that have entries with empty `Data Set` are used.
cols: The column names to ensure are present in the returned data frame.
Returns:
The data frame with missing columns added to the end.
"""
if cols is not None and data_set is not None:
raise ValueError('Either the cols or the data_set arguments can be provided but not both.')
if cols is None:
cols = list(self.df(data_set)['Name'].values)
current_cols = list(df.columns.values)+list(df.index.names)
missing_cols = [v for v in cols if v not in current_cols]
return df.reindex(columns=(list(df.columns.values)+missing_cols))
@auto_reload
def strip_cols(self, df: pd.DataFrame, data_set: str = None, any_data_set: bool = False):
"""
Removes all columns that are not in the given data set from the given data frame or all columns that are not in any data set. It leaves
the index untouched.
Args:
df: The data frame to remove the columns from.
data_set: The name of the data set with columns to preserve.
any_data_set: Whether to remove all columns that are not in any data set.
Returns:
            The data frame with only the data set columns.
"""
if any_data_set and data_set is not None:
            raise ValueError('Either data_set can be provided or any_data_set can be True but not both.')
ds_cols = list(self.df(data_set, any_data_set)['Name'].values)
df_cols = [v for v in df.columns if v in ds_cols]
return df[df_cols]
@staticmethod
def add_stats(df: pd.DataFrame) -> pd.DataFrame:
"""
Adds the `Total` and `Average` of the column values as two rows at the top of the data frame.
Args:
df: The data frame to summarise.
Returns:
The data frame with the `Total` and `Average` at the top.
"""
if df is None:
raise ValueError('Parameter df is mandatory')
        num_agg_map = {col: DataDict.stats.keys() for col in df if is_numeric_dtype(df[col]) and df[col].dtype != bool}
aggr_row = df.agg(num_agg_map).rename(DataDict.stats)
if len(df.index.names) > 1:
aggr_row = pd.concat([aggr_row], keys=[np.nan] * len(DataDict.stats.keys()), names=df.index.names[1:])
df = pd.concat([df.iloc[:0], aggr_row, df], sort=False)
# Adds the dictionary of stats to the data frame.
if not hasattr(df, 'stats'):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
df.stats = {}
df.stats = {**df.stats, **DataDict.stats}
return df
@staticmethod
def has_stats(df: pd.DataFrame):
"""
Checks whether the given data frame has stats rows added at the top of the data frame.
Args:
df: The data frame to check.
Returns:
Whether the given data frame has stats.
"""
return hasattr(df, 'stats')
def format(self, df: pd.DataFrame) -> pd.DataFrame:
"""
Formats the data frame based on the `Format` attribute in the data dictionary.
Args:
df: The data frame to format.
Returns:
The formatted data frame.
"""
if df is None:
raise ValueError('Parameter df is mandatory')
# Necessary to define separate function instead of using lambda directly (see https://stackoverflow.com/questions/36805071/dictionary-comprehension-with-lambda-functions-gives-wrong-results)
def make_func(f: str = None):
def format_value(x):
if f is None or f == '':
return x if not pd.isnull(x) else '-'
return f.format(x) if not pd.isnull(x) else '-'
# If mean is part of the stats, then the integer numbers need to be formatted as floats because the mean of integers can be float.
if self.has_stats(df) and 'mean' in df.stats.keys() and f is not None:
f = f.replace(':d', ':.1f')
return lambda x: format_value(x)
# Assembles a dictionary with columns as key and format functions as values but only for the columns that are actually in the data frame.
formats = {col: make_func(f) for (col, f) in self._formats.items() if col in df.columns.values}
formats = {**formats, **{col: make_func() for col in set(df.columns.values) - set(self._formats.keys())}}
df = df.copy()
for col, value in formats.items():
try:
df[col] = df[col].apply(value)
except ValueError as e:
warnings.warn(f'A value in column {col} could not be formatted.\nError message: {e}')
return df
def __hash__(self):
"""
Calculates the hash value of the data dictionary by calculating the hash value of the data dictionary data frame.
Returns:
The hash value of the data dictionary.
"""
return hash(pickle.dumps(self.data_dict))
DataDict.meta = DataDict(data_dict=pd.DataFrame.from_dict(orient='index',
data={0: ['data_dict', 'data_set', 'Data Set', 'Used when mapping in combination with Field to rename to the column to Name.', 'str', '{:s}'],
1: ['data_dict', 'field', 'Field', 'Column name of the data frame to map to Name.', 'str', '{:s}'],
2: ['data_dict', 'name', 'Name', 'Column name that is unique throughout the data dictionary.', 'str', '{:s}'],
3: ['data_dict', 'description', 'Description', 'Description of the column name. This can be used to provide additional information when displaying the data frame.', 'str',
'{:s}'],
4: ['data_dict', 'type', 'Type', 'Type the column should be cast to.', 'str', '{:s}'],
5: ['data_dict', 'format', 'Format',
'Format to use when values need to be converted to a string representation. The format string has to be a Python format string such as {:.0f}%', 'str', '{:s}']},
columns=['Data Set', 'Field', 'Name', 'Description', 'Type', 'Format']))
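

# Example usage (a minimal sketch; the dictionary rows, data set name and
# column names below are hypothetical):
#
#     dd = DataDict(data_dict=pd.DataFrame(
#         [['sales', 'prod_id', 'Product ID', 'Product key', 'int', '{:d}'],
#          ['sales', 'amt', 'Amount', 'Sale amount', 'float', '{:.2f}']],
#         columns=DataDict.column_names))
#     df = pd.DataFrame({'prod_id': ['1', '2'], 'amt': ['1.5', '2.25']})
#     df = dd.remap(df, data_set='sales')   # rename columns and cast types
#     print(dd.format(df))                  # apply the Format strings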
"""
Test SBTarget APIs.
"""
import unittest2
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestNameLookup(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(['pyapi'])
@expectedFailureAll(oslist=["windows"], bugnumber='llvm.org/pr21765')
def test_target(self):
"""Exercise SBTarget.FindFunctions() with various name masks.
A previous regression caused mangled names to not be able to be looked up.
This test verifies that using a mangled name with eFunctionNameTypeFull works
and that using a function basename with eFunctionNameTypeFull works for all
C++ functions that are at the global namespace level."""
        self.build()
exe = self.getBuildArtifact("a.out")
# Create a target by the debugger.
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
exe_module = target.FindModule(target.GetExecutable())
c_name_to_symbol = {}
cpp_name_to_symbol = {}
mangled_to_symbol = {}
        num_symbols = exe_module.GetNumSymbols()
        for i in range(num_symbols):
            symbol = exe_module.GetSymbolAtIndex(i)
name = symbol.GetName()
if name and 'unique_function_name' in name and '__PRETTY_FUNCTION__' not in name:
mangled = symbol.GetMangledName()
if mangled:
mangled_to_symbol[mangled] = symbol
if name:
cpp_name_to_symbol[name] = symbol
elif name:
c_name_to_symbol[name] = symbol
# Make sure each mangled name turns up exactly one match when looking up
# functions by full name and using the mangled name as the name in the
# lookup
self.assertGreaterEqual(len(mangled_to_symbol), 6)
for mangled in mangled_to_symbol.keys():
symbol_contexts = target.FindFunctions(mangled, lldb.eFunctionNameTypeFull)
            self.assertEqual(symbol_contexts.GetSize(), 1)
for symbol_context in symbol_contexts:
self.assertTrue(symbol_context.GetFunction().IsValid())
self.assertTrue(symbol_context.GetSymbol().IsValid())
|
StarcoderdataPython
|
1651951
|
#!/usr/bin/env python
from setuptools import find_packages, setup
VERSION = "0.0.1"
setup(
name="gnome-randomwall",
version=VERSION,
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/gnome-randomwall",
description="Random wallpaper selector for GNOME desktop",
license="MIT",
python_requires=">=3.9",
packages=find_packages("src"),
package_dir={"": "src"},
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3.9",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
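
# To build and install locally with the standard setuptools workflow, e.g.:
#
#     python -m pip install .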

# examples/Sample_code/error_handling.py

import pydp as dp
# Sample code to display error handling
x = dp.algorithms.laplacian.Max(1)
try:
    print(x.quick_result([2, 8]))
except RuntimeError as e:
    print(e)
except SystemError:
    print("system error")
else:
    # The else branch runs only when no exception was raised above.
    print("i give up")

# Source: neosavvyinc/mixpanel-celery (Python 2)

import httplib
import urllib
import base64
import logging
import socket
from django.utils import simplejson
from celery.task import Task
from celery.registry import tasks
from mixpanel.conf import settings as mp_settings
class EventTracker(Task):
"""
Task to track a Mixpanel event.
"""
name = "mixpanel.tasks.EventTracker"
max_retries = mp_settings.MIXPANEL_MAX_RETRIES
class FailedEventRequest(Exception):
"""The attempted recording event failed because of a non-200 HTTP return code"""
pass
def run(self, event_name, properties=None, token=None, test=None,
throw_retry_error=False, **kwargs):
"""
Track an event occurrence to mixpanel through the API.
``event_name`` is the string for the event/category you'd like to log
this event under
``properties`` is (optionally) a dictionary of key/value pairs
describing the event.
``token`` is (optionally) your Mixpanel api token. Not required if
you've already configured your MIXPANEL_API_TOKEN setting.
``test`` is an optional override to your
`:data:mixpanel.conf.settings.MIXPANEL_TEST_ONLY` setting for determining
if the event requests should actually be stored on the Mixpanel servers.
"""
l = self.get_logger(**kwargs)
l.info("Recording event: <%s>" % event_name)
if l.logger.getEffectiveLevel() == logging.DEBUG:
httplib.HTTPConnection.debuglevel = 1
is_test = self._is_test(test)
generated_properties = self._handle_properties(properties, token)
url_params = self._build_params(event_name, generated_properties, is_test)
l.debug("url_params: <%s>" % url_params)
conn = self._get_connection()
try:
result = self._send_request(conn, url_params)
        except EventTracker.FailedEventRequest as exception:
conn.close()
l.info("Event failed. Retrying: <%s>" % event_name)
kwargs.update({
'properties': properties,
'token': token,
'test': test})
self.retry(args=[event_name],
kwargs=kwargs,
exc=exception,
countdown=mp_settings.MIXPANEL_RETRY_DELAY,
throw=throw_retry_error)
return
conn.close()
if result:
l.info("Event recorded/logged: <%s>" % event_name)
else:
l.info("Event ignored: <%s>" % event_name)
return result
def _is_test(self, test):
"""
Determine whether this event should be logged as a test request, meaning
it won't actually be stored on the Mixpanel servers. A return result of
1 means this will be a test, 0 means it won't as per the API spec.
Uses ``:mod:mixpanel.conf.settings.MIXPANEL_TEST_ONLY`` as the default
if no explicit test option is given.
"""
        if test is None:
test = mp_settings.MIXPANEL_TEST_ONLY
if test:
return 1
return 0
def _handle_properties(self, properties, token):
"""
Build a properties dictionary, accounting for the token.
"""
        if properties is None:
properties = {}
if not properties.get('token', None):
if token is None:
token = mp_settings.MIXPANEL_API_TOKEN
properties['token'] = token
l = self.get_logger()
l.debug('pre-encoded properties: <%s>' % repr(properties))
return properties
def _get_connection(self):
server = mp_settings.MIXPANEL_API_SERVER
# Wish we could use python 2.6's httplib timeout support
socket.setdefaulttimeout(mp_settings.MIXPANEL_API_TIMEOUT)
return httplib.HTTPConnection(server)
def _build_params(self, event, properties, is_test):
"""
Build HTTP params to record the given event and properties.
"""
params = {'event': event, 'properties': properties}
data = base64.b64encode(simplejson.dumps(params))
data_var = mp_settings.MIXPANEL_DATA_VARIABLE
url_params = urllib.urlencode({data_var: data, 'test': is_test})
return url_params
def _send_request(self, connection, params):
"""
        Send an event with its properties to the API server.
        Returns ``True`` if the event was logged by Mixpanel.
"""
endpoint = mp_settings.MIXPANEL_TRACKING_ENDPOINT
try:
connection.request('GET', '%s?%s' % (endpoint, params))
response = connection.getresponse()
        except socket.error as message:
raise EventTracker.FailedEventRequest("The tracking request failed with a socket error. Message: [%s]" % message)
if response.status != 200 or response.reason != 'OK':
raise EventTracker.FailedEventRequest("The tracking request failed. Non-200 response code was: %s %s" % (response.status, response.reason))
# Successful requests will generate a log
response_data = response.read()
if response_data != '1':
return False
return True
tasks.register(EventTracker)
class FunnelEventTracker(EventTracker):
"""
Task to track a Mixpanel funnel event.
"""
name = "mixpanel.tasks.FunnelEventTracker"
max_retries = mp_settings.MIXPANEL_MAX_RETRIES
class InvalidFunnelProperties(Exception):
"""Required properties were missing from the funnel-tracking call"""
pass
def run(self, funnel, step, goal, properties, token=None, test=None,
throw_retry_error=False, **kwargs):
"""
Track an event occurrence to mixpanel through the API.
``funnel`` is the string for the funnel you'd like to log
this event under
``step`` the step in the funnel you're registering
``goal`` the end goal of this funnel
``properties`` is a dictionary of key/value pairs
describing the funnel event. A ``distinct_id`` is required.
``token`` is (optionally) your Mixpanel api token. Not required if
you've already configured your MIXPANEL_API_TOKEN setting.
``test`` is an optional override to your
`:data:mixpanel.conf.settings.MIXPANEL_TEST_ONLY` setting for determining
if the event requests should actually be stored on the Mixpanel servers.
"""
l = self.get_logger(**kwargs)
l.info("Recording funnel: <%s>-<%s>" % (funnel, step))
properties = self._handle_properties(properties, token)
is_test = self._is_test(test)
properties = self._add_funnel_properties(properties, funnel, step, goal)
url_params = self._build_params(mp_settings.MIXPANEL_FUNNEL_EVENT_ID,
properties, is_test)
l.debug("url_params: <%s>" % url_params)
conn = self._get_connection()
try:
result = self._send_request(conn, url_params)
        except EventTracker.FailedEventRequest as exception:
conn.close()
l.info("Funnel failed. Retrying: <%s>-<%s>" % (funnel, step))
kwargs.update({
'token': token,
'test': test})
self.retry(args=[funnel, step, goal, properties],
kwargs=kwargs,
exc=exception,
countdown=mp_settings.MIXPANEL_RETRY_DELAY,
throw=throw_retry_error)
return
conn.close()
if result:
l.info("Funnel recorded/logged: <%s>-<%s>" % (funnel, step))
else:
l.info("Funnel ignored: <%s>-<%s>" % (funnel, step))
return result
def _add_funnel_properties(self, properties, funnel, step, goal):
        if 'distinct_id' not in properties:
error_msg = "A ``distinct_id`` must be given to record a funnel event"
raise FunnelEventTracker.InvalidFunnelProperties(error_msg)
properties['funnel'] = funnel
properties['step'] = step
properties['goal'] = goal
return properties
tasks.register(FunnelEventTracker)
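
# Example usage (a minimal sketch; the event name, token and properties are
# hypothetical):
#
#     EventTracker.delay('my_event', properties={'distinct_id': 'user-1'},
#                        token='YOUR_API_TOKEN')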
__copyright__ = "Copyright (C) 2012 <NAME>"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import logging
logger = logging.getLogger(__name__)
from loopy.diagnostic import (
LoopyError, WriteRaceConditionWarning, warn_with_kernel,
LoopyAdvisory)
import islpy as isl
from pytools.persistent_dict import WriteOncePersistentDict
from loopy.tools import LoopyKeyBuilder
from loopy.version import DATA_MODEL_VERSION
from loopy.kernel.data import make_assignment, filter_iname_tags_by_type
from loopy.kernel.tools import kernel_has_global_barriers
# for the benefit of loopy.statistics, for now
from loopy.type_inference import infer_unknown_types
from loopy.symbolic import RuleAwareIdentityMapper, ReductionCallbackMapper
# from loopy.transform.iname import remove_any_newly_unused_inames
from loopy.kernel.instruction import (MultiAssignmentBase, CInstruction,
CallInstruction, _DataObliviousInstruction)
from loopy.translation_unit import TranslationUnit
from loopy.kernel.function_interface import CallableKernel, ScalarCallable
from pytools import ProcessLogger
from functools import partial
# {{{ prepare for caching
def prepare_for_caching(program):
from warnings import warn
warn("prepare_for_caching is deprecated and no longer needed. "
"It will stop working in 2022.",
DeprecationWarning, stacklevel=2)
return program
# }}}
# {{{ check for writes to predicates
def check_for_writes_to_predicates(kernel):
from loopy.symbolic import get_dependencies
for insn in kernel.instructions:
pred_vars = (
frozenset.union(
*(get_dependencies(pred) for pred in insn.predicates))
if insn.predicates else frozenset())
written_pred_vars = frozenset(insn.assignee_var_names()) & pred_vars
if written_pred_vars:
raise LoopyError("In instruction '%s': may not write to "
"variable(s) '%s' involved in the instruction's predicates"
% (insn.id, ", ".join(written_pred_vars)))
# }}}
# {{{ check reduction iname uniqueness
def check_reduction_iname_uniqueness(kernel):
iname_to_reduction_count = {}
iname_to_nonsimultaneous_reduction_count = {}
def map_reduction(expr, rec):
rec(expr.expr)
for iname in expr.inames:
iname_to_reduction_count[iname] = (
iname_to_reduction_count.get(iname, 0) + 1)
if not expr.allow_simultaneous:
iname_to_nonsimultaneous_reduction_count[iname] = (
iname_to_nonsimultaneous_reduction_count.get(iname, 0) + 1)
return expr
from loopy.symbolic import ReductionCallbackMapper
cb_mapper = ReductionCallbackMapper(map_reduction)
for insn in kernel.instructions:
insn.with_transformed_expressions(cb_mapper)
for iname, count in iname_to_reduction_count.items():
nonsimul_count = iname_to_nonsimultaneous_reduction_count.get(iname, 0)
if nonsimul_count and count > 1:
raise LoopyError("iname '%s' used in more than one reduction. "
"(%d of them, to be precise.) "
"Since this usage can easily cause loop scheduling "
"problems, this is prohibited by default. "
"Use loopy.make_reduction_inames_unique() to fix this. "
"If you are sure that this is OK, write the reduction "
"as 'simul_reduce(...)' instead of 'reduce(...)'"
% (iname, count))
# }}}
# {{{ decide temporary address space
def _get_compute_inames_tagged(kernel, insn, tag_base):
return {iname for iname in kernel.insn_inames(insn.id)
if kernel.iname_tags_of_type(iname, tag_base)}
def _get_assignee_inames_tagged(kernel, insn, tag_base, tv_names):
return {iname
for aname, adeps in zip(
insn.assignee_var_names(),
insn.assignee_subscript_deps())
for iname in adeps & kernel.all_inames()
if aname in tv_names
if kernel.iname_tags_of_type(iname, tag_base)}
def find_temporary_address_space(kernel):
logger.debug("%s: find temporary address space" % kernel.name)
new_temp_vars = {}
from loopy.kernel.data import (LocalInameTagBase, GroupInameTag,
AddressSpace)
import loopy as lp
writers = kernel.writer_map()
base_storage_to_aliases = {}
kernel_var_names = kernel.all_variable_names(include_temp_storage=False)
for temp_var in kernel.temporary_variables.values():
if temp_var.base_storage is not None:
# no nesting allowed
if temp_var.base_storage in kernel_var_names:
raise LoopyError("base_storage for temporary '%s' is '%s', "
"which is an existing variable name"
% (temp_var.name, temp_var.base_storage))
base_storage_to_aliases.setdefault(
temp_var.base_storage, []).append(temp_var.name)
for temp_var in kernel.temporary_variables.values():
# Only fill out for variables that do not yet know if they're
# local. (I.e. those generated by implicit temporary generation.)
if temp_var.address_space is not lp.auto:
new_temp_vars[temp_var.name] = temp_var
continue
tv_names = (frozenset([temp_var.name])
| frozenset(base_storage_to_aliases.get(temp_var.base_storage, [])))
my_writers = writers.get(temp_var.name, frozenset())
if temp_var.base_storage is not None:
for alias in base_storage_to_aliases.get(temp_var.base_storage, []):
my_writers = my_writers | writers.get(alias, frozenset())
desired_aspace_per_insn = []
for insn_id in my_writers:
insn = kernel.id_to_insn[insn_id]
# A write race will emerge if:
#
# - the variable is local
# and
# - the instruction is run across more inames (locally) parallel
# than are reflected in the assignee indices.
locparallel_compute_inames = _get_compute_inames_tagged(
kernel, insn, LocalInameTagBase)
locparallel_assignee_inames = _get_assignee_inames_tagged(
kernel, insn, LocalInameTagBase, tv_names)
grpparallel_compute_inames = _get_compute_inames_tagged(
kernel, insn, GroupInameTag)
grpparallel_assignee_inames = _get_assignee_inames_tagged(
kernel, insn, GroupInameTag, temp_var.name)
assert locparallel_assignee_inames <= locparallel_compute_inames
assert grpparallel_assignee_inames <= grpparallel_compute_inames
desired_aspace = AddressSpace.PRIVATE
for iname_descr, aspace_descr, apin, cpin, aspace in [
("local", "local", locparallel_assignee_inames,
locparallel_compute_inames, AddressSpace.LOCAL),
("group", "global", grpparallel_assignee_inames,
grpparallel_compute_inames, AddressSpace.GLOBAL),
]:
if (apin != cpin and bool(apin)):
warn_with_kernel(
kernel,
f"write_race_{aspace_descr}({insn_id})",
"instruction '%s' looks invalid: "
"it assigns to indices based on %s IDs, but "
"its temporary '%s' cannot be made %s because "
"a write race across the iname(s) '%s' would emerge. "
"(Do you need to add an extra iname to your prefetch?)"
% (insn_id, iname_descr, temp_var.name, aspace_descr,
", ".join(cpin - apin)),
WriteRaceConditionWarning)
if (apin == cpin
# doesn't want to be in this address space if there
# aren't any parallel inames of that kind
and bool(cpin)):
desired_aspace = max(desired_aspace, aspace)
desired_aspace_per_insn.append(desired_aspace)
if not desired_aspace_per_insn:
warn_with_kernel(kernel, "temp_to_write(%s)" % temp_var.name,
"cannot automatically determine address space of '%s'"
% temp_var.name, LoopyAdvisory)
new_temp_vars[temp_var.name] = temp_var
continue
overall_aspace = max(desired_aspace_per_insn)
if not all(iaspace == overall_aspace for iaspace in desired_aspace_per_insn):
raise LoopyError("not all instructions agree on the "
"the desired address space (private/local/global) of the "
"temporary '%s'" % temp_var.name)
new_temp_vars[temp_var.name] = temp_var.copy(address_space=overall_aspace)
return kernel.copy(temporary_variables=new_temp_vars)
# }}}
# {{{ rewrite reduction to imperative form
# {{{ utils (not stateful)
from collections import namedtuple
_InameClassification = namedtuple("_InameClassification",
        "sequential, local_parallel, nonlocal_parallel")
def _classify_reduction_inames(kernel, inames):
sequential = []
local_par = []
nonlocal_par = []
from loopy.kernel.data import (
LocalInameTagBase, UnrolledIlpTag, UnrollTag,
ConcurrentTag, filter_iname_tags_by_type)
for iname in inames:
iname_tags = kernel.iname_tags(iname)
if filter_iname_tags_by_type(iname_tags, (UnrollTag, UnrolledIlpTag)):
# These are nominally parallel, but we can live with
# them as sequential.
sequential.append(iname)
elif filter_iname_tags_by_type(iname_tags, LocalInameTagBase):
local_par.append(iname)
elif filter_iname_tags_by_type(iname_tags, ConcurrentTag):
nonlocal_par.append(iname)
else:
sequential.append(iname)
return _InameClassification(
tuple(sequential), tuple(local_par), tuple(nonlocal_par))
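# For example, with "i" tagged as local-parallel (say "l.0"), "j" tagged
# "unr" and "k" untagged, _classify_reduction_inames(kernel, ("i", "j", "k"))
# returns _InameClassification(sequential=("j", "k"), local_parallel=("i",),
# nonlocal_parallel=()).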
def _add_params_to_domain(domain, param_names):
dim_type = isl.dim_type
nparams_orig = domain.dim(dim_type.param)
domain = domain.add_dims(dim_type.param, len(param_names))
for param_idx, param_name in enumerate(param_names):
domain = domain.set_dim_name(
dim_type.param, param_idx + nparams_orig, param_name)
return domain
def _move_set_to_param_dims_except(domain, except_dims):
dim_type = isl.dim_type
iname_idx = 0
for iname in domain.get_var_names(dim_type.set):
if iname not in except_dims:
domain = domain.move_dims(
dim_type.param, 0,
dim_type.set, iname_idx, 1)
iname_idx -= 1
iname_idx += 1
return domain
def _domain_depends_on_given_set_dims(domain, set_dim_names):
set_dim_names = frozenset(set_dim_names)
return any(
set_dim_names & set(constr.get_coefficients_by_name())
for constr in domain.get_constraints())
def _check_reduction_is_triangular(kernel, expr, scan_param):
"""Check whether the reduction within `expr` with scan parameters described by
the structure `scan_param` is triangular. This attempts to verify that the
domain for the scan and sweep inames is as follows:
[params] -> {
[other inames..., scan_iname, sweep_iname]:
(sweep_min_value
<= sweep_iname
<= sweep_max_value)
and
(scan_min_value
<= scan_iname
<= stride * (sweep_iname - sweep_min_value) + scan_min_value)
and
(irrelevant constraints)
}
"""
orig_domain = kernel.get_inames_domain(
frozenset((scan_param.sweep_iname, scan_param.scan_iname)))
sweep_iname = scan_param.sweep_iname
scan_iname = scan_param.scan_iname
affs = isl.affs_from_space(orig_domain.space)
sweep_lower_bound = isl.align_spaces(
scan_param.sweep_lower_bound,
affs[0])
sweep_upper_bound = isl.align_spaces(
scan_param.sweep_upper_bound,
affs[0])
scan_lower_bound = isl.align_spaces(
scan_param.scan_lower_bound,
affs[0])
from itertools import product
for (sweep_lb_domain, sweep_lb_aff), \
(sweep_ub_domain, sweep_ub_aff), \
(scan_lb_domain, scan_lb_aff) in \
product(sweep_lower_bound.get_pieces(),
sweep_upper_bound.get_pieces(),
scan_lower_bound.get_pieces()):
# Assumptions inherited from the domains of the pwaffs
assumptions = sweep_lb_domain & sweep_ub_domain & scan_lb_domain
# Sweep iname constraints
hyp_domain = affs[sweep_iname].ge_set(sweep_lb_aff)
hyp_domain &= affs[sweep_iname].le_set(sweep_ub_aff)
# Scan iname constraints
hyp_domain &= affs[scan_iname].ge_set(scan_lb_aff)
hyp_domain &= affs[scan_iname].le_set(
scan_param.stride * (affs[sweep_iname] - sweep_lb_aff)
+ scan_lb_aff)
hyp_domain, = (hyp_domain & assumptions).get_basic_sets()
test_domain, = (orig_domain & assumptions).get_basic_sets()
hyp_gist_against_test = hyp_domain.gist(test_domain)
if _domain_depends_on_given_set_dims(hyp_gist_against_test,
(sweep_iname, scan_iname)):
return False, (
"gist of hypothesis against test domain "
"has sweep or scan dependent constraints: '%s'"
% hyp_gist_against_test)
test_gist_against_hyp = test_domain.gist(hyp_domain)
if _domain_depends_on_given_set_dims(test_gist_against_hyp,
(sweep_iname, scan_iname)):
return False, (
"gist of test against hypothesis domain "
"has sweep or scan dependent constraint: '%s'"
% test_gist_against_hyp)
return True, "ok"
_ScanCandidateParameters = namedtuple(
"_ScanCandidateParameters",
"sweep_iname, scan_iname, sweep_lower_bound, "
"sweep_upper_bound, scan_lower_bound, stride")
def _try_infer_scan_candidate_from_expr(
kernel, expr, within_inames, sweep_iname=None):
"""Analyze `expr` and determine if it can be implemented as a scan.
"""
from loopy.symbolic import Reduction
assert isinstance(expr, Reduction)
if len(expr.inames) != 1:
raise ValueError(
"Multiple inames in reduction: '{}'".format(", ".join(expr.inames)))
scan_iname, = expr.inames
from loopy.kernel.tools import DomainChanger
dchg = DomainChanger(kernel, (scan_iname,))
domain = dchg.get_original_domain()
if sweep_iname is None:
try:
sweep_iname = _try_infer_sweep_iname(
domain, scan_iname, kernel.all_inames())
except ValueError as v:
raise ValueError(
"Couldn't determine a sweep iname for the scan "
"expression '%s': %s" % (expr, v))
try:
sweep_lower_bound, sweep_upper_bound, scan_lower_bound = (
_try_infer_scan_and_sweep_bounds(
kernel, scan_iname, sweep_iname, within_inames))
except ValueError as v:
raise ValueError(
"Couldn't determine bounds for the scan with expression '%s' "
"(sweep iname: '%s', scan iname: '%s'): %s"
% (expr, sweep_iname, scan_iname, v))
try:
stride = _try_infer_scan_stride(
kernel, scan_iname, sweep_iname, sweep_lower_bound)
except ValueError as v:
raise ValueError(
"Couldn't determine a scan stride for the scan with expression '%s' "
"(sweep iname: '%s', scan iname: '%s'): %s"
% (expr, sweep_iname, scan_iname, v))
return _ScanCandidateParameters(sweep_iname, scan_iname, sweep_lower_bound,
sweep_upper_bound, scan_lower_bound, stride)
def _try_infer_sweep_iname(domain, scan_iname, candidate_inames):
"""The sweep iname is the outer iname which guides the scan.
E.g. for a domain of {[i,j]: 0<=i<n and 0<=j<=i}, i is the sweep iname.
"""
constrs = domain.get_constraints()
sweep_iname_candidate = None
for constr in constrs:
candidate_vars = {
var for var in constr.get_var_dict()
if var in candidate_inames}
# Irrelevant constraint - skip
if scan_iname not in candidate_vars:
continue
# No additional inames - skip
if len(candidate_vars) == 1:
continue
candidate_vars.remove(scan_iname)
# Depends on more than one iname - error
if len(candidate_vars) > 1:
raise ValueError(
"More than one sweep iname candidate for scan iname '%s' found "
"(via constraint '%s')" % (scan_iname, constr))
next_candidate = candidate_vars.pop()
if sweep_iname_candidate is None:
sweep_iname_candidate = next_candidate
defining_constraint = constr
else:
# Check next_candidate consistency
if sweep_iname_candidate != next_candidate:
raise ValueError(
"More than one sweep iname candidate for scan iname '%s' "
"found (via constraints '%s', '%s')" %
(scan_iname, defining_constraint, constr))
if sweep_iname_candidate is None:
raise ValueError(
"Couldn't find any sweep iname candidates for "
"scan iname '%s'" % scan_iname)
return sweep_iname_candidate
def _try_infer_scan_and_sweep_bounds(kernel, scan_iname, sweep_iname, within_inames):
domain = kernel.get_inames_domain(frozenset((sweep_iname, scan_iname)))
domain = _move_set_to_param_dims_except(domain, (sweep_iname, scan_iname))
var_dict = domain.get_var_dict()
sweep_idx = var_dict[sweep_iname][1]
scan_idx = var_dict[scan_iname][1]
domain = domain.project_out_except(
within_inames | kernel.non_iname_variable_names(), (isl.dim_type.param,))
try:
with isl.SuppressedWarnings(domain.get_ctx()):
sweep_lower_bound = domain.dim_min(sweep_idx)
sweep_upper_bound = domain.dim_max(sweep_idx)
scan_lower_bound = domain.dim_min(scan_idx)
except isl.Error as e:
raise ValueError("isl error: %s" % e)
return (sweep_lower_bound, sweep_upper_bound, scan_lower_bound)
def _try_infer_scan_stride(kernel, scan_iname, sweep_iname, sweep_lower_bound):
"""The stride is the number of steps the scan iname takes per iteration
of the sweep iname. This is allowed to be an integer constant.
E.g. for a domain of {[i,j]: 0<=i<n and 0<=j<=6*i}, the stride is 6.
"""
dim_type = isl.dim_type
domain = kernel.get_inames_domain(frozenset([sweep_iname, scan_iname]))
domain_with_sweep_param = _move_set_to_param_dims_except(domain, (scan_iname,))
domain_with_sweep_param = domain_with_sweep_param.project_out_except(
(sweep_iname, scan_iname), (dim_type.set, dim_type.param))
scan_iname_idx = domain_with_sweep_param.find_dim_by_name(
dim_type.set, scan_iname)
# Should be equal to k * sweep_iname, where k is the stride.
try:
with isl.SuppressedWarnings(domain_with_sweep_param.get_ctx()):
scan_iname_range = (
domain_with_sweep_param.dim_max(scan_iname_idx)
- domain_with_sweep_param.dim_min(scan_iname_idx)
).gist(domain_with_sweep_param.params())
except isl.Error as e:
raise ValueError("isl error: '%s'" % e)
scan_iname_pieces = scan_iname_range.get_pieces()
if len(scan_iname_pieces) > 1:
raise ValueError("range in multiple pieces: %s" % scan_iname_range)
elif len(scan_iname_pieces) == 0:
raise ValueError("empty range found for iname '%s'" % scan_iname)
scan_iname_constr, scan_iname_aff = scan_iname_pieces[0]
if not scan_iname_constr.plain_is_universe():
raise ValueError("found constraints: %s" % scan_iname_constr)
if scan_iname_aff.dim(dim_type.div):
raise ValueError("aff has div: %s" % scan_iname_aff)
coeffs = scan_iname_aff.get_coefficients_by_name(dim_type.param)
if len(coeffs) == 0:
try:
scan_iname_aff.get_constant_val()
except Exception:
raise ValueError("range for aff isn't constant: '%s'" % scan_iname_aff)
# If this point is reached we're assuming the domain is of the form
# {[i,j]: i=0 and j=0}, so the stride is technically 1 - any value
# this function returns will be verified later by
# _check_reduction_is_triangular().
return 1
if sweep_iname not in coeffs:
raise ValueError("didn't find sweep iname in coeffs: %s" % sweep_iname)
stride = coeffs[sweep_iname]
if not stride.is_int():
raise ValueError("stride not an integer: %s" % stride)
if not stride.is_pos():
raise ValueError("stride not positive: %s" % stride)
return stride.to_python()
def _get_domain_with_iname_as_param(domain, iname):
dim_type = isl.dim_type
if domain.find_dim_by_name(dim_type.param, iname) >= 0:
return domain
iname_idx = domain.find_dim_by_name(dim_type.set, iname)
assert iname_idx >= 0, (iname, domain)
return domain.move_dims(
dim_type.param, domain.dim(dim_type.param),
dim_type.set, iname_idx, 1)
def _create_domain_for_sweep_tracking(orig_domain,
tracking_iname, sweep_iname, sweep_min_value, scan_min_value, stride):
dim_type = isl.dim_type
subd = isl.BasicSet.universe(orig_domain.params().space)
# Add tracking_iname and sweep iname.
subd = _add_params_to_domain(subd, (sweep_iname, tracking_iname))
# Here we realize the domain:
#
# [..., i] -> {
# [j]: 0 <= j - l
# and
# j - l <= k * (i - m)
# and
# k * (i - m - 1) < j - l }
# where
# * i is the sweep iname
# * j is the tracking iname
# * k is the stride for the scan
# * l is the lower bound for the scan
# * m is the lower bound for the sweep iname
#
affs = isl.affs_from_space(subd.space)
subd &= (affs[tracking_iname] - scan_min_value).ge_set(affs[0])
subd &= (affs[tracking_iname] - scan_min_value)\
.le_set(stride * (affs[sweep_iname] - sweep_min_value))
subd &= (affs[tracking_iname] - scan_min_value)\
.gt_set(stride * (affs[sweep_iname] - sweep_min_value - 1))
# Move tracking_iname into a set dim (NOT sweep iname).
subd = subd.move_dims(
dim_type.set, 0,
dim_type.param, subd.dim(dim_type.param) - 1, 1)
# Simplify (maybe).
orig_domain_with_sweep_param = (
_get_domain_with_iname_as_param(orig_domain, sweep_iname))
subd = subd.gist_params(orig_domain_with_sweep_param.params())
subd, = subd.get_basic_sets()
return subd
def _hackily_ensure_multi_assignment_return_values_are_scoped_private(kernel):
"""
Multi assignment function calls are currently lowered into OpenCL so that
the function call::
a, b = segmented_sum(x, y, z, w)
becomes::
a = segmented_sum_mangled(x, y, z, w, &b).
For OpenCL, the scope of "b" is significant, and the preamble generation
currently assumes the scope is always private. This function forces that to
be the case by introducing temporary assignments into the kernel.
"""
insn_id_gen = kernel.get_instruction_id_generator()
var_name_gen = kernel.get_var_name_generator()
new_or_updated_instructions = {}
new_temporaries = {}
dep_map = {
insn.id: insn.depends_on for insn in kernel.instructions}
inverse_dep_map = {insn.id: set() for insn in kernel.instructions}
for insn_id, deps in dep_map.items():
for dep in deps:
inverse_dep_map[dep].add(insn_id)
del dep_map
# {{{ utils
def _add_to_no_sync_with(insn_id, new_no_sync_with_params):
insn = kernel.id_to_insn.get(insn_id)
insn = new_or_updated_instructions.get(insn_id, insn)
new_or_updated_instructions[insn_id] = (
insn.copy(
no_sync_with=(
insn.no_sync_with | frozenset(new_no_sync_with_params))))
def _add_to_depends_on(insn_id, new_depends_on_params):
insn = kernel.id_to_insn.get(insn_id)
insn = new_or_updated_instructions.get(insn_id, insn)
new_or_updated_instructions[insn_id] = (
insn.copy(
depends_on=insn.depends_on | frozenset(new_depends_on_params)))
# }}}
from loopy.kernel.instruction import CallInstruction, is_array_call
for insn in kernel.instructions:
if not isinstance(insn, CallInstruction):
continue
if len(insn.assignees) <= 1:
continue
if is_array_call(insn.assignees, insn.expression):
continue
assignees = insn.assignees
assignee_var_names = insn.assignee_var_names()
new_assignees = [assignees[0]]
newly_added_assignments_ids = set()
needs_replacement = False
last_added_insn_id = insn.id
from loopy.kernel.data import AddressSpace, TemporaryVariable
FIRST_POINTER_ASSIGNEE_IDX = 1 # noqa
for assignee_nr, assignee_var_name, assignee in zip(
range(FIRST_POINTER_ASSIGNEE_IDX, len(assignees)),
assignee_var_names[FIRST_POINTER_ASSIGNEE_IDX:],
assignees[FIRST_POINTER_ASSIGNEE_IDX:]):
if (
assignee_var_name in kernel.temporary_variables
and
(kernel.temporary_variables[assignee_var_name].address_space
== AddressSpace.PRIVATE)):
new_assignees.append(assignee)
continue
needs_replacement = True
# {{{ generate a new assignent instruction
new_assignee_name = var_name_gen(
"{insn_id}_retval_{assignee_nr}"
.format(insn_id=insn.id, assignee_nr=assignee_nr))
new_assignment_id = insn_id_gen(
"{insn_id}_assign_retval_{assignee_nr}"
.format(insn_id=insn.id, assignee_nr=assignee_nr))
newly_added_assignments_ids.add(new_assignment_id)
new_temporaries[new_assignee_name] = (
TemporaryVariable(
name=new_assignee_name,
dtype=None,
address_space=AddressSpace.PRIVATE))
from pymbolic import var
new_assignee = var(new_assignee_name)
new_assignees.append(new_assignee)
new_or_updated_instructions[new_assignment_id] = (
make_assignment(
assignees=(assignee,),
expression=new_assignee,
id=new_assignment_id,
depends_on=frozenset([last_added_insn_id]),
depends_on_is_final=True,
no_sync_with=(
insn.no_sync_with | frozenset([(insn.id, "any")])),
predicates=insn.predicates,
within_inames=insn.within_inames))
last_added_insn_id = new_assignment_id
# }}}
if not needs_replacement:
continue
# {{{ update originating instruction
orig_insn = new_or_updated_instructions.get(insn.id, insn)
new_or_updated_instructions[insn.id] = (
orig_insn.copy(assignees=tuple(new_assignees)))
_add_to_no_sync_with(insn.id,
[(id, "any") for id in newly_added_assignments_ids])
# }}}
# {{{ squash spurious memory dependencies amongst new assignments
for new_insn_id in newly_added_assignments_ids:
_add_to_no_sync_with(new_insn_id,
[(id, "any")
for id in newly_added_assignments_ids
if id != new_insn_id])
# }}}
# {{{ update instructions that depend on the originating instruction
for inverse_dep in inverse_dep_map[insn.id]:
_add_to_depends_on(inverse_dep, newly_added_assignments_ids)
for insn_id, scope in (
new_or_updated_instructions[inverse_dep].no_sync_with):
if insn_id == insn.id:
_add_to_no_sync_with(
inverse_dep,
[(id, scope) for id in newly_added_assignments_ids])
# }}}
if not new_temporaries and not new_or_updated_instructions:
return kernel
new_temporary_variables = kernel.temporary_variables.copy()
new_temporary_variables.update(new_temporaries)
new_instructions = (
list(new_or_updated_instructions.values())
+ list(insn
for insn in kernel.instructions
if insn.id not in new_or_updated_instructions))
return kernel.copy(temporary_variables=new_temporary_variables,
instructions=new_instructions)
def _insert_subdomain_into_domain_tree(kernel, domains, subdomain):
# Intersect with inames, because we could have captured some kernel params
# in here too...
dependent_inames = (
frozenset(subdomain.get_var_names(isl.dim_type.param))
& kernel.all_inames())
idx, = kernel.get_leaf_domain_indices(dependent_inames)
domains.insert(idx + 1, subdomain)
# }}}
class RealizeReductionCallbackMapper(ReductionCallbackMapper):
def __init__(self, callback, callables_table):
super().__init__(callback)
self.callables_table = callables_table
def map_reduction(self, expr, **kwargs):
result, self.callables_table = self.callback(expr, self.rec,
**kwargs)
return result
def map_if(self, expr, callables_table, guarding_predicates, nresults=1):
import pymbolic.primitives as prim
rec_cond = self.rec(expr.condition, callables_table=callables_table,
guarding_predicates=guarding_predicates,
nresults=nresults)
return prim.If(rec_cond,
self.rec(expr.then, callables_table=callables_table,
guarding_predicates=(
guarding_predicates
| frozenset([rec_cond])),
nresults=nresults),
self.rec(expr.else_, callables_table=callables_table,
guarding_predicates=(
guarding_predicates
| frozenset([prim.LogicalNot(rec_cond)])),
nresults=nresults))
# @remove_any_newly_unused_inames
def realize_reduction_for_single_kernel(kernel, callables_table,
insn_id_filter=None, unknown_types_ok=True, automagic_scans_ok=False,
force_scan=False, force_outer_iname_for_scan=None):
"""Rewrites reductions into their imperative form. With *insn_id_filter*
specified, operate only on the instruction with an instruction id matching
*insn_id_filter*.
If *insn_id_filter* is given, only the outermost level of reductions will be
expanded, inner reductions will be left alone (because they end up in a new
instruction with a different ID, which doesn't match the filter).
If *insn_id_filter* is not given, all reductions in all instructions will
be realized.
If *automagic_scans_ok*, this function will attempt to rewrite triangular
reductions as scans automatically.
If *force_scan* is *True*, this function will attempt to rewrite *all*
candidate reductions as scans and raise an error if this is not possible
(this is most useful combined with *insn_id_filter*).
If *force_outer_iname_for_scan* is not *None*, this function will attempt
to realize candidate reductions as scans using the specified iname as the
outer (sweep) iname.
"""
logger.debug("%s: realize reduction" % kernel.name)
new_insns = []
new_iname_tags = {}
insn_id_gen = kernel.get_instruction_id_generator()
var_name_gen = kernel.get_var_name_generator()
new_temporary_variables = kernel.temporary_variables.copy()
inames_added_for_scan = set()
inames_to_remove = set()
# {{{ helpers
def _strip_if_scalar(reference, val):
if len(reference) == 1:
return val[0]
else:
return val
def preprocess_scan_arguments(
insn, expr, nresults, scan_iname, track_iname,
newly_generated_insn_id_set):
"""Does iname substitution within scan arguments and returns a set of values
suitable to be passed to the binary op. Returns a tuple."""
if nresults > 1:
inner_expr = expr
# In the case of a multi-argument scan, we need a name for each of
# the arguments in order to pass them to the binary op - so we expand
# items that are not "plain" tuples here.
if not isinstance(inner_expr, tuple):
get_args_insn_id = insn_id_gen(
"{}_{}_get".format(insn.id, "_".join(expr.inames)))
inner_expr = expand_inner_reduction(
id=get_args_insn_id,
expr=inner_expr,
nresults=nresults,
depends_on=insn.depends_on,
within_inames=insn.within_inames | expr.inames,
within_inames_is_final=insn.within_inames_is_final,
predicates=insn.predicates,
)
newly_generated_insn_id_set.add(get_args_insn_id)
updated_inner_exprs = tuple(
replace_var_within_expr(sub_expr, scan_iname, track_iname)
for sub_expr in inner_expr)
else:
updated_inner_exprs = (
replace_var_within_expr(expr, scan_iname, track_iname),)
return updated_inner_exprs
def expand_inner_reduction(id, expr, nresults, depends_on, within_inames,
within_inames_is_final, predicates):
# FIXME: use make_temporaries
from pymbolic.primitives import Call
from loopy.symbolic import Reduction
assert isinstance(expr, (Call, Reduction))
temp_var_names = [
var_name_gen(id + "_arg" + str(i))
for i in range(nresults)]
for name in temp_var_names:
from loopy.kernel.data import TemporaryVariable, AddressSpace
new_temporary_variables[name] = TemporaryVariable(
name=name,
shape=(),
dtype=None,
address_space=AddressSpace.PRIVATE)
from pymbolic import var
temp_vars = tuple(var(n) for n in temp_var_names)
call_insn = make_assignment(
id=id,
assignees=temp_vars,
expression=expr,
depends_on=depends_on,
within_inames=within_inames,
within_inames_is_final=within_inames_is_final,
predicates=predicates)
generated_insns.append(call_insn)
return temp_vars
# }}}
# {{{ sequential
def map_reduction_seq(expr, rec, callables_table, nresults, arg_dtypes,
reduction_dtypes, guarding_predicates):
outer_insn_inames = insn.within_inames
from loopy.kernel.data import AddressSpace
acc_var_names = make_temporaries(
name_based_on="acc_"+"_".join(expr.inames),
nvars=nresults,
shape=(),
dtypes=reduction_dtypes,
address_space=AddressSpace.PRIVATE)
init_insn_depends_on = frozenset()
# check first that the original kernel had global barriers
# if not, we don't need to check. Since the function
# kernel_has_global_barriers is cached, we don't do
# extra work compared to not checking.
        # FIXME: Explain why we care about global barriers here
if kernel_has_global_barriers(kernel):
            global_barrier = lp.find_most_recent_global_barrier(kernel,
                    insn.id)
if global_barrier is not None:
init_insn_depends_on |= frozenset([global_barrier])
from pymbolic import var
acc_vars = tuple(var(n) for n in acc_var_names)
init_id = insn_id_gen(
"{}_{}_init".format(insn.id, "_".join(expr.inames)))
expression, callables_table = expr.operation.neutral_element(
*arg_dtypes, callables_table=callables_table, target=kernel.target)
init_insn = make_assignment(
id=init_id,
assignees=acc_vars,
within_inames=outer_insn_inames - frozenset(expr.inames),
within_inames_is_final=insn.within_inames_is_final,
depends_on=init_insn_depends_on,
expression=expression,
# Do not inherit predicates: Those might read variables
# that may not yet be set, and we don't have a great way
# of figuring out what the dependencies of the accumulator
# initializer should be.
# This way, we may initialize a few too many accumulators,
# but that's better than being incorrect.
# https://github.com/inducer/loopy/issues/231
)
generated_insns.append(init_insn)
update_id = insn_id_gen(
based_on="{}_{}_update".format(insn.id, "_".join(expr.inames)))
update_insn_iname_deps = insn.within_inames | set(expr.inames)
reduction_insn_depends_on = {init_id}
# In the case of a multi-argument reduction, we need a name for each of
# the arguments in order to pass them to the binary op - so we expand
# items that are not "plain" tuples here.
if nresults > 1 and not isinstance(expr.expr, tuple):
get_args_insn_id = insn_id_gen(
"{}_{}_get".format(insn.id, "_".join(expr.inames)))
reduction_expr = expand_inner_reduction(
id=get_args_insn_id,
expr=expr.expr,
nresults=nresults,
depends_on=insn.depends_on,
within_inames=update_insn_iname_deps,
within_inames_is_final=insn.within_inames_is_final,
predicates=guarding_predicates,
)
reduction_insn_depends_on.add(get_args_insn_id)
else:
reduction_expr = expr.expr
expression, callables_table = expr.operation(
arg_dtypes,
_strip_if_scalar(acc_vars, acc_vars),
reduction_expr,
callables_table,
kernel.target)
reduction_insn = make_assignment(
id=update_id,
assignees=acc_vars,
expression=expression,
depends_on=frozenset(reduction_insn_depends_on) | insn.depends_on,
within_inames=update_insn_iname_deps,
within_inames_is_final=insn.within_inames_is_final,
predicates=guarding_predicates,)
generated_insns.append(reduction_insn)
new_insn_add_depends_on.add(reduction_insn.id)
if nresults == 1:
assert len(acc_vars) == 1
return acc_vars[0], callables_table
else:
return acc_vars, callables_table
# }}}
# {{{ local-parallel
def _get_int_iname_size(iname):
from loopy.isl_helpers import static_max_of_pw_aff
from loopy.symbolic import pw_aff_to_expr
size = pw_aff_to_expr(
static_max_of_pw_aff(
kernel.get_iname_bounds(iname).size,
constants_only=True))
assert isinstance(size, int)
return size
def _make_slab_set(iname, size):
v = isl.make_zero_and_vars([iname])
bs, = (
v[0].le_set(v[iname])
&
v[iname].lt_set(v[0] + size)).get_basic_sets()
return bs
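# e.g. _make_slab_set("red_i", 16) yields the basic set
# { [red_i] : 0 <= red_i < 16 }.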
def _make_slab_set_from_range(iname, lbound, ubound):
v = isl.make_zero_and_vars([iname])
bs, = (
v[iname].ge_set(v[0] + lbound)
&
v[iname].lt_set(v[0] + ubound)).get_basic_sets()
return bs
def map_reduction_local(expr, rec, callables_table, nresults, arg_dtypes,
reduction_dtypes, guarding_predicates):
red_iname, = expr.inames
size = _get_int_iname_size(red_iname)
outer_insn_inames = insn.within_inames
from loopy.kernel.data import LocalInameTagBase
outer_local_inames = tuple(oiname for oiname in outer_insn_inames
if kernel.iname_tags_of_type(oiname, LocalInameTagBase))
from pymbolic import var
outer_local_iname_vars = tuple(
var(oiname) for oiname in outer_local_inames)
outer_local_iname_sizes = tuple(
_get_int_iname_size(oiname)
for oiname in outer_local_inames)
from loopy.kernel.data import AddressSpace
neutral_var_names = make_temporaries(
name_based_on="neutral_"+red_iname,
nvars=nresults,
shape=(),
dtypes=reduction_dtypes,
address_space=AddressSpace.PRIVATE)
acc_var_names = make_temporaries(
name_based_on="acc_"+red_iname,
nvars=nresults,
shape=outer_local_iname_sizes + (size,),
dtypes=reduction_dtypes,
address_space=AddressSpace.LOCAL)
acc_vars = tuple(var(n) for n in acc_var_names)
# {{{ add separate iname to carry out the reduction
# Doing this sheds any odd conditionals that may be active
# on our red_iname.
base_exec_iname = var_name_gen("red_"+red_iname)
domains.append(_make_slab_set(base_exec_iname, size))
new_iname_tags[base_exec_iname] = kernel.iname_tags(red_iname)
# }}}
base_iname_deps = outer_insn_inames - frozenset(expr.inames)
neutral, callables_table = expr.operation.neutral_element(*arg_dtypes,
callables_table=callables_table, target=kernel.target)
init_id = insn_id_gen(f"{insn.id}_{red_iname}_init")
init_insn = make_assignment(
id=init_id,
assignees=tuple(
acc_var[outer_local_iname_vars + (var(base_exec_iname),)]
for acc_var in acc_vars),
expression=neutral,
within_inames=base_iname_deps | frozenset([base_exec_iname]),
within_inames_is_final=insn.within_inames_is_final,
depends_on=frozenset(),
# Do not inherit predicates: Those might read variables
# that may not yet be set, and we don't have a great way
# of figuring out what the dependencies of the accumulator
# initializer should be.
# This way, we may initialize a few too many accumulators,
# but that's better than being incorrect.
# https://github.com/inducer/loopy/issues/231
)
generated_insns.append(init_insn)
init_neutral_id = insn_id_gen(f"{insn.id}_{red_iname}_init_neutral")
init_neutral_insn = make_assignment(
id=init_neutral_id,
assignees=tuple(var(nvn) for nvn in neutral_var_names),
expression=neutral,
within_inames=base_iname_deps | frozenset([base_exec_iname]),
within_inames_is_final=insn.within_inames_is_final,
depends_on=frozenset(),
predicates=guarding_predicates,
)
generated_insns.append(init_neutral_insn)
transfer_depends_on = {init_neutral_id, init_id}
# In the case of a multi-argument reduction, we need a name for each of
# the arguments in order to pass them to the binary op - so we expand
# items that are not "plain" tuples here.
if nresults > 1 and not isinstance(expr.expr, tuple):
get_args_insn_id = insn_id_gen(
f"{insn.id}_{red_iname}_get")
reduction_expr = expand_inner_reduction(
id=get_args_insn_id,
expr=expr.expr,
nresults=nresults,
depends_on=insn.depends_on,
within_inames=(
(outer_insn_inames - frozenset(expr.inames))
| frozenset([red_iname])),
within_inames_is_final=insn.within_inames_is_final,
predicates=guarding_predicates,
)
transfer_depends_on.add(get_args_insn_id)
else:
reduction_expr = expr.expr
transfer_id = insn_id_gen(f"{insn.id}_{red_iname}_transfer")
expression, callables_table = expr.operation(
arg_dtypes,
_strip_if_scalar(
neutral_var_names,
tuple(var(nvn) for nvn in neutral_var_names)),
reduction_expr,
callables_table,
kernel.target)
transfer_insn = make_assignment(
id=transfer_id,
assignees=tuple(
acc_var[outer_local_iname_vars + (var(red_iname),)]
for acc_var in acc_vars),
expression=expression,
within_inames=(
(outer_insn_inames - frozenset(expr.inames))
| frozenset([red_iname])),
within_inames_is_final=insn.within_inames_is_final,
depends_on=frozenset([init_id, init_neutral_id]) | insn.depends_on,
no_sync_with=frozenset([(init_id, "any")]),
predicates=insn.predicates,
)
generated_insns.append(transfer_insn)
cur_size = 1
while cur_size < size:
cur_size *= 2
prev_id = transfer_id
bound = size
stage_exec_iname = None
istage = 0
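# Tree reduction sketch (illustration): with size = 7, cur_size is
# first rounded up to 8. Stage 0 folds acc[i + 4] into acc[i] for
# 0 <= i < bound - new_size = 3, stage 1 folds at distance 2, stage 2
# at distance 1; after ceil(log2(size)) stages the result is at
# index 0.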
while cur_size > 1:
new_size = cur_size // 2
assert new_size * 2 == cur_size
stage_exec_iname = var_name_gen("red_%s_s%d" % (red_iname, istage))
domains.append(_make_slab_set(stage_exec_iname, bound-new_size))
new_iname_tags[stage_exec_iname] = kernel.iname_tags(red_iname)
stage_id = insn_id_gen("red_%s_stage_%d" % (red_iname, istage))
expression, callables_table = expr.operation(
arg_dtypes,
_strip_if_scalar(acc_vars, tuple(
acc_var[
outer_local_iname_vars + (var(stage_exec_iname),)]
for acc_var in acc_vars)),
_strip_if_scalar(acc_vars, tuple(
acc_var[
outer_local_iname_vars + (
var(stage_exec_iname) + new_size,)]
for acc_var in acc_vars)),
callables_table,
kernel.target)
stage_insn = make_assignment(
id=stage_id,
assignees=tuple(
acc_var[outer_local_iname_vars + (var(stage_exec_iname),)]
for acc_var in acc_vars),
expression=expression,
within_inames=(
base_iname_deps | frozenset([stage_exec_iname])),
within_inames_is_final=insn.within_inames_is_final,
depends_on=frozenset([prev_id]),
predicates=insn.predicates,
)
generated_insns.append(stage_insn)
prev_id = stage_id
cur_size = new_size
bound = cur_size
istage += 1
new_insn_add_depends_on.add(prev_id)
new_insn_add_no_sync_with.add((prev_id, "any"))
new_insn_add_within_inames.add(stage_exec_iname or base_exec_iname)
if nresults == 1:
assert len(acc_vars) == 1
return acc_vars[0][outer_local_iname_vars + (0,)], callables_table
else:
return [acc_var[outer_local_iname_vars + (0,)] for acc_var in
acc_vars], callables_table
# }}}
# {{{ utils (stateful)
from pytools import memoize
@memoize
def get_or_add_sweep_tracking_iname_and_domain(
scan_iname, sweep_iname, sweep_min_value, scan_min_value, stride,
tracking_iname):
domain = temp_kernel.get_inames_domain(frozenset((scan_iname, sweep_iname)))
inames_added_for_scan.add(tracking_iname)
new_domain = _create_domain_for_sweep_tracking(domain,
tracking_iname, sweep_iname, sweep_min_value, scan_min_value, stride)
_insert_subdomain_into_domain_tree(temp_kernel, domains, new_domain)
return tracking_iname
def replace_var_within_expr(expr, from_var, to_var):
from pymbolic.mapper.substitutor import make_subst_func
from loopy.symbolic import (
SubstitutionRuleMappingContext, RuleAwareSubstitutionMapper)
rule_mapping_context = SubstitutionRuleMappingContext(
temp_kernel.substitutions, var_name_gen)
from pymbolic import var
mapper = RuleAwareSubstitutionMapper(
rule_mapping_context,
make_subst_func({from_var: var(to_var)}),
within=lambda *args: True)
return mapper(expr, temp_kernel, None)
def make_temporaries(name_based_on, nvars, shape, dtypes, address_space):
var_names = [
var_name_gen(name_based_on.format(index=i))
for i in range(nvars)]
from loopy.kernel.data import TemporaryVariable
for name, dtype in zip(var_names, dtypes):
new_temporary_variables[name] = TemporaryVariable(
name=name,
shape=shape,
dtype=dtype,
address_space=address_space)
return var_names
# }}}
# {{{ sequential scan
def map_scan_seq(expr, rec, callables_table, nresults, arg_dtypes,
reduction_dtypes, sweep_iname, scan_iname, sweep_min_value,
scan_min_value, stride, guarding_predicates):
outer_insn_inames = insn.within_inames
inames_to_remove.add(scan_iname)
track_iname = var_name_gen(
"{sweep_iname}__seq_scan"
.format(sweep_iname=sweep_iname))
get_or_add_sweep_tracking_iname_and_domain(
scan_iname, sweep_iname, sweep_min_value, scan_min_value,
stride, track_iname)
from loopy.kernel.data import AddressSpace
acc_var_names = make_temporaries(
name_based_on="acc_" + scan_iname,
nvars=nresults,
shape=(),
dtypes=reduction_dtypes,
address_space=AddressSpace.PRIVATE)
from pymbolic import var
acc_vars = tuple(var(n) for n in acc_var_names)
init_id = insn_id_gen(
"{}_{}_init".format(insn.id, "_".join(expr.inames)))
init_insn_depends_on = frozenset()
# FIXME: Explain why we care about global barriers here
if kernel_has_global_barriers(kernel):
global_barrier = lp.find_most_recent_global_barrier(temp_kernel, insn.id)
if global_barrier is not None:
init_insn_depends_on |= frozenset([global_barrier])
expression, callables_table = expr.operation.neutral_element(
*arg_dtypes, callables_table=callables_table, target=kernel.target)
init_insn = make_assignment(
id=init_id,
assignees=acc_vars,
within_inames=outer_insn_inames - frozenset(
(sweep_iname,) + expr.inames),
within_inames_is_final=insn.within_inames_is_final,
depends_on=init_insn_depends_on,
expression=expression,
# Do not inherit predicates: Those might read variables
# that may not yet be set, and we don't have a great way
# of figuring out what the dependencies of the accumulator
# initializer should be.
# This way, we may initialize a few too many accumulators,
# but that's better than being incorrect.
# https://github.com/inducer/loopy/issues/231
)
generated_insns.append(init_insn)
update_insn_depends_on = {init_insn.id} | insn.depends_on
updated_inner_exprs = (
preprocess_scan_arguments(insn, expr.expr, nresults,
scan_iname, track_iname, update_insn_depends_on))
update_id = insn_id_gen(
based_on="{}_{}_update".format(insn.id, "_".join(expr.inames)))
update_insn_iname_deps = insn.within_inames | {track_iname}
expression, callables_table = expr.operation(
arg_dtypes,
_strip_if_scalar(acc_vars, acc_vars),
_strip_if_scalar(acc_vars, updated_inner_exprs),
callables_table,
kernel.target)
scan_insn = make_assignment(
id=update_id,
assignees=acc_vars,
expression=expression,
depends_on=frozenset(update_insn_depends_on),
within_inames=update_insn_iname_deps,
no_sync_with=insn.no_sync_with,
within_inames_is_final=insn.within_inames_is_final,
predicates=guarding_predicates,
)
generated_insns.append(scan_insn)
new_insn_add_depends_on.add(scan_insn.id)
if nresults == 1:
assert len(acc_vars) == 1
return acc_vars[0], callables_table
else:
return acc_vars, callables_table
# }}}
# {{{ local-parallel scan
def map_scan_local(expr, rec, callables_table, nresults, arg_dtypes,
reduction_dtypes, sweep_iname, scan_iname, sweep_min_value,
scan_min_value, stride, guarding_predicates):
scan_size = _get_int_iname_size(sweep_iname)
assert scan_size > 0
if scan_size == 1:
return map_reduction_seq(expr, rec, callables_table,
nresults, arg_dtypes, reduction_dtypes,
guarding_predicates)
outer_insn_inames = insn.within_inames
from loopy.kernel.data import LocalInameTagBase
outer_local_inames = tuple(oiname for oiname in outer_insn_inames
if kernel.iname_tags_of_type(oiname, LocalInameTagBase)
and oiname != sweep_iname)
from pymbolic import var
outer_local_iname_vars = tuple(
var(oiname) for oiname in outer_local_inames)
outer_local_iname_sizes = tuple(
_get_int_iname_size(oiname)
for oiname in outer_local_inames)
track_iname = var_name_gen(
"{sweep_iname}__pre_scan"
.format(sweep_iname=sweep_iname))
get_or_add_sweep_tracking_iname_and_domain(
scan_iname, sweep_iname, sweep_min_value, scan_min_value, stride,
track_iname)
# {{{ add separate iname to carry out the scan
# Doing this sheds any odd conditionals that may be active
# on our scan_iname.
base_exec_iname = var_name_gen(sweep_iname + "__scan")
domains.append(_make_slab_set(base_exec_iname, scan_size))
new_iname_tags[base_exec_iname] = kernel.iname_tags(sweep_iname)
# }}}
from loopy.kernel.data import AddressSpace
read_var_names = make_temporaries(
name_based_on="read_"+scan_iname+"_arg_{index}",
nvars=nresults,
shape=(),
dtypes=reduction_dtypes,
address_space=AddressSpace.PRIVATE)
acc_var_names = make_temporaries(
name_based_on="acc_"+scan_iname,
nvars=nresults,
shape=outer_local_iname_sizes + (scan_size,),
dtypes=reduction_dtypes,
address_space=AddressSpace.LOCAL)
acc_vars = tuple(var(n) for n in acc_var_names)
read_vars = tuple(var(n) for n in read_var_names)
base_iname_deps = (outer_insn_inames
- frozenset(expr.inames) - frozenset([sweep_iname]))
neutral, callables_table = expr.operation.neutral_element(
*arg_dtypes, callables_table=callables_table, target=kernel.target)
init_insn_depends_on = insn.depends_on
# FIXME: Explain why we care about global barriers here
if kernel_has_global_barriers(kernel):
global_barrier = lp.find_most_recent_global_barrier(temp_kernel, insn.id)
if global_barrier is not None:
init_insn_depends_on |= frozenset([global_barrier])
init_id = insn_id_gen(f"{insn.id}_{scan_iname}_init")
init_insn = make_assignment(
id=init_id,
assignees=tuple(
acc_var[outer_local_iname_vars + (var(base_exec_iname),)]
for acc_var in acc_vars),
expression=neutral,
within_inames=base_iname_deps | frozenset([base_exec_iname]),
within_inames_is_final=insn.within_inames_is_final,
depends_on=init_insn_depends_on,
# Do not inherit predicates: Those might read variables
# that may not yet be set, and we don't have a great way
# of figuring out what the dependencies of the accumulator
# initializer should be.
# This way, we may initialize a few too many accumulators,
# but that's better than being incorrect.
# https://github.com/inducer/loopy/issues/231
)
generated_insns.append(init_insn)
transfer_insn_depends_on = {init_insn.id} | insn.depends_on
updated_inner_exprs = (
preprocess_scan_arguments(insn, expr.expr, nresults,
scan_iname, track_iname, transfer_insn_depends_on))
from loopy.symbolic import Reduction, pw_aff_to_expr
sweep_min_value_expr = pw_aff_to_expr(sweep_min_value)
transfer_id = insn_id_gen(f"{insn.id}_{scan_iname}_transfer")
transfer_insn = make_assignment(
id=transfer_id,
assignees=tuple(
acc_var[outer_local_iname_vars
+ (var(sweep_iname) - sweep_min_value_expr,)]
for acc_var in acc_vars),
expression=Reduction(
operation=expr.operation,
inames=(track_iname,),
expr=_strip_if_scalar(acc_vars, updated_inner_exprs),
allow_simultaneous=False,
),
within_inames=outer_insn_inames - frozenset(expr.inames),
within_inames_is_final=insn.within_inames_is_final,
depends_on=frozenset(transfer_insn_depends_on),
no_sync_with=frozenset([(init_id, "any")]) | insn.no_sync_with,
predicates=insn.predicates,
)
generated_insns.append(transfer_insn)
prev_id = transfer_id
istage = 0
cur_size = 1
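# Hillis-Steele style inclusive scan: at each stage, every element
# with index i >= cur_size combines acc[i - cur_size] into acc[i];
# cur_size doubles per stage, so ceil(log2(scan_size)) stages suffice.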
while cur_size < scan_size:
stage_exec_iname = var_name_gen("%s__scan_s%d" % (sweep_iname, istage))
domains.append(
_make_slab_set_from_range(stage_exec_iname, cur_size, scan_size))
new_iname_tags[stage_exec_iname] = kernel.iname_tags(sweep_iname)
for read_var, acc_var in zip(read_vars, acc_vars):
read_stage_id = insn_id_gen(
"scan_%s_read_stage_%d" % (scan_iname, istage))
read_stage_insn = make_assignment(
id=read_stage_id,
assignees=(read_var,),
expression=(
acc_var[
outer_local_iname_vars
+ (var(stage_exec_iname) - cur_size,)]),
within_inames=(
base_iname_deps | frozenset([stage_exec_iname])),
within_inames_is_final=insn.within_inames_is_final,
depends_on=frozenset([prev_id]),
predicates=insn.predicates,
)
if cur_size == 1:
# Performance hack: don't add a barrier here with transfer_insn.
# NOTE: This won't work if the way that local inames
# are lowered changes.
read_stage_insn = read_stage_insn.copy(
no_sync_with=(
read_stage_insn.no_sync_with
| frozenset([(transfer_id, "any")])))
generated_insns.append(read_stage_insn)
prev_id = read_stage_id
write_stage_id = insn_id_gen(
"scan_%s_write_stage_%d" % (scan_iname, istage))
expression, callables_table = expr.operation(
arg_dtypes,
_strip_if_scalar(acc_vars, read_vars),
_strip_if_scalar(acc_vars, tuple(
acc_var[
outer_local_iname_vars + (var(stage_exec_iname),)]
for acc_var in acc_vars)),
callables_table,
kernel.target)
write_stage_insn = make_assignment(
id=write_stage_id,
assignees=tuple(
acc_var[outer_local_iname_vars + (var(stage_exec_iname),)]
for acc_var in acc_vars),
expression=expression,
within_inames=(
base_iname_deps | frozenset([stage_exec_iname])),
within_inames_is_final=insn.within_inames_is_final,
depends_on=frozenset([prev_id]),
predicates=insn.predicates,
)
generated_insns.append(write_stage_insn)
prev_id = write_stage_id
cur_size *= 2
istage += 1
new_insn_add_depends_on.add(prev_id)
new_insn_add_within_inames.add(sweep_iname)
output_idx = var(sweep_iname) - sweep_min_value_expr
if nresults == 1:
assert len(acc_vars) == 1
return (acc_vars[0][outer_local_iname_vars + (output_idx,)],
callables_table)
else:
return [acc_var[outer_local_iname_vars + (output_idx,)]
for acc_var in acc_vars], callables_table
# }}}
# {{{ seq/par dispatch
def map_reduction(expr, rec, callables_table,
guarding_predicates, nresults=1):
# Only expand one level of reduction at a time, going from outermost to
# innermost. Otherwise we get the (iname + insn) dependencies wrong.
from loopy.type_inference import (
infer_arg_and_reduction_dtypes_for_reduction_expression)
arg_dtypes, reduction_dtypes = (
infer_arg_and_reduction_dtypes_for_reduction_expression(
temp_kernel, expr, callables_table, unknown_types_ok))
outer_insn_inames = insn.within_inames
bad_inames = frozenset(expr.inames) & outer_insn_inames
if bad_inames:
raise LoopyError("reduction used within loop(s) that it was "
"supposed to reduce over: " + ", ".join(bad_inames))
iname_classes = _classify_reduction_inames(temp_kernel, expr.inames)
n_sequential = len(iname_classes.sequential)
n_local_par = len(iname_classes.local_parallel)
n_nonlocal_par = len(iname_classes.nonlocal_parallel)
really_force_scan = force_scan and (
len(expr.inames) != 1 or expr.inames[0] not in inames_added_for_scan)
def _error_if_force_scan_on(cls, msg):
if really_force_scan:
raise cls(msg)
may_be_implemented_as_scan = False
if force_scan or automagic_scans_ok:
from loopy.diagnostic import ReductionIsNotTriangularError
try:
# Try to determine scan candidate information (sweep iname, scan
# iname, etc).
scan_param = _try_infer_scan_candidate_from_expr(
temp_kernel, expr, outer_insn_inames,
sweep_iname=force_outer_iname_for_scan)
except ValueError as v:
error = str(v)
else:
# Ensures the reduction is triangular (somewhat expensive).
may_be_implemented_as_scan, error = (
_check_reduction_is_triangular(
temp_kernel, expr, scan_param))
if not may_be_implemented_as_scan:
_error_if_force_scan_on(ReductionIsNotTriangularError, error)
# {{{ sanity checks
if n_local_par and n_sequential:
raise LoopyError("Reduction over '%s' contains both parallel and "
"sequential inames. It must be split "
"(using split_reduction_{in,out}ward) "
"before code generation."
% ", ".join(expr.inames))
if n_local_par > 1:
raise LoopyError("Reduction over '%s' contains more than"
"one parallel iname. It must be split "
"(using split_reduction_{in,out}ward) "
"before code generation."
% ", ".join(expr.inames))
if n_nonlocal_par:
bad_inames = iname_classes.nonlocal_parallel
raise LoopyError("the only form of parallelism supported "
"by reductions is 'local'--found iname(s) '%s' "
"respectively tagged '%s'"
% (", ".join(bad_inames),
", ".join(str(kernel.iname_tags(iname))
for iname in bad_inames)))
if n_local_par == 0 and n_sequential == 0:
from loopy.diagnostic import warn_with_kernel
warn_with_kernel(kernel, "empty_reduction",
"Empty reduction found (no inames to reduce over). "
"Eliminating.")
# We're not supposed to reduce/sum at all. (Note how this is distinct
# from an empty reduction--there is an element here, just no inames
# to reduce over. It's rather similar to an array with () shape in
# numpy.)
return expr.expr, callables_table
# }}}
if may_be_implemented_as_scan:
assert force_scan or automagic_scans_ok
# We require the "scan" iname to be tagged sequential.
if n_sequential:
sweep_iname = scan_param.sweep_iname
sweep_class = _classify_reduction_inames(kernel, (sweep_iname,))
sequential = sweep_iname in sweep_class.sequential
parallel = sweep_iname in sweep_class.local_parallel
bad_parallel = sweep_iname in sweep_class.nonlocal_parallel
if sweep_iname not in outer_insn_inames:
_error_if_force_scan_on(LoopyError,
"Sweep iname '%s' was detected, but is not an iname "
"for the instruction." % sweep_iname)
elif bad_parallel:
_error_if_force_scan_on(LoopyError,
"Sweep iname '%s' has an unsupported parallel tag '%s' "
"- the only parallelism allowed is 'local'." %
(sweep_iname,
", ".join(tag.key
for tag in temp_kernel.iname_tags(sweep_iname))))
elif parallel:
return map_scan_local(
expr, rec, callables_table, nresults,
arg_dtypes, reduction_dtypes,
sweep_iname, scan_param.scan_iname,
scan_param.sweep_lower_bound,
scan_param.scan_lower_bound,
scan_param.stride,
guarding_predicates)
elif sequential:
return map_scan_seq(
expr, rec, callables_table, nresults,
arg_dtypes, reduction_dtypes, sweep_iname,
scan_param.scan_iname,
scan_param.sweep_lower_bound,
scan_param.scan_lower_bound,
scan_param.stride,
guarding_predicates)
# fallthrough to reduction implementation
else:
assert n_local_par > 0
scan_iname, = expr.inames
_error_if_force_scan_on(LoopyError,
"Scan iname '%s' is parallel tagged: this is not allowed "
"(only the sweep iname should be tagged if parallelism "
"is desired)." % scan_iname)
# fallthrough to reduction implementation
if n_sequential:
assert n_local_par == 0
return map_reduction_seq(expr, rec, callables_table,
nresults, arg_dtypes, reduction_dtypes,
guarding_predicates)
else:
assert n_local_par > 0
return map_reduction_local(
expr, rec, callables_table, nresults, arg_dtypes,
reduction_dtypes, guarding_predicates)
# }}}
cb_mapper = RealizeReductionCallbackMapper(map_reduction, callables_table)
insn_queue = kernel.instructions[:]
insn_id_replacements = {}
domains = kernel.domains[:]
temp_kernel = kernel
changed = False
import loopy as lp
while insn_queue:
new_insn_add_depends_on = set()
new_insn_add_no_sync_with = set()
new_insn_add_within_inames = set()
generated_insns = []
insn = insn_queue.pop(0)
if ((insn_id_filter is not None and insn.id != insn_id_filter)
or not isinstance(insn, lp.MultiAssignmentBase)):
new_insns.append(insn)
continue
nresults = len(insn.assignees)
# Run reduction expansion.
from loopy.symbolic import Reduction
if isinstance(insn.expression, Reduction) and nresults > 1:
new_expressions = cb_mapper(insn.expression,
callables_table=cb_mapper.callables_table,
guarding_predicates=insn.predicates,
nresults=nresults)
else:
new_expressions = cb_mapper(insn.expression,
callables_table=cb_mapper.callables_table,
guarding_predicates=insn.predicates),
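# Note the trailing comma above: it wraps the single mapped
# expression in a 1-tuple, matching the multi-result branch and the
# "new_expr, = new_expressions" unpacking further down.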
if generated_insns:
# An expansion happened, so insert the generated stuff plus
# ourselves back into the queue.
result_assignment_dep_on = \
insn.depends_on | frozenset(new_insn_add_depends_on)
kwargs = insn.get_copy_kwargs(
no_sync_with=insn.no_sync_with
| frozenset(new_insn_add_no_sync_with),
within_inames=(
insn.within_inames
| new_insn_add_within_inames))
kwargs.pop("id")
kwargs.pop("depends_on")
kwargs.pop("expression")
kwargs.pop("assignee", None)
kwargs.pop("assignees", None)
kwargs.pop("temp_var_type", None)
kwargs.pop("temp_var_types", None)
if isinstance(insn.expression, Reduction) and nresults > 1:
result_assignment_ids = [
insn_id_gen(insn.id) for i in range(nresults)]
replacement_insns = [
lp.Assignment(
id=result_assignment_ids[i],
depends_on=(
result_assignment_dep_on
| (frozenset([result_assignment_ids[i-1]])
if i else frozenset())),
assignee=assignee,
expression=new_expr,
**kwargs)
for i, (assignee, new_expr) in enumerate(zip(
insn.assignees, new_expressions))]
insn_id_replacements[insn.id] = [
rinsn.id for rinsn in replacement_insns]
else:
new_expr, = new_expressions
# since we are replacing the instruction with
# only one instruction, there's no need to replace id
replacement_insns = [
make_assignment(
id=insn.id,
depends_on=result_assignment_dep_on,
assignees=insn.assignees,
expression=new_expr,
**kwargs)
]
insn_queue = generated_insns + replacement_insns + insn_queue
# The reduction expander needs an up-to-date kernel
# object to find dependencies. Keep temp_kernel up-to-date.
temp_kernel = kernel.copy(
instructions=new_insns + insn_queue,
temporary_variables=new_temporary_variables,
domains=domains)
temp_kernel = lp.replace_instruction_ids(
temp_kernel, insn_id_replacements)
changed = True
else:
# nothing happened, we're done with insn
assert not new_insn_add_depends_on
new_insns.append(insn)
if changed:
kernel = kernel.copy(
instructions=new_insns,
temporary_variables=new_temporary_variables,
domains=domains)
kernel = lp.replace_instruction_ids(kernel, insn_id_replacements)
from loopy.transform.iname import tag_inames
kernel = tag_inames(kernel, new_iname_tags)
kernel = (
_hackily_ensure_multi_assignment_return_values_are_scoped_private(
kernel))
return kernel, cb_mapper.callables_table
def realize_reduction(program, *args, **kwargs):
assert isinstance(program, TranslationUnit)
callables_table = dict(program.callables_table)
kernels_to_scan = [in_knl_callable.subkernel
for in_knl_callable in program.callables_table.values()
if isinstance(in_knl_callable, CallableKernel)]
for knl in kernels_to_scan:
new_knl, callables_table = realize_reduction_for_single_kernel(
knl, callables_table, *args, **kwargs)
in_knl_callable = callables_table[knl.name].copy(
subkernel=new_knl)
callables_table[knl.name] = in_knl_callable
return program.copy(callables_table=callables_table)
# }}}
# {{{ realize_ilp
def realize_ilp(kernel):
logger.debug("%s: add axes to temporaries for ilp" % kernel.name)
from loopy.kernel.data import (IlpBaseTag, VectorizeTag,
filter_iname_tags_by_type)
privatizing_inames = frozenset(
name for name, iname in kernel.inames.items()
if filter_iname_tags_by_type(iname.tags, (IlpBaseTag, VectorizeTag))
)
if not privatizing_inames:
return kernel
from loopy.transform.privatize import privatize_temporaries_with_inames
return privatize_temporaries_with_inames(kernel, privatizing_inames)
# }}}
# {{{ check for loads of atomic variables
def check_atomic_loads(kernel):
"""Find instances of AtomicInit or AtomicUpdate with use of other atomic
variables to update the atomicity
"""
logger.debug("%s: check atomic loads" % kernel.name)
from loopy.types import AtomicType
from loopy.kernel.array import ArrayBase
from loopy.kernel.instruction import Assignment, AtomicLoad
# find atomic variables
atomicity_candidates = (
{v.name for v in kernel.temporary_variables.values()
if isinstance(v.dtype, AtomicType)}
|
{v.name for v in kernel.args
if isinstance(v, ArrayBase)
and isinstance(v.dtype, AtomicType)})
new_insns = []
for insn in kernel.instructions:
if isinstance(insn, Assignment):
# look for atomic variables
atomic_accesses = {a.var_name for a in insn.atomicity}
accessed_atomic_vars = (insn.dependency_names() & atomicity_candidates)\
- {insn.assignee_var_names()[0]}
if not accessed_atomic_vars <= atomic_accesses:
# if we're missing some
missed = accessed_atomic_vars - atomic_accesses
for x in missed:
if {x} & atomicity_candidates:
insn = insn.copy(
atomicity=insn.atomicity + (AtomicLoad(x),))
new_insns.append(insn)
return kernel.copy(instructions=new_insns)
# }}}
# {{{ arg_descr_inference
class ArgDescrInferenceMapper(RuleAwareIdentityMapper):
"""
Infers :attr:`~loopy.kernel.function_interface.arg_id_to_descr` of
callables visited in an expression.
"""
def __init__(self, rule_mapping_context, caller_kernel, clbl_inf_ctx):
super().__init__(rule_mapping_context)
self.caller_kernel = caller_kernel
self.clbl_inf_ctx = clbl_inf_ctx
def map_call(self, expr, expn_state, assignees=None):
from pymbolic.primitives import Call, Variable
from loopy.kernel.function_interface import ValueArgDescriptor
from loopy.symbolic import ResolvedFunction
from loopy.kernel.array import ArrayBase
from loopy.kernel.data import ValueArg
from pymbolic.mapper.substitutor import make_subst_func
from loopy.symbolic import SubstitutionMapper
from loopy.kernel.function_interface import get_arg_descriptor_for_expression
if not isinstance(expr.function, ResolvedFunction):
# ignore if the call is not to a ResolvedFunction
return super().map_call(expr, expn_state)
arg_id_to_arg = dict(enumerate(expr.parameters))
if assignees is not None:
# If supplied with assignees then this is a CallInstruction
for i, arg in enumerate(assignees):
arg_id_to_arg[-i-1] = arg
arg_id_to_descr = {
arg_id: get_arg_descriptor_for_expression(self.caller_kernel, arg)
for arg_id, arg in arg_id_to_arg.items()}
clbl = self.clbl_inf_ctx[expr.function.name]
# {{{ translating descriptor expressions to the callable's namespace
deps_as_params = []
subst_map = {}
deps = frozenset().union(*(descr.depends_on()
for descr in arg_id_to_descr.values()))
assert deps <= self.caller_kernel.all_variable_names()
for dep in deps:
caller_arg = self.caller_kernel.arg_dict.get(dep, (self.caller_kernel
.temporary_variables
.get(dep)))
if not (isinstance(caller_arg, ValueArg)
or (isinstance(caller_arg, ArrayBase)
and caller_arg.shape == ())):
raise NotImplementedError(f"Obtained '{dep}' as a dependency for"
f" call '{expr.function.name}' which is not a scalar.")
clbl, callee_name = clbl.with_added_arg(caller_arg.dtype,
ValueArgDescriptor())
subst_map[dep] = Variable(callee_name)
deps_as_params.append(Variable(dep))
mapper = SubstitutionMapper(make_subst_func(subst_map))
arg_id_to_descr = {id_: descr.map_expr(mapper)
for id_, descr in arg_id_to_descr.items()}
# }}}
# specializing the function according to the parameter description
new_clbl, self.clbl_inf_ctx = clbl.with_descrs(arg_id_to_descr,
self.clbl_inf_ctx)
self.clbl_inf_ctx, new_func_id = (self.clbl_inf_ctx
.with_callable(expr.function.function,
new_clbl))
return Call(ResolvedFunction(new_func_id),
tuple(self.rec(child, expn_state)
for child in expr.parameters)
+ tuple(deps_as_params))
def map_call_with_kwargs(self, expr):
# See https://github.com/inducer/loopy/pull/323
raise NotImplementedError
def __call__(self, expr, kernel, insn, assignees=None):
from loopy.kernel.data import InstructionBase
from loopy.symbolic import IdentityMapper, ExpansionState
assert insn is None or isinstance(insn, InstructionBase)
return IdentityMapper.__call__(self, expr,
ExpansionState(
kernel=kernel,
instruction=insn,
stack=(),
arg_context={}), assignees=assignees)
def map_kernel(self, kernel):
new_insns = []
for insn in kernel.instructions:
if isinstance(insn, CallInstruction):
# In call instructions the assignees play an important role in
# determining the arg_id_to_descr
mapper = partial(self, kernel=kernel, insn=insn,
assignees=insn.assignees)
new_insns.append(insn.with_transformed_expressions(mapper))
elif isinstance(insn, MultiAssignmentBase):
mapper = partial(self, kernel=kernel, insn=insn)
new_insns.append(insn.with_transformed_expressions(mapper))
elif isinstance(insn, (_DataObliviousInstruction, CInstruction)):
new_insns.append(insn)
else:
raise NotImplementedError("arg_descr_inference for %s instruction" %
type(insn))
return kernel.copy(instructions=new_insns)
def traverse_to_infer_arg_descr(kernel, callables_table):
"""
Returns a copy of *kernel* with the argument shapes and strides matching for
resolved functions in the *kernel*. Refer
:meth:`loopy.kernel.function_interface.InKernelCallable.with_descrs`.
.. note::
Initiates a walk starting from *kernel* to all its callee kernels.
"""
from loopy.symbolic import SubstitutionRuleMappingContext
rule_mapping_context = SubstitutionRuleMappingContext(
kernel.substitutions, kernel.get_var_name_generator())
arg_descr_inf_mapper = ArgDescrInferenceMapper(rule_mapping_context,
kernel, callables_table)
descr_inferred_kernel = rule_mapping_context.finish_kernel(
arg_descr_inf_mapper.map_kernel(kernel))
return descr_inferred_kernel, arg_descr_inf_mapper.clbl_inf_ctx
def infer_arg_descr(program):
"""
Returns a copy of *program* with the
:attr:`loopy.InKernelCallable.arg_id_to_descr` inferred for all the
callables.
"""
from loopy.translation_unit import make_clbl_inf_ctx, resolve_callables
from loopy.kernel.array import ArrayBase
from loopy.kernel.function_interface import (ArrayArgDescriptor,
ValueArgDescriptor)
from loopy import auto, ValueArg
program = resolve_callables(program)
clbl_inf_ctx = make_clbl_inf_ctx(program.callables_table,
program.entrypoints)
for e in program.entrypoints:
def _tuple_or_None(s):
if isinstance(s, tuple):
return s
elif s in [None, auto]:
return s
else:
return s,
arg_id_to_descr = {}
for arg in program[e].args:
if isinstance(arg, ArrayBase):
if arg.shape not in (None, auto):
arg_id_to_descr[arg.name] = ArrayArgDescriptor(
_tuple_or_None(arg.shape), arg.address_space,
arg.dim_tags)
elif isinstance(arg, ValueArg):
arg_id_to_descr[arg.name] = ValueArgDescriptor()
else:
raise NotImplementedError()
new_callable, clbl_inf_ctx = program.callables_table[e].with_descrs(
arg_id_to_descr, clbl_inf_ctx)
clbl_inf_ctx, new_name = clbl_inf_ctx.with_callable(e, new_callable,
is_entrypoint=True)
return clbl_inf_ctx.finish_program(program)
# }}}
# {{{ inline_kernels_with_gbarriers
def inline_kernels_with_gbarriers(program):
from loopy.kernel.instruction import BarrierInstruction
from loopy.transform.callable import inline_callable_kernel
from loopy.kernel.tools import get_call_graph
from pytools.graph import compute_topological_order
def has_gbarrier(knl):
return any((isinstance(insn, BarrierInstruction)
and insn.synchronization_kind == "global")
for insn in knl.instructions)
call_graph = get_call_graph(program, only_kernel_callables=True)
# Traverse the kernel calls in reverse topological order so that
# callees with global barriers are inlined before their callers and
# the barriers correctly propagate up to the entrypoints.
toposort = compute_topological_order(call_graph,
# pass key to have deterministic codegen
key=lambda x: x
)
for name in toposort[::-1]:
if has_gbarrier(program[name]):
program = inline_callable_kernel(program, name)
return program
# }}}
def filter_reachable_callables(t_unit):
from loopy.translation_unit import get_reachable_resolved_callable_ids
reachable_function_ids = get_reachable_resolved_callable_ids(t_unit
.callables_table,
t_unit.entrypoints)
new_callables = {name: clbl for name, clbl in t_unit.callables_table.items()
if name in (reachable_function_ids | t_unit.entrypoints)}
return t_unit.copy(callables_table=new_callables)
preprocess_cache = WriteOncePersistentDict(
"loopy-preprocess-cache-v2-"+DATA_MODEL_VERSION,
key_builder=LoopyKeyBuilder())
def _preprocess_single_kernel(kernel, callables_table, device=None):
from loopy.kernel import KernelState
prepro_logger = ProcessLogger(logger, "%s: preprocess" % kernel.name)
from loopy.check import check_identifiers_in_subst_rules
check_identifiers_in_subst_rules(kernel)
# {{{ check that there are no l.auto-tagged inames
from loopy.kernel.data import AutoLocalInameTagBase
for name, iname in kernel.inames.items():
if (filter_iname_tags_by_type(iname.tags, AutoLocalInameTagBase)
and name in kernel.all_inames()):
raise LoopyError("kernel with automatically-assigned "
"local axes passed to preprocessing")
# }}}
# Ordering restriction:
# Type inference and reduction iname uniqueness don't handle substitutions.
# Get them out of the way.
check_for_writes_to_predicates(kernel)
check_reduction_iname_uniqueness(kernel)
# Ordering restriction:
# add_axes_to_temporaries_for_ilp because reduction accumulators
# need to be duplicated by this.
kernel = realize_ilp(kernel)
kernel = find_temporary_address_space(kernel)
# check for atomic loads, much easier to do here now that the dependencies
# have been established
kernel = check_atomic_loads(kernel)
kernel = kernel.target.preprocess(kernel)
kernel = kernel.copy(
state=KernelState.PREPROCESSED)
prepro_logger.done()
return kernel
def preprocess_program(program, device=None):
# {{{ cache retrieval
from loopy import CACHING_ENABLED
if CACHING_ENABLED:
input_program = program
try:
result = preprocess_cache[program]
logger.debug(f"program with entrypoints: {program.entrypoints}"
" preprocess cache hit")
return result
except KeyError:
pass
# }}}
from loopy.kernel import KernelState
if program.state >= KernelState.PREPROCESSED:
return program
if len([clbl for clbl in program.callables_table.values() if
isinstance(clbl, CallableKernel)]) == 1:
program = program.with_entrypoints(",".join(clbl.name for clbl in
program.callables_table.values() if isinstance(clbl,
CallableKernel)))
if not program.entrypoints:
raise LoopyError("Translation unit did not receive any entrypoints")
from loopy.translation_unit import resolve_callables
program = resolve_callables(program)
program = filter_reachable_callables(program)
if device is not None:
# FIXME: Time to remove this? (Git blame shows 5 years ago)
from warnings import warn
warn("passing 'device' to preprocess_kernel() is deprecated",
DeprecationWarning, stacklevel=2)
program = infer_unknown_types(program, expect_completion=False)
from loopy.transform.subst import expand_subst
program = expand_subst(program)
from loopy.kernel.creation import apply_single_writer_depencency_heuristic
program = apply_single_writer_depencency_heuristic(program)
# Ordering restrictions:
#
# - realize_reduction must happen after type inference because it needs
# to be able to determine the types of the reduced expressions.
#
# - realize_reduction must happen after default dependencies are added
# because it manipulates the depends_on field, which could prevent
# defaults from being applied.
program = realize_reduction(program, unknown_types_ok=False)
# {{{ preprocess callable kernels
# Callable editing restrictions:
#
# - should not edit callables_table in :meth:`preprocess_single_kernel`
# as we are iterating over it.[1]
#
# [1] https://docs.python.org/3/library/stdtypes.html#dictionary-view-objects
new_callables = {}
for func_id, in_knl_callable in program.callables_table.items():
if isinstance(in_knl_callable, CallableKernel):
new_subkernel = _preprocess_single_kernel(
in_knl_callable.subkernel, program.callables_table,
device)
in_knl_callable = in_knl_callable.copy(
subkernel=new_subkernel)
elif isinstance(in_knl_callable, ScalarCallable):
pass
else:
raise NotImplementedError("Unknown callable type %s." % (
type(in_knl_callable).__name__))
new_callables[func_id] = in_knl_callable
program = program.copy(callables_table=new_callables)
# }}}
# infer arg descrs of the callables
program = infer_arg_descr(program)
# Ordering restriction:
# callees with gbarrier in them must be inlined after inferring arg_descr.
program = inline_kernels_with_gbarriers(program)
# {{{ prepare for caching
# PicklableDtype instances for example need to know the target they're working
# towards in order to pickle and unpickle them. This is the first pass that
# uses caching, so we need to be ready to pickle. This means propagating
# this target information.
# }}}
if CACHING_ENABLED:
preprocess_cache.store_if_not_present(input_program, program)
return program
# FIXME: Do we add a deprecation warning?
preprocess_kernel = preprocess_program
# vim: foldmethod=marker
|
StarcoderdataPython
|
4955080
|
"""
Test ChatterBot's statement comparison algorithms.
"""
from unittest import TestCase
from app.chatterbot.conversation import Statement
from app.chatterbot import comparisons
from app.chatterbot import languages, tagging
# set language
LANGUAGE = languages.CHI
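# Quick reference for how the comparators are exercised below (a
# sketch; it assumes app.chatterbot mirrors the upstream ChatterBot
# API, where comparator instances are callable):
#
# compare = comparisons.LevenshteinDistance(language=LANGUAGE)
# compare(Statement(text='你好'), Statement(text='你好'))  # -> 1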
class LevenshteinDistanceTestCase(TestCase):
def setUp(self):
self.compare = comparisons.LevenshteinDistance(language=LANGUAGE)
def test_false(self):
"""
Falsy values should match by zero.
"""
# Test first statement is empty
statement = Statement(text='')
other_statement = Statement(text='你好')
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
# Test latter statement is empty
statement = Statement(text='你好')
other_statement = Statement(text='')
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
# Test that an exception is not raised
# if a statement is initialized with an integer value as its text attribute.
statement = Statement(text=2)
other_statement = Statement(text='你好')
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
def test_true(self):
# Test that identical statements match exactly.
statement = Statement(text='你好今天怎么样')
other_statement = Statement(text='你好今天怎么样')
value = self.compare(statement, other_statement)
self.assertEqual(value, 1)
class SpacySimilarityTests(TestCase):
def setUp(self):
self.compare = comparisons.LevenshteinDistance(language=LANGUAGE)
def test_false(self):
"""
Falsy values should match by zero.
"""
# Test first statement is empty
statement = Statement(text='')
other_statement = Statement(text='你好')
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
# Test latter statement is empty
statement = Statement(text='你好')
other_statement = Statement(text='')
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
# Test that an exception is not raised
# if a statement is initialized with an integer value as its text attribute.
statement = Statement(text=2)
other_statement = Statement(text='你好')
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
def test_true(self):
# Test sentences with different stopwords.
statement = Statement(text='今天天气怎么样')
other_statement = Statement(text='今天天气怎么样啊')
value = self.compare(statement, other_statement)
self.assertAlmostEqual(value, 0.9, places=1)
# Test that identical statements match exactly.
statement = Statement(text='你好')
other_statement = Statement(text='你好')
value = self.compare(statement, other_statement)
self.assertAlmostEqual(value, 1, places=1)
class JaccardSimilarityTestCase(TestCase):
def setUp(self):
self.compare = comparisons.LevenshteinDistance(language=LANGUAGE)
self.tagger = tagging.PosLemmaTagger(language=LANGUAGE)
def test_false(self):
"""
Falsy values should match by zero.
"""
# Test first statement is empty
statement = Statement(text='', search_text=self.tagger.get_text_index_string(''))
other_statement = Statement(text='你好', search_text=self.tagger.get_text_index_string('你好'))
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
# Test latter statement is empty
statement = Statement(text='你好', search_text=self.tagger.get_text_index_string('你好'))
other_statement = Statement(text='', search_text=self.tagger.get_text_index_string(''))
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
# Test that an exception is not raised
# if a statement is initialized with an integer value as its text attribute.
statement = Statement(text=2)
other_statement = Statement(text='你好')
value = self.compare(statement, other_statement)
self.assertEqual(value, 0)
def test_true(self):
# Test that identical statements match exactly.
text = '你好'
statement = Statement(text=text, search_text=self.tagger.get_text_index_string(text))
other_text = '你好'
other_statement = Statement(text=other_text, search_text=self.tagger.get_text_index_string(other_text))
value = self.compare(statement, other_statement)
self.assertEqual(value, 1)
|
StarcoderdataPython
|
3579947
|
"""Map views with routes."""
from .default import (
home_view,
welcome_view,
sundays_view,
youth_kids_view,
go_deeper_view,
bible_studies_view,
life_groups_view,
military_view,
bobs_view,
worship_view,
hebrews_view,
message_view,
children_view,
values_view,
contact_view,
mission_view,
staff_view,
council_view,
beliefs_view,
im_new_view,
first_impressions_view,
foursquare_view,
giving_view,
events_view,
youth_events_view,
foodbank_view,
connect_view,
create_view,
update_view,
delete_view,
api_view,
means_view,
search_view,
volunteer_view,
)
def includeme(config):
"""List of views to include for the configurator object."""
config.add_view(home_view, route_name='home')
config.add_view(welcome_view, route_name='welcome')
config.add_view(sundays_view, route_name='sundays')
config.add_view(youth_kids_view, route_name='youth_kids')
config.add_view(go_deeper_view, route_name='go_deeper')
config.add_view(bible_studies_view, route_name='bible_studies')
config.add_view(life_groups_view, route_name='life_groups')
config.add_view(military_view, route_name='military')
config.add_view(bobs_view, route_name='bobs')
config.add_view(worship_view, route_name='worship')
config.add_view(hebrews_view, route_name='hebrews')
config.add_view(message_view, route_name='message')
config.add_view(children_view, route_name='children')
config.add_view(values_view, route_name='values')
config.add_view(contact_view, route_name='contact')
config.add_view(mission_view, route_name='mission')
config.add_view(staff_view, route_name='staff')
config.add_view(council_view, route_name='council')
config.add_view(beliefs_view, route_name='beliefs')
config.add_view(im_new_view, route_name='im_new')
config.add_view(first_impressions_view, route_name='first_impressions')
config.add_view(foursquare_view, route_name='foursquare')
config.add_view(giving_view, route_name='giving')
config.add_view(events_view, route_name='events')
config.add_view(youth_events_view, route_name='youth_events')
config.add_view(foodbank_view, route_name='foodbank')
config.add_view(connect_view, route_name='connect')
config.add_view(create_view, route_name='new')
config.add_view(update_view, route_name='edit')
config.add_view(delete_view, route_name='delete')
config.add_view(api_view, route_name='api')
config.add_view(means_view, route_name='means')
config.add_view(search_view, route_name='search')
config.add_view(volunteer_view, route_name='volunteer')
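# Each route_name used above is assumed to be registered elsewhere via
# config.add_route(...); a hypothetical example:
#
# config.add_route('home', '/')
# config.add_route('welcome', '/welcome')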
|
StarcoderdataPython
|
9671568
|
# coding:UTF-8
QINIU_HOST = ""
QINIU_KEY = ""
QINIU_TOKEN = ""
QINIU_BUCKET = ""
CACHE_PREFIX = "glue"
|
StarcoderdataPython
|
1999053
|
# src/config/device-manager/device_manager/plugins/ansible/job_handler.py
#
# Copyright (c) 2018 Juniper Networks, Inc. All rights reserved.
#
"""
This file implements the job API handler.
"""
import gevent
import json
import random
from enum import Enum
from vnc_api.vnc_api import VncApi
class JobStatus(Enum):
INIT = 0
IN_PROGRESS = 1
COMPLETE = 2
FAILED = 3
# end class JobStatus
class JobHandler(object):
JOB_STATUS_MAPPING = {
'SUCCESS': JobStatus.COMPLETE,
'FAILURE': JobStatus.FAILED,
'UNKNOWN': JobStatus.FAILED
}
def __init__(self, job_type, job_input, device_list, api_server_config,
logger):
self._job_type = job_type
self._job_input = job_input
self._device_list = device_list
self._api_server_config = api_server_config
self._logger = logger
self._job_id = None
self._job_status = JobStatus.INIT
super(JobHandler, self).__init__()
# end __init__
def push(self, timeout, max_retries):
vnc_api = self._get_vnc_api(**self._api_server_config)
self._job_status = JobStatus.IN_PROGRESS
job_execution_id = None
try:
self._logger.debug("job handler: executing job for (%s, %s)" %
(self._device_list, str(self._job_type)))
job_execution_info = vnc_api.execute_job(
job_template_fq_name=self._job_type,
job_input=self._job_input,
device_list=self._device_list
)
job_execution_id = job_execution_info.get('job_execution_id')
self._logger.debug("job started with execution id %s" %
job_execution_id)
self._wait(vnc_api, job_execution_id, timeout, max_retries)
except Exception as e:
self._logger.error("job handler: push failed for (%s, %s)"
" execution id %s: %s" % (self._device_list,
str(self._job_type), job_execution_id, repr(e)))
self._job_status = JobStatus.FAILED
if self._job_status == JobStatus.FAILED:
raise Exception("job handler: push failed for (%s, %s)"
" execution id %s" % (self._device_list,
str(self._job_type), job_execution_id))
self._logger.debug("job handler: push succeeded for (%s, %s)"
" execution id %s" % (self._device_list,
str(self._job_type), job_execution_id))
# end push
def _check_job_status(self, vnc_api, job_execution_id, status):
try:
job_status = vnc_api.job_status(job_execution_id)
return self._verify_job_status(job_status, status)
except Exception as e:
self._logger.error("job handler: error while querying "
"job status for execution_id %s: %s" %
(job_execution_id, repr(e)))
return False
# end _check_job_status
def _get_job_status(self, vnc_api, job_execution_id):
if self._check_job_status(vnc_api, job_execution_id,
JobStatus.COMPLETE):
return JobStatus.COMPLETE
if self._check_job_status(vnc_api, job_execution_id,
JobStatus.FAILED):
return JobStatus.FAILED
return JobStatus.IN_PROGRESS
# end _get_job_status
def _wait(self, vnc_api, job_execution_id, timeout, max_retries):
retry_count = 1
while not self.is_job_done():
self._job_status = self._get_job_status(vnc_api, job_execution_id)
if not self.is_job_done():
if retry_count >= max_retries:
self._logger.error(
"job handler: timed out waiting for job %s for device"
" %s and job_type %s:" %
(job_execution_id, self._device_list,
str(self._job_type)))
self._job_status = JobStatus.FAILED
else:
retry_count += 1
gevent.sleep(timeout)
# end _wait
def get_job_status(self):
return self._job_status
# end get_job_status
def is_job_done(self):
if self._job_status == JobStatus.COMPLETE or \
self._job_status == JobStatus.FAILED:
return True
return False
# end is_job_done
@staticmethod
def _get_vnc_api(ips, port, username, password, tenant, use_ssl):
return VncApi(api_server_host=random.choice(ips),
api_server_port=port, username=username,
password=password, tenant_name=tenant,
api_server_use_ssl=use_ssl)
# end _get_vnc_api
@classmethod
def _verify_job_status(cls, job_status, status):
return job_status and \
cls.JOB_STATUS_MAPPING.get(job_status.get('job_status')) == \
status
# end _verify_job_status
# end class JobHandler
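# Example usage (a hypothetical sketch; the template name, job input
# and API server configuration values below are assumptions):
#
# api_server_config = {
#     'ips': ['10.0.0.1'], 'port': '8082',
#     'username': 'admin', 'password': 'secret',
#     'tenant': 'default', 'use_ssl': False,
# }
# handler = JobHandler(
#     job_type=['default-global-system-config', 'fabric_onboard_template'],
#     job_input={'fabric_fq_name': ['default-fabric']},
#     device_list=['device-uuid-1'],
#     api_server_config=api_server_config,
#     logger=logger)
# handler.push(timeout=5, max_retries=60)
# assert handler.get_job_status() == JobStatus.COMPLETE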
|
StarcoderdataPython
|
3369577
|
__all__ = ['ImageAutoEncoders', 'ExprAutoEncoders', 'JointLatentGenerator', 'Baseline', 'Translators']
from .ImageAutoEncoders import AAEImg, ImgVAE
from .Translators import DomainTranslator
from .ExprAutoEncoders import (AE, VAE, AAE, SupervisedAAE,
ClassDiscriminator, ClassDiscriminatorBig, Discriminator, DoubleDiscriminator)
from .Baseline import (NucleiImgVAE, FC_VAE, FC_SAAE,
Simple_Classifier, Adversarial_Classifier, Adversarial_ClassifierA549, FC_Classifier)
from .JointLatentGenerator import JointLatentGenerator
|
StarcoderdataPython
|
11307082
|
"""Support running bcbio-nextgen inside of isolated docker containers.
"""
|
StarcoderdataPython
|
9747251
|
from . import command_line
exit(command_line())
|
StarcoderdataPython
|
6429919
|
# Repository: gruber-sciencelab/SMEAGOL
import os
import numpy as np
import pandas as pd
import pytest
from smeagol.matrices import *
from smeagol.utils import _equals
script_dir = os.path.dirname(__file__)
rel_path = "data"
data_path = os.path.join(script_dir, rel_path)
def test_check_ppm():
probs = np.array([[0, 0, 0, 1], [.5, .5, 0, 0], [-.1, .6, .1, .2]])
with pytest.raises(ValueError):
check_ppm(probs)
probs = np.array([[0, 0, 0, 1], [.5, 1.5, 0, 0], [.1, 2.6, .1, .2]])
with pytest.raises(ValueError):
check_ppm(probs)
probs = np.array([[0, .5, .1], [0, .5, .6], [0, 0, .1], [1, 0, .2]])
with pytest.raises(ValueError):
check_ppm(probs)
probs = np.array([[0, 0, 0, 1], [.5, .5, 0, 0], [.1, .6, .1, .2]])
check_ppm(probs)
def test_check_pfm():
freqs = np.array([[0, 0, 0, 10], [.5, .5, 0, 0], [1.1, .6, .2, 1]])
with pytest.raises(ValueError):
check_pfm(freqs)
freqs = np.array([[0, 0, 0, 10], [5, 5, 0, 0], [1, -6, 2, 1]])
with pytest.raises(ValueError):
check_pfm(freqs)
probs = np.array([[0, 5, 1], [0, 5, 6], [0, 0, 1], [10, 0, 1]])
with pytest.raises(ValueError):
check_ppm(probs)
def test_check_pwm():
weights = np.array([[-6.65821148, -6.65821148, -6.65821148, 1.98924694],
[ 0.99284021, 0.99284021, -6.65821148, -6.65821148],
[-1.30065948, 1.25467785, -0.31836148, -1.30065948]])
check_pwm(weights)
with pytest.raises(ValueError):
check_pwm(np.transpose(weights))
def test_normalize_pm():
probs = np.array([[0.1, 0.1, 0.1, 0.7], [.5, .48, 0.01, 0.01], [.1, .6, .1, .2]])
assert np.all(normalize_pm(probs) == probs)
probs = np.array([[0.1, 0.1, 0.1, 0.8], [.5, .48, 0.01, 0.01], [.1, .6, .1, .2]])
expected = np.array([[0.09090909, 0.09090909, 0.09090909, 0.72727273],
[.5, .48, 0.01, 0.01],
[.1, .6, .1, .2]])
assert _equals(normalize_pm(probs), expected)
def test_entropy():
probs = np.array([[0.1, 0.1, 0.1, 0.7], [.5, .48, 0.01, 0.01], [.1, .6, .1, .2]])
assert _equals(entropy(probs[0]), 1.3567796494470394)
assert _equals(entropy(probs), 4.068876338442915)
def test_position_wise_ic():
probs = np.array([[0.1, 0.1, 0.1, 0.7], [.5, .48, 0.01, 0.01], [.1, .6, .1, .2]])
expected = np.array([0.6432203505529606, 0.8588539054587927, 0.42904940554533133])
assert _equals(position_wise_ic(probs), expected)
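# For a 4-letter alphabet the per-position information content is
# 2 - H(p); e.g. 2 - 1.3567796 ≈ 0.6432204 for the first row above.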
def test_matrix_conversions():
freqs = np.array([[0, 0, 0, 10], [5, 5, 0, 0], [1, 6, 2, 1]])
probs = pfm_to_ppm(freqs, pseudocount = 1)
expected = np.array([[.25/11, .25/11, .25/11, 10.25/11],
[5.25/11, 5.25/11, .25/11, .25/11],
[1.25/11, 6.25/11, 2.25/11, 1.25/11]])
assert _equals(probs, expected)
weights = ppm_to_pwm(probs)
expected = np.array([[-3.45943162, -3.45943162, -3.45943162, 1.89812039],
[ 0.9328858 , 0.9328858 , -3.45943162, -3.45943162],
[-1.13750352, 1.18442457, -0.28950662, -1.13750352]])
assert _equals(weights, expected)
assert _equals(pwm_to_ppm(weights), probs)
def test_trim_ppm():
probs = np.array([[0.1, 0.1, 0.1, 0.7],
[.5, .48, 0.01, 0.01],
[.25, .25, .3, .2],
[0.1, 0.1, 0.1, 0.7],
[.25, .25, .3, .2]])
result = trim_ppm(probs, frac_threshold = 0.05)
expected = np.array([[0.1, 0.1, 0.1, 0.7],
[.5, .48, 0.01, 0.01],
[.25, .25, .3, .2],
[0.1, 0.1, 0.1, 0.7]])
assert _equals(result, expected)
def test_matrix_correlation():
X = np.array([[-1.32192809, -1.32192809, -1.32192809, 1.48542683],
[ 1. , 0.94110631, -4.64385619, -4.64385619],
[-1.32192809, 1.26303441, -1.32192809, -0.32192809]])
Y = np.array([[-1.32192809, -1.32192809, -1.32192809, 1.48542683],
[ 1. , 0.94110631, -4.64385619, -4.64385619],
[-3.64385619, 1.84799691, -3.05889369, -2.32192809]])
result = matrix_correlation(X, Y)
assert _equals(result, 0.91069616)
Y = np.array([[-1.32192809, -1.32192809, -1.32192809, 1.48542683],
[ 1. , 0.94110631, -4.64385619, -4.64385619]])
with pytest.raises(ValueError):
matrix_correlation(X, Y)
def test_ncorr():
X = np.array([[-1.30065948, -6.65821148, 1.668218 , -1.30065948],
[ 0.99284021, 0.99284021, -6.65821148, -6.65821148],
[ 0.26065175, 0.6727054 , -1.30065948, -0.31836148]])
Y = np.array([[-0.99106688, -1.97336487, -0.41205364, 1.31654155],
[-7.33091688, 0.99551261, 0.80350944, -1.97336487],
[ 1.57897621, -0.41205364, -7.33091688, -1.97336487],
[ 0. , 0. , 0.32013481, -0.41205364],
[-0.99106688, -1.97336487, 1.31654155, -0.41205364]])
result = ncorr(X, Y, min_overlap=3)
assert _equals(result, 0.25069190635147276)
result = ncorr(Y, X, min_overlap=3)
assert _equals(result, 0.25069190635147276)
def test_pairwise_ncorrs():
df = pd.read_hdf(os.path.join(data_path, 'test_pwms.hdf5'), key='data')
result = pairwise_ncorrs(list(df.weights))
expected = np.array([[1, 0.2506919063514727, 0.9871733730221669],
[0.2506919063514727, 1, 0.21411930243854532],
[0.9871733730221669, 0.21411930243854532, 1]])
assert _equals(result, expected)
def test_choose_representative_pm():
df = pd.read_hdf(os.path.join(data_path, 'test_pwms.hdf5'), key='data')
result = choose_representative_pm(df)
assert result == 'x'
result = choose_representative_pm(df.iloc[1:, :])
assert result == 'z'
def test_cluster_pms():
df = pd.read_hdf(os.path.join(data_path, 'test_pwms.hdf5'), key='data')
result = cluster_pms(df, n_clusters=2, sims=None, weight_col='weights')
assert len(result['clusters']) == 3
assert len(result['reps']) == 2
assert len(result['min_ncorr']) == 2
|
StarcoderdataPython
|
1784775
|
import importlib
import pytest
module = importlib.import_module("19_an_elephant_named_joseph")
josephus = module.josephus
round_game = module.round_game
@pytest.mark.parametrize(
"elves, winner",
[
(1, 1),
(2, 1),
(3, 3),
(4, 1),
(5, 3),
(6, 5),
(7, 7),
(8, 1),
(9, 3),
(10, 5),
(11, 7),
(12, 9),
(13, 11),
(14, 13),
(15, 15),
(16, 1),
(17, 3),
(41, 19)])
def test_josephus(elves, winner):
assert josephus(elves) == winner
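# A hedged reference for `josephus` consistent with the table above (the module
# under test is not shown here): writing n = 2**m + l, the classic every-second
# elf winner is 2*l + 1, e.g. 41 = 32 + 9 gives 2*9 + 1 = 19.
def _josephus_reference(elves):
    highest_power_of_two = 1 << (elves.bit_length() - 1)  # largest 2**m <= elves
    return 2 * (elves - highest_power_of_two) + 1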
@pytest.mark.parametrize(
"elves, winner", [
(1, 1),
(2, 1),
(3, 3),
(4, 1),
(5, 2),
(6, 3),
(7, 5),
(8, 7),
(9, 9),
(10, 1),
(11, 2),
(12, 3),
(13, 4),
(14, 5),
(15, 6),
(16, 7),
(17, 8),
(18, 9),
(19, 11),
(20, 13),
(21, 15),
(22, 17),
(23, 19),
(24, 21),
(25, 23),
(26, 25),
(27, 27),
(28, 1),
(29, 2),
(30, 3),
(31, 4),
(32, 5),
(33, 6),
(34, 7),
(35, 8),
(36, 9),
(37, 10),
(38, 11),
(39, 12),
(40, 13),
(41, 14),
(42, 15),
(43, 16),
(44, 17),
(45, 18),
(46, 19),
(47, 20),
(48, 21),
(49, 22),
(50, 23),
(51, 24),
(52, 25),
(53, 26),
(54, 27),
(55, 29),
(56, 31),
(57, 33),
(58, 35),
(59, 37),
(60, 39),
(61, 41),
(62, 43),
(63, 45),
(64, 47),
(65, 49),
(66, 51),
(67, 53),
(68, 55),
(69, 57),
(70, 59),
(71, 61),
(72, 63),
(73, 65),
(74, 67),
(75, 69),
(76, 71),
(77, 73),
(78, 75),
(79, 77),
(80, 79),
(81, 81),
(82, 1),
(83, 2),
(84, 3),
(85, 4),
(86, 5),
(87, 6),
(88, 7),
(89, 8),
(90, 9),
(91, 10),
(92, 11),
(93, 12),
(94, 13),
(95, 14),
(96, 15),
(97, 16),
(98, 17),
(99, 18)])
def test_round_game(elves, winner):
assert round_game(elves) == winner
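# A hedged reference for `round_game` (steal-from-across variant) consistent
# with the table above: with p the largest power of 3 <= n, the winner is n
# when n == p, n - p when n - p <= p, and 2*n - 3*p otherwise (n=19 -> 11).
def _round_game_reference(elves):
    p = 1
    while p * 3 <= elves:
        p *= 3  # largest power of 3 not exceeding elves
    if elves == p:
        return elves
    return elves - p if elves - p <= p else 2 * elves - 3 * p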
|
StarcoderdataPython
|
9609182
|
# repo: AleksNeStu/projects
# 🚨 Don't change the code below 👇
student_scores = input("Input a list of student scores ").split()
for n in range(0, len(student_scores)):
student_scores[n] = int(student_scores[n])
print(student_scores)
# 🚨 Don't change the code above 👆
print('The highest score in the class is: {}'.format(max(student_scores)))
#First *fork* your copy. Then copy-paste your code below this line 👇
#Finally click "Run" to execute the tests
#SOLUTION
# highest_score = 0
# for score in student_scores:
# if score > highest_score:
# highest_score = score
# # print(highest_score)
#
# print(f"The highest score in the class is: {highest_score}")
#SOLUTION
#Write your code above this line 👆
# 🚨 Do NOT modify the code below this line 👇
with open('testing_copy.py', 'w') as file:
file.write('def test_func():\n')
with open('main.py', 'r') as original:
f2 = original.readlines()[0:40]
for x in f2:
file.write(" " + x)
import testing_copy
import unittest
from unittest.mock import patch
from io import StringIO
import os
class MyTest(unittest.TestCase):
def run_test(self, given_answer, expected_print):
with patch('builtins.input', return_value=given_answer), patch('sys.stdout', new=StringIO()) as fake_out:
testing_copy.test_func()
self.assertEqual(fake_out.getvalue(), expected_print)
def test_1(self):
self.run_test(given_answer='78 65 89 86 55 91 64 89', expected_print='[78, 65, 89, 86, 55, 91, 64, 89]\nThe highest score in the class is: 91\n')
def test_2(self):
self.run_test(given_answer='150 142 185 120 171 184 149 199', expected_print='[150, 142, 185, 120, 171, 184, 149, 199]\nThe highest score in the class is: 199\n')
def test_3(self):
self.run_test(given_answer='24 59 68', expected_print='[24, 59, 68]\nThe highest score in the class is: 68\n')
print("\n\n\n.\n.\n.")
print('Checking that your code prints a sentence that reads:\n\nThe highest score in the class is: X\n\nwhere X is the largest integer that was entered.')
print('Running some tests on your code:')
print(".\n.\n.")
unittest.main(verbosity=1, exit=False)
os.remove("testing_copy.py")
|
StarcoderdataPython
|
3528978
|
# filename: cyder/cydhcp/interface/dynamic_intr/forms.py
from django import forms
from cyder.cydhcp.interface.dynamic_intr.models import (DynamicInterface,
DynamicIntrKeyValue)
class DynamicInterfaceForm(forms.ModelForm):
class Meta:
model = DynamicInterface
class DynamicIntrKeyValueForm(forms.ModelForm):
class Meta:
model = DynamicIntrKeyValue
exclude = ('is_option', 'is_statement', 'is_quoted')
|
StarcoderdataPython
|
6512229
|
# coding:utf-8
from pecan import conf # noqa
def init_model():
"""
This is a stub method which is called at application startup time.
If you need to bind to a parsed database configuration, set up tables or
ORM classes, or perform any database initialization, this is the
recommended place to do it.
For more information working with databases, and some common recipes,
see https://pecan.readthedocs.io/en/latest/databases.html
"""
pass
|
StarcoderdataPython
|
9674687
|
from dataclasses import dataclass, field
from enum import Enum
from typing import Optional
__NAMESPACE__ = "NISTSchema-SV-IV-atomic-long-enumeration-1-NS"
class NistschemaSvIvAtomicLongEnumeration1Type(Enum):
VALUE_67417897408 = 67417897408
VALUE_445463702 = 445463702
VALUE_11686316 = 11686316
VALUE_MINUS_223498733 = -223498733
VALUE_MINUS_5496081750511 = -5496081750511
VALUE_MINUS_4233583602889 = -4233583602889
@dataclass
class NistschemaSvIvAtomicLongEnumeration1:
class Meta:
name = "NISTSchema-SV-IV-atomic-long-enumeration-1"
namespace = "NISTSchema-SV-IV-atomic-long-enumeration-1-NS"
value: Optional[NistschemaSvIvAtomicLongEnumeration1Type] = field(
default=None,
metadata={
"required": True,
}
)
|
StarcoderdataPython
|
3553687
|
"""Update module."""
from dataclasses import dataclass
from typing import Tuple
import click
from . import git, github, poetry
program_name = "poetry-up"
@dataclass
class Options:
"""Options for the update operation."""
latest: bool
install: bool
commit: bool
push: bool
merge_request: bool
pull_request: bool
upstream: str
remote: str
dry_run: bool
packages: Tuple[str, ...]
class Action:
"""Base class for actions."""
def __init__(self, updater: "PackageUpdater") -> None:
"""Constructor."""
self.updater = updater
@property
def required(self) -> bool:
"""Return True if the action needs to run."""
return True
def __call__(self) -> None:
"""Run the action."""
class Switch(Action):
"""Switch to the update branch."""
@property
def required(self) -> bool:
"""Return True if the action needs to run."""
return (
self.updater.options.commit
or self.updater.options.push
or self.updater.options.pull_request
)
def __call__(self) -> None:
"""Run the action."""
git.switch(
self.updater.branch,
create=not git.branch_exists(self.updater.branch),
location=self.updater.options.upstream,
)
class Update(Action):
"""Update the package using Poetry."""
def __call__(self) -> None:
"""Run the action."""
poetry.update(
self.updater.package,
lock=not self.updater.options.install,
latest=self.updater.options.latest,
)
class Commit(Action):
"""Create a Git commit for the update."""
@property
def required(self) -> bool:
"""Return True if the action needs to run."""
return self.updater.options.commit and not git.is_clean(
["pyproject.toml", "poetry.lock"]
)
def __call__(self) -> None:
"""Run the action."""
git.add(["pyproject.toml", "poetry.lock"])
git.commit(message=f"{self.updater.title}\n")
class Rollback(Action):
"""Rollback an attempted package update."""
@property
def required(self) -> bool:
"""Return True if the action needs to run."""
return self.updater.actions.switch.required and (
git.resolve_branch(self.updater.branch)
== git.resolve_branch(self.updater.options.upstream)
)
def __call__(self) -> None:
"""Run the action."""
click.echo(
f"Skipping {self.updater.package.name} {self.updater.package.new_version}"
" (Poetry refused upgrade)"
)
git.switch(self.updater.original_branch)
git.remove_branch(self.updater.branch)
class Push(Action):
"""Push the update branch to the remote repository."""
@property
def required(self) -> bool:
"""Return True if the action needs to run."""
return self.updater.options.push
def __call__(self) -> None:
"""Run the action."""
merge_request = (
git.MergeRequest(self.updater.title, self.updater.description)
if self.updater.options.merge_request
else None
)
git.push(
self.updater.options.remote,
self.updater.branch,
merge_request=merge_request,
)
class PullRequest(Action):
"""Open a pull request for the update branch."""
@property
def required(self) -> bool:
"""Return True if the action needs to run."""
return self.updater.options.pull_request and not github.pull_request_exists(
self.updater.branch
)
def __call__(self) -> None:
"""Run the action."""
github.create_pull_request(self.updater.title, self.updater.description)
@dataclass
class Actions:
"""Actions for a package update."""
switch: Switch
update: Update
commit: Commit
rollback: Rollback
push: Push
pull_request: PullRequest
@classmethod
def create(cls, updater: "PackageUpdater") -> "Actions":
"""Create the package update actions."""
return cls(
Switch(updater),
Update(updater),
Commit(updater),
Rollback(updater),
Push(updater),
PullRequest(updater),
)
class PackageUpdater:
"""Update a package."""
def __init__(
self, package: poetry.Package, options: Options, original_branch: str
) -> None:
"""Constructor."""
self.package = package
self.options = options
self.original_branch = original_branch
self.branch = f"{program_name}/{package.name}-{package.new_version}"
self.title = (
f"Bump {package.name} from {package.old_version} to {package.new_version}"
)
self.description = self.title
self.actions = Actions.create(self)
@property
def required(self) -> bool:
"""Return True if the package needs to be updated."""
return not self.options.packages or self.package.name in self.options.packages
def run(self) -> None:
"""Run the package update."""
if self.actions.switch.required:
self.actions.switch()
self.actions.update()
if self.actions.commit.required:
self.actions.commit()
if self.actions.rollback.required:
self.actions.rollback()
return
if self.actions.push.required:
self.actions.push()
if self.actions.pull_request.required:
self.actions.pull_request()
def show(self) -> None:
"""Print information about the package update."""
message = "{}: {} → {}".format(
click.style(self.package.name, fg="bright_green"),
click.style(self.package.old_version, fg="blue"),
click.style(self.package.new_version, fg="yellow"),
)
click.echo(message)
class Updater:
"""Update packages."""
def __init__(self, options: Options) -> None:
"""Constructor."""
self.options = options
def run(self) -> None:
"""Run the package updates."""
if not git.is_clean():
raise click.ClickException("Working tree is not clean")
original_branch = git.current_branch()
for package in poetry.show_outdated():
updater = PackageUpdater(package, self.options, original_branch)
if updater.required:
updater.show()
if not self.options.dry_run:
updater.run()
if original_branch != git.current_branch():
git.switch(original_branch)
|
StarcoderdataPython
|
4986882
|
import numpy as np
import matplotlib.pyplot as plt
class Lorenz_equations:
'''
Lorenz equations class
'''
def __init__(self, prandtl_number, rayleigh_number, beta, delta_t):
self.sigma = prandtl_number
self.rho = rayleigh_number
self.beta = beta
self.delta_t = delta_t
def getCriticalPoints(self):
'''
Get critical points for Lorenz equation
dX/dt = 0
'''
critical_points = {}
p_0 = np.array([0, 0, 0])
critical_points["p_0"] = p_0
if not (self.rho - 1 < 0):
p_1 = np.array([np.sqrt(self.beta * (self.rho - 1)), np.sqrt(self.beta * (self.rho - 1)), self.rho - 1])
critical_points["p_1"] = p_1
p_2 = np.array([-np.sqrt(self.beta * (self.rho - 1)), -np.sqrt(self.beta * (self.rho - 1)), self.rho - 1])
critical_points["p_2"] = p_2
return critical_points
def getStabilityPoint(self, point):
'''
Get the stability of critical point
'''
Jacobian = np.array([[ -self.sigma, self.sigma, 0],
[ self.rho - point[2], -1, -point[0]],
[ point[1], point[0], -self.beta]])
eigenvalues, eigenvectors = np.linalg.eig(Jacobian)
return eigenvalues, eigenvectors
def plotBifurcation(self, max_value = 26.5):
'''
Plot bifurcations
'''
rho_init = self.rho
critical_points_locations = {"p_0":[], "p_1":[], "p_2": []}
critical_points_eigenvalues = {"p_0":[], "p_1":[], "p_2": []}
rho_locations = []
rho_high = max_value
for rho in np.linspace(0, rho_high, 1000):
self.rho = rho
rho_locations.append(rho)
critical_points_dict = self.getCriticalPoints()
for key in critical_points_locations:
if critical_points_dict.get(key) is None:
critical_points_locations[key].append(np.array([0, 0, 0]))
critical_points_eigenvalues[key].append(np.array([0, 0, 0]))
else:
critical_points_locations[key].append(critical_points_dict[key])
eigenvalues, eigenvectors = self.getStabilityPoint(critical_points_dict[key])
# print(np.real(eigenvalues), np.imag(eigenvalues))
critical_points_eigenvalues[key].append(eigenvalues)
rho_locations = np.array(rho_locations)
minor_ticks = np.arange(0, rho_high // 1, 1)
# Plot Pitchfork bifurcation
fig, axs = plt.subplots(3, 1, constrained_layout=True)
i = 0
for axis in ["x", "y", "z"]:
for key in critical_points_locations:
trajectory = np.stack(critical_points_locations[key], axis = 0)
# print(trajectory.shape)
axs[i].plot(rho_locations[:trajectory.shape[0]], trajectory[:, i], label = key)
axs[i].set_title("%s vs. rho" % axis)
axs[i].set_xlabel("rho")
axs[i].set_ylabel(axis)
axs[i].set_xticks(minor_ticks, minor = True)
axs[i].grid(which='minor', alpha=0.2)
axs[i].grid(True)
axs[i].legend()
i = i + 1
plt.suptitle("Pitchfork bifurcation by varying value of rho")
plt.show()
fig = plt.figure(constrained_layout = True)
m = 1
# Plot Hopf bifurcation
for key in critical_points_locations:
ax = fig.add_subplot(1, 3, m, projection='3d')
m = m + 1
i = 0
for axis in ["eigenvalue 1", "eigenvalue 2", "eigenvalue 3"]:
trajectory = np.stack(critical_points_eigenvalues[key], axis = 0)
ax.plot3D(rho_locations, np.real(trajectory[:, i]), np.imag(trajectory[:, i]), label = axis)
if (np.imag(trajectory[:, i]) != 0).any():
index_rho_gt_1 = np.where( rho_locations > 1)
min_value = np.min(np.absolute(np.real(trajectory[index_rho_gt_1, i])))
index = np.where(np.absolute(np.real(trajectory[index_rho_gt_1, i])) == min_value)
temp_r = rho_locations[index_rho_gt_1]
print("Hopf bifurcation at rho = ", temp_r[index[1]])
ax.set_xlabel("rho")
ax.set_ylabel("a")
ax.set_zlabel("b")
title = "Eigenvalue vs. rho for %s" % key
ax.set_title(title)
ax.grid(True)
i = i + 1
ax.legend()
plt.suptitle("Hopf bifurcation by varying value of rho. a + ib vs. rho => (a, b, rho)")
# plt.legend()
plt.show()
self.rho = rho_init
def getLorenzMatrix(self, x, y, z):
'''
Get Lorenz matrix dX/dt = AX
dx/dt = -sigma * x + sigma * y + 0 * z
dy/dt = (rho - z/2) * x + (- 1) * y - (x/2) * z = rho * x -y - xz
dz/dt = (y/2) * x + (x/2) * y - (beta) * z = xy - beta * z
'''
A = np.array([[ -self.sigma, self.sigma, 0],
[ self.rho - z / 2, -1, -x / 2],
[ y / 2, x / 2, -self.beta]])
return A
def getLorenzTrajectory(self, initial_point, num_points = 5000):
'''
Get lorenz trajectory given initial point
'''
X = np.array(initial_point)
A = self.getLorenzMatrix(X[0], X[1], X[2])
trajectory = [X]
for i in range(num_points):
delta_X = A @ X * self.delta_t
X = delta_X + X
A = self.getLorenzMatrix(X[0], X[1], X[2])
trajectory.append(X)
trajectory = np.stack(trajectory, axis = 0)
return trajectory
def plotLorenzTrajectory(self, initial_point, num_points = 5000):
'''
Plot the lorenz trajectory given initial point
'''
critical_points = self.getCriticalPoints()
trajectory = self.getLorenzTrajectory(initial_point, num_points)
ax = plt.figure().add_subplot(projection='3d')
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
ax.set_title("Lorenz equations live plot. Init point = (%.02f, %.02f, %.02f)" % (initial_point[0], initial_point[1], initial_point[2]))
ax.plot3D(initial_point[0], initial_point[1], initial_point[2], "ro")
for p in critical_points:
point = critical_points[p]
if not np.iscomplex(point).any():
eigenvalues, eigenvectors = self.getStabilityPoint(point)
ax.plot3D(point[0], point[1], point[2], "go")
plot_steps = 100
for i in range(0, trajectory.shape[0], plot_steps):
ax.plot3D(trajectory[i: i + plot_steps + 1, 0], trajectory[i: i + plot_steps + 1, 1], trajectory[i:i + plot_steps + 1, 2], "b")
plt.pause(0.2 / (num_points / plot_steps))
plt.show()
def plotLorenzAlongAxis(self, initial_point, num_points = 5000):
'''
plot x, y, z w.r.t t
'''
trajectory = self.getLorenzTrajectory(initial_point, num_points)
t = np.linspace(0, trajectory.shape[0] * self.delta_t, trajectory.shape[0])
minor_ticks = np.arange(0, t[-1]//1, 1)
fig, axs = plt.subplots(3, 1, constrained_layout=True)
axs[0].plot(t, trajectory[:, 0], "b")
axs[0].set_title("x (convection) vs. t")
axs[0].set_xlabel("t")
axs[0].set_ylabel("x")
axs[0].set_xticks(minor_ticks, minor = True)
axs[0].grid(which='minor', alpha=0.2)
axs[0].grid(True)
axs[1].plot(t, trajectory[:, 1], "b")
axs[1].set_title("y (temperature difference (horizontal)) vs. t")
axs[1].set_xlabel("t")
axs[1].set_ylabel("y")
axs[1].set_xticks(minor_ticks, minor = True)
axs[1].grid(which='minor', alpha=0.2)
axs[1].grid(True)
axs[2].plot(t, trajectory[:, 2], "b")
axs[2].set_title("z (temperature difference (vertical)) vs. t")
axs[2].set_xlabel("t")
axs[2].set_ylabel("z")
axs[2].set_xticks(minor_ticks, minor = True)
axs[2].grid(which='minor', alpha=0.2)
axs[2].grid(True)
plt.suptitle("Plot x, y, and z vs. t")
plt.show()
def plotBifurcationTrajectories(self, initial_point, rho_list = [0.7, 1.1, 5, 10, 15, 18, 21, 24, 55]):
'''
Plot Lorenz trajectories for varying only rho and keeping the same initial point
'''
h = int(np.ceil(np.sqrt(len(rho_list))) // 1)
# print(h)
fig = plt.figure(constrained_layout = True)
rho_init = self.rho
m = 1
for r in rho_list:
ax = fig.add_subplot(h, h, m, projection='3d')
m = m + 1
self.rho = r
critical_points = self.getCriticalPoints()
for p in critical_points:
point = critical_points[p]
if not np.iscomplex(point).any():
eigenvalues, eigenvectors = self.getStabilityPoint(point)
ax.plot3D(point[0], point[1], point[2], "go")
ax.plot3D(initial_point[0], initial_point[1], initial_point[2], "ro")
trajectory = self.getLorenzTrajectory(initial_point, num_points=3000)
ax.plot3D(trajectory[:, 0], trajectory[:, 1], trajectory[:, 2], label = "rho = %f" %r)
ax.set_xlabel("x")
ax.set_ylabel("y")
ax.set_zlabel("z")
# title = "rho = %.02f" % r
# ax.set_title(title)
ax.grid(True)
ax.legend()
plt.suptitle("Varying rho and plotting Lorenz equations trajectory with initial point = (%.02f, %.02f, %.02f)" % (initial_point[0], initial_point[1], initial_point[2]))
# plt.legend()
plt.show()
self.rho = rho_init
if __name__ == "__main__":
lorenz = Lorenz_equations(prandtl_number = 10, rayleigh_number = 25, beta = 8/3, delta_t = 1e-2)
init_point = [0, 1, 2]
    # Plot Lorenz trajectory
lorenz.plotLorenzTrajectory(init_point)
# Plot Bifurcation trajectories
rho_list_1 = [0.3, 0.8, 1.4, 3]
lorenz.plotBifurcationTrajectories(init_point, rho_list=rho_list_1)
rho_list_2 = [5, 9, 15, 25]
lorenz.plotBifurcationTrajectories(init_point, rho_list=rho_list_2)
rho_list_3 = [19, 27, 32, 40]
lorenz.plotBifurcationTrajectories(init_point, rho_list=rho_list_3)
# Plot Lorenz equations vs. t
lorenz.plotLorenzAlongAxis(init_point)
# Plot bifurcation diagrams
lorenz.plotBifurcation(35)
# Get critical points
critical_points = lorenz.getCriticalPoints()
print("Printing critical points:\n", critical_points)
|
StarcoderdataPython
|
11261113
|
# repo: Alexhuszagh/XLDiscoverer
'''
Utils/skimage/measure
_____________________
Block_reduce functionality to rapidly interpolate large arrays.
:copyright: Copyright (C) 2011, the scikit-image team
:license: see licenses/skimage.txt for more details.
'''
# load modules
import numpy as np
from .arraypad import pad
from .shape import view_as_blocks
# FUNCTIONS
# ---------
def block_reduce(image, block_size, func=np.sum, cval=0):
"""Down-sample image by applying function to local blocks.
Parameters
----------
image : ndarray
N-dimensional input image.
block_size : array_like
Array containing down-sampling integer factor along each axis.
func : callable
Function object which is used to calculate the return value for each
local block. This function must implement an ``axis`` parameter such as
``numpy.sum`` or ``numpy.min``.
cval : float
Constant padding value if image is not perfectly divisible by the
block size.
Returns
-------
image : ndarray
Down-sampled image with same number of dimensions as input image.
Examples
--------
>>> from skimage.measure import block_reduce
>>> image = np.arange(3*3*4).reshape(3, 3, 4)
>>> image # doctest: +NORMALIZE_WHITESPACE
array([[[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]],
[[12, 13, 14, 15],
[16, 17, 18, 19],
[20, 21, 22, 23]],
[[24, 25, 26, 27],
[28, 29, 30, 31],
[32, 33, 34, 35]]])
>>> block_reduce(image, block_size=(3, 3, 1), func=np.mean)
array([[[ 16., 17., 18., 19.]]])
>>> image_max1 = block_reduce(image, block_size=(1, 3, 4), func=np.max)
>>> image_max1 # doctest: +NORMALIZE_WHITESPACE
array([[[11]],
[[23]],
[[35]]])
>>> image_max2 = block_reduce(image, block_size=(3, 1, 4), func=np.max)
>>> image_max2 # doctest: +NORMALIZE_WHITESPACE
array([[[27],
[31],
[35]]])
"""
if len(block_size) != image.ndim:
raise ValueError("`block_size` must have the same length "
"as `image.shape`.")
pad_width = []
for i in range(len(block_size)):
if image.shape[i] % block_size[i] != 0:
after_width = block_size[i] - (image.shape[i] % block_size[i])
else:
after_width = 0
pad_width.append((0, after_width))
image = pad(image, pad_width=pad_width, mode='constant',
constant_values=cval)
out = view_as_blocks(image, block_size)
for i in range(len(out.shape) // 2):
out = func(out, axis=-1)
return out
|
StarcoderdataPython
|
6583650
|
# filename: inductive_modules.py
import torch
from torch import nn
from torch.nn import functional as F
from torch.autograd import Variable
import numpy as np
from scipy import sparse
from utilities import sparse_mx_to_torch_sparse_tensor, normalize
from metapath import query_path, query_path_indexed
def to_numpy(x):
if isinstance(x, Variable):
return to_numpy(x.data)
return x.cpu().numpy() if x.is_cuda else x.numpy()
def adj_list_to_adj_mat(neigh):
"""from dense adj list neigh to adj mat"""
# tmp = np.zeros((neigh.shape[0],neigh.shape[0]),dtype=bool)
tmp = sparse.coo_matrix((np.ones(neigh.size),
(np.arange(neigh.shape[0]).repeat(neigh.shape[1]).reshape(-1),
np.array(neigh).reshape(-1))))
return tmp
# --
# Samplers
def UniformNeighborSampler(adj, n_samples=128):
"""
Samples from "sparse 2D edgelist" COO matrix, according to adj. adj[v1,v2]=1: connected
:return sparse neighbor adj mat
"""
    assert n_samples > 0, 'UniformNeighborSampler: n_samples > 0'
    # assumes a dense torch adjacency (as the original adj.numpy() implied);
    # numpy arrays have no .to_csr(), so build a scipy CSR matrix explicitly
    adj_np = sparse.csr_matrix(to_numpy(adj))
    degrees = np.asarray(adj_np.getnnz(axis=1))
    degrees[degrees == 0] = adj_np.shape[1]  # if no degree at all, sample from all vertices
    sel = np.random.choice(adj_np.shape[1], (adj_np.shape[0], n_samples))
    sel = sel % degrees.reshape(-1, 1)
    nonzeros = np.split(adj_np.indices, adj_np.indptr)[1:-1]  # nonzero columns for each row
    for row in np.flatnonzero(degrees == adj_np.shape[1]):
        nonzeros[row] = np.arange(adj_np.shape[1])  # isolated vertex: sample from all vertices
    tmp = np.stack([nonzeros[row][sel[row]] for row in range(adj_np.shape[0])])
    tmp = adj_list_to_adj_mat(tmp)
    tmp = sparse_mx_to_torch_sparse_tensor(tmp)
    if adj.is_cuda:  # is_cuda is an attribute, not a method
        tmp = tmp.cuda()
    return tmp
class WeightedNeighborSampler():
"""
weighted sampling from "sparse 2D edgelist" COO matrix.
"""
def __init__(self, adj):
assert adj.is_sparse, "WeightedNeighborSampler: not sparse.issparse(adj)"
self.is_cuda = adj.is_cuda
self.adj = normalize(adj.to_dense().numpy())
self.degrees = np.count_nonzero(self.adj, axis=1)
def __call__(self, ids, n_samples=128):
assert n_samples > 0, 'WeightedNeighborSampler: n_samples must be set explicitly'
sel = [np.random.choice(self.adj.shape[1], n_samples, p=self.adj[id], replace=False)
if self.degrees[id] >= n_samples
else np.random.choice(self.adj.shape[1], n_samples, p=self.adj[id], replace=True)
for id in ids]
sel = np.asarray(sel)
tmp = Variable(torch.LongTensor(sel))
if self.is_cuda:
tmp = tmp.cuda()
return tmp
sampler_lookup = {
"uniform_neighbor_sampler": UniformNeighborSampler,
"weighted_neighbor_sampler": WeightedNeighborSampler,
}
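# Minimal usage sketch (hypothetical shapes; WeightedNeighborSampler densifies
# the sparse adjacency internally via its `normalize` call):
# adj = (torch.rand(10, 10) > 0.5).float().to_sparse()
# sampler = WeightedNeighborSampler(adj)
# neigh = sampler(ids=range(10), n_samples=4)  # LongTensor of shape (10, 4)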
# --
# Aggregators
class AttentionAggregator(nn.Module):
def __init__(self, input_dim, output_dim, dropout, alpha, concat=True):
super(AttentionAggregator, self).__init__()
self.dropout = dropout
self.alpha = alpha
self.concat = concat
self.output_dim = output_dim
self.leakyrelu = nn.LeakyReLU(self.alpha)
self.W = nn.Parameter(torch.zeros(size=(input_dim, output_dim)))
nn.init.xavier_uniform_(self.W.data, gain=1.414)
self.a = nn.Parameter(torch.zeros(size=(2 * output_dim, 1)))
nn.init.xavier_uniform_(self.a.data, gain=1.414)
def forward(self, x, neibs):
# Compute attention weights
N = x.size()[0]
n_sample = neibs.shape[1]
x = torch.mm(x, self.W)
a_input = torch.cat([x.repeat(1, n_sample).view(N * n_sample, -1),
x[neibs].view(N * n_sample, -1)], dim=1) \
.view(N, -1, 2 * self.output_dim)
e = self.leakyrelu(torch.matmul(a_input, self.a).squeeze(2)) # e[ver,sample] attention coeff
# Weighted average of neighbors
attention = F.softmax(e, dim=1)
attention = F.dropout(attention, self.dropout, training=self.training)
# h_prime = torch.matmul(attention, x[neibs])
# h_prime = [ h_prime[id,id].unsqueeze(0) for id in range(N)]
# h_prime = torch.cat(h_prime)
h_prime = [torch.matmul(attention[i], x[neibs[i]]).unsqueeze(0) for i in range(N)]
h_prime = torch.cat(h_prime)
if self.concat:
output = torch.cat([x, h_prime], dim=1)
else:
output = x + h_prime
return F.elu(output)
def __repr__(self):
return self.__class__.__name__ + ' (' + str(self.in_features) + ' -> ' + str(self.out_features) + ')'
class MetapathAggrLayer(nn.Module):
"""
metapath attention layer.
"""
def __init__(self, in_features, nmeta, dropout, alpha):
super(MetapathAggrLayer, self).__init__()
self.dropout = dropout
self.in_features = in_features
self.alpha = alpha
self.n_meta = nmeta
# Weight: [in_features][1]
self.a = nn.Parameter(torch.zeros(size=(in_features, 1)))
nn.init.xavier_uniform_(self.a.data, gain=1.414)
self.leakyrelu = nn.LeakyReLU(self.alpha)
def forward(self, input):
# input: tensor(nmeta,N,in_features)
input = input.transpose(0, 1) # tensor(N,nmeta,in_features)
N = input.size()[0]
# a_input = torch.cat([input.repeat(1,1,self.nmeta).view(N, self.nmeta*self.nmeta, -1),
# input.repeat(1,self.nmeta, 1)], dim=2).view(N, -1, 2 * self.in_features)
a_input = input
e = self.leakyrelu(torch.matmul(a_input, self.a).squeeze(2))
e = F.softmax(e, dim=1)
output = [torch.matmul(e[i], input[i]).unsqueeze(0) for i in range(N)]
output = torch.cat(output)
return output
def __repr__(self):
return self.__class__.__name__ + ' (' + str(self.in_features) + ' -> ' + str(self.out_features) + ')'
class EdgeAttentionAggregator(nn.Module):
def __init__(self, input_dim, output_dim, edge_dim, scheme, dropout, alpha, concat=True):
super(EdgeAttentionAggregator, self).__init__()
self.dropout = dropout
self.alpha = alpha
self.concat = concat
self.output_dim = output_dim
self.scheme = scheme
self.leakyrelu = nn.LeakyReLU(self.alpha)
self.W = nn.Parameter(torch.zeros(size=(input_dim, output_dim)))
nn.init.xavier_uniform_(self.W.data, gain=1.414)
self.a = nn.Parameter(torch.zeros(size=(2 * output_dim + edge_dim, 1)))
nn.init.xavier_uniform_(self.a.data, gain=1.414)
def forward(self, features, index, node_emb, n_sample=128):
# Compute attention weights
N = features.size()[0]
x = torch.mm(features, self.W)
output = []
for v in range(N):
# generate neighbors of v
neigh, emb = query_path_indexed(v, self.scheme, index, node_emb, n_sample)
# assert neigh.shape[0] == n_sample
n_neigh = neigh.shape[0]
a_input = torch.cat([x[v].repeat(1, n_neigh).view(n_neigh, -1),
x[neigh], emb], dim=1) \
.view(n_neigh, -1)
e = self.leakyrelu(torch.matmul(a_input, self.a).view(1, -1))
attention = F.softmax(e, dim=1)
attention = F.dropout(attention, self.dropout, training=self.training)
if self.concat:
h_prime = torch.matmul(attention, torch.cat([x[neigh], emb], dim=1))
else:
h_prime = torch.matmul(attention, x[neigh])
output.append(torch.cat([x[v], h_prime.squeeze()]))
output = torch.stack(output)
return F.elu(output)
def __repr__(self):
return self.__class__.__name__ + ' (' + str(self.in_features) + ' -> ' + str(self.out_features) + ')'
# use edge emb instead of query_path
class EdgeEmbAttentionAggregator(nn.Module):
def __init__(self, input_dim, output_dim, edge_dim, dropout, alpha,
concat=False, addedge=False, update_edge=False):
super(EdgeEmbAttentionAggregator, self).__init__()
self.dropout = dropout
self.alpha = alpha
self.concat = concat
self.output_dim = output_dim
self.addedge = addedge
self.update_edge = update_edge
self.leakyrelu = nn.LeakyReLU(self.alpha)
self.W = nn.Parameter(torch.zeros(size=(input_dim, output_dim)))
nn.init.xavier_uniform_(self.W.data, gain=1.414)
self.W2 = nn.Parameter(torch.zeros(size=(output_dim, edge_dim)))
nn.init.xavier_uniform_(self.W2.data, gain=1.414)
self.W3 = nn.Parameter(torch.zeros(size=(edge_dim, edge_dim)))
nn.init.xavier_uniform_(self.W3.data, gain=1.414)
self.B = nn.Parameter(torch.zeros(size=(1, edge_dim)))
nn.init.xavier_uniform_(self.B.data, gain=1.414)
self.a = nn.Parameter(torch.zeros(size=(2 * output_dim + edge_dim, 1)))
nn.init.xavier_uniform_(self.a.data, gain=1.414)
def forward(self, features, index, node_emb, edge_index, edge_emb, n_sample=128):
emb = edge_emb
e_index = edge_index
# Compute attention weights
N = features.size()[0]
x = torch.mm(features, self.W)
# vectorize: each vertex sample n_sample neighbors;
neigh = []
for v in range(N):
nonz = torch.nonzero(e_index[v]).view(-1)
if (len(nonz) == 0):
# no neighbor, only sample from itself
# for edge embedding... PADDING with all-zero embedding at edge_emb[0]
neigh.append(torch.LongTensor([v]).repeat(n_sample))
else:
idx = np.random.choice(nonz.shape[0], n_sample)
neigh.append(nonz[idx])
neigh = torch.stack(neigh).long()
a_input = torch.cat([x.repeat(1, n_sample).view(N, n_sample, -1),
x[neigh],
emb[e_index[
torch.arange(N).view(-1, 1).repeat(1, n_sample).view(-1),
neigh.view(-1)]
].view(N, n_sample, -1)], dim=2) \
.view(N, n_sample, -1)
e = self.leakyrelu(torch.matmul(a_input, self.a))
attention = F.softmax(e, dim=1)
attention = attention.squeeze(2)
attention = F.dropout(attention, self.dropout, training=self.training)
h_prime = [torch.matmul(attention[i], x[neigh[i]]) for i in range(N)]
h_prime = torch.stack(h_prime)
if self.concat:
output = torch.cat([x, h_prime], dim=1)
else:
output = h_prime + x
if self.addedge:
output = torch.cat([output, torch.stack([
torch.matmul(attention[i],
emb[e_index[i, neigh[i]]]) for i in range(N)])
], dim=1)
output = F.elu(output)
# update edge
if self.update_edge:
to_update = e_index.nonzero()
to_update = to_update[(to_update[:, 0] < to_update[:, 1]).nonzero().squeeze()]
n = to_update.shape[0]
            # NOTE: this can run out of memory on large graphs; consider a minibatch update
edges = e_index[to_update[:, 0], to_update[:, 1]]
v_input = output[to_update]
v_input = torch.matmul(v_input, self.W2).sum(dim=1)
e_input = torch.mm(emb[edges], self.W3)
a_input = torch.cat([self.B, e_input+v_input+self.B.repeat(n,1)],dim=0)
emb = F.relu(a_input * emb)
return output, emb
def __repr__(self):
return self.__class__.__name__ + ' (' + str(self.in_features) + ' -> ' + str(self.out_features) + ')'
aggregator_lookup = {
# "mean": MeanAggregator,
# "max_pool": MaxPoolAggregator,
# "mean_pool": MeanPoolAggregator,
# "lstm": LSTMAggregator,
"attention": AttentionAggregator,
"eged_attention": EdgeAttentionAggregator,
"edge_emb_attn": EdgeEmbAttentionAggregator,
}
class NodeEmbeddingPrep(nn.Module):
def __init__(self, input_dim, n_nodes, embedding_dim=64):
""" adds node embedding """
super(NodeEmbeddingPrep, self).__init__()
self.n_nodes = n_nodes
self.input_dim = input_dim
self.embedding_dim = embedding_dim
self.embedding = nn.Embedding(num_embeddings=n_nodes + 1, embedding_dim=embedding_dim)
self.fc = nn.Linear(embedding_dim, embedding_dim) # Affine transform, for changing scale + location
@property
def output_dim(self):
if self.input_dim:
return self.input_dim + self.embedding_dim
else:
return self.embedding_dim
def forward(self, ids, feats, layer_idx=0):
if layer_idx > 0:
embs = self.embedding(ids)
else:
# Don't look at node's own embedding for prediction, or you'll probably overfit a lot
embs = self.embedding(Variable(ids.clone().data.zero_() + self.n_nodes))
embs = self.fc(embs)
if self.input_dim:
return torch.cat([feats, embs], dim=1)
else:
return embs
|
StarcoderdataPython
|
6512227
|
# repo: fransward/open-cultuur-data
from flask import Flask
from ocd_frontend.helpers import register_blueprints
def create_app_factory(package_name, package_path, settings_override=None):
"""Returns a :class:`Flask` application instance configured with
project-wide functionality.
:param package_name: application package name.
:param package_path: application package path.
:param settings_override: a dictionary of settings to override.
"""
app = Flask(package_name, instance_relative_config=True)
app.config.from_object('ocd_frontend.settings')
app.config.from_object(settings_override)
register_blueprints(app, package_name, package_path)
return app
|
StarcoderdataPython
|
9610486
|
"""
ABC that defines the context manager behavior and stores base attributes
Not meant to be instantiated
"""
import logging
from abc import ABC
class HttpClientBase(ABC):
""" store base attributes for login and define context manager methods """
def __init__(self, user, password, token, logger: logging.Logger):
self.user = user
self.password = password
self.token = token
self.logger = logger
def login(self):
pass
def logout(self):
pass
def __enter__(self):
"""
Optionally over-ride this to have login occur on context manager
"""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""
Trigger logout on leaving context
"""
self.logout()
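# A minimal concrete sketch (illustrative only; the ABC ships no transport):
class _LoggingHttpClient(HttpClientBase):
    """ example subclass that opts in to login-on-enter """
    def login(self):
        self.logger.info("logging in as %s", self.user)
    def logout(self):
        self.logger.info("logging out %s", self.user)
    def __enter__(self):
        self.login()  # over-ride __enter__ so login happens on entering the context
        return self
# usage:
# with _LoggingHttpClient("user", "pw", "token", logging.getLogger(__name__)) as client:
#     ...  # work with the client; logout() fires automatically on exit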
|
StarcoderdataPython
|
1938888
|
#
# Copyright (c) 2020 Idiap Research Institute, http://www.idiap.ch/
# Written by <NAME> <<EMAIL>>
#
"""Implement local context attention."""
from math import sqrt
import torch
from torch.nn import Module, Dropout
from torch.nn import functional as F
from ..attention_registry import AttentionRegistry, Optional, Int, Float, \
EventDispatcherInstance
from ..events import EventDispatcher
from ..local_product import local_dot_product, local_weighted_average
class LocalAttention(Module):
"""Implement fast local attention where a query can only attend to
neighboring keys.
In this attention module the query Q_i can only attend to a key K_j if
|i-j| < local_context/2.
Arguments
---------
local_context: The neighborhood to consider for local attention.
softmax_temp: The temperature to use for the softmax attention.
(default: 1/sqrt(d_keys) where d_keys is computed at
runtime)
attention_dropout: The dropout rate to apply to the attention
(default: 0.1)
event_dispatcher: str or EventDispatcher instance to be used by this
module for dispatching events (default: the default
global dispatcher)
"""
def __init__(self, local_context, softmax_temp=None, attention_dropout=0.1,
event_dispatcher=""):
super(LocalAttention, self).__init__()
self.local_context = local_context
self.softmax_temp = softmax_temp
self.dropout = Dropout(attention_dropout)
self.event_dispatcher = EventDispatcher.get(event_dispatcher)
def forward(self, queries, keys, values, attn_mask, query_lengths,
key_lengths):
"""Implements the local attention.
The attn_mask can be anything but the only values that will be
considered will be the ones in the neighborhood of each query.
Arguments
---------
queries: (N, L, H, E) The tensor containing the queries
keys: (N, S, H, E) The tensor containing the keys
values: (N, S, H, D) The tensor containing the values
attn_mask: An implementation of BaseMask that encodes where each
query can attend to
query_lengths: An implementation of BaseMask that encodes how
many queries each sequence in the batch consists of
key_lengths: An implementation of BaseMask that encodes how
                many keys each sequence in the batch consists of
"""
# Extract some shapes and compute the temperature
N, L, H, E = queries.shape
_, S, _, D = values.shape
context = self.local_context
softmax_temp = self.softmax_temp or 1./sqrt(E)
# Permute the dimensions to NHLE instead of NLHE
queries = queries.permute(0, 2, 1, 3).contiguous()
keys = keys.permute(0, 2, 1, 3).contiguous()
values = values.permute(0, 2, 1, 3).contiguous()
QK = local_dot_product(
queries,
keys,
attn_mask.additive_matrix_finite,
key_lengths.lengths,
self.local_context
)
A = self.dropout(torch.softmax(softmax_temp * QK, dim=-1))
V_new = local_weighted_average(A, values)
return V_new.permute(0, 2, 1, 3).contiguous()
# Register the attention implementation so that it becomes available in our
# builders
AttentionRegistry.register(
"local", LocalAttention,
[
("local_context", Int),
("softmax_temp", Optional(Float)),
("attention_dropout", Optional(Float, 0.1)),
("event_dispatcher", Optional(EventDispatcherInstance, ""))
]
)
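# Hedged usage sketch (assumes the package's FullMask/LengthMask helpers and a
# built local_product kernel; shapes follow the docstring: N batch, L length,
# H heads, E head dim):
# from fast_transformers.masking import FullMask, LengthMask
# N, L, H, E = 2, 64, 4, 32
# q = k = v = torch.randn(N, L, H, E)
# lengths = LengthMask(torch.full((N,), L, dtype=torch.long))
# attn = LocalAttention(local_context=8)
# out = attn(q, k, v, FullMask(L, L), lengths, lengths)  # -> (N, L, H, E)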
|
StarcoderdataPython
|
3276209
|
# -*- coding: utf-8 -*-
#
from bluepy import btle
import struct
import logging
_log = logging.getLogger(__name__)
_log.addHandler(logging.StreamHandler())
_log.setLevel(logging.INFO)
def _ZEI_UUID(short_uuid):
return 'c7e7%04X-c847-11e6-8175-8c89a55d403c' % (short_uuid)
class ZeiCharBase:
def __init__(self, periph):
self.periph = periph
self.hndl = None
def enable(self):
_svc = self.periph.getServiceByUUID(self.svcUUID)
_chr = _svc.getCharacteristics(self.charUUID)[0]
self.hndl = _chr.getHandle()
# this is uint16_t - see: https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.descriptor.gatt.client_characteristic_configuration.xml
_cccd = _chr.getDescriptors(btle.AssignedNumbers.client_characteristic_configuration)[0]
_cccd.write(struct.pack("<H", 2), withResponse=True)
class ZeiOrientationChar(ZeiCharBase):
svcUUID = _ZEI_UUID(0x0010)
charUUID = _ZEI_UUID(0x0012)
def __init__(self, periph):
ZeiCharBase.__init__(self, periph)
class BatteryLevelChar(ZeiCharBase):
svcUUID = btle.AssignedNumbers.battery_service
charUUID = btle.AssignedNumbers.battery_level
def __init__(self, periph):
ZeiCharBase.__init__(self, periph)
class Zei(btle.Peripheral):
def __init__(self, *args, **kwargs):
btle.Peripheral.__init__(self, *args, **kwargs)
self.withDelegate(ZeiDelegate(self))
# activate notifications about turn
self.orientation = ZeiOrientationChar(self)
self.orientation.enable()
class ZeiDelegate(btle.DefaultDelegate):
def __init__(self, periph):
btle.DefaultDelegate.__init__(self)
self.parent = periph
def handleNotification(self, cHandle, data):
if cHandle == 39:
_log.info("Current side up is %s", struct.unpack('B', data) )
else:
_log.info("Notification from hndl: %s - %r", cHandle, data)
class ZeiDiscoveryDelegate(btle.DefaultDelegate):
def __init__(self, scanner, periph):
btle.DefaultDelegate.__init__(self)
self.scanner = scanner
self.periph = periph
def handleDiscovery(self, dev, isNewDev, isNewData):
if not dev.addr == 'f1:05:a5:9c:2e:9b':
return
_log.info("Device %s (%s), RSSI=%d dB", dev.addr, dev.addrType, dev.rssi)
for (adtype, desc, value) in dev.getScanData():
_log.info(" %s = %s", desc, value)
# reconnect
# bluepy can only do one thing at a time, so stop scanning while trying to connect
# this is not supported by bluepy
#self.scanner.stop()
try:
self.periph.connect(dev)
self.scanner.stop_scanning = True
        except Exception:
            # reconnect failed; resume scanning and retry on the next discovery
            self.scanner.start()
class ZeiDiscovery(btle.Scanner):
def __init__(self, periph=None, **kwargs):
self.zei = periph
btle.Scanner.__init__(self, **kwargs)
#self.withDelegate(ZeiDiscoveryDelegate(self, self.zei))
#self.stop_scanning = False
def reconnect(self):
self.iface=self.zei.iface
self.clear()
self.start()
while self.zei.addr not in self.scanned:
self.process(timeout=2)
self.stop()
self.zei.connect(self.scanned[self.zei.addr])
def main():
zei = Zei('f1:05:a5:9c:2e:9b', 'random', iface=1)
scanner = ZeiDiscovery(zei)
while True:
try:
zei.waitForNotifications(timeout=None)
except Exception as e:
_log.exception(e)
scanner.reconnect()
zei.disconnect()
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
4827525
|
# Copyright 2014 Google Inc. All Rights Reserved.
"""Command for updating target HTTP proxies."""
from googlecloudapis.compute.v1 import compute_v1_messages as messages
from googlecloudsdk.compute.lib import base_classes
class Update(base_classes.BaseAsyncMutator):
"""Update a target HTTP proxy."""
@staticmethod
def Args(parser):
url_map = parser.add_argument(
'--url-map',
required=True,
help=('A reference to a URL map resource that will define the mapping '
' of URLs to backend services.'))
url_map.detailed_help = """\
A reference to a URL map resource that will define the mapping of
URLs to backend services. The URL map must exist and cannot be
deleted while referenced by a target HTTP proxy.
"""
parser.add_argument(
'name',
help='The name of the target HTTP proxy.')
@property
def service(self):
return self.context['compute'].targetHttpProxies
@property
def method(self):
return 'SetUrlMap'
@property
def print_resource_type(self):
return 'targetHttpProxies'
def CreateRequests(self, args):
url_map_uri = self.context['uri-builder'].Build(
'global', 'urlMaps', args.url_map)
request = messages.ComputeTargetHttpProxiesSetUrlMapRequest(
project=self.context['project'],
targetHttpProxy=args.name,
urlMapReference=messages.UrlMapReference(
urlMap=url_map_uri))
return [request]
Update.detailed_help = {
'brief': 'Update a target HTTP proxy',
'DESCRIPTION': """\
*{command}* is used to change the URL map of existing
target HTTP proxies. A target HTTP proxy is referenced
by one or more forwarding rules which
define which packets the proxy is responsible for routing. The
target HTTP proxy in turn points to a URL map that defines the rules
for routing the requests. The URL map's job is to map URLs to
backend services which handle the actual requests.
""",
}
|
StarcoderdataPython
|
126596
|
# filename: example/app/forms.py
'''
Created on 24/05/2013
@author: luan
'''
from hstore_flattenfields.forms import HStoreModelForm
from models import Something
class SomethingForm(HStoreModelForm):
class Meta:
model = Something
|
StarcoderdataPython
|
146501
|
# repo: elminster-aom/homeworks
"""Validate that DB and Communication bus (Kafka) are defined with all
needed resources for our web-monitoring application
"""
import pytest
import traceback
from homeworks import config
from homeworks.communication_manager import Communication_manager
from homeworks.store_manager import Store_manager
def test_kafka_resources():
"""Validate that our topic is already created in kafka"""
assertion = False
    topics_set = set()
    kafka = Communication_manager()  # construct outside try so `kafka` is bound in the finally block
    try:
        kafka.connect_consumer()
except Exception:
print(traceback.print_exc())
else:
topics_set = kafka.kafka_consumer.topics()
print(f"List of available topics in Kafka: {topics_set}")
if config.kafka_topic_name in topics_set:
assertion = True
finally:
kafka.close_consumer()
# fmt: off
assert assertion, f"Test failed because our topic '{config.kafka_topic_name}' is not in the list of defined Kafka topics: {topics_set}"
# fmt: on
def test_postgres_resources_1():
"""Validate that our table is already created as hypertable and it has 2 dimensions in Postgres"""
assertion = False
    postgres = Store_manager()  # construct outside try so `postgres` is bound in the finally block
    try:
        assertion = postgres.validate_metric_store()
finally:
postgres.close()
# fmt: off
assert assertion, f"Something wrong with DB definitions, '{config.db_table}' is not a hypertable or does not have two dimension"
# fmt: on
""" 1. Confirm the debug-output for DB reports right definition for our metrics, e.g. from:
$ ./setup.py
Connecting with DB
Established connection with DB, status code: 1
Connection with DB already established, status code: 1
Enabling TimescaleDB extension, if it doesn't exist
Creating table for metrics (web_health_metrics), if it doesn't exist
Turning 'web_health_metrics' to a hypertable partitioned by 2 dimensions: 'time' and 'web_url', if it doesn't exist
All DB resources were created
Connection with DB already established, status code: 1
Database ready for storing metrics, all resources crated
Information about our table in '_timescaledb_catalog.hypertable':
RealDictRow([('id', 35), ('schema_name', 'public'), ('table_name', 'web_health_metrics'), ('associated_schema_name', '_timescaledb_internal'), ('associated_table_prefix', '_hyper_35'), ('num_dimensions', 2), ('chunk_sizing_func_schema', '_timescaledb_internal'), ('chunk_sizing_func_name', 'calculate_chunk_interval'), ('chunk_target_size', 0), ('compression_state', 0), ('compressed_hypertable_id', None), ('replication_factor', None)])
Connection with DB was closed
Checks:
* RealDictRow.table_name == web_health_metrics
* RealDictRow.num_dimensions == 2
* ??? Dimension == ('time', 'web_url')
"""
|
StarcoderdataPython
|
1605433
|
#!/usr/bin/env python
import sys, os, shutil
from pyps import *
# removing previous output
if os.path.isdir("loop_tiling.database"):
shutil.rmtree("loop_tiling.database", True)
ws = workspace("loop_tiling.c", name="loop_tiling",deleteOnClose=True)
ws.props.ABORT_ON_USER_ERROR = True
fct = ws.fun.main
# try a basic function
fct.privatize_module ()
# do some tiling on loop
print "do some tiling on loop"
#look for the desired loop
for loop in fct.loops ():
lbl = loop.label
if lbl == "l300":
loop.loop_tiling (LOOP_TILING_MATRIX = "1111 0 0 , 0 2222 0 , 0 0 3333")
# print result
fct.display ()
# close the workspace
ws.close()
|
StarcoderdataPython
|
9721276
|
"""
States Directory
"""
|
StarcoderdataPython
|
8001340
|
from typing import TextIO, Union, Optional, Callable, Dict, Type
from hbreader import FileInfo, hbread
from biolinkml.utils.yamlutils import YAMLRoot
def load_source(source: Union[str, dict, TextIO],
loader: Callable[[Union[str, Dict], FileInfo], Optional[Dict]],
target_class: Type[YAMLRoot],
accept_header: Optional[str] = "text/plain, application/yaml;q=0.9",
metadata: Optional[FileInfo] = None) -> Optional[YAMLRoot]:
""" Base loader - convert a file, url, string, open file handle or dictionary into an instance
of target_class
:param source: URL, file name, block of text, Existing Object or open file handle
:param loader: Take a stringified image or a dictionary and return a loadable dictionary
:param target_class: Destination class
:param accept_header: Accept header to use if doing a request
:param metadata: Metadata about the source. Filled in as we go along
:return: Instance of the target class if loader worked
"""
# Makes coding easier down the line if we've got this, even if it is strictly internal
if metadata is None:
metadata = FileInfo()
if not isinstance(source, dict):
data = hbread(source, metadata, metadata.base_path, accept_header)
else:
data = source
data_as_dict = loader(data, metadata)
return target_class(**data_as_dict) if data_as_dict is not None else None
|
StarcoderdataPython
|
3295994
|
# filename: WebSearchableEquipmentDatabase/equipment/urls.py
from django.urls import path
from . import views
urlpatterns = [
path('equipment/', views.data_table, name="equipment"),
path('filter/', views.filter_data, name="filter_data"),
path('uploadcsv/', views.upload_csv, name='uploadCSV'),
path('testing', views.testing, name='testing'),
path('crud/delete/', views.delete_equipment, name='crud_delete'),
path('crud/edit', views.get_item_by_id, name='crud_edit'),
path('crud/create', views.create_equipment, name='crud_create')
]
|
StarcoderdataPython
|
4894061
|
# _*_ ecoding: utf-8 _*_
|
StarcoderdataPython
|
11378686
|
'''
Compare the outputs from the pure python to the pure c++ to ensure that they
are sane and both are implemented correctly.
The pure python is so that we can use pypy to see if it is faster with pyEvolve,
the c++ is much quicker than all implementations in python.
'''
import automata as pa
import pyAutomata as pb
a = pa.pyautomata(110, 16) #Python
b = pb.pyautomata(110, 16) #cython
a.init_seed()
b.init_seed()
assert a.chunks_FromCurrentGeneration(8) == b.chunks_FromCurrentGeneration(8)
print('chunks_FromCurrentGeneration(8) passes')
a.init_seed((42, 42, 42))
b.init_seed((42, 42, 42))
assert a.chunks_FromCurrentGeneration(8) == b.chunks_FromCurrentGeneration(8)
assert a.chunks_FromCurrentGeneration(8) == [42, 42]
print "init_seed([(42, 42, 42)) passes"
for i in xrange(10):
a.iterateAutomata()
b.iterateAutomata()
assert a.chunks_FromCurrentGeneration(8) == b.chunks_FromCurrentGeneration(8)
print('10 generations passes')
for i in xrange(1000):
a.iterateAutomata()
b.iterateAutomata()
assert a.chunks_FromCurrentGeneration(8) == b.chunks_FromCurrentGeneration(8)
print('1000 generations passes')
a.printBuffer()
print('---------------')
b.printBuffer()
|
StarcoderdataPython
|
5180891
|
"""Register the models for the admin."""
from django.contrib import admin
from .models import ImagerProfile
# Register your models here.
admin.site.register(ImagerProfile)
|
StarcoderdataPython
|
337432
|
# -*- coding: utf-8 -*-
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A basic webserver for hosting plugin routes."""
import os
from google.cloud.aiplatform.training_utils.cloud_profiler import wsgi_types
from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin
from typing import List
from werkzeug import wrappers, Response
class WebServer:
"""A basic web server for handling requests."""
def __init__(self, plugins: List[base_plugin.BasePlugin]):
"""Creates a web server to host plugin routes.
Args:
plugins (List[base_plugin.BasePlugin]):
Required. A list of `BasePlugin` objects.
Raises:
ValueError:
When there is an invalid route passed from
one of the plugins.
"""
self._plugins = plugins
self._routes = {}
# Routes are in form {plugin_name}/{route}
for plugin in self._plugins:
for route, handler in plugin.get_routes().items():
if not route.startswith("/"):
raise ValueError(
'Routes should start with a "/", '
"invalid route for plugin %s, route %s"
% (plugin.PLUGIN_NAME, route)
)
app_route = os.path.join("/", plugin.PLUGIN_NAME)
app_route += route
self._routes[app_route] = handler
def dispatch_request(
self, environ: wsgi_types.Environment, start_response: wsgi_types.StartResponse
) -> Response:
"""Handles the routing of requests.
Args:
environ (wsgi_types.Environment):
Required. The WSGI environment.
start_response (wsgi_types.StartResponse):
Required. The response callable provided by the WSGI server.
Returns:
A response iterable.
"""
        # Check for existence of the route
request = wrappers.Request(environ)
if request.path in self._routes:
return self._routes[request.path](environ, start_response)
response = wrappers.Response("Not Found", status=404)
return response(environ, start_response)
def wsgi_app(
self, environ: wsgi_types.Environment, start_response: wsgi_types.StartResponse
) -> Response:
"""Entrypoint for wsgi application.
Args:
environ (wsgi_types.Environment):
Required. The WSGI environment.
start_response (wsgi_types.StartResponse):
Required. The response callable provided by the WSGI server.
Returns:
A response iterable.
"""
response = self.dispatch_request(environ, start_response)
return response
def __call__(self, environ, start_response):
"""Entrypoint for wsgi application.
Args:
environ (wsgi_types.Environment):
Required. The WSGI environment.
start_response (wsgi_types.StartResponse):
Required. The response callable provided by the WSGI server.
Returns:
A response iterable.
"""
return self.wsgi_app(environ, start_response)
|
StarcoderdataPython
|
1741462
|
# filename: openhab_creator/output/__init__.py
from openhab_creator.output.color import Color
|
StarcoderdataPython
|
203951
|
# -*- coding: utf-8 -*-
from django.db import migrations
def get_document_permissions(apps):
# return a queryset of the 'add_document' and 'change_document' permissions
Permission = apps.get_model('auth.Permission')
ContentType = apps.get_model('contenttypes.ContentType')
document_content_type, _created = ContentType.objects.get_or_create(
model='document',
app_label='wagtaildocs',
)
return Permission.objects.filter(
content_type=document_content_type,
codename__in=['add_document', 'change_document']
)
def copy_document_permissions_to_collections(apps, schema_editor):
Collection = apps.get_model('wagtailcore.Collection')
Group = apps.get_model('auth.Group')
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
root_collection = Collection.objects.get(depth=1)
for permission in get_document_permissions(apps):
for group in Group.objects.filter(permissions=permission):
GroupCollectionPermission.objects.create(
group=group,
collection=root_collection,
permission=permission
)
def remove_document_permissions_from_collections(apps, schema_editor):
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
document_permissions = get_document_permissions(apps)
GroupCollectionPermission.objects.filter(permission__in=document_permissions).delete()
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0026_group_collection_permission'),
('wagtaildocs', '0005_document_collection'),
]
operations = [
migrations.RunPython(
copy_document_permissions_to_collections,
remove_document_permissions_from_collections),
]
|
StarcoderdataPython
|
12801579
|
from typing import Any
import requests
from .national import NationalJSONAPI, NationalXMLAPI
from .regional import RegionalJSONAPI
class JSONClient:
def __init__(self, **kwargs: Any):
self.session = requests.Session()
self._settings = {"session": self.session, **kwargs}
self.national = NationalJSONAPI(**self._settings)
self.regional = RegionalJSONAPI(**self._settings)
class XMLClient:
def __init__(self, **kwargs):
self.session = requests.Session()
self.national = NationalXMLAPI(self.session, **kwargs)
|
StarcoderdataPython
|
3560321
|
# repo: kraglik/ore
from typing import Tuple, Union, Any, Callable
from ore_combinators.combinator import combinator, Combinator
from ore_combinators.parser_state import ParserState
from ore_combinators.result import Result
from ore_combinators.error import ParserError, EndOfFileError
class take_while(combinator): # noqa
def __init__(self, c: Combinator, condition: Union[Combinator, Callable[[Any], bool]]):
self._condition = condition
self._combinator = c
def __call__(self, state: ParserState) -> Tuple[Any, ParserState]:
output = []
initial_state = state
previous_state = state
while True:
if state.is_at_end():
break
if isinstance(self._condition, combinator):
try:
condition_result, _ = self._condition(state)
except ParserError:
break
previous_state = state
try:
result, state = self._combinator(state)
except EndOfFileError:
break
except ParserError as e:
raise ParserError(
message='TakeWhile error',
position=initial_state.position,
nested_error=e
)
if not isinstance(self._condition, combinator) and not self._condition(result):
break
output.append(result)
return Result.make_value(
output,
state if isinstance(self._condition, combinator) else previous_state
)
|
StarcoderdataPython
|
11273338
|
from typing import Any, Callable, Dict, List, Tuple
import argparse
from itertools import repeat
import json
from pathlib import Path
from pprint import pprint
import numpy as np
import pandas as pd
from scipy.special import expit
from sklearn import metrics
from sklearn.model_selection import RepeatedStratifiedKFold
from catalyst import utils
BINARY_PER_CLASS_METRICS = [
"accuracy_score",
"precision_score",
"recall_score",
"f1_score",
"roc_auc_score",
]
RANK_METRICS = [
"ndcg_score",
"coverage_error",
"label_ranking_loss",
"label_ranking_average_precision_score",
]
def build_args(parser):
"""Constructs the command-line arguments."""
parser.add_argument(
"--in-csv",
type=Path,
help="Path to .csv with labels column",
required=True,
)
parser.add_argument(
"--in-label-column",
type=str,
help="Column to get labels",
required=False,
default="labels",
)
parser.add_argument(
"--in-npy",
type=Path,
help="Path to .npy with class logits",
required=True,
)
parser.add_argument(
"--out-thresholds",
type=Path,
help="Path to save .json with thresholds",
required=True,
)
parser.add_argument(
"--metric",
type=str,
help="Metric to use",
required=False,
choices=BINARY_PER_CLASS_METRICS,
default="roc_auc_score",
)
# parser.add_argument(
# "--ignore-label", type=int,
# required=False,
# default=None
# )
parser.add_argument(
"--num-splits", type=int, help="NUM_SPLITS", required=False, default=5
)
parser.add_argument(
"--num-repeats",
type=int,
help="NUM_REPEATS",
required=False,
default=1,
)
parser.add_argument(
"--num-workers",
type=int,
help="CPU pool size",
required=False,
default=1,
)
utils.boolean_flag(parser, "verbose", default=False)
utils.boolean_flag(parser, "sigmoid", default=False)
return parser
def parse_args():
"""Parses the command line arguments for the main method."""
parser = argparse.ArgumentParser()
build_args(parser)
args = parser.parse_args()
return args
def get_binary_labels(labels: np.ndarray, label: int, ignore_label: int = None):
    """Binarize `labels` against a single class, optionally zeroing an ignored label."""
binary_labels = labels == label
if ignore_label is not None:
binary_labels[labels == ignore_label] = 0
return (binary_labels).astype(int)
def find_best_split_threshold(
    y_pred: np.ndarray, y_true: np.ndarray, metric: Callable,
):
    """Sweep 100 thresholds over [0, 1] and return the one maximizing `metric`."""
thresholds = np.linspace(0.0, 1.0, num=100)
metric_values = []
for t in thresholds:
predictions = (y_pred >= t).astype(int)
if sum(predictions) > 0:
metric_values.append(metric(y_true, predictions))
else:
metric_values.append(0.0)
best_threshold = thresholds[np.argmax(metric_values)]
return best_threshold
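# worked example (illustrative values, added comment): with y_true = [0, 1, 1, 0]
# and y_pred = [0.1, 0.8, 0.6, 0.4], sweeping metrics.f1_score over the grid
# returns ~0.404, the first threshold where (y_pred >= t) reproduces y_true exactly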
def find_best_threshold(
y_pred: np.ndarray,
y_true: np.ndarray,
metric_fn: Callable = metrics.roc_auc_score,
num_splits: int = 5,
num_repeats: int = 1,
random_state: int = 42,
):
"""@TODO: Docs. Contribution is welcome."""
rkf = RepeatedStratifiedKFold(
n_splits=num_splits, n_repeats=num_repeats, random_state=random_state
)
fold_thresholds = []
fold_metrics = {k: [] for k in BINARY_PER_CLASS_METRICS}
for train_index, test_index in rkf.split(y_true, y_true):
y_pred_train, y_pred_test = y_pred[train_index], y_pred[test_index]
y_true_train, y_true_test = y_true[train_index], y_true[test_index]
best_threshold = find_best_split_threshold(
y_pred_train, y_true_train, metric=metric_fn
)
best_predictions = (y_pred_test >= best_threshold).astype(int)
for metric_name in BINARY_PER_CLASS_METRICS:
try:
metric_value = metrics.__dict__[metric_name](
y_true_test, best_predictions
)
except ValueError:
metric_value = 0.0
fold_metrics[metric_name].append(metric_value)
fold_thresholds.append(best_threshold)
fold_best_threshold = np.mean(fold_thresholds)
for metric_name in fold_metrics:
fold_metrics[metric_name] = np.mean(fold_metrics[metric_name])
return fold_best_threshold, fold_metrics
def wrap_find_best_threshold(args: Tuple[Any]):
"""@TODO: Docs. Contribution is welcome."""
class_id, function_args = args[0], args[1:]
threshold, metrics = find_best_threshold(*function_args)
return class_id, threshold, metrics
def optimize_thresholds(
predictions: np.ndarray,
labels: np.ndarray,
classes: List[int],
metric_fn: Callable = metrics.roc_auc_score,
num_splits: int = 5,
num_repeats: int = 1,
num_workers: int = 0,
ignore_label: int = None,
) -> Tuple[Dict, Dict]:
"""@TODO: Docs. Contribution is welcome."""
pool = utils.get_pool(num_workers)
predictions_ = predictions.copy()
predictions_list, labels_list = [], []
for cls in classes:
predictions_list.append(predictions_[:, cls])
labels_list.append(
get_binary_labels(labels, cls, ignore_label=ignore_label)
)
results = utils.tqdm_parallel_imap(
wrap_find_best_threshold,
zip(
classes,
predictions_list,
labels_list,
repeat(metric_fn),
repeat(num_splits),
repeat(num_repeats),
),
pool,
)
results = [(r[1], r[2]) for r in sorted(results, key=lambda x: x[0])]
result_thresholds = [r[0] for r in results]
result_metrics = [r[1] for r in results]
class_thresholds = {c: t for (c, t) in zip(classes, result_thresholds)}
class_metrics = {c: m for (c, m) in zip(classes, result_metrics)}
return class_thresholds, class_metrics
def get_model_confidences(
confidences: np.ndarray,
thresholds: Dict[int, float] = None,
classes: List[int] = None,
):
"""
    Shift confidences by per-class thresholds and/or slice them to the classes of interest.
Args:
confidences (np.ndarray): model predictions of shape
[dataset_len; class_confidences]
thresholds (Dict[int, float]): thresholds for each class
classes (List[int]): classes of interest for evaluation
"""
if classes is not None:
classes = np.array(classes)
confidences = confidences[:, classes]
confidences_th = confidences.copy()
if thresholds is not None:
assert confidences.shape[1] == len(thresholds)
thresholds = np.array(list(thresholds.values()))
confidences_th = confidences - thresholds
return confidences_th
def score_model_coverage(confidences: np.ndarray, labels: np.ndarray):
"""@TODO: Docs. Contribution is welcome."""
candidates = np.argsort(-confidences, axis=1)
confidences = -np.sort(-confidences, axis=1)
candidates[confidences < 0] = -1
labels = labels[:, None]
coverage_metrics = {}
for top_k in [1, 3, 5]:
metric = (candidates[:, :top_k] == labels).sum(axis=1).mean()
coverage_metrics[f"Recall@{top_k:02d}"] = metric
return coverage_metrics
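# worked example (illustrative values, added comment): for confidences = [[0.9, -0.2, 0.3]]
# and labels = [0], the ranked candidates are [0, 2, -1] (the negative score is masked),
# so the true class sits at rank 1 and Recall@01 = Recall@03 = 1.0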
def _sort_dict_by_keys(disordered: Dict):
key = lambda item: item[0]
sorted_dict = {k: v for k, v in sorted(disordered.items(), key=key)}
return sorted_dict
def _save_json(dct: Dict, outpath: Path, suffix: str = None):
outpath = str(outpath)
if suffix is not None:
outpath = outpath.replace(".json", f"{suffix}.json")
dct = _sort_dict_by_keys({str(k): v for k, v in dct.copy().items()})
with open(outpath, "w") as fout:
json.dump(dct, fout, ensure_ascii=False, indent=4)
def main(args, _=None):
"""Run ``catalyst-contrib find-thresholds`` script."""
    predictions = np.load(args.in_npy)  # apply the sigmoid only when requested
    if args.sigmoid:
        predictions = expit(predictions)
labels = pd.read_csv(args.in_csv)[args.in_label_column].values
classes = list(set(labels)) # - set([args.ignore_label]))
assert args.metric in metrics.__dict__.keys()
metric_fn = metrics.__dict__[args.metric]
class_thresholds, class_metrics = optimize_thresholds(
predictions=predictions,
labels=labels,
classes=classes,
metric_fn=metric_fn,
num_splits=args.num_splits,
num_repeats=args.num_repeats,
ignore_label=None, # args.ignore_label,
num_workers=args.num_workers,
)
_save_json(class_thresholds, outpath=args.out_thresholds)
class_metrics["_mean"] = {
key_metric: np.mean(
[
class_metrics[key_class][key_metric]
for key_class in class_metrics.keys()
]
)
for key_metric in BINARY_PER_CLASS_METRICS
}
_save_json(class_metrics, args.out_thresholds, suffix=".class.metrics")
if args.verbose:
print("CLASS METRICS")
pprint(class_metrics)
print("CLASS THRESHOLDS")
pprint(class_thresholds)
labels_scores = np.zeros(predictions.shape)
    labels_scores[np.arange(len(labels)), labels] = 1.0  # one-hot row per sample
for class_thresholds_ in [None, class_thresholds]:
thresholds_used = class_thresholds_ is not None
confidences = get_model_confidences(
confidences=predictions,
thresholds=class_thresholds_,
classes=classes,
)
rank_metrics = {
key: metrics.__dict__[key](labels_scores, confidences)
for key in RANK_METRICS
}
postfix = (
".rank.metrics"
if not thresholds_used
else ".rank.metrics.thresholds"
)
_save_json(rank_metrics, args.out_thresholds, suffix=postfix)
coverage_metrics = score_model_coverage(confidences, labels)
        postfix = (  # _save_json already appends ".json"
            ".coverage.metrics"
            if not thresholds_used
            else ".coverage.metrics.thresholds"
        )
_save_json(coverage_metrics, args.out_thresholds, suffix=postfix)
if args.verbose:
print(
"RANK METRICS"
if not thresholds_used
else "RANK METRICS WITH THRESHOLD"
)
pprint(rank_metrics)
print(
"COVERAGE METRICS"
if not thresholds_used
else "COVERAGE METRICS WITH THRESHOLD"
)
pprint(coverage_metrics)
if __name__ == "__main__":
args = parse_args()
main(args)
|
StarcoderdataPython
|
6477689
|
<reponame>samtherussell/tesla-powerwall-controller
import requests
import json
protocol = "https://"
base_api_path = "/api"
battery_level_path = base_api_path + "/system_status/soe"
power_levels_path = base_api_path + "/meters/aggregates"
grid_connected_path = base_api_path + "/system_status/grid_status"
class PowerwallController:
def __init__(self, host):
self.base_url = protocol + host
def request_api(self, path):
req = requests.get(self.base_url + path, verify=False)
content = req.content.decode("utf-8")
return json.loads(content)
def get_battery_charge(self):
result = self.request_api(battery_level_path)
val = int(result['percentage'])
return f"The battery is at {val} percent"
def get_battery_power(self):
result = self.request_api(power_levels_path)
val = int(result['battery']['instant_power'])
if val == 0:
return "The battery is not being used"
if val < 0:
return f"The battery is charging at {-val} watts"
else:
return f"The battery is supplying {val} watts"
def get_solar_power(self):
result = self.request_api(power_levels_path)
val = int(result['solar']['instant_power'])
return f"The solar panels are supplying {val} watts"
def get_grid_power(self):
result = self.request_api(power_levels_path)
val = int(result['site']['instant_power'])
if val == 0:
return "The grid is not being used"
if val < 0:
return f"You are exporting {-val} watts"
else:
return f"You are importing {val} watts"
def get_house_power(self):
result = self.request_api(power_levels_path)
val = int(result['load']['instant_power'])
return f"The house is using {val} watts"
def is_grid_connected(self):
result = self.request_api(grid_connected_path)
val = result['grid_status']
if val == "SystemGridConnected":
return "The house is connected to the grid"
elif val == "SystemIslandedActive":
return "The house is not connected to the grid"
elif val == "SystemTransitionToGrid":
return "The house is connecting to the grid"
else:
raise Exception("The powerwall returned an unknown status")
|
StarcoderdataPython
|
6400159
|
from .furlong import Furlong
|
StarcoderdataPython
|
135963
|
import logging
import re
from collections import namedtuple
from datetime import time
import six
from six.moves.urllib.parse import (ParseResult, quote, urlparse,
urlunparse)
logger = logging.getLogger(__name__)
_Rule = namedtuple('Rule', ['field', 'value'])
RequestRate = namedtuple(
'RequestRate', ['requests', 'seconds', 'start_time', 'end_time'])
_DISALLOW_DIRECTIVE = {'disallow', 'dissallow', 'dissalow', 'disalow', 'diasllow', 'disallaw'}
_ALLOW_DIRECTIVE = {'allow'}
_USER_AGENT_DIRECTIVE = {'user-agent', 'useragent', 'user agent'}
_SITEMAP_DIRECTIVE = {'sitemap', 'sitemaps', 'site-map'}
_CRAWL_DELAY_DIRECTIVE = {'crawl-delay', 'crawl delay'}
_REQUEST_RATE_DIRECTIVE = {'request-rate', 'request rate'}
_HOST_DIRECTIVE = {'host'}
_WILDCARDS = {'*', '$'}
_HEX_DIGITS = set('0123456789ABCDEFabcdef')
__all__ = ['RequestRate', 'Protego']
def _is_valid_directive_field(field):
return any([field in _DISALLOW_DIRECTIVE,
field in _ALLOW_DIRECTIVE,
field in _USER_AGENT_DIRECTIVE,
field in _SITEMAP_DIRECTIVE,
field in _CRAWL_DELAY_DIRECTIVE,
field in _REQUEST_RATE_DIRECTIVE,
field in _HOST_DIRECTIVE])
def _enforce_path(pattern):
if pattern.startswith('/'):
return pattern
return '/' + pattern
class _URLPattern(object):
"""Internal class which represents a URL pattern."""
def __init__(self, pattern):
self._pattern = pattern
self.priority = len(pattern)
self._contains_asterisk = '*' in self._pattern
self._contains_dollar = self._pattern.endswith('$')
if self._contains_asterisk:
self._pattern_before_asterisk = self._pattern[:self._pattern.find('*')]
elif self._contains_dollar:
self._pattern_before_dollar = self._pattern[:-1]
self._pattern_compiled = False
def match(self, url):
"""Retun True if pattern matches the given URL, otherwise return False."""
# check if pattern is already compiled
if self._pattern_compiled:
return self._pattern.match(url)
if not self._contains_asterisk:
if not self._contains_dollar:
# answer directly for patterns without wildcards
return url.startswith(self._pattern)
# pattern only contains $ wildcard.
return url == self._pattern_before_dollar
if not url.startswith(self._pattern_before_asterisk):
return False
self._pattern = self._prepare_pattern_for_regex(self._pattern)
self._pattern = re.compile(self._pattern)
self._pattern_compiled = True
return self._pattern.match(url)
def _prepare_pattern_for_regex(self, pattern):
"""Return equivalent regex pattern for the given URL pattern."""
pattern = re.sub(r'\*+', '*', pattern)
s = re.split(r'(\*|\$$)', pattern)
for index, substr in enumerate(s):
if substr not in _WILDCARDS:
s[index] = re.escape(substr)
elif s[index] == '*':
s[index] = '.*?'
pattern = ''.join(s)
return pattern
class _RuleSet(object):
"""Internal class which stores rules for a user agent."""
def __init__(self, parser_instance):
self.user_agent = None
self._rules = []
self._crawl_delay = None
self._req_rate = None
self._parser_instance = parser_instance
def applies_to(self, robotname):
"""Return matching score."""
robotname = robotname.strip().lower()
if self.user_agent == '*':
return 1
if self.user_agent in robotname:
return len(self.user_agent)
return 0
def _unquote(self, url, ignore='', errors='replace'):
"""Replace %xy escapes by their single-character equivalent."""
if '%' not in url:
return url
def hex_to_byte(h):
"""Replaces a %xx escape with equivalent binary sequence."""
if six.PY2:
return chr(int(h, 16))
return bytes.fromhex(h)
# ignore contains %xy escapes for characters that are not
# meant to be converted back.
ignore = {'{:02X}'.format(ord(c)) for c in ignore}
parts = url.split('%')
parts[0] = parts[0].encode('utf-8')
for i in range(1, len(parts)):
if len(parts[i]) >= 2:
# %xy is a valid escape only if x and y are hexadecimal digits.
if set(parts[i][:2]).issubset(_HEX_DIGITS):
# make sure that all %xy escapes are in uppercase.
hexcode = parts[i][:2].upper()
leftover = parts[i][2:]
if hexcode not in ignore:
parts[i] = hex_to_byte(hexcode) + leftover.encode('utf-8')
continue
else:
parts[i] = hexcode + leftover
# add back the '%' we removed during splitting.
parts[i] = b'%' + parts[i].encode('utf-8')
return b''.join(parts).decode('utf-8', errors)
def hexescape(self, char):
"""Escape char as RFC 2396 specifies"""
hex_repr = hex(ord(char))[2:].upper()
if len(hex_repr) == 1:
hex_repr = "0%s" % hex_repr
return "%" + hex_repr
def _quote_path(self, path):
"""Return percent encoded path."""
parts = urlparse(path)
path = self._unquote(parts.path, ignore='/%')
        # quote does not work with unicode strings in Python 2.7
if six.PY2:
path = quote(path.encode('utf-8'), safe='/%')
else:
path = quote(path, safe='/%')
parts = ParseResult('', '', path, parts.params, parts.query, parts.fragment)
path = urlunparse(parts)
return path
def _quote_pattern(self, pattern):
# Corner case for query only (e.g. '/abc?') and param only (e.g. '/abc;') URLs.
# Save the last character otherwise, urlparse will kill it.
last_char = ''
if pattern[-1] == '?' or pattern[-1] == ';' or pattern[-1] == '$':
last_char = pattern[-1]
pattern = pattern[:-1]
parts = urlparse(pattern)
pattern = self._unquote(parts.path, ignore='/*$%')
        # quote does not work with unicode strings in Python 2.7
if six.PY2:
pattern = quote(pattern.encode('utf-8'), safe='/*%')
else:
pattern = quote(pattern, safe='/*%')
parts = ParseResult('', '', pattern + last_char, parts.params, parts.query, parts.fragment)
pattern = urlunparse(parts)
return pattern
def allow(self, pattern):
if '$' in pattern:
self.allow(pattern.replace('$', self.hexescape('$')))
pattern = self._quote_pattern(pattern)
if not pattern:
return
self._rules.append(_Rule(field='allow', value=_URLPattern(pattern)))
# If index.html is allowed, we interpret this as / being allowed too.
if pattern.endswith('/index.html'):
self.allow(pattern[:-10] + '$')
def disallow(self, pattern):
if '$' in pattern:
self.disallow(pattern.replace('$', self.hexescape('$')))
pattern = self._quote_pattern(pattern)
if not pattern:
return
self._rules.append(_Rule(field='disallow', value=_URLPattern(pattern)))
def finalize_rules(self):
self._rules.sort(key=lambda r: (r.value.priority, r.field == 'allow'), reverse=True)
def can_fetch(self, url):
"""Return if the url can be fetched."""
url = self._quote_path(url)
allowed = True
for rule in self._rules:
if rule.value.match(url):
if rule.field == 'disallow':
allowed = False
break
return allowed
@property
def crawl_delay(self):
"""Get & set crawl delay for the rule set."""
return self._crawl_delay
@crawl_delay.setter
def crawl_delay(self, delay):
try:
delay = float(delay)
except ValueError:
# Value is malformed, do nothing.
logger.debug("Malformed rule at line {} : cannot set crawl delay to '{}'. "
"Ignoring this rule.".format(self._parser_instance._total_line_seen, delay))
return
self._crawl_delay = delay
@property
def request_rate(self):
"""Get & set request rate for the rule set."""
return self._req_rate
@request_rate.setter
def request_rate(self, value):
try:
parts = value.split()
if len(parts) == 2:
rate, time_period = parts
else:
rate, time_period = parts[0], ''
requests, seconds = rate.split('/')
time_unit = seconds[-1].lower()
requests, seconds = int(requests), int(seconds[:-1])
if time_unit == 'm':
seconds *= 60
elif time_unit == 'h':
seconds *= 3600
elif time_unit == 'd':
seconds *= 86400
start_time = None
end_time = None
if time_period:
start_time, end_time = time_period.split('-')
start_time = time(int(start_time[:2]), int(start_time[-2:]))
end_time = time(int(end_time[:2]), int(end_time[-2:]))
except Exception:
# Value is malformed, do nothing.
logger.debug("Malformed rule at line {} : cannot set request rate using '{}'. "
"Ignoring this rule.".format(self._parser_instance._total_line_seen, value))
return
self._req_rate = RequestRate(requests, seconds, start_time, end_time)
class Protego(object):
def __init__(self):
# A dict mapping user agents (specified in robots.txt) to rule sets.
self._user_agents = {}
# Preferred host specified in the robots.txt
self._host = None
# A list of sitemaps specified in the robots.txt
self._sitemap_list = []
# A memoization table mapping user agents (used in queries) to matched rule sets.
self._matched_rule_set = {}
self._total_line_seen = 0
self._invalid_directive_seen = 0
self._total_directive_seen = 0
@classmethod
def parse(cls, content):
o = cls()
o._parse_robotstxt(content)
return o
def _parse_robotstxt(self, content):
lines = content.splitlines()
# A list containing rule sets corresponding to user
# agents of the current record group.
current_rule_sets = []
# Last encountered rule irrespective of whether it was valid or not.
previous_rule_field = None
for line in lines:
self._total_line_seen += 1
# Remove the comment portion of the line
hash_pos = line.find('#')
if hash_pos != -1:
line = line[0: hash_pos].strip()
# Whitespace at the beginning and at the end of the line is ignored.
line = line.strip()
if not line:
continue
# Format for a valid robots.txt rule is "<field>:<value>"
if line.find(':') != -1:
field, value = line.split(':', 1)
else:
# We will be generous here and give it a second chance.
parts = line.split(' ')
if len(parts) < 2:
continue
                possible_field = parts[0]
                for i in range(1, len(parts)):
                    if _is_valid_directive_field(possible_field):
                        field, value = possible_field, ' '.join(parts[i:])
                        break
                    possible_field += ' ' + parts[i]
else:
continue
field = field.strip().lower()
value = value.strip()
# Ignore rules with no value part (e.g. "Disallow: ", "Allow: ").
if not value:
previous_rule_field = field
continue
# Ignore rules without a corresponding user agent.
if not current_rule_sets and field not in _USER_AGENT_DIRECTIVE:
logger.debug("Rule at line {} without any user agent to enforce it on.".format(self._total_line_seen))
continue
self._total_directive_seen += 1
if field in _USER_AGENT_DIRECTIVE:
if previous_rule_field and previous_rule_field not in _USER_AGENT_DIRECTIVE:
current_rule_sets = []
# Wildcards are not supported in the user agent values.
# We will be generous here and remove all the wildcards.
user_agent = value.strip().lower()
user_agent_without_asterisk = None
if user_agent != '*' and '*' in user_agent:
user_agent_without_asterisk = user_agent.replace('*', '')
for user_agent in [user_agent, user_agent_without_asterisk]:
if not user_agent:
continue
# See if this user agent is encountered before, if so merge these rules into it.
rule_set = self._user_agents.get(user_agent, None)
if rule_set and rule_set not in current_rule_sets:
current_rule_sets.append(rule_set)
if not rule_set:
rule_set = _RuleSet(self)
rule_set.user_agent = user_agent
self._user_agents[user_agent] = rule_set
current_rule_sets.append(rule_set)
elif field in _ALLOW_DIRECTIVE:
for rule_set in current_rule_sets:
rule_set.allow(_enforce_path(value))
elif field in _DISALLOW_DIRECTIVE:
for rule_set in current_rule_sets:
rule_set.disallow(_enforce_path(value))
elif field in _SITEMAP_DIRECTIVE:
self._sitemap_list.append(value)
elif field in _CRAWL_DELAY_DIRECTIVE:
for rule_set in current_rule_sets:
rule_set.crawl_delay = value
elif field in _REQUEST_RATE_DIRECTIVE:
for rule_set in current_rule_sets:
rule_set.request_rate = value
elif field in _HOST_DIRECTIVE:
self._host = value
else:
self._invalid_directive_seen += 1
previous_rule_field = field
for user_agent in self._user_agents.values():
user_agent.finalize_rules()
def _get_matching_rule_set(self, user_agent):
"""Return the rule set with highest matching score."""
if not self._user_agents:
return None
if user_agent in self._matched_rule_set:
return self._matched_rule_set[user_agent]
score_rule_set_pairs = ((rs.applies_to(user_agent), rs) for rs in self._user_agents.values())
match_score, matched_rule_set = max(score_rule_set_pairs, key=lambda p: p[0])
if not match_score:
self._matched_rule_set[user_agent] = None
return None
self._matched_rule_set[user_agent] = matched_rule_set
return matched_rule_set
def can_fetch(self, url, user_agent):
"""Return True if the user agent can fetch the URL, otherwise return False."""
matched_rule_set = self._get_matching_rule_set(user_agent)
if not matched_rule_set:
return True
return matched_rule_set.can_fetch(url)
def crawl_delay(self, user_agent):
"""Return the crawl delay specified for the user agent as a float.
If nothing is specified, return None.
"""
matched_rule_set = self._get_matching_rule_set(user_agent)
if not matched_rule_set:
return None
return matched_rule_set.crawl_delay
def request_rate(self, user_agent):
"""Return the request rate specified for the user agent as a named tuple
RequestRate(requests, seconds, start_time, end_time). If nothing is
specified, return None.
"""
matched_rule_set = self._get_matching_rule_set(user_agent)
if not matched_rule_set:
return None
return matched_rule_set.request_rate
@property
def sitemaps(self):
"""Get an iterator containing links to sitemaps specified."""
return iter(self._sitemap_list)
@property
def preferred_host(self):
"""Get the preferred host."""
return self._host
@property
def _valid_directive_seen(self):
return self._total_directive_seen - self._invalid_directive_seen
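# a quick usage sketch with an inline robots.txt (added example, not part of the module)
if __name__ == '__main__':
    content = (
        "User-agent: *\n"
        "Disallow: /private/\n"
        "Crawl-delay: 2\n"
        "Sitemap: https://example.com/sitemap.xml\n"
    )
    rp = Protego.parse(content)
    print(rp.can_fetch("https://example.com/private/page", "mybot"))  # False
    print(rp.can_fetch("https://example.com/public", "mybot"))        # True
    print(rp.crawl_delay("mybot"))                                    # 2.0
    print(list(rp.sitemaps))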
|
StarcoderdataPython
|
1877019
|
#!/usr/bin/env python3
import os
import subprocess
from distutils.dir_util import copy_tree
from contextlib import contextmanager
@contextmanager
def chg_cwd(path: str):
old_cwd = os.getcwd()
os.chdir(path)
try:
yield
finally:
os.chdir(old_cwd)
# check=True will raise an exception if returncode != 0
# it's like subprocess.check_output() but we still want to print output even if returncode != 0
def run(cmd, check: bool):
    print(' '.join(cmd))
    # run without check so the output is printed before any failure is raised
    completed = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    print(completed.stdout.decode("utf-8"))
    if check and completed.returncode != 0:
        raise subprocess.CalledProcessError(completed.returncode, cmd)
blog = os.path.dirname(os.path.abspath(__file__))
generated = os.path.normpath(os.path.join(blog, '../galdebert.github.io'))
to_copy = os.path.join(blog, 'to_copy')
run(['hugo', '--cleanDestinationDir', '-d', generated], check=True)
copy_tree(to_copy, generated)
with chg_cwd(generated):
run(['git', 'add', '-A'], check=True)
# git commit returns 1 if there is nothing to commit
run(['git', 'commit', '-m', 'new generated pages'], check=False)
run(['git', 'push'], check=True)
|
StarcoderdataPython
|
3445520
|
<reponame>opensource-assist/fuschia
#!/usr/bin/env python2.7
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import test_env
from lib.host import Host
from process_mock import MockProcess
class MockHost(Host):
def __init__(self):
super(MockHost, self).__init__()
self._ids = [os.path.join('mock', '.build-id')]
self._llvm_symbolizer = os.path.join('mock', 'llvm_symbolizer')
self._symbolizer_exec = os.path.join('mock', 'symbolize')
self._platform = 'mock'
self._zxtools = os.path.join('mock', 'out', 'default.zircon', 'tools')
self.ssh_config = os.path.join(
'mock', 'out', 'default', 'ssh-keys', 'ssh_config')
self.fuzzers = [
(u'mock-package1', u'mock-target1'),
(u'mock-package1', u'mock-target2'),
(u'mock-package1', u'mock-target3'),
(u'mock-package2', u'mock-target1'),
(u'mock-package2', u'mock-target11'),
(u'mock-package2', u'an-extremely-verbose-target-name')
]
self.history = []
def create_process(self, args, **kwargs):
p = MockProcess(self, args, **kwargs)
if ' '.join(args) == 'git rev-parse HEAD':
p.response = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
elif args[0] == self._symbolizer_exec:
p.response = """[000001.234567][123][456][klog] INFO: Symbolized line 1
[000001.234568][123][456][klog] INFO: Symbolized line 2
[000001.234569][123][456][klog] INFO: Symbolized line 3
"""
return p
|
StarcoderdataPython
|
6558101
|
<reponame>trujunzhang/djzhang-targets
# coding=utf-8
import logging
import time
class HarajsTime(object):
"""
Converting the string date to time using 'GMT'.
"""
tm_minute = 0
tm_hour = 0
tm_day = 0
tm_week = 0
tm_month = 0
tm_year = 0
lang = [
"دقيقه", # "minute"
"ساعه", # "hour"
"يوم", # "day"
'أسبوع', # "week"
"شهر", # "month"
"سنه", # "year"
]
value = [
60, # => $lang['minute'],
60 * 60, # => $lang['hour'],
24 * 60 * 60, # => $lang['day'],
24 * 60 * 60 * 7, # => $lang['week'],
30 * 24 * 60 * 60, # => $lang['month'],
365 * 24 * 60 * 60, # => $lang['year'],
]
def __init__(self):
super(HarajsTime, self).__init__()
def maketime(self, split, url):
for item in split:
item = item.replace('بل', '').strip()
self._get_value_from_string(item, url)
return self._make_time()
def _make_time(self):
        # value indices follow self.lang ordering: minute, hour, day, week, month, year
        seconds = self.tm_minute * self.value[0] + \
                  self.tm_hour * self.value[1] + \
                  self.tm_day * self.value[2] + \
                  self.tm_week * self.value[3] + \
                  self.tm_month * self.value[4] + \
                  self.tm_year * self.value[5]
return self._get_current_time() - seconds
def _get_current_time(self):
return int(time.time())
def _get_value_from_string(self, item, url):
split = item.split(' ')
if len(split) == 1:
if split[0] in self.lang: # such as 'ساعه'(an hour)
time_type = split[0]
time_value = 1
else:
logging.debug(" make time for harajs failure, {}".format(url))
return
else:
time_type = split[1]
time_value = int(split[0])
if time_type in self.lang:
index = self.lang.index(time_type)
if index == 0:
self.tm_minute = time_value
elif index == 1:
self.tm_hour = time_value
elif index == 2:
self.tm_day = time_value
elif index == 3:
self.tm_week = time_value
elif index == 4:
self.tm_month = time_value
elif index == 5:
self.tm_year = time_value
logging.debug(" make time for harajs sucessfully, {}".format(url))
class TimerUtil(object):
def __init__(self):
super(TimerUtil, self).__init__()
def get_time_for_harajs(self, time_ago, url=''):
"""
        :param time_ago: such as 'قبل 6 يوم و 2 ساعه في' ("6 days and 2 hours ago")
:return:
"""
if time_ago == '':
return self._get_default_time()
spec_ago = 'قبل 0 دقيقه في' # 0 minutes ago
if spec_ago in time_ago:
return int(time.time())
spec_ago = 'قبل دقيقه في' # A minute ago at
if spec_ago in time_ago:
return int(time.time())
time_ago = time_ago.replace(' في', '').replace('قبل ', '').replace("\xc2\xa0", "").strip()
split = time_ago.split(' و')
return HarajsTime().maketime(split, url)
def get_time_for_mstaml(self, time_ago):
"""
Converting string time to int.
:param time_ago: such as '2016-06-29 14:39:34 GMT'
:return:
"""
if time_ago == '':
return self._get_default_time()
today = time.strptime(time_ago, "%Y-%m-%d %H:%M:%S %Z")
int_time = time.mktime(today)
return int_time + self._get_utc_offset()
def _get_utc_offset(self):
from datetime import datetime
ts = time.time()
utc_offset = (datetime.fromtimestamp(ts) -
datetime.utcfromtimestamp(ts)).total_seconds()
return utc_offset
def _get_default_time(self):
"""
When time string is empty, So we only give it a default time.
:return:
"""
return int(time.time()) + self._get_utc_offset()
|
StarcoderdataPython
|
6511261
|
<reponame>ananya5254/WIE-WoC
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 3 00:20:07 2022
@author: sachi
"""
# Bubble sort in Python
def bubbleSort(array):
for i in range(len(array)):
for j in range(0, len(array) - i - 1):
if array[j] > array[j + 1]:
temp = array[j]
array[j] = array[j+1]
array[j+1] = temp
data = []
n = int(input("Number of elements in the array: "))
for i in range(0,n):
l=int(input())
data.append(l)
bubbleSort(data)
print('Sorted Array is:')
print(data)
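# non-interactive sanity check (added example, not part of the original exercise)
sample = [5, 1, 4, 2, 8]
bubbleSort(sample)
assert sample == [1, 2, 4, 5, 8]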
|
StarcoderdataPython
|
210602
|
"""Generate the image lists for data loaders."""
import os
import sys
from os import path as osp
from ..common.logger import logger
def gen_list(
data_root: str,
data_dir: str,
list_dir: str,
phase: str,
list_type: str,
suffix: str = ".jpg",
) -> None:
"""Generate the list."""
phase_dir = osp.join(data_root, data_dir, phase)
if not osp.exists(phase_dir):
raise ValueError(f"Can not find folder {phase_dir}")
images = sorted(
[
osp.join(data_dir, phase, n)
for n in os.listdir(phase_dir)
if n[-len(suffix) :] == suffix
]
)
logger.info("Found %d items in %s %s", len(images), data_dir, phase)
out_path = osp.join(list_dir, f"{phase}_{list_type}.txt")
if not osp.exists(list_dir):
os.makedirs(list_dir)
logger.info("Writing %s", out_path)
with open(out_path, "w", encoding="utf-8") as fp:
fp.write("\n".join(images))
def gen_images(
data_root: str, list_dir: str, image_type: str = "100k"
) -> None:
"""Generate lists for different phases."""
for phase in ["train", "val", "test"]:
gen_list(
data_root,
osp.join("images", image_type),
list_dir,
phase,
"images",
".jpg",
)
def gen_drivable(data_root: str) -> None:
"""Generate lists for drivable area."""
image_type = "100k"
label_dir = "drivable_maps/labels"
list_dir = "lists/100k/drivable"
gen_images(data_root, list_dir, image_type)
for p in ["train", "val"]:
gen_list(
data_root, label_dir, list_dir, p, "labels", "drivable_id.png"
)
def gen_seg(data_root: str) -> None:
"""Generate lists for segmentation."""
image_type = "10k"
label_dir = "seg_maps/labels"
list_dir = "lists/10k/seg"
gen_images(data_root, list_dir, image_type)
for p in ["train", "val"]:
gen_list(data_root, label_dir, list_dir, p, "labels", "train_id.png")
if __name__ == "__main__":
gen_drivable(sys.argv[1])
gen_seg(sys.argv[1])
|
StarcoderdataPython
|
372078
|
# Quick test that lark works
# https://lark-parser.readthedocs.io/en/latest/examples/calc.html
from lark import Lark, Transformer, v_args
calc_grammar = """
?start: sum
| NAME "=" sum -> assign_var
?sum: product
| sum "+" product -> add
| sum "-" product -> sub
?product: atom
| product "*" atom -> mul
| product "/" atom -> div
?atom: NUMBER -> number
| "-" atom -> neg
| NAME -> var
| "(" sum ")"
%import common.CNAME -> NAME
%import common.NUMBER
%import common.WS_INLINE
%ignore WS_INLINE
"""
@v_args(inline=True) # Affects the signatures of the methods
class CalculateTree(Transformer):
from operator import add, sub, mul, truediv as div, neg
number = float
def __init__(self):
self.vars = {}
def assign_var(self, name, value):
self.vars[name] = value
return value
def var(self, name):
try:
return self.vars[name]
except KeyError:
raise Exception("Variable not found: %s" % name)
calc_parser = Lark(calc_grammar, parser='lalr', transformer=CalculateTree())
calc = calc_parser.parse
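# quick sanity check of the grammar above (added example)
if __name__ == '__main__':
    print(calc("a = 1+2"))  # 3.0, and binds the variable a
    print(calc("a * -3"))   # -9.0
    print(calc("(1+2)*3"))  # 9.0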
|
StarcoderdataPython
|
8000326
|
<filename>robot.py<gh_stars>0
#!/usr/bin/env python3
import random
from fireant import FireAnt
import userControl as UC # use a custom control library
# Examples of user defined functions
def my_function(value):
# do something with value
print(value)
def light_on():
print("Light is ON")
def light_off():
print("Light is OFF")
def light_switch(value):
if value:
light_on()
else:
light_off()
def light_reader():
return random.randint(1, 501)
def temperature_reader():
return random.randint(-101, 101)
def distance_reader():
return random.randint(0, 1001)
def hold(value):
if value:
print('hold ON')
else:
print('hold OFF')
if __name__ == '__main__':
try:
myAnt = FireAnt('auth.json')
print(myAnt.get_name())
print(myAnt.get_description())
# myAnt.add_sensor(name, callback_function)
myAnt.add_sensor("light", light_reader)
myAnt.add_sensor("temperature", temperature_reader)
myAnt.add_sensor("distance", distance_reader)
# myAnt.remove_sensor(name)
# myAnt.add_command(name, callback, key, behavior)
myAnt.add_command('fwd', UC.move_forward, 'W', "press")
myAnt.add_command('left', UC.move_left, 'A', "press")
myAnt.add_command('right', UC.move_right, 'D', "press")
myAnt.add_command('back', UC.move_back, 'S', "press")
myAnt.add_command('light', light_switch, 'F', "tap")
myAnt.add_command('hold on', hold, 'h', 'hold')
# myAnt.remove_command(name)
except KeyboardInterrupt:
print("Interrupted by owner")
|
StarcoderdataPython
|
1822255
|
<gh_stars>0
from . import uct_helper_utils
from . import uct_parameters
from .. import agents
from .. import characters
from .. import constants
import numpy as np
from random import randint
class StateHelper(object):
def precompute_possible_actions(self, board):
listoflistoflists = []
for i in range(0, constants.BOARD_SIZE):
sublist = []
for j in range(0, constants.BOARD_SIZE):
action_list = [constants.Action.Stop.value] # stay action
if i - 1 >= 0 and board[i - 1][j] != constants.Item.Rigid.value: # north
action_list.append(constants.Action.Up.value)
if i + 1 < constants.BOARD_SIZE and board[i + 1][
j] != constants.Item.Rigid.value: # south
action_list.append(constants.Action.Down.value)
if j - 1 >= 0 and board[i][j - 1] != constants.Item.Rigid.value: # west
action_list.append(constants.Action.Left.value)
if j + 1 < constants.BOARD_SIZE and board[i][
j + 1] != constants.Item.Rigid.value: # east
action_list.append(constants.Action.Right.value)
sublist.append(action_list)
listoflistoflists.append(sublist)
return listoflistoflists
def __init__(self, observation, game_tracker):
self.sim_joint_obs = {}
self.state_game_tracker = game_tracker # pointer to the game tracker to utilize
self.sim_my_position = tuple(observation['position'])
self.sim_board = np.array(observation['board'])
self.lookup_possible_acts = self.precompute_possible_actions(self.sim_board)
self.sim_enemy_locations = uct_helper_utils._enemies_positions(self.sim_board, tuple(observation['enemies']))
self._reserve_agents = [agents.RandomAgent(), agents.RandomAgent(), agents.RandomAgent(), agents.RandomAgent()]
for i in range(4):
self._reserve_agents[i].init_agent(i, constants.GameType.FFA)
# TODO populate only alive enemies
for i in range(len(self.sim_enemy_locations)):
if uct_parameters.DEBUG_MODE:
print(i,'th enemy at', self.sim_enemy_locations[i])
self.sim_bombs_dict = uct_helper_utils.convert_bombs(np.array(observation['bomb_blast_strength']))
self.sim_enemies = [constants.Item(e) for e in observation['enemies']]
if uct_parameters.DEBUG_MODE:
print('enemies are', self.sim_enemies)
game_tracker_flames = self.state_game_tracker.global_flame_map
self.sim_flames_ind = np.transpose(np.nonzero(game_tracker_flames)) # get indices of flames
#if uct_parameters.DEBUG_MODE:
# print('flames are',self.sim_flames_dict)
self.sim_ammo = int(observation['ammo'])
self.sim_blast_strength = int(observation['blast_strength'])
self.sim_actions_for_four = [None] * 4 # TODO set it to the number of remaining agents - must be overridden
self.sim_agent_list = [] # starts with uct dummy agent - first agent is our agent indeed
self.sim_agent_list.append(self._reserve_agents[0])
for i in range(len(self.sim_enemy_locations)): # go over all enemies EXCLUDING recently dead ones
self.sim_agent_list.append(self._reserve_agents[i+1])
self.sim_bombs = []
for i in range(len(self.sim_bombs_dict)): # TODO associate the bomb with the bomber efficiently
self.sim_bombs.append(characters.Bomb(self.sim_agent_list[randint(0,len(self.sim_agent_list)-1)], self.sim_bombs_dict[i]['position'],
observation['bomb_life'][self.sim_bombs_dict[i]['position'][0]][self.sim_bombs_dict[i]['position'][1]],
self.sim_bombs_dict[i]['blast_strength'], moving_direction=None))
self.sim_flames = []
for i in range(np.count_nonzero(game_tracker_flames)):
self.sim_flames.append(characters.Flame(tuple(self.sim_flames_ind[i]), life=game_tracker_flames[self.sim_flames_ind[i][0]][self.sim_flames_ind[i][1]]))
self.sim_items, self.sim_dist, self.sim_prev = uct_helper_utils._djikstra(self.sim_board, self.sim_my_position, self.sim_bombs_dict, self.sim_enemies, depth=8)
def reset_obs(self,observation):
self.sim_my_position = tuple(observation['position'])
self.sim_board = np.array(observation['board'])
self.sim_bombs_dict = uct_helper_utils.convert_bombs(np.array(observation['bomb_blast_strength']))
self.sim_enemies = [constants.Item(e) for e in observation['enemies']]
self.sim_enemy_locations = uct_helper_utils._enemies_positions(self.sim_board, tuple(observation['enemies']))
#self.sim_flames_dict = uct_helper_utils.convert_flames(uct_helper_utils._flame(self.sim_board))
game_tracker_flames = self.state_game_tracker.global_flame_map
self.sim_flames_ind = np.transpose(np.nonzero(game_tracker_flames)) # get indices of flames
self.sim_ammo = int(observation['ammo'])
self.sim_blast_strength = int(observation['blast_strength'])
self.sim_items, self.sim_dist, self.sim_prev = uct_helper_utils._djikstra(self.sim_board, self.sim_my_position, self.sim_bombs_dict, self.sim_enemies, depth=8)
# TODO opponent modeling must fill the information correctly here
# TODO Tricky - how to track bomb bomber relation to reset these values correctly?
# Agent Modeling has to update this part
# TODO : Associate bombs with enemies- correlate bomb lifes with bomb & enemy locations
self._reserve_agents[0].set_start_position(self.sim_my_position)
self._reserve_agents[0].reset(self.sim_ammo, True, self.sim_blast_strength, observation['can_kick'])
self.sim_actions_for_four = [None] * 4
self.sim_agent_list = [self._reserve_agents[0]] # first agent is our agent indeed
for i in range(len(self.sim_enemy_locations)): # go over all enemies EXCLUDING recently dead ones
self._reserve_agents[i+1].set_start_position(self.sim_enemy_locations[i])
self._reserve_agents[i+1].reset(1, is_alive=True, blast_strength=None, can_kick=False)
self.sim_agent_list.append(self._reserve_agents[i+1])
self.sim_bombs = []
for i in range(len(self.sim_bombs_dict)): # TODO currently moving bombs do not transfer to the UCT as moving.
self.sim_bombs.append(characters.Bomb(self.sim_agent_list[randint(0,len(self.sim_agent_list)-1)], self.sim_bombs_dict[i]['position'],
observation['bomb_life'][self.sim_bombs_dict[i]['position'][0]][
self.sim_bombs_dict[i]['position'][1]],
self.sim_bombs_dict[i]['blast_strength'], moving_direction=None))
self.sim_flames = []
for i in range(np.count_nonzero(game_tracker_flames)):
self.sim_flames.append(characters.Flame(tuple(self.sim_flames_ind[i]), life=game_tracker_flames[self.sim_flames_ind[i][0]][self.sim_flames_ind[i][1]])) # now flames have correct lifetimes!!!
|
StarcoderdataPython
|
8186406
|
'''
Contains the following classes:
LMLexicon
'''
import os
import requests
import pandas
class LMLexicon:
    '''This can be used for non-commercial purposes only; see the following
website for details:
http://sraf.nd.edu/
'''
def __init__(self):
data_path = os.path.abspath(os.path.join('data', 'finance',
'loughran_list.csv'))
if not os.path.exists(data_path):
os.makedirs(os.sep + os.path.join(*data_path.split(os.sep)[:-1]),
exist_ok=True)
headers = {'user-agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '\
'AppleWebKit/537.36 (KHTML, like Gecko) '\
'Chrome/58.0.3029.110 Safari/537.36'}
response = requests.get('https://drive.google.com/uc?id=0B4niqV00F'\
'3msaFZGUEZNTGtBblU&export=download',
headers=headers)
with open(data_path, 'w') as fp:
fp.write(response.text)
self.lexicon = LMLexicon._load_lexicon(data_path)
@staticmethod
def _load_lexicon(data_path):
with open(data_path, 'r', encoding='ISO-8859-1', newline='') as fp:
data = pandas.read_csv(fp)
pos_words = data.loc[data['Positive'] != 0]['Word']
neg_words = data.loc[data['Negative'] != 0]['Word']
lexicon = {word.lower().strip() : 1 for word in pos_words}
lexicon.update({word.lower().strip() : -1 for word in neg_words})
return lexicon
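# usage sketch (added example): the lexicon maps lower-cased words to +1 (positive)
# or -1 (negative)
if __name__ == '__main__':
    lex = LMLexicon()        # downloads the word list on first run
    print(len(lex.lexicon))  # number of sentiment-bearing words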
|
StarcoderdataPython
|
6512152
|
''' extracting frames from video to improve advanced lane lines project'''
import os
import cv2
# reading frames from video
video = cv2.VideoCapture('challenge_video.mp4')
# check if path exists. if not, create a path to store images in
if not os.path.exists('video_2_image'):
os.makedirs('video_2_image')
images = []  # list of the frame file paths written out
index = 0
while True:
    ret, frame = video.read()
    if not ret:
        break
    store = './video_2_image/frame' + str(index) + '.jpg'
    print('writing ' + store)
    cv2.imwrite(store, frame)  # actually save the frame (missing in the original)
    images.append(store)
    index += 1
video.release()
|
StarcoderdataPython
|
3251620
|
<gh_stars>1-10
import cupy # temp hack around cupy-114 and torch==1.9.1+cu111 compatibility issue
import cProfile
import os
from pathlib import Path
import sys
from settings import process_arguments, Parameters
from apps.fusion.pipeline import FusionPipeline
if __name__ == "__main__":
process_arguments(help_header="A 3D reconstruction pipeline based on Neural Non-Rigid Tracking + "
"DynamicFusion/Fusion4D + Open3D Spatial Hashing")
settings_path = os.path.join(Path(__file__).parent.resolve(), "configuration_files/nnrt_fusion_parameters.yaml")
pipeline = FusionPipeline()
if Parameters.profile.value:
cProfile.run('pipeline.run()')
else:
sys.exit(pipeline.run())
|
StarcoderdataPython
|
1669247
|
<gh_stars>0
#!/usr/bin/env python
"""
Check rabbit for connections older than <time>
Usage:
rabbit-check-connections.py (-e host) [-h] [-d] [-p port] [-t time]
(-u username)
(-x password)
[--version]
Options:
-e <host> Rabbit host to connect to
-h, --help Display this help message
-d Perform delete operation
-p <port> Port to connect to (defaults to 15672)
-t <time> Time to consider a queue stale in seconds (defaults to 86400; one day)
-u <username> Rabbit username
--version Show version
-x <password> Rabbit password
"""
import datetime
import requests
from docopt import docopt
from requests.auth import HTTPBasicAuth
arguments = docopt(__doc__, version='Check Rabbit Connections 1.0')
"""
{'--help': False,
'--version': False,
'-d': False,
'-e': 'bob',
'-p': '15672',
'-u': 'foo',
'-x': 'bar'}
"""
host = arguments.get('-e')
perform_delete = arguments.get('-d')
port = arguments.get('-p')
if port is None:
port = "15672"
user = arguments.get('-u')
password = arguments.get('-x')
do_delete = arguments.get('-d')
time_delta = arguments.get('-t')
if time_delta is None:
    time_delta = 86400
else:
    time_delta = int(time_delta)
channel_url = "http://" + host + ":" + port + "/api/channels/"
connection_url = "http://" + host + ":" + port + "/api/connections/"
channel_response = requests.get(channel_url, auth=HTTPBasicAuth(user, password))
connection_response = requests.get(connection_url, auth=HTTPBasicAuth(user, password))
channel_data = channel_response.json()
connection_data = connection_response.json()
# Match up connections and channels and print
# Their data correlated
def delete_connection(connection_name):
url = connection_url + connection_name
response = requests.delete(url, auth=HTTPBasicAuth(user, password))
print "We've been idle more than %s seconds, and have no consumers; deleting connection %s" % (time_delta, connection_name)
print "Responose: %s" % response
def check_if_idle(idle_since, connection_name, idle_delta=86400):
delta_time = datetime.datetime.now() - datetime.timedelta(seconds=idle_delta)
# 2015-07-31 16:51:04
idle_time = datetime.datetime.strptime(idle_since, "%Y-%m-%d %H:%M:%S")
if delta_time > idle_time:
return True
else:
return False
for connection in connection_data:
if "peer_port" in connection:
connection_peer_port = connection['peer_port']
else:
        continue  # skip connections without a peer_port instead of aborting the scan
for channel in channel_data:
if "peer_port" in channel['connection_details']:
channel_peer_port = channel['connection_details']['peer_port']
else:
            continue  # likewise, skip channels without connection details
if connection_peer_port == channel_peer_port and "idle_since" in channel:
if check_if_idle(channel['idle_since'], connection['name'], time_delta) and do_delete and channel['consumer_count'] == 0:
delete_connection(connection['name'])
print "There are %s connections" % len(connection_data)
print "There are %s channels" % len(channel_data)
|
StarcoderdataPython
|
9744040
|
import requests
from .auth import MpesaBase
class C2B(MpesaBase):
def __init__(self, env="sandbox", app_key=None, app_secret=None, sandbox_url=None, live_url=None):
MpesaBase.__init__(self, env, app_key, app_secret,
sandbox_url, live_url)
self.authentication_token = self.authenticate()
def register(self, shortcode=None, response_type=None, confirmation_url=None, validation_url=None):
"""This method uses Mpesa's C2B API to register validation and confirmation URLs on M-Pesa.
**Args:**
- shortcode (int): The short code of the organization.
            - response_type (str): Default response type for timeout. In case a transaction times out,
Mpesa will by default Complete or Cancel the transaction.
- confirmation_url (str): Confirmation URL for the client.
- validation_url (str): Validation URL for the client.
**Returns:**
- OriginatorConverstionID (str): The unique request ID for tracking a transaction.
- ConversationID (str): The unique request ID returned by mpesa for each request made
- ResponseDescription (str): Response Description message
"""
payload = {
"ShortCode": shortcode,
"ResponseType": response_type,
"ConfirmationURL": confirmation_url,
"ValidationURL": validation_url
}
headers = {'Authorization': f"Bearer {self.authentication_token}", 'Content-Type': "application/json"}
if self.env == "production":
base_safaricom_url = self.live_url
else:
base_safaricom_url = self.sandbox_url
saf_url = f"{base_safaricom_url}/mpesa/c2b/v1/registerurl"
r = requests.post(saf_url, headers=headers, json=payload)
return r.json()
def simulate(self, shortcode=None, command_id=None, amount=None, msisdn=None, bill_ref_number=None):
"""This method uses Mpesa's C2B API to simulate a C2B transaction.
**Args:**
- shortcode (int): The short code of the organization.
- command_id (str): Unique command for each transaction type.
- CustomerPayBillOnline - CustomerBuyGoodsOnline.
- amount (int): The amount being transacted
- msisdn (int): Phone number (msisdn) initiating the transaction MSISDN(12 digits)
- bill_ref_number: Optional
**Returns:**
- OriginatorConverstionID (str): The unique request ID for tracking a transaction.
- ConversationID (str): The unique request ID returned by mpesa for each request made
- ResponseDescription (str): Response Description message
"""
payload = {
"ShortCode": shortcode,
"CommandID": command_id,
"Amount": amount,
"Msisdn": msisdn,
"BillRefNumber": bill_ref_number
}
headers = {'Authorization': f"Bearer {self.authentication_token}", 'Content-Type': "application/json"}
if self.env == "production":
base_safaricom_url = self.live_url
else:
base_safaricom_url = self.sandbox_url
saf_url = f"{base_safaricom_url}/mpesa/c2b/v1/simulate"
r = requests.post(saf_url, headers=headers, json=payload)
return r.json()
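# a minimal usage sketch with placeholder credentials and URLs (hypothetical values)
if __name__ == "__main__":
    c2b = C2B(env="sandbox", app_key="YOUR_APP_KEY", app_secret="YOUR_APP_SECRET",
              sandbox_url="https://sandbox.safaricom.co.ke")
    result = c2b.register(shortcode=600000, response_type="Completed",
                          confirmation_url="https://example.com/confirm",
                          validation_url="https://example.com/validate")
    print(result)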
|
StarcoderdataPython
|
1807596
|
from .grip_cifar10_dber import GripCifar10
from .suction_cifar10_dber import SuctionCifar10
from .grasp_cifar10_dber import GraspCifar10
from torchvision.datasets import *
import torchvision
__all__ = ('GripCifar10','SuctionCifar10', 'GraspCifar10') + torchvision.datasets.__all__
def get_mean_std(name):
assert name in __all__ + torchvision.datasets.__all__
if name in torchvision.datasets.__all__:
db_mean = (0.4914, 0.4822, 0.4465)
db_std = (0.2023, 0.1994, 0.2010)
else:
db_mean = eval(name + '.db_mean')
db_std = eval(name + '.db_std')
return db_mean, db_std
def Dataset(name, **kwargs):
assert name in __all__ + torchvision.datasets.__all__
if name in torchvision.datasets.__all__: del kwargs['im_shape']
return eval(name)(**kwargs)
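# usage sketch (added example): torchvision names fall back to the CIFAR-10 statistics above
if __name__ == '__main__':
    mean, std = get_mean_std('CIFAR10')
    print(mean, std)  # (0.4914, 0.4822, 0.4465) (0.2023, 0.1994, 0.2010)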
|
StarcoderdataPython
|
6669801
|
<gh_stars>1-10
# Copyright (C) 2016 <NAME> <iw<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test cases for ovs_vsctl.parser.
"""
import logging
import unittest
from nose.tools import eq_
from ovs_vsctl.parser import Record
LOG = logging.getLogger(__name__)
class TestRecord(unittest.TestCase):
"""
Test cases for ovs_vsctl.parser.Record.
"""
def test_str(self):
record = Record(aaa=1, bbb='value')
eq_("Record(aaa=1, bbb='value')", str(record))
|
StarcoderdataPython
|
11335329
|
import json
import requests
import sys
from pylons import app_globals as g
class AdzerkError(Exception):
def __init__(self, status_code, response_body):
message = "(%s) %s" % (status_code, response_body)
super(AdzerkError, self).__init__(message)
self.status_code = status_code
self.response_body = response_body
class NotFound(AdzerkError): pass
def handle_response(response):
if not (200 <= response.status_code <= 299):
try:
text = response.text
except TypeError:
# A TypeError can be raised if the encoding is incorrect
text = ""
raise AdzerkError(response.status_code, text)
try:
return json.loads(response.text)
except ValueError:
raise AdzerkError(response.status_code, response.text)
class Stub(object):
def __init__(self, Id):
self.Id = Id
def _to_item(self):
return {'Id': self.Id}
class Field(object):
def __init__(self, name, optional=False):
self.name = name
self.optional = optional
class FieldSet(object):
def __init__(self, *fields):
self.fields = {field.name for field in fields}
self.essentials = {field.name for field in fields if not field.optional}
def to_set(self, exclude_optional=True):
if exclude_optional:
return self.essentials
else:
return self.fields
def __iter__(self):
for field_name in self.fields:
yield field_name
class Base(object):
_name = ''
_base_url = 'https://api.adzerk.net/v1'
_fields = FieldSet()
@classmethod
def _headers(cls):
return {'X-Adzerk-ApiKey': g.secrets['az_selfserve_key'],
'Content-Type': 'application/x-www-form-urlencoded'}
def __init__(self, Id, _is_response=False, **attr):
self.Id = Id
missing = self._fields.to_set() - set(attr.keys())
if missing:
missing = ', '.join(missing)
msg = 'missing required attributes: %s' % missing
if _is_response:
sys.stderr.write('WARNING: %s' % msg)
else:
raise ValueError(msg)
        for key, val in attr.iteritems():
            self.__setattr__(key, val, fail_on_unrecognized=(not _is_response))
def __setattr__(self, attr, val, fail_on_unrecognized=True):
if attr not in self._fields and attr != 'Id':
msg = 'unrecognized attribute: %s' % attr
if fail_on_unrecognized:
raise ValueError(msg)
else:
pass
object.__setattr__(self, attr, val)
@classmethod
def _from_item(cls, item):
Id = item.pop('Id')
thing = cls(Id, _is_response=True, **item)
return thing
def _to_item(self):
item = {}
if self.Id:
item['Id'] = self.Id
for attr in self._fields:
if hasattr(self, attr):
item[attr] = getattr(self, attr)
return item
def _to_data(self):
return {self._name: json.dumps(self._to_item())}
@classmethod
def list(cls, params=None):
url = '/'.join([cls._base_url, cls._name])
response = requests.get(url, headers=cls._headers(), params=params)
content = handle_response(response)
items = content.get('items')
if items:
return [cls._from_item(item) for item in items]
@classmethod
def create(cls, **attr):
url = '/'.join([cls._base_url, cls._name])
thing = cls(None, **attr)
data = thing._to_data()
response = requests.post(url, headers=cls._headers(), data=data)
item = handle_response(response)
if isinstance(item.get('Id'), int) and item.get('Id') < 5000:
g.log.info('item with weird Id: %s' % response.text)
return cls._from_item(item)
def _send(self):
url = '/'.join([self._base_url, self._name, str(self.Id)])
data = self._to_data()
response = requests.put(url, headers=self._headers(), data=data)
item = handle_response(response)
@classmethod
def get(cls, Id):
url = '/'.join([cls._base_url, cls._name, str(Id)])
response = requests.get(url, headers=cls._headers())
item = handle_response(response)
return cls._from_item(item)
class Map(Base):
parent = None
parent_id_attr = 'ParentId'
child = None
@classmethod
def list(cls, ParentId):
url = '/'.join([cls._base_url, cls.parent._name, str(ParentId),
cls.child._name + 's'])
response = requests.get(url, headers=cls._headers())
content = handle_response(response)
items = content.get('items')
if items:
return [cls._from_item(item) for item in items]
@classmethod
def create(cls, ParentId, **attr):
url = '/'.join([cls._base_url, cls.parent._name, str(ParentId),
cls.child._name])
thing = cls(None, **attr)
data = thing._to_data()
response = requests.post(url, headers=cls._headers(), data=data)
item = handle_response(response)
return cls._from_item(item)
def _send(self):
url = '/'.join([self._base_url, self.parent._name,
str(getattr(self, self.parent_id_attr)),
self.child._name, str(self.Id)])
data = self._to_data()
response = requests.put(url, headers=self._headers(), data=data)
item = handle_response(response)
@classmethod
def get(cls, ParentId, Id):
url = '/'.join([cls._base_url, cls.parent._name, str(ParentId),
cls.child._name, str(Id)])
response = requests.get(url, headers=cls._headers())
item = handle_response(response)
return cls._from_item(item)
class Site(Base):
_name = 'site'
_fields = FieldSet(
Field('Url'),
Field('Title'),
Field('PublisherAccountId', optional=True),
Field('IsDeleted'),
)
def __repr__(self):
return '<Site %s <%s-%s>>' % (self.Id, self.Title, self.Url)
class Zone(Base):
_name = 'zone'
_fields = FieldSet(
Field('Name'),
Field('SiteId'),
)
def __repr__(self):
return '<Zone %s <%s on Site %s>>' % (self.Id, self.Name, self.SiteId)
class Advertiser(Base):
_name = 'advertiser'
_fields = FieldSet(
Field('Title'),
Field('IsActive', optional=True),
Field('IsDeleted', optional=True),
)
@classmethod
def search(cls, Title):
raise NotImplementedError
def __repr__(self):
return '<Advertiser %s <%s>>' % (self.Id, self.Title)
class Flight(Base):
_name = 'flight'
_fields = FieldSet(
Field('Name'),
Field('StartDate'),
Field('EndDate', optional=True),
Field('NoEndDate', optional=True),
Field('Price'),
Field('OptionType'),
Field('Impressions', optional=True),
Field('IsUnlimited'),
Field('IsNoDuplicates', optional=True),
Field('IsFullSpeed'),
Field('Keywords', optional=True),
Field('UserAgentKeywords', optional=True),
Field('CampaignId'),
Field('PriorityId'),
Field('IsDeleted'),
Field('IsActive'),
Field('GoalType', optional=True),
Field('RateType', optional=True),
Field('IsFreqCap', optional=True),
Field('FreqCap', optional=True),
Field('FreqCapDuration', optional=True),
Field('FreqCapType', optional=True),
Field('DatePartingStartTime', optional=True),
Field('DatePartingEndTime', optional=True),
Field('IsSunday', optional=True),
Field('IsMonday', optional=True),
Field('IsTuesday', optional=True),
Field('IsWednesday', optional=True),
Field('IsThursday', optional=True),
Field('IsFriday', optional=True),
Field('IsSaturday', optional=True),
Field('IPTargeting', optional=True),
Field('GeoTargeting', optional=True),
Field('SiteZoneTargeting', optional=True),
Field('CreativeMaps', optional=True),
Field('ReferrerKeywords', optional=True),
Field('WeightOverride', optional=True),
Field('DeliveryStatus', optional=True),
Field('CustomTargeting', optional=True),
Field('DailyCapAmount', optional=True),
Field('LifetimeCapAmount', optional=True),
Field('CapType', optional=True),
)
@classmethod
def _from_item(cls, item):
        if 'Name' not in item:
            item['Name'] = ''  # not always included in response
        if 'CreativeMaps' not in item or not item['CreativeMaps']:
            item['CreativeMaps'] = []
thing = super(cls, cls)._from_item(item)
if hasattr(thing, 'CreativeMaps'):
thing.CreativeMaps = [CreativeFlightMap._from_item(item)
for item in thing.CreativeMaps]
return thing
@classmethod
def list(cls, is_active=False):
return super(Flight, cls).list({"isActive" : is_active})
def _to_item(self):
item = Base._to_item(self)
cfm_things = item.get('CreativeMaps')
if cfm_things:
item['CreativeMaps'] = [thing._to_item() for thing in cfm_things]
return item
def __repr__(self):
return '<Flight %s <Campaign %s>>' % (self.Id, self.CampaignId)
class Priority(Base):
_name = 'priority'
_fields = FieldSet(
Field('Name'),
Field('ChannelId'),
Field('Weight'),
Field('IsDeleted'),
)
def __repr__(self):
return '<Priority %s <Weight %s - Channel %s>>' % (self.Id, self.Weight,
self.ChannelId)
class Creative(Base):
_name = 'creative'
_fields = FieldSet(
Field('Title'),
Field('Body'),
Field('Url', optional=True),
Field('AdvertiserId'),
Field('AdTypeId'),
Field('ImageName', optional=True),
Field('Alt'),
Field('IsHTMLJS', optional=True),
Field('ScriptBody', optional=True),
Field('Metadata', optional=True),
Field('IsSync'),
Field('IsDeleted'),
Field('IsActive'),
Field('IsNoTrack', optional=True),
)
@classmethod
def list(cls, AdvertiserId):
url = '/'.join([cls._base_url, 'advertiser', str(AdvertiserId),
'creatives'])
response = requests.get(url, headers=cls._headers())
content = handle_response(response)
items = content.get('items')
if items:
return [cls._from_item(item) for item in items]
def __repr__(self):
return '<Creative %s>' % (self.Id)
class CreativeFlightMap(Map):
parent = Flight
parent_id_attr = 'FlightId'
child = Creative
_name = 'creative'
_fields = FieldSet(
Field('SizeOverride'),
Field('CampaignId'),
Field('IsDeleted'),
Field('Percentage'),
Field('Iframe'),
Field('Creative'),
Field('IsActive'),
Field('FlightId'),
Field('Impressions'),
Field('SiteId', optional=True),
Field('ZoneId', optional=True),
Field('DistributionType'),
)
def __setattr__(self, attr, val, **kw):
if attr == 'Creative':
# Creative could be a full object or just a stub
d = val
Id = d.pop('Id')
if d:
# if we are not fail_on_unrecognized, assume this is a response
is_response = not kw.get('fail_on_unrecognized', True)
val = Creative(Id, _is_response=is_response, **d)
else:
val = Stub(Id)
Map.__setattr__(self, attr, val, **kw)
@classmethod
def _from_item(cls, item):
        if 'SizeOverride' not in item:
            item['SizeOverride'] = False  # not always included in response
        if 'Iframe' not in item:
            item['Iframe'] = False  # not always included in response
thing = super(cls, cls)._from_item(item)
return thing
def _to_item(self):
item = Base._to_item(self)
item['Creative'] = item['Creative']._to_item()
return item
def __repr__(self):
return '<CreativeFlightMap %s <Creative %s - Flight %s>>' % (
self.Id,
self.Creative.Id,
self.FlightId,
)
class Channel(Base):
_name = 'channel'
_fields = FieldSet(
Field('Title'),
Field('Commission'),
Field('Engine'),
Field('Keywords'),
Field('CPM'),
Field('AdTypes'),
Field('IsDeleted'),
)
def __repr__(self):
return '<Channel %s>' % (self.Id)
class Publisher(Base):
_name = 'publisher'
_fields = FieldSet(
Field('FirstName', optional=True),
Field('LastName', optional=True),
Field('CompanyName', optional=True),
Field('PaypalEmail', optional=True),
Field('PaymentOption', optional=True),
Field('Address', optional=True),
Field('IsDeleted'),
)
def __repr__(self):
return '<Publisher %s>' % (self.Id)
class Campaign(Base):
_name = 'campaign'
_fields = FieldSet(
Field('Name'),
Field('AdvertiserId'),
Field('SalespersonId'),
Field('Flights'),
Field('StartDate'),
Field('EndDate', optional=True),
Field('IsDeleted'),
Field('IsActive'),
Field('Price'),
)
@classmethod
def _from_item(cls, item):
        if 'Flights' not in item or not item['Flights']:
            item['Flights'] = []  # not always included in response
thing = super(cls, cls)._from_item(item)
if hasattr(thing, 'Flights'):
thing.Flights = [Flight._from_item(flight)
for flight in thing.Flights]
return thing
def _to_item(self):
item = Base._to_item(self)
flights = item.get('Flights')
if flights:
item['Flights'] = [flight._to_item() for flight in flights]
return item
def __repr__(self):
return '<Campaign %s>' % (self.Id)
class GeoTargeting(Base):
_name = 'geotargeting'
_fields = FieldSet(
Field('CountryCode'),
Field('Region'),
Field('MetroCode'),
Field('IsExclude'), # geotargets can include or exclude locations
)
@classmethod
def _from_item(cls, item):
Id = item.pop('LocationId')
thing = cls(Id, _is_response=True, **item)
return thing
def _send(self, FlightId):
url = '/'.join([self._base_url, 'flight', str(FlightId), self._name,
str(self.Id)])
data = self._to_data()
response = requests.put(url, headers=self._headers(), data=data)
item = handle_response(response)
def _delete(self, FlightId):
url = '/'.join([self._base_url, 'flight', str(FlightId), self._name,
str(self.Id), 'delete'])
response = requests.get(url, headers=self._headers())
message = handle_response(response)
def __repr__(self):
return '<GeoTargeting %s>' % (self.Id)
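# A minimal usage sketch of the wrappers above (IDs are illustrative; assumes
# the Base/Map plumbing defined earlier in this module supplies _base_url,
# _headers and request handling):
#
#     creatives = Creative.list(AdvertiserId=123)   # GET .../advertiser/123/creatives
#     maps = CreativeFlightMap.list(456)            # GET .../flight/456/creatives
#     one_map = CreativeFlightMap.get(456, 1011)    # GET .../flight/456/creative/1011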
# ======================================================================
from rdflib.namespace import DC, OWL, RDFS, SKOS
from rdflib.plugins import sparql
def test_issue():
query = sparql.prepareQuery(
"""
SELECT DISTINCT ?property ?parent
WHERE{
?property a owl:DeprecatedProperty .
?property dc:relation ?relation .
?property rdfs:subPropertyOf ?parent .
?property rdfs:label | skos:altLabel ?label .
}
""",
initNs={"rdfs": RDFS, "owl": OWL, "dc": DC, "skos": SKOS},
)
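# The test only checks that prepareQuery accepts the property-path alternation
# "rdfs:label | skos:altLabel" with initNs without raising. A hedged sketch of
# actually running the prepared query (the data file name is illustrative):
#
#     from rdflib import Graph
#     g = Graph()
#     g.parse("vocab.ttl")  # hypothetical ontology file
#     for row in g.query(query):
#         print(row.property, row.parent)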
# ======================================================================
#!/usr/bin/env python
# Author: <NAME>
# Date: May 29, 2018
# Class: ME 599
# File: calculate_sizes_test.py
# Description: tests for calculations for deep learning layer calculator project
import pytest
from ..calculate_sizes import *
# def test_import_all():
#     """Disabled: Travis could not find the csv file, so this only checked that
#     the import function reads entries from the csv file; it is not robust and
#     does not verify that the output is correct."""
#     assert import_all('inputs.csv') is not None
def test_calculate_output_sizes_encoder():
""" compares calculated values to known set; all formulas are the same so includes variety
no real edge cases but does test with some zeros"""
encoder_test_output, last_layer_test_output = calculate_output_sizes_encoder([['', 'Input size', 'Padding', 'Dilation', 'Kernel Size', 'Stride', 'Output Padding (decoder only)', 'Output Size'], ['Encoder', '', '', '', '', '', '', ''], ['Conv1d', '360', '1', '1', '5', '3', '', '120'], ['Maxpool1d', '120', '0', '1', '1', '1', '', '120'], ['Conv1d', '120', '1', '1', '5', '3', '', '40'], ['Maxpool1d', '40', '0', '1', '1', '1', '', '40'], ['Conv1d', '40', '0', '1', '5', '5', '', '8'], ['Maxpool1d', '8', '0', '1', '1', '1', '', '8'], ['Decoder ', '', '', '', '', '', '', ''], ['ConvTrans1d', '8', '0', '0', '5', '5', '0', '40'], ['ConvTrans1d', '40', '1', '1', '5', '3', '0', '120'], ['ConvTrans1d', '120', '1', '1', '5', '3', '0', '360']])
assert encoder_test_output == [120, 120, 40, 40, 8, 8] and last_layer_test_output == 8
def test_calculate_output_sizes_decoder():
""" compares calculated values to known set; all formulas are the same so includes variety
no real edge cases but does test with some zeros"""
decoder_test_output = calculate_output_sizes_decoder([['', 'Input size', 'Padding', 'Dilation', 'Kernel Size', 'Stride', 'Output Padding (decoder only)', 'Output Size'], ['Encoder', '', '', '', '', '', '', ''], ['Conv1d', '360', '1', '1', '5', '3', '', '120'], ['Maxpool1d', '120', '0', '1', '1', '1', '', '120'], ['Conv1d', '120', '1', '1', '5', '3', '', '40'], ['Maxpool1d', '40', '0', '1', '1', '1', '', '40'], ['Conv1d', '40', '0', '1', '5', '5', '', '8'], ['Maxpool1d', '8', '0', '1', '1', '1', '', '8'], ['Decoder ', '', '', '', '', '', '', ''], ['ConvTrans1d', '8', '0', '0', '5', '5', '0', '40'], ['ConvTrans1d', '40', '1', '1', '5', '3', '0', '120'], ['ConvTrans1d', '120', '1', '1', '5', '3', '0', '360']], 8)
assert decoder_test_output == [40, 120, 360]
# ======================================================================
# File: backend/tests/conftest.py
import pytest
from backend.api import create_app
from backend.config import TestConfig
@pytest.fixture
def app():
app = create_app(TestConfig())
yield app
@pytest.fixture
def client(app):
return app.test_client()
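# Example test built on the fixtures above (the endpoint is an assumption
# about the backend's routes, not something defined in this file):
#
#     def test_root(client):
#         response = client.get('/')
#         assert response.status_code in (200, 404)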
# ======================================================================
import configparser
import re
import pytz
from common.commandline import argv
CONFIG_SECTION = 'lrrbot'
config = configparser.ConfigParser()
config.read(argv.conf)
apipass = dict(config.items("apipass"))
from_apipass = {p: u for u, p in apipass.items()}
config = dict(config.items(CONFIG_SECTION))
# hostname - server to connect to (default Twitch)
config.setdefault('hostname', 'irc.chat.twitch.tv')
# secure - whether to use TLS to connect to the server
config.setdefault('secure', True)
config['secure'] = str(config['secure']).lower() != 'false'
# port - port to connect on (default 6697 when secure, 6667 otherwise)
config['port'] = int(config.get('port', 6697 if config['secure'] else 6667))
# username
config.setdefault('username', 'lrrbot')
# password - server password
config.setdefault('password', None)
# channel - without the hash
config.setdefault('channel', 'loadingreadyrun')
# postgres - libpq connection string
# See https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING
config.setdefault('postgres', 'postgres:///lrrbot')
# reconnecttime - seconds to wait before reconnecting after a disconnect
config['reconnecttime'] = int(config.get('reconnecttime', 15))
# keepalivetime - seconds between sending keep-alive ping messages
config['keepalivetime'] = int(config.get('keepalivetime', 60))
# keepalivethreshold - number of keep-alive pings with no response before giving up
config['keepalivethreshold'] = int(config.get('keepalivethreshold', 5))
# debug - boolean option
config.setdefault('debug', False)
config['debug'] = str(config['debug']).lower() != 'false'
# debugsql - boolean option, enables debugging mode for sqlalchemy
config.setdefault('debugsql', config['debug'])
config['debugsql'] = str(config['debugsql']).lower() != 'false'
# notifyuser - user to watch for notifications
config['notifyuser'] = config.get('notifyuser', 'twitchnotify').lower()
# commandprefix - symbol to prefix all bot commands
config.setdefault('commandprefix', '!')
# siteurl - root of web site
config.setdefault('siteurl', 'https://lrrbot.com/')
# datafile - file to store save data to
config.setdefault('datafile', 'data.json')
# timezone - timezone to use for display purposes - default to Pacific Time
config['timezone'] = pytz.timezone(config.get('timezone', 'America/Vancouver'))
# socket_filename - Filename for the UDS channel that the webserver uses to communicate with the bot
config.setdefault('socket_filename', 'lrrbot.sock')
# eventsocket - Filename for the UDS channel that the webserver uses to communicate with SSE clients
config.setdefault('eventsocket', "/tmp/eventserver.sock")
# eris_socket - Filename for the UDS channel that the Discord bot uses.
config.setdefault('eris_socket', 'eris.sock')
# socket_port - TCP port to use when Unix domain sockets are not available.
config['socket_port'] = int(config.get('socket_port', 49601))
# event_port - TCP port to use when Unix domain sockets are not available.
config['event_port'] = int(config.get('event_port', 49602))
# eris_port - TCP port to use when Unix domain sockets are not available.
config['eris_port'] = int(config.get('eris_port', 49603))
# google_key - Google API key
config.setdefault('google_key', '')
# twitch_clientid - Twitch API client ID
config.setdefault('twitch_clientid', '')
# twitch_clientsecret - Twitch API secret key
config.setdefault('twitch_clientsecret', '')
# twitch_redirect_uri - Redirect URI set up for Twitch login
config.setdefault('twitch_redirect_uri', 'https://lrrbot.com/login')
# session_secret - Secret key for signing session cookies
config.setdefault('session_secret', '')
# preferred_url_scheme - Flask config key PREFERRED_URL_SCHEME: the URL scheme to use when no scheme is available
config.setdefault('preferred_url_scheme', 'https')
# whispers - boolean option, whether to connect to group chat server and respond to whispers
config.setdefault('whispers', False)
config['whispers'] = str(config['whispers']).lower() != 'false'
# cardsubkey - Pubnub subscribe key for xsplit card viewer channel
config.setdefault('cardsubkey', None)
# cardviewerchannel - Pubnub channel for xsplit card viewer
config.setdefault('cardviewerchannel', 'xsplit_image')
# Slack:
# slack_webhook_url - URL to post messages to
config.setdefault('slack_webhook_url', None)
# Patreon:
# patreon_clientid - Patreon API client ID
config.setdefault('patreon_clientid', '')
# patreon_clientsecret - Patreon API secret key
config.setdefault('patreon_clientsecret', '')
# patreon_redirect_uri - Redirect URI set up for Patreon login
config.setdefault('patreon_redirect_uri', 'https://lrrbot.com/patreon/login')
# log_desertbus_moderator_actions - log moderator actions in #desertbus
config['log_desertbus_moderator_actions'] = str(config.get('log_desertbus_moderator_actions', 'true')).lower() != 'false'
# autoautomod - automatically approve posts rejected by automod
config['autoautomod'] = str(config.get('autoautomod', 'false')).lower() != 'false'
# mods - extra users who should be treated as mods by the bot even if they're not +o
config['mods'] = set(i.lower().strip() for i in config['mods'].split(',')) if config.get('mods') else set()
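# A minimal example of the INI file this module expects (all values are
# illustrative; any key omitted falls back to the defaults set above):
#
#     [lrrbot]
#     username = lrrbot
#     channel = loadingreadyrun
#     debug = false
#     mods = mod_one, mod_two
#
#     [apipass]
#     some_user = some_token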
# ======================================================================
from django.contrib import admin
from .models import *
# Register your models here.
@admin.register(produtora)
class ProdutoraAdmin(admin.ModelAdmin):
list_display = ('nome',)
list_filter = ('nome',)
search_fields = ('nome',)
@admin.register(filme)
class FilmeAdmin(admin.ModelAdmin):
list_display = ('titulo', 'descricao', 'duracao', 'etaria', 'produtora', 'nota_imdb',)
list_filter = ('titulo', 'sinopse', 'duracao', 'etaria', 'produtora', 'nota_imdb',)
search_fields = ('titulo',)
    def descricao(self, obj):
        desc = obj.sinopse
        if desc:
            if len(desc) > 50:
                return '%s...' % desc[:50]
            return desc
@admin.register(ator)
class AtorAdmin(admin.ModelAdmin):
list_display = ('nome',)
list_filter = ('nome',)
search_fields = ('nome',)
@admin.register(elenco)
class ElencoAdmin(admin.ModelAdmin):
list_display = ('ator', 'filme')
list_filter = ('ator', 'filme',)
search_fields = ('ator',)
@admin.register(diretor)
class DiretorAdmin(admin.ModelAdmin):
list_display = ('nome',)
list_filter = ('nome',)
search_fields = ('nome',)
@admin.register(conselho)
class ConselhoAdmin(admin.ModelAdmin):
list_display = ('diretor', 'filme',)
list_filter = ('diretor', 'filme',)
search_fields = ('diretor',)
@admin.register(categoria)
class CategoriaAdmin(admin.ModelAdmin):
list_display = ('genero',)
list_filter = ('genero',)
search_fields = ('genero',)
@admin.register(grupo)
class GrupoAdmin(admin.ModelAdmin):
list_display = ('genero', 'filme',)
list_filter = ('genero', 'filme',)
search_fields = ('genero',)
@admin.register(conteudo)
class ConteudoAdmin(admin.ModelAdmin):
list_display = ('tema',)
list_filter = ('tema',)
search_fields = ('tema',)
@admin.register(teor)
class TeorAdmin(admin.ModelAdmin):
list_display = ('tema', 'filme',)
list_filter = ('tema', 'filme',)
search_fields = ('tema',)
@admin.register(cinema)
class CinemaAdmin(admin.ModelAdmin):
list_display = ('empresa', 'shopping',)
list_filter = ('empresa', 'shopping',)
search_fields = ('empresa',)
@admin.register(exibicao)
class ExibicaoAdmin(admin.ModelAdmin):
list_display = ('empresa', 'filme',)
list_filter = ('empresa', 'filme',)
search_fields = ('empresa',)
@admin.register(sala)
class SalaAdmin(admin.ModelAdmin):
list_display = ('sala',)
list_filter = ('sala',)
search_fields = ('sala',)
@admin.register(recinto)
class RecintoAdmin(admin.ModelAdmin):
list_display = ('sala', 'cinema',)
list_filter = ('sala', 'cinema',)
search_fields = ('sala',)
@admin.register(versao)
class VersaoAdmin(admin.ModelAdmin):
list_display = ('versao',)
list_filter = ('versao',)
search_fields = ('versao',)
@admin.register(formato)
class FormatoAdmin(admin.ModelAdmin):
list_display = ('formato',)
list_filter = ('formato',)
search_fields = ('formato',)
@admin.register(tecnologia)
class TecnologiaAdmin(admin.ModelAdmin):
list_display = ('tecnologia',)
list_filter = ('tecnologia',)
search_fields = ('tecnologia',)
@admin.register(data)
class DataAdmin(admin.ModelAdmin):
list_display = ('data',)
list_filter = ('data',)
search_fields = ('data',)
@admin.register(sessao)
class SessaoAdmin(admin.ModelAdmin):
list_display = ('sala', 'filme', 'versao', 'formato', 'tecnologia', 'data',)
list_filter = ('sala', 'filme', 'versao', 'formato', 'tecnologia', 'data',)
search_fields = ('sala',)
@admin.register(horario)
class HorarioAdmin(admin.ModelAdmin):
list_display = ('hora', 'sessao',)
list_filter = ('hora', 'sessao',)
search_fields = ('hora',)
@admin.register(calendario)
class CalendarioAdmin(admin.ModelAdmin):
list_display = ('data', 'horario',)
list_filter = ('data', 'horario',)
search_fields = ('data',)
# ======================================================================
# Source: sireliah/polish-python
"""Fix incompatible imports oraz module references."""
# Authors: <NAME>, <NAME>
# Local imports
z .. zaimportuj fixer_base
z ..fixer_util zaimportuj Name, attr_chain
MAPPING = {'StringIO': 'io',
'cStringIO': 'io',
'cPickle': 'pickle',
'__builtin__' : 'builtins',
'copy_reg': 'copyreg',
'Queue': 'queue',
'SocketServer': 'socketserver',
'ConfigParser': 'configparser',
'repr': 'reprlib',
'FileDialog': 'tkinter.filedialog',
'tkFileDialog': 'tkinter.filedialog',
'SimpleDialog': 'tkinter.simpledialog',
'tkSimpleDialog': 'tkinter.simpledialog',
'tkColorChooser': 'tkinter.colorchooser',
'tkCommonDialog': 'tkinter.commondialog',
'Dialog': 'tkinter.dialog',
'Tkdnd': 'tkinter.dnd',
'tkFont': 'tkinter.font',
'tkMessageBox': 'tkinter.messagebox',
'ScrolledText': 'tkinter.scrolledtext',
'Tkconstants': 'tkinter.constants',
'Tix': 'tkinter.tix',
'ttk': 'tkinter.ttk',
'Tkinter': 'tkinter',
'markupbase': '_markupbase',
'_winreg': 'winreg',
'thread': '_thread',
'dummy_thread': '_dummy_thread',
           # anydbm and whichdb are handled by fix_imports2
'dbhash': 'dbm.bsd',
'dumbdbm': 'dbm.dumb',
'dbm': 'dbm.ndbm',
'gdbm': 'dbm.gnu',
'xmlrpclib': 'xmlrpc.client',
'DocXMLRPCServer': 'xmlrpc.server',
'SimpleXMLRPCServer': 'xmlrpc.server',
'httplib': 'http.client',
'htmlentitydefs' : 'html.entities',
'HTMLParser' : 'html.parser',
'Cookie': 'http.cookies',
'cookielib': 'http.cookiejar',
'BaseHTTPServer': 'http.server',
'SimpleHTTPServer': 'http.server',
'CGIHTTPServer': 'http.server',
#'test.test_support': 'test.support',
'commands': 'subprocess',
'UserString' : 'collections',
'UserList' : 'collections',
'urlparse' : 'urllib.parse',
'robotparser' : 'urllib.robotparser',
}
def alternates(members):
zwróć "(" + "|".join(map(repr, members)) + ")"
def build_pattern(mapping=MAPPING):
mod_list = ' | '.join(["module_name='%s'" % key dla key w mapping])
bare_names = alternates(mapping.keys())
uzyskaj """name_import=import_name< 'import' ((%s) |
multiple_imports=dotted_as_names< any* (%s) any* >) >
""" % (mod_list, mod_list)
uzyskaj """import_from< 'from' (%s) 'import' ['(']
( any | import_as_name< any 'as' any > |
import_as_names< any* >) [')'] >
""" % mod_list
uzyskaj """import_name< 'import' (dotted_as_name< (%s) 'as' any > |
multiple_imports=dotted_as_names<
any* dotted_as_name< (%s) 'as' any > any* >) >
""" % (mod_list, mod_list)
    # Find usages of module members in code e.g. thread.foo(bar)
uzyskaj "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names
klasa FixImports(fixer_base.BaseFix):
BM_compatible = Prawda
keep_line_order = Prawda
    # This is overridden in fix_imports2.
mapping = MAPPING
# We want to run this fixer late, so fix_zaimportuj doesn't try to make stdlib
# renames into relative imports.
run_order = 6
def build_pattern(self):
zwróć "|".join(build_pattern(self.mapping))
def compile_pattern(self):
        # We override this, so MAPPING can be programmatically altered and the
        # changes will be reflected in PATTERN.
self.PATTERN = self.build_pattern()
super(FixImports, self).compile_pattern()
    # Don't match the node if it's within another match.
def match(self, node):
match = super(FixImports, self).match
results = match(node)
jeżeli results:
            # Module usage could be in the trailer of an attribute lookup, so we
            # might have nested matches when "bare_with_attr" is present.
jeżeli "bare_with_attr" nie w results oraz \
any(match(obj) dla obj w attr_chain(node, "parent")):
zwróć Nieprawda
zwróć results
zwróć Nieprawda
def start_tree(self, tree, filename):
super(FixImports, self).start_tree(tree, filename)
self.replace = {}
def transform(self, node, results):
import_mod = results.get("module_name")
jeżeli import_mod:
mod_name = import_mod.value
new_name = self.mapping[mod_name]
import_mod.replace(Name(new_name, prefix=import_mod.prefix))
jeżeli "name_import" w results:
                # If it's not a "z x zaimportuj x, y" or "zaimportuj x jako y" import,
                # mark its usage to be replaced.
self.replace[mod_name] = new_name
jeżeli "multiple_imports" w results:
                # This is a nasty hack to fix multiple imports on a line (e.g.,
                # "zaimportuj StringIO, urlparse"). The problem is that I can't
                # figure out an easy way to make a pattern recognize the keys of
                # MAPPING randomly sprinkled in an import statement.
results = self.match(node)
jeżeli results:
self.transform(node, results)
inaczej:
# Replace usage of the module.
bare_name = results["bare_with_attr"][0]
new_name = self.replace.get(bare_name.value)
jeżeli new_name:
bare_name.replace(Name(new_name, prefix=bare_name.prefix))
# ======================================================================
# Source: kerenpeer/Project-Tohna-1-for-real
import sys
def initialise(k: int):
points = []
points_to_clusters = []
clusters_to_points = []
clusters_to_centroids = []
index = 0
while (True):
try:
input_point = input()
except EOFError:
# no more points in file
break
point = [float(x) for x in input_point.split(',')]
points.append(point) # this point's id is "index"
        if index < k:
            points_to_clusters.append(index)  # assign cluster "index" to point "index"
            clusters_to_points.append([index])  # assign point "index" to cluster "index"
            clusters_to_centroids.append(point)  # set "point" as cluster "index"'s centroid
        else:
            points_to_clusters.append(None)  # assign cluster "None" to point "index"
index+=1
return points, points_to_clusters, clusters_to_points, clusters_to_centroids, index
def calc_centroid(cluster_id: int):
sum = [0.0 for i in range(dim)]
amount_points_in_cluster = len(clusters_to_points[cluster_id])
for point_id in clusters_to_points[cluster_id]:
point = points[point_id]
for i in range(dim):
sum[i] += point[i]
for i in range(dim):
sum[i] = sum[i]/amount_points_in_cluster
return sum
def get_centroid(cluster_id: int):
return clusters_to_centroids[cluster_id]
# calculates the distance between 2 points
def calc_dist(point1: list, point2: list):
sum = 0.0
for i in range(len(point1)):
sum += (point1[i] - point2[i]) ** 2
return sum
def find_closest_cluster(point: list, curr_cluster: int):
closest_cluster = curr_cluster
if closest_cluster is None:
closest_dist = float('inf')
else:
centroid = get_centroid(curr_cluster)
closest_dist = calc_dist(point, centroid)
for cluster_id in range(len(clusters_to_points)):
centroid = get_centroid(cluster_id)
this_dist = calc_dist(point, centroid)
if this_dist < closest_dist:
closest_dist = this_dist
closest_cluster = cluster_id
return closest_cluster
def move_point(point_id: int, curr_cluster:int, new_cluster: int):
if curr_cluster is not None:
clusters_to_points[curr_cluster].remove(point_id)
clusters_to_points[new_cluster].append(point_id)
points_to_clusters[point_id] = new_cluster
# main
try:
    k = int(sys.argv[1])
except Exception:
    print("invalid input")
    sys.exit(1)
try:
    max_iter = int(sys.argv[2])
except Exception as e:
    if type(e) == IndexError:
        max_iter = 200
    else:
        print("invalid input")
        sys.exit(1)
points, points_to_clusters, clusters_to_points, clusters_to_centroids, n = initialise(k)
if k >= n:
raise Exception("Too many clusters, K is too big")
changes = True
index = 0
dim = len(points[0])
while changes:
if index == max_iter:
break
index+=1
changes = False
for i in range(len(points)):
point = points[i]
curr_cluster = points_to_clusters[i]
new_cluster = find_closest_cluster(point, curr_cluster)
if new_cluster != curr_cluster:
move_point(i, curr_cluster, new_cluster)
changes = True
for i in range(len(clusters_to_points)):
new_centroid = calc_centroid(i)
clusters_to_centroids[i] = new_centroid
# print to cmd
for i in range(len(clusters_to_points)):
centroid = get_centroid(i)
four_decimals = ['%.4f' % x for x in centroid]
output_line = f'{four_decimals}'
output_line = output_line.replace(" ","").replace("'","")
print(f'{output_line[1:-1]}')
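# Usage sketch: k comes from argv[1], max_iter from argv[2] (optional, default
# 200), and points are read from stdin as comma-separated floats, one point
# per line (the script filename is illustrative):
#
#     printf '1.0,2.0\n1.5,1.8\n8.0,8.0\n9.0,9.5\n' | python kmeans.py 2 100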
# ======================================================================
"""accounts URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib.auth import views as auth_views
from django.urls import path
from .views import index
# from .views import account_detail_view, userprofile_view, dynamic_lookup_view, signup_view, CreateUserProfileView
from .views import account_detail_view, signup_view
from . import views
urlpatterns = [
# path('', index),
path('signup/', signup_view, name='signup'),
# path('', login_view, name='login'),
# path('logout/', logout_view, name='logout'),
# path('logout/', auth_views.LogoutView.as_view(template_name='logout.html'), name='logout'),
path('profile/', account_detail_view, name='profile'),
path('going_to_save_account/', views.going_to_save_account, name="going_to_save_account"),
path('going_to_save_all_info/', views.going_to_save_all_info, name="going_to_save_all_info"),
path('delete_this/<int:pk_id>', views.delete_this, name="delete_this"),
# path('', views.first_login_page, name='first_login_page'),
path('', views.login_page, name='login_page'),
path('logout_page/', views.logout_page, name='logout_page'),
path('login_redirect_from_logout/', views.login_redirect_from_logout, name='login_redirect_from_logout'),
]
# ======================================================================
# Source: JeanExtreme002/CSES-Problem-Set-Solutions
# File: Introductory Problems/weird_algorithm.py
value = int(input())
while value != 1:
print(value, end = " ")
    value = value // 2 if value % 2 == 0 else value * 3 + 1
print(1)
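# Worked example: an input of 3 prints "3 10 5 16 8 4 2 1".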
# ======================================================================
# Source: JonHylands/uCee-py
from L3G import *
from LSM303 import *
print("Starting")
gyro = L3G()
compass = LSM303()
print("About to enableDefault")
gyro.enableDefault()
compass.enableDefault()
print("About to read")
gyro.read()
compass.read()
print("Gyro value: ", gyro.g)
print("Accel value: ", compass.a)
print("Mag value: ", compass.m)
# ======================================================================
import sys
import pprint
import wrapt
import gen_util_constants as util_constants
from google.protobuf.descriptor import Descriptor, EnumDescriptor, EnumValueDescriptor, FieldDescriptor, FileDescriptor
from typing import Tuple
pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
Num2Type = {
1: "double",
2: "float",
3: "int64", # not zigzag (proto3 compiler does not seem to use it)
4: "uint64",
5: "int32", # not zigzag (proto3 compiler does not seem to use it)
6: "uint64",
7: "uint32",
8: "bool",
9: "string",
10: None, #"group", # group (deprecated in proto3)
    11: None, #"message", # another message
12: "bytes", # bytes
13: "uint32",
14: "enum",
15: "int32",
16: "int64",
17: "int32", # Uses ZigZag encoding.
18: "int64", # Uses ZigZag encoding.
}
Num2PbType = {
1: "double",
2: "float",
3: "int64", # not zigzag (proto3 compiler does not seem to use it)
4: "uint64",
5: "int32", # not zigzag (proto3 compiler does not seem to use it)
6: "fixed64",
7: "fixed32",
8: "bool",
9: "string",
10: None, #"group", # group (deprecated in proto3)
    11: None, #"message", # another message
12: "bytes", # bytes
13: "uint32",
14: "enum",
15: "sfixed32",
16: "sfixed64",
17: "sint32", # Uses ZigZag encoding.
18: "sint64", # Uses ZigZag encoding.
}
Num2WireType = {
1: "Fixed64",
2: "Fixed32",
3: "Varint",
4: "Varint",
5: "Varint",
6: "Fixed64",
7: "Fixed32",
8: "Varint",
9: "LengthDelim",
10: None,
11: "LengthDelim",
12: "LengthDelim",
13: "Varint",
14: "Varint",
15: "Fixed32",
16: "Fixed64",
17: "Varint",
18: "Varint",
}
SolType2BodyLen = {
"address": 20,
"uint" : 32,
"uint8" : 1,
"uint16" : 2,
"uint24" : 3,
"uint32" : 4,
"uint40" : 5,
"uint48" : 6,
"uint56" : 7,
"uint64" : 8,
"uint72" : 9,
"uint80" : 10,
"uint88" : 11,
"uint96" : 12,
"uint104" : 13,
"uint112" : 14,
"uint120" : 15,
"uint128" : 16,
"uint136" : 17,
"uint144" : 18,
"uint152" : 19,
"uint160" : 20,
"uint168" : 21,
"uint176" : 22,
"uint184" : 23,
"uint192" : 24,
"uint200" : 25,
"uint208" : 26,
"uint216" : 27,
"uint224" : 28,
"uint232" : 29,
"uint240" : 30,
"uint248" : 31,
"uint256" : 32,
"int" : 32,
"int8" : 1,
"int16" : 2,
"int24" : 3,
"int32" : 4,
"int40" : 5,
"int48" : 6,
"int56" : 7,
"int64" : 8,
"int72" : 9,
"int80" : 10,
"int88" : 11,
"int96" : 12,
"int104" : 13,
"int112" : 14,
"int120" : 15,
"int128" : 16,
"int136" : 17,
"int144" : 18,
"int152" : 19,
"int160" : 20,
"int168" : 21,
"int176" : 22,
"int184" : 23,
"int192" : 24,
"int200" : 25,
"int208" : 26,
"int216" : 27,
"int224" : 28,
"int232" : 29,
"int240" : 30,
"int248" : 31,
"int256" : 32,
"bytes1" : 1,
"bytes2" : 2,
"bytes3" : 3,
"bytes4" : 4,
"bytes5" : 5,
"bytes6" : 6,
"bytes7" : 7,
"bytes8" : 8,
"bytes9" : 9,
"bytes10": 10,
"bytes11": 11,
"bytes12": 12,
"bytes13": 13,
"bytes14": 14,
"bytes15": 15,
"bytes16": 16,
"bytes17": 17,
"bytes18": 18,
"bytes19": 19,
"bytes20": 20,
"bytes21": 21,
"bytes22": 22,
"bytes23": 23,
"bytes24": 24,
"bytes25": 25,
"bytes26": 26,
"bytes27": 27,
"bytes28": 28,
"bytes29": 29,
"bytes30": 30,
"bytes31": 31,
"bytes32": 32,
}
SOL_RESERVED_KEYWORDS = set([
# global variables
"abi", "block", "gasleft",
"msg", "now", "tx",
"assert", "require", "revert",
"blockhash", "keccak256", "sha256",
"ripemd160", "ecrecover", "addmod",
"mulmod", "this", "super", "selfdestruct",
"type", "address", "bytes",
# units
"wei", "gwei", "ether",
"seconds", "minutes", "hours",
"days", "weeks"
])
INTERNAL_TYPE_CATEGORY_BUILTIN = 1
INTERNAL_TYPE_CATEGORY_ENUM = 2
INTERNAL_TYPE_CATEGORY_USERTYPE = 3
PB_LIB_NAME_PREFIX = ""
LIBRARY_LINKING_MODE = False
ENUM_AS_CONSTANT = False
ALLOW_RESERVED_KEYWORDS = False
SOLIDITY_VERSION = "0.8.10"
SOLIDITY_PRAGMAS = []
IGNORED_PROTOS = []
# utils
def is_map_type(f: FieldDescriptor) -> bool:
return f.message_type and f.message_type.GetOptions().map_entry
def to_camel_case(name: str) -> str:
if "_" in name:
return name.replace("_", " ").title().replace(" ", "")
return name[:1].upper() + name[1:]
def parse_urllike_parameter(s):
ret = {} #hash
if s:
for e in s.split('&'):
kv = e.split('=')
ret[kv[0]] = kv[1]
return ret
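# Examples:
#   to_camel_case("foo_bar")           -> "FooBar"
#   to_camel_case("fooBar")            -> "FooBar"
#   parse_urllike_parameter("a=1&b=2") -> {'a': '1', 'b': '2'}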
def field_is_message(f: FieldDescriptor) -> bool:
return f.message_type and (f.message_type.file.package != "solidity")
def field_is_repeated(f: FieldDescriptor) -> bool:
return f.label == FieldDescriptor.LABEL_REPEATED
def field_is_scalar_numeric(f: FieldDescriptor) -> bool:
return gen_wire_type(f) in ['Varint', 'Fixed32', 'Fixed64']
def field_is_packed(f: FieldDescriptor) -> bool:
opt = f.GetOptions()
return opt.packed or field_is_scalar_numeric(f) and not opt.HasField("packed")
def field_has_dyn_size(f: FieldDescriptor) -> bool:
# if string or bytes, dynamic
if f.type == FieldDescriptor.TYPE_STRING or f.type == FieldDescriptor.TYPE_BYTES:
return True
elif f.type == FieldDescriptor.TYPE_MESSAGE:
# if non struct, message should be translate struct, which may have dynamic size
# otherwise solidity native type, which should not have dynamic size
return field_sol_type(f) == None
else:
return False
def field_pb_type(f: FieldDescriptor) -> str:
if f.type == FieldDescriptor.TYPE_MESSAGE:
return "message"
return Num2PbType.get(f.type, None)
def field_sol_type(f: FieldDescriptor) -> str:
if f.type != FieldDescriptor.TYPE_MESSAGE:
return None
elif f.message_type.file.package == "solidity":
return f.message_type.name
else:
return None
def prefix_lib(name: str) -> str:
return PB_LIB_NAME_PREFIX + name
def gen_delegate_lib_name(msg: Descriptor) -> str:
"""Generate a library name as follows.
PackageNameContainingTypeNameSelfTypeName
"""
name = "".join(map(lambda word: word[:1].upper() + word[1:], msg.full_name.split(".")))
return prefix_lib(name)
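# Example: a message with full_name "pkg.Outer.Inner" yields "PkgOuterInner"
# (prefixed with PB_LIB_NAME_PREFIX when one has been set).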
def gen_global_type_name_from_field(field: FieldDescriptor) -> str:
ftid, type_category = gen_field_type_id(field)
if type_category == INTERNAL_TYPE_CATEGORY_BUILTIN:
return ftid
elif type_category == INTERNAL_TYPE_CATEGORY_ENUM:
global ENUM_AS_CONSTANT
return "int64" if ENUM_AS_CONSTANT else prefix_lib(ftid)
else:
assert type_category == INTERNAL_TYPE_CATEGORY_USERTYPE
return prefix_lib(ftid) + ".Data"
def is_complex_type(field: str) -> bool:
return "string" == field or "bytes" == field or ".Data" in field or "[]" in field
def gen_global_type_decl_from_field(field: FieldDescriptor) -> str:
tp = gen_global_type_name_from_field(field)
if field_has_dyn_size(field):
return tp + " memory"
else:
return tp
def gen_global_type_from_field(field: FieldDescriptor) -> str:
t = gen_global_type_name_from_field(field)
if t is None:
pp.pprint(field)
pp.pprint("will die ======================================= ")
if field_is_repeated(field):
return t + "[]"
else:
return t
def gen_internal_struct_name(_: Descriptor) -> str:
return "Data"
def max_field_number(msg: Descriptor) -> int:
num = 0
for f in msg.fields:
if num < f.number:
num = f.number
return num
def str_contains(s, token):
try:
return s.index(token) >= 0
except Exception as e:
return False
def gen_struct_name(msg: Descriptor) -> str:
"""Generates PackageNameContainingTypeNameSelfTypeName"""
return "".join(map(lambda word: word[:1].upper() + word[1:], msg.full_name.split('.')))
def gen_struct_name_from_field(f: FieldDescriptor) -> str:
assert f.message_type
return gen_struct_name(f.message_type)
def gen_enum_name(e: EnumDescriptor) -> str:
"""Generates the following.
- PackageNameContainingTypeName.EnumName if the enum is nested
- EnumName if the enum is global
"""
if e.containing_type:
return gen_struct_name(e.containing_type) + '.' + e.name
else:
return gen_global_enum_name(e.file) + '.' + e.name
def gen_enum_name_from_field(f: FieldDescriptor) -> str:
assert f.enum_type
return gen_enum_name(f.enum_type)
def gen_field_type_id(field: FieldDescriptor) -> Tuple[str, int]:
val = Num2Type.get(field.type, None)
if val != None:
if val == "enum":
return (gen_enum_name_from_field(field), INTERNAL_TYPE_CATEGORY_ENUM)
return (val, INTERNAL_TYPE_CATEGORY_BUILTIN)
val = field_sol_type(field)
if val != None:
return (val, INTERNAL_TYPE_CATEGORY_BUILTIN)
return (gen_struct_name_from_field(field), INTERNAL_TYPE_CATEGORY_USERTYPE)
def gen_fieldtype(field: FieldDescriptor) -> str:
t = gen_global_type_name_from_field(field)
assert t[0] != "."
if field_is_repeated(field):
return t + "[]"
else:
return t
def gen_enumvalue_entry(v: Tuple[int, EnumValueDescriptor]):
if v[0] == 0:
return "{name}".format(
name = v[1].name,
)
else:
return ",\n {name}".format(
name = v[1].name,
)
def gen_enumencoder_entry(v: EnumValueDescriptor) -> str:
return util_constants.ENUM_ENCODE_FUNCTION_INNER.format(
name = v.name,
value = v.number,
enum_name = v.type.name
)
def gen_enumdecoder_entry(v: EnumValueDescriptor) -> str:
return util_constants.ENUM_DECODE_FUNCTION_INNER.format(
name = v.name,
value = v.number,
enum_name = v.type.name
)
def gen_enumvalues(e: EnumDescriptor) -> str:
return ''.join(
map(gen_enumvalue_entry, enumerate(e.values))
)
def gen_enum_encoders(e: EnumDescriptor) -> str:
return '\n'.join(
map(gen_enumencoder_entry, e.values)
)
def gen_enum_decoders(e: EnumDescriptor) -> str:
return '\n'.join(
map(gen_enumdecoder_entry, e.values)
)
def gen_enumtype(e: EnumDescriptor) -> str:
"""Generate the following parts.
enum Foo {
...
}
function encode_Foo(Foo x) internal pure returns (int32) {
...
}
function decode_Foo(int64 x) internal pure returns (Foo) {
...
}
"""
global ENUM_AS_CONSTANT
if ENUM_AS_CONSTANT:
return '\n'.join(
map(lambda v: util_constants.ENUM_TYPE.format(
type = e.name,
name = v.name,
value = v.number
),
e.values)
)
else:
definition = util_constants.ENUM_FUNCTION.format(
enum_name = e.name,
enum_values = gen_enumvalues(e)
)
encoder = util_constants.ENUM_ENCODE_FUNCTION.format(
enum_name = e.name,
enum_values = gen_enum_encoders(e)
)
decoder = util_constants.ENUM_DECODE_FUNCTION.format(
enum_name = e.name,
enum_values = gen_enum_decoders(e)
)
estimator = util_constants.ENUM_ESTIMATE_FUNCTION.format(
enum_name = e.name
)
return definition + "\n" + encoder + "\n" + decoder + "\n" + estimator
def gen_struct_decoder_name_from_field(field: FieldDescriptor) -> str:
ftid, _ = gen_field_type_id(field)
return "_decode_" + ftid
def gen_struct_codec_lib_name_from_field(field: FieldDescriptor) -> str:
ftid, type_category = gen_field_type_id(field)
assert type_category == INTERNAL_TYPE_CATEGORY_USERTYPE
return prefix_lib(ftid)
def gen_decoder_name(field: FieldDescriptor) -> str:
val = Num2PbType.get(field.type, None)
if val != None:
return "ProtoBufRuntime._decode_" + val
else:
val = field_sol_type(field)
if val != None:
return "ProtoBufRuntime._decode_sol_" + val
return "_decode_" + gen_struct_name_from_field(field)
def gen_encoder_name(field: FieldDescriptor) -> str:
val = Num2PbType.get(field.type, None)
if val != None:
return "ProtoBufRuntime._encode_" + val
else:
val = field_sol_type(field)
if val != None:
return "ProtoBufRuntime._encode_sol_" + val
return gen_struct_codec_lib_name_from_field(field) + "._encode_nested"
def gen_empty_checker_block(msg: Descriptor, field: FieldDescriptor) -> str:
blk = EmptyCheckBlock(field)
begin = blk.begin()
if begin == '':
return ''
return """
{block_begin}
return false;
{block_end}
""".format(
block_begin=begin,
block_end=blk.end()
)
def is_struct_type(field: FieldDescriptor) -> bool:
val = Num2PbType.get(field.type, None)
if val != None:
return False
else:
return True
def gen_wire_type(field: FieldDescriptor) -> str:
return Num2WireType.get(field.type, None)
def gen_soltype_estimate_len(sol_type: str) -> int:
val = SolType2BodyLen.get(sol_type, 0)
return val + 3
def gen_global_enum_name(file: FileDescriptor) -> str:
"""Generate the name of a library containing global enums as follows.
FILE_NAME_GLOBAL_ENUMS
"""
return file.name.replace(".", "_").upper() + "_" + "GLOBAL_ENUMS"
def change_pb_libname_prefix(new_name: str):
global PB_LIB_NAME_PREFIX
PB_LIB_NAME_PREFIX = new_name
def is_lib_linking_mode() -> bool:
global LIBRARY_LINKING_MODE
return LIBRARY_LINKING_MODE
def set_library_linking_mode():
global LIBRARY_LINKING_MODE
LIBRARY_LINKING_MODE = True
global SOLIDITY_PRAGMAS
SOLIDITY_PRAGMAS = ["pragma experimental ABIEncoderV2"]
def set_internal_linking_mode():
global LIBRARY_LINKING_MODE
LIBRARY_LINKING_MODE = False
global SOLIDITY_PRAGMAS
SOLIDITY_PRAGMAS = []
def set_solc_version(version: str):
global SOLIDITY_VERSION
SOLIDITY_VERSION = version
def set_allow_reserved_keywords(on: bool):
global ALLOW_RESERVED_KEYWORDS
ALLOW_RESERVED_KEYWORDS = on
def set_enum_as_constant(on: bool):
global ENUM_AS_CONSTANT
ENUM_AS_CONSTANT = on
def set_ignored_protos(opt: str):
global IGNORED_PROTOS
IGNORED_PROTOS = opt.split(',')
def ignores_proto(name: str) -> bool:
global IGNORED_PROTOS
return (name in IGNORED_PROTOS)
def gen_visibility(is_decoder) -> str:
if not LIBRARY_LINKING_MODE:
return "internal"
return "public" #"internal" if is_decoder else ""
def simple_term(field: FieldDescriptor) -> str:
return "r.{name}".format(name=field.name)
def string_term(field: FieldDescriptor) -> str:
return "bytes(r.{name}).length".format(name=field.name)
def bytes_term(field: FieldDescriptor) -> str:
return "r.{name}.length".format(name=field.name)
def message_term(field: FieldDescriptor) -> str:
child = gen_struct_name_from_field(field)
return "{child}._empty(r.{name})".format(child=child, name=field.name)
def enum_term(field: FieldDescriptor) -> str:
return "uint(r.{name})".format(name=field.name)
default_values = {
"bytes": {"cond": "!= 0", "f": bytes_term},
"string": {"cond": "!= 0", "f": string_term},
"bool": {"cond": "!= false", "f": simple_term},
"int32": {"cond": "!= 0", "f": simple_term},
"int64": {"cond": "!= 0", "f": simple_term},
"uint32": {"cond": "!= 0", "f": simple_term},
"uint64": {"cond": "!= 0", "f": simple_term},
"sint32": {"cond": "!= 0", "f": simple_term},
"sint64": {"cond": "!= 0", "f": simple_term},
"fixed32": {"cond": "!= 0", "f": simple_term},
"fixed64": {"cond": "!= 0", "f": simple_term},
"sfixed32": {"cond": "!= 0", "f": simple_term},
"sfixed64": {"cond": "!= 0", "f": simple_term},
"enum": {"cond": "!= 0", "f": enum_term},
"message": {"cond": "!= true", "f": message_term}
}
class EmptyCheckBlock:
def __init__(self, field: FieldDescriptor):
self.field = field
self.val = Num2PbType.get(self.field.type, None)
def begin(self):
if field_is_repeated(self.field):
return "if ({term} != 0) {{".format(term="r." + self.field.name + ".length")
elif self.val in default_values:
dv = default_values[self.val]
params = dict(
term=dv['f'](self.field),
op=dv['cond'],
)
return "if ({term} {op}) ".format(**params) + "{"
elif is_struct_type(self.field):
return ""
else:
raise Exception('Unsupported type: {}', self.field.type)
def end(self):
if is_struct_type(self.field) and not field_is_repeated(self.field):
return ""
else:
return "}"
class MessageFieldWrapper(wrapt.ObjectProxy):
@property
def name(self):
if self.__wrapped__.name in SOL_RESERVED_KEYWORDS:
return self.__wrapped__.name.capitalize()
return self.__wrapped__.name
class MessageFieldsWrapper(wrapt.ObjectProxy):
def __init__(self, wrapped):
super(MessageFieldsWrapper, self).__init__(wrapped)
self._self_fields = None
def __iter__(self):
if self._self_fields is None:
self._self_fields = [MessageFieldWrapper(f) for f in self.__wrapped__]
for f in self._self_fields:
yield f
class MessageWrapper(wrapt.ObjectProxy):
def __init__(self, wrapped):
super(MessageWrapper, self).__init__(wrapped)
self._self_fields = MessageFieldsWrapper(wrapped.fields)
@property
def fields(self):
return self._self_fields
# ======================================================================
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: candig/schemas/candig/metadata_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from candig.schemas.candig import metadata_pb2 as candig_dot_schemas_dot_candig_dot_metadata__pb2
from candig.schemas.google.api import annotations_pb2 as candig_dot_schemas_dot_google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='candig/schemas/candig/metadata_service.proto',
package='candig.schemas.candig',
syntax='proto3',
serialized_pb=_b('\n,candig/schemas/candig/metadata_service.proto\x12\x15\x63\x61ndig.schemas.candig\x1a$candig/schemas/candig/metadata.proto\x1a+candig/schemas/google/api/annotations.proto\">\n\x15SearchDatasetsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t\"c\n\x16SearchDatasetsResponse\x12\x30\n\x08\x64\x61tasets\x18\x01 \x03(\x0b\x32\x1e.candig.schemas.candig.Dataset\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\'\n\x11GetDatasetRequest\x12\x12\n\ndataset_id\x18\x01 \x01(\t2\xa4\x02\n\x0fMetadataService\x12\x91\x01\n\x0eSearchDatasets\x12,.candig.schemas.candig.SearchDatasetsRequest\x1a-.candig.schemas.candig.SearchDatasetsResponse\"\"\x82\xd3\xe4\x93\x02\x1c\"\x17/v0.8.0/datasets/search:\x01*\x12}\n\nGetDataset\x12(.candig.schemas.candig.GetDatasetRequest\x1a\x1e.candig.schemas.candig.Dataset\"%\x82\xd3\xe4\x93\x02\x1f\x12\x1d/v0.8.0/datasets/{dataset_id}b\x06proto3')
,
dependencies=[candig_dot_schemas_dot_candig_dot_metadata__pb2.DESCRIPTOR,candig_dot_schemas_dot_google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_SEARCHDATASETSREQUEST = _descriptor.Descriptor(
name='SearchDatasetsRequest',
full_name='candig.schemas.candig.SearchDatasetsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='page_size', full_name='candig.schemas.candig.SearchDatasetsRequest.page_size', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='page_token', full_name='candig.schemas.candig.SearchDatasetsRequest.page_token', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=154,
serialized_end=216,
)
_SEARCHDATASETSRESPONSE = _descriptor.Descriptor(
name='SearchDatasetsResponse',
full_name='candig.schemas.candig.SearchDatasetsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='datasets', full_name='candig.schemas.candig.SearchDatasetsResponse.datasets', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='next_page_token', full_name='candig.schemas.candig.SearchDatasetsResponse.next_page_token', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=218,
serialized_end=317,
)
_GETDATASETREQUEST = _descriptor.Descriptor(
name='GetDatasetRequest',
full_name='candig.schemas.candig.GetDatasetRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataset_id', full_name='candig.schemas.candig.GetDatasetRequest.dataset_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=319,
serialized_end=358,
)
_SEARCHDATASETSRESPONSE.fields_by_name['datasets'].message_type = candig_dot_schemas_dot_candig_dot_metadata__pb2._DATASET
DESCRIPTOR.message_types_by_name['SearchDatasetsRequest'] = _SEARCHDATASETSREQUEST
DESCRIPTOR.message_types_by_name['SearchDatasetsResponse'] = _SEARCHDATASETSRESPONSE
DESCRIPTOR.message_types_by_name['GetDatasetRequest'] = _GETDATASETREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
SearchDatasetsRequest = _reflection.GeneratedProtocolMessageType('SearchDatasetsRequest', (_message.Message,), dict(
DESCRIPTOR = _SEARCHDATASETSREQUEST,
__module__ = 'candig.schemas.candig.metadata_service_pb2'
# @@protoc_insertion_point(class_scope:candig.schemas.candig.SearchDatasetsRequest)
))
_sym_db.RegisterMessage(SearchDatasetsRequest)
SearchDatasetsResponse = _reflection.GeneratedProtocolMessageType('SearchDatasetsResponse', (_message.Message,), dict(
DESCRIPTOR = _SEARCHDATASETSRESPONSE,
__module__ = 'candig.schemas.candig.metadata_service_pb2'
# @@protoc_insertion_point(class_scope:candig.schemas.candig.SearchDatasetsResponse)
))
_sym_db.RegisterMessage(SearchDatasetsResponse)
GetDatasetRequest = _reflection.GeneratedProtocolMessageType('GetDatasetRequest', (_message.Message,), dict(
DESCRIPTOR = _GETDATASETREQUEST,
__module__ = 'candig.schemas.candig.metadata_service_pb2'
# @@protoc_insertion_point(class_scope:candig.schemas.candig.GetDatasetRequest)
))
_sym_db.RegisterMessage(GetDatasetRequest)
# @@protoc_insertion_point(module_scope)
# ======================================================================
"""
Iso-tropic filtering and compression as discussed with <NAME>.
For more check-out the Book.
Write down the latex equation.
todo:
OT = optimize
remove the unessary print statements
swap is not correct for the filtering.
comments are not proper
done:
better variable names
"""
import pyopencl as cl
import numpy as np
import argparse
import os
import sys
pwd = os.getcwd()
bool_1 = False
if bool_1: print(pwd)
mywf = os.path.join(pwd,'filtering_iso/iso.cl')
class hyperParams:
""" A basic structure to group all the hyper params
params:
------
lamb:
it:
ep:
pl:
"""
def __init__(self, lamb, it, ep, pl):
self.lamb = lamb
self.it = it
self.ep = ep
self.pl = pl
def isoFilter(graph, signal, hp):
"""Does the isoprotic filtering on the graph
params
------
graph:
signal:
hp: hyperparameters
returns
-------
new_signal:
"""
ngbrs = graph.ngbrs
wgts = graph.wgts
k = graph.k
ngbrs = ngbrs.astype('int32')
wgts = wgts.astype('float32')
n, chnl = signal.shape
signal = np.reshape(signal,(n*chnl),order='F')
signal = signal.astype('float32')
print("signal",signal.shape) if bool_1 else print()
signal_old = np.copy(signal)
print("n",n) if bool_1 else print()
lamb = hp.lamb
it = hp.it
pl = hp.pl
epsilon = hp.ep
print("success till loading") if bool_1 else print()
# create the opencl context
platform = cl.get_platforms()[0]
print(platform)
device = platform.get_devices()[0]
print(device)
context = cl.Context([device])
print(context)
program = cl.Program(context, open(mywf).read()).build()
queue = cl.CommandQueue(context)
print(queue)
#create the buffers now.
mem_flags = cl.mem_flags
ngbrs_buf = cl.Buffer(context, mem_flags.READ_ONLY | mem_flags.COPY_HOST_PTR,hostbuf=ngbrs)
signal_buf= cl.Buffer(context, mem_flags.READ_ONLY | mem_flags.COPY_HOST_PTR, hostbuf=signal)
signal_old_buf= cl.Buffer(context, mem_flags.READ_ONLY | mem_flags.COPY_HOST_PTR, hostbuf=signal_old)
weight_buf = cl.Buffer(context, mem_flags.READ_ONLY | mem_flags.COPY_HOST_PTR, hostbuf=wgts)
#need to create new intensity buffers
new_signal= np.ndarray(shape=(n*chnl,), dtype=np.float32)
new_signal_buf = cl.Buffer(context, mem_flags.WRITE_ONLY, new_signal.nbytes)
#run the kernel here in a loop
import time
start = time.time()
for uv in range(0, it):
program.laplacian_filter(queue, (n,), None, signal_old_buf, signal_buf, new_signal_buf, ngbrs_buf, weight_buf, np.int32(k), np.float32(lamb) ,np.float32(pl), np.float32(epsilon), np.int32(chnl))
#swap
signal_buf,new_signal_buf=new_signal_buf,signal_buf
queue.finish()
end = time.time() - start
print(f"time taken is {end}")
# copy the new signal vec
cl.enqueue_copy(queue, new_signal, new_signal_buf)
# save the new intensity vec here
print("finish") if bool_1 else print()
return np.reshape(new_signal,(int(len(new_signal)/chnl),chnl),order="F")
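# A minimal usage sketch (the graph object is a stand-in; ngbrs/wgts must be
# the flat neighbour-index and weight arrays the OpenCL kernel expects, with
# k neighbours per node, and all hyperparameter values are illustrative):
#
#     from types import SimpleNamespace
#     graph = SimpleNamespace(ngbrs=ngbrs, wgts=wgts, k=8)
#     hp = hyperParams(lamb=0.5, it=100, ep=1e-3, pl=1.0)
#     filtered = isoFilter(graph, signal, hp)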
# ======================================================================
#[1]
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import matplotlib.pyplot as plt
# Input data files are available in the "../input/" directory.
# For example, running this (by clicking run or pressing Shift+Enter) will list the files in the input directory
import os
import glob
import cv2
import tensorflow as tf
from keras import layers
from keras.layers import Dropout , Input, Add, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D, AveragePooling2D, MaxPooling2D, GlobalMaxPooling2D
from keras.models import Model, load_model
from keras.initializers import glorot_uniform
from sklearn.model_selection import train_test_split
import keras.backend as K
from sklearn.utils import shuffle
# Any results you write to the current directory are saved as output.
# IMAGE_DIR = os.getcwd() + "/cellimages/cell_im"
#[2]
X_para = []
Y_para = []
myfiles = glob.glob("cell_images/Parasitized/*.png")
for file in myfiles:
kernel = np.array([[0,-1,0],[-1,6,-1],[0,-1,0]])
img = cv2.filter2D( cv2.resize(cv2.imread(file) , (120,120)) , -1 , kernel)
image_yuv = cv2.cvtColor(img ,cv2.COLOR_BGR2YUV )
image_yuv[: ,: , 0] = cv2.equalizeHist(image_yuv[:,:,0])
image = cv2.cvtColor(image_yuv , cv2.COLOR_YUV2RGB)
X_para.append(image)
Y_para.append(1)
#[3]
X_un , Y_un = [],[]
unfiles = glob.glob("cell_images/Uninfected/*.png")
for file in unfiles:
kernel = np.array([[0,-1,0],[-1,7,-1],[0,-1,0]])
img = cv2.filter2D( cv2.resize(cv2.imread(file) , (120,120)) , -1 , kernel)
X_un.append(img)
Y_un.append(0)
#[4]
X = X_para + X_un
Y = Y_para + Y_un
X, Y = shuffle(X, Y)  # shuffle features and labels together, keeping pairs aligned
X_train,X_test,Y_train,Y_test = train_test_split(X,Y,test_size = 0.3 , random_state =42)
X = np.array(X)
#[5]
inp = Input(shape = (120 , 120 , 3))
x = Conv2D(filters = 16 , kernel_size = (3,3) , strides = (1,1) , padding = "valid" , kernel_initializer=glorot_uniform(seed = 2))(inp)
x = Activation("relu")(x)
x = Dropout(0.2)(x)
x = Conv2D(filters = 32 , kernel_size = (4,4) , strides = (2,2) , padding = "valid" , kernel_initializer=glorot_uniform(seed = 2))(x)
x = Activation("relu")(x)
x = MaxPooling2D(pool_size = (2,2) , strides = (2,2) , padding = "valid")(x)
x = Dropout(0.2)(x)
x = Conv2D(filters = 64 , kernel_size = (3,3) , strides = (2,2) , padding = "valid" , kernel_initializer = glorot_uniform(seed = 2))(x)
x = Activation("relu")(x)
x = Dropout(0.2)(x)
x = Conv2D(filters = 128 , kernel_size = (3,3) , strides = (1,1) , padding = "valid" , kernel_initializer = glorot_uniform())(x)
x = Activation("relu")(x)
x = MaxPooling2D(pool_size = (2,2) , strides = (2,2) , padding = "valid")(x)
x = Dropout(0.2)(x)
x = Conv2D(filters = 256 , kernel_size = (2,2) , strides = (2,2) , padding = "valid" , kernel_initializer = glorot_uniform())(x)
x = Activation("relu")(x)
x = AveragePooling2D(pool_size = (3,3) , strides = (1,1) , padding = "valid")(x)
x = Dropout(0.2)(x)
x = Flatten()(x)
x = Dense(120)(x)
x = Activation("relu")(x)
x = Dropout(0.2)(x)
x = Dense(60)(x)
x = Activation("relu")(x)
x = Dropout(0.2)(x)
x = Dense(10)(x)
x = Activation("relu")(x)
x = Dropout(0.)(x)
x = Dense(1)(x)
output = Activation("sigmoid")(x)
model = Model(inputs =inp , outputs = output )
#[6]
model.compile(loss = "binary_crossentropy" , optimizer = "adam" , metrics = ["accuracy"])
history = model.fit(np.array(X_train) ,np.array(Y_train) , epochs = 13 ,validation_split = 0.2 )
# model saving
scores = model.evaluate(X, np.array(Y), verbose=0)
print("%s: %.2f%%" % (model.metrics_names[1], scores[1]*100))
model.save('model_99.h5')
print('Saved model to disk')
#[7]
y_pre = model.predict(np.array(X_test))
y_pre = np.reshape(y_pre, (-1,))  # flatten (batch, 1) predictions
Y_test = np.array(Y_test)
y_pre = (y_pre >= 0.8).astype(int)  # threshold probabilities at 0.8
np.sum(Y_test == y_pre) / len(Y_test)  # test accuracy
#[8]
plt.plot(history.history['acc'])
plt.plot(history.history["val_acc"])
plt.title("Model Accuracy")
plt.xlabel("Accuracy")
plt.ylabel("Epochs")
plt.legend(['Train', 'Val'], loc='lower right')
plt.show()
#[9]
plt.plot(history.history["loss"])
plt.plot(history.history["val_loss"])
plt.title("Model Loss")
plt.ylabel("Loss")
plt.xlabel("Epochs")
plt.legend(['Train', 'Val'], loc='upper right')
plt.show()
# ======================================================================
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import downward.suites
import common_setup
import configs
CONFIGS = configs.default_configs_optimal(ipc=False, extended=False)
print(sorted(CONFIGS.keys()))
print(len(CONFIGS))
SUITE = downward.suites.suite_optimal_with_ipc11()
SCATTER_ATTRIBUTES = ["total_time"]
exp = common_setup.IssueExperiment(
search_revisions=["issue77-v7-base", "issue77-v7"],
configs=CONFIGS,
suite=SUITE
)
exp.add_absolute_report_step()
exp.add_comparison_table_step()
exp.add_scatter_plot_step(attributes=SCATTER_ATTRIBUTES, relative=True)
exp()
# ======================================================================
# Copyright PA Knowledge Ltd 2021
# For licence terms see LICENCE.md file
import copy
import unittest
import verify_config
class VerifyConfigTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.schema = {"properties": {"ingress": {"type": "object"},
"egress": {"type": "object"},
"routingTable": {"type": "array"}},
"required": ["ingress", "egress", "routingTable"]}
interface = {
"useDHCP": False,
"ping": True,
"mtu": 9000
}
cls.config = {"ingress": interface,
"egress": interface,
"routingTable": [
{
"ingressPort": 50000,
"egressIpAddress": "192.168.0.20",
"egressSrcPort": 60000,
"egressDestPort": 60600
},
{
"ingressPort": 50500,
"egressIpAddress": "192.168.0.21",
"egressSrcPort": 60004,
"egressDestPort": 61004
}
]
}
def test_empty_config_throws_error(self):
self.assertRaises(verify_config.ConfigErrorEmptyFile,
verify_config.VerifyConfig({}).validate,
{})
def test_config_file_longer_than_max_length_throws_error(self):
self.assertRaises(verify_config.ConfigErrorFileSizeTooLarge,
verify_config.VerifyConfig(schema={}, max_config_bytes=10).validate,
config={"ingress": {}, "egress": {}, "routingTable": []})
def test_config_file_matches_schema(self):
verify_config.VerifyConfig(self.schema).validate(self.config)
def test_config_file_that_does_not_match_schema_throws_error(self):
self.assertRaises(verify_config.ConfigErrorFailedSchemaVerification,
verify_config.VerifyConfig(self.schema).validate,
{"ingress": {}})
def test_port_span_exceeds_2048_throws_error(self):
config_port_span_too_large = copy.deepcopy(self.config)
config_port_span_too_large["routingTable"] = [
{
"ingressPort": 40000,
"egressIpAddress": "192.168.0.20",
"egressSrcPort": 50001,
"egressDestPort": 50001
},
{
"ingressPort": 42048,
"egressIpAddress": "192.168.0.21",
"egressSrcPort": 51024,
"egressDestPort": 51024
}
]
self.assertRaisesRegex(verify_config.ConfigErrorInvalidPortSpan,
"Config validation failed: Ingress portSpan must be less than 2048.",
verify_config.VerifyConfig(self.schema).validate,
config_port_span_too_large)
def test_ingress_ports_not_unique_throws_error(self):
config_ports_not_unique = copy.deepcopy(self.config)
config_ports_not_unique["routingTable"] = [
{
"ingressPort": 40000,
"egressIpAddress": "192.168.0.20",
"egressSrcPort": 50001,
"egressDestPort": 50001
},
{
"ingressPort": 40000,
"egressIpAddress": "192.168.0.21",
"egressSrcPort": 51024,
"egressDestPort": 51024
}
]
self.assertRaisesRegex(verify_config.ConfigErrorIngressPortsNotUnique,
"Config validation failed: Ingress ports must be unique.",
verify_config.VerifyConfig(self.schema).validate,
config_ports_not_unique)
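def _demo_validate(schema, config):
    # Usage sketch outside the test harness, relying only on the API exercised
    # above; catching Exception is an assumption, since the common base of the
    # ConfigError* classes is not shown here.
    try:
        verify_config.VerifyConfig(schema).validate(config)
        return True
    except Exception as error:
        print(f"validation failed: {error}")
        return False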
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
217768
|
import os
import uuid
from dataclasses import dataclass
from datetime import datetime
from . import FilePath, DirPath, Topic
from .util import create_directory, FileCreationException, DirectoryCreationException, create_file_from_str_to, \
create_file_from_dict_to
from .console_logging import print_success_step, print_error_step
@dataclass
class ApplicationSettings:
application_base_dir: DirPath
application_storage_dir: DirPath
application_init_report_file: FilePath
application_settings_file: FilePath
sensors_config_file: FilePath
root_topic: Topic
def create_default_application_settings() -> ApplicationSettings:
return ApplicationSettings(
application_base_dir=DirPath(os.path.expanduser("~/.jetson_detectify")),
application_storage_dir=DirPath(os.path.expanduser("~/.jetson_detectify/.storage")),
application_init_report_file=FilePath(os.path.expanduser("~/.jetson_detectify/.storage/init.json")),
application_settings_file=FilePath(os.path.expanduser("~/.jetson_detectify/application.yaml")),
sensors_config_file=FilePath(os.path.expanduser("~/.jetson_detectify/sensor.yaml")),
root_topic=Topic("homeassistant")
)
def write_init_report_to(dir_path: DirPath):
now = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
try:
content = {"success": True, "last_init_run": now, "node_id": "jetson_detectify", "device_id": uuid.uuid4().hex}
create_file_from_dict_to(FilePath(f"{dir_path}/init.json"), content)
print_success_step(f"Init-Report creation to [{dir_path}] successful.")
except FileCreationException as error:
print_error_step(
f"Init-Report creation to [{dir_path}] failed. I/O error({error.cause.strerror}): {error.cause.strerror}")
def create_application_settings_file(settings: ApplicationSettings):
create_application_directory(settings.application_base_dir)
create_application_directory(settings.application_storage_dir)
mqtt_broker = f"""mqtt_broker:
username: username
password: password
host: localhost
port: 1883
root_topic: {settings.root_topic}
"""
create_application_file(settings.application_settings_file, mqtt_broker)
def create_sensors_config_file(settings: ApplicationSettings):
content = """sensor: []
"""
create_application_file(settings.sensors_config_file, content)
def create_application_directory(dir_path: DirPath):
if not os.path.exists(dir_path):
try:
create_directory(dir_path)
print_success_step(f"Creating directory [{dir_path}] succeeded")
return
except DirectoryCreationException as error:
print_error_step(
f"Creating directory [{dir_path}] failed! OS error({error.cause.strerror}): {error.cause.strerror}")
print_success_step(f"Directory [{dir_path}] already exists!")
def create_application_file(file_path: FilePath, content: str):
if not os.path.exists(file_path):
try:
create_file_from_str_to(file_path=file_path, content=content)
print_success_step(f"Creating file [{file_path}] succeeded")
return
except FileCreationException as error:
print_error_step(
f"Creating file [{file_path}] failed! I/O error({error.cause.errno}): {error.cause.strerror}")
print_success_step(f"File [{file_path}] already exists!")
|
StarcoderdataPython
|
1671994
|
# Repository: ruslanmv/BOT-MMORPG-AI
from AutoHotPy import AutoHotPy
from InterceptionWrapper import *
def exitAutoHotKey(autohotpy,event):
autohotpy.stop()
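def send_move(autohotpy, dx, dy, delay):
    # Helper sketch (not called by the recording below): the recorded macro is
    # raw recorder output that repeats the same five-field stroke setup for
    # every relative move, and could equally be replayed from a list of
    # (dx, dy, delay) tuples through this one function.
    stroke = InterceptionMouseStroke()
    stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
    stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
    stroke.rolling = 0
    stroke.x = dx
    stroke.y = dy
    stroke.information = 0
    autohotpy.sendToDefaultMouse(stroke)
    autohotpy.sleep(delay)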
def recorded_macro(autohotpy, event):
autohotpy.moveMouseToPosition(384,474)
autohotpy.sleep(0)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00434422492980957)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005738973617553711)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0074231624603271484)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010085105895996094)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.01446986198425293)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.2800328731536865)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006239891052246094)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004995107650756836)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005003929138183594)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006809711456298828)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0069141387939453125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.008278369903564453)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0077228546142578125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.010417461395263672)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.011681556701660156)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.019338607788085938)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.04949450492858887)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006984710693359375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004812479019165039)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002680063247680664)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030057430267333984)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002989530563354492)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030066967010498047)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001992940902709961)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003370523452758789)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002008199691772461)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002040863037109375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0013802051544189453)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0015821456909179688)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001996278762817383)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002000093460083008)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002003908157348633)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0009970664978027344)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0035979747772216797)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010128021240234375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010218620300292969)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0021338462829589844)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003006458282470703)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002992868423461914)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010097026824951172)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019974708557128906)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003992795944213867)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001007080078125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002414703369140625)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004078865051269531)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002001047134399414)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002003192901611328)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004335641860961914)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004040956497192383)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0006139278411865234)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006148338317871094)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0020270347595214844)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001971721649169922)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005999565124511719)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006990194320678711)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010077953338623047)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0052187442779541016)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005963802337646484)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005960702896118164)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002333402633666992)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004919767379760742)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.007950544357299805)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.011607170104980469)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.01608729362487793)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.05959057807922363)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.012789011001586914)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.008389949798583984)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006636142730712891)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00800943374633789)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.007989168167114258)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.008775949478149414)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006921052932739258)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.007165670394897461)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004288196563720703)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005289316177368164)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004053831100463867)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029239654541015625)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0025849342346191406)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002028942108154297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001994609832763672)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003398418426513672)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0020513534545898438)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0009555816650390625)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001999378204345703)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030100345611572266)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0009906291961669922)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003005504608154297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0009949207305908203)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019996166229248047)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003008604049682617)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0009920597076416016)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003007173538208008)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002327442169189453)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002010345458984375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029985904693603516)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003005504608154297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029926300048828125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002000093460083008)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003999948501586914)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00127410888671875)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030074119567871094)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029997825622558594)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004012346267700195)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005199909210205078)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00656580924987793)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0028905868530273438)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = -1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002674102783203125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.010047435760498047)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.011561155319213867)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.008566141128540039)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0070192813873291016)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00599217414855957)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005998849868774414)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004859447479248047)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006207942962646484)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006711721420288086)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006114959716796875)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006701231002807617)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006928205490112305)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.011602163314819336)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.05808448791503906)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.02391958236694336)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0008590221405029297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.009413480758666992)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.010595083236694336)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.011991024017333984)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0020215511322021484)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.015035152435302734)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.2451343536376953)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_BUTTON_1_DOWN
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.08463501930236816)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0052089691162109375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005104541778564453)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004754781723022461)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005181312561035156)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002960681915283203)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006040334701538086)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004181623458862305)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004773855209350586)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0037682056427001953)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0038580894470214844)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003873109817504883)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005352497100830078)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004049777984619141)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004590749740600586)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005390167236328125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00583648681640625)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0049896240234375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005793094635009766)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004991054534912109)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0058138370513916016)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005662441253662109)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.007215738296508789)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005791902542114258)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0069463253021240234)
# Recorded relative mouse moves for this segment, in playback order.
# Each entry is (dx, dy, delay): the relative move that was sent, followed
# by the recorded sleep before the next event.
_RECORDED_MOVES = [
    (0, 1, 0.008598089218139648), (0, 1, 0.0059986114501953125),
    (1, 0, 0.0020008087158203125), (0, 1, 0.007796525955200195),
    (0, 1, 0.006009817123413086), (0, 1, 0.006909847259521484),
    (0, 1, 0.006125450134277344), (0, 1, 0.005430698394775391),
    (0, 1, 0.0054225921630859375), (0, 1, 0.006790637969970703),
    (0, 1, 0.005399465560913086), (0, 1, 0.006295204162597656),
    (0, 1, 0.007008075714111328), (0, 1, 0.005995512008666992),
    (0, 1, 0.007992744445800781), (0, 1, 0.007806062698364258),
    (0, 1, 0.006009578704833984), (0, 1, 0.0064508914947509766),
    (0, 1, 0.006994962692260742), (0, 1, 0.006630420684814453),
    (0, 1, 0.006305694580078125), (0, 1, 0.00697636604309082),
    (0, 1, 0.006056785583496094), (0, 1, 0.0065762996673583984),
    (0, 1, 0.005488157272338867), (0, 1, 0.007805347442626953),
    (0, 1, 0.006076335906982422), (0, 1, 0.007000923156738281),
    (0, 1, 0.007004976272583008), (0, 1, 0.006786346435546875),
    (0, 1, 0.005947113037109375), (0, 1, 0.0048372745513916016),
    (0, 1, 0.005634307861328125), (0, 1, 0.004548072814941406),
    (0, 1, 0.004990339279174805), (0, 1, 0.005011320114135742),
    (0, 1, 0.006125211715698242), (0, 1, 0.005743741989135742),
    (0, 1, 0.006174564361572266), (0, 1, 0.0067975521087646484),
    (0, 1, 0.007740020751953125), (0, 1, 0.005950927734375),
    (0, 1, 0.007903099060058594), (0, 1, 0.005734920501708984),
    (0, 1, 0.0061948299407958984), (0, 1, 0.005511760711669922),
    (0, 1, 0.0072116851806640625), (0, 1, 0.005853176116943359),
    (0, 1, 0.005629301071166992), (0, 1, 0.0050089359283447266),
    (0, 1, 0.00648045539855957), (0, 1, 0.006323814392089844),
    (0, 1, 0.004723310470581055), (0, 1, 0.005418300628662109),
    (0, 1, 0.005585908889770508), (0, 1, 0.0060651302337646484),
    (0, 1, 0.006013393402099609), (0, 1, 0.0014271736145019531),
    (1, 0, 0.0041925907135009766), (0, 1, 0.0068018436431884766),
    (0, 1, 0.005978107452392578), (0, 1, 0.007010698318481445),
    (0, 1, 0.0), (1, 0, 0.005644083023071289),
    (0, 1, 0.004750251770019531), (0, 1, 0.0060312747955322266),
    (0, 1, 0.0029985904693603516), (1, 0, 0.0019371509552001953),
    (0, 1, 0.005239009857177734), (0, 1, 0.005269527435302734),
    (0, 1, 0.0016164779663085938), (1, 0, 0.0020275115966796875),
    (0, 1, 0.004950284957885742), (0, 1, 0.0051500797271728516),
    (1, 1, 0.005418300628662109), (0, 1, 0.004357099533081055),
    (1, 1, 0.0038263797760009766), (0, 1, 0.0047969818115234375),
    (1, 1, 0.0051686763763427734), (0, 1, 0.00499272346496582),
    (1, 1, 0.0064334869384765625), (0, 1, 0.002437591552734375),
    (1, 0, 0.002007722854614258), (0, 1, 0.006033420562744141),
    (0, 1, 0.000995635986328125), (1, 0, 0.005231142044067383),
    (0, 1, 0.005633115768432617), (1, 0, 0.0012159347534179688),
    (0, 1, 0.006773948669433594), (1, 1, 0.007094621658325195),
    (0, 1, 0.0019829273223876953), (1, 0, 0.00597071647644043),
    (0, 1, 0.006062507629394531), (1, 0, 0.004691362380981445),
    (0, 1, 0.007941007614135742), (1, 0, 0.0016236305236816406),
    (0, 1, 0.008735895156860352), (1, 0, 0.0015757083892822266),
    (0, 1, 0.007129192352294922), (1, 0, 0.0018723011016845703),
    (0, 1, 0.005026817321777344), (1, 0, 0.002088785171508789),
    (0, 1, 0.005242586135864258), (1, 0, 0.0020329952239990234),
    (0, 1, 0.0009992122650146484), (1, 0, 0.005262851715087891),
    (1, 1, 0.0037004947662353516), (1, 0, 0.002000093460083008),
    (0, 1, 0.0021202564239501953), (1, 0, 0.0030007362365722656),
    (1, 0, 0.0019989013671875), (0, 1, 0.002041339874267578),
    (1, 0, 0.0034453868865966797), (1, 0, 0.0015141963958740234),
    (0, 1, 0.0020024776458740234), (1, 0, 0.0030303001403808594),
    (1, 0, 0.0009665489196777344), (0, 1, 0.0020215511322021484),
    (1, 0, 0.0031957626342773438), (1, 0, 0.0017921924591064453),
    (0, 1, 0.0012028217315673828), (1, 0, 0.0027894973754882812),
    (1, 0, 0.0032231807708740234), (1, 0, 0.0007777214050292969),
    (0, 1, 0.002429485321044922), (1, 0, 0.0022268295288085938),
    (1, 0, 0.003993034362792969), (1, 0, 0.0),
    (0, 1, 0.002204418182373047), (1, 0, 0.0037987232208251953),
    (1, 0, 0.001974344253540039), (1, 0, 0.0010013580322265625),
    (0, 1, 0.002001047134399414), (1, 0, 0.002997875213623047),
    (1, 0, 0.004061460494995117), (1, 0, 0.0013878345489501953),
    (0, 1, 0.0012156963348388672), (1, 0, 0.001893758773803711),
    (1, 0, 0.0031099319458007812), (1, 0, 0.0027816295623779297),
    (1, 0, 0.0032100677490234375), (1, 1, 0.0017886161804199219),
    (1, 0, 0.002583026885986328), (1, 0, 0.0030145645141601562),
    (1, 0, 0.002993345260620117), (1, 1, 0.0019996166229248047),
    (1, 0, 0.004168272018432617), (1, 0, 0.0008337497711181641),
    (1, 0, 0.003464937210083008), (1, 0, 0.0020449161529541016),
    (1, 0, 0.003177642822265625), (1, 0, 0.0019996166229248047),
    (1, 1, 0.002978086471557617), (1, 0, 0.00208282470703125),
    (1, 0, 0.0019116401672363281), (1, 0, 0.003004312515258789),
    (1, 0, 0.002111673355102539), (1, 0, 0.003024578094482422),
    (1, 0, 0.0019118785858154297), (1, 0, 0.0016553401947021484),
    (1, 0, 0.0021092891693115234), (1, 0, 0.00286102294921875),
    (1, 0, 0.001997232437133789), (1, 0, 0.00200653076171875),
    (1, 0, 0.003008604049682617), (1, 0, 0.0024955272674560547),
    (1, 0, 0.0029463768005371094), (1, 0, 0.0020084381103515625),
    (1, 0, 0.0029020309448242188), (1, 0, 0.001974344253540039),
    (1, 0, 0.00311279296875), (1, 0, 0.00319671630859375),
    (1, 0, 0.0028121471405029297), (1, 0, 0.004176139831542969),
    (1, 0, 0.0028231143951416016), (1, 0, 0.003216266632080078),
    (1, 0, 0.003368854522705078), (1, 0, 0.00521087646484375),
    (1, 0, 0.0037949085235595703), (1, 0, 0.0010008811950683594),
    (0, -1, 0.004022121429443359), (1, 0, 0.005870342254638672),
    (1, 0, 0.0034017562866210938), (0, -1, 0.0016562938690185547),
    (1, 0, 0.0049991607666015625), (1, 0, 0.0050084590911865234),
    (0, -1, 0.0009641647338867188), (1, 0, 0.005999565124511719),
    (1, 0, 0.004018306732177734), (0, -1, 0.0030040740966796875),
    (1, 0, 0.005737781524658203), (0, -1, 0.0003876686096191406),
    (1, 0, 0.005014181137084961), (1, 0, 0.0019986629486083984),
    (0, -1, 0.004001140594482422), (1, 0, 0.001992940902709961),
    (0, -1, 0.003000974655151367), (1, 0, 0.0033020973205566406),
    (0, -1, 0.0030715465545654297), (1, 0, 0.0031833648681640625),
    (0, -1, 0.0018248558044433594), (1, 0, 0.0029997825622558594),
    (0, -1, 0.002016305923461914), (1, 0, 0.002988100051879883),
    (0, -1, 0.0), (1, 0, 0.0052280426025390625),
]
# Replay the recorded moves exactly as captured: one relative stroke per
# entry, followed by the recorded sleep before the next event.
for dx, dy, delay in _RECORDED_MOVES:
    stroke = InterceptionMouseStroke()
    stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
    stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
    stroke.rolling = 0
    stroke.x = dx
    stroke.y = dy
    stroke.information = 0
    autohotpy.sendToDefaultMouse(stroke)
    autohotpy.sleep(delay)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0015339851379394531)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029036998748779297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019998550415039062)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019943714141845703)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030050277709960938)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002000570297241211)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0042307376861572266)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0008392333984375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005442142486572266)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029973983764648438)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030002593994140625)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0009996891021728516)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005002021789550781)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029981136322021484)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00437474250793457)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005419015884399414)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001580953598022461)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004006385803222656)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0024280548095703125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0005657672882080078)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0034689903259277344)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0013539791107177734)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0016133785247802734)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030002593994140625)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030069351196289062)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019915103912353516)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010001659393310547)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0032052993774414062)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0027959346771240234)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003154277801513672)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002696514129638672)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029327869415283203)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010101795196533203)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002000093460083008)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002989530563354492)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010020732879638672)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030050277709960938)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0009958744049072266)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029973983764648438)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0034027099609375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004197359085083008)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0008115768432617188)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001998424530029297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005000114440917969)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004000186920166016)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0039098262786865234)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030083656311035156)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004006624221801758)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010046958923339844)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00426793098449707)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030298233032226562)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003388643264770508)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00400090217590332)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0009987354278564453)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001999378204345703)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004002809524536133)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030045509338378906)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004109859466552734)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003488779067993164)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0035200119018554688)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003005504608154297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003000497817993164)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001993894577026367)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004567384719848633)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0025658607482910156)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030074119567871094)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010118484497070312)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004990339279174805)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019960403442382812)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005355119705200195)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003165006637573242)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00542449951171875)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0026035308837890625)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003194570541381836)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003793954849243164)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003072977066040039)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0033483505249023438)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0020837783813476562)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0011227130889892578)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002802610397338867)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030059814453125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019998550415039062)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029935836791992188)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0021278858184814453)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010182857513427734)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0013127326965332031)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0033867359161376953)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0016155242919921875)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002976655960083008)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0012106895446777344)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0007808208465576172)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019998550415039062)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0019986629486083984)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030417442321777344)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0025017261505126953)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0014679431915283203)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003404855728149414)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0015954971313476562)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003999233245849609)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0024454593658447266)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002969503402709961)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0035598278045654297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0012099742889404297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0012807846069335938)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0032050609588623047)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005272626876831055)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0039806365966796875)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029006004333496094)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005407810211181641)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004999637603759766)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0059947967529296875)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005306243896484375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005846500396728516)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0039942264556884766)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0050106048583984375)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004236459732055664)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.002933502197265625)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003779172897338867)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.00379180908203125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0032074451446533203)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0017898082733154297)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 1
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0010199546813964844)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003528594970703125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.001992940902709961)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.003027200698852539)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0030031204223632812)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0029973983764648438)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0047724246978759766)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005011081695556641)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.004178047180175781)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.005788564682006836)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.0068051815032958984)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.006806612014770508)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.013042211532592773)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.009449005126953125)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.016971111297607422)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.016405582427978516)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = -1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.21574163436889648)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_BUTTON_1_UP
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 0
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(0.24522185325622559)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y = 1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
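# --- Illustrative sketch (not part of the recorded macro) -------------------
# The stanzas above are machine-recorded: every relative mouse move is spelled
# out as its own InterceptionMouseStroke followed by a sleep. A hand-written
# replay can drive the same API from a data table instead. `replay_moves` is a
# hypothetical helper; it uses only the classes and calls already used above.
def replay_moves(autohotpy, moves):
    """Replay relative mouse moves given as (dx, dy, delay_in_seconds) tuples."""
    for dx, dy, delay in moves:
        stroke = InterceptionMouseStroke()
        stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
        stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
        stroke.rolling = 0
        stroke.x = dx
        stroke.y = dy
        stroke.information = 0
        autohotpy.sendToDefaultMouse(stroke)
        autohotpy.sleep(delay)

# Example: three small up-right steps with ~3 ms pauses.
# replay_moves(autohotpy, [(1, 0, 0.003), (0, -1, 0.003), (1, -1, 0.003)])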
if __name__ == "__main__":
    auto = AutoHotPy()
    auto.registerExit(auto.ESC, exitAutoHotKey)  # ESC terminates the script
    auto.registerForKeyDown(auto.F1, recorded_macro)  # F1 replays the recorded macro
    auto.start()
# ========== StarcoderdataPython sample 6431899 ==========
from __future__ import print_function
from argparse import ArgumentParser
import os
import sys
try:
    from catkin_pkg.workspaces import order_paths
except ImportError as e:
    sys.exit('ImportError: "from catkin_pkg.workspaces import order_paths" failed: %s\nMake sure that you have installed "catkin_pkg", it is up to date and on the PYTHONPATH.' % e)
def main():
    """
    Order a list of paths according to a list of prefixes which define the order.
    """
    parser = ArgumentParser(description='Utility to order a list of paths according to a list of prefixes. Creates a file with a CMake set command setting a variable.')
    parser.add_argument('outfile', help='The filename of the generated CMake file')
    parser.add_argument('--paths-to-order', nargs='*', help='The semicolon-separated paths to order')
    parser.add_argument('--prefixes', nargs='*', help='The semicolon-separated prefixes defining the order')
    args = parser.parse_args()

    ordered_paths = order_paths(args.paths_to_order, args.prefixes)

    # create the output directory if necessary; guard against a bare filename,
    # whose dirname is an empty string
    outdir = os.path.dirname(args.outfile)
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)

    with open(args.outfile, 'w') as fh:
        fh.write('set(ORDERED_PATHS "%s")' % ';'.join(ordered_paths))


if __name__ == '__main__':
    main()
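# Example invocation (paths are hypothetical, shown for illustration only):
#   python order_paths.py /tmp/ordered.cmake \
#       --paths-to-order /opt/ros/ws_b/src /opt/ros/ws_a/src \
#       --prefixes /opt/ros/ws_a
# The generated file then contains a single CMake command, e.g.:
#   set(ORDERED_PATHS "/opt/ros/ws_a/src;/opt/ros/ws_b/src")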
# ========== StarcoderdataPython sample 4850057 ==========
""":mod:`wand.version` --- Version data
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You can find the current version in the command line interface:
.. sourcecode:: console
$ python -m wand.version
0.0.0
$ python -m wand.version --verbose
Wand 0.0.0
ImageMagick 6.7.7-6 2012-06-03 Q16 http://www.imagemagick.org
.. versionadded:: 0.2.0
The command line interface.
.. versionadded:: 0.2.2
The ``--verbose``/``-v`` option which also prints ImageMagick library
version for CLI.
"""
from __future__ import print_function
import ctypes
import datetime
import re
import sys
try:
    from .api import libmagick
except ImportError:
    libmagick = None
from .compat import text
__all__ = ('VERSION', 'VERSION_INFO', 'MAGICK_VERSION',
           'MAGICK_VERSION_INFO', 'MAGICK_VERSION_NUMBER',
           'MAGICK_RELEASE_DATE', 'MAGICK_RELEASE_DATE_STRING',
           'QUANTUM_DEPTH')
#: (:class:`tuple`) The version tuple e.g. ``(0, 1, 2)``.
#:
#: .. versionchanged:: 0.1.9
#: Becomes :class:`tuple`. (It was string before.)
VERSION_INFO = (0, 3, 6)
#: (:class:`basestring`) The version string e.g. ``'0.1.2'``.
#:
#: .. versionchanged:: 0.1.9
#: Becomes string. (It was :class:`tuple` before.)
VERSION = '{0}.{1}.{2}'.format(*VERSION_INFO)
if libmagick:
    c_magick_version = ctypes.c_size_t()
    #: (:class:`basestring`) The version string of the linked ImageMagick
    #: library. The exactly same string to the result of
    #: :c:func:`GetMagickVersion` function.
    #:
    #: Example::
    #:
    #:    'ImageMagick 6.7.7-6 2012-06-03 Q16 http://www.imagemagick.org'
    #:
    #: .. versionadded:: 0.2.1
    MAGICK_VERSION = text(
        libmagick.GetMagickVersion(ctypes.byref(c_magick_version))
    )
    #: (:class:`numbers.Integral`) The version number of the linked
    #: ImageMagick library.
    #:
    #: .. versionadded:: 0.2.1
    MAGICK_VERSION_NUMBER = c_magick_version.value
    _match = re.match(r'^ImageMagick\s+(\d+)\.(\d+)\.(\d+)(?:-(\d+))?',
                      MAGICK_VERSION)
    #: (:class:`tuple`) The version tuple e.g. ``(6, 7, 7, 6)`` of
    #: :const:`MAGICK_VERSION`.
    #:
    #: .. versionadded:: 0.2.1
    MAGICK_VERSION_INFO = tuple(int(v or 0) for v in _match.groups())
    #: (:class:`basestring`) The date string e.g. ``'2012-06-03'`` of
    #: :const:`MAGICK_RELEASE_DATE`. This value is the exactly same
    #: string to the result of :c:func:`GetMagickReleaseDate` function.
    #:
    #: .. versionadded:: 0.2.1
    MAGICK_RELEASE_DATE_STRING = text(libmagick.GetMagickReleaseDate())
    #: (:class:`datetime.date`) The release date of the linked ImageMagick
    #: library. The same to the result of :c:func:`GetMagickReleaseDate`
    #: function.
    #:
    #: .. versionadded:: 0.2.1
    MAGICK_RELEASE_DATE = datetime.date(
        *map(int, MAGICK_RELEASE_DATE_STRING.split('-')))
    c_quantum_depth = ctypes.c_size_t()
    libmagick.GetMagickQuantumDepth(ctypes.byref(c_quantum_depth))
    #: (:class:`numbers.Integral`) The quantum depth configuration of
    #: the linked ImageMagick library. One of 8, 16, 32, or 64.
    #:
    #: .. versionadded:: 0.3.0
    QUANTUM_DEPTH = c_quantum_depth.value
    del c_magick_version, _match, c_quantum_depth

if __doc__ is not None:
    __doc__ = __doc__.replace('0.0.0', VERSION)
del libmagick
if __name__ == '__main__':
    options = frozenset(sys.argv[1:])
    if '-v' in options or '--verbose' in options:
        print('Wand', VERSION)
        try:
            print(MAGICK_VERSION)
        except NameError:
            pass
    else:
        print(VERSION)
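# Example (interpreter session; values depend on the installed ImageMagick):
#   >>> from wand.version import VERSION, VERSION_INFO
#   >>> VERSION
#   '0.3.6'
#   >>> VERSION_INFO
#   (0, 3, 6)
# MAGICK_VERSION and friends are only defined when libmagick could be loaded,
# so guard access with a try/except NameError, as the CLI block above does.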
# ========== StarcoderdataPython sample 3309476 ==========
# manishwins/Greenline: landlord/migrations/0012_auto_20210629_1203.py
# Generated by Django 3.1.7 on 2021-06-29 12:03
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('landlord', '0011_auto_20210609_0631'),
    ]

    operations = [
        migrations.RenameField(
            model_name='notifications',
            old_name='property_id',
            new_name='apartment_id',
        ),
        migrations.AddField(
            model_name='notifications',
            name='link',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='notifications',
            name='notification_type',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
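# Applied like any other Django migration (project-specific, for illustration):
#   python manage.py migrate landlord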