src/easydmp/eventlog/models.py | hmpf/easydmp
from functools import reduce
from operator import or_
from copy import deepcopy
from django.contrib.contenttypes.models import ContentType
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.utils.timezone import now as tznow
GFK_MAPPER = {
'actor': {'ct': 'actor_content_type', 'id': 'actor_object_id'},
'target': {'ct': 'target_content_type', 'id': 'target_object_id'},
'action_object': {
'ct': 'action_object_content_type',
'id': 'action_object_object_id'
},
}
Q = models.Q
def _get_gfk(obj):
obj_ct = ContentType.objects.get_for_model(obj)
obj_id = obj.pk
return (obj_ct, obj_id)
def _get_remote_obj(obj_ct, obj_id):
if obj_ct and obj_id:
obj = obj_ct.get_object_for_this_type(pk=obj_id)
return obj
def _serialize_gfk(obj):
    if not obj:
        return None
    if isinstance(obj, dict):
        # already serialized: reuse the stored content type, pk and value
        return {'ct': obj['ct'], 'pk': obj['pk'], 'value': obj['value']}
    ct, pk = _get_gfk(obj)
    return {
        'ct': {'pk': ct.pk, 'name': str(ct)},
        'pk': pk,
        'value': str(obj)
    }
def delazify_object(obj):
if hasattr(obj, '_wrapped') and hasattr(obj, '_setup'):
if obj._wrapped.__class__ == object:
obj._setup()
obj = obj._wrapped
return obj
def _format_obj(obj):
try:
objstring = obj.logprint()
except AttributeError:
obj = delazify_object(obj)
objstring = repr(obj).strip('<>')
return objstring
def _format_timestamp(timestamp):
"Print timestamp in JSON serializer compatible format"
timestamp = timestamp.strftime('%Y-%m-%dT%H:%M:%S.%f%Z')
timestamp = timestamp.replace('UTC', 'Z', 1)
return timestamp
def _format_description(kwargs, description_template):
context = deepcopy(kwargs)
context['timestamp'] = _format_timestamp(context['timestamp'])
for field in ('actor', 'target', 'action_object'):
obj = kwargs[field]
if not obj:
continue
objstring = _format_obj(obj)
context[field] = objstring
return description_template.format(**context)
def _serialize_event(kwargs):
data = {}
data['actor'] = _serialize_gfk(kwargs['actor'])
data['target'] = _serialize_gfk(kwargs['target'])
data['action_object'] = _serialize_gfk(kwargs['action_object'])
# copy the rest
for field in ('verb', 'description', 'timestamp', 'extra'):
data[field] = kwargs[field]
return data
class EventLogQuerySet(models.QuerySet):
def delete(self):
return (0, {})
delete.queryset_only = True # type: ignore
def update(self, **_):
return 0
update.queryset_only = True # type: ignore
def _get_gfks(self, field, *objects):
ct = GFK_MAPPER[field]['ct']
id = GFK_MAPPER[field]['id']
q_objs = []
for obj in objects:
if not obj:
continue
obj_ct, obj_id = _get_gfk(obj)
lookups = {ct: obj_ct, id: obj_id}
q_objs.append(Q(**lookups))
return self.filter(reduce(or_, q_objs))
def actors(self, *actors):
return self._get_gfks('actor', *actors)
def targets(self, *targets):
return self._get_gfks('target', *targets)
def action_objects(self, *action_objects):
return self._get_gfks('action_object', *action_objects)
def model_actions(self, model):
ct = ContentType.objects.get_for_model(model)
return self.filter(
Q(actor_content_type=ct),
Q(target_content_type=ct),
Q(action_object_content_type=ct),
)
    def any(self, obj):
        qs = self.actors(obj) | self.targets(obj) | self.action_objects(obj)
        return qs.distinct()
class EventLogManager(models.Manager):
def log_event(self, actor, verb, target=None, action_object=None,
description_template='', timestamp=None, extra=None,
using=None):
"""Log event
`actor`, `target` and `action_object` are model instances. `actor`
is required.
`verb` is a short string, preferrably an infinitive. It should not
duplicate information about the model instances of `actor`, `target`
or `action_object`.
`description_template` is used to build a human-readble string from
the other arguments.
`timestamp` must be a datetime with timezone
`extra` must be JSON serializable, preferrably a dict. The info will
be added to the `data`-field, and may be looked up from the
`description_template`.
"""
timestamp = timestamp if timestamp else tznow()
description = _format_description(locals(), description_template)
data = _serialize_event(locals())
return self.create(
actor=actor,
target=target,
action_object=action_object,
verb=verb,
description=description,
timestamp=timestamp,
data=data,
)
class EventLog(models.Model):
actor_content_type = models.ForeignKey(ContentType,
on_delete=models.DO_NOTHING,
related_name='actor', db_index=True)
actor_object_id = models.TextField(db_index=True)
verb = models.CharField(max_length=255, db_index=True)
description = models.TextField(blank=True, null=True)
target_content_type = models.ForeignKey(ContentType,
on_delete=models.DO_NOTHING,
blank=True, null=True,
related_name='target',
db_index=True)
target_object_id = models.TextField(blank=True, null=True, db_index=True)
action_object_content_type = models.ForeignKey(ContentType,
on_delete=models.DO_NOTHING,
blank=True, null=True,
related_name='action_object',
db_index=True)
action_object_object_id = models.TextField(blank=True, null=True,
db_index=True)
data = models.JSONField(default=dict, encoder=DjangoJSONEncoder)
timestamp = models.DateTimeField(default=tznow, db_index=True)
objects = EventLogManager.from_queryset(EventLogQuerySet)()
class Meta:
ordering = ('-timestamp',)
    def __str__(self):
        return self.description or ''
def delete(self, **_):
# Deletion not allowed
return (0, {})
@property
def actor(self):
obj_ct = self.actor_content_type
obj_id = self.actor_object_id
return _get_remote_obj(obj_ct, obj_id)
@actor.setter
def actor(self, actor):
obj_ct, obj_id = _get_gfk(actor)
self.actor_content_type = obj_ct
self.actor_object_id = obj_id
@property
def target(self):
obj_ct = self.target_content_type
obj_id = self.target_object_id
return _get_remote_obj(obj_ct, obj_id)
@target.setter
def target(self, target):
if target:
obj_ct, obj_id = _get_gfk(target)
self.target_content_type = obj_ct
self.target_object_id = obj_id
@property
def action_object(self):
obj_ct = self.action_object_content_type
obj_id = self.action_object_object_id
return _get_remote_obj(obj_ct, obj_id)
@action_object.setter
def action_object(self, action_object):
if action_object:
obj_ct, obj_id = _get_gfk(action_object)
self.action_object_content_type = obj_ct
self.action_object_object_id = obj_id
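# Usage sketch (added for illustration; `request.user` and `plan` are
# hypothetical instances, not part of this module):
#
#   EventLog.objects.log_event(
#       actor=request.user,
#       verb='update',
#       target=plan,
#       description_template='{timestamp} {actor} updated {target}',
#   )
#
# The manager renders `description` from the template and serializes the
# generic foreign keys into the `data` JSONField.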
trypython/basic/dict_/dict02.py | devlights/try-python
"""
ディクショナリについてのサンプルです。
dict の マージ について (Python 3.5 以降で有効)
"""
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr
class Sample(SampleBase):
def exec(self):
        # ------------------------------------------------------------
        # Merging dictionaries
        #
        # In Python 3.5 and later, two dictionaries can be merged
        # as shown below (any number of dicts works).
        # ------------------------------------------------------------
dict_a = {'a': 1, 'b': 2}
dict_b = {'c': 3, 'd': 4}
merged = {**dict_a, **dict_b}
pr('merged', merged)
dict_c = {'f': 5, 'g': 6}
pr('merged2', {**merged, **dict_c})
        # if the same key exists, the later value overwrites the earlier one
dict_d = {'a': 10, 'd': 40}
pr('merge3', {**merged, **dict_d})
def go():
obj = Sample()
obj.exec()
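# Note (added for illustration): on Python 3.9 and later the same merge can be
# written with the dict union operators from PEP 584:
#
#   merged = dict_a | dict_b    # new dict; right operand wins on key collisions
#   dict_a |= dict_b            # in-place update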
src/text_utils/symbols_dict.py | stefantaubert/text-utils
from collections import Counter, OrderedDict
from pathlib import Path
from text_utils.utils import parse_json, save_json
class SymbolsDict(OrderedDict): # Tuple[str, int]
def save(self, file_path: Path):
save_json(file_path, self)
@classmethod
def load(cls, file_path: Path):
data = parse_json(file_path)
return cls(data)
@classmethod
def fromcounter(cls, counter: Counter):
return cls(counter.most_common())
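# Usage sketch (added for illustration, assuming this module is importable):
#
#   from collections import Counter
#   from pathlib import Path
#   symbols = SymbolsDict.fromcounter(Counter('hello world'))  # ordered by count
#   symbols.save(Path('symbols.json'))
#   restored = SymbolsDict.load(Path('symbols.json'))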
sdk_k64f/boards/frdmk64f/multiprocessor_examples/erpc_remote_control/service/erpc_remote_control_app_1/interface.py | Sir-Branch/k64f-starter-template
# Copyright (c) 2014-2016, Freescale Semiconductor, Inc.
# Copyright 2016 NXP
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Generated by erpcgen 1.7.3 on Mon Sep 23 13:00:45 2019.
#
# AUTOGENERATED - DO NOT EDIT
#
import erpc
# Abstract base class for remote_control_app_1
class Iremote_control_app_1(object):
SERVICE_ID = 2
BUTTON_PRESSED_ID = 1
#Return which button was pressed
def button_pressed(self, which):
raise NotImplementedError()
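# Usage sketch (added for illustration, outside the generated file): a concrete
# service subclasses the generated interface and overrides its handler:
#
#   class remote_control_app_1_handler(Iremote_control_app_1):
#       def button_pressed(self, which):
#           print('button pressed:', which)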
utils/urbansounds.py | birds-on-mars/birdsonearth
'''
Script to restructure the Urban Sounds dataset into the format required to run
the train.py script on it.
Download the dataset from:
https://www.kaggle.com/pavansanagapati/urban-sound-classification
Then unpack train.zip into the root specified below, define where the restructured
data is saved to (save_to) and the maximum number of instances per class (nmax),
and run this script.
TODO: apparently there are different bit depths in the dataset, and 24-bit files
lead to issues; for now these files are simply ignored...
'''
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import os
import shutil
from scipy.io import wavfile
class UrbanSoundsProcessor:
def __init__(self, root, nmax, save_to):
self.root = root
self.nmax = nmax
self.save_to = save_to
self.df = pd.read_csv(os.path.join(root, 'train.csv'))
self.labels = self.df.Class.unique()
self.counts = {i:0 for i in self.labels}
self.delete_download = False
def setup(self):
for label in self.labels:
os.makedirs(os.path.join(self.save_to, label))
def run(self):
self.setup()
total = 0
for i in self.df.index:
            if self.nmax is not None and total >= self.nmax * len(self.labels):
                print('done loading {} instances each'.format(self.nmax))
                break
label = self.df.at[i, 'Class']
if self.nmax is not None and self.counts[label] >= self.nmax:
continue
else:
file = str(self.df.at[i, 'ID']) + '.wav'
print('\n', file)
try:
sr, wav_data = wavfile.read(os.path.join(self.root, 'Train', file))
except Exception as e:
print(e)
print('not using file ', file)
continue
print(type(wav_data), 'original sample rate: ', sr)
print(np.min(wav_data), np.max(wav_data))
wav_data = wav_data.astype(np.int16)
wavfile.write(os.path.join(self.save_to, label, file), rate=22050, data=wav_data)
# shutil.copyfile(os.path.join(self.root, 'Train', file),
# os.path.join(self.save_to, label, file))
self.counts[label] += 1
total += 1
if self.delete_download:
shutil.rmtree(self.root)
if __name__ == '__main__':
print(os.listdir('data/'))
root = 'data/urbansounds_download'
save_to = 'data/urbansounds'
nmax = 100
proc = UrbanSoundsProcessor(root, nmax, save_to)
proc.run()
docker_registry_frontend/manifest.py | cschaba/docker-registry-frontend
import abc
import functools
import json
import operator
class DockerRegistryManifest(abc.ABC):
def __init__(self, content):
self._content = content
def get_created_date(self):
raise NotImplementedError
def get_entrypoint(self):
raise NotImplementedError
def get_exposed_ports(self):
raise NotImplementedError
def get_docker_version(self):
raise NotImplementedError
def get_volumes(self):
raise NotImplementedError
class DockerRegistrySchema1Manifest(DockerRegistryManifest):
def __get_sorted_history(self):
history = []
for entry in self._content['history']:
history.append(json.loads(entry['v1Compatibility']))
history.sort(key=lambda x: x['created'], reverse=True)
return history
def __get_first_value(self, *keys):
for entry in self.__get_sorted_history():
try:
return functools.reduce(operator.getitem, keys, entry)
except KeyError:
pass
return None
def get_created_date(self):
return self.__get_first_value('created')
def get_docker_version(self):
return self.__get_first_value('docker_version')
def get_entrypoint(self):
return self.__get_first_value('config', 'Entrypoint')
def get_exposed_ports(self):
return self.__get_first_value('config', 'ExposedPorts')
def get_layer_ids(self):
layer_ids = []
for layer in self._content['fsLayers']:
layer_ids.append(layer['blobSum'])
return set(layer_ids)
def get_volumes(self):
return self.__get_first_value('config', 'Volumes')
def makeManifest(content):
if content['schemaVersion'] == 1:
return DockerRegistrySchema1Manifest(content)
else:
raise ValueError
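# Usage sketch (added for illustration): `content` is the parsed JSON body of a
# registry manifest response; registry URL and image name below are hypothetical:
#
#   import requests
#   content = requests.get(
#       'https://registry.example.com/v2/myimage/manifests/latest').json()
#   manifest = makeManifest(content)  # raises ValueError unless schemaVersion == 1
#   print(manifest.get_created_date(), manifest.get_docker_version())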
inference-framework/checker-framework/checkers/binary/trace-log.py | SoftwareEngineeringToolDemos/type-inference
#!/usr/bin/env python
import sqlite3
import sys
import re
auto_mode = True
#codeCodes = {
# 'black': '0;30', 'bright gray': '0;37',
# 'blue': '0;34', 'white': '1;37',
# 'green': '0;32', 'bright blue': '1;34',
# 'cyan': '0;36', 'bright green': '1;32',
# 'red': '0;31', 'bright cyan': '1;36',
# 'purple': '0;35', 'bright red': '1;31',
# 'yellow': '0;33', 'bright purple':'1;35',
# 'dark gray':'1;30', 'bright yellow':'1;33',
# 'normal': '0'
#}
#def printc(text, color):
# """Print in color."""
# print "\033["+codeCodes[color]+"m"+text+"\033[0m"
#def writec(text, color):
# """Write to stdout in color."""
# sys.stdout.write("\033["+codeCodes[color]+"m"+text+"\033[0m")
def print_highlight(text):
p = re.compile('\(([0-9]+)\)')
start = 0
r = p.search(text, start)
res = ''
while r != None:
res = res + text[start:r.start(1)]
res = res + '\033[0;31m' + text[r.start(1):r.end(1)] + '\033[0m'
start = r.end(1)
r = p.search(text, start)
res = res + text[start:len(text)] + '\n'
text = res
p = re.compile('{[^{}]+}')
start = 0
r = p.search(text, start)
res = ''
while r != None:
res = res + text[start:r.start(0)]
res = res + '\033[0;34m' + text[r.start(0):r.end(0)] + '\033[0m'
start = r.end(0)
r = p.search(text, start)
res = res + text[start:len(text)] + '\n'
sys.stdout.write(res)
# while r != None:
# writec(text[start:r.start(1)], 'normal')
# writec(text[r.start(1):r.end(1)], 'red')
# start = r.end(1)
# r = p.search(text, start)
# writec(text[start:len(text)], 'normal')
# writec('\n', 'normal')
if len(sys.argv) == 3:
all_refs_file = sys.argv[1];
all_trace_file = sys.argv[2];
else:
all_refs_file = 'infer-output/all-refs.log'
all_trace_file = 'infer-output/trace.log'
#all_refs_file = 'all-refs.log'
#all_trace_file = 'trace.log'
#conn = sqlite3.connect("trace.db")
conn = sqlite3.connect(":memory:")
c = conn.cursor()
# Create tables
refs_table = 'refs'
trace_table = 'trace'
c.execute('create table ' + refs_table + '(ID INTEGER, NAME TEXT, ANNOS TEXT)')
c.execute('create table ' + trace_table + '(ID INTEGER, NAME TEXT, OLD_ANNOS TEXT, NEW_ANNOS TEXT, CONS TEXT, CAUSEDBY TEXT, CAUSEDID INTEGER)')
conn.commit()
# Dump the data
print 'Loading reference data from ' + all_refs_file + '...'
f = open(all_refs_file)
for line in f:
sql = 'INSERT INTO ' + refs_table + ' VALUES ('
is_first = True
for s in line.split('|'):
if is_first:
is_first = False
else:
sql = sql + ','
sql = sql + '\'' + s.strip().replace("\'", "").replace("\n", "") + '\''
sql = sql + ')'
# print sql
try:
c.execute(sql)
except:
print 'Skip invalid input: ' + line
# print sql
# raise
conn.commit()
f.close()
print 'Done'
print 'Loading trace data from ' + all_trace_file + '...'
f = open(all_trace_file)
for line in f:
sql = 'INSERT INTO ' + trace_table + ' VALUES ('
is_first = True
for s in line.split('|'):
if is_first:
is_first = False
else:
sql = sql + ','
sql = sql + '\'' + s.strip().replace("\'", "") + '\''
sql = sql + ')'
try:
c.execute(sql)
except:
print 'Skip invalid input: ' + line
# print sql
# raise
conn.commit()
f.close()
print 'Done'
ref_id = 0
# Now ready to trace
trace_list = []
while True:
print ''
if ref_id == 0 or auto_mode == False:
input_text = raw_input("Enter the ID you want to trace (Press Enter to exit): ")
try:
ref_id = int(input_text.strip())
except ValueError:
ref_id = -1
if ref_id == -1:
print 'The trace is:\n'
for rid in trace_list:
for row in c.execute('SELECT NAME, ANNOS from ' + refs_table + ' where ID = ' + str(rid)):
print row[0], row[1]
print ' | '
print ' V '
print 'Done'
sys.exit(1)
if ref_id in trace_list:
print str(ref_id) + ' is in the trace list. You may have entered it before.'
# if auto_mode == True:
# ref_id = -1
# continue
has_result = False
print ''
caused_id = 0;
first_id = 0;
for row in c.execute('SELECT * from ' + trace_table + ' where ID = ' + str(ref_id)):
has_result = True
print_highlight(row[1] + ': ')
print '\t\t\t' + row[2] + ' --> ' + row[3]
print 'Due to:'
print_highlight(row[4] + "\nCaused by: " + row[5])
# print_highlight("caused by " + row[5])
first_id = caused_id;
caused_id = row[6]
# print ''
if has_result and not ref_id in trace_list:
trace_list.append(ref_id)
else:
print "Not found"
if first_id == 0 or first_id == caused_id:
ref_id = caused_id
else:
ref_id = 0
conn.close()
perm_comb.py | RikGhosh487/Combinations-Permutations
import sys
# method that calculates factorials
def factorial(input):
if type(input) != int:
return 'Can only accept integer inputs'
if input < 0:
return 'Invalid input. Must be a value greater than 0'
if input == 0 or input == 1:
return 1
return input * factorial(input - 1)
# smart factorial that doesn't perform the full operation and saves time
def fact_with_div(n, mid, k):
result = 1
if mid > k:
while n != mid:
result *= n
n -= 1
else:
while n != k:
result *= n
n -= 1
return result
# method that performs the combination formula
def combination(n, k):
numerator = fact_with_div(n, n - k, k)
denominator = factorial(k) if k < n - k else factorial(n - k)
return int(numerator / denominator)
# method that performs the permutation formula
def permutation(n, k):
numerator = fact_with_div(n, n - k, 0)
return numerator
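# Quick checks (added for illustration): combination(5, 2) == 10 and
# permutation(5, 2) == 20; on Python 3.8+ these match the standard library's
# math.comb(5, 2) and math.perm(5, 2).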
def main():
# options for operations and matching code
options = {'Combinations':'(C)','C':'(C)', 'Permutations':'(P)','P':'(P)',
'Ordered with Replacement':'(OR)', 'OR':'(OR)'}
# reading input for operation type until correct
operation_choice = input('Please choose an operation: ')
while operation_choice not in options:
print('That is an invalid option. Please select one of the following options:')
for key in options:
print(f'>> {key}')
operation_choice = input('Please choose an operation: ')
operation = options[operation_choice]
# reading n and k values until correct
n = int(input('Number of elements to pick from (n): '))
k = int(input('Number of spaces to fit to (k): '))
while k > n or k < 0 or n < 0:
print('Both values have to be in the range 0 \u2264 k \u2264 n.' +
f'\nYour values are k = {k} and n = {n}')
n = int(input('Number of elements to pick from: '))
k = int(input('Number of spaces to fit to: '))
# operation choosing
if operation == '(C)':
result = combination(n, k)
elif operation == '(P)':
result = permutation(n, k)
else:
result = n ** k
print(f'Result: {result}')
if __name__ == '__main__':
    main()
ProdTracker/product/admin.py | rishthas/ProdTracker
from django.contrib import admin
from .models import Branch,Product,Transfer,Vendor,Model
# Register your models here.
admin.site.register(Branch)
admin.site.register(Product)
admin.site.register(Transfer)
admin.site.register(Vendor)
admin.site.register(Model)
MascotasPasto/afiliados/migrations/0001_initial.py | FabianBedoya/Frameworks-7a-2020B
# Generated by Django 3.1.1 on 2020-10-30 20:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Paises',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('codigo', models.CharField(max_length=20)),
('nombre', models.CharField(max_length=20)),
('abreviatura', models.CharField(max_length=5)),
('estado', models.BooleanField(default=True)),
('fecha_creacion', models.DateTimeField(verbose_name='Date creatio')),
('fecha_modificacion', models.DateTimeField(verbose_name='Date update')),
],
),
migrations.CreateModel(
name='Ciudades',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('codigo', models.CharField(max_length=20)),
('nombre', models.CharField(max_length=20)),
('abreviatura', models.CharField(max_length=5)),
('estado', models.BooleanField(default=True)),
('fecha_creacion', models.DateTimeField(verbose_name='Date creatio')),
('fecha_modificacion', models.DateTimeField(verbose_name='Date update')),
('id_pais', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='afiliados.paises')),
],
),
migrations.CreateModel(
name='Afiliados',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombres', models.CharField(max_length=25)),
('apellidos', models.CharField(max_length=25)),
('numero_movil', models.BigIntegerField()),
('direccion', models.CharField(max_length=100)),
('email', models.CharField(max_length=70)),
('estado', models.BooleanField(default=True)),
('fecha_creacion', models.DateTimeField(verbose_name='Date creatio')),
('fecha_modificacion', models.DateTimeField(verbose_name='Date update')),
('id_ciudad', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='afiliados.ciudades')),
],
),
]
app/__init__.py | csud-reservation/flask-backend
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_mail import Mail
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from config import config
from flask_debugtoolbar import DebugToolbarExtension
from flask_login import LoginManager, UserMixin, login_user, logout_user, login_required
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
# api_manager = APIManager(flask_sqlalchemy_db=db)
toolbar = DebugToolbarExtension()
lm = LoginManager()
lm.login_view = 'main.login'
def create_app(config_name, models={}):
app = Flask(__name__)
lm.init_app(app)
User = models['User']
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
    # REST API initialization ==> does not work ...
# api_manager.create_api(models['User'], methods=['GET', 'DELETE'])
# api_manager.init_app(app)
    # debug toolbar initialization
# toolbar.init_app(app)
# Blueprint main
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .csudadmin import csudadmin as csudadmin_blueprint
app.register_blueprint(csudadmin_blueprint, url_prefix='/csudadmin')
from .sendmail import sendmail as sendmail_blueprint
app.register_blueprint(sendmail_blueprint, url_prefix='/sendmail')
return app
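# Usage sketch (added for illustration): the factory receives the User model so
# it can wire up flask-login's user loader; module path and config name below
# are hypothetical:
#
#   from app.models import User
#   app = create_app('default', models={'User': User})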
src/catcher/handlers/smtp.py | gavin-anders/callback-catcher
'''
Created on 15 Sep 2017
@author: gavin
test STARTTLS with openssl s_client -connect 127.0.0.1:25 -starttls smtp
'''
from .basehandler import TcpHandler
from catcher.settings import SSL_KEY, SSL_CERT
import ssl
import os
import base64
class smtp(TcpHandler):
NAME = "SMTP"
    DESCRIPTION = '''Another basic mail server. Records AUTH LOGIN and AUTH PLAIN credentials to secrets.'''
CONFIG = {
'hostname': 'catcher.pentestlabs.co.uk',
}
def __init__(self, *args):
'''
Constructor
'''
self.session = True
TcpHandler.__init__(self, *args)
def base_handle(self):
self.send_response('220 {} ESMTP CallbackCatcher service ready\r\n'.format(self.hostname), encoding='utf-8')
while self.session is True:
data = self.handle_request().decode('utf-8')
if data:
line = data.rstrip()
try:
if line.startswith('HELO'):
self.set_fingerprint()
self._HELO(line.replace('HELO', '').strip())
elif line.startswith('EHLO'):
self.set_fingerprint()
self._EHLO(line.replace('EHLO', '').strip())
elif line.startswith('STARTTLS'):
self._STARTTLS()
elif line.startswith('MAIL FROM'):
self._MAIL_FROM()
elif line.startswith('RCPT TO'):
self._RCPT_TO()
elif line.startswith('DATA'):
self._DATA()
elif line.startswith('AUTH PLAIN'):
self._AUTH_PLAIN(line.replace('AUTH PLAIN', '').strip())
elif line.startswith('AUTH LOGIN'):
self._AUTH_LOGIN()
elif line.startswith('QUIT'):
self._QUIT()
except Exception as e:
raise
self.session = False
else:
self.session = False
def _HELO(self, param=""):
resp = '220 Hello {} pleased to meet you\r\n'.format(param)
self.send_response(resp.encode())
def _EHLO(self, param=None):
resp = '250 Hello {}\r\n250 STARTTLS\r\n'.format(param)
self.send_response(resp.encode())
def _STARTTLS(self):
self.send_response(b'220 Ready to start TLS\r\n')
self.request = ssl.wrap_socket(self.request, keyfile=SSL_KEY, certfile=SSL_CERT, server_side=True)
def _MAIL_FROM(self, param=""):
self.send_response(b'250 Ok\r\n')
def _RCPT_TO(self, param=None):
self.send_response(b'250 Ok\r\n')
def _DATA(self):
while True:
data = self.handle_request()
if data.startswith(b'.'):
self.send_response(b'250 Ok\r\n')
break
def _AUTH_PLAIN(self, param=""):
if param == "":
self.send_response(b'334\r\n')
param = self.handle_request()
credsline = base64.b64decode(param)
creds = credsline.split(b"\0")
if len(creds) == 3:
self.add_secret("SMTP Identity", creds[0])
self.add_secret("SMTP Username", creds[1])
self.add_secret("SMTP Password", creds[2])
else:
self.add_secret("SMTP Username", creds[0])
self.add_secret("SMTP Password", creds[1])
self.send_response(b'235 Authentication successful\r\n')
def _AUTH_LOGIN(self):
self.send_response(b'334 VXNlcm5hbWU6\r\n')
username = self.handle_request()
self.add_secret("SMTP Username", base64.b64decode(username.strip()))
self.send_response(b'334 UGFzc3dvcmQ6\r\n')
password = self.handle_request()
self.add_secret("SMTP Password", base64.b64decode(password.strip()))
self.send_response(b'235 Authentication successful\r\n')
def _QUIT(self):
self.send_response(b'221 Bye\r\n')
self.session = False
biothings-hub/files/nde-hub/utils/pmid_helper.py | NIAID-Data-Ecosystem/nde-crawlers
# Helper file to batch call pmids to get citations and funding
# Helpful links to documentation of Biopython Package for writing this file
# https://biopython.org/DIST/docs/tutorial/Tutorial.html#sec162
# https://biopython.org/docs/1.76/api/Bio.Entrez.html
# https://www.nlm.nih.gov/bsd/mms/medlineelements.html
# https://dataguide.nlm.nih.gov/eutilities/utilities.html#efetch
import os
import orjson
import yaml
import time
from .date import add_date
from Bio import Entrez
from Bio import Medline
from datetime import datetime
from typing import Optional, List, Iterable, Dict
from itertools import islice
from config import GEO_API_KEY, GEO_EMAIL
import logging
logger = logging.getLogger('nde-logger')
def get_pub_date(date: str):
"""helper method to solve the problem transforming dates such as "2000 Spring" into date().isoformat dates
https://www.nlm.nih.gov/bsd/mms/medlineelements.html#dp
Returns:
An isoformatted date depending on context:
Seasons use metrological start
Winter: December 1
Spring: March 1
Summer: June 1
Fall: September 1
Dates with Y/M/D-D take only the beginning day
Dates with only Y/M or Y/M-M take the first day of that month
Dates with only Y or Y-Y take first day of that year
TODO: Not important since only one instance so far but fix edge case "2016 11-12"
"""
months = ["jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep", "oct", "nov", "dec"]
seasons = {"spring": " mar 1", "summer": " jun 1", "fall": " sep 1", "winter": " dec 1"}
s_date = date.lower().split()
date_len = len(s_date)
# if length is 1 can either be year or year-year
if date_len == 1:
return datetime.strptime(s_date[0].split('-')[0], '%Y').date().isoformat()
# if length is 2 can either be year season or year month or year month-month
elif date_len == 2:
if s_date[1][:3] in months:
return datetime.strptime(s_date[0] + ' ' + s_date[1][:3], '%Y %b').date().isoformat()
elif season := seasons.get(s_date[1]):
return datetime.strptime(s_date[0] + season, '%Y %b %d').date().isoformat()
else:
logger.warning("Need to update isoformat transformation: %s", date)
return None
# if length is 3 should be year month day or year month day-day
elif date_len == 3:
return datetime.strptime(s_date[0] + ' ' + s_date[1] + ' ' + s_date[2].split('-')[0], '%Y %b %d').date().isoformat()
# exception case there are quite a few entries with this case "2020 Jan - Feb"
elif date_len == 4:
if s_date[1] in months and s_date[3] in months and s_date[2] == "-":
return datetime.strptime(s_date[0] + ' ' + s_date[1], '%Y %b').date().isoformat()
else:
logger.warning("Need to update isoformat transformation %s", date)
else:
logger.warning("Need to update isoformat transformation: %s", date)
return None
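# Examples (added for illustration) of the parsing rules above:
#   get_pub_date('2005')          -> '2005-01-01'
#   get_pub_date('2003 Jul')      -> '2003-07-01'
#   get_pub_date('2016 Nov 9-11') -> '2016-11-09'
#   get_pub_date('2000 Spring')   -> '2000-03-01'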
# TODO: add a retry decorator to this function if we keep getting IncompleteRead errors
def batch_get_pmid_eutils(pmids: Iterable[str], email: str, api_key: Optional[str] = None) -> Dict:
"""Use pmid to retrieve both citation and funding info in batch
:param pmids: A list of PubMed PMIDs
:param api_key: API Key from NCBI to access E-utilities
:return: A dictionary containing the pmids which hold citations and funding.
"""
# probably dont need this line. Using python package, should work both ways.
# if pmids is str:
# warnings.warn(f"Got str:{pmids} as parameter, expecting an Iterable of str", RuntimeWarning)
# set up Entrez variables. Email is required.
Entrez.email = email
if api_key:
Entrez.api_key = api_key
ct_fd = {}
# api query to parse citations
handle = Entrez.efetch(db="pubmed", id=pmids, rettype="medline", retmode="text")
records = Medline.parse(handle)
    # TODO: this can raise an IncompleteRead error; need to rerun the api query if this happens
records = list(records)
for record in records:
citation = {}
# rename keys
if name := record.get('TI'):
citation['name'] = name
if pmid := record.get('PMID'):
citation['pmid'] = pmid
citation['identifier'] = 'PMID:' + pmid
citation['url'] = 'https://pubmed.ncbi.nlm.nih.gov/' + pmid + '/'
if journal_name := record.get('JT'):
citation['journalName'] = journal_name
if date_published := record.get('DP'):
if date := get_pub_date(date_published):
citation['datePublished'] = date
# make an empty list if there is some kind of author
if record.get('AU') or record.get('CN'):
citation['author'] = []
if authors := record.get('AU'):
for author in authors:
citation['author'].append({'@type': 'Person', 'name': author})
if corp_authors := record.get('CN'):
for corp_author in corp_authors:
citation['author'].append({'@type': 'Organization', 'name': corp_author})
# put citation in dictionary
ct_fd[pmid] = {'citation': citation}
# throttle request rates, NCBI says up to 10 requests per second with API Key, 3/s without.
if api_key:
time.sleep(0.1)
else:
time.sleep(0.35)
# get the funding using xml file because of problems parsing the medline file
# https://www.nlm.nih.gov/bsd/mms/medlineelements.html#gr
handle = Entrez.efetch(db="pubmed", id=pmids, retmode="xml")
# Have to use Entrez.read() instead of Entrez.parse(). The problem is discussed here: https://github.com/biopython/biopython/issues/1027
    # TODO: this can raise an IncompleteRead error; need to rerun the api query if this happens
records = Entrez.read(handle)
records = records['PubmedArticle']
funding = []
for record in records:
if grants := record['MedlineCitation']['Article'].get('GrantList'):
for grant in grants:
fund = {}
if grant_id := grant.get('GrantID'):
fund['identifier'] = grant_id
if agency := grant.get('Agency'):
fund['funder'] = {'@type': 'Organization', 'name': agency}
funding.append(fund)
if pmid := record['MedlineCitation'].get('PMID'):
if funding:
ct_fd[pmid]['funding'] = funding
funding = []
return ct_fd
def load_pmid_ctfd(data_folder):
""" Takes 1000 documents at a time and batch queries all of the pmids in the documents to improve runtime.
Loads the citation and funding into the documents along with uploading the date field.
Returns: A generator with the completed documents
"""
    # a temporary solution: make one bigger batch api call instead of multiple smaller calls in the crawler to improve runtime
    # TODO: figure out how to make a batch api call in the crawler, preferably
api_key = GEO_API_KEY
email = GEO_EMAIL
# if no access to config file comment out above and enter your own email
# email = <EMAIL>
with open(os.path.join(data_folder, 'data.ndjson'), 'rb') as f:
while True:
# pmid list for batch query
pmid_list = []
# docs to yield for each batch query
doc_list = []
# to make batch api query take the next 1000 docs and collect all the pmids
next_n_lines = list(islice(f, 1000))
if not next_n_lines:
break
for line in next_n_lines:
doc = orjson.loads(line)
doc_list.append(doc)
if pmids := doc.get('pmids'):
pmid_list += [pmid.strip() for pmid in pmids.split(',')]
# check if there are any pmids before requesting
if pmid_list:
# batch request
eutils_info = batch_get_pmid_eutils(pmid_list, email, api_key)
# throttle request rates, NCBI says up to 10 requests per second with API Key, 3/s without.
if api_key:
time.sleep(0.1)
else:
time.sleep(0.35)
# add in the citation and funding to each doc in doc_list and yield
for rec in doc_list:
if pmids := rec.pop('pmids', None):
pmids = [pmid.strip() for pmid in pmids.split(',')]
                    # fixes issue where pmid numbers under 10 are read as 04 instead of 4
pmids = [ele.lstrip('0') for ele in pmids]
for pmid in pmids:
if not eutils_info.get(pmid):
logger.info('There is an issue with this pmid. PMID: %s, rec_id: %s', pmid, rec['_id'])
# this fixes the error where there is no pmid
# https://www.ncbi.nlm.nih.gov/geo/query/acc.cgi?acc=GSE41964
if eutils_info.get(pmid):
if citation := eutils_info[pmid].get('citation'):
if rec.get('citation'):
rec['citation'].append(citation)
else:
rec['citation'] = [citation]
if funding := eutils_info[pmid].get('funding'):
if rec.get('funding'):
rec['funding'] += funding
else:
rec['funding'] = funding
                # add the date transformation before yielding
rec = add_date(rec)
yield rec
vimfiles/bundle/vim-python/submodules/pylint/tests/functional/n/non/non_ascii_name.py | ciskoinch8/vimrc
""" Tests for non-ascii-name checker. """
áéíóú = 4444 # [non-ascii-name]
def úóíéá(): # [non-ascii-name]
"""yo"""
octoprint_octopod/layer_notifications.py | mnebelung/OctoPrint-OctoPod
from .alerts import Alerts
from .base_notification import BaseNotification
class LayerNotifications(BaseNotification):
def __init__(self, logger, ifttt_alerts):
BaseNotification.__init__(self, logger)
self._ifttt_alerts = ifttt_alerts
self._alerts = Alerts(self._logger)
self.reset_layers()
def get_layers(self):
""" Returns list of layers for which notifications will be sent """
return self._layers
def reset_layers(self):
""" Reset list of layers for which notifications will be sent """
self._layers = [] # Variable used for tracking layer numbers to notify. Values are strings
def add_layer(self, layer):
""" Add a new layer to the list of layers for which notifications will be sent """
self._layers.append(layer)
def remove_layer(self, layer):
""" Remove layer from list of layers for which notifications will be sent """
self._layers.remove(layer)
def layer_changed(self, settings, current_layer):
first_layers = settings.get_int(['notify_first_X_layers'])
if current_layer in self._layers:
# User specified they wanted to get a notification when print started printing at this layer
self.__send__layer_notification(settings, current_layer)
elif first_layers > 0 and 1 < int(current_layer) <= first_layers + 1:
# Send a picture for first X layers (only send once layer was printed)
self.__send__layer_notification(settings, current_layer)
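	# Example (added for illustration): with notify_first_X_layers = 2, the
	# elif branch above fires for current_layer values 2 and 3, i.e. once each
	# of the first two layers has finished printing.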
def __send__layer_notification(self, settings, current_layer):
# Send IFTTT Notifications
self._ifttt_alerts.fire_event(settings, "layer-changed", current_layer)
server_url = settings.get(["server_url"])
if not server_url or not server_url.strip():
# No APNS server has been defined so do nothing
return -1
tokens = settings.get(["tokens"])
if len(tokens) == 0:
# No iOS devices were registered so skip notification
return -2
# Get a snapshot of the camera
image = None
try:
hflip = settings.get(["webcam_flipH"])
vflip = settings.get(["webcam_flipV"])
rotate = settings.get(["webcam_rotate90"])
camera_url = settings.get(["camera_snapshot_url"])
if camera_url and camera_url.strip():
image = self.image(camera_url, hflip, vflip, rotate)
except:
self._logger.info("Could not load image from url")
# For each registered token we will send a push notification
# We do it individually since 'printerID' is included so that
# iOS app can properly render local notification with
# proper printer name
used_tokens = []
last_result = None
for token in tokens:
apns_token = token["apnsToken"]
printerID = token["printerID"]
# Ignore tokens that already received the notification
# This is the case when the same OctoPrint instance is added twice
# on the iOS app. Usually one for local address and one for public address
if apns_token in used_tokens:
continue
# Keep track of tokens that received a notification
used_tokens.append(apns_token)
if 'printerName' in token and token["printerName"] is not None:
# We can send non-silent notifications (the new way) so notifications are rendered even if user
# killed the app
printer_name = token["printerName"]
language_code = token["languageCode"]
url = server_url + '/v1/push_printer'
last_result = self._alerts.send_alert_code(settings, language_code, apns_token, url, printer_name,
"layer_changed", None, image, current_layer)
return last_result
meiduo_mall/meiduo_mall/apps/users/serializers.py | Goldx4/meiduo-demo
from rest_framework import serializers, status
from django_redis import get_redis_connection
from rest_framework_jwt.settings import api_settings
import logging
import re
from .models import User
from .utils import get_user_by_account
from celery_tasks.email.tasks import send_verify_email
logger = logging.getLogger('django')
class CreateUserSerializer(serializers.ModelSerializer):
"""
创建用户序列化器
"""
    password2 = serializers.CharField(label='Confirm password', required=True, allow_null=False, allow_blank=False, write_only=True)
    sms_code = serializers.CharField(label='SMS verification code', required=True, allow_null=False, allow_blank=False, write_only=True)
    allow = serializers.CharField(label='Agree to terms', required=True, allow_null=False, allow_blank=False, write_only=True)
    token = serializers.CharField(label='Login state token', read_only=True)  # extra token field
def validate_mobile(self, value):
"""验证手机号"""
if not re.match(r'^1[3-9]\d{9}$', value):
raise serializers.ValidationError('手机号格式错误')
return value
def validate_allow(self, value):
"""检验用户是否同意协议"""
if value != 'true':
raise serializers.ValidationError('请同意用户协议')
return value
def validate(self, attrs):
        # check that the two passwords match
        if attrs['password'] != attrs['password2']:
            raise serializers.ValidationError('The two passwords do not match')
        # check the SMS verification code
redis_conn = get_redis_connection('verify_codes')
mobile = attrs['mobile']
real_sms_code = redis_conn.get('sms_%s' % mobile)
        if real_sms_code is None:
            raise serializers.ValidationError('Invalid SMS verification code')
        if attrs['sms_code'] != real_sms_code.decode():
            raise serializers.ValidationError('Incorrect SMS verification code')
return attrs
def create(self, validated_data):
"""
        Create the user
        """
        # remove attributes that do not exist on the database model
del validated_data['password2']
del validated_data['sms_code']
del validated_data['allow']
user = super().create(validated_data)
        # use django's auth system to hash the password
user.set_password(validated_data['password'])
user.save()
        # additionally generate a token recording the login state
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
payload = jwt_payload_handler(user)
token = jwt_encode_handler(payload)
user.token = token
return user
class Meta:
model = User
        # this serializer handles both input and output, so it must list every field used
        fields = ('id', 'username', 'password', 'password2', 'sms_code', 'mobile', 'allow', 'token')
extra_kwargs = {
            'id': {'read_only': True},  # read_only=True means the field is only used for output
'username': {
'min_length': 5,
'max_length': 20,
'error_messages': {
                    'min_length': 'Username must be 5-20 characters',
                    'max_length': 'Username must be 5-20 characters',
}
},
'password': {
'write_only': True,
'min_length': 8,
'max_length': 20,
'error_messages': {
                    'min_length': 'Password must be 8-20 characters',
                    'max_length': 'Password must be 8-20 characters',
}
}
}
class CheckSMSCodeSerializer(serializers.Serializer):
"""
    Check the SMS verification code
"""
sms_code = serializers.CharField(min_length=6, max_length=6)
def validate_sms_code(self, value):
account = self.context['view'].kwargs['account']
        # fetch the user
user = get_user_by_account(account)
if user is None:
            raise serializers.ValidationError('User does not exist')
self.user = user
        # fetch the real verification code from redis
redis_conn = get_redis_connection('verify_codes')
real_sms_code = redis_conn.get('sms_%s' % user.mobile)
        if real_sms_code is None:
            raise serializers.ValidationError('Invalid SMS verification code')
if value != real_sms_code.decode():
            raise serializers.ValidationError('Incorrect SMS verification code')
return value
class ResetPasswordSerializer(serializers.ModelSerializer):
    password2 = serializers.CharField(label='Confirm password', write_only=True)
    access_token = serializers.CharField(label='Operation token', write_only=True)
class Meta:
model = User
fields = ('id', 'password', 'password2', 'access_token')
extra_kwargs = {
'password': {
'write_only': True,
'min_length': 8,
'max_length': 20,
'error_messages': {
                    'min_length': 'Password must be 8-20 characters',
                    'max_length': 'Password must be 8-20 characters',
}
}
}
def validate(self, attrs):
"""
        Validate the data
"""
if attrs['password'] != attrs['password2']:
            raise serializers.ValidationError('The two passwords do not match')
allow = User.check_set_password_token(self.context['view'].kwargs['pk'], attrs['access_token'])
if not allow:
            raise serializers.ValidationError('Invalid access token')
return attrs
def update(self, instance, validated_data):
"""
        Update the password
"""
instance.set_password(validated_data['password'])
instance.save()
return instance
class UserDetailSerializer(serializers.ModelSerializer):
"""
    User detail serializer
"""
class Meta:
model = User
fields = ['id', 'username', 'mobile', 'email', 'email_active']
class EmailSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'email')
extra_kwargs = {
'email': {
'required': True
}
}
def update(self, instance, validated_data):
email = validated_data['email']
instance.email = email
instance.save()
        # generate the verification link
verify_url = instance.generate_verify_email_url()
        # send the verification email
send_verify_email.delay(email, verify_url)
return instance
main.py | yuxuibbs/Academic-Games-Player
import os
from flask import Flask
from MrLing import ling_blueprint
from MrBasicWff import wff_blueprint
app = Flask(__name__)
app.config['SECRET_KEY'] = 'random string'
app.debug = True
app.register_blueprint(ling_blueprint)
app.register_blueprint(wff_blueprint)
@app.route("/")
def home():
return '<p><a href="/MrLing">Ling</a></p><p><a href="/MrWff">Wff</a></p>'
if __name__ == '__main__':
app.run(host = '0.0.0.0', port = int(os.getenv('PORT', 5000)))
tests/regressions/python/596_list_arrays.py | frzfrsfra4/phylanx
# Copyright (c) 2018 <NAME>
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from phylanx import Phylanx
import numpy as np
@Phylanx
def foo(x, y):
return [x, y]
x = np.array([1, 2])
y = np.array([3, 4])
result = foo(x, y)
assert((result[0] == x).all() and (result[1] == y).all())
SurveyBackend.py | BlkPingu/SurveyBackend
from flask import Flask, request, redirect, url_for, Response, jsonify
from flask_cors import CORS
from flask_sqlalchemy import SQLAlchemy
import jwt
import datetime
from flask_cors import CORS,cross_origin
import os
import errno  # needed for the errno.EEXIST checks below
import json
from werkzeug.utils import secure_filename
from werkzeug.datastructures import FileStorage
app = Flask(__name__)
if app.config['ENV'] == 'production':
app.config['DEBUG'] = False
app.config['SECRET_KEY'] = '<KEY>'
app.config['SOUNDFILE_UPLOAD'] = '/srv/data/soundfiles'
app.config['METADATA_UPLOAD'] = '/srv/data/database'
else:
app.config['DEBUG'] = True
app.config['SECRET_KEY'] = 'secret'
app.config['SOUNDFILE_UPLOAD'] = '/Users/Tobias/Desktop/Bachelorarbeit/Code/SurveyPage/data/soundfiles'
app.config['METADATA_UPLOAD'] = '/Users/Tobias/Desktop/Bachelorarbeit/Code/SurveyPage/data/database'
print(f'ENV is set to: {app.config["ENV"]}')
CORS(app, supports_credentials=True)
meta_keys = ['gender', 'age', 'nativeLanguage', 'dateTime', 'sessionID']
def encode_auth_token(payload):
signed_token = {
'exp': datetime.datetime.utcnow() + datetime.timedelta(hours=3),
'iat': datetime.datetime.utcnow(),
'sessionID':payload['sessionID']
}
try:
return jwt.encode(
signed_token,
app.config.get('SECRET_KEY'),
algorithm='HS256'
)
except Exception as e:
return e
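# Sketch (added for illustration): a token minted above verifies with the same
# secret and algorithm:
#
#   payload = jwt.decode(token, app.config['SECRET_KEY'], algorithms=['HS256'])
#   session_id = payload['sessionID']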
def validate_json_payload(meta_keys, metadata):
for key in meta_keys:
if key not in metadata:
return False
return True
def save_meta(metadata):
directory = os.path.join(app.config['METADATA_UPLOAD'])
session_id = metadata['sessionID']
if not os.path.exists(directory):
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
metadata = {
'uuid': metadata['sessionID'],
'age_range': metadata['age'],
'request': metadata['nativeLanguage'],
'gender': metadata['gender']
}
file_path = os.path.join(directory, secure_filename(session_id + '.json'))
with open(file_path, 'w') as fp:
json.dump(metadata, fp)
def get_token(bearer_token):
PREFIX = 'Bearer '
if not bearer_token.startswith(PREFIX):
return None
return bearer_token[len(PREFIX):]
@app.route('/xyz', methods=['GET'])
def generic():
return {'msg':'hello i am flask'}, 200
@app.route('/meta', methods=['PUT'])
def meta():
metadata = request.json
    if request.method == 'PUT' and validate_json_payload(meta_keys, metadata):
save_meta(metadata)
token = encode_auth_token(metadata).decode()
return {'token':token}, 200
else:
return {'msg': 'Missing keys or wrong request method'}, 403
def save_wav(directory, filename, file):
if not os.path.exists(directory):
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
file_path = os.path.join(directory, secure_filename(filename + '.wav'))
file.save(file_path)
def validate_token(request):
try:
bearer_token = request.headers['Authorization']
token_string = get_token(bearer_token)
token_bytes = token_string.encode()
        payload = jwt.decode(token_bytes, app.config['SECRET_KEY'], algorithms=['HS256'])
return payload
except:
print('token validation went wrong')
return None
@app.route('/audio', methods=['POST'])
def soundfile():
token_data = validate_token(request)
file = request.files['audio']
if request.method == 'POST' and token_data is not None and file is not None:
filename = request.form['filename']
foldername = request.form['foldername']
directory = os.path.join(app.config.get('SOUNDFILE_UPLOAD'),foldername)
save_wav(directory, filename, file)
resp = Response('Soundfile submit worked')
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
else:
return {'msg':'Wrong request method or bad token'}, 403
tests/test_commands.py | smartpr/django-future
from datetime import timedelta
from django.core.management import call_command
from django.test import TestCase
from django.utils import timezone
from django_future.jobs import schedule_job
from django_future.models import ScheduledJob
class RunScheduledJobsCommandTest(TestCase):
def setUp(self):
self.schedule_at = timezone.now() - timedelta(days=1)
self.jobs = [
schedule_job(self.schedule_at, 'math.pow', args=(2, 3)),
schedule_job(self.schedule_at, 'math.pow', args=(5, 2))
]
def test_cmd_noargs(self):
"""
Test invocation of command with no arguments. Ensure the scheduled jobs
are marked as completed.
"""
self.assertEqual(
2,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_SCHEDULED).count()
)
call_command('runscheduledjobs')
self.assertEqual(
2,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_COMPLETE).count()
)
def test_cmd_delete_completed(self):
"""
Test invocation of command with '-d' argument to delete completed jobs.
Ensure the scheduled jobs are removed after.
"""
self.assertEqual(
2,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_SCHEDULED).count()
)
call_command('runscheduledjobs', '-d')
self.assertEqual(0, ScheduledJob.objects.count())
def test_cmd_ignore_errors(self):
"""
Test invocation of command with '-i' argument to keep processing jobs
even if a job fails. Ensure the non-failing jobs are marked as
completed and the error job is marked as failed.
"""
schedule_at = self.schedule_at - timedelta(days=1)
error_job = schedule_job(schedule_at, 'math.funky_error')
self.assertEqual(
3,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_SCHEDULED).count()
)
call_command('runscheduledjobs', '-i')
error_job.refresh_from_db()
self.assertEqual(error_job.status, ScheduledJob.STATUS_FAILED)
self.assertEqual(
2,
ScheduledJob.objects.filter(
status=ScheduledJob.STATUS_COMPLETE).count()
)
train/train_image_classifier.py | open-climate-tech/firecam
# Copyright 2020 Open Climate Tech Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Training code using Keras for TF2
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
from firecam.lib import settings
from firecam.lib import collect_args
from firecam.lib import goog_helper
from firecam.lib import tf_helper
import glob
import tensorflow as tf
from tensorflow import keras
import logging
import datetime
def _parse_function(example_proto):
"""
Function for converting TFRecordDataset to uncompressed image pixels + labels
:return:
"""
feature_description = {
'image/class/label': tf.io.FixedLenFeature([], tf.int64, default_value=0),
'image/encoded': tf.io.FixedLenFeature([], tf.string, default_value=''),
'image/format': tf.io.FixedLenFeature([], tf.string, default_value=''),
'image/height': tf.io.FixedLenFeature([], tf.int64, default_value=0),
'image/width': tf.io.FixedLenFeature([], tf.int64, default_value=0),
}
# Parse the input `tf.Example` proto using the dictionary above.
example = tf.io.parse_single_example(example_proto, feature_description)
image = tf.image.decode_jpeg(example['image/encoded'], channels=3, dct_method='INTEGER_ACCURATE')
    # Resize images in the training set because they are apparently rectangular much of the time
if example['image/height'] != 299 or example['image/width'] != 299:
image = tf.image.resize(tf.reshape(image, [example['image/height'], example['image/width'], 3]), [299, 299])
image = tf.cast(image, tf.uint8)
    image = tf.reshape(image, [299, 299, 3])  # weird workaround because decode_jpeg doesn't set a static shape
label = tf.one_hot(example['image/class/label'], depth=2)
image = (tf.cast(image, tf.float32) - 128) / 128.0
return [image, label]
class LRTensorBoard(keras.callbacks.TensorBoard):
def __init__(self, log_dir, **kwargs): # add other arguments to __init__ if you need
super().__init__(log_dir=log_dir, **kwargs)
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
logs.update({'lr': keras.backend.eval(self.model.optimizer.lr)})
super().on_epoch_end(epoch, logs)
def main():
reqArgs = [
["i", "inputDir", "directory containing TFRecord files"],
["o", "outputDir", "directory to write out checkpoints and tensorboard logs"],
["a", "algorithm", "adam, nadam, or rmsprop"],
]
optArgs = [
["m", "maxEpochs", "(optional) max number of epochs (default 1000)", int],
["r", "resumeModel", "resume training from given saved model"],
["s", "startEpoch", "epoch to resume from (epoch from resumeModel)"],
["t", "stepsPerEpoch", "(optional) number of steps per epoch", int],
["v", "valStepsPerEpoch", "(optional) number of validation steps per epoch", int],
]
args = collect_args.collectArgs(reqArgs, optionalArgs=optArgs, parentParsers=[goog_helper.getParentParser()])
batch_size = 64
max_epochs = args.maxEpochs if args.maxEpochs else 1000
steps_per_epoch = args.stepsPerEpoch if args.stepsPerEpoch else 2000
    overshoot_epochs = 30  # stop training once validation loss hasn't decreased for this many epochs
val_steps = args.valStepsPerEpoch if args.valStepsPerEpoch else 200
#val_steps only needed for now because of a bug in tf2.0, which should be fixed in next version
    # TODO: either set this to (number of validation examples / batch size), i.e. figure out num validation examples,
    # or upgrade to TF2.1 when it's ready and automatically go through the whole set
train_filenames = glob.glob(os.path.join(args.inputDir, 'firecam_train_*.tfrecord'))
val_filenames = glob.glob(os.path.join(args.inputDir, 'firecam_validation_*.tfrecord'))
logging.warning('Found %d training files, and %d validation files', len(train_filenames), len(val_filenames))
if (len(train_filenames) == 0) or (len(val_filenames) == 0):
logging.error('Could not find data in %s', args.inputDir)
exit(1)
raw_dataset_train = tf.data.TFRecordDataset(train_filenames)
raw_dataset_val = tf.data.TFRecordDataset(val_filenames)
dataset_train = raw_dataset_train.map(_parse_function).repeat(max_epochs * steps_per_epoch).shuffle(batch_size * 5).batch(batch_size)
dataset_val = raw_dataset_val.map(_parse_function).repeat().batch(batch_size)
if args.resumeModel:
inception = tf_helper.loadModel(args.resumeModel)
assert int(args.startEpoch) > 0
initial_epoch = int(args.startEpoch)
else:
inception = keras.applications.inception_v3.InceptionV3(weights=None, include_top=True, input_tensor=None,
classes=2)
initial_epoch = 0
if args.algorithm == "adam":
# optimizer = tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, amsgrad=False)
optimizer = tf.keras.optimizers.Adam(decay=1e-06, amsgrad=True)
elif args.algorithm == "nadam":
optimizer = tf.keras.optimizers.Nadam()
elif args.algorithm == "rmsprop":
optimizer = tf.keras.optimizers.RMSprop(decay=1e-06)
else:
logging.error('Unsupported algo %s', args.algorithm)
exit(1)
inception.compile(optimizer=optimizer, loss=tf.keras.losses.BinaryCrossentropy(), metrics=['accuracy'])
logdir = os.path.join(args.outputDir, datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
callbacks = [keras.callbacks.EarlyStopping(monitor='val_loss', patience=overshoot_epochs),
keras.callbacks.ModelCheckpoint(filepath=os.path.join(args.outputDir, 'model_{epoch}'),
monitor='val_loss', save_best_only=True),
LRTensorBoard(log_dir=logdir)]
logging.warning('Start training')
inception.fit(dataset_train, validation_data=dataset_val,
epochs=max_epochs, initial_epoch=initial_epoch,
steps_per_epoch=steps_per_epoch, validation_steps=val_steps,
callbacks=callbacks)
logging.warning('Done training')
if __name__ == "__main__":
main()
docs/conf.py | arthurazs/dotapatch
#!/usr/bin/env python3
# coding: utf-8
# dotapatch documentation build configuration file, created by
# sphinx-quickstart on Thu Jan 4 11:19:55 2018.
from os.path import abspath
from sys import path
import sphinx_rtd_theme
path.insert(0, abspath('..'))
needs_sphinx = '1.6.5'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'dotapatch'
copyright = '2016, <NAME>'
author = '<NAME>'
version = '2.4'
release = '2.4.4'
pygments_style = 'sphinx'
intersphinx_mapping = {'https://docs.python.org/3': None}
htmlhelp_basename = 'dotapatchdoc'
latex_documents = [
(master_doc, 'dotapatch.tex', 'dotapatch Documentation',
'<NAME>', 'manual'),
]
man_pages = [
(master_doc, 'dotapatch', 'dotapatch Documentation',
[author], 1)
]
exclude_patterns = []
language = None
gettext_compact = False
texinfo_documents = [
(master_doc, 'dotapatch', 'dotapatch Documentation',
author, 'dotapatch', 'Parses Dota 2 text patches to html format.',
'Miscellaneous'),
]
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'logo_only': True,
'display_version': False
}
home/migrations/0008_delete_addpost.py | sandipsandal/Just-A-Thought
# Generated by Django 3.0.9 on 2020-08-23 12:27
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('home', '0007_auto_20200823_1340'),
]
operations = [
migrations.DeleteModel(
name='addPost',
),
]
| 1.335938 | 1 |
lists_dictionary/Bread Factory.py | vasetousa/Python-fundamentals | 0 | 12798074 | events = input().split("|")
energy = 100
coins = 100
for event in events:
    order, value = event.split("-")
    value = int(value)
    if order == "rest":
        # energy is capped at 100, so only count what was actually gained
        gained = min(value, 100 - energy)
        energy += gained
        print(f"You gained {gained} energy.")
        print(f"Current energy: {energy}.")
elif order == "order":
if energy >= 30:
energy -= 30
coins += value
print(f"You earned {value} coins.")
else:
energy += 50
print(f"You had to rest!")
else:
if coins > 0:
coins -= value
if coins > 0:
print(f"You bought {order}.")
else:
print(f"Closed! Cannot afford {order}.")
break
if coins > 0:
print(f"""Day completed!
Coins: {coins}
Energy: {energy}""")
# sample input: rest-2|order-10|eggs-100|rest-10
run.py | John-5014/Password-locker | 0 | 12798075 | #!/usr/bin/env python3.6
import secrets
import string
import pyperclip
from user import User, Credentials
def create_user(fname, lname, password):
'''
Function to create a new user account
'''
new_user = User(fname, lname, password)
return new_user
def save_user(user):
'''
function to save user
'''
User.save_user(user)
def verify_user(first_name, password):
'''
    function that verifies the account
'''
checking_user = Credentials.check_user(first_name, password)
return checking_user
def generate_password():
'''
function that generates passwords automatically
'''
N = 12
gen_password = ''.join(secrets.choice(
string.ascii_lowercase + string.digits)for i in range(N))
return gen_password
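# Note: 12 characters drawn from the 36 lowercase letters and digits give
# about log2(36**12) ~= 62 bits of entropy.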
def create_credential(user_name,site_name,account_name,password):
'''
function that creates new credential
'''
new_credential = Credentials(user_name,site_name,account_name,password)
return new_credential
def save_credential(credential):
'''
function to save a credential
'''
Credentials.save_credentials(credential)
def display_credential(user_name):
'''
Function to display credentials saved by a user
'''
return Credentials.display_credentials(user_name)
def copy_credentials(site_name):
'''
Function to copy a credentials details to the clipboard
'''
return Credentials.copy_credentials(site_name)
def delete_credential(credential):
'''
function that deletes credential account
'''
credential.delete_credential()
def find_by_site_name(site_name):
'''
function that finds a credential by name
'''
return Credentials.find_by_site_name(site_name)
def main():
print(' ')
print('Hi! Welcome to Password-locker.')
while True:
print(' ')
print("-"*30)
print(
'Use the guide codes to choose: \n ca-Create an Account \n li-Log In \n ex-Exit')
short_code = input('Enter a choice: ').lower().strip()
if short_code == 'ex':
break
elif short_code == 'ca':
print("-"*30)
print(' ')
print('To create a new account:')
first_name = input('Enter your first name- \n').strip()
last_name = input('Enter your last name- \n').strip()
password = input('Enter your password \n').strip()
save_user(create_user(first_name, last_name, password))
print(" ")
print(
f'New Account Created for: {first_name} {last_name} with password: {password}')
elif short_code == 'li':
print("-"*30)
print(' ')
            print('To log in, enter your account details: ')
user_name = input('Enter your first name- ').strip()
password = str(input('Enter your password - '))
user_exists = verify_user(user_name, password)
if user_exists == user_name:
print(
f'Welcome {user_name}.Please choose an option to continue.')
print(' ')
while True:
print("-"*30)
print('Navigation codes: \n cc-Create a Credential \n dc-Display Credentials \n dl-Delete \n copy-Copy Password \n ex-Exit')
short_code = input('Enter a choice: ').lower().strip()
print("-"*30)
if short_code == 'ex':
print(" ")
print(f'Goodbye {user_name}')
break
elif short_code == 'cc':
print(' ')
print('Enter your credential details:')
site_name = input('Enter the site\'s name-').strip()
account_name = input('Enter your account\'s name-').strip()
while True:
print(' ')
print("-"*30)
print('Please choose an option for entering a password: \n ep-enter existing password \n gp-generate a password \n ex-exit')
psw_choice = input('Enter an option: ').lower().strip()
print("-"*30)
if psw_choice == 'ep':
print(" ")
password = input('Enter your password: ').strip()
break
elif psw_choice == 'gp':
password = generate_password()
break
elif psw_choice == 'ex':
break
else:
print('Wrong option entered. Retry.')
save_credential(create_credential(user_name,site_name,account_name,password))
print(' ')
print(
f'Credential created: Site Name:{site_name} - Account Name: {account_name} - Password:{password}')
print(' ')
elif short_code == 'dc':
print(' ')
if display_credential(user_name):
print('Here is a list of all your credentials')
print(' ')
for credential in display_credential(user_name):
                                print(f'Site Name: {credential.site_name} - Account Name: {credential.account_name} - Password: {credential.password}')
print(' ')
elif short_code == 'copy':
print(' ')
chosen_site = input('Enter the site name for the credential password to copy: ')
copy_credentials(chosen_site)
print('')
elif short_code == 'dl':
print('Enter the name of site to be deleted')
delete_account = input()
if find_by_site_name(delete_account):
delete_credential(find_by_site_name(delete_account))
else:
print('Sorry account not matched')
else:
print('Wrong option entered. Retry')
else:
print("-"*30)
print(' ')
                print('Sorry, wrong option entered. Retry')
if __name__ == '__main__':
    main()
| 3.703125 | 4 |
pygreppy/core.py | skvoter/pygreppy | 4 | 12798076 | <reponame>skvoter/pygreppy<filename>pygreppy/core.py
from pygments.lexers.python import PythonLexer
from pygments import highlight
from pygments.formatters.terminal import TerminalFormatter
import shutil
import re
import ast
import sys
import os
from . import codegen
def usage():
print('''
    usage: pygreppy [-c <depth> | -cl | -f | -re] (optional) pattern file
file should be python script formatted with pep8 guidelines
optional arguments:
-h show this page
-c [depth] show context of the string.
-cl show class containing string (ignored if no class)
-f show function containing string (ignored if no function)
-re pattern is regexp
Note: only one option (except -re) can be specified at a time.
''')
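# Example invocations (the file name here is just a placeholder):
#   pygreppy -c 2 "def parse" core.py      # matches with 2 levels of context
#   pygreppy -re -cl "match_\w+" core.py   # regexp search, show enclosing class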
def get_numlines(node):
return len(codegen.to_source(node).splitlines())
def mhighlight(num, string, pattern, regexp):
if pattern in string or (regexp is True and re.search(pattern, string)):
pass
else:
pattern = None
if not pattern:
return ('\033[1;90m{:0>2}\033[0;0m {}\n'.format(
num,
highlight(
string,
PythonLexer(),
TerminalFormatter()).strip('\n'),
))
else:
if regexp is False:
resstring = '\033[1;90m{:0>2}\033[0;0m '.format(num)
splits = string.split(pattern)
for split in splits:
resstring += highlight(
split, PythonLexer(), TerminalFormatter()
).strip('\n')
if split != splits[-1]:
resstring += '\033[1;91m{}\033[0;0m'.format(
pattern.strip('\n'))
return resstring + '\n'
else:
resstring = '\033[1;90m{:0>2}\033[0;0m '.format(num)
patt = re.compile(pattern)
splits = patt.split(string)
found = patt.findall(string)
for i in range(len(found)):
resstring += highlight(
splits[i], PythonLexer(), TerminalFormatter()
).strip('\n')
resstring += '\033[1;91m{}\033[0;0m'.format(
found[i].strip('\n'))
resstring += highlight(
splits[-1], PythonLexer(), TerminalFormatter()
).strip('\n')
return resstring + '\n'
class Args:
def __init__(self, args):
self.context = False
self.cl = False
self.func = False
self.depth = 0
self.path = None
self.pattern = None
self.regexp = False
self.args = self.parseargs(args)
def parseargs(self, args):
if len(args) == 0:
return 1
        # args is consumed front-to-back: parsed items are removed below, so
        # re-read the current head of the list on every pass
        for arg in args:
            arg = args[0]
if arg == '-re':
self.regexp = True
args.remove(arg)
if arg == '-cl':
self.cl = True
args.remove(arg)
elif arg == '-h':
return 1
elif arg == '-c':
self.context = True
if arg != args[-1] and args[args.index(arg)+1].isdigit():
self.depth = int(args[args.index(arg)+1])
args.remove(args[args.index(arg)+1])
else:
self.depth = 1
args.remove(arg)
elif arg == '-f':
self.func = True
args.remove(arg)
if not os.path.exists(args[-1]):
print('Error: no file {}'.format(args[-1]))
return 1
elif not args[-1].endswith('.py'):
            with open(args[-1]) as f:
                line = f.readline()
                if not ('#!' in line and 'python' in line):
                    print('Error: {} is not a python script'.format(args[-1]))
                    return 1
self.path = args[-1]
args.remove(args[-1])
if len(args) != 0:
self.pattern = args[-1]
args.remove(args[-1])
else:
print('Error: there is no search pattern')
return 1
if len(args) != 0:
print(args)
for arg in args:
print('{} is not recognized option'.format(arg))
return 1
if len(
[arg for arg in [self.cl, self.func, self.context] if arg is True]
) > 1:
print('Error: Only one of -cl, -c, -f can be used at a time')
return 1
return 0
def find_match_node(results, num, root, args):
for node in ast.walk(root):
for child in ast.iter_child_nodes(node):
if args.regexp:
pattern = re.compile(args.pattern)
if pattern.search(codegen.to_source(child)) and \
hasattr(child, 'lineno') and \
child.lineno == num:
return child
else:
if args.pattern in codegen.to_source(child) and \
hasattr(child, 'lineno') and \
child.lineno == num:
return child
def get_end(node):
ints = []
ints.append(node.lineno)
for child in ast.walk(node):
for ch in ast.iter_child_nodes(child):
if hasattr(ch, 'lineno'):
ints.append(ch.lineno)
return max(ints)
def class_parse(args):
if args.cl:
objsearch = 'ClassDef'
elif args.func:
objsearch = 'FunctionDef'
with open(args.path) as f:
content = f.read()
results = []
added_lines = []
root = ast.parse(content)
for node in ast.walk(root):
for child in ast.iter_child_nodes(node):
child.parent = node
# search for pattern
for num, line in enumerate(content.splitlines(), 1):
if (args.pattern in line or (
args.regexp and re.search(args.pattern, line)
)) and (num, line) not in added_lines:
pattern_node = find_match_node(results, num, root, args)
if pattern_node is None:
continue
else:
while objsearch not in str(pattern_node):
if pattern_node.parent is root:
break
pattern_node = pattern_node.parent
curres = []
if objsearch in str(pattern_node):
first = pattern_node.lineno
end = get_end(pattern_node)
curres += [
mhighlight(
num,
line,
args.pattern,
args.regexp
) for num, line in
enumerate(content.splitlines()[first-1:end], first)
]
added_lines += [
(num, line) for num, line in enumerate(
content.splitlines()[first-1:end], first
)]
results.append(''.join(curres))
return results
def context_parse(args):
with open(args.path) as f:
content = f.read()
results = []
added_lines = []
root = ast.parse(content)
for node in ast.walk(root):
for child in ast.iter_child_nodes(node):
child.parent = node
# search for pattern
for num, line in enumerate(content.splitlines(), 1):
if (args.pattern in line or (
args.regexp and re.search(args.pattern, line)
)) and (num, line) not in added_lines:
pattern_node = find_match_node(results, num, root, args)
if pattern_node is None:
continue
top_root = False
if pattern_node.parent is root:
top_root = True
else:
for i in range(args.depth):
pattern_node = pattern_node.parent
if pattern_node.parent is root:
top_root = True
break
first = pattern_node.lineno
end = get_end(pattern_node)
curres = []
if top_root is True:
if pattern_node is not root.body[0]:
top = root.body[root.body.index(pattern_node)-1]
first_top = top.lineno
end_top = get_end(top)
if end_top - first_top < 3:
curres += [
mhighlight(
num,
line,
args.pattern,
args.regexp
) for num, line in enumerate(
content.splitlines()[first_top-1:end_top],
first_top
)]
added_lines += [
(num, line) for num, line in enumerate(
content.splitlines()[first_top-1:end_top],
first_top
)]
first = end_top+1
else:
curres += [('\033[1;90m{:0>2}'
+ ' +--{} lines: {}---\033[0;0m\n').format(
first_top,
end_top - first_top,
content.splitlines()[first_top-1]
)]
first = end_top+1
curres += [
mhighlight(
num,
line,
args.pattern,
args.regexp
) for num, line in
enumerate(content.splitlines()[first-1:end], first)
]
added_lines += [
(num, line) for num, line in enumerate(
content.splitlines()[first-1:end], first
)]
if pattern_node is not root.body[-1]:
bottom = root.body[root.body.index(pattern_node)+1]
first_bottom = bottom.lineno
if first_bottom - end > 1:
added_lines += content.splitlines()[
end:first_bottom]
end_bottom = get_end(bottom)
if end_bottom - first_bottom < 3:
curres += [
mhighlight(
num,
line,
args.pattern,
args.regexp
) for num, line in enumerate(
content.splitlines()[
first_bottom-1:end_bottom],
first_bottom
)]
added_lines += [
(num, line) for num, line in enumerate(
content.splitlines()[
first_bottom-1:end_bottom],
first_bottom
)]
else:
curres += [('\033[1;90m{:0>2}'
+ ' +--{} lines: {}---\033[0;0m\n').format(
first_bottom,
end_bottom - first_bottom,
content.splitlines()[first_bottom-1]
)]
else:
curres += [
mhighlight(
num,
line,
args.pattern,
args.regexp
) for num, line in
enumerate(content.splitlines()[first-1:end], first)
]
added_lines += [
(num, line) for num, line in enumerate(
content.splitlines()[first-1:end], first
)]
results.append(''.join(curres))
return results
def parse(args):
results = []
if args.cl or args.func:
results = class_parse(args)
elif args.context:
results = context_parse(args)
else:
with open(args.path) as f:
ln = 0
curres = ''
for num, line in enumerate(f, 1):
if args.pattern in line or (
re.search(args.pattern, line) and args.regexp
):
a = mhighlight(num, line, args.pattern, args.regexp)
if num == ln + 1:
curres += a
else:
results.append(curres)
curres = a
ln = num
results.append(curres)
if ''.join(results) == '':
results = []
return ('\n\033[1;90m'
+ '='*shutil.get_terminal_size()[0]
+ '\033[0;0m\n\n').join(results)
def main():
args = Args(sys.argv[1:])
if args.args == 1:
usage()
sys.exit(1)
else:
print('\n' + parse(args))
if __name__ == '__main__':
main()
| 2.609375 | 3 |
Web_Scraping/PluMA/PluMA/items.py | movingpictures83/PluMA-GUI | 0 | 12798077 | <reponame>movingpictures83/PluMA-GUI
# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
class PlumaItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
Name = scrapy.Field()
Description = scrapy.Field()
Language = scrapy.Field()
Link = scrapy.Field()
Time = scrapy.Field()
#pass
| 2.234375 | 2 |
scripts/jitl_test.py | cmusatyalab/dronesearch | 8 | 12798078 | <filename>scripts/jitl_test.py
# Drone Search
#
# A computer vision pipeline for live video search on drone video
# feeds leveraging edge servers.
#
# Copyright (C) 2018-2019 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import fire
import matplotlib
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from sklearn.metrics import accuracy_score
from sklearn.svm import SVC
from sklearn.utils import resample
from jitl_data import _split_imageid, _get_videoid
def max_pooling_on_dataset(jit_data_file,
output_file,
mp_span_secs=1.0,
mp_stride_secs=0.5):
"""
Run max pooling on a dataset's JITL input file and produce a smaller one
:param dataset:
:param base_dir:
:param jit_data_file:
:param mp_span_secs:
:param mp_stride_secs:
:return:
"""
# if not isinstance(mp_span_secs, list):
# mp_span_secs = [mp_span_secs]
# if not isinstance(mp_stride_secs, list):
# mp_stride_secs = [mp_stride_secs]
df = pd.read_pickle(jit_data_file)
print("Found {} images in total.".format(df.shape[0]))
df['videoid'] = df['imageid'].map(lambda x: _get_videoid(x))
df['frameid'] = df['imageid'].map(lambda x: _split_imageid(x)[1]).astype(int)
df['grid_x'] = df['imageid'].map(lambda x: _split_imageid(x)[2]).astype(int)
df['grid_y'] = df['imageid'].map(lambda x: _split_imageid(x)[3]).astype(int)
span_frms = int(mp_span_secs * 30)
stride_frms = int(mp_stride_secs * 30)
print("Max pooling span frames={}, stride frame={}".format(span_frms, stride_frms))
downsample_df = pd.DataFrame()
video_id_grp = df.groupby(['videoid'])
for video_id, video_rows in video_id_grp:
print("Found {} frames for video {}".format(video_rows.shape[0], video_id))
count = 0
gridxy_grp = video_rows.groupby(['grid_x', 'grid_y'])
for gridxy, inputs in gridxy_grp:
inputs = inputs.sort_values(by=['frameid'])
last_sent_imageid = None
min_frm = inputs['frameid'].min()
max_frm = inputs['frameid'].max()
for pool_start_frm in range(min_frm, max_frm + 1, stride_frms):
# print("Max pooling between frame {} and {}".format(pool_start_frm, pool_start_frm + span_frms))
pool_images = inputs[(inputs['frameid'] >= pool_start_frm)
& (inputs['frameid'] < pool_start_frm + span_frms)]
dnn_scores = np.array(pool_images['prediction_proba'].tolist())[:, 1]
assert dnn_scores.ndim == 1
max_ind = np.argmax(dnn_scores)
imageid = pool_images['imageid'].iloc[max_ind]
if imageid != last_sent_imageid:
# print("sampled image: {}".format(imageid))
downsample_df = downsample_df.append(pool_images.iloc[max_ind], ignore_index=True)
last_sent_imageid = imageid
count += 1
print("Sample {}/{} frames from video {}".format(count, video_rows.shape[0], video_id))
downsample_df = downsample_df.sort_values(by=['imageid'])
print("After max pooling, we have {} images".format(downsample_df.shape[0]))
print("Sample 10 rows.")
    print(downsample_df.iloc[::downsample_df.shape[0] // 10])
if output_file:
downsample_df.to_pickle(output_file)
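# Since this module dispatches through fire.Fire(), the function above can be
# run from the shell; the file paths below are hypothetical placeholders:
#   python jitl_test.py max_pooling_on_dataset \
#       --jit_data_file jitl_input.pkl --output_file jitl_pooled.pkl \
#       --mp_span_secs 1.0 --mp_stride_secs 0.5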
class StealPositiveFromVideoEnd(object):
def __init__(self, df, video_id, tail=10):
super(StealPositiveFromVideoEnd, self).__init__()
df = df[(df['videoid'] == video_id) & (df['label'].astype(bool))]
df = df.sort_values(by=['frameid'])
# print("Will steal these positives:")
# print(df.iloc[-tail:])
self.features = np.array(df.iloc[-tail:]['feature'].tolist())
def __call__(self, n=5):
samples = resample(self.features, n_samples=n, replace=False)
return samples
def eval_jit_svm_on_dataset(jit_data_file,
output_file,
dnn_threshold_input_file=None,
dnn_cutoff_start=80, # dnn threshold for passing early discard filter
dnn_cutoff_end=100,
dnn_cutoff_step=2,
delta_t=10, # train every 10s
activate_threshold=5, # min number of examples per class needed to train the SVM,
# otherwise passthrough; training set is ever expanding;
svm_cutoff=0.3):
if not isinstance(svm_cutoff, list):
svm_cutoff = [svm_cutoff]
if dnn_threshold_input_file is not None:
print("Warning: Dnn_threshold_input_file is specified! Ignoring dnn_cutoff_start, dnn_cutoff_end, "
"dnn_cutoff_step variable.")
dnn_cutoff_list = np.load(dnn_threshold_input_file)
dnn_cutoff_list.sort()
print("loaded dnn cutoff threshold is: {}".format(dnn_cutoff_list))
else:
dnn_cutoff_list = [0.01 * x for x in range(dnn_cutoff_start, dnn_cutoff_end, dnn_cutoff_step)]
print("Generated dnn cutoff list: {}".format(dnn_cutoff_list))
df = pd.read_pickle(jit_data_file)
    print(df.iloc[:5])
df['videoid'] = df['imageid'].map(lambda x: _get_videoid(x))
df['frameid'] = df['imageid'].map(lambda imgid: _split_imageid(imgid)[1]).astype(int)
    print(df.iloc[:5])
unique_videos = set(df['videoid'].tolist())
result_df = pd.DataFrame()
for video_id in unique_videos:
for dnn_cutoff in dnn_cutoff_list:
for svm_cut in svm_cutoff:
print("-" * 50)
print("Emulating video '{}' w/ DNN cutoff {}, SVM cutoff {}".format(video_id, dnn_cutoff, svm_cut))
print("-" * 50)
rv = run_once_jit_svm_on_video(df, video_id,
dnn_cutoff=dnn_cutoff,
delta_t=delta_t,
activate_threshold=activate_threshold,
svm_cutoff=svm_cut)
result_df = result_df.append(rv, ignore_index=True)
    print(result_df)
if output_file:
result_df.to_pickle(output_file)
def run_once_jit_svm_on_video(df_in, video_id, dnn_cutoff,
delta_t=10, activate_threshold=5, svm_cutoff=0.3, augment_positive=False):
# filter df by video id
df = df_in[df_in['videoid'] == video_id]
# print df.iloc[0]
dnn_proba = np.array(df['prediction_proba'].tolist())
assert dnn_proba.shape[1] == 2, dnn_proba.shape
dnn_fire = (dnn_proba[:, 1] >= dnn_cutoff)
dnn_fire_index = np.nonzero(dnn_fire)[0]
# filter df by dnn positive
if len(dnn_fire_index) == 0:
print("DNN fires nothing. Stop")
return None
print("DNN fires {} frames".format(len(dnn_fire_index)))
df = df.iloc[dnn_fire_index]
X = np.array(df['feature'].tolist())
y = np.array(df['label'].tolist())
imageids = df['imageid'].tolist()
max_frame = df['frameid'].max()
print("Max frame ID: {}".format(max_frame))
X_jit = X[:0] # cumulative, used to train JIT SVM
y_jit = y[:0] # same
pred_jit = y[:0] # store SVM's prediction on DNN's positive frames
clf = None
positive_supply = StealPositiveFromVideoEnd(df_in, video_id)
for t in range(0, int(1 + (max_frame / 30)), delta_t):
# extract data within this window (from t to t+delta_t)
# print("time window {} to {}".format(t, t + delta_t))
df_test = df[(df['frameid'] >= t * 30) & (df['frameid'] < (t + delta_t) * 30)]
# print df_test.iloc[:5]
if df_test.empty:
continue
X_test = np.array(df_test['feature'].tolist())
y_test = np.array(df_test['label'])
assert X_test.shape[1] == 1024, str(X_test.shape)
# Do we have an SVM to use?
if clf:
smv_proba = clf.predict_proba(X_test)
predictions = (smv_proba[:, 1] >= svm_cutoff)
# predictions = clf.predict(X_test)
else: # pass-through DNN's prediction (DNN says all are positive)
predictions = np.ones_like(y_test)
# write out to global prediction and cumulative JIT training set
pred_jit = np.append(pred_jit, predictions, axis=0)
sent_mask = (predictions == 1)
X_jit = np.append(X_jit, X_test[sent_mask], axis=0)
y_jit = np.append(y_jit, y_test[sent_mask], axis=0)
assert X_jit.shape[1] == 1024
# print("Found {} frames in window. Sent {}.".format(y_test.shape[0], np.count_nonzero(sent_mask)))
# now, shall we (re-)train a new SVM?
print("JIT training set {}/{}".format(y_jit.shape[0], np.count_nonzero(y_jit)))
if np.count_nonzero(sent_mask) > 0 \
and np.count_nonzero(y_jit == 0) >= activate_threshold \
and (augment_positive or np.count_nonzero(y_jit == 1) >= activate_threshold):
print("retraining")
if not np.count_nonzero(y_jit == 1) >= activate_threshold and augment_positive:
print("Houston, we don't have enough TPs.")
augment_pos_X = positive_supply(n=activate_threshold)
X_jit_train = np.append(X_jit, augment_pos_X, axis=0)
y_jit_train = np.append(y_jit, np.ones((augment_pos_X.shape[0],)), axis=0)
assert X_jit_train.shape[0] == y_jit_train.shape[0]
print("Now you have {}/{}".format(y_jit_train.shape[0], np.count_nonzero(y_jit_train)))
else:
X_jit_train = X_jit
y_jit_train = y_jit
# use grid search to improve SVM accuracy
# tuned_params = {
# 'C': [1],
# 'kernel': ['linear'],
# }
# clf = GridSearchCV(SVC(random_state=43,
# max_iter=100,
# class_weight='balanced',
# probability=True,
# verbose=True),
# param_grid=tuned_params,
# n_jobs=4,
# refit=True)
clf = SVC(random_state=42,
kernel='linear',
class_weight='balanced',
probability=True,
verbose=0)
clf.fit(X_jit_train, y_jit_train)
else:
print("NOT retraining. Nothing new or not enough positives.")
pass
assert y.shape == pred_jit.shape, "y: {}, pred_jit: {}".format(y.shape, pred_jit.shape)
assert y_jit.shape[0] == np.count_nonzero(pred_jit)
jit_accuracy = accuracy_score(y, pred_jit)
print("JIT accuracy: {}".format(jit_accuracy))
res_df = pd.DataFrame().append({'delta_t': delta_t,
'imageids': imageids,
'dnn_cutoff': dnn_cutoff,
'jitl_accuracy': jit_accuracy,
'jitl_samples': y_jit.shape[0],
'jitl_prediction': pred_jit,
'label': y,
'video_id': video_id,
'svm_cutoff': svm_cutoff},
ignore_index=True)
    print(res_df)
return res_df
def plot_frame_accuracy(input_file, savefig=None):
df = pd.read_csv(
input_file,
sep=r'\s+'
)
    print(df)
    xlabels = [int(c) for c in df.columns[2:]]
    for _, row in df.iterrows():
        x = xlabels
        y = np.array(row[2:])
        print(x, y)
        plt.plot(xlabels, np.array(row[2:]), '-o')
plt.axis([0, max(xlabels), 0, 1.0])
# plt.show()
if savefig:
plt.savefig(savefig)
def plot_rolling_svm(file_glob, savefig=None):
paths = glob.glob(file_glob)
df = pd.DataFrame()
for path in paths:
print("Parsing {}".format(path))
df1 = pd.read_csv(path, sep=' ')
df = df.append(df1, ignore_index=True)
    print(df)
# df = df[df['delta_t'] < 90]
video_ids = set(df['video_id'])
fig, ax1 = plt.subplots()
    ax1.set_xlabel(r"$\Delta t$ (sec)")
ax1.set_ylabel("Frame accuracy")
ax1.set_ylim((0, 1))
ax2 = ax1.twinx()
ax2.set_ylabel("# frames transmitted")
# plt.xticks(sorted(set(df['delta_t'])), sorted(set(df['delta_t'])))
for vid in video_ids:
df_video = df[df['video_id'] == vid]
# accuracy
ax1.plot(df_video['delta_t'], df_video['jit_accuracy'], '-')
ax2.plot(df_video['delta_t'], df_video['jit_samples'], '--')
if savefig:
plt.savefig(savefig)
plt.show()
if __name__ == '__main__':
fire.Fire()
| 2.515625 | 3 |
appfl/algorithm/algorithm.py | markxiao/APPFL | 0 | 12798079 | <reponame>markxiao/APPFL<gh_stars>0
import copy
"""This implements a base class for server."""
class BaseServer:
def __init__(self, model, num_clients, device):
self.model = model
self.num_clients = num_clients
self.device = device
# update global model
def update(self):
raise NotImplementedError
def get_model(self):
return copy.deepcopy(self.model)
"""This implements a base class for client."""
class BaseClient:
def __init__(self, id, model, optimizer, optimizer_args, dataloader, device):
self.id = id
self.model = model
self.optimizer = optimizer
self.optimizer_args = optimizer_args
self.dataloader = dataloader
self.device = device
# update local model
def update(self):
raise NotImplementedError
def get_model(self):
return self.model.state_dict()
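# A minimal sketch of how these bases are meant to be subclassed; the
# FedAvg-style averaging below is illustrative only, assuming PyTorch state
# dicts of float tensors, and is not part of this module's actual API.
class ExampleFedAvgServer(BaseServer):
    def update(self, local_states):
        # average the clients' model parameters element-wise
        global_state = copy.deepcopy(local_states[0])
        for key in global_state.keys():
            for state in local_states[1:]:
                global_state[key] = global_state[key] + state[key]
            global_state[key] = global_state[key] / len(local_states)
        self.model.load_state_dict(global_state)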
| 2.734375 | 3 |
Round #585 (Div 2)/C.py | julianferres/Codeforces | 4 | 12798080 | SI = lambda : input()
from collections import Counter
n = int(input())
a = SI()
b = SI()
def solve(n,a,b):
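    """Equalize a and b using swaps of a[i] with b[j].

    Each mismatched position is either ('a' in a, 'b' in b) or the reverse;
    two mismatches of the same kind are fixed by one swap. If both kinds
    occur an odd number of times, one extra same-index swap first converts
    a mismatch from one kind to the other. Any odd character count makes
    the task impossible (-1).
    """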
d = Counter(a)+Counter(b)
for i in d:
if(d[i]&1):
print(-1)
return
    newa = []
    newb = []
for i in range(n):
if(a[i]!=b[i]):
newa.append((a[i],i))
newb.append((b[i],i))
a,b = newa,newb
aux = len(a)
if(aux==0):
print(0)
return
canta = 0
for i in a:
if(i[0]=='a'):
canta+=1
if(canta&1):
print(len(a)//2+1)
print(a[0][1]+1,a[0][1]+1)
a[0],b[0] = b[0],a[0]
else:
print(len(a)//2)
lastA,lastB = -1,-1
for i in range(aux):
if(a[i][0]=='a'):
if(lastA==-1):
lastA=a[i][1]
else:
print(lastA+1,a[i][1]+1)
lastA=-1
else:
if(lastB==-1):
lastB=a[i][1]
else:
print(lastB+1,a[i][1]+1)
lastB=-1
solve(n,a,b)
| 3.03125 | 3 |
9_datetime.py | Alirezak2n/Python-Tutorials | 0 | 12798081 | <gh_stars>0
import datetime
import pytz
today = datetime.date.today()
print(today)
birthday = datetime.date(1994,12,19)
print(birthday)
days_since_birth = today - birthday
print(days_since_birth)
tdelta = datetime.timedelta(days=10)
print(today + tdelta)
print(today.month)
print(today.weekday())
print(datetime.time(7,2,20,15))
#datetime.date(y,m,d)
#datetime.time(h,m,s,ms)
#datetime.datetime(y,m,d,h,m,s,ms)
hour_delta = datetime.timedelta(hours=10)
print(datetime.datetime.now() + hour_delta)
datetime_today=datetime.datetime.now(tz=pytz.utc)
print(datetime_today.astimezone(pytz.timezone('US/Pacific')))
datetime_pacific = datetime_today.astimezone(pytz.timezone('US/Pacific'))
for timezones in pytz.all_timezones:
print(timezones)
# strftime: format a datetime into a string (string-format-time)
print(datetime_pacific.strftime('%B %d, %Y'))
# strptime: parse a string into a datetime (string-parse-time)
print(datetime.datetime.strptime('march 09,2019','%B %d,%Y'))
print(repr(datetime.datetime.strptime('march 09,2019','%B %d,%Y')))
| 2.875 | 3 |
00.py | takeoverjp/nlp100 | 0 | 12798082 | #!/usr/bin/python3
s="stressed"
rev=""
for c in s:
rev=c+rev
print(rev)
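# equivalently, using slice notation: print(s[::-1])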
| 3.015625 | 3 |
necrobot/match/matchutil.py | saturnin55/necrobot | 0 | 12798083 | import datetime
import discord
import pytz
from necrobot.botbase import server, discordutil
from necrobot.database import matchdb, racedb
from necrobot.util import console, timestr, writechannel, strutil, rtmputil
from necrobot.botbase.necrobot import Necrobot
from necrobot.gsheet.matchgsheetinfo import MatchGSheetInfo
from necrobot.match.match import Match
from necrobot.match.matchinfo import MatchInfo
from necrobot.match.matchroom import MatchRoom
from necrobot.race.raceinfo import RaceInfo
from necrobot.user.necrouser import NecroUser
from necrobot.config import Config
match_library = {}
# noinspection PyIncorrectDocstring
async def make_match(*args, register=False, **kwargs) -> Match:
"""Create a Match object. There should be no need to call this directly; use matchutil.make_match instead,
since this needs to interact with the database.
Parameters
----------
racer_1_id: int
The DB user ID of the first racer.
racer_2_id: int
The DB user ID of the second racer.
max_races: int
The maximum number of races this match can be. (If is_best_of is True, then the match is a best of
max_races; otherwise, the match is just repeating max_races.)
match_id: int
The DB unique ID of this match.
suggested_time: datetime.datetime
The time the match is suggested for. If no tzinfo, UTC is assumed.
r1_confirmed: bool
Whether the first racer has confirmed the match time.
r2_confirmed: bool
Whether the second racer has confirmed the match time.
r1_unconfirmed: bool
Whether the first racer wishes to unconfirm the match time.
r2_unconfirmed: bool
Whether the second racer wishes to unconfirm the match time.
match_info: MatchInfo
The types of races to be run in this match.
cawmentator_id: int
The DB unique ID of the cawmentator for this match.
sheet_id: int
The sheetID of the worksheet the match was created from, if any.
register: bool
Whether to register the match in the database.
Returns
---------
Match
The created match.
"""
if 'match_id' in kwargs and kwargs['match_id'] in match_library:
return match_library[kwargs['match_id']]
match = Match(*args, commit_fn=matchdb.write_match, **kwargs)
await match.initialize()
if register:
await match.commit()
match_library[match.match_id] = match
return match
async def get_match_from_id(match_id: int) -> Match or None:
"""Get a match object from its DB unique ID.
Parameters
----------
match_id: int
        The database ID of the match.
Returns
-------
Optional[Match]
The match found, if any.
"""
if match_id is None:
return None
if match_id in match_library:
return match_library[match_id]
raw_data = await matchdb.get_raw_match_data(match_id)
if raw_data is not None:
return await make_match_from_raw_db_data(raw_data)
else:
return None
def get_matchroom_name(match: Match) -> str:
"""Get a new unique channel name corresponding to the match.
Parameters
----------
match: Match
The match whose info determines the name.
Returns
-------
str
The name of the channel.
"""
name_prefix = match.matchroom_name
cut_length = len(name_prefix) + 1
largest_postfix = 1
found = False
for channel in server.server.channels:
if channel.name.startswith(name_prefix):
found = True
try:
val = int(channel.name[cut_length:])
largest_postfix = max(largest_postfix, val)
except ValueError:
pass
return name_prefix if not found else '{0}-{1}'.format(name_prefix, largest_postfix + 1)
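# For example, if matchroom_name is 'alice-vs-bob' and channels 'alice-vs-bob'
# and 'alice-vs-bob-2' already exist, this returns 'alice-vs-bob-3'.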
async def get_upcoming_and_current() -> list:
"""
Returns
-------
list[Match]
A list of all upcoming and ongoing matches, in order.
"""
matches = []
for row in await matchdb.get_channeled_matches_raw_data(must_be_scheduled=True, order_by_time=True):
channel_id = int(row[13]) if row[13] is not None else None
if channel_id is not None:
channel = server.find_channel(channel_id=channel_id)
if channel is not None:
match = await make_match_from_raw_db_data(row=row)
if match.suggested_time is None:
console.warning('Found match object {} has no suggested time.'.format(repr(match)))
continue
if match.suggested_time > pytz.utc.localize(datetime.datetime.utcnow()):
matches.append(match)
else:
match_room = Necrobot().get_bot_channel(channel)
if match_room is not None and await match_room.during_races():
matches.append(match)
return matches
async def get_matches_with_channels(racer: NecroUser = None) -> list:
"""
Parameters
----------
racer: NecroUser
The racer to find channels for. If None, finds all channeled matches.
Returns
-------
list[Match]
A list of all Matches that have associated channels on the server featuring the specified racer.
"""
matches = []
if racer is not None:
raw_data = await matchdb.get_channeled_matches_raw_data(
must_be_scheduled=False, order_by_time=False, racer_id=racer.user_id
)
else:
raw_data = await matchdb.get_channeled_matches_raw_data(must_be_scheduled=False, order_by_time=False)
for row in raw_data:
channel_id = int(row[13])
channel = server.find_channel(channel_id=channel_id)
if channel is not None:
match = await make_match_from_raw_db_data(row=row)
matches.append(match)
else:
console.warning('Found Match with channel {0}, but couldn\'t find this channel.'.format(channel_id))
return matches
async def delete_all_match_channels(log=False, completed_only=False) -> None:
"""Delete all match channels from the server.
Parameters
----------
log: bool
If True, the channel text will be written to a log file before deletion.
completed_only: bool
If True, will only find completed matches.
"""
for row in await matchdb.get_channeled_matches_raw_data():
match_id = int(row[0])
channel_id = int(row[13])
channel = server.find_channel(channel_id=channel_id)
delete_this = True
if channel is not None:
if completed_only:
match_room = Necrobot().get_bot_channel(channel)
if match_room is None or not match_room.played_all_races:
delete_this = False
if delete_this:
if log:
await writechannel.write_channel(
client=server.client,
channel=channel,
outfile_name='{0}-{1}'.format(match_id, channel.name)
)
await server.client.delete_channel(channel)
if delete_this:
await matchdb.register_match_channel(match_id, None)
async def make_match_room(match: Match, register=False) -> MatchRoom or None:
"""Create a discord.Channel and a corresponding MatchRoom for the given Match.
Parameters
----------
match: Match
The Match to create a room for.
register: bool
If True, will register the Match in the database.
Returns
-------
Optional[MatchRoom]
The created room object.
"""
# Check to see the match is registered
if not match.is_registered:
if register:
await match.commit()
else:
console.warning('Tried to make a MatchRoom for an unregistered Match ({0}).'.format(match.matchroom_name))
return None
# Check to see if we already have the match channel
channel_id = match.channel_id
match_channel = server.find_channel(channel_id=channel_id) if channel_id is not None else None
# If we couldn't find the channel or it didn't exist, make a new one
if match_channel is None:
# Create permissions
deny_read = discord.PermissionOverwrite(read_messages=False)
permit_read = discord.PermissionOverwrite(read_messages=True)
racer_permissions = []
for racer in match.racers:
if racer.member is not None:
racer_permissions.append(discord.ChannelPermissions(target=racer.member, overwrite=permit_read))
# Make a channel for the room
# noinspection PyUnresolvedReferences
match_channel = await server.client.create_channel(
server.server,
get_matchroom_name(match),
discord.ChannelPermissions(target=server.server.default_role, overwrite=deny_read),
discord.ChannelPermissions(target=server.server.me, overwrite=permit_read),
*racer_permissions,
type=discord.ChannelType.text)
if match_channel is None:
console.warning('Failed to make a match channel.')
return None
# Put the match channel in the matches category
match_channel_category = server.find_channel(channel_name=Config.MATCH_CHANNEL_CATEGORY_NAME)
if match_channel_category is not None:
await discordutil.set_channel_category(channel=match_channel, category=match_channel_category)
# Make the actual RaceRoom and initialize it
match.set_channel_id(int(match_channel.id))
new_room = MatchRoom(match_discord_channel=match_channel, match=match)
Necrobot().register_bot_channel(match_channel, new_room)
await new_room.initialize()
return new_room
async def close_match_room(match: Match) -> None:
"""Close the discord.Channel corresponding to the Match, if any.
Parameters
----------
match: Match
The Match to close the channel for.
"""
if not match.is_registered:
console.warning('Trying to close the room for an unregistered match.')
return
channel_id = match.channel_id
channel = server.find_channel(channel_id=channel_id)
if channel is None:
        console.warning('Couldn\'t find channel with id {0} in close_match_room '
'(match_id={1}).'.format(channel_id, match.match_id))
return
await Necrobot().unregister_bot_channel(channel)
await server.client.delete_channel(channel)
match.set_channel_id(None)
async def get_nextrace_displaytext(match_list: list) -> str:
utcnow = pytz.utc.localize(datetime.datetime.utcnow())
if len(match_list) > 1:
display_text = 'Upcoming matches: \n'
else:
display_text = 'Next match: \n'
for match in match_list:
# noinspection PyUnresolvedReferences
display_text += '\N{BULLET} **{0}** - **{1}**'.format(
match.racer_1.display_name,
match.racer_2.display_name)
if match.suggested_time is None:
display_text += '\n'
continue
display_text += ': {0} \n'.format(timestr.timedelta_to_str(match.suggested_time - utcnow, punctuate=True))
match_cawmentator = await match.get_cawmentator()
if match_cawmentator is not None:
display_text += ' Cawmentary: <http://www.twitch.tv/{0}> \n'.format(match_cawmentator.twitch_name)
elif match.racer_1.twitch_name is not None and match.racer_2.twitch_name is not None:
display_text += ' Kadgar: {} \n'.format(
rtmputil.kadgar_link(match.racer_1.twitch_name, match.racer_2.twitch_name)
)
# display_text += ' RTMP: {} \n'.format(
# rtmputil.rtmp_link(match.racer_1.rtmp_name, match.racer_2.rtmp_name)
# )
display_text += '\nFull schedule: <https://condor.host/schedule>'
return display_text
async def delete_match(match_id: int) -> None:
await matchdb.delete_match(match_id=match_id)
if match_id in match_library:
del match_library[match_id]
async def make_match_from_raw_db_data(row: list) -> Match:
match_id = int(row[0])
if match_id in match_library:
return match_library[match_id]
match_info = MatchInfo(
race_info=await racedb.get_race_info_from_type_id(int(row[1])) if row[1] is not None else RaceInfo(),
ranked=bool(row[9]),
is_best_of=bool(row[10]),
max_races=int(row[11])
)
sheet_info = MatchGSheetInfo()
sheet_info.wks_id = row[14]
sheet_info.row = row[15]
new_match = Match(
commit_fn=matchdb.write_match,
match_id=match_id,
match_info=match_info,
racer_1_id=int(row[2]),
racer_2_id=int(row[3]),
suggested_time=row[4],
finish_time=row[16],
r1_confirmed=bool(row[5]),
r2_confirmed=bool(row[6]),
r1_unconfirmed=bool(row[7]),
r2_unconfirmed=bool(row[8]),
cawmentator_id=row[12],
channel_id=int(row[13]) if row[13] is not None else None,
gsheet_info=sheet_info
)
await new_match.initialize()
match_library[new_match.match_id] = new_match
return new_match
async def get_schedule_infotext():
utcnow = pytz.utc.localize(datetime.datetime.utcnow())
matches = await get_upcoming_and_current()
max_r1_len = 0
max_r2_len = 0
for match in matches:
max_r1_len = max(max_r1_len, len(strutil.tickless(match.racer_1.display_name)))
max_r2_len = max(max_r2_len, len(strutil.tickless(match.racer_2.display_name)))
schedule_text = '``` \nUpcoming matches: \n'
for match in matches:
if len(schedule_text) > 1800:
break
schedule_text += '{r1:>{w1}} v {r2:<{w2}} : '.format(
r1=strutil.tickless(match.racer_1.display_name),
w1=max_r1_len,
r2=strutil.tickless(match.racer_2.display_name),
w2=max_r2_len
)
if match.suggested_time - utcnow < datetime.timedelta(minutes=0):
schedule_text += 'Right now!'
else:
schedule_text += timestr.str_full_24h(match.suggested_time)
schedule_text += '\n'
schedule_text += '```'
return schedule_text
async def get_race_data(match: Match):
return await matchdb.get_match_race_data(match.match_id)
| 2.5 | 2 |
sljassbot/player/rl_player/model.py | andieder/pyschieberSLJassBot | 0 | 12798084 | import os
import os.path
from keras.layers import Dense, Flatten, Conv1D, Reshape
from keras.optimizers import Nadam
from keras.models import Sequential
from keras.models import load_model
from keras.regularizers import l2
from keras import backend as K
from keras.losses import mean_squared_error
from sljassbot.player.rl_player.input_handler import InputHandler
def huber_loss(a, b, in_keras=True):
error = a - b
quadratic_term = error * error / 2
linear_term = abs(error) - 1 / 2
use_linear_term = (abs(error) > 1.0)
if in_keras:
# Keras won't let us multiply floats by booleans, so we explicitly cast the booleans to floats
use_linear_term = K.cast(use_linear_term, 'float32')
return use_linear_term * linear_term + (1 - use_linear_term) * quadratic_term
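# Piecewise Huber-style loss: quadratic for |a - b| <= 1 and linear beyond,
# which bounds gradients and reduces sensitivity to outlier errors.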
'''
def build_model(model_path, learning_rate=0.01):
if os.path.exists(model_path):
# model = load_model(model_path, custom_objects={'huber_loss': huber_loss})
model = load_model(model_path)
print('Load existing model.')
else:
model = Sequential()
model.add(Dense(InputHandler.input_size * 2, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Reshape((InputHandler.input_size * 2, 1,), input_shape=(InputHandler.input_size * 2,)))
#model.add(Dense(InputHandler.input_size, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Conv1D(filters=50, kernel_size=18, strides=18, padding='same', activation='relu'))
model.add(Conv1D(filters=25, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Flatten())
model.add(Dense(InputHandler.input_size * 2, activation='relu', W_regularizer=l2(0.01)))
model.add(Dense(InputHandler.output_size, activation='linear'))
# optimizer = RMSprop(lr=0.00025, rho=0.95, epsilon=0.01)
optimizer = Nadam(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=None, schedule_decay=0.004)
# model.compile(loss=huber_loss, optimizer=optimizer)
model.compile(loss=mean_squared_error, optimizer=optimizer)
print('Create new model.')
return model
'''
# TODO: first 2 Conv1D layers, then 2 fully connected layers
def build_model(model_path, learning_rate=0.01):
if os.path.exists(model_path):
# model = load_model(model_path, custom_objects={'huber_loss': huber_loss})
model = load_model(model_path)
print('Load existing model.')
else:
model = Sequential()
model.add(Dense(InputHandler.input_size * 2, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Reshape((InputHandler.input_size * 2, 1,), input_shape=(InputHandler.input_size * 2,)))
#model.add(Dense(InputHandler.input_size, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Conv1D(filters=50, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=50, kernel_size=18, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=50, kernel_size=36, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=25, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Flatten())
model.add(Dense(InputHandler.input_size * 2, activation='relu', W_regularizer=l2(0.01)))
model.add(Dense(InputHandler.output_size, activation='linear'))
# optimizer = RMSprop(lr=0.00025, rho=0.95, epsilon=0.01)
optimizer = Nadam(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=None, schedule_decay=0.004)
# model.compile(loss=huber_loss, optimizer=optimizer)
model.compile(loss=mean_squared_error, optimizer=optimizer)
print('Create new model.')
return model
| 2.3125 | 2 |
gpio.py | bibinvarghese/democes | 0 | 12798085 | <gh_stars>0
import sys
import time
class UI(object):
"""Abstract UI class. Subclassed by specific board implementations."""
def __init__(self):
self._button_state = [False for _ in self._buttons]
current_time = time.time()
self._button_state_last_change = [current_time for _ in self._buttons]
self._debounce_interval = 0.1 # seconds
def setOnlyLED(self, index):
for i in range(len(self._LEDs)): self.setLED(i, False)
if index is not None: self.setLED(index, True)
def isButtonPressed(self, index):
buttons = self.getButtonState()
return buttons[index]
def setLED(self, index, state):
raise NotImplementedError()
def getButtonState(self):
raise NotImplementedError()
def getDebouncedButtonState(self):
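        """Return per-button pressed state with simple time-based debouncing:
        a press registers only if at least `_debounce_interval` seconds have
        passed since the button's last recorded change and it has not
        already been registered."""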
t = time.time()
for i,new in enumerate(self.getButtonState()):
if not new:
self._button_state[i] = False
continue
old = self._button_state[i]
if ((t-self._button_state_last_change[i]) >
self._debounce_interval) and not old:
self._button_state[i] = True
else:
self._button_state[i] = False
self._button_state_last_change[i] = t
return self._button_state
def testButtons(self, times):
for t in range(0,times):
for i in range(5):
self.setLED(i, self.isButtonPressed(i))
print("Buttons: ", " ".join([str(i) for i,v in
enumerate(self.getButtonState()) if v]))
time.sleep(0.01)
def wiggleLEDs(self, reps=3):
for i in range(reps):
for i in range(5):
self.setLED(i, True)
time.sleep(0.05)
self.setLED(i, False)
class UI_EdgeTpuDevBoard(UI):
def __init__(self):
global GPIO, PWM
from periphery import GPIO, PWM, GPIOError
def initPWM(pin):
pwm = PWM(pin, 0)
pwm.frequency = 1e3
pwm.duty_cycle = 0
pwm.enable()
return pwm
try:
self._buttons = [
GPIO(6, "in"),
GPIO(138, "in"),
GPIO(140,"in"),
GPIO(7, "in"),
GPIO(141, "in"),
]
self._LEDs = [
initPWM(2),
GPIO(73, "out"),
initPWM(1),
initPWM(0),
                GPIO(77, "out"),
]
except GPIOError as e:
print("Unable to access GPIO pins. Did you run with sudo ?")
sys.exit(1)
super(UI_EdgeTpuDevBoard, self).__init__()
def __del__(self):
if hasattr(self, "_LEDs"):
for x in self._LEDs or [] + self._buttons or []: x.close()
def setLED(self, index, state):
"""Abstracts away mix of GPIO and PWM LEDs."""
if isinstance(self._LEDs[index], GPIO): self._LEDs[index].write(state)
else: self._LEDs[index].duty_cycle = 1.0 if state else 0.0
def getButtonState(self):
return [not button.read() for button in self._buttons]
if __name__== "__main__":
ui = UI_EdgeTpuDevBoard()
ui.wiggleLEDs()
ui.testButtons(1000)
| 3.265625 | 3 |
applications/conventions/models.py | Haakenlid/grenselandet | 0 | 12798086 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import formats
from django.utils import timezone
# Create your models here.
class ConventionManager(models.Manager):
def next(self):
""" The upcoming event """
next_convention = self.exclude(end_time__lt=timezone.now()).order_by('start_time').first()
return next_convention
class Convention(models.Model):
""" A con, festival or event """
name = models.CharField(max_length=100)
description = models.TextField()
mail_signature = models.TextField()
# logo
# TODO: logo som sorl-greie
location = models.CharField(max_length=500)
start_time = models.DateTimeField()
end_time = models.DateTimeField()
ticket_sales_opens = models.DateTimeField()
ticket_sales_closes = models.DateTimeField()
program_signup_opens = models.DateTimeField()
program_signup_closes = models.DateTimeField()
objects = ConventionManager()
class Meta:
verbose_name = _('Convention')
verbose_name_plural = _('Conventions')
def __str__(self):
return self.name
def dates(self):
days = (self.end_time.date() - self.start_time.date()).days + 1
dates = [self.start_time.replace(hour=0, minute=0) + timezone.timedelta(days=x) for x in range(days)]
return dates
def ticket_sales_has_started(self):
return timezone.now() > self.ticket_sales_opens
def ticket_sales_has_ended(self):
return timezone.now() > self.ticket_sales_closes
def full_description(self):
return '{name}\n{description}\n{start} to {end}'.format(
name=self.name,
description=self.description,
start=formats.date_format(self.start_time, 'SHORT_DATE_FORMAT'),
end=formats.date_format(self.end_time, 'SHORT_DATE_FORMAT'),
)
| 2.3125 | 2 |
python/producer.py | ssproessig/amqp-training | 0 | 12798087 | #!/usr/bin/env python
import time
from random import choice
from string import ascii_lowercase
from amqp import connect_get_channel_declare_exchange_and_return_channel, EXCHANGE_NAME
APPS = ["foo", "bar", "infrastructure"]
LEVELS = ["debug", "info", "warn", "error"]
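# On a topic exchange, routing keys like "foo.error" let consumers bind
# selectively with patterns such as "*.error" or "infrastructure.#".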
def publish_cyclically():
channel = connect_get_channel_declare_exchange_and_return_channel()
for counter in range(1, 1000):
routing_key = "%s.%s" % (choice(APPS), choice(LEVELS))
body = "%03d Some random text: %s " % (
counter,
''.join(choice(ascii_lowercase) for _ in range(16))
)
channel.basic_publish(
exchange=EXCHANGE_NAME,
routing_key=routing_key,
body=body
)
print("Published '%s' to '%s' with routing-key '%s'." % (body, EXCHANGE_NAME, routing_key))
time.sleep(1)
if __name__ == "__main__":
try:
publish_cyclically()
except KeyboardInterrupt:
pass
| 2.734375 | 3 |
help.py | Fxcilities/KEKWBot | 2 | 12798088 | import discord
from discord.ext import commands
from discord.ext import *
from discord.ext.commands import *
import asyncio
class help(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
async def help(self, ctx):
embed = discord.Embed(
title="KEKW Bot Help",
description="_ _\nThank you for inviting KEKW bot!\nCheck out our other bot, [Essentials](https://essentialsbot.xyz)\n\n[Setup the bot](https://github.com/Fxcilities/KEKWBot/blob/main/README.md)",
color=discord.Color.dark_gold()
)
embed.add_field(name="Main commands:", value="**```kekw!start (amount, defaults to 50)```**\n**```kekw!emojis```**", inline=False)
embed.set_footer(text="Requested by: " + str(ctx.author), icon_url=str(ctx.author.avatar_url))
await ctx.message.delete()
await ctx.send(embed=embed, delete_after=30)
def setup(bot):
bot.add_cog(help(bot))
| 2.78125 | 3 |
twiliokey.py | ridwanrahman/wavemaps | 3 | 12798089 | <reponame>ridwanrahman/wavemaps<filename>twiliokey.py
''' Twilio keys for send_text.py:
account_sid_key
auth_token
'''
class TwilioKey:
def __init__(self): # https://www.twilio.com/try-twilio
self.account_sid_key = "your_sid"
self.auth_token_key = "your_auth"
def get_sid(self):
return self.account_sid_key
def get_auth(self):
return self.auth_token_key
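# Illustrative use with the official twilio-python client; the phone numbers
# are placeholders and send_text.py is assumed to wire this up similarly:
#   from twilio.rest import Client
#   keys = TwilioKey()
#   client = Client(keys.get_sid(), keys.get_auth())
#   client.messages.create(to="+15551230000", from_="+15550009999",
#                          body="wave height alert")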
| 2.53125 | 3 |
Spark_streaming/spark_streaming.py | XinxinTang/Real-time_Stock_Monitor | 0 | 12798090 | # @author: <NAME>
# email: <EMAIL>
# fetch data from the Kafka producer's topic,
# run the computation with Spark Streaming,
# and store results back to Kafka under another topic
import argparse
import json
import logging
import atexit
from pyspark import SparkContext
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils
from kafka import KafkaProducer
from kafka.errors import KafkaError
class spark_streaming():
def __init__(self, topic, target_topic, kafka_broker):
self.topic = topic
self.kafka_broker = kafka_broker
self.target_topic = target_topic
        self.kafka_producer = KafkaProducer(bootstrap_servers=kafka_broker)
sc = SparkContext("local[2]", "AveragePrice")
sc.setLogLevel("INFO")
self.ssc = StreamingContext(sc, 5)
logging.basicConfig()
self.logger = logging.getLogger()
self.logger.setLevel(logging.INFO)
def process(self, timeobj, rdd):
def group(record):
data = json.loads(record[1].decode('utf-8'))[0]
return data.get("StockSymbol"), (float(data.get("LastTradePrice")), 1)
        # sum prices and counts per symbol, then divide by the count to average
        newRDD = rdd.map(group).reduceByKey(lambda x, y: (x[0] + y[0], x[1] + y[1]))\
            .map(lambda kv: (kv[0], kv[1][0] / kv[1][1]))
results = newRDD.collect()
for res in results:
msg = {"StockSymbol": res[0],
"AveragePrice": res[1]}
try:
                self.kafka_producer.send(self.target_topic, value=json.dumps(msg).encode('utf-8'))
self.logger.info("Successfully send processed data to {}, {}".format(self.target_topic, msg))
except KafkaError as KE:
self.logger.warning("Failed to send data, the error is {}".format(msg))
def createStream(self):
        # create a direct Kafka stream consuming from the source topic
directKafkaStream = KafkaUtils.createDirectStream(self.ssc, [self.topic],
{"metadata.broker.list" : self.kafka_broker})
return directKafkaStream
def run(self):
        directKafkaStream = self.createStream()
        directKafkaStream.foreachRDD(self.process)  # transformation plus action
self.ssc.start()
self.ssc.awaitTermination()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("topic", help="this is the topic to receive data from kafka producer")
parser.add_argument("target_topic", help="this is the topic to send processed data to kafka broker")
parser.add_argument("kafka_broker", help="this is the kafka broker")
args = parser.parse_args()
topic = args.topic
target_topic = args.target_topic
kafka_broker = args.kafka_broker
KafkaSpark = spark_streaming(topic, target_topic, kafka_broker)
KafkaSpark.run()
| 2.84375 | 3 |
news_spider/spiders/wallstreetcn.py | zuoakang/news_spider | 0 | 12798091 | import scrapy
from news_spider.items import WallstreetcnItem
from news_spider.utils.common import get_category_by_name
class WallstreetcnSpider(scrapy.Spider):
name = 'wallstreetcn'
    allowed_domains = ['api.wallstcn.com']
start_urls = ['https://api.wallstcn.com/apiv1/content/information-flow?channel=global&accept=article&limit=20&action=upglide']
category_id = get_category_by_name(name)
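    # The endpoint's JSON is assumed to look roughly like
    # {"data": {"items": [{"resource": {"title": ..., "display_time": ...,
    #  "uri": ..., "author": {"display_name": ...}}}, ...]}},
    # which is what parse() below unpacks.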
def parse(self, response):
json_data = response.json()
res_list = json_data["data"]["items"]
for res in res_list:
item = WallstreetcnItem()
resource = res["resource"]
title = resource["title"]
display_time = resource["display_time"]
url = resource["uri"]
hot_val = resource["author"]["display_name"]
item["title"] = title
item["url"] = url
item["hot_val"] = hot_val
item["rank"] = display_time
item["category_id"] = self.category_id
yield item
| 2.875 | 3 |
150-Challenges/Challenges 80 - 87/Challenge 82.py | DGrifferty/Python | 0 | 12798092 | <filename>150-Challenges/Challenges 80 - 87/Challenge 82.py<gh_stars>0
# 082
# Show the user a line of text from your favourite poem and
# ask for a starting and ending point. Display the characters
# between those two points.
# very similar to challenge 74
from typing import List
def print_list(lst: List):
"""prints a list in a cleaner way"""
string = ''
for i in range(len(lst)):
if i == len(lst) - 1:
string += f'{lst[i]}.'
else:
string += f'{lst[i]}, '
if i > 0:
if i % 10 == 0:
string += '\n'
print(string)
def get_num_int(prompt: str) -> int:
"""Function to check if users input is an integer"""
while True:
try:
number = int(input(prompt))
return number
except Exception as e:
print(e)
def get_slice(sl):
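    """Prompt for start and end indices within the bounds of sl and return sl[start:end]."""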
    low, high = 0, len(sl)
    while True:
        start = get_num_int('Enter starting number of slice: ')
        if low <= start <= high:
            break
        else:
            print(f'Please enter a number between {low} and {high}')
    while True:
        end = get_num_int('Enter end number of slice: ')
        if start <= end <= high:
            break
        else:
            print(f'Please enter a number between {start} and {high}')
    return sl[start: end]
if __name__ == '__main__':
poem = 'line from poem'
print(poem)
print_list(get_slice(poem))
| 4.125 | 4 |
pdfmerge/views.py | rupin/pdfmerger | 0 | 12798093 | from django.http import HttpResponse
from django.template import loader
from .models import *
from django.conf import settings
from django.shortcuts import redirect
from utils import dataLayerPDF
from utils import dprint
from utils import modelUtils
import pandas as pd
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django.utils.dateparse import parse_date
import datetime
from dateutil.parser import *
def homePage(request):
template = loader.get_template('base_intro.html')
context = {
}
return HttpResponse(template.render(context, request))
def loginForm(request):
context = {
'errors': "",
}
template = loader.get_template('registration/login.html')
return HttpResponse(template.render(context, request))
def loginUser(request):
username = request.POST['username']
password = request.POST['password']
user = authenticate(request, username=username, password=password)
#print(user)
if user is not None:
login(request, user)
return redirect('systemForms')
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
@login_required
def viewSystemForms(request):
pdfforms=PDFForm.objects.all()
context = {
'systemforms':pdfforms ,
}
template = loader.get_template('formsList.html')
return HttpResponse(template.render(context, request))
@login_required
def addFormToProfile(request,form_id):
#return HttpResponse(str(form_id))
errorCondition=False
loggedUserID=request.user.id
UserObject=request.user
PDFormObject=PDFForm.objects.get(id=form_id)
isFormPresent=GeneratedPDF.objects.filter(user=UserObject, pdf=PDFormObject).count()
if(isFormPresent==0):
addform=GeneratedPDF(user=UserObject, pdf=PDFormObject)
addform.save()
modelUtils.addFieldsToProfile(UserObject, PDFormObject)
#get all fields in PDF related to PDFID
fieldsinPDF=PDFFormField.objects.filter(pdf=form_id).values_list(
"field",
"field__field_display",
"field__field_question",
"field__field_state",
"field__field_description",
named=True
)
#get all fields Related to User in UserProfile and that match the fields in the PDFForm
userFields=UserProfile.objects.filter(user=loggedUserID).values_list(
"field",
"field_text",
"data_index",
named=True
)
#print(userFields)
#print(fieldsinPDF)
#Set the column as index on which the join is to be made in pandas
userFieldDF=pd.DataFrame(list(userFields)).set_index('field')
PDFFieldsDF=pd.DataFrame(list(fieldsinPDF)).set_index('field')
#dprint.dprint(userFieldDF)
#dprint.dprint(PDFFieldsDF)
#Make the Join
combinedDF=PDFFieldsDF.join(userFieldDF, on='field',lsuffix='_left', rsuffix='_right')
	#remove rows with NA values; this happens when the two datasets above differ in row count.
#combinedDF.dropna(0,inplace=True)
#sort the Dataframe by Field Page Number, then convert it to a list of dictionaries
#dataSet=combinedDF.sort_values(by=['field_page_number']).to_dict('records')
#dprint.dprint(combinedDF)
	#take a copy so the fillna below doesn't operate on a slice view (pandas SettingWithCopyWarning)
	missingQuestionsList=combinedDF[combinedDF["field__field_state"]=='DYNAMIC'].copy()
	missingQuestionsList.fillna(value='',inplace=True)
missingQuestionsList.reset_index(inplace=True)
#missingQuestionsList['field_str']=missingQuestionsList['field'].astype(str)
missingQuestionTuples=list(missingQuestionsList.itertuples())
#print(type(missingQuestionTuples))
fieldIDStr=""
for question in missingQuestionTuples:
fieldIDStr=fieldIDStr+" " +str(question.field)
fieldIDStr=fieldIDStr.strip().replace(" ", ",")
#print(fieldIDStr)
numberOfMissingQuestions=len(missingQuestionTuples)
context = {
'formObject':PDFormObject,
"missingQuestions":missingQuestionTuples,
'questionCount':numberOfMissingQuestions,
'form_id':form_id,
'fieldIDS':fieldIDStr
}
#dprint.dprint(missingQuestionsList)
#print(context)
template = loader.get_template('process_form.html')
return HttpResponse(template.render(context, request))
@login_required
@require_http_methods(["POST"])
def saveDynamicFieldData(request,pdfid):
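	# Persist posted dynamic field values to the user's profile, then open the PDF editor.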
fieldIDs=request.POST["fieldIDs"]
fieldIDList=[]
fieldData=[]
if(fieldIDs is not None):
fieldIDList=fieldIDs.split(",")
for fieldID in fieldIDList:
fieldDict={}
fieldDict["ID"]=fieldID
fieldDict["userValue"]=request.POST[fieldID]
fieldData.append(fieldDict)
#print(fieldData)
modelUtils.saveUserProfileFields(fieldData, request.user)
return redirect('/editPDF/'+str(pdfid))
def logoutUser(request):
logout(request)
return redirect('login')
@login_required
def fillForm(request, pdfid):
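	# Fill the user's profile data into the PDF form and return it as an inline download.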
dataSet, formData=modelUtils.getUserFormData(request, pdfid)
#print(dataSet)
	#Use the dataset as input to generate the pdf, receive a buffer as response
pdfData=dataLayerPDF.addText(dataSet,formData)
# #output=dataLayerPDF.mergePDFs()
timestamp=datetime.datetime.now().strftime("%d-%m-%Y-%I-%M-%S")
filename=formData.pdf_name +"-"+request.user.first_name+"-" + str(timestamp) +".pdf"
metaData = {
'/Title': filename,
}
pdfData.addMetadata(metaData)
#Set the httpresponse to download a pdf
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'inline; filename= "%s"' % filename
#response.write(PDFBytes)
	#write the pdf data to the response object
pdfData.write(response)
#response.write(pdfData)
#return the response
return response
@login_required
def profile(request):
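	# Show the user's saved forms together with their profile field data.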
# userForms=GeneratedPDF.objects.filter(user=request.user).values("pdf__pdf_name",
# "pdf__pdf_description",
# "pdf__image",
# )
userForms=GeneratedPDF.objects.filter(user=request.user).prefetch_related("pdf")
#print(userForms)
userData=UserProfile.objects.filter(user=request.user).prefetch_related("field").order_by(
"field__category",
"field__category_order",
"field__field_description")
formsCount=userForms.count()
#print(userData)
template = loader.get_template('base_view_profile.html')
context = {
"systemforms":userForms,
"userData":userData,
"formcount":formsCount
}
#print(context)
return HttpResponse(template.render(context, request))
@login_required
def editPDFLive(request, pdfid):
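	# Render the live PDF editor for a form the user has added to their profile.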
userFormsCount=GeneratedPDF.objects.filter(user=request.user, pdf=pdfid).count()
if(userFormsCount==0):
return HttpResponse("Not found");
dataSet, formData=modelUtils.getUserFormData(request, pdfid, False)
#dprint.dprint(fieldsinPDF)
context = {
"userFormDataSet":dataSet,
"formData": formData,
'formID':pdfid
}
#print(formData)
template = loader.get_template('editPDF.html')
return HttpResponse(template.render(context, request))
@login_required
def arrangeFormQuestions(request, pdfid):
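	# Superuser-only view for reordering a form's questions.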
superUser=request.user.is_superuser
if(not superUser):
return HttpResponse(status=404)
FormFieldQueryset=PDFFormField.objects.filter(pdf=pdfid).prefetch_related('field')
context={
"formFields":FormFieldQueryset,
'formID':pdfid
}
#print(context)
template = loader.get_template('rearrangeformquestions.html')
return HttpResponse(template.render(context, request))
| 2.15625 | 2 |
FunctionalPractice/py_sorted.py | CarrieGao/py3-practice | 0 | 12798094 | # _*_ coding: utf-8 _*_
def by_name(t):
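    """Sort key: the name, lower-cased for case-insensitive ordering."""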
return t[0].lower()
def by_score(t):
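    """Sort key: the score."""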
return t[1]
if __name__ == '__main__':
print(sorted([36, 5, -12, 9, -21]))
s = sorted(['bob', 'about', 'Zoo', 'Credit'], key=str.lower, reverse=True)
print(s)
L = [('Bob', 75), ('Adam', 92), ('Bart', 66), ('Lisa', 88)]
s2 = sorted(L, key = by_name)
print(s2)
s3 = sorted(L, key = by_score)
print(s3) | 3.78125 | 4 |
tools/apply_gsub_rules.py | aravindavk/Gubbi | 9 | 12798095 | #!/usr/bin/python
# Copyright (C) 2012, <NAME> <<EMAIL>>
# http://aravindavk.in
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import fontforge
if __name__ == "__main__":
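    # Rebuild GSUB from scratch: drop existing lookups, merge rules from gsub.fea, save in place.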
font = fontforge.open("../Gubbi.sfd")
# Remove all GSUB lookups
for lookup in font.gsub_lookups:
font.removeLookup(lookup)
# Merge the new featurefile
font.mergeFeature("gsub.fea")
font.save()
font.close()
| 2.046875 | 2 |
abtools/core/models.py | i1bgv/abtools | 3 | 12798096 | <reponame>i1bgv/abtools
# -*- coding: utf-8 -*-
import numpy as np
from .base import Distribution
from .distributions import Bernoulli, Lognormal
class BLModel(Distribution):
def __init__(self, x=None, mu=None, std=None,
alpha=None, n=None, k_b=None, k_l=None):
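        """Compound model: a Bernoulli gate for zero vs. non-zero outcomes times a Lognormal for the positive part."""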
self.bernoulli = Bernoulli(
x=(x > 0) * 1 if x is not None else None,
alpha=alpha if alpha is not None else None,
beta=n - alpha if n is not None and alpha is not None else None
)
self.lognormal = Lognormal(
x=x[x > 0] if x is not None else None,
mu=mu if mu is not None else None,
std=std if std is not None else None,
n=alpha if alpha is not None else None
)
super(BLModel, self).__init__()
self._set_parents(self.bernoulli, self.lognormal)
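        # the combined distribution is the element-wise product of samples from the two parents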
def prod(args):
return np.prod(args, axis=0)
self._set_parent_operation(prod, 'Product')
self.bernoulli.k = self._set_k(k_b)
self.lognormal.k = self._set_k(k_l)
def __rshift__(self, dist):
if not isinstance(dist, BLModel):
raise TypeError
new_b_model = self.bernoulli >> dist.bernoulli
new_l_model = self.lognormal >> dist.lognormal
new_bl = BLModel(
mu=new_l_model.mu,
std=new_l_model.std,
alpha=new_b_model.alpha,
n=new_b_model.n
)
return new_bl
def __mul__(self, k):
if not isinstance(k, list):
raise TypeError
self.bernoulli.k = self._set_k(k[0])
self.lognormal.k = self._set_k(k[1])
return self
| 2.375 | 2 |
url_shortener/db.py | kuderr/simple-url-shortener | 0 | 12798097 | import typing
import random
import uuid
from pydantic import BaseModel, Field
class URL(BaseModel):
"""
FastAPI uses pydantic to validate and represent data.
    Maybe dive deeper into it.
"""
id: int = Field(..., title="ID of URL")
full_url: str = Field(..., title="Full URL")
short_url_code: str = Field(..., title="Redirection code of URL")
class Database:
"""
Fake db
When using with real -- all CRUD should be awaited
"""
def __init__(self):
self._items: typing.Dict[int, URL] = {}
async def get_random(self) -> int:
"""
        Create a list from dict_keys, since random.choice does not accept a dict_keys view
"""
ids = list(self._items.keys())
return random.choice(ids)
    async def get_all(self) -> typing.AsyncIterator[URL]:
"""
        For large collections it is better to use generators that yield one item at a time.
        Combined with asyncio this allows an async for loop; with a real db the reads would be awaited.
"""
for url in self._items.values():
yield url
async def get(self, id: typing.Optional[int] = None,
full_url: typing.Optional[str] = None,
short_url_code: typing.Optional[str] = None) -> typing.Optional[URL]:
"""
        Simulate a get from the db, like SQLAlchemy where you can .get() by key
"""
if id:
return self._items.get(id)
try:
return next(item for item in self._items.values()
if item.full_url == full_url or item.short_url_code == short_url_code)
except StopIteration:
return None
async def add(self, url: str) -> URL:
"""
DB create simulation.
        Better to check 'code' for duplicates in the db, but that's skipped here since it's an example project.
"""
id = len(self._items) + 1
code = uuid.uuid4().hex[:8]
new_url = URL(id=id, full_url=url, short_url_code=code)
self._items[id] = new_url
return new_url
async def delete(self, id: int) -> typing.Union[typing.NoReturn, None]:
"""
        typing.NoReturn means the method may raise an error;
        otherwise it returns None, like any other function with no explicit 'return'.
        This is the same as typing.Optional[typing.NoReturn].
"""
if id in self._items:
del self._items[id]
else:
            raise ValueError("URL doesn't exist")
| 3.265625 | 3 |
run_onnx_on_microtvm.py | mshr-h/nucleo-f746zg-microtvm-example | 0 | 12798098 | <reponame>mshr-h/nucleo-f746zg-microtvm-example
import tvm
from tvm import relay
import onnx
import tvm.micro
from tvm.micro.contrib import zephyr
import os
import numpy as np
model_path = "add.onnx"
onnx_model = onnx.load(model_path)
input1_name = "Input1"
input2_name = "Input2"
shape_dict = {input1_name: [1], input2_name: [1]}
mod, params = relay.frontend.from_onnx(onnx_model, shape_dict)
target = tvm.target.target.micro("stm32f746xx")
board = "nucleo_f746zg"
with tvm.transform.PassContext(opt_level=3, config={"tir.disable_vectorize": True}):
module = tvm.relay.build(mod, target=target, params=params)
graph_json, compiled_model, simplified_params = module.get_graph_json(), module.get_lib(), module.get_params()
repo_root = "/home/ubuntu/workspace/tvm"
project_dir = os.path.join(repo_root, "apps", "microtvm", "zephyr", "demo_runtime")
compiler = zephyr.ZephyrCompiler(
project_dir=project_dir,
board=board,
zephyr_toolchain_variant="zephyr",
)
opts = tvm.micro.default_options(f"{project_dir}/crt")
workspace = tvm.micro.Workspace(debug=True)
micro_bin = tvm.micro.build_static_runtime(workspace, compiler, compiled_model, opts)
dtype = "float32"
with tvm.micro.Session(binary=micro_bin, flasher=compiler.flasher()) as sess:
m_ = tvm.micro.create_local_graph_executor(graph_json, sess.get_system_lib(), sess.device)
input1 = tvm.nd.array(np.array([4], dtype=dtype))
input2 = tvm.nd.array(np.array([7], dtype=dtype))
m_.set_input("Input1", input1)
m_.set_input("Input2", input2)
m_.run()
output = m_.get_output(0).asnumpy()
print('microTVM:', output)
| 2.25 | 2 |
scripts/data_subset.py | Njfritter/myersBriggsNLPAnalysis | 16 | 12798099 | #!/usr/bin/env python3
################################################################
# <NAME> Personality Type Tweets Natural Language Processing
# By <NAME>
# Project can be found at:
# https://www.inertia7.com/projects/109 &
# https://www.inertia7.com/projects/110
################################################################
##################
# Import packages
##################
import sys, os
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
# Confirm the correct directory; break script and prompt user to move to correct directory otherwise
filepath = os.getcwd()
if not filepath.endswith('myersBriggsNLPAnalysis'):
    print('\nYou do not appear to be in the correct directory; '
          'you must be in the \'myersBriggsNLPAnalysis\' directory '
          'in order to run these scripts. Type \'pwd\' in the command line '
          'if you are unsure of your location in the terminal.')
sys.exit(1)
raw_data = 'data/mbti_1.csv'
token_data = 'data/mbti_tokenized.csv'
clean_data = 'data/mbti_cleaned.csv'
columns = np.array(['type', 'posts'])
##################################################
# Make different versions of our data for analysis
##################################################
'''
Explanation
-----------
Now we will have various versions of our data:
- Raw, unfiltered data
- Tokenized data with hashtags, mentions, retweets, etc.
- Cleaned tokenized data with stopwords removed
We will now subset the data into various parts to be used in the other scripts
'''
# First check if the data has been generated
# If not prompt user to make it
token_file_exists = os.path.isfile(token_data)
clean_file_exists = os.path.isfile(clean_data)
if not token_file_exists or not clean_file_exists:
print('It looks like no processed data has been generated.\n',
'Please run the \'data_generation.py\' file and follow the prompts.')
sys.exit(1)
# Declare different processed and unprocessed objects for further analysis
raw_df = pd.read_csv(raw_data, header = 0)
raw_type = raw_df['type']
raw_posts = raw_df['posts']
token_df = pd.read_csv(token_data, header = 0)
token_type = token_df['type']
token_posts = token_df['posts']
clean_df = pd.read_csv(clean_data, header = 0)
clean_type = clean_df['type']
clean_posts = clean_df['posts']
# Split up data into training and testing datasets
# To evaluate effectiveness of model training
X_train_token, X_test_token, y_train_token, y_test_token = train_test_split(
token_posts, token_type, test_size = 0.30, random_state = 42)
X_train_clean, X_test_clean, y_train_clean, y_test_clean = train_test_split(
clean_posts, clean_type, test_size = 0.30, random_state = 42)
| 2.453125 | 2 |
e1/p1.py | larsaars/algorithms_datastructures_exercises | 0 | 12798100 | <filename>e1/p1.py
import argparse
from utils import time_func
def gcd_iterative(a: int, b: int) -> int:
while True:
r = a % b
a = b
b = r
if r == 0: # not r
break
return a
def gcd_recursive(a: int, b: int):
return gcd_recursive(b, a % b) if b else a
def scm(a: int, b: int) -> int:
# gcd(a, b) * scm(a, b) = |a * b|
return abs(a * b) // gcd_iterative(a, b)
def parse_args():
parser = argparse.ArgumentParser(description='find greatest common divisor')
parser.add_argument('-a', '--a', type=int, required=True, help='number 1')
parser.add_argument('-b', '--b', type=int, required=True, help='number 2')
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
print(f'gcd_iterative({args.a}, {args.b}) = {time_func(gcd_iterative, args.a, args.b)}')
print(f'gcd_recursive({args.a}, {args.b}) = {time_func(gcd_recursive, args.a, args.b)}')
print(f'scm({args.a}, {args.b}) = {time_func(scm, args.a, args.b)}')
    # due to call-stack overhead, the recursive version is usually slower but more readable
print(f"{'a':^3}|{'b':^3}|{'gcd':^6}|{'kcm':^6}|{'a*b':^6}")
print(f"{'':-^28}")
for i in range(30, 40 + 1):
for k in range(i, 40 + 1):
print(f"{i:>3}|{k:>3}|{gcd_iterative(i, k):>6}|{scm(i, k):>6}|{(i * k):>6}")
# gcd(a, b) * scm(a, b) = |a * b|
| 3.8125 | 4 |
projects/olds/ocr/ocr-tensorflow/train.py | Bingwen-Hu/hackaway | 0 | 12798101 | # -*- coding: utf-8 -*-
import os
import time
import tensorflow as tf
from config import FLAGS
from model import build_graph
from preprocess import train_data_iterator, test_data_helper
def train():
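    """Run the training loop with periodic evaluation and checkpointing."""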
with tf.Session() as sess:
# initialization
graph = build_graph(top_k=1)
saver = tf.train.Saver()
sess.run(tf.global_variables_initializer())
# multi thread
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
# log writer
train_writer = tf.summary.FileWriter(FLAGS.log_dir + '/train', sess.graph)
test_writer = tf.summary.FileWriter(FLAGS.log_dir + '/val')
# restore model
if FLAGS.restore:
ckpt = tf.train.latest_checkpoint(FLAGS.checkpoint_dir)
if ckpt:
saver.restore(sess, ckpt)
print("restore from the checkpoint {}".format(ckpt))
# training begins
try:
while not coord.should_stop():
for step, (x_batch, y_batch) in enumerate(train_data_iterator()):
start_time = time.time()
feed_dict = {graph['images']: x_batch,
graph['labels']: y_batch,
graph['keep_prob']: 0.8,
graph['is_training']: True}
train_opts = [graph['train_op'], graph['loss'], graph['merged_summary_op'], graph['global_step']]
_, loss_val, train_summary, step = sess.run(train_opts, feed_dict=feed_dict)
train_writer.add_summary(train_summary, step)
end_time = time.time()
print("the step {0} takes {1} loss {2}".format(step, end_time - start_time, loss_val))
# eval stage
if step % FLAGS.eval_steps == 0:
x_batch_test, y_batch_test = test_data_helper(128)
feed_dict = {graph['images']: x_batch_test,
graph['labels']: y_batch_test,
graph['keep_prob']: 1.0,
graph['is_training']: False}
test_opts = [graph['accuracy'], graph['merged_summary_op']]
accuracy_test, test_summary = sess.run(test_opts, feed_dict=feed_dict)
test_writer.add_summary(test_summary, step)
print('===============Eval a batch=======================')
print('the step {0} test accuracy: {1}'.format(step, accuracy_test))
print('===============Eval a batch=======================')
# save stage
if step % FLAGS.save_steps == 0 and step > FLAGS.min_save_steps:
saver.save(sess, os.path.join(FLAGS.checkpoint_dir, FLAGS.model_name), global_step=graph['global_step'])
except tf.errors.OutOfRangeError:
print('==================Train Finished================')
saver.save(sess, os.path.join(FLAGS.checkpoint_dir, FLAGS.model_name), global_step=graph['global_step'])
finally:
coord.request_stop()
coord.join(threads)
if __name__ == '__main__':
train() | 2.171875 | 2 |
test/test_huffman.py | brendanlong/compression | 0 | 12798102 | #!/usr/bin/env python3
import bitstring
from compression import huffman
simple = b"122333"
simple_codes = {
"1": "11",
"2": "10",
"3": "0"
}
simple_tree = bitstring.Bits("0b00100110001100110010100110011")
simple_compressed = bitstring.Bits("0x498cca67d0")
lorem = (b"Lorem ipsum dolor sit amet, consectetur adipisicing "
b"elit, sed do eiusmod tempor incididunt ut labore et dolore magna "
b"aliqua. Ut enim ad minim veniam, quis nostrud exercitation "
b"ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis "
b"aute irure dolor in reprehenderit in voluptate velit esse cillum "
b"dolore eu fugiat nulla pariatur. Excepteur sint occaecat "
b"cupidatat non proident, sunt in culpa qui officia deserunt "
b"mollit anim id est laborum.")
lorem_codes = {
" ": "001",
",": "1001000",
".": "111111",
"D": "100101101",
"E": "100101100",
"L": "11111011",
"U": "11111010",
"a": "0111",
"b": "1111100",
"c": "01001",
"d": "00011",
"e": "0000",
"f": "1001101",
"g": "1001100",
"h": "10010111",
"i": "110",
"l": "1110",
"m": "01000",
"n": "1010",
"o": "0110",
"p": "11110",
"q": "100111",
"r": "1011",
"s": "00010",
"t": "0101",
"u": "1000",
"v": "1001010",
"x": "1001001"
}
lorem_tree = bitstring.Bits("0x025c532ab62b85b2d25cadc2e2b359c5a144a2dd97"
"8965d4586deba2c76d480b25cec, 0b101")
lorem_compressed = bitstring.Bits("0x0204b8a6556c570b65a4b95b85c566b38b42"
"8945bb2f12cba8b0dbd7458eda90164b9d97edac10778508236e6b22ca5d00b20a5a8"
"4095058b2e3dec2c9d530876590440323621a04861950479acea4e1e1c529853cff1a"
"c08291b7358143cca72fda787fcfd290ac82e328d59065056744833c611a612dc0c84"
"90b4e575cd463b9d0963cff1af08d61630a5fb4f1bc42490729642186c52d4209e1d7"
"f32d8c22f0a075c5811b7359d46c3d612e1430bca75194dd1e57503283b2901080a77"
"74208db9ac08419b1333a9a8ee73e7bceb17f996494879422c8b52964a5c12ea531ec"
"3757534d47d6d8614b208a294ea298ef399e3169b37273918082e294a1bbb297ac838"
"6404a79fe35c23f")
def test_tree_from_data():
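    """A tree built from raw data should produce the expected code tables."""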
tree = huffman.Node.from_data(simple)
codes = {chr(symbol): code.unpack("bin")[0]
for symbol, code in tree.codes().items()}
assert(codes == simple_codes)
tree = huffman.Node.from_data(lorem)
codes = {chr(symbol): code.unpack("bin")[0]
for symbol, code in tree.codes().items()}
assert(codes == lorem_codes)
def test_tree_from_binary():
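    """A tree deserialized from its binary form should reproduce the code tables."""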
tree = huffman.Node.from_binary(simple_tree)
codes = {chr(symbol): code.unpack("bin")[0]
for symbol, code in tree.codes().items()}
assert(codes == simple_codes)
tree = huffman.Node.from_binary(lorem_tree)
codes = {chr(symbol): code.unpack("bin")[0]
for symbol, code in tree.codes().items()}
assert(codes == lorem_codes)
def test_compression():
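    """Compressing known inputs should match the reference bitstrings."""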
compressed = huffman.compress(simple)
assert(bitstring.Bits(compressed) == simple_compressed)
compressed = huffman.compress(lorem)
assert(bitstring.Bits(compressed) == lorem_compressed)
def test_decompression():
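    """Decompressing the reference bitstrings should recover the original bytes."""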
data = huffman.decompress(simple_compressed)
assert(data == simple)
data = huffman.decompress(lorem_compressed)
assert(data == lorem)
def test_both():
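    """A compress/decompress round trip should be lossless."""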
compressed = huffman.compress(simple)
data = huffman.decompress(compressed)
assert(data == simple)
compressed = huffman.compress(lorem)
data = huffman.decompress(compressed)
assert(data == lorem)
| 2.8125 | 3 |
transition_amr_parser/action_pointer/o8_data_oracle.py | IBM/transition-amr-parser | 76 | 12798103 | <reponame>IBM/transition-amr-parser
import json
import argparse
from collections import Counter, defaultdict
import re
from tqdm import tqdm
from transition_amr_parser.io import read_propbank, read_amr, write_tokenized_sentences
from transition_amr_parser.action_pointer.o8_state_machine import (
AMRStateMachine,
get_spacy_lemmatizer
)
"""
This algorithm contains heuristics for generating linearized action sequences for AMR graphs in a rule-based way.
The parsing algorithm is transition-based combined with pointers for long distance arcs.
Actions are
SHIFT : move cursor to next position in the token sequence
REDUCE : delete current token
MERGE : merge two tokens (for MWEs)
ENTITY(type) : form a named entity, or a subgraph
PRED(label) : form a new node with label
COPY_LEMMA : form a new node by copying lemma
COPY_SENSE01 : form a new node by copying lemma and add '01'
DEPENDENT(edge,node) : Add a node which is a dependent of the current node
LA(pos,label) : form a left arc from the current node to the previous node at location pos
RA(pos,label) : form a right arc to the current node from the previous node at location pos
CLOSE : complete AMR, run post-processing
"""
use_addnode_rules = True
def argument_parser():
parser = argparse.ArgumentParser(description='AMR parser oracle')
# Single input parameters
parser.add_argument(
"--in-amr",
help="AMR notation in LDC format",
type=str,
required=True
)
parser.add_argument(
"--in-propbank-args",
help="Propbank argument data",
type=str,
)
parser.add_argument(
"--out-oracle",
help="tokens, AMR notation and actions given by oracle",
type=str
)
parser.add_argument(
"--out-sentences",
help="tokenized sentences from --in-amr",
type=str
)
parser.add_argument(
"--out-actions",
help="actions given by oracle",
type=str
)
parser.add_argument(
"--out-action-stats",
help="statistics about actions",
type=str
)
parser.add_argument(
"--out-rule-stats", # TODO this is accessed by replacing '-' to '_'
help="statistics about alignments",
type=str
)
# Multiple input parameters
parser.add_argument(
"--out-amr",
help="corresponding AMR",
type=str
)
#
parser.add_argument(
"--verbose",
action='store_true',
help="verbose processing"
)
#
parser.add_argument(
"--multitask-max-words",
type=int,
help="number of woprds to use for multi-task"
)
# Labeled shift args
parser.add_argument(
"--out-multitask-words",
type=str,
help="where to store top-k words for multi-task"
)
parser.add_argument(
"--in-multitask-words",
type=str,
help="where to read top-k words for multi-task"
)
parser.add_argument(
"--no-whitespace-in-actions",
action='store_true',
help="avoid tab separation in actions and sentences by removing whitespaces"
)
# copy lemma action
parser.add_argument(
"--copy-lemma-action",
action='store_true',
help="Use copy action from Spacy lemmas"
)
# copy lemma action
parser.add_argument(
"--addnode-count-cutoff",
help="forbid all addnode actions appearing less times than count",
type=int
)
parser.add_argument(
"--in-pred-entities",
type=str,
default="person,thing",
help="comma separated list of entity types that can have pred"
)
args = parser.parse_args()
return args
def yellow_font(string):
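    """Wrap a string in ANSI escape codes so it prints in yellow."""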
return "\033[93m%s\033[0m" % string
entities_with_preds = []
def preprocess_amr(gold_amr, add_unaligned=None, included_unaligned=None, root_id=-1):
# clean alignments
for i, tok in enumerate(gold_amr.tokens):
align = gold_amr.alignmentsToken2Node(i + 1)
if len(align) == 2:
edges = [
(s, r, t)
for s, r, t in gold_amr.edges
if s in align and t in align
]
if not edges:
remove = 1
if (
gold_amr.nodes[align[1]].startswith(tok[:2]) or
len(gold_amr.alignments[align[0]]) >
len(gold_amr.alignments[align[1]])
):
remove = 0
gold_amr.alignments[align[remove]].remove(i + 1)
gold_amr.token2node_memo = {}
# clean invalid alignments: sometimes the alignments are outside of the sentence boundary
# TODO check why this happens in the data reading process
# TODO and fix that and remove this cleaning process
# an example is in training data, when the sentence is
# ['Among', 'common', 'birds', ',', 'a', 'rather', 'special', 'one', 'is',
# 'the', 'black', '-', 'faced', 'spoonbill', '.']
# TODO if not dealt with, this causes a problem when the root aligned token id is sentence length (not -1)
for nid, tids in gold_amr.alignments.items():
gold_amr.alignments[nid] = [tid for tid in tids if tid <= len(gold_amr.tokens)]
# TODO: describe this
# append a special token at the end of the sentence for the first unaligned node
# whose label is in `included_unaligned` to align to
# repeat `add_unaligned` times
if add_unaligned:
for i in range(add_unaligned):
gold_amr.tokens.append("<unaligned>")
for n in gold_amr.nodes:
if n not in gold_amr.alignments or not gold_amr.alignments[n]:
if gold_amr.nodes[n] in included_unaligned:
gold_amr.alignments[n] = [len(gold_amr.tokens)]
break
# add root node
gold_amr.tokens.append("<ROOT>")
gold_amr.nodes[root_id] = "<ROOT>"
gold_amr.edges.append((root_id, "root", gold_amr.root))
# gold_amr.alignments[root_id] = [-1] # NOTE do not do this; we have made all the token ids natural positive index
# setting a token id to -1 will break the code
gold_amr.alignments[root_id] = [len(gold_amr.tokens)] # NOTE shifted by 1 for AMR alignment
return gold_amr
def get_node_alignment_counts(gold_amrs_train):
"""Get statistics of alignments between nodes and surface words"""
node_by_token = defaultdict(lambda: Counter())
for train_amr in gold_amrs_train:
# Get alignments
alignments = defaultdict(list)
for i in range(len(train_amr.tokens)):
for al_node in train_amr.alignmentsToken2Node(i + 1):
alignments[al_node].append(
train_amr.tokens[i]
)
for node_id, aligned_tokens in alignments.items():
# join multiple words into one single expression
if len(aligned_tokens) > 1:
token_str = " ".join(aligned_tokens)
else:
token_str = aligned_tokens[0]
node = train_amr.nodes[node_id]
# count number of time a node is aligned to a token, indexed by
# token
node_by_token[token_str].update([node])
return node_by_token
def is_most_common(node_counts, node, rank=0):
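    """True when `node` occupies position `rank` in `node_counts` and is strictly more frequent than the next rank."""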
return (
(
# as many results as the rank and node in that rank matches
len(node_counts) == rank + 1 and
node_counts.most_common(rank + 1)[-1][0] == node
) or (
# more results than the rank, node in that rank matches, and rank
# results is more probable than rank + 1
len(node_counts) > rank + 1 and
node_counts.most_common(rank + 1)[-1][0] == node and
node_counts.most_common(rank + 1)[-1][1] >
node_counts.most_common(rank + 2)[-1][1]
)
)
def sanity_check_amr(gold_amrs):
num_sentences = len(gold_amrs)
sentence_count = Counter()
amr_by_amrkey_by_sentence = defaultdict(dict)
amr_counts_by_sentence = defaultdict(lambda: Counter())
for amr in gold_amrs:
# hash of sentence
skey = " ".join(amr.tokens)
# count number of time sentence repeated
sentence_count.update([skey])
# hash of AMR labeling
akey = amr.toJAMRString()
# store different amr labels for same sent, keep has map
if akey not in amr_by_amrkey_by_sentence[skey]:
amr_by_amrkey_by_sentence[skey][akey] = amr
# count how many time each hash appears
amr_counts_by_sentence[skey].update([akey])
num_unique_sents = len(sentence_count)
num_labelings = 0
for skey, sent_count in sentence_count.items():
num_labelings += len(amr_counts_by_sentence[skey])
if len(amr_counts_by_sentence[skey]) > 1:
pass
# There is more than one labeling for this sentence
# amrs = list(amr_by_amrkey_by_sentence[skey].values())
# inform user
if num_sentences > num_unique_sents:
num_repeated = num_sentences - num_unique_sents
perc = num_repeated / num_sentences
alert_str = '{:d}/{:d} {:2.1f} % {:s} (max {:d} times)'.format(
num_repeated,
num_sentences,
100 * perc,
'repeated sents',
max(
count
for counter in amr_counts_by_sentence.values()
for count in counter.values()
)
)
print(yellow_font(alert_str))
if num_labelings > num_unique_sents:
num_inconsistent = num_labelings - num_unique_sents
perc = num_inconsistent / num_sentences
alert_str = '{:d}/{:d} {:2.4f} % {:s}'.format(
num_inconsistent,
num_sentences,
perc,
'inconsistent labelings from repeated sents'
)
print(yellow_font(alert_str))
def sanity_check_actions(sentence_tokens, oracle_actions):
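    """Summarize oracle action statistics and flag singleton actions."""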
pointer_arc_re = re.compile(r'^(LA|RA)\(([0-9]+),(.*)\)$')
assert len(sentence_tokens) == len(oracle_actions)
source_lengths = []
target_lengths = []
action_count = Counter()
for tokens, actions in zip(sentence_tokens, oracle_actions):
# filter actions to remove pointer
for action in actions:
if pointer_arc_re.match(action):
items = pointer_arc_re.match(action).groups()
action = f'{items[0]}({items[2]})'
action_count.update([action])
source_lengths.append(len(tokens))
target_lengths.append(len(actions))
pass
singletons = [k for k, c in action_count.items() if c == 1]
print('Base actions:')
print(Counter([k.split('(')[0] for k in action_count.keys()]))
print('Most frequent actions:')
print(action_count.most_common(10))
if singletons:
base_action_count = [x.split('(')[0] for x in singletons]
msg = f'{len(singletons)} singleton actions'
print(yellow_font(msg))
print(Counter(base_action_count))
def alert_inconsistencies(gold_amrs):
def yellow_font(string):
return "\033[93m%s\033[0m" % string
num_sentences = len(gold_amrs)
sentence_count = Counter()
amr_by_amrkey_by_sentence = defaultdict(dict)
amr_counts_by_sentence = defaultdict(lambda: Counter())
for amr in gold_amrs:
# hash of sentence
skey = " ".join(amr.tokens)
# count number of time sentence repeated
sentence_count.update([skey])
# hash of AMR labeling
akey = amr.toJAMRString()
# store different amr labels for same sent, keep has map
if akey not in amr_by_amrkey_by_sentence[skey]:
amr_by_amrkey_by_sentence[skey][akey] = amr
# count how many time each hash appears
amr_counts_by_sentence[skey].update([akey])
num_unique_sents = len(sentence_count)
num_labelings = 0
for skey, sent_count in sentence_count.items():
num_labelings += len(amr_counts_by_sentence[skey])
if len(amr_counts_by_sentence[skey]) > 1:
pass
# There is more than one labeling for this sentence
# amrs = list(amr_by_amrkey_by_sentence[skey].values())
# inform user
if num_sentences > num_unique_sents:
num_repeated = num_sentences - num_unique_sents
perc = num_repeated / num_sentences
alert_str = '{:d}/{:d} {:2.1f} % repeated sents (max {:d} times)'.format(
num_repeated,
num_sentences,
100 * perc,
max(
count
for counter in amr_counts_by_sentence.values()
for count in counter.values()
)
)
print(yellow_font(alert_str))
if num_labelings > num_unique_sents:
num_inconsistent = num_labelings - num_unique_sents
perc = num_inconsistent / num_sentences
alert_str = '{:d}/{:d} {:2.4f} % inconsistent labelings from repeated sents'.format(
num_inconsistent,
num_sentences,
perc
)
print(yellow_font(alert_str))
def read_multitask_words(multitask_list):
multitask_words = []
with open(multitask_list) as fid:
for line in fid:
items = line.strip().split('\t')
if len(items) > 2:
multitask_words.append(items[1])
return multitask_words
def label_shift(state_machine, multitask_words):
tok = state_machine.get_current_token(lemma=False)
if tok in multitask_words:
return f'SHIFT({tok})'
else:
return 'SHIFT'
def get_multitask_actions(max_symbols, tokenized_corpus, add_root=False):
word_count = Counter()
for sentence in tokenized_corpus:
word_count.update([x for x in sentence])
# Restrict to top-k words
allowed_words = dict(list(sorted(
word_count.items(),
key=lambda x: x[1])
)[-max_symbols:])
if add_root:
# Add root regardless
allowed_words.update({'ROOT': word_count['ROOT']})
return allowed_words
def process_multitask_words(tokenized_corpus, multitask_max_words,
in_multitask_words, out_multitask_words,
add_root=False):
# Load/Save words for multi-task
if multitask_max_words:
assert multitask_max_words
assert out_multitask_words
# get top words
multitask_words = get_multitask_actions(
multitask_max_words,
tokenized_corpus,
add_root=add_root
)
# store in file
with open(out_multitask_words, 'w') as fid:
for word in multitask_words.keys():
fid.write(f'{word}\n')
elif in_multitask_words:
assert not multitask_max_words
assert not out_multitask_words
# store in file
with open(in_multitask_words) as fid:
multitask_words = [line.strip() for line in fid.readlines()]
else:
multitask_words = None
return multitask_words
def print_corpus_info(amrs):
# print some info
print(f'{len(amrs)} sentences')
node_label_count = Counter([
n for amr in amrs for n in amr.nodes.values()
])
node_tokens = sum(node_label_count.values())
print(f'{len(node_label_count)}/{node_tokens} node types/tokens')
edge_label_count = Counter([t[1] for amr in amrs for t in amr.edges])
edge_tokens = sum(edge_label_count.values())
print(f'{len(edge_label_count)}/{edge_tokens} edge types/tokens')
word_label_count = Counter([w for amr in amrs for w in amr.tokens])
word_tokens = sum(word_label_count.values())
print(f'{len(word_label_count)}/{word_tokens} word types/tokens')
class AMROracleBuilder:
"""Build AMR oracle for one sentence."""
def __init__(self, gold_amr, lemmatizer, copy_lemma_action, multitask_words):
self.gold_amr = gold_amr
# initialize the state machine
self.machine = AMRStateMachine(gold_amr.tokens, spacy_lemmatizer=lemmatizer, amr_graph=True, entities_with_preds=entities_with_preds)
self.copy_lemma_action = copy_lemma_action
self.multitask_words = multitask_words
# TODO deprecate `multitask_words` or change for a better name such as `shift_label_words`
# AMR construction states info
self.nodeid_to_gold_nodeid = {} # key: node id in the state machine, value: list of node ids in gold AMR
self.nodeid_to_gold_nodeid[self.machine.root_id] = [-1] # NOTE gold amr root id is fixed at -1
self.built_gold_nodeids = []
@property
def tokens(self):
return self.gold_amr.tokens
@property
def time_step(self):
return self.machine.time_step
@property
def actions(self):
return self.machine.actions
def get_valid_actions(self):
"""Get the valid actions and invalid actions based on the current AMR state machine status and the gold AMR."""
# find the next action
# NOTE the order here is important, which is based on priority
# e.g. within node-arc actions, arc subsequence comes highest, then named entity subsequence, etc.
# debug
# on dev set, sentence id 459 (starting from 0) -> for DEPENDENT missing
# if self.tokens == ['The', 'cyber', 'attacks', 'were', 'unprecedented', '.', '<ROOT>']:
# if self.time_step >= 8:
# breakpoint()
action = self.try_reduce()
if not action:
action = self.try_merge()
#if not action:
# action = self.try_dependent()
if not action:
action = self.try_arcs()
#if not action:
# action = self.try_named_entities()
if not action:
action = self.try_entities_with_pred()
if not action:
action = self.try_entity()
if not action:
action = self.try_pred()
if not action:
if len(self.machine.actions) and self.machine.actions[-1] == 'SHIFT' and self.machine.tok_cursor != self.machine.tokseq_len - 1 :
action = 'REDUCE'
else:
action = 'SHIFT'
if action == 'SHIFT' and self.multitask_words is not None:
action = label_shift(self.machine, self.multitask_words)
valid_actions = [action]
invalid_actions = []
return valid_actions, invalid_actions
def build_oracle_actions(self):
"""Build the oracle action sequence for the current token sentence, based on the gold AMR
and the alignment.
"""
# Loop over potential actions
# NOTE "<ROOT>" token at last position is added as a node from the beginning, so no prediction
# for it here; the ending sequence is always [... SHIFT CLOSE] or [... LA(pos,'root') SHIFT CLOSE]
machine = self.machine
while not machine.is_closed:
valid_actions, invalid_actions = self.get_valid_actions()
# for now
assert len(valid_actions) == 1, "Oracle must be deterministic"
assert len(invalid_actions) == 0, "Oracle can\'t blacklist actions"
action = valid_actions[0]
# update the machine
machine.apply_action(action)
# close machine
# below are equivalent
# machine.apply_action('CLOSE', training=True, gold_amr=self.gold_amr)
machine.CLOSE(training=True, gold_amr=self.gold_amr)
return self.actions
def try_reduce(self):
"""
Check if the next action is REDUCE.
If
1) there is nothing aligned to a token.
"""
machine = self.machine
gold_amr = self.gold_amr
if machine.current_node_id is not None:
# not on the first time on a new token
return None
tok_id = machine.tok_cursor
tok_alignment = gold_amr.alignmentsToken2Node(tok_id + 1) # NOTE the index + 1
if len(tok_alignment) == 0:
return 'REDUCE'
else:
return None
def try_merge(self):
"""
Check if the next action is MERGE.
If
1) the current and the next token have the same node alignment.
"""
machine = self.machine
gold_amr = self.gold_amr
if machine.current_node_id is not None:
# not on the first time on a new token
return None
if machine.tok_cursor < machine.tokseq_len - 1:
cur = machine.tok_cursor
nxt = machine.tok_cursor + 1
cur_alignment = gold_amr.alignmentsToken2Node(cur + 1)
nxt_alignment = gold_amr.alignmentsToken2Node(nxt + 1)
if not cur_alignment or not nxt_alignment:
return None
# If both tokens are mapped to same node or overlap
if cur_alignment == nxt_alignment:
return 'MERGE'
if set(cur_alignment).intersection(set(nxt_alignment)):
return 'MERGE'
return None
else:
return None
def try_named_entities(self):
"""
Get the named entity sub-sequences one by one from the current surface token (segments).
E.g.
a) for one entity
ENTITY('name') PRED('city') [other arcs] LA(pos,':name')
b) for two entities with same surface tokens
ENTITY('name') PRED('city') [other arcs] LA(pos,':name') PRED('city') [other arcs] LA(pos,':name')
c) for two entities with two surface tokens
ENTITY('name') PRED('city') [other arcs] LA(pos,':name') ENTITY('name') PRED('city') [other arcs] LA(pos,':name')
"""
machine = self.machine
gold_amr = self.gold_amr
tok_id = machine.tok_cursor
tok_alignment = gold_amr.alignmentsToken2Node(tok_id + 1)
# check if alignment empty (or singleton)
if len(tok_alignment) <= 1:
return None
# check if there is any edge with the aligned nodes
edges = gold_amr.findSubGraph(tok_alignment).edges
if not edges:
return None
# check if named entity case: (entity_category, ':name', 'name')
entity_edges = []
name_node_ids = []
for s, r, t in edges:
if r == ':name' and gold_amr.nodes[t] == 'name':
entity_edges.append((s, r, t))
name_node_ids.append(t)
if not name_node_ids:
return None
for s, r, t in entity_edges:
if t not in self.built_gold_nodeids:
self.built_gold_nodeids.append(t)
self.nodeid_to_gold_nodeid.setdefault(machine.new_node_id, []).append(t)
return 'ENTITY(name)'
if s not in self.built_gold_nodeids:
self.built_gold_nodeids.append(s)
self.nodeid_to_gold_nodeid.setdefault(machine.new_node_id, []).append(s)
return f'PRED({gold_amr.nodes[s]})'
return None
def try_entities_with_pred(self):
"""
allow pred inside entities that frequently need it i.e. person, thing
"""
machine = self.machine
gold_amr = self.gold_amr
tok_id = machine.tok_cursor
tok_alignment = gold_amr.alignmentsToken2Node(tok_id + 1)
# check if alignment empty (or singleton)
if len(tok_alignment) <= 1:
return None
# check if there is any edge with the aligned nodes
edges = gold_amr.findSubGraph(tok_alignment).edges
if not edges:
return None
is_dependent = False
for s, r, t in edges:
if r == ':name' and gold_amr.nodes[t] == 'name':
return None
if r in [':polarity', ':mode']:
is_dependent = True
root = gold_amr.findSubGraph(tok_alignment).root
if gold_amr.nodes[root] not in entities_with_preds and not is_dependent:
return None
new_id = None
for s, r, t in edges:
if s not in self.built_gold_nodeids:
new_id = s
break
if t not in self.built_gold_nodeids:
new_id = t
break
if new_id != None:
self.built_gold_nodeids.append(new_id)
self.nodeid_to_gold_nodeid.setdefault(machine.new_node_id, []).append(new_id)
new_node = gold_amr.nodes[new_id]
if self.copy_lemma_action:
lemma = machine.get_current_token(lemma=True)
if lemma == new_node:
action = 'COPY_LEMMA'
elif f'{lemma}-01' == new_node:
action = 'COPY_SENSE01'
else:
action = f'PRED({new_node})'
else:
action = f'PRED({new_node})'
return action
return None
def try_entity(self):
"""
Check if the next action is ENTITY.
        Try ENTITY before PRED.
If
1) aligned to more than 1 nodes, and
2) there are edges in the aligned subgraph, and then
3) take the source nodes in the aligned subgraph altogether.
"""
machine = self.machine
gold_amr = self.gold_amr
tok_id = machine.tok_cursor
# to avoid subgraph ENTITY after named entities
if tok_id in machine.entity_tokenids:
return None
# NOTE currently do not allow multiple ENTITY here on a single token
if machine.current_node_id in machine.entities:
return None
tok_alignment = gold_amr.alignmentsToken2Node(tok_id + 1)
# check if alignment empty (or singleton)
if len(tok_alignment) <= 1:
return None
# check if there is any edge with the aligned nodes
edges = gold_amr.findSubGraph(tok_alignment).edges
if not edges:
return None
# check if named entity case: (entity_category, ':name', 'name')
# no need, since named entity check happens first
is_dependent = False
is_named = False
for s, r, t in edges:
if r == ':name' and gold_amr.nodes[t] == 'name':
is_named = True
if r in [':polarity', ':mode']:
is_dependent = True
root = gold_amr.findSubGraph(tok_alignment).root
if not is_named and ( gold_amr.nodes[root] in entities_with_preds or is_dependent):
return None
gold_nodeids = [n for n in tok_alignment if any(s == n for s, r, t in edges)]
new_nodes = ','.join([gold_amr.nodes[n] for n in gold_nodeids])
action = f'ENTITY({new_nodes})'
self.built_gold_nodeids.extend(gold_nodeids)
self.nodeid_to_gold_nodeid.setdefault(machine.new_node_id, []).extend(gold_nodeids)
return action
def try_pred(self):
"""
Check if the next action is PRED, COPY_LEMMA, COPY_SENSE01.
If
1) the current token is aligned to a single node, or multiple nodes? (figure out)
2) the aligned node has not been predicted yet
"""
machine = self.machine
gold_amr = self.gold_amr
tok_id = machine.tok_cursor
if tok_id == machine.tokseq_len - 1:
# never do PRED(<ROOT>) currently, as the root node is automatically added at the beginning
# NOTE to change this behavior, we need to be careful about the root node id which should be -1 now
# that is also massively used in postprocessing to find/add root.
return None
tok_alignment = gold_amr.alignmentsToken2Node(tok_id + 1) # NOTE we make all token ids positive natural index
# check if the alignment is empty
# no need since the REDUCE check happens first
if len(tok_alignment) == 1:
gold_nodeid = tok_alignment[0]
else:
# TODO check when this happens -> should we do multiple PRED?
gold_nodeid = gold_amr.findSubGraph(tok_alignment).root
# TODO for multiple PREDs, we need to do a for loop here
# check if the node has been constructed, for multiple PREDs
if gold_nodeid not in self.built_gold_nodeids:
self.built_gold_nodeids.append(gold_nodeid)
self.nodeid_to_gold_nodeid.setdefault(machine.new_node_id, []).append(gold_nodeid)
new_node = gold_amr.nodes[gold_nodeid]
if self.copy_lemma_action:
lemma = machine.get_current_token(lemma=True)
if lemma == new_node:
action = 'COPY_LEMMA'
elif f'{lemma}-01' == new_node:
action = 'COPY_SENSE01'
else:
action = f'PRED({new_node})'
else:
action = f'PRED({new_node})'
return action
else:
return None
def try_dependent(self):
"""
Check if the next action is DEPENDENT.
If
1) the aligned node has been predicted already
2)
        Only for :polarity and :mode: if an edge and node are aligned
to this token in the gold amr but does not exist in the predicted amr,
the oracle adds it using the DEPENDENT action.
"""
machine = self.machine
gold_amr = self.gold_amr
tok_id = machine.tok_cursor
node_id = machine.current_node_id
if node_id is None: # NOTE if node_id could be 0, 'if not node_id' would cause a bug
# the node has not been built at current step
return None
# NOTE this doesn't work for ENTITY now, as the mapping from ENTITY node is only to the source nodes in the
# aligned subgraph, whereas for the DEPENDENT we are checking the target nodes in the subgraph
# gold_nodeids = self.nodeid_to_gold_nodeid[node_id]
# gold_nodeids = list(set(gold_nodeids)) # just in case
gold_nodeids = gold_amr.alignmentsToken2Node(tok_id + 1)
# below is coupled with the PRED checks? and also the ENTITY
if len(gold_nodeids) == 1:
gold_nodeid = gold_nodeids[0]
else:
gold_nodeid = gold_amr.findSubGraph(gold_nodeids).root
for s, r, t in gold_amr.edges:
if s == gold_nodeid and r in [':polarity', ':mode']:
if (node_id, r) in [(e[0], e[1]) for e in machine.amr.edges]:
# to prevent same DEPENDENT added twice, as each time we scan all the possible edges
continue
if t not in gold_nodeids and (t in gold_amr.alignments and gold_amr.alignments[t]):
continue
self.built_gold_nodeids.append(t)
# NOTE this might affect the next DEPEDENT check, but is fine if we always use subgraph root
self.nodeid_to_gold_nodeid.setdefault(node_id, []).append(t)
new_edge = r[1:] if r.startswith(':') else r
new_node = gold_amr.nodes[t]
action = f'DEPENDENT({new_node},{new_edge})'
return action
return None
def try_arcs(self):
"""
Get the arcs that involve the current token aligned node.
If
1) currently is on a node that was just constructed
2) there are edges that have not been built with this node
"""
machine = self.machine
node_id = machine.current_node_id
if node_id is None: # NOTE if node_id could be 0, 'if not node_id' would cause a bug
# the node has not been built at current step
return None
#for act_id, act_node_id in enumerate(machine.actions_to_nodes):
for act_id, act_node_id in reversed(list(enumerate(machine.actions_to_nodes))):
if act_node_id is None:
continue
# for multiple nodes out of one token --> need to use node id to check edges
arc = self.get_arc(act_node_id, node_id)
if arc is None:
continue
arc_name, arc_label = arc
# avoid repetitive edges
if arc_name == 'LA':
if (node_id, arc_label, act_node_id) in machine.amr.edges:
continue
if arc_name == 'RA':
if (act_node_id, arc_label, node_id) in machine.amr.edges:
continue
# pointer value
arc_pos = act_id
return f'{arc_name}({arc_pos},{arc_label})'
return None
def get_arc(self, node_id1, node_id2):
"""
Get the arcs between node with `node_id1` and node with `node_id2`.
RA if there is an edge `node_id1` --> `node_id2`
LA if there is an edge `node_id2` <-- `node_id2`
Thus the order of inputs matter. (could also change to follow strict orders between these 2 ids)
# TODO could there be more than one edges?
# currently we only return the first one.
"""
gold_amr = self.gold_amr
# get the node ids in the gold AMR graph
nodes1 = self.nodeid_to_gold_nodeid[node_id1]
nodes2 = self.nodeid_to_gold_nodeid[node_id2]
if not isinstance(nodes1, list):
nodes1 = [nodes1]
if not isinstance(nodes2, list):
nodes2 = [nodes2]
if not nodes1 or not nodes2:
return None
# convert to single node aligned to each of these two tokens
if len(nodes1) > 1:
# get root of subgraph aligned to token 1
node1 = gold_amr.findSubGraph(nodes1).root
else:
node1 = nodes1[0]
if len(nodes2) > 1:
# get root of subgraph aligned to token 2
node2 = gold_amr.findSubGraph(nodes2).root
else:
node2 = nodes2[0]
# find edges
for s, r, t in gold_amr.edges:
if node1 == s and node2 == t:
return ('RA', r)
if node1 == t and node2 == s:
return ('LA', r)
return None
def run_oracle(gold_amrs, copy_lemma_action, multitask_words):
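    """Run the oracle over the whole corpus and collect tokens, actions, AMRs and PRED rule counts."""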
# Initialize lemmatizer as this is slow
lemmatizer = get_spacy_lemmatizer()
# This will store the oracle stats
statistics = {
'sentence_tokens': [],
'oracle_actions': [],
'oracle_amr': [],
'rules': {
# Will store count of PREDs given pointer position
'possible_predicates': defaultdict(lambda: Counter())
}
}
pred_re = re.compile(r'^PRED\((.*)\)$')
# Process AMRs one by one
for sent_idx, gold_amr in tqdm(enumerate(gold_amrs), desc='Oracle'):
# TODO: See if we can remove this part
gold_amr = gold_amr.copy()
gold_amr = preprocess_amr(gold_amr)
# Initialize oracle builder
oracle_builder = AMROracleBuilder(gold_amr, lemmatizer, copy_lemma_action, multitask_words)
# build the oracle actions sequence
actions = oracle_builder.build_oracle_actions()
# store data
statistics['sentence_tokens'].append(oracle_builder.tokens)
# do not write CLOSE action at the end;
# CLOSE action is internally managed, and treated same as <eos> in training
statistics['oracle_actions'].append(actions[:-1])
statistics['oracle_amr'].append(oracle_builder.machine.amr.toJAMRString())
# pred rules
for idx, action in enumerate(actions):
if pred_re.match(action):
node_name = pred_re.match(action).groups()[0]
token = oracle_builder.machine.actions_tokcursor[idx]
                statistics['rules']['possible_predicates'][token].update([node_name])
return statistics
def main():
# Argument handling
args = argument_parser()
global entities_with_preds
entities_with_preds = args.in_pred_entities.split(",")
# Load AMR (replace some unicode characters)
# TODO: unicode fixes and other normalizations should be applied more
# transparently
print(f'Reading {args.in_amr}')
corpus = read_amr(args.in_amr, unicode_fixes=True)
gold_amrs = corpus.amrs
# sanity check AMRS
print_corpus_info(gold_amrs)
sanity_check_amr(gold_amrs)
# Load propbank if provided
# TODO: Use here XML propbank reader instead of txt reader
propbank_args = None
if args.in_propbank_args:
propbank_args = read_propbank(args.in_propbank_args)
# read/write multi-task (labeled shift) action
# TODO: Add conditional if here
multitask_words = process_multitask_words(
[list(amr.tokens) for amr in gold_amrs],
args.multitask_max_words,
args.in_multitask_words,
args.out_multitask_words,
add_root=True
)
# run the oracle for the entire corpus
stats = run_oracle(gold_amrs, args.copy_lemma_action, multitask_words)
# print stats about actions
sanity_check_actions(stats['sentence_tokens'], stats['oracle_actions'])
# Save statistics
write_tokenized_sentences(
args.out_actions,
stats['oracle_actions'],
separator='\t'
)
write_tokenized_sentences(
args.out_sentences,
stats['sentence_tokens'],
separator='\t'
)
# State machine stats for this sentence
if args.out_rule_stats:
with open(args.out_rule_stats, 'w') as fid:
fid.write(json.dumps(stats['rules']))
if __name__ == '__main__':
main()
| 2.515625 | 3 |
src/diamond/collectors/hacheck/test/testhacheck.py | rohangulati/fullerite | 0 | 12798104 | <reponame>rohangulati/fullerite
#!/usr/bin/python
# coding=utf-8
################################################################################
from mock import Mock
from mock import patch
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from urllib2 import HTTPError
from diamond.collector import Collector
from hacheck import HacheckCollector
################################################################################
class TestHacheckCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('HacheckCollector', {})
self.collector = HacheckCollector(config, None)
@patch.object(Collector, 'publish')
@patch('urllib2.urlopen')
def test_works_with_real_data(self, urlopen_mock, publish_mock):
urlopen_mock.return_value = self.getFixture('metrics')
self.collector.collect()
self.assertPublishedMany(
publish_mock,
{
'hacheck.cache.expirations': 2692,
'hacheck.cache.sets': 2713,
'hacheck.cache.gets': 28460,
'hacheck.cache.hits': 25747,
'hacheck.cache.misses': 2713,
'hacheck.outbound_request_queue_size': 12
},
)
@patch.object(Collector, 'publish')
@patch('urllib2.urlopen')
def test_graceful_failure_on_http_error(self, urlopen_mock, publish_mock):
urlopen_mock.side_effect = HTTPError(
Mock(), Mock(), Mock(), Mock(), Mock())
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
@patch.object(Collector, 'publish')
@patch('urllib2.urlopen')
def test_graceful_failure_on_json_error(self, urlopen_mock, publish_mock):
urlopen_mock.return_value = self.getFixture('bad_metrics')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
################################################################################
if __name__ == "__main__":
unittest.main()
| 2.296875 | 2 |
experiments/chouse_auxiliary_supervised.py | jkulhanek/a2cat-vn-pytorch | 7 | 12798105 | from deep_rl import register_trainer
from deep_rl.core import AbstractTrainer
from deep_rl.core import MetricContext
from deep_rl.configuration import configuration
from environments.gym_house.goal import GoalImageCache
import os
import torch
import numpy as np
from torch.utils.data import Dataset,DataLoader
from models import AuxiliaryBigGoalHouseModel as Model
from torch.optim import Adam
import torch.nn.functional as F
from experiments.ai2_auxiliary.trainer import compute_auxiliary_target
class HouseDataset(Dataset):
def __init__(self, deconv_cell_size, transform = None):
self.image_cache = GoalImageCache((174,174), configuration.get('house3d.dataset_path'))
self.images = list(self.image_cache.all_image_paths(['rgb','depth','segmentation']))
self.transform = transform
self.deconv_cell_size = deconv_cell_size
def __len__(self):
return len(self.images)
def __getitem__(self, index):
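        """Load one (image, depth, segmentation) triple and downsample the targets to the deconv grid."""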
image, depth, segmentation = self.images[index]
image = self.image_cache.read_image(image)
depth = self.image_cache.read_image(depth)
segmentation = self.image_cache.read_image(segmentation)
image = torch.from_numpy(np.transpose(image.astype(np.float32), [2,0,1]) / 255.0).unsqueeze(0)
depth = torch.from_numpy(np.transpose(depth[:,:,:1].astype(np.float32), [2,0,1]) / 255.0).unsqueeze(0)
segmentation = torch.from_numpy(np.transpose(segmentation.astype(np.float32), [2,0,1]) / 255.0).unsqueeze(0)
segmentation = compute_auxiliary_target(segmentation.unsqueeze(0), self.deconv_cell_size, (42, 42)).squeeze(0)
depth = compute_auxiliary_target(depth.unsqueeze(0), self.deconv_cell_size, (42, 42)).squeeze(0)
ret = (image, depth, segmentation)
if self.transform:
ret = self.transform(ret)
return ret
@register_trainer(save = True, saving_period = 1)
class SupervisedTrained(AbstractTrainer):
def __init__(self, name, **kwargs):
super().__init__(dict(), dict())
self.name = name
self.batch_size = 32
self.main_device = torch.device('cuda')
def optimize(self, image, depth, segmentation):
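        """One supervised step: reconstruct depth and both segmentations from the image and backprop the mean MSE."""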
image = image.to(self.main_device)
depth = depth.to(self.main_device)
segmentation = segmentation.to(self.main_device)
        # random tensors stand in for the unused input slot of each forward pass
        noise1 = torch.rand((image.size()[0], 1, 3, 174, 174), dtype=torch.float32, device=self.main_device)
        noise2 = torch.rand((image.size()[0], 1, 3, 174, 174), dtype=torch.float32, device=self.main_device)
        (r_depth, r_segmentation, _), _ = self.model.forward_deconv(((image, noise1), None), None, None)
        (_, _, r_goal_segmentation), _ = self.model.forward_deconv(((noise2, image), None,), None, None)
loss = F.mse_loss(r_depth, depth) + F.mse_loss(r_segmentation, segmentation) + F.mse_loss(r_goal_segmentation, segmentation)
loss = loss / 3.0
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
return loss.item()
def save(self, path):
super().save(path)
torch.save(self.model.state_dict(), os.path.join(path, 'weights.pth'))
print('Saving to %s' % os.path.join(path, 'weights.pth'))
def process(self, mode = 'train', **kwargs):
assert mode == 'train'
# Single epoch
metric_context = MetricContext()
dataloader = DataLoader(self.dataset, batch_size=self.batch_size,shuffle=True, num_workers=4)
total_loss = 0
total_updates = 0
for i, item in enumerate(dataloader):
loss = self.optimize(*item)
print('loss is %s' % loss)
total_loss += loss
total_updates += 1
print('Epoch done with loss=%s' % (total_loss / total_updates))
return (1, (1, 1), metric_context)
def create_dataset(self, deconv_cell_size):
return HouseDataset(deconv_cell_size)
def _initialize(self):
model = Model(3, 6).to(self.main_device)
model_path = os.path.join(configuration.get('models_path'),'chouse-auxiliary4-supervised', 'weights.pth')
if os.path.isfile(model_path):
print('Loading %s' % model_path)
model.load_state_dict(torch.load(model_path))
self.dataset = self.create_dataset(model.deconv_cell_size)
self.optimizer = Adam(model.parameters())
return model
def run(self, process, **kwargs):
self.model = self._initialize()
for i in range(30):
print('Starting epoch %s' % (i + 1))
process()
def default_args():
return dict() | 2.140625 | 2 |
UI/main_window.py | b0bs0n/valo-crosshair | 0 | 12798106 | <gh_stars>0
# -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'main_window.ui'
##
## Created by: Qt User Interface Compiler version 6.2.2
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide6.QtCore import (QCoreApplication, QDate, QDateTime, QLocale,
QMetaObject, QObject, QPoint, QRect,
QSize, QTime, QUrl, Qt)
from PySide6.QtGui import (QBrush, QColor, QConicalGradient, QCursor,
QFont, QFontDatabase, QGradient, QIcon,
QImage, QKeySequence, QLinearGradient, QPainter,
QPalette, QPixmap, QRadialGradient, QTransform)
from PySide6.QtWidgets import (QApplication, QComboBox, QFrame, QGraphicsView,
QGridLayout, QHBoxLayout, QLabel, QLayout,
QLineEdit, QMainWindow, QPushButton, QSizePolicy,
QSlider, QWidget)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
if not MainWindow.objectName():
MainWindow.setObjectName(u"MainWindow")
MainWindow.setEnabled(True)
MainWindow.resize(541, 849)
sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
self.centralwidget = QWidget(MainWindow)
self.centralwidget.setObjectName(u"centralwidget")
self.gridLayoutWidget = QWidget(self.centralwidget)
self.gridLayoutWidget.setObjectName(u"gridLayoutWidget")
self.gridLayoutWidget.setGeometry(QRect(10, 10, 521, 766))
self.gridLayout = QGridLayout(self.gridLayoutWidget)
self.gridLayout.setObjectName(u"gridLayout")
self.gridLayout.setSizeConstraint(QLayout.SetDefaultConstraint)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.hlay_inner_opacity = QHBoxLayout()
self.hlay_inner_opacity.setObjectName(u"hlay_inner_opacity")
self.le_inner_opacity = QLineEdit(self.gridLayoutWidget)
self.le_inner_opacity.setObjectName(u"le_inner_opacity")
sizePolicy1 = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
sizePolicy1.setHorizontalStretch(0)
sizePolicy1.setVerticalStretch(0)
sizePolicy1.setHeightForWidth(self.le_inner_opacity.sizePolicy().hasHeightForWidth())
self.le_inner_opacity.setSizePolicy(sizePolicy1)
self.le_inner_opacity.setMinimumSize(QSize(0, 0))
self.le_inner_opacity.setMaximumSize(QSize(50, 16777215))
self.le_inner_opacity.setAlignment(Qt.AlignCenter)
self.le_inner_opacity.setReadOnly(False)
self.hlay_inner_opacity.addWidget(self.le_inner_opacity)
self.slide_inner_opacity = QSlider(self.gridLayoutWidget)
self.slide_inner_opacity.setObjectName(u"slide_inner_opacity")
self.slide_inner_opacity.setMinimumSize(QSize(230, 0))
self.slide_inner_opacity.setMaximum(1000)
self.slide_inner_opacity.setOrientation(Qt.Horizontal)
self.slide_inner_opacity.setTickPosition(QSlider.TicksBelow)
self.slide_inner_opacity.setTickInterval(100)
self.hlay_inner_opacity.addWidget(self.slide_inner_opacity)
self.gridLayout.addLayout(self.hlay_inner_opacity, 13, 1, 1, 2)
self.btn_outline_on = QPushButton(self.gridLayoutWidget)
self.btn_outline_on.setObjectName(u"btn_outline_on")
self.btn_outline_on.setCheckable(True)
self.btn_outline_on.setChecked(True)
self.btn_outline_on.setAutoDefault(False)
self.btn_outline_on.setFlat(False)
self.gridLayout.addWidget(self.btn_outline_on, 4, 1, 1, 1)
self.lbl_outline_opacity = QLabel(self.gridLayoutWidget)
self.lbl_outline_opacity.setObjectName(u"lbl_outline_opacity")
sizePolicy2 = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy2.setHorizontalStretch(0)
sizePolicy2.setVerticalStretch(0)
sizePolicy2.setHeightForWidth(self.lbl_outline_opacity.sizePolicy().hasHeightForWidth())
self.lbl_outline_opacity.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_outline_opacity, 5, 0, 1, 1)
self.lbl_crosshair_color = QLabel(self.gridLayoutWidget)
self.lbl_crosshair_color.setObjectName(u"lbl_crosshair_color")
sizePolicy2.setHeightForWidth(self.lbl_crosshair_color.sizePolicy().hasHeightForWidth())
self.lbl_crosshair_color.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_crosshair_color, 3, 0, 1, 1)
self.lbl_inner_lines_offset = QLabel(self.gridLayoutWidget)
self.lbl_inner_lines_offset.setObjectName(u"lbl_inner_lines_offset")
sizePolicy2.setHeightForWidth(self.lbl_inner_lines_offset.sizePolicy().hasHeightForWidth())
self.lbl_inner_lines_offset.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_inner_lines_offset, 16, 0, 1, 1)
self.hlay_inner_offset = QHBoxLayout()
self.hlay_inner_offset.setObjectName(u"hlay_inner_offset")
self.le_inner_offset = QLineEdit(self.gridLayoutWidget)
self.le_inner_offset.setObjectName(u"le_inner_offset")
sizePolicy1.setHeightForWidth(self.le_inner_offset.sizePolicy().hasHeightForWidth())
self.le_inner_offset.setSizePolicy(sizePolicy1)
self.le_inner_offset.setMinimumSize(QSize(0, 0))
self.le_inner_offset.setMaximumSize(QSize(50, 16777215))
self.le_inner_offset.setAlignment(Qt.AlignCenter)
self.le_inner_offset.setReadOnly(False)
self.hlay_inner_offset.addWidget(self.le_inner_offset)
self.slide_inner_offset = QSlider(self.gridLayoutWidget)
self.slide_inner_offset.setObjectName(u"slide_inner_offset")
self.slide_inner_offset.setMinimumSize(QSize(230, 0))
self.slide_inner_offset.setMinimum(0)
self.slide_inner_offset.setMaximum(20)
self.slide_inner_offset.setOrientation(Qt.Horizontal)
self.slide_inner_offset.setTickPosition(QSlider.TicksBelow)
self.slide_inner_offset.setTickInterval(1)
self.hlay_inner_offset.addWidget(self.slide_inner_offset)
self.gridLayout.addLayout(self.hlay_inner_offset, 16, 1, 1, 2)
self.hlay_outer_thck = QHBoxLayout()
self.hlay_outer_thck.setObjectName(u"hlay_outer_thck")
self.le_outer_thck = QLineEdit(self.gridLayoutWidget)
self.le_outer_thck.setObjectName(u"le_outer_thck")
sizePolicy1.setHeightForWidth(self.le_outer_thck.sizePolicy().hasHeightForWidth())
self.le_outer_thck.setSizePolicy(sizePolicy1)
self.le_outer_thck.setMinimumSize(QSize(0, 0))
self.le_outer_thck.setMaximumSize(QSize(50, 16777215))
self.le_outer_thck.setAlignment(Qt.AlignCenter)
self.le_outer_thck.setReadOnly(False)
self.hlay_outer_thck.addWidget(self.le_outer_thck)
self.slide_outer_thck = QSlider(self.gridLayoutWidget)
self.slide_outer_thck.setObjectName(u"slide_outer_thck")
self.slide_outer_thck.setMinimumSize(QSize(230, 0))
self.slide_outer_thck.setMinimum(0)
self.slide_outer_thck.setMaximum(10)
self.slide_outer_thck.setOrientation(Qt.Horizontal)
self.slide_outer_thck.setTickPosition(QSlider.TicksBelow)
self.slide_outer_thck.setTickInterval(1)
self.hlay_outer_thck.addWidget(self.slide_outer_thck)
self.gridLayout.addLayout(self.hlay_outer_thck, 21, 1, 1, 2)
self.lbl_ch_select = QLabel(self.gridLayoutWidget)
self.lbl_ch_select.setObjectName(u"lbl_ch_select")
sizePolicy2.setHeightForWidth(self.lbl_ch_select.sizePolicy().hasHeightForWidth())
self.lbl_ch_select.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_ch_select, 1, 0, 1, 1)
self.btn_outer_on = QPushButton(self.gridLayoutWidget)
self.btn_outer_on.setObjectName(u"btn_outer_on")
self.btn_outer_on.setCheckable(True)
self.btn_outer_on.setChecked(True)
self.btn_outer_on.setAutoDefault(False)
self.gridLayout.addWidget(self.btn_outer_on, 18, 1, 1, 1)
self.hlay_outer_offset = QHBoxLayout()
self.hlay_outer_offset.setObjectName(u"hlay_outer_offset")
self.le_outer_offset = QLineEdit(self.gridLayoutWidget)
self.le_outer_offset.setObjectName(u"le_outer_offset")
sizePolicy1.setHeightForWidth(self.le_outer_offset.sizePolicy().hasHeightForWidth())
self.le_outer_offset.setSizePolicy(sizePolicy1)
self.le_outer_offset.setMinimumSize(QSize(0, 0))
self.le_outer_offset.setMaximumSize(QSize(50, 16777215))
self.le_outer_offset.setAlignment(Qt.AlignCenter)
self.le_outer_offset.setReadOnly(False)
self.hlay_outer_offset.addWidget(self.le_outer_offset)
self.slide_outer_offset = QSlider(self.gridLayoutWidget)
self.slide_outer_offset.setObjectName(u"slide_outer_offset")
self.slide_outer_offset.setMinimumSize(QSize(230, 0))
self.slide_outer_offset.setMinimum(0)
self.slide_outer_offset.setMaximum(20)
self.slide_outer_offset.setOrientation(Qt.Horizontal)
self.slide_outer_offset.setTickPosition(QSlider.TicksBelow)
self.slide_outer_offset.setTickInterval(1)
self.hlay_outer_offset.addWidget(self.slide_outer_offset)
self.gridLayout.addLayout(self.hlay_outer_offset, 22, 1, 1, 2)
self.hlay_outer_opacity = QHBoxLayout()
self.hlay_outer_opacity.setObjectName(u"hlay_outer_opacity")
self.le_outer_opacity = QLineEdit(self.gridLayoutWidget)
self.le_outer_opacity.setObjectName(u"le_outer_opacity")
sizePolicy1.setHeightForWidth(self.le_outer_opacity.sizePolicy().hasHeightForWidth())
self.le_outer_opacity.setSizePolicy(sizePolicy1)
self.le_outer_opacity.setMinimumSize(QSize(0, 0))
self.le_outer_opacity.setMaximumSize(QSize(50, 16777215))
self.le_outer_opacity.setAlignment(Qt.AlignCenter)
self.le_outer_opacity.setReadOnly(False)
self.hlay_outer_opacity.addWidget(self.le_outer_opacity)
self.slide_outer_opacity = QSlider(self.gridLayoutWidget)
self.slide_outer_opacity.setObjectName(u"slide_outer_opacity")
self.slide_outer_opacity.setMinimumSize(QSize(230, 0))
self.slide_outer_opacity.setMaximum(1000)
self.slide_outer_opacity.setOrientation(Qt.Horizontal)
self.slide_outer_opacity.setTickPosition(QSlider.TicksBelow)
self.slide_outer_opacity.setTickInterval(100)
self.hlay_outer_opacity.addWidget(self.slide_outer_opacity)
self.gridLayout.addLayout(self.hlay_outer_opacity, 19, 1, 1, 2)
self.hlay_inner_length = QHBoxLayout()
self.hlay_inner_length.setObjectName(u"hlay_inner_length")
self.le_inner_length = QLineEdit(self.gridLayoutWidget)
self.le_inner_length.setObjectName(u"le_inner_length")
sizePolicy1.setHeightForWidth(self.le_inner_length.sizePolicy().hasHeightForWidth())
self.le_inner_length.setSizePolicy(sizePolicy1)
self.le_inner_length.setMinimumSize(QSize(0, 0))
self.le_inner_length.setMaximumSize(QSize(50, 16777215))
self.le_inner_length.setAlignment(Qt.AlignCenter)
self.le_inner_length.setReadOnly(False)
self.hlay_inner_length.addWidget(self.le_inner_length)
self.slide_inner_length = QSlider(self.gridLayoutWidget)
self.slide_inner_length.setObjectName(u"slide_inner_length")
self.slide_inner_length.setMinimumSize(QSize(230, 0))
self.slide_inner_length.setMinimum(0)
self.slide_inner_length.setMaximum(20)
self.slide_inner_length.setOrientation(Qt.Horizontal)
self.slide_inner_length.setTickPosition(QSlider.TicksBelow)
self.slide_inner_length.setTickInterval(1)
self.hlay_inner_length.addWidget(self.slide_inner_length)
self.gridLayout.addLayout(self.hlay_inner_length, 14, 1, 1, 2)
self.lbl_stretch_res = QLabel(self.gridLayoutWidget)
self.lbl_stretch_res.setObjectName(u"lbl_stretch_res")
self.gridLayout.addWidget(self.lbl_stretch_res, 24, 0, 1, 1)
self.btn_outline_off = QPushButton(self.gridLayoutWidget)
self.btn_outline_off.setObjectName(u"btn_outline_off")
self.btn_outline_off.setCheckable(True)
self.btn_outline_off.setChecked(False)
self.btn_outline_off.setAutoDefault(False)
self.gridLayout.addWidget(self.btn_outline_off, 4, 2, 1, 1)
self.hlay_outline_opacity = QHBoxLayout()
self.hlay_outline_opacity.setObjectName(u"hlay_outline_opacity")
self.le_outline_opacity = QLineEdit(self.gridLayoutWidget)
self.le_outline_opacity.setObjectName(u"le_outline_opacity")
sizePolicy1.setHeightForWidth(self.le_outline_opacity.sizePolicy().hasHeightForWidth())
self.le_outline_opacity.setSizePolicy(sizePolicy1)
self.le_outline_opacity.setMinimumSize(QSize(0, 0))
self.le_outline_opacity.setMaximumSize(QSize(50, 16777215))
self.le_outline_opacity.setAlignment(Qt.AlignCenter)
self.le_outline_opacity.setReadOnly(False)
self.hlay_outline_opacity.addWidget(self.le_outline_opacity)
self.slide_outline_opacity = QSlider(self.gridLayoutWidget)
self.slide_outline_opacity.setObjectName(u"slide_outline_opacity")
self.slide_outline_opacity.setMinimumSize(QSize(230, 0))
self.slide_outline_opacity.setMaximum(1000)
self.slide_outline_opacity.setOrientation(Qt.Horizontal)
self.slide_outline_opacity.setTickPosition(QSlider.TicksBelow)
self.slide_outline_opacity.setTickInterval(100)
self.hlay_outline_opacity.addWidget(self.slide_outline_opacity)
self.gridLayout.addLayout(self.hlay_outline_opacity, 5, 1, 1, 2)
self.btn_inner_on = QPushButton(self.gridLayoutWidget)
self.btn_inner_on.setObjectName(u"btn_inner_on")
self.btn_inner_on.setCheckable(True)
self.btn_inner_on.setChecked(True)
self.btn_inner_on.setAutoDefault(False)
self.gridLayout.addWidget(self.btn_inner_on, 12, 1, 1, 1)
self.lbl_inner_lines_length = QLabel(self.gridLayoutWidget)
self.lbl_inner_lines_length.setObjectName(u"lbl_inner_lines_length")
sizePolicy2.setHeightForWidth(self.lbl_inner_lines_length.sizePolicy().hasHeightForWidth())
self.lbl_inner_lines_length.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_inner_lines_length, 14, 0, 1, 1)
self.lbl_inner_lines_thck = QLabel(self.gridLayoutWidget)
self.lbl_inner_lines_thck.setObjectName(u"lbl_inner_lines_thck")
sizePolicy2.setHeightForWidth(self.lbl_inner_lines_thck.sizePolicy().hasHeightForWidth())
self.lbl_inner_lines_thck.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_inner_lines_thck, 15, 0, 1, 1)
self.lbl_outer_lines_offset = QLabel(self.gridLayoutWidget)
self.lbl_outer_lines_offset.setObjectName(u"lbl_outer_lines_offset")
sizePolicy2.setHeightForWidth(self.lbl_outer_lines_offset.sizePolicy().hasHeightForWidth())
self.lbl_outer_lines_offset.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_outer_lines_offset, 22, 0, 1, 1)
self.lbl_inner_lines_length_2 = QLabel(self.gridLayoutWidget)
self.lbl_inner_lines_length_2.setObjectName(u"lbl_inner_lines_length_2")
sizePolicy2.setHeightForWidth(self.lbl_inner_lines_length_2.sizePolicy().hasHeightForWidth())
self.lbl_inner_lines_length_2.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_inner_lines_length_2, 20, 0, 1, 1)
self.btn_outer_off = QPushButton(self.gridLayoutWidget)
self.btn_outer_off.setObjectName(u"btn_outer_off")
self.btn_outer_off.setCheckable(True)
self.btn_outer_off.setAutoDefault(False)
self.gridLayout.addWidget(self.btn_outer_off, 18, 2, 1, 1)
self.hlay_inner_thck = QHBoxLayout()
self.hlay_inner_thck.setObjectName(u"hlay_inner_thck")
self.le_inner_thck = QLineEdit(self.gridLayoutWidget)
self.le_inner_thck.setObjectName(u"le_inner_thck")
sizePolicy1.setHeightForWidth(self.le_inner_thck.sizePolicy().hasHeightForWidth())
self.le_inner_thck.setSizePolicy(sizePolicy1)
self.le_inner_thck.setMinimumSize(QSize(0, 0))
self.le_inner_thck.setMaximumSize(QSize(50, 16777215))
self.le_inner_thck.setAlignment(Qt.AlignCenter)
self.le_inner_thck.setReadOnly(False)
self.hlay_inner_thck.addWidget(self.le_inner_thck)
self.slide_inner_thck = QSlider(self.gridLayoutWidget)
self.slide_inner_thck.setObjectName(u"slide_inner_thck")
self.slide_inner_thck.setMinimumSize(QSize(230, 0))
self.slide_inner_thck.setMinimum(0)
self.slide_inner_thck.setMaximum(10)
self.slide_inner_thck.setOrientation(Qt.Horizontal)
self.slide_inner_thck.setTickPosition(QSlider.TicksBelow)
self.slide_inner_thck.setTickInterval(1)
self.hlay_inner_thck.addWidget(self.slide_inner_thck)
self.gridLayout.addLayout(self.hlay_inner_thck, 15, 1, 1, 2)
self.lbl_outer_lines_thck = QLabel(self.gridLayoutWidget)
self.lbl_outer_lines_thck.setObjectName(u"lbl_outer_lines_thck")
sizePolicy2.setHeightForWidth(self.lbl_outer_lines_thck.sizePolicy().hasHeightForWidth())
self.lbl_outer_lines_thck.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_outer_lines_thck, 21, 0, 1, 1)
self.hlay_dot_thck = QHBoxLayout()
self.hlay_dot_thck.setObjectName(u"hlay_dot_thck")
self.le_dot_thck = QLineEdit(self.gridLayoutWidget)
self.le_dot_thck.setObjectName(u"le_dot_thck")
sizePolicy1.setHeightForWidth(self.le_dot_thck.sizePolicy().hasHeightForWidth())
self.le_dot_thck.setSizePolicy(sizePolicy1)
self.le_dot_thck.setMinimumSize(QSize(0, 0))
self.le_dot_thck.setMaximumSize(QSize(50, 16777215))
self.le_dot_thck.setAlignment(Qt.AlignCenter)
self.le_dot_thck.setReadOnly(False)
self.hlay_dot_thck.addWidget(self.le_dot_thck)
self.slide_dot_thck = QSlider(self.gridLayoutWidget)
self.slide_dot_thck.setObjectName(u"slide_dot_thck")
self.slide_dot_thck.setMinimumSize(QSize(230, 0))
self.slide_dot_thck.setMinimum(1)
self.slide_dot_thck.setMaximum(6)
self.slide_dot_thck.setOrientation(Qt.Horizontal)
self.slide_dot_thck.setTickPosition(QSlider.TicksBelow)
self.slide_dot_thck.setTickInterval(1)
self.hlay_dot_thck.addWidget(self.slide_dot_thck)
self.gridLayout.addLayout(self.hlay_dot_thck, 9, 1, 1, 2)
self.hlay_outline_thck = QHBoxLayout()
self.hlay_outline_thck.setObjectName(u"hlay_outline_thck")
self.le_outline_thck = QLineEdit(self.gridLayoutWidget)
self.le_outline_thck.setObjectName(u"le_outline_thck")
sizePolicy1.setHeightForWidth(self.le_outline_thck.sizePolicy().hasHeightForWidth())
self.le_outline_thck.setSizePolicy(sizePolicy1)
self.le_outline_thck.setMinimumSize(QSize(0, 0))
self.le_outline_thck.setMaximumSize(QSize(50, 16777215))
self.le_outline_thck.setAlignment(Qt.AlignCenter)
self.le_outline_thck.setReadOnly(False)
self.hlay_outline_thck.addWidget(self.le_outline_thck)
self.slide_outline_thck = QSlider(self.gridLayoutWidget)
self.slide_outline_thck.setObjectName(u"slide_outline_thck")
self.slide_outline_thck.setMinimumSize(QSize(230, 0))
self.slide_outline_thck.setMinimum(1)
self.slide_outline_thck.setMaximum(6)
self.slide_outline_thck.setOrientation(Qt.Horizontal)
self.slide_outline_thck.setTickPosition(QSlider.TicksBelow)
self.slide_outline_thck.setTickInterval(1)
self.hlay_outline_thck.addWidget(self.slide_outline_thck)
self.gridLayout.addLayout(self.hlay_outline_thck, 6, 1, 1, 2)
self.qcb_crosshair_color = QComboBox(self.gridLayoutWidget)
self.qcb_crosshair_color.setObjectName(u"qcb_crosshair_color")
self.gridLayout.addWidget(self.qcb_crosshair_color, 3, 1, 1, 2)
self.lbl_outer_lines_show = QLabel(self.gridLayoutWidget)
self.lbl_outer_lines_show.setObjectName(u"lbl_outer_lines_show")
sizePolicy2.setHeightForWidth(self.lbl_outer_lines_show.sizePolicy().hasHeightForWidth())
self.lbl_outer_lines_show.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_outer_lines_show, 18, 0, 1, 1)
self.btn_dot_off = QPushButton(self.gridLayoutWidget)
self.btn_dot_off.setObjectName(u"btn_dot_off")
self.btn_dot_off.setCheckable(True)
self.btn_dot_off.setAutoDefault(False)
self.gridLayout.addWidget(self.btn_dot_off, 7, 2, 1, 1)
self.lbl_inner_lines_show = QLabel(self.gridLayoutWidget)
self.lbl_inner_lines_show.setObjectName(u"lbl_inner_lines_show")
sizePolicy2.setHeightForWidth(self.lbl_inner_lines_show.sizePolicy().hasHeightForWidth())
self.lbl_inner_lines_show.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_inner_lines_show, 12, 0, 1, 1)
self.lbl_outer_lines = QLabel(self.gridLayoutWidget)
self.lbl_outer_lines.setObjectName(u"lbl_outer_lines")
sizePolicy3 = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Expanding)
sizePolicy3.setHorizontalStretch(0)
sizePolicy3.setVerticalStretch(0)
sizePolicy3.setHeightForWidth(self.lbl_outer_lines.sizePolicy().hasHeightForWidth())
self.lbl_outer_lines.setSizePolicy(sizePolicy3)
font = QFont()
font.setPointSize(12)
self.lbl_outer_lines.setFont(font)
self.lbl_outer_lines.setTextFormat(Qt.AutoText)
self.gridLayout.addWidget(self.lbl_outer_lines, 17, 0, 1, 3)
self.lbl_outlines = QLabel(self.gridLayoutWidget)
self.lbl_outlines.setObjectName(u"lbl_outlines")
sizePolicy2.setHeightForWidth(self.lbl_outlines.sizePolicy().hasHeightForWidth())
self.lbl_outlines.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_outlines, 4, 0, 1, 1)
self.btn_inner_off = QPushButton(self.gridLayoutWidget)
self.btn_inner_off.setObjectName(u"btn_inner_off")
self.btn_inner_off.setCheckable(True)
self.btn_inner_off.setAutoDefault(False)
self.gridLayout.addWidget(self.btn_inner_off, 12, 2, 1, 1)
self.hlay_dot_opacity = QHBoxLayout()
self.hlay_dot_opacity.setObjectName(u"hlay_dot_opacity")
self.le_dot_opacity = QLineEdit(self.gridLayoutWidget)
self.le_dot_opacity.setObjectName(u"le_dot_opacity")
sizePolicy1.setHeightForWidth(self.le_dot_opacity.sizePolicy().hasHeightForWidth())
self.le_dot_opacity.setSizePolicy(sizePolicy1)
self.le_dot_opacity.setMinimumSize(QSize(0, 0))
self.le_dot_opacity.setMaximumSize(QSize(50, 16777215))
self.le_dot_opacity.setAlignment(Qt.AlignCenter)
self.le_dot_opacity.setReadOnly(False)
self.hlay_dot_opacity.addWidget(self.le_dot_opacity)
self.slide_dot_opacity = QSlider(self.gridLayoutWidget)
self.slide_dot_opacity.setObjectName(u"slide_dot_opacity")
self.slide_dot_opacity.setMinimumSize(QSize(230, 0))
self.slide_dot_opacity.setMaximum(1000)
self.slide_dot_opacity.setOrientation(Qt.Horizontal)
self.slide_dot_opacity.setTickPosition(QSlider.TicksBelow)
self.slide_dot_opacity.setTickInterval(100)
self.hlay_dot_opacity.addWidget(self.slide_dot_opacity)
self.gridLayout.addLayout(self.hlay_dot_opacity, 8, 1, 1, 2)
self.hlay_ch_select = QHBoxLayout()
self.hlay_ch_select.setObjectName(u"hlay_ch_select")
self.qcb_ch_select = QComboBox(self.gridLayoutWidget)
self.qcb_ch_select.setObjectName(u"qcb_ch_select")
self.qcb_ch_select.setMaximumSize(QSize(300, 16777215))
self.hlay_ch_select.addWidget(self.qcb_ch_select)
self.qgv_crosshair = QGraphicsView(self.gridLayoutWidget)
self.qgv_crosshair.setObjectName(u"qgv_crosshair")
sizePolicy.setHeightForWidth(self.qgv_crosshair.sizePolicy().hasHeightForWidth())
self.qgv_crosshair.setSizePolicy(sizePolicy)
self.qgv_crosshair.setMinimumSize(QSize(50, 50))
self.qgv_crosshair.setMaximumSize(QSize(50, 50))
self.qgv_crosshair.setBaseSize(QSize(50, 50))
self.qgv_crosshair.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.qgv_crosshair.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.hlay_ch_select.addWidget(self.qgv_crosshair)
self.gridLayout.addLayout(self.hlay_ch_select, 1, 1, 1, 2)
self.lbl_outer_lines_opacity = QLabel(self.gridLayoutWidget)
self.lbl_outer_lines_opacity.setObjectName(u"lbl_outer_lines_opacity")
sizePolicy2.setHeightForWidth(self.lbl_outer_lines_opacity.sizePolicy().hasHeightForWidth())
self.lbl_outer_lines_opacity.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_outer_lines_opacity, 19, 0, 1, 1)
self.lbl_center_dot_thck = QLabel(self.gridLayoutWidget)
self.lbl_center_dot_thck.setObjectName(u"lbl_center_dot_thck")
sizePolicy2.setHeightForWidth(self.lbl_center_dot_thck.sizePolicy().hasHeightForWidth())
self.lbl_center_dot_thck.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_center_dot_thck, 9, 0, 1, 1)
self.btn_dot_on = QPushButton(self.gridLayoutWidget)
self.btn_dot_on.setObjectName(u"btn_dot_on")
self.btn_dot_on.setCheckable(True)
self.btn_dot_on.setChecked(True)
self.btn_dot_on.setAutoDefault(False)
self.gridLayout.addWidget(self.btn_dot_on, 7, 1, 1, 1)
self.lbl_center_dot_opacity = QLabel(self.gridLayoutWidget)
self.lbl_center_dot_opacity.setObjectName(u"lbl_center_dot_opacity")
sizePolicy2.setHeightForWidth(self.lbl_center_dot_opacity.sizePolicy().hasHeightForWidth())
self.lbl_center_dot_opacity.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_center_dot_opacity, 8, 0, 1, 1)
self.lbl_center_dot = QLabel(self.gridLayoutWidget)
self.lbl_center_dot.setObjectName(u"lbl_center_dot")
sizePolicy2.setHeightForWidth(self.lbl_center_dot.sizePolicy().hasHeightForWidth())
self.lbl_center_dot.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_center_dot, 7, 0, 1, 1)
self.lbl_inner_lines_opacity = QLabel(self.gridLayoutWidget)
self.lbl_inner_lines_opacity.setObjectName(u"lbl_inner_lines_opacity")
sizePolicy2.setHeightForWidth(self.lbl_inner_lines_opacity.sizePolicy().hasHeightForWidth())
self.lbl_inner_lines_opacity.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_inner_lines_opacity, 13, 0, 1, 1)
self.lbl_outline_thck = QLabel(self.gridLayoutWidget)
self.lbl_outline_thck.setObjectName(u"lbl_outline_thck")
sizePolicy2.setHeightForWidth(self.lbl_outline_thck.sizePolicy().hasHeightForWidth())
self.lbl_outline_thck.setSizePolicy(sizePolicy2)
self.gridLayout.addWidget(self.lbl_outline_thck, 6, 0, 1, 1)
self.hlay_outer_length = QHBoxLayout()
self.hlay_outer_length.setObjectName(u"hlay_outer_length")
self.le_outer_length = QLineEdit(self.gridLayoutWidget)
self.le_outer_length.setObjectName(u"le_outer_length")
sizePolicy1.setHeightForWidth(self.le_outer_length.sizePolicy().hasHeightForWidth())
self.le_outer_length.setSizePolicy(sizePolicy1)
self.le_outer_length.setMinimumSize(QSize(0, 0))
self.le_outer_length.setMaximumSize(QSize(50, 16777215))
self.le_outer_length.setAlignment(Qt.AlignCenter)
self.le_outer_length.setReadOnly(False)
self.hlay_outer_length.addWidget(self.le_outer_length)
self.slide_outer_length = QSlider(self.gridLayoutWidget)
self.slide_outer_length.setObjectName(u"slide_outer_length")
self.slide_outer_length.setMinimumSize(QSize(230, 0))
self.slide_outer_length.setMinimum(0)
self.slide_outer_length.setMaximum(20)
self.slide_outer_length.setOrientation(Qt.Horizontal)
self.slide_outer_length.setTickPosition(QSlider.TicksBelow)
self.slide_outer_length.setTickInterval(1)
self.hlay_outer_length.addWidget(self.slide_outer_length)
self.gridLayout.addLayout(self.hlay_outer_length, 20, 1, 1, 2)
self.lbl_crosshair = QLabel(self.gridLayoutWidget)
self.lbl_crosshair.setObjectName(u"lbl_crosshair")
sizePolicy3.setHeightForWidth(self.lbl_crosshair.sizePolicy().hasHeightForWidth())
self.lbl_crosshair.setSizePolicy(sizePolicy3)
self.lbl_crosshair.setFont(font)
self.lbl_crosshair.setTextFormat(Qt.AutoText)
self.gridLayout.addWidget(self.lbl_crosshair, 2, 0, 1, 3)
self.lbl_inner_lines = QLabel(self.gridLayoutWidget)
self.lbl_inner_lines.setObjectName(u"lbl_inner_lines")
sizePolicy3.setHeightForWidth(self.lbl_inner_lines.sizePolicy().hasHeightForWidth())
self.lbl_inner_lines.setSizePolicy(sizePolicy3)
self.lbl_inner_lines.setFont(font)
self.lbl_inner_lines.setTextFormat(Qt.AutoText)
self.gridLayout.addWidget(self.lbl_inner_lines, 10, 0, 1, 3)
self.lbl_screen_stretch = QLabel(self.gridLayoutWidget)
self.lbl_screen_stretch.setObjectName(u"lbl_screen_stretch")
self.lbl_screen_stretch.setFont(font)
self.gridLayout.addWidget(self.lbl_screen_stretch, 23, 0, 1, 1)
self.horizontalLayout = QHBoxLayout()
self.horizontalLayout.setObjectName(u"horizontalLayout")
self.le_res_screen_w = QLineEdit(self.gridLayoutWidget)
self.le_res_screen_w.setObjectName(u"le_res_screen_w")
sizePolicy4 = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
sizePolicy4.setHorizontalStretch(0)
sizePolicy4.setVerticalStretch(0)
sizePolicy4.setHeightForWidth(self.le_res_screen_w.sizePolicy().hasHeightForWidth())
self.le_res_screen_w.setSizePolicy(sizePolicy4)
self.le_res_screen_w.setAlignment(Qt.AlignCenter)
self.horizontalLayout.addWidget(self.le_res_screen_w)
self.le_res_screen_h = QLineEdit(self.gridLayoutWidget)
self.le_res_screen_h.setObjectName(u"le_res_screen_h")
sizePolicy4.setHeightForWidth(self.le_res_screen_h.sizePolicy().hasHeightForWidth())
self.le_res_screen_h.setSizePolicy(sizePolicy4)
self.le_res_screen_h.setAlignment(Qt.AlignCenter)
self.horizontalLayout.addWidget(self.le_res_screen_h)
self.line = QFrame(self.gridLayoutWidget)
self.line.setObjectName(u"line")
self.line.setFrameShape(QFrame.VLine)
self.line.setFrameShadow(QFrame.Sunken)
self.horizontalLayout.addWidget(self.line)
self.le_res_game_w = QLineEdit(self.gridLayoutWidget)
self.le_res_game_w.setObjectName(u"le_res_game_w")
sizePolicy4.setHeightForWidth(self.le_res_game_w.sizePolicy().hasHeightForWidth())
self.le_res_game_w.setSizePolicy(sizePolicy4)
self.le_res_game_w.setAlignment(Qt.AlignCenter)
self.horizontalLayout.addWidget(self.le_res_game_w)
self.le_res_game_h = QLineEdit(self.gridLayoutWidget)
self.le_res_game_h.setObjectName(u"le_res_game_h")
sizePolicy4.setHeightForWidth(self.le_res_game_h.sizePolicy().hasHeightForWidth())
self.le_res_game_h.setSizePolicy(sizePolicy4)
self.le_res_game_h.setAlignment(Qt.AlignCenter)
self.horizontalLayout.addWidget(self.le_res_game_h)
self.btn_stretch_apply = QPushButton(self.gridLayoutWidget)
self.btn_stretch_apply.setObjectName(u"btn_stretch_apply")
sizePolicy5 = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
sizePolicy5.setHorizontalStretch(0)
sizePolicy5.setVerticalStretch(0)
sizePolicy5.setHeightForWidth(self.btn_stretch_apply.sizePolicy().hasHeightForWidth())
self.btn_stretch_apply.setSizePolicy(sizePolicy5)
self.btn_stretch_apply.setMinimumSize(QSize(40, 0))
self.horizontalLayout.addWidget(self.btn_stretch_apply)
self.gridLayout.addLayout(self.horizontalLayout, 24, 1, 1, 2)
self.btn_save_ch = QPushButton(self.centralwidget)
self.btn_save_ch.setObjectName(u"btn_save_ch")
self.btn_save_ch.setGeometry(QRect(10, 800, 141, 41))
self.lbl_link = QLabel(self.centralwidget)
self.lbl_link.setObjectName(u"lbl_link")
self.lbl_link.setGeometry(QRect(280, 820, 241, 20))
self.lbl_link.setTextFormat(Qt.RichText)
self.lbl_link.setAlignment(Qt.AlignRight|Qt.AlignTrailing|Qt.AlignVCenter)
self.lbl_link.setOpenExternalLinks(True)
self.btn_del_ch = QPushButton(self.centralwidget)
self.btn_del_ch.setObjectName(u"btn_del_ch")
self.btn_del_ch.setGeometry(QRect(180, 800, 141, 41))
self.lbl_err_msg = QLabel(self.centralwidget)
self.lbl_err_msg.setObjectName(u"lbl_err_msg")
self.lbl_err_msg.setGeometry(QRect(10, 780, 521, 20))
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
self.btn_outline_on.setDefault(False)
self.btn_outer_on.setDefault(False)
self.btn_outline_off.setDefault(False)
self.btn_inner_on.setDefault(False)
self.btn_dot_on.setDefault(False)
QMetaObject.connectSlotsByName(MainWindow)
# setupUi
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QCoreApplication.translate("MainWindow", u"Crosshair Manager", None))
self.le_inner_opacity.setText(QCoreApplication.translate("MainWindow", u"0.000", None))
self.btn_outline_on.setText(QCoreApplication.translate("MainWindow", u"On", None))
self.lbl_outline_opacity.setText(QCoreApplication.translate("MainWindow", u"Outline Opacity", None))
self.lbl_crosshair_color.setText(QCoreApplication.translate("MainWindow", u"Crosshair Color", None))
self.lbl_inner_lines_offset.setText(QCoreApplication.translate("MainWindow", u"Inner Lines Offset", None))
self.le_inner_offset.setText(QCoreApplication.translate("MainWindow", u"0", None))
self.le_outer_thck.setText(QCoreApplication.translate("MainWindow", u"0", None))
self.lbl_ch_select.setText(QCoreApplication.translate("MainWindow", u"Select Crosshair", None))
self.btn_outer_on.setText(QCoreApplication.translate("MainWindow", u"On", None))
self.le_outer_offset.setText(QCoreApplication.translate("MainWindow", u"0", None))
self.le_outer_opacity.setText(QCoreApplication.translate("MainWindow", u"0.000", None))
self.le_inner_length.setText(QCoreApplication.translate("MainWindow", u"0", None))
self.lbl_stretch_res.setText(QCoreApplication.translate("MainWindow", u"Screen Res / Game Res", None))
self.btn_outline_off.setText(QCoreApplication.translate("MainWindow", u"Off", None))
self.le_outline_opacity.setText(QCoreApplication.translate("MainWindow", u"0.000", None))
self.btn_inner_on.setText(QCoreApplication.translate("MainWindow", u"On", None))
self.lbl_inner_lines_length.setText(QCoreApplication.translate("MainWindow", u"Inner Lines Length", None))
self.lbl_inner_lines_thck.setText(QCoreApplication.translate("MainWindow", u"Inner Lines Thiccness", None))
self.lbl_outer_lines_offset.setText(QCoreApplication.translate("MainWindow", u"Outer Lines Offset", None))
self.lbl_inner_lines_length_2.setText(QCoreApplication.translate("MainWindow", u"Outer Lines Length", None))
self.btn_outer_off.setText(QCoreApplication.translate("MainWindow", u"Off", None))
self.le_inner_thck.setText(QCoreApplication.translate("MainWindow", u"0", None))
self.lbl_outer_lines_thck.setText(QCoreApplication.translate("MainWindow", u"Outer Lines Thiccness", None))
self.le_dot_thck.setText(QCoreApplication.translate("MainWindow", u"0", None))
self.le_outline_thck.setText(QCoreApplication.translate("MainWindow", u"0", None))
self.lbl_outer_lines_show.setText(QCoreApplication.translate("MainWindow", u"Show Outer Lines", None))
self.btn_dot_off.setText(QCoreApplication.translate("MainWindow", u"Off", None))
self.lbl_inner_lines_show.setText(QCoreApplication.translate("MainWindow", u"Show Inner Lines", None))
self.lbl_outer_lines.setText(QCoreApplication.translate("MainWindow", u"Outer Lines", None))
self.lbl_outlines.setText(QCoreApplication.translate("MainWindow", u"Outlines", None))
self.btn_inner_off.setText(QCoreApplication.translate("MainWindow", u"Off", None))
self.le_dot_opacity.setText(QCoreApplication.translate("MainWindow", u"0.000", None))
self.lbl_outer_lines_opacity.setText(QCoreApplication.translate("MainWindow", u"Outer Lines Opacity", None))
self.lbl_center_dot_thck.setText(QCoreApplication.translate("MainWindow", u"Center Dot Thiccness", None))
self.btn_dot_on.setText(QCoreApplication.translate("MainWindow", u"On", None))
self.lbl_center_dot_opacity.setText(QCoreApplication.translate("MainWindow", u"Center Dot Opacity", None))
self.lbl_center_dot.setText(QCoreApplication.translate("MainWindow", u"Center Dot", None))
self.lbl_inner_lines_opacity.setText(QCoreApplication.translate("MainWindow", u"Inner Lines Opacity", None))
self.lbl_outline_thck.setText(QCoreApplication.translate("MainWindow", u"Outline Thiccness", None))
self.le_outer_length.setText(QCoreApplication.translate("MainWindow", u"0", None))
self.lbl_crosshair.setText(QCoreApplication.translate("MainWindow", u"Crosshair", None))
self.lbl_inner_lines.setText(QCoreApplication.translate("MainWindow", u"Inner Lines", None))
self.lbl_screen_stretch.setText(QCoreApplication.translate("MainWindow", u"Screen Stretch", None))
self.btn_stretch_apply.setText(QCoreApplication.translate("MainWindow", u"Apply", None))
self.btn_save_ch.setText(QCoreApplication.translate("MainWindow", u"Save Crosshair", None))
self.lbl_link.setText(QCoreApplication.translate("MainWindow", u"<a href=\"http://example.com/\">Project Home</a>", None))
self.btn_del_ch.setText(QCoreApplication.translate("MainWindow", u"Delete Crosshair", None))
self.lbl_err_msg.setText("")
# retranslateUi
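# Illustrative usage sketch (standard Qt Designer wiring, not part of the
# generated file):
#
#     import sys
#     from PySide6.QtWidgets import QApplication, QMainWindow
#
#     app = QApplication(sys.argv)
#     window = QMainWindow()
#     ui = Ui_MainWindow()
#     ui.setupUi(window)
#     window.show()
#     sys.exit(app.exec())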
| 1.546875 | 2 |
actions.py | matthewb66/bdconsole | 0 | 12798107 | import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash_extensions import Download
def create_actions_tab(projname, vername):
return [
dbc.Row(
dbc.Col(html.H2("Actions")),
),
dbc.Row(
dbc.Col(html.H4("Project: - Version: - "),
id='actions_projver'),
),
dbc.Row(
[
dbc.Col(
dbc.Card(
[
dbc.CardHeader("Export SPDX JSON file",
style={'classname': 'card-title'},
id='spdxtitle'),
dbc.CardBody(
[
dcc.Interval(
id='spdx_interval',
disabled=True,
interval=1 * 6000, # in milliseconds
n_intervals=0,
max_intervals=400
),
dbc.Form(
[
dbc.FormGroup(
[
dbc.Label("Filename", className="mr-2"),
dbc.Input(type="text",
id="spdx_file",
placeholder="Enter output SPDX file"),
],
className="mr-3",
),
dbc.FormGroup(
[
dbc.Checklist(
id="spdx_recursive",
options=[
{"label": "Recursive (Projects in Projects)",
"value": 1},
],
value=[],
switch=True,
)
],
className="mr-3",
),
dbc.Button("Export SPDX",
id="buttons_export_spdx",
color="primary"),
],
# inline=True,
),
html.Div('', id='spdx_status'),
dbc.Collapse(
[
dbc.Button("Download SPDX",
id="button_download_spdx",
color="primary"),
Download(id="download_spdx"),
],
id="spdx_collapse",
is_open=False,
),
],
),
# dbc.CardFooter(dbc.CardLink('Project Version link', href=projlink)),
], id="spdxcard",
),
width=4,
),
dbc.Col(
dbc.Card(
[
dbc.CardHeader("Ignore CVEs with BDSA Mismatch",
style={'classname': 'card-title'},
id='fixcvestitle'),
dbc.CardBody(
[
dcc.Interval(
id='fixcves_interval',
disabled=True,
interval=1 * 6000, # in milliseconds
n_intervals=0,
max_intervals=400
),
dbc.Form(
[
dbc.Button("Ignore CVEs with Mismatched BDSA Versions",
id="buttons_fixcves",
color="primary"),
],
# inline=True,
),
html.Div('', id='fixcves_status'),
],
),
# dbc.CardFooter(dbc.CardLink('Project Version link', href=projlink)),
], id="fixcvescard",
),
width=4,
),
],
)
]
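# Illustrative sketch (assumed wiring, not part of the original file): a
# callback that would react to the SPDX export button built above. The `app`
# object and the callback body are hypothetical.
#
#     from dash.dependencies import Input, Output, State
#
#     @app.callback(
#         Output('spdx_status', 'children'),
#         Input('buttons_export_spdx', 'n_clicks'),
#         State('spdx_file', 'value'),
#         prevent_initial_call=True,
#     )
#     def on_export_spdx(n_clicks, filename):
#         return 'Export started for %s' % filename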
def patch_cves(bd, version, vuln_list, vulns):
# vulnerable_components_url = hub.get_link(version, "vulnerable-components") + "?limit=9999"
# custom_headers = {'Accept':'application/vnd.blackducksoftware.bill-of-materials-6+json'}
# response = hub.execute_get(vulnerable_components_url, custom_headers=custom_headers)
# vulnerable_bom_components = response.json().get('items', [])
active_statuses = ["NEW", "NEEDS_REVIEW", "REMEDIATION_REQUIRED"]
status = "IGNORED"
comment = "Ignored as linked BDSA has component version as fixed"
print("Processing vulnerabilities ...")
ignoredcount = 0
alreadyignoredcount = 0
try:
for vuln in vulns:
vuln_name = vuln['vulnerabilityWithRemediation']['vulnerabilityName']
if vuln_name in vuln_list:
if vuln['vulnerabilityWithRemediation']['remediationStatus'] in active_statuses:
vuln['remediationStatus'] = status
vuln['remediationComment'] = comment
# result = hub.execute_put(vuln['_meta']['href'], data=vuln)
r = bd.session.put(vuln['_meta']['href'], json=vuln)
if r.status_code == 202:
ignoredcount += 1
print("{}: marked ignored".format(vuln_name))
else:
print("{}: Unable to change status".format(vuln_name))
else:
print(vuln_name + ": has BDSA which disgrees on version applicability but not active - no action")
alreadyignoredcount += 1
else:
print(vuln_name + ": No action")
except Exception as e:
print("ERROR: Unable to update vulnerabilities via API\n" + str(e))
return 0
print("- {} CVEs already inactive".format(alreadyignoredcount))
print("- {} CVEs newly marked as ignored".format(ignoredcount))
return ignoredcount
def check_cves(bd, projverurl, comps, vulns):
cve_list = []
num = 0
total = 0
for comp in comps:
# print(comp)
if 'componentVersionName' not in comp:
continue
print("- " + comp['componentName'] + '/' + comp['componentVersionName'])
for x in comp['_meta']['links']:
if x['rel'] == 'vulnerabilities':
# custom_headers = {'Accept': 'application/vnd.blackducksoftware.vulnerability-4+json'}
# response = hub.execute_get(x['href'] + "?limit=9999", custom_headers=custom_headers)
# vulns = response.json().get('items', [])
cvulns = bd.get_json(x['href'] + "?limit=3000")
for vuln in cvulns['items']:
total += 1
if vuln['source'] == 'NVD':
for y in vuln['_meta']['links']:
if y['rel'] == 'related-vulnerabilities':
if y['label'] == 'BDSA':
# print("{} has BDSA which disagrees with component version - potential false
# positive".format(vuln['name']))
if vuln['name'] not in cve_list:
cve_list.append(vuln['name'])
num += 1
print("Found {} total vulnerabilities".format(total))
print("Found {} CVEs with associated BDSAs but which do not agree on affected component version\n".format(num))
ret = patch_cves(bd, projverurl, cve_list, vulns)
return ret
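# Illustrative usage sketch (assumed API shapes -- `bd` is expected to be an
# authenticated blackduck Client exposing get_json()/session; the endpoint
# paths below are placeholders, not confirmed by this module):
#
#     comps = bd.get_json(projverurl + '/components?limit=5000')['items']
#     vulns = bd.get_json(projverurl + '/vulnerable-bom-components?limit=5000')['items']
#     ignored = check_cves(bd, projverurl, comps, vulns)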
| 2.078125 | 2 |
tests/tests.py | chunglabmit/phathom | 1 | 12798108 | <reponame>chunglabmit/phathom<filename>tests/tests.py
import phathom
import phathom.io
import phathom.utils
from phathom.test_helpers import *
import multiprocessing
import numpy as np
import unittest
import os
import tempfile
import sys
class TestConversion(unittest.TestCase):
def test_imread(self):
filename = os.path.join(os.path.split(__file__)[0], 'example.tif')
data = phathom.io.tiff.imread(filename)
self.assertEqual(data.shape, (64, 128, 128), msg='loaded array has the wrong shape')
self.assertEqual(data.dtype, 'uint16', msg='loaded array has the wrong data type')
def test_imsave(self):
arr = np.random.random((32, 32, 32))
filename = os.path.join(tempfile.gettempdir(), "imsave_test.tif")
phathom.io.tiff.imsave(filename, arr)
tmp = phathom.io.tiff.imread(filename)
self.assertTrue(np.all(arr == tmp), msg='saved and loaded array values are not equal')
self.assertEqual(arr.dtype, tmp.dtype, msg='saved and loaded array do not have same data type')
class TestUtils(unittest.TestCase):
def test_make_dir(self):
test_dir = 'tests/make_dir_test/'
if os.path.isdir(test_dir):
os.rmdir(test_dir)
phathom.utils.make_dir(test_dir)
self.assertTrue(os.path.isdir(test_dir), msg='test_dir does not exist after running make_dir')
if os.path.isdir(test_dir): # cleanup
os.rmdir(test_dir)
def test_files_in_dir(self):
file_test_dir = os.path.join(os.path.dirname(__file__),
'file_tests')
expected_files = ['file1.txt', 'file2.tif', 'file3.tif']
found_files = phathom.utils.files_in_dir(file_test_dir)
self.assertEqual(found_files, expected_files, msg='found incorrect files')
def test_tifs_in_dir(self):
file_test_dir = os.path.join(os.path.dirname(__file__),
'file_tests')
expected_files = ['file2.tif', 'file3.tif']
abs_path = os.path.abspath(file_test_dir)
expected_paths = [os.path.join(abs_path, fname) for fname in expected_files]
found_paths, found_files = phathom.utils.tifs_in_dir(file_test_dir)
self.assertEqual(found_files, expected_files, msg='found incorrect tif filenames')
self.assertEqual(found_paths, expected_paths, msg='found incorrect tif paths')
def test_pickle_save_load(self):
true_dict = {'chunks': (8, 16, 32), 'shape': (100, 1000, 1000)}
tmp_file = 'tests/tmp.pkl'
phathom.utils.pickle_save(tmp_file, true_dict)
read_dict = phathom.utils.pickle_load(tmp_file)
self.assertEqual(read_dict, true_dict, msg='saved and read dict do not match')
os.remove(tmp_file) # cleanup
    @staticmethod
    def write_for_memory_tteesstt(expected):
        # The odd "tteesstt" spelling looks deliberate: it keeps unittest from
        # collecting these helpers as test cases.
        global memory
with memory.txn() as t:
t[:] = expected
@staticmethod
def do_memory_tteesstt():
global memory
memory = phathom.utils.SharedMemory(100, np.uint32)
expected = np.random.RandomState(1234).randint(0, 100, 100)
with multiprocessing.Pool(1) as pool:
pool.apply(TestUtils.write_for_memory_tteesstt, (expected,))
with memory.txn() as t:
np.testing.assert_equal(t[:], expected)
def test_shared_memory(self):
old_is_linux = phathom.utils.is_linux
if sys.platform.startswith("linux"):
# Test the generic form of SharedMemory
phathom.utils.is_linux = False
try:
self.do_memory_tteesstt()
finally:
phathom.utils.is_linux = old_is_linux
if sys.platform.startswith("linux"):
def test_linux_shared_memory(self):
self.do_memory_tteesstt()
# def test_parallel_map(self):
# result = find_primes(5 * 1000 * 1000 * 1000, 5*1000*1000*1000 + 1000)
# self.assertEqual(result[0], 5000000029)
class TestSegmentation(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
| 2.375 | 2 |
app.py | anthonycgalea/FRC-2022-Scouting-Server | 0 | 12798109 | <filename>app.py<gh_stars>0
from flask import (
Flask
, render_template
, redirect
, request
, jsonify
, url_for
)
from flask_sqlalchemy import SQLAlchemy
import os, requests
from models import constants
from requests.exceptions import HTTPError
basedir = os.path.abspath(os.path.dirname(__file__))
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///'+os.path.join(basedir, 'data/ScoutingData.db')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS']=False
db = SQLAlchemy(app)
class Team(db.Model):
'''
Database model for a Team table. Only includes rudimentary team information to not clutter.
'''
teamNumber = db.Column(db.Integer, primary_key=True, nullable=False, unique=True)
teamName = db.Column(db.String(50), nullable=False)
teamCity = db.Column(db.String(50))
teamState = db.Column(db.String(50))
class Match(db.Model):
'''
Database model for a Match table. Only includes match number and team members of an alliance.
'''
matchNo = db.Column(db.Integer, primary_key=True, nullable=False, unique=True)
red_1 = db.Column(db.Integer, nullable=False)
red_2 = db.Column(db.Integer, nullable=False)
red_3 = db.Column(db.Integer, nullable=False)
blue_1 = db.Column(db.Integer, nullable=False)
blue_2 = db.Column(db.Integer, nullable=False)
blue_3 = db.Column(db.Integer, nullable=False)
class TeamMatch(db.Model):
'''
Database model for recording a team's performance in a match.
'''
teamMatchId = db.Column(db.Integer, primary_key=True) #tuid
    # ForeignKey targets must use the table name (lowercased by default in
    # Flask-SQLAlchemy), not the model class name.
    matchNo = db.Column(db.Integer, db.ForeignKey('match.matchNo'), nullable=False)
    teamNo = db.Column(db.Integer, db.ForeignKey('team.teamNumber'), nullable=False)
#TODO: Insert game-specfic data here
#Autonomous
autoTaxi = db.Column(db.Integer) #this is an integer to make data handling in Tableau easier
autoLow = db.Column(db.Integer)
autoHigh = db.Column(db.Integer)
autoPickedUp = db.Column(db.Integer)
autoNotes = db.Column(db.String(255))
#Teleop
teleLow = db.Column(db.Integer)
teleHigh = db.Column(db.Integer)
telePickedUp = db.Column(db.Integer)
didDefense = db.Column(db.Boolean)
teleDefense = db.Column(db.Integer)
teleNotes = db.Column(db.String(255))
#Endgame
attemptedClimb = db.Column(db.Boolean)
levelClimbed = db.Column(db.Integer)
endgameNotes = db.Column(db.String(255))
#generic game data
brokenBot = db.Column(db.Boolean)
noShow = db.Column(db.Boolean)
fouls = db.Column(db.Integer)
generalNotes = db.Column(db.String(255))
class TeamPitScout(db.Model):
'''
Database model for pit scouting information for a team.
'''
TeamPitScoutId = db.Column(db.Integer, primary_key=True)
    teamNo = db.Column(db.Integer, db.ForeignKey('team.teamNumber'), nullable=False)
#TODO: Insert game-specific data here
#TODO: replace this with a
def getTeamInfo(teamNo):
url = f"{constants.tba_base}{teamNo}.json?key={constants.tba_key}"
res = requests.get(url)
res_json = res.json()
info = tuple(
[
res_json["results"][0]["nickname"],
res_json["results"][0]["city"],
res_json["results"][0]["state_prov"],
]
)
return info
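# Illustrative setup sketch (assumed workflow): create the tables defined
# above once before first use, e.g. from a Python shell.
#
#     with app.app_context():
#         db.create_all()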
@app.route("/")
def index():
'''
Flask method for returning homepage.
'''
return render_template("index.html")
@app.route("/scoutMatch/<int:matchNo><int:teamNo>")
def scoutMatch():
return render_template("matchScout.html") | 2.609375 | 3 |
insert_into_table.py | cegladanych/demo_storage | 0 | 12798110 | <gh_stars>0
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
import csv
account_name = ''
account_key = ''
table_name = 'projectx'
file_name = 'C:\\Users\\admin\\Downloads\\meta.csv'
def set_table_service():
    return TableService(account_name, account_key)

def table_exists():
    # Renamed from get_table_service(): the function reports whether the
    # table exists rather than returning a service object.
    return ts.exists(table_name, 10)
ts = set_table_service()
if not table_exists():
ts.create_table(table_name)
# DictReader takes the column names (PartitionKey, RowKey, TimeStamp,
# UpdatedOn, ID, Priority) from the CSV header row itself.
with open(file_name, 'r', newline='') as csvFile:
    reader = csv.DictReader(csvFile)
    rows = list(reader)

for row in rows:
    ts.insert_or_replace_entity(table_name, row)
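# Illustrative read-back sketch (the partition/row keys below are
# hypothetical):
#
#     entity = ts.get_entity(table_name, 'some-partition', 'some-row')
#     print(entity.Priority)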
| 2.375 | 2 |
mlnx-ofed-4.9-driver/rdma-core-50mlnx1/tests/test_relaxed_ordering.py | Hf7WCdtO/KRCore | 0 | 12798111 | <filename>mlnx-ofed-4.9-driver/rdma-core-50mlnx1/tests/test_relaxed_ordering.py
from tests.base import RCResources, UDResources, XRCResources
from tests.utils import traffic, xrc_traffic
from tests.base import RDMATestCase
from pyverbs.mr import MR
import pyverbs.enums as e
class RoUD(UDResources):
def create_mr(self):
self.mr = MR(self.pd, self.msg_size + self.GRH_SIZE,
e.IBV_ACCESS_LOCAL_WRITE | e.IBV_ACCESS_RELAXED_ORDERING)
class RoRC(RCResources):
def create_mr(self):
self.mr = MR(self.pd, self.msg_size,
e.IBV_ACCESS_LOCAL_WRITE | e.IBV_ACCESS_RELAXED_ORDERING)
class RoXRC(XRCResources):
def create_mr(self):
self.mr = MR(self.pd, self.msg_size,
e.IBV_ACCESS_LOCAL_WRITE | e.IBV_ACCESS_RELAXED_ORDERING)
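# Context note: IBV_ACCESS_RELAXED_ORDERING allows the device to relax PCIe
# write ordering for traffic on this MR, which can improve throughput; the
# traffic tests below verify data integrity with the flag set. For
# comparison, a conventionally ordered MR would be registered without it:
#
#     mr = MR(pd, size, e.IBV_ACCESS_LOCAL_WRITE)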
class RoTestCase(RDMATestCase):
def setUp(self):
super(RoTestCase, self).setUp()
self.iters = 100
self.qp_dict = {'rc': RoRC, 'ud': RoUD, 'xrc': RoXRC}
def create_players(self, qp_type):
client = self.qp_dict[qp_type](self.dev_name, self.ib_port,
self.gid_index)
server = self.qp_dict[qp_type](self.dev_name, self.ib_port,
self.gid_index)
if qp_type == 'xrc':
client.pre_run(server.psns, server.qps_num)
server.pre_run(client.psns, client.qps_num)
else:
client.pre_run(server.psn, server.qpn)
server.pre_run(client.psn, client.qpn)
return client, server
def test_ro_rc_traffic(self):
client, server = self.create_players('rc')
traffic(client, server, self.iters, self.gid_index, self.ib_port)
def test_ro_ud_traffic(self):
client, server = self.create_players('ud')
traffic(client, server, self.iters, self.gid_index, self.ib_port)
def test_ro_xrc_traffic(self):
client, server = self.create_players('xrc')
xrc_traffic(client, server)
| 1.929688 | 2 |
run_files.py | OmarOsman/Arabic_Text_Summarization | 0 | 12798112 | <filename>run_files.py<gh_stars>0
import os
import document
import preprocess
import argparse
import pdb
def get_summary(input_text):
pr = preprocess.Preprocess()
original_text = input_text
preprocessed_text = pr.get_clean_article(input_text)
sentences = pr.get_article_sentences(preprocessed_text)
original_sentences = pr.get_article_sentences(input_text)
paragraphs = pr.get_cleaned_article_paragraphes(preprocessed_text)
para_sent_list = pr.get_para_sentences(paragraphs)
tokenized_word_sentences = pr.get_tokenized_word_sentences(sentences)
doc = document.Doc(
original_text = original_text , original_sentences = original_sentences ,
preprocessed_text = preprocessed_text.replace('ppp',""),
sentences = sentences,
paragraphs = paragraphs ,para_sent_list = para_sent_list ,tokenized_word_sentences = tokenized_word_sentences)
summary = doc.summarize()
return summary
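# Illustrative usage sketch (hypothetical input text):
#
#     text = "..."  # article content loaded elsewhere
#     print(get_summary(text))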
def run():
input_dir = "input"
output_dir = "output"
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--input", required=True,help="path to input text document")
#ap.add_argument("-o", "--output", required=True,help="path to output Summarized Document")
args = vars(ap.parse_args())
input_path = os.path.join(input_dir,args['input'])
output_path = os.path.join(output_dir,args['input'])
pr = preprocess.Preprocess()
input_text = pr.get_article_content(input_path)
summary = get_summary(input_text)
#pdb.set_trace()
    with open(output_path, 'w', encoding="utf-8") as f:
        f.write(summary)
if __name__ == "__main__":
run()
| 2.859375 | 3 |
exercise/venv/lib/python3.7/site-packages/sqreen/log.py | assuzzanne/my-sqreen | 0 | 12798113 | <gh_stars>0
# -*- coding: utf-8 -*-
# Copyright (c) 2016, 2017, 2018, 2019 Sqreen. All rights reserved.
# Please refer to our terms for more information:
#
# https://www.sqreen.io/terms.html
#
""" Logging module helpers
"""
import logging
ROOT_LOGGER_NAME = "sqreen"
LOG_FORMAT = (
"[%(levelname)s][%(asctime)s #%(process)d.%(threadName)s]"
" %(name)s:%(lineno)s \t%(message)s"
)
def configure_root_logger(log_level, log_location=None, root=ROOT_LOGGER_NAME):
""" Configure the sqreen root logger. Set following settings:
- log_level
Ensure that the sqreen root logger don't propagate messages logs
to the python root logger.
Configure two handlers, one stream handler on stderr for errors
and one file handler if log_location is set for configured level
"""
logger = logging.getLogger(root)
# Don't propagate messages to upper loggers
logger.propagate = False
formatter = logging.Formatter(LOG_FORMAT)
handlers = []
    # Configure the stderr handler (no explicit level is set, so it emits at
    # the logger's configured level)
stderr_handler = logging.StreamHandler()
stderr_handler.setFormatter(formatter)
handlers.append(stderr_handler)
if log_location is not None:
try:
filehandler = logging.FileHandler(log_location)
filehandler.setFormatter(formatter)
handlers.append(filehandler)
except (OSError, IOError):
msg = "Couldn't use %s as sqreen log location, fallback to stderr."
logger.exception(msg, log_location)
if logger.handlers:
logger.handlers = []
for handler in handlers:
logger.addHandler(handler)
try:
logger.setLevel(log_level)
except ValueError:
logger.error("Unknown log_level %r, don't alter log level", log_level)
return logger
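# Illustrative usage sketch (the log path is an example):
#
#     logger = configure_root_logger(logging.DEBUG, "/var/log/sqreen.log")
#     logger.debug("agent started")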
| 2.234375 | 2 |
src/btt/questions/admin.py | kevinkissi/basic-tech-tips-webapp | 116 | 12798114 | <reponame>kevinkissi/basic-tech-tips-webapp
from django.contrib import admin
from django.apps import apps
questions = apps.get_app_config('questions')
# Register every model declared in the "questions" app with the admin site.
for model_name, model in questions.models.items():
    admin.site.register(model)
| 1.953125 | 2 |
photoframe/__init__.py | sesamemucho/photoframe | 0 | 12798115 | <reponame>sesamemucho/photoframe
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '0.1.0'
| 1.015625 | 1 |
en/verbocean_to_json.py | yuichigoto/ccg2lambda | 200 | 12798116 | <filename>en/verbocean_to_json.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
from collections import defaultdict
import gzip
import json
parser = argparse.ArgumentParser()
parser.add_argument("infile")
parser.add_argument("outfile")
args = parser.parse_args()
def load_verbocean(verbocean_filename):
relations = dict()
    # gzip.open's third positional parameter is compresslevel, so the
    # encoding must be passed by keyword.
    with gzip.open(verbocean_filename, 'rt', encoding='utf-8') as fin:
for line in fin:
if not line.startswith('#'):
verb1, rel, verb2 = line.split()[0:3]
if verb1 not in relations:
relations[verb1] = defaultdict(set)
relations[verb1][verb2].add(rel.strip('[]'))
return relations
verbocean = load_verbocean(args.infile)
for v1, d in verbocean.items():
for v2, rels in d.items():
verbocean[v1][v2] = list(rels)
with open(args.outfile, 'w') as fout:
json.dump(verbocean, fout, indent=2)
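# Resulting JSON shape (illustrative entry; relation labels come from
# VerbOcean, e.g. "happens-before", "similar"):
#
#     { "begin": { "end": ["happens-before"] } }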
| 2.6875 | 3 |
view/runnable/showMultiPlayerBoard.py | achrafJhidri/LeCompteEstBon | 0 | 12798117 | from view.runnable.Runnable import Runnable
class showMultiPlayerBoard(Runnable):
def __init__(self,vue, sec):
Runnable.__init__(self,vue)
self.sec = sec
def run(self):
self.vue.showMultiPlayerBoard(self.sec) | 1.8125 | 2 |
cryptomarket/exchange/client.py | miguelagustin/cryptomkt-python | 0 | 12798118 | # coding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import requests
import time
import warnings
from .auth import HMACAuth
from .compat import imap
from .compat import quote
from .compat import urljoin
from .compat import urlencode
from .error import build_api_error
from .model import APIObject
from .model import new_api_object
from .socket import Socket
from .util import check_uri_security
from .util import encode_params
class Client(object):
BASE_API_URI = 'https://api.cryptomkt.com/'
API_VERSION = 'v2'
def __init__(self, api_key, api_secret, base_api_uri=None, api_version=None, debug=False):
if not api_key:
raise ValueError('Missing `api_key`.')
if not api_secret:
raise ValueError('Missing `api_secret`.')
# Allow passing in a different API base.
self.BASE_API_URI = check_uri_security(base_api_uri or self.BASE_API_URI)
self.API_VERSION = api_version or self.API_VERSION
self.socket = None
# Set up a requests session for interacting with the API.
self.session = self._build_session(HMACAuth, api_key, api_secret, self.API_VERSION)
# a container for the socket if needed.
self.socket = None
def _build_session(self, auth_class, *args, **kwargs):
"""Internal helper for creating a requests `session` with the correct
authentication handling."""
session = requests.session()
session.auth = auth_class(*args, **kwargs)
# session.headers.update({'Content-type': 'application/json'})
return session
def _create_api_uri(self, *parts, **kwargs):
"""Internal helper for creating fully qualified endpoint URIs."""
params = kwargs.get("params", None)
if params and isinstance(params, dict):
url = urljoin(self.BASE_API_URI, '/'.join(imap(quote, parts)) + '?%s' % urlencode(params))
else:
url = urljoin(self.BASE_API_URI, '/'.join(imap(quote, parts)))
return url
def _request(self, method, *relative_path_parts, **kwargs):
"""Internal helper for creating HTTP requests to the CryptoMarket API.
Raises an APIError if the response is not 20X. Otherwise, returns the response object. Not intended for direct use by API consumers.
"""
uri = self._create_api_uri(*relative_path_parts, **kwargs)
data = kwargs.get("data", None)
if data and isinstance(data, dict):
kwargs['data'] = data
response = getattr(self.session, method)(uri, **kwargs)
return self._handle_response(response)
def _handle_response(self, response):
"""Internal helper for handling API responses from the CryptoMarket server.
Raises the appropriate exceptions when necessary; otherwise, returns the
response.
"""
if not str(response.status_code).startswith('2'):
raise build_api_error(response)
return response
def _get(self, *args, **kwargs):
return self._request('get', *args, **kwargs)
def _post(self, *args, **kwargs):
return self._request('post', *args, **kwargs)
def _make_api_object(self, response, model_type=None):
blob = response.json()
data = blob.get('data', None)
# All valid responses have a "data" key.
if data is None:
raise build_api_error(response, blob)
# Warn the user about each warning that was returned.
warnings_data = blob.get('warnings', None)
for warning_blob in warnings_data or []:
message = "%s (%s)" % (
warning_blob.get('message', ''),
warning_blob.get('url', ''))
warnings.warn(message, UserWarning)
pagination = blob.get('pagination', None)
kwargs = {
'response': response,
'pagination': pagination and new_api_object(None, pagination, APIObject),
'warnings': warnings_data and new_api_object(None, warnings_data, APIObject),
}
if isinstance(data, dict):
obj = new_api_object(self, data, model_type, **kwargs)
else:
obj = APIObject(self, **kwargs)
obj.data = new_api_object(self, data, model_type)
return obj
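
    # Sketch of the response envelope handled above (keys inferred from this
    # method; values illustrative):
    #   {"data": ..., "pagination": {...},
    #    "warnings": [{"message": "...", "url": "..."}]}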
# Public API
# -----------------------------------------------------------
def get_markets(self):
"""Returns a list of the marketpairs as strings available in Cryptomkt
as the "data" member of a dict.
"""
response = self._get(self.API_VERSION, 'market')
return self._make_api_object(response, APIObject)
def get_ticker(self, market=None):
"""Returns a general view of the market state as a dict.
        Shows the current bid and ask, the volume and price, and the low and high of the market.
        Stored in the "data" member of a dict.
        Does not require authentication.
        Optional Arguments:
            market: A market pair as a string. If no market pair is provided,
                the market state of all market pairs is returned.
                e.g: 'ETHARS'.
"""
params = {}
if market:
params['market'] = market
response = self._get(self.API_VERSION, 'ticker', params=params)
return self._make_api_object(response, APIObject)
def get_book(self, market, side, page=None, limit=None):
"""Returns a list of active orders of a given side in a specified
        market pair. Stored in the "data" member of a dict.
        Required Arguments:
            market: A market pair as a string. The market to get
                the book from.
e.g: 'ETHEUR'.
side: 'buy' or 'sell'.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market=market,
side=side
)
if page is not None and isinstance(page, int):
params['page'] = page
if limit is not None and isinstance(limit, int):
params['limit'] = limit
response = self._get(self.API_VERSION, 'book', params=params)
return self._make_api_object(response, APIObject)
def get_trades(self, market, start=None, end=None, page=None, limit=None):
"""returns a list of all trades (executed orders) of a market between
the start date, until the end date. the earlier trades first, and the
older last. stored in the "data" member of a dict
If no start date is given, returns trades since 2020-02-17.
If no end date is given, returns trades until the present moment.
Required Arguments:
market: A market pair as a string. Is the specified market to get
the book from.
e.g: 'ETHCLP'.
Optional Arguments:
start: The older date to get trades from, inclusive.
end: The earlier date to get trades from, exclusive.
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market=market
)
if start is not None:
params['start'] = start
if end is not None:
params['end'] = end
if page is not None:
params['page'] = page
if limit is not None:
params['limit'] = limit
response = self._get(self.API_VERSION, 'trades', params=params)
return self._make_api_object(response, APIObject)
def get_prices(self, market, timeframe, page = None, limit = None):
"""returns a list of the prices of a market (candles on the market
prices graph), given a timeframe. The earlier prices first and the
older last. the list is stored in the data member of a dict
Required Arguments:
market: A market pair as a string. Is the specified market to get
the book from.
e.g: 'ETHCLP'.
timeframe: timelapse between every candle in minutes.
accepted values are 1, 5, 15, 60, 240, 1440 and 10080.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market = market,
timeframe = timeframe
)
if page is not None:
params["page"] = page
if limit is not None:
params["limit"] = limit
response = self._get(self.API_VERSION,"prices", params = params)
return self._make_api_object(response, APIObject)
# Authenticated endpoints
#-------------------------------------------------------------------
# account
def get_account(self):
"""returns the account information of the user. Name, email, rate
and bank accounts.
"""
response = self._get(self.API_VERSION,"account")
return self._make_api_object(response,APIObject)
# orders
def get_active_orders(self, market, page=None, limit=None):
"""returns a list of the active orders of the user in a given market.
Required Arguments:
market: A market pair as a string. Is the specified market to get
the book from.
e.g: 'ETHCLP'.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market=market
)
if page is not None:
params['page'] = page
if limit is not None:
params['limit'] = limit
response = self._get(self.API_VERSION, 'orders', 'active', params=params)
return self._make_api_object(response, APIObject)
def get_executed_orders(self, market, page=None, limit=None):
"""returns the list of the executed orders of the user on a given market.
Required Arguments:
market: A market pair as a string. Is the specified market to get
the book from.
e.g: 'ETHCLP'.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
market=market
)
if page is not None:
params['page'] = page
if limit is not None:
params['limit'] = limit
response = self._get(self.API_VERSION, 'orders', 'executed', params=params)
return self._make_api_object(response, APIObject)
def create_order(self, market, amount, price, side, type):
"""creates an orders from the specified argument.
Required Arguments:
amount: The amount of crypto to be buyed or selled.
market: A market pair as a string. Is the specified market to place the order in
e.g: 'ETHCLP'.
price: The price to ask or bid for one unit of crypto
side: 'buy' or 'sell' the crypto
type: one of the keywords 'market', 'limit', 'stop_limit'
"""
params = dict(
amount=amount,
market=market,
price=price,
side=side,
type=type,
)
response = self._post(self.API_VERSION, 'orders', 'create', data=params)
return self._make_api_object(response, APIObject)
def create_multi_orders(self, order_list):
for order in order_list:
if ('market' not in order
or 'type' not in order
or 'side' not in order
or 'amount' not in order):
return None
params = dict(
orders=json.dumps(order_list, sort_keys=True, separators=(',',':')),
)
response = self._post(self.API_VERSION, 'orders', 'create', 'bulk', data=params)
return self._make_api_object(response, APIObject)
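
    # Example (hypothetical) order_list accepted above; the validation requires
    # 'market', 'type', 'side' and 'amount', and the values are illustrative:
    #   [{'market': 'ETHCLP', 'type': 'limit', 'side': 'buy',
    #     'amount': 1.0, 'price': 100000}]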
def get_order_status(self, id):
"""returns the present status of an order, given the order id.
Required Arguments:
id: The identification of the order.
"""
params = dict(
id=id
)
response = self._get(self.API_VERSION, 'orders', 'status', params=params)
return self._make_api_object(response, APIObject)
def cancel_order(self, id):
"""Cancel an order given its id.
Required Arguments:
id: The identification of the order.
"""
params = dict(
id=id
)
response = self._post(self.API_VERSION, 'orders', 'cancel', data=params)
return self._make_api_object(response, APIObject)
def cancel_multi_orders(self, order_list):
for order in order_list:
if 'id' not in order:
return None
params = dict(
ids=json.dumps(order_list, sort_keys=True, separators=(',',':')),
)
response = self._post(self.API_VERSION, 'orders', 'cancel', 'bulk', data=params)
return self._make_api_object(response, APIObject)
def get_instant(self,market, side, amount):
"""If side is sell, returns an estimate of the amount of fiat obtained and the amount of crypto required to obatin it.
If side is buy, returns an estimate of the amount ofOrder crypto obtained and the amount of fiat required to obtain it.
Required Arguments:
market: The market to get the estimate of the transaction.
side: 'buy' or 'sell'
amount: Is the amount of crypto to 'buy' or 'sell'
"""
rest = float(amount)
book_side = 'sell' if side == 'buy' else 'buy'
amount_required = 0.0
amount_obtained = 0.0
page = 0
n_entries = 100
while True:
book_page = self.get_book(market, book_side, page=page, limit=n_entries)
for entry in book_page['data']:
price = float(entry['price'])
amount = float(entry['amount'])
if rest < amount:
amount_obtained += rest * price
amount_required += rest
rest = 0
break
else:
amount_obtained += amount * price
amount_required += amount
rest -= amount
if rest == 0 or len(book_page['data']) < n_entries:
break
else: time.sleep(3)
page = page + 1
if book_side == 'sell':
temp = amount_required
amount_required = amount_obtained
amount_obtained = temp
instant = dict(obtained=amount_obtained, required=amount_required)
return instant
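
    # Usage sketch (assumes a configured client; market and amount are
    # illustrative):
    #   est = client.get_instant('ETHCLP', 'buy', 1.5)
    #   est['obtained'], est['required']  # crypto obtained, fiat required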
#Wallet
def get_balance(self):
"""returns the balance of the user.
"""
response = self._get(self.API_VERSION, 'balance')
return self._make_api_object(response, APIObject)
def get_transactions(self, currency, page = None, limit = None):
"""return all the transactions of a currency of the user.
Required Arguments:
currency: The currency to get all the user transactions.
Optional Arguments:
page: Page number to query. Default is 0
limit: Number of orders returned in each page. Default is 20.
"""
params = dict(
currency = currency
)
if page is not None:
params["page"] = page
if limit is not None:
params["limit"] = limit
response = self._get(self.API_VERSION, "transactions", params=params)
return self._make_api_object(response, APIObject)
def notify_deposit(self,amount,bank_account, date= None, tracking_code = None, voucher = None):
"""Notifies a deposit from your bank account to your wallet (fiat).
Required Arguments:
amount: The amount deposited to your wallet.
bank_account: The address (id) of the bank account from which you deposited.
Extra Arguments required for Brazil and the European Union:
voucher: a file.
Extra Arguments required for Mexico:
date: The date of the deposit, in format dd/mm/yyyy.
tracking_code: The tracking code of the deposit.
voucher: a file.
"""
params = dict(
amount = amount,
bank_account = bank_account
)
if date is not None:
params["date"] = date
if tracking_code is not None:
params["tracking_code"] = tracking_code
if voucher is not None:
params["voucher"] = voucher
response = self._post(self.API_VERSION, "deposit", data = params)
return self._make_api_object(response,APIObject)
def notify_withdrawal(self, amount, bank_account):
"""Notifies a withdrawal from fiat wallet to your bank account.
Required Arguments:
amount: the amount you need to withdraw to your bank account.
bank_account: The address(id) of the bank account.
"""
params = dict(
amount = amount,
bank_account = bank_account
)
response = self._post(self.API_VERSION, "withdrawal", data = params)
return self._make_api_object(response, APIObject)
def transfer(self,address, amount, currency, memo = None):
"""transfer money between wallets.
Required Arguments:
adderss: The address of the wallet to transfer money.
amount: The amount of money to transfer into the wallet.
currency: The wallet from which to take the money.
e.g. 'ETH'
memo (optional): memo of the wallet to transfer money.
"""
params = dict(
address = address,
amount = amount,
currency = currency
)
if memo is not None:
params["memo"] = memo
response = self._post(self.API_VERSION, "transfer", data = params)
return self._make_api_object(response, APIObject)
def get_auth_socket(self):
"""returns the userid and the socket ids to permit a socket connection with cryptomkt.
"""
response = self._get("v2", "socket/auth")
return self._make_api_object(response, APIObject)
def get_socket(self, debug=False):
"""returns a socket connection with cryptomkt.
"""
if self.socket is None:
auth = self.get_auth_socket()
del auth['verify']
self.socket = Socket(auth, debug=debug)
return self.socket | 2.359375 | 2 |
setup.py | maiziex/Aquila_stLFR | 7 | 12798119 | from setuptools import setup, find_packages, Extension
setup(name='aquila_stlfr',
version='1.1',
description='assembly and variant calling for stlfr and hybrid assembler for linked-reads',
author='XinZhou',
author_email='<EMAIL>',
packages=['bin',],
entry_points={'console_scripts':['Aquila_stLFR_step1=bin.Aquila_stLFR_step1:main','Aquila_step1_hybrid=bin.Aquila_step1_hybrid:main','Aquila_stLFR_step2=bin.Aquila_stLFR_step2:main','Aquila_stLFR_assembly_based_variants_call=bin.Aquila_stLFR_assembly_based_variants_call:main','Aquila_stLFR_phasing_all_variants=bin.Aquila_stLFR_phasing_all_variants:main','Aquila_stLFR_clean=bin.Aquila_stLFR_clean:main','Aquila_step0_sortbam_hybrid=bin.Aquila_step0_sortbam_hybrid:main','Aquila_stLFR_fastq_preprocess=bin.Aquila_stLFR_fastq_preprocess:main']},
zip_safe=False)
| 1.070313 | 1 |
rx/core/operators/catch.py | mmpio/RxPY | 4,342 | 12798120 | from typing import Callable, Union
import rx
from rx.core import Observable, typing
from rx.disposable import SingleAssignmentDisposable, SerialDisposable
from rx.internal.utils import is_future
def catch_handler(source: Observable, handler: Callable[[Exception, Observable], Observable]) -> Observable:
def subscribe(observer, scheduler=None):
d1 = SingleAssignmentDisposable()
subscription = SerialDisposable()
subscription.disposable = d1
def on_error(exception):
try:
result = handler(exception, source)
except Exception as ex: # By design. pylint: disable=W0703
observer.on_error(ex)
return
result = rx.from_future(result) if is_future(result) else result
d = SingleAssignmentDisposable()
subscription.disposable = d
d.disposable = result.subscribe(observer, scheduler=scheduler)
d1.disposable = source.subscribe_(
observer.on_next,
on_error,
observer.on_completed,
scheduler
)
return subscription
return Observable(subscribe)
def _catch(handler: Union[Observable, Callable[[Exception, Observable], Observable]]
) -> Callable[[Observable], Observable]:
def catch(source: Observable) -> Observable:
"""Continues an observable sequence that is terminated by an
exception with the next observable sequence.
Examples:
>>> op = catch(ys)
>>> op = catch(lambda ex, src: ys(ex))
Args:
handler: Second observable sequence used to produce
results when an error occurred in the first sequence, or an
exception handler function that returns an observable sequence
given the error and source observable that occurred in the
first sequence.
Returns:
An observable sequence containing the first sequence's
elements, followed by the elements of the handler sequence
in case an exception occurred.
"""
if callable(handler):
return catch_handler(source, handler)
elif isinstance(handler, typing.Observable):
return rx.catch(source, handler)
else:
            raise TypeError('catch operator takes either an Observable or a callable handler as argument.')
return catch
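
# Usage sketch (assumes RxPY 3-style pipelines; values illustrative):
#   import rx
#   from rx import operators as ops
#   rx.throw(ValueError('boom')).pipe(
#       ops.catch(lambda ex, src: rx.of(1, 2, 3)),
#   ).subscribe(print)  # prints 1, 2, 3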
| 2.546875 | 3 |
test.py | pboi20/fuzzycd | 0 | 12798121 | <gh_stars>0
import os
import shutil
import unittest
from fuzzycd import (
path_is_directory, filter_paths, get_print_directories, get_best_match)
TEST_DIR = "fixtures"
def touch(path):
with open(path, 'a'):
os.utime(path, None)
class TestFuzzyCD(unittest.TestCase):
@classmethod
def setUpClass(cls):
script_path = os.path.realpath(__file__)
script_dir_path = os.path.dirname(script_path)
test_dir_path = os.path.join(script_dir_path, TEST_DIR)
if os.path.exists(test_dir_path):
shutil.rmtree(test_dir_path, ignore_errors=False)
# Root directory for tests
os.mkdir(test_dir_path)
os.chdir(test_dir_path)
# 4 directories
os.mkdir(os.path.join(test_dir_path, "one"))
os.mkdir(os.path.join(test_dir_path, "two"))
os.mkdir(os.path.join(test_dir_path, "three"))
four_dir_path = os.path.join(test_dir_path, "four")
os.mkdir(four_dir_path)
# 1 symlink to a directory
dir_link_path = os.path.join(test_dir_path, "dir_link")
os.symlink(four_dir_path, dir_link_path)
# 1 hidden directory
os.mkdir(os.path.join(test_dir_path, ".hidden_dir"))
# 1 regular file and 1 symlink
file_path = os.path.join(test_dir_path, "some_file")
file_link_path = os.path.join(test_dir_path, "file_link")
touch(file_path)
os.symlink(file_path, file_link_path)
# Paths used in tests below
cls.test_dir_path = test_dir_path
cls.dir_link_path = dir_link_path
cls.file_path = file_path
cls.file_link_path = file_link_path
@classmethod
def get_test_dir_path_list(cls):
return os.listdir(cls.test_dir_path)
def test_path_is_directory(self):
self.assertTrue(path_is_directory(self.test_dir_path))
def test_path_is_directory_reject_file(self):
self.assertFalse(path_is_directory(self.file_path))
def test_path_is_directory_accept_symlink(self):
self.assertTrue(
path_is_directory(self.dir_link_path, follow_links=True))
def test_path_is_directory_reject_symlink(self):
self.assertFalse(
path_is_directory(self.dir_link_path, follow_links=False))
def test_filter_paths_include_symlinks(self):
path_list = self.get_test_dir_path_list()
filtered_path_list = filter_paths(path_list, follow_links=True)
self.assertEqual(len(filtered_path_list), 5)
def test_filter_paths_exclude_symlinks(self):
path_list = self.get_test_dir_path_list()
filtered_path_list = filter_paths(path_list, follow_links=False)
self.assertEqual(len(filtered_path_list), 4)
def test_filter_paths_include_hidden(self):
path_list = self.get_test_dir_path_list()
filtered_path_list = filter_paths(path_list, include_hidden=True)
self.assertEqual(len(filtered_path_list), 6)
    def test_filter_paths_exclude_hidden(self):
path_list = self.get_test_dir_path_list()
filtered_path_list = filter_paths(path_list, include_hidden=False)
self.assertEqual(len(filtered_path_list), 5)
def test_get_print_directories(self):
path_list = ["one", "two", "three", "four"]
output = get_print_directories(path_list)
output_list = output.split(" ")
self.assertEqual(len(output_list), 4)
def test_get_print_directories_as_list(self):
path_list = ["one", "two", "three", "four"]
output = get_print_directories(path_list, as_list=True)
output_list = output.split("\n")
self.assertEqual(len(output_list), 4)
def test_get_best_match(self):
path_list = [
"Desktop", "Documents", "Downloads", "Projects", "Everything Else",
"else"]
# Match beginning or end
self.assertEqual(
"Desktop", get_best_match("desk", path_list))
self.assertEqual(
"Desktop", get_best_match("top", path_list))
# Match within
self.assertEqual(
"Downloads", get_best_match("load", path_list))
self.assertEqual(
"Everything Else", get_best_match("y", path_list))
# Full match
self.assertEqual(
"else", get_best_match("else", path_list))
# Fuzzy match
self.assertEqual(
"Everything Else", get_best_match("something", path_list))
self.assertEqual(
"Documents", get_best_match("dos", path_list))
self.assertEqual(
"Downloads", get_best_match("dol", path_list))
# Case insensitive
self.assertEqual(
"Desktop", get_best_match("DESK", path_list))
self.assertEqual(
"Downloads", get_best_match("DOL", path_list))
self.assertEqual(
"else", get_best_match("Else", path_list))
# XXX Anomalies...
# - 'do' normally gets us 'Desktop', but 'Documents' seems more useful,
# so a higher priority is given to exact matches at the beginning
self.assertEqual(
"Documents", get_best_match("do", path_list))
def test_get_best_match_no_match(self):
path_list = ["one", "two", "three"]
self.assertEqual(None, get_best_match("xyz", path_list))
if __name__ == "__main__":
unittest.main()
| 2.390625 | 2 |
bx24_orm/core/utils.py | dmitriilazukov/bx24_orm | 1 | 12798122 | # -*- coding: utf-8 -*-
class classproperty(property):
def __get__(self, cls, owner):
return classmethod(self.fget).__get__(None, owner)()
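
# Usage sketch (illustrative class, not part of bx24_orm):
#   class Settings:
#       _domain = 'example.bitrix24.ru'
#       @classproperty
#       def domain(cls):
#           return cls._domain
#   Settings.domain  # -> 'example.bitrix24.ru', no instance needed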
| 2.234375 | 2 |
examples/misc/hexapod_z.py | brutzl/pymbs | 0 | 12798123 | # -*- coding: utf-8 -*-
'''
This file is part of PyMbs.
PyMbs is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
PyMbs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with PyMbs.
If not, see <http://www.gnu.org/licenses/>.
Copyright 2011, 2012 <NAME>, <NAME>,
<NAME>, <NAME>
'''
'''
Created on 13.05.2011
@author: <NAME>
Adjust the paths for the visualisation !!!
'''
#################################
# import PyMbs & Lib. #
#################################
from PyMbs.Input import *
from PyMbs.Symbolics import Matrix,cos,sin
pi = 3.1415926535897932384626433832795
#################################
# set up inertial frame #
#################################
world=MbsSystem([0,0,-1])
#################################
# Parameters #
#################################
# Length of the cylinder rods and housings
hoehe = 0.01
R_AP=0.3
R_BP=0.5
R_Zyl_stange=0.02
R_Zyl_geh=0.04
l_zyl=0.6
m_z_geh = 0.1
m_z_st = 0.1
c=world.addParam('c',10)
c1=world.addParam('c1',5)
m1=world.addParam('m1', 1.0)
R1=world.addParam('R1', R_BP)
m2=world.addParam('m2', 50)
R2=world.addParam('R2', R_AP)
H2=world.addParam('H2',hoehe)
I2x=world.addParam( 'I2x', (m2*H2**2)/12) # inertia of a solid cylinder about the x-axis
I2y=world.addParam( 'I2y', (m2*H2**2)/12) # inertia of a solid cylinder about the y-axis
I2z=world.addParam( 'I2z', (m2*R2**2)/2)  # inertia of a solid cylinder about the z-axis
################################################
m_Zyl_Geh=world.addParam('m_Zyl_Geh', 18.6)
l_Zyl_Geh=world.addParam('l_Zyl_Geh',0.74)
cg_Zyl_Geh_x=world.addParam('cg_Zyl_Geh_x',0.353)
I_Zyl_Geh_x=world.addParam( 'I_Zyl_Geh_x', 0.027)
I_Zyl_Geh_y=world.addParam( 'I_Zyl_Geh_y', 1.061)
I_Zyl_Geh_z=world.addParam( 'I_Zyl_Geh_z', 1.061)
m_Zyl_Stange=world.addParam('m_Zyl_Stange', 8.4)
l_Zyl_Stange=world.addParam('l_Zyl_Stange',0.66)
cg_Zyl_Stange_x=world.addParam('cg_Zyl_Stange_x',-0.347)
I_Zyl_Stange_x=world.addParam('I_Zyl_Stange_x', 0.003)
I_Zyl_Stange_y=world.addParam('I_Zyl_Stange_y', 0.433)
I_Zyl_Stange_z=world.addParam('I_Zyl_Stange_z', 0.432)
################
# Arrangements #
################
phi_BP_1 = pi/2-pi/18
phi_BP_2 = phi_BP_1 + pi/9
phi_BP_3 = phi_BP_1 + 2*pi/3
phi_BP_4 = phi_BP_2 + 2*pi/3
phi_BP_5 = phi_BP_3 + 2*pi/3
phi_BP_6 = phi_BP_4 + 2*pi/3
phi_AP_1 = pi/6+pi/18
phi_AP_2 = phi_AP_1 + 2*pi/3-pi/9
phi_AP_3 = phi_AP_1 + 2*pi/3
phi_AP_4 = phi_AP_3 + 2*pi/3-pi/9
phi_AP_5 = phi_AP_3 + 2*pi/3
phi_AP_6 = phi_AP_4 + 2*pi/3
################
# Hexapod #
################
#################################
# Bodies & KS #
#################################
Ground = world.addBody(name='Ground',mass=1)
Ground.addFrame(name='KS_1',p=[0,0,0])
BP = Ground.KS_1
BP.addFrame(name='BP_visual', p=[0,0,0],R=rotMat(pi/2,'x'))
BP.addFrame(name='BP_Anlenkpunkt_1', p=[R1*cos(phi_BP_1),R1*sin(phi_BP_1),0])
BP.addFrame(name='BP_Anlenkpunkt_2', p=[R1*cos(phi_BP_2),R1*sin(phi_BP_2),0])
BP.addFrame(name='BP_Anlenkpunkt_3', p=[R1*cos(phi_BP_3),R1*sin(phi_BP_3),0])
BP.addFrame(name='BP_Anlenkpunkt_4', p=[R1*cos(phi_BP_4),R1*sin(phi_BP_4),0])
BP.addFrame(name='BP_Anlenkpunkt_5', p=[R1*cos(phi_BP_5),R1*sin(phi_BP_5),0])
BP.addFrame(name='BP_Anlenkpunkt_6', p=[R1*cos(phi_BP_6),R1*sin(phi_BP_6),0])
BP.addFrame(name='BP_Feder',p=[0,0,1.1])
################################################################################
AP = world.addBody(name='Arbeitsplattform', mass=m2,inertia=diag([I2x,I2y,I2z]))
AP.addFrame(name='AP_visual', p=[0,0,0],R=rotMat(pi/2,'x'))
AP.addFrame(name='AP_Anlenkpunkt_1', p=[R2*cos(phi_AP_1),R2*sin(phi_AP_1),0])
AP.addFrame(name='AP_Anlenkpunkt_2', p=[R2*cos(phi_AP_2),R2*sin(phi_AP_2),0])
AP.addFrame(name='AP_Anlenkpunkt_3', p=[R2*cos(phi_AP_3),R2*sin(phi_AP_3),0])
AP.addFrame(name='AP_Anlenkpunkt_4', p=[R2*cos(phi_AP_4),R2*sin(phi_AP_4),0])
AP.addFrame(name='AP_Anlenkpunkt_5', p=[R2*cos(phi_AP_5),R2*sin(phi_AP_5),0])
AP.addFrame(name='AP_Anlenkpunkt_6', p=[R2*cos(phi_AP_6),R2*sin(phi_AP_6),0])
################################################################################
'''
#For visualisation in Dymola
Zyl_geh_1 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_1')
Zyl_geh_1.addFrame('Zyl_geh_1_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_1.addFrame('Zyl_geh_1_cs', p=[0,0,0])
Zyl_geh_1.addFrame('Zyl_geh_1_cs_2', p=[0,0,0])
Zyl_geh_2 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_2')
Zyl_geh_2.addFrame('Zyl_geh_2_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_2.addFrame('Zyl_geh_2_cs', p=[0,0,0])
Zyl_geh_2.addFrame('Zyl_geh_2_cs_2', p=[0,0,0])
Zyl_geh_3 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_3')
Zyl_geh_3.addFrame('Zyl_geh_3_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_3.addFrame('Zyl_geh_3_cs', p=[0,0,0])
Zyl_geh_3.addFrame('Zyl_geh_3_cs_2', p=[0,0,0])
Zyl_geh_4 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_4')
Zyl_geh_4.addFrame('Zyl_geh_4_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_4.addFrame('Zyl_geh_4_cs', p=[0,0,0])
Zyl_geh_4.addFrame('Zyl_geh_4_cs_2', p=[0,0,0])
Zyl_geh_5 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_5')
Zyl_geh_5.addFrame('Zyl_geh_5_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_5.addFrame('Zyl_geh_5_cs', p=[0,0,0])
Zyl_geh_5.addFrame('Zyl_geh_5_cs_2', p=[0,0,0])
Zyl_geh_6 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_6')
Zyl_geh_6.addFrame('Zyl_geh_6_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_geh_6.addFrame('Zyl_geh_6_cs', p=[0,0,0])
Zyl_geh_6.addFrame('Zyl_geh_6_cs_2', p=[0,0,0])
################################################################################
Zyl_stange_1 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_1')
Zyl_stange_1.addFrame('Zyl_stange_1_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_1.addFrame('Zyl_stange_1_cs', p=[0,0,0])
Zyl_stange_1.addFrame('Zyl_stange_1_cs_2', p=[0,0,0])
Zyl_stange_2 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_2')
Zyl_stange_2.addFrame('Zyl_stange_2_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_2.addFrame('Zyl_stange_2_cs', p=[0,0,0])
Zyl_stange_2.addFrame('Zyl_stange_2_cs_2', p=[0,0,0])
Zyl_stange_3 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_3')
Zyl_stange_3.addFrame('Zyl_stange_3_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_3.addFrame('Zyl_stange_3_cs', p=[0,0,0])
Zyl_stange_3.addFrame('Zyl_stange_3_cs_2', p=[0,0,0])
Zyl_stange_4 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_4')
Zyl_stange_4.addFrame('Zyl_stange_4_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_4.addFrame('Zyl_stange_4_cs', p=[0,0,0])
Zyl_stange_4.addFrame('Zyl_stange_4_cs_2', p=[0,0,0])
Zyl_stange_5 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_5')
Zyl_stange_5.addFrame('Zyl_stange_5_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_5.addFrame('Zyl_stange_5_cs', p=[0,0,0])
Zyl_stange_5.addFrame('Zyl_stange_5_cs_2', p=[0,0,0])
Zyl_stange_6 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_6')
Zyl_stange_6.addFrame('Zyl_stange_6_visual', p=[0,0,0],R=rotMat(pi/2,'y')*rotMat(pi/2,'x'))
Zyl_stange_6.addFrame('Zyl_stange_6_cs', p=[0,0,0])
Zyl_stange_6.addFrame('Zyl_stange_6_cs_2', p=[0,0,0])
'''
# For visualisation in PyMbs
Zyl_geh_1 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_1')
Zyl_geh_1.addFrame('Zyl_geh_1_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_1.addFrame('Zyl_geh_1_cs', p=[0,0,0])
Zyl_geh_1.addFrame('Zyl_geh_1_cs_2', p=[0,0,0])
Zyl_geh_2 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_2')
Zyl_geh_2.addFrame('Zyl_geh_2_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_2.addFrame('Zyl_geh_2_cs', p=[0,0,0])
Zyl_geh_2.addFrame('Zyl_geh_2_cs_2', p=[0,0,0])
Zyl_geh_3 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_3')
Zyl_geh_3.addFrame('Zyl_geh_3_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_3.addFrame('Zyl_geh_3_cs', p=[0,0,0])
Zyl_geh_3.addFrame('Zyl_geh_3_cs_2', p=[0,0,0])
Zyl_geh_4 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_4')
Zyl_geh_4.addFrame('Zyl_geh_4_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_4.addFrame('Zyl_geh_4_cs', p=[0,0,0])
Zyl_geh_4.addFrame('Zyl_geh_4_cs_2', p=[0,0,0])
Zyl_geh_5 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_5')
Zyl_geh_5.addFrame('Zyl_geh_5_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_5.addFrame('Zyl_geh_5_cs', p=[0,0,0])
Zyl_geh_5.addFrame('Zyl_geh_5_cs_2', p=[0,0,0])
Zyl_geh_6 = world.addBody( mass=m_Zyl_Geh,cg=[cg_Zyl_Geh_x,0,0], inertia=diag([I_Zyl_Geh_x,I_Zyl_Geh_y,I_Zyl_Geh_z]),name='Zyl_geh_6')
Zyl_geh_6.addFrame('Zyl_geh_6_visual', p=[0,0,l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_geh_6.addFrame('Zyl_geh_6_cs', p=[0,0,0])
Zyl_geh_6.addFrame('Zyl_geh_6_cs_2', p=[0,0,0])
################################################################################
Zyl_stange_1 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_1')
Zyl_stange_1.addFrame('Zyl_stange_1_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_1.addFrame('Zyl_stange_1_cs', p=[0,0,0])
Zyl_stange_1.addFrame('Zyl_stange_1_cs_2', p=[0,0,0])
Zyl_stange_2 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_2')
Zyl_stange_2.addFrame('Zyl_stange_2_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_2.addFrame('Zyl_stange_2_cs', p=[0,0,0])
Zyl_stange_2.addFrame('Zyl_stange_2_cs_2', p=[0,0,0])
Zyl_stange_3 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_3')
Zyl_stange_3.addFrame('Zyl_stange_3_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_3.addFrame('Zyl_stange_3_cs', p=[0,0,0])
Zyl_stange_3.addFrame('Zyl_stange_3_cs_2', p=[0,0,0])
Zyl_stange_4 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_4')
Zyl_stange_4.addFrame('Zyl_stange_4_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_4.addFrame('Zyl_stange_4_cs', p=[0,0,0])
Zyl_stange_4.addFrame('Zyl_stange_4_cs_2', p=[0,0,0])
Zyl_stange_5 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_5')
Zyl_stange_5.addFrame('Zyl_stange_5_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_5.addFrame('Zyl_stange_5_cs', p=[0,0,0])
Zyl_stange_5.addFrame('Zyl_stange_5_cs_2', p=[0,0,0])
Zyl_stange_6 = world.addBody( mass=m_Zyl_Stange,cg=[cg_Zyl_Stange_x,0,0], inertia=diag([I_Zyl_Stange_x,I_Zyl_Stange_y,I_Zyl_Stange_z]),name='Zyl_stange_6')
Zyl_stange_6.addFrame('Zyl_stange_6_visual', p=[0,0,-l_zyl/2],R=rotMat(pi/2,'x'))
Zyl_stange_6.addFrame('Zyl_stange_6_cs', p=[0,0,0])
Zyl_stange_6.addFrame('Zyl_stange_6_cs_2', p=[0,0,0])
#################################
# Joints #
#################################
#world.addJoint('fix_BP', world, BP)
world.addJoint( world, Ground, name='fix_BP')
jAP=world.addJoint(world, AP,['Tx', 'Ty', 'Tz','Rx', 'Ry', 'Rz'],[0,0,1,0,0,0],name='free_AP')
world.addJoint(BP.BP_Anlenkpunkt_1,Zyl_geh_1.Zyl_geh_1_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_1_an_BP_1')
world.addJoint(BP.BP_Anlenkpunkt_2,Zyl_geh_2.Zyl_geh_2_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_1_an_BP_2')
world.addJoint(BP.BP_Anlenkpunkt_3,Zyl_geh_3.Zyl_geh_3_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_1_an_BP_3')
world.addJoint(BP.BP_Anlenkpunkt_4,Zyl_geh_4.Zyl_geh_4_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_1_an_BP_4')
world.addJoint(BP.BP_Anlenkpunkt_5,Zyl_geh_5.Zyl_geh_5_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_1_an_BP_5')
world.addJoint(BP.BP_Anlenkpunkt_6,Zyl_geh_6.Zyl_geh_6_cs_2,['Rz', 'Ry'],[0,0],name='Zyl_geh_1_an_BP_6')
world.addJoint(Zyl_geh_1.Zyl_geh_1_cs,Zyl_stange_1.Zyl_stange_1_cs_2,'Tz',0,name='Zyl_stange_1_an_Zyl_geh_1')
world.addJoint(Zyl_geh_2.Zyl_geh_2_cs,Zyl_stange_2.Zyl_stange_2_cs_2,'Tz',0,name='Zyl_stange_1_an_Zyl_geh_2')
world.addJoint(Zyl_geh_3.Zyl_geh_3_cs,Zyl_stange_3.Zyl_stange_3_cs_2,'Tz',0,name='Zyl_stange_1_an_Zyl_geh_3')
world.addJoint(Zyl_geh_4.Zyl_geh_4_cs,Zyl_stange_4.Zyl_stange_4_cs_2,'Tz',0,name='Zyl_stange_1_an_Zyl_geh_4')
world.addJoint(Zyl_geh_5.Zyl_geh_5_cs,Zyl_stange_5.Zyl_stange_5_cs_2,'Tz',0,name='Zyl_stange_1_an_Zyl_geh_5')
world.addJoint(Zyl_geh_6.Zyl_geh_6_cs,Zyl_stange_6.Zyl_stange_6_cs_2,'Tz',0,name='Zyl_stange_1_an_Zyl_geh_6')
########################
# Constraints or Loops #
########################
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_1, Zyl_stange_1.Zyl_stange_1_cs, 'Verbindung_1')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_2, Zyl_stange_2.Zyl_stange_2_cs, 'Verbindung_2')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_3, Zyl_stange_3.Zyl_stange_3_cs, 'Verbindung_3')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_4, Zyl_stange_4.Zyl_stange_4_cs, 'Verbindung_4')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_5, Zyl_stange_5.Zyl_stange_5_cs, 'Verbindung_5')
world.addLoop.Hexapod(AP.AP_Anlenkpunkt_6, Zyl_stange_6.Zyl_stange_6_cs, 'Verbindung_6')
#####################
# add visualisation #
#####################
world.addVisualisation.Cylinder(BP.BP_visual,R_BP, hoehe)
world.addVisualisation.Cylinder(AP.AP_visual,R_AP, hoehe)
'''
# For visualisation in Dymola
world.addVisualisation.File(Zyl_geh_1.Zyl_geh_1_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_1')
world.addVisualisation.File(Zyl_geh_2.Zyl_geh_2_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_2')
world.addVisualisation.File(Zyl_geh_3.Zyl_geh_3_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_3')
world.addVisualisation.File(Zyl_geh_4.Zyl_geh_4_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_4')
world.addVisualisation.File(Zyl_geh_5.Zyl_geh_5_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_5')
world.addVisualisation.File(Zyl_geh_6.Zyl_geh_6_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_geh_001.stl',1,name='Zylinder_geh_6')
world.addVisualisation.File(Zyl_stange_1.Zyl_stange_1_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_1')
world.addVisualisation.File(Zyl_stange_2.Zyl_stange_2_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_2')
world.addVisualisation.File(Zyl_stange_3.Zyl_stange_3_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_3')
world.addVisualisation.File(Zyl_stange_4.Zyl_stange_4_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_4')
world.addVisualisation.File(Zyl_stange_5.Zyl_stange_5_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_5')
world.addVisualisation.File(Zyl_stange_6.Zyl_stange_6_visual, 'C:\\Users\JeSche\Desktop\Diplom_Arbeit\Hexapod/zyl_stange_001.stl',1,name='Zylinder_stange_6')
'''
# For visualisation in PyMbs (the quoted block above is the Dymola variant)
world.addVisualisation.Cylinder(Zyl_geh_1.Zyl_geh_1_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_2.Zyl_geh_2_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_3.Zyl_geh_3_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_4.Zyl_geh_4_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_5.Zyl_geh_5_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_geh_6.Zyl_geh_6_visual, R_Zyl_geh,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_1.Zyl_stange_1_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_2.Zyl_stange_2_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_3.Zyl_stange_3_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_4.Zyl_stange_4_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_5.Zyl_stange_5_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Cylinder(Zyl_stange_6.Zyl_stange_6_visual, R_Zyl_stange,l_zyl)
world.addVisualisation.Frame(AP,0.4)
#world.addVisualisation.Frame(BP.BP_Feder,1)
world.addVisualisation.Frame(Ground,0.6)
print("System has been assembled")
#################################
# add Sensors #
#################################
#world.addSensor.Position(world,AP.AP_Anlenkpunkt_1,"P_AP_1")
#world.addSensor.Energy(AP,'E_AP')
#####################
# add Imput & Load #
#####################
#l = world.addSensor.Distance(AP,BP.BP_Feder, 'l', 'DistanceSensor')
#lz = world.addSensor.Distance(BP,AP, 'lz', 'DistanceSensor_Cylinder')
#c=50
#F_c = world.addExpression('SpringForce', 'F_c', -c*l[0])
#world.addLoad.PtPForce(AP,BP.BP_Feder, F_c, name='Spring')
#################################
# generate equations & sim Code #
#################################
world.genEquations.Recursive()
#world.genCode.Modelica('hexapod_z_kpl','.\HP_Output',inputsAsInputs=True, debugMode=False)
world.show('hexapod_z_kpl')
| 1.75 | 2 |
gym_round_bot/envs/round_bot_worlds.py | robotsthatdream/gym-round_bot | 2 | 12798124 | #!/usr/bin/python
# -*- coding: utf-8 -*-
""" <NAME>
ISIR - CNRS / Sorbonne Université
02/2018
    This file allows building worlds.
    TODO : replace this file with a .json loader and code worlds in .json
"""
# WARNING : don't do (from round_bot_py import round_bot_model) here, to avoid mutual imports !
import os
def _texture_path(texture_bricks_name):
"""
Parameter
---------
texture_bricks_name : str
        name of the world's main texture
Return
------
texture_path: (str) path corresponding to the texture_bricks_name
Raises
------
    ValueError : raised if texture_bricks_name is unknown
"""
if texture_bricks_name == 'minecraft':
return '/textures/texture_minecraft.png'
elif texture_bricks_name == 'graffiti':
return '/textures/texture_graffiti.png'
elif texture_bricks_name == 'colours':
return '/textures/texture_colours.png'
else :
raise ValueError('Unknown texture name '+ texture_bricks_name + ' in loading world')
def _build_square_default_world(model, texture_bricks_name, width=45, depth=45, hwalls=4, dwalls=1,
texture_robot='/textures/robot.png',
texture_visualisation='/textures/visualisation.png',
texture_distractors='/textures/texture_distractors.png',
wall_reward=-1,
distractors=False,
distractors_speed=0.1,
sandboxes=False,
trigger_button=False,
):
"""
Builds a simple rectangle planar world with walls around
Parameters
----------
- model : (round_bot_model.Model) model to load world in
- texture_bricks_name : (str) name of the texture for the bricks
- width : (int) width of the world
- depth : (int) depth of the world
    - hwalls : (int) height of walls
- dwalls: (int) depth of walls
- texture_bricks, texture_robot, texture_visualisation : (string)
paths for texture image of bricks, robot and visualisation
- wall_reward : (float) reward for wall collision
- distractors (Bool) : add visual distractors on walls and ground
- distractors_speed (float) : speed of visual distractors displacement
    - sandboxes (Bool) : add sandboxes on the ground (slowing down the robot when crossed)
- trigger_button (Bool) : add a trigger button that will trigger a change in the environment (change to be defined)
Returns
-------
world information
"""
texture_bricks = _texture_path(texture_bricks_name)
# TODO : better import would be global and without "from" but doesn't work for the moment
from gym_round_bot.envs import round_bot_model
# create textures coordinates
GRASS = round_bot_model.Block.tex_coords((1, 0), (0, 1), (0, 0))
SAND = round_bot_model.Block.tex_coords((1, 1), (1, 1), (1, 1))
BRICK = round_bot_model.Block.tex_coords((2, 0), (2, 0), (2, 0))
BRICK2 = round_bot_model.Block.tex_coords((0, 2), (0, 2), (0, 2))
STONE = round_bot_model.Block.tex_coords((2, 1), (2, 1), (2, 1))
STONE2 = round_bot_model.Block.tex_coords((1, 2), (1, 2), (1, 2))
BUTTON = round_bot_model.Block.tex_coords((2, 2), (2, 2), (2, 2))
DISTRACTORS = [ round_bot_model.Block.tex_coords(t,t,t) for t in [(0,0),(1,0),(2,0),(0,1),(1,1),(2,1),(2,0)] ]
nw = width/2.0 # 1/2 width of this world
nd = depth/2.0 # 1/2 depth of this world
wr = width/3.0 # wr width of reward area
wwalls = width
# get texture paths in current directory
brick_texture_path = os.path.dirname(__file__) + texture_bricks
robot_texture_path = os.path.dirname(__file__) + texture_robot
visualisation_texture_path = os.path.dirname(__file__) + texture_visualisation
distractors_texture_path = os.path.dirname(__file__) + texture_distractors
texture_paths = {'brick':brick_texture_path,
'robot':robot_texture_path,
'visualisation':visualisation_texture_path,
'distractors':distractors_texture_path,
}
    # Build ground block
ground_block = model.add_block( (0, -3, 0, 2*nd, 6, 2*nw, 0.0, 0.0, 0.0), GRASS, block_type='brick')
# Build wall blocks with negative reward on collision
    #back wall
back_wall_block = model.add_block( (0, hwalls/2, -nw, depth, hwalls, dwalls, 0.0, 0.0, 0.0),
texture=BRICK, block_type='brick', collision_reward = wall_reward)
    #front wall
front_wall_block = model.add_block( (0, hwalls/2, nw, depth, hwalls, dwalls, 0.0, 0.0, 0.0),
texture=STONE2, block_type='brick', collision_reward = wall_reward)
#left wall
left_wall_block = model.add_block( (-nd, hwalls/2, 0, dwalls, hwalls, wwalls, 0.0, 0.0, 0.0),
texture=STONE, block_type='brick', collision_reward = wall_reward)
#right wall
right_wall_block = model.add_block( (nd, hwalls/2, 0, dwalls, hwalls, wwalls, 0.0, 0.0, 0.0),
texture=BRICK2, block_type='brick', collision_reward = wall_reward)
if distractors:
# add visual distractors on the groud and inner faces of walls if asked
# distractor ground block
size_ground_distractor = n = min(nw,nd)
ground_bb = round_bot_model.BoundingBoxBlock( (0, 0.1, 0), (2*n, 0, 2*n), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, size_ground_distractor, 0.0, size_ground_distractor, 0.0, 0.0, 0.0),
texture=DISTRACTORS[0], block_type='flat_distractor', boundingBox = ground_bb, speed=distractors_speed)
model.add_block( components=(0, 0, 0, size_ground_distractor, 0.0, size_ground_distractor, 0.0, 0.0, 0.0),
texture=DISTRACTORS[0], block_type='flat_distractor', boundingBox = ground_bb, speed=distractors_speed)
# wall distractors :
width_wall_distractors = wwalls/2
height_wall_distractors = hwalls*2/3
# distractor back_wall inner face block
back_wall_bb = round_bot_model.BoundingBoxBlock( (0, hwalls/2, -nw+dwalls/2+0.1), (wwalls, height_wall_distractors, 0.0), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, width_wall_distractors, height_wall_distractors, 0.0, 0.0, 0.0, 0.0),
texture=DISTRACTORS[1], block_type='flat_distractor', boundingBox = back_wall_bb, speed=distractors_speed)
# distractor front_wall inner face block
front_wall_bb = round_bot_model.BoundingBoxBlock(( 0, hwalls/2, nw-dwalls/2-0.1), (wwalls, height_wall_distractors, 0.0), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, width_wall_distractors, height_wall_distractors, 0.0, 0.0, 0.0, 0.0),
texture=DISTRACTORS[2], block_type='flat_distractor', boundingBox = front_wall_bb, speed=distractors_speed)
# distractor left_wall inner face block
left_wall_bb = round_bot_model.BoundingBoxBlock( (-nd+dwalls/2+0.1, hwalls/2, 0), (0.0, height_wall_distractors, wwalls), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, 0.0, height_wall_distractors, width_wall_distractors, 0.0, 0.0, 0.0),
texture=DISTRACTORS[3], block_type='flat_distractor', boundingBox = left_wall_bb, speed=distractors_speed)
# distractor right_wall inner face block
right_wall_bb = round_bot_model.BoundingBoxBlock(( nd-dwalls/2-0.1, hwalls/2, 0), (0.0, height_wall_distractors, wwalls), (0.0, 0.0, 0.0), linked_block=ground_block)
model.add_block( components=(0, 0, 0, 0.0, height_wall_distractors, width_wall_distractors, 0.0, 0.0, 0.0),
texture=DISTRACTORS[4], block_type='flat_distractor', boundingBox = right_wall_bb, speed=distractors_speed)
if sandboxes :
        # add sandboxes on the ground if asked (slowing down the robot when crossed)
model.add_block( (0, 0.3, 0, nd/2, 0, nw/2, 0.0, 0.0, 0.0), SAND, block_type='sandbox')
if trigger_button :
# add a trigger button that will trigger a change in the world when crossed ON / OFF
#TRIGGER = round_bot_model.Block.tex_coords((1, 0), (1, 0), (1, 0))
model.add_block( (0, 0.3, 0, nw/3, 0.2, nw/3, 0.0, 0.0, 0.0), BUTTON, block_type='trigger_button')
world_info = { 'width' : 2*nw,
'depth' : 2*nd,
}
return texture_paths, world_info
def build_square_world(model, texture, robot_diameter=2 ,width=45, depth=45, hwalls=4, dwalls=1, wall_reward=-1, goal_reward=10, distractors=False,
distractors_speed=0.1, sandboxes=False, trigger_button=False, visible_reward=False):
"""
Builds the square world
"""
## first build default world
texture_paths, world_info = _build_square_default_world(model, texture, width=width, depth=depth,
hwalls=hwalls, dwalls=dwalls,
wall_reward=wall_reward, distractors=distractors,
distractors_speed=distractors_speed,
sandboxes=sandboxes, trigger_button=trigger_button,)
## then add specs
from gym_round_bot.envs import round_bot_model
BOT = round_bot_model.Block.tex_coords((0, 0), (0, 1), (0, 1))
START = round_bot_model.Block.tex_coords((0, 0), (0, 0), (0, 0))
REWARD = round_bot_model.Block.tex_coords((0, 1), (0, 1), (0, 1))
nw = width/2.0 # 1/2 width of this world
nd = depth/2.0 # 1/2 depth of this world
wwalls = width # width of walls
wr = width/4.0 # wr width of reward area
# set robot specifications
bot_radius = robot_diameter/2.0
bot_height = bot_radius
# Build reward block in the corner
rew = model.add_block( (nd-(wr/2+dwalls/2), bot_height/2.0, -nw+(wr/2+dwalls/2), wr, bot_height/3.0, wr, 0.0, 0.0, 0.0),
texture=REWARD, block_type='reward', collision_reward = goal_reward, visible=visible_reward)
# Build robot block, set initial height to bot_heigh/2 + small offset to avoid ground collision
model.add_block( (0, bot_height/2.0+0.1, 0, 2*bot_radius, bot_height, 2*bot_radius, 0.0, 0.0, 0.0),
texture=BOT, block_type='robot')
# add starting areas (the height=0 of block does not matter here, only area of (hwalls-2*dwalls)^2)
model.add_block( (0, bot_height/2.0+0.1, 0, 2*nd-2*dwalls, 0.1, 2*nw-2*dwalls, 0.0, 0.0, 0.0),
texture=START, block_type='start')
return texture_paths, world_info
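
# Usage sketch (assumes a round_bot_model.Model instance; the constructor call
# below is hypothetical):
#   from gym_round_bot.envs import round_bot_model
#   model = round_bot_model.Model()
#   texture_paths, world_info = build_square_world(model, 'minecraft')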
def build_square_1wall_world(model, texture, robot_diameter=2, width=45, depth=45, hwalls=2, dwalls=2, wall_reward=-1, goal_reward=10, distractors=False,
distractors_speed=0.1, sandboxes=False, trigger_button=False, visible_reward=False):
"""
Builds a simple rectangle planar world with walls around, and 1 wall in the middle
"""
## first build default world
texture_paths, world_info = _build_square_default_world(model, texture, width=width, depth=depth,
hwalls=hwalls, dwalls=dwalls,
wall_reward=wall_reward, distractors=distractors,
distractors_speed=distractors_speed,
sandboxes=sandboxes, trigger_button=trigger_button,)
## then add specs
from gym_round_bot.envs import round_bot_model
BOT = round_bot_model.Block.tex_coords((0, 0), (0, 1), (0, 1))
START = round_bot_model.Block.tex_coords((0, 0), (0, 0), (0, 0))
REWARD = round_bot_model.Block.tex_coords((0, 1), (0, 1), (0, 1))
SAND = round_bot_model.Block.tex_coords((1, 1), (1, 1), (1, 1))
n = width/2.0 # 1/2 width and depth of world
wwalls = 2*n # width of walls
wr = width/4.0 # wr width of reward area
# set robot specifications
bot_radius = robot_diameter/2.0
bot_height = bot_radius
# middle wall
    model.add_block( (n/2, hwalls/2, -n/4, wwalls/2, hwalls, dwalls, 0.0, 0.0, 0.0), SAND, block_type='brick', collision_reward = wall_reward)
# Build reward block in the corner
model.add_block( (n-(wr/2+dwalls/2), bot_height/2.0, -n+(wr/2+dwalls/2), wr, bot_height/3.0, wr, 0.0, 0.0, 0.0),
                texture=REWARD, block_type='reward', collision_reward = goal_reward, visible=visible_reward)
# Build robot block, set initial height to bot_heigh/2 + small offset to avoid ground collision
model.add_block( (0, bot_height/2.0+0.1, 0, 2*bot_radius, bot_height, 2*bot_radius, 0.0, 0.0, 0.0),
texture=BOT, block_type='robot')
# add starting areas (the height=0 of block does not matter here, only area of (hwalls-2*dwalls)^2)
model.add_block( (0, bot_height/2.0+0.1, (wwalls-2*dwalls)/4, wwalls-2*dwalls, 0.1, (wwalls-2*dwalls)/2, 0.0, 0.0, 0.0),
texture=START, block_type='start')
model.add_block( ( -(wwalls-2*dwalls)/4, bot_height/2.0+0.1, -(wwalls-2*dwalls)/4, (wwalls-2*dwalls)/2, 0.1, (wwalls-2*dwalls)/2, 0.0, 0.0, 0.0),
texture=START, block_type='start')
return texture_paths, world_info
| 2.578125 | 3 |
uwhoisd/caching.py | kgaughan/uwhoisd | 32 | 12798125 | <gh_stars>10-100
"""
Caching support.
"""
import collections
import logging
import time
import pkg_resources
logger = logging.getLogger("uwhoisd")
class UnknownCache(Exception):
"""
The supplied cache type name cannot be found.
"""
def get_cache(cfg):
"""
Attempt to load the configured cache.
"""
cache_name = cfg.pop("type", "null")
if cache_name == "null":
logger.info("Caching deactivated")
return None
for ep in pkg_resources.iter_entry_points("uwhoisd.cache"):
if ep.name == cache_name:
logger.info("Using cache '%s' with the parameters %r", cache_name, cfg)
cache_type = ep.load()
return cache_type(**cfg)
raise UnknownCache(cache_name)
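
# Usage sketch (assumes an entry point named "lfu" is registered under the
# "uwhoisd.cache" group; the parameters map onto LFU below):
#   cache = get_cache({"type": "lfu", "max_size": "128", "max_age": "60"})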
def wrap_whois(cache, whois_func):
"""
Wrap a WHOIS query function with a cache.
"""
if cache is None:
return whois_func
def wrapped(query):
response = cache.get(query)
if response is None:
response = whois_func(query)
cache.set(query, response)
else:
logger.info("Cache hit for '%s'", query)
return response
return wrapped
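
# Usage sketch (names illustrative):
#   def raw_whois(query):
#       ...  # perform the actual WHOIS lookup
#   whois = wrap_whois(LFU(max_size=128, max_age=60), raw_whois)
#   whois("example.com")  # a repeat query within max_age hits the cache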
# pylint: disable-msg=R0924
class LFU(object):
"""
A simple LFU cache.
"""
# This is implemented as an LFU cache. The eviction queue contains
# 2-tuples consisting of the time the item was put into the cache and the
# cache key. The cache maps cache keys onto 2-tuples consisting of a
# counter giving the number of times this item occurs on the eviction queue
# and the value.
#
# I may end up reimplementing this as an LRU cache if it turns out that's
# more apt, but I haven't went that route as an LRU cache is somewhat more
# awkward and involved to implement correctly.
__slots__ = ("cache", "max_age", "max_size", "queue")
clock = staticmethod(time.time)
def __init__(self, max_size=256, max_age=300):
"""
Create a new LFU cache.
:param max_size int: Maximum number of entries the cache can contain.
:param max_age int: Maximum number of seconds to consider an entry
live.
"""
super(LFU, self).__init__()
self.cache = {}
self.queue = collections.deque()
self.max_size = int(max_size)
self.max_age = int(max_age)
def evict_one(self):
"""
Remove the item at the head of the eviction cache.
"""
_, key = self.queue.popleft()
self.attempt_eviction(key)
def attempt_eviction(self, key):
"""
Attempt to remove the named item from the cache.
"""
counter, value = self.cache[key]
counter -= 1
if counter == 0:
del self.cache[key]
else:
self.cache[key] = (counter, value)
def evict_expired(self):
"""
Evict any items older than the maximum age from the cache.
"""
cutoff = self.clock() - self.max_age
while len(self.queue) > 0:
ts, key = self.queue.popleft()
if ts > cutoff:
self.queue.appendleft((ts, key))
break
self.attempt_eviction(key)
def get(self, key):
"""
Pull a value from the cache corresponding to the key.
If no value exists, `None` is returned.
"""
self.evict_expired()
if key not in self.cache:
return None
_, value = self.cache[key]
# Force this onto the top of the queue.
self.set(key, value)
return value
def set(self, key, value):
"""
Add `value` to the cache, to be referenced by `key`.
"""
if len(self.queue) == self.max_size:
self.evict_one()
if key in self.cache:
counter, _ = self.cache[key]
else:
counter = 0
self.cache[key] = (counter + 1, value)
self.queue.append((self.clock(), key))
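
# Usage sketch:
#   lfu = LFU(max_size=256, max_age=300)
#   lfu.set("example.com", "whois response ...")
#   lfu.get("example.com")  # -> "whois response ..." while fresh
#   lfu.get("missing.invalid")  # -> None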
| 2.78125 | 3 |
tests/unit/configuration_subsystem/test_settings_sources.py | saito-hideki/ansible-navigator | 0 | 12798126 | <gh_stars>0
"""Test the ability to produce a dictionary of effective sources."""
from copy import deepcopy
import pytest
from ansible_navigator.configuration_subsystem import Configurator
from ansible_navigator.configuration_subsystem import Constants as C
from ansible_navigator.configuration_subsystem import NavigatorConfiguration
from ansible_navigator.configuration_subsystem import to_sources
from ansible_navigator.initialization import parse_and_update
def test_defaults():
"""Check the settings file used as a sample against the schema."""
settings = deepcopy(NavigatorConfiguration)
settings.internals.initializing = True
Configurator(params=[], application_configuration=settings).configure()
sources = to_sources(settings)
for path, source in sources.items():
assert source in [C.AUTO.value, C.DEFAULT_CFG.value, C.NOT_SET.value, C.NONE.value], (
path,
source,
)
def test_cli():
"""Test the source of effective settings given some cli parameters."""
settings = deepcopy(NavigatorConfiguration)
settings.internals.initializing = True
params = ["images", "--ll", "debug", "--la", "false"]
_messages, _exit_messages = parse_and_update(params=params, args=settings)
sources = to_sources(settings)
assert sources.pop("ansible-navigator.app") == C.USER_CLI.value
assert sources.pop("ansible-navigator.logging.level") == C.USER_CLI.value
assert sources.pop("ansible-navigator.logging.append") == C.USER_CLI.value
for path, source in sources.items():
assert source in [C.AUTO.value, C.DEFAULT_CFG.value, C.NOT_SET.value, C.NONE.value], (
path,
source,
)
def test_env(monkeypatch: pytest.MonkeyPatch):
"""Test the source of effective settings given some environment variables.
:param monkeypatch: The pytest monkeypatch fixture
"""
settings = deepcopy(NavigatorConfiguration)
settings.internals.initializing = True
prefix = settings.application_name
monkeypatch.setenv(settings.entry("app").environment_variable(prefix), "images")
monkeypatch.setenv(settings.entry("log_level").environment_variable(prefix), "debug")
monkeypatch.setenv(settings.entry("log_append").environment_variable(prefix), "false")
_messages, _exit_messages = parse_and_update(params=[], args=settings)
assert not _exit_messages
sources = to_sources(settings)
assert sources.pop("ansible-navigator.app") == C.ENVIRONMENT_VARIABLE.value
assert sources.pop("ansible-navigator.logging.level") == C.ENVIRONMENT_VARIABLE.value
assert sources.pop("ansible-navigator.logging.append") == C.ENVIRONMENT_VARIABLE.value
for path, source in sources.items():
assert source in [C.AUTO.value, C.DEFAULT_CFG.value, C.NOT_SET.value, C.NONE.value], (
path,
source,
)
def test_full(settings_env_var_to_full):
"""Test the source of effective settings given a full config.
:param settings_env_var_to_full: The pytest fixture to provide a full config
"""
# pylint: disable=unused-argument
settings = deepcopy(NavigatorConfiguration)
settings.internals.initializing = True
_messages, _exit_messages = parse_and_update(params=[], args=settings)
sources = to_sources(settings)
for path, source in sources.items():
if path.startswith("settings_file"):
continue
assert source in [C.USER_CFG.value, C.AUTO.value], (path, source)
| 2.390625 | 2 |
src/controller/widgets/buttons_cch.py | gustavosaquetta/FrameworkSSQt-Vocatus | 0 | 12798127 | import os, sys
sys.path.append(os.getcwd())
from PyQt5.QtWidgets import QApplication
from src.view.widgets.buttons_cch import ButtonsCCHView
from src.controller.base import Base
class ButtonsCCHController:
def __init__(self, force_show=False):
        self.view = ButtonsCCHView()
        self.view.bt_confirmar.clicked.connect(self.on_bt_confirmar)
if force_show:
self.view.show()
def show(self):
return self.view
    def on_bt_confirmar(self):
print(10)
def validate_user(self):
if True:
self.auth = True
if __name__ == '__main__':
app = QApplication(sys.argv)
w = ButtonsCCHController(True)
sys.exit(app.exec_()) | 2.46875 | 2 |
delivery/pdf_processor/preprocessor/__init__.py | sidmishraw/scp | 2 | 12798128 | # __init__.py
# -*- coding: utf-8 -*-
# @Author: <NAME>
# @Date: 2017-04-05 20:29:06
# @Last Modified by: <NAME>
# @Last Modified time: 2017-04-05 23:13:28
'''
Houses the core logic used to build the reverse-indices for the words extracted from the PDFs.
The preprocessor module.
'''
# CS 267 specific imports
from preprocessor.build_tables import read_input_files
from preprocessor.build_tables import determine_word_positions
from preprocessor.build_tables import determine_doc_frequency
__all__ = ['read_input_files', 'determine_word_positions', 'determine_doc_frequency']
| 2.15625 | 2 |
UniExplore/base/migrations/0032_alter_responses_photograph.py | MichaelHills01/group-software-project-1 | 1 | 12798129 | <gh_stars>1-10
# Generated by Django 4.0.1 on 2022-03-16 14:58
import base.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0031_alter_profile_picture_delete_weeklychallenge'),
]
operations = [
migrations.AlterField(
model_name='responses',
name='photograph',
field=models.ImageField(default='image_uploads/challenge-completed.png', upload_to=base.models.response_pic_location),
),
]
| 1.453125 | 1 |
compressed_communication/broadcasters/histogram_model_test.py | amitport/google_research_federated | 0 | 12798130 | <filename>compressed_communication/broadcasters/histogram_model_test.py
# Copyright 2022, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from absl.testing import parameterized
import tensorflow as tf
import tensorflow_federated as tff
from compressed_communication.broadcasters import histogram_model
_mn = 0.0
_mx = 10.0
_nbins = 4
_test_weights_type_float_tensor = (tf.float32, (3,))
_test_weights_type_struct_float_tensor = (
(tf.float32, (3,)), (tf.float32, (2,)))
_test_measurements_type = (tf.int32, (_nbins,))
def _histogram_model():
return histogram_model.HistogramModelBroadcastProcess(
_test_weights_type_struct_float_tensor, _mn, _mx, _nbins)
class HistogramModelComputationTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
('float_tensor', _test_weights_type_float_tensor,
_test_measurements_type),
('struct_float_tensor', _test_weights_type_struct_float_tensor,
_test_measurements_type))
  def test_histogram_type_properties(self, weights_type,
expected_measurements_type):
broadcast_process = histogram_model.HistogramModelBroadcastProcess(
weights_type, _mn, _mx, _nbins)
self.assertIsInstance(broadcast_process, tff.templates.MeasuredProcess)
server_state_type = tff.type_at_server(())
expected_initialize_type = tff.FunctionType(
parameter=None, result=server_state_type)
tff.test.assert_types_equivalent(
broadcast_process.initialize.type_signature, expected_initialize_type)
expected_measurements_type = tff.to_type(expected_measurements_type)
expected_next_type = tff.FunctionType(
parameter=collections.OrderedDict(
state=server_state_type, weights=tff.type_at_server(weights_type)),
result=tff.templates.MeasuredProcessOutput(
state=server_state_type,
result=tff.type_at_clients(weights_type, all_equal=True),
measurements=tff.type_at_server(expected_measurements_type)))
tff.test.assert_types_equivalent(broadcast_process.next.type_signature,
expected_next_type)
class HistogramModelExecutionTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
('float_tensor', _test_weights_type_float_tensor,
[1.0, 2.0, 3.0], [2, 1, 0, 0]),
('struct_float_tensor', _test_weights_type_struct_float_tensor,
[[1.0, 2.0, 3.0], [1.0, 1.0]], [4, 1, 0, 0]))
def test_histogram_impl(self, weights_type, weights, expected_histogram):
weights_type = tff.to_type(weights_type)
histogram_broadcaster = histogram_model.HistogramModelBroadcastProcess(
weights_type, _mn, _mx, _nbins)
state = histogram_broadcaster.initialize()
histogram = histogram_broadcaster.next(state, weights).measurements
self.assertAllEqual(histogram, expected_histogram)
result = histogram_broadcaster.next(state, weights).result
self.assertAllClose(result, weights)
if __name__ == '__main__':
tff.backends.native.set_local_python_execution_context(10)
tf.test.main()
| 2.078125 | 2 |
scripts/download_osm_tiles.py | MartinRusk/stratux | 104 | 12798131 | #!/usr/bin/python3
from sys import argv
import os
import math
import urllib.request
import random
import os.path
import sqlite3
URL_TEMPLATE = "https://c.tile.openstreetmap.org/%d/%d/%d.png"
BBOX = None # [lon_min, lat_min, lon_max, lat_max] or None for whole world
ZOOM_MAX = 7
LAYERTYPE = "baselayer" # "baselayer" or "overlay"
LAYERNAME = "OSM Low Detail"
TILE_FORMAT = "png"
def deg2num(lat_deg, lon_deg, zoom):
lat_rad = math.radians(lat_deg)
n = 2.0 ** zoom
xtile = int((lon_deg + 180.0) / 360.0 * n)
ytile = int((1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n)
return (xtile, ytile)
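# Worked example: deg2num(0.0, 0.0, 1) -> (1, 1), i.e. at zoom 1 the
# lat/lon origin falls in the bottom-right tile of the 2x2 grid.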
def download_url(zoom, xtile, ytile, cursor):
    subdomain = random.choice("abc")  # OSM serves tiles from the a/b/c subdomains
    url = URL_TEMPLATE % (subdomain, zoom, xtile, ytile)
ymax = 1 << zoom
yinverted = ymax - ytile - 1
existing = cursor.execute('SELECT count(*) FROM tiles WHERE zoom_level=? AND tile_column=? AND tile_row=?', (zoom, xtile, yinverted)).fetchall()
if existing[0][0] > 0:
print('Skipping ' + url)
return
print("downloading %r" % url)
request = urllib.request.Request(
url, data=None,
headers={
'User-Agent': 'Low-Zoom Downloader'
}
)
source = urllib.request.urlopen(request)
content = source.read()
source.close()
cursor.execute('INSERT INTO tiles(zoom_level, tile_column, tile_row, tile_data) VALUES(?, ?, ?, ?)', (zoom, xtile, yinverted, content))
def main(argv):
db = argv[1] if len(argv) > 1 else 'osm.mbtiles'
conn = sqlite3.connect(db)
cur = conn.cursor()
bboxStr = "-180,-85,180,85" if BBOX is None else ",".join(map(str, BBOX))
cur.executescript('''
CREATE TABLE IF NOT EXISTS tiles (
zoom_level integer,
tile_column integer,
tile_row integer,
tile_data blob);
CREATE TABLE IF NOT EXISTS metadata(name text, value text);
CREATE UNIQUE INDEX IF NOT EXISTS metadata_name on metadata (name);
CREATE UNIQUE INDEX IF NOT EXISTS tile_index on tiles(zoom_level, tile_column, tile_row);
INSERT OR REPLACE INTO metadata VALUES('minzoom', '1');
INSERT OR REPLACE INTO metadata VALUES('maxzoom', '{0}');
INSERT OR REPLACE INTO metadata VALUES('name', '{1}');
INSERT OR REPLACE INTO metadata VALUES('type', '{2}');
INSERT OR REPLACE INTO metadata VALUES('format', '{3}');
INSERT OR REPLACE INTO metadata VALUES('bounds', '{4}');
'''.format(ZOOM_MAX, LAYERNAME, LAYERTYPE, TILE_FORMAT, bboxStr))
    # download every tile from zoom 0 through ZOOM_MAX
for zoom in range(0, ZOOM_MAX+1):
xstart = 0
ystart = 0
xend = 2**zoom-1
yend = 2**zoom-1
if BBOX is not None:
xstart, yend = deg2num(BBOX[1], BBOX[0], zoom)
xend, ystart = deg2num(BBOX[3], BBOX[2], zoom)
for x in range(xstart, xend+1):
for y in range(ystart, yend+1):
download_url(zoom, x, y, cur)
conn.commit()
cur.close()
conn.close()
if __name__ == '__main__':
    main(argv)
| 2.921875 | 3 |
vctk_texts.py | deciding/hifi-gan | 0 | 12798132 | text_dir='VCTK-Corpus/txt/'
wav_dir='VCTK-Corpus/wav48/'
train_txt='VCTK-Corpus/training.txt'
val_txt='VCTK-Corpus/validation.txt'
eval_txt='VCTK-Corpus/evaluation.txt'
import os
# Per speaker: all but the last 20 utterances go to training,
# the next 10 to validation, and the final 10 to evaluation.
train_set = []
val_set = []
eval_set = []
spks=os.listdir(text_dir)
spks.sort()
for spk in spks:
if spk in ['p360', 'p361', 'p362', 'p363']:
continue
#import pdb;pdb.set_trace()
spk_dir=os.path.join(text_dir, spk)
txts=os.listdir(spk_dir)
txts.sort()
for txt in txts[:-20]:
iid=os.path.basename(txt).split('.')[0]
txt=os.path.join(spk_dir, txt)
with open(txt) as f:
text=f.readline().strip()
train_set.append((iid, text))
for txt in txts[-20:-10]:
iid=os.path.basename(txt).split('.')[0]
txt=os.path.join(spk_dir, txt)
with open(txt) as f:
text=f.readline().strip()
val_set.append((iid, text))
for txt in txts[-10:]:
iid=os.path.basename(txt).split('.')[0]
txt=os.path.join(spk_dir, txt)
with open(txt) as f:
text=f.readline().strip()
eval_set.append((iid, text))
with open(train_txt, 'w') as f:
for iid, text in train_set:
f.write(f'{iid}|{text}\n')
with open(val_txt, 'w') as f:
for iid, text in val_set:
f.write(f'{iid}|{text}\n')
with open(eval_txt, 'w') as f:
for iid, text in eval_set:
f.write(f'{iid}|{text}\n')
| 2.3125 | 2 |
btb/selection/recent.py | dataronio/BTB | 161 | 12798133 | <filename>btb/selection/recent.py
import logging
from btb.selection.ucb1 import UCB1
# the minimum number of scores that each choice must have in order to use
# best-K optimizations. If not all choices meet this threshold, default UCB1
# selection will be used.
K_MIN = 2
logger = logging.getLogger('btb')
class RecentKReward(UCB1):
"""Recent K reward selector
Args:
k (int): number of best scores to consider
"""
def __init__(self, choices, k=K_MIN):
super(RecentKReward, self).__init__(choices)
self.k = k
def compute_rewards(self, scores):
"""Retain the K most recent scores, and replace the rest with zeros"""
for i in range(len(scores)):
if i >= self.k:
scores[i] = 0.
return scores
def select(self, choice_scores):
"""Use the top k learner's scores for usage in rewards for the bandit calculation"""
# if we don't have enough scores to do K-selection, fall back to UCB1
min_num_scores = min([len(s) for s in choice_scores.values()])
if min_num_scores >= K_MIN:
logger.info('{klass}: using Best K bandit selection'.format(klass=type(self).__name__))
reward_func = self.compute_rewards
else:
logger.warning(
'{klass}: Not enough choices to do K-selection; using plain UCB1'
.format(klass=type(self).__name__))
reward_func = super(RecentKReward, self).compute_rewards
choice_rewards = {}
for choice, scores in choice_scores.items():
if choice not in self.choices:
continue
choice_rewards[choice] = reward_func(scores)
return self.bandit(choice_rewards)
class RecentKVelocity(RecentKReward):
"""Recent K velocity selector"""
def compute_rewards(self, scores):
"""Compute the velocity of thte k+1 most recent scores.
The velocity is the average distance between scores. Return a list with those k velocities
padded out with zeros so that the count remains the same.
"""
# take the k + 1 most recent scores so we can get k velocities
recent_scores = scores[:-self.k - 2:-1]
velocities = [recent_scores[i] - recent_scores[i + 1] for i in
range(len(recent_scores) - 1)]
# pad the list out with zeros, so the length of the list is
# maintained
zeros = (len(scores) - self.k) * [0]
return velocities + zeros
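# Worked example (k=2): for scores [1, 2, 4, 7], the k+1 most recent
# scores reversed are [7, 4, 2], the velocities are [3, 2], and the
# zero-padded result is [3, 2, 0, 0] -- same length as the input.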
| 3.265625 | 3 |
tests/components/test_pressure.py | StephenOrJames/aviation-weather | 2 | 12798134 | import unittest
from aviation_weather import Pressure
from aviation_weather.exceptions import PressureDecodeError
class TestPressure(unittest.TestCase):
"""Unit tests for aviation_weather.components.pressure.Pressure"""
def _test_valid(self, raw, indicator, value):
p = Pressure(raw)
self.assertEqual(raw, p.raw)
self.assertEqual(indicator, p.indicator)
self.assertEqual(value, p.value)
def test_valid_altimeter(self):
self._test_valid("A2992", "A", 29.92)
def test_valid_QNH(self):
self._test_valid("Q1013", "Q", 1013)
def test_invalid(self):
with self.assertRaises(PressureDecodeError):
Pressure("3000") # no unit indicator; more likely visibility
| 3.109375 | 3 |
projects/models.py | DerrickOdhiambo/P-Awards | 0 | 12798135 | <gh_stars>0
from django.db import models
from django.utils.timezone import now
from django.contrib.auth.models import User
from django.urls import reverse
from django_resized import ResizedImageField
class Project(models.Model):
title = models.CharField(max_length=50)
project_image = models.ImageField(upload_to='project/')
project_description = models.TextField()
project_link = models.CharField(max_length=60)
project_owner = models.ForeignKey(
User, on_delete=models.CASCADE, null=True)
date_posted = models.DateTimeField(default=now)
def __str__(self):
return self.title
def save_project(self):
self.save()
@classmethod
def get_image_by_id(cls, id):
image = cls.objects.get(id=id)
return image
@classmethod
def all_images(cls):
project_images = cls.objects.all()
return project_images
@classmethod
def search_by_title(cls, project):
projects = cls.objects.filter(title__icontains=project)
return projects
def get_absolute_url(self):
return reverse('project-detail', kwargs={'pk': self.pk})
class Profile(models.Model):
profile_picture = ResizedImageField(size=[300, 300], quality=75,
default='default.jpg', upload_to='profile_pics/')
user_bio = models.TextField()
user = models.OneToOneField(User, on_delete=models.CASCADE)
def __str__(self):
return f'{self.user.username} Profile'
def save_profile(self):
self.save()
RATE_CHOICES = [
(1, '1'),
(2, '2'),
(3, '3'),
(4, '4'),
(5, '5'),
(6, '6'),
(7, '7'),
(8, '8'),
(9, '9'),
(10, '10')
]
class Rating(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
projects = models.ForeignKey(
Project, on_delete=models.CASCADE, related_name='project_ratings')
design = models.PositiveSmallIntegerField(choices=RATE_CHOICES, null=True)
content = models.PositiveSmallIntegerField(choices=RATE_CHOICES, null=True)
usability = models.PositiveSmallIntegerField(
choices=RATE_CHOICES, null=True)
average_rating = models.FloatField(default=0)
review = models.TextField()
def __str__(self):
return str(self.id)
@classmethod
def get_ratings(cls, id):
        rating = cls.objects.all()[id]
        return [rating]
| 2.15625 | 2 |
franki/docker/compose.py | cr0hn/franki | 1 | 12798136 | import argparse
from typing import List
from ..dal.services import ServiceConfig
TAB = " " * 2
def cli_docker_compose(parser: argparse._SubParsersAction):
sub_parser = parser.add_parser("docker-compose",
help="build a docker-compose")
sub_parser.add_argument("-v", "--compose-version",
default="3.7",
help="minimum docker-compose format")
sub_parser.add_argument("PATH", nargs="+")
def build_docker_compose(parsed: argparse.Namespace,
services_config: List[ServiceConfig]) -> str:
#
    # DO NOT USE THE YAML LIBRARY: IT DOES NOT GUARANTEE KEY ORDER
#
data_service = [
f"version: {parsed.compose_version}",
f"services:\n",
]
for serv in services_config:
service = serv.service
# -------------------------------------------------------------------------
# Service config
# -------------------------------------------------------------------------
data_service.extend([
f"{TAB}#{'-' * 40}",
f"{TAB}# Service: '{service.name}'",
f"{TAB}#{'-' * 40}",
f"{TAB}{service.name.lower()}:",
f"{TAB}{TAB}image: {service.name.lower()}:{service.version}"
])
if service.environment:
data_service.append(f"{TAB}{TAB}environment:")
for e in service.environment:
data_service.append(f"{TAB}{TAB}{TAB}- {e}={e}")
if service.port:
data_service.append(f"{TAB}{TAB}ports:")
data_service.append(f"{TAB}{TAB}{TAB}- {service.port}:{service.port}")
if service.command:
data_service.append(f"{TAB}{TAB}command: {service.command}")
        if service.entrypoint:
            data_service.append(f"{TAB}{TAB}entrypoint: {service.entrypoint}")
data_service.append("")
# -------------------------------------------------------------------------
# Dependencies
# -------------------------------------------------------------------------
for dep in service.dependencies:
data_service.append(f"{TAB}{dep.name}:")
data_service.append(f"{TAB}{TAB}image: {dep.image}")
data_service.append(f"{TAB}{TAB}environment:")
for e in dep.environment:
data_service.append(f"{TAB}{TAB}{TAB} - {e}={e}")
            # TODO: import from the catalog
if dep.command:
data_service.append(f"{TAB}{TAB}command: {dep.command}")
# if dep.ports:
# data_service.append(f"{TAB}{TAB}ports: {dep.environment}")
data_service.append("")
data_service.extend([
f"{TAB}#{'-' * 40}",
f"{TAB}# END '{service.name}'",
f"{TAB}#{'-' * 40}"
])
return "\n".join(data_service)
__all__ = ("cli_docker_compose", "build_docker_compose")
| 2.40625 | 2 |
leetcode/even_digits.py | Imipenem/Competitive_Prog_with_Python | 0 | 12798137 | <reponame>Imipenem/Competitive_Prog_with_Python<gh_stars>0
from typing import List

def findNumbers(nums: List[int]) -> int:
    return sum(1 for e in nums if len(str(e)) % 2 == 0)
if __name__ == '__main__':
print(findNumbers([12,345,2,6,7896]))
| 3.5625 | 4 |
flappy.py | antpa/FlapPyBird | 0 | 12798138 | from itertools import cycle
import random
import sys
import pygame
from pygame.locals import *
FPS = 30
SCREENWIDTH = 512
SCREENHEIGHT = 512
PIPEGAPSIZE = 100 # gap between upper and lower part of pipe
PIPEHEIGHT = 300
PIPEWIDTH = 50
BASEY = SCREENHEIGHT * 0.79
BASEX = 0
try:
xrange
except NameError:
xrange = range
class Player:
def __init__(self):
self.x = int(SCREENWIDTH * 0.2)
self.width = 20
self.height = 20
maxValue = int((SCREENHEIGHT - self.height) / SCREENHEIGHT * 100)
minValue = int(self.height / SCREENHEIGHT * 100)
self.y = int((SCREENHEIGHT - self.height) * random.randint(minValue, maxValue) / 100 )
# player velocity, max velocity, downward accleration, accleration on flap
self.velY = -9 # player's velocity along Y, default same as playerFlapped
self.maxVelY = 10 # max vel along Y, max descend speed
self.accY = 1 # players downward accleration
self.flapAcc = -9 # players speed on flapping
self.flapped = False # True when player flaps
self.score = 0
def update(self, event):
if event.type == KEYDOWN and (event.key == K_SPACE or event.key == K_UP):
if self.y > -2 * self.height:
self.velY = self.flapAcc
self.flapped = True
def main():
global SCREEN, FPSCLOCK, myfont
pygame.init()
FPSCLOCK = pygame.time.Clock()
SCREEN = pygame.display.set_mode((SCREENWIDTH, SCREENHEIGHT))
pygame.display.set_caption('Flappy Bird')
myfont = pygame.font.SysFont("Comic Sans MS", 30)
while True:
crashInfo = mainGame()
showGameOverScreen(crashInfo)
def mainGame():
players = []
for i in range(0,1):
players.append(Player())
# get 2 new pipes to add to upperPipes lowerPipes list
newPipe1 = getRandomPipe()
# newPipe2 = getRandomPipe()
# list of upper pipes
upperPipes = [
newPipe1[0],
# newPipe2[0],
]
# list of lowerpipe
lowerPipes = [
newPipe1[1],
# newPipe2[1],
]
pipeVelX = -4
while True:
        # dummy event-like object so players can be updated even when no key was pressed
        playerEvent = type('', (object,), {'type': 0, 'key': 0})
for event in pygame.event.get():
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
if event.type == KEYDOWN and (event.key == K_SPACE or event.key == K_UP):
playerEvent = event
# move pipes to left
for uPipe, lPipe in zip(upperPipes, lowerPipes):
uPipe['x'] += pipeVelX
lPipe['x'] += pipeVelX
# add new pipe when first pipe is about to touch left of screen
if 0 < upperPipes[0]['x'] < 5:
newPipe = getRandomPipe()
upperPipes.append(newPipe[0])
lowerPipes.append(newPipe[1])
# remove first pipe if its out of the screen
if upperPipes[0]['x'] < -PIPEWIDTH:
upperPipes.pop(0)
lowerPipes.pop(0)
# draw sprites
SCREEN.fill((0,0,0))
for uPipe, lPipe in zip(upperPipes, lowerPipes):
pygame.draw.rect(SCREEN,(255,255,255), (uPipe['x'], uPipe['y'],PIPEWIDTH,PIPEHEIGHT))
pygame.draw.rect(SCREEN,(255,255,255), (lPipe['x'], lPipe['y'],PIPEWIDTH,PIPEHEIGHT))
pygame.draw.rect(SCREEN,(255,255,255), (BASEX, BASEY,SCREENWIDTH,BASEY))
for player in players:
player.update(playerEvent)
# check for crash here
crashTest = checkCrash(player,
upperPipes, lowerPipes)
if crashTest[0]:
players.remove(player)
                if len(players) == 0:
return {
'player': player,
'upperPipes': upperPipes,
'lowerPipes': lowerPipes,
}
# check for score
playerMidPos = player.x + player.width / 2
for pipe in upperPipes:
pipeMidPos = pipe['x'] + PIPEWIDTH / 2
if pipeMidPos <= playerMidPos < pipeMidPos + 4:
player.score += 1
# player's movement
if player.velY < player.maxVelY and not player.flapped:
player.velY += player.accY
if player.flapped:
player.flapped = False
player.y += min(player.velY, BASEY - player.y - player.height)
# print score so player overlaps the score
showScore(player.score)
pygame.draw.ellipse(SCREEN, (255,255,255,200), (player.x, player.y, player.width, player.width), 0)
pygame.display.update()
FPSCLOCK.tick(FPS)
def showGameOverScreen(crashInfo):
"""crashes the player down ans shows gameover image"""
player = crashInfo['player']
upperPipes, lowerPipes = crashInfo['upperPipes'], crashInfo['lowerPipes']
while True:
for event in pygame.event.get():
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
if event.type == KEYDOWN and (event.key == K_SPACE or event.key == K_UP):
return
# draw sprites
SCREEN.fill((0,0,0))
for uPipe, lPipe in zip(upperPipes, lowerPipes):
pygame.draw.rect(SCREEN,(255,255,255), (uPipe['x'], uPipe['y'],PIPEWIDTH, PIPEHEIGHT))
pygame.draw.rect(SCREEN,(255,255,255), (lPipe['x'], lPipe['y'],PIPEWIDTH, PIPEHEIGHT))
pygame.draw.rect(SCREEN,(255,255,255), (BASEX, BASEY,SCREENWIDTH,BASEY))
showScore(player.score)
pygame.draw.ellipse(SCREEN, (255,255,255,200), (player.x, player.y, player.width, player.width), 0)
FPSCLOCK.tick(FPS)
pygame.display.update()
def getRandomPipe():
"""returns a randomly generated pipe"""
# y of gap between upper and lower pipe
gapY = random.randrange(0, int(BASEY * 0.6 - PIPEGAPSIZE))
gapY += int(BASEY * 0.2)
pipeX = SCREENWIDTH + 10
return [
{'x': pipeX, 'y': gapY - PIPEHEIGHT}, # upper pipe
{'x': pipeX, 'y': gapY + PIPEGAPSIZE}, # lower pipe
]
def showScore(score):
"""displays score in center of screen"""
label = myfont.render(str(score), 1, (255,255,255))
SCREEN.blit(label, (10, 10))
def checkCrash(player, upperPipes, lowerPipes):
"""returns True if player collders with base or pipes."""
# if player crashes into ground
if player.y + player.height >= BASEY - 1:
return [True, True]
else:
playerRect = pygame.Rect(player.x, player.y,
player.width, player.height)
for uPipe, lPipe in zip(upperPipes, lowerPipes):
# upper and lower pipe rects
uPipeRect = pygame.Rect(uPipe['x'], uPipe['y'], PIPEWIDTH, PIPEHEIGHT)
lPipeRect = pygame.Rect(lPipe['x'], lPipe['y'], PIPEWIDTH, PIPEHEIGHT)
# if bird collided with upipe or lpipe
uCollide = pixelCollision(playerRect, uPipeRect)
lCollide = pixelCollision(playerRect, lPipeRect)
if uCollide or lCollide:
return [True, False]
return [False, False]
def pixelCollision(rect1, rect2):
"""Checks if two objects collide and not just their rects"""
rect = rect1.clip(rect2)
if rect.width == 0 or rect.height == 0:
return False
return True
if __name__ == '__main__':
main()
| 3.09375 | 3 |
src/pages/views.py | ahsanali2000/Patientio | 2 | 12798139 | from django.shortcuts import render, get_object_or_404
from .models import About,Appointment,Doctor,Report,Service,History
from django.http import HttpResponse,HttpResponseRedirect
from django.shortcuts import redirect
from django.contrib.auth.decorators import login_required
# Create your views here.
def home(request):
return render(request, "index.html", {})
def about(request):
context = {
'about': About.objects.all()
}
return render(request, "about.html", context)
def doctor_list(request):
context = {
'doctors': Doctor.objects.all()
}
return render(request, "doctor_list_booking.html", context)
def services(request):
context = {
'service': Service.objects.all()
}
return render(request, "services.html", context)
@login_required
def appointment_delete(request, appointment_id):
appointment = get_object_or_404(Appointment, pk=appointment_id)
if request.method == 'POST':
appointment.delete()
return redirect('appointments_list')
return redirect('appointments_list')
@login_required
def lab_report_delete(request, report_id):
lab_report = get_object_or_404(Report, pk=report_id)
if request.method == 'POST':
lab_report.delete()
return redirect('lab_reports')
return redirect('lab_reports')
@login_required
def lab_reports(request):
reports = Report.objects.filter(user = request.user)
if reports:
return render(request, "lab_reports_list.html", {'reports': reports})
else:
message = "No records Found"
return render(request, "lab_reports_list.html", {'message': message})
@login_required
def appointments(request):
appointments = Appointment.objects.filter(user = request.user)
if appointments:
return render(request, "appointments_list.html", {'appointments': appointments})
else:
message = "No records Found"
return render(request, "appointments_list.html", {'message': message})
def single_report(request, report_id):
report = get_object_or_404(Report, pk=report_id)
return render(request, "single_report.html", {'report': report})
def single_service(request, service_id):
service = get_object_or_404(Service, pk=service_id)
return render(request, 'single_service.html', {'service': service, 'services_info' : Service.objects.all()})
def single_doctor(request,doctor_id):
doctor = get_object_or_404(Doctor, pk=doctor_id)
return render(request, "single_doctor_booking.html", {'doctor': doctor})
@login_required
def profile(request):
history = History.objects.filter(user = request.user)
if history:
return render(request, "profile.html", {'history': history})
else:
message = "No records Found"
return render(request, "profile.html", {'message': message})
@login_required
def booking(request, doctor_id):
if request.method == 'POST' and request.POST.get('Appointment Date') and request.POST.get('Appointment Time'):
disease = request.POST.get('issue')
date=request.POST.get('Appointment Date')
time=request.POST.get('Appointment Time')
doctor = doctor_id
user = request.user
appointment = Appointment.objects.create(date=date, time=time, user=user, disease_option=disease, doctor = doctor)
appointment.save()
appointments = Appointment.objects.filter(user = request.user)
return render(request, 'appointments_list.html', {'appointments': appointments})
else:
appointments = Appointment.objects.filter(user = request.user)
return render(request, 'appointments_list.html', {'appointments': appointments})
def contactus(request):
pass
def patient_info(request):
pass
| 2 | 2 |
search/conftest.py | ArturMichalak/ElasticSearch-DSL-Playground | 0 | 12798140 | """Basic test configuration"""
from unittest.mock import MagicMock
from pytest import fixture
from elasticsearch_dsl.connections import add_connection
@fixture
def mock_client(dummy_response):
"""Returns elasticsearch mock client"""
client = MagicMock()
client.search.return_value = dummy_response
add_connection("mock", client)
yield client
@fixture(name="dummy_response")
def fixture_dummy_response():
"""Returns the dictionary for comparison in tests"""
return {
"_shards": {
"failed": 0,
"successful": 10,
"total": 10
},
"hits": {
"hits": [
{
"_index": "blog",
"_type": "_doc",
"_id": "1",
"_score": "10.114",
"_source": {
"title": "Test elasticsearch",
"body": """
Litwo! Ojczyzno moja! Ty jesteś jak zdrowie. Ile cię stracił.
Dziś człowieka nie policzę.
Opuszczali rodziców i jeszcze dobrze
na kozłach niemczysko chude na Ojczyzny łono.
Tymczasem na nim się zdawał małpą lub ławę przeskoczyć.
Zręcznie między dwie strony: Uciszcie się! woła.
Marząc i krwi tonęła,
gdy przysięgał na krzaki fijołkowe skłonił oczyma ciekawymi
po kryjomu kazał stoły z Paryża a czuł choroby zaród.
Krzyczano na waszych polowaniach łowił?
Piękna byłaby sława, ażeby nie było gorąca).
wachlarz pozłocist powiewając rozlewał deszcz iskier rzęsisty.
Głowa do Twych świątyń progi iść
za zającami nie został pośmiewiska celem i niesrogi.
Odgadnęła sąsiadka powód jego lata
wleką w kota się nagle, stronnicy Sokół na kształt deski.
Nogi miał głos miły: Witaj nam,
że spod ramion wytknął palce i Asesor, razem, jakoby zlewa.
I też co się przyciągnąć do dworu
uprawne dobrze zachowana sklepienie całe wesoło, lecz w rozmowę lecz lekki.
odgadniesz, że jacyś Francuzi wymowny
zrobili wynalazek: iż ludzie są architektury.
Choć Sędzia jego bok usiadła
owa piękność zda się Gorecki, Pac i opisuję,
bo tak nas reformować cywilizować
będzie wojna u nas starych więcej godni
Wojewody względów doszli potem się teraz
wzrostem dorodniejsza bo tak pan Wojski na nim ją w ulicę się tajemnie,
Ścigany od płaczącej matki pod
Turka czy wstydzić, czy na lewo,
on rodaków zbiera się w domu dostatek mieszka i panien
nie w nieczynności! a Suwarów w posiadłość.
""",
"published_from": "2013-02-10T10:31:07.851688",
"tags": ["g1", "g2"],
"lines": "1",
},
"highlight": {
"title": ["<em>Test</em> elasticsearch"]
},
},
{
"_index": "blog",
"_type": "_doc",
"_id": "2",
"_score": "12.0",
"_source": {
"title": "Test elasticsearch numer 2",
"body": """
Litwo! Ojczyzno moja! Ty jesteś jak zdrowie. Ile cię stracił.
Dziś człowieka nie policzę.
Opuszczali rodziców i jeszcze dobrze
na kozłach niemczysko chude na Ojczyzny łono.
Tymczasem na nim się zdawał małpą lub ławę przeskoczyć.
Zręcznie między dwie strony: Uciszcie się! woła.
Marząc i krwi tonęła,
gdy przysięgał na krzaki fijołkowe skłonił oczyma ciekawymi
po kryjomu kazał stoły z Paryża a czuł choroby zaród.
Krzyczano na waszych polowaniach łowił?
Piękna byłaby sława, ażeby nie było gorąca).
wachlarz pozłocist powiewając rozlewał deszcz iskier rzęsisty.
Głowa do Twych świątyń progi iść
za zającami nie został pośmiewiska celem i niesrogi.
Odgadnęła sąsiadka powód jego lata
wleką w kota się nagle, stronnicy Sokół na kształt deski.
Nogi miał głos miły: Witaj nam,
że spod ramion wytknął palce i Asesor, razem, jakoby zlewa.
I też co się przyciągnąć do dworu
uprawne dobrze zachowana sklepienie całe wesoło, lecz w rozmowę lecz lekki.
odgadniesz, że jacyś Francuzi wymowny
zrobili wynalazek: iż ludzie są architektury.
Choć Sędzia jego bok usiadła
owa piękność zda się Gorecki, Pac i opisuję,
bo tak nas reformować cywilizować
będzie wojna u nas starych więcej godni
Wojewody względów doszli potem się teraz
wzrostem dorodniejsza bo tak pan Wojski na nim ją w ulicę się tajemnie,
Ścigany od płaczącej matki pod
Turka czy wstydzić, czy na lewo,
on rodaków zbiera się w domu dostatek mieszka i panien
nie w nieczynności! a Suwarów w posiadłość.
""",
"published_from": "2014-02-10T10:31:07.851688",
"tags": ["g1", "g2"],
"lines": "1",
},
"highlight": {
"title": ["<em>Test</em> elasticsearch numer 2"]
},
},
]
},
"timed_out": False,
"took": 123,
}
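# Example of how a test might consume these fixtures (a sketch; it assumes
# elasticsearch_dsl resolves the "mock" connection alias registered above):
#
#   def test_search_uses_mock(mock_client):
#       from elasticsearch_dsl import Search
#       Search(using="mock").query("match", title="Test").execute()
#       mock_client.search.assert_called_once()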
| 2.65625 | 3 |
hyperspeed/text.py | bovesan/mistika-hyperspeed | 3 | 12798141 | #!/usr/bin/env python
import re
class Title(object):
def __init__(self, path):
self.path = path
        self._fonts = None    # None means "not parsed yet"
        self._string = None
def __str__(self):
return self.name
def __repr__(self):
return self.name
@property
def fonts(self):
        if self._fonts is None:
self.parse_font_dat()
return self._fonts
@property
def string(self):
        if self._string is None:
self.parse_font_dat()
return self._string
def parse_font_dat(self):
self._fonts = []
float_pattern = '[+-]?[0-9]+\.[0-9]+'
string = ''
char_line = 9999
char = {}
italic = False
try:
for line in open(self.path):
stripped = line.strip('\n\r')
char_line += 1
if re.match('^'+' '.join([float_pattern] * 10)+'$', stripped): # New character: 10 floats
# Todo: Insert detection of slanted text here
char_line = 0
char = {}
elif char_line == 1:
char['font'] = stripped
if not char['font'] in self._fonts:
self._fonts.append(char['font'])
if "italic" in stripped.lower() or "oblique" in stripped.lower():
if not italic:
string += '<i>'
italic = True
else:
if italic:
string += '</i>'
italic = False
elif char_line == 2:
char['char'] = stripped
string += stripped
elif char_line < 9999 and re.match('^'+' '.join([float_pattern] * 4)+'$', stripped): # New line: 4 floats
if string.endswith('\r\n'): # Break on double line break
break
string += '\r\n'
if italic:
string += '</i>'
except IOError:
print 'Could not read file: %s' % self.path
return
self._string = string | 3.265625 | 3 |
complaints/ccdb/tests/test_choose_field_map.py | JeffreyMFarley/ccdb-data-pipeline | 6 | 12798142 | <filename>complaints/ccdb/tests/test_choose_field_map.py
import os
import unittest
import complaints.ccdb.choose_field_map as sut
from common.tests import build_argv, captured_output, validate_files
def fieldsToAbsolute(mapping_file):
# where is _this_ file? and one level up
thisScriptDir = os.path.dirname(os.path.dirname(__file__))
return os.path.join(thisScriptDir, 'fields-s3', mapping_file)
def fixtureToAbsolute(fixture_file):
# where is _this_ file?
thisScriptDir = os.path.dirname(__file__)
return os.path.join(thisScriptDir, '__fixtures__', fixture_file)
# ------------------------------------------------------------------------------
# Classes
# ------------------------------------------------------------------------------
class TestMain(unittest.TestCase):
def setUp(self):
self.actual_file = fixtureToAbsolute('a.txt')
self.positional = [
None,
self.actual_file
]
def tearDown(self):
try:
os.remove(self.actual_file)
except Exception:
pass
def test_v1_intake_json(self):
self.positional[0] = fixtureToAbsolute('v1-intake.csv')
argv = build_argv(positional=self.positional)
with captured_output(argv) as (out, err):
sut.main()
validate_files(self.actual_file, fieldsToAbsolute('v1-json.txt'))
actual_print = out.getvalue().strip()
self.assertEqual('Using "v1-json.txt" for field mapping', actual_print)
def test_v1_intake_csv(self):
self.positional[0] = fixtureToAbsolute('v1-intake.csv')
optional = [
'--target-format', 'CSV'
]
argv = build_argv(optional, self.positional)
with captured_output(argv) as (out, err):
sut.main()
validate_files(self.actual_file, fieldsToAbsolute('v1-csv.txt'))
actual_print = out.getvalue().strip()
self.assertEqual('Using "v1-csv.txt" for field mapping', actual_print)
def test_v1_public(self):
self.positional[0] = fixtureToAbsolute('v1-public.csv')
argv = build_argv(positional=self.positional)
with captured_output(argv) as (out, err):
sut.main()
validate_files(self.actual_file, fieldsToAbsolute('v1-json.txt'))
actual_print = out.getvalue().strip()
self.assertEqual('Using "v1-json.txt" for field mapping', actual_print)
def test_v2_intake(self):
self.positional[0] = fixtureToAbsolute('v2-intake.csv')
argv = build_argv(positional=self.positional)
with captured_output(argv) as (out, err):
sut.main()
validate_files(self.actual_file, fieldsToAbsolute('v2-json.txt'))
actual_print = out.getvalue().strip()
self.assertEqual('Using "v2-json.txt" for field mapping', actual_print)
def test_v2_public(self):
self.positional[0] = fixtureToAbsolute('v2-public.csv')
argv = build_argv(positional=self.positional)
with captured_output(argv) as (out, err):
sut.main()
validate_files(self.actual_file, fieldsToAbsolute('v2-json.txt'))
actual_print = out.getvalue().strip()
self.assertEqual('Using "v2-json.txt" for field mapping', actual_print)
def test_bad_input(self):
self.positional[0] = fixtureToAbsolute('complaints-subset.csv')
argv = build_argv(positional=self.positional)
with self.assertRaises(SystemExit) as ex:
with captured_output(argv) as (out, err):
sut.main()
self.assertEqual(ex.exception.code, 2)
console_output = err.getvalue()
self.assertIn('Unknown field set', console_output)
def test_bad_format_argument(self):
self.positional[0] = fixtureToAbsolute('v1-intake.csv')
optional = [
'--target-format', 'tsv'
]
argv = build_argv(optional, self.positional)
with self.assertRaises(SystemExit) as ex:
with captured_output(argv) as (out, err):
sut.main()
self.assertEqual(ex.exception.code, 2)
console_output = err.getvalue()
self.assertIn('usage: choose_field_map', console_output)
self.assertIn('--target-format: invalid choice', console_output)
| 2.296875 | 2 |
bash_to_python/cat.py | blakfeld/Bash-To-Python | 0 | 12798143 | <reponame>blakfeld/Bash-To-Python<filename>bash_to_python/cat.py<gh_stars>0
"""
cat.py -- Emulate UNIX cat.
Author: <NAME>
E-Mail: <EMAIL>
Date: 5/25/2015
"""
import os
import sys
class Cat(object):
def __init__(self, fname=None, stdin=None):
"""
Constructor
Args:
fname (str): File to print to screen
stdin (str): Input from sys.stdin to output.
Raises:
ValueError: If provided file doesn't exist or is a directory.
"""
self.fname = fname
self.stdin = stdin
def run(self):
"""
Emulate 'cat'.
Echo User input if a file is not provided, if a file is provided, print
it to the screen.
"""
if self.stdin:
self._cat_stdin(self.stdin)
return
if not self.fname:
self._cat_input()
return
if isinstance(self.fname, list):
for f in self.fname:
self._validate_file(f)
self._cat_file(f)
else:
self._validate_file(self.fname)
self._cat_file(self.fname)
def _cat_stdin(self, stdin):
"""
Print data provided in stdin.
Args:
stdin (str): The output of sys.stdin.read()
"""
print stdin
def _cat_file(self, fname):
"""
Print contents of a file.
Args:
fname: Name of file to print.
"""
with open(fname, 'r') as f:
sys.stdout.write((f.read()))
def _cat_input(self):
"""
Echo back user input.
"""
        while True:
            try:
                user_input = raw_input()
            except EOFError:  # stop when stdin is exhausted (e.g. Ctrl-D)
                break
            sys.stdout.write(user_input + '\n')
def _validate_file(self, fname):
"""
Ensure fname exists, and is not a directory.
Args:
fname (str): The file path to validate.
Raises:
ValueError: If file does not exist or is a directory.
"""
if not os.path.exists(fname):
raise ValueError('cat: {}: No such file or directory.'
.format(fname))
if os.path.isdir(fname):
raise ValueError('cat: {}: Is a directory.'
.format(fname))
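# Hypothetical driver (not part of the original module; assumes Python 2,
# matching the raw_input/print-statement usage above):
#
#   if __name__ == '__main__':
#       fnames = sys.argv[1:]
#       Cat(fname=fnames if fnames else None).run()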
| 3.28125 | 3 |
calculate_diff.py | yhc45/IP-Spoof | 0 | 12798144 | <filename>calculate_diff.py<gh_stars>0
#!/usr/bin/python2
from spoof_struct import send_packet
import dpkt, socket, subprocess
from collections import defaultdict
import time
import cPickle as pickle
import itertools
src_ip = '192.168.100.128'
spoof_ip = '192.168.22.21'
src_port = 54024
pcap_name = "filter.pcap"
ipid_map = defaultdict(list)  # maps source IP -> list of observed IP IDs
def parse_pcap(file_n):
f = open(file_n)
pcap = dpkt.pcap.Reader(f)
for ts, buf in pcap:
eth = dpkt.ethernet.Ethernet(buf)
ip = eth.data
tcp = ip.data
#src_addr = socket.inet_ntoa(ip.src)
if eth.type == dpkt.ethernet.ETH_TYPE_IP and tcp.dport == src_port: # and tcp.sport == port
ipid_map[socket.inet_ntoa(ip.src)].append(ip.id)
f.close()
return
def parse_candidate(file_name):
f = open(file_name,'rb')
db = pickle.load(f)
f.close()
return db
def main():
parse_ip = parse_candidate("ip_port_record.pickle")
ipid_map = parse_candidate("save.p")
f1 = open("not_found","w+")
f2 = open("diff_port","w+")
for ip,port in parse_ip.items():
if ip not in ipid_map:
f1.write(ip+"\n")
elif port != ipid_map[ip][0]:
f2.write("request to ip: "+ip+ " port: "+str(port)+"\n")
f2.write("respond to ip: "+ip+ " port: "+str(ipid_map[ip][0])+"\n")
f1.close()
f2.close()
f4 = open("cand","w+")
reflector_candidate = {}
for ip,lists in ipid_map.items():
result = [j-i for j, i in zip(lists[3::2],lists[1:-2:2])]
timestamp = [j-i for j, i in zip(lists[4::2],lists[2:-1:2])]
        if all(t > 0.8 for t in timestamp) and 20 < len(result) <= 29 and 0 < sum(result) / len(result) < 6:
reflector_candidate[ip] = lists[0]
f4.write("respond to ip: "+ip)
f4.close()
f3 = open( "reflector_candidate.pickle", "wb" )
pickle.dump( reflector_candidate, f3)
f3.close()
print(reflector_candidate)
#for i in range(30):
# for ip, port in parse_ip.items():
#send_packet(src_ip,src_port,ip,port,1,1)
# send_packet(spoof_ip,src_port,ip,port,1,1)
# print("ip: "+ip+" id: "+str(port)+"\n")
# exit(1)
# time.sleep(1)
#p.send_signal(subprocess.signal.SIGTERM)
#time.sleep(1)
#parse_pcap(pcap_name)
#f = open("result_measure.txt","w+")
#for ip, id_list in ipid_map.iteritems():
# f.write("ip: "+ip+" id: "+str(id_list)+"\n")
#f.close()
if __name__ == "__main__":
main()
| 2.640625 | 3 |
src/ralph/licences/tests/tests_models.py | DoNnMyTh/ralph | 1,668 | 12798145 | <filename>src/ralph/licences/tests/tests_models.py
# -*- coding: utf-8 -*-
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from ralph.accounts.tests.factories import RegionFactory, UserFactory
from ralph.back_office.tests.factories import BackOfficeAssetFactory
from ralph.lib.transitions.tests import TransitionTestCase
from ralph.licences.models import BaseObjectLicence, Licence, LicenceUser
from ralph.licences.tests.factories import LicenceFactory
from ralph.tests import RalphTestCase
from ralph.tests.mixins import ClientMixin
class BaseObjectLicenceCleanTest(RalphTestCase):
def setUp(self):
super().setUp()
self.region_pl = RegionFactory(name='pl')
self.region_de = RegionFactory(name='de')
self.licence_de = LicenceFactory(region=self.region_de)
self.bo_asset = BackOfficeAssetFactory(region=self.region_pl)
def test_region_validate(self):
base_object_licence = BaseObjectLicence()
base_object_licence.licence = self.licence_de
base_object_licence.base_object = self.bo_asset
with self.assertRaisesRegex(
ValidationError,
(
'Asset region is in a different region than licence.'
)
):
base_object_licence.clean()
class LicenceTest(RalphTestCase):
def setUp(self):
super().setUp()
self.licence_1 = LicenceFactory(number_bought=3)
self.licence_2 = LicenceFactory(number_bought=1)
self.user_1 = UserFactory()
self.bo_asset = BackOfficeAssetFactory()
def test_get_autocomplete_queryset(self):
with self.assertNumQueries(2):
self.assertCountEqual(
Licence.get_autocomplete_queryset().values_list(
'pk', flat=True
),
[self.licence_1.pk, self.licence_2.pk]
)
def test_get_autocomplete_queryset_all_used(self):
BaseObjectLicence.objects.create(
base_object=self.bo_asset, licence=self.licence_1, quantity=1,
)
LicenceUser.objects.create(
user=self.user_1, licence=self.licence_1, quantity=2
)
with self.assertNumQueries(2):
self.assertCountEqual(
Licence.get_autocomplete_queryset().values_list(
'pk', flat=True
),
[self.licence_2.pk]
)
class LicenceFormTest(TransitionTestCase, ClientMixin):
def test_service_env_not_required(self):
self.assertTrue(self.login_as_user())
licence = LicenceFactory()
url = reverse(
'admin:licences_licence_change',
args=(licence.pk,)
)
resp = self.client.get(url, follow=True)
self.assertEqual(resp.status_code, 200)
form = resp.context['adminform'].form
self.assertIn('service_env', form.fields)
self.assertFalse(
form.fields['service_env'].required
)
def test_depreciation_rate_not_required(self):
self.assertTrue(self.login_as_user())
licence = LicenceFactory()
url = reverse(
'admin:licences_licence_change',
args=(licence.pk,)
)
resp = self.client.get(url, follow=True)
self.assertEqual(resp.status_code, 200)
form = resp.context['adminform'].form
self.assertIn('depreciation_rate', form.fields)
self.assertFalse(
form.fields['depreciation_rate'].required
)
| 2.03125 | 2 |
leetcode-CP/Data-structures-Problems/566. Reshape the Matrix.py | vijay2020pc/100-days-of-code | 0 | 12798146 | In MATLAB, there is a handy function called reshape which can reshape an m x n matrix into a new one with a different size r x c keeping its original data.
You are given an m x n matrix mat and two integers r and c representing the number of rows and the number of columns of the wanted reshaped matrix.
The reshaped matrix should be filled with all the elements of the original matrix in the same row-traversing order as they were.
If the reshape operation with given parameters is possible and legal, output the new reshaped matrix; Otherwise, output the original matrix.
Example 1:
Input: mat = [[1,2],[3,4]], r = 1, c = 4
Output: [[1,2,3,4]]
Example 2:
Input: mat = [[1,2],[3,4]], r = 2, c = 4
Output: [[1,2],[3,4]]
Constraints:
m == mat.length
n == mat[i].length
1 <= m, n <= 100
-1000 <= mat[i][j] <= 1000
1 <= r, c <= 300
Solution-:
class Solution:
def matrixReshape(self, mat: List[List[int]], r: int, c: int) -> List[List[int]]:
N = len(mat)
M = len(mat[0])
if r*c != N*M:
return mat
q = []
for i in range(N):
for j in range(M):
q.append(mat[i][j])
new_mat = [[0]*c for _ in range(r)]
for i in range(r):
for j in range(c):
new_mat[i][j] = q.pop(0)
return new_mat
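# Note: q.pop(0) is O(n) on a Python list, so the refill loop is quadratic
# in the worst case; collections.deque.popleft() or a single index into the
# flattened list would make it linear. Within the stated constraints
# (m, n <= 100) the simple version above is fine.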
| 4.0625 | 4 |
src/convert/vukuzazi.py | IHCC-cohorts/data-harmonization-test | 0 | 12798147 | <filename>src/convert/vukuzazi.py
import csv
import pandas as pd
from argparse import ArgumentParser, FileType
def main():
p = ArgumentParser()
p.add_argument('input', type=str)
p.add_argument('output', type=FileType('w'))
args = p.parse_args()
input_xlsx = args.input
output_file = args.output
xlsx = pd.ExcelFile(input_xlsx)
catid_strings = {}
var_categories = pd.read_excel(xlsx, 'Variable Categories')
cat_ids = var_categories['CategoryId'].unique().tolist()
cat_ids = [int(x) for x in cat_ids]
current_cat_id = 1
while current_cat_id <= max(cat_ids):
current_cats = var_categories.loc[var_categories['CategoryId'] == current_cat_id]
cat_strings = []
for idx, row in current_cats.iterrows():
cat_value = int(row['CategoryValue'])
label = str(row['Description'])
cat_strings.append('{0} = {1}'.format(cat_value, label))
catid_strings[current_cat_id] = '\n'.join(cat_strings)
current_cat_id += 1
entities = []
variables = pd.read_excel(xlsx, 'Variables')
for idx, row in variables.iterrows():
local_id = str(row['VariableName']).strip()
if '%' in local_id:
local_id = local_id.replace('%', 'Percent')
name = str(row['Description']).strip()
cat_id = int(row['CategoryId'])
cat_string = catid_strings[cat_id]
entities.append({'Short ID': 'VZ:' + local_id, 'Label': name, 'Value': cat_string})
writer = csv.DictWriter(output_file, fieldnames=['Short ID', 'Label', 'Value'], delimiter='\t', lineterminator='\n')
writer.writeheader()
writer.writerow({'Short ID': 'ID', 'Label': 'LABEL', 'Value': 'A value'})
# TODO: Add validation
writer.writerow({})
for entity in entities:
writer.writerow(entity)
output_file.close()
if __name__ == '__main__':
main()
| 3.078125 | 3 |
radarly/influencer.py | gvannest/radarly-py | 6 | 12798148 | """
An influencer in Radarly is an author of several publications who has a
more-or-less large audience. The influencer module of ``radarly-py`` defines
several methods and functions in order to help you to understand who are the
influencers in your project and consequently understand your audience.
"""
from .api import RadarlyApi
from .model import GeneratorModel, SourceModel
from .utils._internal import parse_struct_stat
class Influencer(SourceModel):
"""Dict-like object storing information about an influencer. The value of
this object are available as value associated to a key, or as attribute of
the instance. Here are some useful attributes:
.. warning:: The structure of the ``Influencer`` object may change
depending on the platform
Args:
id (str): identifier for the influencer
platform (str): origin platform of the influencer
screen_name (str): display name on the social_accounts
permalink (str): link to the social_account
followers_count (int): numbers of followers of the influencer
count (int): number of documents published by the follower in your
project.
        stats (dict): statistics about the influencer's publications
"""
def __init__(self, data, project_id):
self.project_id = project_id
super().__init__()
translator = dict(
stats=parse_struct_stat
)
if 'user' in data:
super().add_data(data['user'], translator)
del data['user']
super().add_data(data, translator)
def __repr__(self):
influ_id, platform = getattr(self, 'id'), getattr(self, 'platform')
return '<Influencer.id={}.platform={}>'.format(influ_id, platform)
@classmethod
def find(cls, project_id, influencer_id, platform, api=None):
"""Retrieve information about an influencer.
Args:
project_id (int): id of the project
influencer_id (int): id of the influencer
platform (str): platform of the influencer
api (RadarlyApi, optional): API used to make the
request. If None, the default API will be used.
Returns:
Influencer:
"""
api = api or RadarlyApi.get_default_api()
url = api.router.influencer['find'].format(project_id=project_id)
params = dict(
uid=influencer_id,
platform=platform
)
res_data = api.get(url, params=params)
return Influencer(res_data, project_id)
@classmethod
def fetch(cls, project_id, parameter, api=None):
"""Retrieve influencers list from a project.
Args:
project_id (int): id of the project
parameter (InfluencerParameter): parameter sent as payload
to the API. See ``InfluencerParameter`` to see how to build
this object.
            api (RadarlyApi): API used to perform the request. If None, the
default API will be used.
Returns:
list[Influencer]:
"""
api = api or RadarlyApi.get_default_api()
url = api.router.influencer['search'].format(project_id=project_id)
data = api.post(url, data=parameter)
return [cls(item, project_id) for item in data['users']]
@classmethod
def fetch_all(cls, project_id, parameter, api=None):
"""retrieve all influencers from a project.
Args:
project_id (int): identifier of a project
parameter (InfluencerParameter): parameter sent as payload
to the API. See ``InfluencerParameter`` to see how to build
this object. This object must contain pagination's parameters.
            api (RadarlyApi): API used to perform the request. If None, the
default API will be used.
Returns:
InfluencerGenerator:
"""
return InfluencersGenerator(parameter,
project_id=project_id, api=api)
def get_metrics(self, api=None):
"""Retrieve metrics data about the influencer from the API.
Returns:
dict:
"""
api = api or RadarlyApi.get_default_api()
params = dict(
platform=self['platform'],
uid=self['id']
)
url = api.router.influencer['find'].format(project_id=self.project_id)
metrics = api.get(url, params=params)['metrics']
return metrics
class InfluencersGenerator(GeneratorModel):
"""Generator which yields all influencers matching some payload.
Args:
search_param (InfluencerParameter):
project_id (int):
api (RadarlyApi):
Yields:
Influencer:
"""
def _fetch_items(self):
"""Get next range of influencers"""
url = self._api.router.influencer['search'].format(project_id=self.project_id)
res_data = self._api.post(url, data=self.search_param)
self.total = 1000
self._items = (
Influencer(item, self.project_id) for item in res_data['users']
)
        limit = self.search_param['limit']
        # ceiling division: one extra page when the last page is partial
        self.total_page = (self.total + limit - 1) // limit
self.search_param = self.search_param.next_page()
def __repr__(self):
return '<InfluencersGenerator.total={}.total_page={}>'.format(
self.total, self.total_page
)
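# Usage sketch (assumes a default RadarlyApi has been configured; the ids
# and platform below are placeholders):
#
#   influencer = Influencer.find(project_id=1, influencer_id='123',
#                                platform='twitter')
#   metrics = influencer.get_metrics()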
| 2.921875 | 3 |
templates/node/node.py | threefoldtech/0-templates | 1 | 12798149 | <reponame>threefoldtech/0-templates
from jumpscale import j
from zerorobot import config
from zerorobot.template.base import TemplateBase
from zerorobot.template.decorator import retry, timeout
from zerorobot.template.state import StateCheckError
import netaddr
import time
CONTAINER_TEMPLATE_UID = 'github.com/threefoldtech/0-templates/container/0.0.1'
VM_TEMPLATE_UID = 'github.com/threefoldtech/0-templates/vm/0.0.1'
BOOTSTRAP_TEMPLATE_UID = 'github.com/threefoldtech/0-templates/zeroos_bootstrap/0.0.1'
ZDB_TEMPLATE_UID = 'github.com/threefoldtech/0-templates/zerodb/0.0.1'
CAPACITY_TEMPLATE_UID = 'github.com/threefoldtech/0-templates/node_capacity/0.0.1'
NETWORK_TEMPLATE_UID = 'github.com/threefoldtech/0-templates/network/0.0.1'
PORT_MANAGER_TEMPLATE_UID = 'github.com/threefoldtech/0-templates/node_port_manager/0.0.1'
BRIDGE_TEMPLATE_UID = 'github.com/threefoldtech/0-templates/bridge/0.0.1'
NODE_CLIENT = 'local'
GiB = 1024 ** 3
class NoNamespaceAvailability(Exception):
pass
class Node(TemplateBase):
version = '0.0.1'
template_name = 'node'
def __init__(self, name, guid=None, data=None):
super().__init__(name=name, guid=guid, data=data)
self._node_sal = j.clients.zos.get(NODE_CLIENT)
self.recurring_action('_monitor', 30) # every 30 seconds
self.recurring_action('_network_monitor', 120) # every 2 minutes
self.gl_mgr.add("_register", self._register)
self.gl_mgr.add("_port_manager", self._port_manager)
def validate(self):
nodes = self.api.services.find(template_name='node')
if nodes and nodes[0].guid != self.guid:
raise RuntimeError('Another node service exists. Only one node service per node is allowed')
self.state.delete('disks', 'mounted')
network = self.data.get('network')
if network:
_validate_network(network)
def _monitor(self):
self.logger.info('Monitoring node %s' % self.name)
self.state.check('actions', 'install', 'ok')
# check for reboot
if self._node_sal.uptime() < self.data['uptime']:
self.install()
self.data['uptime'] = self._node_sal.uptime()
try:
self._node_sal.zerodbs.prepare()
self.state.set('disks', 'mounted', 'ok')
        except Exception:
self.state.delete('disks', 'mounted')
def _network_monitor(self):
self.state.check('actions', 'install', 'ok')
self.logger.info("network monitor")
def nic_mgmt_monitor():
self.logger.info("verify connectivity of management interface")
mgmt_addr = self._node_sal.management_address
mgmt_nic = None
for nic in self._node_sal.client.info.nic():
for addr in nic.get('addrs'):
addr = addr.get('addr')
if not addr:
continue
nw = netaddr.IPNetwork(addr)
if str(nw.ip) == mgmt_addr:
mgmt_nic = nic
break
self.logger.info(mgmt_nic)
            if not mgmt_nic or 'up' not in mgmt_nic.get('flags', []) or (mgmt_nic.get('speed') or 0) <= 0:
self.logger.error("management interface is not healthy")
hostname = self._node_sal.client.info.os()['hostname']
node_id = self._node_sal.name
data = {
'attributes': {},
'resource': hostname,
'text': 'network interface %s is down' % mgmt_nic['name'],
'environment': 'Production',
'severity': 'critical',
'event': 'Network',
'tags': ["node:%s" % hostname, "node_id:%s" % node_id, "interface:%s" % mgmt_nic['name']],
'service': [self.template_uid.name]
}
send_alert(self.api.services.find(template_uid='github.com/threefoldtech/0-templates/alerta/0.0.1'), data)
if 'nic_mgmt_monitor' not in self.gl_mgr.gls:
self.gl_mgr.add('nic_mgmt_monitor', nic_mgmt_monitor)
# make sure the bridges are installed
for service in self.api.services.find(template_uid=BRIDGE_TEMPLATE_UID):
try:
service.state.check('actions', 'install', 'ok')
self.logger.info("configuring bridge %s" % service.name)
service.schedule_action('install')
except StateCheckError:
pass
# make sure the networks are configured
for service in self.api.services.find(template_uid=NETWORK_TEMPLATE_UID):
try:
service.state.check('actions', 'install', 'ok')
self.logger.info("configuring network %s" % service.name)
service.schedule_action('configure')
except StateCheckError:
pass
def _register(self):
"""
make sure the node_capacity service is installed
"""
if config.SERVICE_LOADED:
config.SERVICE_LOADED.wait()
while True:
try:
self.state.check('actions', 'install', 'ok')
self.api.services.find_or_create(template_uid=CAPACITY_TEMPLATE_UID,
service_name='_node_capacity',
data={},
public=True)
return
except StateCheckError:
time.sleep(5)
def _port_manager(self):
"""
make sure the node_port_manager service is installed
"""
if config.SERVICE_LOADED:
config.SERVICE_LOADED.wait()
while True:
try:
self.state.check('actions', 'install', 'ok')
self.api.services.find_or_create(template_uid=PORT_MANAGER_TEMPLATE_UID,
service_name='_port_manager',
data={})
return
except StateCheckError:
time.sleep(5)
@retry(Exception, tries=2, delay=2)
def install(self):
self.logger.info('Installing node %s' % self.name)
self.data['version'] = '{branch}:{revision}'.format(**self._node_sal.client.info.version())
# Set host name
self._node_sal.client.system('hostname %s' % self.data['hostname']).get()
self._node_sal.client.bash('echo %s > /etc/hostname' % self.data['hostname']).get()
self.data['uptime'] = self._node_sal.uptime()
self.state.set('actions', 'install', 'ok')
def reboot(self):
self.logger.info('Rebooting node %s' % self.name)
self._node_sal.reboot()
def zdb_path(self, disktype, size, name):
"""Create zdb mounpoint and subvolume
:param disktype: type of the disk the zerodb will be deployed on
:type disktype: string
:param size: size of the zerodb
:type size: int
:param name: zerodb name
:type name: string
:param zdbinfo: list of zerodb services and their info
:param zdbinfo: [(service, dict)], optional
:return: zerodb mountpoint, subvolume name
:rtype: (string, string)
"""
node_sal = self._node_sal
disks_types_map = {
'hdd': ['HDD', 'ARCHIVE'],
'ssd': ['SSD', 'NVME'],
}
# get all usable filesystem path for this type of disk and amount of storage
reserved = node_sal.find_persistance()
def usable_storagepool(sp):
if sp.name == reserved.name:
return False
if sp.type.value not in disks_types_map[disktype]:
return False
free = (sp.size - sp.total_quota()) / GiB
if free <= size:
return False
return True
# all storage pools of the wanted disk type with more than 'size' GiB of free space
storagepools = list(filter(usable_storagepool, node_sal.storagepools.list()))
if not storagepools:
raise ZDBPathNotFound(
"Could not find any usable storage pool. Not enough space for disk type %s" % disktype)
storagepools.sort(key=lambda sp: sp.size - sp.total_quota(), reverse=True)
if disktype == 'hdd':
# collect the 'zdb' filesystem path of each usable storage pool
fs_paths = []
for sp in storagepools:
try:
fs = sp.get('zdb')
fs_paths.append(fs.path)
except ValueError:
pass # no zdb filesystem on this storagepool
# all paths used by installed zerodb services
zdb_infos = self._list_zdbs_info()
zdb_infos = filter(lambda info: info['service_name'] != name, zdb_infos)
# sort results by free size; the first item is the one with the most free space
results = sorted(zdb_infos, key=lambda r: r['free'], reverse=True)
zdb_paths = [res['path'] for res in results]
# paths that are on a storage pool but not yet used by any zerodb service, so we can use them
free_path = list(set(fs_paths) - set(zdb_paths))
if len(free_path) <= 0:
raise ZDBPathNotFound("all storagepools are already used by a zerodb")
return free_path[0]
if disktype == 'ssd':
fs = storagepools[0].create('zdb_{}'.format(name), size * GiB)
return fs.path
raise RuntimeError("unsupported disktype:%s" % disktype)
def create_zdb_namespace(self, disktype, mode, password, public, ns_size, name='', zdb_size=None):
if disktype not in ['hdd', 'ssd']:
raise ValueError('Disktype should be hdd or ssd')
if mode not in ['seq', 'user', 'direct']:
raise ValueError('ZDB mode should be user, direct or seq')
if disktype == 'hdd':
disktypes = ['HDD', 'ARCHIVE']
elif disktype == 'ssd':
disktypes = ['SSD', 'NVME']
else:
raise ValueError("disk type %s not supported" % disktype)
namespace_name = j.data.idgenerator.generateGUID() if not name else name
zdb_name = j.data.idgenerator.generateGUID()
zdb_size = zdb_size if zdb_size else ns_size
namespace = {
'name': namespace_name,
'size': ns_size,
'password': password,
'public': public,
}
try:
mountpoint = self.zdb_path(disktype, zdb_size, zdb_name)
self._create_zdb(zdb_name, mountpoint, mode, zdb_size, disktype, [namespace])
return zdb_name, namespace_name
except ZDBPathNotFound as err:
self.logger.warning("failed to create a 0-db namespace: %s", str(err))
# at this point we could not find a place to create a new zerodb,
# so look at the already existing ones
pass
def usable_zdb(info):
if info['mode'] != mode:
return False
if info['free'] / GiB < zdb_size:
return False
if info['type'] not in disktypes:
return False
if not info['running']:
return False
return True
zdbinfos = list(filter(usable_zdb, self._list_zdbs_info()))
if len(zdbinfos) <= 0:
message = 'Not enough free space for namespace creation with size {} and type {}'.format(
ns_size, ','.join(disktypes))
raise NoNamespaceAvailability(message)
# sort results by free size; the first item is the one with the most free space
for zdbinfo in sorted(zdbinfos, key=lambda r: r['free'], reverse=True):
zdb = self.api.services.get(template_uid=ZDB_TEMPLATE_UID, name=zdbinfo['service_name'])
namespaces = [ns['name'] for ns in zdb.schedule_action('namespace_list').wait(die=True).result]
if namespace_name not in namespaces:
zdb.schedule_action('namespace_create', namespace).wait(die=True)
return zdb.name, namespace_name
message = 'Namespace {} already exists on all zerodbs'.format(namespace_name)
raise NoNamespaceAvailability(message)
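# Sketch of the allocation flow (hypothetical values; `node` stands for an
# installed instance of this node service):
#
#     zdb_name, ns_name = node.create_zdb_namespace(
#         disktype='ssd', mode='user', password='secret',
#         public=False, ns_size=5)
#
# A new zerodb is deployed when a storage pool has room; otherwise the
# namespace is placed on the running zerodb with the most free space that
# does not already hold a namespace with that name.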
@timeout(30, error_message='info action timeout')
def info(self):
return self._node_sal.client.info.os()
@timeout(30, error_message='stats action timeout')
def stats(self):
return self._node_sal.client.aggregator.query()
@timeout(30, error_message='processes action timeout')
def processes(self):
return self._node_sal.client.process.list()
@timeout(30, error_message='os_version action timeout')
def os_version(self):
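# ping() is expected to return a string like 'PONG Version: <branch> @<revision>';
# the slice drops the 13-character 'PONG Version:' prefix and strip() removes
# the surrounding whitespace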
return self._node_sal.client.ping()[13:].strip()
def _create_zdb(self, name, mountpoint, mode, zdb_size, disktype, namespaces):
"""Create a zerodb service
:param name: zdb name
:type name: string
:param mountpoint: zerodb mountpoint
:type mountpoint: string
:param mode: zerodb mode
:type mode: string
:param zdb_size: size of the zerodb
:type zdb_size: int
:param disktype: type of the disk to deploy the zerodb on
:type disktype: string
:param namespaces: list of namespaces to create on the zerodb
:type namespaces: [dict]
:return: zerodb service name
:rtype: string
"""
zdb_data = {
'path': mountpoint,
'mode': mode,
'sync': False,
'diskType': disktype,
'size': zdb_size,
'namespaces': namespaces
}
zdb = self.api.services.find_or_create(ZDB_TEMPLATE_UID, name, zdb_data)
zdb.schedule_action('install').wait(die=True)
zdb.schedule_action('start').wait(die=True)
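# Design note: find_or_create makes _create_zdb idempotent; calling it again
# with the same name reuses the existing zerodb service instead of deploying
# a second one.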
def _list_zdbs_info(self):
"""
list the paths used by all the zerodbs installed on the node
:param excepted: list of zerodb service name that should be skipped
:type excepted: [str]
:return: a list of zerodb path sorted by free size descending
:rtype: [str]
"""
zdbs = self.api.services.find(template_uid=ZDB_TEMPLATE_UID)
tasks = [zdb.schedule_action('info') for zdb in zdbs]
results = []
for t in tasks:
result = t.wait(timeout=120, die=True).result
result['service_name'] = t.service.name
results.append(result)
return results
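# Shape of each returned dict (a sketch; the fields are assumed from how
# they are consumed elsewhere in this file, sizes in bytes):
#
#     {'mode': 'user', 'free': 5368709120, 'type': 'SSD', 'running': True,
#      'path': '/mnt/zdbs/ssd', 'service_name': 'zdb_abc'}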
def _validate_network(network):
cidr = network.get('cidr')
if cidr:
netaddr.IPNetwork(cidr)
vlan = network.get('vlan')
if not isinstance(vlan, int):
raise ValueError('Network should have vlan configured')
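# Example of a network dict that passes validation (sketch):
#
#     _validate_network({'cidr': '192.168.0.0/24', 'vlan': 100})
#
# 'cidr' is optional but must parse as a network when present; 'vlan' is
# mandatory and must be an int.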
def send_alert(alertas, alert):
for alerta in alertas:
alerta.schedule_action('send_alert', args={'data': alert})
class ZDBPathNotFound(Exception):
pass
| 1.789063 | 2 |
authorization/migrations/0005_auto_20200207_2204.py | KariSpace/CRM_Sedicomm | 0 | 12798150 | <reponame>KariSpace/CRM_Sedicomm
# Generated by Django 2.2.4 on 2020-02-07 20:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('authorization', '0004_auto_20200207_2147'),
]
operations = [
migrations.AlterField(
model_name='people',
name='course',
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='authorization.Course'),
),
]
| 1.382813 | 1 |