max_stars_repo_path (stringlengths 3-269) | max_stars_repo_name (stringlengths 4-119) | max_stars_count (int64 0-191k) | id (stringlengths 1-7) | content (stringlengths 6-1.05M) | score (float64 0.23-5.13) | int_score (int64 0-5)
---|---|---|---|---|---|---|
allink_core/core_apps/allink_legacy_redirect/config.py | allink/allink-core | 5 | 12791851 | # -*- coding: utf-8 -*-
from django.apps import AppConfig
class AllinkLegacyConfig(AppConfig):
name = 'allink_core.core_apps.allink_legacy_redirect'
verbose_name = "Legacy Redirect"
| 1.015625 | 1 |
test/test_mnist_gan.py | kevjn/simplegrad | 0 | 12791852 | # rough copy of https://github.com/geohot/tinygrad/blob/master/examples/mnist_gan.py
from simplegrad import Tensor, Device, Adam
import numpy as np
import itertools as it
from torchvision.utils import make_grid, save_image
import torch
from abc import abstractmethod
import os
def leakyrelu(x, neg_slope=0.2):
return x.relu().sub(x.fork().mul(Tensor(neg_slope).mul(Tensor(-1.0))).relu())
# torch reference for comparison (not executed): torch.functional.F.leaky_relu(torch.tensor(x.val), negative_slope=0.2)
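# Note: the return expression above rewrites leaky-ReLU as relu(x) - neg_slope * relu(-x),
# so it only needs the relu/sub/mul primitives used elsewhere in this test.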
def random_uniform(*shape):
return (np.random.uniform(-1., 1., size=shape) / np.sqrt(np.prod(shape))).astype(np.float32)
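# For example, random_uniform(128, 256) gives float32 values roughly within
# +-1/sqrt(128*256) ~= +-0.0055, a simple fan-based scaling of the uniform init.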
class nn:
@abstractmethod
def forward(self, x):
raise NotImplementedError
@property
def params(self):
return tuple(v for k,v in self.__dict__.items() if isinstance(v, Tensor))
class LinearGen(nn):
def __init__(self):
self.l1 = Tensor(random_uniform(128,256))
self.l2 = Tensor(random_uniform(256, 512))
self.l3 = Tensor(random_uniform(512, 1024))
self.l4 = Tensor(random_uniform(1024, 784))
def forward(self, x):
for layer in [self.l1, self.l2, self.l3]:
x = leakyrelu(x.dot(layer))
return x.dot(self.l4).tanh()
class LinearDisc(nn):
def __init__(self):
self.l1 = Tensor(random_uniform(784, 1024))
self.l2 = Tensor(random_uniform(1024, 512))
self.l3 = Tensor(random_uniform(512, 256))
self.l4 = Tensor(random_uniform(256, 2))
def forward(self, x):
for layer in [self.l1, self.l2, self.l3]:
x = leakyrelu(x.dot(layer))
return x.dot(self.l4).logsoftmax()
import gzip
def fetch(url):
import requests, tempfile, os
fp = os.path.join(tempfile.gettempdir(), url.encode()[-10:].hex())
if os.path.isfile(fp) and os.stat(fp).st_size:
with open(fp, 'rb') as f:
return f.read()
dat = requests.get(url).content
with open(fp + '.tmp', 'wb') as f:
f.write(dat)
os.rename(fp+'.tmp', fp)
return dat
def test_mnist_gan():
generator = LinearGen()
discriminator = LinearDisc()
parse = lambda dat: np.frombuffer(gzip.decompress(dat), dtype=np.uint8).copy()
x_train = parse(fetch(url = "http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz"))[0x10:].reshape((-1, 28*28)).astype(np.float32)
# Hyperparameters
epochs = 10
batch_size = 512
n_batches = int(len(x_train) / batch_size)
output_folder = "outputs"
ds_noise = np.random.randn(64,128).astype(np.float32)
optim_g = Adam(generator.params, learning_rate=0.0002, beta1=0.5)
optim_d = Adam(discriminator.params, learning_rate=0.0002, beta1=0.5)
def batches_generator():
batch_nr = 0
while batch_nr < n_batches:
idx = np.random.randint(0, x_train.shape[0], size=(batch_size))
image_b = x_train[idx].reshape(-1, 28*28).astype(np.float32)/255.
image_b = (image_b - 0.5)/0.5
yield image_b
batch_nr += 1
def real_label(bs):
y = np.zeros((bs,2), np.float32)
y[range(bs), [1]*bs] = -2.0
real_labels = Tensor(y)
return real_labels
def fake_label(bs):
y = np.zeros((bs,2), np.float32)
y[range(bs), [0]*bs] = -2.0
fake_labels = Tensor(y)
return fake_labels
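# Note on the -2.0 labels: with a (bs, 2) label matrix and mean(axis=(0,1)), the loss below
# works out to -2 * sum(log p_target) / (2 * bs) == -mean(log p_target), i.e. the usual NLL loss.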
def train_discriminator(optim, data_real, data_fake):
real_labels = real_label(batch_size)
fake_labels = fake_label(batch_size)
optim.zero_grad()
output_real = discriminator.forward(data_real)
loss_real = real_labels.mul(output_real).mean(axis=(0,1))
output_fake = discriminator.forward(data_fake)
loss_fake = fake_labels.mul(output_fake).mean(axis=(0,1))
loss_real.backward()
loss_fake.backward()
optim.step()
return loss_fake.val + loss_real.val
def train_generator(optim, data_fake):
real_labels = real_label(batch_size)
optim.zero_grad()
output = discriminator.forward(data_fake)
loss = real_labels.mul(output).mean(axis=(0,1))
loss.backward()
optim.step()
return loss.val
for epoch in range(epochs):
batches = tuple(batches_generator())
for data_real in batches:
data_real = Tensor(data_real)
noise = Tensor(np.random.randn(batch_size, 128))
data_fake = generator.forward(noise)
data_fake = Tensor(data_fake.val)
loss_d = train_discriminator(optim_d, data_real, data_fake).item()
noise = Tensor(np.random.randn(batch_size, 128))
data_fake = generator.forward(noise)
loss_g = train_generator(optim_g, data_fake).item()
# generate images after each epoch
fake_images = generator.forward(Tensor(ds_noise)).val
fake_images = (fake_images.reshape(-1, 1, 28, 28)+ 1) / 2
fake_images = make_grid(torch.tensor(fake_images))
save_image(fake_images, os.path.join(output_folder, f'image_{epoch}.jpg')) | 2.453125 | 2 |
Scripts/UpdateCopyright.py | davidbrownell/Common_Environment | 1 | 12791853 | <filename>Scripts/UpdateCopyright.py
# ---------------------------------------------------------------------------
# |
# | UpdateCopyright.py
# |
# | <NAME> (<EMAIL>)
# |
# | 01/01/2016 06:12:15 PM
# |
# ---------------------------------------------------------------------------
# |
# | Copyright <NAME> 2016-18.
# |
# | Distributed under the Boost Software License, Version 1.0.
# | (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
# |
# ---------------------------------------------------------------------------
"""\
Iterates through a directory of files looking for common copyright signatures.
When one is encountered, it will be updated to include the current year.
"""
import io
import inflect
import os
import re
import sys
import time
import traceback
from CommonEnvironment import CommandLine
from CommonEnvironment import FileSystem
from CommonEnvironment.StreamDecorator import StreamDecorator
# ---------------------------------------------------------------------------
_script_fullpath = os.path.abspath(__file__) if "python" in sys.executable.lower() else sys.executable
_script_dir, _script_name = os.path.split(_script_fullpath)
# ---------------------------------------------------------------------------
COPYRIGHT_EXPRESSIONS = [ re.compile(r".*?Copyright \(c\) (?P<copyright>\S*).*"), # Matches 'Copyright (c) 2011-18 <NAME>. Permission to use, copy, '
re.compile(r".*?Copyright (?P<copyright>[^\.]+)\..*"), # Matches 'Copyright <NAME> 2011.'
]
# The following expressions must have a 'begin' capture; 'end' is optional.
YEAR_EXPRESSIONS = [ re.compile(r"(?P<begin>\d{4})-(?P<end>\d{2,4})"), # Matches multi-year range
re.compile(r"(?P<begin>\d{4})"), # Matches single year
]
MAX_FILE_SIZE = 100 * 1024 * 1024 # 100 MB
# ---------------------------------------------------------------------------
plural = inflect.engine()
# ---------------------------------------------------------------------------
@CommandLine.EntryPoint
@CommandLine.FunctionConstraints( code_dir=CommandLine.DirectoryTypeInfo(),
year=CommandLine.IntTypeInfo(min=1, max=10000, arity='?'),
output_stream=None,
)
def EntryPoint( code_dir,
year=None,
output_stream=sys.stdout,
verbose=False,
):
year = year or str(time.localtime()[0])
two_digit_year = str(int(year) % 100)
updates = [ 0, ]
# ---------------------------------------------------------------------------
def GlobalDoneSuffix():
return "{} {} updated".format( plural.no("file", updates[0]),
plural.plural_verb("was", updates[0]),
)
# ---------------------------------------------------------------------------
output_stream.write("Processing files in '{}'...".format(code_dir))
with StreamDecorator(output_stream).DoneManager( display_exceptions=False,
done_suffix_functor=GlobalDoneSuffix,
) as dm:
for fullpath in FileSystem.WalkFiles( code_dir,
exclude_file_extensions=[ ".pyc", ".pyo", ".obj", ".pdb", ".idb", ],
traverse_exclude_dir_names=[ "Generated", lambda name: name[0] == '.', ],
):
try:
if os.path.getsize(fullpath) > MAX_FILE_SIZE:
if verbose:
dm.stream.write("INFO: '{}' is too large to process.\n".format(fullpath))
continue
copyright_updated = [ False, ]
# ---------------------------------------------------------------------------
def DoneSuffix():
if copyright_updated[0]:
return "***** Copyright was updated *****"
# ---------------------------------------------------------------------------
dm.stream.write("Processing '{}'...".format(fullpath))
with dm.stream.DoneManager( done_suffix_functor=DoneSuffix,
) as file_dm:
with io.open(fullpath, 'r') as f:
try:
lines = f.read().split('\n')
newline_char = (f.newlines[0] if isinstance(f.newlines, tuple) else f.newlines) or '\r\n'
except (UnicodeDecodeError, MemoryError):
if verbose:
file_dm.stream.write("INFO: '{}' appears to be a binary file name cannot be processed.\n".format(fullpath))
continue
for index, line in enumerate(lines):
for copyright_expr in COPYRIGHT_EXPRESSIONS:
copyright_match = copyright_expr.match(line)
if not copyright_match:
continue
copyright = copyright_match.group("copyright")
year_match = None
for year_expr in YEAR_EXPRESSIONS:
year_match = year_expr.search(copyright)
if year_match:
break
if not year_match:
file_dm.stream.write("WARNING: '{}' appears to have a copyright, but it isn't in an expected format ('{}') [0].\n".format(fullpath, line.strip()))
continue
begin = year_match.group("begin")
end = year_match.group("end") if "end" in year_match.groupdict() else begin
if len(end) == 2:
end = str(((int(year) // 100) * 100) + int(end))
if len(begin) != 4:
file_dm.stream.write("WARNING: '{}' appears to have a copyright, but it isn't in an expected format ('{}') [1].\n".format(fullpath, line.strip()))
continue
if len(end) != 4:
file_dm.stream.write("WARNING: '{}' appears to have a copyright, but it isn't in an expected format ('{}') [2].\n".format(fullpath, line.strip()))
continue
if end == year:
continue
copyright = "{}{}{}".format( copyright[:year_match.start()],
"{}-{}".format(begin, two_digit_year),
copyright[year_match.end():],
)
line = "{}{}{}".format( line[:copyright_match.start() + copyright_match.start("copyright")],
copyright,
line[copyright_match.end("copyright"):],
)
lines[index] = line
copyright_updated[0] = True
if copyright_updated[0]:
file_dm.stream.write("Updating...")
with file_dm.stream.DoneManager():
with io.open(fullpath, 'w', newline=newline_char) as f:
f.write('\n'.join(lines))
updates[0] += 1
except:
content = traceback.format_exc()
output_stream.write("ERROR: {}".format(StreamDecorator.LeftJustify(content, len("ERROR: "))))
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
if __name__ == "__main__":
try: sys.exit(CommandLine.Main())
except KeyboardInterrupt: pass
| 2.3125 | 2 |
spiral/utils.py | miyosuda/variational_walkback | 0 | 12791854 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pylab as plt
def save_figure(xs, file_path):
plt.figure()
plt.ylim([-2.0, 2.0])
plt.xlim([-2.0, 2.0])
plt.plot(xs[:,0], xs[:,1], "ro")
plt.savefig(file_path)
plt.close()
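# Example usage (illustrative): save_figure(np.random.randn(100, 2) * 0.5, '/tmp/spiral.png')
# plots the 2-D points as red dots on a fixed [-2, 2] x [-2, 2] canvas and writes the file.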
| 2.390625 | 2 |
api/models.py | dschien/greendoors-web | 0 | 12791855 | <gh_stars>0
import random
import string
import datetime
from django.contrib.contenttypes.generic import GenericRelation
from django.core.urlresolvers import reverse
from tinymce.models import HTMLField
__author__ = 'schien'
import re
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth.models import User
from django.db import models
from django.forms import fields
from django.forms import ValidationError
from django.utils.encoding import smart_unicode
from django_extensions.db.fields import UUIDField
class HexColorField(fields.Field):
default_error_messages = {
'hex_error': u'This is an invalid color code. It must be a html hex color code e.g. #000000'
}
def clean(self, value):
super(HexColorField, self).clean(value)
if value in fields.EMPTY_VALUES:
return u''
value = smart_unicode(value)
value_length = len(value)
if value_length != 7 or not re.match('^\#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})$', value):
raise ValidationError(self.error_messages['hex_error'])
return value
def widget_attrs(self, widget):
if isinstance(widget, (fields.TextInput)):
return {'maxlength': str(7)}
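# For example, HexColorField().clean('#1a2b3c') returns the value, while any 7-character
# string that is not a valid hex colour code raises the 'hex_error' ValidationError.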
class UserProfile(models.Model):
"""
Message between two users
"""
user = models.OneToOneField(User, verbose_name="django authentication user", related_name='user_profile')
newsletter = models.NullBooleanField(null=False, blank=False)
research = models.NullBooleanField(null=False, blank=False)
def __unicode__(self):
return "%s " % self.user.username
class TrackableURL(models.Model):
url = models.URLField(max_length=255, unique=True)
def __unicode__(self):
return self.url
class RelatedTrackableURL(TrackableURL):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
def __unicode__(self):
return self.url
class RedirectUrl(models.Model):
redirect_key = UUIDField(unique=True, auto=True)
target_url = models.ForeignKey(TrackableURL, related_name='redirect_urls')
user = models.ForeignKey(User, verbose_name="django authentication user", related_name='links')
def __unicode__(self):
return "%s %s %s" % (self.user.username, self.redirect_key, self.target_url.url)
class Meta:
unique_together = ('target_url', 'user')
class Click(models.Model):
redirect = models.ForeignKey(RedirectUrl, verbose_name="redirection url", related_name='clicks')
time = models.DateTimeField(auto_now_add=True)
user_agent = models.CharField(max_length=100, blank=True, null=True)
def __unicode__(self):
return "%s - %s - %s" % (self.redirect.user.username, self.redirect.target_url.url, self.time)
class Scan(models.Model):
"""
Barcode scans
"""
created = models.DateTimeField(auto_now_add=True)
text = models.CharField(max_length=8)
user = models.ForeignKey(User, verbose_name="django authentication user", related_name='scans')
# timestamp = models.BigIntegerField()
class Meta:
ordering = ('created',)
# unique_together = ('user', 'text', 'timestamp')
def __unicode__(self):
return u'%s %s' % (self.text, self.user.username)
@property
def house(self):
return House.objects.get(pk=int(self.text[0:4]))
@property
def measure(self):
m = int(self.text[5:8])
if m > 0:
measure = Measure.objects.get(pk=m)
imeasure = InstalledMeasure.objects.filter(house=self.house, measure=measure)[0]
return imeasure
return None
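# A scan text therefore encodes the house pk in its first four characters and the installed
# measure pk in characters 6-8, e.g. a code like '0012-005' (separator assumed) maps to House 12 and Measure 5.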
class Device(models.Model):
"""
UUID from devices
"""
created = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, verbose_name="django authentication user", related_name='phones')
uuid = models.CharField(null=True, blank=True, max_length=40)
cordova = models.CharField(null=True, blank=True, max_length=400)
platform = models.CharField(null=True, blank=True, max_length=400)
version = models.CharField(null=True, blank=True, max_length=400)
model = models.CharField(null=True, blank=True, max_length=400)
def __unicode__(self):
return u'%s %s %s' % (self.user.username, self.platform, self.version)
class HomeOwnerProfile(models.Model):
"""
"""
user = models.OneToOneField(User, verbose_name="django authentication user", related_name='home_owner_profile')
class MeasureCategory(models.Model):
"""
A measure category
"""
name = models.TextField()
is_renewable = models.BooleanField(default=False)
def __unicode__(self):
return u'%s' % (self.name, )
class Measure(models.Model):
"""
A measure
"""
name = models.CharField(max_length=200)
description = models.TextField(null=True)
short = models.CharField(max_length=80, null=True)
color = models.CharField(max_length=7, null=True, blank=True)
category = models.ForeignKey(MeasureCategory, related_name='measures')
report_template = models.CharField(max_length=200, default='report/general_measure_text.html')
def __unicode__(self):
return u'%s' % (self.name,)
class House(models.Model):
"""
Houses
"""
owner = models.ForeignKey(HomeOwnerProfile, verbose_name="home owner profile", related_name='house', null=True)
address = models.CharField(max_length=1024)
latitude = models.FloatField(null=True)
longitude = models.FloatField(null=True)
adults = models.IntegerField(null=True)
children = models.IntegerField(null=True)
bedrooms = models.IntegerField(null=True)
comments = models.CharField(max_length=1024, null=True, blank=True)
OPEN_SATURDAY_AND_SUNDAY = 3
OPEN_SUNDAY = 2
OPEN_SATURDAY = 1
OPEN_CLOSED = 0
OPEN_CHOICES = (
(OPEN_CLOSED, 'Closed'),
(OPEN_SATURDAY, 'Saturday'),
(OPEN_SUNDAY, 'Sunday'),
(OPEN_SATURDAY_AND_SUNDAY, 'Saturday and Sunday'),
)
open = models.IntegerField(max_length=1, choices=OPEN_CHOICES, null=True)
ACCESSIBILITY_FULL = 1
ACCESSIBILITY_PARTIAL = 2
ACCESSIBILITY_NONE = 0
ACCESSIBILITY_CHOICES = (
(ACCESSIBILITY_FULL, 'Full'),
(ACCESSIBILITY_PARTIAL, 'Partial'),
(ACCESSIBILITY_NONE, 'None')
)
accessibility = models.IntegerField(max_length=1, choices=ACCESSIBILITY_CHOICES, null=True)
AGE_VICTORIAN = 1
AGE_30s = 3
AGE_50s = 5
AGE_70s = 7
AGE_NEW = 8
AGE_GEORGIAN = 0
AGE_20s = 2
AGE_60s = 6
AGE_CHOICES = ((AGE_VICTORIAN, "Victorian"),
(AGE_30s, "1930s"),
(AGE_50s, "1950s"),
(AGE_70s, "1970s"),
(AGE_NEW, "New"),
(AGE_GEORGIAN, "Georgian"),
(AGE_20s, "1920s"),
(AGE_60s, "1960s"))
age = models.IntegerField(max_length=1, choices=AGE_CHOICES, null=True)
TYPE_MULTI_OCCUPANT = 5
TYPE_DETACHED = 1
TYPE_BUNGALOW = 4
TYPE_TERRACE = 3
TYPE_SEMI = 2
TYPE_CHOICES = ((TYPE_MULTI_OCCUPANT, "Multi Occupant"),
(TYPE_DETACHED, "Detached"),
(TYPE_BUNGALOW, "Bungalow"),
(TYPE_TERRACE, "Terrace"),
(TYPE_SEMI, "Semi")
)
type = models.IntegerField(max_length=1, choices=TYPE_CHOICES, null=True)
CONTACT_NONE = 0
CONTACT_YEAR = 2
CONTACT_MONTH = 1
CONTACT_CHOICES = (
(CONTACT_NONE, "None"),
(CONTACT_YEAR, "Year"),
(CONTACT_MONTH, "Month")
)
contact = models.IntegerField(max_length=1, choices=CONTACT_CHOICES, default=CONTACT_NONE, null=True)
MAPPING_MONTH = 1
MAPPING_YEAR = 2
MAPPING_CHOICES = ((MAPPING_MONTH, "Month"), (MAPPING_YEAR, "Year"))
mapping = models.IntegerField(max_length=1, choices=MAPPING_CHOICES, null=True)
image = models.TextField()
report_text = models.TextField(null=True, blank=True)
# urls = GenericRelation(RelatedTrackableURL, null=True, blank=True)
def __unicode__(self):
return u'%s %s' % (self.pk, self.address)
class Note(models.Model):
"""
Notes for houses
"""
created = models.DateTimeField(auto_now_add=True)
text = models.TextField()
user = models.ForeignKey(User, verbose_name="django authentication user", related_name='notes')
house = models.ForeignKey(House, related_name='note')
timestamp = models.BigIntegerField(null=True, blank=True)
class Meta:
ordering = ('created',)
unique_together = ('user', 'house')
def __unicode__(self):
return u'%s %s' % (self.house.id, self.text)
def get_absolute_url(self):
return reverse('web:note', kwargs={'pk': self.pk})
class InstalledMeasure(models.Model):
measure = models.ForeignKey(Measure)
cost = models.IntegerField(null=True, blank=True)
disruption = models.IntegerField(null=True, blank=True)
house = models.ForeignKey(House, null=True, blank=True, related_name='measures')
report_text = models.TextField(null=True, blank=True)
supplier = models.CharField(max_length=1024, null=True, blank=True)
supplier_urls = GenericRelation(RelatedTrackableURL, null=True, blank=True, related_name='supplier_urls')
product = models.CharField(max_length=1024, null=True, blank=True)
product_urls = GenericRelation(RelatedTrackableURL, null=True, blank=True, related_name='product_urls')
def __unicode__(self):
return u'%s' % (self.measure.short,)
class MessageThread(models.Model):
pass
def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
DEFAULT_THREAD_ID = 1
class Message(models.Model):
"""
Message between two users
"""
created = models.DateTimeField(auto_now_add=True)
text = HTMLField()
sender = models.ForeignKey(User, verbose_name="sending django authentication user", related_name='sent_messages')
receiver = models.ForeignKey(User, verbose_name="receiving django authentication user",
related_name='received_messages')
sent = models.BooleanField(default=False)
thread = models.ForeignKey(MessageThread, default=DEFAULT_THREAD_ID, related_name='messages')
key = UUIDField(auto=True)
class Meta:
ordering = ('created',)
unique_together = ('sender', 'created')
def __unicode__(self):
return u'%s %s' % (self.text, self.sent)
class Favourite(models.Model):
created = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, verbose_name="django authentication user", related_name='favourites')
house = models.ForeignKey(House, null=True)
timestamp = models.BigIntegerField(null=True, blank=True)
class Meta:
unique_together = ('user', 'house')
def __unicode__(self):
return u'%s' % (self.house.address,)
class App(models.Model):
model_version = models.CharField(max_length=8, unique=True)
openday = models.DateField(default=datetime.date(day=26, month=9, year=2013))
class MessageKey(models.Model):
"""
Provides a url key to compose a message as response
"""
message_key = models.BigIntegerField(unique=True)
previous_message = models.ForeignKey(Message)
class LoggerMessage(models.Model):
message = models.TextField()
created = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, verbose_name="django user", related_name='log_messages', null=True, blank=True)
def __unicode__(self):
return u'%s %s %s' % (self.created, self.user, self.message[:80])
| 2.078125 | 2 |
src/api.py | kolbl/HAR_master_thesis | 1 | 12791856 | import flask
from tensorflow import keras
import pandas as pd
from flask import request, jsonify
from pandas.io.json import json_normalize
app = flask.Flask(__name__)
app.config["DEBUG"] = True
@app.route('/api/prediction', methods=['POST'])
def predict():
# Validate the request body contains JSON
if request.is_json:
# Parse the JSON into a Python dictionary
req = request.get_json()
sample_df = json_normalize(req)
timesteps = 40
#sample_df = sample_df.drop(["TIMESTAMP"], axis=1)
sample_df = sample_df.astype(float)
x_test, y_test = sample_df.iloc[:, :-1], sample_df.iloc[:, -1]
n_features = 83
x_test_reshaped = x_test.values.reshape(x_test.shape[0], timesteps + 1, n_features)
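# With timesteps=40 and n_features=83 each request row is expected to carry
# (40 + 1) * 83 = 3403 feature columns followed by the class label in the last column.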
optimizer = keras.optimizers.Nadam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-07, schedule_decay=0.004)
model = keras.models.load_model('models/CNN-1.h5', compile=False) # todo: get right model
model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['acc'])
y_pred = model.predict(x_test_reshaped)
y_class = y_pred.argmax(axis=-1)
y_class = y_class + 1
y_pred_pd = pd.DataFrame(y_class, columns=["class"])
y_test_pd = pd.DataFrame(y_test.tolist(), columns=["class"])
# activity_map = {0: "no activity", 1: "Act01", 2: "Act02", 3: "Act03", 4: "Act04", 5: "Act05", 6: "Act06", 7: "Act07", 8: "Act08",
# 9: "Act09", 10: "Act10", 11: "Act11", 12: "Act12", 13: "Act13", 14: "Act14", 15: "Act15",
# 16: "Act16", 17: "Act17", 18: "Act18", 19: "Act19", 20: "Act20", 21: "Act21", 22: "Act22",
# 23: "Act23", 24: "Act24"}
activity_map = {0: "no activity", 1: "Take medication", 2: "Prepare breakfast", 3: "Prepare lunch", 4: "Prepare dinner",
5: "Breakfast", 6: "Lunch", 7: "Dinner", 8: "Eat a snack", 9: "Watch TV", 10: "Enter the SmartLab",
11: "Play a videogame", 12: "Relax on the sofa", 13: "Leave the SmartLab", 14: "Visit in the SmartLab",
15: "Put waste in the bin", 16: "Wash hands", 17: "Brush teeth", 18: "Use the toilet", 19: "Wash dishes",
20: "Put washin into the washing machine", 21: "Work at the table", 22: "Dressing", 23: "Go to the bed",
24: "Wake up"}
predicted_class = y_pred_pd["class"].map(activity_map)
y_test_pd = y_test_pd.astype(float)
actual_class = y_test_pd["class"].map(activity_map)
prediction_result = "The new data point is predicted to be the activity {} ({}). The ground truth activity is {} ({}). ".format(predicted_class[0], y_class[0], actual_class[0], int(y_test[0]))
if(y_class[0] == int(y_test[0])):
prediction_result += "The system predicted correctly! "
else:
prediction_result += "The system predicted wrong! "
print(prediction_result)
# Return a string along with an HTTP status code
return prediction_result, 200
else:
# The request body wasn't JSON so return a 400 HTTP status code
return "Request was not JSON", 400
app.run()
| 2.8125 | 3 |
problems/find-all-anagrams-in-a-string/solution.py | tonymontaro/leetcode-hints | 1 | 12791857 | from collections import Counter
class Solution:
def findAnagrams(self, word: str, substr: str):
"""O(n) time | O(1) space"""
if not word or not substr: return []
l = 0
r = -1
seen = 0
ln = len(substr)
counts = Counter(substr)
counts = {char: -counts[char] for char in substr}
result = []
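# Window bookkeeping: counts[c] tracks (occurrences of c inside the window) minus (occurrences in substr),
# and seen counts matched characters, so seen == ln exactly when the current window is an anagram of substr.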
while r < len(word)-1:
r += 1
char = word[r]
if char in counts:
counts[char] += 1
if counts[char] <= 0:
seen += 1
if seen == ln:
result.append(l)
if r-l+1 == ln:
char = word[l]
l += 1
if char in counts:
counts[char] -= 1
if counts[char] < 0:
seen -= 1
return result
| 3.578125 | 4 |
nautilus/utils/env.py | LeptoSpira/nautilus-chambers | 1 | 12791858 | <reponame>LeptoSpira/nautilus-chambers
"""Set up local enviroment."""
import yaml
try:
with open('config.yml') as file:
env = yaml.load(file, Loader=yaml.FullLoader)
except FileNotFoundError:
env = {}
| 1.78125 | 2 |
main.py | Pzqqt/Whyred_Rom_Update_Checker | 11 | 12791859 | <reponame>Pzqqt/Whyred_Rom_Update_Checker<gh_stars>10-100
#!/usr/bin/env python3
# encoding: utf-8
from argparse import ArgumentParser
import json
import time
import traceback
import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from requests import exceptions
from config import (
ENABLE_SENDMESSAGE, LOOP_CHECK_INTERVAL, ENABLE_MULTI_THREAD, MAX_THREADS_NUM, LESS_LOG
)
from check_init import PAGE_CACHE
from check_list import CHECK_LIST
from database import create_dbsession, Saved
from logger import write_log_info, write_log_warning, print_and_log
# When True, force saving data to the database and sending messages
FORCE_UPDATE = False
_THREADING_LOCK = threading.Lock()
def database_cleanup():
"""
Delete items that exist in the database but are no longer present in CHECK_LIST
:return: the set of names of the deleted items
"""
with create_dbsession() as session:
saved_ids = {x.ID for x in session.query(Saved).all()}
checklist_ids = {x.__name__ for x in CHECK_LIST}
drop_ids = saved_ids - checklist_ids
for id_ in drop_ids:
session.delete(session.query(Saved).filter(Saved.ID == id_).one())
session.commit()
return drop_ids
def _abort(text):
print_and_log(str(text), level="warning", custom_prefix="-")
sys.exit(1)
def _abort_by_user():
return _abort("Abort by user")
def _sleep(sleep_time):
try:
time.sleep(sleep_time)
except KeyboardInterrupt:
_abort_by_user()
def _get_time_str(time_num=None, offset=0):
if time_num is None:
time_num = time.time()
return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time_num+offset))
def check_one(cls, disable_pagecache=False):
if isinstance(cls, str):
cls_str = cls
cls = {cls_.__name__: cls_ for cls_ in CHECK_LIST}.get(cls_str)
if not cls:
raise Exception("Can not found '%s' from CHECK_LIST!" % cls_str)
cls_obj = cls()
if disable_pagecache:
cls_obj.enable_pagecache = False
try:
cls_obj.do_check()
except exceptions.ReadTimeout:
print_and_log("%s check failed! Timeout." % cls_obj.fullname, level="warning")
except (exceptions.SSLError, exceptions.ProxyError):
print_and_log("%s check failed! Proxy error." % cls_obj.fullname, level="warning")
except exceptions.ConnectionError:
print_and_log("%s check failed! Connection error." % cls_obj.fullname, level="warning")
except exceptions.HTTPError as error:
print_and_log("%s check failed! %s." % (cls_obj.fullname, error), level="warning")
except:
traceback_string = traceback.format_exc()
print(traceback_string)
write_log_warning(*traceback_string.splitlines())
print_and_log("%s check failed!" % cls_obj.fullname, level="warning")
else:
if cls_obj.is_updated() or FORCE_UPDATE:
print_and_log(
"%s has update: %s" % (cls_obj.fullname, cls_obj.info_dic["LATEST_VERSION"]),
custom_prefix=">",
)
try:
cls_obj.after_check()
except:
traceback_string = traceback.format_exc()
print("\n%s\n! Something wrong when running after_check!" % traceback_string)
write_log_warning(*traceback_string.splitlines())
write_log_warning("%s: Something wrong when running after_check!" % cls_obj.fullname)
cls_obj.write_to_database()
if ENABLE_SENDMESSAGE:
cls_obj.send_message()
else:
print("- %s no update" % cls_obj.fullname)
if not LESS_LOG:
write_log_info("%s no update" % cls_obj.fullname)
return True
def single_thread_check(check_list):
# In single-threaded mode, 5 consecutive failed checks are treated as a network error and checking stops early
req_failed_flag = 0
check_failed_list = []
is_network_error = False
for cls in check_list:
if not check_one(cls):
req_failed_flag += 1
check_failed_list.append(cls)
if req_failed_flag == 5:
is_network_error = True
break
else:
req_failed_flag = 0
_sleep(2)
return check_failed_list, is_network_error
def multi_thread_check(check_list):
# In multi-threaded mode, 10 accumulated failed checks are treated as a network error; tasks submitted to the pool afterwards return immediately without checking
check_failed_list = []
is_network_error = False
def _check_one(cls_):
nonlocal check_failed_list, is_network_error
if is_network_error:
return
result = check_one(cls_)
time.sleep(2)
if not result:
with _THREADING_LOCK:
check_failed_list.append(cls_)
if len(check_failed_list) >= 10:
with _THREADING_LOCK:
is_network_error = True
with ThreadPoolExecutor(MAX_THREADS_NUM) as executor:
executor.map(_check_one, check_list)
return check_failed_list, is_network_error
def loop_check():
write_log_info("Run database cleanup before start")
drop_ids = database_cleanup()
write_log_info("Abandoned items: {%s}" % ", ".join(drop_ids))
loop_check_func = multi_thread_check if ENABLE_MULTI_THREAD else single_thread_check
check_list = [cls for cls in CHECK_LIST if not cls._skip]
while True:
start_time = _get_time_str()
print(" - " + start_time)
print(" - Start...")
write_log_info("=" * 64)
write_log_info("Start checking at %s" % start_time)
# loop_check_func must return two values:
# the list of items that failed the check, and a bool indicating a network or proxy error
check_failed_list, is_network_error = loop_check_func(check_list)
if is_network_error:
print_and_log("Network or proxy error! Sleep...", level="warning")
else:
# Re-check the failed items, forcing single-threaded checks
print_and_log("Check again for failed items")
for cls in check_failed_list:
check_one(cls)
PAGE_CACHE.clear()
print(" - The next check will start at %s\n" % _get_time_str(offset=LOOP_CHECK_INTERVAL))
write_log_info("End of check")
_sleep(LOOP_CHECK_INTERVAL)
def get_saved_json():
# Return the saved data in JSON format
with create_dbsession() as session:
return json.dumps(
[
result.get_kv()
for result in sorted(session.query(Saved), key=lambda x: x.FULL_NAME)
if result.ID != "GoogleClangPrebuilt"
],
# ensure_ascii=False,
)
def show_saved_data():
# Print the saved data in MySQL command-line table style
with create_dbsession() as session:
results = session.query(Saved).with_entities(Saved.ID, Saved.FULL_NAME, Saved.LATEST_VERSION)
kv_dic = {k: (v1, v2) for k, v1, v2 in results if k != "GoogleClangPrebuilt"}
try:
# Use the rich library if it is available
import rich
except ImportError:
id_maxlen = len(max(kv_dic.keys(), key=len))
fn_maxlen = max([len(x[0]) for x in kv_dic.values()])
lv_maxlen = max([len(x[1]) for x in kv_dic.values()])
print("+%s+%s+%s+" % ("-" * id_maxlen, "-" * fn_maxlen, "-" * lv_maxlen))
print("|%s|%s|%s|" % (
"ID".ljust(id_maxlen), "Full Name".ljust(fn_maxlen), "Latest Version".ljust(lv_maxlen)
))
print("+%s+%s+%s+" % ("-" * id_maxlen, "-" * fn_maxlen, "-" * lv_maxlen))
for id_ in sorted(kv_dic.keys()):
fn, lv = kv_dic[id_]
print("|%s|%s|%s|" % (
id_.ljust(id_maxlen), fn.ljust(fn_maxlen), lv.ljust(lv_maxlen)
))
print("+%s+%s+%s+" % ("-" * id_maxlen, "-" * fn_maxlen, "-" * lv_maxlen))
else:
del rich
from rich.console import Console
from rich.table import Table
console = Console()
table = Table(show_header=True, header_style="bold magenta")
table.add_column("ID", style="dim")
table.add_column("Full Name")
table.add_column("Latest Version")
for id_ in sorted(kv_dic.keys()):
table.add_row(id_, *kv_dic[id_])
console.print(table)
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--force", help="Force save to database & send message to Telegram", action="store_true")
parser.add_argument("--dontpost", help="Do not send message to Telegram", action="store_true")
parser.add_argument("-a", "--auto", help="Automatically loop check all items", action="store_true")
parser.add_argument("-c", "--check", help="Check one item")
parser.add_argument("-s", "--show", help="Show saved data", action="store_true")
parser.add_argument("-j", "--json", help="Show saved data as json", action="store_true")
args = parser.parse_args()
if args.force:
FORCE_UPDATE = True
if args.dontpost:
ENABLE_SENDMESSAGE = False
if args.auto:
loop_check()
elif args.check:
check_one(args.check, disable_pagecache=True)
elif args.show:
show_saved_data()
elif args.json:
print(get_saved_json())
else:
parser.print_usage()
| 2.15625 | 2 |
Chat_bot.py | Ananda602/Chat-bot | 1 | 12791860 | import random
import speech_recognition as sr
import datetime
import calendar
import time
import webbrowser
import wikipedia
from gtts import gTTS
import playsound
import os
import win10toast
from bs4 import BeautifulSoup
import requests
import re
import nltk
from googletrans import Translator
import sports
from newspaper import Article
bot_name = "Rag2020"
bot_template = "{0}"
user_template = "{1}"
def send_message(message):
response = respond(message)
alex_speak(bot_template.format(response))
def respond(message):
if message in responses:
bot_message = random.choice(responses[message])
elif 'Search' in message:
search = record_audio("Specify the word")
url = "https://google.com/search?q=" +search
bot_message = webbrowser.get().open(url)
elif message == "Find Location":
location = record_audio("City name")
url = "https://google.ml/maps/place/" + location +'/&'
bot_message = webbrowser.get().open(url)
elif message == "Calculate":
m = record_audio("What you have to compute")
bot_message = calculate(m)
elif 'Who Is' in message:
person = person_name(message)
bot_message = wikipedia.summary(person, sentences=2)
elif message == "Set An Remainder":
bot_message = remainder()
elif message == "Set An Alarm":
bot_message = alarm()
elif message == "Play Me A Song":
bot_message = melody()
elif message == "Weather":
bot_message = weather_manager()
elif message == "Wikipedia":
bot_message = scrap()
elif message == "Translate":
bot_message = trans()
elif message == "Headlines":
bot_message = news_scrap()
elif message == "Live Score":
bot_message = sport_score()
elif message == "Exit":
breakpoint()
else:
bot_message = random.choice(responses["Default"])
return bot_message
def date_and_time():
now = datetime.datetime.now()
today = datetime.datetime.today()
weekday = calendar.day_name[today.weekday()]
month = now.month
day = now.day
month_list = ['January', 'February', 'March', 'April', 'May', 'June',
'July', 'August', 'September', 'October', 'November', 'December']
Numbers = ['1st', '2nd', '3rd', '4th', '5th', '6th', '7th', '8th', '9th', '10th', '11th', '12th', '13th',
'14th', '15th', '16th', '17th', '18th', '19th', '20th', '21st', '22nd', '23rd', '24th', '25th', '26th', '27th',
'28th', '29th', '30th', '31st']
return "Today is "+weekday + ' '+month_list[month-1]+' the ' + Numbers[day-1]
def month():
now = datetime.datetime.now()
month = now.month
month_list = ['January', 'February', 'March', 'April', 'May', 'June',
'July', 'August', 'September', 'October', 'November', 'December']
return month_list[month-1]
def current_time():
local_time = time.ctime()
return local_time
def calculate(message):
message = message.split()
i = 0
request_d = {}
for req in message:
request_d[i] = req
i = i + 1
for key,value in request_d.items():
if value == '+':
return int(request_d[key - 1]) + int(request_d[key + 1])
if value == '-':
return int(request_d[key - 1]) - int(request_d[key + 1])
if value == '*':
return int(request_d[key - 1]) * int(request_d[key + 1])
if value == '/':
return int(request_d[key - 1]) / int(request_d[key + 1])
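# Example: calculate("12 + 7") returns 19; the operands and the operator must be separated
# by spaces because the expression is tokenised with str.split().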
def person_name(text):
name = text.split()
for i in range(0, len(name)):
if i + 3 <= len(name)-1 and name[i].lower() == 'who' and name[i+1].lower() == 'is':
return name[i+2]+ ' '+ name[i+3]
def remainder():
Remainder_message = record_audio("Enter the remainder message:")
time = str(input("Enter the timing in format HH:MM"))
date = str(input("Enter the remainder date in format DD/MM/YYYY"))
time = time.split(":")
date = date.split("/")
timings = str(input("Enter AM or PM"))
timings = timings.lower()
alarmHour = int(time[0])
alarmMinute = int(time[1])
rem_date = int(date[0])
rem_month = int(date[1])
rem_year = int(date[2])
if timings == "pm":
alarmHour = alarmHour + 12
while True:
if alarmHour == datetime.datetime.now().hour and alarmMinute == datetime.datetime.now().minute and rem_date == datetime.datetime.now().day and rem_month == datetime.datetime.now().month and rem_year == datetime.datetime.now().year:
toaster = win10toast.ToastNotifier()
notification_message = toaster.show_toast("Pycharm", Remainder_message, duration=10)
return notification_message
def news_scrap():
url = 'https://www.indiatoday.in/top-stories'
article = Article(url)
article.download()
article.parse()
nltk.download('punkt')
article.nlp()
return article.text
def sport_score():
import sports
matches = sports.all_matches()
match_invoked = record_audio("Enter the game you want to search")
if match_invoked == 'Cricket':
cricket = matches['cricket']
elif match_invoked == 'Football':
cricket = matches['football']
else:
cricket = "no matches found"
return cricket
def trans():
trans = Translator()
text = record_audio("Specify the sentence or word to be translated:")
source = record_audio("From Languages:")
source = source.lower()
source = source[0:2]
desti = record_audio("To Languages:")
desti = desti.lower()
desti = desti[0:2]
t = trans.translate(
text, src=source, dest=desti
)
return t.text
def scrap():
search = record_audio("Enter the word")
url = f"https://en.wikipedia.org/wiki/{search}"
r = requests.get(url)
soup = BeautifulSoup(r.text, "html.parser")
text = ""
for paragraph in soup.find_all('p'):
text += paragraph.text
text = re.sub(r'\[[0-9]*\]', ' ', text)
text = re.sub(r'\s+', ' ', text)
text = re.sub(r'\d', ' ', text)
text = re.sub(r'\s+', ' ', text)
sentences = nltk.sent_tokenize(text)
return (sentences[0],sentences[1])
def alarm():
time = record_audio("Enter the Time in the format HH:MM")
time = time.split(":")
alarmHour = int(time[0])
alarmMinute = int(time[1])
timings_module = str(input("Mention PM or AM"))
timings_module = timings_module.lower()
if timings_module == "pm":
alarmHour = alarmHour + 12
while True:
if alarmHour == datetime.datetime.now().hour and alarmMinute == datetime.datetime.now().minute:
from playsound import playsound
alarm = playsound('C:/Users/Anandatirtha/PycharmProjects/Chat_ananda/the-purge-siren-ringtone.mp3')
return alarm
def melody():
from playsound import playsound
melody = playsound('C:/Users/Anandatirtha/PycharmProjects/Chat_ananda/nature-alarm-sounds.mp3')
return melody
def weather_manager():
place = record_audio("Enter the name of place")
search = f"Weather in {place}"
url = f"https://www.google.com/search?&q={search}"
r = requests.get(url)
soup = BeautifulSoup(r.text, "html.parser")
update = soup.find("div", class_="BNeawe").text
weather_report = "The current temperature in {0} is {1}".format(place, update)
return weather_report
responses = {
"Hey Alex": ["Your bot is activating..."," Bot is Launcing 3 2 1"],
"Good Morning": ["Good Morning have a great day", "great day ahead", "have a wonderful day", "Good Morning"],
"Hi": ["Hi", "Hello", "Hola", "Hi there", "what's special today"],
"Default": ["I can't get you", "sorry one more time", "Sorry! again"],
"Who Created You": ["I was developed by Anandatirtha", "By Anandatirtha", "I was developed by Anandatirtha as a demo bot"],
"What Is Your Name": ["My name is {0}".format(bot_name), "Call me {0}".format(bot_name)],
"Good Afternoon": ["Good Afternoon", "Good Afternoon after your great meal", "Good Afternoon don't forget to check notifications"],
"Good Night": ["Good Night", "Good Night !! Sweet dreams", "Good Night we will meet Next day"],
"What Is Today Date": [date_and_time()],
"What Is The Month": [month()],
"What Is The Time Now": [ time.ctime()],
"Thank You": ["Welcome", "It's nice to hear from you"],
"When Is Your Birthday": ["It's on June 2nd 2020", "It's on June 2nd 2020 at Rag labs"],
"Happy Birthday Rag": ["Thank you for your wishes","Thak you so much for thinking of me", "Thanks for making me feel special on my birthday",
"I can't tell you how I enjoyed hearing from you"],
"I Feel Stressed": ["Here are some tips to get rid of stress:\n 1) Avoid Caffine and Alcohol \n 2) Get more sleep \n 3)Talk to someone who cares you",
"Here are few tips to get rid of stress:\n 1) Listen some melody songs \n 2) Exercise regularly \n 3) Get enough sleep and rest",
"Follow these tips:\n 1) Make time for hobbies\n 2) Avoid using Mobile Phone \n 3) Get advise from mental health professional\n "
"4) Be positive"],
"Feels Stressed": ["Here are some tips to get rid of stress:\n 1) Avoid Caffine and Alcohol \n 2) Get more sleep \n 3)Talk to someone who cares you",
"Here are few tips to get rid of stress:\n 1) Listen some melody songs \n 2) Exercise regularly \n 3) Get enough sleep and rest",
"Follow these tips:\n 1) Make time for hobbies\n 2) Avoid using Mobile Phone \n 3) Get advise from mental health professional\n "
"4) Be positive"],
"How To Relieve Stress": ["Here are some tips to get rid of stress:\n 1) Avoid Caffine and Alcohol \n 2) Get more sleep \n 3)Talk to someone who cares you",
"Here are few tips to get rid of stress:\n 1) Listen some melody songs \n 2) Exercise regularly \n 3) Get enough sleep and rest",
"Follow these tips:\n 1) Make time for hobbies\n 2) Avoid using Mobile Phone \n 3) Get advise from mental health professional\n "
"4) Be positive"],
"I Feel Bored": ["Here Some Melody songs", "I tired to play music but vain", "Sleep well"],
# Medical field questions
"Cold": ["The common cold is medically referred to as a viral upper respiratory tract infection. "
"Symptoms of the common cold may include cough, sore throat, low-grade fever, nasal congestion, runny nose, and sneezing."],
"I Have Cold": ["Sad to har from you", "Please, take rest from you", "Properly take medicines",
"Consult doctor before it becomes complicated"],
"Symptoms For Cold": ["Here are results \n 1)Runny nose \n 2)Sore throat \n 3)Cough \n 4)Congestion \n 5)Body Achnes \n 6)Sneezing \n 7) Fever"],
"How To Prevent From Cold": ["Here are some Prevention methods \n 1. Wash your hands properly \n 2. Disinfect your stuff \n 3. Avoid touching your eyes,nose and mouth \n 4. Stay away"],
"Symptoms For Fever": ["1)Sweating 2)Headaches 3)Muscle aches 4) Loss of appetite 5)Dehydration"],
"Symptoms For Throat Pain": ["1) Scratchy sensation in the throat \n 2)Difficulty in Swallowing \n 3)Sore"],
"Symptoms For Acidity": ["1)Bloating \n 2) Burping \n 3)Dry Cough \n 4)Sore throat"],
#Political questions
"The 11Th President Of India": ["<NAME>"],
"Member Of Rajya Sabha": ["Selected by elected members of Legislative Assembly"],
"Current Prime Minister of India":["<NAME>"],
"Chief Minister Of Andhra Pradesh": ["<NAME>"],
"Chief Minister Of Arunachal Pradesh": ["<NAME>"],
"Chief Minister Of Assam": ["<NAME>"],
"Chief Minister Of Bihar": ["<NAME>"],
"Chief Minister Of Chhattisgarh": ["<NAME>"],
"Chief Minister Of Delhi": ["<NAME>"],
"Chief Minister Of Goa": ["<NAME>"],
"Chief Minister Of Gujarat": ["<NAME>"],
"Chief Minister Of Haryana": ["<NAME>"],
"Chief Minister Of Himachal Pradesh": ["<NAME>"],
"Chief Minister Of Jammu and Kashmir": ["President's rule"],
"Chief Minister Of Jharkhand": ["<NAME>"],
"Chief Minister Of Karnataka": ["<NAME>"],
"Chief Minister Of Kerala": ["<NAME>"],
"Chief Minister Of Madhya Pradesh": ["<NAME>"],
"Chief Minister Of Maharashtra": ["<NAME>"],
"Chief Minister Of Manipur": ["<NAME>"],
"Chief Minister Of Meghalaya": ["<NAME>"],
"Chief Minister Of Mizoram": ["Zoramthanga"],
"Chief Minister Of Nagaland": ["<NAME>"],
"Chief Minister Of Odisha": ["<NAME>"],
"Chief Minister Of Puducherry": ["<NAME>"],
"Chief Minister Of Punjab": ["<NAME>"],
"Chief Minister Of Rajasthan": ["<NAME>"],
"Chief Minister Of Sikkim": ["<NAME>"],
"Chief Minister Of Tamil Nadu": ["<NAME>"],
"Chief Minister Of Telangana": ["<NAME>"],
"Chief Minister Of Tripura": ["<NAME>"],
"Chief Minister Of Uttar Pradesh": ["<NAME>"],
"Chief Minister Of Uttarakhand": ["<NAME>"],
"Chief Minister Of West Bengal": ["<NAME>"],
"Defence Minster Of India": ["<NAME>"],
"Ministry Of Home Affairs": ["<NAME>"],
#capital of States in India
"Capital Of Tripura": ["Agartala"],
"Capital Of Rajasthan": ["Jaipur"],
"Capital Of Sikkim": ["Gangtok"],
"Capital Of Arunachal Pradesh": ["Itanagar"],
"Capital Of Maharasthtra": ["Mumbai"],
"Capital Of Mizoram": ["Aizawl"],
"Capital Of Chhattisgarh": ["Raipur"],
"Capital Of Telangana": [" Hyderabad"],
"Capital Of Assam": ["Dispur"],
"Capital Of Uttar Pradesh": ["Lucknow"],
"Capital Of Himachal Pradesh": ["Shimla"],
"Capital Of Gujarat": ["Gandhinagar"],
"Capital Of Bihar": ["Patna"],
"Capital Of Haryana": ["Chandigarh"],
"Capital Of Jammu & Kashmir": [" Srinagar & Jammu"],
"Capital Of Uttaranchal": ["Dehradun"],
"Capital Of Nagaland": ["Kohima"],
"Capital Of Tamil Nadu": ["Chennai"],
"Capital Of Meghalaya": ["Shillong"],
#national games
"What Is The National Game Of Bangladesh": ["Kabaddi"],
"What Is The National Game Of Argentina": ["Pato"],
"What Is The National Game Of United States": ["Baseball"],
"What Is The National Game Of Afghanistan": ["Buzkashi"],
"What Is The National Game Of Bhutan": [" Archery"],
"What Is The National Game Of Sri Lanka": ["Volley ball"],
"What Is The National Game Of Turkey": ["Oil Wrestling"],
"What Is The National Game Of India": [" Field Hockey"],
"What Is The National Game Of England": ["Cricket"],
"What Is The National Game Of Scotland": ["Golf"],
"What Is The National Game Of Iran": ["Wrestling"],
"What Is The National Game Of Hungary": [" Water Polo"],
"What Is The National Game Of Cuba": ["Baseball"],
"What Is The National Game Of Pakistan": ["Field Hockey"],
"What Is The National Game Of Brazil": ["Football"],
"What Is The National Game Of Russia": ["Bandy"],
"What Is The National Game Of Canada in Summer ": ["Lacrosse"],
"What Is The National Game Of Canada in Winter": ["Ice Hockey"],
"What Is The National Game Of Spain": ["Bull Fighting"],
}
def record_audio(ask=False):
r = sr.Recognizer()
with sr.Microphone() as source:
if ask:
alex_speak(ask)
audio = r.listen(source)
data = ''
try:
data = r.recognize_google(audio)
except sr.UnknownValueError:
alex_speak("Error")
except sr.RequestError:
alex_speak("Error 1")
return data
def alex_speak(audio_string):
tts = gTTS(text=audio_string, lang='en')
r = random.randint(1, 10000000)
audio_file = 'audio-' + str(r) + '.mp3'
tts.save(audio_file)
print(audio_string)
playsound.playsound(audio_file)
os.remove(audio_file)
alex_speak("What can I do for you")
while True:
message = record_audio()
send_message(message.title()) | 2.90625 | 3 |
script/decawave_driver_shell.py | horverno/ros_decawave | 3 | 12791861 | #!/usr/bin/env python
import rospy
import tf
import time
import serial
import struct
from geometry_msgs.msg import PointStamped
from ros_decawave.msg import Tag, Anchor, AnchorArray, Acc
class DecawaveDriver(object):
""" docstring for DecawaveDriver """
def __init__(self):
rospy.init_node('decawave_driver', anonymous=False)
# Getting Serial Parameters
self.port_ = rospy.get_param('port', '/dev/ttyACM0')
self.baudrate_ = int(rospy.get_param('baudrate', '115200'))
self.tf_publisher_ = rospy.get_param('tf_publisher', 'True')
self.rate_ = int(rospy.get_param('rate', '10'))
# Initiate Serial
self.ser = serial.Serial(self.port_, self.baudrate_, timeout=0.1)
rospy.loginfo("\33[96mConnected to %s at %i\33[0m", self.ser.portstr, self.baudrate_)
self.get_uart_mode()
self.switch_uart_mode()
#self.get_tag_status()
#self.get_tag_version()
self.anchors = AnchorArray()
self.anchors.anchors = []
self.tag = Tag()
self.accel = Acc()
def get_uart_mode(self):
""" Check UART Mode Used """
rospy.loginfo("\33[96mChecking which UART mode is the gateway...\33[0m")
self.mode_ = 'UNKNOWN'
self.ser.flushInput()
self.ser.write(b'\r') # Test Mode
time.sleep(0.1)
while(self.ser.inWaiting() == 0):
pass
cc = self.ser.readline()
if cc == '@\x01\x01': # GENERIC MODE
rospy.loginfo("\33[96mDevice is on GENERIC MODE! It must to be changed to SHELL MODE!\33[0m")
self.mode_ = "GENERIC"
else: # SHELL MODE
rospy.loginfo("\33[96mDevice is on SHELL MODE! Ok!\33[0m")
self.mode_ = "SHELL"
return self.mode_
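# A bare carriage return answered with the byte sequence '@\x01\x01' is taken to mean the
# gateway is still in GENERIC mode; any other reply is treated as SHELL mode.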
def switch_uart_mode(self):
self.ser.flushInput()
if self.mode_ == "GENERIC":
rospy.loginfo("\33[96mChanging UART mode to SHELL MODE...\33[0m")
self.ser.write(b'\r\r') # Go to Shell Mode
while(self.ser.inWaiting()==0):
pass
time.sleep(1.0)
self.ser.flushInput()
rospy.loginfo("\33[96m%s\33[0m", self.ser.readline().replace('\n', ''))
elif self.mode_ == "UNKNOWN":
rospy.logerr("%s", "Unknown Mode Detected! Please reset the device and try again!")
def get_tag_version(self):
self.ser.flushInput()
self.ser.write(b'\x15\x00') # Status
while(self.ser.inWaiting() < 21):
pass
version = self.ser.read(21)
data_ = struct.unpack('<BBBBBLBBLBBL', bytearray(version))
rospy.loginfo("\33[96m--------------------------------\33[0m")
rospy.loginfo("\33[96mFirmware Version:0x"+format(data_[5], '04X')+"\33[0m")
rospy.loginfo("\33[96mConfiguration Version:0x"+format(data_[8], '04X')+"\33[0m")
rospy.loginfo("\33[96mHardware Version:0x"+format(data_[11], '04X')+"\33[0m")
rospy.loginfo("\33[96m--------------------------------\33[0m")
#def get_tag_status(self):
# self.ser.flushInput()
# self.ser.write(b'\x32\x00') # Status
# while(self.ser.inWaiting()==0):
# pass
# status = self.ser.readline()
# data_ = struct.unpack('<BBBBBB', bytearray(status))
# if data_[0] != 64 and data_[2] != 0:
# rospy.logwarn("Get Status Failed! Packet does not match!")
# print("%s", data_)
# if data_[5] == 3:
# rospy.loginfo("\33[96mTag is CONNECTED to a UWB network and LOCATION data are READY!\33[0m")
# elif data_[5] == 2:
# rospy.logwarn("Tag is CONNECTED to a UWB network but LOCATION data are NOT READY!")
# elif data_[5] == 1:
# rospy.logwarn("Tag is NOT CONNECTED to a UWB network but LOCATION data are READY!")
# elif data_[5] == 0:
# rospy.logwarn("Tag is NOT CONNECTED to a UWB network and LOCATION data are NOT READY!")
def get_tag_acc(self):
""" Read Acc Value: The values are raw values. So to convert them to g you first have to divide the
values by 2^6 ( as it is shifted) and then multiply it into 0.004 (assuming you are using the
+-2g scale). With regards to the getting the accelerometer readings to the UART, I have written
specific functions to read the data . I could put the github link up if you want."""
self.ser.flushInput()
self.ser.write(b'av\r') # Test Mode
while(self.ser.inWaiting() == 0):
pass
cc = ''
t = rospy.Time.now()
while not 'acc' in cc:
cc = self.ser.readline()
if rospy.Time.now() - t > rospy.Duration(0.5):
rospy.logwarn("Could not get accel data!")
cc = cc.replace('\r\n', '').replace('acc: ', '').split(',')
if len(cc) == 3:
self.accel.x = float(int(cc[0].replace('x = ', ''))>>6) * 0.04
self.accel.y = float(int(cc[1].replace('y = ', ''))>>6) * 0.04
self.accel.z = float(int(cc[2].replace('z = ', ''))>>6) * 0.04
self.accel.header.frame_id = 'tag'
self.accel.header.stamp = rospy.Time.now()
def get_tag_location(self):
self.ser.flushInput()
self.ser.write(b'lec\r') # Test Mode
while(self.ser.inWaiting() == 0):
pass
cc = ''
t = rospy.Time.now()
while not 'DIST' in cc:
cc = self.ser.readline()
print (cc)
if rospy.Time.now() - t > rospy.Duration(0.5):
rospy.logwarn("Could not get tag data!")
self.ser.flushInput()
self.ser.write(b'\r') # Test Mode
#cc = ''
#t = rospy.Time.now()
#while not 'acc' in cc:
# cc = self.ser.readline()
# if rospy.Time.now() - t > rospy.Duration(0.5):
# rospy.logwarn("Could not get accel data!")
#cc = cc.replace('\r\n', '').replace('acc: ', '').split(',')
#if len(cc) == 3:
# self.accel.x = float(int(cc[0].replace('x = ', ''))/64.0) * 0.04
# self.accel.y = float(int(cc[1].replace('y = ', ''))/64.0) * 0.04
# self.accel.z = float(int(cc[2].replace('z = ', ''))/64.0) * 0.04
# self.accel.header.frame_id = 'tag'
# self.accel.header.stamp = rospy.Time.now()
def tf_callback(self, timer):
if self.tf_publisher_ == 'True':
self.br.sendTransform((self.tag.x, self.tag.y, self.tag.z),
tf.transformations.quaternion_from_euler(0, 0, 0),
rospy.Time.now(),
"tag",
"world")
for anchor in self.anchors.anchors:
self.br.sendTransform((anchor.x, anchor.y, anchor.z),
tf.transformations.quaternion_from_euler(0, 0, 0),
rospy.Time.now(),
anchor.header.frame_id,
"world")
def run(self):
self.rate = rospy.Rate(self.rate_)
rospy.loginfo("\33[96mInitiating Driver...\33[0m")
self.tag_pub_ = rospy.Publisher('pose', Tag, queue_size=1)
self.anchors_pub_ = rospy.Publisher('status', AnchorArray, queue_size=1)
self.acc_pub_ = rospy.Publisher('accelerometer', Acc, queue_size=1)
self.timer = rospy.Timer(rospy.Duration(0.2), self.tf_callback)
self.br = tf.TransformBroadcaster()
while not rospy.is_shutdown():
self.get_tag_acc()
self.acc_pub_.publish(self.accel)
#self.get_tag_location()
#self.tag.header.stamp = rospy.Time.now()
#self.tag_pub_.publish(self.tag)
#self.anchors.header.stamp = rospy.Time.now()
#self.anchors_pub_.publish(self.anchors)
self.rate.sleep()
# Main function
if __name__ == '__main__':
try:
dd = DecawaveDriver()
dd.run()
except rospy.ROSInterruptException:
rospy.loginfo("[Decawave Driver]: Closed!")
| 2.296875 | 2 |
keep_alive.py | DHRUV-CODER/Captcha-Image-Api | 5 | 12791862 | <gh_stars>1-10
from PIL import Image, ImageFont, ImageDraw
from PIL import Image
from io import BytesIO
from flask import Flask, jsonify, render_template, send_file, abort, redirect, url_for
from threading import Thread
import random
import string
app = Flask('')
@app.route('/')
def sup():
return redirect(url_for('give_the_c'))
s1 = string.ascii_letters
s3 = string.digits
s4 = string.hexdigits
s = []
s.extend(list(s1))
# s.extend(list(s2))
s.extend(list(s3))
s.extend(list(s4))
randm1 = "".join(random.sample(s, 15))
randm2 = "".join(random.sample(s, 38))
@app.route("/gimme/some/captcha")
def give_the_c():
result_random_Stuff = "".join(random.sample(s, 6))
result = {
"asked_query": result_random_Stuff,
"answer_to_captcha": result_random_Stuff,
"img_url":
f"https://Captcha-Image-Api.dhruvnation1.repl.co/captchame/{randm1}{result_random_Stuff}{randm2}",
"font": "./assets/Font/arial.ttf",
"credits": "© Dhruv"
}
return jsonify(result)
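# Illustrative response shape: {"asked_query": "<6 random chars>", "answer_to_captcha": "<same 6 chars>",
# "img_url": "https://.../captchame/<randm1><6 chars><randm2>", "font": "./assets/Font/arial.ttf", "credits": "© Dhruv"}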
@app.route(f'/captchame/{randm1}<string:a>{randm2}')
def imagemal(a):
image = Image.open('./assets/Images/captchimage.jpg')
draw = ImageDraw.Draw(image)
font = ImageFont.truetype("./assets/Font/arial.ttf", 35)
points = 86, 29
text = a
draw.text(points, text, "black", font=font)
image.save('./assets/Images/resulted_captcha.jpg')
image_file = './assets/Images/resulted_captcha.jpg'
try:
return send_file(image_file)
except:
abort(404)
def run():
app.run(host="0.0.0.0", port=8080)
def keep_alive():
server = Thread(target=run)
server.start()
| 2.6875 | 3 |
snowddl/resolver/stage_file.py | littleK0i/SnowDDL | 21 | 12791863 | <reponame>littleK0i/SnowDDL<gh_stars>10-100
from io import BytesIO
from hashlib import md5
from pathlib import Path
from snowddl.blueprint import StageBlueprint, StageFileBlueprint
from snowddl.error import SnowDDLExecuteError
from snowddl.resolver.abc_resolver import AbstractResolver, ResolveResult, ObjectType
class StageFileResolver(AbstractResolver):
def get_object_type(self) -> ObjectType:
return ObjectType.STAGE_FILE
def get_existing_objects(self):
existing_objects = {}
for stage_bp in self.config.get_blueprints_by_type(StageBlueprint).values():
if not stage_bp.upload_stage_files:
continue
try:
cur = self.engine.execute_meta("LIST @{stage_name:i}", {
"stage_name": stage_bp.full_name,
})
except SnowDDLExecuteError as e:
# Stage does not exist or not authorized
# Skip this error during planning
if e.snow_exc.errno == 2003:
continue
else:
raise
all_files = {}
all_hashes = {}
for r in cur:
path = Path(r['name'])
if path.suffix == '.md5':
all_hashes[path.with_suffix('').with_suffix('')] = path.suffixes[-2].lstrip('.')
else:
all_files[path] = True
for path in all_files:
# Snowflake LIST commands adds stage name implicitly, which should be removed
stage_path = f"/{path.relative_to(path.parts[0])}"
full_name=f"{stage_bp.full_name}({stage_path})"
# Snowflake LIST commands provides "md5" and "size", but it is not reliable due to encryption
existing_objects[full_name] = {
"stage_name": stage_bp.full_name,
"stage_path": stage_path,
"original_md5": all_hashes.get(path, None),
}
return existing_objects
def get_blueprints(self):
return self.config.get_blueprints_by_type(StageFileBlueprint)
def create_object(self, bp: StageFileBlueprint):
self._upload_file(bp)
self._upload_md5_marker(bp)
return ResolveResult.CREATE
def compare_object(self, bp: StageFileBlueprint, row: dict):
if row['original_md5'] == self._md5_file(bp.local_path):
return ResolveResult.NOCHANGE
self._upload_file(bp)
self._upload_md5_marker(bp)
return ResolveResult.REPLACE
def drop_object(self, row: dict):
# One call deletes original file and MD5 marker in one go
self.engine.execute_safe_ddl("REMOVE @{stage_name:i}{stage_path:r}", {
"stage_name": row['stage_name'],
"stage_path": row['stage_path'],
})
return ResolveResult.DROP
def _upload_file(self, bp: StageFileBlueprint):
self.engine.execute_safe_ddl("PUT {local_path} @{stage_name:i}{stage_target:r} PARALLEL=1 OVERWRITE=TRUE AUTO_COMPRESS=FALSE", {
"local_path": f"file://{bp.local_path}",
"stage_name": bp.stage_name,
"stage_target": Path(bp.stage_path).parent,
})
def _upload_md5_marker(self, bp: StageFileBlueprint):
# Placeholder path for PUT command, directory does not matter
# Actual contents of marker pseudo-file is empty and come from zero-length BytesIO in file_stream
md5_marker_path = Path(bp.local_path).name + f".{self._md5_file(bp.local_path)}.md5"
self.engine.execute_safe_ddl("PUT {local_path} @{stage_name:i}{stage_target:r} PARALLEL=1 OVERWRITE=TRUE AUTO_COMPRESS=FALSE", {
"local_path": f"file://{md5_marker_path}",
"stage_name": bp.stage_name,
"stage_target": Path(bp.stage_path).parent,
}, file_stream=BytesIO())
def _md5_file(self, local_path: str):
hash_md5 = md5()
with open(local_path, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def destroy(self):
# No need to delete stage files explicitly, files are destroyed automatically when stage is gone
pass
| 2.140625 | 2 |
lab3/es3/to_bike_webservice.py | haraldmeister/Programming_for_IoT_applications | 0 | 12791864 | <filename>lab3/es3/to_bike_webservice.py
import cherrypy
import json
import requests
class BikeSharing():
exposed=True
@cherrypy.tools.json_out()
def GET(self,*uri,**params):
if len(uri)==0:
self.json_data = requests.get("https://api.citybik.es/v2/networks/to-bike").json()
return json.loads(json.dumps(self.json_data,default=lambda x: x.__dict__))
if uri[0]=="order_slots":
self.json_data = requests.get("https://api.citybik.es/v2/networks/to-bike").json()
self.json_out=[]
if "N" in params:
self.N=int(params["N"])
else:
self.N=10
if "order" in params:
if params["order"]=="ascend":
self.json_data['network']['stations'] = sorted(self.json_data['network']['stations'], key=lambda k: int(k.get('empty_slots', 0)), reverse=False)
if params["order"]=="descend":
self.json_data['network']['stations'] = sorted(self.json_data["network"]["stations"], key=lambda k: int(k.get('empty_slots', 0)), reverse=True)
else:
self.json_data['network']['stations'] = sorted(self.json_data["network"]["stations"], key=lambda k: int(k.get('empty_slots', 0)), reverse=True)
for i in range(0,self.N):
self.json_out.append(self.json_data["network"]["stations"][i])
return json.loads(json.dumps(self.json_out,default=lambda x: x.__dict__))
if uri[0]=="order_bikes":
self.json_data = requests.get("https://api.citybik.es/v2/networks/to-bike").json()
self.json_out=[]
if "N" in params:
self.N=int(params["N"])
else:
self.N=10
if "order" in params:
if params["order"]=="ascend":
self.json_data['network']['stations'] = sorted(self.json_data['network']['stations'], key=lambda k: int(k.get('free_bikes', 0)), reverse=False)
if params["order"]=="descend":
self.json_data['network']['stations'] = sorted(self.json_data['network']['stations'], key=lambda k: int(k.get('free_bikes', 0)), reverse=True)
else:
self.json_data['network']['stations'] = sorted(self.json_data['network']['stations'], key=lambda k: int(k.get('free_bikes', 0)), reverse=True)
for i in range(0,self.N):
self.json_out.append(self.json_data['network']['stations'][i])
return json.loads(json.dumps(self.json_out,default=lambda x: x.__dict__))
if uri[0]=="count_bikes_slots":
self.json_data = requests.get("https://api.citybik.es/v2/networks/to-bike").json()
self.bikes=0
self.slots=0
if "lat" and "lon" in params:
self.lat=float(params["lat"])
self.lon=float(params["lon"])
else:
return "District number not set"
for i in range(0,len(self.json_data["network"]["stations"])):
if ((float(self.json_data["network"]["stations"][i]["latitude"])<self.lat+0.005 and
float(self.json_data["network"]["stations"][i]["latitude"])>self.lat-0.005) and
(float(self.json_data["network"]["stations"][i]["longitude"])<self.lon+0.01 and
float(self.json_data["network"]["stations"][i]["longitude"])>self.lon-0.01)):
self.bikes+=int(self.json_data["network"]["stations"][i]["free_bikes"])
self.slots+=int(self.json_data["network"]["stations"][i]["empty_slots"])
self.json_out={"latitude":float(params["lat"]),"longitude":float(params["lon"]),"bikes":self.bikes,"slots":self.slots}
return json.loads(json.dumps(self.json_out,default=lambda x: x.__dict__))
if __name__ == '__main__':
conf = {
'/': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
'tools.sessions.on': True
}
}
cherrypy.tree.mount(BikeSharing(), '/', conf)
cherrypy.config.update({'server.socket_host': '0.0.0.0'})
cherrypy.config.update({'server.socket_port': 9090})
cherrypy.engine.start()
cherrypy.engine.block() | 2.953125 | 3 |
Modulo 1/ex014.py | Werberty/Curso-em-Video-Python3 | 1 | 12791865 | graus = float(input('Informe a temperatura em °C: '))
coversao = graus * 1.8 + 32
print(f'A temperatura de {graus:.1f}°C corresponde a {coversao:.1f}°F!') | 3.96875 | 4 |
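# Worked example of the same formula: 25.0 °C -> 25.0 * 1.8 + 32 = 77.0 °F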
CameraTest.py | StanislavEng/Autonomous_Rover | 0 | 12791866 | <gh_stars>0
from picamera import PiCamera
from time import sleep
#camera = picamera.PiCamera()
camera = PiCamera()
#camera.resolution = (1920, 1080)
#camera.capture('Test.jpg')
camera.start_preview()
# if you're feeling feisty, loop the capture instead
#for i in range(5):
sleep(15)
# camera.capture('/home/pi/capture%s.jpg' %i)
camera.capture('/home/pi/capture1.jpg')
# camera.start_recording('/home/pi/video.h264')
#camera.wait_recording(10)
#camera.stop_recording()
camera.stop_preview()
| 2.453125 | 2 |
notification/admin.py | anyidea/django-user-notification | 2 | 12791867 | from django.contrib import admin
from . import models
from .models import Notification
from django.contrib.contenttypes.admin import GenericTabularInline
# Register your models here.
class NotificationAdmin(GenericTabularInline):
model = models.Notification
extra = 0
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related("to")
@admin.register(models.Template)
class TemplateAdmin(admin.ModelAdmin):
list_display = ("name", "description", "code", "title", "text", "kwargs")
list_filter = ("name", "code")
ordering = ("name",)
@admin.register(models.DingDingMessage)
class DingDingMessageAdmin(admin.ModelAdmin):
list_display = [
"title",
"content",
"at_mobiles",
"is_at_all",
"extra",
"created_at",
]
list_filter = ("title", "created_at")
inlines = (NotificationAdmin,)
ordering = ("-id",)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related("notify")
@admin.register(models.EmailMessage)
class EmailMessageAdmin(admin.ModelAdmin):
list_display = [
"subject",
"sender",
"receivers",
"cc",
"content_subtype",
"content",
"created_at",
]
list_filter = ("subject", "content_subtype", "created_at")
inlines = (NotificationAdmin,)
ordering = ("-id",)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related("notify")
@admin.register(models.WebsocketMessage)
class WebsocketMessageAdmin(admin.ModelAdmin):
list_display = ["title", "content", "msgtype", "groups", "created_at"]
inlines = (NotificationAdmin,)
ordering = ("-id",)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related("notify")
| 1.929688 | 2 |
self_play.py | mojtabamozaffar/toolpath-design-rl | 1 | 12791868 | import numpy as np
import torch
import math
import ray
import copy
import networks
import global_config
def play_one_game(model, env_func, config, temperature, save=False, filename = ''):
game_history = GameHistory()
game = env_func(max_steps = config.max_moves, window_size = config.observation_shape[1])
observation = game.reset()
game_history.action_history.append(0)
game_history.observation_history.append(observation)
game_history.reward_history.append(0)
done = False
with torch.no_grad():
while (not done and len(game_history.action_history) <= config.max_moves):
root = MCTS(config).run(model, observation, game.actions,
False if temperature == 0 else True)
action = select_action(root, temperature
if len(game_history.action_history) < config.temperature_threshold else 0)
observation, reward, done, _ = game.step(action)
game_history.store_search_statistics(root, [i for i in range(config.action_space_size)])
game_history.action_history.append(action)
game_history.observation_history.append(observation)
game_history.reward_history.append(reward)
if save:
game.plot_toolpath(save = True, folder = config.logdir, filename = filename)
game.close()
return game_history
def select_action(node, temperature):
visit_counts = np.array(
[child.visit_count for child in node.children.values()]
)
actions = [action for action in node.children.keys()]
if temperature == 0:
action = actions[np.argmax(visit_counts)]
elif temperature == float("inf"):
action = np.random.choice(actions)
else:
visit_count_distribution = visit_counts ** (1 / temperature)
visit_count_distribution = visit_count_distribution / sum(
visit_count_distribution
)
action = np.random.choice(actions, p=visit_count_distribution)
return action
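
# Worked sketch of the temperature rule above (illustrative numbers, not from a real run):
# visit counts [1, 3, 6] with temperature 1.0 give sampling probabilities [0.1, 0.3, 0.6];
# temperature 0 always picks the most visited child, and temperature inf samples uniformly.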
class MCTS:
def __init__(self, config):
self.config = config
def run(self, model, observation, legal_actions, add_exploration_noise):
root = Node(0)
observation = (torch.tensor(observation).float().unsqueeze(0).to(next(model.parameters()).device))
_, reward, policy_logits, hidden_state = model.initial_inference(observation)
reward = reward.item()
root.expand(legal_actions, reward, policy_logits, hidden_state)
if add_exploration_noise:
root.add_exploration_noise(
dirichlet_alpha=self.config.root_dirichlet_alpha,
exploration_fraction=self.config.root_exploration_fraction,
)
min_max_stats = MinMaxStats()
for _ in range(self.config.num_simulations):
node = root
search_path = [node]
while node.expanded():
action, node = self.select_child(node, min_max_stats)
search_path.append(node)
parent = search_path[-2]
value, reward, policy_logits, hidden_state = model.recurrent_inference(
parent.hidden_state,
torch.tensor([[action]]).to(parent.hidden_state.device),
)
value = networks.support_to_scalar(value).item()
reward = reward.item()
node.expand(
[i for i in range(self.config.action_space_size)],
reward,
policy_logits,
hidden_state,
)
self.backpropagate(
search_path, value, min_max_stats
)
return root
def select_child(self, node, min_max_stats):
max_ucb = max(self.ucb_score(node, child, min_max_stats) for action, child in node.children.items())
action = np.random.choice([action for action, child in node.children.items() if self.ucb_score(node, child, min_max_stats) == max_ucb])
return action, node.children[action]
def ucb_score(self, parent, child, min_max_stats):
pb_c = (
math.log(
(parent.visit_count + self.config.pb_c_base + 1) / self.config.pb_c_base
)
+ self.config.pb_c_init
)
pb_c *= math.sqrt(parent.visit_count) / (child.visit_count + 1)
prior_score = pb_c * child.prior
if child.visit_count > 0:
value_score = min_max_stats.normalize(
child.reward + self.config.discount * child.value()
)
else:
value_score = 0
return prior_score + value_score
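
    # The score above is the pUCT rule used in AlphaZero/MuZero, written out: with N(s) parent
    # visits, N(s,a) child visits, prior P(s,a), and Q_norm(s,a) the min-max normalized
    # reward + discount * value of the child,
    #   ucb(s,a) = Q_norm(s,a) + P(s,a) * sqrt(N(s)) / (1 + N(s,a)) * (pb_c_init + log((N(s) + pb_c_base + 1) / pb_c_base))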
def backpropagate(self, search_path, value, min_max_stats):
for node in reversed(search_path):
node.value_sum += value #if node.to_play == to_play else -value
node.visit_count += 1
min_max_stats.update(node.reward + self.config.discount * node.value())
value = node.reward + self.config.discount * value
class Node:
def __init__(self, prior):
self.visit_count = 0
self.prior = prior
self.value_sum = 0
self.children = {}
self.hidden_state = None
self.reward = 0
def expanded(self):
return len(self.children) > 0
def value(self):
if self.visit_count == 0:
return 0
return self.value_sum / self.visit_count
def expand(self, actions, reward, policy_logits, hidden_state):
self.reward = reward
self.hidden_state = hidden_state
policy = {}
for a in actions:
try:
policy[a] = 1/sum(torch.exp(policy_logits[0] - policy_logits[0][a]))
except OverflowError:
print("Warning: prior has been approximated")
policy[a] = 0.0
for action, p in policy.items():
self.children[action] = Node(p)
def add_exploration_noise(self, dirichlet_alpha, exploration_fraction):
actions = list(self.children.keys())
noise = np.random.dirichlet([dirichlet_alpha] * len(actions))
frac = exploration_fraction
for a, n in zip(actions, noise):
self.children[a].prior = self.children[a].prior * (1 - frac) + n * frac
class GameHistory:
def __init__(self):
self.observation_history = []
self.action_history = []
self.reward_history = []
self.child_visits = []
self.root_values = []
def store_search_statistics(self, root, action_space):
if root is not None:
sum_visits = sum(child.visit_count for child in root.children.values())
self.child_visits.append([root.children[a].visit_count / sum_visits
if a in root.children else 0 for a in action_space])
self.root_values.append(root.value())
else:
self.root_values.append(None)
class MinMaxStats:
def __init__(self):
self.maximum = -float("inf")
self.minimum = float("inf")
def update(self, value):
self.maximum = max(self.maximum, value)
self.minimum = min(self.minimum, value)
def normalize(self, value):
if self.maximum > self.minimum:
return (value - self.minimum) / (self.maximum - self.minimum)
return value
if global_config.use_ray:
play_one_game = ray.remote(play_one_game) | 2.234375 | 2 |
bvc/management/commands/test_mail.py | Vayel/GUCEM-BVC | 2 | 12791869 | import io
import csv
from smtplib import SMTPException
from django.core.management.base import BaseCommand
from django.core.mail import EmailMessage
from django.conf import settings
from bvc import utils
class Command(BaseCommand):
def handle(self, *args, **options):
csvfile = io.StringIO()
writer = csv.writer(csvfile)
writer.writerow(['Col A', 'Col B',])
email = EmailMessage(
utils.format_mail_subject("Démarrage de l'application - mail test"),
"Test de l'envoi des mails depuis l'application BVC.",
settings.EMAIL_HOST_USER,
[settings.EMAIL_HOST_USER],
[],
)
email.attach('test.csv', csvfile.getvalue(), 'text/csv')
if not email.send():
raise SMTPException()
| 2.28125 | 2 |
workspace/extract_psf.py | thhsieh00/utoolbox-core | 3 | 12791870 | <filename>workspace/extract_psf.py
import logging
import os
import imageio
import numpy as np
from vispy import app, scene
from vispy.color.colormap import BaseColormap, Colormap, ColorArray
from vispy.visuals.transforms import STTransform
from utoolbox.analysis.psf_average import PSFAverage
from utoolbox.data.datastore import FolderDatastore
logger = logging.getLogger(__name__)
def preview_volume(vols, shifts=None):
canvas = scene.SceneCanvas(keys="interactive")
canvas.size = 1024, 1024
canvas.show()
# create view box
view = canvas.central_widget.add_view()
# genereate colormap
"""
n_colors = 256
alphas = np.linspace(0.0, 1.0, n_colors)
color = np.c_[
alphas, alphas, alphas, alphas
]
cmap = Colormap(color)
"""
from utoolbox.data.io.amira import AmiraColormap
color = AmiraColormap("volrenGlow.am")
color = np.array(color)
color[0, :] = 0
color[:, 3] /= 100
cmap = Colormap(color)
for i, vol in enumerate(vols):
volume = scene.visuals.Volume(
vol, cmap=cmap, parent=view.scene, emulate_texture=False
)
volume.method = "translucent"
volume.transform = scene.STTransform(scale=(2, 2, 5.5))
volume.set_gl_state("translucent", depth_test=False)
if shifts:
volume.transform = scene.STTransform(translate=shifts[i])
# assign camera
camera = scene.cameras.TurntableCamera(
parent=view.scene, fov=60.0, name="Arcball", elevation=30.0
)
view.camera = camera
view.camera.flip = (False, True, True)
view.camera.reset()
# axis
axis = scene.visuals.XYZAxis(parent=view)
s = STTransform(translate=(50, 50), scale=(50, 50, 50, 1))
affine = s.as_matrix()
axis.transform = affine
# link with camera
@canvas.events.mouse_move.connect
def on_mouse_move(event):
if event.button == 1 and event.is_dragging:
axis.transform.reset()
axis.transform.rotate(camera.roll, (0, 0, 1))
axis.transform.rotate(camera.elevation, (1, 0, 0))
axis.transform.rotate(camera.azimuth, (0, 1, 0))
axis.transform.scale((50, 50, 0.001))
axis.transform.translate((50.0, 50.0))
axis.update()
# render rotation movie
"""
n_steps = 240
axis = [0, 0, 0]
logger.debug(".. rendering")
step_angle = 360.0 / n_steps
writer = imageio.get_writer("t1-head_split_rotate.mp4", fps=24)
for i in range(n_steps):
im = canvas.render()
writer.append_data(im)
view.camera.transform.rotate(step_angle, axis)
writer.close()
"""
app.run()
def main(root="fusion_psf"):
ds = FolderDatastore(root, read_func=imageio.volread, extensions=["tif"])
logger.info(f"{len(ds)} file(s) found")
for key, vol in ds.items():
logger.info(key)
dst_dir = os.path.join(root, key)
try:
os.mkdir(dst_dir)
except:
# folder exists
pass
psf_avg = PSFAverage((97, 97, 97))
psfs = psf_avg(vol, return_coords=True)
psf_average = None
for i, (sample, coord) in enumerate(psfs):
coord = [f"{c:04d}" for c in reversed(coord)]
coord = "-".join(coord)
fname = f"psf_{i:03d}_{coord}.tif"
imageio.volwrite(os.path.join(dst_dir, fname), sample)
try:
psf_average = (psf_average + sample) / 2
except TypeError:
psf_average = sample
import cupy as cp
psf_average = cp.asarray(psf_average)
from utoolbox.exposure import auto_contrast
psf_average = auto_contrast(psf_average)
psf_average = cp.asnumpy(psf_average)
preview_volume(psf_average)
break
if __name__ == "__main__":
import coloredlogs
logging.getLogger("tifffile").setLevel(logging.ERROR)
logging.getLogger("matplotlib").setLevel(logging.WARNING)
coloredlogs.install(
level="DEBUG", fmt="%(asctime)s %(levelname)s %(message)s", datefmt="%H:%M:%S"
)
from vispy import io
vol = np.load(io.load_data_file("brain/mri.npz"))["data"]
print(vol.dtype)
"""
import imageio
vol = imageio.volread('20181019_expanded_hippo/1-Pos_002_005.tif')
"""
import cupy as cp
vol = cp.asarray(vol)
from utoolbox.exposure import auto_contrast
vol = auto_contrast(vol)
vol = cp.asnumpy(vol)
vol = np.swapaxes(vol, 0, 1)
print(vol.dtype)
avg, std = vol.mean(), vol.std()
vol[vol < (avg - std)] = 0
nz, ny, nx = vol.shape
mid = ny // 2
vol1 = vol[:, :mid, :]
vol2 = vol[:, mid:, :]
preview_volume((vol1, vol2), ((0, -mid, 0), (0, mid, 0)))
    # vol = vol[:, ::2, ::2]
# preview_volume((vol, ))
# main()
| 2 | 2 |
docs/sajou_examples/markers.py | cristobaltapia/sajou | 1 | 12791871 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Example to show the new marker styles"""
import matplotlib.pyplot as plt
from sajou.plot.lines_mpl import Line2D
fig = plt.figure(figsize=(12, 3))
ax = fig.add_subplot(111)
markers = ['ap', 'an', 'psx', 'rsx', 'es', 'rex', 'rc']
for ix, mark in enumerate(markers):
marker = Line2D([ix], [0], marker=mark, fillstyle='none', color='k')
ax.add_line(marker)
ax.set_xlim(-1, len(markers))
ax.set_ylim(-1, 1)
plt.show()
| 2.8125 | 3 |
ocs_ci/ocs/ui/validation_ui.py | prsurve/ocs-ci | 0 | 12791872 | <reponame>prsurve/ocs-ci<gh_stars>0
import logging
from ocs_ci.ocs.ui.base_ui import PageNavigator
from ocs_ci.ocs.ui.views import locators
from ocs_ci.utility.utils import get_ocp_version, TimeoutSampler
from ocs_ci.framework import config
from ocs_ci.ocs import constants
logger = logging.getLogger(__name__)
class ValidationUI(PageNavigator):
"""
User Interface Validation Selenium
"""
def __init__(self, driver):
super().__init__(driver)
self.ocp_version = get_ocp_version()
self.err_list = list()
self.validation_loc = locators[self.ocp_version]["validation"]
def verify_object_service_page(self):
"""
Verify Object Service Page UI
"""
self.navigate_overview_page()
self.do_click(self.validation_loc["object_service_tab"])
platform = config.ENV_DATA.get("platform").lower()
if platform in constants.ON_PREM_PLATFORMS:
logger.info("Click on Object Service button")
self.do_click(self.validation_loc["object_service_button"])
logger.info("Click on Data Resiliency button")
self.do_click(self.validation_loc["data_resiliency_button"])
strings_object_service_tab = ["Total Reads", "Total Writes"]
self.verify_page_contain_strings(
strings_on_page=strings_object_service_tab, page_name="object_service"
)
def verify_persistent_storage_page(self):
"""
Verify Persistent Storage Page
"""
self.navigate_overview_page()
self.do_click(self.validation_loc["persistent_storage_tab"])
strings_object_service_tab = [
"IOPS",
"Latency",
"Throughput",
"Recovery",
"Utilization",
"Used Capacity Breakdown",
"Raw Capacity",
]
self.verify_page_contain_strings(
strings_on_page=strings_object_service_tab, page_name="persistent_storage"
)
def verify_ocs_operator_tabs(self):
"""
Verify OCS Operator Tabs
"""
self.navigate_installed_operators_page()
logger.info("Search OCS operator installed")
self.do_send_keys(
locator=self.validation_loc["search_ocs_installed"],
text="OpenShift Container Storage",
)
logger.info("Click on ocs operator on Installed Operators")
self.do_click(locator=self.validation_loc["ocs_operator_installed"])
logger.info("Verify Details tab on OCS operator")
strings_details_tab = ["Description", "Succeeded", "openshift-storage"]
self.verify_page_contain_strings(
strings_on_page=strings_details_tab, page_name="details_tab"
)
logger.info("Verify Subscription tab on OCS operator")
self.do_click(self.validation_loc["osc_subscription_tab"])
strings_subscription_tab = [
"Healthy",
"openshift-storage",
]
self.verify_page_contain_strings(
strings_on_page=strings_subscription_tab, page_name="subscription_tab"
)
logger.info("Verify All instances tab on OCS operator")
self.do_click(self.validation_loc["osc_all_instances_tab"])
strings_all_instances_tab = ["Phase", "Ready", "Status"]
self.verify_page_contain_strings(
strings_on_page=strings_all_instances_tab, page_name="all_instances_tab"
)
logger.info("Verify Storage Cluster tab on OCS operator")
self.do_click(self.validation_loc["osc_storage_cluster_tab"])
strings_storage_cluster_tab = ["Phase", "Ready", "Status"]
self.verify_page_contain_strings(
strings_on_page=strings_storage_cluster_tab, page_name="storage_cluster_tab"
)
logger.info("Verify Backing Store tab on OCS operator")
self.do_click(self.validation_loc["osc_backing_store_tab"])
strings_backing_store_tab = ["Phase", "Ready", "Status"]
self.verify_page_contain_strings(
strings_on_page=strings_backing_store_tab, page_name="backing_store_tab"
)
logger.info("Verify Bucket Class tab on OCS operator")
self.do_click(self.validation_loc["osc_bucket_class_tab"])
strings_bucket_class_tab = ["Phase", "Ready", "Status"]
self.verify_page_contain_strings(
strings_on_page=strings_bucket_class_tab, page_name="bucket_class_tab"
)
def verify_page_contain_strings(self, strings_on_page, page_name):
"""
Verify Page Contain Strings
Args:
strings_on_page (list): list of strings on page
page_name (str): the name of the page
"""
logger.info(f"verify {strings_on_page} exist on {page_name}")
for string in strings_on_page:
sample = TimeoutSampler(
timeout=3,
sleep=1,
func=self.check_element_text,
expected_text=string,
)
if not sample.wait_for_func_status(result=True):
self.err_list.append(f"{string} string not found on {page_name}")
def verification_ui(self):
"""
Verification UI
"""
self.verify_object_service_page()
self.verify_persistent_storage_page()
self.verify_ocs_operator_tabs()
self.take_screenshot()
for err in self.err_list:
logger.error(err)
assert len(self.err_list) == 0, f"{self.err_list}"
| 2.265625 | 2 |
headbang/motion.py | sevagh/headbang.py | 4 | 12791873 | import numpy
import sys
import scipy
from scipy.signal import find_peaks_cwt
import matplotlib.pyplot as plt
from headbang.params import DEFAULTS
from headbang.util import find_closest
openpose_install_path = "/home/sevagh/thirdparty-repos/openpose"
openpose_dir = openpose_install_path
sys.path.append(openpose_dir + "/build/python/openpose")
import pyopenpose as op
class OpenposeDetector:
undef_coord_default = numpy.nan
object_limit = 3
min_confidence = 0.5
def __init__(
self,
n_frames,
frame_duration,
keypoints=DEFAULTS["pose_keypoints"],
):
config = {}
config["logging_level"] = 3
config["net_resolution"] = "320x320"
config["model_pose"] = "BODY_25"
config["alpha_pose"] = 0.6
config["scale_gap"] = 0.3
config["scale_number"] = 1
config["render_threshold"] = 0.05
config["num_gpu_start"] = 0
config["disable_blending"] = False
config["model_folder"] = openpose_dir + "/models/"
self.opWrapper = op.WrapperPython()
self.opWrapper.configure(config)
self.opWrapper.start()
self.keypoints = [int(i) for i in keypoints.split(",")]
self.n_frames = int(n_frames)
self.all_y_coords = [OpenposeDetector.undef_coord_default] * self.n_frames
self.frame_idx = 0
self.frame_duration = frame_duration
self.total_duration = self.frame_duration * self.n_frames
print("Started OpenposeDetector for keypoints {0}".format(self.keypoints))
def detect_pose(self, image):
datum = op.Datum()
datum.cvInputData = image
ret = self.opWrapper.emplaceAndPop(op.VectorDatum([datum]))
if not ret:
raise ValueError("couldn't emplaceAndPop")
return datum.poseKeypoints, datum.cvOutputData
def process_frame(self, frame):
multiple_detected_poses, outframe = self.detect_pose(frame)
if multiple_detected_poses is not None:
poses_of_interest = []
# collect (x, y) coordinates of the head, median across the first object_limit objects
for detected_poses in multiple_detected_poses[
: OpenposeDetector.object_limit
]:
for keypoint, d in enumerate(detected_poses):
if (
keypoint in self.keypoints
and d[2] > OpenposeDetector.min_confidence
):
poses_of_interest.append((d[0], d[1]))
poses_of_interest = numpy.asarray(poses_of_interest)
median_coords = numpy.median(poses_of_interest, axis=0)
if not numpy.any(numpy.isnan(median_coords)):
median_y = median_coords[1]
y_norm = median_y / frame.shape[0]
self.all_y_coords[self.frame_idx] = y_norm
self.frame_idx += 1
return outframe
def find_peaks(self):
min_coord = numpy.nanmin(self.all_y_coords)
adjusted_y_coords = numpy.nan_to_num(self.all_y_coords, nan=min_coord)
# wavelets are good for peaks
# https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2631518/
peaks = find_peaks_cwt(adjusted_y_coords, numpy.arange(2, 4))
peaks = peaks[numpy.where(numpy.diff(peaks) > 11)[0]]
return peaks
def plot_ycoords(
self, bop_bpm_plot_history, debug_bpm=False, debug_bpm_frame_skip=30
):
plt.figure(1)
plt.title("normalized median y coordinate motion")
plt.xlabel("time (s)")
plt.ylabel("normalized y coordinate")
frame_times = numpy.arange(0.0, self.total_duration, self.frame_duration)
peaks = self.find_peaks()
y_coords = numpy.asarray(self.all_y_coords)
plt.plot(
frame_times,
y_coords,
"-D",
markevery=peaks,
mec="black",
)
if debug_bpm:
# skip every 10 frames for bpm plot
for i, bop_bpm_hist in enumerate(
bop_bpm_plot_history[:-debug_bpm_frame_skip]
):
if i % debug_bpm_frame_skip != 0:
continue
bop_times, bpm = bop_bpm_hist
x = find_closest(frame_times, bop_times)
if x.size > 2:
text_x = (
frame_times[x[-2]]
+ (frame_times[x[-1]] - frame_times[x[-2]]) / 2
)
y = y_coords[x]
text_y = max(y) + 0.03
plt.plot(frame_times[x], y, "r")
plt.text(text_x, text_y, "{0}".format(int(round(bpm))))
plt.grid()
plt.show()
def bpm_from_beats(beats):
if beats.size == 0:
return 0
m_res = scipy.stats.linregress(numpy.arange(len(beats)), beats)
beat_step = m_res.slope
return 60 / beat_step
def align_beats_motion(beats, motion, thresh):
i = 0
j = 0
aligned_beats = []
while i < len(motion) and j < len(beats):
curr_motion = motion[i]
curr_beat = beats[j]
if numpy.abs(curr_motion - curr_beat) <= thresh:
aligned_beats.append(min(curr_motion, curr_beat))
i += 1
j += 1
continue
if curr_beat < curr_motion:
# increment beats
j += 1
elif curr_beat > curr_motion:
i += 1
return aligned_beats
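
# Small worked example of the two helpers above (illustrative values, not from a real video):
# beats = numpy.array([0.5, 1.0, 1.5, 2.0]); motion = numpy.array([0.52, 1.48, 2.03])
# align_beats_motion(beats, motion, thresh=0.1) -> [0.5, 1.48, 2.0]
# bpm_from_beats(beats) -> 120.0 (a constant 0.5 s step between beats)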
| 1.875 | 2 |
Exec/testing/Thermal-RMI-implosion/movie.py | darylbond/cerberus | 5 | 12791874 | import sys
cmd_folder = "../../../vis" # nopep8
if cmd_folder not in sys.path: # nopep8
sys.path.insert(0, cmd_folder)
from tile_mov import tile_movie
from make_mov import make_all, get_particle_trajectories
import pylab as plt
import numpy as np
from mpl_toolkits.axes_grid1 import make_axes_locatable
import matplotlib.gridspec as gridspec
plt.rcParams.update({
"text.usetex": True,
"font.family": "sans-serif",
"font.sans-serif": ["Helvetica"]})
# ==============================================================================
# MAKE MOVIES
# ==============================================================================
def smooth_limits(vmin, vmax):
from scipy.signal import savgol_filter
vmin = savgol_filter(vmin, 11, 3)
vmax = savgol_filter(vmax, 11, 3)
return vmin, vmax
def get_number_density(ds, c):
x, r = ds.get("rho-%s"%c["component"])
x, m = ds.get("mass-%s"%c["component"], grid='node')
return {"x":x[0], "y":x[1], "value":r/m}
def get_D_mag(ds, c):
x, Dx = ds.get("x_D-field")
x, Dy = ds.get("y_D-field")
return {"x":x[0], "y":x[1], "value":np.sqrt(Dx**2 + Dy**2)}
def get_Bz(ds, c):
x, Bz = ds.get("z_B-field")
return {"x":x[0], "y":x[1], "value":Bz}
def plot(frame, data, output_name):
xn = data["nd-ion"]["x"][()]
yn = data["nd-ion"]["y"][()]
ni = data["nd-ion"]["value"][()]
ni_min = frame["nd-ion"]["min"]
ni_max = frame["nd-ion"]["max"]
ne = data["nd-electron"]["value"][()]
ne_min = frame["nd-electron"]["min"]
ne_max = frame["nd-electron"]["max"]
D = data["D"]["value"][()]
D_min = frame["D"]["min"]
D_max = frame["D"]["max"]
B = data["B"]["value"][()]
B_min = frame["B"]["min"]
B_max = frame["B"]["max"]
x = np.concatenate((-xn[::-1][0:-1], xn))
y = np.concatenate((-yn[::-1][0:-1], yn))
y, x = np.meshgrid(y, x)
axes = []
# join the data
nx = xn.size - 1
ny = yn.size - 1
fig = plt.figure(figsize=(3,3))
gs = gridspec.GridSpec(ncols=1, nrows=1, hspace=0.01, wspace=0.01)
ax = fig.add_subplot(gs[0,0]); axes.append(ax)
# number densities
J = np.zeros((2*nx, 2*ny))*np.nan
# J[0:nx, 0:ny] = np.rot90(ne.T,2)
J[0:nx, ny::] = np.rot90(ne)
# J[nx::, 0:ny] = np.rot90(ni.T,3)
J[nx::, ny::] = ni
vmin = min(ne_min, ni_min)
vmax = max(ne_max, ni_max)
pcm = ax.pcolormesh(x, y, J, vmin=vmin, vmax=vmax)
ax.text(0.025, 0.975, r'$n_e$', horizontalalignment='left',
verticalalignment='top', transform=ax.transAxes, fontsize=10)
ax.text(0.975, 0.975, r'$n_i$', horizontalalignment='right',
verticalalignment='top', transform=ax.transAxes, fontsize=10)
# fields
J = np.zeros((2*nx, 2*ny))*np.nan
J[0:nx, 0:ny] = np.rot90(D.T,2)
pcm = ax.pcolormesh(x, y, J, vmin=D_min, vmax=D_max)
J = np.zeros((2*nx, 2*ny))*np.nan
J[nx::, 0:ny] = np.rot90(B.T,3)
big = max(abs(B_max), abs(B_min))
pcm = ax.pcolormesh(x, y, J, vmin=-big, vmax=big, cmap="bwr")
ax.text(0.025, 0.025, r'$\left|\vec{D}\right|$', horizontalalignment='left',
verticalalignment='bottom', transform=ax.transAxes, fontsize=10)
ax.text(0.975, 0.025, r'$B_z$', horizontalalignment='right',
verticalalignment='bottom', transform=ax.transAxes, fontsize=10)
for ax in axes:
ax.set_xlim(-2, 2)
ax.set_ylim(-2, 2)
ax.set_aspect(1)
ax.axes.xaxis.set_visible(False)
ax.axes.yaxis.set_visible(False)
# fig.tight_layout()
fig.savefig(output_name, dpi=300, bbox_inches="tight")
plt.close(fig)
return
if 1:
Q = []
q = {}
q["files_dir"] = "."
q["level"] = -1
q["get"] = [
{"func":get_number_density, "tag":"nd-ion", "component":"ion"},
{"func":get_number_density, "tag":"nd-electron", "component":"electron"},
{"func":get_D_mag, "tag":"D"},
{"func":get_Bz, "tag":"B"}
]
q["plot"] = plot
q["name"] = "movie"
dt = 0.005
##
q["framerate"] = 20
q["mov_save"] = q["files_dir"] + "/mov"
q["offset"] = [0.0, 0.0]
q["xy_limits"] = [[0,0], [4,4]]
q["file_include"] = ["TRMI.plt"]
q["file_exclude"] = []
q["cores"] = 11
q["time_span"] = [] #np.arange(1.95,2+dt, dt)
q["force_data"] = False
q["force_frames"] = True
q["only_frames"] = False
q["redo_streaks"] = False
q["dpi"] = 300
q["normalize"] = "none" #{"smooth":smooth_limits}
Q.append(q)
make_all(Q)
print("DONE")
| 2.125 | 2 |
services/aws/support.py | shunyeka/autobotAI-backend | 0 | 12791875 | from autobot_helpers import boto3_helper, context_helper
from botocore.exceptions import ClientError
import traceback
class Support:
def __init__(self):
self.client = boto3_helper.get_client('support')
def refresh_checks(self):
try:
ta_checks = self.client.describe_trusted_advisor_checks(language='en')
for checks in ta_checks['checks']:
try:
self.client.refresh_trusted_advisor_check(checkId=checks['id'])
except ClientError as e:
print('Cannot refresh check: ' + checks['name'])
print("Not able to refresh the trusted adviser check: " + traceback.format_exc() +
": Check name:" +checks['name'])
continue
return {'success': True}
except BaseException as e:
err_str = traceback.format_exc()
context_helper.logger().exception("Some exception occurred while refreshing checks=%s", err_str)
return {'success': False, 'error_code': 'EXCEPTION', 'message': err_str}
def generate_report(self):
try:
ta_checks = self.client.describe_trusted_advisor_checks(language='en')
check_summary_list = {}
for checks in ta_checks['checks']:
try:
check_summary = self.client.describe_trusted_advisor_check_summaries(
checkIds=[checks['id']])['summaries'][0]
if check_summary['status'] != 'not_available':
if checks['category'] not in check_summary_list:
check_summary_list[checks['category']] = []
check_summary_list[checks['category']].append({
'name': checks['name'],
'status': check_summary['status'],
'resourcesProcessed': str(check_summary['resourcesSummary']['resourcesProcessed']),
'resourcesFlagged': str(check_summary['resourcesSummary']['resourcesFlagged']),
'resourcesSuppressed': str(check_summary['resourcesSummary']['resourcesSuppressed']),
'resourcesIgnored': str(check_summary['resourcesSummary']['resourcesIgnored']),
})
except BaseException as e:
print('Failed to get check: ' + checks['id'] + ' --- ' + checks['name'])
traceback.print_exc()
continue
for k1, v1 in check_summary_list.items():
if isinstance(v1, (dict, list)) and len(v1) != 0:
for dict_val_v1 in v1:
if dict_val_v1['status'] == 'error':
v1[v1.index(dict_val_v1)] = (dict_val_v1, 1)
elif dict_val_v1['status'] == 'warning':
v1[v1.index(dict_val_v1)] = (dict_val_v1, 2)
elif dict_val_v1['status'] == 'ok':
v1[v1.index(dict_val_v1)] = (dict_val_v1, 3)
else:
v1[v1.index(dict_val_v1)] = (dict_val_v1, 4)
v1.sort(key=lambda x: x[1])
return {'success': True, 'response': check_summary_list}
except BaseException as e:
err_str = traceback.format_exc()
context_helper.logger().exception("Some exception occurred while generating report=%s", err_str)
if 'SubscriptionRequiredException' in err_str:
return {'success': False, 'error_code': 'NO_PREMIUM_SUBSCRIPTION',
'message': "AWS Premium Support Subscription is required to generate this report."}
return {'success': False, 'error_code': 'EXCEPTION', 'message': err_str}
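
# Usage sketch (assumption: boto3_helper is configured with AWS credentials that have a
# Premium Support plan; without it generate_report returns NO_PREMIUM_SUBSCRIPTION):
# support = Support()
# support.refresh_checks()
# report = support.generate_report()
# if report['success']:
#     print(list(report['response'].keys()))  # Trusted Advisor check categories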
| 1.976563 | 2 |
kiteGetAccessToken.py | vishalr4202/ZerodhaCode | 0 | 12791876 | import logging
import kitesettings
from kiteconnect import KiteConnect
logging.basicConfig(level=logging.DEBUG)
kite = KiteConnect(kitesettings.API_KEY)
# https://kite.zerodha.com/connect/login?v=4&API_KEY=Q8JPzjkt8ftXgqvmXa
request_token = input("Request Token: ")
data = kite.generate_session(request_token, kitesettings.api_secret)
kite.set_access_token(data["access_token"])
print("====================")
print("Access Token: ", data["access_token"])
| 2.015625 | 2 |
balsa/formatter.py | jamesabel/balsa | 4 | 12791877 | from typing import Union
from datetime import datetime
from logging import Formatter, LogRecord
class BalsaFormatter(Formatter):
"""
Format time in ISO 8601
"""
def formatTime(self, record: LogRecord, datefmt: Union[str, None] = None) -> str:
assert datefmt is None # static format
time_stamp = datetime.fromtimestamp(record.created)
return time_stamp.astimezone().isoformat()
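

if __name__ == "__main__":
    # Minimal usage sketch (an assumption, not part of the original module): attach the
    # formatter to a stream handler so asctime is rendered as a timezone-aware ISO 8601 string.
    import logging

    handler = logging.StreamHandler()
    handler.setFormatter(BalsaFormatter("%(asctime)s %(levelname)s %(message)s"))
    demo_logger = logging.getLogger("balsa_formatter_demo")
    demo_logger.addHandler(handler)
    demo_logger.warning("hello")  # e.g. 2024-01-01T12:00:00.123456+00:00 WARNING hello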
| 3.296875 | 3 |
last_file.py | CarlosPetrikov/random_python_functions | 1 | 12791878 | <reponame>CarlosPetrikov/random_python_functions
import os
from datetime import datetime, timezone
# Function that will return the most recent file from a directory, filtering by extension
def last_file(path, extension):
directory = os.scandir(path)
dict_file = {}
for file in directory:
if file.name[-3:] in (extension.lower(), extension.upper()):
dict_file[datetime.fromtimestamp(file.stat().st_mtime, tz=timezone.utc)] = file.name
time_file = sorted(dict_file)
return dict_file[time_file[-1]]
#Example
a = last_file('C:/Users/user/Downloads', 'exe')
print(a)
| 3.609375 | 4 |
LeetCode/03_Hard/lc_460.py | Zubieta/CPP | 8 | 12791879 | <reponame>Zubieta/CPP<gh_stars>1-10
# 460 - LFU Cache (Hard)
# https://leetcode.com/problems/lfu-cache/
# Implement a Least Frequently Used cache. GODDAMN I almost died.
from collections import OrderedDict, defaultdict
class LFUCache(object):
def __init__(self, capacity):
"""
:type capacity: int
"""
# From key to value.
self.dic = {}
# Times a key has been used.
self.count = {}
# Keys grouped by amount of usage.
# e.g. from a key 2 (as in two times used), get the keys that have been
# used that much times.
self.reverse = defaultdict(lambda: OrderedDict())
# Capacity of the LFU.
self.cap = capacity
def get(self, key):
"""
:type key: int
:rtype: int
"""
# If the key exists. Make sure to put "is not None" otherwise a 0 Value
# will make the condition evaluate to False.
if self.dic.get(key) is not None:
# Update the amount of times key has been used.
prevCount = self.count[key]
newCount = prevCount + 1
self.count[key] = newCount
# Delete the key from the previous grouping of times used.
del self.reverse[prevCount][key]
# If that grouping is now empty, erase it too.
if len(self.reverse[prevCount]) == 0:
del self.reverse[prevCount]
# Insert key into the new grouping of times used.
self.reverse[newCount][key] = True
# Return the value associated to this key.
return self.dic[key]
# If the key doesn't exists, just return -1.
else:
return -1
def set(self, key, value):
"""
:type key: int
:type value: int
:rtype: void
"""
# Check that the value exists, so that it will be updated.
if self.dic.get(key) is not None:
# Times used previously.
prevCount = self.count[key]
# New amount of times used.
newCount = prevCount + 1
# Set the new amount.
self.count[key] = newCount
# Delete the key from the previous grouping of times used.
del self.reverse[prevCount][key]
# If that grouping is now empty, erase it too.
if len(self.reverse[prevCount]) == 0:
del self.reverse[prevCount]
# Insert key into the new grouping of times used.
self.reverse[newCount][key] = True
# Now update the value associated to this key.
self.dic[key] = value
# If the value doesn't exists...
else:
# If capacity will be exceeded, erase the currently least used one.
if len(self.dic) == self.cap and len(self.reverse) > 0:
# Because the "reverse" (from count to keys) dict groups keys
# by accessed amount, lets get the least amount of uses.
leastAmount = sorted(self.reverse.keys())[0]
# Now, because this is an OrderedDict, lets get the least freq
# used key by accessing with the leastAmount of uses value.
                leastKey = next(iter(self.reverse[leastAmount]))  # oldest key among the least used
# Delete that number from the grouping of keys used that times.
del self.reverse[leastAmount][leastKey]
# If there are no more keys for this count, delete the count.
if len(self.reverse[leastAmount]) == 0:
del self.reverse[leastAmount]
# Delete the individual amount of uses for the LFU key.
del self.count[leastKey]
# Delete the LFU key and its value.
del self.dic[leastKey]
# Now, insert the new key, with a single usage (the insertion).
if len(self.dic) + 1 <= self.cap:
self.dic[key] = value
self.count[key] = 1
self.reverse[1][key] = True
# Your LFUCache object will be instantiated and called as such:
# obj = LFUCache(capacity)
# param_1 = obj.get(key)
# obj.set(key,value) | 3.359375 | 3 |
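# Worked trace (illustrative, follows the template above; capacity 2):
# cache = LFUCache(2)
# cache.set(1, 1); cache.set(2, 2)
# cache.get(1)     # -> 1, key 1 has now been used more often than key 2
# cache.set(3, 3)  # cache full, evicts key 2 (the least frequently used)
# cache.get(2)     # -> -1
# cache.get(3)     # -> 3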
src/utils/prepare2.py | comword/TCD20-DP-DeepModel | 0 | 12791880 | <filename>src/utils/prepare2.py
import argparse
import json
from pathlib import Path
from glob import glob
class DatasetPrepare:
def __init__(self, config: argparse.Namespace):
self.config = config
def run(self, input_dir: str):
in_path = Path(input_dir)
if not in_path.exists():
raise ValueError("Input directory does not exist: " + input_dir)
if not (in_path / "types.json"):
raise ValueError(
"Not a valid dataset folder, cannot find types.json in the input directory")
with open(str(in_path / "types.json"), "r") as f:
self.cls_types = json.load(f)
for folder, cls_num in self.cls_types.items():
if folder.startswith("Unused"):
print("Skipping", folder)
continue
if not (in_path / folder).exists():
raise ValueError(
"Not a valid dataset folder, cannot find category directory", folder)
else:
print(str(in_path / folder), "mapping to class", cls_num)
files = glob(str(in_path / "**/*.mp4"), recursive=True)
with open(str(in_path / "all.csv"), "w") as f:
for video in files:
v_split = str(video).split('/')
label = self.cls_types[v_split[-2]]
f.write("%s %d\n" % ("/".join(v_split[-2:]), label))
if __name__ == '__main__':
args = argparse.ArgumentParser(description='Video prepare')
args.add_argument('-i', '--input', default=None, type=str, required=True,
help='input directory path')
args_parsed = args.parse_args()
prepare = DatasetPrepare(args_parsed)
prepare.run(args_parsed.input)
# python src/utils/prepare2.py -i data/orig-front-videos
| 2.875 | 3 |
vm_manager/constants.py | NeCTAR-RC/bumblebee | 3 | 12791881 | from novaclient.v2 import servers as nova_servers
LINUX = "linux"
SCRIPT_ERROR = 0
SCRIPT_OKAY = 1
ERROR = -1
# These are Openstack Nova server status values that the
# python client library doesn't define constants for.
ACTIVE = "ACTIVE"
BUILD = "BUILD"
REBOOT = "REBOOT"
REBUILD = "REBUILD"
RESCUE = "RESCUE"
RESIZE = "RESIZE"
SHUTDOWN = "SHUTOFF"
VERIFY_RESIZE = "VERIFY_RESIZE"
# (There are more ...)
NO_VM = VM_DELETED = "No_VM"
VM_WAITING = VM_CREATING = VM_RESIZING = "VM_Waiting"
VM_OKAY = "VM_Okay"
VM_SUPERSIZED = "VM_Supersized"
VM_SHELVED = "VM_Shelved"
VM_ERROR = "VM_Error"
VM_MISSING = "VM_Missing"
VM_SHUTDOWN = "VM_Shutdown"
ALL_VM_STATES = frozenset([NO_VM, VM_WAITING, VM_OKAY, VM_SUPERSIZED,
VM_SHELVED, VM_ERROR, VM_MISSING, VM_SHUTDOWN])
REBOOT_SOFT = nova_servers.REBOOT_SOFT
REBOOT_HARD = nova_servers.REBOOT_HARD
LAUNCH_WAIT_SECONDS = 300
REBOOT_WAIT_SECONDS = 180
REBOOT_CONFIRM_WAIT_SECONDS = 10
REBOOT_CONFIRM_RETRIES = 5
REBOOT_COMPLETE_SECONDS = 60
RESIZE_WAIT_SECONDS = 120
RESIZE_CONFIRM_WAIT_SECONDS = 240
SHELVE_WAIT_SECONDS = 180
CLOUD_INIT_FINISHED = "finished"
CLOUD_INIT_STARTED = "started"
DELETION_RETRY = 5
DELETION_TIMEOUT = 30
INSTANCE_DELETION_RETRY_WAIT_TIME = 30
INSTANCE_DELETION_RETRY_COUNT = INSTANCE_CHECK_SHUTOFF_RETRY_COUNT = 2
INSTANCE_CHECK_SHUTOFF_RETRY_WAIT_TIME = 10
VOLUME_CREATION_TIMEOUT = 120
INSTANCE_LAUNCH_TIMEOUT = 120
DOWNSIZE_PERIOD = 7 # Number of days before downsizing.
REBOOT_BUTTON = "REBOOT_BUTTON"
SHELVE_BUTTON = "SHELVE_BUTTON"
DELETE_BUTTON = "DELETE_BUTTON"
BOOST_BUTTON = "BOOST_BUTTON"
DOWNSIZE_BUTTON = "DOWNSIZE_BUTTON"
| 2.203125 | 2 |
renthouse/apps/users/basic_tools.py | huifeng-kooboo/RentHouseSite | 2 | 12791882 | '''
@description: basic helper functions for common use
@return : every return value is JSON-encoded data
'''
#-*- coding:utf-8 -*-
import json
from django.contrib.auth.hashers import make_password,check_password
from django.contrib.auth import authenticate
def checkUserLoginInfo(username,password):
'''
@brief: basic function to check password and username
    :param username: the username to check
    :param password: the password to check
    :return: JSON data describing any username or password problem
    if OK == 1 the check passed
'''
dict_Result = {}
dict_Result['OK'] = 0
if len(username) < 6 :
str_error_type = "输入用户名过短,请重新输入"
dict_Result['error'] = str_error_type
return json.dumps(dict_Result,ensure_ascii=False,sort_keys=True)
if len(password) < 6 :
str_error_type = "输入密码过短,请重新输入"
dict_Result['error'] = str_error_type
return json.dumps(dict_Result,ensure_ascii=False,sort_keys=True,)
dict_Result['OK'] = 1
return json.dumps(dict_Result)
''' password methods : encrypt or decrypt '''
def generateSecurityPassword(password):
'''
    @description: generate a secured (hashed) password
    :param password:
    :return: str containing the password in hashed, secure form
'''
security_password = make_password(password)
return security_password
def checkSecurityPassword(password,security_password):
'''
@description: check security password
:param password:
:param security_password:
:return: bool type
'''
b_Result = check_password(password,security_password)
return b_Result | 2.9375 | 3 |
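
# Usage sketch (assumption, not part of the original module; Django settings must be
# configured before make_password/check_password can run):
# result = json.loads(checkUserLoginInfo('john_doe', 'secret123'))
# if result['OK'] == 1:
#     hashed = generateSecurityPassword('secret123')
#     assert checkSecurityPassword('secret123', hashed)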
src/Tasks/UnionOfIntervals.py | PaulLafytskyi/Hackerrank-Tests | 0 | 12791883 | <reponame>PaulLafytskyi/Hackerrank-Tests<gh_stars>0
if __name__ == '__main__':
start = [5, 10]
end = [3, 12]
#if start[- 1] >
| 1.875 | 2 |
src/etools_validator/utils.py | unicef/etools-validator | 0 | 12791884 | <filename>src/etools_validator/utils.py
from django.contrib.contenttypes.fields import GenericForeignKey
from django.db.models import Model, ObjectDoesNotExist
from django.db.models.fields.files import FieldFile
from itertools import chain
def get_all_field_names(model):
'''Return a list of all field names that are possible for
this model (including reverse relation names).
Any internal-only field names are not included.
Replacement for MyModel._meta.get_all_field_names()
which does not exist under Django 1.10.
https://github.com/django/django/blob/stable/1.7.x/django/db/models/options.py#L422
https://docs.djangoproject.com/en/1.10/ref/models/meta/#migrating-from-the-old-api
'''
return list(set(chain.from_iterable(
(field.name, field.attname) if hasattr(field, 'attname') else (field.name, )
for field in model._meta.get_fields()
if not (field.many_to_one and field.related_model is None) and not isinstance(field, GenericForeignKey)
)))
def check_editable_fields(obj, fields):
if not getattr(obj, 'old_instance', None):
return False, fields
for field in fields:
old_instance = obj.old_instance
if getattr(obj, field) != getattr(old_instance, field):
return False, field
return True, None
def check_required_fields(obj, fields):
error_fields = []
for f_name in fields:
try:
field = getattr(obj, f_name)
except ObjectDoesNotExist:
return False, f_name
try:
response = field.filter().count() > 0
except AttributeError:
if isinstance(field, FieldFile):
response = getattr(field, 'name', None) or False
else:
response = field is not None
if response is False:
error_fields.append(f_name)
if error_fields:
return False, error_fields
return True, None
def field_comparison(f1, f2):
if isinstance(f1, FieldFile):
new_file = getattr(f1, 'name', None)
old_file = getattr(f2, 'name', None)
if new_file != old_file:
return False
elif f1 != f2:
return False
return True
def check_rigid_model_instance(old_obj, new_obj):
if not isinstance(old_obj, Model) or not isinstance(new_obj, Model):
# one of instances can be None, in this case we don't need to check all fields
return old_obj == new_obj
field_names = get_all_field_names(old_obj)
for field in field_names:
try:
            old_value = getattr(old_obj, field, None)
        except ObjectDoesNotExist:
            old_value = None
        try:
            new_value = getattr(new_obj, field, None)
        except ObjectDoesNotExist:
            new_value = None
if not field_comparison(new_value, old_value):
return False
# there is no check for instance class so we don't go deeper than 1 level
return True
def check_rigid_related(obj, related):
current_related = list(getattr(obj, related).filter())
old_related = getattr(obj.old_instance, '{}_old'.format(related), None)
if old_related is None:
# if old related was not set as an attribute on the object, assuming no changes
return True
if len(current_related) != len(old_related):
return False
if len(current_related) == 0:
return True
current_related.sort(key=lambda x: x.id)
old_related.sort(key=lambda x: x.id)
comparison_map = zip(current_related, old_related)
# check if any field on the related model was changed
for old_obj, new_obj in comparison_map:
if not check_rigid_model_instance(old_obj, new_obj):
return False
return True
def check_rigid_fields(obj, fields, old_instance=None, related=False):
if not old_instance and not getattr(obj, 'old_instance', None):
# since no old version of the object was passed in, we assume there were no changes
return True, None
for f_name in fields:
old_instance = old_instance or obj.old_instance
try:
new_field = getattr(obj, f_name, None)
except ObjectDoesNotExist:
new_field = None
try:
old_field = getattr(old_instance, f_name, None)
except ObjectDoesNotExist:
# in case it's OneToOne related field
old_field = None
if hasattr(new_field, 'all'):
# this could be a related field, unfortunately i can't figure out a isinstance check
if related:
if not check_rigid_related(obj, f_name):
return False, f_name
elif isinstance(old_field, Model) or isinstance(new_field, Model):
if not check_rigid_model_instance(old_field, new_field):
return False, f_name
elif not field_comparison(new_field, old_field):
return False, f_name
return True, None
def update_object(obj, kwdict):
for k, v in kwdict.items():
if isinstance(v, list):
getattr(obj, k).set(v)
else:
setattr(obj, k, v)
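
# Usage sketch (assumption, not part of the original module): apply a dict of changes to a
# model instance; list values are assigned through the related manager's set().
# update_object(instance, {"title": "New title", "sections": [section_a, section_b]})
# instance.save()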
| 2.234375 | 2 |
scripts/detokenize.py | mishu45/lang2sign | 3 | 12791885 | #!python
# pylint: disable=redefined-outer-name,unexpected-keyword-arg
"""Script to detokenize text file"""
from lang2sign.lang2gloss.tokenizers.en_asl import EnAslTokenizer
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(
description="Detokenize text files"
)
parser.add_argument(
"--input-file",
dest="input_file",
type=str,
help="filepath of text to be detokenized"
)
args = parser.parse_args()
output_filepath = args.input_file + ".detok"
tokenizer = EnAslTokenizer()
print(
"Writing detokenized file to {}".format(
output_filepath
)
)
tokenizer.write_detokenized_file(
args.input_file,
output_filepath
)
| 3.078125 | 3 |
web/web.py | BAFurtado/PolicySpace2 | 10 | 12791886 | import os
from flask import Blueprint, render_template, redirect, url_for, send_from_directory
import conf
from . import manager
from .forms import SimulationForm
bp = Blueprint('web', __name__)
@bp.route('/')
def index():
return render_template('status.html')
@bp.route('/start', methods=['GET', 'POST'])
def start():
form = SimulationForm()
if form.validate_on_submit():
manager.start(**form.data)
return redirect(url_for('web.index'))
return render_template('start.html', form=form)
@bp.route('/results')
def runs():
# sort by datetime, most recent first
ids = os.listdir(conf.RUN['OUTPUT_PATH'])
ids = sorted(ids, key=lambda d: d.split('__')[-1], reverse=True)
return render_template('runs.html', runs=ids)
@bp.route('/results/<string:id>')
def results(id):
# Currently just showing top-level plots
path = os.path.join(conf.RUN['OUTPUT_PATH'], id)
plots = os.path.join(path, 'plots')
try:
plots = [os.path.join('/output', id, 'plots', p) for p in os.listdir(plots)]
except FileNotFoundError:
plots = []
return render_template('results.html', id=id, plots=plots)
@bp.route('/output/<path:filename>')
def output(filename):
"""serve simulation result files from the output path"""
return send_from_directory(conf.RUN['OUTPUT_PATH'], filename)
| 2.28125 | 2 |
code/model/common.py | lewisyangliu/LDP | 3 | 12791887 | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
def default_conv(in_channels, out_channels, kernel_size, bias=True):
return nn.Conv2d(
in_channels, out_channels, kernel_size,
padding=(kernel_size//2), bias=bias)
class MeanShift(nn.Conv2d):
def __init__(self, rgb_range, rgb_mean, rgb_std, sign=-1):
super(MeanShift, self).__init__(3, 3, kernel_size=1)
std = torch.Tensor(rgb_std)
self.weight.data = torch.eye(3).view(3, 3, 1, 1)
self.weight.data.div_(std.view(3, 1, 1, 1))
self.bias.data = sign * rgb_range * torch.Tensor(rgb_mean)
self.bias.data.div_(std)
self.weight.requires_grad = False
self.bias.requires_grad = False
class BasicBlock(nn.Sequential):
def __init__(
self, in_channels, out_channels, kernel_size, stride=1, bias=False,
bn=True, act=nn.ReLU(True)):
m = [nn.Conv2d(
in_channels, out_channels, kernel_size,
padding=(kernel_size//2), stride=stride, bias=bias)
]
if bn: m.append(nn.BatchNorm2d(out_channels))
if act is not None: m.append(act)
super(BasicBlock, self).__init__(*m)
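

if __name__ == "__main__":
    # Usage sketch (an assumption, not from the original repo; the RGB statistics below are
    # the usual DIV2K values and are only illustrative).
    rgb_mean, rgb_std = (0.4488, 0.4371, 0.4040), (1.0, 1.0, 1.0)
    sub_mean = MeanShift(255, rgb_mean, rgb_std)          # subtracts the dataset mean
    add_mean = MeanShift(255, rgb_mean, rgb_std, sign=1)  # adds it back afterwards
    x = torch.rand(1, 3, 48, 48) * 255
    assert torch.allclose(add_mean(sub_mean(x)), x, atol=1e-3)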
| 2.5625 | 3 |
lab2.5/individual.py | etozhekimm/lab2 | 0 | 12791888 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
if __name__ == '__main__':
tpl = tuple(map(float, input().split()))
if not tpl:
print("Заданный кортеж пуст", file=sys.stderr)
exit(1)
if tuple(sorted(tpl, reverse=True)) == tpl:
print("Команды перечислены в соответствии с занятыми местами")
else:
print("Команды перечислены не в соответствии с занятыми местами") | 3.40625 | 3 |
getchapp/migrations/0003_post_tag.py | gem763/getch | 0 | 12791889 | <filename>getchapp/migrations/0003_post_tag.py
# Generated by Django 2.2.7 on 2020-01-08 09:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('getchapp', '0002_brand_item'),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('channel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='getchapp.Channel')),
],
options={
'abstract': False,
},
bases=('getchapp.channel',),
),
migrations.CreateModel(
name='Tag',
fields=[
('channel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='getchapp.Channel')),
('x', models.FloatField(default=0)),
('y', models.FloatField(default=0)),
('target', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='getchapp.Pix')),
('with_brand', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='getchapp.Brand')),
('with_item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='getchapp.Item')),
],
options={
'abstract': False,
},
bases=('getchapp.channel',),
),
]
| 1.875 | 2 |
test/pyaz/sql/dw/__init__.py | bigdatamoore/py-az-cli | 0 | 12791890 | <filename>test/pyaz/sql/dw/__init__.py
import json, subprocess
from ... pyaz_utils import get_cli_name, get_params
def create(name, server, resource_group, __CATALOG_COLLATION=None, collation=None, __ELASTIC_POOL_ID=None, __LICENSE_TYPE=None, max_size=None, service_objective=None, __RESTORE_POINT_IN_TIME=None, __SAMPLE_NAME=None, __SKU=None, __SOURCE_DATABASE_DELETION_DATE=None, tags=None, zone_redundant=None, __AUTO_PAUSE_DELAY=None, __MIN_CAPACITY=None, __COMPUTE_MODEL=None, __READ_SCALE=None, __HIGH_AVAILABILITY_REPLICA_COUNT=None, backup_storage_redundancy=None, __MAINTENANCE_CONFIGURATION_ID=None, __IS_LEDGER_ON=None, __CAPACITY=None, __FAMILY=None, __TIER=None, no_wait=None):
params = get_params(locals())
command = "az sql dw create " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def show(resource_group, server, name):
params = get_params(locals())
command = "az sql dw show " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def list(server, resource_group):
params = get_params(locals())
command = "az sql dw list " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def delete(resource_group, server, name, yes=None, no_wait=None):
params = get_params(locals())
command = "az sql dw delete " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
if stdout:
return json.loads(stdout)
print(stdout)
else:
raise Exception(stderr)
print(stderr)
def pause(name, server, resource_group):
params = get_params(locals())
command = "az sql dw pause " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def resume(name, server, resource_group):
params = get_params(locals())
command = "az sql dw resume " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def update(resource_group, server, name, max_size=None, service_objective=None, set=None, add=None, remove=None, force_string=None, no_wait=None):
params = get_params(locals())
command = "az sql dw update " + params
print(command)
output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = output.stdout.decode("utf-8")
stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
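# --- Hedged usage sketch (not part of the generated wrapper) ---
# A minimal illustration of how these thin "az sql dw" wrappers might be called,
# assuming the Azure CLI is installed and an account is logged in. The resource
# names below ("myrg", "myserver", "mydw") are hypothetical placeholders.
def _example_usage():
    dw = create(name="mydw", server="myserver", resource_group="myrg")
    print(dw.get("status"))
    pause(name="mydw", server="myserver", resource_group="myrg")
    resume(name="mydw", server="myserver", resource_group="myrg")
    delete(resource_group="myrg", server="myserver", name="mydw")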
| 1.992188 | 2 |
advanced_databases/lab2/populate.py | piotrgiedziun/university | 0 | 12791891 | #!/usr/bin/python
import MySQLdb
import random
from datetime import datetime as dt, timedelta
# MySQL format
DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="sakila")
cur = db.cursor()
print "connected"
# truncate old data
cur.execute("SET FOREIGN_KEY_CHECKS = 0;");
cur.execute("TRUNCATE `ticket`;");
cur.execute("TRUNCATE `seat`;");
cur.execute("TRUNCATE `show`;");
cur.execute("TRUNCATE `cinema`;");
cur.execute("TRUNCATE `theater`;");
cur.execute("SET FOREIGN_KEY_CHECKS = 1;");
print "cleaned"
# create cinema
cur.execute("INSERT INTO `cinema` (name, address) VALUES ('cinema', 'wroclaw');")
seat_id = 0
for theater_id in range(1, 1001):
#is_3D = random.randint(0,1)
is_3D = 1
query = "INSERT INTO `theater` (theater_id, name, is_3D, cinema_cinema_id) VALUES ('%d', 'theater%d', '%d', '1');" % (theater_id, theater_id, is_3D,)
#print query
cur.execute(query)
theater_real_id = db.insert_id()
# create seats for theater
for seat_col in range(0, 10):
for seat_row in range(0, 10):
price = random.randint(18,25)
query = "INSERT INTO `seat` (row, col, price, theater_theater_id) VALUES (%d, %d, %d, %d);" % (seat_row, seat_col, price, theater_real_id)
#print ">", query
cur.execute(query)
# create shows
now = dt.now() + timedelta(days=1)
for show_id in range(0, 1):
film_id = random.randint(1,999)
now += timedelta(minutes=185);
query = "INSERT INTO `show` (start_date, theater_theater_id, film_film_id) VALUES ('%s', %d, %d);" % (now.strftime(DATE_FORMAT), theater_real_id, film_id)
#print ">", query
cur.execute(query)
show_real_id = db.insert_id()
        # create ticket
for seat_col in range(0, 10):
for seat_row in range(0, 10):
price = random.randint(18,25)
# get seat_id
seat_id += 1
query = "INSERT INTO `ticket` (price, seat_seat_id, show_show_id, cinema_cinema_id, theater_id) VALUES (%d, %d, %d, 1, %d);" % (price, seat_id, show_real_id, theater_real_id)
#print ">", query
cur.execute(query)
db.commit() | 2.8125 | 3 |
Classes/Magnet.py | PMSMcqut/pyleecan-of-manatee | 2 | 12791892 | # -*- coding: utf-8 -*-
"""Warning : this file has been generated, you shouldn't edit it"""
from os import linesep
from pyleecan.Classes.check import check_init_dict, check_var
from pyleecan.Functions.save import save
from pyleecan.Classes.frozen import FrozenClass
from pyleecan.Methods.Machine.Magnet.comp_angle_opening import comp_angle_opening
from pyleecan.Methods.Machine.Magnet.comp_height import comp_height
from pyleecan.Methods.Machine.Magnet.comp_mass import comp_mass
from pyleecan.Methods.Machine.Magnet.comp_ratio_opening import comp_ratio_opening
from pyleecan.Methods.Machine.Magnet.comp_surface import comp_surface
from pyleecan.Methods.Machine.Magnet.comp_volume import comp_volume
from pyleecan.Methods.Machine.Magnet.is_outwards import is_outwards
from pyleecan.Methods.Machine.Magnet.plot import plot
from pyleecan.Classes.check import InitUnKnowClassError
from pyleecan.Classes.Material import Material
class Magnet(FrozenClass):
VERSION = 1
# cf Methods.Machine.Magnet.comp_angle_opening
comp_angle_opening = comp_angle_opening
# cf Methods.Machine.Magnet.comp_height
comp_height = comp_height
# cf Methods.Machine.Magnet.comp_mass
comp_mass = comp_mass
# cf Methods.Machine.Magnet.comp_ratio_opening
comp_ratio_opening = comp_ratio_opening
# cf Methods.Machine.Magnet.comp_surface
comp_surface = comp_surface
# cf Methods.Machine.Magnet.comp_volume
comp_volume = comp_volume
# cf Methods.Machine.Magnet.is_outwards
is_outwards = is_outwards
# cf Methods.Machine.Magnet.plot
plot = plot
# save method is available in all object
save = save
def __init__(self, mat_type=-1, type_magnetization=0, Lmag=0.95, init_dict=None):
"""Constructor of the class. Can be use in two ways :
- __init__ (arg1 = 1, arg3 = 5) every parameters have name and default values
for Matrix, None will initialise the property with an empty Matrix
for pyleecan type, None will call the default constructor
- __init__ (init_dict = d) d must be a dictionnary wiht every properties as keys
ndarray or list can be given for Vector and Matrix
object or dict can be given for pyleecan Object"""
if mat_type == -1:
mat_type = Material()
if init_dict is not None: # Initialisation by dict
check_init_dict(init_dict, ["mat_type", "type_magnetization", "Lmag"])
# Overwrite default value with init_dict content
if "mat_type" in list(init_dict.keys()):
mat_type = init_dict["mat_type"]
if "type_magnetization" in list(init_dict.keys()):
type_magnetization = init_dict["type_magnetization"]
if "Lmag" in list(init_dict.keys()):
Lmag = init_dict["Lmag"]
# Initialisation by argument
self.parent = None
# mat_type can be None, a Material object or a dict
if isinstance(mat_type, dict):
self.mat_type = Material(init_dict=mat_type)
else:
self.mat_type = mat_type
self.type_magnetization = type_magnetization
self.Lmag = Lmag
# The class is frozen, for now it's impossible to add new properties
self._freeze()
def __str__(self):
"""Convert this objet in a readeable string (for print)"""
Magnet_str = ""
if self.parent is None:
Magnet_str += "parent = None " + linesep
else:
Magnet_str += "parent = " + str(type(self.parent)) + " object" + linesep
Magnet_str += "mat_type = " + str(self.mat_type.as_dict()) + linesep + linesep
Magnet_str += "type_magnetization = " + str(self.type_magnetization) + linesep
Magnet_str += "Lmag = " + str(self.Lmag)
return Magnet_str
def __eq__(self, other):
"""Compare two objects (skip parent)"""
if type(other) != type(self):
return False
if other.mat_type != self.mat_type:
return False
if other.type_magnetization != self.type_magnetization:
return False
if other.Lmag != self.Lmag:
return False
return True
def as_dict(self):
"""Convert this objet in a json seriable dict (can be use in __init__)
"""
Magnet_dict = dict()
if self.mat_type is None:
Magnet_dict["mat_type"] = None
else:
Magnet_dict["mat_type"] = self.mat_type.as_dict()
Magnet_dict["type_magnetization"] = self.type_magnetization
Magnet_dict["Lmag"] = self.Lmag
        # The class name is added to the dict for deserialisation purposes
Magnet_dict["__class__"] = "Magnet"
return Magnet_dict
def _set_None(self):
"""Set all the properties to None (except pyleecan object)"""
if self.mat_type is not None:
self.mat_type._set_None()
self.type_magnetization = None
self.Lmag = None
def _get_mat_type(self):
"""getter of mat_type"""
return self._mat_type
def _set_mat_type(self, value):
"""setter of mat_type"""
check_var("mat_type", value, "Material")
self._mat_type = value
if self._mat_type is not None:
self._mat_type.parent = self
# The Magnet material
# Type : Material
mat_type = property(
fget=_get_mat_type, fset=_set_mat_type, doc=u"""The Magnet material"""
)
def _get_type_magnetization(self):
"""getter of type_magnetization"""
return self._type_magnetization
def _set_type_magnetization(self, value):
"""setter of type_magnetization"""
check_var("type_magnetization", value, "int", Vmin=0, Vmax=5)
self._type_magnetization = value
    # Permanent magnet magnetization type: 0 for radial, 1 for parallel, 2 for Halbach []
# Type : int, min = 0, max = 5
type_magnetization = property(
fget=_get_type_magnetization,
fset=_set_type_magnetization,
doc=u"""Permanent magnet magnetization type: 0 for radial, 1 for parallel, 2 for HallBach []""",
)
def _get_Lmag(self):
"""getter of Lmag"""
return self._Lmag
def _set_Lmag(self, value):
"""setter of Lmag"""
check_var("Lmag", value, "float", Vmin=0)
self._Lmag = value
# Magnet axial length
# Type : float, min = 0
Lmag = property(fget=_get_Lmag, fset=_set_Lmag, doc=u"""Magnet axial length""")
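# Hedged usage sketch (not generated by pyleecan): shows how the class is meant
# to be constructed and serialized. It assumes that a default Material() compares
# equal to another default Material() and that check_init_dict tolerates the
# extra "__class__" key written by as_dict.
if __name__ == "__main__":
    magnet = Magnet(type_magnetization=1, Lmag=0.05)  # parallel magnetization, 50 mm
    print(magnet)  # human readable summary via __str__
    d = magnet.as_dict()  # JSON serializable dict, includes "__class__"
    magnet2 = Magnet(init_dict=d)  # round trip through the dict constructor
    print(magnet == magnet2)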
| 1.867188 | 2 |
tests/emmet-builders/test_electronic_structure.py | acrutt/emmet | 19 | 12791893 | from pathlib import Path
import pytest
from maggma.stores import JSONStore, MemoryStore
from monty.serialization import dumpfn, loadfn
from emmet.builders.materials.electronic_structure import ElectronicStructureBuilder
from emmet.builders.vasp.materials import MaterialsBuilder
@pytest.fixture(scope="session")
def tasks_store(test_dir):
return JSONStore(
test_dir / "electronic_structure/es_task_docs.json.gz", key="task_id"
)
@pytest.fixture(scope="session")
def materials_store(tasks_store):
materials_store = MemoryStore(key="material_id")
builder = MaterialsBuilder(tasks=tasks_store, materials=materials_store)
builder.run()
return materials_store
@pytest.fixture
def electronic_structure_store():
return MemoryStore(key="material_id")
@pytest.fixture
def bandstructure_fs(test_dir):
return JSONStore(
test_dir / "electronic_structure/es_bs_objs.json.gz", key="task_id"
)
@pytest.fixture
def dos_fs(test_dir):
return JSONStore(
test_dir / "electronic_structure/es_dos_objs.json.gz", key="task_id"
)
def test_electronic_structure_builder(
tasks_store, materials_store, electronic_structure_store, bandstructure_fs, dos_fs
):
builder = ElectronicStructureBuilder(
tasks=tasks_store,
materials=materials_store,
electronic_structure=electronic_structure_store,
bandstructure_fs=bandstructure_fs,
dos_fs=dos_fs,
)
builder.run()
assert electronic_structure_store.count() == 3
def test_serialization(tmpdir):
builder = ElectronicStructureBuilder(
MemoryStore(), MemoryStore(), MemoryStore(), MemoryStore(), MemoryStore()
)
dumpfn(builder.as_dict(), Path(tmpdir) / "test.json")
loadfn(Path(tmpdir) / "test.json")
| 1.882813 | 2 |
uri/2533.py | AdilsonTorres/programming-problems | 0 | 12791894 | <filename>uri/2533.py<gh_stars>0
while True:
try:
T = int(input())
except EOFError:
break
a, b = 0, 0
for i in range(T):
n, c = map(int, input().split(' '))
a += n * c
b += c * 100
print("{:.4f}".format(a / b)) | 2.6875 | 3 |
src/download.py | n3ssuno/MSA-patents | 0 | 12791895 | #!/usr/bin/env python
"""
Download needed raw data
Author: <NAME>
Copyright (c) 2021 - <NAME>
License: See the LICENSE file.
Date: 2021-02-05
"""
import os
import time
import random
import requests
import tempfile
import sys
import zipfile
import shutil
import tarfile
from tqdm import tqdm
from parse_args import parse_io
def download_url(url, output_dir, file_name):
response = None
try:
response = requests.get(url, stream=True)
except:
print(f'Connection error occurred trying to get URL: {url}',
file=sys.stderr)
    if response is None:
        return None
    if response.status_code != 200:
        print(f'Error {response.status_code}',
              f'while downloading file from URL: {url}')
        return None
tmp_fd, tmp_fn = tempfile.mkstemp()
total_size_in_bytes = int(response.headers.get('content-length', 0))
with os.fdopen(tmp_fd, 'wb') as f_out, \
tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True) as progress_bar:
if total_size_in_bytes is None:
f_out.write(response.content)
else:
total_size_in_bytes = int(total_size_in_bytes)
block_size = 1024 # 1 KB
for data in response.iter_content(block_size):
progress_bar.update(len(data))
f_out.write(data)
if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
print(f'ERROR, something went wrong while downloading {url}')
target = os.path.join(output_dir, file_name)
if target.endswith('.zip') and not zipfile.is_zipfile(tmp_fn):
with zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED) as f_zip:
f_zip.write(tmp_fn)
os.unlink(tmp_fn)
elif any([el.endswith('.tar') for el in url.split('?')]):
shutil.move(tmp_fn, target)
with tarfile.open(target) as f_tar:
f_tar.extractall(output_dir)
os.remove(target)
else:
shutil.move(tmp_fn, target)
return target
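# Hedged usage sketch (not part of the original module): download_url() saves the
# response body as output_dir/file_name, extracting ".tar" archives and wrapping
# non-zip payloads whose target name ends in ".zip". The URL below is a placeholder.
def _example_download():
    return download_url("https://example.com/data.csv", "/tmp", "data.csv")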
def main():
args = parse_io()
source_url = args.input
output_file = args.output
output_dir, file_name = os.path.split(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
download_url(source_url, output_dir, file_name)
time.sleep(random.random()*5)
if __name__ == '__main__':
main()
| 2.796875 | 3 |
Samples/Simple_Demo/Python_Export/Export_forms_separately/child_window.py | Embarcadero/Delphi4PythonExporter | 18 | 12791896 | import os
from delphifmx import *
class Child_Form(Form):
def __init__(self, owner):
self.child_heading = None
self.result_text_heading = None
self.result_text_label = None
self.LoadProps(os.path.join(os.path.dirname(os.path.abspath(__file__)), "child_window.pyfmx")) | 1.96875 | 2 |
pypi_wheel/setup.py | mcellteam/mcell_build | 2 | 12791897 | import setuptools
import platform
import sys
import os
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_DIR = 'mcell/utils/pybind11_test/build/'
if platform.system() == 'Linux':
# TODO: copy mcell library to the current directory
pass
elif platform.system() == 'Darwin':
#
pass
elif 'Windows' in platform.system():
pass
else:
sys.exit("Operating system '" + platform.system() + "' is not supported in this build system yet.")
def get_mcell_version():
# TODO
return '3.99.0'
setuptools.setup(
name='mcell',
version=get_mcell_version(), # todo: set automatically - has to be number
py_modules=['lib/mcell'],
author="Salk Institute for Biologocal Studies",
author_email="<EMAIL>",
description="MCell4",
long_description="MCell4",
long_description_content_type="text/markdown",
url="https://www.mcell.org",
download_url="https://mcell.org/download.html",
python_requires='>=3.8',
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)"
],
zip_safe=True
)
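# Hedged usage note (not part of the original setup script): a wheel would
# typically be built and installed with the standard setuptools/pip workflow,
# for example:
#   python setup.py bdist_wheel
#   pip install dist/mcell-3.99.0-*.whl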
| 1.648438 | 2 |
app/jobs/plex.py | Foxboron/Frank | 7 | 12791898 | #!/usr/bin/env python
import requests
from jobs import AbstractJob
from lxml import etree
class Plex(AbstractJob):
def __init__(self, conf):
self.interval = conf['interval']
self.movies = conf['movies']
self.shows = conf['shows']
self.timeout = conf.get('timeout')
def _parse_movies(self, xml):
tree = etree.fromstring(xml)
movies = []
for movie in tree.xpath('/MediaContainer/Video'):
movies.append({
'title': movie.get('title'),
'year': movie.get('year')
})
return movies
def _parse_shows(self, xml):
tree = etree.fromstring(xml)
shows = []
for show in tree.xpath('/MediaContainer/Video'):
shows.append({
'name': show.get('grandparentTitle'),
'title': show.get('title'),
'episode': show.get('index').zfill(2),
'season': show.get('parentIndex').zfill(2)
})
return shows
def get(self):
try:
r = requests.get(self.movies, timeout=self.timeout)
movies = self._parse_movies(r.content)
            r = requests.get(self.shows, timeout=self.timeout)
shows = self._parse_shows(r.content)
return {'movies': movies, 'shows': shows}
except requests.exceptions.ConnectionError:
return {}
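# Hedged usage sketch (not part of the original job): constructs the job with a
# hypothetical config and prints recently added items. The Plex endpoints below
# are placeholders and depend on the local server and library sections.
if __name__ == "__main__":
    conf = {
        'interval': 600,
        'movies': 'http://localhost:32400/library/recentlyAdded?type=1',
        'shows': 'http://localhost:32400/library/recentlyAdded?type=4',
        'timeout': 10,
    }
    print(Plex(conf).get())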
| 2.640625 | 3 |
.env-cbre/bin/django-admin.py | ThebiggunSeeoil/app-cbre-exxon | 0 | 12791899 | <reponame>ThebiggunSeeoil/app-cbre-exxon
#!/Users/yutthachaithongkumchum/myproject/app-cbre-exxon/app-cbre-exxon/.env-cbre/bin/python3
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| 1.023438 | 1 |
custom/commands/osb_fix.py | M-Spencer-94/configNOW | 3 | 12791900 | <gh_stars>1-10
def run(cfg):
"""OSB Fix to deal with 11.1.1.5+ requirements"""
username=cfg.getProperty('wls.admin.username')
password=cfg.getProperty('wls.admin.password')
admin=cfg.getProperty('osb.as.host')
port=cfg.getProperty('wls.admin.listener.port')
urladmin=admin + ":" + port
connect(username, password, urladmin)
edit()
cd('/AppDeployments/ALSB Cluster Singleton Marker Application')
startEdit()
set('Targets',jarray.array([ObjectName('com.bea:Name=osb_cluster,Type=Cluster')], ObjectName))
cd('/AppDeployments/ALSB Domain Singleton Marker Application')
set('Targets',jarray.array([ObjectName('com.bea:Name=osb_cluster,Type=Cluster')], ObjectName))
activate()
| 1.765625 | 2 |
Project/src/uff/ic/mell/sentimentembedding/statistical_evaluation/statistic_test.py | MeLLL-UFF/tuning_sentiment | 2 | 12791901 | from scipy import stats
import scikit_posthocs as sp
import numpy as np
import pandas as pd
import glob
def friedman_test(dataframe):
return stats.friedmanchisquare(*[row for index, row in dataframe.T.iterrows()])
def nemenyi_test(dataframe):
nemenyi = sp.posthoc_nemenyi_friedman(dataframe)
list_index=[]
for col in nemenyi.columns:
list_index.append([col,list(nemenyi[nemenyi[col]<0.05].index),list(nemenyi[nemenyi[col]<0.05][col].values)])
return pd.DataFrame(list_index)
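# Hedged illustration (not part of the original script): both tests expect a
# DataFrame with one row per dataset and one column per method. The values
# below are made up purely to show the expected shape.
def _toy_example():
    toy = pd.DataFrame({"modelA": [0.81, 0.78, 0.90],
                        "modelB": [0.79, 0.80, 0.88],
                        "modelC": [0.70, 0.72, 0.85]})
    print(friedman_test(toy))  # chi-square statistic and p-value
    print(nemenyi_test(toy))  # methods that differ significantly (p < 0.05), if any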
def read_dataset(dataframe_path):
return pd.read_csv(dataframe_path, skiprows=[0,2], sep=",",decimal='.')
PATH='/Users/sergiojunior/sentiment-embeddings-final/Experiment Results/Experiments Results/'
PATH_OUT='/Users/sergiojunior/sentiment-embeddings-final/Experiment Results/Statistical_Reslts/'
#list_experiment=['Static','Transformers','Fine_tuning','Task_Fine_tuning']#'Static','Transformers','Fine_tuning','Task_Fine_tuning'
list_experiment=['Fine_tuning']#'Static','Transformers','Fine_tuning','Task_Fine_tuning'
list_classifiers = ['MLPClassifier','Random_Forest','SVM','XGboost','Reg_Logistica']
list_metrics = ['accuracy','f1_macro']
list_models=['BERT',"RoBERTa",'BERTweet']
for experiment in list_experiment:
for classifier in list_classifiers:
for metric in list_metrics:
print("{}_{}_{}".format(experiment,classifier,metric))
if experiment=='Static':
print("Static_embedding")
df = read_dataset(glob.glob(PATH+experiment+'/Pivot_tables/pivot_'+classifier+'*'+metric+'*.csv')[0])
print('friedman_test: ',friedman_test(df.iloc[:,1:]))
nemenyi_test(df.iloc[:,1:]).to_csv(PATH_OUT+"nemenyi_{}_{}_{}.csv".format(experiment,
classifier,
metric))
if experiment=="Transformers":
df = read_dataset(glob.glob(PATH+list_models[0]+'/Pivot_tables/pivot_'+classifier+'*'+metric+'*.csv')[0])
for models in list_models[1:]:
print(models)
df = df.merge(read_dataset(glob.glob(PATH+models+'/Pivot_tables/pivot_'+classifier+'*'+metric+'*.csv')[0]),
how='left',
on='Embedding')
print('friedman_test: ',friedman_test(df.iloc[:,1:]))
nemenyi_test(df.iloc[:,1:]).to_csv(PATH_OUT+"nemenyi_{}_{}_{}.csv".format(experiment,
classifier,
metric))
if experiment=='Fine_tuning':
for models in list_models:
print(models)
df = pd.read_csv(glob.glob(PATH +'Fine_tuning_Generic_tweets/'+ models + '-1-LM/pivot_' + classifier + '*'+metric+'*.csv')[0])
for k in ['5','05','10','25','50','250','500','1500','6600']:
df = df.merge(pd.read_csv(glob.glob(PATH +'Fine_tuning_Generic_tweets/'+ models + '-'+k+'-LM/pivot_' + classifier + '*'+metric+'*.csv')[0]),
how='left',
on='Embedding',
suffixes=("","_"+str(k)))
#df_original = pd.read_csv(glob.glob(PATH + models+'/Pivot_tables/pivot_' + classifier + '*'+metric+'*.csv')[0],
# skiprows=[0,2],sep=",",decimal='.')
#df = df.merge(df_original,how='left', on='Embedding')
#df.columns=['Embedding','1','5','05','10','25','50','250','500','1500','6600','original']
df.columns=['Embedding','1','5','05','10','25','50','250','500','1500','6600']
print('friedman_test: ',friedman_test(df.iloc[:,1:]))
nemenyi_test(df.iloc[:,1:]).to_csv(PATH_OUT+"nemenyi_{}_{}_{}_{}.csv".format(models,experiment,
classifier,
metric))
if experiment=='Task_Fine_tuning':
for models in list_models:
print(models)
df=None
df = pd.read_csv(glob.glob(PATH + 'InData/'+models+'-LM/pivot_' + classifier + '*'+metric+'*.csv')[0],sep=",",decimal='.')
df.iloc[:,1] = round(df.iloc[:,1]*100,2)
for k in ['LOO','22Dt']:
df = df.merge(pd.read_csv(glob.glob(PATH + k +'/'+models+'-LM/pivot_' + classifier + '*'+metric+'*.csv')[0],sep=",",decimal='.'),
how='left',
on='Embedding',
suffixes=("","_"+str(k)))
df.columns=['Embedding','InData','LOO','22Dt']
df['22Dt'] = round(df['22Dt']*100,2)
print('friedman_test: ',friedman_test(df.iloc[:,1:]))
nemenyi_test(df.iloc[:,1:]).to_csv(PATH_OUT+"nemenyi_{}_{}_{}_{}.csv".format(models,experiment,
classifier,
metric))
print() | 2.78125 | 3 |
client.py | jorgebg/pysoa-example | 1 | 12791902 | <filename>client.py
from pysoa.client import Client
from settings import SOA_CLIENT_SETTINGS
if __name__ == '__main__':
client = Client({'example': SOA_CLIENT_SETTINGS})
action_response = client.call_action('example', 'square', {'number': 42})
print(action_response)
| 2.3125 | 2 |
lightreid/models/architectures/build.py | nataliamiccini/light-reid | 296 | 12791903 | <gh_stars>100-1000
from lightreid.utils import Registry
ARCHs_REGISTRY = Registry('arch')
| 1.21875 | 1 |
dz/dz-02/src/searches/nelder_mead.py | Yalfoosh/AIPR | 0 | 12791904 | # Copyright 2020 Yalfoosh
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from typing import Optional, Tuple, Union
import numpy as np
from . import constants
from .function import Function
def clean_nelder_mead_simplex_search_arguments(
function: Function,
alpha: float,
beta: float,
gamma: float,
sigma: float,
use_jakobovic_expand: bool,
epsilon: float,
max_iterations: int,
verbosity: Optional[str],
decimal_precision: int,
) -> Tuple[Function, float, float, float, float, bool, float, int, int, int]:
"""
Checks the Nelder Mead Simplex Search arguments and returns them prepared for work.
Args:
function (Function): A Function representing the loss function.
alpha (float): A float used in point reflection.
beta (float): A float used in point contraction.
gamma (float): A float used in point expansion.
sigma (float): A float used when moving points to the optimum.
use_jakobovic_expand (bool): A bool determining whether or not to use the
__expand_jakobovic method instead of the __expand method for point expansion.
Defaults to False.
epsilon (float): A float representing the error threshold.
max_iterations (int): An int representing the maximum number of iterations
before the algorithm times out and returns the last found optimum.
verbosity (Optional[str]): A str representing the verbosity of the output during
algorithm execution.
decimal_precision (int): An int representing the number of decimal digits to
round numbers outputted during algorithm execution.
Raises:
TypeError: Raised if argument function is not a Function.
TypeError: Raised if argument alpha is not a float.
TypeError: Raised if argument beta is not a float.
TypeError: Raised if argument gamma is not a float.
TypeError: Raised if argument sigma is not a float.
TypeError: Raised if argument use_jakobovic_expand is not a bool.
TypeError: Raised if argument epsilon is not a float.
ValueError: Raised if argument epsilon is a negative number.
TypeError: Raised if argument max_iterations is not an int.
ValueError: Raised if argument max_iterations is a negative number.
TypeError: Raised if argument verbosity is not a str.
KeyError: Raised if argument verbosity is an invalid key.
TypeError: Raised if argument decimal_precision is not an int.
ValueError: Raised if argument decimal_precision is a negative number.
Returns:
Tuple[Function, float, float, float, float, bool, float, int, int, int]: Cleaned
arguments.
"""
if not isinstance(function, Function):
raise TypeError(
"Expected argument function to be a Function, instead it is "
f"{type(function)}."
)
if isinstance(alpha, int):
alpha = float(alpha)
if not isinstance(alpha, float):
raise TypeError(
"Expected argument alpha to be a float, instead it is " f"{type(alpha)}."
)
if isinstance(beta, int):
beta = float(beta)
if not isinstance(beta, float):
raise TypeError(
"Expected argument beta to be a float, instead it is " f"{type(beta)}."
)
if isinstance(gamma, int):
gamma = float(gamma)
if not isinstance(gamma, float):
raise TypeError(
"Expected argument gamma to be a float, instead it is " f"{type(gamma)}."
)
if isinstance(sigma, int):
sigma = float(sigma)
if not isinstance(sigma, float):
raise TypeError(
"Expected argument sigma to be a float, instead it is " f"{type(sigma)}."
)
if not isinstance(use_jakobovic_expand, bool):
raise TypeError(
"Expected argument use_jakobovic_expand to be a bool, instead it is "
f"{type(use_jakobovic_expand)}."
)
if not isinstance(epsilon, float):
raise TypeError(
"Expected argument epsilon to be a float, instead it is "
f"{type(epsilon)}."
)
if epsilon < 0:
raise ValueError(
"Expected argument epsilon to be a positive float, instead it is "
f"{epsilon}."
)
if not isinstance(max_iterations, int):
raise TypeError(
"Expected argument max_interations to be an int, instead it is "
f"{type(max_iterations)}."
)
if max_iterations < 1:
raise ValueError(
"Expected argument max_interations to be a positive integer, instead it is "
f"{max_iterations}."
)
if verbosity is None:
verbosity = "none"
if not isinstance(verbosity, str):
raise TypeError(
f"Expected argument verbosity to be a str, instead it is {type(verbosity)}."
)
if verbosity not in constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT:
verbosity_dict_length = len(constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT)
if verbosity_dict_length == 0:
verbosity_string = "There are no keys available."
elif verbosity_dict_length == 1:
_key = list(constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT.keys())[0]
verbosity_string = f'The only available key is "{_key}".'
else:
_keys = list(sorted(constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT.keys()))
verbosity_string = "The available keys are "
verbosity_string += ", ".join([str(f'"{x}"') for x in _keys[:-1]])
            verbosity_string += f' and "{_keys[-1]}".'
raise KeyError(
f'Verbosity key "{verbosity}" is not in the Nelder Mead Simplex Verbosity '
f"dictionary. {verbosity_string}"
)
verbosity = constants.NELDER_MEAD_SIMPLEX_VERBOSITY_DICT[verbosity]
if not isinstance(decimal_precision, int):
raise TypeError(
"Expected argument decimal_precision to be an int, instead it is "
f"{type(decimal_precision)}."
)
if decimal_precision < 1:
raise ValueError(
"Expected argument decimal_precision to be a positive int, instead it is"
f"{decimal_precision}."
)
return (
function,
alpha,
beta,
gamma,
sigma,
use_jakobovic_expand,
epsilon,
max_iterations,
verbosity,
decimal_precision,
)
def clean_get_simplex_points(
start: np.ndarray, stride: Union[float, int]
) -> Tuple[np.ndarray, float]:
"""
Checks the __get_simplex_points arguments and returns them prepared for work.
Args:
start (np.ndarray): A numpy.ndarray representing the starting point for simplex
generation.
stride (Union[float, int]): A float or int representing the stride.
Raises:
TypeError: Raised if argument start is not a numpy.ndarray.
ValueError: Raised if argument start is a zero-length vector.
TypeError: Raised if argument stride is not a float or int.
Returns:
Tuple[np.ndarray, float]: Cleaned arguments.
"""
if not isinstance(start, np.ndarray):
raise TypeError(
"Expected argument start to be a numpy.ndarray, instead it is "
f"{type(start)}."
)
start = np.reshape(start, -1)
if start.shape[0] == 0:
raise ValueError(
"Expected argument starting point to be a vector with at least one "
"element, instead it is empty."
)
if not isinstance(stride, (float, int)):
raise TypeError(
"Expected argument stride to be a float or int, instead it is "
f"{type(stride)}."
)
stride = float(stride)
return start, stride
def __get_simplex_points(start: np.ndarray, stride: float) -> np.ndarray:
"""
Generates simplex points for a starting point.
Args:
start (np.ndarray): A numpy.ndarray representing the starting point for simplex
generation.
stride (float): A float representing the stride.
Returns:
np.ndarray: A matrix with each row representing a point of the simplex.
"""
points = np.tile(start, reps=(start.shape[0], 1))
points = points + stride * np.eye(points.shape[0])
return np.vstack([start, points])
def __reflect(
centroid: np.ndarray, maximum_point: np.ndarray, alpha: float
) -> np.ndarray:
"""
    Reflects argument maximum_point wrt centroid by argument alpha.
Args:
centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
maximum_point (np.ndarray): A numpy.ndarray representing the worst point of a
simplex.
alpha (float): A float representing the amount a point will be reflected.
Returns:
np.ndarray: A numpy.ndarray representing the reflected point.
"""
return (1 + alpha) * centroid - alpha * maximum_point
def __contract(
centroid: np.ndarray, maximum_point: np.ndarray, beta: float
) -> np.ndarray:
"""
    Contracts argument maximum_point wrt centroid by argument beta.
Args:
centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
maximum_point (np.ndarray): A numpy.ndarray representing the worst point of a
simplex.
beta (float): A float representing the amount a point will be contracted.
Returns:
np.ndarray: A numpy.ndarray representing the contracted point.
"""
return (1 - beta) * centroid + beta * maximum_point
def __expand(
centroid: np.ndarray, reflected_point: np.ndarray, gamma: float
) -> np.ndarray:
"""
    Expands argument reflected_point wrt centroid by argument gamma.
    Args:
        centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
        reflected_point (np.ndarray): A numpy.ndarray representing the reflected point
            of a simplex.
        gamma (float): A float representing the amount a point will be expanded.
Returns:
np.ndarray: A numpy.ndarray representing the expanded point.
"""
return (1 - gamma) * centroid + gamma * reflected_point
def __expand_jakobovic(
centroid: np.ndarray, reflected_point: np.ndarray, gamma: float
) -> np.ndarray:
"""
    Expands argument reflected_point wrt centroid by argument gamma. This is a modified
    version which is supposedly the correct one, as said by prof. Jakobović.
    Args:
        centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
        reflected_point (np.ndarray): A numpy.ndarray representing the reflected point
            of a simplex.
        gamma (float): A float representing the amount a point will be expanded.
Returns:
np.ndarray: A numpy.ndarray representing the expanded point.
"""
return (1 - gamma) * centroid - gamma * reflected_point
def __time_to_stop(
simplex_values: np.ndarray, centroid_value: float, epsilon: float
) -> bool:
"""
Checks if it's time to stop Nelder Mead Simplex Search.
Args:
simplex_values (np.ndarray): A numpy.ndarray representing the vector of simplex
values.
centroid_value (float): A float representing the value of the simplex centroid.
epsilon (float): A float representing the error threshold.
Returns:
bool: True if the stopping condition of Nelder Mead Simplex Search has been met,
False otherwise.
"""
difference_in_values = simplex_values - centroid_value
squared_difference_in_values = np.square(difference_in_values)
mean_squared_difference_in_values = np.mean(squared_difference_in_values)
return np.sqrt(mean_squared_difference_in_values) <= epsilon
def __print_nmss_values(
function: Function,
centroid: np.ndarray,
verbosity: int,
decimal_precision: int,
):
"""
Prints the Nelder Mead Simplex Search values.
Args:
function (Function): A Function representing the loss function.
centroid (np.ndarray): A numpy.ndarray representing the simplex centroid.
verbosity (int): An int representing the level of verbosity of the output during
algorithm execution.
decimal_precision (int): An int representing the number of decimal digits to
round numbers outputted during algorithm execution.
"""
if verbosity == 1:
print(f"c = {np.around(centroid, decimal_precision)}")
elif verbosity > 1:
result = function(centroid, dont_count=True)
result = (
np.around(result, 3)
if isinstance(result, np.ndarray)
else f"{result:.0{decimal_precision}f}"
)
print(f"F(c = {np.around(centroid, decimal_precision)}) = {result}")
def nelder_mead_simplex_search(
function: Function,
start: np.ndarray,
stride: Union[float, int] = 1,
alpha: float = 1.0,
beta: float = 0.5,
gamma: float = 2.0,
sigma: float = 0.5,
use_jakobovic_expand: bool = False,
epsilon: float = 1e-6,
max_iterations: int = 100000,
verbosity: Optional[str] = None,
decimal_precision: int = 3,
) -> np.ndarray:
"""
Uses Nelder Mead Simplex Search to find an n-D optimum of a function.
Args:
function (Function): A Function representing the loss function.
start (np.ndarray): A numpy.ndarray representing the starting point of the
search.
stride (Union[float, int], optional): A float or int representing the stride for
simplex generation. Defaults to 1.
alpha (float, optional): A float used in point reflection. Defaults to 1.0.
beta (float, optional): A float used in point contraction. Defaults to 0.5.
gamma (float, optional): A float used in point expansion. Defaults to 2.0.
sigma (float, optional): A float used when moving points to the optimum.
Defaults to 0.5.
use_jakobovic_expand (float, optional): A bool determining whether or not to use
the __expand_jakobovic method instead of the __expand method for point
expansion. Defaults to False.
epsilon (float, optional): A float representing the error threshold. Defaults to
1e-6.
max_iterations (int, optional): An int representing the maximum number of
iterations before the algorithm times out and returns the last found optimum.
Defaults to 100000.
verbosity (Optional[str], optional): A str representing the verbosity of the
output during algorithm execution. Defaults to None (no output during algorithm
execution).
decimal_precision (int, optional): An int representing the number of decimal
digits to round numbers outputted during algorithm execution. Defaults to 3.
Returns:
np.ndarray: A numpy.ndarray representing the last found optimum.
"""
(
function,
alpha,
beta,
gamma,
sigma,
use_jakobovic_expand,
epsilon,
max_iterations,
verbosity,
decimal_precision,
) = clean_nelder_mead_simplex_search_arguments(
function=function,
alpha=alpha,
beta=beta,
gamma=gamma,
sigma=sigma,
use_jakobovic_expand=use_jakobovic_expand,
epsilon=epsilon,
max_iterations=max_iterations,
verbosity=verbosity,
decimal_precision=decimal_precision,
)
start, stride = clean_get_simplex_points(start=start, stride=stride)
simplex_points = __get_simplex_points(start=start, stride=stride)
simplex_values = np.array([function(x) for x in simplex_points])
timed_out = True
expansion_method = __expand_jakobovic if use_jakobovic_expand else __expand
for _ in range(max_iterations):
minimum_index = np.argmin(simplex_values)
maximum_index = np.argmax(simplex_values)
centroid = np.mean(np.delete(simplex_points, maximum_index, axis=0), axis=0)
__print_nmss_values(
function=function,
centroid=centroid,
verbosity=verbosity,
decimal_precision=decimal_precision,
)
reflected_point = __reflect(
centroid=centroid, maximum_point=simplex_points[maximum_index], alpha=alpha
)
reflected_value = function(reflected_point)
minimum_value = simplex_values[minimum_index]
if reflected_value < minimum_value:
expanded_point = expansion_method(
centroid=centroid, reflected_point=reflected_point, gamma=gamma
)
expanded_value = function(expanded_point)
if expanded_value < minimum_value:
simplex_points[maximum_index] = expanded_point
simplex_values[maximum_index] = expanded_value
else:
simplex_points[maximum_index] = reflected_point
simplex_values[maximum_index] = reflected_value
else:
maximum_value = simplex_values[maximum_index]
if all(np.delete(simplex_values, maximum_index, axis=0) < reflected_value):
if reflected_value < maximum_value:
simplex_points[maximum_index] = reflected_point
simplex_values[maximum_index] = reflected_value
# We need this here since we're introducing a new point and value
minimum_index = np.argmin(simplex_values)
maximum_index = np.argmax(simplex_values)
# We need to do this since the maximum value has potentially changed
maximum_value = simplex_values[maximum_index]
contracted_point = __contract(
centroid=centroid,
maximum_point=simplex_points[maximum_index],
beta=beta,
)
contracted_value = function(contracted_point)
if contracted_value < maximum_value:
simplex_points[maximum_index] = contracted_point
simplex_values[maximum_index] = contracted_value
else:
for i, simplex_point in enumerate(simplex_points):
if i == minimum_index:
continue
simplex_points[i] += (
simplex_points[minimum_index] - simplex_points[i]
) * sigma
simplex_values[i] = function(simplex_points[i])
else:
simplex_points[maximum_index] = reflected_point
simplex_values[maximum_index] = reflected_value
if __time_to_stop(
simplex_values=simplex_values,
centroid_value=function(centroid),
epsilon=epsilon,
):
timed_out = False
break
if timed_out:
print(
f"WARNING: Nelder Mead Simplex Search timed out after {max_iterations} "
"iterations - result might not be a minimum.",
file=sys.stderr,
)
# Do this to get a more precise result
maximum_index = np.argmax(simplex_values)
centroid = np.mean(np.delete(simplex_points, maximum_index, axis=0), axis=0)
return centroid
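# Hedged usage sketch (not part of the original module). It assumes the local
# Function class can wrap a plain Python callable; if its constructor differs,
# adapt the wrapping accordingly. The quadratic below has its minimum at (1, -2),
# so the search should return a point close to that.
if __name__ == "__main__":
    quadratic = Function(lambda x: (x[0] - 1.0) ** 2 + (x[1] + 2.0) ** 2)
    optimum = nelder_mead_simplex_search(
        function=quadratic,
        start=np.array([10.0, 10.0]),
        stride=1,
    )
    print(optimum)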
| 2.6875 | 3 |
autobump.py | rfrandse/mytools | 0 | 12791905 | #!/usr/bin/env python2
import argparse
import os
#import sh
import sys
import math
import subprocess
PIPE = subprocess.PIPE
try:
import git
from git import GitCommandError
HAVE_GIT = True
except ImportError:
# _log.debug('Failed to import git module')
HAVE_GIT = False
def log(msg, args):
if args.noisy:
sys.stderr.write('{}\n'.format(msg))
def extract_project_from_uris(i_args, uris):
# remove SRC_URI = and quotes (does not handle escaped quotes)
uris = uris.split('"')[1]
for uri in uris.split():
remote = i_args.remote + '/' + i_args.org
if remote not in uri:
continue
# remove fetcher arguments
uri = uri.split(';')[0]
# the project is the right-most path segment
return uri.split('/')[-1].replace('.git', '')
return None
def extract_sha_from_recipe(args, recipe):
with open(recipe) as fp:
uris = ''
project = None
sha = None
for line in fp:
line = line.rstrip()
if 'SRCREV' in line:
sha = line.split('=')[-1].replace('"', '').strip()
elif not project and uris or '_URI' in line:
uris += line.split('\\')[0]
if '\\' not in line:
# In uris we've gathered a complete (possibly multi-line)
# assignment to a bitbake variable that ends with _URI.
# Try to pull an OpenBMC project out of it.
project = extract_project_from_uris(args, uris)
if project is None:
# We didn't find a project. Unset uris and look for
# another bitbake variable that ends with _URI.
uris = ''
if project and sha:
return (project, sha)
print('No SRCREV or URI found in {}'.format(recipe))
return(project, sha)
def git_add(recipe):
git_args = ['git', 'add', recipe]
process = subprocess.Popen(git_args, stdout=PIPE, stderr=PIPE)
stdoutput, stderroutput = process.communicate()
if 'fatal' in stdoutput:
print('fatal')
def git_commit(commit_msg):
git_args = ['git', 'commit', '-m', commit_msg]
process = subprocess.Popen(git_args, stdout=PIPE, stderr=PIPE)
stdoutput, stderroutput = process.communicate()
if 'fatal' in stdoutput:
print('fatal')
def git_show(sha):
git_args = ['git', 'show', sha]
process = subprocess.Popen(git_args, stdout=PIPE, stderr=PIPE)
stdoutput, stderroutput = process.communicate()
if 'fatal' in stdoutput:
print('fatal')
else:
print(stdoutput)
def git_log(parms_array):
git_args = ['git', 'log']+parms_array
process = subprocess.Popen(git_args, stdout=PIPE, stderr=PIPE)
stdoutput, stderroutput = process.communicate()
if 'fatal' in stdoutput:
print('fatal')
else:
return stdoutput
return []
def find_recipes(i_args):
git_args = ['git','--no-pager','grep','-l', '-e', '_URI', '--and', '-e', i_args.remote+'/'+i_args.org]
process = subprocess.Popen(git_args, stdout=PIPE, stderr=PIPE)
stdoutput, stderroutput = process.communicate()
if 'fatal' in stdoutput:
print('fatal')
else:
return stdoutput.decode('utf-8').split()
return []
def find_and_process_bumps(args):
project_sha = args.project_sha
candidate_recipes = find_recipes(args)
for recipe in candidate_recipes:
project_name, recipe_sha = extract_sha_from_recipe(args, recipe)
if project_name in args.project_name:
if args.dry_run:
print project_name
print recipe
recipe_basename = os.path.basename(recipe)
if project_sha == recipe_sha:
message_args = (recipe_basename, recipe_sha[:10])
print('{} is up to date ({})'.format(*message_args))
continue
if not args.dry_run:
recipe_content = None
with open(recipe) as fd:
recipe_content = fd.read()
recipe_content = recipe_content.replace(recipe_sha, project_sha)
with open(recipe, 'w') as fd:
fd.write(recipe_content)
git_add(recipe)
commit_summary_args = (project_name, recipe_sha[:10], project_sha[:10])
commit_msg = '{}: downstream srcrev bump {}..{}'.format(*commit_summary_args)
if not args.dry_run:
git_commit(commit_msg)
else:
print "dry run"
print commit_msg
def parse_arguments(i_args):
    app_description = '''Local recipe bumping tool.
    Find bitbake metadata files (recipes) that use the configured remote
    (e.g. git.ibm.com) and check the project repository for the given
    revision. If the revision is new, generate commits that update the
    recipe SRCREV.
    '''
l_parser = argparse.ArgumentParser(description=app_description)
l_parser.add_argument(
'project_name',
help='target project name to change sha')
l_parser.add_argument(
'project_sha',
help='input sha commit length 40 digits')
l_parser.add_argument(
'-d', '--dry-run', dest='dry_run', action='store_true',
help='perform a dry run only')
l_parser.add_argument(
'-v', '--verbose', dest='noisy', action='store_true',
help='enable verbose status messages')
l_parser.add_argument(
'-r', '--remote', default='github.com',
help='set remote value to scan for')
l_parser.add_argument(
'-o', '--org', default='ibm-openbmc',
help='set org value to scan for')
return l_parser.parse_args(i_args)
def main(i_args):
# Parse the arguments
l_args = parse_arguments(i_args)
digits = len(str(l_args.project_sha))
if digits != 40:
message_args = (l_args.project_sha, digits)
print('sha number {} is {} not 40'.format(*message_args))
exit(1)
find_and_process_bumps(l_args)
if not l_args.dry_run:
log_sha = git_log(['-n 1', '--pretty=format:%h'])
git_show(log_sha)
if __name__ == '__main__':
main(sys.argv[1:])
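# Hedged usage note (not part of the original script): a typical dry run against
# a hypothetical project and a 40-character revision might look like
#   ./autobump.py my-project 0123456789abcdef0123456789abcdef01234567 --dry-run
# or, from Python:
#   main(['my-project', '0123456789abcdef0123456789abcdef01234567', '-d'])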
| 2.421875 | 2 |
wapps/migrations/0005_drop_identity_logo.py | apihackers/wapps | 7 | 12791906 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-08 14:52
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('wapps', '0004_add_wapps_image'),
]
operations = [
migrations.RemoveField(
model_name='identitysettings',
name='logo',
),
]
| 1.328125 | 1 |
experiments/jaccard_metric/test_jaccard.py | jajajaqlt/nsg | 10 | 12791907 | <reponame>jajajaqlt/nsg
# Copyright 2017 Rice University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from experiments.jaccard_metric.utils import plotter
from synthesis.ops.candidate_ast import API_NODE
from trainer_vae.infer import BayesianPredictor
from experiments.jaccard_metric.get_jaccard_metrics import helper
from data_extraction.data_reader.data_loader import Loader
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
def main(clargs):
num_centroids = 10
predictor = BayesianPredictor(clargs.continue_from, batch_size=5)
loader = Loader(clargs.data, predictor.config)
psis, labels = [], []
for i in range(10000):
nodes, edges, targets, var_decl_ids, \
node_type_numbers, \
type_helper_val, expr_type_val, ret_type_val, \
ret_type, fp_in, fields, \
apicalls, types, keywords, method, classname, javadoc_kws, \
surr_ret, surr_fp, surr_method = loader.next_batch()
psi = predictor.get_latent_state(apicalls, types, keywords,
ret_type, fp_in, fields, method, classname, javadoc_kws,
surr_ret, surr_fp, surr_method
)
psis.extend(psi)
apiOrNot = node_type_numbers == API_NODE
for t, api_bool in zip(targets, apiOrNot):
label = get_apis(t, api_bool, predictor.config.vocab.chars_api)
labels.append(label)
predictor.close()
new_states, new_labels = [], []
for state, label in zip(psis, labels):
if len(label) != 0:
new_states.append(state)
new_labels.append(label)
print('API Call Jaccard Calculations')
jac_api_matrix, jac_api_vector = helper(new_states, new_labels, num_centroids=num_centroids)
plotter(jac_api_matrix, jac_api_vector, name=clargs.filename)
return
def get_apis(calls, apiOrNot, vocab):
apis = []
for call, api_bool in zip(calls, apiOrNot):
if api_bool and call > 0:
api = vocab[call]
apis.append(api)
return apis
if __name__ == '__main__':
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--continue_from', type=str, default='save',
help='directory to load model from')
parser.add_argument('--top', type=int, default=10,
help='plot only the top-k labels')
parser.add_argument('--data', default='../data_extraction/data_reader/data')
clargs = parser.parse_args()
clargs.folder = 'results/test_jaccard/'
if not os.path.exists(clargs.folder):
os.makedirs(clargs.folder)
clargs.filename = clargs.folder + 'jaccard_' + clargs.continue_from
main(clargs)
| 1.5625 | 2 |
options/__init__.py | kwshh/ImageDeconvlution | 25 | 12791908 | <reponame>kwshh/ImageDeconvlution
from .running_options import * | 1.007813 | 1 |
PetService/apps.py | sifullahrakin/HelloPaw | 0 | 12791909 | from django.apps import AppConfig
class PetserviceConfig(AppConfig):
name = 'PetService'
| 1.164063 | 1 |
video_module.py | RoboQYD/cobblr-video-recorder | 0 | 12791910 | from engine import SystemState
from engine import Utilities
from engine import Menu
from engine import Screen
from engine import TextWriter
from engine import Events
import RPi.GPIO
import pyaudio
import wave
import atexit
import io
import stat
import os
import signal
import picamera
import time
import sys
import threading
import Queue
signal.signal(signal.SIGINT, Utilities.GracefulExit)
class VideoState(object):
pass
def Init():
# System Setup
RPi.GPIO.setup(7, RPi.GPIO.OUT) #Flash RPi.GPIO
RPi.GPIO.setup(8, RPi.GPIO.IN, pull_up_down=RPi.GPIO.PUD_UP) #Button RPi.GPIO
RPi.GPIO.output(7, False)
SystemState.camera.image_effect = 'none'
# Iterating Variable Setup
SystemState.VideoState = VideoState
SystemState.VideoState.setting = 'none'
SystemState.VideoState.image_effect = 0
SystemState.VideoState.iso = 0
SystemState.VideoState.rotation = 0
SystemState.VideoState.brightness = 5
SystemState.VideoState.saturation = 10
SystemState.VideoState.contrast = 10
SystemState.VideoState.sharpness = 10
SystemState.VideoState.zoom = 0
SystemState.VideoState.meter_mode = 0
SystemState.VideoState.awb_mode = 0
SystemState.VideoState.exposure_mode = 0
SystemState.VideoState.video_stabilization = 0
# Video Associated Variable Setup
SystemState.VideoState.current_video = None
SystemState.VideoState.video_filename = None
SystemState.VideoState.video_archive = None
SystemState.VideoState.video_tally = None
SystemState.VideoState.video_count = 0
SystemState.VideoState.video_stream = True
SystemState.VideoState.video_duration = 0
SystemState.VideoState.video_recording = False
SystemState.VideoState.playback_state = 'pause'
SystemState.VideoState.video_path = 'media/video/'
SystemState.VideoState.video_preview_path = SystemState.VideoState.video_path + '.preview/'
SystemState.VideoState.audio_message_queue = Queue.Queue()
SystemState.VideoState.video_message_queue = Queue.Queue()
# Lists of camera effects
SystemState.VideoState.iso_values = [0, 100, 200, 320, 400, 500, 640, 800]
SystemState.VideoState.image_effect_values = [
'none', 'negative', 'solarize', 'sketch', 'denoise', 'emboss', 'oilpaint',
'hatch','gpen', 'pastel', 'watercolor', 'film', 'blur', 'saturation',
'colorswap', 'washedout', 'posterise', 'colorpoint', 'colorbalance',
'cartoon', 'deinterlace1', 'deinterlace2'
]
SystemState.VideoState.awb_mode_values = [
'auto', 'sunlight', 'cloudy', 'shade', 'tungsten', 'fluorescent',
'incandescent', 'flash', 'horizon', 'off'
]
SystemState.VideoState.rotation_values = [0, 90, 180, 270]
SystemState.VideoState.brightness_values = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
hundred_container = [-100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
SystemState.VideoState.saturation_values = hundred_container
SystemState.VideoState.contrast_values = hundred_container
SystemState.VideoState.sharpness_values = hundred_container
SystemState.VideoState.zoom_values = [
(0.0, 0.0, 1.0, 1.0),
(0.1, 0.1, 0.9, 0.9),
(0.225, 0.225, 0.8, 0.8),
(0.25, 0.25, 0.7, 0.7),
(0.275, 0.275, 0.6, 0.6),
(0.3, 0.3, 0.5, 0.5),
(0.325, 0.325, 0.4, 0.4),
    (0.35, 0.35, 0.3, 0.3),
(0.375, 0.375, 0.2, 0.2),
(0.4, 0.4, 0.1, 0.1),
]
SystemState.VideoState.meter_mode_values = [
'average', 'spot', 'backlit', 'matrix'
]
SystemState.VideoState.exposure_mode_values = [
'auto', 'night', 'nightpreview', 'backlight', 'spotlight',
'sports', 'snow', 'beach', 'verylong', 'fixedfps', 'antishake',
'fireworks', 'off'
]
SystemState.VideoState.video_stabilization_values = [False, True]
__MakeVideoPath()
return SystemState
def __PreviousSetting(property_list, property_name):
"""Moves to the previous setting in the menu."""
properties = getattr(SystemState.VideoState, property_list)
index = getattr(SystemState.VideoState, property_name)
if index > 0:
index -= 1
else:
index = len(properties) - 1
__ProcessSettingsValues(property_name, properties, index)
def __NextSetting(property_list, property_name):
"""Moves to the next settng in the menu."""
properties = getattr(SystemState.VideoState, property_list)
index = getattr(SystemState.VideoState, property_name)
if index < len(properties) - 1:
index += 1
else:
index = 0
__ProcessSettingsValues(property_name, properties, index)
def __CurrentSetting(property_list, property_name):
"""Display's items on screen when you first enter a menu."""
properties = getattr(SystemState.VideoState, property_list)
index = getattr(SystemState.VideoState, property_name)
__ProcessSettingsValues(property_name, properties, index)
def __ProcessSettingsValues(property_name, properties, index):
"""Fetches values and prints them on screen for Next and Previous. """
property_value = properties[index]
# Setting values in SystemState.camera from SystemState.VideoState.
setattr(SystemState.camera, property_name, property_value)
setattr(SystemState.VideoState, property_name, index)
property_type = type(property_value)
# Ensures default 'auto' values are printed on screen.
if property_value == 0 and property_type is not bool:
property_value = 'Auto'
# Makes 'zoom' human readable.
if property_type is tuple:
if index == 0:
index = None
property_value = str(index)
# Removing underscores and writing values to the screen.
property_name = ' '.join(property_name.split('_'))
__WriteSettingsTitle(property_name)
__WriteSettingsValue(property_value)
def __WriteSettingsValue(text):
"""Writes settings values for each menu item."""
TextWriter.Write(
state=SystemState,
text=str(text).title(),
position=(160, 110),
centered=True,
size=20,
permatext=True,
color=(57, 255, 20)
)
def __WriteSettingsTitle(text):
"""Writes title values for each menu item."""
TextWriter.Write(
state=SystemState,
text=str(text).title(),
position=(160, 10),
centered=True,
size=25,
permatext=True,
color=(57, 255, 20)
)
def Process():
"""Processing button presses."""
button = str(SystemState.pressed_button)
pygame = SystemState.pygame
screen = SystemState.screen
screen_mode = SystemState.screen_mode
if button == 'library':
OpenAlbum()
Menu.JumpTo(screen_mode=4)
elif button == 'go_back':
Menu.Back()
SystemState.VideoState.setting = 'none'
elif button == 'play':
__PlayVideo()
elif button == 'settings':
Menu.JumpTo(screen_mode=2, refresh_screen=False)
elif button == 'delete':
if SystemState.VideoState.video_count > 0:
Menu.JumpTo(screen_mode=5)
TextWriter.Write(
state=SystemState,
text='Delete?',
position=(125, 75),
size=20
)
elif button == 'right_arrow':
__ProcessRightArrow()
elif button == 'left_arrow':
__ProcessLeftArrow()
elif button == 'iso':
Menu.JumpTo(screen_mode=3)
SystemState.VideoState.setting = 'iso'
elif button == 'image_effect':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'image_effect'
elif button == 'rotation':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'rotation'
elif button == 'brightness':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'brightness'
elif button == 'saturation':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'saturation'
elif button == 'contrast':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'contrast'
elif button == 'sharpness':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'sharpness'
elif button == 'zoom':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'zoom'
elif button == 'meter_mode':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'meter_mode'
elif button == 'awb':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'awb_mode'
elif button == 'video_stabilization':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'video_stabilization'
elif button == 'exposure_mode':
Menu.JumpTo(screen_mode=3, refresh_screen=False)
SystemState.VideoState.setting = 'exposure_mode'
elif button == 'accept':
__DeleteVideo()
Menu.Back()
OpenAlbum()
elif button == 'decline':
Menu.Back()
OpenAlbum()
# Displaying settings title and values when you first enter a menu.
if SystemState.screen_mode == 2 and SystemState.next_screen_mode == 3:
setting = SystemState.VideoState.setting
setting_values = setting + '_values'
__CurrentSetting(setting_values, setting)
def __ProcessLeftArrow():
"""Processing left arrow input for each menu item."""
if SystemState.VideoState.setting == 'image_effect':
__PreviousSetting('image_effect_values', 'image_effect')
elif SystemState.VideoState.setting == 'iso':
__PreviousSetting('iso_values', 'iso')
elif SystemState.VideoState.setting == 'rotation':
__PreviousSetting('rotation_values', 'rotation')
elif SystemState.VideoState.setting == 'brightness':
__PreviousSetting('brightness_values', 'brightness')
elif SystemState.VideoState.setting == 'saturation':
__PreviousSetting('saturation_values', 'saturation')
elif SystemState.VideoState.setting == 'contrast':
__PreviousSetting('contrast_values', 'contrast')
elif SystemState.VideoState.setting == 'sharpness':
__PreviousSetting('sharpness_values', 'sharpness')
elif SystemState.VideoState.setting == 'zoom':
__PreviousSetting('zoom_values', 'zoom')
elif SystemState.VideoState.setting == 'meter_mode':
__PreviousSetting('meter_mode_values', 'meter_mode')
elif SystemState.VideoState.setting == 'awb_mode':
__PreviousSetting('awb_mode_values', 'awb_mode')
elif SystemState.VideoState.setting == 'video_stabilization':
__PreviousSetting('video_stabilization_values', 'video_stabilization')
elif SystemState.VideoState.setting == 'exposure_mode':
__PreviousSetting('exposure_mode_values', 'exposure_mode')
elif SystemState.screen_mode == 4:
if SystemState.VideoState.video_count > 0:
__PreviousVideo()
def __ProcessRightArrow():
"""Processing right arrow input for each menu item."""
if SystemState.VideoState.setting == 'image_effect':
__NextSetting('image_effect_values', 'image_effect')
elif SystemState.VideoState.setting == 'iso':
__NextSetting('iso_values', 'iso')
elif SystemState.VideoState.setting == 'rotation':
__NextSetting('rotation_values', 'rotation')
elif SystemState.VideoState.setting == 'brightness':
__NextSetting('brightness_values', 'brightness')
elif SystemState.VideoState.setting == 'saturation':
__NextSetting('saturation_values', 'saturation')
elif SystemState.VideoState.setting == 'contrast':
__NextSetting('contrast_values', 'contrast')
elif SystemState.VideoState.setting == 'sharpness':
__NextSetting('sharpness_values', 'sharpness')
elif SystemState.VideoState.setting == 'zoom':
__NextSetting('zoom_values', 'zoom')
elif SystemState.VideoState.setting == 'meter_mode':
__NextSetting('meter_mode_values', 'meter_mode')
elif SystemState.VideoState.setting == 'awb_mode':
__NextSetting('awb_mode_values', 'awb_mode')
elif SystemState.VideoState.setting == 'video_stabilization':
__NextSetting('video_stabilization_values', 'video_stabilization')
elif SystemState.VideoState.setting == 'exposure_mode':
__NextSetting('exposure_mode_values', 'exposure_mode')
elif SystemState.screen_mode == 4:
if SystemState.VideoState.video_count > 0:
__NextVideo()
def __MakeVideoPath():
"""Creates a folder that holds videos."""
if os.path.exists(SystemState.VideoState.video_preview_path) == False:
os.makedirs(SystemState.VideoState.video_preview_path)
os.chown(SystemState.VideoState.video_preview_path, SystemState.uid, SystemState.gid)
def __CallRecordAudio(timestamp):
"""Calls the _RecordAudio function in a thread."""
args = (timestamp)
thread = threading.Thread(target=__RecordAudio, args=(timestamp,))
thread.setDaemon(True)
thread.start()
def __CallRecordVideo(timestamp):
"""Calls the __RecordVideo function in a thread."""
args = (timestamp)
thread = threading.Thread(target=__RecordVideo, args=(timestamp,))
thread.setDaemon(True)
thread.start()
def __CallConvertVideo(timestamp):
"""Calls the __ConvertVideo function in a thread."""
args = (timestamp)
thread = threading.Thread(target=__ConvertVideo, args=(timestamp,))
thread.setDaemon(True)
thread.start()
def __RecordAudio(timestamp):
"""Setting up variables for camera."""
CHUNK = 8192
FORMAT = pyaudio.paInt16
CHANNELS = 1
RATE = int(SystemState.pyaudio.get_device_info_by_index(0)['defaultSampleRate'])
FILENAME = SystemState.VideoState.video_path + timestamp + '.wav'
RECORD_SECONDS = 10800
frames = []
# Clearing the queue messages just in case.
with SystemState.VideoState.audio_message_queue.mutex:
SystemState.VideoState.audio_message_queue.queue.clear()
# Setting up stream for audio.
stream = SystemState.pyaudio.open(
format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
output=True,
frames_per_buffer=CHUNK
)
# Recording data to a wave file.
for i in range(0, int(RATE/CHUNK * RECORD_SECONDS)):
data = stream.read(CHUNK)
frames.append(data)
# Try placing the information inside the audio message queue.
try:
audio_message_queue = SystemState.VideoState.audio_message_queue.get(False)
# If the queue is already empty, set it to none.
except Queue.Empty:
audio_message_queue = None
#If there is something inside the queue, read it.
if audio_message_queue != None:
if audio_message_queue.get('recording') == False:
break
# Stopping and closing stream.
stream.stop_stream()
stream.close()
# Converting stream data into a wave file.
wavefile = wave.open(FILENAME, 'wb')
wavefile.setnchannels(CHANNELS)
wavefile.setsampwidth(SystemState.pyaudio.get_sample_size(FORMAT))
wavefile.setframerate(RATE)
wavefile.writeframes(b''.join(frames))
wavefile.close()
def __StopRecordingAudio():
"""Setting up all the variables to stop recording audio."""
SystemState.VideoState.recording_audio = False
audio_action = {'recording': False}
video_action = {'recording': False}
SystemState.VideoState.video_message_queue.put(video_action)
SystemState.VideoState.audio_message_queue.put(audio_action)
def __RecordVideo(timestamp):
"""Records video files."""
video_path = SystemState.VideoState.video_path
video_preview_path = SystemState.VideoState.video_preview_path
# Setting up paths for videos.
h264_filepath = video_path + timestamp
mjpeg_filepath = video_preview_path + timestamp
# Start recording a high res (.h264) and low res (.mjpeg).
SystemState.camera.start_recording(h264_filepath + '.h264', splitter_port=2, resize=(1920, 1080))
SystemState.camera.start_recording(mjpeg_filepath + '.mjpeg', splitter_port=3, resize=(320, 240))
# Wait until the red button is released.
RPi.GPIO.wait_for_edge(8, RPi.GPIO.RISING)
# Stop recording the high res and low res video_archive.
__StopRecordingAudio()
SystemState.camera.stop_recording(splitter_port=2)
SystemState.camera.stop_recording(splitter_port=3)
# Call threading function to convert a video.
__CallConvertVideo(timestamp)
def __ConvertVideo(timestamp):
"""Convert's second mpjpeg video to mpeg which pygame can play."""
# Setting up local varables.
video_path = SystemState.VideoState.video_path
video_preview_path = SystemState.VideoState.video_preview_path
mjpeg_filepath = video_preview_path + timestamp + '.mjpeg'
mpeg_filepath = video_preview_path + timestamp + '.mpeg'
wav_filepath = video_path + timestamp + '.wav'
process_filepath = mjpeg_filepath + '.process'
mode = 0600|stat.S_IRUSR
time.sleep(1)
# Converting video files to make preview files.
os.mknod(process_filepath, mode)
ffmpeg_a = 'ffmpeg -i ' + mjpeg_filepath + " -target ntsc-vcd "
ffmpeg_b = ' -vcodec mpeg1video -an ' + mpeg_filepath + ' -threads 0'
ffmpeg_convert = ffmpeg_a + ffmpeg_b
# Executing the ffmpeg command and removing the process files.
os.system(ffmpeg_convert)
os.remove(mjpeg_filepath)
os.remove(process_filepath)
def OpenAlbum():
"""Opens the contents inside of the videos folder."""
# Setup the preview path as the path for the video count.
path = SystemState.VideoState.video_preview_path
SystemState.VideoState.video_archive = os.listdir(path)
SystemState.VideoState.video_archive = [os.path.join(path, pic) for pic in SystemState.VideoState.video_archive]
SystemState.VideoState.video_archive = sorted(SystemState.VideoState.video_archive)
SystemState.VideoState.video_count = len(SystemState.VideoState.video_archive)
processing_videos = []
# If there's a video in the directory, set it as current video.
if SystemState.VideoState.video_count > 0:
if SystemState.VideoState.current_video in SystemState.VideoState.video_archive:
SystemState.VideoState.video_index = SystemState.VideoState.video_archive.index(SystemState.VideoState.current_video)
else:
SystemState.VideoState.video_index = SystemState.VideoState.video_count - 1
SystemState.VideoState.current_video = SystemState.VideoState.video_archive[SystemState.VideoState.video_index]
__ShowVideo(SystemState.VideoState.current_video)
# If there are no videos, just write "no videos".
else:
TextWriter.Write(
state=SystemState,
text='No Videos',
position=(110, 100),
centred=True,
size=20,
permatext=True
)
def __ShowVideo(filename):
"""Shows a picture of the video file."""
pygame = SystemState.pygame
screen = SystemState.screen
# Setting up movie for pygame
SystemState.VideoState.movie = pygame.movie.Movie(filename)
SystemState.VideoState.movie.render_frame(1)
if SystemState.VideoState.video_archive != None and SystemState.screen_mode == 3:
# Remove 'PREVIEW-' and path leaving just unix time.
utime_string = os.path.basename(filename).split('-')[-1].split('.')[0]
timestamp = time.ctime(int(utime_string))
# Writing the time and position of the photo on the screen.
TextWriter.Write(
state=SystemState,
text=timestamp,
position=(90, 10),
size=12
)
def __PlayVideo():
"""Plays the video file (preview) on the camera's screen."""
# If there's more than one video, go ahead and play the video we're on.
if SystemState.VideoState.video_count > 0:
pygame = SystemState.pygame
modes = pygame.display.list_modes(16)
movie_screen = pygame.display.set_mode(modes[0], pygame.FULLSCREEN, 16)
SystemState.VideoState.movie.set_display(movie_screen)
SystemState.VideoState.movie.play()
SystemState.VideoState.movie_duration = SystemState.VideoState.movie.get_length()
time.sleep(SystemState.VideoState.movie_duration + .02)
OpenAlbum()
def __NextVideo():
"""Moves to the next video in the library."""
# If the video is not at the end of the list, go to the next one.
if SystemState.VideoState.video_index < SystemState.VideoState.video_count - 1:
SystemState.VideoState.video_index += 1
# If the video is at the end of the list, send it back to the first one.
else:
SystemState.VideoState.video_index = 0
filename = SystemState.VideoState.video_archive[SystemState.VideoState.video_index]
SystemState.VideoState.video_tally = str(SystemState.VideoState.video_index + 1) + '/' + str(SystemState.VideoState.video_count)
__ShowVideo(filename)
def __PreviousVideo():
"""Moves to the previous video in the library."""
    # If the video is not the first one, move backwards through the list.
if SystemState.VideoState.video_index > 0:
SystemState.VideoState.video_index -= 1
    # If the video is the first one, wrap around to the last one.
else:
SystemState.VideoState.video_index = SystemState.VideoState.video_count - 1
filename = SystemState.VideoState.video_archive[SystemState.VideoState.video_index]
SystemState.VideoState.video_tally = str(SystemState.VideoState.video_index + 1) + '/' + str(SystemState.VideoState.video_count)
__ShowVideo(filename)
def __DeleteVideo():
"""Delete a video."""
preview_video = SystemState.VideoState.current_video
# Setting up files to be deleted.
full_video = preview_video.split('/.preview')
full_video = full_video[0] + full_video[1]
full_video = full_video.split('.')
full_video = full_video[0] + '.h264'
# Attempting to delete the files above.
try:
os.remove(preview_video)
except: # TODO:print that preview couldn't be removed.
print "Couldn't remove preview image"
try:
SystemState.VideoState.video_archive.remove(preview_video)
except: # TODO: print that file was not removed from library.
print "Couldn't remove from library"
try:
os.remove(full_video)
except: # TODO: print that image not removed.
print "Image not removed"
def Main():
"""Main loop for the camera application."""
pygame = SystemState.pygame
SystemState.camera.resolution = (320, 240)
while SystemState.application == 'video':
# Check for button presses, messages, and which mode we're in.
Events.CheckEvents()
if SystemState.screen_mode in (1, 2, 3):
SystemState.VideoState.video_stream = True
else:
SystemState.VideoState.video_stream = False
try:
video_message_queue = SystemState.VideoState.video_message_queue.get(None)
except Queue.Empty:
video_message_queue = None
# Checking video message queue for record messages.
if video_message_queue != None:
recording_state = video_message_queue.get('recording')
if recording_state == True:
timestamp = str(int(time.time()))
__CallRecordAudio(timestamp)
__CallRecordVideo(timestamp)
SystemState.VideoState.video_recording = True
elif recording_state == False:
SystemState.VideoState.video_recording = False
TextWriter.ClearPermatext()
# Checking the gpio button that starts recording.
if SystemState.VideoState.video_recording == False:
if not RPi.GPIO.input(8) and SystemState.screen_mode == 1:
SystemState.VideoState.video_message_queue.put({'recording': True})
Menu.JumpTo(screen_mode=6)
TextWriter.Write(
text='Rec',
position=(10, 10),
color=(255,0,0),
permatext=True,
state=SystemState,
size=20
)
# Check if we are in a streaming mode. If so, throw frames at the screen.
if SystemState.VideoState.video_stream == True:
SystemState.VideoState.stream = io.BytesIO() # Capture into in-memory stream
SystemState.camera.capture(SystemState.VideoState.stream, use_video_port=True, splitter_port=0, format='rgb')
SystemState.VideoState.stream.seek(0)
SystemState.VideoState.stream.readinto(SystemState.rgb)
SystemState.VideoState.stream.close()
SystemState.VideoState.img = SystemState.pygame.image.frombuffer(SystemState.rgb[0: (320 * 240 * 3)], (320, 240), 'RGB' )
xa = (320 - SystemState.VideoState.img.get_width() ) / 2
ya = (240 - SystemState.VideoState.img.get_height()) / 2
Screen.RefreshScreen(image=SystemState.VideoState.img, wx=xa, wy=ya)
| 2.296875 | 2 |
mysqlsmo/objects/table_constraints/__init__.py | DaeunYim/pgtoolsservice | 33 | 12791911 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from mysqlsmo.objects.table_constraints.check import CheckConstraint
from mysqlsmo.objects.table_constraints.unique import UniqueConstraint
from mysqlsmo.objects.table_constraints.primary_key import PrimaryKeyConstraint
from mysqlsmo.objects.table_constraints.foreign_key import ForeignKeyConstraint
__all__ = [
"CheckConstraint",
"UniqueConstraint",
"PrimaryKeyConstraint",
"ForeignKeyConstraint"
]
| 1.375 | 1 |
tf_datasets/datasets_old/cifar10.py | tmattio/tf_datasets | 5 | 12791912 | <reponame>tmattio/tf_datasets
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import hashlib
import shutil
import sys
from six.moves import cPickle
import numpy as np
import tensorflow as tf
from tf_datasets.core.download import download_http, extract_tgz
from tf_datasets.core.base_dataset import BaseDataset
from tf_datasets.core.dataset_utils import create_image_example
from tf_datasets.core.dataset_utils import create_dataset_split
from tf_datasets.core.dataset_utils import ImageCoder
slim = tf.contrib.slim
def _get_data_points_from_cifar_file(filepath):
with open(filepath, 'rb') as f:
if sys.version_info < (3,):
data = cPickle.load(f)
else:
data = cPickle.load(f, encoding='bytes')
images = data[b'data']
num_images = images.shape[0]
images = images.reshape((num_images, 3, 32, 32))
images = [np.squeeze(image).transpose((1, 2, 0)) for image in images]
labels = data[b'labels']
return [(images[i], labels[i]) for i in range(num_images)]
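
# Note (added for clarity): each CIFAR-10 python batch stores one image per row as 3072 bytes
# (1024 red, then 1024 green, then 1024 blue values), which is why the rows are reshaped to
# (N, 3, 32, 32) above and each image is transposed into height x width x channel order.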
class cifar10(BaseDataset):
image_size = 32
image_channel = 3
num_train_files = 5
class_names = [
'airplane',
'automobile',
'bird',
'cat',
'deer',
'dog',
'frog',
'horse',
'ship',
'truck',
]
public_url = 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
def __init__(self, dataset_dir):
super().__init__(dataset_dir, self.class_names, zero_based_labels=True)
self.dataset_name = 'cifar10'
self.download_dir = os.path.join(self.dataset_dir, 'download')
self._coder = ImageCoder()
def download(self):
try:
os.makedirs(self.download_dir)
except FileExistsError:
pass
output_path = os.path.join(self.download_dir, 'cifar-10-python.tar.gz')
if not os.path.exists(output_path):
download_http(self.public_url, output_path)
def extract(self):
output_path = os.path.join(self.download_dir, 'cifar-10-batches-py')
if not os.path.exists(output_path):
extract_tgz(
os.path.join(self.download_dir, 'cifar-10-python.tar.gz'),
self.download_dir
)
def _get_data_points(self):
train_datapoints = []
for i in range(self.num_train_files):
filename = os.path.join(self.download_dir,
'cifar-10-batches-py',
'data_batch_%d' % (i + 1))
train_datapoints += _get_data_points_from_cifar_file(filename)
test_filename = os.path.join(self.download_dir,
'cifar-10-batches-py',
'test_batch')
val_datapoints = _get_data_points_from_cifar_file(test_filename)
return np.stack(train_datapoints), val_datapoints
def convert(self):
splits = self._get_data_points()
split_names = ['train', 'validation']
for split, split_name in zip(splits, split_names):
create_dataset_split('cifar10',
self.dataset_dir,
split_name,
split,
self._convert_to_example)
self.write_label_file()
def cleanup(self):
shutil.rmtree(self.download_dir)
def _convert_to_example(self, data_point):
image, label = data_point
encoded = self._coder.encode_png(image)
image_format = 'png'
height, width, channels = (
self.image_size,
self.image_size,
self.image_channel
)
class_name = self.labels_to_class_names[label]
key = hashlib.sha256(encoded).hexdigest()
return create_image_example(height,
width,
channels,
key,
encoded,
image_format,
class_name,
label)
def load(self, split_name, reader=None):
# TODO(tmattio): Implement the load methods
pass
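

# Hedged usage sketch (not part of the original module): the dataset directory below is an
# assumed example path; the methods called are exactly the ones defined on the class above.
if __name__ == '__main__':
    dataset = cifar10('/tmp/cifar10')  # assumed output directory
    dataset.download()   # fetch cifar-10-python.tar.gz
    dataset.extract()    # unpack the pickled batches
    dataset.convert()    # write the train/validation splits and the label file
    dataset.cleanup()    # remove the download directory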
| 2.3125 | 2 |
nicos_demo/vrefsans/setups/nok/sc2.py | jkrueger1/nicos | 12 | 12791913 | description = "sc2 height after nok9"
group = 'lowlevel'
devices = dict(
sc2 = device('nicos.devices.generic.VirtualMotor',
description = 'sc2 Motor',
abslimits = (-150, 150),
speed = 1.,
unit = 'mm',
# refpos = -7.2946,
),
)
| 1.6875 | 2 |
gh_build.py | sonvt1710/manga-py | 337 | 12791914 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
from helpers.gh_pages import main
main()
| 1.234375 | 1 |
officials/migrations/0003_auto_20210523_0810.py | Fabrice-64/advocacy_project | 0 | 12791915 | <filename>officials/migrations/0003_auto_20210523_0810.py
# Generated by Django 3.1.5 on 2021-05-23 08:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('communities', '0002_auto_20210523_0725'),
('officials', '0002_auto_20210522_1139'),
]
operations = [
migrations.AddField(
model_name='mandatecity',
name='department',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='communities.department', verbose_name='Département'),
),
migrations.AddField(
model_name='mandatecity',
name='intercom',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='communities.intercom', verbose_name='Intercommunalité'),
),
migrations.AddField(
model_name='mandateintercom',
name='department',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='communities.department', verbose_name='Département'),
),
migrations.AlterField(
model_name='mandatecity',
name='city',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='communities.city', verbose_name='Commune'),
),
migrations.AlterField(
model_name='mandatedepartment',
name='department',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='communities.department', verbose_name='Département'),
),
migrations.AlterField(
model_name='mandateintercom',
name='intercom',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='communities.intercom', verbose_name='Intercommunalité'),
),
migrations.AlterField(
model_name='mandateregion',
name='region',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='communities.region'),
),
migrations.AlterField(
model_name='mpmandate',
name='department',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='communities.department'),
),
migrations.AlterField(
model_name='senatormandate',
name='department',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='senator_mandate', to='communities.department', verbose_name='Département'),
),
]
| 1.523438 | 2 |
tests/src/Composite_report/click_on_district_block_cluster_home.py | JalajaTR/cQube | 0 | 12791916 | from selenium.webdriver.support.select import Select
from Data.parameters import Data
from reuse_func import GetData
class click_on_home():
def __init__(self,driver):
self.driver = driver
def test_homeicon(self):
self.p = GetData()
self.driver.implicitly_wait(20)
self.driver.find_element_by_xpath(Data.hyper).click()
self.p.page_loading(self.driver)
dist = Select(self.driver.find_element_by_name("myDistrict"))
dist.select_by_index(2)
self.p.page_loading(self.driver)
block = Select(self.driver.find_element_by_name("myBlock"))
block.select_by_index(2)
self.p.page_loading(self.driver)
cluster = Select(self.driver.find_element_by_name("myCluster"))
cluster.select_by_index(2)
self.p.page_loading(self.driver)
self.driver.find_element_by_id(Data.homeicon).click()
self.p.page_loading(self.driver)
| 2.796875 | 3 |
en-us/sbs/wire/led.py | chain01/wiki | 13 | 12791917 | <reponame>chain01/wiki<gh_stars>10-100
from machine import Pin
gpio1 = Pin(Pin.GPIO1, Pin.OUT, Pin.PULL_DISABLE, 0)
import utime
i = 1
# GPIOn: integer, pin number.
# Pin mapping:
# GPIO1 – pin 22
# GPIO2 – pin 23
# GPIO3 – pin 178
# GPIO4 – pin 199
# GPIO5 – pin 204
# direction: integer.
# IN – input mode
# OUT – output mode
# pullMode: integer.
# PULL_DISABLE – floating (no pull)
# PULL_PU – pull-up
# PULL_PD – pull-down
# level: integer, pin level.
# 0 – drive the pin low
# 1 – drive the pin high
while i<100:
gpio1.write(0)
utime.sleep(1)
gpio1.write(1)
utime.sleep(1)
i += 1 | 2.890625 | 3 |
tests/unit/test_lists.py | scherroman/mugen | 119 | 12791918 | import pytest
from mugen import lists
from mugen.lists import MugenList
class Dummy(object):
foo = 1
@pytest.fixture
def mugen_list() -> MugenList:
return MugenList([Dummy(), Dummy(), Dummy(), Dummy(), Dummy(), Dummy()])
@pytest.mark.parametrize("l, expected_foo", [
(mugen_list(), [1, 1, 1, 1, 1, 1])
])
def test_lget(l, expected_foo):
assert l.lget('foo') == expected_foo
@pytest.mark.parametrize("l, expected_l", [
([1, [2, 3], [[4, 5], [6, 7]]], [1, 2, 3, 4, 5, 6, 7])
])
def test_flatten(l, expected_l):
assert lists.flatten(l) == expected_l
def test_mugen_list__operations_yield_mugen_list():
assert type(MugenList() + MugenList()) == MugenList
assert type(MugenList()[1:2]) == MugenList
| 2.75 | 3 |
utilities/mytests/fileexplorerdict.py | Saldenisov/pyconlyse | 0 | 12791919 | <filename>utilities/mytests/fileexplorerdict.py
from PyQt5.QtWidgets import QApplication, QTreeWidget, QTreeWidgetItem
from PyQt5.QtCore import QModelIndex
class ViewTree(QTreeWidget):
def __init__(self, value):
super().__init__()
self.clicked.connect(self.click)
def fill_item(item, value):
def new_item(parent, key, val=None):
child = QTreeWidgetItem([key])
if not isinstance(val, str):
fill_item(child, val)
parent.addChild(child)
child.setExpanded(True)
if not value:
return
elif isinstance(value, dict):
for key, val in sorted(value.items()):
if key not in ['dirs', 'files']:
new_item(item, str(key), val)
else:
fill_item(item, val)
elif isinstance(value, (list, tuple)):
for val in value:
text = (str(val) if not isinstance(val, (dict, list, tuple))
else '[%s]' % type(val).__name__)
new_item(item, text, val)
else:
new_item(item, str(value))
fill_item(self.invisibleRootItem(), value)
def click(self, item: QModelIndex):
from pathlib import Path
path = []
path.append(item.data())
parent = item.parent()
while parent.data():
path.append(parent.data())
parent = parent.parent()
path = path[::-1]
if __name__ == '__main__':
app = QApplication([])
v1 = {'key1': 'value1', '2': {'10': 10}, 'key3': [1, 2, 3, {1: 3, 7: 9}]}
v2 = {'dirs':
{'C:\\':
{'dirs':
{'dev': {'dirs': {},
'files': ['device_start.py', 'test.py']}},
'files': []}},
'files': []}
v3 = {'dirs':
{'C:\\': {'dirs': {},
'files': ['device_start.py', 'test.py']}},
'files': []}
window = ViewTree(v2)
window.show()
app.exec_() | 2.78125 | 3 |
src/osm2paths.py | MikeNezumi/osm2tracks | 1 | 12791920 | <filename>src/osm2paths.py
"""
This is the application's main script: it converts an .osm file into a JSON track graph with rails and can display the result.
Inputs: from console
Output: Window animation + console metrics (potentially)
"""
import pyglet
import subprocess
import json
import os
from pyglet.gl import *
from scripts.read_json import get_dict
from scripts.render_json import graph_lines, rails_lines
from scripts.write_json import replicate_json, insert_rails
def generate_paths(osm_path, half_gauge = 2, min_radius = 4.5, drive_right = True, display = True):
"""
Integrates all components of the generator, from .osm file to .JSON with rails,
optionally displays OpenGL lines of these rails in a system window
Includes: cwd_relpath()
Uses: Vendors/OsmToRoadGraph/run.convert_osm_to_roadgraph()
read_json.get_dict()
write_json.insert_rails()
render_json.rails_lines()
    Input: osm_path (str), half_gauge (float), min_radius (float), drive_right (bool), display (bool)
    Output: None, or the result of pyglet.app.run() when display is True
"""
def cwd_relpath(): # returns path from current working directory to this script (str)
file_path = __file__
working_dir = os.getcwd()
common = os.path.commonpath([file_path, working_dir])
file_path = file_path[len(common):]
working_dir = working_dir[len(common):]
prefix = "."
for char in working_dir:
if char == '/':
prefix += "/.."
return prefix + file_path[:-len(list("osm2paths.py"))]
subprocess.run(["python", cwd_relpath() + "vendors/OsmToRoadGraph/run.py", "-f", osm_path, "-n", "c"])
roads_JSON = get_dict(osm_path[:-3] + "pycgr")
with open(osm_path[:-3] + "json", "w", encoding="utf-8") as json_file:
json.dump(roads_JSON, json_file)
print("Wrote graph in " + osm_path[:-3] + "json")
insert_rails(osm_path[:-3] + "json", half_gauge, False, min_radius)
print("\nInserted tracks into " + osm_path)
# display window
if display:
WINDOW_RESOLUTION = (1920, 1020) # 1020 + window head = 1080
win = pyglet.window.Window(*WINDOW_RESOLUTION, caption = "Generated JSON")
@win.event
def on_draw():
roads = rails_lines(osm_path[:-3] + "json", (1, 1, 1), padding = (15, 15), multiplier=1.5)
for road in roads:
road.draw(GL_LINES)
return pyglet.app.run()
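
# Hedged usage sketch (not part of the original module): 'map.osm' is an assumed input file;
# the keyword values simply restate the defaults of generate_paths() above.
if __name__ == '__main__':
    generate_paths('map.osm', half_gauge=2, min_radius=4.5, drive_right=True, display=True)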
| 2.59375 | 3 |
server.py | doppioandante/covid_andamento_regionale | 5 | 12791921 | <gh_stars>1-10
# -*- coding: utf-8 -*-
import argparse
from datetime import datetime
from pathlib import Path
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State
import covid_data
external_scripts = ["https://cdn.plot.ly/plotly-locale-it-latest.js"]
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(
external_stylesheets=external_stylesheets,
external_scripts=external_scripts,
url_base_pathname='/covid-19/'
)
by_region = covid_data.get_data_by_region()
by_province = covid_data.get_data_by_province()
last_update = ''
try:
iso_timestamp = Path('update_timestamp.txt').read_text().strip()
last_update = datetime.fromisoformat(iso_timestamp)
except:
pass
app.title = 'Andamento territoriale contagi'
app.layout = html.Div(children=[
html.H1(children='Andamento territoriale COVID-19'),
html.Div([
'''
Visualizzazione degli andamenti territoriali del COVID-19.
I dati sono aggiornati automaticamente dalla
''',
html.A('fonte ufficiale della protezione civile', href='https://github.com/pcm-dpc/COVID-19'),
'.', html.Br(),
'Il codice è open source sotto licenza MIT e disponibile su ',
html.A('github', href='https://github.com/doppioandante/covid_andamento_regionale'),
'.'
]),
html.Div(f'''
Ultimo aggiornamento: {last_update}
'''),
dcc.RadioItems(
id='plot-type',
options=[{'label': i, 'value': i} for i in ['Confronto Regioni', 'Dettaglio Regione', 'Dettaglio Province per Regione']],
value='Confronto Regioni',
labelStyle={'display': 'inline-block'}
),
dcc.Dropdown(
id='plot-variable'
),
dcc.Graph(
id='trend-plot',
config=dict(
locale='it'
)
)
])
@app.callback(
Output('plot-variable', 'options'),
[Input('plot-type', 'value')])
def set_dropdown_options(plot_type):
if plot_type == 'Confronto Regioni':
return [{'label': label, 'value': key} for key, label in covid_data.fields.items()]
elif plot_type == 'Dettaglio Regione':
return [{'label': r, 'value': r} for r in covid_data.extended_regions]
elif plot_type == 'Dettaglio Province per Regione':
return [{'label': r, 'value': r} for r in covid_data.regions]
@app.callback(
Output('plot-variable', 'value'),
[Input('plot-variable', 'options')])
def set_plot_variable(available_options):
return available_options[0]['value']
@app.callback(
Output('trend-plot', 'figure'),
[Input('plot-type', 'value'),
Input('plot-variable', 'value')]
)
def update_graph(plot_type, plot_variable):
if plot_type == 'Confronto Regioni':
return {
'data': [{
'x': by_region[plot_variable][nome_regione].index,
'y': by_region[plot_variable][nome_regione].to_list(),
'name': nome_regione,
'visible': 'legendonly' if nome_regione == 'Italia' else 'true'
} for nome_regione in covid_data.extended_regions],
'layout': {
'title': covid_data.fields[plot_variable],
'showlegend': True
}
}
elif plot_type == 'Dettaglio Regione':
region = plot_variable
return {
'data': [{
'x': by_region[key][region].index,
'y': by_region[key][region].to_list(),
'name': covid_data.fields[key],
'visible': 'legendonly' if key == 'tamponi' else 'true'
} for key in covid_data.fields.keys()],
'layout': {
'title': 'Trend ' + region,
'showlegend': True
}
}
elif plot_type == 'Dettaglio Province per Regione':
region = plot_variable
key = list(covid_data.province_fields.keys())[0]
return {
'data': [{
'x': by_province[key][region][province_name].index,
'y': by_province[key][region][province_name].to_list(),
'name': province_name
} for province_name in covid_data.provinces[region]],
'layout': {
'title': 'Casi totali - ' + region,
'showlegend': True
}
}
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Run debug server')
parser.add_argument('--port', dest='port', type=int, default=8080, help='HTTP server port')
args = parser.parse_args()
app.run_server(debug=True, port=args.port)
| 2.375 | 2 |
basecategory/views.py | RevolutionTech/revolutiontech.ca | 0 | 12791922 | """
:Created: 13 July 2015
:Author: <NAME>
"""
import random
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = "home.html"
class CategoryPageView(TemplateView):
template_name = "category-page.html"
def get_context_data(self, items, **kwargs):
context = super().get_context_data(**kwargs)
context["page"] = items._meta.verbose_name_plural.lower()
item_qs = items.objects.filter(visible=True).order_by("order")
heroes = item_qs.filter(hero=True)
regular = item_qs.filter(hero=False)
context["items"] = {"heroes": heroes, "regular": regular}
context["random_hero_unit_index"] = (
random.randint(0, heroes.count() - 1) if heroes.count() > 0 else 0
)
return context
class ItemPageView(TemplateView):
template_name = "item-page.html"
def dispatch(self, request, items, slug, *args, **kwargs):
try:
self.item = items.objects.get(slug=slug)
except items.DoesNotExist:
verbose_name_plural = items._meta.verbose_name_plural.lower()
items_list = "{items}:{items}_list".format(items=verbose_name_plural)
return HttpResponseRedirect(reverse(items_list))
return super().dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["item"] = self.item
context["absolute_uri"] = self.request.build_absolute_uri()
return context
| 2.390625 | 2 |
musicbot/cogs/music.py | richteer/py-music-bot | 0 | 12791923 | from discord.ext import commands
import discord
import asyncio
import youtube_dl
import logging
import math
import random
import heapq
from urllib import request
from ..video import Video
from ..video import Setlist
# TODO: abstract FFMPEG options into their own file?
FFMPEG_BEFORE_OPTS = '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5'
"""
Command line options to pass to `ffmpeg` before the `-i`.
See https://stackoverflow.com/questions/43218292/youtubedl-read-error-with-discord-py/44490434#44490434 for more information.
Also, https://ffmpeg.org/ffmpeg-protocols.html for command line option reference.
"""
async def audio_playing(ctx):
"""Checks that audio is currently playing before continuing."""
client = ctx.guild.voice_client
if client and client.channel and client.source:
return True
else:
raise commands.CommandError("Not currently playing any audio.")
async def in_voice_channel(ctx):
"""Checks that the command sender is in the same voice channel as the bot."""
voice = ctx.author.voice
bot_voice = ctx.guild.voice_client
if voice and bot_voice and voice.channel and bot_voice.channel and voice.channel == bot_voice.channel:
return True
else:
raise commands.CommandError(
"You need to be in the channel to do that.")
async def is_audio_requester(ctx):
"""Checks that the command sender is the song requester."""
music = ctx.bot.get_cog("Music")
state = music.get_state(ctx.guild)
permissions = ctx.channel.permissions_for(ctx.author)
if permissions.administrator or state.is_requester(ctx.author):
return True
else:
raise commands.CommandError(
"You need to be the song requester to do that.")
class Music(commands.Cog):
"""Bot commands to help play music."""
def __init__(self, bot, config):
self.bot = bot
self.config = config[__name__.split(".")[
-1]] # retrieve module name, find config entry
self.states = {}
self.bot.add_listener(self.on_reaction_add, "on_reaction_add")
def get_state(self, guild):
"""Gets the state for `guild`, creating it if it does not exist."""
if guild.id in self.states:
return self.states[guild.id]
else:
self.states[guild.id] = GuildState()
return self.states[guild.id]
@commands.command(aliases=["stop"])
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def leave(self, ctx):
"""Leaves the voice channel, if currently in one."""
client = ctx.guild.voice_client
state = self.get_state(ctx.guild)
if client and client.channel:
await client.disconnect()
state.playlist = []
state.now_playing = None
else:
raise commands.CommandError("Not in a voice channel.")
@commands.command(aliases=["resume", "p"])
@commands.guild_only()
@commands.check(audio_playing)
@commands.check(in_voice_channel)
@commands.check(is_audio_requester)
async def pause(self, ctx):
"""Pauses any currently playing audio."""
client = ctx.guild.voice_client
self._pause_audio(client)
def _pause_audio(self, client):
if client.is_paused():
client.resume()
else:
client.pause()
@commands.command(aliases=["vol", "v"])
@commands.guild_only()
@commands.check(audio_playing)
@commands.check(in_voice_channel)
@commands.check(is_audio_requester)
async def volume(self, ctx, volume: int):
"""Change the volume of currently playing audio (values 0-250)."""
state = self.get_state(ctx.guild)
# make sure volume is nonnegative
if volume < 0:
volume = 0
max_vol = self.config["max_volume"]
if max_vol > -1: # check if max volume is set
# clamp volume to [0, max_vol]
if volume > max_vol:
volume = max_vol
client = ctx.guild.voice_client
state.volume = float(volume) / 100.0
client.source.volume = state.volume # update the AudioSource's volume to match
@commands.command()
@commands.guild_only()
@commands.check(audio_playing)
@commands.check(in_voice_channel)
async def skip(self, ctx):
"""Skips the currently playing song, or votes to skip it."""
state = self.get_state(ctx.guild)
client = ctx.guild.voice_client
if ctx.channel.permissions_for(
ctx.author).administrator or state.is_requester(ctx.author):
# immediately skip if requester or admin
client.stop()
elif self.config["vote_skip"]:
# vote to skip song
channel = client.channel
self._vote_skip(channel, ctx.author)
# announce vote
users_in_channel = len([
member for member in channel.members if not member.bot
]) # don't count bots
required_votes = math.ceil(
self.config["vote_skip_ratio"] * users_in_channel)
if required_votes == 0:
required_votes = 1
await ctx.send(
f"{ctx.author.mention} voted to skip ({len(state.skip_votes)}/{required_votes} votes)"
)
else:
raise commands.CommandError("Sorry, vote skipping is disabled.")
def _vote_skip(self, channel, member):
"""Register a vote for `member` to skip the song playing."""
logging.info(f"{member.name} votes to skip")
state = self.get_state(channel.guild)
state.skip_votes.add(member)
users_in_channel = len([
member for member in channel.members if not member.bot
]) # don't count bots
if (float(len(state.skip_votes)) /
users_in_channel) >= self.config["vote_skip_ratio"]:
# enough members have voted to skip, so skip the song
logging.info(f"Enough votes, skipping...")
channel.guild.voice_client.stop()
async def _set_status(self, song=None):
if song:
await self.bot.change_presence(activity=discord.Game(name=f"♫ {song.title}"))
else:
await self.bot.change_presence(activity=None)
def _play_song(self, client, state, song):
state.now_playing = song
state.skip_votes = set() # clear skip votes
asyncio.run_coroutine_threadsafe(self._set_status(song=song), self.bot.loop)
source = discord.PCMVolumeTransformer(
discord.FFmpegPCMAudio(song.stream_url, before_options=FFMPEG_BEFORE_OPTS), volume=state.volume)
def after_playing(err):
if state.autoplay:
more = state.playlist_state.target_length - len(state.playlist)
if more > 0:
                    state.playlist.extend(state.playlist_state.get_num(more))
if len(state.playlist) > 0:
next_song = state.playlist.pop(0)
self._play_song(client, state, next_song)
else:
asyncio.run_coroutine_threadsafe(client.disconnect(),
self.bot.loop)
asyncio.run_coroutine_threadsafe(self._set_status(), self.bot.loop)
client.play(source, after=after_playing)
@commands.command(aliases=["np"])
@commands.guild_only()
@commands.check(audio_playing)
async def nowplaying(self, ctx):
"""Displays information about the current song."""
state = self.get_state(ctx.guild)
message = await ctx.send("", embed=state.now_playing.get_embed())
await self._add_reaction_controls(message)
@commands.command(aliases=["q", "playlist"])
@commands.guild_only()
@commands.check(audio_playing)
async def queue(self, ctx):
"""Display the current play queue."""
state = self.get_state(ctx.guild)
text = self._queue_text(state.playlist)
if state.autoplay:
text += "\n\nAutoplay is enabled."
await ctx.send(text)
def _queue_text(self, queue):
"""Returns a block of text describing a given song queue."""
if len(queue) > 0:
message = [f"{len(queue)} songs in queue:"]
message += [
f" {index+1}. **{song.title}** (requested by **{song.requested_by.display_name}**)"
for (index, song) in enumerate(queue)
] # add individual songs
return "\n".join(message)
else:
return "The play queue is empty."
@commands.command(aliases=["cq"])
@commands.guild_only()
@commands.check(audio_playing)
@commands.has_permissions(administrator=True)
async def clearqueue(self, ctx):
"""Clears the play queue without leaving the channel."""
state = self.get_state(ctx.guild)
state.playlist = []
@commands.command(aliases=["jq"])
@commands.guild_only()
@commands.check(audio_playing)
@commands.has_permissions(administrator=True)
async def jumpqueue(self, ctx, song: int, new_index: int):
"""Moves song at an index to `new_index` in queue."""
state = self.get_state(ctx.guild) # get state for this guild
if 1 <= song <= len(state.playlist) and 1 <= new_index:
song = state.playlist.pop(song - 1) # take song at index...
state.playlist.insert(new_index - 1, song) # and insert it.
await ctx.send(self._queue_text(state.playlist))
else:
raise commands.CommandError("You must use a valid index.")
@commands.command(brief="Plays audio from <url>.")
@commands.guild_only()
async def play(self, ctx, *, url):
"""Plays audio hosted at <url> (or performs a search for <url> and plays the first result)."""
client = ctx.guild.voice_client
state = self.get_state(ctx.guild) # get the guild's state
await self._play(ctx, client, state, url)
async def _play(self, ctx, client, state, url):
if client and client.channel:
try:
video = Video(url, ctx.author)
except youtube_dl.DownloadError as e:
logging.warn(f"Error downloading video: {e}")
await ctx.send(
"There was an error downloading your video, sorry.")
return
state.playlist.append(video)
message = await ctx.send(
"Added to queue.", embed=video.get_embed())
await self._add_reaction_controls(message)
else:
if ctx.author.voice is not None and ctx.author.voice.channel is not None:
channel = ctx.author.voice.channel
try:
video = Video(url, ctx.author)
except youtube_dl.DownloadError as e:
await ctx.send(
"There was an error downloading your video, sorry.")
return
client = await channel.connect()
self._play_song(client, state, video)
message = await ctx.send("", embed=video.get_embed())
await self._add_reaction_controls(message)
logging.info(f"Now playing '{video.title}'")
else:
raise commands.CommandError(
"You need to be in a voice channel to do that.")
@commands.command(brief="Queue <url> to play after the one currently playing")
@commands.guild_only()
async def playnext(self, ctx, *, url):
client = ctx.guild.voice_client
state = self.get_state(ctx.guild)
# TODO: maybe make better "nowplaying" checking logic
if not client:
await self._play(ctx, client, state, url)
else:
try:
video = Video(url, ctx.author)
except youtube_dl.DownloadError as e:
logging.warn(f"Error downloading video: {e}")
await ctx.send(
"There was an error downloading your video, sorry.")
return
state.playlist.insert(0, video)
# TODO: probably make this admin-only, vote, etc
@commands.command(brief="Stop the current song and play <url> right now")
@commands.guild_only()
async def playnow(self, ctx, *, url):
client = ctx.guild.voice_client
state = self.get_state(ctx.guild)
if not client:
await self._play(ctx, client, state, url)
else:
try:
video = Video(url, ctx.author)
except youtube_dl.DownloadError as e:
logging.warn(f"Error downloading video: {e}")
await ctx.send(
"There was an error downloading your video, sorry.")
return
state.playlist.insert(0, video)
client.stop()
@commands.command(brief="Register the playlist at <url> to the requesting user")
@commands.guild_only()
async def setlist(self, ctx, *, url):
client = ctx.guild.voice_client
state = self.get_state(ctx.guild) # get the guild's state
if url == "remove":
del state.setlists[ctx.author.id]
await ctx.send(f"Deleted playlist for {ctx.author.display_name}")
return
state.setlists[ctx.author.id] = Setlist(url, ctx.author)
await ctx.send(f"Playlist registered for {ctx.author.display_name}")
#self._shuffle_setlists(state, client)
#await self._play(ctx, client, state, state.playlist.pop(0).video_url)
# Shuffle all user's setlists together
def _shuffle_setlists(self, state, client):
temp = []
# Grab a random 5 songs from each user's setlists
for user,setlist in state.setlists.items():
temp += list(map(lambda x: Video(x, user), random.sample(setlist, k=5)))
# Shuffle all the songs together
random.shuffle(temp)
state.playlist = temp
# TODO: rename to something better
@commands.command(brief="TODO")
@commands.guild_only()
async def build(self, ctx, *, num):
try:
num = int(num)
if num <= 0:
raise Exception("not greater than zero")
        except Exception:
            await ctx.send(f"{num} is not an integer greater than zero")
            return
        await self._build(ctx, num)
async def _build(self, ctx, num):
state = self.get_state(ctx.guild)
if not state.setlists.items():
await ctx.send("No registered setlists, ignoring")
return
client = ctx.guild.voice_client
state.playlist_state = PlaylistState(state.setlists)
state.playlist = state.playlist_state.get_num(num)
await self._play(ctx, client, state, state.playlist.pop(0).video_url)
@commands.command(brief="TODO")
@commands.guild_only()
async def extend(self, ctx, *, num):
try:
num = int(num)
if num <= 0:
raise Exception("not greater than zero")
        except Exception:
            await ctx.send(f"{num} is not an integer greater than zero")
            return
state = self.get_state(ctx.guild)
if not state.setlists.items():
await ctx.send("No registered setlists, ignoring")
return
if not state.playlist_state:
await ctx.send("Playlist mode not activated, use !build to start")
return
state.playlist += state.playlist_state.get_num(num)
@commands.command(brief="Toggle autoplay mode from registered setlists",
aliases=["a", "ap", "yolo"])
@commands.guild_only()
async def autoplay(self, ctx):
state = self.get_state(ctx.guild)
state.autoplay = not state.autoplay
await ctx.send(f"Autoplay has been {'enabled' if state.autoplay else 'disabled'}")
if state.autoplay and not state.playlist_state:
await self._build(ctx, 10)
elif not state.autoplay:
state.playlist_state = None
@commands.command(brief="Reshuffle user setlists and generate a new queue")
@commands.guild_only()
@commands.check(audio_playing)
async def reshuffle(self, ctx):
client = ctx.guild.voice_client
state = self.get_state(ctx.guild) # get the guild's state
await ctx.send("Regenerating play queue.")
self._shuffle_setlists(state, client)
await ctx.send(self._queue_text(state.playlist))
async def on_reaction_add(self, reaction, user):
"""Respods to reactions added to the bot's messages, allowing reactions to control playback."""
message = reaction.message
if user != self.bot.user and message.author == self.bot.user:
await message.remove_reaction(reaction, user)
if message.guild and message.guild.voice_client:
user_in_channel = user.voice and user.voice.channel and user.voice.channel == message.guild.voice_client.channel
permissions = message.channel.permissions_for(user)
guild = message.guild
state = self.get_state(guild)
if permissions.administrator or (
user_in_channel and state.is_requester(user)):
client = message.guild.voice_client
if reaction.emoji == "⏯":
# pause audio
self._pause_audio(client)
elif reaction.emoji == "⏭":
# skip audio
client.stop()
elif reaction.emoji == "⏮":
state.playlist.insert(
0, state.now_playing
) # insert current song at beginning of playlist
client.stop() # skip ahead
elif reaction.emoji == "⏭" and self.config["vote_skip"] and user_in_channel and message.guild.voice_client and message.guild.voice_client.channel:
# ensure that skip was pressed, that vote skipping is
# enabled, the user is in the channel, and that the bot is
# in a voice channel
voice_channel = message.guild.voice_client.channel
self._vote_skip(voice_channel, user)
# announce vote
channel = message.channel
users_in_channel = len([
member for member in voice_channel.members
if not member.bot
]) # don't count bots
required_votes = math.ceil(
self.config["vote_skip_ratio"] * users_in_channel)
if required_votes == 0:
required_votes = 1
await channel.send(
f"{user.mention} voted to skip ({len(state.skip_votes)}/{required_votes} votes)"
)
async def _add_reaction_controls(self, message):
"""Adds a 'control-panel' of reactions to a message that can be used to control the bot."""
CONTROLS = ["⏮", "⏯", "⏭"]
for control in CONTROLS:
await message.add_reaction(control)
# TODO: Holy crap absolutely don't expose this one to the public.
@commands.command()
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def debug(self, ctx, *, url):
state = self.get_state(ctx.guild) # get the guild's state
try:
ret = f"```{str(eval(url))[:1900]}```"
except Exception as e:
ret = e
await ctx.send(f"{ret}")
class GuildState:
"""Helper class managing per-guild state."""
def __init__(self):
self.volume = 1.0
self.playlist = []
self.skip_votes = set()
self.now_playing = None
# userid -> Setlist
self.setlists = {}
self.playlist_state = None
self.autoplay = False
def is_requester(self, user):
return self.now_playing.requested_by == user
class PlaylistState:
"""Helper class to manage a playlist state"""
# users: list(userid, userid...)
def __init__(self, setlists):
# list((num, userid))
self.user_playtime = [(0, u) for u in setlists.keys()]
random.shuffle(self.user_playtime) # ensure the first song picked is random
# userid -> Setlist
# copy from guild state, pops played songs
self.user_setlists = {u:v.copy() for u,v in setlists.items()}
# TODO: probably make this configurable
self.target_length = 10
# Shuffle each setlist so we can always just take from the front
for _,v in self.user_setlists.items():
random.shuffle(v)
# Get a list of <num> songs, increment play times
def get_num(self, num):
ret = []
# TODO: yeah this is a problem.
# This function stalls if you build too much, so this needs to be reconsidered.
# Maybe autoplay should be the only behavior, and it only queues out maybe 10 in advance
if num >= 20:
num = 20
for i in range(num):
ret.append(self.next())
return ret
# Return a video object for the next song to play
def next(self):
time, userid = heapq.heappop(self.user_playtime)
# TODO: refill playlist when a user's runs out
video = self.user_setlists[userid].pop(0)
video = Video(video, self.user_setlists[userid].requester)
time += video.duration
heapq.heappush(self.user_playtime, (time, userid))
return video
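
# Hedged illustration (not part of the original module) of the scheduling idea behind
# PlaylistState: a min-heap keyed on accumulated play time always picks the user with the
# least airtime so far. The user names and song durations below are made-up examples.
if __name__ == "__main__":
    import heapq
    playtime = [(0, "alice"), (0, "bob")]
    heapq.heapify(playtime)
    for duration in (240, 180, 200):  # pretend song lengths in seconds
        seconds_played, user = heapq.heappop(playtime)
        print(f"{user} plays next ({seconds_played}s of airtime so far)")
        heapq.heappush(playtime, (seconds_played + duration, user))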
| 2.734375 | 3 |
lwip_py/stack/exceptions.py | vvish/py-lwip | 2 | 12791924 | class StackException(Exception):
"""Base class for stack-related exceptions."""
class LwipError(StackException):
"""Class representing lwip error codes."""
_mapping = {
-1: 'Out of memory',
-2: 'Buffer error',
-3: 'Timeout',
-4: 'Routing problem',
-5: 'Operation in progress',
-6: 'Illegal value',
-7: 'Operation would block',
-8: 'Address in use',
-9: 'Already connecting',
-10: 'Connection already established',
-11: 'Not connected',
-12: 'Low level interface error',
-13: 'Connection aborted',
-14: 'Connection reset',
-15: 'Connection closed',
-16: 'Illegal argument',
}
def __init__(self, code):
self._code = code
def __str__(self):
return self._mapping[self._code]
def get_code(self):
return self._code
class AllocationError(StackException):
"""Class representing error in stack memory allocation."""
def __str__(self):
return 'Allocation failed'
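

# Hedged usage sketch (not part of the original module): raising and reporting one of the
# mapped lwip error codes defined above (-3 maps to 'Timeout').
if __name__ == "__main__":
    try:
        raise LwipError(-3)
    except LwipError as err:
        print(f"lwip call failed with code {err.get_code()}: {err}")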
| 3.3125 | 3 |
frontend/web/core/view_mixins.py | uktrade/trade-access-program | 1 | 12791925 | <filename>frontend/web/core/view_mixins.py
from django.urls import reverse
from django.utils.http import urlencode
from django.utils.translation import gettext_lazy as _
class BackContextMixin:
back_text = None
back_url = None
def get_back_url(self):
if hasattr(self, 'back_url_name') and getattr(self, 'object', None):
return reverse(self.back_url_name, args=(self.object.pk,))
elif hasattr(self, 'back_url_name'):
return reverse(self.back_url_name)
def get_context_data(self, **kwargs):
self.back_url = self.get_back_url()
if self.back_url:
kwargs['back'] = {
'text': self.back_text or _('Back'),
'url': self.back_url
}
return super().get_context_data(**kwargs)
class SuccessUrlObjectPkMixin:
success_url_name = None
def get_success_url(self):
if self.object.has_viewed_review_page:
return reverse('grant-applications:application-review', args=(self.object.pk,))
return reverse(self.success_url_name, args=(self.object.pk,))
class PaginationMixin:
"""
Pagination data mixin for views
"""
ellipsis = '...'
def get_extra_pagination_href_params(self):
return ''
def get_current_page(self):
try:
return int(self.request.GET.get('page', 1))
except ValueError:
return 1
def get_pagination_total_pages(self):
raise NotImplementedError('.get_pagination_total_pages() must be overridden.')
def get_basic_pagination_pages(self, current_page, total_pages, extra_href_params):
pages = []
for i in range(1, total_pages + 1):
if i == current_page:
pages.append({
'class': 'hmcts-pagination__item hmcts-pagination__item--active',
'text': current_page
})
else:
pages.append({'href': f'?page={i}&{extra_href_params}', 'page': i})
return pages
def get_dotted_pagination_pages(self, current_page, total_pages, previous_page, next_page,
extra_href_params):
pages = [
{'href': f'?page=1&{extra_href_params}', 'page': 1},
{'href': f'?page={previous_page}&{extra_href_params}', 'page': previous_page},
{
'class': 'hmcts-pagination__item hmcts-pagination__item--active',
'text': current_page
},
{'href': f'?page={next_page}&{extra_href_params}', 'page': next_page},
]
if current_page == 1:
pages.pop(1)
pages.pop(0)
pages.append({'href': f'?page=3&{extra_href_params}', 'page': 3})
pages.append({'href': f'?page=4&{extra_href_params}', 'page': 4})
if current_page == 2:
pages.pop(1)
pages.append({'href': f'?page=4&{extra_href_params}', 'page': 4})
if current_page == total_pages:
pages.pop(-1)
# Insert start "..."
if current_page > 3:
pages.insert(1, {
'class': 'hmcts-pagination__item hmcts-pagination__item--dots',
'text': self.ellipsis
})
# Insert end "..."
if current_page <= total_pages - 2:
pages.insert(len(pages), {
'class': 'hmcts-pagination__item hmcts-pagination__item--dots',
'text': self.ellipsis
})
return pages
def get_pagination(self):
"""
:return: Any of:
- 1 page --> None (no pagination)
- 6 pages or fewer --> Previous 1 2 3 4 5 6 Next
- 7 pages or more
- and current is page 1 --> Previous 1* 2 3 4 ... Next
- and current is page 2 --> Previous 1 2* 3 4 ... Next
- and current is page 3 --> Previous 1 2 3* 4 ... Next
- and current is somewhere in the middle --> Previous 1 ... 7 8* 9 ... Next
- and current is penultimate --> Previous 1 ... 7 8* 9 Next
- and current is last --> Previous 1 ... 7 8 9* Next
"""
total_pages = self.get_pagination_total_pages()
current_page = self.get_current_page()
extra_href_params = urlencode(self.get_extra_pagination_href_params())
if total_pages and total_pages > 1:
pagination = {
'previous': {
'page': max(current_page - 1, 1),
'href': f'?page={max(current_page - 1, 1)}&{extra_href_params}'
},
'next': {
'page': min(current_page + 1, total_pages),
'href': f'?page={min(current_page + 1, total_pages)}&{extra_href_params}'
}
}
if 1 < total_pages <= 6:
pagination['pages'] = self.get_basic_pagination_pages(
current_page, total_pages, extra_href_params
)
elif total_pages >= 7:
pagination['pages'] = self.get_dotted_pagination_pages(
current_page,
total_pages,
pagination['previous']['page'],
pagination['next']['page'],
extra_href_params
)
return pagination
def get_context_data(self, **kwargs):
kwargs['pagination'] = self.get_pagination()
return super().get_context_data(**kwargs)
class SaveStateMixin:
success_url_name = None
def save_state(self, form):
application = form.instance
application.state_url_name = self.success_url_name
application.save()
def form_valid(self, form, *args, **kwargs):
form_valid = super().form_valid(form, *args, **kwargs)
self.save_state(form)
return form_valid
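

# Hedged usage sketch (not part of the original module): a hypothetical view combining the
# mixins above. The model, URL name, template, and page size are assumptions, not part of
# this codebase.
#
# class GrantApplicationListView(BackContextMixin, PaginationMixin, ListView):
#     back_url_name = 'grant-applications:index'          # assumed URL name
#     back_text = 'Back to applications'
#     template_name = 'grant_applications/list.html'      # assumed template
#
#     def get_pagination_total_pages(self):
#         # PaginationMixin requires this override; 10 rows per page is an assumption
#         return math.ceil(self.get_queryset().count() / 10)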
| 2.171875 | 2 |
config.py | marijawo/jp | 0 | 12791926 | <gh_stars>0
# config.py
import os
basedir = os.path.abspath((os.path.dirname(__file__)))
class BaseConfig(object):
DEBUG = True
    SECRET_KEY = '<KEY>'
SQLALCHEMY_DATABASE_URI = 'mysql://username:password@localhost/mitdb'
# SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
# 'sqlite:///' + os.path.join(basedir, 'app.db')
SECURITY_PASSWORD_SALT = 'bcrypt'
SQLALCHEMY_TRACK_MODIFICATIONS = False
# FLASK_APP=run.py
USERNAME = 'ablie'
PASSWORD = '<PASSWORD>'
class DevelopmentConfig(BaseConfig):
DEBUG = True
class ProductionConfig(BaseConfig):
DEBUG = False
app_config = {
'development': DevelopmentConfig,
'production': ProductionConfig
}
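
# Hedged usage sketch (not part of the original module): loading one of the config classes
# above into a Flask app; the guard keeps it from running on import.
if __name__ == '__main__':
    from flask import Flask
    app = Flask(__name__)
    app.config.from_object(app_config['development'])
    print(app.config['DEBUG'])  # -> True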
| 2.125 | 2 |
crabageprediction/venv/Lib/site-packages/fontTools/ttLib/tables/_c_i_d_g.py | 13rianlucero/CrabAgePrediction | 2,705 | 12791927 | # coding: utf-8
from .otBase import BaseTTXConverter
class table__c_i_d_g(BaseTTXConverter):
"""The AAT ``cidg`` table has almost the same structure as ``gidc``,
just mapping CIDs to GlyphIDs instead of the reverse direction.
It is useful for fonts that may be used by a PDF renderer in lieu of
a font reference with a known glyph collection but no subsetted
glyphs. For instance, a PDF can say “please use a font conforming
to Adobe-Japan-1”; the ``cidg`` mapping is necessary if the font is,
say, a TrueType font. ``gidc`` is lossy for this purpose and is
obsoleted by ``cidg``.
For example, the first font in ``/System/Library/Fonts/PingFang.ttc``
(which Apple ships pre-installed on MacOS 10.12.6) has a ``cidg`` table.
"""
pass
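

# Hedged usage sketch (not part of the original module): inspecting the 'cidg' table of the
# PingFang font mentioned in the docstring; the path assumes the macOS install described above.
#
# from fontTools.ttLib import TTFont
# font = TTFont("/System/Library/Fonts/PingFang.ttc", fontNumber=0)
# cidg = font["cidg"]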
| 2.46875 | 2 |
python-pass.py | Ri-dha/GIZ-pass-python | 0 | 12791928 | <gh_stars>0
class Solution:
def longestPalindrome(self, s: str) -> str:
# function to find the longest palindrome from the string
def pointersfun(left,right):
#we start from the middle of the string then go left and right to find out the longest Palindrome
while (left >= 0 and right < len(s) and s[left]==s[right]):
left-= 1
right+= 1
return s[left+1:right]
# we store the result of the longestPalindrome
palindrome = ""
# for loop for two cases (odd number of characters string and even number of characters string)
for i in range(len(s)):
            # odd-length palindrome: expand around a single-character centre
initPalndrome = pointersfun(i,i)
if len(initPalndrome) > len(palindrome): palindrome = initPalndrome
            # even-length palindrome: expand around a two-character centre
initPalndrome = pointersfun(i,i+1)
# check which palindrome is the longest then store it in the result variable
if len(initPalndrome) > len(palindrome): palindrome = initPalndrome
        return palindrome

s = input('enter a string: ')
print(Solution().longestPalindrome(s)) | 3.875 | 4 |
heightmaptilemaker/mesh/greiner_hormann_clipper.py | ulrichji/HeightmapTileMaker | 0 | 12791929 | <reponame>ulrichji/HeightmapTileMaker
from .polygon_boolean_operator import PolygonBooleanOperator
from enum import Enum
from math import sqrt
class IntersectionType(Enum):
NOT_AN_INTERSECTION=0
UNKNOWN=1
ENTRY=2
EXIT=3
def isIntersection(self):
return self == self.UNKNOWN or self == self.ENTRY or self == self.EXIT
def getInverted(self):
if(self == self.ENTRY):
return self.EXIT
elif(self == self.EXIT):
return self.ENTRY
        return self
class TraversalDirection(Enum):
FORWARD=1
BACKWARDS=2
class PolygonPoint:
def __init__(self, pos, is_intersection=False):
self.pos = pos
self.next = None
self.prev = None
self.other_polygon_link = None
self.intersection_type = IntersectionType.UNKNOWN if is_intersection else IntersectionType.NOT_AN_INTERSECTION
self.processed = False
def setNext(self, next_point):
self.next = next_point
def setPrev(self, prev_point):
self.prev = prev_point
def getNext(self, traversal_direction):
if traversal_direction == TraversalDirection.FORWARD:
return self.next
elif traversal_direction == TraversalDirection.BACKWARDS:
return self.prev
def linkToPoint(self, other_point):
self.other_polygon_link = other_point
def __str__(self):
return '(' + ','.join(str(p) for p in self.pos) + ')'
class PolygonEdge:
def __init__(self, from_point, to_point):
self.from_point = from_point
self.to_point = to_point
self.intersections = []
def insertIntersectionAt(self, intersection_point, t):
self.intersections.append((intersection_point, t))
def getIntersectionsAsPoints(self):
self.intersections.sort(key=lambda intersection: intersection[1])
return [intersection[0] for intersection in self.intersections]
# Source: https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
# Title: Line–line intersection
# Author: Wikipedia
# Last edit date: 16 August 2019
def computeIntersection(self, other_edge):
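        # Parametric segment-segment intersection: t is the intersection's
        # position along this edge and u its position along other_edge, both
        # expected in (0, 1). The 1e-9 tolerances below reject near-parallel
        # edges and touches at the segment endpoints.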
x1,y1 = (self.from_point.pos[0], self.from_point.pos[1])
x2,y2 = (self.to_point.pos[0], self.to_point.pos[1])
x3,y3 = (other_edge.from_point.pos[0], other_edge.from_point.pos[1])
x4,y4 = (other_edge.to_point.pos[0], other_edge.to_point.pos[1])
t_dividend = ((x1 - x3) * (y3 - y4)) - ((y1 - y3) * (x3 - x4))
u_dividend = -(((x1 - x2) * (y1 - y3)) - ((y1 - y2) * (x1 - x3)))
tu_divisor = ((y3 - y4) * (x1 - x2)) - ((y1 - y2) * (x3 - x4))
if abs(tu_divisor) <= 1e-9:
return None
t = t_dividend / tu_divisor
u = u_dividend / tu_divisor
if t <= 1e-9 or t > 1 - 1e-9 or u < 1e-9 or u >= 1 - 1e-9:
return None
intersection_point = (x1 + (t * (x2 - x1)), y1 + (t * (y2 - y1)), 0)
return (intersection_point, t)
# Source: https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
# Title: Line–line intersection
# Author: Wikipedia
# Last edit date: 16 August 2019
def computeIntersectionForPointInPolygon(self, other_edge):
x1,y1 = (self.from_point.pos[0], self.from_point.pos[1])
x2,y2 = (self.to_point.pos[0], self.to_point.pos[1])
x3,y3 = (other_edge.from_point.pos[0], other_edge.from_point.pos[1])
x4,y4 = (other_edge.to_point.pos[0], other_edge.to_point.pos[1])
t_dividend = ((x1 - x3) * (y3 - y4)) - ((y1 - y3) * (x3 - x4))
u_dividend = -(((x1 - x2) * (y1 - y3)) - ((y1 - y2) * (x1 - x3)))
tu_divisor = ((y3 - y4) * (x1 - x2)) - ((y1 - y2) * (x3 - x4))
if abs(tu_divisor) <= 1e-9:
return None
t = t_dividend / tu_divisor
u = u_dividend / tu_divisor
is_upward = y3 < y4
is_downward = y3 > y4
if is_upward:
if t <= 1e-9 or t >= 1 - 1e-9 or u <= -1e-9 or u >= 1 - 1e-9:
return None
elif is_downward:
if t < 1e-9 or t >= 1 - 1e-9 or u <= 1e-9 or u >= 1 + 1e-9:
return None
else:
return None
intersection_point = (x1 + (t * (x2 - x1)), y1 + (t * (y2 - y1)), 0)
return (intersection_point, t)
    def isPointLeft(self, point):
P0 = self.from_point.pos
P1 = self.to_point.pos
P2 = point
return ((P1[0] - P0[0]) * (P2[1] - P0[1]) - (P2[0] - P0[0]) * (P1[1] - P0[1]))
def intersectsPoint(self, point):
x0,y0 = (point.pos[0], point.pos[1])
x1,y1 = (self.from_point.pos[0], self.from_point.pos[1])
x2,y2 = (self.to_point.pos[0], self.to_point.pos[1])
        # Perpendicular distance from the point to the infinite line through the edge.
        denominator = sqrt((y2 - y1)**2 + (x2 - x1)**2)
        if denominator <= 1e-9:
            return False
        numerator = (y2 - y1)*x0 - (x2 - x1)*y0 + x2*y1 - y2*x1
        distance = abs(numerator / denominator)
if distance < 1e-9:
return True
return False
class GreinerHormannPolygon:
def __init__(self, points_list=[]):
self.points = [PolygonPoint(point) for point in points_list]
self.edges = self.__getEdgesFromPoints()
self.__setupPointsOrder()
def remakeFromEdges(self, edge_list):
edge_points = [[edge.from_point, *edge.getIntersectionsAsPoints()] for edge in edge_list]
#print("Edge points:", ','.join('(' + ','.join(str(pt) for pt in edge_point) + ')' for edge_point in edge_points))
self.points = [point for edge in edge_points for point in edge]
#print('Points:', ','.join(str(pt) for pt in self.points))
#print("Edges first: ", ', '.join('(' + ', '.join((str(edge.from_point), str(edge.to_point))) + ')' for edge in self.edges))
self.edges = self.__getEdgesFromPoints()
#print("Edges after: ", ', '.join('(' + ', '.join((str(edge.from_point), str(edge.to_point))) + ')' for edge in self.edges))
self.__setupPointsOrder()
def isPointInside(self, point):
#cn = 0; # the crossing number counter
## loop through all edges of the polygon
#for i in range(len(self.points)):
# current_vertex = self.points[i]
# next_vertex = self.points[(i + 1) % len(self.points)]
# if (((current_vertex.pos[1] <= point[1]) and (next_vertex.pos[1] > point[1])) or ((current_vertex.pos[1] > point[1]) and (next_vertex.pos[1] <= point[1]))):
# vt = (point[1] - current_vertex.pos[1]) / (next_vertex.pos[1] - current_vertex.pos[1])
# if point[1] < current_vertex.pos[1] + vt * (next_vertex.pos[0] - current_vertex.pos[0]):
# cn += 1
#return cn % 2 == 1
ray_from_point = PolygonPoint((point[0], point[1]))
# This has length of polygon width to maximize the floating point resolution
max_x_point = max(p.pos[0] for p in self.points)
ray_to_point = PolygonPoint((max_x_point + 1, point[1]))
ray = PolygonEdge(ray_from_point, ray_to_point)
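        # Even-odd rule: count how many polygon edges the horizontal ray
        # crosses; an odd number of crossings means the point lies inside.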
ray_intersections = (ray.computeIntersectionForPointInPolygon(edge) for edge in self.edges)
ray_intersections_count = sum(1 for intersection in ray_intersections if intersection)
is_point_inside = ray_intersections_count % 2 == 1
return is_point_inside
#vertex_intersections = [ray.intersectsPoint(p) for p in self.points]
#edge_intersection_count = sum(0 if intersection is None else 1 for intersection in ray_intersections)
#vertex_intersection_count = sum(a for a in vertex_intersections)
##if edge_intersection_count % 2 == 1 or vertex_intersection_count > 0:
## print()
## print(edge_intersection_count, vertex_intersection_count)
## print()
#intersection_count = edge_intersection_count + vertex_intersection_count
##print('Points: ', ','.join(str(pt) for pt in self.points))
##print('INts:', vertex_intersections)
#is_point_inside = intersection_count % 2 == 1
##print("Edge cnt:", len(self.edges), ", edgeint:", edge_intersection_count, ", pointint:", vertex_intersection_count)
##print("Edges: ", ', '.join(', '.join((str(edge.from_point), str(edge.to_point))) for edge in self.edges))
##print("Is point inside:", point, is_point_inside, ", ray:", ray.from_point, ray.to_point, intersection_count)
#if abs(point[0] - 0.16535) < 0.00005 and abs(point[1] - 0.20472) < 0.00005:
# print(','.join(str(pt) for pt in self.points))
# print(is_point_inside, edge_intersection_count, vertex_intersection_count, len(self.points), ray.from_point, ray.to_point, " ")
#return is_point_inside
@staticmethod
def linkPolygons(first_polygon, second_polygon):
GreinerHormannPolygon.__computeEdgeIntersections(first_polygon, second_polygon)
first_polygon.__updateIntersectionTypes(second_polygon)
second_polygon.__updateIntersectionTypes(first_polygon)
def getPolygonPointsFromBoolean(self, other_polygon, boolean_operator):
return self.__createPolygonFromIntersections(other_polygon, boolean_operator)
def __getEdgesFromPoints(self):
#print('Points:', ', '.join(str(point) for point in self.points))
return [PolygonEdge(self.points[i], self.points[(i+1) % len(self.points)]) for i in range(len(self.points))]
def __setupPointsOrder(self):
for i in range(len(self.points)):
point = self.points[i]
next_point = self.points[(i+1) % len(self.points)]
prev_point = self.points[i-1]
point.setNext(next_point)
point.setPrev(prev_point)
def __computePolygonWidth(self):
return max(point.pos[0] for point in self.points) - min(point.pos[0] for point in self.points)
@staticmethod
def __computeEdgeIntersections(first_polygon, second_polygon):
intersections = []
for first_edge in first_polygon.edges:
for second_edge in second_polygon.edges:
first_intersection = first_edge.computeIntersection(second_edge)
second_intersection = second_edge.computeIntersection(first_edge)
if first_intersection is not None and second_intersection is not None:
first_intersection_pos, t = first_intersection
second_intersection_pos, u = second_intersection
first_intersection_point = PolygonPoint(first_intersection_pos, is_intersection=True)
second_intersection_point = PolygonPoint(second_intersection_pos, is_intersection=True)
first_intersection_point.linkToPoint(second_intersection_point)
second_intersection_point.linkToPoint(first_intersection_point)
first_edge.insertIntersectionAt(first_intersection_point, t)
second_edge.insertIntersectionAt(second_intersection_point, u)
first_polygon.remakeFromEdges(first_polygon.edges)
second_polygon.remakeFromEdges(second_polygon.edges)
def __updateIntersectionTypes(self, other_polygon):
if len(self.points) <= 0:
return None
current_intersection_type = self.__getFirstIntersectionType(other_polygon)
for point in self.points:
if point.intersection_type.isIntersection():
point.intersection_type = current_intersection_type
current_intersection_type = current_intersection_type.getInverted()
def __getFirstIntersectionType(self, other_polygon):
is_inside_other = other_polygon.isPointInside(self.points[0].pos)
return IntersectionType.EXIT if is_inside_other else IntersectionType.ENTRY
def __createPolygonFromIntersections(self, other_polygon, boolean_operator):
intersections = [point for point in self.points if point.intersection_type.isIntersection()]
if len(intersections) <= 0:
return self.__getNonIntersectingPolygon(other_polygon, boolean_operator)
return self.__tracePolygonPerimetersFromIntersections(intersections, boolean_operator)
def __getNonIntersectingPolygon(self, other_polygon, boolean_operator):
# Currently the only supported boolean operator
assert(boolean_operator == PolygonBooleanOperator.INTERSECTION)
this_is_inside_other = other_polygon.isPointInside(self.points[0].pos)
other_is_inside_this = self.isPointInside(other_polygon.points[0].pos)
if not this_is_inside_other and not other_is_inside_this:
return GreinerHormannPolygon()
if this_is_inside_other:
return self
else:
return other_polygon
def __tracePolygonPerimetersFromIntersections(self, intersections, boolean_operator):
# Currently the only supported boolean operator
assert(boolean_operator == PolygonBooleanOperator.INTERSECTION)
result_polygon_points = []
current_point = intersections[0]
traversal_direction = TraversalDirection.FORWARD
_cnt = 0
while(any(not intersection.processed for intersection in intersections) or current_point != intersections[0]):
result_polygon_points.append(current_point.pos)
if current_point.intersection_type.isIntersection():
current_point.processed = True
current_point = current_point.other_polygon_link
current_point.processed = True
traversal_direction = TraversalDirection.FORWARD if current_point.intersection_type == IntersectionType.ENTRY else TraversalDirection.BACKWARDS
current_point = current_point.getNext(traversal_direction)
_cnt += 1
if(_cnt > 1000):
print("Fail")
return GreinerHormannPolygon(result_polygon_points)
return GreinerHormannPolygon(result_polygon_points)
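# Minimal usage sketch (illustrative only). The square coordinates below are
# made-up example data, and only PolygonBooleanOperator.INTERSECTION is
# supported by the code above.
def _example_intersection():
    subject = GreinerHormannPolygon([(0, 0), (2, 0), (2, 2), (0, 2)])
    clip = GreinerHormannPolygon([(1, 1), (3, 1), (3, 3), (1, 3)])
    GreinerHormannPolygon.linkPolygons(subject, clip)
    result = subject.getPolygonPointsFromBoolean(clip, PolygonBooleanOperator.INTERSECTION)
    print([point.pos for point in result.points])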
| 3.15625 | 3 |
app/routes/models/form_model.py | mampilly/fileaccess | 0 | 12791930 | <filename>app/routes/models/form_model.py<gh_stars>0
from pydantic import BaseModel
from fastapi.param_functions import Body
from typing import Optional
class FormModel(BaseModel):
    first_name: Optional[str] = None
second_name: str
| 1.859375 | 2 |
python_meteorologist/forecast/__init__.py | AlertingAvian/python-meteorologist | 0 | 12791931 | from .forecast import Forecaster | 1.117188 | 1 |
NintendoOne/api/noe.py | Hkakashi/nintendo-one | 2 | 12791932 | <filename>NintendoOne/api/noe.py
from typing import Iterator, Optional
import requests, json
SEARCH_URL = "http://search.nintendo-europe.com/en/select"
def _search(
query: str = "*",
nsuid: str = None,
) -> Iterator[dict]:
'''
    Make a query to the Nintendo of Europe (NOE) search API.
    Returns an iterator of result dicts.
useful fields: dates_released_dts[0],excerpt:str,game_categories_txt[],
language_availability[],nsuid_txt[0],
price_discount_percentage_f:float,price_has_discount_b:bool,
price_lowest_f:float,price_regular_f:float,
product_code_txt[0],title:str,url:str,popularity:int,
image_url:str,image_url_h2x1_s:str
'''
rows = 200
params = {
"fq": "type:GAME AND system_type:nintendoswitch", # filter
"q": query, # filter
"rows": rows, # no of results per response
"sort": "title asc", # sort
"start": -rows, # offset
"wt": "json", # format
}
if nsuid:
params["fq"] += f' AND nsuid_txt:"{nsuid}"'
while True:
params["start"] += rows
response = requests.get(url=SEARCH_URL, params=params)
if response.status_code != 200:
break
json = response.json()['response'].get('docs', [])
if not len(json):
break
for data in json:
yield data
if __name__ == "__main__":
res = _search()
for item in res:
print(json.dumps(item, indent=4, sort_keys=True))
input("continue?")
| 3.390625 | 3 |
backend/api/migrations/0009_merge_20180723_0852.py | pietervdvn/healthdata | 6 | 12791933 | # Generated by Django 2.0.7 on 2018-07-23 08:52
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0008_merge_20180723_0613'),
('api', '0006_auto_20180719_2243'),
]
operations = [
]
| 1.351563 | 1 |
utils.py | AnyByte/ErgoKB | 0 | 12791934 | import matplotlib.pyplot as plt
from multiprocessing import Pool, Manager, cpu_count
from functools import partial
import numpy as np
from bs4 import BeautifulSoup
from colour import Color
import copy
import math
import re
import time
from consts import QWERTY, THUMBS, COORDS
CACHE = {}
def cleanhtml(raw_html):
soup = BeautifulSoup(raw_html, "lxml")
spans = soup.find_all('span')
lowercase = ''.join([i.text.replace('Пользователь 2: ', '').replace('Пользователь 1: ', '') for i in spans]).lower()
return re.sub('[^а-я]+', '', lowercase)
def generate_strokes(sample, QWERTY):
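    # A "stroke" is a maximal run of consecutive characters typed by the same
    # finger zone (as defined by THUMBS); a stroke and its reverse are counted
    # together, keyed by the stroke text.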
zones = {}
for idr, row in enumerate(QWERTY):
for idk, key in enumerate(row):
zones[key] = THUMBS[idr][idk]
strokes = {}
stroke = ''
for idx, char in enumerate(sample):
current_zone = zones[char]
stroke += char
if idx + 1 < len(sample) and zones[sample[idx + 1]] != current_zone:
r_stroke = stroke[::-1]
if stroke in strokes:
strokes[stroke]["count"] += 1
elif r_stroke in strokes:
strokes[r_stroke]["count"] += 1
else:
strokes[stroke] = {"zone": current_zone, "count": 1}
stroke = ''
if idx + 1 == len(sample):
r_stroke = stroke[::-1]
if stroke in strokes:
strokes[stroke]["count"] += 1
elif r_stroke in strokes:
strokes[r_stroke]["count"] += 1
else:
strokes[stroke] = {"zone": current_zone, "count": 1}
return strokes
def calculateDistance(x1,y1,x2,y2):
global CACHE
if f"{x1}{y1}{x2}{y2}" in CACHE:
return CACHE[f"{x1}{y1}{x2}{y2}"]
if f"{x2}{y2}{x1}{y1}" in CACHE:
return CACHE[f"{x2}{y2}{x1}{y1}"]
dist = math.sqrt((x2 - x1)**2 + (y2 - y1)**2)
CACHE[f"{x1}{y1}{x2}{y2}"] = dist
return dist
def finger_heatmap(finger_distances):
return [[
finger_distances['ЛМ'],
finger_distances['ЛБ'],
finger_distances['ЛС'],
finger_distances['ЛУ'],
finger_distances['ПУ'],
finger_distances['ПС'],
finger_distances['ПБ'],
finger_distances['ПМ']
]]
def shift_row(c, row_num, value):
new_coords = copy.deepcopy(c)
for idx, cell in enumerate(new_coords[row_num]):
new_coords[row_num][idx][0] = new_coords[row_num][idx][0] + value
return new_coords
def shift_col(c, col_num, value):
new_coords = copy.deepcopy(c)
for idx, row in enumerate(new_coords):
new_coords[idx][col_num][1] = new_coords[idx][col_num][1] + value
return new_coords
def get_mapper(c, k):
text_mapper = {
item: {
'x': c[idx][idy][0],
'y': c[idx][idy][1],
'thumb': THUMBS[idx][idy]
} for idx, sublist in enumerate(k) for idy, item in enumerate(sublist)
}
# print(json.dumps(text_mapper, indent=2, ensure_ascii=False))
return text_mapper
def draw_keyboard(coords, QWERTY):
x = [i[0] for i in [item for sublist in coords for item in sublist]]
y = [i[1] for i in [item for sublist in coords for item in sublist]]
n = [item for sublist in QWERTY for item in sublist]
fig, ax = plt.subplots()
ax.scatter(x, y, marker=",", s=620, color=(0.5, 0.5, 0.5))
    ax.set_title('Key coordinates', fontsize=10)
ax.set_aspect('equal', 'box')
# Or if you want different settings for the grids:
major_ticks = np.arange(-20, 210, 20)
minor_ticks = np.arange(-20, 210, 5)
ax.set_xticks(major_ticks)
ax.set_xticks(minor_ticks, minor=True)
ax.set_yticks(major_ticks)
ax.set_yticks(minor_ticks, minor=True)
# And a corresponding grid
ax.grid(which='both')
# Or if you want different settings for the grids:
ax.grid(which='minor', alpha=0.2)
ax.grid(which='major', alpha=0.5)
ax.axis([-12, 210, -12, 48])
for i, txt in enumerate(n):
ax.annotate(txt, (x[i], y[i]), color=(1, 1, 1))
def get_keyboard(coords, QWERTY):
x = [i[0] for i in [item for sublist in coords for item in sublist]]
y = [i[1] for i in [item for sublist in coords for item in sublist]]
n = [item for sublist in QWERTY for item in sublist]
fig, ax = plt.subplots()
ax.scatter(x, y, marker=",", s=620, color=(0.5, 0.5, 0.5))
    ax.set_title('Key coordinates', fontsize=10)
ax.set_aspect('equal', 'box')
# Or if you want different settings for the grids:
major_ticks = np.arange(-20, 210, 20)
minor_ticks = np.arange(-20, 210, 5)
ax.set_xticks(major_ticks)
ax.set_xticks(minor_ticks, minor=True)
ax.set_yticks(major_ticks)
ax.set_yticks(minor_ticks, minor=True)
# And a corresponding grid
ax.grid(which='both')
# Or if you want different settings for the grids:
ax.grid(which='minor', alpha=0.2)
ax.grid(which='major', alpha=0.5)
ax.axis([-12, 210, -12, 48])
for i, txt in enumerate(n):
ax.annotate(txt, (x[i], y[i]), color=(1, 1, 1))
return ax
def count_presses(text):
press_count = {}
for idx, char in enumerate(text):
if char not in press_count:
press_count[char] = 1
else:
press_count[char] += 1
return press_count
def press_heatmap(presses_counts, QWERTY):
return [[presses_counts[item] if item in presses_counts else 0 for item in row] for row in QWERTY]
def zone_distances(zone, press_count):
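    # For every key assigned to the given finger zone, compute its distance
    # from that finger's home-row key and pair it with the key's press count,
    # sorted by press count (most pressed first).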
keys = []
default_position = {
'ЛМ': COORDS[1][0],
'ЛБ': COORDS[1][1],
'ЛС': COORDS[1][2],
'ЛУ': COORDS[1][3],
'ПУ': COORDS[1][6],
'ПС': COORDS[1][7],
'ПБ': COORDS[1][8],
'ПМ': COORDS[1][9],
}
for idr, row in enumerate(QWERTY):
for idk, key in enumerate(row):
if THUMBS[idr][idk] == zone and len(QWERTY[idr][idk]) > 0:
x1, y1 = default_position[zone][0], default_position[zone][1]
x2, y2 = COORDS[idr][idk][0], COORDS[idr][idk][1]
distance = calculateDistance(x1, y1, x2, y2)
keys.append({
"symbol": QWERTY[idr][idk],
"distance": distance,
"press_count": press_count[QWERTY[idr][idk]]
})
return sorted(keys, key=lambda i: i["press_count"], reverse=True)
def distance_deltas(distance, distance_1):
sum = 0
for k, v in distance.items():
delta = v - distance_1[k]
sum += delta
print(f"{k}: {distance_1[k] / 1000:.2f} м - меньше на {delta / 1000:.2f} м ({(1 - (distance_1[k] / v)) * 100:.2f}%)")
print(f"\nОбщая дистанция уменшилась на {sum / 1000:.2f} м")
def count_stroke_distance(default_position, default_keys, mapper, stroke):
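    # Travel distance for one stroke dict ({"stroke", "zone", "count"}):
    # home key -> first character, between consecutive characters, and last
    # character -> home key; returns the per-pair distances and their total.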
text = stroke["stroke"]
zone = stroke["zone"]
count = stroke["count"]
pairs = []
total_distance = 0
if len(text) <= 1:
return
for idx, char in enumerate(text):
if idx + 1 == len(text):
char_1 = char
x1 = default_position[mapper[char]['thumb']][0]
y1 = default_position[mapper[char]['thumb']][1]
char_2 = default_keys[zone]
x2 = mapper[char]['x']
y2 = mapper[char]['y']
distance = calculateDistance(x1, y1, x2, y2)
total_distance += distance
pair = f"{char_1}{char_2}"
pairs.append({
"pair": pair,
"distance": distance
})
if idx == 0:
char_1 = default_keys[zone]
x1 = default_position[mapper[char]['thumb']][0]
y1 = default_position[mapper[char]['thumb']][1]
char_2 = char
x2 = mapper[char]['x']
y2 = mapper[char]['y']
distance = calculateDistance(x1, y1, x2, y2)
total_distance += distance
pair = f"{char_1}{char_2}"
pairs.append({
"pair": pair,
"distance": distance
})
else:
char_1 = text[idx - 1]
x1 = mapper[char_1]['x']
y1 = mapper[char_1]['y']
char_2 = char
x2 = mapper[char_2]['x']
y2 = mapper[char_2]['y']
distance = calculateDistance(x1, y1, x2, y2)
total_distance += distance
pair = f"{char_1}{char_2}"
pairs.append({
"pair": pair,
"distance": distance
})
return {
"pairs": pairs,
"count": count,
"total_distance": total_distance,
"zone": zone
}
def draw_stroke_lines(pairs, COORDS, QWERTY, row_count, max_value, max_line_width):
ax = get_keyboard(COORDS, QWERTY)
mapper = get_mapper(COORDS, QWERTY)
red = Color("green")
colors = list(red.range_to(Color("red"),100))
for pair, distance in pairs.items():
stroke_a, stroke_b = pair[0], pair[1]
x1 = mapper[stroke_a]['x']
y1 = mapper[stroke_a]['y']
x2 = mapper[stroke_b]['x']
y2 = mapper[stroke_b]['y']
linewidth = (max_line_width / max_value) * distance
color_hue = (100 / max_value) * distance
color_hue = int(round(color_hue))
r, g, b = colors[color_hue - 1].rgb
ax.plot([x1,x2],[y1,y2], linewidth=linewidth, color=(r, g, b, 1))
def process_strokes(strokes, coords, qwerty):
distances = {
'ЛМ': 0,
'ЛБ': 0,
'ЛС': 0,
'ЛУ': 0,
'ПУ': 0,
'ПС': 0,
'ПБ': 0,
'ПМ': 0,
}
default_keys = {
'ЛМ': qwerty[1][0],
'ЛБ': qwerty[1][1],
'ЛС': qwerty[1][2],
'ЛУ': qwerty[1][3],
'ПУ': qwerty[1][6],
'ПС': qwerty[1][7],
'ПБ': qwerty[1][8],
'ПМ': qwerty[1][9],
}
default_position = {
'ЛМ': coords[1][0],
'ЛБ': coords[1][1],
'ЛС': coords[1][2],
'ЛУ': coords[1][3],
'ПУ': coords[1][6],
'ПС': coords[1][7],
'ПБ': coords[1][8],
'ПМ': coords[1][9],
}
start_time = time.time()
mapper = get_mapper(coords, qwerty)
pairs = {}
num_workers = cpu_count()
p = Pool(num_workers)
manager = Manager()
func = partial(count_stroke_distance, default_position, default_keys, mapper)
results = p.map_async(func, strokes).get()
p.close()
p.join()
for stroke_distance in results:
if stroke_distance is None:
continue
# stroke_distance = count_stroke_distance(COORDS, QWERTY, THUMBS, default_position, default_keys, stroke)
distances[stroke_distance["zone"]] += stroke_distance["total_distance"] * stroke_distance["count"]
for pair in stroke_distance["pairs"]:
if pair["pair"] in pairs:
pairs[pair["pair"]] += pair["distance"] * stroke_distance["count"]
elif f'{pair["pair"][1]}{pair["pair"][0]}' in pairs:
pairs[f'{pair["pair"][1]}{pair["pair"][0]}'] += pair["distance"] * stroke_distance["count"]
else:
pairs[pair["pair"]] = pair["distance"] * stroke_distance["count"]
print("--- %s seconds ---" % (time.time() - start_time))
return {
"pairs": pairs,
"distances": distances
} | 2.4375 | 2 |
Project/source/data_retrieval/remoteGet.py | EricPapagiannis/CSCC01-team10-Project | 1 | 12791935 | class remoteGet:
def __init__(self, link, saveTo):
self._link = link
self._saveTo = saveTo
def getFile(self):
'''(NoneType) -> NoneType
Retrieves file from set url to set local destination
Raises CannotRetrieveFileException
Returns NoneType
'''
        import urllib.request
try:
urllib.request.urlretrieve(self._link, self._saveTo)
except:
raise CannotRetrieveFileException(self._link, self._saveTo)
def isNew(self):
'''(NoneType) -> bool
returns true if file at remote URL is different than file located at local destination
else returns false
Raises CannotRetrieveFileException
Returns bool
'''
import hashlib
        import urllib.request
import os
try:
urllib.request.urlretrieve(self._link, self._saveTo + ".TMP")
except:
raise CannotRetrieveFileException(self._link, self._saveTo)
hashgen = hashlib.md5()
with open(self._saveTo + ".TMP", 'rb') as afile:
buf = afile.read()
hashgen.update(buf)
csumNew = hashgen.hexdigest()
hashgen2 = hashlib.md5()
with open(self._saveTo, 'rb') as afile:
buf2 = afile.read()
hashgen2.update(buf2)
csumOriginal = hashgen2.hexdigest()
os.remove(self._saveTo + ".TMP")
return not (csumNew == csumOriginal)
class CannotRetrieveFileException(Exception):
pass
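# Minimal usage sketch (illustrative only; URL and local path are placeholders):
#   fetcher = remoteGet("https://example.com/data.csv", "/tmp/data.csv")
#   fetcher.getFile()        # initial download
#   if fetcher.isNew():      # remote content differs from the local copy?
#       fetcher.getFile()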
| 2.796875 | 3 |
agent/input/sysinfo.py | tcarlisi/dxagent | 3 | 12791936 | """
sysinfo.py
   obtain system information
@author: K.Edeline
"""
import platform
class SysInfo():
"""
extend me
"""
def __init__(self):
self.system = platform.system()
self.node = platform.node()
self.release = platform.release()
self.version = platform.version()
self.machine = platform.machine()
self.processor = platform.processor()
self.platform = platform.platform()
self.architecture = platform.architecture()
def __str__(self):
return "node: {} system: {} release: {} arch: {}".format(self.node,
self.system, self.release, self.processor)
| 2.890625 | 3 |
tclothes/clothes/models/interactions.py | EstebanMongui/tclothes | 1 | 12791937 | """Interactions model"""
# Django
from django.db import models
# Utils
from tclothes.utils.baseModels import TClothesModel
class InteractionsModel(TClothesModel):
"""Interactions interactions model."""
clothe = models.ForeignKey(
'clothes.ClothesModel',
on_delete=models.CASCADE
)
user = models.ForeignKey(
'users.User',
on_delete=models.CASCADE,
)
INTERACTIVE_VALUES = [
('LIKE', 'like'),
('SUPERLIKE', 'superlike'),
('DISLIKE', 'dislike')
]
value = models.CharField(
'Interaction type',
max_length=9,
choices=INTERACTIVE_VALUES,
)
def __str__(self):
"""Return clothe, user, and interactive values"""
return f'clothe: {self.clothe} | user: {self.user} | value: {self.value}'
| 3.015625 | 3 |
_broken/old/matching_functions.py | SU-ECE-17-7/ibeis | 0 | 12791938 | <filename>_broken/old/matching_functions.py
# -*- coding: utf-8 -*-
"""
#=================
# matching_functions:
# Module Concepts
#=================
PREFIXES:
qaid2_XXX - prefix mapping query chip index to
qfx2_XXX - prefix mapping query chip feature index to
TUPLES:
* nns - a (qfx2_dx, qfx2_dist) tuple
* nnfilt - a (qfx2_fs, qfx2_valid) tuple
SCALARS
* dx - the index into the database of features
* dist - the distance to a corresponding feature
* fs - a score of a corresponding feature
* valid - a valid bit for a corresponding feature
REALIZATIONS:
qaid2_nns - maping from query chip index to nns
{
* qfx2_dx - ranked list of query feature indexes to database feature indexes
* qfx2_dist - ranked list of query feature indexes to database feature indexes
}
* qaid2_norm_weight - mapping from qaid to (qfx2_normweight, qfx2_selnorm)
= qaid2_nnfilt[qaid]
"""
# TODO: Remove ibs control as much as possible or abstract it away
from __future__ import absolute_import, division, print_function
# Python
from six.moves import zip, range
import six
from collections import defaultdict
import sys
# Scientific
import numpy as np
from vtool import keypoint as ktool
from vtool import linalg as ltool
from vtool import spatial_verification as sver
# Hotspotter
from ibeis.model.hots import hots_query_result
from ibeis.model.hots import exceptions as hsexcept
from ibeis.model.hots import coverage_image
from ibeis.model.hots import nn_filters
from ibeis.model.hots import voting_rules2 as vr2
import utool
from functools import partial
#profile = utool.profile
print, print_, printDBG, rrr, profile = utool.inject(__name__, '[mf]', DEBUG=False)
np.tau = 2 * np.pi # tauday.com
NOT_QUIET = utool.NOT_QUIET and not utool.get_flag('--quiet-query')
VERBOSE = utool.VERBOSE or utool.get_flag('--verbose-query')
#=================
# Cython Metadata
#=================
"""
ctypedef np.float32_t float32_t
ctypedef np.float64_t float64_t
ctypedef np.uint8_t uint8_t
ctypedef np.uint8_t desc_t
ctypedef ktool.KPTS_T kpts_t
ctypedef ktool.DESC_T desc_t
cdef int MARK_AFTER
cdef double tau
"""
#=================
# Globals
#=================
START_AFTER = 2
# specialized progress func
log_prog = partial(utool.log_progress, startafter=START_AFTER)
#=================
# Helpers
#=================
#def compare(qreq, qreq_):
# qaid = 1
# qvecs_list = qreq_.get_internal_qvecs()
# qaids = qreq.get_internal_qaids()
# qdesc_list = qreq_.get_annot_desc(qaids) # Get descriptors
# assert np.all(qvecs_list[0] == qdesc_list[0])
# assert np.all(qreq_.indexer.dx2_vec == qreq.data_index.dx2_data)
# assert np.all(qreq_.indexer.dx2_rowid == qreq.data_index.dx2_aid)
# assert np.all(qreq_.indexer.dx2_fx == qreq.data_index.dx2_fx)
# qfx2_dx_, qfx2_dist_ = qaid2_nns_[qaid]
# qfx2_dx, qfx2_dist = qaid2_nns[qaid]
# assert id(qaid2_nns) != id(qaid2_nns_)
# assert np.all(qfx2_dx_ == qfx2_dx)
# assert np.all(qfx2_dist_ == qfx2_dist)
# index = np.where(qfx2_dx_ != qfx2_dx)
# qfx2_dx.shape == qfx2_dx.shape
# qfx2_dx_[index]
# qfx2_dx[index]
class QueryException(Exception):
def __init__(self, msg):
super(QueryException, self).__init__(msg)
def NoDescriptorsException(ibs, qaid):
    msg = ('QUERY ERROR IN %s: qaid=%r has no descriptors! ' +
'Please delete it.') % (ibs.get_dbname(), qaid)
ex = QueryException(msg)
return ex
#============================
# 1) Nearest Neighbors
#============================
@profile
def nearest_neighbors(ibs, qaids, qreq):
""" Plain Nearest Neighbors
Input:
ibs - an IBEIS Controller
qaids - query annotation-ids
qreq - a QueryRequest object
Output:
        qaid2_nns - a dict mapping query annotation-ids to a nearest neighbor
tuple (indexes, dists). indexes and dist have the shape
(nDesc x K) where nDesc is the number of descriptors in the
annotation, and K is the number of approximate nearest
neighbors.
cdef:
dict qaid2_nns
object ibs
object qreq
"""
    # Nearest neighbor configuration
nn_cfg = qreq.cfg.nn_cfg
K = nn_cfg.K
Knorm = nn_cfg.Knorm
checks = nn_cfg.checks
if NOT_QUIET:
cfgstr_ = nn_cfg.get_cfgstr()
print('[mf] Step 1) Assign nearest neighbors: ' + cfgstr_)
num_neighbors = K + Knorm # number of nearest neighbors
qdesc_list = ibs.get_annot_desc(qaids) # Get descriptors
nn_func = qreq.data_index.flann.nn_index # Approx nearest neighbor func
# Call a tighter (hopefully cythonized) nearest neighbor function
qaid2_nns = _nearest_neighbors(nn_func, qaids, qdesc_list, num_neighbors, checks)
return qaid2_nns
def _nearest_neighbors(nn_func, qaids, qdesc_list, num_neighbors, checks):
""" Helper worker function for nearest_neighbors
cdef:
list qaids, qdesc_list
long num_neighbors, checks
dict qaid2_nns
long nTotalNN, nTotalDesc
np.ndarray[desc_t, ndim=2] qfx2_desc
np.ndarray[int32_t, ndim=2] qfx2_dx
np.ndarray[float64_t, ndim=2] qfx2_dist
np.ndarray[int32_t, ndim=2] qfx2_dx
np.ndarray[float64_t, ndim=2] qfx2_dist
"""
# Output
qaid2_nns = {}
# Internal statistics reporting
nTotalNN, nTotalDesc = 0, 0
mark_, end_ = log_prog('Assign NN: ', len(qaids))
for count, qaid in enumerate(qaids):
mark_(count) # progress
qfx2_desc = qdesc_list[count]
# Check that we can query this annotation
if len(qfx2_desc) == 0:
# Assign empty nearest neighbors
qfx2_dx = np.empty((0, num_neighbors), dtype=np.int32)
qfx2_dist = np.empty((0, num_neighbors), dtype=np.float64)
qaid2_nns[qaid] = (qfx2_dx, qfx2_dist)
continue
        # Find Nearest Neighbors nntup = (indexes, dists)
(qfx2_dx, qfx2_dist) = nn_func(qfx2_desc, num_neighbors, checks=checks)
# Associate query annotation with its nearest descriptors
qaid2_nns[qaid] = (qfx2_dx, qfx2_dist)
# record number of query and result desc
nTotalNN += qfx2_dx.size
nTotalDesc += len(qfx2_desc)
end_()
if NOT_QUIET:
print('[mf] * assigned %d desc from %d chips to %r nearest neighbors'
% (nTotalDesc, len(qaids), nTotalNN))
return qaid2_nns
#============================
# 2) Nearest Neighbor weights
#============================
def weight_neighbors(ibs, qaid2_nns, qreq):
if NOT_QUIET:
print('[mf] Step 2) Weight neighbors: ' + qreq.cfg.filt_cfg.get_cfgstr())
if qreq.cfg.filt_cfg.filt_on:
return _weight_neighbors(ibs, qaid2_nns, qreq)
else:
return {}
@profile
def _weight_neighbors(ibs, qaid2_nns, qreq):
nnfilter_list = qreq.cfg.filt_cfg.get_active_filters()
filt2_weights = {}
filt2_meta = {}
for nnfilter in nnfilter_list:
nn_filter_fn = nn_filters.NN_FILTER_FUNC_DICT[nnfilter]
# Apply [nnfilter] weight to each nearest neighbor
# TODO FIX THIS!
qaid2_norm_weight, qaid2_selnorms = nn_filter_fn(ibs, qaid2_nns, qreq)
filt2_weights[nnfilter] = qaid2_norm_weight
filt2_meta[nnfilter] = qaid2_selnorms
return filt2_weights, filt2_meta
#==========================
# 3) Neighbor scoring (Voting Profiles)
#==========================
@profile
def _apply_filter_scores(qaid, qfx2_nndx, filt2_weights, filt_cfg):
qfx2_score = np.ones(qfx2_nndx.shape, dtype=hots_query_result.FS_DTYPE)
qfx2_valid = np.ones(qfx2_nndx.shape, dtype=np.bool)
# Apply the filter weightings to determine feature validity and scores
for filt, aid2_weights in six.iteritems(filt2_weights):
qfx2_weights = aid2_weights[qaid]
sign, thresh, weight = filt_cfg.get_stw(filt) # stw = sign, thresh, weight
if thresh is not None and thresh != 'None':
thresh = float(thresh) # corrects for thresh being strings sometimes
if isinstance(thresh, (int, float)):
qfx2_passed = sign * qfx2_weights <= sign * thresh
qfx2_valid = np.logical_and(qfx2_valid, qfx2_passed)
if not weight == 0:
qfx2_score += weight * qfx2_weights
return qfx2_score, qfx2_valid
@profile
def filter_neighbors(ibs, qaid2_nns, filt2_weights, qreq):
qaid2_nnfilt = {}
# Configs
filt_cfg = qreq.cfg.filt_cfg
cant_match_sameimg = not filt_cfg.can_match_sameimg
cant_match_samename = not filt_cfg.can_match_samename
K = qreq.cfg.nn_cfg.K
if NOT_QUIET:
print('[mf] Step 3) Filter neighbors: ')
if filt_cfg.gravity_weighting:
# We dont have an easy way to access keypoints from nearest neighbors yet
aid_list = np.unique(qreq.data_index.dx2_aid) # FIXME: Highly inefficient
kpts_list = ibs.get_annot_kpts(aid_list)
dx2_kpts = np.vstack(kpts_list)
dx2_oris = ktool.get_oris(dx2_kpts)
assert len(dx2_oris) == len(qreq.data_index.dx2_data)
# Filter matches based on config and weights
mark_, end_ = log_prog('Filter NN: ', len(qaid2_nns))
for count, qaid in enumerate(six.iterkeys(qaid2_nns)):
mark_(count) # progress
(qfx2_dx, _) = qaid2_nns[qaid]
qfx2_nndx = qfx2_dx[:, 0:K]
# Get a numeric score score and valid flag for each feature match
qfx2_score, qfx2_valid = _apply_filter_scores(qaid, qfx2_nndx, filt2_weights, filt_cfg)
qfx2_aid = qreq.data_index.dx2_aid[qfx2_nndx]
if VERBOSE:
print('[mf] * %d assignments are invalid by thresh' %
((True - qfx2_valid).sum()))
if filt_cfg.gravity_weighting:
qfx2_nnori = dx2_oris[qfx2_nndx]
qfx2_kpts = ibs.get_annot_kpts(qaid) # FIXME: Highly inefficient
qfx2_oris = ktool.get_oris(qfx2_kpts)
# Get the orientation distance
qfx2_oridist = ltool.rowwise_oridist(qfx2_nnori, qfx2_oris)
# Normalize into a weight (close orientations are 1, far are 0)
qfx2_gvweight = (np.tau - qfx2_oridist) / np.tau
# Apply gravity vector weight to the score
qfx2_score *= qfx2_gvweight
# Remove Impossible Votes:
# dont vote for yourself or another chip in the same image
cant_match_self = not cant_match_sameimg
if cant_match_self:
####DBG
qfx2_notsamechip = qfx2_aid != qaid
if VERBOSE:
nChip_all_invalid = ((True - qfx2_notsamechip)).sum()
nChip_new_invalid = (qfx2_valid * (True - qfx2_notsamechip)).sum()
print('[mf] * %d assignments are invalid by self' % nChip_all_invalid)
print('[mf] * %d are newly invalided by self' % nChip_new_invalid)
####
qfx2_valid = np.logical_and(qfx2_valid, qfx2_notsamechip)
if cant_match_sameimg:
qfx2_gid = ibs.get_annot_gids(qfx2_aid)
qgid = ibs.get_annot_gids(qaid)
qfx2_notsameimg = qfx2_gid != qgid
####DBG
if VERBOSE:
nImg_all_invalid = ((True - qfx2_notsameimg)).sum()
nImg_new_invalid = (qfx2_valid * (True - qfx2_notsameimg)).sum()
print('[mf] * %d assignments are invalid by gid' % nImg_all_invalid)
print('[mf] * %d are newly invalided by gid' % nImg_new_invalid)
####
qfx2_valid = np.logical_and(qfx2_valid, qfx2_notsameimg)
if cant_match_samename:
qfx2_nid = ibs.get_annot_nids(qfx2_aid)
qnid = ibs.get_annot_nids(qaid)
qfx2_notsamename = qfx2_nid != qnid
####DBG
if VERBOSE:
nName_all_invalid = ((True - qfx2_notsamename)).sum()
nName_new_invalid = (qfx2_valid * (True - qfx2_notsamename)).sum()
print('[mf] * %d assignments are invalid by nid' % nName_all_invalid)
print('[mf] * %d are newly invalided by nid' % nName_new_invalid)
####
qfx2_valid = np.logical_and(qfx2_valid, qfx2_notsamename)
#printDBG('[mf] * Marking %d assignments as invalid' % ((True - qfx2_valid).sum()))
qaid2_nnfilt[qaid] = (qfx2_score, qfx2_valid)
end_()
return qaid2_nnfilt
@profile
def identity_filter(qaid2_nns, qreq):
""" testing function returns unfiltered nearest neighbors
this does check that you are not matching yourself
"""
qaid2_nnfilt = {}
K = qreq.cfg.nn_cfg.K
for count, qaid in enumerate(six.iterkeys(qaid2_nns)):
(qfx2_dx, _) = qaid2_nns[qaid]
qfx2_nndx = qfx2_dx[:, 0:K]
qfx2_score = np.ones(qfx2_nndx.shape, dtype=hots_query_result.FS_DTYPE)
qfx2_valid = np.ones(qfx2_nndx.shape, dtype=np.bool)
# Check that you are not matching yourself
qfx2_aid = qreq.data_index.dx2_aid[qfx2_nndx]
qfx2_notsamechip = qfx2_aid != qaid
qfx2_valid = np.logical_and(qfx2_valid, qfx2_notsamechip)
qaid2_nnfilt[qaid] = (qfx2_score, qfx2_valid)
return qaid2_nnfilt
#============================
# 4) Conversion from featurematches to chipmatches qfx2 -> aid2
#============================
@profile
def _fix_fmfsfk(aid2_fm, aid2_fs, aid2_fk):
minMatches = 2 # TODO: paramaterize
# Convert to numpy
fm_dtype = hots_query_result.FM_DTYPE
fs_dtype = hots_query_result.FS_DTYPE
fk_dtype = hots_query_result.FK_DTYPE
# FIXME: This is slow
aid2_fm_ = {aid: np.array(fm, fm_dtype)
for aid, fm in six.iteritems(aid2_fm)
if len(fm) > minMatches}
aid2_fs_ = {aid: np.array(fs, fs_dtype)
for aid, fs in six.iteritems(aid2_fs)
if len(fs) > minMatches}
aid2_fk_ = {aid: np.array(fk, fk_dtype)
for aid, fk in six.iteritems(aid2_fk)
if len(fk) > minMatches}
# Ensure shape
for aid, fm in six.iteritems(aid2_fm_):
fm.shape = (fm.size // 2, 2)
chipmatch = (aid2_fm_, aid2_fs_, aid2_fk_)
return chipmatch
def new_fmfsfk():
aid2_fm = defaultdict(list)
aid2_fs = defaultdict(list)
aid2_fk = defaultdict(list)
return aid2_fm, aid2_fs, aid2_fk
@profile
def build_chipmatches(qaid2_nns, qaid2_nnfilt, qreq):
"""
Input:
qaid2_nns - dict of assigned nearest features (only indexes are used here)
qaid2_nnfilt - dict of (featmatch_scores, featmatch_mask)
where the scores and matches correspond to the assigned
nearest features
qreq - QueryRequest object
Output:
qaid2_chipmatch - dict of (
Notes:
The prefix qaid2_ denotes a mapping where keys are query-annotation-id
vsmany/vsone counts here. also this is where the filter
            weights and threshold are applied to the matches. Essentially
nearest neighbors are converted into weighted assignments
"""
# Config
K = qreq.cfg.nn_cfg.K
query_type = qreq.cfg.agg_cfg.query_type
is_vsone = query_type == 'vsone'
if NOT_QUIET:
print('[mf] Step 4) Building chipmatches %s' % (query_type,))
# Return var
qaid2_chipmatch = {}
nFeatMatches = 0
#Vsone
if is_vsone:
assert len(qreq.qaids) == 1
aid2_fm, aid2_fs, aid2_fk = new_fmfsfk()
# Iterate over chips with nearest neighbors
mark_, end_ = log_prog('Build Chipmatch: ', len(qaid2_nns))
for count, qaid in enumerate(six.iterkeys(qaid2_nns)):
mark_(count) # Mark progress
(qfx2_dx, _) = qaid2_nns[qaid]
(qfx2_fs, qfx2_valid) = qaid2_nnfilt[qaid]
nQKpts = len(qfx2_dx)
# Build feature matches
qfx2_nndx = qfx2_dx[:, 0:K]
qfx2_aid = qreq.data_index.dx2_aid[qfx2_nndx]
qfx2_fx = qreq.data_index.dx2_fx[qfx2_nndx]
qfx2_qfx = np.tile(np.arange(nQKpts), (K, 1)).T
qfx2_k = np.tile(np.arange(K), (nQKpts, 1))
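        # qfx2_qfx repeats each query feature index across its K neighbor
        # slots and qfx2_k records the neighbor rank of each match, so all
        # five arrays below can be masked by qfx2_valid and zipped together.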
        # Pack valid feature matches into an iterator
valid_lists = [qfx2[qfx2_valid] for qfx2 in (qfx2_qfx, qfx2_aid, qfx2_fx, qfx2_fs, qfx2_k,)]
# TODO: Sorting the valid lists by aid might help the speed of this
# code. Also, consolidating fm, fs, and fk into one vector will reduce
# the amount of appends.
match_iter = zip(*valid_lists)
# Vsmany - Append query feature matches to database aids
if not is_vsone:
aid2_fm, aid2_fs, aid2_fk = new_fmfsfk()
for qfx, aid, fx, fs, fk in match_iter:
aid2_fm[aid].append((qfx, fx)) # Note the difference
aid2_fs[aid].append(fs)
aid2_fk[aid].append(fk)
nFeatMatches += 1
chipmatch = _fix_fmfsfk(aid2_fm, aid2_fs, aid2_fk)
qaid2_chipmatch[qaid] = chipmatch
#if not QUIET:
# nFeats_in_matches = [len(fm) for fm in six.itervalues(aid2_fm)]
# print('nFeats_in_matches_stats = ' +
# utool.dict_str(utool.mystats(nFeats_in_matches)))
# Vsone - Append database feature matches to query aids
else:
for qfx, aid, fx, fs, fk in match_iter:
aid2_fm[qaid].append((fx, qfx)) # Note the difference
aid2_fs[qaid].append(fs)
aid2_fk[qaid].append(fk)
nFeatMatches += 1
#Vsone
if is_vsone:
chipmatch = _fix_fmfsfk(aid2_fm, aid2_fs, aid2_fk)
qaid = qreq.qaids[0]
qaid2_chipmatch[qaid] = chipmatch
end_()
if NOT_QUIET:
print('[mf] * made %d feat matches' % nFeatMatches)
return qaid2_chipmatch
#============================
# 5) Spatial Verification
#============================
def spatial_verification(ibs, qaid2_chipmatch, qreq, dbginfo=False):
sv_cfg = qreq.cfg.sv_cfg
if not sv_cfg.sv_on or sv_cfg.xy_thresh is None:
print('[mf] Step 5) Spatial verification: off')
return (qaid2_chipmatch, {}) if dbginfo else qaid2_chipmatch
else:
return _spatial_verification(ibs, qaid2_chipmatch, qreq, dbginfo=dbginfo)
@profile
def _spatial_verification(ibs, qaid2_chipmatch, qreq, dbginfo=False):
sv_cfg = qreq.cfg.sv_cfg
print('[mf] Step 5) Spatial verification: ' + sv_cfg.get_cfgstr())
prescore_method = sv_cfg.prescore_method
nShortlist = sv_cfg.nShortlist
xy_thresh = sv_cfg.xy_thresh
scale_thresh = sv_cfg.scale_thresh
ori_thresh = sv_cfg.ori_thresh
use_chip_extent = sv_cfg.use_chip_extent
min_nInliers = sv_cfg.min_nInliers
qaid2_chipmatchSV = {}
nFeatSVTotal = 0
nFeatMatchSV = 0
nFeatMatchSVAff = 0
if dbginfo:
qaid2_svtups = {} # dbg info (can remove if there is a speed issue)
def print_(msg, count=0):
""" temp print_. Using count in this way is a hack """
if NOT_QUIET:
if count % 25 == 0:
sys.stdout.write(msg)
count += 1
# Find a transform from chip2 to chip1 (the old way was 1 to 2)
for qaid in six.iterkeys(qaid2_chipmatch):
chipmatch = qaid2_chipmatch[qaid]
aid2_prescore = score_chipmatch(ibs, qaid, chipmatch, prescore_method, qreq)
#print('Prescore: %r' % (aid2_prescore,))
(aid2_fm, aid2_fs, aid2_fk) = chipmatch
topx2_aid = utool.util_dict.keys_sorted_by_value(aid2_prescore)[::-1]
nRerank = min(len(topx2_aid), nShortlist)
# Precompute output container
if dbginfo:
aid2_svtup = {} # dbg info (can remove if there is a speed issue)
aid2_fm_V, aid2_fs_V, aid2_fk_V = new_fmfsfk()
# Query Keypoints
kpts1 = ibs.get_annot_kpts(qaid)
topx2_kpts = ibs.get_annot_kpts(topx2_aid)
# Check the diaglen sizes before doing the homography
topx2_dlen_sqrd = _precompute_topx2_dlen_sqrd(ibs, aid2_fm, topx2_aid,
topx2_kpts, nRerank,
use_chip_extent)
# spatially verify the top __NUM_RERANK__ results
for topx in range(nRerank):
aid = topx2_aid[topx]
fm = aid2_fm[aid]
dlen_sqrd = topx2_dlen_sqrd[topx]
kpts2 = topx2_kpts[topx]
fs = aid2_fs[aid]
fk = aid2_fk[aid]
sv_tup = sver.spatial_verification(kpts1, kpts2, fm,
xy_thresh, scale_thresh, ori_thresh, dlen_sqrd,
min_nInliers)
nFeatSVTotal += len(fm)
if sv_tup is None:
print_('o') # sv failure
else:
# Return the inliers to the homography
homog_inliers, H, aff_inliers, Aff = sv_tup
if dbginfo:
aid2_svtup[aid] = sv_tup
aid2_fm_V[aid] = fm[homog_inliers, :]
aid2_fs_V[aid] = fs[homog_inliers]
aid2_fk_V[aid] = fk[homog_inliers]
nFeatMatchSV += len(homog_inliers)
nFeatMatchSVAff += len(aff_inliers)
if NOT_QUIET:
#print(inliers)
print_('.') # verified something
# Rebuild the feature match / score arrays to be consistent
chipmatchSV = _fix_fmfsfk(aid2_fm_V, aid2_fs_V, aid2_fk_V)
if dbginfo:
qaid2_svtups[qaid] = aid2_svtup
qaid2_chipmatchSV[qaid] = chipmatchSV
print_('\n')
if NOT_QUIET:
print('[mf] * Affine verified %d/%d feat matches' % (nFeatMatchSVAff, nFeatSVTotal))
print('[mf] * Homog verified %d/%d feat matches' % (nFeatMatchSV, nFeatSVTotal))
if dbginfo:
return qaid2_chipmatchSV, qaid2_svtups
else:
return qaid2_chipmatchSV
def _precompute_topx2_dlen_sqrd(ibs, aid2_fm, topx2_aid, topx2_kpts,
nRerank, use_chip_extent):
""" helper for spatial verification, computes the squared diagonal length of
matching chips
"""
if use_chip_extent:
topx2_chipsize = list(ibs.get_annot_chipsizes(topx2_aid))
def chip_dlen_sqrd(tx):
(chipw, chiph) = topx2_chipsize[tx]
dlen_sqrd = chipw ** 2 + chiph ** 2
return dlen_sqrd
topx2_dlen_sqrd = [chip_dlen_sqrd(tx) for tx in range(nRerank)]
else:
# Use extent of matching keypoints
def kpts_dlen_sqrd(tx):
kpts2 = topx2_kpts[tx]
aid = topx2_aid[tx]
fm = aid2_fm[aid]
x_m, y_m = ktool.get_xys(kpts2[fm[:, 1]])
dlensqrd = (x_m.max() - x_m.min()) ** 2 + (y_m.max() - y_m.min()) ** 2
return dlensqrd
topx2_dlen_sqrd = [kpts_dlen_sqrd(tx) for tx in range(nRerank)]
return topx2_dlen_sqrd
#============================
# 6) QueryResult Format
#============================
@profile
def chipmatch_to_resdict(ibs, qaid2_chipmatch, filt2_meta, qreq):
if NOT_QUIET:
print('[mf] Step 6) Convert chipmatch -> qres')
cfgstr = qreq.get_cfgstr()
score_method = qreq.cfg.agg_cfg.score_method
# Create the result structures for each query.
qaid2_qres = {}
for qaid in six.iterkeys(qaid2_chipmatch):
# For each query's chipmatch
chipmatch = qaid2_chipmatch[qaid]
# Perform final scoring
aid2_score = score_chipmatch(ibs, qaid, chipmatch, score_method, qreq)
# Create a query result structure
qres = hots_query_result.QueryResult(qaid, cfgstr)
qres.aid2_score = aid2_score
(qres.aid2_fm, qres.aid2_fs, qres.aid2_fk) = chipmatch
qres.filt2_meta = {} # dbgstats
for filt, qaid2_meta in six.iteritems(filt2_meta):
qres.filt2_meta[filt] = qaid2_meta[qaid] # things like k+1th
qaid2_qres[qaid] = qres
# Retain original score method
return qaid2_qres
@profile
def try_load_resdict(qreq):
""" Try and load the result structures for each query.
returns a list of failed qaids
cdef:
object qreq
list qaids
dict qaid2_qres
list failed_qaids
"""
qaids = qreq.qaids
#cfgstr = qreq.get_cfgstr() # NEEDS FIX TAKES 21.9 % time of this function
cfgstr = qreq.get_cfgstr2() # hack of a fix
qaid2_qres = {}
failed_qaids = []
for qaid in qaids:
try:
qres = hots_query_result.QueryResult(qaid, cfgstr)
qres.load(qreq.get_qresdir()) # 77.4 % time
qaid2_qres[qaid] = qres
except hsexcept.HotsCacheMissError:
failed_qaids.append(qaid)
except hsexcept.HotsNeedsRecomputeError:
failed_qaids.append(qaid)
return qaid2_qres, failed_qaids
#============================
# Scoring Mechanism
#============================
@profile
def score_chipmatch(ibs, qaid, chipmatch, score_method, qreq=None):
(aid2_fm, aid2_fs, aid2_fk) = chipmatch
# HACK: Im not even sure if the 'w' suffix is correctly handled anymore
if score_method.find('w') == len(score_method) - 1:
score_method = score_method[:-1]
# Choose the appropriate scoring mechanism
if score_method == 'csum':
aid2_score = vr2.score_chipmatch_csum(chipmatch)
elif score_method == 'pl':
aid2_score, nid2_score = vr2.score_chipmatch_PL(ibs, qaid, chipmatch, qreq)
elif score_method == 'borda':
aid2_score, nid2_score = vr2.score_chipmatch_pos(ibs, qaid, chipmatch, qreq, 'borda')
elif score_method == 'topk':
aid2_score, nid2_score = vr2.score_chipmatch_pos(ibs, qaid, chipmatch, qreq, 'topk')
elif score_method.startswith('coverage'):
# Method num is at the end of coverage
method = int(score_method.replace('coverage', '0'))
aid2_score = coverage_image.score_chipmatch_coverage(ibs, qaid, chipmatch, qreq, method=method)
else:
raise Exception('[mf] unknown scoring method:' + score_method)
return aid2_score
| 2.03125 | 2 |
kg/ner/preprocess.py | ToddMorrill/knowledge-graphs | 2 | 12791939 | """
This module contains preprocessing code to prepare data for training and inference.
Examples:
$ python preprocess.py \
--config configs/baseline.yaml
"""
import argparse
from collections import Counter
import os
from types import SimpleNamespace
import pandas as pd
import torch
import torchtext
from torch.nn.utils.rnn import pad_sequence
import yaml
from yaml import parser
class CoNLL2003Dataset(torch.utils.data.Dataset):
"""Custom dataset to contain the CoNLL2003 dataset.
"""
def __init__(self, df: pd.DataFrame, transform: list = None) -> None:
"""Initializes the dataset and prepares sequences of tokens and labels.
Args:
df (pd.DataFrame): DF containing training examples.
transform (list, optional): List of transforms (e.g. index lookups, etc.). Defaults to None.
"""
self.df = df
self.transform = transform
self.sentences, self.labels = self._prepare_data()
def _prepare_data(self) -> tuple:
"""Groups data into sequences of tokens and labels.
Returns:
tuple: sentences, labels
"""
temp_df = self.df.groupby(['Article_ID', 'Sentence_ID'],
as_index=False).agg(
Sentence=('Token', list),
Labels=('NER_Tag_Normalized', list))
sentences = temp_df['Sentence'].values.tolist()
labels = temp_df['Labels'].values.tolist()
return sentences, labels
def __len__(self) -> int:
"""Retrieve the length of the dataset.
Returns:
int: Dataset length.
"""
return len(self.sentences)
def __getitem__(self, idx: int) -> tuple:
"""Retrieves the idx item from the dataset, potentially transformed.
Args:
idx (int): idx item from the dataset.
Returns:
tuple: sentences, labels
"""
if self.transform is None:
return self.sentences[idx], self.labels[idx]
# TODO: probably should wrap this in a for-loop
indices = self.transform[0](self.sentences[idx])
labels = self.transform[1](self.labels[idx])
return indices, labels
class Preprocessor(object):
"""Preproccessor class to handle data preparation at train and inference time.
"""
def __init__(self, config: str) -> None:
"""Initialize the preprocessor and generate vocabulary and label dictionary based on the training set.
Args:
config (str): File path to the configuration yaml file.
"""
with open(config, 'r') as f:
config = yaml.safe_load(f)
self.config = SimpleNamespace(**config)
self.vocab, self.label_dict = self._create_vocabs()
self.idx_to_label = {v: k for k, v in self.label_dict.items()}
def _create_vocabs(self) -> tuple:
"""Generate vocabulary object and label dictionary.
Returns:
tuple: vocab, label_dict
"""
# load train data to build the dictionaries
train_df = pd.read_csv(os.path.join(self.config.data_dir, 'train.csv'))
# create vocabulary
vocab = torchtext.vocab.Vocab(
Counter(train_df['Token'].value_counts().to_dict()))
# create label dictionary
label_dict = {}
i = 0
for k in train_df['NER_Tag_Normalized'].unique():
label_dict[k] = i
i += 1
return vocab, label_dict
@staticmethod
def _collate_fn(batch: tuple, train: bool = True) -> tuple:
"""Custom collate function that combines variable length sequences into padded batches.
Args:
batch (tuple): sentence_indices, sentences_labels OR just sentences_indices (a list).
train (bool, optional): If train=True, expects tuple of
sentence_indices, sentences_labels, else just a list of sentence_indices. Defaults to True.
Returns:
tuple: (sentences_padded, sentence_lens), labels_padded if train=True, else (sentences_padded, sentence_lens).
"""
if train:
sentence_indices, sentence_labels = zip(*batch)
else:
sentence_indices = batch
sentence_lens = [len(x) for x in sentence_indices]
# vocab['<pad>'] = 1
sentences_padded = pad_sequence(sentence_indices,
batch_first=True,
padding_value=1)
if train:
labels_padded = pad_sequence(sentence_labels,
batch_first=True,
padding_value=-1)
return (sentences_padded, sentence_lens), labels_padded
else:
return (sentences_padded, sentence_lens)
def get_train_datasets(self) -> tuple:
"""Generates all the datasets needed for model training.
Returns:
tuple: train_dataset, val_dataset, test_dataset
"""
train_file_path = os.path.join(self.config.data_dir, 'train.csv')
val_file_path = os.path.join(self.config.data_dir, 'validation.csv')
test_file_path = os.path.join(self.config.data_dir, 'test.csv')
transform = [self._transform_sentence, self._transform_labels]
train_dataset = CoNLL2003Dataset(pd.read_csv(train_file_path),
transform)
val_dataset = CoNLL2003Dataset(pd.read_csv(val_file_path), transform)
test_dataset = CoNLL2003Dataset(pd.read_csv(test_file_path), transform)
return train_dataset, val_dataset, test_dataset
def get_train_dataloaders(self) -> tuple:
"""Generates all the dataloaders needed for model training.
Returns:
tuple: train_dataloader, val_dataloader, test_dataloader
"""
train_dataset, val_dataset, test_dataset = self.get_train_datasets()
train_dataloader = torch.utils.data.DataLoader(
train_dataset,
batch_size=self.config.batch_size,
collate_fn=self._collate_fn,
shuffle=True)
val_dataloader = torch.utils.data.DataLoader(
val_dataset,
batch_size=self.config.batch_size,
collate_fn=self._collate_fn)
test_dataloader = torch.utils.data.DataLoader(
test_dataset,
batch_size=self.config.batch_size,
collate_fn=self._collate_fn)
return train_dataloader, val_dataloader, test_dataloader
@staticmethod
def _tokenize(sentence: str) -> list:
"""Utility function to tokenize sentences.
Args:
sentence (str): Sentence string.
Returns:
list: Tokenized sentence.
"""
return sentence.split(' ')
def _transform_sentence(self, sentence: list) -> torch.tensor:
"""Transform function that accepts a sentence as a string or tokenized list and returns vocabulary indices.
Args:
sentence (list): Tokenized list or sentence string.
Returns:
torch.tensor: Vocabulary indices.
"""
if isinstance(sentence, str):
sentence = self._tokenize(sentence)
indices = []
for token in sentence:
indices.append(self.vocab[token])
return torch.tensor(indices)
def _transform_labels(self, label_sequence: list) -> torch.tensor:
"""Transform function that accepts a sequence of labels and returns label indices.
Args:
label_sequence (list): Sequence of string labels.
Returns:
torch.tensor: Label indices.
"""
labels = []
for label in label_sequence:
labels.append(self.label_dict[label])
return torch.tensor(labels)
def preprocess(self, sentences: list) -> tuple:
"""Preprocess any arbitrary list of string sentences and return indices that can be fed into the model.
Args:
sentences (list): List of sentences to tokenize and retrieve indices for.
Returns:
tuple: (sentences_padded, sentence_lens)
"""
# TODO: see if there is a way to reuse the CoNLL2003Dataset class + dataloaders
# for guaranteed consistency with the way that we're preparing training data
preprocessed = []
if isinstance(sentences, str):
preprocessed.append(self._transform_sentence(sentences))
else:
for sentence in sentences:
preprocessed.append(self._transform_sentence(sentence))
return self._collate_fn(preprocessed, train=False)
def main(args):
# contains vocab and label_dict embedded in the transform function
preprocessor = Preprocessor(args.config)
sample_sentence = '<NAME> lives in New York City.'
prepared_sentence = preprocessor.preprocess(sample_sentence)
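    # prepared_sentence is a (padded_index_tensor, sentence_lengths) tuple
    # that can be fed directly to the model's forward pass.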
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--config',
type=str,
help='File path where the model configuration file is located.',
required=True)
args = parser.parse_args()
main(args) | 3.015625 | 3 |
tests/test_zero_forcing.py | somacdivad/grinpy | 12 | 12791940 | import grinpy as gp
import pytest
class TestZeroForcing:
def test_non_integral_value_for_k_raises_TypeError_in_is_k_forcing(self):
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.is_k_forcing_vertex(G, 1, [1], 1.5)
def test_0_value_for_k_raises_ValueError_in_is_k_forcing(self):
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.is_k_forcing_vertex(G, 1, [1], 0)
def test_integral_float_for_k_works(self):
G = gp.star_graph(2)
assert gp.is_k_forcing_vertex(G, 1, [1], 1.0) == True
def test_leaf_is_zero_forcing_vertex_for_star(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_vertex(G, 1, [1]) == True
def test_center_is_not_zero_forcing_vertex_for_star(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_vertex(G, 0, [0]) == False
def test_no_vertex_is_zero_forcing_vertex_for_empty_set(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_vertex(G, 0, set()) == False
assert gp.is_zero_forcing_vertex(G, 1, set()) == False
assert gp.is_zero_forcing_vertex(G, 2, set()) == False
def test_center_of_S3_is_3_forcing_vertex(self):
G = gp.star_graph(3)
assert gp.is_k_forcing_vertex(G, 0, [0], 3) == True
def test_center_of_S3_is_not_2_forcing_vertex(self):
G = gp.star_graph(3)
assert gp.is_k_forcing_vertex(G, 0, [0], 2) == False
def test_leaf_of_star_is_zero_forcing_active_set(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_active_set(G, [1]) == True
def test_center_of_star_is_not_zero_forcing_active_set(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_active_set(G, [0]) == False
def test_empy_set_is_not_zero_forcing_active_set(self):
G = gp.star_graph(2)
assert gp.is_zero_forcing_active_set(G, set()) == False
def test_leaf_is_zero_forcing_set_of_path(self):
G = gp.path_graph(3)
assert gp.is_zero_forcing_set(G, [0]) == True
def test_leaf_is_not_zero_forcing_set_of_S3(self):
G = gp.star_graph(3)
assert gp.is_zero_forcing_set(G, [1]) == False
def test_leaf_is_max_degree_minus_one_forcing_set_for_star(self):
for i in range(3, 13):
G = gp.star_graph(i)
D = gp.max_degree(G)
assert gp.is_k_forcing_set(G, [1], D - 1) == True
def test_zero_forcing_number_of_star_is_order_minus_2(self):
for i in range(2, 12):
G = gp.star_graph(i)
assert gp.zero_forcing_number(G) == G.order() - 2
def test_zero_forcing_number_of_petersen_graph_is_5(self):
G = gp.petersen_graph()
assert gp.zero_forcing_number(G) == 5
def test_2_forcing_number_of_petersen_graph_is_2(self):
G = gp.petersen_graph()
assert gp.k_forcing_number(G, 2) == 2
def test_leaf_is_not_total_forcing_set_of_path(self):
G = gp.path_graph(3)
assert gp.is_total_zero_forcing_set(G, [0]) == False
def test_pair_of_adjacent_nodes_is_total_forcing_set_of_path(self):
G = gp.path_graph(6)
assert gp.is_total_zero_forcing_set(G, [2, 3]) == True
def test_total_zero_forcing_number_of_path_is_2(self):
G = gp.path_graph(5)
assert gp.total_zero_forcing_number(G) == 2
def test_connected_zero_forcing_number_of_monster_is_4(self):
G = gp.star_graph(3)
G.add_edge(3, 4)
G.add_edge(3, 5)
assert gp.connected_zero_forcing_number(G) == 4
def test_non_int_value_for_k_raises_error_in_is_connected_k_forcing(self):
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.is_connected_k_forcing_set(G, [0], 1.5)
def test_0_value_for_k_raises_error_in_is_connected_k_forcing(self):
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.is_connected_k_forcing_set(G, [0], 0)
def test_non_int_value_for_k_raises_error_in_min_connected_k_forcing(self):
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.min_connected_k_forcing_set(G, 1.5)
def test_0_value_for_k_raises_error_in_min_connected_k_forcing(self):
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.min_connected_k_forcing_set(G, 0)
def test_non_int_value_for_k_raises_error_in_connected_k_forcing_num(self):
with pytest.raises(TypeError):
G = gp.star_graph(2)
gp.connected_k_forcing_number(G, 1.5)
def test_0_value_for_k_raises_error_in_connected_k_forcing_num(self):
with pytest.raises(ValueError):
G = gp.star_graph(2)
gp.connected_k_forcing_number(G, 0)
def test_total_zero_forcing_num_of_trivial_graph_is_None(self):
G = gp.trivial_graph()
assert gp.total_zero_forcing_number(G) == None
def test_endpoint_is_connected_forcing_set_of_path(self):
G = gp.path_graph(2)
assert gp.is_connected_zero_forcing_set(G, [0])
def test_connected_zero_forcing_num_of_disconnected_graph_is_None(self):
G = gp.empty_graph(5)
assert gp.connected_zero_forcing_number(G) == None
| 2.265625 | 2 |
StudentsPerformance.py | AbhigyanRanjan0505/dvyo5sh6g9vdynxfoiubte | 0 | 12791941 | import plotly.figure_factory as ff
import plotly.graph_objects as go
import pandas as pd
import statistics
dataframe = pd.read_csv("StudentsPerformance.csv")
data_list = dataframe["reading score"].to_list()
data_mean = statistics.mean(data_list)
data_median = statistics.median(data_list)
data_mode = statistics.mode(data_list)
data_std_deviation = statistics.stdev(data_list)
data_first_std_deviation_start, data_first_std_deviation_end = data_mean - \
data_std_deviation, data_mean+data_std_deviation
data_second_std_deviation_start, data_second_std_deviation_end = data_mean - \
(2*data_std_deviation), data_mean+(2*data_std_deviation)
data_third_std_deviation_start, data_third_std_deviation_end = data_mean - \
(3*data_std_deviation), data_mean+(3*data_std_deviation)
data_list_of_data_within_1_std_deviation = [
result for result in data_list if result > data_first_std_deviation_start and result < data_first_std_deviation_end]
data_list_of_data_within_2_std_deviation = [result for result in data_list if result >
data_second_std_deviation_start and result < data_second_std_deviation_end]
data_list_of_data_within_3_std_deviation = [
result for result in data_list if result > data_third_std_deviation_start and result < data_third_std_deviation_end]
print("Mean of this data is {}.".format(data_mean))
print("Median of this data is {}.".format(data_median))
print("Mode of this data is {}.".format(data_mode))
print("{}% of data for data lies within 1 standard deviation".format(
len(data_list_of_data_within_1_std_deviation)*100.0/len(data_list)))
print("{}% of data for data lies within 2 standard deviations".format(
len(data_list_of_data_within_2_std_deviation)*100.0/len(data_list)))
print("{}% of data for data lies within 3 standard deviations".format(
len(data_list_of_data_within_3_std_deviation)*100.0/len(data_list)))
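# For reference: if the reading scores were approximately normally distributed,
# the empirical (68-95-99.7) rule says roughly 68%, 95% and 99.7% of values fall
# within 1, 2 and 3 standard deviations of the mean, so the percentages printed
# above can be compared against those benchmarks.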
fig = ff.create_distplot([data_list], ["reading scores"], show_hist=False)
fig.add_trace(go.Scatter(x=[data_mean, data_mean], y=[
0, 0.17], mode="lines", name="MEAN"))
fig.add_trace(go.Scatter(x=[data_first_std_deviation_start, data_first_std_deviation_start], y=[
0, 0.17], mode="lines", name="STANDARD DEVIATION 1"))
fig.add_trace(go.Scatter(x=[data_first_std_deviation_end, data_first_std_deviation_end], y=[
0, 0.17], mode="lines", name="STANDARD DEVIATION 1"))
fig.add_trace(go.Scatter(x=[data_second_std_deviation_start, data_second_std_deviation_start], y=[
0, 0.17], mode="lines", name="STANDARD DEVIATION 2"))
fig.add_trace(go.Scatter(x=[data_third_std_deviation_end, data_third_std_deviation_end], y=[
0, 0.17], mode="lines", name="STANDARD DEVIATION 3"))
fig.show()
| 3.484375 | 3 |
models/entidad_paciente.py | gopherss/PClinicaRehabilitacion | 0 | 12791942 | <filename>models/entidad_paciente.py
from database import conexion, consulta
from math import pow
class Paciente:
nombre: str
apellido: str
celular: str
genero: str
dni: str
peso: float
talla: float
fecha_nacimiento: str
def __init__(self, nombre, apellido, celular, genero, dni, peso, talla, fecha_nacimiento):
self.nombre = nombre
self.apellido = apellido
self.celular = celular
self.genero = genero
self.dni = dni
self.peso = peso
self.talla = talla
self.fecha_nacimiento = fecha_nacimiento
def obtener_paciente(self):
mi_consulta = consulta.CONSULTAS_PACIENTE['buscar_paciente']
pacientes = conexion.obtener_datos(consulta=mi_consulta, valores=())
return pacientes
def buscar_paciente(self, informacion):
mi_consulta = consulta.CONSULTAS_PACIENTE['leer_paciente']
pacientes = conexion.leer_datos(consulta=mi_consulta, valores=informacion)
return pacientes
def nuevo_paciente(self, paciente, id_empleado):
mi_consulta = consulta.CONSULTAS_PACIENTE['registrar_paciente']
conexion.crud_datos(consulta=mi_consulta, valores=(
paciente.nombre, paciente.apellido, paciente.celular,
paciente.genero, paciente.dni, paciente.peso,
paciente.talla, paciente.fecha_nacimiento, id_empleado
))
def actualizar_paciente(self, paciente, id_paciente, id_empleado):
mi_consulta = consulta.CONSULTAS_PACIENTE['editar_paciente']
conexion.crud_datos(consulta=mi_consulta, valores=(
paciente.nombre, paciente.apellido, paciente.celular,
paciente.genero, paciente.dni, paciente.peso,
paciente.talla, paciente.fecha_nacimiento, id_empleado,
id_paciente
))
def calcular_imc(self, peso, talla):
imc = peso / pow(talla, 2)
return round(imc,2)
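# Worked example (illustrative): calling calcular_imc(peso=70, talla=1.75) on a
# Paciente instance computes 70 / 1.75**2 = 22.857..., returned rounded as 22.86.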
| 2.484375 | 2 |
crazyflie_demo/scripts/hl_traj.py | wydmynd/crazyflie_tom | 0 | 12791943 | #!/usr/bin/env python
# source - https://github.com/whoenig/crazyflie_ros/commit/b048c1f2fd3ee34f899fa0e2f6c58a4885a39405#diff-970be3522034ff436332d391db26982a
from __future__ import absolute_import, division, unicode_literals, print_function
import rospy
import crazyflie
import uav_trajectory
import time
import tf
#from crazyflie_driver.msg import Hover
from std_msgs.msg import Empty
from crazyflie_driver.srv import UpdateParams
from crazyflie_driver.msg import GenericLogData
from threading import Thread
import tty, termios
import sys
if __name__ == '__main__':
rospy.init_node('test_high_level')
#rospy.Subscriber('/cf1/log_ranges', GenericLogData, get_ranges)
prefix = '/cf1'
cf = crazyflie.Crazyflie("/cf1", "world")
rospy.wait_for_service(prefix + '/update_params')
rospy.loginfo("found update_params service")
cf.setParam("commander/enHighLevel", 1)
cf.setParam("stabilizer/estimator", 2) # Use EKF
cf.setParam("ctrlMel/kp_z", 1.0) # reduce z wobble - default 1.25
#cf.setParam("ctrlMel/ki_z", 0.06) # reduce z wobble - default 0.05
#cf.setParam("ctrlMel/kd_z", 0.2) # reduce z wobble - default 0.4
## reset kalman
cf.setParam("kalman/initialX", 0)
cf.setParam("kalman/initialY", 0)
cf.setParam("kalman/initialZ", 0)
cf.setParam("kalman/resetEstimation", 1)
########
cf.setParam("stabilizer/controller", 2) # 2=Use mellinger controller
time.sleep(1.0)
rospy.loginfo("launching")
#cf.takeoff(targetHeight = 0.4, duration = 3.0)
#time.sleep(5.0)
traj1 = uav_trajectory.Trajectory()
traj1.loadcsv("/home/user/catkin_ws/src/crazyflie_ros/crazyflie_demo/scripts/takeoff.csv")
traj2 = uav_trajectory.Trajectory()
traj2.loadcsv("/home/user/catkin_ws/src/crazyflie_ros/crazyflie_demo/scripts/sine.csv")
print('traj2 duration :', traj2.duration)
cf.uploadTrajectory(0, 0, traj1)
cf.uploadTrajectory(1, len(traj1.polynomials), traj2)
cf.startTrajectory(0, timescale=1.0)
time.sleep(traj1.duration * 2.0)
cf.startTrajectory(1, timescale=1.5)
time.sleep(traj2.duration * 1.5)
time.sleep(1) #additional delay at end
cf.startTrajectory(0, timescale=1.0, reverse=True)
time.sleep(1.2)
cf.stop()
| 1.90625 | 2 |
Authentication.py | ueabu/RaspberryPi-Spotify-Controller | 2 | 12791944 | import json
from flask import Flask, request, redirect, session
import requests
from urllib.parse import quote
app = Flask(__name__)
app.secret_key = "super secret key"
# Authentication Steps, parameters, and responses are defined at https://developer.spotify.com/web-api/authorization-guide/
# Visit this url to see all the steps, parameters, and expected response.
# Client Keys
CLIENT_ID = ""
CLIENT_SECRET = ""
# Spotify URLS
SPOTIFY_AUTH_URL = "https://accounts.spotify.com/authorize"
SPOTIFY_TOKEN_URL = "https://accounts.spotify.com/api/token"
SPOTIFY_API_BASE_URL = "https://api.spotify.com"
API_VERSION = "v1"
SPOTIFY_API_URL = "{}/{}".format(SPOTIFY_API_BASE_URL, API_VERSION)
# Server-side Parameters
CLIENT_SIDE_URL = "http://localhost"
PORT = 5000
REDIRECT_URI = 'http://localhost:5000/callback'
SCOPE = "playlist-modify-public playlist-modify-private streaming user-read-playback-state"
STATE = ""
SHOW_DIALOG_bool = True
SHOW_DIALOG_str = str(SHOW_DIALOG_bool).lower()
auth_query_parameters = {
"response_type": "code",
"redirect_uri": REDIRECT_URI,
"scope": SCOPE,
# "state": STATE,
# "show_dialog": SHOW_DIALOG_str,
"client_id": CLIENT_ID
}
@app.route("/")
def index():
# Auth Step 1: Authorization
url_args = "&".join(["{}={}".format(key, quote(val)) for key, val in auth_query_parameters.items()])
auth_url = "{}/?{}".format(SPOTIFY_AUTH_URL, url_args)
return redirect(auth_url)
@app.route("/callback")
def callback():
# Auth Step 4: Requests refresh and access tokens
auth_token = request.args['code']
code_payload = {
"grant_type": "authorization_code",
"code": str(auth_token),
"redirect_uri": REDIRECT_URI,
'client_id': CLIENT_ID,
'client_secret': CLIENT_SECRET,
}
post_request = requests.post(SPOTIFY_TOKEN_URL, data=code_payload)
# Auth Step 5: Tokens are Returned to Application
response_data = json.loads(post_request.text)
session['access_token'] = response_data["access_token"]
session['refresh_token'] = response_data["refresh_token"]
session['expires_in'] = response_data["expires_in"]
access_token = response_data["access_token"]
refresh_token = response_data["refresh_token"]
token_type = response_data["token_type"]
expires_in = response_data["expires_in"]
# print(access_token)
# print(refresh_token)
# print(expires_in)
return ''
@app.route("/play")
def play():
authorization_header = getAuthorizationHeader()
body = {
"context_uri": "spotify:playlist:5XCRfaXW22GIQIZrUrw2gc",
"offset": {
"position": 6
},
"position_ms": 0
}
# Auth Step 6: Use the access token to access Spotify API
play_endpoint = "{}/me/player/play".format(SPOTIFY_API_URL)
play_request = requests.put(play_endpoint, headers=authorization_header, data=json.dumps(body))
# print(play_request.json())
    return str(play_request.status_code)
@app.route("/pause")
def pause():
authorization_header = getAuthorizationHeader()
pause_profile_endpoint = "{}/me/player/pause".format(SPOTIFY_API_URL)
pause_request = requests.put(pause_profile_endpoint, headers=authorization_header)
    print(pause_request.status_code)
    return str(pause_request.status_code)
@app.route("/next")
def next():
authorization_header = getAuthorizationHeader()
    devices_endpoint = "{}/me/player/devices".format(SPOTIFY_API_URL)
    devices_request = requests.get(devices_endpoint, headers=authorization_header)
    print(devices_request.json())
    return str(devices_request.status_code)
def refreshAccessToken():
print('yea')
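# Illustrative sketch only (not part of the original file): a working refresh
# typically posts the stored refresh token back to the token endpoint with
# grant_type=refresh_token and saves the new access token. It reuses this
# file's CLIENT_ID, CLIENT_SECRET, SPOTIFY_TOKEN_URL and session keys; the
# helper itself and its name are assumptions.
def refresh_access_token_sketch():
    refresh_payload = {
        "grant_type": "refresh_token",
        "refresh_token": session['refresh_token'],
        "client_id": CLIENT_ID,
        "client_secret": CLIENT_SECRET,
    }
    post_request = requests.post(SPOTIFY_TOKEN_URL, data=refresh_payload)
    response_data = json.loads(post_request.text)
    session['access_token'] = response_data["access_token"]
    session['expires_in'] = response_data["expires_in"]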
def getAuthorizationHeader():
authorization_header = {"Authorization": "Bearer {}".format(session['access_token'])}
return authorization_header
if __name__ == "__main__":
app.run(debug=True, port=PORT) | 3 | 3 |
books/urls.py | simonv3/django-reading-list | 1 | 12791945 | from django.conf.urls import patterns, include, url
from rest_framework import routers
from books.api import views as api_views
from books import views
router = routers.DefaultRouter()
# TODO: Nest API endpoints
# # from rest_framework_extensions.routers import ExtendedSimpleRouter
router.register(r'books', api_views.BookViewSet)
router.register(r'editions', api_views.EditionViewSet)
router.register(r'authors', api_views.AuthorViewSet)
router.register(r'publishers', api_views.PublisherViewSet)
urlpatterns = patterns(
'',
url(r'^api/search/external/(?P<q>[\w ]+)/$', api_views.search_external),
url(r'^api/', include(router.urls)),
)
| 1.96875 | 2 |
Machine Learning/House price prediction/server/app.py | rokingshubham1/Python_Scripts | 20 | 12791946 | from flask import Flask , request , jsonify,render_template
import util
app=Flask(__name__)
@app.route('/')
def get_location_names():
response = util.get_location_names()
print(response)
#response.headers.add('Access-control-Allow-origin','*')
return render_template('app.html',response=response)
@app.route('/predict_house_price',methods=['POST'])
def predict_house_price():
total_sqft=float(request.form['total_sqft'])
location = float(request.form['location'])
bhk = int(request.form['bhk'])
    bath = float(request.form['bath'])
response = util.get_location_names()
#response =jsonify({
estimated_price = util.get_estimateud_price(location,total_sqft,bhk,bath)
#})
return render_template('app.html', response=response,price=estimated_price)
if __name__=="__main__":
print("Starting Python flask server from Home proce prediction...")
app.run() | 3.046875 | 3 |
estimagic/inference/bootstrap_samples.py | vishalbelsare/estimagic | 83 | 12791947 | import numpy as np
import pandas as pd
def get_bootstrap_indices(data, cluster_by=None, seed=None, n_draws=1000):
"""Draw positional indices for the construction of bootstrap samples.
Storing the positional indices instead of the full bootstrap samples saves a lot
of memory for datasets with many variables.
Args:
data (pandas.DataFrame): original dataset.
cluster_by (str): column name of the variable to cluster by.
seed (int): Random seed.
        n_draws (int): number of draws, only relevant if seed is None.
Returns:
list: list of numpy arrays with positional indices
"""
np.random.seed(seed)
n_obs = len(data)
if cluster_by is None:
bootstrap_indices = list(np.random.randint(0, n_obs, size=(n_draws, n_obs)))
else:
clusters = data[cluster_by].unique()
drawn_clusters = np.random.choice(
clusters, size=(n_draws, len(clusters)), replace=True
)
bootstrap_indices = _convert_cluster_ids_to_indices(
data[cluster_by], drawn_clusters
)
return bootstrap_indices
def _convert_cluster_ids_to_indices(cluster_col, drawn_clusters):
"""Convert the drawn clusters to positional indices of individual observations.
    Args:
        cluster_col (pandas.Series): cluster id of each observation, in the
            original row order.
        drawn_clusters (numpy.ndarray): array of drawn cluster ids, one row per
            bootstrap draw.

    Returns:
        list: list of numpy arrays with positional indices.
    """
bootstrap_indices = []
cluster_to_locs = pd.Series(np.arange(len(cluster_col)), index=cluster_col)
for draw in drawn_clusters:
bootstrap_indices.append(cluster_to_locs[draw].to_numpy())
return bootstrap_indices
def get_bootstrap_samples(data, cluster_by=None, seed=None, n_draws=1000):
"""Draw bootstrap samples.
If you have memory issues you should use get_bootstrap_indices instead and construct
the full samples only as needed.
Args:
data (pandas.DataFrame): original dataset.
cluster_by (str): column name of the variable to cluster by.
seed (int): Random seed.
        n_draws (int): number of draws, only relevant if seed is None.
Returns:
list: list of resampled datasets.
"""
indices = get_bootstrap_indices(
data=data,
cluster_by=cluster_by,
seed=seed,
n_draws=n_draws,
)
datasets = _get_bootstrap_samples_from_indices(data=data, bootstrap_indices=indices)
return datasets
def _get_bootstrap_samples_from_indices(data, bootstrap_indices):
"""convert bootstrap indices into actual bootstrap samples.
Args:
data (pandas.DataFrame): original dataset.
bootstrap_indices (list): List with numpy arrays containing positional indices
of observations in data.
Returns:
list: list of DataFrames
"""
out = [data.iloc[idx] for idx in bootstrap_indices]
return out
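# Minimal usage sketch (illustrative only, using a small toy DataFrame rather
# than real data); it exercises only the two public functions defined above.
if __name__ == "__main__":
    df = pd.DataFrame({"y": np.arange(6), "cluster": [0, 0, 1, 1, 2, 2]})
    indices = get_bootstrap_indices(df, cluster_by="cluster", seed=0, n_draws=3)
    samples = get_bootstrap_samples(df, cluster_by="cluster", seed=0, n_draws=3)
    # 3 resampled DataFrames; with 3 clusters of 2 rows each, every draw has 6 rows.
    print(len(samples), samples[0].shape)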
| 3.296875 | 3 |
demo_app/lib/layers/fetch.py | HiImJayHireMe/garnish | 0 | 12791948 | <gh_stars>0
from concurrent.futures import ThreadPoolExecutor
from garnish.garnish import Layer
class ConcurrentFetchLayer(Layer):
def __call__(self, *args, **kwargs):
def call(t):
return t.__call__()
with ThreadPoolExecutor(max_workers=4) as pool:
results = list(pool.map(call, self.tasks))
return self.f(results, *args, **kwargs)
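# Standalone demonstration of the same fan-out/combine pattern (illustrative
# only; it does not depend on the garnish Layer base class, whose constructor
# is not shown in this file).
if __name__ == "__main__":
    fetchers = [lambda: "a", lambda: "b", lambda: "c"]
    with ThreadPoolExecutor(max_workers=4) as pool:
        fetched = list(pool.map(lambda task: task(), fetchers))
    print(fetched)  # -> ['a', 'b', 'c']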
| 2.40625 | 2 |
services/workers/settings/base.py | paulowe/aws-boilerplate | 711 | 12791949 | import json
import boto3
from environs import Env
env = Env()
AWS_ENDPOINT_URL = env('AWS_ENDPOINT_URL', None)
SMTP_HOST = env('SMTP_HOST', None)
EMAIL_ENABLED = env.bool('EMAIL_ENABLED', default=True)
secrets_manager_client = boto3.client('secretsmanager', endpoint_url=AWS_ENDPOINT_URL)
def fetch_db_secret(db_secret_arn):
if db_secret_arn is None:
return None
response = secrets_manager_client.get_secret_value(SecretId=db_secret_arn)
return json.loads(response['SecretString'])
LAMBDA_TASK_ROOT = env('LAMBDA_TASK_ROOT', '')
DB_CONNECTION = env('DB_CONNECTION', None)
if DB_CONNECTION:
DB_CONNECTION = json.loads(DB_CONNECTION)
else:
DB_CONNECTION = fetch_db_secret(env('DB_SECRET_ARN', None))
FROM_EMAIL = env('FROM_EMAIL', None)
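# Note (assumption, not taken from this repo): for RDS credentials managed by
# Secrets Manager, the decoded SecretString is typically a JSON object such as
#   {"username": "...", "password": "...", "host": "...", "port": 5432, "dbname": "..."}
# so DB_CONNECTION ends up holding a dict with those keys after fetch_db_secret().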
| 2 | 2 |
examples/cmdui_example.py | TheBrokenEstate/CMDUI | 3 | 12791950 | <filename>examples/cmdui_example.py
import sys
sys.path.insert(0,'..')
import threading
import time
import CMDUI as CMD
def counter():
btn_txt.set("Stop")
tt = time.time()
while running:
t = f"{time.time()-tt:.2f}"
txt.set(t)
time.sleep(0.01)
btn_txt.set("Reset")
def stopwatch():
if btn_txt.get() == "Reset":
btn_txt.set("Start")
txt.set("")
return
global running
running = not running
threading.Thread(target=counter).start()
running = False
root = CMD.CMDUI()
txt = CMD.StringVar()
btn_txt = CMD.StringVar()
btn_txt.set("Start")
frm = CMD.Frame(root)
frm.pack()
lab = CMD.Label(frm, textvariable=txt)
lab.pack(side="bottom")
but = CMD.Button(root, textvariable=btn_txt, command=stopwatch)
but.pack(side="top", expand=True)
but = CMD.Button(root, textvariable=btn_txt, command=stopwatch)
but.pack(side="right")
but = CMD.Button(root, textvariable=btn_txt, command=stopwatch)
but.pack()
but = CMD.Button(root, textvariable=btn_txt, command=stopwatch)
but.pack(side="left")
root.mainloop()
| 2.9375 | 3 |