max_stars_repo_path (stringlengths 3-269) | max_stars_repo_name (stringlengths 4-119) | max_stars_count (int64 0-191k) | id (stringlengths 1-7) | content (stringlengths 6-1.05M) | score (float64 0.23-5.13) | int_score (int64 0-5) |
---|---|---|---|---|---|---|
aioamqp/tests/test_recover.py | michael-k/aioamqp | 0 | 12798451 | """
Amqp basic tests for recover methods
"""
import unittest
from . import testcase
from . import testing
class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase):
@testing.coroutine
def test_basic_recover_async(self):
yield from self.channel.basic_recover_async(requeue=True)
@testing.coroutine
def test_basic_recover_async_no_requeue(self):
yield from self.channel.basic_recover_async(requeue=False)
@testing.coroutine
def test_basic_recover(self):
result = yield from self.channel.basic_recover(requeue=True)
self.assertTrue(result)
| 2.546875 | 3 |
applications/physbam/physbam-lib/Scripts/Archives/pd/mon/SERVER.py | schinmayee/nimbus | 20 | 12798452 | <reponame>schinmayee/nimbus
#!/usr/bin/python
from pd.common import CONFIG
from pd.common import SOCKET
import os
import time
import threading
import pickle
class SERVER:
def __init__(self):
self.sessions={}
self.next_id=1
# Define RPC interface
self.mutex=threading.Lock()
self.commands=["Register_Client","Session_Info","Session_List","Create_Session","Delete_Session","Label_Session","Update_Status","Remove_Status_If_Exists"]
self.backup_interval=30
# Save loop
self.session_file=CONFIG.pdmon_session_file
self.backup_thread=threading.Thread(target=self.Backup)
self.backup_thread.start()
# read in data
try:
self.sessions=pickle.load(open(self.session_file,"r"))
if(len(self.sessions.keys())>0):
self.next_id=max(self.sessions.keys())+1
except:
pass
print "MON_SERVER: Next id starts at %d"%self.next_id
def Client_Connect(self,x):
pass
def Client_Disconnect(self,x):
pass
def Register_Client(self,client_id,user,host):
pass
# private
def Validate_Session_Id(self,session_id):
if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION("Invalid session id")
elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION("Invalid session id %d"%session_id)
# PUBLIC ROUTINES
def Session_Info(self,client,session_id):
self.Validate_Session_Id(session_id)
return self.sessions[session_id]
def Session_List(self,client):
return self.sessions
def Create_Session(self,client,username):
session_id,directory=None,None
session_id=self.next_id
self.next_id+=1
info={"id":session_id, "label": "<unnamed>","username": username,"created_date":time.time(),"last_update": 0,"user_status":{}}
self.sessions[session_id]=info
return info
def Delete_Session(self,client,session_id):
self.Validate_Session_Id(session_id)
del self.sessions[session_id]
def Label_Session(self,client,session_id,label):
self.Validate_Session_Id(session_id)
self.sessions[session_id]["label"]=label
def Update_Status(self,client,session_id,key,value):
self.Validate_Session_Id(session_id)
self.sessions[session_id]["user_status"][key]=value
self.sessions[session_id]["last_update"]=time.time()
def Remove_Status_If_Exists(self,client,session_id,key):
self.Validate_Session_Id(session_id)
try:
del self.sessions[session_id]["user_status"][key]
except:
pass
def Backup(self):
while 1:
time.sleep(self.backup_interval)
print "Backing up..."
try:
self.mutex.acquire()
pickle.dump(self.sessions,open(self.session_file,"w"))
finally:
self.mutex.release()
import socket
if __name__ == "__main__":
server=SERVER()
SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server)
# SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server,
# (CONFIG.server_private_key_file,CONFIG.server_certificate_file,CONFIG.ca_certificate_file))
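# Hypothetical shape of a session record as produced by Create_Session above
# (the username "alice" is an assumption for illustration):
# {"id": 1, "label": "<unnamed>", "username": "alice",
#  "created_date": 1467158400.0, "last_update": 0, "user_status": {}}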
| 2.390625 | 2 |
problems/daily-temperatures/solution.py | HearyShen/leetcode-cn | 1 | 12798453 | <reponame>HearyShen/leetcode-cn
import time
from typing import List
class Solution:
def dailyTemperatures(self, T: List[int]) -> List[int]:
if not T:
return []
deltaDays = [0] * len(T)
stack = []
for i in range(len(T)):
# print([(i, T[i]) for i in stack], (i, T[i]))
if not stack:
stack.append(i)
continue
# record and pop all the colder days in stack
j = len(stack) - 1
while j >= 0 and T[stack[j]] < T[i]:
deltaDays[stack[j]] = i - stack[j]
stack.pop()
j -= 1
stack.append(i)
return deltaDays
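# Hypothetical walk-through of the monotonic stack above on T=[73,74,75,71]:
# i=0 pushes 0; i=1 pops 0 (73<74) so deltaDays[0]=1; i=2 pops 1 so
# deltaDays[1]=1; i=3 pushes 3 (71<75), leaving deltaDays[2] and deltaDays[3]
# at 0 until a warmer day arrives.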
if __name__ == "__main__":
testCases = [([73, 74, 75, 71, 69, 72, 76, 73], [1, 1, 4, 2, 1, 1, 0, 0])]
for i, testCase in enumerate(testCases):
temperatures, ans = testCase
tic = time.time()
ret = Solution().dailyTemperatures(temperatures)
toc = time.time()
print(f"{i}: {ret == ans}, return {ret} in {toc-tic:.3f}s.")
| 3.296875 | 3 |
EasyRecycle/recycle/serializers.py | YuriyLisovskiy/EasyRecycle | 0 | 12798454 | from rest_framework import serializers
from recycle.models import Location, CommercialRequest, Transaction
from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast
class LocationSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
open_time = serializers.SerializerMethodField()
close_time = serializers.SerializerMethodField()
owner_id = serializers.SerializerMethodField()
garbage_types = serializers.SerializerMethodField()
@staticmethod
def get_open_time(obj):
return obj.open_time.strftime('%H:%M')
@staticmethod
def get_close_time(obj):
return obj.close_time.strftime('%H:%M')
@staticmethod
def get_owner_id(obj):
return obj.owner.id
@staticmethod
def get_garbage_types(obj):
return [{
'short': gt.garbage_type,
'long': gt.get_garbage_type_display()
} for gt in obj.garbagetype_set.all()]
class Meta:
model = Location
fields = (
'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id'
)
class CreateLocationSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
garbage_types = serializers.ListField(required=False)
class Meta:
model = Location
fields = (
'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner'
)
validators = (
IsGarbageCollectorValidator('owner'),
)
class EditLocationSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
garbage_types = serializers.ListField(required=False)
class Meta:
model = Location
fields = (
'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner'
)
validators = (
IsGarbageCollectorValidator('owner'),
)
extra_kwargs = {
'address': {'required': False},
'open_time': {'required': False},
'close_time': {'required': False},
'price_per_kg': {'required': False},
'garbage_types': {'required': False},
'owner': {'required': False}
}
class CommercialOrderSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
email = serializers.SerializerMethodField()
location_id = serializers.SerializerMethodField()
user_id = serializers.SerializerMethodField()
@staticmethod
def get_email(obj):
return obj.user.email if obj.user else ''
@staticmethod
def get_location_id(obj):
return obj.location.id if obj.location else -1
@staticmethod
def get_user_id(obj):
return obj.user.id if obj.user else -1
class Meta:
model = CommercialRequest
fields = (
'id', 'address', 'email', 'date', 'garbage_type', 'mass', 'status', 'location_id', 'user_id'
)
class CreateCommercialOrderSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
class Meta:
model = CommercialRequest
fields = (
'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user'
)
validators = (
IsCommercialValidator('user'), DateIsNotPast('date')
)
class EditCommercialOrderSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
class Meta:
model = CommercialRequest
fields = (
'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user'
)
validators = (
IsCommercialValidator('user'),
)
extra_kwargs = {
'address': {'required': False},
'date': {'required': False},
'garbage_type': {'required': False},
'mass': {'required': False},
'status': {'required': False},
'location': {'required': False},
'user': {'required': False}
}
class TransactionSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
datetime = serializers.SerializerMethodField()
user_id = serializers.SerializerMethodField()
collector_id = serializers.SerializerMethodField()
@staticmethod
def get_datetime(obj):
return obj.datetime.strftime('%b %d, %Y at %H:%M')
@staticmethod
def get_user_id(obj):
return obj.user.id if obj.user else -1
@staticmethod
def get_collector_id(obj):
return obj.collector.id if obj.collector else -1
class Meta:
model = Transaction
fields = (
'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id'
)
class CreateTransactionSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
class Meta:
model = Transaction
fields = (
'id', 'garbage_type', 'points', 'mass', 'user', 'collector'
)
| 2.078125 | 2 |
DS-400/Medium/61-Rotate List/OnePointer.py | ericchen12377/Leetcode-Algorithm-Python | 2 | 12798455 | # Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution(object):
def rotateRight(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
if not head:
return None
n = 1
cur = head
while cur.next:
n+=1
cur = cur.next
cur.next = head
m = n - k % n
i = 0
while i < m:
cur = cur.next
i += 1
res = cur.next
cur.next = None
return res
| 3.8125 | 4 |
ofstest/ofs/doctype/store/test_store.py | keithyang77/ofstest | 0 | 12798456 | <reponame>keithyang77/ofstest
# Copyright (c) 2021, mds and Contributors
# See license.txt
# import frappe
import unittest
class TestStore(unittest.TestCase):
pass
| 1.125 | 1 |
steps/oci-database-step-instances-terminate/step.py | Bryxxit/relay-oci-oracleDB | 0 | 12798457 | <filename>steps/oci-database-step-instances-terminate/step.py
#!/usr/bin/env python
import oci
config = oci.config.from_file()
from oci.config import validate_config
validate_config(config)
# initialize the DatabaseClient
database = oci.database.DatabaseClient(config)
db_system_ids = ["dfsdfgsfdsdf","fsdxfgsd"]
if not db_system_ids:
print("No instance IDs found")
exit(0)
print('Terminating instances: {}'.format(db_system_ids))
for db_system_id in db_system_ids:
database.terminate_db_system(db_system_id)
| 2.359375 | 2 |
superutils/utils.py | cshanxiao/superutils | 0 | 12798458 | <reponame>cshanxiao/superutils<filename>superutils/utils.py
# -*- coding: utf-8 -*-
u'''
@summary:
@author: cshanxiao
@date: 2016-07-18
'''
import time
def print_obj(obj, inner=True, full=False):
print "\nStart {} {}".format(obj, "=" * 50)
print "dir info: {}".format(dir(obj))
for attr in dir(obj):
try:
if attr.startswith("__") and not full:
continue
if attr.startswith("_") and not inner:
continue
attr_value = getattr(obj, attr)
if "method" in str(type(attr_value)):
print "### method {}".format(attr)
elif "wrapper" in str(type(attr_value)):
print "=== wrapper {}".format(attr)
else:
print "--- attribute {}: {}".format(attr, attr_value)
except Exception, e:
print "*** read error {}: {}".format(attr, e)
print ("End {} {}").format(obj, "=" * 50)
def func_time(func):
_id = [0]
def _wrapper(*args,**kwargs):
start_time = time.time()
result = func(*args,**kwargs)
end_time = time.time()
_id[0] += 1
print "{} [{}] [{:.3f} ms] {}".format(
_id[0],
time.strftime("%Y-%m-%d %H:%M:%S"),
(end_time - start_time) * 1000,
{"func_name": func.func_name,
"file_name": func.func_code.co_filename,
"file_lineno": func.func_code.co_firstlineno
}
)
return result
return _wrapper
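# Hypothetical usage sketch of the decorator above:
# @func_time
# def slow_add(a, b):
#     time.sleep(0.01)
#     return a + b
# slow_add(1, 2) prints one timing line per call, e.g.
# 1 [2016-07-18 12:00:00] [10.123 ms] {'func_name': 'slow_add', ...}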
def super_test():
print_obj(str, full=True)
print_obj(lambda x: x, inner=True)
if __name__ == '__main__':
super_test()
| 2.21875 | 2 |
BotMessageSender.py | sharry008/anonymise | 1 | 12798459 | <reponame>sharry008/anonymise
"""
This software has been developed by github user fndh (http://github.com/fndh)
You are free to use, modify and redistribute this software as you please, as
long as you follow the conditions listed in the LICENSE file of the github
repository indicated. I want to thank you for reading this small paragraph,
and please consider sending me a message if you are using this software! It
will surely make my day.
"""
from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup,
InlineKeyboardButton)
class MessageSender:
def __init__(self, logger):
self.logger = logger
def send_text(self, bot, user, message, tried=0, reply=None,
reply_markup=None, parse_mode=None):
"""
Send messages with markdown, markup or replies.
Returns the sent message.
"""
try:
return bot.sendMessage(
str(user),
message,
reply_to_message_id=reply,
reply_markup=reply_markup,
parse_mode=parse_mode)
except TelegramError as e:
# Log the errors
self.logger.log(
f"TelegramError when sending message to {user}:")
self.logger.log(f"\t{e} - Try #{tried}/3")
if str(e) == 'Timed out' and tried < 3:
# Retry up to 3 times
return self.send_text(
bot, user, message,
tried=tried+1,
reply=reply,
reply_markup=reply_markup,
parse_mode=parse_mode)
except RuntimeError as e:
self.logger.log("RuntimeError when sending message")
self.logger.log(e)
except Exception as e:
self.logger.log("Unhandled error when sending message")
self.logger.log(e)
def send_typing(self, bot, chat_id):
"""
Send "Bot is typing..." action to chat
"""
bot.sendChatAction(chat_id, ChatAction.TYPING)
def forward_message(self, message, user_id):
return message.forward(user_id)
def create_inline_keyboard(self, button_texts, callbacks):
"""Generate a keyboard with the options specified.
Make sure bot handles callback methods before creating a keyboard.
"""
if button_texts is None or callbacks is None:
return None
if len(button_texts) != len(callbacks):
raise ValueError("Buttons and callbacks size doesn't match")
kb_buttons = []
# Iterate over information rows
for n in range(len(button_texts)):
# Extract display text and callback function
button_text_row = button_texts[n]
callback_row = callbacks[n]
button_row = []
# Verify size
if len(button_text_row) != len(callback_row):
raise ValueError("Buttons and callbacks size doesn't match")
# Iterate over button texts
for m in range(len(button_text_row)):
text = button_text_row[m]
callback = callback_row[m]
# Create button
kb_button = InlineKeyboardButton(
text=text,
callback_data=callback)
# Add to button row
button_row.append(kb_button)
# Add row to keyboard
kb_buttons.append(button_row)
keyboard = InlineKeyboardMarkup(kb_buttons)
return keyboard
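# Hypothetical usage sketch (assumes a configured `bot`, `logger` and user id):
# sender = MessageSender(logger)
# keyboard = sender.create_inline_keyboard(
#     [["Yes", "No"]],           # one row of button labels
#     [["cb_yes", "cb_no"]])     # matching callback payloads
# sender.send_text(bot, user_id, "Proceed?", reply_markup=keyboard)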
| 2.203125 | 2 |
foo/api_image.py | CyberlifeCN/qrcode | 1 | 12798460 | #!/usr/bin/env python
# _*_ coding: utf-8_*_
#
# Copyright 2016-2017 <EMAIL>
# <EMAIL>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tornado.web
import logging
import time
import sys
import os
import json as JSON # alias so it does not clash with local variables named json
from comm import *
from global_const import *
from base_handler import *
from tornado.escape import json_encode, json_decode
from tornado.httpclient import *
from tornado.httputil import url_concat
from tornado_swagger import swagger
from image_verify import generate_verify_image
@swagger.model()
class ImageResp:
def __init__(self, errCode, errMsg, code, imgUrl):
self.errCode = errCode
self.errMsg = errMsg
self.code = code
self.imgUrl = imgUrl
# /api/image-verify
class ApiImageVerifyXHR(tornado.web.RequestHandler):
@swagger.operation(nickname='post')
def post(self):
"""
@description: generate an image verification code
@rtype: L{ImageResp}
@raise 400: Invalid Input
@raise 500: Internal Server Error
"""
logging.info("POST %r", self.request.uri)
_id = generate_uuid_str()
timestamp = current_timestamp()
_datehour = timestamp_to_datehour(timestamp)
path = cur_file_dir()
logging.debug("got path %r", path)
if not os.path.exists(path + "/static/image-verify/" + _datehour):
os.makedirs(path + "/static/image-verify/" + _datehour)
# To save it
filepath = path + "/static/image-verify/" + _datehour + "/" + _id + '.gif'
mstream, _code = generate_verify_image(save_img=True, filepath=filepath)
img_url = self.request.protocol + "://" + self.request.host
img_url = img_url + '/static/image-verify/' + _datehour + "/" + _id + '.gif'
logging.info("Success[200]: generate image-verify code=[%r] img_url=[%r]", _code, img_url)
self.set_status(200) # Success
self.write(JSON.dumps({"errCode":200,"errMsg":"Success","code":_code,"imageUrl":img_url}))
self.finish()
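# Hypothetical request/response sketch for the handler above (code and host
# values are illustrative only):
# POST /api/image-verify ->
# {"errCode": 200, "errMsg": "Success", "code": "AB3D",
#  "imageUrl": "http://host/static/image-verify/2017010112/<uuid>.gif"}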
| 2.0625 | 2 |
lectures/slides_tex/example_imports.py | materialsvirtuallab/nano281 | 38 | 12798461 | <filename>lectures/slides_tex/example_imports.py<gh_stars>10-100
import math
z = math.sin(3.14159) # Gives ~0 | 1.648438 | 2 |
scripts/movielens/process_raw.py | NighTurs/discovery-rs | 3 | 12798462 | import pickle
import pandas as pd
import os
from os import path
from scripts.process_raw import keep_positive_ratings, count_filter
from scripts.config import params
def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold):
ds = pd.read_csv(path.join(input_dir, 'ratings.csv'))
print('Overall records:', ds.shape[0])
print('Overall users:', len(ds['userId'].unique()))
print('Overall movies:', len(ds['movieId'].unique()))
ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating')
ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId')
ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId')
print('Left records:', ds.shape[0])
print('Left users:', len(ds['userId'].unique()))
print('Left movies:', len(ds['movieId'].unique()))
u2i = {user: ind for ind, user in enumerate(ds['userId'].unique())}
x2i = {movie: ind for ind, movie in enumerate(ds['movieId'].unique())}
processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]),
'item': ds['movieId'].apply(lambda x: x2i[x])})
if not path.exists(output_dir):
os.makedirs(output_dir)
processed.to_csv(path.join(output_dir, 'ds.csv'), index=False)
with open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle:
pickle.dump(u2i, handle)
with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle:
pickle.dump(x2i, handle)
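# Hypothetical shape of the saved artifacts: ds.csv holds dense (user, item)
# indices, while u2i.pickle / x2i.pickle map raw ids to those indices,
# e.g. u2i = {17: 0, 42: 1, ...}.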
if __name__ == '__main__':
common_params = params['ml']['common']
proc_params = params['ml']['process_raw']
process_raw(common_params['raw_dir'],
common_params['proc_dir'],
int(proc_params['movie_users_threshold']),
int(proc_params['user_movies_threshold']))
| 2.640625 | 3 |
scripts/model_selection/cross_validate_utils.py | riccardopoiani/recsys_2019 | 2 | 12798463 | def get_seed_list():
return [6910, 1996, 2019, 153, 12, 5, 1010, 9999, 666, 467]
def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list, results):
with open(file_path, "w") as f:
f.write("Recommender class: {}\n".format(recommender_name))
f.write("Recommender fit parameters: {}\n".format(recommender_fit_parameters))
f.write("Number of folds: {}\n".format(num_folds))
f.write("Seed list: {}\n\n".format(str(seed_list)))
f.write(str(results))
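# Hypothetical usage sketch (names and values are illustrative only):
# write_results_on_file("cv_results.txt", "ItemKNNRecommender", {"topK": 50},
#                       num_folds=10, seed_list=get_seed_list(),
#                       results={"MAP": 0.05})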
| 2.875 | 3 |
CircadianDesktops/app.py | Luke943/CircadianDesktops | 0 | 12798464 | <reponame>Luke943/CircadianDesktops<gh_stars>0
"""
Main script for Circadian Desktops app.
Settings file and logo images are stored locally.
Contains MainWindow class and script to run app.
"""
import os
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
import custom_qt
import functions
from ui_mainwindow import Ui_MainWindow
settingsFile = "settings.txt"
logoFile = "Icons\\logo.png"
class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
"""
MainWindow class for the UI.
Inherits from Ui_MainWindow, which contains the layout of the widgets.
"""
def __init__(self, parent=None, settings=None):
# setup
super(MainWindow, self).__init__(parent)
self.setupUi(self)
self.settingsPath = settings
self.isClosedFromTray = False
self.settings = functions.get_settings(settings)
self.activeImage = ''
# connect widgets to methods
self.btnSelectDayImg.clicked.connect(
lambda: self.get_image(self.labelDayImg))
self.btnSelectDDImg.clicked.connect(
lambda: self.get_image(self.labelDDImg))
self.btnSelectNightImg.clicked.connect(
lambda: self.get_image(self.labelNightImg))
self.comboBox.currentIndexChanged.connect(self.set_background_style)
self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time)
self.radioDefaultTimes.clicked.connect(self.default_times)
self.radioCustomTimes.clicked.connect(self.custom_times)
self.boxDark.stateChanged.connect(self.set_palette)
self.boxMinimize.stateChanged.connect(self.minimize_behaviour)
self.boxStartup.stateChanged.connect(self.startup_behaviour)
# tray icon
self.trayIcon = QtWidgets.QSystemTrayIcon()
self.trayIcon.setIcon(QtGui.QIcon(logoFile))
self.trayIcon.setToolTip("Circadian Desktops")
self.trayIcon.activated.connect(self.__icon_activated)
self.trayIcon.show()
self.trayMenu = QtWidgets.QMenu()
self.trayMenu.addAction("Open Circadian Desktops", self.show_window)
self.trayMenu.addSeparator()
self.trayMenu.addAction(
"Exit Circadian Desktops", self.close_from_tray)
self.trayIcon.setContextMenu(self.trayMenu)
# timers
self.mainTimer = QtCore.QTimer()
self.mainTimer.timeout.connect(self.set_desktop)
self.shuffleTimer = QtCore.QTimer()
self.shuffleTimer.timeout.connect(self.shuffle_images)
# populate data
self.set_image(self.settings['labelDayImg'], self.labelDayImg)
self.set_image(self.settings['labelDDImg'], self.labelDDImg)
self.set_image(self.settings['labelNightImg'], self.labelNightImg)
self.load_times()
self.load_preferences()
self.set_desktop()
self.set_background_style()
def set_image(self, fileName: str, imageLbl: QtWidgets.QLabel):
if self.settings['isSlideshow']:
fileName = functions.random_image(fileName)
pixmap = QtGui.QPixmap(fileName)
pixmap = pixmap.scaled(
imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio)
imageLbl.setPixmap(pixmap)
imageLbl.setAlignment(QtCore.Qt.AlignCenter)
self.settings[imageLbl.objectName()] = fileName
def get_image(self, imageLbl: QtWidgets.QLabel):
fileName, _ = QtWidgets.QFileDialog.getOpenFileName(
None, "Select image", "", "Image files (*.png *.jpg *.jpeg *.bmp)")
if fileName:
self.set_image(fileName, imageLbl)
self.set_desktop()
def shuffle_images(self):
self.set_image(self.settings['labelDayImg'], self.labelDayImg)
self.set_image(self.settings['labelDDImg'], self.labelDDImg)
self.set_image(self.settings['labelNightImg'], self.labelNightImg)
self.shuffleTimer.start(self.settings['shuffleTime'] * 60000)
self.set_desktop()
def set_desktop(self):
now = QtCore.QTime.currentTime()
if self.timeDawn.time() < now <= self.timeDay.time():
imageFile = self.settings['labelDDImg']
elif self.timeDay.time() < now <= self.timeDusk.time():
imageFile = self.settings['labelDayImg']
elif self.timeDusk.time() < now <= self.timeNight.time():
imageFile = self.settings['labelDDImg']
else:
imageFile = self.settings['labelNightImg']
if imageFile != self.activeImage:
functions.set_desktop(imageFile)
self.activeImage = imageFile
self.mainTimer.start(60000)
def set_background_style(self):
if self.comboBox.currentText() == 'single image':
self.shuffleTimer.stop()
self.settings['isSlideshow'] = 0
self.spinShuffleTime.setReadOnly(True)
elif self.comboBox.currentText() == 'slideshow from folders':
self.shuffleTimer.start(self.settings['shuffleTime'] * 60000)
self.settings['isSlideshow'] = 1
self.spinShuffleTime.setReadOnly(False)
def set_shuffle_time(self):
newTime = self.spinShuffleTime.value() * 60000
if self.shuffleTimer.remainingTime() > newTime:
self.shuffleTimer.start(newTime)
self.settings['shuffleTime'] = self.spinShuffleTime.value()
def load_times(self):
if int(self.settings['isCustomTimes']):
self.timeDawn.setTime(QtCore.QTime(
int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0))
self.timeDay.setTime(QtCore.QTime(
int(self.settings['dayhour']), int(self.settings['daymin']), 0))
self.timeDusk.setTime(QtCore.QTime(
int(self.settings['duskhour']), int(self.settings['duskmin']), 0))
self.timeNight.setTime(QtCore.QTime(
int(self.settings['nighthour']), int(self.settings['nightmin']), 0))
self.custom_times()
self.radioCustomTimes.setChecked(True)
else:
self.default_times()
def custom_times(self):
self.timeDawn.setReadOnly(False)
self.timeDay.setReadOnly(False)
self.timeDusk.setReadOnly(False)
self.timeNight.setReadOnly(False)
def default_times(self):
d = functions.get_times()
self.timeDawn.setTime(QtCore.QTime(
d['dawn'].hour, d['dawn'].minute, 0))
self.timeDay.setTime(QtCore.QTime(
d['sunrise'].hour, d['sunrise'].minute, 0))
self.timeDusk.setTime(QtCore.QTime(
d['sunset'].hour, d['sunset'].minute, 0))
self.timeNight.setTime(QtCore.QTime(
d['dusk'].hour, d['dusk'].minute, 0))
self.timeDawn.setReadOnly(True)
self.timeDay.setReadOnly(True)
self.timeDusk.setReadOnly(True)
self.timeNight.setReadOnly(True)
def load_preferences(self):
if self.settings['isSlideshow']:
self.comboBox.setCurrentIndex(1)
else:
self.spinShuffleTime.setReadOnly(True)
self.spinShuffleTime.setValue(self.settings['shuffleTime'])
if self.settings['isDarkMode']:
self.boxDark.setChecked(True)
self.set_palette()
if self.settings['minimizeToTray']:
self.boxMinimize.setChecked(True)
else:
self.isClosedFromTray = True
if self.settings['runOnStartup']:
self.boxStartup.setChecked(True)
def set_palette(self):
if self.boxDark.isChecked():
self.setPalette(custom_qt.DarkPalette())
self.settings['isDarkMode'] = 1
else:
self.setPalette(QtGui.QPalette())
self.settings['isDarkMode'] = 0
def startup_behaviour(self):
if self.boxStartup.isChecked():
functions.run_on_startup(True)
self.settings['runOnStartup'] = 1
else:
functions.run_on_startup(False)
self.settings['runOnStartup'] = 0
def minimize_behaviour(self):
if self.boxMinimize.isChecked():
self.isClosedFromTray = False
self.settings['minimizeToTray'] = 1
else:
self.isClosedFromTray = True
self.settings['minimizeToTray'] = 0
def show_window(self):
functions.set_background_priority(False)
getattr(self, "raise")()
self.activateWindow()
self.setWindowState(QtCore.Qt.WindowNoState)
self.show()
def close_from_tray(self):
self.isClosedFromTray = True
self.close()
def closeEvent(self, event):
if self.radioCustomTimes.isChecked():
self.settings['isCustomTimes'] = 1
self.settings['dawnhour'] = self.timeDawn.time().hour()
self.settings['dawnmin'] = self.timeDawn.time().minute()
self.settings['dayhour'] = self.timeDay.time().hour()
self.settings['daymin'] = self.timeDay.time().minute()
self.settings['duskhour'] = self.timeDusk.time().hour()
self.settings['duskmin'] = self.timeDusk.time().minute()
self.settings['nighthour'] = self.timeNight.time().hour()
self.settings['nightmin'] = self.timeNight.time().minute()
else:
self.settings['isCustomTimes'] = 0
functions.write_settings(self.settingsPath, self.settings)
if self.isClosedFromTray:
event.accept()
else:
event.ignore()
self.hide()
functions.set_background_priority(True)
def __icon_activated(self, reason):
if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger:
self.show_window()
if __name__ == "__main__":
os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings & images
functions.set_process_explicit() # So Windows uses logo icon
app = QtWidgets.QApplication([])
ui = MainWindow(settings=settingsFile)
app.setStyle('fusion')
if '/noshow' in sys.argv:
functions.set_background_priority(True)
else:
ui.show()
app.setWindowIcon(QtGui.QIcon(logoFile))
ui.setWindowIcon(QtGui.QIcon(logoFile))
sys.exit(app.exec_())
| 2.15625 | 2 |
tw_serverinfo/models/__init__.py | DaRealFreak/Teeworlds-ServerInfo | 6 | 12798465 | #!/usr/local/bin/python
# coding: utf-8
import abc
class Server(abc.ABC):
"""Server Model Template, containing properties for same attributes of MasterServer and GameServer objects"""
_ip: str = ''
_port: int = 8300
_response: bool = False
_token = b''
_request_token: bytes = b''
def __eq__(self, other) -> bool:
"""Check for equality of objects
:type other: Server
:return:
"""
return self.ip == other.ip and self.port == other.port
@property
def ip(self) -> str:
return self._ip
@ip.setter
def ip(self, ip: str) -> None:
self._ip = ip
@property
def port(self) -> int:
return self._port
@port.setter
def port(self, port: int) -> None:
self._port = port
@property
def response(self) -> bool:
return self._response
@response.setter
def response(self, response: bool) -> None:
self._response = response
@property
def token(self) -> bytes:
return self._token
@token.setter
def token(self, token: bytes) -> None:
self._token = token
@property
def request_token(self) -> bytes:
return self._request_token
@request_token.setter
def request_token(self, token: bytes) -> None:
self._request_token = token
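# Hypothetical usage sketch (Server is a template, so subclass it first):
# class GameServer(Server):
#     pass
# a = GameServer(); a.ip = "127.0.0.1"; a.port = 8303
# __eq__ above compares servers by (ip, port) only.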
| 3.21875 | 3 |
src/dataset/base_face.py | chuanli11/SADRNet | 67 | 12798466 | import os
import sys
import numpy as np
import scipy.io as sio
from skimage import io
import time
import math
import skimage
import src.faceutil
from src.faceutil import mesh
from src.faceutil.morphable_model import MorphabelModel
from src.util.matlabutil import NormDirection
from math import sin, cos, asin, acos, atan, atan2
from PIL import Image
import matplotlib.pyplot as plt
# global data
bfm = MorphabelModel('data/Out/BFM.mat')
def get_transform_matrix(s, angles, t, height):
"""
:param s: scale
:param angles: [3] rad
:param t: [3]
:return: 4x4 transmatrix
"""
x, y, z = angles[0], angles[1], angles[2]
Rx = np.array([[1, 0, 0],
[0, cos(x), sin(x)],
[0, -sin(x), cos(x)]])
Ry = np.array([[cos(y), 0, -sin(y)],
[0, 1, 0],
[sin(y), 0, cos(y)]])
Rz = np.array([[cos(z), sin(z), 0],
[-sin(z), cos(z), 0],
[0, 0, 1]])
# rotate
R = Rx.dot(Ry).dot(Rz)
R = R.astype(np.float32)
T = np.zeros((4, 4))
T[0:3, 0:3] = R
T[3, 3] = 1.
# scale
S = np.diagflat([s, s, s, 1.])
T = S.dot(T)
# offset move
M = np.diagflat([1., 1., 1., 1.])
M[0:3, 3] = t.astype(np.float32)
T = M.dot(T)
# revert height
# x[:,1]=height-x[:,1]
H = np.diagflat([1., 1., 1., 1.])
H[1, 1] = -1.0
H[1, 3] = height
T = H.dot(T)
return T.astype(np.float32)
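# Hypothetical sanity check: with s=1 and zero angles/translation the matrix
# only flips y about `height`:
# T = get_transform_matrix(1.0, np.zeros(3), np.zeros(3), height=256)
# np.allclose(T.dot([10., 20., 30., 1.]), [10., 236., 30., 1.])  # True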
| 2.15625 | 2 |
alipay/aop/api/response/AlipayOpenMiniPlanOperateBatchqueryResponse.py | antopen/alipay-sdk-python-all | 213 | 12798467 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo
class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse):
def __init__(self):
super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__()
self._page_data = None
self._page_num = None
self._page_size = None
self._total_number = None
@property
def page_data(self):
return self._page_data
@page_data.setter
def page_data(self, value):
if isinstance(value, list):
self._page_data = list()
for i in value:
if isinstance(i, PaymentSuccessPagePlanInfo):
self._page_data.append(i)
else:
self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i))
@property
def page_num(self):
return self._page_num
@page_num.setter
def page_num(self, value):
self._page_num = value
@property
def page_size(self):
return self._page_size
@page_size.setter
def page_size(self, value):
self._page_size = value
@property
def total_number(self):
return self._total_number
@total_number.setter
def total_number(self, value):
self._total_number = value
def parse_response_content(self, response_content):
response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content)
if 'page_data' in response:
self.page_data = response['page_data']
if 'page_num' in response:
self.page_num = response['page_num']
if 'page_size' in response:
self.page_size = response['page_size']
if 'total_number' in response:
self.total_number = response['total_number']
| 2 | 2 |
python/wecall/utils/tabix_wrapper.py | dylex/wecall | 8 | 12798468 | # All content Copyright (C) 2018 Genomics plc
from wecall.genomics.chromosome import standardise_chromosome
import pysam
class TabixWrapper(object):
def __init__(self, tabix_filename):
self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r')
self.__contig_mapping = {standardise_chromosome(
contig): contig for contig in self.__tabix_file.contigs}
@property
def header(self):
return (line for line in self.__tabix_file.header)
@property
def contigs(self):
return self.__tabix_file.contigs
def fetch_generator(self, chrom_interval):
# Tabix will throw a ValueError if the chromosome specified is not
# present in the index for this file.
try:
if chrom_interval.chrom is None:
return self.__tabix_file.fetch()
else:
return self.__tabix_file.fetch(
self.__contig_mapping.get(
chrom_interval.chrom,
chrom_interval.chrom),
chrom_interval.interval.start,
chrom_interval.interval.end)
except ValueError:
# The chromosome is not in the index; behave like an empty result set.
return iter(())
def fetch_region(self, region):
try:
return self.__tabix_file.fetch(region=region)
except ValueError:
return iter(())
def close(self):
self.__tabix_file.close()
def __enter__(self):
return self
def __exit__(self, ex_type, value, traceback):
self.close()
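# Hypothetical usage sketch (needs a bgzipped, tabix-indexed input file):
# with TabixWrapper("calls.vcf.gz") as tabix:
#     for record in tabix.fetch_region("20:1000000-2000000"):
#         print(record)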
| 2.6875 | 3 |
simplestore/products/admin.py | martinstastny/django-store | 36 | 12798469 | <reponame>martinstastny/django-store
from django.contrib import admin
from simplestore.products.models.category import Category
from simplestore.products.models.product import Product
class CategoryAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ('name',)}
class ProductAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ('name',)}
list_display = ('name', 'sku', 'price', 'slug', 'is_active',)
ordering = ['-is_active', 'name']
list_filter = ('is_active',)
admin.site.register(Product, ProductAdmin)
admin.site.register(Category, CategoryAdmin)
| 2.09375 | 2 |
command_center.py | sceeter89/HomeCommandCenter | 0 | 12798470 | from collections import namedtuple, defaultdict
import time
import logging
from datetime import datetime, timedelta
from yapsy.PluginManager import PluginManager
from api.exceptions import TerminateApplication
from api.sensor import Sensor
from api.motor import Motor
PluginDetails = namedtuple('PluginInfo', ['name', 'key', 'instance', 'wants_last_chance', 'path'])
ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN = 10
MINIMAL_LOOP_DURATION = timedelta(seconds=0.2)
class CoreApplication:
def __init__(self, sensors, motors):
self._motors = motors
self._sensors = sensors
self._disabled_plugins = set()
self._runtime_stats = {
'start_time': datetime.now(),
'loop_counter': 0,
'errors': defaultdict(list),
'average_loop_duration': timedelta(seconds=0),
'last_loop_duration': timedelta(seconds=0)
}
self._termination = None
self._total_loops_duration = timedelta()
def _process_sensors(self, state):
for plugin in self._sensors:
if plugin.key in self._disabled_plugins:
continue
try:
state[plugin.key] = plugin.instance.get_state()
except TerminateApplication as exception:
self._termination = (plugin.key, type(plugin.instance), exception.reason)
except KeyboardInterrupt:
self._termination = (None, None, "User interruption")
except Exception as exception:
logging.debug('"%s" threw exception.', plugin.key, exc_info=exception)
self._runtime_stats['errors'][plugin.key].append(exception)
state['errors'].append((plugin.key, exception))
def _process_motors(self, state):
for plugin in self._motors:
if plugin.key in self._disabled_plugins:
continue
try:
plugin.instance.on_trigger(state)
except TerminateApplication as exception:
self._termination = (plugin.key, type(plugin.instance), exception.reason)
except KeyboardInterrupt:
self._termination = (None, None, "User interruption")
except Exception as exception:
logging.debug('"%s" threw exception.', plugin.key, exc_info=exception)
self._runtime_stats['errors'][plugin.key].append(exception)
state['errors'].append((plugin.key, exception))
def _disable_failing_plugins(self):
for key in self._runtime_stats['errors']:
if key in self._disabled_plugins:
continue
if len(self._runtime_stats['errors'][key]) > ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN:
logging.warning('Disabling plugin due to repeating failures: %s', key)
self._disabled_plugins.add(key)
def _update_runtime_statistics(self, loop_duration):
self._total_loops_duration += loop_duration
self._runtime_stats['loop_counter'] += 1
self._runtime_stats['average_loop_duration'] = self._total_loops_duration / self._runtime_stats['loop_counter']
self._runtime_stats['last_loop_duration'] = loop_duration
def _build_loop_state(self):
return {
'errors': [],
'now': datetime.now(),
'runtime': self._runtime_stats,
'disabled_plugins': self._disabled_plugins,
'termination': self._termination
}
def start_main_loop(self):
while self._termination is None:
try:
loop_start = datetime.now()
state = self._build_loop_state()
self._process_sensors(state)
self._process_motors(state)
self._disable_failing_plugins()
if len(self._disabled_plugins) == len(self._sensors) + len(self._motors):
logging.warning('All plugins have been disabled. Terminating application..')
break
if state['errors']:
logging.warning('Current loop was interrupted by following exceptions: %s', repr(state['errors']))
loop_stop = datetime.now()
loop_duration = loop_stop - loop_start
self._update_runtime_statistics(loop_duration)
if loop_duration < MINIMAL_LOOP_DURATION:
time.sleep((MINIMAL_LOOP_DURATION - loop_duration).total_seconds())
except KeyboardInterrupt:
self._termination = (None, None, "User interruption")
logging.info("Initiating shutdown procedure...")
terminal_state = self._build_loop_state()
for plugin in self._motors:
if plugin.key in self._disabled_plugins or not plugin.wants_last_chance:
continue
try:
logging.debug('Executing last chance motor: %s', plugin.key)
plugin.instance.on_trigger(terminal_state)
except Exception as exception:
self._runtime_stats['errors'][plugin.key].append(exception)
logging.info("Shutdown complete.")
logging.info(repr(self._runtime_stats))
def collect_all_plugins():
plugin_manager = PluginManager()
plugin_manager.setPluginPlaces(['plugins/motors', 'plugins/sensors'])
plugin_manager.collectPlugins()
for plugin in plugin_manager.getAllPlugins():
name = plugin.name
key = plugin.details.get('Core', 'key')
wants_last_chance = plugin.details.get('Core', 'last chance', fallback='').lower() == "true"
instance = plugin.plugin_object
path = plugin.path
yield PluginDetails(name, key, instance, wants_last_chance, path)
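# Hypothetical plugin info file consumed above (yapsy format); the extra keys
# in [Core] supply `key` and the optional `last chance` flag:
# [Core]
# Name = Clock Sensor
# Module = clock_sensor
# key = clock
# last chance = false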
def load_plugins(all_plugins):
used_plugin_keys = set()
motor_plugins = []
sensor_plugins = []
for plugin in all_plugins:
logging.debug('Processing plugin %s (%s) <%s>...', plugin.key, plugin.name, type(plugin.instance))
if plugin.key in used_plugin_keys:
logging.warning('Attempt to load already loaded plugin. Duplicate: name="%s", key="%s", path "%s"',
plugin.name, plugin.key, plugin.path)
continue
if isinstance(plugin.instance, Motor):
logging.debug("\tFound motor plugin.")
motor_plugins.append(plugin)
if isinstance(plugin.instance, Sensor):
logging.debug("\tFound sensor plugin with key: %s", plugin.key)
sensor_plugins.append(plugin)
used_plugin_keys.add(plugin.key)
return sensor_plugins, motor_plugins
def main():
all_plugins = collect_all_plugins()
sensors, motors = load_plugins(all_plugins)
app = CoreApplication(sensors=sensors, motors=motors)
app.start_main_loop()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG,
format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s')
try:
main()
except Exception as e:
logging.error('Unexpected error occurred. If you believe issue is related to some bug in application, ' +
'please open issue with exception details at https://github.com/sceeter89/command-center/issues',
exc_info=e)
| 2.109375 | 2 |
python/feature_extraction.py | RElbers/strotss-pytorch | 0 | 12798471 | from torch import nn
from torchvision import models
from torchvision.transforms import transforms
import util
class VGGFeatureExtractor(nn.Module):
def __init__(self):
super().__init__()
self._vgg = models.vgg16(pretrained=True).features
self._vgg.eval()
for parameter in self._vgg.parameters():
parameter.requires_grad = False
self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
self.keep_idc = [1, 3, 6, 8, 11, 13, 15, 22, 29]
def __call__(self, xs):
assert xs.dim() == 4
xs = util.denormalize(xs)
xs = xs / 255.0
xs = self.normalize(xs)
feats = [xs]
for i, layer in enumerate(self._vgg):
xs = layer(xs)
if i in self.keep_idc:
feats.append(xs)
return feats
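# Hypothetical usage sketch (assumes util.denormalize maps inputs back to
# the 0-255 range expected above):
# import torch
# feats = VGGFeatureExtractor()(torch.zeros(1, 3, 224, 224))
# len(feats)  # 10: the normalized input plus the 9 kept VGG16 activations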
| 2.359375 | 2 |
app/log.py | barry-ran/werobot | 1 | 12798472 | import os
import logging
from logging import handlers
from werkzeug.exceptions import InternalServerError
basedir = os.path.abspath(os.path.dirname(__file__))
def handle_error(error):
Log.logger().error(error)
return error
class Log:
LOG_PATH = os.path.join(basedir, 'logs')
LOG_NAME = os.path.join(LOG_PATH, 'log.txt')
LOG_LEVEL = 'INFO'
current_app = None
@staticmethod
def init_app(app):
Log.current_app = app
if not os.path.exists(Log.LOG_PATH):
os.makedirs(Log.LOG_PATH)
# rotate the log file daily, renaming old files by date
file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8')
file_handler.suffix = '%Y-%m-%d.log'
# set the handler's own level: records below it are dropped by this handler
# (a logger may have several handlers); file_handler writes to the log file
file_handler.setLevel(Log.LOG_LEVEL)
fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s'
formatter = logging.Formatter(fmt)
file_handler.setFormatter(formatter)
# set the logger's level: only records at or above it reach the handlers
app.logger.setLevel('DEBUG')
app.logger.addHandler(file_handler)
# in DEBUG mode Flask re-raises exceptions, so handle_error is not reached
app.register_error_handler(InternalServerError, handle_error)
@staticmethod
def logger():
return Log.current_app.logger | 2.5625 | 3 |
simulator/services/demand_generation_service.py | marina-haliem/Dynamic-RideSharing-Pooling-Simulator | 3 | 12798473 | <reponame>marina-haliem/Dynamic-RideSharing-Pooling-Simulator
from simulator.models.customer.customer import Customer
from db import Session
# import request
query = """
SELECT *
FROM {table}
WHERE request_datetime >= {t1} and request_datetime < {t2};
"""
class DemandGenerator(object):
def __init__(self, use_pattern=False):
if use_pattern:
self.table = "request_pattern"
else:
self.table = "request_backlog"
def generate(self, current_time, timestep):
try:
# List of requests within a certain timeframe
requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep)))
# List of customers associated with each request
customers = [Customer(request) for request in requests]
# for r in requests:
# print("Iterating R: ", r)
# print("Cust: ", len(customers), requests)
except:
Session.rollback()
raise
finally:
Session.remove()
return customers
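# Hypothetical usage sketch (requires the db Session to be configured):
# generator = DemandGenerator(use_pattern=False)
# customers = generator.generate(current_time=1464000000, timestep=60)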
| 2.890625 | 3 |
Assignment3_for_students/Util.py | jay-z007/Natural-Language-Processing | 0 | 12798474 | <reponame>jay-z007/Natural-Language-Processing
from DependencyTree import DependencyTree
def loadConll(inFile):
sents = []
trees = []
with open('data/' + inFile, 'rb') as fin:
sentenceTokens = []
tree = DependencyTree()
for line in fin:
line = line.strip()
line = line.split('\t')
if len(line) < 10:
if len(sentenceTokens) > 0:
trees.append(tree)
sents.append(sentenceTokens)
tree = DependencyTree()
sentenceTokens = []
else:
word = line[1]
pos = line[4]
head = int(line[6])
depType = line[7]
token = {}
token['word'] = word
token['POS'] = pos
token['head'] = head
token['depType'] = depType
sentenceTokens.append(token)
tree.add(head, depType)
# flush the final sentence if the file does not end with a blank line
if len(sentenceTokens) > 0:
trees.append(tree)
sents.append(sentenceTokens)
return sents, trees
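# Hypothetical CoNLL line parsed above (10 tab-separated fields); columns
# 1, 4, 6 and 7 become word, POS, head and depType:
# 1\tJohn\t_\tNNP\tNNP\t_\t2\tnsubj\t_\t_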
def writeConll(outFile, sentences, trees):
with open(outFile, 'wb') as fout:
for i in range(len(sentences)):
sent = sentences[i]
tree = trees[i]
for j in range(len(sent)):
fout.write("%d\t%s\t_\t%s\t%s\t_\t%d\t%s\t_\t_\n" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1)))
fout.write("\n")
"""
sents, trees = loadConll("train.conll")
print sents[1]
trees[1].print_tree()
"""
| 2.390625 | 2 |
vimeo/auth/__init__.py | greedo/vimeo.py | 0 | 12798475 | <filename>vimeo/auth/__init__.py
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
class GrantFailed(Exception): pass
| 1.117188 | 1 |
dwinelle/video/gen_3d.py | oliverodaa/cs184-final-proj | 1 | 12798476 | <filename>dwinelle/video/gen_3d.py
#!/usr/bin/env python3
# This file is part of dwinelle-tools.
# dwinelle-tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# dwinelle-tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>.
# This can be used to generate data3d.js for the web frontend.
import utils
edge_lengths = utils.load_edge_lengths()
print('var el = {{{}}};'.format(','.join('"{} {}":{}'.format(k[0], k[1], v) for k, v in edge_lengths.items())))
print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v in utils.get_node_coords().items())))
print('var eh = {{{}}};'.format(','.join('"{} {}":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1], v[2]) for k, v in utils.load_edge_heights().items())))
| 1.835938 | 2 |
stocksearch.py | mrahman4782/Stock-Strike | 0 | 12798477 | from bs4 import BeautifulSoup
import requests, smtplib, time
from flask import Flask, render_template, request, url_for
from threading import Thread
app = Flask(__name__)
@app.route('/')
def progstart():
return render_template("site.html")
@app.route('/start_task')
def start_task():
def do_work(stockInput, targetprice, email):
targetprice = float(targetprice)
oldprice = None  # price observed on the previous poll
while True:
URL = "https://finance.yahoo.com/quote/" + stockInput.upper() + "?p=" + stockInput.upper() + "&.tsrc=fin-srch"
htmlFound = requests.get(URL).text
retrieved = BeautifulSoup(htmlFound, 'html.parser')
price = retrieved.find("span", class_ = "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)").text
oldprice = float(price.replace(",", ""))
newtargetprice = price.replace(",", "")
print("The price is: " + price)
newprice = float(price.replace(",", ""))
server = smtplib.SMTP("smtp.gmail.com", 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login("email", "password")
head = stockInput + " price update!"
if oldprice < targetprice:
if newprice >= targetprice:
body = stockInput.upper() + " rose to " + str(newprice) + "!"
message = f"Subject: {head}\n\n{body}"
server.sendmail("<EMAIL>", email, message)
if oldprice > targetprice:
if newprice <= targetprice:
body = stockInput.upper() + " fell to " + str(newprice) + "!"
message = f"Subject: {head}\n\n{body}"
server.sendmail("<EMAIL>", email, message)
if oldprice != targetprice and newprice == targetprice:
body = stockInput.upper() + " has reached $" + str(newprice) + "!"
message = f"Subject: {head}\n\n{body}"
server.sendmail("<EMAIL>", email, message)
server.quit()
oldprice = newprice  # remember this poll's price for the next comparison
time.sleep(30)
kwargs = {
'stockInput':request.args.get('ticker'),
'targetprice':request.args.get('target'),
'email':request.args.get('email')
}
print(request.args)
thread = Thread(target=do_work, kwargs=kwargs)
thread.start()
return render_template("site.html")
if __name__ == "__main__":
app.run(debug=True)
| 2.96875 | 3 |
serverwamp/session.py | JustinTArthur/server_wamp | 14 | 12798478 | <filename>serverwamp/session.py
import logging
from abc import ABC, abstractmethod
from typing import Any, Iterable, Iterator
from serverwamp.adapters.async_base import AsyncTaskGroup
from serverwamp.protocol import (abort_msg, cra_challenge_msg,
cra_challenge_string, event_msg,
generate_global_id, goodbye_msg, scram_nonce,
subscribed_response_msg, ticket_challenge_msg,
unsubscribed_response_msg, welcome_msg)
NO_MORE_EVENTS = object()
NO_IDENTITY = object()
logger = logging.getLogger(__name__)
class AbstractAsyncQueue(ABC):
@abstractmethod
async def get(self) -> Any:
pass
@abstractmethod
def task_done(self) -> None:
pass
def put_nowait(self, item: Any):
pass
class WAMPSession:
def __init__(
self,
connection,
realm,
tasks: AsyncTaskGroup,
auth_id=None,
auth_methods=()
):
"""Represents a WAMP session happening over a connection.
The session is available to RPC and event topic routes.
The session can be used to store information to be retrieved or changed
by later effects:
session['customer_id'] = 345
"""
self.connection = connection
self.id = generate_global_id()
self.auth_id = auth_id
self.auth_methods = auth_methods
self.is_open = False
self.realm = realm
self.identity = NO_IDENTITY
self._custom_state = {}
self._said_goodbye = False
self._subscriptions = {}
self._subscriptions_ids = {}
self._tasks = tasks
self._authenticated = False
def __getitem__(self, key: str) -> Any:
return self._custom_state[key]
def __setitem__(self, key: str, value: Any) -> None:
self._custom_state[key] = value
def __delitem__(self, key: str) -> None:
del self._custom_state[key]
def __len__(self) -> int:
return len(self._custom_state)
def __iter__(self) -> Iterator[str]:
return iter(self._custom_state)
async def spawn_task(self, fn, *fn_args, **fn_kwargs):
await self._tasks.spawn(fn, *fn_args, **fn_kwargs)
async def send_raw(self, msg: Iterable):
await self.connection.send_msg(msg)
async def send_event(self, topic, args=(), kwargs=None, trust_level=None):
if topic not in self._subscriptions:
logger.debug("An event for %s was not sent to %s, as session "
"isn't subscribed.", topic, self.id)
return
subscription_id = self._subscriptions[topic]
msg = event_msg(
subscription_id=subscription_id,
publication_id=generate_global_id(),
args=args,
kwargs=kwargs,
trust_level=trust_level
)
await self.connection.send_msg(msg)
async def request_ticket_authentication(self):
await self.connection.send_msg(ticket_challenge_msg())
async def request_cra_auth(self, auth_role: str, auth_provider: str):
challenge_string = cra_challenge_string(
self.id,
auth_id=self.auth_id,
auth_provider=auth_role,
auth_role=auth_provider,
nonce=scram_nonce()
)
await self.connection.send_msg(cra_challenge_msg(challenge_string))
async def register_subscription(self, topic_uri: str) -> int:
sub_id = self.subscription_id_for_topic(topic_uri)
self._subscriptions[topic_uri] = sub_id
self._subscriptions_ids[sub_id] = topic_uri
return sub_id
async def unregister_subscription(self, sub_id: int):
topic_uri = self._subscriptions_ids.pop(sub_id)
if not topic_uri:
raise NoSuchSubscription()
del self._subscriptions[topic_uri]
async def mark_subscribed(self, request, subscription_id: int):
await self.connection.send_msg(
subscribed_response_msg(request, subscription_id)
)
async def mark_unsubscribed(self, request):
await self.connection.send_msg(
unsubscribed_response_msg(request)
)
@staticmethod
def subscription_id_for_topic(topic):
return hash(topic) & 0xFFFFFFFF
async def mark_authenticated(self, identity: Any = None):
if not self._authenticated:
self._authenticated = True
await self.connection.send_msg(welcome_msg(self.id))
self.is_open = True
self.identity = identity
async def abort(self, uri=None, message=None):
await self.connection.send_msg(abort_msg(uri, message))
self._said_goodbye = True
await self.close(uri, message)
async def close(self, uri=None, message=None):
if self.is_open and not self._said_goodbye:
await self.connection.send_msg(goodbye_msg(uri, message))
self.is_open = False
class NoSuchSubscription(Exception):
pass
| 2.015625 | 2 |
pv_data.py | kmoy14-stanford/AA222FinalProject | 2 | 12798479 | <reponame>kmoy14-stanford/AA222FinalProject
"""
Some data cleansing for the solar PV data.
"""
#%%
import numpy as np
import pandas as pd
# 5 years of PV data; keep the first year (8760 h x 4 = 35,040 fifteen-minute rows)
pvdata = pd.read_csv('solar_PV_15min_kWh.csv')
pv = pvdata[:8760*4]
pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True)
pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True)
pv.columns = ['gen']
#%% Save to CSV
pv.to_csv("pv_gen.csv")
# %%
| 2.28125 | 2 |
keras_classification_test.py | redkfa/PDF_classification | 0 | 12798480 | <reponame>redkfa/PDF_classification
from keras.preprocessing.image import ImageDataGenerator
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
from keras.applications.vgg19 import VGG19
from keras.models import Model
from keras.layers.normalization import BatchNormalization
import numpy as np
from keras.models import load_model
from sklearn.metrics import classification_report, confusion_matrix
from keras.models import Sequential
import tensorflow as tf
from sklearn import metrics
import matplotlib.pyplot as plt
from sklearn.metrics import auc
from sklearn.metrics import roc_curve
import os
#validation_data_dir = r'C:\Users\randy\PycharmProjects\PJ1\classifiaction\test'
validation_data_dir = r'C:\Users\randy\PycharmProjects\PJ1\classifiaction\test5'
#C:\Users\randy\Downloads\betterdataset\test 494#
#C:\Users\randy\PycharmProjects\PJ1\classifiaction\test2 #16
test_count =sum([len(files) for r, d, files in os.walk(validation_data_dir)])
nb_validation_samples =test_count
batch_size =8
validation_steps= nb_validation_samples/batch_size
print(test_count)
print(validation_steps)
img_width, img_height = 224,224
my_model = load_model('VO_2_classification_model.h5')
test_datagen = ImageDataGenerator(rescale=1. / 255)
validation_generator = test_datagen.flow_from_directory(
validation_data_dir,
target_size=(img_width, img_height),
shuffle=False,
batch_size=batch_size)
Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1)
y_pred = np.argmax(Y_pred, axis=1)
y_true = validation_generator.classes
print('Confusion Matrix')
print(confusion_matrix(validation_generator.classes, y_pred))
print('Classification Report')
target_names = ['3view', 'others']
print(classification_report(y_true, y_pred, target_names=target_names))
'''
loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1)
print('test acc = %.3f'%(acc))
print('test loss = %.3f'%(loss))
'''
'''
y_pred_keras = Y_pred.ravel()
fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras)
auc_keras = auc(fpr_keras,tpr_keras)
print(auc_keras)
plt.figure(1)
plt.plot([0, 1], [0, 1], 'k--')
plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras))
plt.xlabel('False positive rate')
plt.ylabel('True positive rate')
plt.title('ROC curve')
plt.legend(loc='best')
plt.show()
print(auc_keras)
'''
'''
#fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2)
plt.plot(fpr_keras,tpr_keras,marker = 'o')
plt.show()
#AUC = auc(fpr, tpr)
''' | 2.734375 | 3 |
tests/test_submission_builder.py | mverteuil/mig3-client | 3 | 12798481 | <gh_stars>1-10
# -*- coding: utf-8 -*-
import mock
from mig3_client import SubmissionBuilder
def test_minimum_viable_submission(converted_tests):
"""Should produce something"""
submission = SubmissionBuilder("t", "b", converted_tests).build()
assert submission is not None
def test_configuration_id(converted_tests):
"""Should contain target configuration ID used to initialize the builder"""
submission = SubmissionBuilder("t", "b", converted_tests).build()
assert submission.get("target") == "t", submission
def test_build_number(converted_tests):
"""Should contain build number used to initialize the builder"""
submission = SubmissionBuilder("t", "b", converted_tests).build()
assert submission.get("number") == "b", submission
def test_tests():
"""Should contain test results used to initialize the builder"""
submission = SubmissionBuilder("t", "b", ["anything"]).build()
assert submission.get("results") == ["anything"], submission
def test_version_details(converted_tests):
"""Should contain version details from git head commit"""
with mock.patch("mig3_client.git") as patched_git:
patched_git.Repo().head.object.hexsha = "a1" * 20
patched_git.Repo().head.object.author.email = "<EMAIL>"
submission = SubmissionBuilder("t", "b", converted_tests).build()
assert submission.get("version", {}).get("hash") == ("a1" * 20), submission
assert submission.get("version", {}).get("author") == ("<EMAIL>"), submission
| 2.21875 | 2 |
src/exabgp/util/od.py | pierky/exabgp | 1,560 | 12798482 | <reponame>pierky/exabgp
# encoding: utf-8
"""
od.py
Created by <NAME> on 2009-09-06.
Copyright (c) 2009-2017 Exa Networks. All rights reserved.
License: 3-clause BSD. (See the COPYRIGHT file)
"""
def od(value):
def spaced(value):
even = None
for v in value:
if even is False:
yield ' '
yield '%02X' % v
even = not even
return ''.join(spaced(value))
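# Hypothetical usage sketch:
# od(b'\x01\x02\x03\x04')  # -> '0102 0304'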
| 2.28125 | 2 |
rfmembers.py | realistforeningen/rf-members | 0 | 12798483 | <gh_stars>0
# coding: utf-8
import string
from datetime import datetime, timedelta
import time
from pytz import timezone
import pytz
from functools import wraps
from flask import Flask, render_template, request, redirect, url_for, jsonify, session, g, abort
from calendar import month_name
from collections import defaultdict, namedtuple
import re
import os
from flask_sqlalchemy import SQLAlchemy
from flask_assets import Environment, Bundle
from sqlalchemy.ext.hybrid import hybrid_property
import vippsparser
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['ASSETS_DEBUG'] = True
app.config['SECRET_KEY'] = "development key"
app.config['TIMEZONE'] = 'Europe/Oslo'
app.config['TERM'] = "V16"
app.config['PRICE'] = 50
app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports')
app.config['PASSWORDS'] = {
'F<PASSWORD>': '<PASSWORD>',
'SM': 'sm',
'Admin': 'admin',
'Superadmin': 'superadmin',
}
app.config['BLACKLIST'] = []
app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True)
tz = timezone(app.config['TIMEZONE'])
assets = Environment(app)
if 'WEBASSETS_DIR' in os.environ:
assets.directory = os.getenv('WEBASSETS_DIR')
db = SQLAlchemy(app)
def compute_queryname(context):
return context.current_parameters['name'].lower()
class Membership(db.Model):
id = db.Column(db.Integer, primary_key=True)
_name = db.Column('name', db.Text, nullable=False)
queryname = db.Column(db.Text, nullable=False)
price = db.Column(db.Integer, nullable=False)
term = db.Column(db.Text, nullable=False)
account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown
vipps_transaction_id = db.Column(db.Text)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False)
settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True)
created_session = db.relationship("Session", foreign_keys=[created_by], backref="created_memberships")
settled_session = db.relationship("Session", foreign_keys=[settled_by], backref="settled_memberships")
valid_term = (term == "Lifetime") | (term == app.config['TERM'])
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
self.queryname = value.lower()
def is_free(self):
return self.price == 0
ALPHABET = "".join(str(x) for x in range(10))
ALPHABET += string.ascii_uppercase
ALPHABET = ALPHABET\
.replace("O", "")\
.replace("I", "") # too similar to 1
def code(self):
# convert to Unix epoch
epoch = int(time.mktime(self.created_at.timetuple()))
code = ""
while epoch > 0:
epoch, i = divmod(epoch, len(self.ALPHABET))
code = self.ALPHABET[i] + code
return code
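
    # Hypothetical inverse (not part of the original model): recover the Unix
    # epoch that code() encodes, assuming the same ALPHABET ordering.
    @classmethod
    def decode_code(cls, code):
        epoch = 0
        for char in code:
            epoch = epoch * len(cls.ALPHABET) + cls.ALPHABET.index(char)
        return epoch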
@classmethod
def count_dict(cls, column):
query = db.session.query(column, db.func.count()).group_by(column)
result = {}
for row in query:
result[row[0]] = row[1]
return result
def price_for_term(term):
if term == 'Lifetime':
return app.config['PRICE'] * 10
else:
return app.config['PRICE']
levels = ['Funk', 'SM', 'Admin', 'Superadmin']
class Session(db.Model):
id = db.Column(db.Integer, primary_key=True)
description = db.Column(db.Text, nullable=False)
level = db.Column(db.Text, nullable=False)
user_name = db.Column(db.Text)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
closed_at = db.Column(db.DateTime, nullable=True)
def is_atleast(self, level):
return levels.index(self.level) >= levels.index(level)
def can(self, action, thing=None):
if self.level == 'Superadmin':
return True
if action == 'settlement':
return self.is_atleast('SM')
if action == 'settlement_all':
return self.is_atleast('Admin')
if action == 'wristband':
return app.config['ENABLE_WRISTBAND']
if action == 'memberships_new':
return True
if action == 'reports':
return self.is_atleast('Admin')
if action == 'sessions_list':
return self.is_atleast('SM')
if action == 'delete':
# We can only delete our own memberships which are not settled
if isinstance(thing, Membership):
if thing.settled_by is None:
return thing.created_by == self.id
if action == 'edit':
if isinstance(thing, Membership):
return True
return False
class VippsReport(db.Model):
id = db.Column(db.Integer, primary_key=True)
state = db.Column(db.Text)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
def file_path(self):
return os.path.join(app.config['VIPPS_STORAGE_PATH'], "%05d.xlsx" % self.id)
def transactions(self):
return vippsparser.load_transactions(self.file_path())
def bootstrap_class(self):
if self.state == "created":
return "danger"
if self.state == "uploaded":
return ""
if self.state == "resolved":
return "success"
if self.state == "pending":
return "warning"
class Entry:
COMMAND_PATTERN = r'^([vh]\d+)|(evig|evil)'
def __init__(self, transaction, memberships):
self.transaction = transaction
self.memberships = memberships
self.accuracy = 0
self.parse_transaction()
def is_complete(self):
return len(self.memberships) > 0
def parse_transaction(self):
amount = self.transaction.amount
if amount == price_for_term('Current'):
self.term = app.config['TERM']
elif amount == price_for_term('Lifetime'):
self.term = "Lifetime"
else:
return
self.accuracy = 1
cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I)
if cmd:
idx = cmd.end(0)
name = self.transaction.message[idx:]
            name = re.sub(r'^[^\wæøåÆØÅ]+', '', name, flags=re.U)  # the 4th positional arg of re.sub is count, not flags
            name = re.sub(r'[^\wæøåÆØÅ]+$', '', name, flags=re.U)
self.name = name
if cmd.group(1) and amount == price_for_term('Current'):
self.accuracy = 2
if cmd.group(2) and amount == price_for_term('Lifetime'):
self.accuracy = 2
else:
self.name = "%s %s" % (self.transaction.first_name, self.transaction.last_name)
def entries(self):
transactions = list(self.transactions())
trans_ids = [t.id for t in transactions]
mapping = {}
memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids))
for m in memberships:
if m.vipps_transaction_id not in mapping:
mapping[m.vipps_transaction_id] = []
mapping[m.vipps_transaction_id].append(m)
return [self.Entry(t, mapping.get(t.id, [])) for t in transactions]
@app.before_request
def before_request():
if 'session_id' in session:
sess = Session.query.get(session['session_id'])
# Closed sessions are not valid
if sess.closed_at is not None:
sess = None
# Old sessions are not valid
        elif (datetime.utcnow() - sess.created_at) > timedelta(days=1):  # created_at defaults to datetime.utcnow
sess = None
setattr(g, 'sess', sess)
else:
setattr(g, 'sess', None)
@app.context_processor
def inject_helpers():
def localize(d):
if d.tzinfo is None:
d = d.replace(tzinfo=pytz.utc)
return d.astimezone(tz)
    def latest_born_date():
        # latest birth date for someone who is at least 18 today
        now = datetime.now()
        if now.month == 2 and now.day == 29:  # .replace(year=...) raises on Feb 29
            now = now.replace(day=28)
        now = now.replace(year=now.year - 18) - timedelta(days=1)
        return now
def epoch(d):
start = datetime.utcfromtimestamp(0)
return (d - start).total_seconds()
return dict(
localize=localize,
latest_born_date=latest_born_date,
epoch=epoch
)
def logout():
session.pop('session_id')
def requires(action):
def decorator(func):
@wraps(func)
def route(*args, **kwargs):
if g.sess and g.sess.can(action):
return func(*args, **kwargs)
else:
abort(404)
return route
return decorator
@app.route('/')
def index():
if g.sess is None:
return render_template('index.html')
else:
return redirect(url_for('memberships_new'))
@app.route('/sessions/new')
def sessions_new(error_message=None):
level = request.args['level']
description = request.args['description']
return render_template('sessions/new.html', level=level, description=description, error_message=error_message)
@app.route('/sessions/new', methods=['POST'])
def sessions_create():
level = request.form["level"]
    real_password = app.config['PASSWORDS'].get(level)  # .get avoids a 500 on an unknown level
    if real_password is None or real_password != request.form["password"]:
        return sessions_new(error_message="Wrong password")
if not request.form["name"]:
return sessions_new(error_message="Name is missing")
sess = Session(
level=level,
user_name=request.form["name"],
description=request.form.get("description", "Unknown"),
)
db.session.add(sess)
db.session.commit()
session["session_id"] = sess.id
return redirect(url_for('index'))
@app.route('/sessions/switch', methods=['POST'])
def sessions_switch():
new_session = Session(
level=g.sess.level,
user_name=request.form["name"],
description=g.sess.description
)
db.session.add(new_session)
g.sess.closed_at = datetime.utcnow()
db.session.commit()
session["session_id"] = new_session.id
return redirect(url_for('index'))
@app.route('/sessions/delete', methods=['POST'])
def sessions_destroy():
g.sess.closed_at = datetime.utcnow()
db.session.commit()
logout()
return redirect(url_for('index'))
@app.route('/memberships/new')
@requires('memberships_new')
def memberships_new():
last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10)
term = request.args.get('term', app.config['TERM'])
membership = Membership(term=term, account="Entrance")
membership.price = price_for_term(membership.term)
return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships)
@app.route('/memberships/new', methods=['POST'])
@requires('memberships_new')
def memberships_create():
membership = Membership(
name=request.form["name"],
term=request.form["term"],
account=request.form["account"],
created_by=g.sess.id
)
membership.price = price_for_term(membership.term)
if 'vipps_transaction_id' in request.form and g.sess.can('vipps'):
tid = request.form['vipps_transaction_id'].strip()
if len(tid) == 0:
tid = None
membership.vipps_transaction_id = tid
errors = []
if membership.name.strip() == '':
errors.append("Name is required")
if len(errors) > 0:
return render_template('memberships/new.html', membership=membership, errors=errors)
db.session.add(membership)
db.session.commit()
return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor')
@app.route('/memberships/<id>/edit')
def memberships_edit(id):
mem = Membership.query.get(id)
return render_template('memberships/edit.html', membership=mem)
@app.route('/memberships/<id>/delete', methods=['POST'])
@requires('memberships_new')
def memberships_destroy(id):
mem = Membership.query.get(id)
if g.sess.can('delete', mem):
db.session.delete(mem)
db.session.commit()
return redirect(url_for('memberships_new'))
@app.route('/memberships/search')
def memberships_search():
query_string = request.args['q']
query = Membership.query.filter(Membership.valid_term)
for part in query_string.split():
like_string = '%' + part.lower() + '%'
query = query.filter(Membership.queryname.like(like_string))
limit = 10
memberships = list(query.order_by(db.desc('created_at')).limit(limit))
banned = []
if len(memberships) < limit:
# Search in blacklist
banned = app.config["BLACKLIST"]
        for part in query_string.split():
            # materialize each pass; chained lazy filter() objects would all capture the final value of `part`
            banned = [name for name in banned if part.lower() in name.lower()]
return render_template('memberships/table.html', memberships=memberships, banned=banned)
@app.route('/memberships/settle')
@requires('settlement')
def memberships_settle():
max_id = db.session.query(db.func.max(Membership.id)).scalar()
if g.sess.can('settlement_all'):
account = request.args.get('account', 'Entrance')
else:
account = "Entrance"
sessions = db.session.query(
db.func.count(Membership.created_by),
db.func.sum(Membership.price),
Session
) \
.group_by(Membership.created_by) \
.filter(Membership.account == account) \
.filter(Membership.settled_by == None) \
.filter(Membership.id <= max_id) \
.join(Membership.created_session) \
.all()
summary = {
'count': sum(count for count,_,_ in sessions),
'price': sum(price for _,price,_ in sessions),
}
return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account)
@app.route('/memberships/settle', methods=['POST'])
@requires('settlement')
def memberships_settle_submit():
max_id = request.form["max_id"]
if g.sess.can('settlement_all'):
account = request.form['account']
else:
account = "Entrance"
update = db.update(Membership) \
.where(Membership.account == account) \
.where(Membership.settled_by == None) \
.where(Membership.id <= max_id) \
.values(settled_by=g.sess.id) \
.values(queryname=Membership.queryname)
db.session.execute(update)
db.session.commit()
return redirect(url_for('memberships_settle', account=account))
@app.route('/memberships')
@requires('memberships_list')
def memberships_list():
memberships = Membership.query.all()
return render_template('memberships/list.html', memberships=memberships)
@app.route('/reports')
@requires('reports')
def reports():
membership_count = db.session.query(
db.func.count(Membership.id),
Membership.term,
db.func.strftime('%Y', Membership.created_at).label('year'),
db.func.strftime('%W', Membership.created_at).label('week')
) \
.group_by('year', 'week', Membership.term) \
.order_by('year', 'week')
terms = defaultdict(lambda: [])
lifetime = 0
for count, term, year, week in membership_count:
if term == "Lifetime":
lifetime += count
else:
terms[term].append({"count": count, "year": int(year), "week": week})
summary = []
for term in terms:
summary.append({
"name": term,
"rows": terms[term],
"total": sum(r["count"] for r in terms[term]),
"year": int(term[1:]) + 2000,
"sortkey": term[1:] + str(int(term[0] == 'H'))
})
summary.sort(key=lambda k: k["sortkey"], reverse=True)
return render_template('reports.html', summary=summary, lifetime=lifetime)
@app.route('/reports/lifetime')
@requires('reports')
def reports_lifetime():
memberships = Membership.query \
.filter(Membership.term == "Lifetime") \
.order_by(Membership.created_at.desc())
return render_template('reports/lifetime.html', memberships=memberships)
@app.route('/sessions')
def sessions_list():
created = Membership.count_dict(Membership.created_by)
settled = Membership.count_dict(Membership.settled_by)
sessions = Session.query.order_by(db.desc('created_at'))
return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled)
@app.route('/vipps')
def vipps_index():
reports = VippsReport.query.order_by(VippsReport.created_at.desc())
return render_template('vipps/index.html', reports=reports)
@app.route('/vipps', methods=['POST'])
def vipps_import():
file = request.files['file']
report = VippsReport(state="created")
db.session.add(report)
db.session.commit()
file.save(report.file_path())
report.state = "uploaded"
db.session.commit()
return redirect(url_for('vipps_index'))
@app.route('/vipps/<id>')
def vipps_show(id):
report = VippsReport.query.get(id)
return render_template('vipps/show.html', report=report)
@app.route('/vipps/<id>', methods=['POST'])
def vipps_process(id):
report = VippsReport.query.get(id)
names = request.form.getlist("name")
terms = request.form.getlist("term")
tids = request.form.getlist("transaction_id")
accepted_tids = request.form.getlist("accepted_transaction_id")
for name, term, tid in zip(names, terms, tids):
if tid not in accepted_tids:
continue
mem = Membership(
name=name,
term=term,
account="Vipps",
vipps_transaction_id=tid,
created_by=g.sess.id,
price=price_for_term(term)
)
db.session.add(mem)
report.state = request.form["state"]
db.session.commit()
return redirect(url_for('vipps_index'))
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| 1.96875 | 2 |
buff/trinket/dragon_killer.py | dannl/hunter-sim-classic | 0 | 12798484 | from buff import Buff, LastingBuff
class DragonKiller(LastingBuff):
def __init__(self):
super().__init__('dragon_killer', 2 * 60, 20)
    def equip(self, engine, char_state):
        char_state.ap += 64    # +64 attack power while the trinket is equipped

    def dequip(self, engine, char_state):
        char_state.ap -= 64

    def perform_impl(self, rotation, engine, char_state):
        char_state.ap += 260   # on-use proc: +260 attack power

    def timeout(self, rotation, engine, char_state):
        char_state.ap -= 260   # proc expired
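
# Minimal smoke test (hypothetical, not part of the simulator): drives the buff
# lifecycle with a stand-in character state, assuming Buff/LastingBuff
# construction has no side effects beyond the attributes used above.
if __name__ == '__main__':
    from types import SimpleNamespace
    state = SimpleNamespace(ap=0)
    buff = DragonKiller()
    buff.equip(None, state)               # +64 while equipped
    buff.perform_impl(None, None, state)  # +260 on proc
    buff.timeout(None, None, state)       # proc expires
    buff.dequip(None, state)
    assert state.ap == 0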
| 2.421875 | 2 |
server/utils/workflow.py | Samsong1991/django-vue-admin | 425 | 12798485 | <gh_stars>100-1000
from django.conf import settings
import time
import requests
import hashlib
import traceback
import json
class WorkFlowAPiRequest(object):
def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL):
self.token = token
self.appname = appname
self.username = username
self.workflowurl = workflowurl
def getrequestheader(self):
timestamp = str(time.time())[:10]
ori_str = timestamp + self.token
signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest()
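        # i.e. the signature is md5(<10-digit unix time> + token); the workflow
        # server is assumed to recompute and compare this value.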
headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username)
return headers
    def getdata(self, parameters=None, method='get', url='/api/v1.0/workflows/', timeout=300, data=None):
        parameters = parameters if parameters is not None else {}  # avoid shared mutable default arguments
        data = data if data is not None else {}
        if method not in ['get', 'post', 'put', 'delete', 'patch']:
            return False, 'method must be one of get post put delete or patch'
        if not isinstance(parameters, dict):
            return False, 'Parameters must be dict'
        headers = self.getrequestheader()
try:
r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data))
result = r.json()
return True,result
except:
return False,traceback.format_exc()
# ins = WorkFlowAPiRequest()
# print (ins.getdata(parameters=dict(username='admin', per_page=20, name=''),method='get',url='/api/v1.0/workflows')) | 2.15625 | 2 |
cap2/extensions/experimental/strains/strainotyping/cli.py | nanusefue/CAP2-1 | 9 | 12798486 | <reponame>nanusefue/CAP2-1
import click
from .io import write_graph_to_filepath
from .api import merge_filter_graphs_from_filepaths
@click.group('strainotype')
def strainotype_cli():
pass
@strainotype_cli.command('merge')
@click.option('-m', '--min-weight', default=2)
@click.option('-o', '--outfile', type=click.File('w'), default='-')
@click.argument('filepaths', nargs=-1)
def merge_graphs_cli(min_weight, outfile, filepaths):
G = merge_filter_graphs_from_filepaths(filepaths, min_weight=min_weight)
write_graph_to_filepath(G, outfile)
| 2.1875 | 2 |
mumoco/mumoco_api.py | disroop/mumoco | 3 | 12798487 | import json
from pathlib import Path
from typing import List
import cli_ui as ui
import deserialize
from conans.client.conan_api import Conan
from .conanbuilder.configreader import ConfigReader
from .conanbuilder.package import Package
from .conanbuilder.runner import Runner
from .conanbuilder.signature import Signature
class MumocoAPI:
def __init__(self, config_file_path: str, root: str):
self.config: ConfigReader = config_reader_from_file(config_file_path)
self.runner: Runner = get_runner(self.config, root)
def sources(self, source_folder: str = "") -> None:
self.runner.get_all_sources(source_folder)
def add_remotes(self, username: str, password: str) -> None:
self.runner.add_all_remotes(self.config.remotes, username, password)
def remove(self, source_folder: str = "") -> None:
self.runner.remove_all_sources(source_folder)
def create(self) -> None:
self.runner.export_all()
self.runner.create_all(self.config.configurations)
def upload(self, remote_name: str) -> None:
self.runner.upload_all_packages(remote_name)
def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]:
conan_files = []
for path in Path(root_path).rglob("conanfile.py"):
path_string = str(path.absolute())
if "test_package" not in path_string:
conan_files.append(path_string)
return conan_files
def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature) -> List[Package]:
conan_files = find_all_conanfiles_to_be_processed(root_path)
conan_packages = []
for file in conan_files:
conan_packages.append(Package(conan_factory, signature, file))
return conan_packages
def get_runner(config_reader: ConfigReader, root: str) -> Runner:
conan_factory, _, _ = Conan.factory()
packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature)
return Runner(conan_factory, packages)
def config_reader_from_file(file: str) -> ConfigReader:
try:
with open(file, encoding="utf-8") as json_file:
return config_reader_from_string(json.load(json_file))
except IOError:
ui.fatal("Config file not accessible or readable")
return ConfigReader()
def config_reader_from_string(load: dict) -> ConfigReader:  # receives the parsed json, not raw text
reader: ConfigReader = deserialize.deserialize(ConfigReader, load)
return reader
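
if __name__ == "__main__":
    # Hypothetical usage sketch; the config path and remote name below are
    # assumptions, not values shipped with the package.
    api = MumocoAPI(config_file_path="mumoco.json", root=".")
    api.sources()
    api.create()
    api.upload(remote_name="my-remote")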
| 2.1875 | 2 |
kyokigo/migrations/0002_auto_20180405_0755.py | seoworks0/docker_test2 | 0 | 12798488 | # Generated by Django 2.0.3 on 2018-04-05 07:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kyokigo', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='kyokigo_input',
name='ownurl',
),
migrations.AlterField(
model_name='kyokigo_input',
name='text',
field=models.CharField(max_length=100, verbose_name='テキスト'),
),
]
| 1.507813 | 2 |
data_collection/altdata_service/db/sql/migration_of_db/tweet_migration_big/migration_twitter_to_big.py | kaljuvee/openaltdata | 0 | 12798489 | from db.sql.migration_of_db.tweet_migration_big.psql_tweet_mig_queries import psql_connector_twitter_mig
from db.sql.migration_of_db.tweet_migration_big.big_queries_sql import big_connector_twitter_mig
import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner
def migration_tweet_tables():
print('start tweet migration')
#tweet_mig(table_name='tweet_clone')
    print('start tweet_cashtag migration')
tweet_mig(table_name='tweet_cashtag_clone')
print('job done')
def tweet_mig(table_name):
psql_conn = psql_connector_twitter_mig()
big_conn = big_connector_twitter_mig()
    t = 0
    bi_table = table_name.replace('_clone', '')  # loop-invariant target table name
    while True:
        tweet_df = psql_conn.get_twitter(table_name)
        if tweet_df.empty:
            break
tweet_df = cleaner.clean_df_for_db(tweet_df)
big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table)
psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name)
t += len(tweet_df)
print('we have processed ' + str(t) + ' rows')
if __name__ == "__main__":
migration_tweet_tables()
| 2.421875 | 2 |
rsteg_socket.py | jahosp/rsteg-tcp | 1 | 12798490 | <reponame>jahosp/rsteg-tcp<filename>rsteg_socket.py
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
# Author: <NAME> <<EMAIL>>
from rsteg_tcp import RstegTcp
from utils import State, retrans_prob
import time
class RstegSocket:
"""A wrapper for RstegTcp that offers socket primitives for communicating like Python sockets."""
def __init__(self, rprob, host=None, dport=None, sport=49512):
"""Class constructor."""
self.sport = sport # Source port, defaults to 49512
self.dport = dport # Destination port
self.dst = host # Destination host
self.rprob = rprob
self.rtcp = RstegTcp(self.sport, rprob) # Rsteg_Tcp instance
self.f_index = 0
# Flags
self.listening = False # Socket is listening on sport
def bind(self, host, port):
"""Configures the socket with the parameters supplied."""
self.dst = host
self.sport = port
self.rtcp.sport = self.sport
def listen(self):
"""Starts the RstegTCP module."""
self.rtcp.restart(self.rprob, self.sport)
self.rtcp.start()
self.listening = True
def accept(self):
"""Waits for a established TCP connection."""
while self.rtcp.state != State.ESTAB:
pass
def connect(self, host, port):
"""Establishes a TCP connection with the host on port."""
if not self.listening:
self.listen()
self.rtcp.connect(host, port)
while self.rtcp.state != State.ESTAB:
pass
def send(self, data):
"""Chunks the data according to MSS and sends it to the TCP receiver."""
data_chunks = []
interval = 1446 # payload chunk length
# Slice the binary data in chunks the size of the payload length
for n in range(0, len(data), interval):
data_chunks.append(data[n:n + interval])
# RTO vars
k = 4
g = 0.05
srtt = 0
rttvar = 0
rto = 1
alpha = 1/8
beta = 1/4
first_measurement = True
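        # (These implement the RFC 6298 estimator:
        #    rttvar <- (1 - beta) * rttvar + beta * |srtt - sample|
        #    srtt   <- (1 - alpha) * srtt + alpha * sample
        #    rto    <- srtt + max(g, k * rttvar)
        #  with alpha = 1/8, beta = 1/4, k = 4 and clock granularity g = 50 ms.)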
# Send chunks
for chunk in data_chunks:
self.rtcp.send_data(chunk)
# set timer
rtt = time.time()
res = False
# while we don't receive ACK
while not res:
#print(rto)
# Wait for ack event or timeout
res = self.rtcp.ack_event.wait(timeout=rto)
if not res: #timeout
self.rtcp.retrans_data(chunk)
else: #ack received
self.rtcp.ack_event.clear()
if first_measurement:
srtt = time.time() - rtt
rttvar = srtt/2
                        rto = srtt + max(g, k * rttvar)  # no int(): truncating would collapse sub-second variance to zero
first_measurement = False
else:
rttvar = (1- beta) * rttvar + beta * abs(srtt - (time.time() - rtt))
srtt = (1 - alpha) * srtt + alpha * (time.time() - rtt)
rto = srtt + max(g, k*rttvar)
def rsend(self, cover, secret):
"""Chunks the data and the secret according to the MSS. The data and secret will be sent to the
TCP receiver with the RSTEG method.
:param cover: binary data to transmit as cover
:param secret: binary data to transmit during fake retransmission
"""
# Do the same for the secret
secret_chunks = []
interval = 1444
for n in range(0, len(secret), interval):
secret_chunks.append(secret[n:n + interval])
self.rtcp.secret_chunks = secret_chunks
n = 0
start_time = time.time()
# RTO vars
k = 4
g = 0.05
srtt = 0
rttvar = 0
rto = 1
alpha = 1/8
beta = 1/4
first_measurement = True
# Send cover
while len(cover) > 0:
# Send cover signal and secret
if self.rtcp.secret_signal:
chunk = cover[:1414]
cover = cover[1414:]
self.rtcp.send_data(chunk) # data with signal
rtt = time.time()
res = False
while not res:
#print(rto)
res = self.rtcp.ack_event.wait(timeout=rto)
if not res: # timeout as expected
self.rtcp.send_secret()
n += 1
else:
self.rtcp.ack_event.clear()
if first_measurement:
srtt = time.time() - rtt
rttvar = srtt / 2
                            rto = srtt + max(g, k * rttvar)  # no int(): keep sub-second variance
first_measurement = False
else:
rttvar = (1 - beta) * rttvar + beta * abs(srtt - (time.time() - rtt))
srtt = (1 - alpha) * srtt + alpha * (time.time() - rtt)
rto = srtt + max(g, k * rttvar)
# Send cover
else:
chunk = cover[:1446]
cover = cover[1446:]
self.rtcp.send_data(chunk) # data without signal
# set timer
rtt = time.time()
res = False
# while we don't receive ACK
while not res:
#print(rto)
# Wait for ack event or timeout
res = self.rtcp.ack_event.wait(timeout=rto)
if not res: # timeout
self.rtcp.retrans_data(chunk)
else: # ack received
self.rtcp.ack_event.clear()
if first_measurement:
srtt = time.time() - rtt
rttvar = srtt / 2
                            rto = srtt + max(g, k * rttvar)  # no int(): keep sub-second variance
first_measurement = False
else:
rttvar = (1 - beta) * rttvar + beta * abs(srtt - (time.time() - rtt))
srtt = (1 - alpha) * srtt + alpha * (time.time() - rtt)
rto = srtt + max(g, k * rttvar)
# Update secret_signal flag according to the retrans_prob except if the secret has been sent.
if not self.rtcp.secret_sent:
self.rtcp.secret_signal = retrans_prob(self.rtcp.retrans_prob)
else:
self.rtcp.secret_signal = False
break
#print('# Cover Transfer time: %.2f' % round(time.time() - start_time, 2))
cover_time = round(time.time() - start_time, 2)
if self.rtcp.secret_sent:
#print('Secret successfully delivered.')
#print('# Secret Transfer time: %.2f' % round(self.rtcp.secret_endtime - start_time, 2))
secret_time = round(self.rtcp.secret_endtime - start_time, 2)
else:
#print('# Cover data ended before delivering all the secret!')
#print('# Delivered ' + str(n * 1444) + ' secret bytes')
secret_time = cover_time
return cover_time, secret_time
def recv(self, size, timeout=0):
"""Reads the RstegTCP data buffer for new recv data.
:param size: integer for the data read size
:param timeout: seconds for waiting to new pushed data in the buffer
        :return: up to size bytes from the buffer, or None if it is empty
"""
data = None
self.rtcp.psh_event.wait(timeout)
if len(self.rtcp.ingress_buffer) != 0: # check if empty
if len(self.rtcp.ingress_buffer) <= size: #
length = len(self.rtcp.ingress_buffer)
data = self.rtcp.ingress_buffer[:length] # take chunk
self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[length:]
return data
else:
data = self.rtcp.ingress_buffer[:size] # take chunk
self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[size:]
return data
else: # if buffer is empty return None
return data
def wait_and_recv(self):
"""Waits until end_event is set before accessing to the data buffer."""
data = []
self.rtcp.end_event.wait()
if self.rtcp.ingress_buffer:
data.append(self.rtcp.ingress_buffer)
print('RECV ' + str(len(data[0])) + ' BYTES')
if self.rtcp.ingress_secret_buffer:
data.append(self.rtcp.ingress_secret_buffer)
print('RECV ' + str(len(data[1])) + ' SECRET BYTES')
return data
def close(self):
"""Closes the TCP stream."""
self.rtcp.close()
while self.rtcp.state != State.TIME_WAIT:
pass
| 3.046875 | 3 |
scripts/pughpore/test_1Dpugh.py | jhwnkim/nanopores | 8 | 12798491 | # (c) 2016 <NAME>
" 1D PNP, modelling reservoirs and membrane far away from pore "
import nanopores as nano
import solvers
geop = nano.Params(
R = 35.,
H = 70.,
)
physp = nano.Params(
bulkcon = 1000.,
bV = -1.,
)
geo, pnp = solvers.solve1D(geop, physp)
solvers.visualize1D(geo, pnp)
nano.showplots()
| 1.921875 | 2 |
sendgrid/helpers/__init__.py | tulikavijay/sendgrid-python | 1 | 12798492 | """v3/mail/send request body builder
Builder for assembling emails to be sent with the v3 SendGrid API.
Usage example:
def build_hello_email():
to_email = from_email = Email("<EMAIL>")
subject = "Hello World from the SendGrid Python Library"
content = Content("text/plain", "some text here")
mail = Mail(from_email, subject, to_email, content)
mail.personalizations[0].add_to(Email("<EMAIL>"))
return mail.get() # assembled request body
For more usage examples, see
https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail
For more information on the v3 API, see
https://sendgrid.com/docs/API_Reference/api_v3.html
"""
| 2.609375 | 3 |
utils.py | jodietrich/wgan_domain_adaptation | 4 | 12798493 | <gh_stars>1-10
# Authors:
# <NAME> (<EMAIL>)
# <NAME> (<EMAIL>)
# <NAME> (<EMAIL>)
# Useful functions
import nibabel as nib
import numpy as np
import os
import glob
from importlib.machinery import SourceFileLoader
import config.system as sys_config
import logging
import tensorflow as tf
from collections import Counter
from matplotlib.image import imsave
def fstr_to_label(fieldstrengths, field_strength_list, label_list):
    # input fieldstrengths: hdf5 list of field strength values
# field_strength_list must have the same size as label_list
# returns a numpy array of labels
assert len(label_list) == len(field_strength_list)
labels = np.empty_like(fieldstrengths, dtype=np.int16)
for fs_ind, current_field_strength in enumerate(fieldstrengths):
valid_value = False
for label_ind, current_label in enumerate(label_list):
if(current_field_strength == field_strength_list[label_ind]):
labels[fs_ind] = current_label
valid_value = True
break
if(not valid_value):
raise ValueError('unexpected value in fieldstrengths: %s' % current_field_strength)
return labels
def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)):
N = ages.shape[0]
P = len(bins)
ages_mat = np.transpose(np.tile(ages,(P,1)))
bins_mat = np.tile(bins, (N,1))
return np.array(ages_mat>bins_mat, dtype=np.uint8)
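
# Illustrative example (not from the original module): with the default bins,
# an age of 72 exceeds only 65 and 70, so
#     age_to_ordinal_reg_format(np.array([72.]))  ->  array([[1, 1, 0, 0, 0]], dtype=uint8)
# and age_to_bins recovers the bin index by summing over the last axis (here 2).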
def age_to_bins(ages, bins=(65, 70, 75, 80, 85)):
ages_ordinal = age_to_ordinal_reg_format(ages, bins)
return np.sum(ages_ordinal, axis=-1)
def ordinal_regression_to_bin(ages_ord_reg):
# N = ages_ord_reg.shape[0]
# binned_list = []
# for nn in range(N):
# if np.sum(ages_ord_reg[nn,:]) > 0:
# binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1)
# else:
# binned_list.append(0)
return np.sum(ages_ord_reg, -1)
def get_ordinal_reg_weights(ages_ordinal_reg):
ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg))
P = ages_ordinal_reg.shape[1]
counts = [ages_binned.count(pp) for pp in range(P)]
counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts]
return counts
def all_argmax(arr, axis=None):
return np.argwhere(arr == np.amax(arr, axis=axis))
def makefolder(folder):
'''
Helper function to make a new folder if doesn't exist
:param folder: path to new folder
:return: True if folder created, False if folder already exists
'''
if not os.path.exists(folder):
os.makedirs(folder)
return True
return False
def load_nii(img_path):
'''
Shortcut to load a nifti file
'''
nimg = nib.load(img_path)
return nimg.get_data(), nimg.affine, nimg.header
def save_nii(img_path, data, affine, header):
'''
Shortcut to save a nifty file
'''
nimg = nib.Nifti1Image(data, affine=affine, header=header)
nimg.to_filename(img_path)
def create_and_save_nii(data, img_path):
img = nib.Nifti1Image(data, np.eye(4))
nib.save(img, img_path)
def get_latest_model_checkpoint_path(folder, name):
'''
Returns the checkpoint with the highest iteration number with a given name
:param folder: Folder where the checkpoints are saved
:param name: Name under which you saved the model
:return: The path to the checkpoint with the latest iteration
'''
iteration_nums = []
for file in glob.glob(os.path.join(folder, '%s*.meta' % name)):
file = file.split('/')[-1]
file_base, postfix_and_number, rest = file.split('.')[0:3]
it_num = int(postfix_and_number.split('-')[-1])
iteration_nums.append(it_num)
latest_iteration = np.max(iteration_nums)
return os.path.join(folder, name + '-' + str(latest_iteration))
def index_sets_to_selectors(*index_sets):
# takes in sets of indices and changes them to lists with True if the index was in the set and false otherwise
# works with lists or tuples of indices as well, but the in operation is O(n) instead of O(1)
selector_result = []
for ind_set in index_sets:
        selector_result.append([(index in ind_set) for index in range(max(ind_set) + 1)])  # + 1 so the largest index itself is covered
return selector_result
# Useful shortcut for making struct like contructs
# Example:
# mystruct = Bunch(a=1, b=2)
# print(mystruct.a)
# >>> 1
class Bunch:
def __init__(self, **kwds):
self.__dict__.update(kwds)
def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']):
# loads the module of the experiment and returns a loader that can be used to access variables and classes in the
# module (loader.myClass())
# if the file_name of the module is not given then the file of the module must be the only .py file in the directory
# except for the files in other_py_files
if file_name is None:
# get experiment config file (assuming it is the first python file in log directory)
py_file_list = [file for file in os.listdir(experiment_path) if (file.endswith('.py') and file not in other_py_files)]
if len(py_file_list) != 1:
raise ValueError('unexpected py files in log directory or experiment file not found')
py_file_name = py_file_list[0]
else:
py_file_name = file_name
py_file_path = os.path.join(experiment_path, py_file_name)
# import config file
# remove the .py with [:-3]
experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module()
# experiment name is the same as the folder name
experiment_folder_name = experiment_path.split('/')[-1]
if experiment_folder_name != experiment_module.experiment_name:
logging.warning('warning: the experiment folder name %s is different from the experiment name %s'
% (experiment_folder_name, experiment_module.experiment_name))
return experiment_module, experiment_path
def string_dict_in_order(dict, key_function=None, key_string='', value_string=''):
# key is a function to give the elements in the dictionary a numerical value that is used for the order
separator = '\n'
lines = []
for dict_key in sorted(dict, key=key_function, reverse=True):
lines.append(key_string + str(dict_key) + ' ' + value_string + str(dict[dict_key]))
print_string = separator.join(lines)
return print_string
def module_from_path(path):
module_name = os.path.splitext(os.path.split(path)[1])[0]
return SourceFileLoader(module_name, path).load_module()
def get_latest_checkpoint_and_step(logdir, filename):
init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename)
logging.info('Checkpoint path: %s' % init_checkpoint_path)
last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1])
logging.info('Latest step was: %d' % last_step)
return init_checkpoint_path, last_step
def get_session_memory_config():
# prevents ResourceExhaustError when a lot of memory is used
config = tf.ConfigProto()
config.gpu_options.allow_growth = True # Do not assign whole gpu memory, just use it on the go
config.allow_soft_placement = True # If a operation is not defined in the default device, let it execute in another.
return config
def tuple_of_lists_to_list_of_tuples(tuple_in):
return list(zip(*tuple_in))
def list_of_tuples_to_tuple_of_lists(list_in):
# zip(*list_in) is a tuple of tuples
return tuple(list(element) for element in zip(*list_in))
def remove_count(list_of_tuples, remove_counter):
# remove tuples with labels specified by remove_counter from the front of the list in place
# tuples (something, label)
# remove_counter is a Counter or dict of with labels as keys and how many of each label should get removed
# as the corresponding value
# assuming only nonnegative counts
if not all([item[1] >= 0 for item in remove_counter.items()]):
raise ValueError('There are negative counts in remove_counter %s' % str(remove_counter))
remove_counter_copy = remove_counter.copy()
remove_indices = set()
for ind, tup in enumerate(list_of_tuples):
if sum(remove_counter.values()) == 0:
break
else:
if remove_counter_copy[tup[1]] > 0:
remove_counter_copy[tup[1]] -= 1
remove_indices.add(ind)
# make a list with only the tuples that have an index in keep_indices
all_indices = set(range(len(list_of_tuples)))
keep_indices = all_indices - remove_indices
reduced_list = [element for ind, element in enumerate(list_of_tuples) if ind in keep_indices]
return reduced_list
def balance_source_target(source, target, random_seed=None):
# source and target are tuples with (indices, labels corresponding to the indices) where indices and labels are lists
# the returned data has the same structure but the source and target data have the same cardinality and label distribution
# make sure there are an equal number of labels and indices
if len(source[0]) != len(source[1]):
raise ValueError('The number of source indices %d and source labels %d is not equal' % (len(source[0]),len(source[1])))
if len(target[0]) != len(target[1]):
raise ValueError('The number of target indices %d and target labels %d is not equal' % (len(target[0]),len(target[1])))
# count the labels
source_counter = Counter(source[1])
target_counter = Counter(target[1])
# only nonnegative counts remain, so just what needs to be removed
s_to_remove = source_counter - target_counter
t_to_remove = target_counter - source_counter
# change to a representation with a list of tuples [(index1, label1), ...]
source_samples = tuple_of_lists_to_list_of_tuples(source)
target_samples = tuple_of_lists_to_list_of_tuples(target)
# shuffle data
np.random.seed(random_seed)
    np.random.shuffle(source_samples)
    np.random.shuffle(target_samples)
# remove tuples
source_samples = remove_count(source_samples, s_to_remove)
target_samples = remove_count(target_samples, t_to_remove)
# sort by index
sort_key = lambda t: t[0]
source_samples.sort(key=sort_key)
target_samples.sort(key=sort_key)
# change back to a representation with a tuple of lists of tuples ([index1, index2, ...], [label1, label2, ...])
reduced_source = list_of_tuples_to_tuple_of_lists(source_samples)
reduced_target = list_of_tuples_to_tuple_of_lists(target_samples)
reduced_source_count = Counter(reduced_source[1])
reduced_target_count = Counter(reduced_target[1])
logging.info('source label count after reduction ' + str(reduced_source_count))
logging.info('target label count after reduction ' + str(reduced_target_count))
# check whether the label counts of source and target domain are now equal
assert reduced_source_count == reduced_target_count
return reduced_source, reduced_target
def save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1, vmax=1):
# image is 3d numpy array
# path with image name at the end but without the ending .nii.gz
create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz')
# coronal cut through the hippocampy
image_cut = image[:, 38, :]
# rotate the image by 90 degree counterclockwise
image_cut = np.rot90(image_cut)
imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray')
if __name__ == '__main__':
source_indices1 = [0, 2, 3]
source_labels1 = [0, 2, 0]
target_indices1 = [1, 4, 5]
target_labels1 = [2, 2, 0]
source_labels2 = [0, 0, 0]
target_indices2 = [1, 4, 5, 6, 7]
target_labels2 = [2, 2, 0, 0, 2]
source = (source_indices1, source_labels2)
target = (target_indices2, target_labels2)
source_tuples = tuple_of_lists_to_list_of_tuples(source)
target_tuples = tuple_of_lists_to_list_of_tuples(target)
print(source)
print(target)
print(source_tuples)
print(target_tuples)
source2, target2 = balance_source_target(source, target, random_seed=0)
print(source2)
print(target2)
source_tuples2 = tuple_of_lists_to_list_of_tuples(source2)
target_tuples2 = tuple_of_lists_to_list_of_tuples(target2)
print(source_tuples2)
print(target_tuples2)
assert set(source_tuples2) <= set(source_tuples)
assert set(target_tuples2) <= set(target_tuples)
| 2.28125 | 2 |
databases/migrations/2021_12_09_065718_ThirdStorage.py | knguyen111601/test_penguin_project_4_backend | 0 | 12798494 | <filename>databases/migrations/2021_12_09_065718_ThirdStorage.py
"""ThirdStorage Migration."""
from masoniteorm.migrations import Migration
class ThirdStorage(Migration):
def up(self):
"""
Run the migrations.
"""
with self.schema.create("thirdstorages") as table:
table.increments("id")
table.string("thirdstorage_name")
table.string("thirdstorage_brand")
table.string("thirdstorage_type")
table.string("thirdstorage_size")
table.integer("thirdstorage_price")
table.string("thirdstorage_img")
table.timestamps()
def down(self):
"""
Revert the migrations.
"""
self.schema.drop("thirdstorages")
| 2.171875 | 2 |
examples/e164.py | SaidBySolo/dnspython | 0 | 12798495 | #!/usr/bin/env python3
import dns.e164
n = dns.e164.from_e164("+1 555 1212")
print(n)
print(dns.e164.to_e164(n))
| 2.078125 | 2 |
ServerComponent/DataLayer/DataSetEntry.py | CDU55/FakeNews | 0 | 12798496 | <reponame>CDU55/FakeNews
class FacebookDataSetEntry:
def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance,
label):
self.followers_number = followers_number
self.likes_number = likes_number
self.comments_number = comments_number
self.share_number = share_number
self.grammar_index = grammar_index
self.subject_relevance = subject_relevance
self.label = label
class FacebookDataSetEntryUnlabeled:
def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance):
self.followers_number = followers_number
self.likes_number = likes_number
self.comments_number = comments_number
self.share_number = share_number
self.grammar_index = grammar_index
self.subject_relevance = subject_relevance
class TwitterDataSetEntry:
def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index,
subject_relevance, label):
self.followers_number = followers_number
self.verified = verified
self.tweets_number = tweets_number
self.retweets = retweets
self.quote_tweets = quote_tweets
self.likes_number = likes_number
self.grammar_index = grammar_index
self.subject_relevance = subject_relevance
self.label = label
class TwitterDataSetEntryUnlabeled:
def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index,
subject_relevance):
self.followers_number = followers_number
self.verified = verified
self.tweets_number = tweets_number
self.retweets = retweets
self.quote_tweets = quote_tweets
self.likes_number = likes_number
self.grammar_index = grammar_index
self.subject_relevance = subject_relevance
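
# Refactoring sketch (not part of the original data layer): dataclasses express
# the same records with less boilerplate; an unlabeled variant is just the
# labeled one with the label left unset.
from dataclasses import dataclass
from typing import Optional


@dataclass
class FacebookDataSetEntryDC:
    followers_number: int
    likes_number: int
    comments_number: int
    share_number: int
    grammar_index: float
    subject_relevance: float
    label: Optional[int] = None  # None for the unlabeled case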
| 2.46875 | 2 |
pyRFTests.py | softwarespartan/pyStk | 0 | 12798497 |
import pyRF
import pyStk
import numpy as np
import math
from scipy import stats
#print rf.refData.shape
#print len(rf.refStnList)
#
#print rf.npv.shape;
#print rf.nvv.shape;
#print rf.refEpoch.shape;
#
#npv = rf.npvForEpoch(2003.50414524)
#print npv.shape
ts = pyStk.pyTS().initFromMatFile('../data/ts.mat')
rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower, ts.stn_list))
fyear = ts.epochs[4000]
npv = ts.npvForEpoch(fyear)
npvRF = rf.npvForEpoch(fyear)
print "mean: ", stats.nanmean(npv-npvRF)
print "median: ", stats.nanmedian(npv-npvRF)
print "Aligning epoch ",fyear
T, npvT, hstats = pyStk.helmert(npv, npvRF)  # hstats: helmert fit statistics (keeps scipy's stats module unshadowed)

print
print 'iter:', hstats['iter']
print
print 'pout:', hstats['pout']
print 'nout:', hstats['nout']
print 'npts:', hstats['npts']
print
print ' RMS:', hstats['RMS'] / 1e-3, '[mm]'
print 'wRMS:', hstats['wRMS'] / 1e-3, '[mm]'
print
print 'max resid:', hstats['dvMax'] / 1e-3, '[mm]'
print 'max resid indx:', hstats['dvMaxIndx'][0]
print 'max resid stn:', ts.stn_list[hstats['dvMaxIndx'] / 3]
| 2.15625 | 2 |
pypro/modulos/migrations/0004_populando_slug.py | wosubtil/curso-django | 0 | 12798498 | # Generated by Django 3.1.3 on 2020-11-25 11:09
from django.db import migrations
from django.utils.text import slugify
def popular_slug(apps, schema_editor):
Modulo = apps.get_model('modulos', 'Modulo')
for modulo in Modulo.objects.all():
modulo.slug = slugify(modulo.titulo)
modulo.save()
class Migration(migrations.Migration):
dependencies = [
('modulos', '0003_modulo_slug'),
]
operations = [
migrations.RunPython(popular_slug)
]
| 2.03125 | 2 |
todos/urls.py | sunilsm7/django-htmx | 0 | 12798499 | <gh_stars>0
from django.urls import path
from .views import index, search, todo_list_view
urlpatterns = [
path('', index, name='index'),
path('list', todo_list_view, name='list'),
path('search/', search, name='search')
]
| 1.710938 | 2 |
bin/submit_samples.py | vmware-samples/tau-clients | 2 | 12798500 | #!/usr/bin/env python
# Copyright 2021 VMware, Inc.
# SPDX-License-Identifier: BSD-2
import argparse
import configparser
import io
import sys
import tau_clients
import vt
from tau_clients import decoders
from tau_clients import exceptions
from tau_clients import nsx_defender
def download_from_vt(client: vt.Client, file_hash: str) -> bytes:
"""
Download file from VT.
:param vt.Client client: the VT client
:param str file_hash: the file hash
:rtype: bytes
:return: the downloaded data
:raises ValueError: in case of any error
"""
try:
buffer = io.BytesIO()
client.download_file(file_hash, buffer)
buffer.seek(0, 0)
return buffer.read()
except (IOError, vt.APIError) as e:
raise ValueError(str(e)) from e
def main():
"""Submit all samples or hashes by downloading from VT first."""
parser = argparse.ArgumentParser()
parser.add_argument(
"-c",
"--config-file",
dest="config_file",
default="./data/tau_clients.ini",
type=tau_clients.is_valid_config_file,
help="read config from here",
)
parser.add_argument(
"-b",
"--bypass-cache",
dest="bypass_cache",
action="store_true",
default=False,
help="whether to bypass the cache",
)
decoders.InputTypeDecoder.add_arguments_to_parser(
parser=parser,
choices=[
decoders.InputType.DIRECTORY,
decoders.InputType.FILE_HASH,
decoders.InputType.FILE,
],
)
args = parser.parse_args()
conf = configparser.ConfigParser()
conf.read(args.config_file)
# Load the analysis client
analysis_client = nsx_defender.AnalysisClient.from_conf(conf, "analysis")
# Decode input type
file_inputs, input_type = decoders.InputTypeDecoder().decode(
arguments=args.input_bits,
input_type=decoders.InputType(args.input_type),
inspect_content=False,
)
# Parse the input
vt_client = None
file_paths = []
file_hashes = []
if input_type is decoders.InputType.FILE_HASH:
try:
vt_client = vt.Client(apikey=conf.get("vt", "apikey"))
except configparser.Error:
print("VT credentials not found. Hash submissions are disabled")
return 1
file_hashes.extend(file_inputs)
elif input_type is decoders.InputType.FILE:
for file_input in file_inputs:
file_paths.extend(tau_clients.get_file_paths(file_input))
else:
raise ValueError("Unknown input type")
print(f"Decoded input into {len(file_hashes)} file hashes and {len(file_paths)} samples")
# Submit
submission_start_ts = analysis_client.get_api_utc_timestamp()
submissions = []
task_to_source = {}
for file_path in file_paths:
with open(file_path, "rb") as f:
try:
ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache)
submissions.append(ret)
task_to_source[ret["task_uuid"]] = file_path
except exceptions.ApiError as ae:
print(f"Error '{str(ae)}' when submitting file {file_path}")
for file_hash in file_hashes:
try:
file_data = download_from_vt(vt_client, file_hash)
ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache)
submissions.append(ret)
task_to_source[ret["task_uuid"]] = file_hash
except ValueError as ve:
print(f"Error '{str(ve)}' when downloading file {file_hash}")
except exceptions.ApiError as ae:
print(f"Error '{str(ae)}' when submitting file {file_hash}")
if vt_client:
vt_client.close()
print(f"All files have been submitted ({len(submissions)} submissions)")
# Wait for completion
try:
for submission in analysis_client.yield_completed_tasks(
submissions=submissions,
start_timestamp=submission_start_ts,
):
            task_uuid = submission.get("task_uuid")
            if not task_uuid:
                # indexing task_to_source with a missing uuid would raise KeyError
                print("A submission completed without a task uuid and can not be mapped to its source")
            else:
                task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True)
                print(f"File '{task_to_source[task_uuid]}' finished analysis: {task_link}")
except KeyboardInterrupt:
print("Waiting for results interrupted by user")
print("Done")
return 0
if __name__ == "__main__":
sys.exit(main())
| 2.46875 | 2 |
robinhood.py | CThax12/Stonk-Tracker | 0 | 12798501 | import RobinhoodFunctions as rf
email, password = rf.getCredentials()
rf.loginToRH(email, password)
allPositions = []
allPositions = rf.getAllOptions(allPositions)
frequentTickers = rf.getFrequentTickers(allPositions)
rf.r.options.write_spinner()
rf.r.options.spinning_cursor()
optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions)
writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls, puts)
rf.closeAndSave(writer)
print("Options successfully exported to:", excelPath)
| 2.203125 | 2 |
src/django_grainy_test/models.py | djeromov/django-grainy | 2 | 12798502 | from django.db import models
from django_grainy.decorators import grainy_model
from django_grainy.models import Permission, PermissionManager
from django_grainy.handlers import GrainyMixin
# Create your models here.
"""
These are the models used during the django_grainy
unit tests. There is no need to ever install the "django_grainy_test"
app in your project
"""
class ModelBase(GrainyMixin, models.Model):
class Meta:
abstract = True
@grainy_model()
class ModelA(ModelBase):
name = models.CharField(max_length=255)
@grainy_model(namespace="something.arbitrary")
class ModelB(ModelA):
pass
@grainy_model(
namespace=ModelB.Grainy.namespace(),
namespace_instance="{namespace}.{instance.b.id}.c.{instance.id}",
)
class ModelC(ModelA):
b = models.ForeignKey(ModelB, related_name="c", on_delete=models.CASCADE)
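
# For illustration (the ids are assumptions): a ModelC instance with id=7 whose
# related ModelB has id=3 resolves its instance namespace, per the formats
# declared above, to "something.arbitrary.3.c.7".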
@grainy_model(
namespace="dynamic.{value}", namespace_instance="{namespace}.{other_value}"
)
class ModelD(ModelA):
pass
@grainy_model(namespace="x")
class ModelX(ModelA):
pass
@grainy_model(namespace="custom", parent="x")
class ModelY(ModelA):
x = models.ForeignKey(ModelX, related_name="y", on_delete=models.CASCADE)
@grainy_model(namespace="z", parent="y")
class ModelZ(ModelA):
y = models.ForeignKey(ModelY, related_name="z", on_delete=models.CASCADE)
class APIKey(models.Model):
key = models.CharField(max_length=255)
class APIKeyPermission(Permission):
api_key = models.ForeignKey(
APIKey, related_name="grainy_permissions", on_delete=models.CASCADE
)
objects = PermissionManager()
| 2.28125 | 2 |
shippingBot.py | raagn08/Shipping-Info-Telegram-Bot | 2 | 12798503 | from telegram.ext import Updater, CallbackContext, CommandHandler, MessageHandler, Filters, Handler
from telegram.ext.dispatcher import run_async, DispatcherHandlerStop, Dispatcher
from telegram import Update, User, Message, ParseMode
from telegram.error import BadRequest
import requests_html
import requests
import json
import logging
#Enter API-KEY here
updater = Updater("API-KEY", use_context=True)
dispatcher = updater.dispatcher
logging.basicConfig(filename="shipping.log", format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
#Tracking Function for E-Kart Logistics
@run_async
def ekart(update: Update, context: CallbackContext):
    if update.message != None:
        if not context.args:
            return  # no tracking ID supplied with /ekart
        trackingID = context.args[0]
data = []
session = requests_html.HTMLSession()
response = session.get("https://ekartlogistics.com/track/"+str(trackingID)+"/")
for selector in response.html.xpath('//div[@id="no-more-tables"][1]/table/tbody'):
data.append(selector.text)
context.bot.send_message(chat_id=update.effective_chat.id, text="*Shipping Status: *\n\n`Latest Status: "+data[0]+"`\n\n*Tracking Info:*\n\n`"+data[1]+"`", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN)
#Tracking Function for Pitney Bowes
@run_async
def pitneyb(update: Update, context: CallbackContext):
    if update.message != None:
        if not context.args:
            return  # no tracking ID supplied with /pitneyb
        trackingID = context.args[0]
response = requests.get("https://parceltracking.pb.com/ptsapi/track-packages/"+trackingID)
jsonData = json.loads(response.text)
try:
currentStatusData = [
'Status: '+jsonData['currentStatus']['packageStatus'],
'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'],
'Description: '+jsonData['currentStatus']['eventDescription'],
'Location: '+jsonData['currentStatus']['eventLocation']['city']+", "+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '+jsonData['currentStatus']['eventLocation']['postalOrZipCode']
]
except KeyError:
currentStatusData = [
'Status: '+jsonData['currentStatus']['packageStatus'],
'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'],
'Description: '+jsonData['currentStatus']['eventDescription'],
'Location: '+jsonData['currentStatus']['eventLocation']['city']+", "+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '
]
currentStatusData = "\n".join(currentStatusData)
history = []
for x in jsonData['scanHistory']['scanDetails']:
try:
history.append([
'Status: '+x['packageStatus'],
'Last Updated: '+x['eventDate']+' '+x['eventTime'],
'Description: '+x['eventDescription'],
'Location: '+x['eventLocation']['city']+", "+x['eventLocation']['countyOrRegion']+' - '+x['eventLocation']['postalOrZipCode']
])
except KeyError:
history.append([
'Status: '+x['packageStatus'],
'Last Updated: '+x['eventDate']+' '+x['eventTime'],
'Description: '+x['eventDescription'],
])
historyData = []
for i in range(len(history)):
historyData.append("\n".join(history[i]))
historyData = "\n\n".join(historyData)
context.bot.send_message(chat_id=update.effective_chat.id, text="*Shipping Status: *\n\n`Latest Status:\n"+currentStatusData+"`\n\n*Tracking Info:*\n\n`"+historyData+"`", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN)
#Tracking Function for Canada Post
@run_async
def canadapost(update: Update, context: CallbackContext):
    if update.message != None:
        if not context.args:
            return  # no tracking ID supplied with /canadapost
        trackingID = context.args[0]
response = requests.get("https://www.canadapost.ca/trackweb/rs/track/json/package/"+trackingID+"/detail")
jsonData = json.loads(response.text)
status = jsonData['status']
history = []
for x in jsonData['events']:
history.append([
                'Date: ' + x['datetime']['date'] + ' ' + x['datetime']['time'] + x['datetime']['zoneOffset'],
'Location: '+ x['locationAddr']['city'] + ", " + x['locationAddr']['regionCd'] + " (" + x['locationAddr']['countryCd'] + ")",
'Description: '+ x['descEn']
])
currentStatusData = history[0]
currentStatusData = "\n".join(currentStatusData)
del history[0]
historyData = []
for i in range(len(history)):
historyData.append("\n".join(history[i]))
historyData = "\n\n".join(historyData)
context.bot.send_message(chat_id=update.effective_chat.id, text="*Shipping Status: *\n\n`Latest Status:\n"+currentStatusData+"`\n\n*Tracking Info:*\n\n`"+historyData+"`", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN)
#Bot Start Message /start
@run_async
def start(update: Update, context: CallbackContext):
context.bot.sendChatAction(update.effective_chat.id, "typing")
cmd_msg = context.bot.send_message(chat_id=update.effective_chat.id, text="Hey there! I'm Shipping Info Bot!\nI can provide you latest tracking info on your package.\n\nUse the following commands to access your package tracking info.")
def main():
start_handler = CommandHandler("start", start)
dispatcher.add_handler(start_handler)
#Command handler for E-Kart Logistics
ekart_handler = CommandHandler("ekart", ekart)
dispatcher.add_handler(ekart_handler)
#Command handler for Pitney Bowes
pitneyb_handler = CommandHandler("pitneyb", pitneyb)
dispatcher.add_handler(pitneyb_handler)
#Command handler for Canada Post
canadapost_handler = CommandHandler("canadapost", canadapost)
dispatcher.add_handler(canadapost_handler)
updater.start_polling()
updater.idle()
if __name__ == "__main__":
print(" _____ _ _ _ _____ __ ______ _ \n")
print("/ ___| | (_) (_) |_ _| / _| | ___ \ | | \n")
print("\ `--.| |__ _ _ __ _ __ _ _ __ __ _ | | _ __ | |_ ___ | |_/ / ___ | |_ \n")
print(" `--. \ '_ \| | '_ \| '_ \| | '_ \ / _` || || '_ \| _/ _ \| ___ \/ _ \| __|\n")
print("/\__/ / | | | | |_) | |_) | | | | | (_| || || | | | || (_) | |_/ / (_) | |_ \n")
print("\____/|_| |_|_| .__/| .__/|_|_| |_|\__, \___/_| |_|_| \___/\____/ \___/ \__|\n")
print(" | | | | __/ | \n")
print(" |_| |_| |___/ ")
main()
| 2.234375 | 2 |
103. invert_binary_tree.py | chandravenky/puzzles | 0 | 12798504 | <gh_stars>0
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
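# Approach: recursively swap the left and right children at every node.
# Runs in O(n) time with O(h) extra stack space, where h is the tree height.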
class Solution(object):
def flip(self, node):
if not node:
return None
hold_node = node.left
node.left = node.right
node.right = hold_node
self.flip(node.left)
self.flip(node.right)
def invertTree(self, root):
"""
:type root: TreeNode
:rtype: TreeNode
"""
self.flip(root)
return root
| 3.90625 | 4 |
statistics_exercises.py | rzamoramx/data_science_exercises | 0 | 12798505 | """ Some exercises about statistics """
from matplotlib import pyplot as plt
from statistics.central_tendencies import *
from statistics.variance import variance, standard_deviation
from statistics.correlation import covariance, correlation
def main():
num_friends = [500, 50, 25, 30, 5, 6, 7, 8, 9, 10,
1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
19, 28, 37, 46, 55, 64, 73, 82, 91, 10,
19, 28, 37, 33, 55, 64, 73, 82, 91, 10]
daily_minutes = [1, 6, 10, 20, 4, 9, 12, 8, 9, 20,
5, 6, 10, 20, 4, 9, 12, 8, 9, 20,
1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
central_tendencies(num_friends)
dispersion(num_friends)
correlations(num_friends, daily_minutes)
correlation_outliers(num_friends, daily_minutes)
plot_graphs()
def correlation_outliers(num_friends: List[float], daily_minutes: List[float]):
outlier = num_friends.index(500)
num_friends_good = [x for i, x in enumerate(num_friends) if i != outlier]
daily_minutes_good = [x for i, x in enumerate(daily_minutes) if i != outlier]
# plotting
plt.figure()
plt.scatter(num_friends, daily_minutes)
plt.title("Correlation without outlier")
plt.xlabel("# of friends")
plt.ylabel("minutes")
plt.figure()
plt.scatter(num_friends_good, daily_minutes_good)
plt.title("Correlation with outlier")
plt.xlabel("# of friends")
plt.ylabel("minutes")
def correlations(num_friends: List[float], daily_minutes: List[float]):
    cov = covariance(num_friends, daily_minutes)
    print(f'covariance: {cov}')
    corr = correlation(num_friends, daily_minutes)
    print(f'correlation: {corr}')
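# Note: correlation is covariance normalized by both standard deviations,
# so it is unitless and always falls in [-1, 1].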
def dispersion(num_friends: List[float]):
print(data_range(num_friends))
varian = variance(num_friends)
print(f'variance: {varian}')
standard_devi = standard_deviation(num_friends)
print(f'standard deviation: {standard_devi}')
def central_tendencies(num_friends: List[float]):
assert median([1, 10, 2, 9, 5]) == 5
vector_a = [1, 9, 2, 10]
assert median(vector_a) == (2 + 9) / 2
print(median(vector_a))
print(4//2) # 2
print(9//2) # 4
result_q1 = quantile(num_friends, 0.10)
print(f'quatile 10%: {result_q1}')
result_q2 = quantile(num_friends, 0.25)
print(f'quatile 25%: {result_q2}')
result_q3 = quantile(num_friends, 0.50)
print(f'quatile 50%: {result_q3}')
result_q4 = quantile(num_friends, 0.75)
print(f'quatile 75%: {result_q4}')
result_q5 = quantile(num_friends, 0.90)
print(f'quatile 90%: {result_q5}')
moda = set(mode(num_friends))
print(f'moda: {moda}')
def plot_graphs():
plt.show()
if __name__ == "__main__":
main()
| 3.828125 | 4 |
wunderkafka/serdes/store.py | severstal-digital/wunderkafka | 0 | 12798506 | from typing import Type, Union, Optional
from pathlib import Path
from wunderkafka.types import TopicName, KeySchemaDescription, ValueSchemaDescription
from wunderkafka.serdes.abc import AbstractDescriptionStore
from wunderkafka.compat.types import AvroModel
from wunderkafka.compat.constants import PY36
from wunderkafka.serdes.avromodel import derive
class SchemaTextRepo(AbstractDescriptionStore):
    def add(self, topic: TopicName, value: str, key: Optional[str]) -> None:
self._values[topic] = ValueSchemaDescription(text=value)
if key is not None:
self._keys[topic] = KeySchemaDescription(text=key)
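# Hypothetical usage sketch: store a value schema for a topic, with no key schema:
#   repo = SchemaTextRepo()
#   repo.add('clicks', value_schema_json, None)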
def _load_from_file(filename: Path) -> str:
with open(filename) as fl:
return fl.read()
# ToDo (tribunsky.kir): refactor it, maybe add hooks to parent class.
# Barbara, forgive us. Looks like AbstractDescriptionStore should be generic.
class SchemaFSRepo(AbstractDescriptionStore):
    def add(self, topic: TopicName, value: Union[str, Path], key: Optional[Union[str, Path]]) -> None:
self._values[topic] = ValueSchemaDescription(text=_load_from_file(Path(value)))
if key is not None:
self._keys[topic] = KeySchemaDescription(text=_load_from_file(Path(key)))
class AvroModelRepo(AbstractDescriptionStore):
def __init__(self) -> None:
super().__init__()
if PY36:
AvroModel()
# ToDo (tribunsky.kir): change Type[AvroModel] to more general alias + check derivation from python built-ins
def add(self, topic: TopicName, value: Type[AvroModel], key: Optional[Type[AvroModel]]) -> None:
self._values[topic] = ValueSchemaDescription(text=derive(value, topic))
if key is not None:
self._keys[topic] = KeySchemaDescription(text=derive(key, topic, is_key=True))
| 2.28125 | 2 |
bin/tests/test_design.py | broadinstitute/adapt | 12 | 12798507 | <filename>bin/tests/test_design.py
"""Tests for design.py
"""
import random
import os
import copy
import tempfile
import unittest
import logging
from collections import OrderedDict
from argparse import Namespace
from adapt import alignment
from adapt.prepare import align, ncbi_neighbors, prepare_alignment
from adapt.utils import seq_io
from bin import design
__author__ = '<NAME> <<EMAIL>>'
# Default args: window size 3, guide size 2, allow GU pairing
# GU pairing allows AA to match GG in 1st window
SEQS = OrderedDict()
SEQS["genome_1"] = "AACTA"
SEQS["genome_2"] = "AAACT"
SEQS["genome_3"] = "GGCTA"
SEQS["genome_4"] = "GGCTT"
# Specificity seq stops AA from being the best guide in the 1st window
SP_SEQS = OrderedDict()
SP_SEQS["genome_5"] = "AA---"
class TestDesign(object):
"""General class for testing design.py
Defines helper functions for test cases and basic setUp and
tearDown functions.
"""
class TestDesignCase(unittest.TestCase):
def setUp(self):
# Disable logging
logging.disable(logging.INFO)
# Create a temporary input file
self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
# Closes the file so that it can be reopened on Windows
self.input_file.close()
# Create a temporary output file
self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
self.output_file.close()
self.files_to_delete = [self.input_file.name, self.output_file.name]
def check_results(self, file, expected, header='target-sequences'):
"""Check the results of the test output
Given a TSV file of test output and expected output, fails the test
if the test output guide target sequences do not equal the expected
guide target sequences
Args:
file: string, path name of the file
expected: list of lists of strings, all the expected guide
target sequences in each line of the output
            header: the header of the TSV that contains the guide target
sequences
"""
col_loc = None
with open(file) as f:
for i, line in enumerate(f):
if i == 0:
headers = line.split('\t')
# Will raise an error if header is not in output
col_loc = headers.index(header)
continue
self.assertLess(i, len(expected) + 1)
guide_line = line.split('\t')[col_loc]
guides = guide_line.split(' ')
for guide in guides:
self.assertIn(guide, expected[i-1])
self.assertEqual(len(guides), len(expected[i-1]))
self.assertEqual(i, len(expected))
def baseArgv(self, search_type='sliding-window', input_type='fasta',
objective='minimize-guides', model=False, specific=None,
specificity_file=None, output_loc=None):
"""Get arguments for tests
Produces the correct arguments for a test case given details of
what the test case is testing. See design.py help for details
on input
Args:
search_type: 'sliding-window' or 'complete-targets'
input_type: 'fasta', 'auto-from-args', or 'auto-from-file'
objective: 'minimize-guides' or 'maximize-activity'
model: boolean, true to use Cas13a built in model, false
to use simple binary prediction
specific: None, 'fasta', or 'taxa'; what sort of input
to be specific against
output_loc: path to the output file/directory; set to
self.output_file.name if None
Returns:
List of strings that are the arguments of the test
"""
input_file = self.input_file.name
if output_loc is None:
output_loc = self.output_file.name
argv = ['design.py', search_type, input_type]
if input_type == 'fasta':
argv.extend([input_file, '-o', output_loc])
elif input_type == 'auto-from-args':
argv.extend(['64320', 'None', output_loc])
elif input_type == 'auto-from-file':
argv.extend([input_file, output_loc])
if input_type in ['auto-from-args', 'auto-from-file']:
argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path'])
if search_type == 'sliding-window':
argv.extend(['-w', '3'])
if search_type == 'complete-targets':
argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1',
'--max-primers-at-site', '2'])
if objective == 'minimize-guides':
argv.extend(['-gm', '0', '-gp', '.75'])
        elif objective == 'maximize-activity':
argv.extend(['--maximization-algorithm', 'greedy'])
# ID-M (mismatches to be considered identical) must be set to 0 since otherwise
# having 1 base in common with a 2 base guide counts as a match
if specific == 'fasta':
argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0'])
elif specific == 'taxa':
argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0'])
if model:
argv.append('--predict-cas13a-activity-model')
        elif objective == 'maximize-activity':
argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0'])
argv.extend(['--obj', objective, '--seed', '0', '-gl', '2'])
return argv
def tearDown(self):
for file in self.files_to_delete:
if os.path.isfile(file):
os.unlink(file)
# Re-enable logging
logging.disable(logging.NOTSET)
class TestDesignFasta(TestDesign.TestDesignCase):
"""Test design.py given an input FASTA
"""
def setUp(self):
super().setUp()
self.real_output_file = self.output_file.name + '.tsv'
self.files_to_delete.append(self.real_output_file)
# Write to temporary input fasta
seq_io.write_fasta(SEQS, self.input_file.name)
def test_min_guides(self):
argv = super().baseArgv()
args = design.argv_to_args(argv)
design.run(args)
# Base args set the percentage of sequences to match at 75%
expected = [["AA"], ["CT"], ["CT"]]
self.check_results(self.real_output_file, expected)
def test_max_activity(self):
argv = super().baseArgv(objective='maximize-activity')
args = design.argv_to_args(argv)
design.run(args)
# Doesn't use model, just greedy binary prediction with 0 mismatches
# (so same outputs as min-guides)
expected = [["AA"], ["CT"], ["CT"]]
self.check_results(self.real_output_file, expected)
def test_complete_targets(self):
argv = super().baseArgv(search_type='complete-targets')
args = design.argv_to_args(argv)
design.run(args)
# Since sequences are short and need 1 base for primer on each side,
# only finds 1 target in middle
expected = [["CT"]]
self.check_results(self.real_output_file, expected,
header='guide-target-sequences')
def test_specificity_fastas(self):
# Create a temporary fasta file for specificity
self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False)
# Closes the file so that it can be reopened on Windows
self.sp_fasta.close()
seq_io.write_fasta(SP_SEQS, self.sp_fasta.name)
self.files_to_delete.append(self.sp_fasta.name)
argv = super().baseArgv(specific='fasta',
specificity_file=self.sp_fasta.name)
args = design.argv_to_args(argv)
design.run(args)
# AA isn't allowed in 1st window by specificity fasta,
# so 1st window changes
expected = [["AC", "GG"], ["CT"], ["CT"]]
self.check_results(self.real_output_file, expected)
class TestDesignAutos(TestDesign.TestDesignCase):
"""Test design.py given arguments to automatically download FASTAs
Does not run the entire design.py; prematurely stops by giving a fake path
to MAFFT. All are expected to return a FileNotFoundError
"""
def setUp(self):
super().setUp()
# Write to temporary input file
with open(self.input_file.name, 'w') as f:
f.write("Zika virus\t64320\tNone\tNC_035889\n")
# Create a temporary output directory
self.output_dir = tempfile.TemporaryDirectory()
def test_auto_from_file(self):
argv = super().baseArgv(input_type='auto-from-file',
output_loc=self.output_dir.name)
args = design.argv_to_args(argv)
try:
design.run(args)
except FileNotFoundError:
pass
def test_auto_from_args(self):
argv = super().baseArgv(input_type='auto-from-args')
args = design.argv_to_args(argv)
try:
design.run(args)
except FileNotFoundError:
pass
def test_specificity_taxa(self):
argv = super().baseArgv(input_type='auto-from-args',
specific='taxa', specificity_file='')
args = design.argv_to_args(argv)
try:
design.run(args)
except FileNotFoundError:
pass
def tearDown(self):
super().tearDown()
self.output_dir.cleanup()
class TestDesignFull(TestDesign.TestDesignCase):
"""Test design.py fully through
"""
def setUp(self):
super().setUp()
# Write to temporary input file
with open(self.input_file.name, 'w') as f:
f.write("Zika virus\t64320\tNone\tNC_035889\n")
# Create a temporary specificity file
self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
self.sp_file.write("123\tNone\n")
# Closes the file so that it can be reopened on Windows
self.sp_file.close()
# 'auto-from-args' gives different outputs for every cluster
# Our test only produces 1 cluster, so store the name of that file
self.real_output_file = self.output_file.name + '.0.tsv'
self.files_to_delete.extend([self.sp_file.name, self.real_output_file])
# We cannot access MAFFT, so override this function; store original so
# it can be fixed for future tests
self.set_mafft_exec = align.set_mafft_exec
align.set_mafft_exec = lambda mafft_path: None
# Curating requires MAFFT, so override this function; store original so
# it can be fixed for future tests
self.curate_against_ref = align.curate_against_ref
def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]):
return {seq: seqs[seq] for seq in seqs \
if seq.split('.')[0] not in remove_ref_accs}
align.curate_against_ref = small_curate
# Aligning requires MAFFT, so override this function and output simple
# test sequences; store original so it can be fixed for future tests
self.align = align.align
align.align = lambda seqs, am=None: SEQS
# We don't want to fetch sequences for the specificity file since we're
# doing a simple test case, so override this function; store original
# so it can be fixed for future tests
self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy
def small_fetch(taxid, segment):
# 123 is the taxonomic ID used in our specificity file
if taxid == 123:
return SP_SEQS
# If it's not the specificity taxonomic ID, test fetching the real
# sequences, but don't return them as they won't be used
else:
self.fetch_sequences_for_taxonomy(taxid, segment)
return SEQS
prepare_alignment.fetch_sequences_for_taxonomy = small_fetch
# Disable warning logging to avoid annotation warning
logging.disable(logging.WARNING)
def test_specificity_taxa(self):
argv = super().baseArgv(input_type='auto-from-args', specific='taxa',
specificity_file=self.sp_file.name)
args = design.argv_to_args(argv)
design.run(args)
# Same output as test_specificity_fasta, as sequences are the same
expected = [["AC", "GG"], ["CT"], ["CT"]]
self.check_results(self.real_output_file, expected)
def tearDown(self):
# Fix all overridden functions
align.set_mafft_exec = self.set_mafft_exec
align.curate_against_ref = self.curate_against_ref
align.align = self.align
prepare_alignment.fetch_sequences_for_taxonomy = self.fetch_sequences_for_taxonomy
super().tearDown()
| 2.578125 | 3 |
andela/car_park_roof.py | phacic/dsa-py | 0 | 12798508 | <filename>andela/car_park_roof.py<gh_stars>0
def carParkingRoof(cars: list, k):
    # minimum roof length that covers k cars once positions are sorted:
    # slide a window of k consecutive cars and keep the tightest span
    cars.sort()
    if k >= len(cars):
        return cars[-1] - cars[0] + 1
    return min(cars[i + k - 1] - cars[i] + 1 for i in range(len(cars) - k + 1))
def process_file(filename: str) -> tuple:
fptr = open(filename, "w")
print("file opened")
cars_count = int(input().strip())
cars = []
for _ in range(cars_count):
cars_item = int(input().strip())
cars.append(cars_item)
k = int(input().strip())
print("running code...")
result = carParkingRoof(cars, k)
fptr.write(str(result) + "\n")
fptr.close()
if __name__ == "__main__":
# print(carParkingRoof([1], 3))
# print(carParkingRoof([6, 2, 12, 7], 3))
# print(carParkingRoof([2, 10, 8, 17], 3))
# print(carParkingRoof([1, 2, 3, 10], 4))
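    # with the sliding-window logic above, these return 1, 6, 9 and 10 respectively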
process_file("./andela/car_park_test_case.txt")
| 3.515625 | 4 |
parser.py | hrb23m/pydifier | 0 | 12798509 | <gh_stars>0
import argparse
parser = argparse.ArgumentParser(
prog = "pydifier",
add_help = True)
### File option ###
# Input File
parser.add_argument('pdf_file_path',
action = 'store',
type = str,
metavar = "PDF_PATH",
help = 'Pdf file path to be fixed.'
)
# Output File
parser.add_argument('-o', '--output',
action = 'store',
type = str,
                    help = 'Output pdf file path.'
)
# Output File suffix before extension
parser.add_argument('-s', '--suffix',
action = 'store',
default = 'fix',
type = str,
                    help = 'Output pdf file path suffix before .pdf extension.'
)
### Split option ###
split_group = parser.add_mutually_exclusive_group(required=False)
# Vertical split
split_group.add_argument('-sv', '--vertical',
action = 'store_true',
default = False,
help = 'Split pdf page vertically.')
# horizontal split
split_group.add_argument('-sh', '--horizontal',
action = 'store_true',
default = False,
help = 'Split pdf page horizontally.')
### Rotate option ###
# rotate
parser.add_argument('-r', '--rotate',
action = 'store',
type = int,
choices = [90, 180, 270],
help = 'Rotate pdf page clockwise specified degree.'
)
### Front page location ###
front_page = parser.add_mutually_exclusive_group(required=False)
# Front page is located
front_page.add_argument('-ff', '--front-first',
action = 'store_true',
default = False,
help = 'First page is located on first page when using saddle stitch option.'
)
front_page.add_argument('-fs', '--front-second',
action = 'store_true',
default = True,
help = 'First page is located on second page when using saddle stitch option.'
)
### Binding option ###
parser.add_argument('-ss', '--saddle-stitch',
action = 'store_true',
default = False,
help = 'Scanned PDF that is saddle stich binding.'
)
### Order option ###
# Reverse output
parser.add_argument('-rv', '--reverse',
action = 'store_true',
default = False,
help = 'Output pdf pages reversely.')
### Others ###
# Verbose mode
parser.add_argument('--verbose',
action = 'store_true',
default = False,
help = 'Run as verbose mode.'
)
# Version
parser.add_argument('-v', '--version',
action = 'version',
version = '%(prog)s 0.1',
help = 'Show version.'
)
| 2.75 | 3 |
androyara/core/dex_parser.py | BiteFoo/androyara | 2 | 12798510 | <reponame>BiteFoo/androyara<filename>androyara/core/dex_parser.py
# coding:utf8
'''
@File : dex_parser.py
@Author : Loopher
@Version : 1.0
@License : (C)Copyright 2020-2021,Loopher
@Desc    : Dex file parsing
'''
"""
每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较
"""
from androyara.dex.dex_vm import DexFileVM
class DexParser(object):
parser_info = {
"name": "DexParser",
"desc": "Parsing Dex file into bytecode"
}
def __init__(self, pkg, buff):
        self.vm = DexFileVM(pkg, buff)
self.vm.build_map()
| 1.539063 | 2 |
neurobeer/tractography/__init__.py | kaitj/Tractography | 2 | 12798511 | """
The package provides a number of modules to be used in the clustering,
extraction, and evaluation of white matter tractography.
"""
| 0.804688 | 1 |
Fullbit.py | Mizogg/Fillbit-Bitcoin-Address | 4 | 12798512 | <reponame>Mizogg/Fillbit-Bitcoin-Address
#Fullbit.py =====Made by <EMAIL> Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB =====
from bitcoinaddress import Wallet
import random
filename ='puzzle.txt'
with open(filename) as f:
    line_count = 0
    for line in f:
        if line != "\n":  # skip blank lines
            line_count += 1
with open(filename) as file:
add = file.read().split()
add = set(add)
print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count))
x=int(input("'Start range in BITs 0 or Higher(Puzzle StartNumber) -> "))
a = 2**x
y=int(input("Stop range Max in BITs 256 Max (Puzzle StopNumber) -> "))
b = 2**y
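# Keys are drawn uniformly at random from [2**x, 2**y); for example, the
# well-known Bitcoin puzzle #64 key lies in [2**63, 2**64).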
print("Starting search... Please Wait min range: " + str(a))
print("Max range: " + str(b))
print("==========================================================")
print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count))
count=0
total=0
while True:
count+=1
total+=5
ran=random.randrange(a,b)
HEX = "%064x" % ran
wallet = Wallet(HEX)
uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address
caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address
saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address
bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH']
bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH']
wif = wallet.key.__dict__['mainnet'].__dict__['wif']
wifc = wallet.key.__dict__['mainnet'].__dict__['wifc']
#print('\nPrivatekey (dec): ', ran,'\nPrivatekey (hex): ', HEX, '\nPrivatekey Uncompressed: ', wif, '\nPrivatekey compressed: ', wifc, '\nPublic Address 1 Uncompressed: ', uaddr, '\nPublic Address 1 Compressed: ', caddr, '\nPublic Address 3 Segwit: ', saddr, '\nPublic Address bc1 P2WPKH: ', bcaddr, '\nPublic Address bc1 P2WSH: ', bc1addr)
print('Scan : ', count , ' : Total : ', total, ' : HEX : ', HEX, end='\r')
if caddr in add or uaddr in add or saddr in add or bcaddr in add or bc1addr in add:
print('\nMatch Found')
f=open("winner.txt","a")
f.write('\nPrivatekey (dec): ' + str(ran))
f.write('\nPrivatekey (hex): ' + HEX)
f.write('\nPrivatekey Uncompressed: ' + wif)
f.write('\nPrivatekey compressed: ' + wifc)
f.write('\nPublic Address 1 Compressed: ' + caddr)
f.write('\nPublic Address 1 Uncompressed: ' + uaddr)
f.write('\nPublic Address 3 Segwit: ' + saddr)
f.write('\nPublic Address bc1 P2WPKH: ' + bcaddr)
f.write('\nPublic Address bc1 P2WSH: ' + bc1addr)
f.write('\n =====Made by mizogg.co.uk Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB =====' )
f.close() | 2.765625 | 3 |
src/pyscripts/mindTrain.py | widmerin/OpenBCI_NodeJS_IP6 | 0 | 12798513 | ##
# train eeg data of mind commands
# (beta)
#
##
import json
import os
import sys
import time
import pickle
import numpy as np
from mindFunctions import filterDownsampleData
import codecs, json
from scipy.signal import butter, lfilter
from sklearn import svm, preprocessing, metrics
from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit
from pathlib import Path
# enable/disable debug Mode
debug = False
# the 5 commands from player
commands = ['volup', 'playpause', 'next', 'prev', 'voldown']
cmdCount = len(commands) # nr of commands
def main():
# read training data from files
# default path with stored traingsdata
# filepath-example = 'your project path'/data/mind/training-playpause.json'
cwd = os.getcwd()
traindataFolder = cwd + '/data/mind/'
# default path if python script runs standalone
if (os.path.basename(cwd) == "pyscripts"):
traindataFolder = cwd + '/../../data/mind/'
traindata = []
for cmd in range(cmdCount):
filepath = Path(traindataFolder + 'training-' + commands[cmd] + '.json')
# read file of trainingCmd
with open(filepath) as f:
data = json.load(f)
traindata.append(np.array(data, dtype='f'))
# read in baseline from file
baseline = []
blpath = Path(traindataFolder + 'training-baseline.json')
# read file of baseline
with open(blpath) as blf:
bl = json.load(blf)
baseline = np.array(bl, dtype='f')
## read in test data
with open(traindataFolder + 'test-baseline.json') as f:
baselineTest = json.load(f)
with open(traindataFolder + 'test-volts.json') as f:
voltsTest = json.load(f)
# create a numpy array
voltsTest = np.array(voltsTest, dtype='f')
baselineTest = np.array(baselineTest, dtype='f')
if debug:
print("\n------ Training Data ------")
print("traindata length should be 5 (cmds): " + str(len(traindata)))
print("traindata[0] length should be 1500 (samples): " + str(len(traindata[0])))
print("traindata[0][0] length should be 8 (channels): " + str(len(traindata[0][0])))
# 1. Filter and Downsample Trainingdata and Baseline
[filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug)
if debug:
print("\n------ Filtered Training Data ------")
print("filterdTraindata length should be 5 (cmds): " + str(len(filterdTraindata)))
print("filterdTraindata[0] length is now 8 (channels): " + str(len(filterdTraindata[0])))
print("filterdTraindata[0][0] length is now 250 (samples): " + str(len(filterdTraindata[0][0])))
# # save filterd Data
# filterdTraindata = np.array(filterdTraindata)
# baselineDataBP = np.array(baselineDataBP)
# outfile = '../../data/mind/model/filterdTraingdata.txt'
# json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True,
# indent=4) ### this saves the array in .json format
# outfile = '../../data/mind/model/baselineDataBP.txt'
# json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True,
# indent=4) ### this saves the array in .json format
## 2. Extract Features for Trainingdata (only commands)
[X, y] = extractFeature(filterdTraindata)
if debug:
print("Anz. Features: " + str(len(X)))
print("y: " + str(y))
## 3. Train Model with features
# gamma: defines how far the influence of a single training example reaches, with low values meaning ‘far’ and high values meaning ‘close’.
# C: trades off misclassification of training examples against simplicity of the decision surface.
# A low C makes the decision surface smooth, while a high C aims at classifying all training examples correctly by giving the model freedom to select more samples as support vectors.
# Find optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html
# TODO: Set correct SVM params
[C, gamma] = findTrainClassifier(X, y)
clf = svm.SVC(kernel='rbf', gamma=gamma, C=C)
clf.fit(X, y)
## save model
with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile:
pickle.dump(clf, outfile)
## Check if trainingdata get 100% accuracy
if debug:
[accuracy, _, _] = modelAccuracy(y, clf.predict(X))
if (accuracy == 1.0):
print("Correct classification with traingdata")
else:
print("Wrong classification with traingdata. check SVM algorithm")
print("\n------ Test Data ------")
## 4. Filter and Downsample Testdata
    [filterdTestdata, _] = filterDownsampleData(voltsTest, baselineTest, commands, debug)
## 5. Extract Features from Testdata
targetCmd = 1 # Playpause===1
    [X_test, y_test] = extractFeatureTest(filterdTestdata, targetCmd)
print("Anz. Features X_Test: " + str(len(X_test)))
print("y_Test: " + str(y_test))
## 6. Check Model Accuracy
print("\n------ Model Accuracy ------")
y_pred = clf.predict(X_test) # Predict the response for test dataset
if debug: print("predicted y " + str(y_pred))
[accuracy, precision, recall] = modelAccuracy(y_test, y_pred)
print("Accuracy: " + str(accuracy))
print("Precision: " + str(precision))
print("Recall: " + str(recall))
# send success back to node
# TODO: implement real success boolean return
print('true')
def extractFeature(dataFilterd):
## Create X and Y data for SVM training
X = []
y = []
# TODO: Extract Features
## Reshape Data
reshapedData = []
dataFilterdNp = np.array(dataFilterd)
trainCmd, nx, ny = dataFilterdNp.shape
reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny))
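    # Flatten each command's (channels x samples) matrix into one row; the
    # fixed 2000-value slices below then carve that row into single trials.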
if (debug):
print("\n-- Reshaped Data ---")
print("len(reshapedData) aka 5 cmds: " + str(len(reshapedData)))
print("len(reshapedData[0]) channels*samples aka 8*250=2000 : " + str(len(reshapedData[0])))
for cmd in range(cmdCount):
X.append(reshapedData[cmd][0:2000])
X.append(reshapedData[cmd][2000:4000])
X.append(reshapedData[cmd][4000:6000])
y.append(cmd)
y.append(cmd)
y.append(cmd)
# Feature Standardization
X = preprocessing.scale(X)
return X, y
def extractFeatureTest(dataDownSample, cmd):
## Create X and Y data for SVM test
X = []
y = []
print(len(X))
X.append(dataDownSample)
y.append(cmd)
if debug:
print("\n-- X and Y Data ---")
print("y : " + str(y))
## Feature Standardization
X = preprocessing.scale(X)
return X, y
def modelAccuracy(y_test, y_pred):
# Model Accuracy: how often is the classifier correct
accuracy = metrics.accuracy_score(y_test, y_pred)
# Model Precision: what percentage of positive tuples are labeled as such?
precision = metrics.precision_score(y_test, y_pred)
# Model Recall: what percentage of positive tuples are labelled as such?
recall = metrics.recall_score(y_test, y_pred)
return [accuracy, precision, recall]
def findTrainClassifier(X, y):
C_range = np.logspace(-2, 10, 13)
gamma_range = np.logspace(-9, 3, 13)
param_grid = dict(gamma=gamma_range, C=C_range)
cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42)
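    # 13 x 13 log-spaced (C, gamma) grid, each candidate scored over 5 stratified shuffle splits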
grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv)
grid.fit(X, y)
if debug:
print("The best parameters are %s with a score of %0.2f" % (grid.best_params_, grid.best_score_))
return grid.best_params_['C'], grid.best_params_['gamma']
# start process
if __name__ == '__main__':
main()
| 2.265625 | 2 |
ai/human_console.py | Dratui/AI-Arena | 2 | 12798514 | <gh_stars>1-10
from src.games.games import Game
def ai_output(board, game): #the ai's output corresponds to the human input
output=input(game.move_description)
    while output not in [str(game.map_move_to_input[i]) for i in game.get_move_effective()]: # input validation
        output = input(game.move_description)
    return game.map_input_to_move[output]
| 3.09375 | 3 |
src/cmssh/cms_cmds.py | dmwm/cmssh | 2 | 12798515 | #!/usr/bin/env python
#-*- coding: ISO-8859-1 -*-
#pylint: disable-msg=W0702
"""
Set of UNIX commands, e.g. ls, cp, supported in cmssh.
"""
# system modules
import os
import re
import sys
import time
import json
import glob
import shutil
import base64
import pprint
import mimetypes
import traceback
import subprocess
# cmssh modules
from cmssh.iprint import msg_red, msg_green, msg_blue
from cmssh.iprint import print_warning, print_error, print_status, print_info
from cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue
from cmssh.utils import list_results, check_os, unsupported_linux, access2file
from cmssh.utils import osparameters, check_voms_proxy, run, user_input
from cmssh.utils import execmd, touch, platform, fix_so
from cmssh.cmsfs import dataset_info, block_info, file_info, site_info, run_info
from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance
from cmssh.cmsfs import release_info, run_lumi_info
from cmssh.github import get_tickets, post_ticket
from cmssh.cms_urls import dbs_instances, tc_url
from cmssh.das import das_client
from cmssh.url_utils import get_data, send_email
from cmssh.regex import pat_release, pat_site, pat_dataset, pat_block
from cmssh.regex import pat_lfn, pat_run, pat_se, pat_user
from cmssh.tagcollector import architectures as tc_architectures
from cmssh.results import RESMGR
from cmssh.auth_utils import PEMMGR, working_pem
from cmssh.cmssw_utils import crab_submit_remotely, crabconfig
from cmssh.cern_html import read
from cmssh.dashboard import jobsummary
from cmssh.reqmgr import reqmgr
from cmssh.cms_objects import get_dashboardname
def options(arg):
"""Extract options from given arg string"""
opts = []
for par in arg.split():
if len(par) > 0 and par[0] == '-':
opts.append(par)
return opts
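# e.g. options('-l -h file.txt') yields ['-l', '-h']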
class Magic(object):
"""
    Class to be used with ipython magic functions. It holds a given
    command and provides a method to execute it in a shell
"""
def __init__(self, cmd):
self.cmd = cmd
def execute(self, args=''):
"Execute given command in current shell environment"
cmd = '%s %s' % (self.cmd, args.strip())
run(cmd)
def subprocess(self, args=''):
"Execute given command in original shell environment"
cmd = '%s %s' % (self.cmd, args.strip())
subprocess.call(cmd, shell=True)
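# Illustrative use (mirrors the edm* registration in cmsrel further below):
#   ipython.register_magic_function(Magic('/path/to/edmFileUtil').execute, 'line', 'edmFileUtil')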
def installed_releases():
"Print a list of releases installed on a system"
_osname, osarch = osparameters()
releases = []
for idir in os.listdir(os.environ['VO_CMS_SW_DIR']):
if idir.find(osarch) != -1:
rdir = os.path.join(\
os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir)
if os.path.isdir(rdir):
for rel in os.listdir(rdir):
releases.append('%s/%s' % (rel, idir))
if releases:
releases.sort()
print "\nInstalled releases:"
for rel in releases:
print rel
else:
msg = "\nYou don't have yet CMSSW release installed on your system."
msg += "\nPlease use " + msg_green('install CMSSW_X_Y_Z') \
+ ' command to install one'
print msg
def cms_read(arg):
"""
cmssh command to read provided HTML page (by default output dumps via pager)
Examples:
cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile
cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython
cmssh> read config.txt
"""
try:
debug = get_ipython().debug
except:
debug = 0
orig_arg = arg
if orig_arg.find('>') != -1:
arg, out = orig_arg.split('>', 1)
out = out.strip()
arg = arg.strip()
else:
out = None
if arg:
arg = arg.strip()
read(arg, out, debug)
def cms_releases(arg=None):
"""
List available CMS releases. Optional parameters either <list> or <all>
Examples:
cmssh> releases # show installed CMSSW releases
cmssh> releases list # list available CMSSW releases on given platform
cmssh> releases all # show all known CMS releases, including online, tests, etc.
"""
if arg:
print "CMSSW releases for %s platform" % platform()
res = release_info(release=None, rfilter=arg)
RESMGR.assign(res)
releases = [str(r) for r in res]
releases = list(set(releases))
releases.sort()
for rel in releases:
print rel
installed_releases()
def pkg_init(pkg_dir):
"Create CMS command to source pkg environment"
pkg_dir = '%s/%s/%s' \
% (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir)
cmd = 'source `find %s -name init.sh | tail -1`;' % pkg_dir
if not os.path.isdir(pkg_dir):
cmd = ''
return cmd
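# e.g. pkg_init('lcg/root') returns "source `find $VO_CMS_SW_DIR/$SCRAM_ARCH/lcg/root -name init.sh | tail -1`;"
# or an empty string when that package directory does not exist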
def cms_root(arg):
"""
cmssh command to run ROOT within cmssh
Examples:
cmssh> root -l
"""
pcre_init = pkg_init('external/pcre')
gcc_init = pkg_init('external/gcc')
root_init = pkg_init('lcg/root')
pkgs_init = '%s %s %s' % (pcre_init, gcc_init, root_init)
cmd = '%s root -l %s' % (pkgs_init, arg.strip())
run(cmd)
def cms_xrdcp(arg):
"""
cmssh command to run ROOT xrdcp via cmssh shell
Examples:
cmssh> xrdcp /a/b/c.root file:////tmp.file.root
"""
dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None)
root_path = os.environ['DEFAULT_ROOT']
if dyld_path:
os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib')
cmd = '%s/xrdcp %s' % (os.path.join(root_path, 'bin'), arg.strip())
run(cmd)
if dyld_path:
os.environ['DYLD_LIBRARY_PATH'] = dyld_path
#def debug(arg):
# """
# debug shell command
# """
# arg = arg.strip()
# if arg:
# print_info("Set debug level to %s" % arg)
# DEBUG.set(arg)
# else:
# print_info("Debug level is %s" % DEBUG.level)
def debug_http(arg):
"""
Show or set HTTP debug flag. Default is 0.
"""
arg = arg.strip()
if arg:
if arg not in ['0', '1']:
print_error('Please provide 0/1 for debug_http command')
return
print_info("Set HTTP debug level to %s" % arg)
os.environ['HTTPDEBUG'] = arg
else:
print_info("HTTP debug level is %s" % os.environ.get('HTTPDEBUG', 0))
def cms_find(arg):
"""
Perform lookup of given query in CMS data-services.
cmssh find command lookup given query in CMS data-services.
Examples:
cmssh> find dataset=/ZMM*
cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW
        cmssh> find site dataset=/Cosmics/CRUZET3-v1/RAW
cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM
cmssh> find run=160915
cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD
cmssh> find lumi run=190704
cmssh> find user=oliver
List of supported entities:
dataset, block, file, run, lumi, site, user
"""
lookup(arg)
def cms_du(arg):
"""
cmssh disk utility cmssh command.
Examples:
cmssh> du # UNIX command
cmssh> du T3_US_Cornell
"""
arg = arg.strip()
if pat_site.match(arg):
lookup(arg)
else:
cmd = 'du ' + arg
cmd = cmd.strip()
subprocess.call(cmd, shell=True)
def lookup(arg):
"""
Perform lookup of given query in CMS data-services.
"""
arg = arg.strip()
debug = get_ipython().debug
args = arg.split('|')
if len(args) == 1: # no filter
res = CMSMGR.lookup(arg)
    else:
        # chain each pipe-separated filter over the previous result
        res = CMSMGR.lookup(args[0].strip())
        for flt in args[1:]:
            res = apply_filter(flt.strip(), res)
RESMGR.assign(res)
list_results(res, debug)
def verbose(arg):
"""
Set/get verbosity level
"""
arg = arg.strip()
ipth = get_ipython()
if arg == '':
print_info("Verbose level is %s" % ipth.debug)
else:
if arg == 0 or arg == '0':
ipth.debug = False
else:
ipth.debug = True
# CMSSW commands
def bootstrap(arch):
"Bootstrap new architecture"
swdir = os.environ['VO_CMS_SW_DIR']
    # use the caller-supplied arch rather than shadowing it from the environment
    cmd = 'sh -x %s/bootstrap.sh setup -path %s -arch %s' % (swdir, swdir, arch)
if unsupported_linux():
cmd += ' -unsupported_distribution_hack'
sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW')
debug = 0
msg = 'Bootstrap %s ...' % arch
# run bootstrap command in subprocess.call since it invokes
    # wget/curl and it can be spawned into a separate process, therefore
# subprocess.Popen will not catch it
run(cmd, sdir, 'bootstrap.log', msg, debug, shell=True, call=True)
cmd = 'source `find %s/%s/external/apt -name init.sh | tail -1`; ' \
% (swdir, arch)
cmd += 'apt-get install external+fakesystem+1.0; '
cmd += 'apt-get update; '
msg = 'Initialize %s apt repository ...' % arch
run(cmd, sdir, msg=msg, debug=debug, shell=True)
def get_release_arch(rel):
"Return architecture for given CMSSW release"
args = {'release': rel}
releases = get_data(tc_url('py_getReleaseArchitectures'), args)
output = []
for item in releases:
rel_arch = item[0]
status = item[1]
if check_os(rel_arch):
output.append((rel_arch, status))
return output
def check_release_arch(rel):
"Check release/architecture"
# check if given release name is installed on user system
rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel)
if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)):
return 'ok'
output = []
for arch, status in get_release_arch(rel):
if not status:
msg = '%s release is not officially supported under %s' \
% (rel, arch)
print_warning(msg)
if arch != os.environ['SCRAM_ARCH']:
msg = 'Your SCRAM_ARCH=%s, while found arch=%s' \
% (os.environ['SCRAM_ARCH'], arch)
print_warning(msg)
msg = '\n%s/%s is not installed within cmssh, proceed' \
% (rel, arch)
if user_input(msg, default='N'):
os.environ['SCRAM_ARCH'] = arch
if not os.path.isdir(\
os.path.join(os.environ['VO_CMS_SW_DIR'], arch)):
bootstrap(arch)
return 'ok'
else:
msg = '%s/%s rejected by user' % (rel, arch)
output.append(msg)
if output:
return ', '.join(output)
osname, osarch = osparameters()
if osname == 'osx' and osarch == 'ia32':
return 'OSX/ia32 is not supported in CMSSW'
return 'no match'
def get_apt_init(arch):
"Return proper apt init.sh for given architecture"
apt_dir = os.path.join(\
os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch)
dirs = os.listdir(apt_dir)
dirs.sort()
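    # after the lexicographic sort, dirs[-1] is the newest installed apt version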
name = 'etc/profile.d/init.sh'
script = os.path.join(os.path.join(apt_dir, dirs[-1]), name)
return script
def cms_install(rel):
"""
cmssh command to install given CMSSW release.
Examples:
cmssh> install CMSSW_5_2_4
"""
rel = rel.strip()
pat = pat_release
if not pat.match(rel):
msg = 'Fail to validate release name "%s"' % rel
print_error(msg)
        msg = 'Please check that you provide a correct release name,'
msg += ' e.g. CMSSW_X_Y_Z<_patchN>'
print msg
return
# check if we have stand-alone installation
if os.environ.get('CMSSH_CMSSW', None):
msg = '\nYou are not allowed to install new release, '
msg += 'since cmssh was installed with system CMSSW install area'
print msg
return
# check if given release/architecture is in place
status = check_release_arch(rel)
if status != 'ok':
msg = '\nCheck release architecture status: %s' % status
print msg
return
print "Searching for %s" % rel
script = get_apt_init(os.environ['SCRAM_ARCH'])
cmd = 'source %s; apt-cache search %s | grep -v -i fwlite' % (script, rel)
run(cmd)
if rel.lower().find('patch') != -1:
print "Installing cms+cmssw-patch+%s ..." % rel
cmd = 'source %s; apt-get install cms+cmssw-patch+%s' % (script, rel)
else:
print "Installing cms+cmssw+%s ..." % rel
cmd = 'source %s; apt-get install cms+cmssw+%s' % (script, rel)
subprocess.call(cmd, shell=True) # use subprocess due to apt-get interactive feature
if platform() == 'osx':
idir = '%s/%s/cms/cmssw/%s' \
% (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel)
fix_so(idir)
print "Create user area for %s release ..." % rel
cmsrel(rel)
def cmsenv(_arg):
"cmsenv command"
# in CMS cmsenv is an alias to: eval `scramv1 runtime -sh`'
msg = 'Within cmssh it is not required to use cmsenv\n'
msg += 'please use ' + msg_green('cmsrel') + ' command and '
msg += 'CMS release environment will be set for you'
print_info(msg)
def cmsrel(rel):
"""
cmssh release setup command, it setups CMSSW environment and creates user based
directory structure.
Examples:
cmssh> cmsrel # reset CMSSW environment to cmssh one
cmssh> cmsrel CMSSW_5_2_4
"""
ipython = get_ipython()
rel = rel.strip()
if not rel or rel in ['reset', 'clear', 'clean']:
path = os.environ['CMSSH_ROOT']
for idir in ['external', 'lib', 'root']:
pdir = os.path.join(path, 'install/lib/release_%s' % idir)
if os.path.islink(pdir):
os.remove(pdir)
if os.path.isdir(pdir):
shutil.rmtree(pdir)
os.makedirs(pdir)
# Set cmssh prompt
prompt = 'cms-sh'
ipython.prompt_manager.in_template = '%s|\#> ' % prompt
return
# check if given release name is installed on user system
rel_arch = None
for arch in cms_architectures():
rel_dir = '%s/cms/cmssw/%s' % (arch, rel)
if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)):
rel_arch = arch
break
if not rel_arch:
msg = 'Release ' + msg_red(rel)
msg += ' is not yet installed on your system.\n'
msg += 'Use ' + msg_green('releases')
msg += ' command to list available releases.\n'
msg += 'Use ' + msg_green('install %s' % rel)
msg += ' command to install given release.'
print msg
return
# set release architecture
os.environ['SCRAM_ARCH'] = rel_arch
# setup environment
cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd())
if not os.path.isdir(cmssw_dir):
os.makedirs(cmssw_dir)
root = os.environ['CMSSH_ROOT']
idir = os.environ['CMSSH_INSTALL_DIR']
base = os.path.realpath('%s/CMSSW' % root)
path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel)
os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel)
os.environ['CMSSW_RELEASE_BASE'] = path
for pkg in ['FWCore', 'DataFormats']:
pdir = '%s/%s' % (idir, pkg)
if os.path.exists(pdir):
shutil.rmtree(pdir)
os.mkdir(pdir)
touch(os.path.join(pdir, '__init__.py'))
pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger',
'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities',
'Services', 'Utilities']
for pkg in pkgs:
link = '%s/src/FWCore/%s/python' % (path, pkg)
dst = '%s/FWCore/%s' % (idir, pkg)
os.symlink(link, dst)
link = '%s/src/DataFormats/FWLite/python' % path
dst = '%s/DataFormats/FWLite' % idir
os.symlink(link, dst)
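    # expose the release's 'external' and 'lib' areas through fixed symlink
    # names, so switching releases only re-points these two links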
for lib in ['external', 'lib']:
link = '%s/%s/%s' % (path, lib, rel_arch)
dst = '%s/install/lib/release_%s' % (root, lib)
if os.path.islink(dst):
os.remove(dst)
else:
shutil.rmtree(dst)
os.symlink(link, dst)
# switch to given release
os.environ['CMSSW_VERSION'] = rel
os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel)
if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')):
os.chdir(os.path.join(cmssw_dir, rel + '/src'))
else:
os.chdir(cmssw_dir)
cmd = "scramv1 project CMSSW %s" % rel
run(cmd)
os.chdir(os.path.join(rel, 'src'))
# get ROOT from run-time environment
cmd = 'eval `scramv1 runtime -sh`; env | grep ^ROOTSYS='
stdout, stderr = execmd(cmd)
if stderr:
print "While executing cmd=%s" % cmd
print_warning(stderr)
rootsys = stdout.replace('\n', '').replace('ROOTSYS=', '')
dst = '%s/install/lib/release_root' % root
if os.path.exists(dst):
if os.path.islink(dst):
os.remove(dst)
else:
shutil.rmtree(dst)
os.symlink(rootsys, dst)
# set edm utils for given release
ipython = get_ipython()
rdir = '%s/bin/%s' % (rel_dir, rel_arch)
reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir)
for name in os.listdir(reldir):
fname = os.path.join(reldir, name)
if name.find('edm') == 0 and os.path.isfile(fname):
# we use Magic(cmd).execute we don't need
# to add scramv1 command in front of edm one, since
# execute method will run in current shell environment
# old command for reference:
# cmd = "eval `scramv1 runtime -sh`; %s" % fname
cmd = fname
ipython.register_magic_function(Magic(cmd).execute, 'line', name)
# Set cmssh prompt
ipython.prompt_manager.in_template = '%s|\#> ' % rel
# final message
print "%s is ready, cwd: %s" % (rel, os.getcwd())
def cmsexe(cmd):
"""
Execute given command within CMSSW environment
"""
vdir = os.environ.get('VO_CMS_SW_DIR', None)
arch = os.environ.get('SCRAM_ARCH', None)
if not vdir or not arch:
msg = 'Unable to identify CMSSW environment, please run first: '
msg = msg_red(msg)
msg += msg_blue('cmsrel <rel>\n')
releases = os.listdir(os.environ['CMSSW_RELEASES'])
msg += '\nInstalled releases: ' + msg_green(', '.join(releases))
print msg
return
cmd = "eval `scramv1 runtime -sh`; %s" % cmd
run(cmd, shell=True, call=True)
def cmscrab(arg):
"""
Execute CRAB command, help is available at
https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq
"""
msg = \
'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq'
print_info(msg)
# check if release version and work area are set (should be set at cmsrel)
rel = os.environ.get('CMSSW_VERSION', None)
work_area = os.environ.get('CMSSW_WORKAREA', None)
if not rel or not work_area:
msg = 'In order to run crab command you must '
msg += 'run ' + msg_blue('cmsrel') + ' command'
print_error(msg)
return
# check existence of crab.cfg
crab_dir = os.path.join(work_area, 'crab')
crab_cfg = os.path.join(crab_dir, 'crab.cfg')
    if not os.path.isdir(crab_dir):
        os.makedirs(crab_dir)
    # always work from the crab directory so crab.cfg is read/written there
    os.chdir(crab_dir)
if not os.path.isfile(crab_cfg):
msg = 'No crab.cfg file found in %s' % crab_dir
print_warning(msg)
msg = 'Would you like to create one'
if user_input(msg, default='N'):
with open('crab.cfg', 'w') as config:
config.write(crabconfig())
msg = 'Your crab.cfg has been created, please edit it '
msg += 'appropriately and re-run crab command'
print_info(msg)
print "cwd:", os.getcwd()
return
if os.uname()[0] == 'Darwin' and arg == '-submit':
crab_submit_remotely(rel, work_area)
return
cmd = 'source $CRAB_ROOT/crab.sh; crab %s' % arg
cmsexe(cmd)
def cmsrun(arg):
"""
cmssh command to execute CMSSW cmsRun command.
Requires cmsrel to setup CMSSW environment.
"""
cmd = 'cmsRun %s' % arg
cmsexe(cmd)
def cms_pager(arg=None):
"""
cmssh command to show or set internal pager
Examples:
cmssh> pager # shows current setting
        cmssh> pager None # disable the pager
"""
arg = arg.strip()
if arg:
if arg == '0' or arg == 'None' or arg == 'False':
if os.environ.has_key('CMSSH_PAGER'):
del os.environ['CMSSH_PAGER']
else:
os.environ['CMSSH_PAGER'] = arg
print "Set CMSSH pager to %s" % arg
else:
val = os.environ.get('CMSSH_PAGER', None)
msg = "cmssh pager is set to: %s" % val
print msg
def dbs_instance(arg=None):
"""
cmssh command to show or set DBS instance
Examples:
cmssh> dbs_instance
cmssh> dbs_instance cms_dbs_prod_global
"""
arg = arg.strip()
if arg:
if validate_dbs_instance(arg):
os.environ['DBS_INSTANCE'] = arg
print "Switch to %s DBS instance" % arg
else:
print "Invalid DBS instance"
else:
msg = "DBS instance is set to: %s" \
% os.environ.get('DBS_INSTANCE', 'global')
print msg
print '\nAvailable DBS instances:'
for inst in dbs_instances():
print inst
def cms_help_msg():
"""cmsHelp message"""
msg = 'Available cmssh commands:\n'
msg += msg_green('find ') \
+ ' search CMS meta-data (query DBS/Phedex/SiteDB)\n'
msg += msg_green('dbs_instance') \
+ ' show/set DBS instance, default is DBS global instance\n'
msg += msg_green('mkdir/rmdir ') + ' mkdir/rmdir command, ' \
+ 'e.g. mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\n'
msg += msg_green('ls ') \
+ ' list file/LFN, e.g. ls local.file or ls /store/user/file.root\n'
msg += msg_green('rm ') + ' remove file/LFN, ' \
+ 'e.g. rm local.file or rm T3_US_Cornell:/store/user/file.root\n'
msg += msg_green('cp ') \
+ ' copy file/LFN, e.g. cp local.file or cp /store/user/file.root .\n'
msg += msg_green('info ') \
+ ' provides detailed info about given CMS entity, ' \
+ 'e.g. info run=160915\n'
msg += msg_green('das ') + ' query DAS service\n'
msg += msg_green('das_json ') \
+ ' query DAS and return data in JSON format\n'
msg += msg_green('jobs ') \
+ ' status of job queue or CMS jobs\n'
msg += msg_green('read ') \
+ ' read URL/local file content\n'
msg += msg_green('root ') + ' invoke ROOT\n'
msg += msg_green('du ') \
+ ' display disk usage for given site, e.g. du T3_US_Cornell\n'
msg += '\nAvailable CMSSW commands (once you install any CMSSW release):\n'
msg += msg_green('releases ') \
+ ' list available CMSSW releases, accepts <list|all> args\n'
msg += msg_green('install ') \
+ ' install CMSSW release, e.g. install CMSSW_5_0_0\n'
msg += msg_green('cmsrel ') \
+ ' switch to given CMSSW release and setup its environment\n'
msg += msg_green('arch ') \
+ ' show or switch to given CMSSW architecture, accept <list|all> args\n'
msg += msg_green('scram ') + ' CMSSW scram command\n'
msg += msg_green('cmsRun ') \
+ ' cmsRun command for release in question\n'
msg += '\nAvailable GRID commands: <cmd> either grid or voms\n'
msg += msg_green('vomsinit ') \
+ ' setup your proxy (aka voms-proxy-init)\n'
msg += msg_green('vomsinfo ') \
+ ' show your proxy info (aka voms-proxy-info)\n'
msg += '\nQuery results are accessible via %s function, e.g.\n' \
% msg_blue('results()')
msg += ' find dataset=/*Zee*\n'
msg += ' for r in results(): print r, type(r)\n'
msg += '\nList cmssh commands : ' + msg_blue('commands')
msg += '\ncmssh command help : ' + msg_blue('cmshelp <command>')
msg += '\nInstall python software: ' + \
msg_blue('pip <search|(un)install> <package>')
return msg
def cms_help(arg=None):
"""
cmshelp command
Examples:
cmssh> cmshelp
cmssh> cmshelp commands
cmssh> cmshelp ls
"""
if arg:
if arg.strip() == 'commands':
cms_commands()
return
ipython = get_ipython()
if arg[0] == '(' and arg[-1] == ')':
arg = arg[1:-1]
for case in [arg, 'cms_'+arg, 'cms'+arg]:
func = ipython.find_magic(case)
if func:
doc = func.func_doc
break
else:
doc = 'Documentation is not available'
else:
doc = cms_help_msg()
print doc
def cms_rm(arg):
"""
CMS rm command works with local files/dirs and CMS storate elements.
Examples:
cmssh> rm local_file
cmssh> rm -rf local_dir
cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root
"""
arg = arg.strip()
try:
debug = get_ipython().debug
except:
debug = 0
    if not arg:
        print_error("Usage: rm <options> source_file")
        return
dst = arg.split()[-1]
if os.path.exists(dst) or len(glob.glob(dst)):
cmd = "rm %s" % arg
run(cmd)
else:
if pat_lfn.match(arg.split(':')[-1]):
status = rm_lfn(arg, verbose=debug)
print_status(status)
else:
if not os.path.exists(dst):
                print_error('File %s does not exist' % dst)
else:
raise Exception('Not implemented yet')
def cms_rmdir(arg):
"""
cmssh rmdir command removes directory from local file system or CMS storage element.
Examples:
cmssh> rmdir foo
cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo
"""
arg = arg.strip()
try:
debug = get_ipython().debug
except:
debug = 0
    if not arg:
        print_error("Usage: rmdir <options> dir")
        return
if os.path.exists(arg):
run("rmdir %s" % arg)
else:
try:
status = rmdir(arg, verbose=debug)
print_status(status)
except:
traceback.print_exc()
def cms_mkdir(arg):
"""
cmssh mkdir command creates directory on local filesystem or remote CMS storage element.
Examples:
cmssh> mkdir foo
cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo
"""
arg = arg.strip()
try:
debug = get_ipython().debug
except:
debug = 0
    if not arg:
        print_error("Usage: mkdir <options> dir")
        return
if arg.find(':') == -1: # not a SE:dir pattern
run("mkdir %s" % arg)
else:
try:
status = mkdir(arg, verbose=debug)
print_status(status)
except:
traceback.print_exc()
def cms_ls(arg):
"""
    cmssh ls command lists local files/dirs/CMS storage elements or
CMS entities (se, site, dataset, block, run, release, file).
Examples:
cmssh> ls # UNIX command
cmssh> ls -l local_file
cmssh> ls T3_US_Cornell:/store/user/valya
cmssh> ls run=160915
"""
arg = arg.strip()
res = []
try:
debug = get_ipython().debug
except:
debug = 0
orig_arg = arg
if orig_arg.find('|') != -1:
arg, flt = orig_arg.split('|', 1)
arg = arg.strip()
else:
flt = None
startswith = None
entities = \
['se', 'site', 'lfn', 'dataset', 'block', 'run', 'release', 'file']
for item in entities:
if arg.startswith(item + '='):
startswith = item
if os.path.isfile(orig_arg) or os.path.isdir(orig_arg):
cmd = 'ls ' + orig_arg
run(cmd, shell=True)
elif pat_se.match(arg):
arg = arg.replace('site=', '')
res = list_se(arg, debug)
elif pat_site.match(arg):
arg = arg.replace('site=', '')
res = site_info(arg, debug)
elif pat_lfn.match(arg):
arg = arg.replace('file=', '')
arg = arg.replace('lfn=', '')
res = file_info(arg, debug)
elif pat_block.match(arg):
arg = arg.replace('block=', '')
res = block_info(arg, debug)
elif pat_dataset.match(arg):
arg = arg.replace('dataset=', '')
try:
res = dataset_info(arg, debug)
except IndexError:
msg = "Given pattern '%s' does not exist on local filesystem or in DBS" % arg
print_error(msg)
elif pat_run.match(arg):
arg = arg.replace('run=', '')
res = run_info(arg, debug)
elif pat_release.match(arg):
arg = arg.replace('release=', '')
res = release_info(arg, debug)
elif startswith:
msg = 'No pattern is allowed for %s look-up' % startswith
print_error(msg)
else:
cmd = 'ls ' + orig_arg
run(cmd, shell=True)
if res:
RESMGR.assign(res)
list_results(res, debug=True, flt=flt)
def cms_jobs(arg=None):
"""
cmssh jobs command lists local job queue or provides information
    about jobs at a given site or for a given user. It accepts the following
list of options:
- list, which lists local transfer jobs
- site, which lists jobs at given site
- dashboard, which lists jobs of current user
- user, which lists jobs of given user
Examples:
cmssh> jobs
cmssh> jobs list
cmssh> jobs site=T2_US_UCSD
cmssh> jobs dashboard
cmssh> jobs user=my_cms_user_name
"""
res = None
try:
debug = get_ipython().debug
except:
debug = 0
    orig_arg = arg if arg else ''
    if orig_arg.find('|') != -1:
        arg, flt = orig_arg.split('|', 1)
        arg = arg.strip()
    else:
        flt = None
if arg:
arg = arg.strip()
if not arg or arg == 'list':
print_info('Local data transfer')
dqueue(arg)
elif arg == 'dashboard':
userdn = os.environ.get('USER_DN', None)
if userdn:
user = get_dashboardname(userdn)
print_info('Dashboard information, user=%s' % user)
res = jobsummary({'user': user})
elif pat_site.match(arg):
site = arg.replace('site=', '')
print_info('Dashboard information, site=%s' % site)
res = jobsummary({'site': site})
elif pat_user.match(arg):
user = arg.replace('user=', '')
print_info('Dashboard information, user=%s' % user)
res = jobsummary({'user': user})
if res:
RESMGR.assign(res)
list_results(res, debug=True, flt=flt)
def cms_config(arg):
"""
Return configuration object for given dataset
Examples:
cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM
"""
if arg:
arg = arg.strip()
if pat_dataset.match(arg):
reqmgr(arg.replace('dataset=', ''))
def cms_lumi(arg):
"""
Return lumi info for a given dataset/file/block/lfn/run
Examples:
cmssh> lumi run=190704
cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD
        cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6
        cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root
        cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]}
"""
try:
debug = get_ipython().debug
except:
debug = 0
arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '')
arg = arg.replace('lfn=', '').replace('run=', '')
    run_lumi_info(arg, debug)
def cms_json(arg):
"Print or set location of CMS JSON file"
if arg:
if access2file(arg):
os.environ['CMS_JSON'] = arg
print_info('CMS_JSON: %s' % arg)
else:
fname = os.environ.get('CMS_JSON')
print_info('CMS JSON: %s' % fname)
try:
debug = get_ipython().debug
except:
debug = 0
if debug and access2file(fname):
with open(fname, 'r') as cms_json:
print cms_json.read()
def integration_tests(_arg):
"Run series of integration tests for cmssh"
for fname in ['file1.root', 'file2.root']:
if os.path.isfile(fname):
os.remove(fname)
lfn = \
'/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root'
lfn2 = \
'/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root'
dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO'
dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM'
run = 160915
sename = 'T3_US_Cornell:/store/user/valya'
cmd_list = ['pager 0', 'debug_http 0']
cmd_list += ['ls', 'mkdir ttt', 'ls -l', 'rmdir ttt', 'ls']
cmd_list += ['ls dataset=%s' % dataset, 'ls run=%s' % run, 'ls file=%s' % lfn]
cmd_list += ['ls %s' % dataset, 'info %s' % dataset]
cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s' % lfn]
cmd_list += ['find lumi dataset=%s' % dataset,
'find lumi {"190704":[1,2,3]}',
'find lumi {190704:[1,2,3]}']
cmd_list += ['find config dataset=%s' % dataset2]
cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell']
cmd_list += ['ls %s' % sename,
'mkdir %s/foo' % sename,
'ls %s' % sename,
'rmdir %s/foo' % sename,
'ls %s' % sename,
]
cmd_list += ['cp %s file.root' % lfn,
'ls',
'cp file.root %s' % sename,
'ls %s' % sename,
'rm %s/file.root' % sename,
'ls %s' % sename,
'rm file.root',
'cp %s file1.root &' % lfn,
'cp %s file2.root &' % lfn2,
'ls']
cmd_list += ['find user=oliver', 'jobs list', 'jobs user=AikenOliver']
cmd_list += ['releases list', 'arch list', 'jobs', 'ls']
cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS']
mgr = get_ipython()
for item in cmd_list:
print_info("Execute %s" % item)
split = item.split(' ', 1)
if len(split) == 1:
cmd = item
args = ''
else:
cmd = split[0]
args = split[-1]
mgr.run_line_magic(cmd, args)
def cms_info(arg):
"""
cmssh info command provides information for given meta-data entity, e.g.
dataset, block, file, run.
Examples:
cmssh> info dataset=/a/b/c
cmssh> info /a/b/c
cmssh> info run=160915
cmssh> info local_file.root
Please note: to enable access to RunSummary service please ensure that your
usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx
"""
if not arg:
return
try:
debug = get_ipython().debug
except:
debug = 0
fname = arg.replace('file=', '')
if arg and os.path.isfile(fname):
mtype = mimetypes.guess_type(arg)
if mtype[0]:
print "Mime type:", mtype[0]
ipython = get_ipython()
magic = ipython.find_line_magic('edmFileUtil')
if magic:
if arg[0] == '/':
cmd = '-e -f file:///%s' % fname
else:
cmd = '-e -f %s' % fname
ipython.run_line_magic('edmFileUtil', cmd)
if debug:
if ipython.find_line_magic('edmDumpEventContent'):
ipython.run_line_magic('edmDumpEventContent', fname)
else:
cms_ls(arg)
def cms_cp(arg):
"""
    cmssh cp command copies local files/dirs to/from local files/dirs or CMS storage elements.
Examples:
cmssh> cp file1 file2
cmssh> cp file.root T3_US_Cornell:/store/user/name
cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name
cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha
"""
check_voms_proxy()
background = False
orig_arg = arg
arg = arg.strip()
try:
last_arg = arg.split(' ')[-1].strip()
if last_arg == '&':
background = True
arg = arg.replace('&', '').strip()
src, dst = arg.rsplit(' ', 1)
if dst.find('&') != -1:
background = True
dst = dst.replace('&', '').strip()
if dst == '.':
dst = os.getcwd()
# check if src still has options and user asked for -f
options = src.split(' ')
if len(options) > 1 and options[0] == '-f':
overwrite = True
else:
overwrite = False
except:
traceback.print_exc()
return
try:
debug = get_ipython().debug
except:
debug = 0
    if not arg:
        print_error("Usage: cp <options> source_file target_{file,directory}")
        return
pat = pat_se
orig = src.split(' ')[-1]
if os.path.exists(orig) and not pat.match(dst):
if background:
cmd = 'cp %s' % orig_arg
subprocess.call(cmd, shell=True)
else:
run("cp %s %s" % (src, dst))
else:
try:
status = copy_lfn(orig, dst, debug, background, overwrite)
print_status(status)
except:
traceback.print_exc()
def cms_architectures(arch_type=None):
"Return list of CMSSW architectures (aka SCRAM_ARCH)"
archs = [a for a in tc_architectures(arch_type)]
return archs
def cms_arch(arg=None):
"""
Show or set CMSSW architecture. Optional parameters either <all> or <list>
Examples:
cmssh> arch # show current and installed architecture(s)
cmssh> arch all # show all known CMSSW architectures
cmssh> arch list # show all CMSSW architectures for given platform
"""
if not arg:
print "Current architecture: %s" % os.environ['SCRAM_ARCH']
archs = []
for name in os.listdir(os.environ['VO_CMS_SW_DIR']):
if check_os(name) and name.find('.') == -1:
archs.append(name)
if archs:
print '\nInstalled architectures:'
for item in archs:
print item
elif arg == 'all' or arg == 'list':
if arg == 'all':
print 'CMSSW architectures:'
else:
print 'CMSSW architectures for %s:' \
% os.uname()[0].replace('Darwin', 'OSX')
for name in cms_architectures('all'):
if arg == 'all':
print name
else:
if check_os(name):
print name
else:
cms_archs = cms_architectures('all')
if arg not in cms_archs:
msg = 'Wrong architecture, please choose from the following list\n'
msg += ', '.join(cms_archs)
raise Exception(msg)
print "Switch to SCRAM_ARCH=%s" % arg
os.environ['SCRAM_ARCH'] = arg
def cms_apt(arg=''):
"Execute apt commands"
if '-cache' in arg or '-get' in arg:
cmd = 'apt%s' % arg
else:
        msg = 'Unsupported apt command'
raise Exception(msg)
run(cmd)
def cms_das(query):
"""
cmssh command which queries DAS data-service with provided query.
Examples:
cmssh> das dataset=/ZMM*
"""
host = 'https://cmsweb.cern.ch'
idx = 0
limit = 0
debug = 0
das_client(host, query, idx, limit, debug, 'plain')
def cms_das_json(query):
"""
cmssh command which queries DAS data-service with provided query and
returns results in JSON data format
Examples:
cmssh> das_json dataset=/ZMM*
"""
host = 'https://cmsweb.cern.ch'
idx = 0
limit = 0
debug = 0
res = das_client(host, query, idx, limit, debug, 'json')
RESMGR.assign([res])
pprint.pprint(res)
def cms_vomsinit(_arg=None):
"""
cmssh command which executes voms-proxy-init on behalf of the user
Examples:
cmssh> vomsinit
By default it applies the following options
-rfc -voms cms:/cms -key <userkey.pem> -cert <usercert.pem>
"""
cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem')
with working_pem(PEMMGR.pem) as key:
run("voms-proxy-destroy")
cmd = "voms-proxy-init -rfc -voms cms:/cms -key %s -cert %s" % (key, cert)
run(cmd)
userdn = os.environ.get('USER_DN', '')
if not userdn:
cmd = "voms-proxy-info -identity"
stdout, stderr = execmd(cmd)
os.environ['USER_DN'] = stdout.replace('\n', '')
def github_issues(arg=None):
"""
Retrieve information about cmssh tickets, e.g.
Examples:
cmssh> tickets # list all cmssh tickets
cmssh> ticket 14 # get details for given ticket id
cmssh> ticket new # post new ticket from cmssh
# or post it at https://github.com/vkuznet/cmssh/issues/new
"""
if arg == 'new':
msg = 'You can post new ticket via web interface at\n'
msg += 'https://github.com/vkuznet/cmssh/issues/new\n'
        msg += 'otherwise it will be posted as an anonymous gist ticket'
print_info(msg)
if not user_input('Proceed', default='N'):
return
email = raw_input('Your Email : ')
if not email:
msg = "You did your email address"
print_error(msg)
return
desc = ''
msg = 'Type your problem, attach traceback, etc. Once done print '
msg += msg_blue('EOF') + ' and hit ' + msg_blue('Enter') + '\n'
print msg
while True:
try:
uinput = raw_input()
if uinput.strip() == 'EOF':
break
desc += uinput + '\n'
except KeyboardInterrupt:
break
if not desc:
msg = "You did not provide bug description"
print_error(msg)
return
if not user_input('Send this ticket', default='N'):
print_info('Aborting your action')
return
key = '<KEY>' % time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(time.time()))
files = {key: {'content': desc}}
res = post_ticket(key, files)
if res.has_key('html_url'):
print_status('New gist ticket %s' % res['html_url'])
title = 'cmssh gist %s' % res['html_url']
if isinstance(res, dict):
ticket = pprint.pformat(res)
else:
ticket = res
to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\n')
send_email(to_user, email, title, ticket)
else:
res = get_tickets(arg)
RESMGR.assign(res)
pprint.pprint(res)
def demo(_arg=None):
"Show cmssh demo file"
root = os.environ.get('CMSSH_ROOT')
path = os.path.join(root, 'cmssh/DEMO')
with open(path, 'r') as demo_file:
print demo_file.read()
def results():
"""Return results from recent query"""
return RESMGR
def cms_commands(_arg=None):
"""
cmssh command which lists all registered cmssh commands in current shell.
Examples:
cmssh> cmshelp commands
"""
mdict = get_ipython().magics_manager.lsmagic()
cmds = [k for k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1]
cmds.sort()
for key in cmds:
print key
| 1.664063 | 2 |
abstraction/dist_metrics.py | xoren22/tmp | 1 | 12798516 | import torch
class ACT_EMD:
"""
    EMD stands for Earth Mover's Distance - the Mallows distance or
    1st Wasserstein distance between two distributions; it is a
    measure of the distance between two probability distributions.
    ACT, or Approximate Constrained Transfers, is a linear-complexity
    approximation of ICT, the Iterative Constrained Transfers, which
    is a symmetric lower-bound approximation of the Earth Mover's Distance.
    Note that as the number of iterations of ACT approaches infinity, ACT
    becomes the same as ICT. For more, read - https://arxiv.org/pdf/1812.02091.pdf
"""
def __init__(self, histograms, use_gpu=True, cost_matrix=None):
self.use_gpu = use_gpu
# these are the histograms to which the distance must be calculated
self.histograms = histograms
# this is the cost matrix showing the cost of
# transporting one unit of 'dirt' from coordinate i to j
self.cost_matrix = cost_matrix
def act(self):
# rename?
pass
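
# Minimal illustrative sketch (an assumption, not part of the original class):
# for 1-D histograms with unit ground distance and equal total mass, the exact
# EMD reduces to the L1 distance between cumulative sums. This is a handy
# reference point when validating an ACT/ICT implementation.
def emd_1d_sketch(hist_a, hist_b):
    """Exact 1st Wasserstein distance between two 1-D torch histograms."""
    cdf_diff = torch.cumsum(hist_a - hist_b, dim=0)
    return torch.sum(torch.abs(cdf_diff))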
| 3.4375 | 3 |
vyatta/common/utils.py | Brocade-OpenSource/vrouter-plugins | 0 | 12798517 | # Copyright 2015 Brocade Communications System, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from eventlet import greenthread
RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop')
def retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0):
    args = args or []
    kwargs = kwargs or {}
    last_exc = None
    while limit > 0:
        try:
            return fn(*args, **kwargs)
        except Exception as e:
            if not exceptions or not isinstance(e, exceptions):
                raise
            # Remember the exception so it can be re-raised once the retry
            # budget is exhausted (a bare raise after the loop would run
            # outside the except block and fail).
            last_exc = e
        if delay:
            greenthread.sleep(delay)
        limit -= 1
    raise last_exc
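
if __name__ == '__main__':
    # Minimal self-check (illustrative, not part of the plugin): fail twice
    # on IOError, then succeed on the third attempt.
    attempts = {'n': 0}

    def _flaky():
        attempts['n'] += 1
        if attempts['n'] < 3:
            raise IOError('transient failure')
        return 'ok'

    assert retry(_flaky, exceptions=(IOError,), limit=3) == 'ok'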
| 1.992188 | 2 |
modi2_firmware_updater/util/modi_winusb/modi_serialport.py | LUXROBO/-modi2-firmware-updater- | 1 | 12798518 | import sys
import time
import serial
import serial.tools.list_ports as stl
def list_modi_serialports():
info_list = []
def __is_modi_port(port):
return (port.vid == 0x2FDE and port.pid == 0x0003)
modi_ports = [port for port in stl.comports() if __is_modi_port(port)]
for modi_port in modi_ports:
info_list.append(modi_port.device)
if sys.platform.startswith("win"):
from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths
path_list = list_modi_winusb_paths()
for index, value in enumerate(path_list):
info_list.append(value)
return info_list
class ModiSerialPort():
SERIAL_MODE_COMPORT = 1
SERIAL_MODI_WINUSB = 2
def __init__(self, port = None, baudrate = 921600, timeout = 0.2, write_timeout = None):
self.type = self.SERIAL_MODE_COMPORT
self._port = port
self._baudrate = baudrate
self._timeout = timeout
self._write_timeout = write_timeout
self.serial_port = None
        self.is_open = False
if self._port is not None:
self.open(self._port)
def open(self, port):
self._port = port
if sys.platform.startswith("win"):
from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths
if port in list_modi_winusb_paths():
self.type = self.SERIAL_MODI_WINUSB
winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout)
self.serial_port = winusb
else:
ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True)
self.serial_port = ser
else:
ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True)
self.serial_port = ser
self.is_open = True
def close(self):
if self.is_open:
self.serial_port.close()
def write(self, data):
if not self.is_open:
raise Exception("serialport is not opened")
if type(data) is str:
data = data.encode("utf8")
self.serial_port.write(data)
def read(self, size=1):
if not self.is_open:
raise Exception("serialport is not opened")
if size == None and self.type == self.SERIAL_MODE_COMPORT:
size = 1
return self.serial_port.read(size)
def read_until(self, expected=b"\x0A", size=None):
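        # Reads bytes until the expected terminator is seen, an optional size
        # limit is reached, or the port timeout expires (mirrors pyserial's
        # read_until semantics, using the Timeout helper defined below).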
if not self.is_open:
raise Exception("serialport is not opened")
lenterm = len(expected)
line = bytearray()
modi_timeout = self.Timeout(self._timeout)
while True:
c = self.read(1)
if c:
line += c
if line[-lenterm:] == expected:
break
if size is not None and len(line) >= size:
break
else:
break
if modi_timeout.expired():
break
return bytes(line)
def read_all(self):
if not self.is_open:
raise Exception("serialport is not opened")
return self.serial_port.read_all()
def flush(self):
if not self.is_open:
raise Exception("serialport is not opened")
self.serial_port.flush()
def flushInput(self):
if not self.is_open:
raise Exception("serialport is not opened")
self.serial_port.flushInput()
def flushOutput(self):
if not self.is_open:
raise Exception("serialport is not opened")
self.serial_port.flushOutput()
def setDTR(self, state):
if not self.is_open:
raise Exception("serialport is not opened")
self.serial_port.setDTR(state)
def setRTS(self, state):
if not self.is_open:
raise Exception("serialport is not opened")
self.serial_port.setRTS(state)
def inWaiting(self):
if not self.is_open:
raise Exception("serialport is not opened")
waiting = None
if self.type == self.SERIAL_MODE_COMPORT:
waiting = self.serial_port.inWaiting()
return waiting
@property
def port(self):
return self._port
@port.setter
def port(self, value):
self._port = value
self.serial_port.port = value
@property
def baudrate(self):
return self._baudrate
@baudrate.setter
def baudrate(self, value):
self._baudrate = value
self.serial_port.baudrate = value
@property
def timeout(self):
return self._timeout
@timeout.setter
def timeout(self, value):
self._timeout = value
self.serial_port.timeout = value
@property
def write_timeout(self):
return self._write_timeout
@write_timeout.setter
def write_timeout(self, value):
self._write_timeout = value
self.serial_port.write_timeout = value
@property
def dtr(self):
if self.type == self.SERIAL_MODE_COMPORT:
return self.serial_port.dtr
else:
return False
class Timeout(object):
"""\
Abstraction for timeout operations. Using time.monotonic() if available
or time.time() in all other cases.
The class can also be initialized with 0 or None, in order to support
non-blocking and fully blocking I/O operations. The attributes
is_non_blocking and is_infinite are set accordingly.
"""
if hasattr(time, 'monotonic'):
# Timeout implementation with time.monotonic(). This function is only
# supported by Python 3.3 and above. It returns a time in seconds
# (float) just as time.time(), but is not affected by system clock
# adjustments.
TIME = time.monotonic
else:
# Timeout implementation with time.time(). This is compatible with all
# Python versions but has issues if the clock is adjusted while the
# timeout is running.
TIME = time.time
def __init__(self, duration):
"""Initialize a timeout with given duration"""
self.is_infinite = (duration is None)
self.is_non_blocking = (duration == 0)
self.duration = duration
if duration is not None:
self.target_time = self.TIME() + duration
else:
self.target_time = None
def expired(self):
"""Return a boolean, telling if the timeout has expired"""
return self.target_time is not None and self.time_left() <= 0
def time_left(self):
"""Return how many seconds are left until the timeout expires"""
if self.is_non_blocking:
return 0
elif self.is_infinite:
return None
else:
delta = self.target_time - self.TIME()
if delta > self.duration:
# clock jumped, recalculate
self.target_time = self.TIME() + self.duration
return self.duration
else:
return max(0, delta)
def restart(self, duration):
"""\
Restart a timeout, only supported if a timeout was already set up
before.
"""
self.duration = duration
self.target_time = self.TIME() + duration
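
        # Illustrative usage (hypothetical polling loop):
        #   t = ModiSerialPort.Timeout(2.0)   # two-second budget
        #   while not t.expired():
        #       ...poll the port...
        #   # t.time_left() reports the remaining seconds (None if infinite)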
# main
if __name__ == "__main__":
stop = False
def handle_received(serialport):
global stop
while not stop:
init = time.time()
recv = serialport.read_until(b"}")
dt = time.time() - init
if recv == None:
print("disconnected")
stop = True
break
print(f"dt: {int(dt * 1000.0)}ms - {recv}")
time.sleep(0.001)
serialport.close()
import threading
info_list = list_modi_serialports()
if not info_list:
raise Exception("No MODI+ is connected")
serialport = ModiSerialPort(info_list[0])
threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start()
print("To exit the program, enter 'exit'.")
while not stop:
input_data = input()
if input_data == "exit":
stop = True
break
serialport.close() | 2.515625 | 3 |
addon/pycThermopack/gui/widgets/mpl_canvas.py | SINTEF/Thermopack | 28 | 12798519 | from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg
from matplotlib.figure import Figure
from gui.utils import MessageBox
import numpy as np
class MplCanvas(FigureCanvasQTAgg):
"""
A canvas for matplotlib plots. Contains all plot functionality for Plot Mode
"""
def __init__(self, components, plotting_preferences):
self.fig = Figure(dpi=100)
self.empty = True
self.components = components
self.isenthalps = None
self.isentropes = None
self.isotherms = None
self.isobars = None
super(MplCanvas, self).__init__(figure=self.fig)
self.plotting_preferences = plotting_preferences
def toggle_isenthalps(self, is_checked):
"""
Hides / shows isenthalp lines in the plot if a plot exists
:param is_checked: Status of isenthalp button (bool)
"""
if not self.empty and self.isenthalps:
if is_checked:
for line in self.isenthalps:
line.set_linestyle("solid")
else:
for line in self.isenthalps:
line.set_linestyle("None")
self.draw()
def toggle_isentropes(self, is_checked):
"""
Hides / shows isentrope lines in the plot if a plot exists
:param is_checked: Status of isentrope button (bool)
"""
if not self.empty and self.isentropes:
if is_checked:
for line in self.isentropes:
line.set_linestyle("solid")
else:
for line in self.isentropes:
line.set_linestyle("None")
self.draw()
else:
return
def toggle_isotherms(self, is_checked):
"""
Hides / shows isotherm lines in the plot if a plot exists
:param is_checked: Status of isotherm button (bool)
"""
if not self.empty and self.isotherms:
if is_checked:
for line in self.isotherms:
line.set_linestyle("solid")
else:
for line in self.isotherms:
line.set_linestyle("None")
self.draw()
else:
return
def toggle_isobars(self, is_checked):
"""
Hides / shows isobar lines in the plot if a plot exists
:param is_checked: Status of isobar button (bool)
"""
if not self.empty and self.isobars:
if is_checked:
for line in self.isobars:
line.set_linestyle("solid")
else:
for line in self.isobars:
line.set_linestyle("None")
self.draw()
else:
return
def plot_envelope(self, tp, prim_vars, fractions):
"""
Plots a phase envelope
:param tp: Thermopack instance
:param prim_vars: Primary variables for the plot (e.g. PT, PH, ..)
:param fractions: List of molar fractions for the components
"""
tpv_settings = self.plotting_preferences["Phase envelope"]["TPV"]
isopleth_settings = self.plotting_preferences["Phase envelope"]["Isopleths"]
critical_settings = self.plotting_preferences["Phase envelope"]["Critical"]
plot_settings = self.plotting_preferences["Phase envelope"]["Plotting"]
p_initial = tpv_settings["Initial pressure"]
t_min = tpv_settings["Minimum temperature"]
p_max = tpv_settings["Maximum pressure"]
step_size = tpv_settings["Step size"]
# Calculate T, P, V
T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max,
minimum_temperature=t_min, step_size=step_size, calc_v=True)
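        # Enthalpy and entropy along the saturation curve, evaluated at the
        # (T, V) points returned by the two-phase envelope tracer.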
H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in range(len(T))])
S = np.array([tp.entropy_tv(T[i], V[i], fractions) for i in range(len(T))])
global H_list
global T_list
global S_list
global P_list
n_isopleths = isopleth_settings["Number of isopleths"]
H_list = np.linspace(np.min(H), np.max(H), n_isopleths)
S_list = np.linspace(np.min(S), np.max(S), n_isopleths)
T_list = np.linspace(np.min(T) * 0.60, np.max(T) * 1.40, n_isopleths)
P_list = np.linspace(np.min(P) * 0.60, np.max(P) * 1.40, n_isopleths)
temp = critical_settings["Temperature"]
v = critical_settings["Volume"]
tol = critical_settings["Error tolerance"]
# Calculate critical variables
try:
T_c, V_c, P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol)
H_c = tp.enthalpy_tv(T_c, V_c, fractions)
S_c = tp.entropy_tv(T_c, V_c, fractions)
except Exception as e:
msg = MessageBox("Error", str(e))
msg.exec_()
T_c, V_c, P_c, H_c, S_c = None, None, None, None, None
# Set global variables, so that they are accessible in all phase envelope plot functions
global isopleth_1_color
global isopleth_2_color
global P_min
global P_max
global T_min
global T_max
global nmax
isopleth_1_color = plot_settings["Colors"][2]
isopleth_2_color = plot_settings["Colors"][3]
P_min = isopleth_settings["Minimum pressure"]
P_max = isopleth_settings["Maximum pressure"]
T_min = isopleth_settings["Minimum temperature"]
T_max = isopleth_settings["Maximum temperature"]
nmax = isopleth_settings["N max"]
# Plot depending on which primary variables are chosen
if prim_vars == "PT":
x, y, crit_x, crit_y = self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions)
elif prim_vars == "PH":
x, y, crit_x, crit_y = self.plot_envelope_PH(tp, P, H, P_c, H_c, fractions)
elif prim_vars == "PS":
x, y, crit_x, crit_y = self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions)
elif prim_vars == "TH":
x, y, crit_x, crit_y = self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions)
elif prim_vars == "TS":
x, y, crit_x, crit_y = self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions)
else:
return
# Plotting
line_color = plot_settings["Colors"][0]
point_color = plot_settings["Colors"][1]
grid_on = plot_settings["Grid on"]
xlabel = plot_settings["x label"]
ylabel = plot_settings["y label"]
title = plot_settings["Title"]
self.axes.plot(x, y, color=line_color, label="Phase envelope")
self.axes.scatter([crit_x], [crit_y], color=point_color, label="Critical point")
self.axes.set_title(title)
self.axes.grid(grid_on)
self.axes.set_xlabel(xlabel)
self.axes.set_ylabel(ylabel)
# Sort entries in the legend
legend = True
if legend:
if n_isopleths > 0:
handles, labels = self.axes.get_legend_handles_labels()
self.axes.legend([handles[3], handles[2], handles[0], handles[1]],
[labels[3], labels[2], labels[0], labels[1]],
loc="best")
else:
self.axes.legend()
self.draw()
def plot_envelope_PT(self, tp, T, P, T_c, P_c, fractions):
"""
Return plot data for a PT phase envelope
:param tp: Thermopack instance
:param T: Temperature values
:param P: Pressure values
:param T_c: Critical temperature
:param P_c: Critical pressure
:param fractions: List of molar fractions
:return: x: x values for plot,
y: y values for plot,
crit_x: x value for critical point,
crit_y: y value for critical point,
"""
# Display correct buttons
self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0)
self.parent().parent().parent().PT_H_btn.setChecked(True)
self.parent().parent().parent().PT_S_btn.setChecked(True)
x = T
y = P
crit_x = T_c
crit_y = P_c
# Isenthalps, isentropes
enthalpies = H_list
entropies = S_list
self.isenthalps = []
self.isentropes = []
for i in range(len(enthalpies)):
t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min,
maximum_pressure=P_max, minimum_temperature=T_min,
maximum_temperature=T_max, nmax=nmax)
if i == 0:
h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label="Isenthalp")
else:
h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color)
self.isenthalps.append(h_line)
t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min,
maximum_pressure=P_max, minimum_temperature=T_min,
maximum_temperature=T_max, nmax=nmax)
if i == 0:
s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label="Isentrope")
else:
s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color)
self.isentropes.append(s_line)
self.isotherms = None
self.isobars = None
return x, y, crit_x, crit_y
def plot_envelope_PH(self, tp, P, H, P_c, H_c, fractions):
"""
Return plot data for a PH phase envelope
:param tp: Thermopack instance
:param P: Pressure values
:param H: Enthalpy values
:param P_c: Critical pressure
:param H_c: Critical enthalpy
:param fractions: List of molar fractions
:return: x: x values for plot,
y: y values for plot,
crit_x: x value for critical point,
crit_y: y value for critical point,
"""
# Display correct buttons
self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1)
self.parent().parent().parent().PH_T_btn.setChecked(True)
self.parent().parent().parent().PH_S_btn.setChecked(True)
x = H
y = P
crit_x = H_c
crit_y = P_c
# isotherms, isentropes
temperatures = T_list
entropies = S_list
self.isotherms = []
self.isentropes = []
for i in range(len(temperatures)):
p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min,
maximum_pressure=P_max, nmax=nmax)
if i == 0:
t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label="Isotherm")
else:
t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color)
self.isotherms.append(t_line)
t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min,
maximum_pressure=P_max, minimum_temperature=T_min,
maximum_temperature=T_max, nmax=nmax)
if i == 0:
s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label="Isentrope")
else:
s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color)
self.isentropes.append(s_line)
self.isenthalps = None
self.isobars = None
return x, y, crit_x, crit_y
def plot_envelope_PS(self, tp, P, S, P_c, S_c, fractions):
"""
Return plot data for a PS phase envelope
:param tp: Thermopack instance
:param P: Pressure values
:param S: Entropy values
:param P_c: Critical pressure
:param S_c: Critical entropy
:param fractions: List of molar fractions
:return: x: x values for plot,
y: y values for plot,
crit_x: x value for critical point,
crit_y: y value for critical point,
"""
# Display correct buttons
self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2)
self.parent().parent().parent().PS_T_btn.setChecked(True)
self.parent().parent().parent().PS_H_btn.setChecked(True)
x = S
y = P
crit_x = S_c
crit_y = P_c
# isotherms, isenthalps
temperatures = T_list
enthalpies = H_list
self.isotherms = []
self.isenthalps = []
for i in range(len(temperatures)):
p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min,
maximum_pressure=P_max, nmax=nmax)
if i == 0:
t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label="Isotherm")
else:
t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color)
self.isotherms.append(t_line)
t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min,
maximum_pressure=P_max, minimum_temperature=T_min,
maximum_temperature=T_max, nmax=nmax)
if i == 0:
h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label="Isenthalp")
else:
h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color)
self.isenthalps.append(h_line)
self.isentropes = None
self.isobars = None
return x, y, crit_x, crit_y
def plot_envelope_TH(self, tp, T, H, T_c, H_c, fractions):
"""
Return plot data for a PS phase envelope
:param tp: Thermopack instance
:param T: Temperature values
:param H: Enthalpy values
:param T_c: Critical temperature
:param H_c: Critical enthalpy
:param fractions: List of molar fractions
:return: x: x values for plot,
y: y values for plot,
crit_x: x value for critical point,
crit_y: y value for critical point,
"""
# Display correct buttons
self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3)
self.parent().parent().parent().TH_S_btn.setChecked(True)
self.parent().parent().parent().TH_P_btn.setChecked(True)
x = H
y = T
crit_x = H_c
crit_y = T_c
# isobars, isentropes
pressures = P_list
entropies = S_list
self.isobars = []
self.isentropes = []
for i in range(len(pressures)):
t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0,
maximum_temperature=500.0, nmax=100)
if i == 0:
p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label="Isobar")
else:
p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color)
self.isobars.append(p_line)
t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min,
maximum_pressure=P_max, minimum_temperature=T_min,
maximum_temperature=T_max, nmax=nmax)
if i == 0:
s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label="Isentrope")
else:
s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color)
self.isentropes.append(s_line)
self.isenthalps = None
self.isotherms = None
return x, y, crit_x, crit_y
def plot_envelope_TS(self, tp, T, S, T_c, S_c, fractions):
"""
Return plot data for a PS phase envelope
:param tp: Thermopack instance
:param T: Temperature values
:param S: Entropy values
:param T_c: Critical temperature
:param S_c: Critical entropy
:param fractions: List of molar fractions
:return: x: x values for plot,
y: y values for plot,
crit_x: x value for critical point,
crit_y: y value for critical point,
"""
# Display correct buttons
self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4)
self.parent().parent().parent().TS_P_btn.setChecked(True)
self.parent().parent().parent().TS_H_btn.setChecked(True)
x = S
y = T
crit_x = S_c
crit_y = T_c
# Isenthalps, isobars
pressures = P_list
enthalpies = H_list
self.isenthalps = []
self.isobars = []
        for i in range(len(pressures)):
            t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min,
                                                           maximum_temperature=T_max)
            if i == 0:
                # Plot isobars against temperature (the y axis of a T-S plot).
                p_line, = self.axes.plot(s_vals, t_vals, color=isopleth_1_color, label="Isobar")
            else:
                p_line, = self.axes.plot(s_vals, t_vals, color=isopleth_1_color)
            self.isobars.append(p_line)
            t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min,
                                                              maximum_pressure=P_max, minimum_temperature=T_min,
                                                              maximum_temperature=T_max, nmax=nmax)
            if i == 0:
                # Isenthalps must likewise be plotted against temperature, not pressure.
                h_line, = self.axes.plot(s_vals, t_vals, color=isopleth_2_color, label="Isenthalp")
            else:
                h_line, = self.axes.plot(s_vals, t_vals, color=isopleth_2_color)
            self.isenthalps.append(h_line)
self.isentropes = None
self.isotherms = None
return x, y, crit_x, crit_y
def plot_binary_pxy(self, tp):
"""
Plots a binary pxy plot
:param tp: Thermopack instance
"""
calc_settings = self.plotting_preferences["Binary pxy"]["Calc"]
plot_settings = self.plotting_preferences["Binary pxy"]["Plotting"]
T = calc_settings["Temperature"]
p_max = calc_settings["Maximum pressure"]
p_min = calc_settings["Minimum pressure"]
dz_max = calc_settings["Maximum dz"]
dlns_max = calc_settings["Maximum dlns"]
LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min,
maximum_dz=dz_max, maximum_dlns=dlns_max)
line_color = plot_settings["Colors"][0]
if LLE[0] is not None:
self.axes.plot(LLE[0], LLE[2], color=line_color)
self.axes.plot(LLE[1], LLE[2], color=line_color)
if L1VE[0] is not None:
self.axes.plot(L1VE[0], L1VE[2], color=line_color)
self.axes.plot(L1VE[1], L1VE[2], color=line_color)
if L2VE[0] is not None:
self.axes.plot(L2VE[0], L2VE[2], color=line_color)
self.axes.plot(L2VE[1], L2VE[2], color=line_color)
grid_on = plot_settings["Grid on"]
title = plot_settings["Title"]
xlabel = plot_settings["x label"]
ylabel = plot_settings["y label"]
self.axes.grid(grid_on)
self.axes.set_title(title)
self.axes.set_xlabel(xlabel)
self.axes.set_ylabel(ylabel)
self.draw()
def plot_pressure_density(self, tp, fractions):
"""
Plots a pressure density plot
:param tp: Thermopack instance
:param fractions: List of molar fractions
"""
calc_settings = self.plotting_preferences["Pressure density"]["Calc"]
tpv_settings = self.plotting_preferences["Pressure density"]["TPV"]
crit_settings = self.plotting_preferences["Pressure density"]["Critical"]
plot_settings = self.plotting_preferences["Pressure density"]["Plotting"]
p_initial = tpv_settings["Initial pressure"]
t_min = tpv_settings["Minimum temperature"]
p_max = tpv_settings["Maximum pressure"]
step_size = tpv_settings["Step size"]
# Calculate T, P, V
T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions,
maximum_pressure=p_max,
minimum_temperature=t_min, step_size=step_size,
calc_v=True)
crit_t_guess = crit_settings["Temperature"]
crit_v_guess = crit_settings["Volume"]
crit_tol = crit_settings["Error tolerance"]
# Calculate critical T, V, P
T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol)
T_list = calc_settings["Temperatures"]
V_start = V_c * calc_settings["Volume range start"]
V_end = V_c * calc_settings["Volume range end"]
V_num_points = calc_settings["Num points"]
V_list = np.linspace(V_start, V_end, V_num_points)
P_lists = []
for T in T_list:
P_list = []
for V in V_list:
P, = tp.pressure_tv(temp=T, volume=V, n=fractions)
P_list.append(P)
P_lists.append(P_list)
rho_list = 1 / V_list
title = plot_settings["Title"]
grid_on = plot_settings["Grid on"]
xlabel = plot_settings["x label"]
ylabel = plot_settings["y label"]
self.axes.plot([1 / v for v in V_ph_env], P_ph_env, label="Phase envelope")
self.axes.scatter([1 / V_c], [P_c], label="Critical point")
for i in range(len(P_lists)):
self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + " K")
self.axes.set_title(title)
self.axes.grid(grid_on)
self.axes.set_xlabel(xlabel)
self.axes.set_ylabel(ylabel)
self.axes.legend(loc="best")
self.draw()
def plot_global_binary(self, tp):
"""
Plots a binary pxy plot
:param tp: Thermopack instance
"""
calc_settings = self.plotting_preferences["Global binary"]["Calc"]
plot_settings = self.plotting_preferences["Global binary"]["Plotting"]
min_press = calc_settings["Minimum pressure"]
min_temp = calc_settings["Minimum temperature"]
azeotropes = calc_settings["Azeotropes"]
KSTYPE, VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp,
include_azeotropes=azeotropes)
colors = plot_settings["Colors"]
linestyles = ["-", "--", ":", "-."]
label = "VLE"
for i in range(len(VLE)):
self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label)
label = None
label = "LLVE"
for i in range(len(LLVE)):
self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label)
label = None
label = "CRIT"
for i in range(len(CRIT)):
self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label)
label = None
label = "AZ"
for i in range(len(AZ)):
self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label)
label = None
ks_strings = {
1: "I",
2: "II",
3: "III",
4: "IV",
5: "V"
}
title = plot_settings["Title"]
xlabel = plot_settings["x label"]
ylabel = plot_settings["y label"]
grid_on = plot_settings["Grid on"]
if title == "<NAME> and Scott type: ":
title += ks_strings[KSTYPE]
self.axes.set_title(title)
legend = self.axes.legend(loc="best", numpoints=1)
legend.get_frame().set_linewidth(0.0)
self.axes.set_xlabel(xlabel)
self.axes.set_ylabel(ylabel)
self.axes.grid(grid_on)
self.draw()
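
# Illustrative usage sketch (assumes a thermopack instance `tp`; note that
# `self.axes` is expected to be attached by the hosting widget before any
# plot_* call):
#   canvas = MplCanvas(components, plotting_preferences)
#   canvas.axes = canvas.fig.add_subplot(111)
#   canvas.plot_envelope(tp, "PT", fractions=[0.5, 0.5])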
| 2.75 | 3 |
christmas_tree..py | SmashedFrenzy16/christmas-tree | 0 | 12798520 | import turtle
s = turtle.Screen()
t = turtle.Turtle()
s.title("Christmas Tree")
s.setup(width=800, height=600)
# Title on the window
pen = turtle.Turtle()
pen.speed(0)
pen.color("black")
pen.penup()
pen.hideturtle()
pen.goto(0, 260)
pen.write("Christmas Tree", align="center",font=("Arial", 24, "normal"))
# Starting position
t.up()
t.rt(90)
t.fd(100)
t.lt(90)
t.down()
# Stump
t.color("brown")
t.begin_fill()
t.fd(40)
t.lt(90)
t.fd(60)
t.lt(90)
t.fd(40)
t.lt(90)
t.fd(60)
t.end_fill()
t.up()
# First triangle
t.lt(180)
t.fd(60)
t.lt(90)
t.fd(20)
t.down()
t.color("green")
t.begin_fill()
t.rt(180)
t.fd(80)
t.lt(120)
t.fd(80)
t.lt(120)
t.fd(80)
t.end_fill()
t.up()
# Second Triangle
t.lt(180)
t.fd(80)
t.lt(120)
t.lt(90)
t.fd(20)
t.rt(90)
t.down()
t.begin_fill()
t.fd(35)
t.rt(120)
t.fd(70)
t.rt(120)
t.fd(70)
t.rt(120)
t.fd(35)
t.end_fill()
t.up()
# Third Triangle
t.fd(35)
t.rt(120)
t.fd(70)
t.lt(120)
t.lt(90)
t.fd(20)
t.rt(90)
t.down()
t.begin_fill()
t.fd(30)
t.rt(120)
t.fd(60)
t.rt(120)
t.fd(60)
t.rt(120)
t.fd(30)
t.end_fill()
t.up()
# Star
t.fd(30)
t.rt(120)
t.fd(60)
t.lt(120)
t.rt(180)
t.lt(90)
t.fd(15)
t.rt(90)
t.back(20)
t.color("yellow")
t.down()
t.begin_fill()
for i in range(5):
t.forward(40)
t.right(144)
t.end_fill()
t.hideturtle()
while True:
s.update()
| 3.6875 | 4 |
pxr/usd/lib/usdGeom/testenv/testUsdGeomMesh.py | YuqiaoZhang/USD | 88 | 12798521 | #!/pxrpythonsubst
#
# Copyright 2017 Pixar
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
from pxr import Usd, UsdGeom, Vt
import unittest
class TestUsdGeomMesh(unittest.TestCase):
def test_ValidateTopology(self):
"""Tests helpers for validating mesh topology."""
# sum(vertexCounts) != len(vertexIndices)
faceVertexIndices = Vt.IntArray([0,1,2])
faceVertexCounts = Vt.IntArray([2,2])
valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices,
faceVertexCounts,
numPoints=3)
self.assertFalse(valid)
# Make sure we have a reason.
self.assertTrue(why)
# Negative vertex indices.
faceVertexIndices = Vt.IntArray([0,-1,1])
faceVertexCounts = Vt.IntArray([3])
valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices,
faceVertexCounts,
numPoints=3)
self.assertFalse(valid)
# Make sure we have a reason.
self.assertTrue(why)
# Out of range vertex indices.
faceVertexIndices = Vt.IntArray([1,2,3])
faceVertexCounts = Vt.IntArray([3])
valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices,
faceVertexCounts,
numPoints=3)
self.assertFalse(valid)
# Make sure we have a reason.
self.assertTrue(why)
# Valid topology.
faceVertexIndices = Vt.IntArray([0,1,2,3,4,5])
faceVertexCounts = Vt.IntArray([3,3])
valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices,
faceVertexCounts,
numPoints=6)
self.assertTrue(valid)
        # Should not have set a reason.
self.assertFalse(why)
if __name__ == '__main__':
unittest.main()
| 2.09375 | 2 |
src/TheLanguage/Grammars/v0_0_1/Expressions/MatchValueExpression.py | davidbrownell/DavidBrownell_TheLanguage | 0 | 12798522 | <reponame>davidbrownell/DavidBrownell_TheLanguage
# ----------------------------------------------------------------------
# |
# | MatchValueExpression.py
# |
# | <NAME> <<EMAIL>>
# | 2021-10-12 10:28:57
# |
# ----------------------------------------------------------------------
# |
# | Copyright <NAME> 2021
# | Distributed under the Boost Software License, Version 1.0. See
# | accompanying file LICENSE_1_0.txt or copy at
# | http://www.boost.org/LICENSE_1_0.txt.
# |
# ----------------------------------------------------------------------
"""Contains the MatchValueExpression object"""
import os
from typing import Callable, Tuple, Union
import CommonEnvironment
from CommonEnvironment import Interface
from CommonEnvironmentEx.Package import InitRelativeImports
# ----------------------------------------------------------------------
_script_fullpath = CommonEnvironment.ThisFullpath()
_script_dir, _script_name = os.path.split(_script_fullpath)
# ----------------------------------------------------------------------
with InitRelativeImports():
from ..Common.Impl.MatchExpressionBase import MatchExpressionBase
from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo
from ....Parser.Expressions.MatchValueExpressionParserInfo import (
MatchValueCasePhraseParserInfo,
MatchValueExpressionParserInfo,
)
# ----------------------------------------------------------------------
class MatchValueExpression(MatchExpressionBase):
"""\
Value-based version of a match expression.
Examples:
str_value = (
match value Add(1, 2):
case 1, 2: "Too low"
case 3: "Correct"
default: "Way off!"
)
"""
PHRASE_NAME = "Match Value Expression"
# ----------------------------------------------------------------------
def __init__(self):
super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME)
# ----------------------------------------------------------------------
@classmethod
@Interface.override
def ExtractParserInfo(
cls,
node: AST.Node,
) -> Union[
None,
ParserInfo,
Callable[[], ParserInfo],
Tuple[ParserInfo, Callable[[], ParserInfo]],
]:
return cls._ExtractParserInfoImpl(
MatchValueExpressionParserInfo,
MatchValueCasePhraseParserInfo,
node,
)
| 2.0625 | 2 |
features/support/actions.py | alexgarzao/beeweb | 5 | 12798523 | from parse import parse
class Actions:
def __init__(self):
self.actions = {}
self.unused = set()
self.used = set()
# TODO: Refactor: Deveria ter classe Action, e ela deveria ser retornada nesta funcao.
def add_action(self, action_name):
action_name = action_name.lower()
if self.actions.get(action_name) is not None:
raise DuplicatedActionException("Action {} already exists".format(action_name))
self.actions[action_name] = []
self.unused.add(action_name)
def add_event(self, action_name, event):
action_name = action_name.lower()
events = self.actions.get(action_name)
if events is None:
possible = ','.join(list(self.actions))
raise UndefinedActionException("Undefined action {}. Possible values: {}".format(action_name, possible))
events.append(event)
def get_action(self, action_name):
action_name = action_name.lower()
return self.actions.get(action_name)
def get_steps_to_execute(self, action_name):
events, parameters = self.__match_action(action_name)
if events is None:
possible = ','.join(list(self.actions))
raise UndefinedActionException("Undefined action {}. Possible values: {}".format(action_name, possible))
assert events is not None
steps_to_execute = ''
for event in events:
step_event = self.__replace_parameters(event, parameters)
steps_to_execute += step_event + '\n'
return steps_to_execute
def get_unused_actions(self):
unused_actions = list(self.unused)
return unused_actions
def was_used(self, action_name):
return action_name in self.used
def __match_action(self, action_name):
for action_type in self.actions.keys():
r = parse(action_type, action_name)
if r:
self.unused.discard(action_type)
self.used.add(action_type)
return self.actions[action_type], r.named
return None, None
def __replace_parameters(self, step, parameters):
for parameter, value in parameters.items():
token_to_find = "{" + parameter + "}"
step = step.replace(token_to_find, value)
return step
class DuplicatedActionException(Exception):
pass
class UndefinedActionException(Exception):
pass
| 3.21875 | 3 |
ws/handler/event/enum/holiday/christmas.py | fabaff/automate-ws | 0 | 12798524 | import home
from ws.handler.event.enum import Handler as Parent
class Handler(Parent):
KLASS = home.event.holiday.christmas.Event
TEMPLATE = "event/enum.html"
LABEL = "Christmas"
DAY = "day"
EVE = "eve"
TIME = "time"
OVER = "is over"
def _get_str(self, e):
if e == home.event.holiday.christmas.Event.Day:
return self.DAY
elif e == home.event.holiday.christmas.Event.Eve:
return self.EVE
elif e == home.event.holiday.christmas.Event.Time:
return self.TIME
elif e == home.event.holiday.christmas.Event.Over:
return self.OVER
return e
def get_icon(self, e):
if e == home.event.holiday.christmas.Event.Day:
return "fas fa-gifts"
elif e == home.event.holiday.christmas.Event.Eve:
return "fas fa-holly-berry"
elif e == home.event.holiday.christmas.Event.Time:
return "far fa-calendar-check"
elif e == home.event.holiday.christmas.Event.Over:
return "far fa-calendar-times"
return e
| 2.421875 | 2 |
ad-hoc/p11496.py | sajjadt/competitive-programming | 10 | 12798525 | <filename>ad-hoc/p11496.py
from sys import stdin, stdout
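# Counts the local extrema (peaks and valleys) of each sequence, treating it
# as circular: the first and last samples are compared against their
# wrap-around neighbours below.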
while True:
n = int(input())
if n == 0:
break
line = list(map(int, stdin.readline().strip().split()))
local_extremas = 0
for i in range(1, len(line) - 1):
if line[i] > line[i-1] and line[i] > line[i+1]:
local_extremas += 1
if line[i] < line[i-1] and line[i] < line[i+1]:
local_extremas += 1
if line[0] > line[-1] and line[0] > line[1]:
local_extremas += 1
if line[0] < line[-1] and line[0] < line[1]:
local_extremas += 1
if line[0] > line[-1] and line[-2] > line[-1]:
local_extremas += 1
if line[0] < line[-1] and line[-2] < line[-1]:
local_extremas += 1
print(local_extremas)
| 3.1875 | 3 |
test.py | andrey1908/hero_radar_odometry | 0 | 12798526 | import argparse
import json
from time import time
import os
import shutil
import numpy as np
import torch
from datasets.oxford import get_dataloaders
from datasets.boreas import get_dataloaders_boreas
from datasets.radiate import get_dataloaders_radiate
from networks.under_the_radar import UnderTheRadar
from networks.hero import HERO
from utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError
from utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \
draw_weights, draw_keypoints, draw_src_tgt_matches
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.enabled = True
torch.backends.cudnn.deterministic = True
def build_parser():
parser = argparse.ArgumentParser()
parser.add_argument('--config', type=str, required=True)
parser.add_argument('--checkpoint', type=str, required=True)
parser.add_argument('-no-vis', '--no-visualization', action='store_true')
parser.add_argument('-out-fld', '--out-folder', type=str, required=True)
return parser
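
# Illustrative invocation (paths are hypothetical):
#   python test.py --config config/boreas.json \
#       --checkpoint checkpoints/hero.pt --out-folder results/ -no-vis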
def makedirs_for_visualization(out_folder):
os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True)
os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True)
def visualize(batchi, batch, out, config, out_folder):
radar_img = draw_radar(batch, i=1)
radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1)))
mask_img = draw_mask(batch, i=1)
mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1)))
masked_radar_img = draw_masked_radar(batch, i=1)
masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1)))
detector_scores_img = draw_detector_scores(out, i=1)
detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1)))
weights_img = draw_weights(out, i=1)
weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1)))
keypoints_img = draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20)
keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1)))
keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1, filtering='mask')
keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1)))
keypoints_all_img = draw_keypoints(batch, out, config, i=1, filtering='none')
keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1)))
keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20)
keypoints_on_detector_scores_img.save(os.path.join(out_folder,
'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1)))
keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask')
keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder,
'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1)))
keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='none')
keypoints_on_detector_scores_all_img.save(os.path.join(out_folder,
'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1)))
src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20)
src_tgt_matches_img.save(os.path.join(out_folder,
'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi)))
src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask')
src_tgt_matches_only_masked_img.save(os.path.join(out_folder,
'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi)))
src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config, filtering='none')
src_tgt_matches_all_img.save(os.path.join(out_folder,
'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi)))
src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20)
src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder,
'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi)))
src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask')
src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder,
'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi)))
src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none')
src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder,
'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi)))
def print_used_time(model):
print("Time used:")
print(" All: {} s".format(np.mean(model.time_used['all'])))
print(" Feature map extraction: {} s".format(np.mean(model.time_used['feature_map_extraction'])))
print(" Keypoint extraction: {} s".format(np.mean(model.time_used['keypoint_extraction'])))
print(" Keypoint matching: {} s".format(np.mean(model.time_used['keypoint_matching'])))
print(" Optimization: {} s".format(np.mean(model.time_used['optimization'])))
if __name__ == '__main__':
torch.set_num_threads(8)
parser = build_parser()
args = parser.parse_args()
out_folder = args.out_folder
with_visualization = not args.no_visualization
os.makedirs(out_folder, exist_ok=True)
with open(args.config) as f:
config = json.load(f)
config_copy = os.path.join(out_folder, os.path.basename(args.config))
if args.config != config_copy:
shutil.copy(args.config, config_copy)
if config['model'] == 'UnderTheRadar':
model = UnderTheRadar(config).to(config['gpuid'])
elif config['model'] == 'HERO':
model = HERO(config).to(config['gpuid'])
model.solver.sliding_flag = False
checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid']))
failed = False
try:
model.load_state_dict(checkpoint['model_state_dict'], strict=False)
except Exception as e:
print(e)
failed = True
if failed:
model.load_state_dict(checkpoint, strict=False)
model.eval()
model.no_throw = True
seq_name_all = list()
time_used_all = list()
T_gt_all = list()
T_pred_all = list()
t_err_all = list()
r_err_all = list()
seq_nums = config['test_split']
for seq_num in seq_nums:
config['test_split'] = [seq_num]
if config['dataset'] == 'oxford':
_, _, test_loader = get_dataloaders(config)
elif config['dataset'] == 'boreas':
_, _, test_loader = get_dataloaders_boreas(config)
elif config['dataset'] == 'radiate':
_, _, test_loader = get_dataloaders_radiate(config)
seq_len = test_loader.dataset.seq_lens[0]
seq_name = test_loader.dataset.sequences[0]
time_used = list()
T_gt = list()
T_pred = list()
print('Evaluating sequence {} (len {}): {}'.format(seq_num, seq_len, seq_name))
if with_visualization:
out_vis_folder = os.path.join(out_folder, seq_name)
makedirs_for_visualization(out_vis_folder)
model.solver.solver_cpp.resetTraj()
for batchi, batch in enumerate(test_loader):
ts = time()
with torch.no_grad():
out = model(batch)
if out['exception'] is not None:
fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi))
os.makedirs(fail_folder, exist_ok=True)
makedirs_for_visualization(fail_folder)
visualize(batchi, batch, out, config, fail_folder)
print_used_time(model)
raise out['exception']
if with_visualization and batchi % config['vis_rate'] == 0:
visualize(batchi, batch, out, config, out_vis_folder)
if config['model'] == 'UnderTheRadar':
if 'T_21' in batch:
T_gt.append(batch['T_21'][0].numpy().squeeze())
R_pred = out['R'][0].detach().cpu().numpy().squeeze()
t_pred = out['t'][0].detach().cpu().numpy().squeeze()
T_pred.append(get_transform2(R_pred, t_pred))
elif config['model'] == 'HERO':
if batchi == len(test_loader) - 1:
for w in range(config['window_size'] - 1):
if 'T_21' in batch:
T_gt.append(batch['T_21'][w].numpy().squeeze())
T_pred.append(get_T_ba(out, a=w, b=w+1))
else:
w = 0
if 'T_21' in batch:
T_gt.append(batch['T_21'][w].numpy().squeeze())
T_pred.append(get_T_ba(out, a=w, b=w+1))
time_used.append(time() - ts)
if (batchi + 1) % config['print_rate'] == 0:
print('Eval Batch {} / {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:])))
time_used_all.extend(time_used)
if len(T_gt) > 0:
seq_name_all.append(seq_name)
T_gt_all.extend(T_gt)
T_pred_all.extend(T_pred)
t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)])
print('SEQ: {} : {}'.format(seq_num, seq_name))
print('KITTI t_err: {} %'.format(t_err))
print('KITTI r_err: {} deg/m'.format(r_err))
t_err_all.append(t_err)
r_err_all.append(r_err)
fname = os.path.join(out_folder, seq_name + '.png')
if len(T_gt) > 0:
plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname])
else:
plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname])
print('time_used: {}'.format(sum(time_used_all) / len(time_used_all)))
if len(T_gt_all) > 0:
results = computeMedianError(T_gt_all, T_pred_all)
print('dt: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0], results[1], results[2], results[3]))
t_err_mean = np.mean(t_err_all)
r_err_mean = np.mean(r_err_all)
print('Average KITTI metrics over all test sequences:')
print('KITTI t_err: {} %'.format(t_err_mean))
print('KITTI r_err: {} deg/m'.format(r_err_mean))
with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f:
f.write('sequence name: translation error (%) rotation error (deg/m)\n')
for seq_name, t_err, r_err in zip(seq_name_all, t_err_all, r_err_all):
line = '{}: {} {}\n'.format(seq_name, t_err, r_err)
f.write(line)
f.write("\n")
f.write("mean: {} {}\n".format(t_err_mean, r_err_mean))
print_used_time(model)
| 1.9375 | 2 |
Codebase/Deprecated_Codebase_I/circular_magnetic_field.py | psmd-iberutaru/Akamai_Internship | 0 | 12798527 | <reponame>psmd-iberutaru/Akamai_Internship
import inspect
import numpy as np
import scipy as sp
import scipy.special as sp_spcl
import matplotlib.pyplot as plt
from Robustness.exception import *
import Robustness.validation as valid
import gaussian_fitting as gaussfit
import bessel_fitting as bessfit
import misc_functions as misc
def circular_magnetic_field_cyln(r, phi, z, propagation_function):
"""
This makes circular magnetic fields, in a way, fields without any
divergence. However, this measures not the tangential vector, but rotational
vector.
"""
# Type check
r = valid.validate_float_array(r, deep_validate=True, greater_than=0)
phi = valid.validate_float_array(phi, deep_validate=True,
greater_than=0, less_than=2*np.pi)
z = valid.validate_float_array(z)
    # The field is invariant in phi and z, so only the r value matters in
    # this function.
B_r = np.zeros_like(r)
B_phi = propagation_function(r)
B_z = np.zeros_like(r)
# Return
return B_r, B_phi, B_z
def circular_magnetic_field_cart(x, y, propagation_function,
tangential_axis='z'):
"""
This makes circular magnetic fields, in a way, fields without any
divergence. However, this measures not the rotational vector, but tangential
vector.
The tangential axis is the axis of which is the axis of rotation for the
field. Assume that the positive direction is pointing to the user.
"""
# Type check
x = valid.validate_float_array(x)
y = valid.validate_float_array(y)
# Convert to a polar system for tangential vector.
r_subaxis = np.hypot(x,y)
phi_subaxis = np.arctan2(y,x)
# Calculate the magnitude of the tangential vector.
B_t = propagation_function(r_subaxis)
    # The vector is tangent to the circle of radius r, so its direction is
    # phi rotated by a quarter turn.
B_angle = phi_subaxis + np.pi/2
# Calculate the components of the magnetic field vector based on the
# magnitude and the angle.
B_x = B_t * np.cos(B_angle)
B_y = B_t * np.sin(B_angle)
# Return
return B_x, B_y | 2.53125 | 3 |
server/attendance/admin.py | CS305-software-Engineering/vehicle-attendance-system | 1 | 12798528 | from django.contrib import admin
from .models import *
# Register your models here.
admin.site.register(Vehicle)
admin.site.register(VehicleLogging)
admin.site.register(RegisteredUserLogging)
admin.site.register(VisitorUserLogging) | 1.375 | 1 |
expfactory_deploy/experiments/migrations/0013_auto_20211119_2336.py | rwblair/expfactory-deploy | 0 | 12798529 | # Generated by Django 3.1.7 on 2021-11-19 23:36
from django.db import migrations
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('experiments', '0012_repoorigin_name'),
]
operations = [
migrations.AddField(
model_name='battery',
name='status',
field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True),
),
migrations.AlterField(
model_name='assignment',
name='status',
field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True),
),
migrations.AlterField(
model_name='result',
name='status',
field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True),
),
]
| 1.648438 | 2 |
setup.py | TheRockXu/aifin | 3 | 12798530 | #!/usr/bin/env python
from distutils.core import setup
setup(name='aifin',
version='1.0.1',
description='Python Distribution Utilities',
author='<NAME>',
author_email='<EMAIL>',
url='aitroopers.com',
packages=['aifin'],
install_requires=[
'pandas','scipy'
]
)
| 1.226563 | 1 |
src/quick_sort.py | sean-bai/sort | 0 | 12798531 | from typing import List, NoReturn
def quick_sort(in_list: List[int], s_idx: int, e_idx: int) -> NoReturn:
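    # In-place quicksort: the middle element is chosen as pivot, swapped to
    # the front, and the rest of the slice is partitioned around its value.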
if e_idx > s_idx:
first_idx = s_idx
last_idx = e_idx
base_idx = int((s_idx + e_idx)/2)
base_val = in_list[base_idx]
in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx]
s_idx += 1
while e_idx > s_idx:
if in_list[s_idx] <= base_val:
s_idx += 1
continue
if in_list[e_idx] > base_val:
e_idx -= 1
continue
in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx]
s_idx += 1
e_idx -= 1
if in_list[s_idx] <= base_val:
final_idx = s_idx
else:
final_idx = s_idx - 1
in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx]
quick_sort(in_list, first_idx, final_idx-1)
quick_sort(in_list, final_idx+1, last_idx)
if __name__ == "__main__":
in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33]
quick_sort(in_list, 0, len(in_list)-1)
assert in_list == [2, 3, 12, 12, 15, 23, 33, 54, 73, 87, 89, 94, 162]
print("Test succeeded.")
| 3.515625 | 4 |
transmogrify/network.py | natgeosociety/Transmogrify | 0 | 12798532 | <reponame>natgeosociety/Transmogrify
import os
import urlparse
class Http404(Exception):
pass
def get_path(environ):
"""
Get the path
"""
from wsgiref import util
request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', ''))
if request_uri == '':
uri = util.request_uri(environ)
host = environ.get('HTTP_HOST', '')
scheme = util.guess_scheme(environ)
prefix = "{scheme}://{host}".format(scheme=scheme, host=host)
request_uri = uri.replace(prefix, '')
return request_uri
def handle_purge(environ, start_response):
"""
Handle a PURGE request.
"""
from utils import is_valid_security, get_cached_files
from settings import DEBUG
server = environ['SERVER_NAME']
try:
request_uri = get_path(environ)
path_and_query = request_uri.lstrip("/")
query_string = environ.get('QUERY_STRING', '')
if is_valid_security('PURGE', query_string):
cached_files = get_cached_files(path_and_query, server)
for i in cached_files:
try:
os.remove(i)
except OSError as e:
return do_500(environ, start_response, e.message)
start_response("204 No Content", [])
return []
else:
return do_405(environ, start_response)
except Http404 as e:
return do_404(environ, start_response, e.message, DEBUG)
def do_redirect(environ, start_response, path):
# if get_path(environ) == path:
# return do_500(environ, start_response, 'Redirect Loop Detected')
start_response("302 Found", [("Location", path)])
return []
def do_500(environ, start_response, message):
resp = {
'message': message,
'status_code': 500,
'status_message': 'Internal Server Error',
}
start_response("500 Internal Server Error", [("Content-Type", "text/html")])
return [ERROR.format(**resp)]
def do_405(environ, start_response):
resp = {
'message': "Method not allowed",
'status_code': 405,
'status_message': 'Method Not Allowed',
}
start_response("405 Method Not Allowed", [("Content-Type", "text/html")])
return [ERROR.format(**resp)]
def do_404(environ, start_response, why, debug):
if debug:
message = "<h2>%s</h2>" % why
else:
message = "File not found"
resp = {
'message': message,
'status_code': 404,
'status_message': 'Not Found',
}
start_response("404 Not Found", [("Content-Type", "text/html")])
return [ERROR.format(**resp)]
ERROR = """
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>{status_code} - {status_message}</title>
</head>
<body>
<h1>{status_code} - {status_message}</h1>{message}
</body>
</html>
"""
class DemoApp(object):
def __init__(self, fallback):
from static import Cling
from settings import BASE_PATH
self.app = Cling(BASE_PATH)
self.fallback = fallback
def __call__(self, environ, start_response):
import wsgiref
response = {}
def sr(status, headers):
response['status'] = status
response['headers'] = headers
result = self.app(environ, sr)
if response['status'] == '404 Not Found':
request_uri = wsgiref.util.request_uri(environ)
p = urlparse.urlparse(request_uri)
if p.query:
request_uri = p.path + "?" + p.query
else:
request_uri = p.path
environ['REQUEST_URI'] = request_uri
return self.fallback(environ, start_response)
elif response['status'] == '405 Method Not Allowed':
request_uri = wsgiref.util.request_uri(environ)
p = urlparse.urlparse(request_uri)
if p.query:
request_uri = p.path + "?" + p.query
else:
request_uri = p.path
environ['REQUEST_URI'] = request_uri
return self.fallback(environ, start_response)
else:
start_response(response['status'], response['headers'])
return result
| 2.578125 | 3 |
Coordinates/coordinates.py | FelipeLSP/Python | 0 | 12798533 | <gh_stars>0
import math
def centroid(listCoordinates):
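    # The centroid is the arithmetic mean of the x and y coordinates.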
xCentroid = 0
yCentroid = 0
listCentroid=[]
for i in range(0, len(listCoordinates)):
xCentroid += listCoordinates[i][0]
yCentroid += listCoordinates[i][1]
xCentroid = round(xCentroid / len(listCoordinates), 1)
yCentroid = round(yCentroid / len(listCoordinates), 1)
listCentroid.append(xCentroid)
listCentroid.append(yCentroid)
return listCentroid
def coordinatesDistance(listCoordinates, centroid):
    listDistance = []
    for i in range(0, len(listCoordinates)):
        distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) + ((listCoordinates[i][1] - centroid[1]) ** 2))
        listDistance.append(distance)
    # Track the running minimum/maximum distance; starting from the first
    # point guarantees a result even when it is the extremum itself.
    smallest = listDistance[0]
    listSmaller = listCoordinates[0]
    for i in range(1, len(listDistance)):
        if listDistance[i] < smallest:
            smallest = listDistance[i]
            listSmaller = listCoordinates[i]
    biggest = listDistance[0]
    listBigger = listCoordinates[0]
    for i in range(1, len(listDistance)):
        if listDistance[i] > biggest:
            biggest = listDistance[i]
            listBigger = listCoordinates[i]
    return listSmaller, listBigger
#--------------------------Program Body--------------------------------------------------------------
listCoordinates = []
name = input("Type a coordinate: ")
partsA = name.split()
if name == "":
print("No points read. So there is no centroid!!!")
exit()
for ind in range(0,2):
partsA[ind] = int(partsA[ind])
listCoordinates.append(partsA)
while True:
name = input("Type a coordinate: ")
if name == "":
break
else:
partsB = name.split()
for ind in range(0,2):
partsB[ind] = int(partsB[ind])
listCoordinates.append(partsB)
listCentroid = centroid(listCoordinates)
listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid)
print("Centroid: ", listCentroid)
print ("Closest point to the Centroid: ", listSmaller)
print ("Farthest point from the Centroid: ", listBigger) | 3.1875 | 3 |
appengine/components/tests/auth_endpoints_smoke_test.py | pombreda/swarming | 0 | 12798534 | #!/usr/bin/env python
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
"""Smoke test for Cloud Endpoints support in auth component.
It launches app via dev_appserver and queries a bunch of cloud endpoints
methods.
"""
import unittest
import os
import test_env
test_env.setup_test_env()
from support import local_app
# /components/tests/.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
# /components/tests/endpoints_app/.
TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app')
class CloudEndpointsSmokeTest(unittest.TestCase):
def setUp(self):
super(CloudEndpointsSmokeTest, self).setUp()
self.app = local_app.LocalApplication(TEST_APP_DIR, 9700)
self.app.start()
self.app.ensure_serving()
def tearDown(self):
try:
self.app.stop()
if self.has_failed():
self.app.dump_log()
finally:
super(CloudEndpointsSmokeTest, self).tearDown()
def has_failed(self):
# pylint: disable=E1101
return not self._resultForDoCleanups.wasSuccessful()
def test_smoke(self):
self.check_who_anonymous()
self.check_who_authenticated()
self.check_host_token()
self.check_forbidden()
def check_who_anonymous(self):
response = self.app.client.json_request('/_ah/api/testing_service/v1/who')
self.assertEqual(200, response.http_code)
self.assertEqual('anonymous:anonymous', response.body.get('identity'))
self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1'))
def check_who_authenticated(self):
# TODO(vadimsh): Testing this requires interacting with real OAuth2 service
# to get OAuth2 token. It's doable, but the service account secrets had to
# be hardcoded into the source code. I'm not sure it's a good idea.
pass
def check_forbidden(self):
response = self.app.client.json_request(
'/_ah/api/testing_service/v1/forbidden')
self.assertEqual(403, response.http_code)
expected = {
u'error': {
u'code': 403,
u'errors': [
{
u'domain': u'global',
u'message': u'Forbidden',
u'reason': u'forbidden',
}
],
u'message': u'Forbidden',
},
}
self.assertEqual(expected, response.body)
def check_host_token(self):
# Create token first.
response = self.app.client.json_request(
'/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'})
self.assertEqual(200, response.http_code)
token = response.body.get('host_token')
self.assertTrue(token)
# Verify it is usable.
response = self.app.client.json_request(
'/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token})
self.assertEqual(200, response.http_code)
self.assertEqual('host-name', response.body.get('host'))
if __name__ == '__main__':
unittest.main()
| 2.09375 | 2 |
tp2/src/ASA.py | Qjao02/compiladores | 0 | 12798535 | <gh_stars>0
from Token import Token
class AST(object):
def __init__(self, nome):
self.nome = nome;
self.children = []
        self.tipo = None  # node type: Compound, Assign, ArithOp, etc.
self.value = None
def __str__(self, level=0):
ret = "\t"*level+ repr(self) +"\n"
for child in self.children:
if (child != None):
ret += child.__str__(level+1) #level+1
return ret
def __repr__(self):
return self.nome
def __evaluate__(self):
for child in self.children:
if (child != None):
return child.__evaluate__()
class Compound(AST):
"""Represents a 'BEGIN ... END' block"""
def __init__(self):
AST.__init__(self,'Block')
print('Criando um nó do tipo Block.')
#self.children = []
def __repr__(self):
return self.nome
class Assign(AST):
def __init__(self, left, op, right):
AST.__init__(self,'Assign');
print('Criando um nó do tipo Assign.')
if(not(left is None)):
self.children.append(left)
if(not(right is None)):
self.children.append(right)
self.left = left
self.token = self.op = op
self.right = right
def __repr__(self):
return self.nome
class If(AST):
def __init__(self, exp, c_true, c_false):
AST.__init__(self, 'If')
print('Criando um nó do tipo If.')
if(not(exp is None)):
self.children.append(exp)
if(not(c_true is None)):
self.children.append(c_true)
if(not(c_false is None)):
self.children.append(c_false)
self.exp = exp;
self.c_true = c_true;
self.c_false = c_false;
def __repr__(self):
return self.nome
class While(AST):
def __init__(self, exp, commands):
AST.__init__(self,'While')
print('Criando um nó do tipo While.')
if(not(exp is None)):
self.children.append(exp)
if(not (commands is None)):
self.children.append(commands)
self.exp = exp;
self.commands = commands;
def __repr__(self):
return self.nome
class For(AST):
def __init__(self, attr, exp, attr2, commands):
AST.__init__(self,'For')
print('Criando um nó do tipo For.')
if (not(attr is None)):
self.children.append(attr)
self.attr = attr
if(not(exp is None)):
self.children.append(exp)
self.exp = exp;
if (not(attr is None)):
self.children.append(attr2)
self.attr2 = attr2
if(not (commands is None)):
self.children.append(commands)
self.commands = commands
def __repr__(self):
return self.nome
class Read(AST):
def __init__(self, id_):
AST.__init__(self,'Read')
print('Criando um nó do tipo Read.')
if(not(id_ is None)):
self.children.append(id_)
self.id = id_;
def __repr__(self):
return self.nome
class Print(AST):
def __init__(self, exp):
AST.__init__(self,'Print')
print('Criando um nó do tipo Print.')
if(not(exp is None)):
self.children.append(exp)
self.exp = exp;
def __repr__(self):
return self.nome
class Expr(AST):
def __init__(self, nome, op, left, right):
AST.__init__(self,nome)
if(not(left is None)):
self.children.append(left)
if(not(right is None)):
self.children.append(right)
self.left = left
self.op = op
self.right = right
def __repr__(self):
#self.left.repr();
return self.op
class LogicalOp(Expr):
def __init__(self, op, left, right):
Expr.__init__(self,'LogicalOp', op, left, right)
print('Criando um nó do tipo LogicalOp com operador ' + str(op))
class ArithOp(Expr):
def __init__(self, op, left, right):
Expr.__init__(self,'ArithOp', op, left, right)
print('Criando um nó do tipo ArithOp com operador ' + str(op))
class RelOp(Expr):
def __init__(self, left, op, right):
Expr.__init__(self,'RelOp', op, left, right)
print('Criando um nó do tipo RelOp com operador ' + str(op))
class Id(AST):
"""The Var node is constructed out of ID token."""
def __init__(self, token):
AST.__init__(self,'Id')
print('Criando um nó do tipo Id.')
#self.children.append(token)
self.token = token
self.value = token.value
def __repr__(self):
return repr(self.token.getLexema())
def __evaluate__(self):
return self.value
class Num(AST):
def __init__(self, token):
AST.__init__(self,'Num')
print('Criando um nó do tipo Num.')
#self.children.append(token)
self.token = token
        self.value = token.value  # in Python we need not worry about value's type
def __repr__(self):
return repr(self.token.getLexema())
def __evaluate__(self):
return self.value
def print_tree(current_node, indent="", last='updown'):
nb_children = lambda node: sum(nb_children(child) for child in node.children) + 1
size_branch = {child: nb_children(child) for child in current_node.children}
""" Creation of balanced lists for "up" branch and "down" branch. """
up = sorted(current_node.children, key=lambda node: nb_children(node))
down = []
while up and sum(size_branch[node] for node in down) < sum(size_branch[node] for node in up):
down.append(up.pop())
""" Printing of "up" branch. """
for child in up:
        next_last = 'up' if up.index(child) == 0 else ''
next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in last else '│', " " * len(current_node.__repr__()))
print_tree(child, indent=next_indent, last=next_last)
""" Printing of current node. """
if last == 'up': start_shape = '┌'
elif last == 'down': start_shape = '└'
elif last == 'updown': start_shape = ' '
else: start_shape = '├'
if up: end_shape = '┤'
elif down: end_shape = '┐'
else: end_shape = ''
print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape))
""" Printing of "down" branch. """
for child in down:
        next_last = 'down' if down.index(child) == len(down) - 1 else ''
next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in last else '│', " " * len(current_node.__repr__()))
print_tree(child, indent=next_indent, last=next_last)
class ToXML:
@staticmethod
def toXML(no):
count = 1
        arvoreToXML = open('../../tp2/output/arvoreToXML.txt', 'w')
arvoreToXML.write('<' + no.nome + '>\r\n')
        for child in no.children:
            for i in range(0, count):
                arvoreToXML.write('\t')
            if child.nome == 'Id' or child.nome == 'Num':
                # Leaf nodes are written as self-closing tags.
                arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\r\n')
                continue
            arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\r\n')
            ToXML.deepSearch(child, count, arvoreToXML)
            for i in range(0, count):
                arvoreToXML.write('\t')
            arvoreToXML.write('</' + child.nome + '>\r\n')
arvoreToXML.write('</' + no.nome + '>\r\n')
@staticmethod
def deepSearch( no, count,arvoreToXML):
count = count + 1
        for child in no.children:
            for i in range(0, count):
                arvoreToXML.write('\t')
            if child.nome == 'Id' or child.nome == 'Num':
                arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\r\n')
                continue
            arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\r\n')
            ToXML.deepSearch(child, count, arvoreToXML)
            for i in range(0, count):
                arvoreToXML.write('\t')
            arvoreToXML.write('</' + child.nome + '>\r\n')
@staticmethod
def classifierPrint(no):
if(no.nome == 'Id'):
return ' lexema=\'' + no.token.getLexema() + '\''
        elif(no.nome == 'Num'):
            return ' value=\'' + no.token.getLexema() + '\' type=\'' + str(no.value) + '\''
elif(no.nome == 'ArithOp' or no.nome == 'RelOp' or no.nome == 'LogicalOp'):
return ' op=\'' + no.op + '\''
else:
return ''
| 3.359375 | 3 |
Day41-55/code/oa/hrs/migrations/0002_auto_20180523_0923.py | xie186/Python-100-Days-EN | 6 | 12798536 | # Generated by Django 2.0.5 on 2018-05-23 01:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hrs', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='dept',
name='excellent',
field=models.BooleanField(default=0, verbose_name='是否优秀'),
),
migrations.AlterField(
model_name='dept',
name='location',
field=models.CharField(max_length=10, verbose_name='部门所在地'),
),
migrations.AlterField(
model_name='dept',
name='name',
field=models.CharField(max_length=20, verbose_name='部门名称'),
),
migrations.AlterField(
model_name='dept',
name='no',
field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'),
),
migrations.AlterField(
model_name='emp',
name='comm',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True),
),
migrations.AlterField(
model_name='emp',
name='mgr',
field=models.IntegerField(blank=True, null=True),
),
]
| 1.648438 | 2 |
scripts/mechanics/ammoFind.py | TheNewGuy100/PyxelGameProject | 0 | 12798537 | <gh_stars>0
import random
import pyxel
class ammoSpawner():
probability = 0
ammo_package_list = []
ammo_package_x = 0
ammo_package_y = 0
ammo_package_img = 0
ammo_package_u = 48
ammo_package_v = 0
ammo_package_w = 16
ammo_package_h = 16
ammo_package_color_exclusion = 0
def spawnAmmoInMap(self):
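        # Every 500 frames there is a 50% chance to spawn an ammo box just
        # past the right edge of the playable area at a random height.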
if pyxel.frame_count % 500 == 0 and pyxel.frame_count != 0:
if random.randint(0,1) == 1:
self.ammo_package_list.append({
"x": self.APP_X_MAX_PLAYABLE_AREA + 10,
"y": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA),
"img": self.ammo_package_img,
"u": self.ammo_package_u,
"v": self.ammo_package_v,
"w": self.ammo_package_w,
"h": self.ammo_package_h,
"colkey": self.ammo_package_color_exclusion
})
    def drawAmmoBox(self):
        # Iterate over a shallow copy so boxes can be removed mid-loop
        # without skipping the next entry.
        for ammo_box in list(self.ammo_package_list):
            pyxel.blt(
                ammo_box["x"],
                ammo_box["y"],
                ammo_box["img"],
                ammo_box["u"],
                ammo_box["v"],
                ammo_box["w"],
                ammo_box["h"],
                ammo_box["colkey"]
            )
            ammo_box['x'] -= 1
            if abs(self.player_x - ammo_box['x']) <= 16 and abs(self.player_y - ammo_box['y']) <= 16:
                self.player_ammo += 40
                self.ammo_package_list.remove(ammo_box)
                pyxel.play(0, 4)
            elif ammo_box['x'] < self.APP_X_MIN_PLAYABLE_AREA:
                self.ammo_package_list.remove(ammo_box)
| 2.65625 | 3 |
web3auth/backend.py | sneeu/django-web3-auth | 0 | 12798538 | <reponame>sneeu/django-web3-auth
from typing import Optional
from django.contrib.auth import get_user_model, backends
from django.conf import settings
from web3auth.utils import recover_to_addr
User = get_user_model()
DEFAULT_ADDRESS_FIELD = 'username'
class Web3Backend(backends.ModelBackend):
def authenticate(
self,
request,
address,
token,
signature
) -> Optional[User]:
# check if the address the user has provided matches the signature
if address != recover_to_addr(token, signature):
raise ValueError('Wallet address does not match signature')
else:
# get address field for the user model
address_field = getattr(
settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD)
kwargs = {
f"{address_field}__iexact": address
}
# try to get user with provided data
user = User.objects.filter(**kwargs).first()
if user is None:
# create the user if it does not exist
user = User(**{address_field: address})
fields = [field.name for field in User._meta.fields]
if (
address_field != DEFAULT_ADDRESS_FIELD
and 'username' in fields
):
user.username = user.generate_username()
user.save()
return user
| 2.421875 | 2 |
tools/telemetry/telemetry/core/timeline/event.py | nagineni/chromium-crosswalk | 2 | 12798539 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class TimelineEvent(object):
"""Represents a timeline event."""
def __init__(self, category, name, start, duration, args=None):
self.category = category
self.name = name
self.start = start
self.duration = duration
self.args = args
@property
def end(self):
return self.start + self.duration
def __repr__(self):
if self.args:
args_str = ', ' + repr(self.args)
else:
args_str = ''
return "TimelineEvent(name='%s', start=%f, duration=%s%s)" % (
self.name,
self.start,
self.duration,
args_str)
| 2.828125 | 3 |
batchglm/models/glm_norm/utils.py | le-ander/batchglm | 0 | 12798540 | import logging
import numpy as np
import scipy.sparse
from typing import Union
from .external import closedform_glm_mean, closedform_glm_scale
logger = logging.getLogger("batchglm")
def closedform_norm_glm_mean(
x: Union[np.ndarray, scipy.sparse.csr_matrix],
design_loc: np.ndarray,
constraints_loc,
size_factors=None,
link_fn=lambda x: x,
inv_link_fn=lambda x: x
):
r"""
Calculates a closed-form solution for the `mean` parameters of normal GLMs.
:param x: The sample data
:param design_loc: design matrix for location
    :param constraints_loc: tensor (all parameters x dependent parameters)
        Tensor that encodes how the complete parameter set, which includes
        dependent parameters, arises from the independent parameters:
        all = <constraints, indep>.
This form of constraints is used in vector generalized linear models (VGLMs).
:param size_factors: size factors for X
:return: tuple: (groupwise_means, mean, rmsd)
"""
return closedform_glm_mean(
x=x,
dmat=design_loc,
constraints=constraints_loc,
size_factors=size_factors,
link_fn=link_fn,
inv_link_fn=inv_link_fn
)
def closedform_norm_glm_logsd(
x: Union[np.ndarray, scipy.sparse.csr_matrix],
design_scale: np.ndarray,
constraints=None,
size_factors=None,
groupwise_means=None,
link_fn=np.log
):
r"""
Calculates a closed-form solution for the log-scale parameters of normal GLMs.
:param x: The sample data
:param design_scale: design matrix for scale
:param constraints: some design constraints
:param size_factors: size factors for X
    :param groupwise_means: optional; if already computed, pass it here to avoid recomputing it
:return: tuple (groupwise_scales, logsd, rmsd)
"""
def compute_scales_fun(variance, mean):
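        # For the normal model, the groupwise scale is the standard deviation.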
groupwise_scales = np.sqrt(variance)
return groupwise_scales
return closedform_glm_scale(
x=x,
design_scale=design_scale,
constraints=constraints,
size_factors=size_factors,
groupwise_means=groupwise_means,
link_fn=link_fn,
compute_scales_fun=compute_scales_fun
)
| 2.78125 | 3 |
optic_store/doc_events/serial_no.py | iptelephony/optic_store | 14 | 12798541 | <reponame>iptelephony/optic_store
# -*- coding: utf-8 -*-
# Copyright (c) 2019, 9T9IT and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
def after_insert(doc, method):
is_gift_card = frappe.db.get_value("Item", doc.item_code, "is_gift_card")
if is_gift_card:
gift_card_value = frappe.db.get_value("Item", doc.item_code, "gift_card_value")
frappe.get_doc(
{
"doctype": "Gift Card",
"gift_card_no": doc.serial_no,
"amount": gift_card_value,
}
).insert()
def on_trash(doc, method):
gift_card_no = frappe.db.exists("Gift Card", {"gift_card_no": doc.serial_no})
if gift_card_no:
frappe.delete_doc("Gift Card", gift_card_no)
| 2.125 | 2 |
soc-wk1-cert-Diana-Ilinca.py | dianaproca/toolkitten | 0 | 12798542 | <reponame>dianaproca/toolkitten<filename>soc-wk1-cert-Diana-Ilinca.py
# # soc-wk1-cert-Diana-Ilinca.py
# # Day1 homework
# #hours in a year:8760
# print(365*24)
# #minutes in a decade: 5256000
# print(60*24*365*10)
# #age in seconds:1135296000
# print(60*60*24*365*36)
# #days 32-bit system to timeout:497
# print((2**32-1)/100/60/60/24)
# #days 64-bit system to timeout:1067519911673
# print((2**63)/100/60/60/24)
# Day3 homework
# Program that asks for a person's first name, middle and last then greet with full name
# firstname = raw_input("What is your first name?")
# middlename = raw_input("What is your middle name?")
# lastname = raw_input("What is your last name?")
# print('Hello there '+firstname+ middlename +lastname+ '!!')
# # Program that asks for a person's favourite number, add 1 and suggest the result
# fav_number = int(raw_input("What is your favourite number?"))
# suggestion = fav_number+1
# print('your number is nice. However you might want to consider ' +str(suggestion)+ ' as a new fav. Just a thought..')
# #AngryBoss
# Boss= raw_input("What do you want this time?!?".upper())
# print('SO YOU THINK ' + str(Boss).upper()+' IS A GOOD IDEA??? FIRED!')
#Table of Contents
print('Chapter 1: Getting Started' + ' page 10'.rjust(10))
print('Chapter 2: Numbers' + ' page 9'.rjust(17))
print('Chapter 3: Letters' + ' page 13'.rjust(13))
| 3.328125 | 3 |
software/Opal/spud/dxdiff/dxdiff/editscript.py | msc-acse/acse-9-independent-research-project-Wade003 | 2 | 12798543 | #!/usr/bin/env python
# This file is part of dxdiff.
#
# dxdiff is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dxdiff is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Diamond. If not, see <http://www.gnu.org/licenses/>.
from lxml import etree
class EditScript:
def __init__(self):
self.script = []
def __str__(self):
return etree.tostring(self.to_xml(), pretty_print = True)
def __len__(self):
return len(self.script)
def __getitem__(self, key):
return self.script[key]
def __iter__(self):
return self.script.__iter__()
def update(self, path, value, userdata = None):
self.script.append({ "type": "update",
"location": path,
"value": value,
"userdata": userdata })
def insert(self, path, index, tag, value = None, userdata = None):
self.script.append({ "type": "insert",
"location": path,
"index": index,
"value": tag + (" " + value if value is not None else ""),
"userdata": userdata})
def delete(self, path, userdata = None):
self.script.append({ "type": "delete",
"location": path,
"userdata": userdata})
def move(self, path, destination, index, userdata = None):
self.script.append({ "type": "move",
"location": path,
"index": index,
"value": destination,
"userdata": userdata })
def to_xml(self):
tree = etree.Element("xmldiff")
for edit in self.script:
node = etree.Element(edit["type"], location = edit["location"])
if "index" in edit:
node.attrib["index"] = edit["index"]
if edit["userdata"] is not None:
node.attrib["userdata"] = edit["userdata"]
if "value" in edit:
node.text = edit["value"]
tree.append(node)
return etree.ElementTree(tree)
def write(self, path):
self.to_xml().write(path, pretty_print = True, xml_declaration = True, encoding = "utf-8")
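

# A minimal usage sketch (illustrative paths and values only; uses the lxml
# import above):
if __name__ == "__main__":
    script = EditScript()
    script.update("/root/node[1]", "new text")
    script.insert("/root", "2", "child", value="payload")
    script.delete("/root/node[3]")
    print(etree.tostring(script.to_xml(), pretty_print=True).decode("utf-8"))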
| 2.234375 | 2 |
Web/chat/chatserver.py | kasztp/python-lessons | 35 | 12798544 | import logging
from time import time
from flask import Flask, request
PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'}
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s')
log = logging.getLogger('chatserver')
app = Flask(__name__)
messages = []
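# In-memory store only: each entry is (timestamp, client IP, sender, message).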
@app.route('/post/<who>/<message>')
def post_message(who, message):
messages.append((time(), request.remote_addr, who, message))
print(messages)
return "Message saved.\n" + str(messages), 200, PLAIN_HEADER
app.run(host='localhost', debug=True, threaded=True)
| 2.625 | 3 |
main/path-sum-iii/path-sum-iii.py | EliahKagan/old-practice-snapshot | 0 | 12798545 | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def pathSum(self, root, total):
"""
:type root: TreeNode
:type total: int
:rtype: int
"""
if root is None:
return 0
path = []
count = 0
def doCount(partial):
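            # `partial` is target minus the root-to-current sum; adding each
            # ancestor value back tests every downward path ending here.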
nonlocal count
if partial == 0:
count += 1
for val in path:
partial += val
if partial == 0:
count += 1
def dfs(node):
nonlocal total
total -= node.val
doCount(total)
path.append(node.val)
if node.left is not None:
dfs(node.left)
if node.right is not None:
dfs(node.right)
total += path.pop()
dfs(root)
return count
| 3.6875 | 4 |
examples/wordcount/lambda_filter.py | dsouzajude/xFlow | 13 | 12798546 | <filename>examples/wordcount/lambda_filter.py
import json
import boto3
import base64
OUTBOUND_EVENT = 'FileFiltered'
LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
def reword(word):
''' Removes non-letters from word '''
reworded = ''
for letter in word:
if letter not in LETTERS:
continue
reworded = reworded + letter
return reworded
def is_word(word):
if len(word) <= 1:
return False
return True
def filter_out_non_words(event, context):
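    # Lambda entry point: each Kinesis record carries a base64-encoded JSON
    # payload containing the raw word list to filter.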
kinesis = boto3.client('kinesis')
print("Received event: " + json.dumps(event, indent=4))
for record in event['Records']:
# Kinesis data is base64 encoded so decode here
payload = base64.b64decode(record['kinesis']['data'])
print("Decoded payload: " + payload)
try:
payload = json.loads(payload)
execution_id = payload.get('execution_id')
words_arr = payload['words_arr']
# Filter non words
words_filtered = []
for w in words_arr:
reworded = reword(w)
if not reworded:
continue
if not is_word(reworded):
continue
words_filtered.append(reworded)
data = json.dumps({
'execution_id': execution_id,
'words_filtered': words_filtered
})
kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data)
except Exception as ex:
print "Error processing record, error=%s" % str(object=ex)
return 'Processed all records.'
| 2.765625 | 3 |
Uche Clare/Phase 1/Python Basic 1/Day 2/Task-7.py | CodedLadiesInnovateTech/-python-challenge-solutions | 6 | 12798547 | <gh_stars>1-10
file = input('Enter the file name: ')
file_extsn = file.split(".")
print(f"The file extension is {file_extsn[-1]}")
CoachYacc.py | crabster15/Coach_plus_plus_plus | 0 | 12798548 | import ply.yacc as yacc
from CoachLex import tokens
# environment variables
enviro_vars = {}
def p_statement_assign(p):
'statement : VARINT VAR expression'
enviro_vars[p[2]] = p[3]
def p_statement_expr(p):
'statement : expression'
def p_statement_output(p):
'statement : OUTPUT expression'
print("Coach says " + str(p[2]) + "!")
def p_statement_if(p):
    '''statement : IFA VAR IFB statement'''
    # The rule has only four symbols, so the original p[6] was out of range;
    # guard on the named variable's current value instead.
    if enviro_vars.get(p[2]): p[0] = p[4]
def p_statement_file_in(p):
'statement : FILEIN VAR'
file_str = ""
f = open(p[2] + "." + 'osxc', "r")
for line in f:
file_str = ''
file_str += line.rstrip('\n')
yaccer.parse(file_str)
#Basic Math
def p_expression_basicop(p):
    '''expression : expression ADD expression
                  | expression SUBA SUBB expression
                  | expression MULT expression
                  | expression DIV expression'''
    # Assumes the lexer yields the literal operator words
    # ('add', 'finished ... of', 'by', 'split') as token values.
    if p[2] == 'add': p[0] = p[1] + p[3]
    elif p[2] == 'finished' and p[3] == 'of': p[0] = p[1] - p[4]
    elif p[2] == 'by': p[0] = p[1] * p[3]
    elif p[2] == 'split': p[0] = p[1] / p[3]
def p_expression_number(p):
'expression : NUMBER'
p[0] = p[1]
def p_expression_var(p):
'expression : VAR'
try:
p[0] = enviro_vars[p[1]]
except LookupError:
print("undefined var, resorting to 0")
p[0] = 0
def p_comparison_binop(p):
'''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB expression
| expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression
| expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression'''
    if p[4] == 'same': p[0] = p[1] == p[5]
    elif p[3] == 'faster': p[0] = p[1] > p[5]
    elif p[3] == 'slower': p[0] = p[1] < p[5]
def p_error(p):
print(f"Synax error at {p.value!r}")
#set up yacc
yaccer = yacc.yacc()
while True:
try:
s = input('> ')
except EOFError:
break
yaccer.parse(s) | 2.953125 | 3 |
recipe/admin.py | wichmannpas/recipemanager | 1 | 12798549 | from django.contrib import admin, messages
from django.db import transaction
from django.db.models import Prefetch
from recipe.models import Ingredient, Recipe, RecipeIngredient, RecipeInstance, \
RecipeInstanceImage, Tag
admin.site.register(Tag)
@admin.register(Ingredient)
class IngredientAdmin(admin.ModelAdmin):
list_display = (
'name',
)
search_fields = (
'name',
)
@transaction.atomic
def merge_ingredients(self, request, queryset):
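        # Re-point every RecipeIngredient at the first selected ingredient,
        # then delete the now-orphaned duplicates inside one transaction.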
if len(queryset) < 2:
self.message_user(
request, 'At least two ingredients need to be selected!',
messages.WARNING)
return
main = queryset.first()
others = queryset[1:]
len_others = len(others)
RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main)
Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete()
self.message_user(
request, '{} ingredients were merged into {}'.format(len_others, main),
messages.SUCCESS)
merge_ingredients.short_description = 'Merge selected ingredients'
actions = (
merge_ingredients,
)
class RecipeIngredientInline(admin.TabularInline):
model = RecipeIngredient
autocomplete_fields = (
'ingredient',
)
class RecipeInstanceImageInline(admin.TabularInline):
model = RecipeInstanceImage
@admin.register(RecipeInstance)
class RecipeInstanceAdmin(admin.ModelAdmin):
list_display = (
'day',
'recipe',
)
inlines = (
RecipeInstanceImageInline,
)
@admin.register(Recipe)
class RecipeAdmin(admin.ModelAdmin):
list_display = (
'name',
'tag_str',
'view_count',
)
list_filter = (
'tags',
)
search_fields = (
'name',
)
inlines = (
RecipeIngredientInline,
)
def get_queryset(self, request):
qs = super().get_queryset(request)
return qs.prefetch_related(Prefetch('tags', Tag.objects.order_by('name')))
| 2.03125 | 2 |
am/ls_importer/management/commands/import_people.py | access-missouri/am-django-project | 4 | 12798550 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Import a folder (~/people) full of person JSON from Legiscan to the database.
"""
from django.core.management.base import BaseCommand
from general.models import Person
from ls_importer.models import LSIDPerson
import json
import os
from tqdm import tqdm
class Command(BaseCommand):
"""
Import a folder (~/people) full of person JSON from Legiscan to the database.
"""
help = 'Import a folder full of person JSON from Legiscan to the database.'
def handle(self, *args, **options):
"""
Make it happen.
"""
def json_to_person(json_path):
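            # Parse one Legiscan person JSON file and create the Person row
            # plus its Legiscan-ID link if they do not already exist.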
json_data = open(json_path)
person_json = json.load(json_data)
pj_unfold = person_json['person']
person_ls_id = pj_unfold['people_id']
# person_ls_role_id = pj_unfold['role_id']
# person_role = pj_unfold['role']
# person_ls_party_id = pj_unfold['party_id']
# person_name = pj_unfold['name']
person_first_name = pj_unfold['first_name']
person_middle_name = pj_unfold['middle_name']
person_last_name = pj_unfold['last_name']
person_suffix = pj_unfold['suffix']
person_nickname = pj_unfold['nickname']
# This try/catch structure exists
# to prevent edge cases where
# a person may be stored twice in Legiscan
# under slightly different names.
try:
LSIDPerson.objects.get(
lsid=person_ls_id
)
except(LSIDPerson.DoesNotExist):
person_object, person_created = Person.objects.get_or_create(
first_name=person_first_name,
middle_name=person_middle_name,
last_name=person_last_name,
suffix=person_suffix,
defaults={
'nickname': person_nickname,
}
)
link_object, link_created = LSIDPerson.objects.get_or_create(
lsid=person_ls_id,
person=person_object,
)
target_directory = os.path.join(os.path.expanduser("~"), 'people')
for file in tqdm(os.listdir(target_directory)):
if file.endswith(".json"):
json_to_person(os.path.join(target_directory, file))
| 2.25 | 2 |