max_stars_repo_path (string, 3-269) | max_stars_repo_name (string, 4-119) | max_stars_count (int64, 0-191k) | id (string, 1-7) | content (string, 6-1.05M) | score (float64, 0.23-5.13) | int_score (int64, 0-5)
---|---|---|---|---|---|---
dueros/directive/VideoPlayer/Stop.py | Mryan2005/bot-sdk-python | 70 | 12790851 | #!/usr/bin/env python3
# -*- encoding=utf-8 -*-
# description:
# author:jack
# create_time: 2018/7/2
from dueros.directive.BaseDirective import BaseDirective
class Stop(BaseDirective):
"""
    The VideoPlayer.Stop directive.
    Documentation: https://dueros.baidu.com/didp/doc/dueros-bot-platform/dbp-custom/videoplayer_markdown#VideoPlayer.Stop%E6%8C%87%E4%BB%A4
"""
def __init__(self):
        super(Stop, self).__init__('VideoPlayer.Stop')
| 2.015625 | 2 |
sum.py | Suvradippaul/actions | 0 | 12790852 |
# simple addition program
a = 20
b = 20
print("The sum is ", a+b)
| 1.976563 | 2 |
meppel_afvalkalender/__init__.py | westenberg/python-meppel-afvalkalender | 0 | 12790853 | # -*- coding: utf-8 -*-
"""Asynchronous Python client for the Meppel Afvalkalender API."""
from .const import ( # noqa
WASTE_TYPE_NON_RECYCLABLE,
WASTE_TYPE_ORGANIC,
WASTE_TYPE_PAPER,
WASTE_TYPE_PLASTIC,
)
from .meppel_afvalkalender import ( # noqa
MeppelAfvalkalender,
MeppelAfvalkalenderAddressError,
MeppelAfvalkalenderConnectionError,
MeppelAfvalkalenderError,
)
| 1.164063 | 1 |
checknotuse.py | danjia/CheckNotUsePic | 0 | 12790854 |
# -*- coding:utf-8 -*-
#=====================================
# Analyze all of the code to find images that are never used.
# An unused image resource can arise in one of the following four reference chains:
# (1)png => code
# (2)png => csd, csd => code
# (3)
# 1. png => plist, plist => code
# 2. png => plist, plist => ExportJson, ExportJson => code
# (4)
# 1. png => fnt, fnt => code
# 2. png => fnt, fnt=>csd, csd => code
#=====================================
import os
import shutil
global g_cnt
g_cnt = 0
def getCnt():
global g_cnt
g_cnt = g_cnt + 1
return g_cnt
#===================================
# @brief Read all image file names into a list
#===================================
def readPicNameList(picPath):
picNameList = []
for prePath, folderList, fileNameList in os.walk(picPath):
for fileName in fileNameList:
suffixName = fileName[-3:]
if "png" == suffixName or "jpg" == suffixName:
picNameList.append([fileName, prePath])
return picNameList
# print("read PicNameList ok")
#===============================================
# @brief Read files with a given suffix under a directory into a list
#===============================================
def readFileDataToList(filePath, suffixName):
n = len(suffixName)
saveToList = []
for prePath, folderList, fileNameList in os.walk(filePath):
for fileName in fileNameList:
# fielSuffixName = fileName[-3:]
fielSuffixName = fileName[-n:]
if suffixName == fielSuffixName:
f = open(prePath+"/"+fileName, "rb")
data = f.read()
# saveToList.append([data, fileName, prePath+"/"+fileName])
saveToList.append([data, fileName, prePath])
f.close()
return saveToList
#===================================
# @brief Check whether a keyword occurs in the data list
#===================================
def checkInDataList(keyword, dataList, show):#, n):
    # The possible reference forms are:
# csd: <FileData Type="Normal" Path="Res/abc.png" />
# code: ["abc"] = "abc.png",
# cc.Sprite:create("res/abc/abc.png")
# plist: <key>abc.png</key>
#
    # The common pattern is that the character right before the match is one of:
# (1)" (2)' (3)/ (4)>
for index in range(0, len(dataList)):
fileInfo = dataList[index]
findIndex = fileInfo[0].find(keyword)
if -1 != findIndex:
preCh = fileInfo[0][findIndex-1]
if '"'==preCh or "'"==preCh or "/"==preCh or ">"==preCh:
return index
return -1
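# Illustrative examples of the rule above (not part of the original script):
# a match only counts when the character just before the file name is one of
# the delimiters, which filters out accidental substring hits, e.g.
#   checkInDataList('abc.png', [['cc.Sprite:create("res/abc.png")', 'a.lua', './src']], False) -> 0
#   checkInDataList('abc.png', [['myabc.png', 'a.lua', './src']], False) -> -1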
#==========================
# @brief Handle the unused files (copy them into a "needdelete" folder)
#==========================
def handleNotFoundFile(filePathList):
for fileInfo in filePathList:
fileName = fileInfo[0]
filePath = fileInfo[1]
if not os.path.exists("needdelete/"+filePath):
os.makedirs("needdelete/"+filePath)
shutil.copyfile(filePath+"/"+fileName, "needdelete/"+filePath+"/"+fileName)
#==========================
# @brief Check whether the image is referenced in code
#==========================
def handleInCode(picIndex, picNameList, codeDataList):
picName = picNameList[picIndex][0]
index = checkInDataList(picName, codeDataList, False)
return -1!=index
#==========================
# @brief Check whether the image is referenced via a csd file
#==========================
def handleInCsd(picIndex, picNameList, csdDataList,codeDataList):
picName = picNameList[picIndex][0]
#pic => csd
csdIndex = checkInDataList(picName, csdDataList, False)
if -1 != csdIndex:
fileName = csdDataList[csdIndex][1]
# csd => code
codeIndex = checkInDataList(fileName[:-1]+"b", codeDataList, False)
if -1 == codeIndex:
print(getCnt(), picName, fileName, "pic=>csd")
handleNotFoundFile([
(picName, picNameList[picIndex][1]),
(fileName, csdDataList[csdIndex][2])])
return True
return False
#==========================
# @brief Check whether the image is referenced via a plist file
#==========================
def handleInPlist(picIndex, picNameList, plistDataList, exportJsonDataList, codeDataList):
picName = picNameList[picIndex][0]
#pic => plist
plistIndex = checkInDataList(picName, plistDataList, False)
if -1 != plistIndex:
plistFileName = plistDataList[plistIndex][1]
codeIndex = checkInDataList(plistFileName, codeDataList, False)
# plist => code
if -1 != codeIndex:
            # the plist was found in the code
return True
else:
exportJsonIndex = checkInDataList(plistFileName, exportJsonDataList, False)
            # plist => exportJson
if -1 != exportJsonIndex:
exportJsonFileName = exportJsonDataList[exportJsonIndex][1]
codeIndex = checkInDataList(exportJsonFileName, codeDataList, False)
                # exportJson => code
if -1 == codeIndex:
print(getCnt(), picName, plistFileName, exportJsonFileName, "pic=>plist=>json")
handleNotFoundFile([
(picName, picNameList[picIndex][1]),
(plistFileName, plistDataList[plistIndex][2]),
(exportJsonFileName, exportJsonDataList[exportJsonIndex][2])])
return True
return False
#==========================
# @brief Check whether the image is referenced via a fnt file
#==========================
def handleInFnt(picIndex, picNameList, fntDataList, csdDataList, codeDataList):
picName = picNameList[picIndex][0]
#pic => fnt
fntIndex = checkInDataList(picName, fntDataList, False)
if -1 != fntIndex:
# fnt => code
fntName = fntDataList[fntIndex][1]
codeIndex = checkInDataList(fntName, codeDataList, False)
if -1 != codeIndex:
return True
else:
#fnt => csd
csdIndex = checkInDataList(fntName, csdDataList, False)
if -1 != csdIndex:
csdFileName = csdDataList[csdIndex][1]
# csd => code
codeIndex = checkInDataList(csdFileName[:-1]+"b", codeDataList, False)
if -1 == codeIndex:
print(getCnt(), picName, csdFileName, "pic=>fnt=>csd")
handleNotFoundFile([
(picName, picNameList[picIndex][1]),
(csdFileName, csdDataList[csdIndex][2])])
return True
return False
#===============================================================
# @brief Find unused images
# @params picPath    path to the images to check
#         codePath   path to the code to search
#         csdPath    path to the csd files (only csd can be searched; csb cannot)
#         plistPath  path to the plist files
#         fntPath    path to the fnt files
#===============================================================
def findNotUsePic(picPath, codePath, csdPath, plistPath, fntPath):
picNameList = readPicNameList(picPath)
codeDataList = readFileDataToList(codePath, "lua")
csdDataList = readFileDataToList(csdPath, "csd")
plistDataList = readFileDataToList(plistPath,"plist")
fntDataList = readFileDataToList(fntPath, "fnt")
exportJsonDataList = readFileDataToList(fntPath, "ExportJson")
for picIndex in range(0, len(picNameList)):
# code
if handleInCode(picIndex, picNameList, codeDataList):
pass
# csd
elif handleInCsd(picIndex, picNameList, csdDataList, codeDataList):
pass
# plist, exportJson
elif handleInPlist(picIndex, picNameList, plistDataList, exportJsonDataList, codeDataList):
pass
# fnt
elif handleInFnt(picIndex, picNameList, fntDataList, csdDataList, codeDataList):
pass
if "__main__" == __name__:
findNotUsePic("./src", "./src", "./ccs_pro", "./src", "./src")
print("ok")
| 3.125 | 3 |
src/lib/pedal/report/imperative.py | Skydler/skulpt | 4 | 12790855 |
"""
Imperative style commands for constructing feedback in a convenient way.
Uses a global report object (MAIN_REPORT).
"""
__all__ = ['set_success', 'compliment', 'give_partial', 'explain',
'gently', 'hide_correctness', 'suppress', 'log', 'debug',
'clear_report', 'get_all_feedback', 'MAIN_REPORT']
from pedal.report.report import Report
#: The global Report object. Meant to be used as a default singleton
#: for any tool, so that instructors do not have to create their own Report.
#: Of course, all APIs are expected to work with a given Report, and only
#: default to this Report when no others are given.
MAIN_REPORT = Report()
def set_success():
"""
Creates Successful feedback for the user, indicating that the entire
assignment is done.
"""
MAIN_REPORT.set_success()
def compliment(message, line=None):
"""
Create a positive feedback for the user, potentially on a specific line of
code.
Args:
message (str): The message to display to the user.
line (int): The relevant line of code to reference.
"""
MAIN_REPORT.compliment(message, line)
def give_partial(value, message=None):
"""
Increases the user's current score by the `value`. Optionally display
a positive message too.
Args:
value (number): The number to increase the user's score by.
message (str): The message to display to the user.
"""
MAIN_REPORT.give_partial(value, message)
def explain(message, priority='medium', line=None, label='explain'):
MAIN_REPORT.explain(message, priority, line, label=label)
def gently(message, line=None, label='explain'):
MAIN_REPORT.gently(message, line, label=label)
def hide_correctness():
MAIN_REPORT.hide_correctness()
def suppress(category, label=True):
MAIN_REPORT.suppress(category, label)
def log(message):
MAIN_REPORT.log(message)
def debug(message):
MAIN_REPORT.debug(message)
def clear_report():
MAIN_REPORT.clear()
def get_all_feedback():
return MAIN_REPORT.feedback
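# A minimal usage sketch (illustrative, not part of the original module),
# assuming an instructor grading script that imports these commands:
#
#     from pedal.report.imperative import compliment, give_partial, get_all_feedback
#     compliment("Nice use of a list comprehension!", line=4)
#     give_partial(0.5, "Partial credit for handling the base case.")
#     print(get_all_feedback())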
| 2.953125 | 3 |
lib/config.py | galeadmn/aRNApipe | 0 | 12790856 | # -*- coding: utf-8 -*-
import sys
import os
# LIBRARY USED TO SUBMIT JOBS:
# - 'LSF' FOR IBM LSF WORKLOAD MANAGER (it uses 'sys_LSF.py')
# - 'LOCAL' FOR SEQUENTIAL RUN ON SINGLE MACHINE (it uses 'sys_single.py')
# - 'OTHER' FOR LIBRARIES ADAPTED TO OTHER WORKLOAD MANAGERS (it uses 'sys_OTHER.py')
mode = "OTHER"
# PATH TO THE FOLDER "genomes_processed" WHERE THE DIFFERENT GENOME BUILDS ARE STORED
path_db = "/share/"
path_code = "/share/code/"
# FULL PATHS TO BINARIES USED BY aRNApipe (users must change these values to match
# the current locations of the binaries used by aRNApipe in their system).
path_trimgalore = "/usr/local/bin/trim_galore"
path_bowtie2 = "/usr/local/bin/bowtie2"
path_fastqc = "/usr/local/FastQC/fastqc"
path_star = "/usr/local/bin/STAR"
path_htseq = "/usr/bin/htseq-count"
path_samtools = "/usr/local/bin/samtools"
path_cutadapt = "/usr/local/bin/cutadapt"
# STAR options (users can add their own options):
# The keys of this dict are used in the project config files to use the
# referenced STAR arguments within the corresponding dictionary values
star_options = {"default": "",
"encode": "--outFilterType BySJout --outFilterMultimapNmax 20 --alignSJoverhangMin 8 --alignSJDBoverhangMin 1 --outFilterMismatchNmax 999 --outFilterMismatchNoverLmax 0.04 --alignIntronMin 20 --alignIntronMax 1000000 --alignMatesGapMax 1000000"}
# ENVIRONMENT VARIABLES:
# The following system environment variables are changed to add or overwrite
# their current values.
environment = {"JAVA_HOME": ["/usr/java/jdk1.8.0_60/","add"],
"PYTHONPATH": ["/usr/lib64/python2.7/site-packages","overwrite"],
# "PATH": ["/gpfs/gpfs1/software/Python-2.7.2/bin","add"],
# "PATH": ["/gpfs/gpfs1/software/bedtools2-2.20.0/bin","add"],
# "PATH": ["/gpfs/gpfs1/software/samtools-1.2/bin","add"],
# "LD_LIBRARY_PATH":["/gpfs/gpfs1/software/gcc-4.8.2/usr/lib64","add"],
"PERL5LIB" :["/gpfs/gpfs1/software/perl-modules/lib/perl5/5.10.1:/gpfs/gpfs1/software/perl-modules/lib/perl5/5.10.1/lib64/perl5","add"]}
# ANNOTATIONS AND FULL PATH TO THE PIPELINE BASE DIRECTORY (do not change)
path_genome = path_db + "/genomes_processed/#LABEL/STAR_genome"
path_annotation = path_db + "/genomes_processed/#LABEL/genesets.gtf"
path_fasta = path_db + "/genomes_processed/#LABEL/genome.fa"
annots = [path_db + "/genomes_processed/#LABEL/genesets.refFlat",
path_db + "/genomes_processed/#LABEL/refFlats/protein_coding.refFlat",
path_db + "/genomes_processed/#LABEL/refFlats/rRNA.refFlat"]
nannots = ["general","protein_coding","ribosomal"]
| 1.835938 | 2 |
pyhumio/__init__.py | dsb-automation/pyhumio | 0 | 12790857 |
from .humio_handler import HumioHandler
| 1.054688 | 1 |
src/cache3/memory.py | SHIELD616416/hello-world | 0 | 12790858 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# DATE: 2021/7/24
# Author: <EMAIL>
from collections import OrderedDict
from threading import Lock
from time import time as current
from typing import Dict, Any, Type, Union, Optional, NoReturn, Tuple, List, Callable
from cache3 import AbstractCache
from cache3.setting import DEFAULT_TIMEOUT, DEFAULT_TAG
from cache3.utils import NullContext
LK: Type = Union[NullContext, Lock]
Number: Type = Union[int, float]
TG: Type = Optional[str]
SK: Type = Tuple[Any, TG]
Time: Type = float
VT: Type = int
VH = Callable[[Any, VT], NoReturn]
VT_SET = 0
VT_GET = 1
VT_INCR = 2
_caches: Dict[Any, Any] = {}
_expire_info: Dict[Any, Any] = {}
_locks: Dict[Any, Any] = {}
# Thread unsafe cache in memory
class SimpleCache(AbstractCache):
"""
    Simple encapsulation of ``OrderedDict``: it performs similarly to a plain
    ``dict`` while keeping the requirements on keys and values relatively
    loose.
    It lives entirely in memory, so use the capacity controls and expiration
    times to avoid wasting memory.
>>> cache = SimpleCache('test_cache', 60)
>>> cache.set('name', 'venus')
True
>>> cache.get('name')
'venus'
>>> cache.delete('name')
True
>>> cache.get('name')
>>> cache.set('gender', 'male', 0)
True
>>> cache.get('gender')
"""
LOCK: LK = NullContext
def __init__(self, *args, **kwargs) -> None:
super(SimpleCache, self).__init__(*args, **kwargs)
self.visit_hook: VH = getattr(self, f'{self.evict_type}_hook_visit')
# Attributes _name, _timeout from validate.
self._cache: OrderedDict[SK, Any] = _caches.setdefault(
self.name, OrderedDict()
)
self._expire_info: Dict[SK, Any] = _expire_info.setdefault(self.name, {})
self._lock: LK = _locks.setdefault(self.name, self.LOCK())
def set(
self, key: Any, value: Any, timeout: Number = DEFAULT_TIMEOUT,
tag: TG = DEFAULT_TAG
) -> bool:
store_key: SK = self.store_key(key, tag=tag)
serial_value: Any = self.serialize(value)
with self._lock:
return self._set(store_key, serial_value, timeout)
def get(self, key: str, default: Any = None, tag: TG = DEFAULT_TAG) -> Any:
store_key: SK = self.store_key(key, tag=tag)
with self._lock:
if self._has_expired(store_key):
self._delete(store_key)
return default
value: Any = self.deserialize(self._cache[store_key])
self.visit_hook(store_key, VT_GET)
return value
def ex_set(
self, key: str, value: Any, timeout: float = DEFAULT_TIMEOUT,
tag: Optional[str] = DEFAULT_TAG
) -> bool:
""" Realize the mutually exclusive operation of data through thread lock.
but whether the mutex takes effect depends on the lock type.
"""
store_key: SK = self.store_key(key, tag=tag)
serial_value: Any = self.serialize(value)
with self._lock:
if self._has_expired(store_key):
self._set(store_key, serial_value, timeout)
return True
return False
def touch(self, key: str, timeout: Number, tag: TG = DEFAULT_TAG) -> bool:
""" Renew the key. When the key does not exist, false will be returned """
store_key: SK = self.store_key(key, tag=tag)
with self._lock:
if self._has_expired(store_key):
return False
self._expire_info[store_key] = self.get_backend_timeout(timeout)
return True
def delete(self, key: str, tag: TG = DEFAULT_TAG) -> bool:
store_key: SK = self.store_key(key, tag=tag)
with self._lock:
return self._delete(store_key)
def inspect(self, key: str, tag: TG = DEFAULT_TAG) -> Optional[Dict[str, Any]]:
""" Get the details of the key value include stored key and
serialized value.
"""
store_key: SK = self.store_key(key, tag)
if not self._has_expired(store_key):
return {
'key': key,
'store_key': store_key,
'store_value': self._cache[store_key],
'value': self.deserialize(self._cache[store_key]),
'expire': self._expire_info[store_key]
}
def incr(self, key: str, delta: int = 1, tag: TG = DEFAULT_TAG) -> Number:
""" Will throed ValueError when the key is not existed. """
store_key: SK = self.store_key(key, tag=tag)
with self._lock:
if self._has_expired(store_key):
self._delete(store_key)
raise ValueError("Key '%s' not found" % key)
value: Any = self.deserialize(self._cache[store_key])
serial_value: int = self.serialize(value + delta)
self._cache[store_key] = serial_value
self.visit_hook(store_key, VT_INCR)
return serial_value
def has_key(self, key: str, tag: TG = DEFAULT_TAG) -> bool:
store_key: SK = self.store_key(key, tag=tag)
with self._lock:
if self._has_expired(store_key):
self._delete(store_key)
return False
return True
def ttl(self, key: Any, tag: TG) -> Time:
store_key: Any = self.store_key(key, tag)
if self._has_expired(store_key):
return -1
return self._expire_info[store_key] - current()
def clear(self) -> bool:
with self._lock:
self._cache.clear()
self._expire_info.clear()
return True
def evict(self) -> NoReturn:
if self.cull_size == 0:
self._cache.clear()
self._expire_info.clear()
else:
count = len(self._cache) // self.cull_size
for i in range(count):
store_key, _ = self._cache.popitem()
del self._expire_info[store_key]
def store_key(self, key: Any, tag: TG) -> SK:
return key, tag
def restore_key(self, store_key: SK) -> SK:
return store_key
def _has_expired(self, store_key: SK) -> bool:
exp: float = self._expire_info.get(store_key, -1.)
return exp is not None and exp <= current()
def _delete(self, store_key: SK) -> bool:
try:
del self._cache[store_key]
del self._expire_info[store_key]
except KeyError:
return False
return True
def _set(self, store_key: SK, value: Any, timeout=DEFAULT_TIMEOUT) -> bool:
if self.timeout and len(self) >= self.max_size:
self.evict()
self._cache[store_key] = value
self.visit_hook(store_key, VT_SET)
self._expire_info[store_key] = self.get_backend_timeout(timeout)
return True
def __iter__(self) -> Tuple[Any, ...]:
for store_key in reversed(self._cache.keys()):
if not self._has_expired(store_key):
key, tag = self.restore_key(store_key)
yield key, self.deserialize(self._cache[store_key]), tag
def __len__(self) -> int:
return len(self._cache)
def lru_hook_visit(self, store_key: Any, vt: VT) -> NoReturn:
self._cache.move_to_end(store_key, last=False)
def lfu_hook_visit(self, store_key: Any, vt: VT) -> NoReturn:
""""""
def fifo_hook_visit(self, store_key: Any, vt: VT) -> NoReturn:
if vt == VT_SET:
self._cache.move_to_end(store_key, last=False)
__delitem__ = delete
__getitem__ = get
__setitem__ = set
# Thread safe cache in memory
class SafeCache(SimpleCache):
LOCK: LK = Lock
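# Usage sketch (illustrative; relies on the `SimpleCache('test_cache', 60)`
# constructor shown in the docstring above):
#
#     cache = SafeCache('safe_cache', 60)
#     cache.ex_set('token', 'abc')   # True: key absent, so it is set
#     cache.ex_set('token', 'xyz')   # False: 'token' is still alive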
| 2.703125 | 3 |
code/pi-estimators.py | jakryd/0800-fizobl | 0 | 12790859 |
import argparse
import numpy as np
import matplotlib.pyplot as plt
def pi_estimator(n_samples):
est = list()
n_inside = 0; n_all = 0
for i in range(n_samples):
x = np.random.uniform(-1.0, 1.0)
y = np.random.uniform(-1.0, 1.0)
dist = np.sqrt(np.power(x, 2) + np.power(y, 2))
if dist <= 1.0:
n_inside += 1
n_all += 1
        est.append(4.0 * float(n_inside) / float(n_all))
return np.array(est)
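# The samples are uniform on [-1, 1]^2 and the unit disc covers pi/4 of that
# square, so 4 * (n_inside / n_all) converges to pi.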
def pi_estimator_integral(n_samples):
est = list()
e = 0.0
for i in range(n_samples):
x = np.random.uniform(0.0, 1.0)
y = 4.0 * np.sqrt(1.0 - np.power(x, 2))
        e += (y - e) / (float(i) + 1.0)
est.append(e)
return np.array(est)
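# Here `e` is the running mean of 4*sqrt(1 - x^2) for x ~ U(0, 1); since the
# integral of 4*sqrt(1 - x^2) over [0, 1] equals pi, the estimate converges to pi.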
def pi_estimator_markov_chain(n_samples, step=0.1):
est = list()
n_inside = 0; n_all = 0; n_accepted = 0
x = np.random.uniform(-1.0, 1.0)
y = np.random.uniform(-1.0, 1.0)
for i in range(n_samples):
dx = np.random.uniform(-step, step)
dy = np.random.uniform(-step, step)
xn = x + dx
yn = y + dy
if abs(xn) < 1.0 and abs(yn) < 1.0:
x = xn
y = yn
n_accepted += 1
dist = np.sqrt(np.power(x, 2) + np.power(y, 2))
if dist <= 1.0:
n_inside += 1
n_all += 1
        est.append(4.0 * float(n_inside) / float(n_all))
    acc_ratio = float(n_accepted) / float(n_all)
return np.array(est)
def run(n_runs, n_samples, func):
runs = []
for i in range(n_runs):
e = func(n_samples)
runs.append(e)
return np.mean(runs, axis=0), np.std(runs, axis=0)
parser = argparse.ArgumentParser(usage='python %(prog)s [options]',
description='Implements pi estimators.')
parser.add_argument('--n_samples', type=int, help='number of samples',
required=True)
parser.add_argument('--n_runs', type=int, help='number of runs', required=True)
args = parser.parse_args()
n_runs = args.n_runs
n_samples = args.n_samples
func = pi_estimator_markov_chain
mean, std = run(n_runs, n_samples, func)
plt.fill_between(x=range(n_samples), y1=mean-std, y2=mean+std, label='mean $\pm$ std')
plt.axhline(y=np.pi, color='k', linestyle='--', label='$\pi$')
plt.ylim([np.pi-0.5, np.pi+0.5])
plt.xlim([0, 10000])
plt.xlabel('step')
plt.ylabel('estimate of $\pi$')
plt.legend()
plt.show()
| 3.140625 | 3 |
awx/main/migrations/0118_auto_20200326_1123.py | Pavloid21/awx | 0 | 12790860 |
# Generated by Django 2.2.8 on 2020-03-26 11:23
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers
class Migration(migrations.Migration):
dependencies = [
('taggit', '0003_taggeditem_add_unique_index'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('main', '0117_auto_20200326_1043'),
]
operations = [
migrations.AddField(
model_name='deployhistory',
name='created',
field=models.DateTimeField(default=None, editable=False),
),
migrations.AddField(
model_name='deployhistory',
name='created_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'deployhistory', 'model_name': 'deployhistory', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='deployhistory',
name='description',
field=models.TextField(blank=True, default=''),
),
migrations.AddField(
model_name='deployhistory',
name='modified',
field=models.DateTimeField(default=None, editable=False),
),
migrations.AddField(
model_name='deployhistory',
name='modified_by',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'deployhistory', 'model_name': 'deployhistory', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='deployhistory',
name='name',
field=models.CharField(default='nothing', max_length=512, unique=True),
preserve_default=False,
),
migrations.AddField(
model_name='deployhistory',
name='tags',
field=taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'),
),
]
| 1.625 | 2 |
mainapp/urls.py | RashidRysaev/geekshop | 0 | 12790861 | from django.urls import re_path
import mainapp.views as mainapp
from .apps import MainappConfig
app_name = MainappConfig.name
urlpatterns = [
re_path(r"^$", mainapp.products, name="index"),
re_path(r"^category/(?P<pk>\d+)/$", mainapp.products, name="category"),
re_path(r"^category/(?P<pk>\d+)/page/(?P<page>\d+)/$", mainapp.products, name="page"),
re_path(r"^product/(?P<pk>\d+)/$", mainapp.product, name="product"),
]
| 1.804688 | 2 |
crichtonweb/cli/crichtoncli/commands/indexzenoss.py | bpluly/crichton | 0 | 12790862 |
# Crichton, Admirable Source Configuration Management
# Copyright 2012 British Broadcasting Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# example that you can run on mac:
# ./crichton.py indexzenoss https://monitor.forge.domain.local/ Devices/BBC/ApplicationHost/Platform/Delivery \
# --cert-file=/Users/$USER/.bbc/dev.domain.com.pem \
# --key-file=/Users/$USER/.bbc/dev.domain.com.key \
# --ca-file=/Users/$USER/.bbc/ca.pem
import re
from optparse import make_option
from django.core.management.base import CommandError
from django.db import transaction
from django.utils.encoding import iri_to_uri
from crichtoncli.apihelpers import *
from crichtoncli.commands import ApiCommand
from crichtonweb.core.httpshelpers import *
from crichtonweb.system.models import Environment, Pool, Node, PoolMembership, Role, crichtonCronJobStatus
import logging
logger = logging.getLogger(__name__)
class Command(ApiCommand):
help = ("Crawl a zenoss device collection and add all"
"devices it contains to the crichton db as nodes."
" Can only run locally.")
args = "<zenossbaseurl> <zenossselector>"
# option_list = ApiCommand.option_list + (
# )
# uses database!
requires_model_validation = True
def print_help(self, zenossbaseurl, zenossselector):
super(ApiCommand, self).print_help(zenossbaseurl, zenossselector)
def _ensure_role_for_pool(self, pool_name):
role_name = pool_name.split("_")[0]
role, created = Role.objects.get_or_create(name=role_name)
role.undelete()
return role
def _format_pool_name(self, name):
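        # e.g. "MySQL-Only" -> "mysql_only": every non-alphanumeric character
        # becomes an underscore and the result is lower-cased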
return re.sub(r'[^a-zA-Z0-9_]', '_', name).lower()
@transaction.commit_manually
def ensure_pool(self, environment, name):
# the pool creation we do here seems a bit hacky / our-zenoss-setup-specific
# so it is not worthy of migration into the PoolManager class imho
pool = None
created = False
try:
name = self._format_pool_name(name)
role = self._ensure_role_for_pool(name)
pool, created = Pool.objects.get_or_create(environment=environment, name=name, defaults={
"role": role,
})
if pool.role != role:
pool.role = role
pool.save()
pool.undelete()
        except Exception as e:
            logger.error("ensure_pool failed with %s, rolling this transaction back" % str(e))
transaction.rollback()
raise
else:
transaction.commit()
return pool, created
# @transaction.commit_manually
# def ensure_ip_address(self, node, ip_address):
# try:
# ip_address, created = IpAddress.objects.get_or_create(address=ip_address)
# if not node.ip_addresses.filter(address=ip_address):
# node.ip_addresses.add(ip_address)
# node.save()
# except:
# transaction.rollback()
# raise
# else:
# transaction.commit()
def handle(self, *args, **options):
if len(args) < 2:
raise CommandError(
"You must provide at least zenossbaseurl and zenossselector")
zenossbaseurl = args[0]
if not zenossbaseurl.endswith("/"):
zenossbaseurl += "/"
zenossselector = args[1]
zenoss_client = makeHttps(zenossbaseurl, **options)
# issue_tracker = self.ensure_issue_tracker(jiraname, jirabaseurl)
# project = self.ensure_project(issue_tracker, projectname)
logger.info("Getting list of nodes for %s", zenossselector)
device_list_url = "%szport/dmd/%s/getSubDevices" % (zenossbaseurl, zenossselector)
resp, content = zenoss_client.request(iri_to_uri(device_list_url), "GET")
expect_ok(resp, content)
# expect_xml(resp, content)
# ['BBCApplicationHostDevice at /zport/dmd/Devices/BBC/ApplicationHost/Platform/Delivery/Database/MySQL-Only/devices/db030.back.live.cwwtf.local>',
# ....
# 'BBCApplicationHostDevice at /zport/dmd/Devices/BBC/ApplicationHost/Platform/Delivery/InterPlatformMQ/Integration/devices/ipmq001.back.int.cwwtf.local']
# get rid of [' and of ']
content = content[2:][:-1]
# split on , then remove whitespace, then get rid of the start quote ' and end quote ',
devlist = [x.strip()[1:][:-1] for x in content.split(",")]
# split on " at " and throw away the first part
devlist = [x.split(" at ")[1].strip() for x in devlist]
# get rid of /zport/dmd/
devlist = [x.replace("/zport/dmd/","", 1) for x in devlist]
# get rid of Devices/BBC/ApplicationHost/Platform/Delivery/
devlist = [x.replace(zenossselector + "/","", 1) for x in devlist]
# so now we have "InterPlatformMQ/Integration/devices/ipmq001.back.int.cwwtf.local"
# split on "/devices/"
devlist = [x.split("/devices/") for x in devlist]
devlist = [(p.replace("/", "_"), n) for (p, n) in devlist]
# so now we have ("InterPlatformMQ_Integration", "ipmq001.back.int.cwwtf.local")
def get_env(n): # ipmq001.back.int.cwwtf.local
env = n[n.find(".")+1:] # back.int.cwwtf.local
env = env.replace("back.", "") # int.cwwtf.local
env = env.replace(".local", "") # int.cwwtf
env = env.split(".")[0] # int
return env
pools = {}
environments = {}
c = 0
for p, n in devlist:
e = get_env(n)
            # an example
# n -> db118.back.stage.telhc.local
# p -> Database_Postgres
# e -> stage
if not e in environments:
environment, created = Environment.objects.ensure(e)
if created:
logger.info("Created environment %s", unicode(environment))
environments[e] = environment
else:
environment = environments[e]
pool_success = True # lets be positive :)
if not p in pools:
logger.info("Ensuring pool %s", unicode(p))
try:
pool, created = self.ensure_pool(environment, p)
pools[p] = pool
except:
pass
pool_success = False
else:
pool = pools[p]
c += 1
node, created = Node.objects.ensure(environment, n)
if pool_success:
pool_membership, created = PoolMembership.objects.ensure(pool, node)
logger.info("Saw %d nodes", c)
crichtonCronJobStatus.objects.update_success('index_zenoss')
| 1.445313 | 1 |
tools/map_use_tool.py | BenSmithers/MultiHex2 | 0 | 12790863 | from math import pi
import os
from MultiHex2.tools import Basic_Tool
from MultiHex2.core.coordinates import screen_to_hex, hex_to_screen
from MultiHex2.actions import NullAction
from tools.basic_tool import ToolLayer
from PyQt5 import QtGui
art_dir = os.path.join( os.path.dirname(__file__),'..','assets','buttons')
class MapUse(Basic_Tool):
"""
Define the tool that can be used to move mobiles around, look at them, edit them, etc...
"""
def __init__(self, parent=None):
super().__init__(parent)
self.dimensions = self.parent.dimensions
@classmethod
def buttonIcon(cls):
assert(os.path.exists(os.path.join(art_dir, "temp.svg")))
return QtGui.QPixmap(os.path.join(art_dir, "temp.svg")).scaled(48,48)
@classmethod
def tool_layer(cls):
return ToolLayer.mapuse
def primary_mouse_released(self, event):
locid = screen_to_hex( event.scenePos() )
pos = hex_to_screen(locid)
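        # Equirectangular mapping of the hex's screen position: x in [0, width]
        # maps to longitude in [0, 2*pi], and y maps to latitude in
        # [pi/2, -pi/2] (screen y grows downward, hence the sign flip).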
longitude = 2*pi*pos.x()/self.dimensions[0]
latitude = -(pi*pos.y()/self.dimensions[1]) + 0.5*pi
self.parent.config_with(latitude,longitude)
self.parent.update_times()
# check for mobile here,
        return NullAction()
| 2.609375 | 3 |
hero_rpg.py | darroyo97/hero-rpg | 0 | 12790864 |
#!/usr/bin/env python
# In this simple RPG game, the hero fights the goblin. He has the options to:
# 1. fight goblin
# 2. do nothing - in which case the goblin will attack him anyway
# 3. flee
class Character:
def __init__(self, name, health, power):
self.health = health
self.power = power
self.name = name
    def alive(self):
        return self.health > 0
def attack(self, enemy):
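        # Damage is only applied to non-Zombie targets, so the Zombie can never
        # be killed; against it the player can only flee (option 3).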
if enemy.name != "Zombie" and self.alive() == True:
enemy.health -= self.power
print(f'{self.name} does {self.power} damage to the {enemy.name}')
            if not enemy.alive():
print(f'The {enemy.name} is dead')
class Hero(Character):
def __init__(self):
super(Hero, self).__init__("Hero", 10, 5)
# def attack(self, goblin):
# if self.alive() == True:
# goblin.health -= self.power
# print(f'You do {self.power} damage to the goblin')
# if goblin.alive() != True:
# print("The goblin is dead.")
class Goblin(Character):
def __init__(self):
super(Goblin, self).__init__("Goblin", 6, 2)
# def attack(self, hero):
# if self.alive() == True:
# hero.health -= self.power
# print(f'The goblin does {self.power} to you')
# if hero.alive() != True:
# print("You are dead.")
class Zombie(Character):
def __init__(self):
super(Zombie, self).__init__("Zombie", 10, 1)
hero = Hero()
goblin = Goblin()
zombie = Zombie()
def main(enemy):
# hero_health = 10
# hero_power = 5
# goblin_health = 6
# goblin_power = 2
while enemy.alive() and hero.alive():
print("You have {} health and {} power.".format(hero.health, hero.power))
print("The {} has {} health and {} power.".format(
enemy.name, enemy.health, enemy.power))
print()
print("What do you want to do?")
print(f'1. fight {enemy.name}')
print("2. do nothing")
print("3. flee")
print("> ", end=' ')
raw_input = input()
if raw_input == "1":
hero.attack(enemy)
elif raw_input == "2":
pass
elif raw_input == "3":
print("Goodbye.")
break
else:
print("Invalid input {}".format(raw_input))
if enemy.alive():
enemy.attack(hero)
main(zombie)
| 4.0625 | 4 |
src/dataset_miku.py | shidoy/talking-miku | 0 | 12790865 |
import requests
from bs4 import BeautifulSoup
from time import sleep
seq = []
print("Fetching", end="")
for i in range(1, 1000):
sleep(1.0)
url = "https://icws.indigo-bell.com/search?q=前川みく&st=n&page=" + str(i)
responce = requests.get(url)
print(".", end="", flush=True)
soup = BeautifulSoup(responce.text, "lxml")
lines = soup.find("tbody", class_="result").find_all("tr")
if lines == []:
break
for line in lines:
contents = line.find_all("td")[-1].contents
seq += contents
print("\nGot {} lines.".format(len(seq)))
seq = [line.replace("○○", "P").replace(" ", "") for line in seq if line != "なし"]
with open("./dataset/charactor/miku.txt", "w") as f:
f.write("\n".join(seq))
| 2.65625 | 3 |
setup.py | Arinerron/pystrfry | 3 | 12790866 |
#!/usr/bin/env python3
from setuptools import setup
with open('README.md', 'r') as f:
long_description = f.read()
setup(
name = 'pystrfry',
version = '1.0.3',
license = 'BSD-3-Clause',
description = 'a tool for solving those annoying strfry CTF challenges',
long_description = long_description,
long_description_content_type = 'text/markdown',
author = '<NAME>',
author_email = '<EMAIL>',
packages = ['strfry'],
scripts = ['scripts/strfry'],
python_requires = '>=3.6'
)
| 1.265625 | 1 |
setup.py | jlakkis/sciPENN | 1 | 12790867 | import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="sciPENN",
version="0.9.6",
author="<NAME>",
author_email="<EMAIL>",
description="A package for integrative and predictive analysis of CITE-seq data",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/jlakkis/sciPENN",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
package_dir={"": "src"},
packages=setuptools.find_packages(where="src"),
install_requires=['torch>=1.6.1', 'numba<=0.50.0', 'scanpy>=1.7.1', 'pandas>=1.1.5', 'numpy>=1.20.1', 'scipy>=1.6.1', 'tqdm>=4.59.0', 'anndata>=0.7.5'],
python_requires=">=3.7",
)
| 1.351563 | 1 |
main.py | k5924/TheVault | 2 | 12790868 |
import sys
import os
import random
import csv
import json
from platform import system
from string import ascii_uppercase, ascii_lowercase, digits, punctuation
from startPage import Ui_startPage
from genPassPage import Ui_passwordGen
from allAccountsPage import Ui_allAccounts
from AddAccountPage import Ui_addAccount
from viewAccountPage import Ui_viewAccount
from changePassPage import Ui_changePass
from importAccountsPage import Ui_importAccounts
from exportAccountsPage import Ui_exportPage
from PyQt5 import QtWidgets, QtCore, QtGui
from Crypto.Random import get_random_bytes
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
# global variables to store paths to the vault and key file
global KEYPATH, VAULTPATH, VIEWEDITEM
class MainWindow(QtWidgets.QWidget):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.ui = Ui_startPage()
self.ui.setupUi(self) # initializes start page
self.ui.startButton.clicked.connect(self.createVaultFiles)
self.ui.selectKeyFile.clicked.connect(self.getKeyFile)
self.ui.selectVaultFile.clicked.connect(self.getVaultFile)
self.ui.openButton.clicked.connect(self.openVaultFiles)
# button variables which execute a specific function
def createVaultFiles(self):
key = get_random_bytes(32) # 32 bytes is 256 bits
data = ''.encode('utf-8') # basic data for file to encrypt
desktopPath = getPathToDesktop() # gets path to desktop
keyFile = open(desktopPath + "\\key.bin", "wb")
keyFile.write(key) # writes encryption key to file
keyFile.close
cipher = AES.new(key, AES.MODE_CBC)
ciphered_data = cipher.encrypt(pad(data, AES.block_size))
vaultFile = open(desktopPath + "\\vault.bin", "wb") # creates vault file
vaultFile.write(cipher.iv)
vaultFile.write(ciphered_data)
vaultFile.close()
Alert("Process Completed", QtWidgets.QMessageBox.Information, "Created vault.bin and key.bin")
# Alert function to reuse the code to generate a QMessageBox
def getKeyFile(self):
file = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open file', "", "All Files (*)") # lets user choose files from explorer
url = QtCore.QUrl.fromLocalFile(file[0]) # gets path to file and stores it as an object
self.ui.keyFileLabel.setText(url.fileName()) # adjusts file name in gui
self.ui.keyFileLabel.adjustSize() # adjusts size of text wrapper for file name in gui
self.keyPath = file[0] # makes keyPath accessible in all of MainWindow class
def getVaultFile(self):
file = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open file', "", "All Files (*)") # lets user choose files from explorer
url = QtCore.QUrl.fromLocalFile(file[0]) # gets path to file and stores it as an object
self.ui.vaultFileLabel.setText(url.fileName()) # adjusts file name in gui
self.ui.vaultFileLabel.adjustSize() # adjusts size of text wrapper for file name in gui
self.vaultPath = file[0] # makes vaultPath accessible in all of MainWindow class
def openVaultFiles(self):
keyFile = self.ui.keyFileLabel.text()
vaultFile = self.ui.vaultFileLabel.text()
if (keyFile == "Key File") or (vaultFile == "Vault File"):
# checks that a Key File or Vault file have been selected
Alert("Error", QtWidgets.QMessageBox.Critical,
"Either one or no files were selected. Please select files to open the vault")
# Alert function to display error QMessageBox
else:
# exception handling
try:
key, iv, data = getData(self.keyPath, self.vaultPath)
# display new window for generating password or viewing accounts
self.newWindow = generatePasswordWin()
self.newWindow.show() # show new window
self.hide() # close old window
except (ValueError, FileNotFoundError) as e:
Alert("Error", QtWidgets.QMessageBox.Critical, "Incorrect files selected")
# Alert function to show error message
class generatePasswordWin(QtWidgets.QWidget):
# displays generate password window when vault is open
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.ui = Ui_passwordGen()
self.ui.setupUi(self)
self.ui.genBtn.clicked.connect(self.genPassword)
self.ui.saveBtn.clicked.connect(self.savePassword)
self.ui.viewAccountsTab.clicked.connect(self.openAccountsPage)
def genPassword(self):
passwordOptions = ""
if self.ui.lowerCaseCheck.isChecked() or self.ui.upperCaseCheck.isChecked() or self.ui.numbersCheck.isChecked() or self.ui.specialCharsCheck.isChecked():
if self.ui.lowerCaseCheck.isChecked():
passwordOptions += ascii_lowercase
if self.ui.upperCaseCheck.isChecked():
passwordOptions += ascii_uppercase
if self.ui.numbersCheck.isChecked():
passwordOptions += digits
if self.ui.specialCharsCheck.isChecked():
passwordOptions += punctuation.replace(',', '')
lengths = [i for i in range(8, 17)]
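            # generated passwords are 8-16 characters long; note that the
            # stdlib `random` module is not cryptographically secure, so a
            # hardened build might draw from `secrets.choice` instead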
passLength = random.choice(lengths)
password = ""
for i in range(0, passLength):
password += random.choice(passwordOptions)
self.ui.generatedPassLabel.setText(password)
self.ui.nameOfAccountEdit.setEnabled(True)
self.ui.usernameEdit.setEnabled(True)
self.ui.saveBtn.setEnabled(True)
else:
Alert("Error", QtWidgets.QMessageBox.Critical, "No options to generate password from")
def savePassword(self):
if (self.ui.nameOfAccountEdit.text() == (None or "")) or (self.ui.usernameEdit.text() == (None or "")):
Alert("Error", QtWidgets.QMessageBox.Critical,
"Account name or Username has been left empty")
else: # displays any error message if the user input fields are empty or incorrectly entered
if (self.ui.nameOfAccountEdit.text()[0] == " ") or (self.ui.nameOfAccountEdit.text()[-1] == " "):
Alert("Error", QtWidgets.QMessageBox.Critical,
"Please remove spaces from the beginning or end of Account name")
elif " " in self.ui.usernameEdit.text():
Alert("Error", QtWidgets.QMessageBox.Critical,
"Please remove spaces from Username")
elif ("," in self.ui.nameOfAccountEdit.text()) or ("," in self.ui.usernameEdit.text()):
Alert("Error", QtWidgets.QMessageBox.Critical,
"Please remove commas from name of account or username")
else:
nameOfAccount = self.ui.nameOfAccountEdit.text()
username = self.ui.usernameEdit.text()
password = self.ui.generatedPassLabel.text()
writeData(nameOfAccount, username, password)
Alert("Process Completed", QtWidgets.QMessageBox.Information, "Account saved")
# reset check boxes after saving accounts
self.ui.lowerCaseCheck.setChecked(False)
self.ui.upperCaseCheck.setChecked(False)
self.ui.numbersCheck.setChecked(False)
self.ui.specialCharsCheck.setChecked(False)
# the code below resets that generatedPassLabel, nameOfAccount input and username input after saving
self.ui.generatedPassLabel.setText("")
self.ui.nameOfAccountEdit.setText("")
self.ui.usernameEdit.setText("")
self.ui.nameOfAccountEdit.setEnabled(False)
self.ui.usernameEdit.setEnabled(False)
def openAccountsPage(self): # opens window to view all accounts
self.newWindow = allAccountsWin()
self.newWindow.show() # show new window
self.hide() # close old window
class allAccountsWin(QtWidgets.QWidget): # view all accounts window
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.ui = Ui_allAccounts()
self.ui.setupUi(self)
# button which links to generate password window
self.ui.genPassTab.clicked.connect(lambda: self.changeWindow(generatePasswordWin()))
self.loadAccounts()
self.ui.accountsTable.itemClicked.connect(self.viewItem)
self.ui.addAccountBtn.clicked.connect(lambda: self.changeWindow(addAccountWin()))
self.ui.searchBox.returnPressed.connect(self.searchAccounts)
self.ui.importBtn.clicked.connect(lambda: self.changeWindow(importWin()))
self.ui.exportBtn.clicked.connect(lambda: self.changeWindow(exportWin()))
def changeWindow(self, classToAccess): # takes new window argument
self.newWindow = classToAccess
self.newWindow.show() # show new window
self.hide() # close old window
def loadAccounts(self): # added feature to read accounts from file
global KEYPATH, VAULTPATH
self.searchedAccounts = {}
self.ui.accountsTable.setEditTriggers(QtWidgets.QTableWidget.NoEditTriggers)
key, iv, data = getData(KEYPATH, VAULTPATH)
data = data.decode('utf-8')
self.count = 1 # count for resetting all accounts view
if data != "":
row = data.split('\n')
self.accounts = {}
i = 0
for value in row:
if value != "":
self.accounts[i] = value.split(',')
i += 1
self.ui.accountsTable.setRowCount(0) # removes all data in table before making table
for n, key in enumerate(sorted(self.accounts.keys())): # displays code in table in window
self.ui.accountsTable.insertRow(n)
newitem = QtWidgets.QTableWidgetItem(self.accounts[key][0])
viewLabel = QtWidgets.QTableWidgetItem("View")
viewLabel.setTextAlignment(QtCore.Qt.AlignCenter)
self.ui.accountsTable.setItem(n, 0, newitem)
self.ui.accountsTable.setItem(n, 1, viewLabel)
viewLabel.setBackground(QtGui.QColor(210, 210, 210))
viewLabel.setFlags(viewLabel.flags() ^ QtCore.Qt.ItemIsEditable)
else: # else disables table
self.ui.accountsTable.setEnabled(False)
self.ui.searchBox.setEnabled(False)
def viewItem(self):
global VIEWEDITEM
if (self.ui.accountsTable.currentItem().text() == "View") and (self.ui.accountsTable.currentColumn() == 1):
row = self.ui.accountsTable.currentRow()
if not(self.searchedAccounts): # checks if searchedAccounts is empty
VIEWEDITEM = self.accounts[row]
else:
for n, key in enumerate(sorted(self.searchedAccounts.keys())):
if row == n:
VIEWEDITEM = self.accounts[key]
self.changeWindow(viewAccountWin())
def searchAccounts(self):
term = self.ui.searchBox.text()
if term != (None or ""):
self.searchedAccounts = self.accounts.copy() # copy sets values to new variable to edit
self.count -= 1 # decreases count for table to reset when nothing in searchBox
self.ui.accountsTable.setRowCount(0) # deletes tables contents
for n, key in enumerate(sorted(self.accounts.keys())): # displays code in table in window
if not(term.lower() in self.accounts[key][0].lower()):
self.searchedAccounts.pop(key) # removes values not in search
# code below works just like in loadAccounts but with search terms
for n, key in enumerate(sorted(self.searchedAccounts.keys())):
self.ui.accountsTable.insertRow(n)
newitem = QtWidgets.QTableWidgetItem(self.searchedAccounts[key][0])
viewLabel = QtWidgets.QTableWidgetItem("View")
viewLabel.setTextAlignment(QtCore.Qt.AlignCenter)
self.ui.accountsTable.setItem(n, 0, newitem)
self.ui.accountsTable.setItem(n, 1, viewLabel)
viewLabel.setBackground(QtGui.QColor(210, 210, 210))
viewLabel.setFlags(viewLabel.flags() ^ QtCore.Qt.ItemIsEditable)
else: # if search box is empty
if self.count <= 0: # comparison to make sure you only run loadAccounts after a search
self.searchedAccounts = {}
self.loadAccounts()
class addAccountWin(QtWidgets.QWidget):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.ui = Ui_addAccount()
self.ui.setupUi(self)
self.ui.cancelBtn.clicked.connect(self.goBack)
self.ui.saveBtn.clicked.connect(self.saveAccount)
def goBack(self):
self.newWindow = allAccountsWin()
self.newWindow.show()
self.hide()
def saveAccount(self):
if (self.ui.nameOfAccountEdit.text() == (None or "")) or (self.ui.usernameEdit.text() == (None or "")) or (self.ui.passwordEdit.text() == (None or "")):
Alert("Error", QtWidgets.QMessageBox.Critical,
"Account name, Username or the Password field has been left empty")
else: # displays any error message if the user input fields are empty or incorrectly entered
if (self.ui.nameOfAccountEdit.text()[0] == " ") or (self.ui.nameOfAccountEdit.text()[-1] == " "):
Alert("Error", QtWidgets.QMessageBox.Critical,
"Please remove spaces from the beginning or end of Account name")
elif (" " in self.ui.usernameEdit.text()) or (" " in self.ui.passwordEdit.text()):
Alert("Error", QtWidgets.QMessageBox.Critical,
"Please remove spaces from Username or Password")
elif ("," in self.ui.nameOfAccountEdit.text()) or ("," in self.ui.usernameEdit.text()) or ("," in self.ui.passwordEdit.text()):
Alert("Error", QtWidgets.QMessageBox.Critical,
"Please remove commas from Name of account, Username or Password")
else:
nameOfAccount = self.ui.nameOfAccountEdit.text()
username = self.ui.usernameEdit.text()
password = self.ui.passwordEdit.text()
writeData(nameOfAccount, username, password)
Alert("Process Completed", QtWidgets.QMessageBox.Information, "Account saved")
self.goBack()
class viewAccountWin(QtWidgets.QWidget):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.ui = Ui_viewAccount()
self.ui.setupUi(self)
self.ui.backBtn.clicked.connect(lambda: self.changeWindow(allAccountsWin()))
self.ui.nameOfAccountLbl.setText(VIEWEDITEM[0])
self.ui.nameOfAccountLbl.adjustSize()
self.ui.usernameLbl.setText(VIEWEDITEM[1])
self.ui.usernameLbl.adjustSize()
self.ui.passwordLbl.setText(VIEWEDITEM[2])
self.ui.passwordLbl.adjustSize()
self.ui.copyUserBtn.clicked.connect(self.copyUsername)
self.ui.copyPassBtn.clicked.connect(self.copyPassword)
self.ui.changePassBtn.clicked.connect(lambda: self.changeWindow(changePassWin()))
self.ui.deleteBtn.clicked.connect(self.deleteAccount)
def changeWindow(self, classToAccess):
self.newWindow = classToAccess
self.newWindow.show()
self.hide()
def copyUsername(self):
cb = QtGui.QGuiApplication.clipboard()
cb.setText(self.ui.usernameLbl.text(), mode=cb.Clipboard)
Alert("Confirmed", QtWidgets.QMessageBox.Information,
"Username copied to clipboard")
def copyPassword(self):
cb = QtGui.QGuiApplication.clipboard()
cb.setText(self.ui.passwordLbl.text(), mode=cb.Clipboard)
Alert("Confirmed", QtWidgets.QMessageBox.Information,
"Password copied to clipboard")
def deleteAccount(self):
message = QtWidgets.QMessageBox()
message.setWindowTitle("Warning")
message.setIcon(QtWidgets.QMessageBox.Warning)
message.setText("Are you sure you want to delete the account?")
message.setStandardButtons(QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.Cancel)
message.setDefaultButton(QtWidgets.QMessageBox.Cancel)
message.buttonClicked.connect(self.confirmDelete)
message.exec_()
def confirmDelete(self, clickedBtn):
if clickedBtn.text() == "&Yes":
key, iv, data = getData(KEYPATH, VAULTPATH)
data = data.decode('utf-8')
row = data.split('\n')
accounts = []
for value in row:
if value != "":
# stores accounts as nested lists seperated by value
accounts.append(value.split(','))
for account in accounts:
if account == VIEWEDITEM:
index = accounts.index(account)
accounts.pop(index)
# when this code was a for loop in range len(accounts) sometimes it would give
# a random error when lots of accounts were added and then someone attempts to delete an account
# although the code is now longer, this fixes the index error issue
updateAccounts(accounts) # calls updateAccounts
self.changeWindow(allAccountsWin())
class changePassWin(QtWidgets.QWidget):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.ui = Ui_changePass()
self.ui.setupUi(self)
self.ui.nameOfAccountLbl.setText(VIEWEDITEM[0])
self.ui.usernameLbl.setText(VIEWEDITEM[1])
self.ui.cancelBtn.clicked.connect(self.goBack)
self.ui.changePassBtn.clicked.connect(self.changePassword)
def goBack(self):
self.newWindow = viewAccountWin()
self.newWindow.show()
self.hide()
def changePassword(self):
if (self.ui.passwordEdit.text() == (None or "")) or (self.ui.confirmPassEdit.text() == (None or "")):
Alert("Error", QtWidgets.QMessageBox.Critical,
"One or Both of the password fields are empty")
else:
if self.ui.passwordEdit.text() != self.ui.confirmPassEdit.text():
Alert("Error", QtWidgets.QMessageBox.Critical, "Passwords dont match")
elif (" " in self.ui.passwordEdit.text()) or (" " in self.ui.confirmPassEdit.text()):
Alert("Error", QtWidgets.QMessageBox.Critical, "Remove spaces from password fields")
elif ("," in self.ui.passwordEdit.text()) or ("," in self.ui.confirmPassEdit.text()):
Alert("Error", QtWidgets.QMessageBox.Critical, "Remove commas from password fields")
else:
key, iv, data = getData(KEYPATH, VAULTPATH)
data = data.decode('utf-8')
row = data.split('\n')
accounts = []
for value in row:
if value != "":
# stores accounts as nested lists seperated by value
accounts.append(value.split(','))
for i in range(len(accounts)):
if accounts[i] == VIEWEDITEM:
VIEWEDITEM[2] = self.ui.passwordEdit.text() # updates the item being viewed
accounts[i] = VIEWEDITEM # updates the item in the accounts nested list
updateAccounts(accounts) # calls updateAccounts
Alert("Confirmed", QtWidgets.QMessageBox.Information, "Password Changed")
self.goBack() # go to view account page after password is changed successfully
class importWin(QtWidgets.QWidget):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.ui = Ui_importAccounts()
self.ui.setupUi(self)
self.ui.cancelBtn.clicked.connect(self.goBack)
self.ui.selectFileBtn.clicked.connect(self.getFile)
self.ui.importBtn.clicked.connect(self.importData)
def goBack(self):
self.newWindow = allAccountsWin()
self.newWindow.show() # show new window
self.hide()
def getFile(self):
file = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open file', "", "All Files (*)") # lets user choose files from explorer
url = QtCore.QUrl.fromLocalFile(file[0]) # gets path to file and stores it as an object
self.ui.fileLbl.setText(url.fileName()) # adjusts file name in gui
self.ui.fileLbl.adjustSize() # adjusts size of text wrapper for file name in gui
self.Path = file[0] # makes path accessible in importWin
def importData(self):
if self.ui.fileLbl.text() == "Select file to import from":
# checks that a Key File or Vault file have been selected
Alert("Error", QtWidgets.QMessageBox.Critical,
"No file was selected. Please select a file to import from")
# Alert function to display error QMessageBox
else:
accounts = []
if self.ui.fileLbl.text().lower().endswith(".csv"):
with open(self.Path, 'r') as csvFile:
reader = csv.DictReader(csvFile, delimiter=',')
for row in reader:
if ('name' in row) and ('username' in row) and ('password' in row): # lastpass format
if (row['username'] != "") and (row['password'] != "") and (row['name'] != ""):
values = [row['name'], row['username'], row['password']]
accounts.append(values)
elif ('name' in row) and ('login_username' in row) and ('login_password' in row): # bitwarden format
if (row['name'] != "") and (row['login_username'] != "") and (row['login_password'] != ""):
values = [row['name'], row['login_username'], row['login_password']]
accounts.append(values)
if len(accounts) < 1:
Alert("Error", QtWidgets.QMessageBox.Critical,
"CSV file format not supported or no data to import was found")
else:
for item in accounts:
writeData(item[0], item[1], item[2])
Alert("Confirmed", QtWidgets.QMessageBox.Information,
"Imported accounts from .CSV")
self.goBack()
elif self.ui.fileLbl.text().lower().endswith(".json"):
with open(self.Path) as jsonFile:
data = json.load(jsonFile)
if 'items' in data:
for item in data['items']: # checks for bitwarden format
if 'login' in item:
if ('username' in item['login']) and ('password' in item['login']):
if (item['login']['username'] is not None) and (item['login']['password'] is not None):
values = [item['name'], item['login']
['username'], item['login']['password']]
accounts.append(values)
else:
Alert("Error", QtWidgets.QMessageBox.Critical,
"JSON file format not supported")
if len(accounts) < 1:
Alert("Error", QtWidgets.QMessageBox.Critical,
"JSON file has no data to import")
else:
for item in accounts:
writeData(item[0], item[1], item[2])
Alert("Confirmed", QtWidgets.QMessageBox.Information,
"Imported accounts from .JSON")
self.goBack()
else:
Alert("Error", QtWidgets.QMessageBox.Critical, "File type not supported")
class exportWin(QtWidgets.QWidget):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.ui = Ui_exportPage()
self.ui.setupUi(self)
self.ui.cancelBtn.clicked.connect(self.goBack)
self.ui.csvBtn.clicked.connect(self.exportCSV)
self.ui.jsonBtn.clicked.connect(self.exportJSON)
def goBack(self):
self.newWindow = allAccountsWin()
self.newWindow.show()
self.hide()
def exportCSV(self):
key, iv, data = getData(KEYPATH, VAULTPATH)
data = data.decode('utf-8')
        path = os.path.join(getPathToDesktop(), "Accounts.csv")
if data != "":
row = data.split('\n')
accounts = []
for value in row:
if value != "":
terms = value.split(',')
temp = {}
temp["name"], temp["username"], temp["password"] = terms[0], terms[1], terms[2]
accounts.append(temp)
with open(path, 'w') as file: # writes to csv file in lastpass format as lastpass' format is widely supported
columns = ['url', 'username', 'password', 'extra', 'name', 'grouping', 'fav']
writer = csv.DictWriter(file, fieldnames=columns, lineterminator='\n')
writer.writeheader()
writer.writerows(accounts)
Alert("Confirmed", QtWidgets.QMessageBox.Information, "CSV file successfully created")
else:
Alert("Error", QtWidgets.QMessageBox.Critical, "No accounts to export")
self.goBack()
def exportJSON(self):
key, iv, data = getData(KEYPATH, VAULTPATH)
data = data.decode('utf-8')
        path = os.path.join(getPathToDesktop(), "Accounts.json")
if data != "":
row = data.split('\n')
accounts = {}
account = []
for value in row:
# json uses None for null and False for false when writing to a json
if value != "":
terms = value.split(',')
loginValues = {}
uris = [{"match": None, "uri": "http://"}]
loginValues['uris'], loginValues['username'], loginValues['password'], loginValues['totp'] = uris, terms[1], terms[2], None
temp = {}
temp['id'], temp['organizationId'], temp['folderId'], temp['type'], temp['name'], temp['notes'], temp[
'favorite'], temp['login'], temp['collectionIds'] = "", None, None, 1, terms[0], None, False, loginValues, False
account.append(temp)
accounts['items'] = account
with open(path, 'w') as file: # writes to csv in lastpass format
json.dump(accounts, file, indent=4)
Alert("Confirmed", QtWidgets.QMessageBox.Information, "JSON file successfully created")
else:
Alert("Error", QtWidgets.QMessageBox.Critical, "No accounts to export")
self.goBack()
def getPathToDesktop():
# path to desktop is different on windows and unix systems as on windows the drive the desktop is on can be changed
if system() == 'Windows':
        desktopPath = os.environ["HOMEPATH"] + "\\Desktop" # finds path to desktop
for driveLetter in ascii_uppercase: # find drive desktop folder is on
if os.path.exists("{0}:{1}".format(driveLetter, desktopPath)):
desktopPath = "{0}:{1}".format(driveLetter, desktopPath)
else:
desktopPath = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop')
return desktopPath
def Alert(title, icon, text):
    # creates a QMessageBox based on the function arguments
message = QtWidgets.QMessageBox()
message.setWindowTitle(title)
message.setIcon(icon)
message.setText(text)
message.exec_()
def getData(pathToKey, pathToVault): # allows me to access Paths throughout document
global KEYPATH, VAULTPATH
KEYPATH, VAULTPATH = pathToKey, pathToVault
readVaultFile = open(VAULTPATH, 'rb') # Open the file to read bytes
iv = readVaultFile.read(16) # Read the iv out - this is 16 bytes long
ciphered_data = readVaultFile.read() # Read the rest of the data
readVaultFile.close()
readKeyFile = open(KEYPATH, 'rb')
key = readKeyFile.read()
readKeyFile.close()
cipher = AES.new(key, AES.MODE_CBC, iv=iv) # Setup cipher
# Decrypt and then up-pad the result
data = unpad(cipher.decrypt(ciphered_data), AES.block_size)
return key, iv, data
def writeData(nameOfAccount, username, password): # writes name of account, username and password to vaultFile
global KEYPATH, VAULTPATH
key, iv, data = getData(KEYPATH, VAULTPATH)
data += ("{},{},{}\n".format(nameOfAccount, username, password)).encode('utf-8')
cipher = AES.new(key, AES.MODE_CBC, iv=iv)
ciphered_data = cipher.encrypt(pad(data, AES.block_size))
vaultFile = open(VAULTPATH, "wb") # creates vault file
vaultFile.write(cipher.iv)
vaultFile.write(ciphered_data)
vaultFile.close()
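# Illustrative sketch of the vault file layout assumed by getData/writeData
# (a hypothetical helper, not called anywhere): the first 16 bytes are the
# AES-CBC IV and the rest is the PKCS7-padded ciphertext of the
# "name,username,password\n" records.
def _vault_layout_demo(key):
    # key must be 16, 24 or 32 bytes long for AES
    iv = os.urandom(16)  # fresh 16-byte IV
    cipher = AES.new(key, AES.MODE_CBC, iv=iv)
    blob = iv + cipher.encrypt(pad(b"example,user,pass\n", AES.block_size))
    # reading back: split off the IV, decrypt, then un-pad
    recovered = unpad(AES.new(key, AES.MODE_CBC, iv=blob[:16]).decrypt(blob[16:]), AES.block_size)
    return recovered  # b"example,user,pass\n"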
def updateAccounts(data):
global KEYPATH, VAULTPATH
key, iv, oldData = getData(KEYPATH, VAULTPATH)
accounts = []
for value in data:
row = ','.join(value)
accounts.append(row)
newData = b''
for line in accounts:
newData += ("{}\n".format(line)).encode('utf-8')
cipher = AES.new(key, AES.MODE_CBC, iv=iv)
ciphered_data = cipher.encrypt(pad(newData, AES.block_size))
vaultFile = open(VAULTPATH, "wb") # creates vault file
vaultFile.write(cipher.iv)
vaultFile.write(ciphered_data)
vaultFile.close()
if __name__ == "__main__":
# displays when starting application
app = QtWidgets.QApplication(sys.argv)
startPage = MainWindow()
startPage.show()
sys.exit(app.exec_())
| 2.53125 | 3 |
website/views/__init__.py | luxutao/django-blog | 0 | 12790869 | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
"""
@__Create Time__ = 2017/12/11 10:34
@__Description__ = " "
"""
| 1.234375 | 1 |
altar/altar/bayesian/__init__.py | AlTarFramework/altar | 32 | 12790870 | # -*- python -*-
# -*- coding: utf-8 -*-
#
# <NAME> <<EMAIL>>
#
# (c) 2013-2020 parasim inc
# (c) 2010-2020 california institute of technology
# all rights reserved
#
# the package
import altar
# and the protocols
from .Controller import Controller as controller
from .Sampler import Sampler as sampler
from .Scheduler import Scheduler as scheduler
from .Solver import Solver as solver
# implementations
@altar.foundry(
implements=controller,
tip="a Bayesian controller that implements simulated annealing")
def annealer():
# grab the factory
from .Annealer import Annealer
# attach its docstring
__doc__ = Annealer.__doc__
# and return it
return Annealer
@altar.foundry(
implements=scheduler,
tip="a Bayesian scheduler based on the COV algorithm")
def cov():
# grab the factory
from .COV import COV
# attach its docstring
__doc__ = COV.__doc__
# and return it
return COV
@altar.foundry(
implements=solver,
tip="a solver for δβ based on a Brent minimizer from gsl")
def brent():
# grab the factory
from .Brent import Brent
# attach its docstring
__doc__ = Brent.__doc__
# and return it
return Brent
@altar.foundry(
implements=solver,
tip="a solver for δβ based on a naive grid search")
def grid():
# grab the factory
from .Grid import Grid
# attach its docstring
__doc__ = Grid.__doc__
# and return it
return Grid
@altar.foundry(
implements=sampler,
tip="a Bayesian sampler based on the Metropolis algorithm")
def metropolis():
# grab the factory
from .Metropolis import Metropolis
# attach its docstring
__doc__ = Metropolis.__doc__
# and return it
return Metropolis
@altar.foundry(
implements=altar.simulations.monitor,
tip="a monitor that times the various simulation phases")
def profiler():
# grab the factory
from .Profiler import Profiler
# attach its docstring
__doc__ = Profiler.__doc__
# and return it
return Profiler
# end of file
| 2.0625 | 2 |
tti/indicators/_volume_oscillator.py | Bill-Software-Engineer/trading-technical-indicators | 68 | 12790871 | """
Trading-Technical-Indicators (tti) python library
File name: _volume_oscillator.py
Implements the Volume Oscillator technical indicator.
"""
import pandas as pd
from ._technical_indicator import TechnicalIndicator
from ..utils.constants import TRADE_SIGNALS
from ..utils.exceptions import NotEnoughInputData, WrongTypeForInputParameter,\
WrongValueForInputParameter
class VolumeOscillator(TechnicalIndicator):
"""
Volume Oscillator Technical Indicator class implementation.
Args:
input_data (pandas.DataFrame): The input data. Required input column
is ``volume``. The index is of type ``pandas.DatetimeIndex``.
long_period (int, default=5): The past periods to be used for the
calculation of the long moving average.
short_period (int, default=2): The past periods to be used for the
calculation of the short moving average.
fill_missing_values (bool, default=True): If set to True, missing
values in the input data are being filled.
Attributes:
_input_data (pandas.DataFrame): The ``input_data`` after preprocessing.
_ti_data (pandas.DataFrame): The calculated indicator. Index is of type
``pandas.DatetimeIndex``. It contains one column, the ``vosc``.
_properties (dict): Indicator properties.
_calling_instance (str): The name of the class.
Raises:
WrongTypeForInputParameter: Input argument has wrong type.
WrongValueForInputParameter: Unsupported value for input argument.
NotEnoughInputData: Not enough data for calculating the indicator.
TypeError: Type error occurred when validating the ``input_data``.
ValueError: Value error occurred when validating the ``input_data``.
"""
def __init__(self, input_data, long_period=5, short_period=2,
fill_missing_values=True):
# Validate and store if needed, the input parameters
if isinstance(long_period, int):
if long_period > 0:
self._long_period = long_period
else:
raise WrongValueForInputParameter(
long_period, 'long_period', '>0')
else:
raise WrongTypeForInputParameter(
type(long_period), 'long_period', 'int')
if isinstance(short_period, int):
if short_period > 0:
self._short_period = short_period
else:
raise WrongValueForInputParameter(
short_period, 'short_period', '>0')
else:
raise WrongTypeForInputParameter(
type(short_period), 'short_period', 'int')
if self._long_period <= self._short_period:
raise WrongValueForInputParameter(
                long_period, 'long_period',
'> short_period [' + str(self._short_period) + ']')
# Control is passing to the parent class
super().__init__(calling_instance=self.__class__.__name__,
input_data=input_data,
fill_missing_values=fill_missing_values)
def _calculateTi(self):
"""
Calculates the technical indicator for the given input data. The input
data are taken from an attribute of the parent class.
Returns:
pandas.DataFrame: The calculated indicator. Index is of type
``pandas.DatetimeIndex``. It contains one column, the ``vosc``.
Raises:
NotEnoughInputData: Not enough data for calculating the indicator.
"""
# Not enough data for the requested period
if len(self._input_data.index) < self._long_period:
raise NotEnoughInputData('Volume Oscillator', self._long_period,
len(self._input_data.index))
vosc = pd.DataFrame(index=self._input_data.index, columns=['vosc'],
data=None, dtype='float64')
vosc['vosc'] = self._input_data['volume'].rolling(
window=self._short_period, min_periods=self._short_period,
center=False, win_type=None, on=None, axis=0, closed=None
).mean() - self._input_data['volume'].rolling(
window=self._long_period, min_periods=self._long_period,
center=False, win_type=None, on=None, axis=0, closed=None).mean()
return vosc.round(4)
def getTiSignal(self):
"""
Calculates and returns the trading signal for the calculated technical
indicator.
Returns:
{('hold', 0), ('buy', -1), ('sell', 1)}: The calculated trading
signal.
"""
# Not enough data for calculating trading signal
if len(self._ti_data.index) < 3:
return TRADE_SIGNALS['hold']
if (0 < self._ti_data['vosc'].iat[-3] < self._ti_data['vosc'].iat[-2] <
self._ti_data['vosc'].iat[-1]):
return TRADE_SIGNALS['buy']
if (self._ti_data['vosc'].iat[-3] > self._ti_data['vosc'].iat[-2] >
self._ti_data['vosc'].iat[-1] > 0):
return TRADE_SIGNALS['sell']
return TRADE_SIGNALS['hold']
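# A minimal usage sketch (illustrative only: the volume figures are invented,
# and it assumes the TechnicalIndicator base class accepts a volume-only
# frame with a DatetimeIndex, as the class docstring describes).
if __name__ == '__main__':
    sample = pd.DataFrame(
        {'volume': [100., 120., 90., 150., 130., 170., 160., 180., 200., 190.]},
        index=pd.date_range('2020-01-01', periods=10, freq='D'))
    indicator = VolumeOscillator(input_data=sample, long_period=5, short_period=2)
    print(indicator.getTiSignal())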
| 3.0625 | 3 |
bobby/tests/test_cass.py | rackerlabs/bobby | 0 | 12790872 | <gh_stars>0
# Copyright 2013 Rackspace, Inc.
"""
Tests for bobby.cass
"""
from bobby import cass
import mock
from silverberg.client import CQLClient
from twisted.internet import defer
from twisted.trial import unittest
class _DBTestCase(unittest.TestCase):
"""Abstract DB test case."""
def setUp(self):
"""Patch CQLClient."""
self.client = mock.create_autospec(CQLClient)
class TestGetGroupsByTenantId(_DBTestCase):
"""Test bobby.cass.get_groups_by_tenant_id."""
    def test_get_groups_by_tenant_id(self):
"""Return all the groups by a given tenant id."""
expected = []
self.client.execute.return_value = defer.succeed(expected)
d = cass.get_groups_by_tenant_id(self.client, '101010')
result = self.successResultOf(d)
self.assertEqual(result, expected)
self.client.execute.assert_called_once_with(
'SELECT * FROM groups WHERE "tenantId"=:tenantId;',
{'tenantId': '101010'},
1)
class TestGetGroupById(_DBTestCase):
"""Test bobby.cass.get_group_by_id."""
def test_get_group_by_id(self):
"""Returns a single dict, rather than a single item list."""
expected = {'groupId': 'group-abc',
'tenantId': '101010',
'notification': 'notification-ghi',
'notificationPlan': 'notificationPlan-jkl'}
self.client.execute.return_value = defer.succeed([expected])
d = cass.get_group_by_id(self.client, '101010', 'group-abc')
result = self.successResultOf(d)
self.assertEqual(result, expected)
self.client.execute.assert_called_once_with(
'SELECT * FROM groups WHERE "tenantId"=:tenantId AND "groupId"=:groupId;',
{'tenantId': '101010', 'groupId': 'group-abc'},
1)
def test_get_group_by_id_no_such_id(self):
"""Raises an error if no group is found."""
self.client.execute.return_value = defer.succeed([])
d = cass.get_group_by_id(self.client, '101010', 'group-abc')
result = self.failureResultOf(d)
self.assertTrue(result.check(cass.ResultNotFoundError))
def test_get_group_by_id_integrity_problems(self):
"""Raises an error if more than one group is found."""
self.client.execute.return_value = defer.succeed(['group1', 'group2'])
d = cass.get_group_by_id(self.client, '101010', 'group-abc')
result = self.failureResultOf(d)
self.assertTrue(result.check(cass.ExcessiveResultsError))
class TestCreateGroup(_DBTestCase):
"""Test bobby.cass.create_group."""
def test_create_group(self):
"""Creates a group in Cassandra."""
expected = {'groupId': 'group-abc',
'tenantId': '101010',
'notification': 'notification-ghi',
'notificationPlan': 'notificationPlan-jkl'}
def execute(query, data, consistency):
if 'INSERT' in query:
return defer.succeed(None)
elif 'SELECT' in query:
return defer.succeed([expected])
self.client.execute.side_effect = execute
d = cass.create_group(self.client, expected['tenantId'], expected['groupId'],
expected['notification'], expected['notificationPlan'])
result = self.successResultOf(d)
self.assertEqual(result, expected)
self.assertEqual(
self.client.execute.mock_calls,
[mock.call(
' '.join([
'INSERT INTO groups ("tenantId", "groupId", "notification", "notificationPlan")',
'VALUES (:tenantId, :groupId, :notification, :notificationPlan);']),
{'notificationPlan': 'notificationPlan-jkl',
'notification': 'notification-ghi',
'groupId': 'group-abc',
'tenantId': '101010'},
1),
mock.call(
'SELECT * FROM groups WHERE "tenantId"=:tenantId AND "groupId"=:groupId;',
{'tenantId': '101010', 'groupId': 'group-abc'},
1)])
class TestDeleteGroup(_DBTestCase):
"""Test bobby.cass.delete_group."""
def test_delete_group(self):
"""Deletes a group."""
self.client.execute.return_value = defer.succeed(None)
d = cass.delete_group(self.client, '101010', 'group-abc')
self.successResultOf(d)
self.client.execute.assert_called_once_with(
'DELETE FROM groups WHERE "groupId"=:groupId AND "tenantId"=:tenantId;',
{'groupId': 'group-abc', 'tenantId': '101010'},
1)
class TestGetServersByGroupId(_DBTestCase):
"""Test bobby.cass.get_servers_by_group_id."""
def test_get_servers_by_group_id(self):
"""Returns all servers by a given group_id."""
expected = [{'serverId': 'server-abc',
'groupId': 'group-def',
'entityId': 'entity-ghi'},
{'serverId': 'server-xyz',
'groupId': 'group-def',
'entityId': 'entity-uvw'}]
self.client.execute.return_value = defer.succeed(expected)
d = cass.get_servers_by_group_id(self.client, '101010', 'group-def')
result = self.successResultOf(d)
self.assertEqual(result, expected)
self.client.execute.assert_called_once_with(
'SELECT * FROM servers WHERE "groupId"=:groupId;',
{'groupId': 'group-def'},
1)
class TestGetServerByServerId(_DBTestCase):
"""Test bobby.cass.get_server_by_server_id."""
def test_get_server_by_server_id(self):
"""Return a single server dict, rather than a single item list."""
expected = {'serverId': 'server-abc',
'groupId': 'group-def',
'entityId': 'entity-ghi'}
self.client.execute.return_value = defer.succeed([expected])
d = cass.get_server_by_server_id(self.client, '101010', 'group-xyz', 'server-abc')
result = self.successResultOf(d)
self.assertEqual(result, expected)
self.client.execute.assert_called_once_with(
'SELECT * FROM servers WHERE "groupId"=:groupId AND "serverId"=:serverId;',
{'serverId': 'server-abc', 'groupId': 'group-xyz'},
1)
def test_get_server_by_server_id_not_found(self):
"""Raises an error if no server is found."""
self.client.execute.return_value = defer.succeed([])
d = cass.get_server_by_server_id(self.client, '101010', 'group-xyz', 'server-abc')
result = self.failureResultOf(d)
self.assertTrue(result.check(cass.ResultNotFoundError))
def test_get_server_by_id_integrity_problems(self):
"""Raises an error if more than one group is found."""
self.client.execute.return_value = defer.succeed(['server-abc', 'server-def'])
d = cass.get_server_by_server_id(self.client, '101010', 'group-xyz', 'server-abc')
result = self.failureResultOf(d)
self.assertTrue(result.check(cass.ExcessiveResultsError))
class TestCreateServer(_DBTestCase):
"""Test bobby.cass.create_server."""
def test_create_server(self):
"""Creates and returns a server dict."""
expected = {'serverId': 'server-abc',
'groupId': 'group-def',
'entityId': 'entity-ghi',
'tenantId': '101010'}
def execute(query, data, consistency):
if 'INSERT' in query:
return defer.succeed(None)
elif 'SELECT' in query:
return defer.succeed([expected])
self.client.execute.side_effect = execute
d = cass.create_server(self.client, expected['tenantId'], expected['serverId'], expected['entityId'],
expected['groupId'])
result = self.successResultOf(d)
self.assertEqual(result, expected)
calls = [
mock.call(
' '.join([
'INSERT INTO servers ("serverId", "entityId", "groupId")',
'VALUES (:serverId, :entityId, :groupId);']),
{'serverId': 'server-abc',
'entityId': 'entity-ghi',
'groupId': 'group-def'},
1),
mock.call(
'SELECT * FROM servers WHERE "groupId"=:groupId AND "serverId"=:serverId;',
{'serverId': 'server-abc', 'groupId': 'group-def'},
1)]
self.assertEqual(self.client.execute.mock_calls, calls)
class TestDeleteServer(_DBTestCase):
"""Test bobby.cass.delete_server."""
def test_delete_server(self):
"""Delete and cascade to delete associated server policies."""
def execute(*args, **kwargs):
return defer.succeed(None)
self.client.execute.side_effect = execute
d = cass.delete_server(self.client, '101010', 'group-xyz', 'server-abc')
self.successResultOf(d)
calls = [
mock.call(
'DELETE FROM servers WHERE "groupId"=:groupId AND "serverId"=:serverId;',
{'serverId': 'server-abc', 'groupId': 'group-xyz'}, 1)
]
self.assertEqual(calls, self.client.execute.mock_calls)
class TestGetServerPoliciesByServerId(_DBTestCase):
"""Test bobby.cass.get_serverpolicies_by_server_id."""
def test_get_serverpolicies_by_server_id(self):
policies = [{'policyId': 'policy-abc'},
{'policyId': 'policy-xyz'}]
expected = [{'policyId': 'policy-abc',
'serverId': 'server-abc'},
{'policyId': 'policy-xyz',
'serverId': 'server-abc'}]
def execute(query, args, consistency):
if 'FROM policies' in query:
return defer.succeed(policies)
else:
return defer.succeed(expected)
self.client.execute.side_effect = execute
d = cass.get_serverpolicies_by_server_id(self.client, 'group-abc', 'server-abc')
result = self.successResultOf(d)
self.assertEqual(result, expected)
calls = [
mock.call('SELECT * FROM policies WHERE "groupId"=:groupId',
{'groupId': 'group-abc'}, 1),
mock.call('SELECT * FROM serverpolicies WHERE "policyId" IN (:policies) AND "serverId"=:serverId',
{'serverId': 'server-abc',
'policies': 'policy-abc, policy-xyz'},
1)
]
self.assertEqual(self.client.execute.mock_calls, calls)
class TestAddServerpolicy(_DBTestCase):
"""Test bobby.cass.add_serverpolicy"""
def test_add_serverpolicy(self):
"""Adding a server policy is an insert in the database."""
self.client.execute.return_value = defer.succeed(None)
d = cass.add_serverpolicy(self.client, 'server-abc', 'policy-def')
self.successResultOf(d)
self.client.execute.assert_called_once_with(
'INSERT INTO serverpolicies ("serverId", "policyId") VALUES (:serverId, :policyId);',
{'serverId': 'server-abc', 'policyId': 'policy-def'},
1)
class TestDeleteServerpolicy(_DBTestCase):
"""Test bobby.cass.delete_serverpolicy"""
def test_delete_serverpolicy(self):
"""Deleting a server policy is a delete in the database."""
self.client.execute.return_value = defer.succeed(None)
d = cass.delete_serverpolicy(self.client, 'server-abc', 'policy-def')
self.successResultOf(d)
self.client.execute.assert_called_once_with(
'DELETE FROM serverpolicies WHERE "serverId"=:serverId AND "policyId"=:policyId;',
{'serverId': 'server-abc', 'policyId': 'policy-def'},
1)
class TestGetPoliciesByGroupId(_DBTestCase):
"""Test bobby.cass.get_policies_by_group_id."""
def test_get_policies_by_group_id(self):
"""Gets all policies from a provided group."""
expected = [{'policyId': 'policy-abc',
'groupId': 'group-def',
'alarmTemplate': 'alarmTemplate-ghi',
'checkTemplate': 'checkTemplate-jkl'},
{'policyId': 'policy-xyz',
'groupId': 'group-def',
'alarmTemplate': 'alarmTemplate-uvw',
'checkTemplate': 'checkTemplate-rst'}]
self.client.execute.return_value = defer.succeed(expected)
d = cass.get_policies_by_group_id(self.client, 'group-def')
result = self.successResultOf(d)
self.assertEqual(result, expected)
self.client.execute.assert_called_once_with(
'SELECT * FROM policies WHERE "groupId"=:groupId;',
{'groupId': 'group-def'},
1)
class TestGetPolicyByPolicyId(_DBTestCase):
"""Test bobby.cass.get_policy_by_policy_id."""
def test_get_policy_by_policy_id(self):
"""Return a single policy dict, rather than a single item list."""
expected = {'policyId': 'policy-abc',
'groupId': 'group-def',
'alarmTemplate': 'alarmTemplate-ghi',
'checkTemplate': 'checkTemplate-jkl'}
self.client.execute.return_value = defer.succeed([expected])
d = cass.get_policy_by_policy_id(self.client, '101010', 'policy-abc')
result = self.successResultOf(d)
self.assertEqual(result, expected)
self.client.execute.assert_called_once_with(
'SELECT * FROM policies WHERE "policyId"=:policyId AND "groupId"=:groupId;',
{'policyId': 'policy-abc', 'groupId': '101010'},
1)
def test_get_policy_by_policy_id_not_found(self):
"""Raises an error if no policy is found."""
self.client.execute.return_value = defer.succeed([])
d = cass.get_policy_by_policy_id(self.client, '101010', 'policy-abc')
result = self.failureResultOf(d)
self.assertTrue(result.check(cass.ResultNotFoundError))
def test_get_policy_by_policy_id_integrity_problems(self):
"""Raises an error if more than one policy is found."""
self.client.execute.return_value = defer.succeed(['policy-abc', 'policy-def'])
d = cass.get_policy_by_policy_id(self.client, '101010', 'policy-abc')
result = self.failureResultOf(d)
self.assertTrue(result.check(cass.ExcessiveResultsError))
class TestCreatePolicy(_DBTestCase):
"""Test bobby.cass.create_policy."""
def test_create_policy(self):
"""Creates and returns a policy dict."""
expected = {'policyId': 'policy-abc',
'groupId': 'group-def',
'alarmTemplate': 'alarmTemplate-ghi',
'checkTemplate': 'checkTemplate-jkl'}
def execute(query, data, consistency):
if 'INSERT' in query:
return defer.succeed(None)
elif 'SELECT' in query:
return defer.succeed([expected])
self.client.execute.side_effect = execute
d = cass.create_policy(self.client, expected['policyId'],
expected['groupId'], expected['alarmTemplate'],
expected['checkTemplate'])
result = self.successResultOf(d)
self.assertEqual(result, expected)
calls = [
mock.call(
' '.join([
'INSERT INTO policies',
'("policyId", "groupId", "alarmTemplate", "checkTemplate")',
'VALUES (:policyId, :groupId, :alarmTemplate, :checkTemplate);']),
{'alarmTemplate': 'alarmTemplate-ghi',
'checkTemplate': 'checkTemplate-jkl',
'policyId': 'policy-abc',
'groupId': 'group-def'},
1),
mock.call(
'SELECT * FROM policies WHERE "policyId"=:policyId AND "groupId"=:groupId;',
{'policyId': 'policy-abc', 'groupId': 'group-def'},
1)
]
self.assertEqual(self.client.execute.mock_calls, calls)
class TestDeletePolicy(_DBTestCase):
"""Test bobby.cass.delete_policy."""
def test_delete_policy(self):
"""Deletes a policy."""
def execute(*args, **kwargs):
return defer.succeed(None)
self.client.execute.side_effect = execute
d = cass.delete_policy(self.client, 'group-xyz', 'policy-abc')
self.successResultOf(d)
calls = [
mock.call(
'DELETE FROM policies WHERE "groupId"=:groupId AND "policyId"=:policyId;',
{'policyId': 'policy-abc', 'groupId': 'group-xyz'}, 1),
]
self.assertEqual(calls, self.client.execute.mock_calls)
class TestServerPoliciesCreateDestroy(_DBTestCase):
"""Test bobby.cass.register_policy_on_server and bobby.cass.deregister_policy_on_server."""
def test_register_policy_on_server(self):
"""Registers a policy on a server and creates a serverpolicy record."""
def execute(*args, **kwargs):
return defer.succeed(None)
self.client.execute.side_effect = execute
d = cass.register_policy_on_server(self.client, 'policy-abc', 'server-abc', 'alABCD', 'chABCD')
self.successResultOf(d)
calls = [
mock.call(
('INSERT INTO serverpolicies ("serverId", "policyId", "alarmId", "checkId", state)'
' VALUES (:serverId, :policyId, :alarmId, :checkId, false);'),
{'policyId': 'policy-abc', 'serverId': 'server-abc',
'alarmId': 'alABCD', 'checkId': 'chABCD'}, 1),
]
self.assertEqual(calls, self.client.execute.mock_calls)
def test_deregister_policy_on_server(self):
"""Registers a policy on a server and creates a serverpolicy record."""
def execute(*args, **kwargs):
return defer.succeed(None)
self.client.execute.side_effect = execute
d = cass.deregister_policy_on_server(self.client, 'policy-abc', 'server-abc')
self.successResultOf(d)
calls = [
mock.call(
'DELETE FROM serverpolicies WHERE "policyId"=:policyId AND "serverId"=:serverId;',
{'policyId': 'policy-abc', 'serverId': 'server-abc'}, 1),
]
self.assertEqual(calls, self.client.execute.mock_calls)
class TestServerPolicies(_DBTestCase):
"""Test bobby.cass.register_policy_on_server and bobby.cass.deregister_policy_on_server."""
def test_policy_state(self):
"""Registers a policy on a server and creates a serverpolicy record."""
expected = [{'policyId': 'policy-abc',
'groupId': 'group-def',
'alarmId': 'alABCD',
'checkId': 'chABCD',
'state': 'false'}]
self.client.execute.return_value = defer.succeed(expected)
d = cass.get_policy_state(self.client, 'policy-abc')
result = self.successResultOf(d)
self.assertEqual(result, expected)
calls = [
mock.call(
'SELECT * FROM serverpolicies WHERE "policyId"=:policyId;',
{'policyId': 'policy-abc'}, 1),
]
self.assertEqual(calls, self.client.execute.mock_calls)
class TestAlterAlarmState(_DBTestCase):
"""Test bobby.cass.create_policy."""
def test_alter_alarm_state(self):
"""Creates and returns a policy dict."""
expected = {'policyId': 'policy-abc',
'serverId': 'server-def',
'alarmId': 'alghi',
'checkId': 'chjkl',
'state': True}
def execute(query, data, consistency):
if 'UPDATE' in query:
return defer.succeed(None)
elif 'SELECT' in query:
return defer.succeed([expected])
self.client.execute.side_effect = execute
d = cass.alter_alarm_state(self.client, expected['alarmId'], False)
result = self.successResultOf(d)
self.assertEqual(result, ('policy-abc', 'server-def'))
calls = [
mock.call(
'SELECT * FROM serverpolicies WHERE "alarmId"=:alarmId;',
{'alarmId': 'alghi'},
1),
mock.call(
('UPDATE serverpolicies SET state=:state WHERE "policyId"=:policyId AND '
'"serverId"=:serverId;'),
{'state': False,
'policyId': 'policy-abc',
'serverId': 'server-def'},
1)
]
self.assertEqual(self.client.execute.mock_calls, calls)
class TestCheckQuorumHealth(_DBTestCase):
"""Test bobby.cass.check_quorum_health."""
def test_unhealthy(self):
"""Results in a False when the quorum is unhealthy."""
def execute(query, data, consistency):
return defer.succeed([
{'policyId': 'policy-uvwxyz',
'serverId': 'server-abc',
'state': 'OK'},
{'policyId': 'policy-uvwxyz',
'serverId': 'server-def',
'state': 'OK'},
{'policyId': 'policy-uvwxyz',
'serverId': 'server-ghi',
'state': 'Critical'},
{'policyId': 'policy-uvwxyz',
'serverId': 'server-jkl',
'state': 'Critical'},
{'policyId': 'policy-uvwxyz',
'serverId': 'server-mno',
'state': 'Critical'},
])
self.client.execute.side_effect = execute
d = cass.check_quorum_health(self.client, 'alarm-uvwxyz')
result = self.successResultOf(d)
self.assertFalse(result)
def test_healthy(self):
"""Results in a False when the quorum is healthy."""
def execute(query, data, consistency):
return defer.succeed([
{'policyId': 'policy-uvwxyz',
'serverId': 'server-abc',
'state': 'OK'},
{'policyId': 'policy-uvwxyz',
'serverId': 'server-def',
'state': 'OK'},
{'policyId': 'policy-uvwxyz',
'serverId': 'server-ghi',
'state': 'OK'},
{'policyId': 'policy-uvwxyz',
'serverId': 'server-jkl',
'state': 'Critical'},
{'policyId': 'policy-uvwxyz',
'serverId': 'server-mno',
'state': 'Critical'},
])
self.client.execute.side_effect = execute
d = cass.check_quorum_health(self.client, 'policy-uvwxyz')
result = self.successResultOf(d)
self.assertTrue(result)
self.client.execute.assert_called_once_with(
'SELECT * FROM serverpolicies WHERE "policyId"=:policyId;',
{'policyId': 'policy-uvwxyz'}, 1)
| 2.1875 | 2 |
day56/ISS_position.py | Nitin-Diwakar/100-days-of-code | 1 | 12790873 | <gh_stars>1-10
import requests
response = requests.get(url="http://api.open-notify.org/iss-now.json")
response.raise_for_status()
data = response.json()
# print(data)
longitude = data["iss_position"]["longitude"]
latitude = data["iss_position"]["latitude"]
iss_position = (longitude, latitude)
print(iss_position)
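# Illustrative note: the open-notify API returns the coordinates as strings,
# so convert them when numeric values are needed, e.g.:
# latitude, longitude = float(latitude), float(longitude)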
| 2.9375 | 3 |
setup.py | nickp60/pyfastg | 1 | 12790874 | # NOTE: Derived from https://github.com/biocore/qurro/blob/master/setup.py
from setuptools import find_packages, setup
classes = """
Development Status :: 3 - Alpha
Topic :: Software Development :: Libraries
Topic :: Scientific/Engineering
Topic :: Scientific/Engineering :: Bio-Informatics
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only
"""
classifiers = [s.strip() for s in classes.split("\n") if s]
description = "Minimal Python library for parsing SPAdes FASTG files"
with open("README.md") as f:
long_description = f.read()
version = "0.0.0"
setup(
name="pyfastg",
version=version,
license="MIT",
description=description,
long_description=long_description,
long_description_content_type="text/markdown",
author="<NAME>, <NAME>",
maintainer="<NAME>",
maintainer_email="<EMAIL>",
url="https://github.com/fedarko/pyfastg",
classifiers=classifiers,
packages=find_packages(),
install_requires=["networkx", "scikit-bio"],
extras_require={"dev": ["pytest", "pytest-cov", "flake8", "black"]},
)
| 1.367188 | 1 |
tools/instant_collada_export.py | maggo007/Raygun | 31 | 12790875 | <reponame>maggo007/Raygun
bl_info = {
"name": "Instant Collada Export",
"blender": (2, 80, 0),
"category": "Import-Export",
}
import bpy
import os
class InstantColladaExport(bpy.types.Operator):
"""Instantly export the current scene as collada"""
bl_idname = "object.instant_collada_export"
bl_label = "Instant Collada Export"
def execute(self, context):
filepath = os.path.splitext(bpy.data.filepath)[0]
if not filepath:
self.report({'ERROR'}, "Save the file!")
return {'CANCELLED'}
bpy.ops.wm.collada_export(filepath=filepath + '.dae',
apply_modifiers=True,
export_global_forward_selection='-Z',
export_global_up_selection='Y',
apply_global_orientation=True)
return {'FINISHED'}
def menu_func(self, context):
self.layout.operator(InstantColladaExport.bl_idname)
def register():
bpy.utils.register_class(InstantColladaExport)
bpy.types.TOPBAR_MT_file_export.append(menu_func)
def unregister():
bpy.utils.unregister_class(InstantColladaExport)
bpy.types.TOPBAR_MT_file_export.remove(menu_func)
if __name__ == "__main__":
register()
| 2.28125 | 2 |
simulation/vis_lib.py | stanford-iprl-lab/UniGrasp | 39 | 12790876 | from mayavi import mlab as mayalab
import numpy as np
import os
def plot_pc(pcs,color=None,scale_factor=.05,mode='point'):
if color == 'red':
mayalab.points3d(pcs[:,0],pcs[:,1],pcs[:,2],mode=mode,scale_factor=scale_factor,color=(1,0,0))
print("color",color)
elif color == 'blue':
mayalab.points3d(pcs[:,0],pcs[:,1],pcs[:,2],mode=mode,scale_factor=scale_factor,color=(0,0,1))
elif color == 'green':
mayalab.points3d(pcs[:,0],pcs[:,1],pcs[:,2],mode=mode,scale_factor=scale_factor,color=(0,1,0))
elif color == 'ycan':
mayalab.points3d(pcs[:,0],pcs[:,1],pcs[:,2],mode=mode,scale_factor=scale_factor,color=(0,1,1))
else:
print("unkown color")
mayalab.points3d(pcs[:,0],pcs[:,1],pcs[:,2],mode=mode,scale_factor=scale_factor,color=color)
def plot_pc_with_normal(pcs,pcs_n,scale_factor=1.0,color='red'):
if color == 'red':
mayalab.quiver3d(pcs[:, 0], pcs[:, 1], pcs[:, 2], pcs_n[:, 0], pcs_n[:, 1], pcs_n[:, 2], color=(1,0,0), mode='arrow',scale_factor=1.0)
elif color == 'blue':
mayalab.quiver3d(pcs[:, 0], pcs[:, 1], pcs[:, 2], pcs_n[:, 0], pcs_n[:, 1], pcs_n[:, 2], color=(0,0,1), mode='arrow',scale_factor=1.0)
elif color == 'green':
mayalab.quiver3d(pcs[:, 0], pcs[:, 1], pcs[:, 2], pcs_n[:, 0], pcs_n[:, 1], pcs_n[:, 2], color=(0,1,0), mode='arrow',scale_factor=1.0)
def plot_origin():
origin_pc = np.array([0.0,0.0,0.0]).reshape((-1,3))
plot_pc(origin_pc,color='ycan',mode='sphere',scale_factor=.01)
origin_pcs = np.tile(origin_pc,(3,1))
origin_pcns = np.eye(3) * 0.01
plot_pc_with_normal(origin_pcs,origin_pcns)
if __name__ == '__main__':
#save_dir = '/home/lins/MetaGrasp/Data/BlensorResult/2056'
#gripper_name = '056_rho0.384015_azi1.000000_ele89.505854_theta0.092894_xcam0.000000_ycam0.000000_zcam0.384015_scale0.146439_xdim0.084960_ydim0.084567_zdim0.08411000000_pcn_new.npz.npy'
#gripper_name ='339_rho0.308024_azi6.000000_ele89.850030_theta-0.013403_xcam0.000000_ycam0.000000_zcam0.308024_scale0.061975_xdim0.048725_ydim0.036192_zdim0.01252500000_pcn.npz'
gripper = np.load(os.path.join("robotiq2f_open.npy"))
#plot_pc(gripper,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.002)
plot_pc(gripper,color=(209/255.0,64/255.0,109/255.0),mode='sphere',scale_factor=0.002)
plot_origin()
mayalab.show()
#sle = np.array([1494,1806])
#plot_pc(gripper[sle],color='red',mode='sphere',scale_factor=0.002)
#mayalab.show()
#save_dir = '/home/lins/MetaGrasp/meta_grasping/saved_results/interp'
#save_dir = '/home/lins/MetaGrasp/Data/Gripper/Data3'
# #save_dir_gt = '/home/lins/MetaGrasp/Data/Gripper/Data'
save_dir = '/home/lins/MetaGrasp/Data/Gripper/Data_DB/G5/f2_5_close.npy'
a = np.load(save_dir)
plot_pc(a)
save_dirb = '/home/lins/MetaGrasp/Data/Gripper/Data_DB/G3/f2_3_close.npy'
b = np.load(save_dirb)
plot_pc(b,color='red')
mayalab.show()
#for i in range(10001,10300):
# gripper_name = 'f2_'+str(i)+'_middel.npy'
#print(gripper_name)
# gripper = np.load(os.path.join(save_dir,gripper_name))
# plot_pc(gripper,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.002)
# plot_origin()
# mayalab.show()
#save_dir_gt = '/home/lins/MetaGrasp/Data/Gripper/Data'
#gripper_gt = np.load(os.path.join(save_dir_gt,gripper_name))
#plot_pc(gripper_gt,color='red',mode='sphere',scale_factor=0.002)
if 0:
for i in range(0,199):
save_dir = '/home/lins/MetaGrasp/Data/Gripper/Data_noR'
#save_dir = '/home/lins/MetaGrasp/meta_grasping/saved_results/recon_old'
gripper_name = 'robotiq_3f_'+str(i)+'.npy'
print(gripper_name)
gripper = np.load(os.path.join(save_dir,gripper_name))
plot_pc(gripper,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.01)
plot_origin()
mayalab.show()
if 0:
save_dir = '/home/lins/MetaGrasp/meta_grasping/saved_results/interp'
gripper_name = 'kinova_kg3_0.npy'
print(gripper_name)
gripper = np.load(os.path.join(save_dir,gripper_name))
plot_pc(gripper,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.01)
plot_origin()
mayalab.show()
gripper_name = 'robotiq_3f_1.npy'
print(gripper_name)
gripper = np.load(os.path.join(save_dir,gripper_name))
plot_pc(gripper,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.01)
plot_origin()
mayalab.show()
save_dir = '/home/lins/MetaGrasp/meta_grasping/saved_results/interp'
gripper_name = 'middle0.npy'
print(gripper_name)
gripper = np.load(os.path.join(save_dir,gripper_name))
plot_pc(gripper,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.01)
plot_origin()
mayalab.show()
gripper_name = 'middle1.npy'
print(gripper_name)
gripper = np.load(os.path.join(save_dir,gripper_name))
plot_pc(gripper,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.01)
plot_origin()
mayalab.show()
save_dir = '/home/lins/MetaGrasp/Data/Gripper/Data_noR'
gripper_name1 = 'kinova_kg3_0.npy'
print(gripper_name)
gripper1 = np.load(os.path.join(save_dir,gripper_name1))
plot_pc(gripper1,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.01)
plot_origin()
mayalab.show()
gripper_name2 = 'robotiq_3f_1.npy'
print(gripper_name)
gripper2 = np.load(os.path.join(save_dir,gripper_name2))
plot_pc(gripper2,color=(139/255.0,177/255.0,212/255.0),mode='sphere',scale_factor=0.01)
plot_origin()
mayalab.show()
| 2.34375 | 2 |
offlinetools/views/login.py | OpenCIOC/offlinetools | 1 | 12790877 | # =========================================================================================
# Copyright 2016 Community Information Online Consortium (CIOC) and KCL Software Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================================
from __future__ import absolute_import
from hashlib import pbkdf2_hmac
from base64 import standard_b64encode
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember, forget
from formencode import Schema
from sqlalchemy import func
from offlinetools import models
from offlinetools.views.base import ViewBase
from offlinetools.views import validators
from offlinetools.syslanguage import _culture_list, default_culture
DEFAULT_REPEAT = 100000
class LoginSchema(Schema):
allow_extra_fields = True
filter_extra_fields = True
LoginName = validators.UnicodeString(max=50, not_empty=True)
LoginPwd = validators.String(not_empty=True)
came_from = validators.UnicodeString()
class Login(ViewBase):
def post(self):
request = self.request
_ = request.translate
model_state = request.model_state
model_state.schema = LoginSchema()
if not model_state.validate():
return self._get_edit_info()
LoginName = model_state.value('LoginName')
user = request.dbsession.query(models.Users).filter_by(UserName=LoginName).first()
if not user:
model_state.add_error_for('*', _('Invalid User Name or Password'))
return self._get_edit_info()
hash = Crypt(user.PasswordHashSalt, model_state.value('LoginPwd'), user.PasswordHashRepeat)
if hash != user.PasswordHash:
model_state.add_error_for('*', _('Invalid User Name or Password'))
return self._get_edit_info()
headers = remember(request, user.UserName)
start_ln = [x.Culture for x in _culture_list if x.LangID == user.LangID and x.Active]
if not start_ln:
start_ln = [default_culture()]
return HTTPFound(location=model_state.value('came_from', request.route_url('search', ln=start_ln[0])),
headers=headers)
def get(self):
request = self.request
login_url = request.route_url('login')
referrer = request.url
if referrer == login_url:
referrer = request.route_url('search') # never use the login form itself as came_from
came_from = request.params.get('came_from', referrer)
request.model_state.data['came_from'] = came_from
return self._get_edit_info()
def _get_edit_info(self):
request = self.request
session = request.dbsession
user_count = session.query(func.count(models.Users.UserName), func.count(models.Record.NUM)).one()
has_data = any(user_count)
failed_updates = False
has_updated = True
if not has_data:
config = request.config
failed_updates = not not config.update_failure_count
has_updated = not not config.last_update
return {'has_data': has_data, 'failed_updates': failed_updates, 'has_updated': has_updated}
def logout(request):
headers = forget(request)
return HTTPFound(location=request.route_url('login'),
headers=headers)
def Crypt(salt, password, repeat=DEFAULT_REPEAT):
return standard_b64encode(pbkdf2_hmac('sha1', password.encode('utf-8'), salt.encode('utf-8'), repeat, 33)).decode('utf-8').strip()
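# Illustrative only (a hypothetical helper, not referenced elsewhere):
# verifying a password re-derives the hash with the user's stored salt and
# repeat count and compares it to the stored hash, mirroring Login.post above.
def verify_password(user, password):
    return Crypt(user.PasswordHashSalt, password, user.PasswordHashRepeat) == user.PasswordHash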
| 1.539063 | 2 |
dxmate.py | jtowers/dxmate | 1 | 12790878 | <gh_stars>1-10
import sublime
import sublime_plugin
import os
import subprocess
import threading
import sys
import json
import mdpopups
import time
from collections import OrderedDict
from .lib.printer import PanelPrinter
from .lib.threads import ThreadProgress
from .lib.threads import PanelThreadProgress
from .lib.languageServer import *
from .lib.event_hub import EventHub
from .lib.util import util
from .lib.diagnostic import *
import ntpath
class SymbolKind(object):
File = 1
Module = 2
Namespace = 3
Package = 4
Class = 5
Method = 6
Property = 7
Field = 8
Constructor = 9
Enum = 10
Interface = 11
Function = 12
Variable = 13
Constant = 14
String = 15
Number = 16
Boolean = 17
Array = 18
symbol_kind_names = {
SymbolKind.File: "file",
SymbolKind.Module: "module",
SymbolKind.Namespace: "namespace",
SymbolKind.Package: "package",
SymbolKind.Class: "class",
SymbolKind.Method: "method",
SymbolKind.Function: "function",
SymbolKind.Field: "field",
SymbolKind.Variable: "variable",
SymbolKind.Constant: "constant"
}
def format_symbol_kind(kind):
return symbol_kind_names.get(kind, str(kind))
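# e.g. format_symbol_kind(SymbolKind.Class) -> "class"; unlisted kinds fall back to str(kind)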
def format_symbol(item):
"""
items may be a list of strings, or a list of string lists.
In the latter case, each entry in the quick panel will show multiple rows
"""
# file_path = uri_to_filename(location.get("uri"))
# kind = format_symbol_kind(item.get("kind"))
# return [item.get("name"), kind]
return [item.get("name")]
class DxmateOutputText(sublime_plugin.TextCommand):
def run(self, edit, text, erase=False, *args, **kwargs):
size = self.view.size()
self.view.set_read_only(False)
if erase == True:
size = sublime.Region(0, self.view.size())
self.view.replace(edit, size, text)
else:
self.view.insert(edit, size, text)
self.view.set_read_only(True)
self.view.show(size)
def is_visible(self):
return False
def is_enabled(self):
return True
def description(self):
return
class WriteOperationStatus(sublime_plugin.TextCommand):
def run(self, edit, text, *args, **kwargs):
kw_region = kwargs.get('region', [0, 0])
status_region = sublime.Region(kw_region[0], kw_region[1])
size = self.view.size()
self.view.set_read_only(False)
self.view.replace(edit, status_region, text)
self.view.set_read_only(True)
# self.view.show(size)
def is_visible(self):
return False
def is_enabled(self):
return True
def description(self):
return
# not ready for code completion yet
lsClient = None
printer = None
def plugin_loaded():
global lsClient
global printer
if util.dxProjectFolder() != '':
lsClient = start_client()
if lsClient is None:
            util.debug('Unable to start language server')
EventHub.subscribe('on_load_async', set_syntax)
active_window_id = sublime.active_window().id()
printer = PanelPrinter.get(active_window_id)
printer.write("sfdx plugin loaded", erase=True)
def plugin_unloaded():
if lsClient:
lsClient.kill()
def set_syntax(view):
if util.is_apex_file(view):
util.debug('setting syntax for file')
if "linux" in sys.platform or "darwin" in sys.platform:
view.set_syntax_file(os.path.join("Packages",util.plugin_name(),"sublime","lang","Apex.sublime-syntax"))
else:
view.set_syntax_file(os.path.join("Packages/"+util.plugin_name()+"/sublime/lang/Apex.sublime-syntax"))
class ExitHandler(sublime_plugin.EventListener):
    def on_window_command(self, window, command_name, args):
if command_name == 'exit':
plugin_unloaded()
class EventHandlers(sublime_plugin.EventListener):
def __init__(self):
self.completions = [] # type: List[Tuple[str, str]]
self.refreshing = False
def on_pre_close(self, view):
EventHub.publish('on_pre_close', view)
def on_close(self, view):
EventHub.publish('on_close', view)
def on_load_async(self, view):
EventHub.publish('on_load_async', view)
def on_activated_async(self, view):
EventHub.publish('on_activated_async', view)
def on_post_save_async(self, view):
EventHub.publish('on_post_save_async', view)
def on_hover(self, view, point, hover_zone):
EventHub.publish('on_hover', view, point, hover_zone)
def on_window_command(self, window, command_name, *args):
if command_name == 'exit':
EventHub.publish('exit', window, *args)
elif command_name == 'close_window':
EventHub.publish('close_window', window, *args)
else:
EventHub.publish('on_window_command', window, command_name, *args)
def on_text_command(self, window, command_name, *args):
if command_name == 'exit':
EventHub.publish('exit', window, *args)
elif command_name == 'close_window':
EventHub.publish('close_window', window, *args)
else:
EventHub.publish('on_window_command', window, command_name, *args)
def on_modified_async(self, view):
active_file_extension = util.file_extension(view)
if active_file_extension != '.cls' and active_file_extension != '.trigger':
return None
EventHub.publish("on_modified_async", view)
def on_query_completions(self, view, prefix, locations):
active_file_extension = util.file_extension(view)
if active_file_extension != '.cls' and active_file_extension != '.trigger':
return None
if not self.refreshing:
client = lsClient
if not client:
return
completionProvider = client.get_capability('completionProvider')
if not completionProvider:
return
autocomplete_triggers = completionProvider.get('triggerCharacters')
if locations[0] > 0:
self.completions = []
purge_did_change(view.buffer_id())
client.send_request(
Request.complete(
util.get_document_position(view, locations[0])),
self.handle_response)
self.refreshing = False
return self.completions, (sublime.INHIBIT_WORD_COMPLETIONS
| sublime.INHIBIT_EXPLICIT_COMPLETIONS)
def format_completion(self, item) -> 'Tuple[str, str]':
label = item.get("label")
# kind = item.get("kind")
detail = item.get("kind")
detail = format_symbol_kind(detail)
#detail = format_symbol(detail)
insertText = label
if item.get("insertTextFormat") == 2:
insertText = item.get("insertText")
if insertText[0] == '$': # sublime needs leading '$' escaped.
insertText = '\$' + insertText[1:]
return ("{}\t{}".format(label, detail), insertText)
def handle_response(self, response):
self.completions = []
items = response["items"] if isinstance(response,
dict) else response
for item in items:
self.completions.append(self.format_completion(item))
sublime.active_window().active_view().run_command('hide_auto_complete')
self.run_auto_complete()
def run_auto_complete(self):
self.refreshing = True
sublime.active_window().active_view().run_command(
"auto_complete", {
'disable_auto_insert': True,
'api_completions_only': False,
'next_completion_if_showing': False,
'auto_complete_commit_on_tab': True,
})
class DxmateRunFileTestsCommand(sublime_plugin.WindowCommand):
def run(self):
self.dx_folder = util.dxProjectFolder()
self.active_file = util.active_file()
        self.active_file = ntpath.split(self.active_file)[1].replace('.cls', '')
        self.class_name = self.active_file
t = threading.Thread(target=self.run_command)
t.start()
printer.show()
printer.write('\nRunning Tests')
printer.write('\nResult: ')
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Running tests', 'Tests run')
PanelThreadProgress(t, 'Running Tests')
def is_enabled(self):
self.dx_folder = util.dxProjectFolder()
if(self.dx_folder == ''):
return False
self.active_file = util.active_file()
if not self.active_file.endswith('.cls'):
return False
if not util.file_is_test(self.window.active_view()):
return False
return True
def run_command(self):
args = ['sfdx', 'force:apex:test:run', '-r', 'human',
'-l', 'RunSpecifiedTests', '-n', self.class_name]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
startupinfo=startupinfo, cwd=self.dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
printer.write('\n' + str(out, 'utf-8'))
else:
printErr = err
if err is None or err == '':
printErr = out
printer.write('\n' + str(printErr, 'utf-8'))
class DxmateRunOrgTestsCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.dx_folder = util.dxProjectFolder()
sublime.active_window().show_input_panel(
'Org (leave blank for default)', '', self.run_tests, None, None)
def run_tests(self, input):
self.test_org = input
printer.show()
printer.write('\nRunning Org Tests')
printer.write('\nResult: ')
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Running Org Tests', 'Org tests run')
PanelThreadProgress(t, 'Running Org Tests')
def is_enabled(self, paths=[]):
#dx_folder = util.dxProjectFolder()
if util.isDXProject() == False:
return False
return True
def run_command(self):
args = ['sfdx', 'force:apex:test:run', '-r', 'human']
        if self.test_org is not None and len(self.test_org) > 0:
            args.append('-u')
            args.append(self.test_org)
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
startupinfo=startupinfo, cwd=self.dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
printer.write('\n' + str(out, 'utf-8'))
else:
printErr = err
if err is None or err == '':
printErr = out
printer.write('\n' + str(printErr, 'utf-8'))
class DxmatePushSourceCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.dx_folder = util.dxProjectFolder()
printer.show()
printer.write('\nPushing Source')
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Pushing Source', 'Source Pushed')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Source Pushed')
def is_enabled(self, paths=[]):
#dx_folder = util.dxProjectFolder()
if util.isDXProject() == False:
return False
return True
def run_command(self):
args = ['sfdx', 'force:source:push']
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
startupinfo=startupinfo, cwd=self.dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
printer.write('\n' + str(out, 'utf-8'))
else:
printErr = err
if not err is None and not err == '':
printErr = out
else:
printer.write('\nError pushing source')
printer.write('\n' + str(printErr, 'utf-8'))
class DxmatePullSourceCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.dx_folder = util.dxProjectFolder()
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Pulling Source', 'Source Pulled')
printer.write('\nPulling Source')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Source Pulled')
def is_enabled(self, paths=[]):
#dx_folder = util.dxProjectFolder()
if util.isDXProject() == False:
return False
return True
def run_command(self):
args = ['sfdx', 'force:source:pull']
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
startupinfo=startupinfo, cwd=self.dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
printer.write('\n' + str(out, 'utf-8'))
else:
printErr = err
if not err is None and not err == '':
printErr = out
else:
printer.write('\nError pulling source')
printer.write('\n' + str(printErr, 'utf-8'))
class DxmateOpenScratchOrgCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.dx_folder = util.dxProjectFolder()
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Opening Org', 'Org Opened')
printer.write('\nOpening Org')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Org Opened')
def is_enabled(self, paths=[]):
#dx_folder = util.dxProjectFolder()
if util.isDXProject() == False:
return False
return True
def run_command(self):
args = ['sfdx', 'force:org:open']
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
startupinfo=startupinfo, cwd=self.dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
printer.write('\nScratch org opened')
else:
printer.write('\nError opening')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateScratchOrgCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.dx_folder = util.dxProjectFolder()
self.def_file = os.path.join(
self.dx_folder, 'config', 'project-scratch-def.json')
        sublime.active_window().show_input_panel(
            'Scratch Org Definition File', self.def_file, self.create_org, None, None)
def create_org(self, input):
printer.show()
self.def_file = input
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Scratch Org', 'Scratch Org Created')
        printer.write('\nCreating Scratch Org')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Scratch Org Created')
def is_enabled(self, paths=[]):
#dx_folder = util.dxProjectFolder()
if util.isDXProject() == False:
return False
return True
def run_command(self):
args = ['sfdx', 'force:org:create', '-f',
self.def_file, '-a', 'ScratchOrg', '-s']
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
startupinfo=startupinfo, cwd=self.dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
printer.write('\nScratch org created')
else:
printer.write('\nError creating scratch org')
printer.write('\n' + str(err, 'utf-8'))
class DxmateAuthDevHubCommand(sublime_plugin.TextCommand):
def run(self, edit):
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Opening Auth Page', 'Auth Page Opened')
printer.write('\nOpening Auth Page')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Auth Page Opened')
def is_enabled(self, paths=[]):
#dx_folder = util.dxProjectFolder()
if util.isDXProject() == False:
return False
return True
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:auth:web:login', '-d', '-s', '-a', 'DevHub']
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
printer.write('\nDevHub authorized')
else:
printer.write('\nError authorizing Dev Hub:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateRunSoqlCommand(sublime_plugin.WindowCommand):
def run(self):
sublime.active_window().show_input_panel(
'Query', '', self.run_query, None, None)
def is_enabled(self, paths=[]):
#dx_folder = util.dxProjectFolder()
if util.isDXProject() == False:
return False
return True
def run_query(self, input):
self.query = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Running query', 'Query run')
printer.write('\nRunning query')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Query run')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:data:soql:query',
'-q', self.query]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
printer.write('\nOpening results file')
content = str(out,'UTF-8')
#try:
# parsed = json.loads(content)
# content = json.dumps(parsed, sort_keys=True,indent=1, separators=(',', ':'))
# util.debug(content)
#except Exception as e:
# util.debug('could not format query results\n', e)
file = sublime.active_window().new_file()
file.set_scratch(True)
file.set_name('SOQL')
syntax_path = None
if "linux" in sys.platform or "darwin" in sys.platform:
syntax_path = os.path.join("Packages",plugin_name(),"sublime","lang","JSON.tmLanguage")
else:
syntax_path = os.path.join("Packages/"+plugin_name()+"/sublime/lang/JSON.tmLanguage")
#file.set_syntax_file(syntax_path)
file.run_command("insert", {"characters":content})
else:
printer.write('\nError running query:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateVisualforceComponentCommand(sublime_plugin.WindowCommand):
def run(self, paths=[]):
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
printer.show()
printer.write('\nPlease select a single folder to save the component')
return
self.page_name = 'ComponentName'
self.page_label = 'Component Label'
self.class_dir = paths[0]
sublime.active_window().show_input_panel(
'Component API Name', self.page_name, self.get_label, None, None)
def get_label(self, input):
self.page_name = input
sublime.active_window().show_input_panel(
'Component Label', self.page_label, self.create_page, None, None)
def is_enabled(self, paths=[]):
#dx_folder = util.dxProjectFolder()
if util.isDXProject() == False:
return False
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
return False
return True
def create_page(self, input):
self.page_label = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Visualforce Component', 'Visualforce Component Created')
printer.write('\nCreating Visualforce Component')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Visualforce Component Created')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:visualforce:component:create',
'-n', self.page_name,'-l', self.page_label, '-d', self.class_dir]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
p.wait()
out, err = p.communicate()
r = p.returncode
if p.returncode == 0:
            printer.write('\nVisualforce Component created')
file = os.path.join(self.class_dir, self.page_name + '.component')
sublime.active_window().open_file(file)
else:
printer.write('\nError creating Visualforce Component:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateVisualforcePageCommand(sublime_plugin.WindowCommand):
def run(self, paths=[]):
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
printer.show()
printer.write('\nPlease select a single folder to save the page')
return
self.page_name = 'PageName'
self.page_label = 'Page Label'
self.class_dir = paths[0]
sublime.active_window().show_input_panel(
'Page API Name', self.page_name, self.get_label, None, None)
def get_label(self, input):
self.page_name = input
sublime.active_window().show_input_panel(
'Page Label', self.page_label, self.create_page, None, None)
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
return False
return True
def create_page(self, input):
self.page_label = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Visualforce Page', 'Visualforce Page Created')
printer.write('\nCreating Visualforce Page')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Visualforce Page Created')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:visualforce:page:create',
'-n', self.page_name,'-l', self.page_label, '-d', self.class_dir]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
        out, err = p.communicate()
if p.returncode == 0:
            printer.write('\nVisualforce page created')
file = os.path.join(self.class_dir, self.page_name + '.page')
sublime.active_window().open_file(file)
else:
printer.write('\nError creating Visualforce page:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateLightningComponentCommand(sublime_plugin.WindowCommand):
def run(self, paths=[]):
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
printer.show()
            printer.write('\nPlease select a single folder to save the component')
return
self.cmp_name = 'ComponentName'
self.class_dir = paths[0]
        sublime.active_window().show_input_panel(
            'Component Name', self.cmp_name, self.create_cmp, None, None)
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
return False
return True
def create_cmp(self, input):
self.cmp_name = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Lightning Component', 'Lightning Component Created')
printer.write('\nCreating Lightning Component')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Lightning Component Created')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:lightning:component:create',
'-n', self.cmp_name, '-d', self.class_dir]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nLightning Component created')
file = os.path.join(self.class_dir, self.cmp_name, self.cmp_name + '.cmp')
sublime.active_window().open_file(file)
else:
printer.write('\nError creating Lightning Component:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateLightningTestCommand(sublime_plugin.WindowCommand):
def run(self, paths=[]):
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
printer.show()
            printer.write('\nPlease select a single folder to save the test')
return
self.event_name = 'TestName'
self.class_dir = paths[0]
sublime.active_window().show_input_panel(
'Test Name', self.event_name, self.create_event, None, None)
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
util.debug(paths)
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
return False
return True
def create_event(self, input):
self.event_name = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
        ThreadProgress(t, 'Creating Lightning Test', 'Lightning Test Created')
printer.write('\nCreating Lightning Test')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Lightning Test Created')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:lightning:test:create',
'-n', self.event_name, '-d', self.class_dir]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nLightning Test created')
file = os.path.join(self.class_dir, self.event_name + '.resource')
sublime.active_window().open_file(file)
else:
printer.write('\nError creating Lightning Test:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateLightningInterfaceCommand(sublime_plugin.WindowCommand):
def run(self, paths=[]):
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
printer.show()
            printer.write('\nPlease select a single folder to save the interface')
return
self.event_name = 'InterfaceName'
self.class_dir = paths[0]
sublime.active_window().show_input_panel(
'Interface Name', self.event_name, self.create_event, None, None)
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
return False
return True
def create_event(self, input):
self.event_name = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Lightning Interface', 'Lightning Interface Created')
printer.write('\nCreating Lightning Interface')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Lightning Interface Created')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:lightning:interface:create',
'-n', self.event_name, '-d', self.class_dir]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nLightning Interface created')
file = os.path.join(self.class_dir, self.event_name, self.event_name + '.intf')
sublime.active_window().open_file(file)
else:
printer.write('\nError creating Lightning Interface:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateLightningEventCommand(sublime_plugin.WindowCommand):
def run(self, paths=[]):
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
printer.show()
            printer.write('\nPlease select a single folder to save the event')
return
self.event_name = 'EventName'
self.class_dir = paths[0]
sublime.active_window().show_input_panel(
'Event Name', self.event_name, self.create_event, None, None)
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
return False
return True
def create_event(self, input):
self.event_name = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Lightning Event', 'Lightning Event Created')
printer.write('\nCreating Lightning Event')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Lightning Event Created')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:lightning:event:create',
'-n', self.event_name, '-d', self.class_dir]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nLightning Event created')
file = os.path.join(self.class_dir, self.event_name, self.event_name + '.evt')
sublime.active_window().open_file(file)
else:
printer.write('\nError creating Lightning Event:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateLightningAppCommand(sublime_plugin.WindowCommand):
def run(self, paths=[]):
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
printer.show()
            printer.write('\nPlease select a single folder to save the app')
return
self.app_name = 'AppName'
self.class_dir = paths[0]
sublime.active_window().show_input_panel(
'App Name', self.app_name, self.create_app, None, None)
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
return False
return True
def create_app(self, input):
self.app_name = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Lightning App', 'Lightning App Created')
printer.write('\nCreating Lightning App')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Lightning App Created')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:lightning:app:create',
'-n', self.app_name, '-d', self.class_dir]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nLightning App created')
file = os.path.join(self.class_dir, self.app_name, self.app_name + '.app')
sublime.active_window().open_file(file)
else:
printer.write('\nError creating Lightning App:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateApexClassCommand(sublime_plugin.WindowCommand):
def run(self, paths=[]):
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
printer.show()
            printer.write('\nPlease select a single folder to save the class')
return
self.class_name = 'ApexClassName'
self.class_dir = paths[0]
sublime.active_window().show_input_panel(
'Class Name', self.class_name, self.create_class, None, None)
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
if len(paths) != 1 or (len(paths) > 0 and os.path.isfile(paths[0])):
return False
return True
def create_class(self, input):
self.class_name = input
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Apex Class', 'Apex Class Created')
printer.write('\nCreating Apex Class')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Apex Class Created')
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:apex:class:create',
'-n', self.class_name, '-d', self.class_dir]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nApex class created')
file = os.path.join(self.class_dir, self.class_name + '.cls')
sublime.active_window().open_file(file)
else:
printer.write('\nError creating Apex Class:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateUpgradeProjectCommand(sublime_plugin.TextCommand):
def run(self, edit):
printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Upgrading Project', 'Project Upgraded')
printer.write('\nUpgrading Project')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Project Upgraded')
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
return True
def run_command(self):
dx_folder = util.dxProjectFolder()
args = ['sfdx', 'force:project:upgrade', '-f']
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, cwd=dx_folder)
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nProject upgraded')
else:
printer.write('\nError upgrading project:')
printer.write('\n' + str(err, 'utf-8'))
class DxmateCreateProjectCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.edit = edit
self.project_name = ''
self.template = 'Defaultsfdx-project.json'
self.project_path = ''
self.namespace = None
sublime.active_window().show_input_panel(
'Project Name', self.project_name, self.create_project_name, None, None)
def create_project_name(self, input):
self.project_name = input
sublime.active_window().show_input_panel('Project Template', self.template,
self.create_project_template, None, None)
    def create_project_template(self, input):
        self.template = input
sublime.active_window().show_input_panel('Project Path', self.project_path,
self.create_project_namespace, None, None)
def create_project_namespace(self, input):
self.project_path = input
sublime.active_window().show_input_panel(
'Project Namespace', '', self.create_project, None, None)
def create_project(self, input):
printer.show()
self.namespace = input
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Creating Project', 'Project Created')
printer.write('\nCreating Project')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Project Created')
def run_command(self):
args = ['sfdx', 'force:project:create', '-n', self.project_name,
'-t', self.template, '-d', self.project_path]
        if self.namespace is not None and self.namespace != '':
            args.append('-s')
            args.append(self.namespace)
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
p = subprocess.Popen(args, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, startupinfo=startupinfo)
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nProject created')
else:
printer.write('\nError creating project:')
printer.write('\n' + str(out, 'UTF-8'))
class DxmateExecuteAnonymousApexCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.selection = ''
for region in self.view.sel():
if not region.empty():
self.selection += self.view.substr(region)
if self.selection == '':
self.selection = self.view.substr(sublime.Region(0, self.view.size()))
self.file_path = os.path.join(util.dxProjectFolder(), '.sfdx', 'tmpFile.cls')
with open(self.file_path, 'w+') as file_obj:
file_obj.write(self.selection)
        printer.show()
t = threading.Thread(target=self.run_command)
t.start()
t.printer = printer
t.process_id = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
ThreadProgress(t, 'Running anonymous apex', 'Anonymous apex run')
printer.write('\nRunning anonymous apex')
printer.write('\nResult: ')
PanelThreadProgress(t, 'Anonymous apex run')
def is_enabled(self, paths=[]):
        if not util.isDXProject():
return False
return True
def run_command(self):
args = ['sfdx', 'force:apex:execute', '-f', self.file_path]
startupinfo = None
if os.name == 'nt':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             startupinfo=startupinfo, cwd=util.dxProjectFolder())
        out, err = p.communicate()
if p.returncode == 0:
printer.write('\nFinished running apex')
printer.write('\n' + str(out, 'utf-8'))
else:
            printErr = err
            if not printErr:
                printErr = out
printer.write('\nError running apex')
printer.write('\n' + str(printErr, 'utf-8')) | 2.375 | 2 |
sdk/exception/validation_failed_exception.py | CLG0125/elemesdk | 1 | 12790879 | <gh_stars>1-10
class ValidationFailedException(Exception):
    pass
| 0.917969 | 1 |
nonequilibrium/noneq_data_io.py | mattsmart/biomodels | 0 | 12790880 | <gh_stars>0
import datetime
import numpy as np
import os
from os import sep
from noneq_settings import RUNS_FOLDER
def run_subdir_setup(run_subfolder=None):
current_time = datetime.datetime.now().strftime("%Y-%m-%d %I.%M.%S%p")
time_folder = current_time + os.sep
if run_subfolder is None:
current_run_folder = RUNS_FOLDER + time_folder
else:
current_run_folder = RUNS_FOLDER + run_subfolder + os.sep + time_folder
# subfolders in the timestamped run directory:
data_folder = os.path.join(current_run_folder, "data")
plot_lattice_folder = os.path.join(current_run_folder, "plot_lattice")
plot_data_folder = os.path.join(current_run_folder, "plot_data")
dir_list = [RUNS_FOLDER, current_run_folder, data_folder, plot_lattice_folder, plot_data_folder]
for dirs in dir_list:
if not os.path.exists(dirs):
os.makedirs(dirs)
return current_run_folder, data_folder, plot_lattice_folder, plot_data_folder
def state_write(state, row_vals, col_vals, dataname, rowname, colname, output_dir):
# here row refers to time array and col refers to gene labels (ie. name for ith element of state vector)
datapath = output_dir + sep + dataname + ".txt"
rowpath = output_dir + sep + dataname + '_' + rowname + ".txt"
colpath = output_dir + sep + dataname + '_' + colname + ".txt"
np.savetxt(datapath, np.array(state), delimiter=",", fmt="%d")
np.savetxt(rowpath, np.array(row_vals), delimiter=",")
np.savetxt(colpath, np.array(col_vals), delimiter=",", fmt="%s")
return datapath, rowpath, colpath
def state_read(datapath, rowpath, colpath):
# here row refers to time array and col refers to gene labels (ie. name for ith element of state vector)
state = np.loadtxt(datapath, delimiter=",")
row = np.loadtxt(rowpath, delimiter=",", dtype=float)
col = np.loadtxt(colpath, delimiter=",", dtype=str)
return state, row, col
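# Illustrative round trip through state_write/state_read; the matrix, labels
# and temp directory below are made-up example values, not from the project.
if __name__ == "__main__":
    import tempfile
    demo_dir = tempfile.mkdtemp()
    demo_state = np.array([[0, 1], [1, 0]])
    demo_times = np.array([0.0, 0.5])
    demo_labels = np.array(["geneA", "geneB"])
    paths = state_write(demo_state, demo_times, demo_labels,
                        "demo", "times", "genes", demo_dir)
    loaded_state, loaded_times, loaded_labels = state_read(*paths)
    assert np.array_equal(demo_state, loaded_state)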
| 2.4375 | 2 |
apps/users/admin.py | Houston-ARTCC/information-display-system | 1 | 12790881 | <filename>apps/users/admin.py<gh_stars>1-10
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from ..users.models import User
@admin.register(User)
class CustomUserAdmin(UserAdmin):
model = User
list_display = ('cid', 'email', 'first_name', 'last_name')
search_fields = list_display
list_filter = list_display
ordering = ('cid',)
fieldsets = (
('Personal Information', {'fields': ('cid', 'first_name', 'last_name', 'email', 'password')}),
('Facilities', {'fields': ('facilities',)}),
('Permissions', {'fields': ('is_superuser', 'user_permissions')}),
)
| 2.046875 | 2 |
ds.py | metal-gear-solidworks/mgs-ds | 0 | 12790882 | <reponame>metal-gear-solidworks/mgs-ds
import pygame
import pygame_gui
WIDTH = 1600
HEIGHT = 1200
# pygame init
pygame.init()
window_surface = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption('MGS Driver Station')
background = pygame.Surface((WIDTH, HEIGHT))
background.fill(pygame.Color('#000000'))
clock = pygame.time.Clock()
# pygame_gui init
manager = pygame_gui.UIManager((WIDTH, HEIGHT))
hello_button = pygame_gui.elements.UIButton(relative_rect=pygame.Rect((350, 275), (100, 50)), text='Say Hello', manager=manager)
running = True
while running:
    delta_t = clock.tick(60) / 1000.0
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
manager.process_events(event)
window_surface.blit(background, (0, 0))
manager.update(delta_t)
manager.draw_ui(window_surface)
pygame.display.update() | 2.6875 | 3 |
CadVlan/Pool/facade.py | marcusgc/GloboNetworkAPI-WebUI | 17 | 12790883 | # -*- coding:utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ipaddress
def populate_expectstring_choices(client):
expectstring_choices = client.create_ambiente().listar_healtchcheck_expect_distinct()
expectstring_choices['healthcheck_expect'].insert(0, ('', '-'))
return expectstring_choices
def populate_enviroments_choices(client):
enviroments = client.create_pool().list_all_environment_related_environment_vip()
enviroments_choices = [('', '-')]
# Format enviroments
for obj in enviroments:
enviroments_choices.append((obj['id'], "%s - %s - %s" % (obj['divisao_dc_name'],
obj['ambiente_logico_name'],
obj['grupo_l3_name'])))
return enviroments_choices
def populate_optionsvips_choices(client, tips='Balanceamento'):
optionsvips = client.create_option_vip().get_all()
optionsvips_choices = [('', '-')]
for obj in optionsvips['option_vip']:
if obj['tipo_opcao'] == tips:
optionsvips_choices.append((obj['nome_opcao_txt'], obj['nome_opcao_txt']))
return optionsvips_choices
def populate_servicedownaction_choices(client, tips='ServiceDownAction'):
    optionspool = client.create_option_pool().get_all_option_pool(option_type=tips)
servicedownaction_choices = [('', '-')]
for obj in optionspool:
servicedownaction_choices.append((obj['id'], obj['name']))
return servicedownaction_choices
def populate_healthcheck_choices(client):
optionspool = client.create_option_pool().get_all_option_pool(option_type='HealthCheck')
healthcheck_choices = [('', '-')]
for obj in optionspool:
healthcheck_choices.append((obj['name'], obj['name']))
return healthcheck_choices
def find_servicedownaction_id(client, option_name):
optionspool = client.create_option_pool().get_all_option_pool(option_type='ServiceDownAction')
for obj in optionspool:
if obj['name'] == option_name:
return obj['id']
def find_servicedownaction_object(client, option_name=None, id=None):
optionspool = client.create_option_pool().get_all_option_pool(option_type='ServiceDownAction')
if id:
for obj in optionspool:
if obj['id'] == id:
return obj['name']
for obj in optionspool:
if obj['name'] == option_name:
return obj
def populate_optionspool_choices(client, environment):
    optionspool_choices = [('', '-')]
    # environment may arrive as a dict (with an 'id' key) or as a plain id
    env_id = environment['id'] if not isinstance(environment, int) else environment
    optionspools = client.create_pool().get_opcoes_pool_by_environment(env_id)
for obj in optionspools['options_pool']:
optionspool_choices.append((obj['id'], obj['name']))
return optionspool_choices
def populate_pool_members_by_lists(client, members):
pool_members = []
ip_list_full = []
if len(members.get("ports_reals")) > 0 and len(members.get("ips")) > 0:
for i in range(0, len(members.get("ports_reals"))):
pool_members.append({
'id': members.get("id_pool_member")[i],
'id_equip': members.get("id_equips")[i],
'nome_equipamento': members.get("name_equips")[i],
'priority': members.get("priorities")[i],
'port_real': members.get("ports_reals")[i],
'weight': members.get("weight")[i],
'id_ip': members.get("id_ips")[i],
'ip': members.get("ips")[i]
})
ip_list_full.append({'id': members.get("id_ips")[i], 'ip': members.get("ips")[i]})
return pool_members, ip_list_full
def populate_pool_members_by_obj(server_pool_members):
pool_members = []
for obj in server_pool_members:
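        # member_status packs flags as 0b1<enabled><up/down>; mbs keeps those
        # three bits as a string so the two flags can be sliced out below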
mbs = bin(int(obj.get('member_status')))[2:5].zfill(3)
ip = obj['ip'] if obj['ip'] else obj['ipv6']
pool_members.append(
{'id': obj['id'],
'id_equip': obj['equipment']['id'],
'member_status_hab': mbs[1],
'member_status_updown': mbs[2],
'member_status': obj["member_status"],
'nome_equipamento': obj['equipment']['name'],
'priority': obj['priority'],
'port_real': obj['port_real'],
'weight': obj['weight'],
'id_ip': ip['id'] if ip else '',
'ip': ip['ip_formated'] if ip else ''})
return pool_members
def format_healthcheck(request):
healthcheck = dict()
healthcheck["identifier"] = ""
healthcheck["healthcheck_type"] = str(request.POST.get('healthcheck'))
healthcheck["healthcheck_request"] = request.POST.get('healthcheck_request')
healthcheck["healthcheck_expect"] = request.POST.get('healthcheck_expect')
healthcheck_destination = request.POST.get('healthcheck_destination')
healthcheck["destination"] = ("*:%s" % healthcheck_destination) \
if healthcheck_destination else '*:*'
return healthcheck
def format_servicedownaction(client, form):
servicedownaction = dict()
servicedownaction["id"] = int(form.cleaned_data['servicedownaction'])
servicedownaction["name"] = str(find_servicedownaction_object(client, id=servicedownaction['id']))
return servicedownaction
def format_server_pool_members(request, limit=0):
pool_members = []
equips = request.POST.getlist('id_equip')
for i in range(0, len(equips)):
server_pool_members = dict()
server_pool_members["id"] = int(request.POST.getlist('id_pool_member')[i]) \
if request.POST.getlist('id_pool_member')[i] else None
server_pool_members["identifier"] = str(request.POST.getlist('equip')[i])
server_pool_members["priority"] = int(request.POST.getlist('priority')[i])
server_pool_members["equipment"] = _format_equipments(request, i)
server_pool_members["weight"] = int(request.POST.getlist('weight')[i])
server_pool_members["limit"] = limit
server_pool_members["port_real"] = int(request.POST.getlist('ports_real_reals')[i])
try:
member_status = '1%s%s' % (
request.POST.getlist('member_status_hab')[i],
request.POST.getlist('member_status_updown')[i]
)
server_pool_members["member_status"] = int(member_status, 2)
        except Exception:
            # When copying a pool, the required information was already sent in
            # the request and there is no separation of hab and updown flags.
server_pool_members["member_status"] = int(request.POST.getlist('member_status')[i])
v4, v6 = _format_ips(request, i)
server_pool_members["ip"] = v4
server_pool_members["ipv6"] = v6
pool_members.append(server_pool_members)
return pool_members
def _format_equipments(request, i):
equipments = dict()
equipments["id"] = int(request.POST.getlist('id_equip')[i])
equipments["nome"] = str(request.POST.getlist('equip')[i])
return equipments
def _format_ips(request, i):
ips = dict()
ips["id"] = int(request.POST.getlist('id_ip')[i])
ips["ip_formated"] = str(request.POST.getlist('ip')[i])
v4 = ips if "." in ips['ip_formated'] else None
v6 = ips if ":" in ips['ip_formated'] else None
return v4, v6
def format_name_ip_search(name):
try:
ip = ipaddress.ip_address(name)
    except ValueError:
search = {'nome': name}
else:
if ip.version == 6:
ip = ip.compressed.split(':')
search = {
'ipv6equipament__ip__oct1': ip[0],
'ipv6equipament__ip__oct2': ip[1],
'ipv6equipament__ip__oct3': ip[2],
'ipv6equipament__ip__oct4': ip[3],
'ipv6equipament__ip__oct5': ip[4],
'ipv6equipament__ip__oct6': ip[5],
'ipv6equipament__ip__oct7': ip[6],
'ipv6equipament__ip__oct8': ip[7]
}
if ip.version == 4:
ip = ip.compressed.split('.')
search = {
'ipequipamento__ip__oct1': ip[0],
'ipequipamento__ip__oct2': ip[1],
'ipequipamento__ip__oct3': ip[2],
'ipequipamento__ip__oct4': ip[3]
}
return search
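# Minimal illustration of format_name_ip_search (example values only; assumes
# Python 3, where ipaddress.ip_address accepts plain str input):
if __name__ == "__main__":
    print(format_name_ip_search("load-balancer-01"))
    # -> {'nome': 'load-balancer-01'}
    print(format_name_ip_search("10.0.0.1"))
    # -> {'ipequipamento__ip__oct1': '10', ..., 'ipequipamento__ip__oct4': '1'}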
| 2 | 2 |
agent/heuristics/HeuristicSlr.py | bpiv400/eBay | 2 | 12790884 | import numpy as np
import torch
from agent.heuristics.util import get_agent_turn, wrapper, get_days, \
get_recent_byr_offers, get_last_norm
from agent.const import DELTA_SLR, NUM_COMMON_CONS
class HeuristicSlr:
def __init__(self, delta=None):
self.patient = np.isclose(delta, DELTA_SLR[-1])
def __call__(self, observation=None):
# noinspection PyProtectedMember
x = observation._asdict()
# turn number
turn = get_agent_turn(x=x, byr=False)
# index of action
f = wrapper(turn)
if turn == 2:
days = get_days(x=x, turn=turn)
tau = 5.05 if self.patient else 3.03
idx = f(0) if days <= tau else f(1)
elif turn == 4:
if self.patient:
days = get_days(x=x, turn=turn)
idx = f(0) if days <= 2.01 else f(.5)
else:
num_offers = get_recent_byr_offers(x=x, turn=turn)
idx = f(1) if num_offers <= .5 else f(0)
elif turn == 6:
if self.patient:
days4 = get_days(x=x, turn=4)
if days4 <= 2.01:
days6 = get_days(x=x, turn=6)
idx = f(0) if days6 <= 2.04 else f(1)
else:
norm = get_last_norm(x=x, turn=turn)
idx = f(.5) if norm <= .67 else f(1)
else:
idx = f(0)
else:
raise ValueError('Invalid turn: {}'.format(turn))
# deterministic categorical action distribution
pdf = torch.zeros(NUM_COMMON_CONS + 3, dtype=torch.float)
pdf[idx] = 1.
return pdf
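# The pdf above is a one-hot categorical over the discrete concession set. A
# minimal sketch of how such a deterministic distribution could be consumed
# downstream (hypothetical usage; NUM_COMMON_CONS is the constant imported
# from agent.const above):
if __name__ == "__main__":
    demo_pdf = torch.zeros(NUM_COMMON_CONS + 3, dtype=torch.float)
    demo_pdf[2] = 1.  # pretend the heuristic picked index 2
    action = torch.distributions.Categorical(probs=demo_pdf).sample()
    assert action.item() == 2  # a one-hot pdf always yields the same action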
| 2.203125 | 2 |
MachineLearning/TP1/normalEqn.py | piwithy/ENSTA_MACHINE_LEARNING | 0 | 12790885 | <reponame>piwithy/ENSTA_MACHINE_LEARNING<gh_stars>0
import numpy as np
def normalEqn(X, y):
""" Computes the closed-form solution to linear regression
normalEqn(X,y) computes the closed-form solution to linear
regression using the normal equations.
"""
# Initialize some useful values
theta = 0
# ====================== YOUR CODE HERE ======================
# Instructions: Complete the code to compute the closed form solution
# to linear regression and put the result in theta.
#
theta = np.dot(np.dot(np.linalg.inv(np.dot(X.T, X)), X.T), y)
# ==============================================================
return theta
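# Quick sanity check of the closed-form solution on an exactly linear dataset
# (illustrative numbers; a bias column of ones is prepended to X):
if __name__ == "__main__":
    x = np.array([0.0, 1.0, 2.0, 3.0])
    X_demo = np.column_stack([np.ones_like(x), x])
    y_demo = 1.0 + 2.0 * x
    print(normalEqn(X_demo, y_demo))  # approximately [1., 2.]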
| 3.703125 | 4 |
local_groups/migrations/0028_auto_20170308_1812.py | JoshZero87/site | 4 | 12790886 | <reponame>JoshZero87/site<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-03-08 18:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('local_groups', '0027_auto_20170308_1801'),
]
operations = [
migrations.AlterField(
model_name='group',
name='description',
field=models.TextField(max_length=250, null=True, verbose_name='Description (250 characters or less)'),
),
migrations.AlterField(
model_name='group',
name='postal_code',
field=models.CharField(max_length=12, null=True, verbose_name='Postal Code'),
),
migrations.AlterField(
model_name='group',
name='rep_email',
field=models.EmailField(max_length=254, null=True, verbose_name='Contact Email'),
),
migrations.AlterField(
model_name='group',
name='rep_first_name',
field=models.CharField(blank=True, default='John', max_length=9, verbose_name='First Name'),
preserve_default=False,
),
migrations.AlterField(
model_name='group',
name='rep_last_name',
field=models.CharField(blank=True, default='Doe', max_length=12, verbose_name='Last Name'),
preserve_default=False,
),
migrations.AlterField(
model_name='group',
name='slug',
field=models.SlugField(null=True, unique=True),
),
]
| 1.671875 | 2 |
deeplfinterp/util/analytics.py | leaveitout/deep_light_field_interp | 3 | 12790887 | <reponame>leaveitout/deep_light_field_interp
#!/usr/bin/env python3
"""
Copyright <NAME>, Trinity College Dublin, 2017.
Contact sbruton[á]tcd.ie.
"""
import json
import os
from typing import Type
# Must run on headless servers (no X session), so select the Agg backend
# before importing pyplot.
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pickle
from sklearn.preprocessing import OneHotEncoder
from skimage.measure import compare_ssim as ssim
from skimage.measure import compare_mse as mse
from skimage.measure import compare_nrmse as nrmse
from skimage.measure import compare_psnr as psnr
import numpy as np
import torch
from torch import nn
import h5py
from .meters import CustomProgressBar, AverageMeter
from ..datasets import LightFieldDataset
def one_hot_to_dense(y: np.array) -> np.ndarray:
return np.fromiter((np.argmax(row) for row in y), dtype=np.int)
def dense_to_one_hot(y: np.array,
n_values: int = "auto"):
if n_values == 'auto':
n_values = np.max(y)
y_to_encode = y
if len(y.shape) == 1:
y_to_encode = np.reshape(y, (-1, 1))
encoder = OneHotEncoder(n_values=n_values, dtype=np.float32, sparse=False)
y_encoded = encoder.fit_transform(y_to_encode)
return y_encoded
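# Round trip through the two helpers above; n_values is fixed explicitly so
# every class is encoded even if absent from a short label vector. This
# assumes the legacy scikit-learn OneHotEncoder(n_values=...) API that the
# function above already relies on.
if __name__ == "__main__":
    demo_labels = np.array([0, 2, 1, 2])
    demo_one_hot = dense_to_one_hot(demo_labels, n_values=3)
    assert np.array_equal(one_hot_to_dense(demo_one_hot), demo_labels)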
def save_model(model: nn.Module,
output_path: os.path):
print("Saving " + str(output_path) + " model definition...")
model_output_path = os.path.join(output_path, "model.pkl")
torch.save(model.state_dict(), model_output_path)
def save_evaluation(model,
output_path: os.path,
time_per_image: float,
final_loss):
print("Saving the evaluation of model.")
save_model(model=model, output_path=output_path)
results = {'loss': float(final_loss), 'time_per_image': time_per_image}
print(results)
with open(os.path.join(output_path, "results.json"), 'w') as fp:
json.dump(results, fp, indent=4, sort_keys=True)
def save_training_config(train_config: dict,
output_path: os.path):
# TODO: This needs to be adapted for pytorch
print("Saving training config.")
json_filename = 'train_config.json'
with open(os.path.join(output_path, json_filename), 'w') as fp:
json.dump(train_config, fp, indent=4, sort_keys=True)
def save_history(train_loss_history,
valid_loss_history,
output_path: os.path):
print("Saving history to pickle file.")
with open(os.path.join(output_path, 'train_loss_history.pkl'), 'wb') as fp:
pickle.dump(train_loss_history, fp)
with open(os.path.join(output_path, 'valid_loss_history.pkl'), 'wb') as fp:
pickle.dump(valid_loss_history, fp)
save_history_plots(train_loss_history=train_loss_history,
valid_loss_history=valid_loss_history,
output_path=output_path)
def calc_and_save_all_metrics(test_set: Type[LightFieldDataset],
output_path: os.path,
h5_file_loc: os.path = None,
h5_dataset_key: str = None) -> dict:
with h5py.File(h5_file_loc, 'a') as h5_file:
output_images = h5_file[h5_dataset_key]
all_targets_shape = (
len(test_set),
test_set.num_views_y,
test_set.num_channels,
test_set.height_y,
test_set.width_y
)
assert output_images.shape == all_targets_shape
# TODO: If the images are in an array we need to reshape them
# TODO: And again when saving.
num_images = all_targets_shape[0]
num_views = all_targets_shape[1]
ssim_results = np.zeros((num_images, num_views), dtype=np.float32)
psnr_results = np.zeros((num_images, num_views), dtype=np.float32)
mse_results = np.zeros((num_images, num_views), dtype=np.float32)
nrmse_results = np.zeros((num_images, num_views), dtype=np.float32)
ssim_meter = AverageMeter(name='SSIM', cum=False)
custom = CustomProgressBar(label='SSIM')
print("Calculating image metrics.")
for image_idx in custom.bar(range(num_images)):
target_lf = test_set.get_only_y(image_idx)
for view_idx in range(num_views):
target_reshape = np.moveaxis(
target_lf[view_idx],
-3,
-1
)
output_reshape = np.moveaxis(
output_images[image_idx, view_idx],
-3,
-1
)
ssim_results[image_idx, view_idx] = ssim(
target_reshape,
output_reshape,
multichannel=True
)
psnr_results[image_idx, view_idx] = psnr(
target_reshape,
output_reshape
)
mse_results[image_idx, view_idx] = mse(
target_reshape,
output_reshape
)
nrmse_results[image_idx, view_idx] = nrmse(
target_reshape,
output_reshape
)
# Log errors
ssim_meter.update(float(np.mean(ssim_results[image_idx])))
custom.format_custom_text.update_mapping(value=ssim_meter.value())
metrics = {
'ssim_avg': float(np.mean(ssim_results)),
'ssim_std': float(np.std(ssim_results)),
'psnr_avg': float(np.mean(psnr_results)),
'psnr_std': float(np.std(psnr_results)),
'mse_avg': float(np.mean(mse_results)),
'mse_std': float(np.std(mse_results)),
'nrmse_avg': float(np.mean(nrmse_results)),
'nrmse_std': float(np.std(nrmse_results))
}
# Also save to a json for easy viewing.
with open(os.path.join(output_path, "metrics.json"), 'w') as fp:
json.dump(metrics, fp, indent=4, sort_keys=True)
output_images.attrs.create('ssim', ssim_results)
output_images.attrs.create('psnr', psnr_results)
output_images.attrs.create('mse', mse_results)
output_images.attrs.create('nrmse', nrmse_results)
output_images.attrs.create('ssim_avg', metrics['ssim_avg'])
output_images.attrs.create('ssim_std', metrics['ssim_std'])
output_images.attrs.create('psnr_avg', metrics['psnr_avg'])
output_images.attrs.create('psnr_std', metrics['psnr_std'])
output_images.attrs.create('mse_avg', metrics['mse_avg'])
output_images.attrs.create('mse_std', metrics['mse_std'])
output_images.attrs.create('nrmse_avg', metrics['nrmse_avg'])
output_images.attrs.create('nrmse_std', metrics['nrmse_std'])
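# The metric helpers above expect channel-last images, hence the np.moveaxis
# calls. A tiny standalone check with made-up 8x8 RGB arrays (data_range is
# passed explicitly because the images are floats):
if __name__ == "__main__":
    chw = np.random.rand(3, 8, 8).astype(np.float32)  # channel-first, as stored
    hwc = np.moveaxis(chw, 0, -1)                     # channel-last for skimage
    noisy = np.clip(hwc + 0.01 * np.random.rand(8, 8, 3).astype(np.float32), 0, 1)
    print(ssim(hwc, noisy, multichannel=True, data_range=1.0))
    print(psnr(hwc, noisy, data_range=1.0))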
def save_history_plots(train_loss_history,
valid_loss_history,
output_path: os.path):
loss_fig = plt.figure()
loss_plot = loss_fig.add_subplot(111)
loss_plot.plot(train_loss_history)
loss_plot.plot(valid_loss_history)
loss_plot.set_title('Model Loss')
loss_plot.set_xlabel('Updates')
loss_plot.legend(['Train', 'Test'], loc='upper left')
loss_fig.savefig(os.path.join(output_path, 'loss.png'))
plt.close(loss_fig)
def model_dump(full_model,
train_config,
output_path: os.path):
# TODO: This needs to be adapted for pytorch
if not os.path.isdir(output_path):
os.mkdir(output_path)
save_model(full_model, output_path)
save_training_config(train_config, output_path)
| 1.664063 | 2 |
easysocks5/__init__.py | keenser/easysocks5 | 5 | 12790888 | <reponame>keenser/easysocks5
""" easysocks5 is a simple SOCKS5 server implementation based on AsyncIO library
Currently easysocks5 only support unauthenticated socks5 and CONNECT command.
"""
__name__ = "easysocks5"
__version__ = "0.0.5"
__author__ = "<NAME>"
__author_email__ = "<EMAIL>"
__license__ = "MIT"
| 1.171875 | 1 |
fonts/__init__.py | pro585code/RaspDroid-SeverSide | 1 | 12790889 | <reponame>pro585code/RaspDroid-SeverSide
__author__ = 'youngsoul'
| 0.871094 | 1 |
mp_VTKRoutines.py | andregouws/dev | 0 | 12790890 | #!/usr/bin/python
'''
VTK engine room for mrMeshPy viewer
The main vtk processing is done by functions here - although some hardcore
processing is handled in subroutines of other imported modules.
A core concept here is the tracking (kepping in scope) or the "targetVTKWindow"
- this is a vtkRenderWindowInteractor instance in the main program UI (user
interface) - by creatoing multiple instances of vtk windows we can load
multiple meshes. Some functions reference this specifically with a reference
index passed from mrVista --- mainWindowUI.vtkInstances[int(theMeshInstance)]
while others just referene the most recently added instance (e.g. when adding
a new mesh) --- mainWindowUI.vtkInstances[-1]
Note that it is the mainWindowUI that is passed to all functions so that all
funcitons have the content of the main window in scope.
<NAME> 2017
'''
import vtk
from numpy import *
import time
from vtk.util import numpy_support
debug = True
# local modules
from mp_unpackIncomingData import unpackData
from mp_VTKProcessing import *
from mp_VTKDrawing import *
def loadNewMesh(currVTKInstance, commandArgs, mainWindowUI, the_TCPserver):
#first get all the data we are expecting from the server
## NB this assumes that the order of sending by the server is
# 1) vertices
# 2) triangles
# 3) color data r (rgba) for each vertex
# 4) color data g (rgba) for each vertex
# 5) color data b (rgba) for each vertex
# 6) color data a (rgba) for each vertex
if debug:
print('received request for new mesh with Args:')
print(commandArgs)
# sanity check
if ('vertices' in commandArgs[0]) and ('triangles' in commandArgs[1]):
pass
else:
return "error - expecting vertices, then triangles!"
# load the surfaces data
verticesArgs = commandArgs[0].strip().split(',')
vertices = unpackData(verticesArgs[1], int(verticesArgs[2]), the_TCPserver)
vertices = array(vertices,'f')
    vertices = vertices.reshape((len(vertices)//3, 3))
trianglesArgs = commandArgs[1].strip().split(',')
triangles = unpackData(trianglesArgs[1], int(trianglesArgs[2]), the_TCPserver)
triangles = array(triangles,'f')
if debug: print(triangles)
    triangles = triangles.reshape((len(triangles)//3, 3))
if debug: print(triangles)
# load the surface colour data
rVecArgs = commandArgs[2].strip().split(',')
r_vec = unpackData(rVecArgs[1], int(rVecArgs[2]), the_TCPserver)
r_vec = array(r_vec,'uint8')
if debug: print(r_vec)
gVecArgs = commandArgs[3].strip().split(',')
g_vec = unpackData(gVecArgs[1], int(gVecArgs[2]), the_TCPserver)
g_vec = array(g_vec,'uint8')
bVecArgs = commandArgs[4].strip().split(',')
b_vec = unpackData(bVecArgs[1], int(bVecArgs[2]), the_TCPserver)
b_vec = array(b_vec,'uint8')
aVecArgs = commandArgs[5].strip().split(',')
a_vec = unpackData(aVecArgs[1], int(aVecArgs[2]), the_TCPserver)
a_vec = array(a_vec,'uint8')
if debug:
print(len(r_vec))
print(len(g_vec))
print(len(b_vec))
print(len(a_vec))
#combine into numpy array
colorDat = squeeze(array(squeeze([r_vec,g_vec,b_vec,a_vec]),'B',order='F').transpose())
# convert this to a VTK unsigned char array
scalars = numpy_support.numpy_to_vtk(colorDat,0)
curr_scalars = vtk.vtkUnsignedCharArray()
curr_scalars.DeepCopy(scalars)
    ## ---- ok, we have the data, let's turn it into vtk stuff
# Process vertices
points = vtk.vtkPoints()
for i in range(vertices.shape[0]):
points.InsertPoint(i,vertices[i][0],vertices[i][1],vertices[i][2])
# Process faces (triangles)
polys = vtk.vtkCellArray()
nTriangles = triangles.shape[0]
for i in range(nTriangles):
polys.InsertNextCell(3)
for j in range(3):
polys.InsertCellPoint(int(triangles[i][j]))
# check
if debug: print(points)
if debug: print(polys)
if debug: print(scalars)
if debug: print(currVTKInstance)
# Assemble as PolyData
polyData = vtk.vtkPolyData()
polyData.SetPoints(points)
polyData.SetPolys(polys)
polyData.GetPointData().SetScalars(scalars)
    ## TODO ? smoothing on first load?
    smooth = vtk.vtkSmoothPolyDataFilter()
smooth.SetNumberOfIterations(0)
smooth.SetRelaxationFactor(0.0)
smooth.FeatureEdgeSmoothingOff()
smooth.SetInputData(polyData)
pdm = vtk.vtkPolyDataMapper()
pdm.SetScalarModeToUsePointData()
pdm.SetInputConnection(smooth.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(pdm)
iren = mainWindowUI.vtkInstances[-1]
## ---- engine room for drawing on the surface
# add a picker that allows is top pick points on the surface
picker = vtk.vtkCellPicker()
picker.SetTolerance(0.0001)
mainWindowUI.vtkInstances[-1].SetPicker(picker)
mainWindowUI.vtkInstances[-1]._Iren.pickedPointIds = [] #place holder for picked vtk point IDs so we can track
mainWindowUI.vtkInstances[-1].pickedPointIds = mainWindowUI.vtkInstances[-1]._Iren.pickedPointIds
mainWindowUI.vtkInstances[-1]._Iren.pickedPointOrigValues = [] #place holder for picked vtk point IDs so we can track
mainWindowUI.vtkInstances[-1].pickedPointOrigValues = mainWindowUI.vtkInstances[-1]._Iren.pickedPointOrigValues
mainWindowUI.vtkInstances[-1]._Iren.pickedPoints = vtk.vtkPoints() #place holder for picked vtk point IDs so we can track
mainWindowUI.vtkInstances[-1].pickedPoints = mainWindowUI.vtkInstances[-1]._Iren.pickedPoints
mainWindowUI.vtkInstances[-1]._Iren.inDrawMode = 0 #TODO
mainWindowUI.vtkInstances[-1].inDrawMode = mainWindowUI.vtkInstances[-1]._Iren.inDrawMode
# drawing functions imported from mp_VTKDrawing
mainWindowUI.vtkInstances[-1].AddObserver('LeftButtonPressEvent', drawingPickPoint, 1.0)
mainWindowUI.vtkInstances[-1].AddObserver('RightButtonPressEvent', drawingMakeROI, 1.0)
ren = mainWindowUI.vtkInstances[-1].ren
mainWindowUI.vtkInstances[-1]._Iren.ren = ren
ren.AddActor(actor)
ren.SetBackground(1,1,1)
ren.ResetCamera()
ren.Render()
mainWindowUI.vtkInstances[-1].Render()
# lets put some of the data objects in the scope of the
# main window so that they can be manipulated later.
mainWindowUI.vtkInstances[-1].curr_actor = actor
mainWindowUI.vtkInstances[-1].curr_smoother = smooth
mainWindowUI.vtkInstances[-1].curr_polydata = polyData
mainWindowUI.vtkInstances[-1].curr_mapper = pdm
mainWindowUI.vtkInstances[-1].curr_camera = ren.GetActiveCamera()
# and the raw mesh coordinate data.. why not
mainWindowUI.vtkInstances[-1].curr_points = points
mainWindowUI.vtkInstances[-1].curr_polys = polys
mainWindowUI.vtkInstances[-1].curr_scalars = curr_scalars #Deep copied
# turns out that later processes access the inherited renderwindowinteractor (?)
# so lets put all the above in the scope of that too
mainWindowUI.vtkInstances[-1]._Iren.curr_actor = actor
mainWindowUI.vtkInstances[-1]._Iren.curr_smoother = smooth
mainWindowUI.vtkInstances[-1]._Iren.curr_polydata = polyData
mainWindowUI.vtkInstances[-1]._Iren.curr_mapper = pdm
mainWindowUI.vtkInstances[-1]._Iren.curr_camera = ren.GetActiveCamera()
mainWindowUI.vtkInstances[-1]._Iren.curr_points = points
mainWindowUI.vtkInstances[-1]._Iren.curr_polys = polys
mainWindowUI.vtkInstances[-1]._Iren.curr_scalars = curr_scalars #Deep copied
# and so we can access ui controls (e.g. statusbar) from the inherited window
mainWindowUI.vtkInstances[-1]._Iren.parent_ui = mainWindowUI
def KeyPress(obj, evt):
key = obj.GetKeySym()
if key == 'l':
currVTKinstance = len(mainWindowUI.vtkInstances)
print(key)
print(mainWindowUI.vtkInstances[currVTKinstance-1])
#let's also track key presses per instance esp for the draw routine :)
mainWindowUI.vtkInstances[-1].AddObserver("KeyPressEvent",KeyPress)
mainWindowUI.tabWidget.setCurrentIndex(len(mainWindowUI.vtkInstances)-1) #zero index
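# Condensed, self-contained sketch of the numpy -> vtkPolyData conversion
# performed above, stripped of the TCP/server plumbing (the three vertices,
# single triangle and RGBA colours are illustrative values only):
if __name__ == "__main__":
    demo_verts = array([[0, 0, 0], [1, 0, 0], [0, 1, 0]], 'f')
    demo_tris = array([[0, 1, 2]])
    demo_rgba = array([[255, 0, 0, 255]] * 3, 'uint8')
    demo_points = vtk.vtkPoints()
    for v in demo_verts:
        demo_points.InsertNextPoint(v[0], v[1], v[2])
    demo_polys = vtk.vtkCellArray()
    for t in demo_tris:
        demo_polys.InsertNextCell(3)
        for vert_id in t:
            demo_polys.InsertCellPoint(int(vert_id))
    demo_pd = vtk.vtkPolyData()
    demo_pd.SetPoints(demo_points)
    demo_pd.SetPolys(demo_polys)
    demo_pd.GetPointData().SetScalars(numpy_support.numpy_to_vtk(demo_rgba, 0))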
def smoothMesh(theMeshInstance, commandArgs, mainWindowUI, the_TCPserver):
#lets get the apt window
targetVTKWindow = mainWindowUI.vtkInstances[int(theMeshInstance)] #NB zero indexing
# lets show the correct tab
mainWindowUI.tabWidget.setCurrentIndex(int(theMeshInstance)) #zero index
#mainWindowUI.tabWidget.repaint()
mainWindowUI.tabWidget.update()
#lets get the original data
the_smoother = targetVTKWindow.curr_smoother
the_mapper = targetVTKWindow.curr_mapper
if debug: print(targetVTKWindow.curr_actor.GetMapper().GetInput().GetPointData().GetScalars())
if debug: print(targetVTKWindow.curr_actor.GetMapper().GetInput().GetPointData().GetScalars().GetTuple(1000))
#expecting a string that reads something like 'iterations,200,relaxationfactor,1.2'
# sanity check
if ('iterations' in commandArgs[0]) and ('relaxationfactor' in commandArgs[0]):
smoothingArgs = commandArgs[0].strip().split(',')
iterations = int(smoothingArgs[1])
relaxationfactor = float(smoothingArgs[3])
else:
return "error - expecting vertices, then curvature, then triangles!"
newActor = VTK_smoothing(the_smoother, the_mapper, iterations, relaxationfactor)
targetVTKWindow.ren.RemoveActor(targetVTKWindow.curr_actor)
targetVTKWindow.ren.AddActor(newActor)
targetVTKWindow.curr_actor = newActor #lets keep track
targetVTKWindow.ren.Render()
targetVTKWindow.Render()
# run mesh update to reset the color map (smoothing "messes" this up)
updateMeshData(theMeshInstance, [], mainWindowUI, the_TCPserver)
def updateMeshData(theMeshInstance, commandArgs, mainWindowUI, the_TCPserver):
# here the base mesh is already loaded and we are simply updating with the
# current View settings in from the vista session WITH THE COLOR VALUES FROM
# VISTA - i.e. do not go through a lookuptable
#lets get the apt window
targetVTKWindow = mainWindowUI.vtkInstances[int(theMeshInstance)] #NB zero indexing
# lets show the correct tab
mainWindowUI.tabWidget.setCurrentIndex(int(theMeshInstance)) #zero index
#mainWindowUI.tabWidget.repaint()
mainWindowUI.tabWidget.update()
#lets get the original data
the_polyData = targetVTKWindow.curr_polydata
the_mapper = targetVTKWindow.curr_mapper
#first get all the data we are expecting from the server
## NB this assumes that the order of sending by the server is
# 1) r_vector - red component
# 2) g_vector - blue component
# 3) b_vector - green component
# 4) a_vector - aplha component
if debug:
print('received request for UPDATE DIRECT mesh with Args:')
print(commandArgs)
if len(commandArgs) != 0 : #new data has come from MATLAB so recompute
# load the surfaces data
rVecArgs = commandArgs[0].strip().split(',')
r_vec = unpackData(rVecArgs[1], int(rVecArgs[2]), the_TCPserver)
r_vec = array(r_vec,'uint8')
if debug: print(r_vec)
gVecArgs = commandArgs[1].strip().split(',')
g_vec = unpackData(gVecArgs[1], int(gVecArgs[2]), the_TCPserver)
g_vec = array(g_vec,'uint8')
bVecArgs = commandArgs[2].strip().split(',')
b_vec = unpackData(bVecArgs[1], int(bVecArgs[2]), the_TCPserver)
b_vec = array(b_vec,'uint8')
aVecArgs = commandArgs[3].strip().split(',')
a_vec = unpackData(aVecArgs[1], int(aVecArgs[2]), the_TCPserver)
a_vec = array(a_vec,'uint8')
if debug:
print(len(r_vec))
print(len(g_vec))
print(len(b_vec))
print(len(a_vec))
#combine into numpy array
colorDat = squeeze(array(squeeze([r_vec,g_vec,b_vec,a_vec]),'B',order='F').transpose())
# convert this to a VTK unsigned char array
vtkColorArray = numpy_support.numpy_to_vtk(colorDat,0)
# keep a "deep" copy - this is to workaround some artifacts generated
# by vtk algorithms (e.g. smoothing) that also smooth the color data
# on the surface and then automatically update the inherited color map
        # - we allow vtk to do this but then overwrite the recomputed color
# map AFTER the algorithms have run
deepCopyScalars = vtk.vtkUnsignedCharArray()
deepCopyScalars.DeepCopy(vtkColorArray)
targetVTKWindow.curr_scalars = deepCopyScalars
#TODO - this may have impact on later processing - investigate
else:
# no new data from MATLAB, probably just an internal re-draw call
# after something like smoothing - just grab the current deep
# copy of the required scalars
vtkColorArray = targetVTKWindow.curr_scalars
# OK - we have the data - let's update the mesh
newActor = VTK_updateMesh(targetVTKWindow, vtkColorArray, mainWindowUI)
targetVTKWindow.ren.AddActor(newActor)
targetVTKWindow.ren.RemoveActor(targetVTKWindow.curr_actor)
targetVTKWindow.curr_actor = newActor #lets keep track
targetVTKWindow.ren.Render()
targetVTKWindow.Render()
print('success with direct mesh update routine')
## --------------------------------------------------------------------------------
# test example animation
def rotateMeshAnimation(currVTKInstance, commandArgs, mainWindowUI, the_TCPserver):
#rotation args
rotations = commandArgs[0].strip().split(',')
rotations = unpackData(rotations[1], int(rotations[2]), the_TCPserver)
if debug: print(rotations)
targetVTKWindow = mainWindowUI.vtkInstances[int(currVTKInstance)] #NB zero indexing
camera = targetVTKWindow.ren.GetActiveCamera()
if debug: print(camera)
for i in range(len(rotations)):
camera.Azimuth(rotations[i])
#targetVTKWindow.ren.Render()
        targetVTKWindow.Render()
time.sleep(0.02)
the_TCPserver.socket.write(str('send useful message back here TODO'))
## --------------------------------------------------------------------------------
| 2.453125 | 2 |
src/hyperparameter_tuning.py | KatharinaHermann/tum-Advanced-DL-for-robotics-RL | 2 | 12790891 | <gh_stars>1-10
import os
import sys
import glob
import shutil
import numpy as np
import tensorflow as tf
import gym
import gym_pointrobo
from hwr.agents.pointrobo_ddpg import DDPG
from hwr.cae.cae import CAE
from hwr.training.pointrobot_trainer import PointrobotTrainer
from hwr.utils import load_params
# loading params:
params = load_params('params/hyperparam_tuning_params.json')
#Initialize the environment
env = gym.make(
params["env"]["name"],
params=params,
)
test_env = gym.make(
params["env"]["name"],
params=params
)
# deleting the previous runs logs:
logdir_files = glob.glob(os.path.join('results', 'hyperparam_tuning', '*'))
for f in logdir_files:
if os.path.isdir(f):
shutil.rmtree(f)
else:
os.remove(f)
# Hyperparameter grid search
for lr_i, lr in enumerate([5e-4, 1e-4, 5e-5]):
for max_grad_i, max_grad in enumerate([1, 0.5, 0.1]):
for tau_i, tau in enumerate([0.005, 0.001, 0.0005]):
for memory_capacity_i, memory_capacity in enumerate([1e6]):
print("Learning rate: {0: 1.8f} max_grad: {1: 3.2f} Tau_Target_update: {2: 1.3f} memory_capacity: {3: 4}".format(
lr, max_grad, tau, memory_capacity))
# the actual parameters:
params["agent"]["lr_actor"] = lr
params["agent"]["lr_critic"] = lr
params["agent"]["max_grad"] = max_grad
params["agent"]["tau"] = tau
params["agent"]["memory_capacity"] = memory_capacity
# setting up logdir for the current hyperparams:
logdir = os.path.join('results', 'hyperparam_tuning',
str(lr_i)+str(max_grad_i)+str(tau_i)+str(memory_capacity_i))
os.makedirs(logdir)
params["trainer"]["logdir"] = logdir
# writing the hyperparameters into a file:
info_file = os.path.join(logdir, 'params.txt')
with open(info_file, 'a') as f:
f.write('learning rate: {0: 1.8f}'.format(lr) + '\n')
f.write('max_grad: {0: 3.2f}'.format(max_grad) + '\n')
f.write('tau: {0: 1.3f}'.format(tau) + '\n')
f.write('batch size: {0: 4}'.format(memory_capacity) + '\n')
# deleting the previous checkpoints:
ckp_files = glob.glob(os.path.join(params["trainer"]["model_dir"], "*"))
for f in ckp_files:
os.remove(f)
# initialize the agent:
policy = DDPG(
env=env,
params=params
)
# initialize the trainer:
trainer = PointrobotTrainer(
policy,
env,
params,
test_env=test_env
)
trainer.train() | 2 | 2 |
prep/column_serializer.py | mack-the-psych/vdok3 | 0 | 12790892 | <reponame>mack-the-psych/vdok3<filename>prep/column_serializer.py
#!/usr/bin/env python
import pandas as pd
import ac_column_serializer as clsr
'''
Put a path file like "plimac-custom.pth" into any of your sys.path directories
(e.g. C:/ProgramData/Anaconda3/Lib/site-packages).
# plimac-custom.pth ###############################
# .pth file for the PLIMAC extension
C:/Users/macks/Documents/Research/ContentTextAnalysis/plimac/3.00/Lib
C:/Users/macks/Documents/Research/ContentTextAnalysis/plimac/3.00/Tools
###################################################
'''
class column_serializer:
'''
qid_csv_file_in is assumed to have the columns 'Question_ID' and 'Question'
'''
    def __init__(self, data_dir=r'./', qid_csv_file_in=r'Questin_ID_Definition.csv'):
self.data_dir = data_dir
self.qid_csv_file_in = qid_csv_file_in
def serialize_record(self, csv_file_in, key_word = r'Definition'):
self.df_response_serialized = clsr.ac_column_serializer(self.data_dir + csv_file_in,
'Student_Index', self.columms_to_be_serialized(csv_file_in, key_word))
new_question_clm_name = key_word + r'-Question'
new_answer_clm_name = key_word + r'-Answer'
self.df_response_serialized = self.df_response_serialized.rename(
columns={r'Pre_Col_Name' : new_question_clm_name, r'Content' : new_answer_clm_name})
score_columns = self.columms_to_be_serialized(csv_file_in, r'Score')
self.df_score_serialized = clsr.ac_column_serializer(self.data_dir + csv_file_in,
r'Student_Index', score_columns)
new_score_clm_name = key_word + r'-Score'
self.df_score_serialized = self.df_score_serialized.rename(
columns={r'Content' : new_score_clm_name})
lang_columns = self.columms_to_be_serialized(csv_file_in, r'Language')
self.df_lang_serialized = clsr.ac_column_serializer(self.data_dir + csv_file_in,
'Student_Index', lang_columns)
new_lang_clm_name = key_word + r'-Language'
self.df_lang_serialized = self.df_lang_serialized.rename(
columns={r'Content' : new_lang_clm_name})
self.df_all_serialized = self.df_response_serialized.copy()
self.df_all_serialized[new_score_clm_name] = self.df_score_serialized[new_score_clm_name]
self.df_all_serialized[new_lang_clm_name] = self.df_lang_serialized[new_lang_clm_name]
self.df_all_serialized = self.df_all_serialized.drop(score_columns, axis=1)
self.df_all_serialized = self.df_all_serialized.drop(lang_columns, axis=1)
self.df_all_serialized = self.remove_word_from_clm_values(self.df_all_serialized, r'-' + key_word, new_question_clm_name)
self.df_all_serialized = self.add_questin_id(self.df_all_serialized, self.qid_csv_file_in, new_question_clm_name)
self.df_all_serialized = self.add_clm_values_to_index(self.df_all_serialized, new_question_clm_name)
def columms_to_be_serialized(self, csv_file_in, key_word):
df_file_in = pd.read_csv(self.data_dir + csv_file_in, encoding= 'latin1')
in_columns = df_file_in.columns
ser_columns = []
for x in in_columns:
if key_word in x:
ser_columns = ser_columns + [x]
return ser_columns
def remove_word_from_clm_values(self, df_to_be_mod, rm_word, column_to_be_mod):
mod_data = df_to_be_mod[column_to_be_mod]
new_data = []
for x in mod_data:
new_data = new_data + [x.replace(rm_word, r"")]
df_new_data = pd.DataFrame({column_to_be_mod : new_data}, index = df_to_be_mod.index)
df_to_be_mod[column_to_be_mod] = df_new_data[column_to_be_mod]
return df_to_be_mod
def add_questin_id(self, df_to_be_mod, qid_csv_file_in, column_to_be_matched):
df_file_in = pd.read_csv(self.data_dir + qid_csv_file_in, encoding= 'latin1')
df_file_in = df_file_in.rename(columns={'Question': column_to_be_matched})
original_index = df_to_be_mod.index
df_to_be_mod = pd.merge(df_to_be_mod, df_file_in, on=column_to_be_matched, how='left')
#df_to_be_mod['Question_ID_Sec'] = df_to_be_mod['Question_ID']
df_to_be_mod.index = original_index
return df_to_be_mod
def add_clm_values_to_index(self, df_to_be_mod, column_to_be_added):
mod_index = df_to_be_mod.index
add_data = df_to_be_mod[column_to_be_added]
new_index = []
for i in range(len(mod_index)):
new_index = new_index + [str(mod_index[i]) + r'-' + add_data.iloc[i]]
df_to_be_mod.index = new_index
df_to_be_mod.index.name = r'Student_Question_Index'
return df_to_be_mod
if __name__ == "__main__":
cs = column_serializer(r'../data/')
cs.serialize_record(r'Cleaned-Def-ELVA.PILOT.PRE-TEST (MODIFIED SCORES.Deidentified.11.19.17)12.13.2017.csv', r'Definition')
cs.df_all_serialized.to_csv(r'../data/' + r'Serialized-Def-ELVA.PILOT.PRE-TEST.csv', encoding= 'latin1')
'''
cs.serialize_record(r'Cleaned-Sen-ELVA.PILOT.PRE-TEST (MODIFIED SCORES.Deidentified.11.19.17)12.13.2017.csv', r'Sentence')
cs.df_all_serialized.to_csv(r'../data/' + r'Serialized-Sen-ELVA.PILOT.PRE-TEST.csv', encoding= 'latin1')
cs.serialize_record(r'Cleaned-Def-ELVA.PILOT.POST-TEST (MODIFIED SCORES.Deidentifed11.19.17)12.13.2017.csv', r'Definition')
cs.df_all_serialized.to_csv(r'../data/' + r'Serialized-Def-ELVA.PILOT.POST-TEST.csv', encoding= 'latin1')
cs.serialize_record(r'Cleaned-Sen-ELVA.PILOT.POST-TEST (MODIFIED SCORES.Deidentifed11.19.17)12.13.2017.csv', r'Sentence')
cs.df_all_serialized.to_csv(r'../data/' + r'Serialized-Sen-ELVA.PILOT.POST-TEST.csv', encoding= 'latin1')
'''
| 2.484375 | 2 |
memory_stats/main.py | makinteract/micropython-examples | 0 | 12790893 | from microbit import*
import gc
import micropython
def mem_stat():
print('MEMORY STATS')
gc.collect()
micropython.mem_info()
print('Initial free: {} allocated: {}'.format(
gc.mem_free(), gc.mem_alloc()))
print('END OF REPORT')
sleep(500)
mem_stat()
# Output will be printed via serial (115200 baud rate)
| 2.6875 | 3 |
home/adapt.py | the-kid89/django_adatp_demo | 0 | 12790894 | <filename>home/adapt.py
from adapt.intent import IntentBuilder
from adapt.engine import IntentDeterminationEngine
engine = IntentDeterminationEngine()
weather_keyword = [
"weather"
]
for wk in weather_keyword:
engine.register_entity(wk, "WeatherKeyword")
weather_types = [
"snow",
"rain",
"wind",
"sleet",
"sun"
]
for wt in weather_types:
engine.register_entity(wt, "WeatherType")
locations = [
"Seattle",
"San Francisco",
"Tokyo",
"Vancouver"
]
for loc in locations:
engine.register_entity(loc, "Location")
weather_intent = IntentBuilder("WeatherIntent")\
.require("WeatherKeyword")\
.optionally("WeatherType")\
.require("Location")\
.build()
engine.register_intent_parser(weather_intent)
def get_intent(message):
for intent in engine.determine_intent(message):
if intent.get('confidence') > 0:
return intent
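# Illustrative call (hypothetical utterance; adapt yields dicts carrying the
# matched entities plus 'intent_type' and 'confidence'):
#   get_intent("what is the weather like in Tokyo")
#   -> e.g. {'intent_type': 'WeatherIntent', 'WeatherKeyword': 'weather',
#            'Location': 'Tokyo', 'confidence': ...}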
| 2.65625 | 3 |
config/atlas/pybullet_simulation.py | junhyeokahn/PnC | 25 | 12790895 | import numpy as np
class Config(object):
CONTROLLER_DT = 0.001
N_SUBSTEP = 1
CAMERA_DT = 0.05
KP = 0.
KD = 0.
INITIAL_POS_WORLD_TO_BASEJOINT = [0, 0, 1.5 - 0.761]
INITIAL_QUAT_WORLD_TO_BASEJOINT = [0., 0., 0., 1.]
# INITIAL_QUAT_WORLD_TO_BASEJOINT = [0., 0., 0.7071, 0.7071]
PRINT_TIME = False
PRINT_ROBOT_INFO = False
VIDEO_RECORD = False
RECORD_FREQ = 10
SIMULATE_CAMERA = False
| 2.03125 | 2 |
home/migrations/0003_alter_blogmodel_image.py | CleoMenezes/My-Distro-Hope | 2 | 12790896 | # Generated by Django 3.2.9 on 2021-11-25 04:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0002_alter_blogmodel_slug'),
]
operations = [
migrations.AlterField(
model_name='blogmodel',
name='image',
field=models.ImageField(upload_to='uploads'),
),
]
| 1.546875 | 2 |
workflow/tasks/Start.py | cipher-ops/backend-kts | 1 | 12790897 | <gh_stars>1-10
import os
from workflow.task import Task
from workflow.utils.ansible import Ansible
class Start(Task):
def __init__(self, *args, **kwargs):
self.cmd = kwargs.get('cmd')
self.servers = kwargs.get('servers')
def info(self):
self.logger.info('TaskName=Start')
def exec(self):
self.info()
ansible = Ansible(inventory=self.servers['inventory'], connection='smart', become=True, become_method='sudo')
ansible.run(hosts=','.join(self.servers['hosts']), module='shell', args=self.cmd)
ansible.get_result() | 2.171875 | 2 |
Python/5 kyu/Human Readable Time/humanreadabletime.py | newtonsspawn/codewars_challenges | 3 | 12790898 | import math
def make_readable(seconds):
hh = math.floor(seconds / 3600)
mm = math.floor((seconds - (hh * 3600)) / 60)
ss = math.floor((seconds - (hh * 3600) - (mm * 60)))
readable_time = f'{hh:02}:{mm:02}:{ss:02}'
return readable_time
if __name__ == '__main__':
    # expected: 00:00:00, 00:00:05, 00:01:00, 23:59:59, 99:59:59
    print(make_readable(0))
    print(make_readable(5))
    print(make_readable(60))
    print(make_readable(86399))
    print(make_readable(359999)) | 3.359375 | 3
examples/s3-2017/rtu2a.py | pgaulon/minicps | 119 | 12790899 | <reponame>pgaulon/minicps
"""
rtu2a.py
"""
from minicps.devices import RTU
from utils import STATE, RTU2A_PROTOCOL
from utils import RTU_PERIOD_SEC
from utils import IP
# rtu2a tags
from utils import CO_0_2a, CO_1_2a, CO_2_2a, CO_3_2a
from utils import HR_0_2a, HR_1_2a, HR_2_2a
from utils import wadi1, wadi1_bin
import time
RTU2A_ADDR = IP['rtu2a'] + ':502'
RTU2B_ADDR = IP['rtu2b'] + ':502'
SCADA_ADDR = IP['scada'] + ':502'
class RTU2a(RTU):
def pre_loop(self, sleep=0.6):
"""rtu2a pre loop.
- sleep
"""
time.sleep(sleep)
def main_loop(self):
"""rtu2a main loop.
- challenge 1
"""
# print('DEBUG: wadi1: {}'.format(wadi1))
# print('DEBUG: wadi1_bin: {}'.format(wadi1_bin))
assert (len(wadi1_bin) / 8) == len(wadi1)
# print('DEBUG: len(wadi1): {}'.format(len(wadi1)))
# print('DEBUG: len(wadi1_bin): {}'.format(len(wadi1_bin)))
# print('DEBUG: len(wadi1_bin)/8: {}'.format(len(wadi1_bin) / 8))
count = 0
while(True):
if count >= len(wadi1_bin):
count = 0
if wadi1_bin[count] == '1':
#self.send(CO_0_2a, True, RTU2A_ADDR)
self.send(CO_0_2a, True, SCADA_ADDR)
# print("DEBUG: rtu2a send {} count {}".format(True, count))
else:
#self.send(CO_0_2a, False, RTU2A_ADDR)
self.send(CO_0_2a, False, SCADA_ADDR)
# print("DEBUG: rtu2a send {} count {}".format(False, count))
count += 1
# NOTE: read sensors
# co_0_2a = True if self.get(CO_0_2a) == '1' else False
# print("DEBUG: rtu2a co_0_2a: {}".format(co_0_2a))
# print("DEBUG: self.receive co_0_2a: \
# {}".format(self.receive(CO_0_2a, RTU2A_ADDR)))
# print("DEBUG: rtu2a main loop")
time.sleep(RTU_PERIOD_SEC)
if __name__ == "__main__":
rtu2a = RTU2a(
name='rtu2a',
state=STATE,
protocol=RTU2A_PROTOCOL)
| 2.65625 | 3 |
source/multimode/package/OPA.py | kevinwhere/Bleeding-Pineapple | 0 | 12790900 | <gh_stars>0
from __future__ import division
import random
import math
import sys
import QT, DP
def modeAudsley(tasks,scheme):
## to know how many priority levels we need to decide
num_modes=0
for itask in tasks:
num_modes+=len(itask)
for imode in itask:
## put an attribute for each mode used as an indicator for whether or not its priority level is assigned
imode['ifassigned']=False
    ### assign priority levels to modes, from the lowest to the highest
for plevel in range(num_modes):
## check whether task i can be assigned with the priority level plevel
canAssign=0
for i in range(len(tasks)):
primeTasks=tasks[:i]+tasks[i+1:]
for imode in tasks[i]:
##ignore modes whose priority levels have been decided
if imode['ifassigned']==True:
continue
## checking if this mode can be assigned to this priority level by QT test
                if QT.QT(imode, primeTasks):  # NOTE: original called tests.modeQT from an unimported 'tests' module; QT.QT (used in Audsley below) is assumed here
imode['ifassigned']=True
canAssign=1
break
else:
continue
## greedily assign the first mode feasible to this priority level
if canAssign==1:
break
        ## if none of the modes can be assigned at this priority level, return unschedulable
if canAssign==0:
return False
return True
def Audsley(tasks,scheme):
if scheme == 'DT-FPT':
DP.table_init(tasks)
#Optimal Priority Assignment
priortyassigned=[0 for i in range(len(tasks))]
for plevel in range(len(tasks)):
canLevel=0
## check whether task i can be assigned with the priority level plevel
for i in range(len(tasks)):
##ignore lower priority tasks
if priortyassigned[i]==1:
continue
itask=tasks[i]
canAssign=1
            ## get higher priority tasks
primeTasks=[]
for j in range(len(tasks)):
if priortyassigned[j]==0 and i != j:
primeTasks.append(tasks[j])
#print "all :",tasks
#print "task:",itask
#print "prime:",primeTasks
#print ""
if len(primeTasks) ==0:
priortyassigned[i]=1
canLevel=1
#print "assign success at",i
break
            ## check feasibility of all modes
for imode in itask:
Tn=imode['period']
Un=imode['execution']/imode['period']
if scheme == 'QT-FPT':
if QT.QT(imode,primeTasks) == False:
canAssign=0
break
elif scheme == 'VRBL2-FPT':
if QT.VRBL2(imode,primeTasks) == False:
canAssign=0
break
elif scheme == 'DT-FPT':
if DP.DTest(i,tasks,imode,priortyassigned) == False:
canAssign=0
break
else:
sys.exit(0)
if canAssign == 1:
priortyassigned[i]=1
canLevel=1
#print "assign success at",i
break
if canLevel == 0:
return False
return True
| 2.625 | 3 |
vectorAngle.py | JustinBonus/Borja-Amies | 0 | 12790901 | <gh_stars>0
import numpy as np

# NOTE: innerProduct() and normS() are assumed to be helper functions defined
# elsewhere in this package (inner product and norm of 6-component
# stress/strain vectors in Voigt notation).
def vectorAngle(vec1, vec2, type):
# First take the dev() and hydroProjector() of the vectors if measuring
# angle from deviatoric view
#
#if vec1.shape == (6,1) or vec1.shape(6,) or vec1.shape == (1,6):
#theta = cos-1((u dot v)/(||u|| dot ||v||))
switch_type = {
1: 2, #Stress
2: 0.5, #Strain
3: 1 #Stress Strain
}
    factor = switch_type.get(type, 'Invalid type')  # NOTE: 'factor' is currently unused in the angle computation below
angle = float(np.arccos(innerProduct(vec1, vec2,1)/(normS(vec1)*normS(vec2)))*(180/np.pi)) #Degrees
#else:
#angle = 'Incorrect input vector shape'
return angle
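# Minimal usage sketch (hypothetical values; assumes the innerProduct/normS
# helpers above exist):
#   v1 = np.zeros((6, 1)); v1[0] = 1.0
#   v2 = np.zeros((6, 1)); v2[1] = 1.0
#   vectorAngle(v1, v2, 1)  # ~90.0 for orthogonal vectors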
| 2.65625 | 3 |
backend/www/photo_store.py | sleepingAnt/viewfinder | 645 | 12790902 | # Copyright 2012 Viewfinder Inc. All Rights Reserved.
"""HTTP request handler for serving viewfinder photo image file
assets.
In case of a local file store, permissions for the current user and
the requested photo are verified and the requester is redirected to
the FileObjectStoreHandler.
For an s3 file store, permissions for the current user and the
requested photo are verified and the requester is redirected to a
pre-authorized, expiring S3 URL.
PhotoStoreHandler: Request handler for authorizing photo requests
"""
__authors__ = ['<EMAIL> (<NAME>)',
'<EMAIL> (<NAME>)']
import base64
import httplib
import logging
from tornado import gen, options, web
from viewfinder.backend.base import handler
from viewfinder.backend.db.episode import Episode
from viewfinder.backend.db.photo import Photo
from viewfinder.backend.db.post import Post
from viewfinder.backend.db.user_post import UserPost
from viewfinder.backend.db.viewpoint import Viewpoint
from viewfinder.backend.www import base
options.define('validate_cert', default=True,
help='set to False to allow insecure file obj store for testing')
def GeneratePhotoUrl(obj_store, photo_id, suffix):
"""Generate S3 signed URL for the given photo. The S3 response will contain a Cache-Control
header specifying private caching and a 1 year max age.
"""
return obj_store.GenerateUrl(photo_id + suffix, cache_control='private,max-age=31536000')
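# (The suffix selects the stored size variant: '.t' thumbnail, '.m' medium,
# '.f' full, '.o' original; cf. the PUT handler below.)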
class PhotoStoreHandler(base.BaseHandler):
"""Handles PUT requests by storing image assets in the object
    store. GET requests retrieve image assets. Each method type
verifies user authentication credentials.
"""
@handler.asynchronous(datastore=True, obj_store=True)
@gen.engine
def get(self, episode_id, photo_id, suffix):
"""Verifies user credentials and then redirects to the URL where
the actual image bits are stored.
"""
url = yield PhotoStoreHandler.GetPhotoUrl(self._client,
self._obj_store,
episode_id,
photo_id,
suffix)
self.redirect(url)
@handler.asynchronous(datastore=True, obj_store=True)
@gen.engine
def put(self, episode_id, photo_id, suffix):
"""Verifies user credentials. If the user has write access to the
photo, and if an 'If-None-Match' is present, sends a HEAD request
to the object store to determine asset Etag. If the Etag matches,
returns a 304. Otherwise, generates an upload URL and redirects.
"""
def _GetUploadUrl(photo, verified_md5):
content_type = photo.content_type or 'image/jpeg'
return self._obj_store.GenerateUploadUrl(photo_id + suffix, content_type=content_type,
content_md5=verified_md5)
# Always expect well-formed Content-MD5 header. This ensures that the image data always matches
# what is in the metadata, and also enables the detection of any bit corruption on the wire.
if 'Content-MD5' not in self.request.headers:
raise web.HTTPError(400, 'Missing Content-MD5 header.')
try:
request_md5 = self.request.headers['Content-MD5']
actual_md5 = base64.b64decode(request_md5).encode('hex')
except:
raise web.HTTPError(400, 'Content-MD5 header "%s" is not a valid base-64 value.' % request_md5)
# Match against the MD5 value stored in the photo metadata.
if suffix not in ['.t', '.m', '.f', '.o']:
raise web.HTTPError(404, 'Photo not found; "%s" suffix is invalid.' % suffix)
# Ensure that user has permission to PUT the photo.
yield PhotoStoreHandler._AuthorizeUser(self._client, episode_id, photo_id, write_access=True)
# Get photo metadata, which will be used to create the upload URL.
photo = yield gen.Task(Photo.Query, self._client, photo_id, None)
# Get name of MD5 attribute in the photo metadata.
if suffix == '.o':
attr_name = 'orig_md5'
elif suffix == '.f':
attr_name = 'full_md5'
elif suffix == '.m':
attr_name = 'med_md5'
elif suffix == '.t':
attr_name = 'tn_md5'
else:
raise web.HTTPError(404, 'Photo not found; "%s" suffix is invalid.' % suffix)
# Check for the existence of the photo's image data in S3.
etag = yield gen.Task(Photo.IsImageUploaded, self._obj_store, photo.photo_id, suffix)
expected_md5 = getattr(photo, attr_name)
if expected_md5 != actual_md5:
if etag is None:
# Since there is not yet any photo image data, update the photo metadata to be equal to the
# actual MD5 value.
setattr(photo, attr_name, actual_md5)
yield gen.Task(photo.Update, self._client)
# Redirect to the S3 location.
self.redirect(_GetUploadUrl(photo, request_md5))
else:
# The client often sends mismatched MD5 values due to non-deterministic JPG creation IOS code.
# Only log the mismatch if it's an original photo to avoid spamming logs.
if suffix == '.o':
logging.error('Content-MD5 header "%s" does not match expected MD5 "%s"' %
(actual_md5, expected_md5))
self.set_status(400)
self.finish()
else:
# Check for If-None-Match header, which is used by client to check whether photo image data
# already exists (and therefore no PUT of the image data is needed).
match_etag = self.request.headers.get('If-None-Match', None)
if match_etag is not None and etag is not None and (match_etag == '*' or match_etag == etag):
# Photo image data exists and is not modified, so no need for client to PUT it again.
self.set_status(httplib.NOT_MODIFIED)
self.finish()
else:
# Redirect to the S3 upload location.
self.redirect(_GetUploadUrl(photo, request_md5))
@classmethod
@gen.coroutine
def GetPhotoUrl(cls, client, obj_store, episode_id, photo_id, suffix):
"""Checks that the current user (in Viewfinder context) is authorized to get the specified
photo, and returns a signed S3 URL for the photo if so.
"""
yield gen.Task(PhotoStoreHandler._AuthorizeUser, client, episode_id, photo_id, write_access=False)
raise gen.Return(GeneratePhotoUrl(obj_store, photo_id, suffix))
@classmethod
@gen.coroutine
def _AuthorizeUser(cls, client, episode_id, photo_id, write_access):
"""Checks that the current user (in Viewfinder context) user is authorized to access the given photo:
1. The photo must exist, and be in the given episode
2. The photo must not be unshared
3. If uploading the photo, the user must be the episode owner
4. A prospective user has access only to photos in the viewpoint specified in the cookie
"""
context = base.ViewfinderContext.current()
if context is None or context.user is None:
raise web.HTTPError(401, 'You are not logged in. Only users that have logged in can access this URL.')
user_id = context.user.user_id
post_id = Post.ConstructPostId(episode_id, photo_id)
episode, post = yield [gen.Task(Episode.QueryIfVisible, client, user_id, episode_id, must_exist=False),
gen.Task(Post.Query, client, episode_id, photo_id, None, must_exist=False)]
if episode is None or post is None:
raise web.HTTPError(404, 'Photo was not found or you do not have permission to view it.')
if write_access and episode.user_id != user_id:
raise web.HTTPError(403, 'You do not have permission to upload this photo; it is not owned by you.')
if post.IsUnshared():
raise web.HTTPError(403, 'This photo can no longer be viewed; it was unshared.')
# BUGBUG(Andy): The 1.5 client has a bug where it always passes in the library episode id
# when trying to fetch a photo, even if the photo is part of a conversation. This results
# in 403 errors when a user tries to sync to their library. For now, I'm disabling this
# check. Once 2.0 has established itself, I'll re-enable the check.
#if post.IsRemoved():
# raise web.HTTPError(403, 'This photo can no longer be viewed; it was removed.')
if not context.CanViewViewpoint(episode.viewpoint_id):
# Always allow system viewpoints to be accessed by a prospective user.
viewpoint = yield gen.Task(Viewpoint.Query, client, episode.viewpoint_id, None)
if not viewpoint.IsSystem():
raise web.HTTPError(403, 'You do not have permission to view this photo. '
'To see it, you must register an account.')
def _IsInteractiveRequest(self):
"""Always returns false, as this API is accessed programatically."""
return False
| 2.390625 | 2 |
RLBotPack/Botimus&Bumblebee/strategy/offense.py | FormularSumo/RLBotPack | 0 | 12790903 | from maneuvers.strikes.double_touch import DoubleTouch
from maneuvers.dribbling.carry_and_flick import CarryAndFlick
from maneuvers.maneuver import Maneuver
from maneuvers.strikes.aerial_strike import AerialStrike, FastAerialStrike
from maneuvers.strikes.close_shot import CloseShot
from maneuvers.strikes.dodge_strike import DodgeStrike
from maneuvers.strikes.ground_strike import GroundStrike
from maneuvers.strikes.mirror_strike import MirrorStrike
from rlutilities.linear_algebra import vec3
from rlutilities.simulation import Car
from tools.game_info import GameInfo
from tools.intercept import Intercept
from tools.vector_math import distance, ground_distance, align
class Offense:
def __init__(self, info: GameInfo):
self.info = info
self.allow_dribbles = False
def direct_shot(self, car: Car, target: vec3) -> Maneuver:
dodge_shot = DodgeStrike(car, self.info, target)
ground_shot = GroundStrike(car, self.info, target)
if car.boost > 40: # TODO
aerial_strike = AerialStrike(car, self.info, target)
fast_aerial = FastAerialStrike(car, self.info, target)
better_aerial_strike = min([aerial_strike, fast_aerial], key=lambda strike: strike.intercept.time)
if better_aerial_strike.intercept.time < dodge_shot.intercept.time:
if ground_distance(better_aerial_strike.intercept, self.info.their_goal.center) < 5000:
return DoubleTouch(better_aerial_strike)
return better_aerial_strike
if (
dodge_shot.intercept.time < ground_shot.intercept.time - 0.1
or ground_distance(dodge_shot.intercept, target) < 4000
or distance(ground_shot.intercept.ball.velocity, car.velocity) < 500
):
if (
distance(dodge_shot.intercept.ground_pos, target) < 4000
and abs(dodge_shot.intercept.ground_pos[0]) < 3000
):
return CloseShot(car, self.info, target)
return dodge_shot
return ground_shot
def any_shot(self, car: Car, target: vec3, intercept: Intercept) -> Maneuver:
ball = intercept.ball
if (
self.allow_dribbles
and (100 < ball.position[2] or abs(ball.velocity[2]) > 300)
and abs(ball.velocity[2]) < 1500
and ground_distance(car, ball) < 1500
and ground_distance(ball, self.info.my_goal.center) > 1000
):
if not self.is_opponent_close(car, ball):
return CarryAndFlick(car, self.info, target)
alignment = align(car.position, ball, target)
if alignment < 0.1 and abs(ball.position[1] - target[1]) > 3000:
return MirrorStrike(car, self.info, target)
# if 250 < ball.position[2] < 550 and self.is_opponent_close(car, ball):
# return DoubleJumpStrike(car, self.info, target)
return self.direct_shot(car, target)
def is_opponent_close(self, car, ball) -> bool:
for opponent in self.info.get_opponents(car):
if ground_distance(opponent, ball) < ball.position[2] * 2 + 1000:
return True
return False
| 2.359375 | 2 |
rec/dataset/split.py | btwardow/dml4rec | 6 | 12790904 | from abc import abstractmethod
from numpy import random
from rec.base import ParametrizedObject
from rec.dataset.dataset import Dataset
class DatasetSplitter(ParametrizedObject):
@abstractmethod
def split(self, dataset):
assert isinstance(dataset, Dataset)
pass
def _prepare_target_datasets(self, dataset):
train = Dataset(dataset.name)
test = Dataset(dataset.name)
train.items = dataset.items
test.items = dataset.items
return train, test
class IdentitySplitter(DatasetSplitter):
"""
Do not split dataset at all.
It returns for both, train and test, the same object.
This implementation is mainly for testing purpose.
It shouldn't be used in a real-life training schedule.
"""
def split(self, dataset):
return dataset, dataset
class PreciseUserNumberDatasetSplitter(DatasetSplitter):
def __init__(self, train_size=0, test_size=0):
super(PreciseUserNumberDatasetSplitter, self).__init__()
self.train_size = train_size
self.test_size = test_size
def split(self, dataset):
super(PreciseUserNumberDatasetSplitter, self).split(dataset)
train, test = self._prepare_target_datasets(dataset)
n = 0
for u, u_sessions in list(dataset.sessions.items()):
if n <= self.train_size:
train.sessions[u] = u_sessions
elif n <= self.train_size + self.test_size:
test.sessions[u] = u_sessions
else:
break
n += len(u_sessions)
train._create_indexes()
test._create_indexes()
return train, test
class RandomSessionSplitter(DatasetSplitter):
def __init__(self, train_ratio=0.7):
super(RandomSessionSplitter, self).__init__()
        self.train_ratio = train_ratio
def split(self, dataset):
super(RandomSessionSplitter, self).split(dataset)
train, test = self._prepare_target_datasets(dataset)
        train_session_num = self.train_ratio * dataset.sessions_num()
user_session_ids = []
for u, u_sessions in list(dataset.sessions.items()):
for sid in u_sessions.keys():
user_session_ids.append((u, sid))
random.shuffle(user_session_ids)
for n in range(len(user_session_ids)):
u, sid = user_session_ids[n]
            out_dataset = train if n <= train_session_num else test
out_dataset.sessions[u][sid] = dataset.sessions[u][sid]
train._create_indexes()
test._create_indexes()
return train, test
class TimestampSessionSplitter(DatasetSplitter):
def __init__(self, split_sec=24 * 60 * 60):
super(TimestampSessionSplitter, self).__init__()
self.split_sec = split_sec
def split(self, dataset):
super(TimestampSessionSplitter, self).split(dataset)
train, test = self._prepare_target_datasets(dataset)
max_ts = self._get_max_timestamp(dataset)
threshold = max_ts - self.split_sec
for u, u_sessions in list(dataset.sessions.items()):
for sid, session in list(u_sessions.items()):
out_dataset = train if session.timestamp_end < threshold else test
out_dataset.sessions[u][sid] = dataset.sessions[u][sid]
train._create_indexes()
test._create_indexes()
return train, test
def _get_max_timestamp(self, dataset):
max_ts = 0
for u, u_sessions in list(dataset.sessions.items()):
for sid, session in list(u_sessions.items()):
if session.timestamp_end > max_ts:
max_ts = session.timestamp_end
return max_ts
class LastNPercentOfSessionsInDataset(DatasetSplitter):
def __init__(self, split_percent=.05):
self.split_percent = split_percent
def split(self, dataset):
all_sessions = dataset.all_sessions_list()
        all_sessions.sort(key=lambda s: s.timestamp_start)  # sort in place; a bare sorted() call discarded its result
split_num = len(all_sessions) * self.split_percent
train, test = self._prepare_target_datasets(dataset)
# iterate from last event till split is filled
for s in reversed(all_sessions):
out_dataset = train
if split_num > 0:
split_num -= 1
out_dataset = test
out_dataset.sessions[s.user_id][s.id] = s
train._create_indexes()
test._create_indexes()
return train, test
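# Example usage sketch (hypothetical `dataset` instance):
#   splitter = TimestampSessionSplitter(split_sec=24 * 60 * 60)
#   train, test = splitter.split(dataset)  # sessions ending in the last day go to test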
| 3.140625 | 3 |
Taller_02_Secuenciales/Ejercicio_01.py | BarinasJ/Algoritmos_Programacion | 0 | 12790905 | <reponame>BarinasJ/Algoritmos_Programacion
"""
Entradas: 3 edades
Edad 1 --> int --> a
Edad 2 --> int --> b
Edad 3 --> int --> c
Salidas --> El promedio los valores (a,b,c)
Promedio --> int --> p
"""
# Entradas
a = int(input("Dime la primera edad\n"))
b = int(input("Dime la primera edad\n"))
c = int(input("Dime la primera edad\n"))
# Caja negra
p = (a+b+c)/3
# Salidas
print(int(p)) | 3.96875 | 4 |
src/data/utils.py | ehudbaumatz/enhance | 0 | 12790906 | import cv2
def split_image_horizontally(path):
img = cv2.imread(path) if type(path) == str else path
height, width = img.shape[:2]
# Let's get the starting pixel coordiantes (top left of cropped top)
start_row, start_col = int(0), int(0)
# Let's get the ending pixel coordinates (bottom right of cropped top)
end_row, end_col = int(height), int(width * .5)
cropped_left = img[start_row:end_row, start_col:end_col]
# Let's get the starting pixel coordiantes (top left of cropped bottom)
start_row, start_col = int(0), int(width * .5)
# Let's get the ending pixel coordinates (bottom right of cropped bottom)
end_row, end_col = int(height), int(width)
cropped_right = img[start_row:end_row, start_col:end_col]
return cropped_left, cropped_right
| 3.171875 | 3 |
binsdpy/similarity/group_c.py | mikulatomas/binsdpy | 0 | 12790907 | import math
from binsdpy.utils import operational_taxonomic_units, BinaryFeatureVector
def smc(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Sokal-Michener similarity (also called simple matching coefficient)
<NAME>. (1958).
A statistical method for evaluating systematic relationships.
Univ. Kansas, Sci. Bull., 38, 1409-1438.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return (a + d) / (a + b + c + d)
def rogers_tanimoto(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Roges-Tanimoto similarity
<NAME>., & <NAME>. (1960).
A computer program for classifying plants.
Science, 132(3434), 1115-1118.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return (a + d) / (a + 2 * (b + c) + d)
def sokal_sneath2(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Sokal-Sneath similarity (v2)
<NAME>., & <NAME>. (1973).
Numerical taxonomy.
The principles and practice of numerical classification.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return (2 * (a + d)) / (2 * (a + d) + b + c)
def sokal_sneath3(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Sokal-Sneath similarity (v3)
<NAME>., & <NAME>. (1973).
Numerical taxonomy.
The principles and practice of numerical classification.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return (a + d) / (b + c)
def faith(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Faith similarity
<NAME>. (1983).
Asymmetric binary similarity measures.
Oecologia, 57(3), 287-290.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return (a + 0.5 * d) / (a + b + c + d)
def gower_legendre(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Gower-Legendre similarity
<NAME>., & <NAME>. (1986).
Metric and Euclidean properties of dissimilarity coefficients.
Journal of classification, 3(1), 5-48.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return (a + d) / (a + 0.5 * (b + c) + d)
def gower(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Gower similarity
<NAME>. (1971).
A general coefficient of similarity and some of its properties.
Biometrics, 857-871.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return (a + d) / math.sqrt((a + b) * (a + c) * (b + d) * (c + d))
def austin_colwell(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Austin-Colwell similarity
<NAME>., & <NAME>. (1977).
Evaluation of some coefficients for use in numerical taxonomy of microorganisms.
International Journal of Systematic and Evolutionary Microbiology, 27(3), 204-210.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return 2 / math.pi * math.asin(math.sqrt((a + d) / (a + b + c + d)))
def consonni_todeschini1(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Consonni and Todeschini similarity (v1)
<NAME>., & <NAME>. (2012).
New similarity coefficients for binary data.
Match-Communications in Mathematical and Computer Chemistry, 68(2), 581.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return math.log(1 + a + d) / math.log(1 + a + b + c + d)
def hamman(
x: BinaryFeatureVector, y: BinaryFeatureVector, mask: BinaryFeatureVector = None
) -> float:
"""Hamman similarity
<NAME>. (1961).
Merkmalsbestand und verwandtschaftsbeziehungen der farinosae: ein beitrag zum system der monokotyledonen.
Willdenowia, 639-768.
Args:
x (BinaryFeatureVector): binary feature vector
y (BinaryFeatureVector): binary feature vector
Returns:
float: similarity of given vectors
"""
a, b, c, d = operational_taxonomic_units(x, y, mask)
return (a + d - b - c) / (a + b + c + d)
| 3.328125 | 3 |
03 - Types/3.3 - InbuiltTypes-DictionarySetArray/06-dictions-multiple-assignments.py | python-demo-codes/basics | 2 | 12790908 | # HEAD
# DataType - Dictionaries Multiple Assignments
# DESCRIPTION
# Describes the assigning, working, and method usages of dictionaries
# RESOURCES
#
obj = {'color': 'red', 'age': 42}
# Using multiple assignments from the item tuple returned during each iteration
for k, v in obj.items():
print('Key: ' + k + ' Value: ' + str(v))
# Using multiple assignments from the item tuple returned during each iteration
# Looping through key:value of the dictions
for k, v in obj.items():
print('Key:Value', k, v)
| 4.5 | 4 |
utility_ai/models/bucket.py | TomasMaciulis/Utility-AI-API | 0 | 12790909 | from .configuration_entry import ConfigurationEntry
from .action import Action
from utility_ai.traits.utility_score_trait import UtilityScoreTrait
class Bucket(ConfigurationEntry, UtilityScoreTrait):
def __init__(self, name: str, description: dict):
ConfigurationEntry.__init__(self, name, description)
UtilityScoreTrait.__init__(
self,
description['utility_score_formula'],
super().weight_value,
name
)
self.actions = description['actions']
@property
def actions(self):
return self.__actions
@actions.setter
def actions(self, actions: dict):
act = []
for action, value in actions.items():
act.append(Action(action, value))
self.__actions = act
| 2.390625 | 2 |
test_izettle.py | vilkasgroup/iZettle | 0 | 12790910 | import os
import sys
import unittest
import logging
import uuid
import time
from iZettle.iZettle import Izettle, RequestException
logger = logging.getLogger()
logger.level = logging.DEBUG
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)
class TestIzettle(unittest.TestCase):
def __init__(self, *args, **kwargs):
""" Initialize iZettle client. Requires the following environment
variables IZETTLE_CLIENT_ID, IZETTLE_CLIENT_SECRET, IZETTLE_USER,
IZETTLE_PASSWORD. """
super(TestIzettle, self).__init__(*args, **kwargs)
self.client = Izettle(
client_id=os.environ['IZETTLE_CLIENT_ID'],
client_secret=os.environ['IZETTLE_CLIENT_SECRET'],
user=os.environ['IZETTLE_USER'],
            password=os.environ['IZETTLE_PASSWORD'],  # reconstructed from the docstring above; the original value was redacted
)
def test_instance(self):
""" Test that the client was initialized correctly.
If this fails, make sure that you have environment variables set
for the TestIzettle.__init__ method """
self.assertIsNotNone(self.client)
self.assertIsNotNone(self.client._Izettle__client_id)
self.assertIsNotNone(self.client._Izettle__client_secret)
self.assertIsNotNone(self.client._Izettle__user)
self.assertIsNotNone(self.client._Izettle__password)
def test_auth(self):
""" Test that we got token from izettle API """
self.assertIsNotNone(self.client._Izettle__token)
def test_invalid_client_id(self):
""" Test client creation with invalid parameters """
with self.assertRaises(RequestException) as re:
Izettle(client_id='invalid')
exception = re.exception
self.assertEqual(exception.developer_message, "Invalid client_id")
self.assertEqual(exception.request.json()['error'], "invalid_client")
self.assertEqual(exception.request.status_code, 400)
def test_discounts(self):
c = self.client
discount_uuid = str(uuid.uuid1())
discount_percentage = '10'
c.create_discount({
'uuid': discount_uuid,
'percentage': discount_percentage,
})
self.assertGreater(len(c.get_all_discounts()), 0)
discount = c.get_discount(discount_uuid)
self.assertEqual(discount['uuid'], discount_uuid)
self.assertEqual(discount['percentage'], discount_percentage)
new_name = 'new name'
c.update_discount(discount_uuid, {'name': new_name})
self.assertEqual(c.get_discount(discount_uuid)['name'], new_name)
c.delete_discount(discount_uuid)
with self.assertRaises(RequestException) as re:
c.get_discount(discount_uuid)
exception = re.exception
self.assertEqual(exception.request.status_code, 404)
def test_categories(self):
c = self.client
category_uuid = str(uuid.uuid1())
category_name = 'category name'
c.create_category({
'uuid': category_uuid,
'name': category_name
})
self.assertGreater(len(c.get_all_categroies()), 0)
category = c.get_category(category_uuid)
self.assertEqual(category['uuid'], category_uuid)
# FUN FUN FUN. All categories have name converted to upper case...
self.assertEqual(category['name'], category_name.upper())
# Tough luck, categories do not have delete method.
# Your account is now full of unwanted categories...
def test_product(self):
c = self.client
uuid1 = str(uuid.uuid1())
name = 'product1'
with self.assertRaises(RequestException) as e:
c.get_product(uuid1)
self.assertEqual(e.exception.request.status_code, 404)
self.assertIn('not found', e.exception.developer_message)
c.create_product({
'name': name,
'uuid': uuid1,
})
product = c.get_product(uuid1)
self.assertEqual(product['uuid'], uuid1)
self.assertEqual(product['name'], name)
updated_name = 'updated product name'
c.update_product(uuid1, {
'name': updated_name,
})
updated_product = c.get_product(uuid1)
self.assertEqual(updated_product['name'], updated_name)
variant_uuid = str(uuid.uuid1())
variant_name = 'variant name 1'
c.create_product_variant(uuid1, {'uuid': variant_uuid})
c.update_product_variant(uuid1, variant_uuid, {'name': variant_name})
product_with_updated_variant = c.get_product(uuid1)
found_the_new_variant = False
for variant in product_with_updated_variant['variants']:
if(variant['uuid'] != variant_uuid):
continue
self.assertEqual(variant['name'], variant_name)
found_the_new_variant = True
self.assertTrue(found_the_new_variant)
c.delete_product_variant(uuid1, variant_uuid)
variant_is_no_longer_in_product = True
for variant in c.get_product(uuid1)['variants']:
if(variant['uuid'] == variant_uuid):
variant_is_no_longer_in_product = False
self.assertTrue(variant_is_no_longer_in_product)
c.delete_product(uuid1)
with self.assertRaises(RequestException) as re:
c.get_product(uuid1)
exception = re.exception
self.assertEqual(exception.msg, "request error 404")
self.assertEqual(exception.request.status_code, 404)
uuid2 = str(uuid.uuid1())
self.assertNotEqual(uuid1, uuid2)
current_product_amount = len(c.get_all_products())
c.create_product({'name': '1', 'uuid': uuid1})
c.create_product({'name': '2', 'uuid': uuid2})
self.assertEqual(len(c.get_all_products()), current_product_amount + 2)
c.delete_product_list({'uuid': [uuid1, uuid2]})
self.assertEqual(len(c.get_all_products()), current_product_amount)
def test_purchases(self):
c = self.client
with self.assertRaises(TypeError):
# Parameters need to be in data dict
c.get_multiple_purchases(limit=1)
with self.assertRaises(TypeError):
# missing mandatory argument
c.get_purchase()
with self.assertRaises(RequestException) as e:
# This order of course cannot be in the server, because we made up the uuid
c.get_purchase(str(uuid.uuid1()))
self.assertEqual(e.exception.request.status_code, 404)
self.assertIn('not found', e.exception.developer_message)
multiple_purchases = c.get_multiple_purchases({'limit': 1})
self.assertEqual(len(multiple_purchases['purchases']), 1)
purchase_uuid = multiple_purchases['purchases'][0]['purchaseUUID']
single_purchase = c.get_purchase(purchase_uuid)
self.assertEqual(purchase_uuid, single_purchase['purchaseUUID'])
purchase_uuid1 = multiple_purchases['purchases'][0]['purchaseUUID1']
single_purchase = c.get_purchase(purchase_uuid1)
self.assertEqual(purchase_uuid, single_purchase['purchaseUUID'])
@unittest.skip('This will take over 2 hours.')
def test_session(self):
""" This tests if the integration works if the session expires before we
anticipate. This simply waits for the for the sessino to expire, so it wil
take a looooooong time """
        self.client._Izettle__session_valid_until = time.time() + 9000  # use the mangled name explicitly; `self.client.__x` here would mangle to _TestIzettle__x
time.sleep(8000)
self.assertIsNotNone(self.client.get_all_products())
if __name__ == '__main__':
unittest.main(verbosity=2)
| 2.609375 | 3 |
QuickSort/quick_sort.py | sddxzsb/algorithm | 0 | 12790911 | <gh_stars>0
def quick_sort(array, pivot_location, compare_count=0):
if len(array) == 1:
return array, compare_count
if len(array) == 2:
if array[0] < array[1]:
return [array[0], array[1]], compare_count + 1
else:
return [array[1], array[0]], compare_count + 1
else:
array, pivot_index = partition(array, pivot_location)
if pivot_index >= 2:
array[:pivot_index], compare_count = quick_sort(
array[:pivot_index],
pivot_location,
compare_count
)
if pivot_index <= len(array) - 3:
array[pivot_index+1:], compare_count = quick_sort(
array[pivot_index+1:],
pivot_location,
compare_count
)
compare_count += len(array) - 1
return array, compare_count
def partition(array, pivot_location):
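    # Lomuto-style partition: move the chosen pivot to the front, sweep once
    # growing the "< pivot" prefix, then swap the pivot into its final slot.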
pivot_index = get_pivot_index(array, pivot_location)
# put the pivot index in the beginning of the array
if pivot_index != 0:
array[0], array[pivot_index] = array[pivot_index], array[0]
i = 1 # index of first element > array[0]
for j in range(1, len(array)):
if array[j] < array[0]:
array[i], array[j] = array[j], array[i]
i += 1
array[0], array[i-1] = array[i-1], array[0]
# return the index of pivot
return array, i - 1
def get_pivot_index(array, pivot_location):
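    # 'first'/'last' pick the endpoints; anything else falls through to the
    # median-of-three rule (median of the first, middle and last elements).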
if pivot_location == 'first':
pivot_index = 0
elif pivot_location == 'last':
pivot_index = len(array) - 1
else:
first = array[0]
middle = array[(len(array) - 1) // 2]
last = array[-1]
if middle < first < last or last < first < middle:
pivot_index = 0
elif first < middle < last or last < middle < first:
pivot_index = (len(array) - 1) // 2
else:
pivot_index = len(array) - 1
return pivot_index
if __name__ == "__main__":
f = open("QuickSort.txt")
array = [int(line) for line in f]
sorted_array, compare_count = quick_sort(array, 'first')
print(compare_count)
f = open("QuickSort.txt")
array = [int(line) for line in f]
sorted_array, compare_count = quick_sort(array, 'last')
print(compare_count)
f = open("QuickSort.txt")
array = [int(line) for line in f]
sorted_array, compare_count = quick_sort(array, 'median')
print(compare_count)
| 3.703125 | 4 |
MAIN.py | KDen404/twitch-IRC-chat-bot | 1 | 12790912 | <filename>MAIN.py
from FUNCTIONS import *
import datetime
def main(chan, Main, irc, VIP, f):
print("finished Loading")
print("executing main section")
while Main:
text = irc.get_text().strip(bytes("\r\n", "UTF-8")).decode("UTF-8", "ignore")
buffer = text.replace(":", "")
user = buffer.split("!", 1)[0]
buffer.replace(buffer, "")
buffer = text
data = buffer.replace(":" + user + "!" + user + "@" + user + "." + "tmi.twitch.tv " + "PRIVMSG " + "#", "").split(" :", 1)[1]
time = datetime.datetime.now()
second = str(time.second).strip(" ")
minute = str(time.minute).strip(" ")
hour = str(time.hour).strip(" ")
timestamp = "[" + hour.zfill(2) + ":" + minute.zfill(2) + ":" + second.zfill(2) + "]"
f.chatlog(user, data, timestamp)
print(timestamp, user, ":", data)
if "PING :tmi.twitch.tv" == text:
irc.pong()
f.reset()
elif data.startswith("!cmdadd"):
if user in VIP:
f.addcmd(irc, chan, data)
f.reset()
else:
irc.send(chan, "You don't have the Permissions to perform this command!!")
f.reset()
elif data.startswith("!quoteadd"):
if user in VIP:
f.addQuote(irc, chan, data)
f.reset()
else:
irc.send(chan, "You don't have the Permissions to perform this command!!")
                f.reset()  # match the f.reset() used in every other branch
elif data == "!quote":
q = open("data/quotes.ccfg", "r")
quotes = q.read().split("\n")
quote = random.choice(quotes)
irc.send(chan, quote)
print(BOTID + ": " + quote)
f.reset()
q.close()
elif data == "!stop":
if user == CHANNEL:
sys.exit(0)
f.reset()
else:
irc.send(chan, "You don't have the permissions to perform this command!!")
elif data.startswith("!"):
c = open("data/commands.cfg", "r")
cr = c.read().split("\n")
for line in cr:
cmdline = line.split(": ", 1)
cmd = cmdline[0]
info = cmdline[1]
if data.strip(" ") == cmd:
irc.send(chan, info)
print(BOTID + ": " + info)
cmdline.clear()
cmd.replace(cmd, "")
info.replace(info, "")
f.reset()
c.close()
break
else:
cmdline.clear()
cmd.replace(cmd, "")
info.replace(info, "")
else:
f.reset()
| 2.796875 | 3 |
utils.py | natsukium/TensorNet-TF | 0 | 12790913 | import numpy as np
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from sklearn.model_selection import train_test_split
def down_scale(x, scale=2):
# order 2 -> order 4
h = int(np.sqrt(x.shape[1]))
img = x.astype("float32").reshape(x.shape[0], h, h, 1)
scaled_img = tf.nn.avg_pool(img, ksize=[1, scale, scale, 1],
strides=[1, scale, scale, 1],
padding='VALID')
h //= scale
return tf.reshape(scaled_img, [x.shape[0], h ** 2])
def quantize(x):
phi = tf.concat(
[tf.expand_dims(tf.cos(x) * np.pi/2, 2),
tf.expand_dims(tf.sin(x) * np.pi/2, 2)], 2)
return phi
def load_mnist(one_hot=True, random_state=42):
mnist = input_data.read_data_sets('MNIST_data/', one_hot=one_hot)
mnist_X = np.concatenate((mnist.train.images, mnist.test.images), axis=0)
mnist_y = np.concatenate((mnist.train.labels, mnist.test.labels), axis=0)
return train_test_split(mnist_X, mnist_y, test_size=0.2,
random_state=random_state)
| 3.03125 | 3 |
Blender 2.91/2.91/scripts/addons/io_export_dxf/__init__.py | calculusrobotics/RNNs-for-Bayesian-State-Estimation | 1 | 12790914 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
bl_info = {
"name": "Export Autocad DXF Format (.dxf)",
"author": "<NAME> (AKA migius), <NAME>",
"version": (2, 2, 3),
"blender": (2, 80, 0),
"location": "File > Export > AutoCAD DXF",
"description": "The script exports Blender geometry to DXF format r12 version.",
"warning": "Under construction! Visit Wiki for details.",
"doc_url": "{BLENDER_MANUAL_URL}/addons/import_export/scene_dxf.html",
"category": "Import-Export",
}
if "bpy" in locals():
from importlib import reload
reload(operator)
del reload
import bpy
from . import operator
def menu_func(self, context):
self.layout.operator(operator.DXFExporter.bl_idname, text="AutoCAD DXF")
classes = (
operator.DXFExporter,
)
def register():
from bpy.utils import register_class
for cls in classes:
register_class(cls)
bpy.types.TOPBAR_MT_file_export.append(menu_func)
def unregister():
from bpy.utils import unregister_class
for cls in reversed(classes):
unregister_class(cls)
bpy.types.TOPBAR_MT_file_export.remove(menu_func)
if __name__ == "__main__":
register()
| 2.046875 | 2 |
Obsolete-Commands/LastMentioned.py | HeNine/PyMoronBot | 0 | 12790915 | # -*- coding: utf-8 -*-
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import subprocess
class Command(CommandInterface):
triggers = ['lastsaid', 'lastmention', 'lastmentioned']
help = 'lastmention(ed)/lastsaid <text> - checks the log for the last time someone mentioned a given word or phrase'
def execute(self, message):
"""
@type message: IRCMessage
"""
if len(message.MessageList) > 1 and (message.Command == "lastmention" or message.Command == "lastmentioned"):
proc = subprocess.Popen(['/usr/bin/php',
'/opt/moronbot/loggrep.php',
"\"" + message.Parameters.replace("\"", "\\\"").replace("\n", "\\\n") + "\"",
message.ReplyTo,
"mention"],
stdout=subprocess.PIPE)
output = proc.stdout.read()
return IRCResponse(ResponseType.Say, output, message.ReplyTo)
if len(message.MessageList) > 1 and message.Command == "lastsaid":
proc = subprocess.Popen(['/usr/bin/php',
'/opt/moronbot/loggrep.php',
"\"" + message.Parameters.replace("\"", "\\\"").replace("\n", "\\\n") + "\"",
message.ReplyTo,
"mentionnottoday"],
stdout=subprocess.PIPE)
output = proc.stdout.read()
return IRCResponse(ResponseType.Say, output, message.ReplyTo)
| 2.8125 | 3 |
pandas/core/internals/api.py | roberthdevries/pandas | 0 | 12790916 | """
This is a pseudo-public API for downstream libraries. We ask that downstream
authors
1) Try to avoid using internals directly altogether, and failing that,
2) Use only functions exposed here (or in core.internals)
"""
from __future__ import annotations
from collections import defaultdict
from typing import DefaultDict
import numpy as np
from pandas._libs.internals import BlockPlacement
from pandas._typing import (
ArrayLike,
Dtype,
)
from pandas.core.dtypes.common import (
is_datetime64tz_dtype,
pandas_dtype,
)
from pandas.core.arrays import DatetimeArray
from pandas.core.construction import extract_array
from pandas.core.indexes.api import Index
from pandas.core.internals.blocks import (
Block,
CategoricalBlock,
DatetimeTZBlock,
ExtensionBlock,
check_ndim,
ensure_block_shape,
extract_pandas_array,
get_block_type,
maybe_coerce_values,
new_block,
)
from pandas.core.internals.managers import (
BlockManager,
construction_error,
multi_blockify,
simple_blockify,
)
def make_block(
values, placement, klass=None, ndim=None, dtype: Dtype | None = None
) -> Block:
"""
This is a pseudo-public analogue to blocks.new_block.
We ask that downstream libraries use this rather than any fully-internal
APIs, including but not limited to:
- core.internals.blocks.make_block
- Block.make_block
- Block.make_block_same_class
- Block.__init__
"""
if dtype is not None:
dtype = pandas_dtype(dtype)
values, dtype = extract_pandas_array(values, dtype, ndim)
if klass is None:
dtype = dtype or values.dtype
klass = get_block_type(values, dtype)
elif klass is DatetimeTZBlock and not is_datetime64tz_dtype(values.dtype):
# pyarrow calls get here
values = DatetimeArray._simple_new(values, dtype=dtype)
if not isinstance(placement, BlockPlacement):
placement = BlockPlacement(placement)
ndim = maybe_infer_ndim(values, placement, ndim)
if is_datetime64tz_dtype(values.dtype):
# GH#41168 ensure we can pass 1D dt64tz values
values = extract_array(values, extract_numpy=True)
values = ensure_block_shape(values, ndim)
check_ndim(values, placement, ndim)
values = maybe_coerce_values(values)
return klass(values, ndim=ndim, placement=placement)
def maybe_infer_ndim(values, placement: BlockPlacement, ndim: int | None) -> int:
"""
If `ndim` is not provided, infer it from placment and values.
"""
if ndim is None:
# GH#38134 Block constructor now assumes ndim is not None
if not isinstance(values.dtype, np.dtype):
if len(placement) != 1:
ndim = 1
else:
ndim = 2
else:
ndim = values.ndim
return ndim
def create_block_manager_from_arrays(
arrays,
names: Index,
axes: list[Index],
consolidate: bool = True,
) -> BlockManager:
# Assertions disabled for performance
# assert isinstance(names, Index)
# assert isinstance(axes, list)
# assert all(isinstance(x, Index) for x in axes)
arrays = [extract_array(x, extract_numpy=True) for x in arrays]
try:
blocks = _form_blocks(arrays, names, axes, consolidate)
mgr = BlockManager(blocks, axes)
except ValueError as e:
raise construction_error(len(arrays), arrays[0].shape, axes, e)
if consolidate:
mgr._consolidate_inplace()
return mgr
def _form_blocks(
arrays: list[ArrayLike], names: Index, axes: list[Index], consolidate: bool
) -> list[Block]:
# put "leftover" items in float bucket, where else?
# generalize?
items_dict: DefaultDict[str, list] = defaultdict(list)
extra_locs = []
names_idx = names
if names_idx.equals(axes[0]):
names_indexer = np.arange(len(names_idx))
else:
# Assertion disabled for performance
# assert names_idx.intersection(axes[0]).is_unique
names_indexer = names_idx.get_indexer_for(axes[0])
for i, name_idx in enumerate(names_indexer):
if name_idx == -1:
extra_locs.append(i)
continue
v = arrays[name_idx]
block_type = get_block_type(v)
items_dict[block_type.__name__].append((i, v))
blocks: list[Block] = []
if len(items_dict["NumericBlock"]):
numeric_blocks = multi_blockify(
items_dict["NumericBlock"], consolidate=consolidate
)
blocks.extend(numeric_blocks)
if len(items_dict["DatetimeLikeBlock"]):
dtlike_blocks = multi_blockify(
items_dict["DatetimeLikeBlock"], consolidate=consolidate
)
blocks.extend(dtlike_blocks)
if len(items_dict["DatetimeTZBlock"]):
dttz_blocks = [
DatetimeTZBlock(
ensure_block_shape(extract_array(array), 2),
placement=BlockPlacement(i),
ndim=2,
)
for i, array in items_dict["DatetimeTZBlock"]
]
blocks.extend(dttz_blocks)
if len(items_dict["ObjectBlock"]) > 0:
object_blocks = simple_blockify(
items_dict["ObjectBlock"], np.object_, consolidate=consolidate
)
blocks.extend(object_blocks)
if len(items_dict["CategoricalBlock"]) > 0:
cat_blocks = [
CategoricalBlock(array, placement=BlockPlacement(i), ndim=2)
for i, array in items_dict["CategoricalBlock"]
]
blocks.extend(cat_blocks)
if len(items_dict["ExtensionBlock"]):
external_blocks = [
ExtensionBlock(array, placement=BlockPlacement(i), ndim=2)
for i, array in items_dict["ExtensionBlock"]
]
blocks.extend(external_blocks)
if len(extra_locs):
shape = (len(extra_locs),) + tuple(len(x) for x in axes[1:])
# empty items -> dtype object
block_values = np.empty(shape, dtype=object)
block_values.fill(np.nan)
na_block = new_block(block_values, placement=extra_locs, ndim=2)
blocks.append(na_block)
return blocks
| 2.265625 | 2 |
Python/Connection.py | mathur-rishi/Voice-Controlled-Assistant | 0 | 12790917 | import Libraries
#function definitions
def add_wlan_profile():
Libraries.subprocess.run('netsh wlan add profile filename="../Credentials/G5s_Hotspot.xml"', shell=True)
def open_wifi():
Libraries.subprocess.run('start ms-settings:network-wifi', shell=True)
Libraries.time.sleep(15)
def wifi_unsuccessful():
print('Sir, connection establishment to internet was unsuccessful!')
#check network connection
def check_wifi():
try:
Libraries.urllib.request.urlopen('https://www.google.com/')
return True
except:
return False | 2.65625 | 3 |
reactivex/scheduler/historicalscheduler.py | christiansandberg/RxPY | 0 | 12790918 | <gh_stars>0
from datetime import datetime
from typing import Optional
from .scheduler import UTC_ZERO
from .virtualtimescheduler import VirtualTimeScheduler
class HistoricalScheduler(VirtualTimeScheduler):
"""Provides a virtual time scheduler that uses datetime for absolute time
and timedelta for relative time."""
def __init__(self, initial_clock: Optional[datetime] = None) -> None:
"""Creates a new historical scheduler with the specified initial clock
value.
Args:
initial_clock: Initial value for the clock.
"""
super().__init__(initial_clock or UTC_ZERO)
| 2.921875 | 3 |
HackerRank/FindAString/string.py | Naga-kalyan/competitive_programming | 8 | 12790919 | def count_substring(string, sub_string):
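# e.g. count_substring("ABCDCDC", "CDC") -> 2 (overlapping matches are counted)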
count=0
i=0
while(i!=len(string)-len(sub_string)+1):
z=string[i:].find(sub_string)
if(z!=-1):
count+=1
i=i+z+1
else:
break
return count
| 3.796875 | 4 |
Ejercicios/clases/__pycache__/ordenacion_topologica.py | albabernal03/ejercicios_de_ordenar | 0 | 12790920 | class Tareas:
def __init__(self, lista_tareas):
self.lista_tareas = lista_tareas
def bubbleSort(self):
for i in range(len(self.lista-1)):
for j in range(len(self.lista-1)):
if self.lista[j] > self.lista[j+1]:
self.lista[j], self.lista[j+1] = self.lista[j+1], self.lista[j]
lista= [8,13,5,6,13]
bubbleSort(lista)
print(lista)
| 4.03125 | 4 |
train.py | makisgrammenos/alzheimer-stage-classifier | 3 | 12790921 | import numpy as np
from tensorflow import keras
import matplotlib.pyplot as plt
import os
import cv2
import random
import sklearn.model_selection as model_selection
import datetime
from model import createModel
from contextlib import redirect_stdout
categories = ["NonDemented", "MildDemented", "ModerateDemented", "VeryMildDemented"]
SIZE = 120
def getData():
rawdata = []
data = []
dir = "./data/"
for category in categories:
path = os.path.join(dir, category)
class_num = categories.index(category)
for img in os.listdir(path):
try:
rawdata = cv2.imread(os.path.join(path, img), cv2.IMREAD_GRAYSCALE)
new_data = cv2.resize(rawdata, (SIZE, SIZE))
data.append([new_data, class_num])
except Exception as e:
pass
random.shuffle(data)
img_data = []
img_labels = []
for features, label in data:
img_data.append(features)
img_labels.append(label)
img_data = np.array(img_data).reshape(-1, SIZE, SIZE, 1)
img_data = img_data / 255.0
img_labels = np.array(img_labels)
return img_data, img_labels
data, labels = getData()
train_data, test_data, train_labels, test_labels = model_selection.train_test_split(data, labels, test_size=0.20)
train_data, val_data, train_labels, val_labels = model_selection.train_test_split(train_data, train_labels,test_size=0.10)
print(len(train_data), " ", len(train_labels), len(test_data), " ", len(test_labels))
model = createModel(train_data)
checkpoint = keras.callbacks.ModelCheckpoint(filepath='./model/model.h5', save_best_only=True, monitor='val_loss', mode='min')
opt = keras.optimizers.Adam(learning_rate=0.001)
model.compile(optimizer=opt, loss="sparse_categorical_crossentropy", metrics=["accuracy"], )
history = model.fit(train_data, train_labels, epochs=10,
                    validation_data=(val_data, val_labels),
                    callbacks=[checkpoint])  # fix: the checkpoint callback was defined above but never used
model.save('./model/model.h5')
test_loss, test_acc = model.evaluate(test_data, test_labels)
print("Model Accuracy: ", test_acc, "Model Loss: ", test_loss)
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('Model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('Model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
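# Inference sketch (added; the path and preprocessing mirror the training code
# above, and the image filename is hypothetical):
#
#   model = keras.models.load_model('./model/model.h5')
#   img = cv2.resize(cv2.imread('scan.jpg', cv2.IMREAD_GRAYSCALE), (SIZE, SIZE))
#   pred = model.predict(img.reshape(1, SIZE, SIZE, 1) / 255.0)
#   print(categories[pred.argmax()])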
| 2.6875 | 3 |
heligym/__init__.py | ugurcanozalp/heli-gym | 8 | 12790922 | from gym.envs.registration import register
from heligym.envs import Heli, HeliHover, HeliForwardFlight
register(
id='Heli-v0',
entry_point='heligym.envs:Heli',
max_episode_steps = 5000,
reward_threshold = 0.95,
nondeterministic = False
)
register(
id='HeliHover-v0',
entry_point='heligym.envs:HeliHover',
max_episode_steps = 5000,
reward_threshold = 0.95,
nondeterministic = False
)
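# Usage sketch (added; standard Gym factory usage once these ids are registered):
#
#   import gym
#   import heligym  # importing the package runs the register() calls above
#   env = gym.make("Heli-v0")
#   obs = env.reset()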
| 1.570313 | 2 |
main.py | Tenvence/digital-device-image-retrieval | 0 | 12790923 | <gh_stars>0
import csv
import os
import numpy as np
import torch
import torch.backends.cudnn
import torch.cuda.amp as amp
import torch.nn as nn
import torch.nn.functional as fun
import torchvision.transforms as transforms
from torch.optim import SGD
from torch.utils.data import DataLoader
from torchvision.datasets import ImageFolder
from tqdm import tqdm
from model.encoder import Encoder
from util.data_set import TestDataSet
from util.lr_scheduler import LinearCosineScheduler
from util.tools import *
data_set_path = '../../DataSet/digital-device-dataset'
train_path = os.path.join(data_set_path, 'train')
device = torch.device('cuda:0')
num_classes = 3097
embedding_features = 3097
aug_norm_mean = [123.68, 116.779, 103.939]
aug_norm_std = [58.393, 57.12, 57.375]
name = 'r50-mlp-3097-amp'
output_base_path = os.path.join('./saved-output', name)
if not os.path.exists(output_base_path):
os.mkdir(output_base_path)
model_name = os.path.join(output_base_path, 'model.pkl')
param_name = os.path.join(output_base_path, 'param.pth')
# gallery_embedding_block_name = os.path.join(output_base_path, 'gallery-embedding-block')
# query_embedding_block_name = os.path.join(output_base_path, 'query-embedding-block')
def get_test_model():
model = Encoder(feature_num=embedding_features)
model.load_state_dict(torch.load(param_name))
if torch.cuda.is_available():
model = nn.DataParallel(model).to(device=device)
model.eval()
return model
def encode(model, test_dataset):
data_loader = DataLoader(test_dataset, batch_size=256, shuffle=False, num_workers=32)
processor = tqdm(data_loader)
embedding_block = []
for img in processor:
with torch.no_grad():
embedded_feature = model(img)
embedding_block.append(fun.normalize(embedded_feature, p=2, dim=-1))
embedding_block = torch.cat(embedding_block, dim=0)
    return embedding_block.float()  # matmul does not support fp16 (HalfTensor); cast to fp32 (FloatTensor)
def train():
model = Encoder(feature_num=embedding_features)
torch.save(model.state_dict(), model_name)
model = nn.DataParallel(model)
model.to(device=device).train()
train_transforms = transforms.Compose([
transforms.RandomResizedCrop(size=224, scale=(0.08, 1.0), ratio=(3. / 4., 4. / 3.)),
transforms.RandomHorizontalFlip(p=0.5),
transforms.ColorJitter(brightness=0.4, saturation=0.4, contrast=0.4),
transforms.Resize((224, 224)),
transforms.ToTensor(),
transforms.Normalize(mean=[m / 255. for m in aug_norm_mean], std=[s / 255. for s in aug_norm_std]),
])
cla_dataset = ImageFolder(train_path, transform=train_transforms)
data_loader = DataLoader(cla_dataset, batch_size=256, shuffle=True, num_workers=32)
epoch = 200
iter_per_epoch = len(data_loader)
warm_epoch = 2
optimizer = SGD(model.parameters(), lr=0.1, momentum=0.9, weight_decay=0.0001)
scheduler = LinearCosineScheduler(optimizer, warm_epoch * iter_per_epoch, epoch * iter_per_epoch, max_lr=0.1)
# arc_margin_product = ArcMarginProduct(in_features=embedding_feature, out_features=num_classes)
scaler = amp.GradScaler()
for epoch_idx in range(epoch):
loss_arr = []
processor = tqdm(data_loader)
for data, label in processor:
data = data.to(device=device)
label = label.to(device=device)
optimizer.zero_grad()
with amp.autocast():
cla_output = model(data)
# loss = arc_margin_product(cla_output, label)
loss = fun.cross_entropy(cla_output, label)
scaler.scale(loss).backward()
scaler.step(optimizer)
scaler.update()
scheduler.step()
loss_arr.append(float(loss))
mean_loss = sum(loss_arr) / len(loss_arr)
processor.set_description(' Epoch=%d/%d; mLoss=%.4f; loss=%.4f' % (epoch_idx + 1, epoch, mean_loss, float(loss)))
torch.save(model.module.state_dict(), param_name)
def query(query_embedding_block, gallery_embedding_block, query_names_path, gallery_names_path):
with open(query_names_path, 'r') as f:
query_names = f.read().splitlines()
with open(gallery_names_path, 'r') as f:
gallery_names = f.read().splitlines()
cosine_distance = torch.matmul(query_embedding_block, gallery_embedding_block.t())
indices_top_10 = torch.topk(cosine_distance, k=10, dim=-1).indices.cpu().numpy()
query_res = {}
for idx, match_indices in enumerate(indices_top_10):
query_name = query_names[idx]
match_names = []
for match_idx in match_indices:
match_names.append(gallery_names[match_idx])
query_res[query_name] = match_names
query_res[query_name][0] = '{' + query_res[query_name][0]
query_res[query_name][-1] += '}'
with open(os.path.join(output_base_path, 'submission.csv'), 'w', encoding='utf-8') as f:
csv_writer = csv.writer(f)
for query_key in query_res.keys():
csv_writer.writerow([query_key] + query_res[query_key])
if __name__ == '__main__':
init()
train()
test_model = get_test_model()
test_transforms = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize(mean=[m / 255. for m in aug_norm_mean], std=[s / 255. for s in aug_norm_std]),
])
query_dataset = TestDataSet(os.path.join(data_set_path, 'test', 'query'), './saved-output/query_images.txt', transforms=test_transforms)
gallery_dataset = TestDataSet(os.path.join(data_set_path, 'test', 'gallery'), './saved-output/gallery_images.txt', transforms=test_transforms)
query_embedding_block = encode(test_model, query_dataset)
gallery_embedding_block = encode(test_model, gallery_dataset)
query(query_embedding_block, gallery_embedding_block, './saved-output/query_images.txt', './saved-output/gallery_images.txt')
| 2.03125 | 2 |
spider/__init__.py | yifei8/spider_blogs | 2 | 12790924 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Description []
Created by yifei on 2018/2/5.
"""
import control_center
if __name__ == "__main__":
root_url = "http://blog.csdn.net/hustqb/article/list"
spider = control_center.SpiderMain()
spider.start_crawling(root_url) | 1.6875 | 2 |
main.py | filipinascimento/bl-network-measurements | 0 | 12790925 | #!/usr/bin/env python
import sys
import os.path
from os.path import join as PJ
import re
import json
import math  # needed by NumpyEncoder below (math.isnan)
import numpy as np
from tqdm import tqdm
import igraph as ig
import jgf
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
def calcModularity(g):
if("Community" in g.vertex_attributes()):
Ci = reindexList(g.vs["Community"])
else:
return (None,None)
if("weight" in g.edge_attributes()):
return None, g.modularity(Ci, weights="weight");
else:
return None, g.modularity(Ci, weights=None);
def calcDegree(g):
results = np.array(g.degree(mode="ALL"))
return results, np.average(results)
def calcInDegree(g):
if(not g.is_directed()):
return (None,None)
results = np.array(g.indegree())
return results, np.average(results)
def calcOutDegree(g):
if(not g.is_directed()):
return (None,None)
results = np.array(g.outdegree())
return results, np.average(results)
def calcStrength(g):
if("weight" not in g.edge_attributes()):
return (None,None)
results = np.array(g.strength(mode="ALL", weights = "weight"))
return results, np.average(results)
def calcInStrength(g):
if("weight" not in g.edge_attributes() or not g.is_directed()):
return (None,None)
results = np.array(g.strength(mode="IN", weights = "weight"))
return results, np.average(results)
def calcOutStrength(g):
if("weight" not in g.edge_attributes() or not g.is_directed()):
return (None,None)
results = np.array(g.strength(mode="OUT", weights = "weight"))
return results, np.average(results)
def calcClusteringCoefficient(g):
# if("weight" in g.edge_attributes()):
results = g.transitivity_local_undirected(weights=None)
# else:
# results = g.transitivity_local_undirected(weights="weight")
return np.nan_to_num(results,0), np.nanmean(results)
def calcCoreness(g):
results = np.array(g.coreness(mode="ALL"))
return results, None
def calcMatchIndex(g):
degree = np.array(g.degree())
matchIndex = np.zeros(g.ecount())
for id,e in enumerate(g.es):
node1,node2 = e.tuple
viz1 = g.neighbors(node1)
viz2 = g.neighbors(node2)
sharedNei = set(viz1) & set(viz2)
if ((degree[node1]+degree[node2]) > 2):
matchIndex[id] = len(sharedNei)/float(degree[node1]+degree[node2]-2)
else:
matchIndex[id] = 0
meanMatchIndex = np.mean(matchIndex)
return None, meanMatchIndex
def calcBetweenessCentrality(g):
result = np.array(g.betweenness(directed=g.is_directed()))
return result,np.average(result)
def calcBetweenessCentralityWeighted(g):
if("weight" not in g.edge_attributes()):
return (None,None)
result = np.array(g.betweenness(weights="weight"))
return result,np.average(result)
def calcBetweennessCentralization(G):
vnum = G.vcount()
if vnum < 3:
return None,0
denom = (vnum-1)*(vnum-2)
temparr = [2*i/denom for i in G.betweenness()]
max_temparr = max(temparr)
return None,sum(max_temparr-i for i in temparr)/(vnum-1)
def calcRichClubCoefficient(g, highest=True, scores=None, indices_only=False):
Trc = richClubPercentage
degree = np.array(g.degree())
edges = np.array(g.get_edgelist())
sourceDegree,targetDegree = degree[edges[:,0]],degree[edges[:,1]]
dT = int(np.percentile(degree,Trc))
indNodes = np.nonzero(degree>=dT)[0]
indEdges = np.nonzero((sourceDegree>=dT)&(targetDegree>=dT))[0]
if (indNodes.size>1):
RC = 2.*indEdges.size/(indNodes.size*(indNodes.size-1))
else:
RC = 0
return None,RC
def calcDegreeAssortativity(g):
return None,g.assortativity_degree(directed=g.is_directed())
def calcDiameter(g):
if("weight" in g.edge_attributes()):
return None,g.diameter(directed=g.is_directed(),weights="weight")
else:
return None,g.diameter(directed=g.is_directed())
def reindexList(names,returnDict=False):
d = {ni: indi for indi, ni in enumerate(set(names))}
numbers = [d[ni] for ni in names]
if(returnDict):
return numbers,d
else:
return numbers
def getNeighborhoods(g,mode="ALL"):
if("weight" in g.edge_attributes()):
return [[(e.target,e["weight"]) if e.target!=i else (e.source,e["weight"]) for e in g.es[g.incident(i,mode=mode)]] for i in range(g.vcount())]
else:
return [[(e.target,1) if e.target!=i else (e.source,1) for e in g.es[g.incident(i,mode=mode)]] for i in range(g.vcount())]
def calcModuleDegreeZScore(g,mode="ALL"):
if("Community" in g.vertex_attributes()):
Ci = reindexList(g.vs["Community"])
else:
return (None,None)
neighs = getNeighborhoods(g,mode=mode)
cneighs = [[(Ci[vertexID],weigth) for vertexID,weigth in neigh] for neigh in neighs]
kappa = np.zeros(g.vcount())
kappaSi = [[] for _ in range(max(Ci)+1)]
for i in range(g.vcount()):
kappa[i] = np.sum([weight for community,weight in cneighs[i] if community==Ci[i]])
kappaSi[Ci[i]].append(kappa[i])
avgKappaSi = np.zeros(max(Ci)+1)
stdKappaSi = np.zeros(max(Ci)+1)
for ci in range(len(kappaSi)):
avgKappaSi[ci] = np.average(kappaSi[ci])
stdKappaSi[ci] = np.std(kappaSi[ci])
zmodule = np.zeros(g.vcount())
for i in range(g.vcount()):
ci = Ci[i]
if(stdKappaSi[ci]>0):
zmodule[i] = (kappa[i]-avgKappaSi[ci])/stdKappaSi[ci]
return zmodule,None
def calcParticipationCoeff(g,mode="ALL"):
if("Community" in g.vertex_attributes()):
Ci = reindexList(g.vs["Community"])
else:
return (None,None)
neighs = getNeighborhoods(g,mode=mode)
cneighs = [[(Ci[vertexID],weigth) for vertexID,weigth in neigh] for neigh in neighs]
if("weight" in g.edge_attributes()):
degrees = np.array(g.strength(mode=mode,weights="weight"))
else:
degrees = np.array(g.degree(mode=mode))
kappasi = np.zeros(g.vcount())
for i in range(g.vcount()):
nodeCommunities = set([community for community,weight in cneighs[i]])
communityDegrees = {community:0 for community in nodeCommunities}
for community,weight in cneighs[i]:
communityDegrees[community]+=weight
kappasi[i] = np.sum(np.power(list(communityDegrees.values()),2))
result = 1.0-kappasi/np.power(degrees,2.0)
result[degrees==0.0] = 0
return result,None
measurements = {
"Degree" : calcDegree,
"InDegree" : calcInDegree,
"OutDegree" : calcOutDegree,
"Strength" : calcStrength,
"InStrength" : calcInStrength,
"OutStrength" : calcOutStrength,
"ClusteringCoefficient" : calcClusteringCoefficient,
"Coreness" : calcCoreness,
"MatchIndex" : calcMatchIndex,
"BetweenessCentrality" : calcBetweenessCentrality,
"BetweenessCentralityWeighted" : calcBetweenessCentralityWeighted,
"BetweennessCentralization" : calcBetweennessCentralization,
"RichClubCoefficient" : calcRichClubCoefficient,
"DegreeAssortativity" : calcDegreeAssortativity,
"Diameter" : calcDiameter,
"ModuleDegreeZScore" : calcModuleDegreeZScore,
"ParticipationCoeff" : calcParticipationCoeff,
"Modularity" : calcModularity,
}
def isFloat(value):
if(value is None):
return False
try:
numericValue = float(value)
return np.isfinite(numericValue)
except ValueError:
return False
class NumpyEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, (np.int_, np.intc, np.intp, np.int8,
np.int16, np.int32, np.int64, np.uint8,
np.uint16, np.uint32, np.uint64)):
ret = int(obj)
elif isinstance(obj, (np.float_, np.float16, np.float32, np.float64)):
ret = float(obj)
elif isinstance(obj, (np.ndarray,)):
ret = obj.tolist()
else:
ret = json.JSONEncoder.default(self, obj)
if isinstance(ret, (float)):
if math.isnan(ret):
ret = None
if isinstance(ret, (bytes, bytearray)):
ret = ret.decode("utf-8")
return ret
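# Usage sketch (added example): pass the encoder to json.dumps so numpy scalars
# and arrays serialize cleanly, e.g.
#   json.dumps({"x": np.arange(3)}, cls=NumpyEncoder)  # -> '{"x": [0, 1, 2]}'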
results = {"errors": [], "warnings": [], "brainlife": [], "datatype_tags": [], "tags": []}
def warning(msg):
global results
results['warnings'].append(msg)
#results['brainlife'].append({"type": "warning", "msg": msg})
print(msg)
def error(msg):
global results
results['errors'].append(msg)
#results['brainlife'].append({"type": "error", "msg": msg})
print(msg)
def exitApp():
global results
with open("product.json", "w") as fp:
json.dump(results, fp, cls=NumpyEncoder)
if len(results["errors"]) > 0:
sys.exit(1)
else:
sys.exit()
def exitAppWithError(msg):
global results
results['errors'].append(msg)
#results['brainlife'].append({"type": "error", "msg": msg})
print(msg)
exitApp()
configFilename = "config.json"
argCount = len(sys.argv)
if(argCount > 1):
configFilename = sys.argv[1]
outputDirectory = "output"
outputFile = PJ(outputDirectory,"network.json.gz")
if(not os.path.exists(outputDirectory)):
os.makedirs(outputDirectory)
with open(configFilename, "r") as fd:
config = json.load(fd)
# "transform":"absolute", //"absolute" or "signed"
# "retain-weights":false,
# "threshold": "none"
richClubPercentage = 90
if("richClubPercentage" in config):
richClubPercentage = config["richClubPercentage"];
networks = jgf.igraph.load(config["network"], compressed=True)
outputNetworks = []
for network in tqdm(networks):
weighted = "weight" in network.edge_attributes()
hasCommunities = "Community" in network.vertex_attributes()
for measurement,measurementFunction in measurements.items():
nodePropData,networkPropData = measurementFunction(network)
if(nodePropData is not None):
network.vs[measurement] = nodePropData
if(networkPropData is not None):
if(nodePropData is not None): #Average measurement
network["Avg. "+measurement] = networkPropData
else:
network[measurement] = networkPropData
outputNetworks.append(network)
jgf.igraph.save(outputNetworks, outputFile, compressed=True)
exitApp()
| 2.15625 | 2 |
discordBot/bot.py | chand1012/IoTGarageDoor | 0 | 12790926 | <filename>discordBot/bot.py
import discord
import requests
from bs4 import BeautifulSoup
from json_extract import json_extract
HOST = json_extract("host")
PORT = json_extract("port")
URL = "http://{}:{}".format(HOST, PORT)
token = json_extract("token")
me = json_extract("admin")
client = discord.Client()
@client.event
async def on_message(message):
if message.author == client.user:
return
    if me not in str(message.author):
print(message.author)
return
if message.content.lower().startswith("!opengaragedoor") or message.content.lower().startswith("!closegaragedoor"):
req = requests.get(URL + "/garageDoor/toggleGarage")
if req.status_code==200:
msg = ""
if "open" in message.content.lower():
msg = "Garage Door opened successfully!"
else:
msg = "Garage Door closed successfully!"
await message.channel.send(content=msg)
return
else:
msg = "There was an error with your request: server returned code {}.".format(req.status_code)
await message.channel.send(content=msg)
return
if message.content.lower().startswith("!turnonlamp") or message.content.lower().startswith("!turnofflamp"):
req = requests.get(URL + "/chandlerLamp/lampSwitch")
if req.status_code==200:
msg = ""
if "on" in message.content.lower():
msg = "Lamp turned on successfully!"
else:
msg = "Lamp turned off successfully!"
await message.channel.send(content=msg)
return
else:
msg = "There was an error with your request: server returned code {}.".format(req.status_code)
await message.channel.send(content=msg)
return
if message.content.lower().startswith("!doathing"):
req = requests.get(URL + "/testDevice/doAThing")
if req.status_code==200:
await message.channel.send(content="Did nothing successfully!")
else:
await message.channel.send(content="Failed to do nothing! Error code {}.".format(req.status_code))
return
if message.content.lower().startswith("!getathing"):
req = requests.post(URL + "/testDevice/getAThing")
if req.status_code==200:
page = BeautifulSoup(req.content, "html.parser")
await message.channel.send(content=page.p.text)
else:
await message.channel.send(content="Failed to get nothing! Error code {}.".format(req.status_code))
if message.content.lower().startswith("!garagestatus"):
req = requests.post(URL + "/garageDoor/getGarage")
if req.status_code == 200:
page = BeautifulSoup(req.content, "html.parser")
msg = page.h1.text + "\n" + page.p.text
await message.channel.send(content=msg)
return
else:
msg = "There was an error with your request: server returned code {}.".format(req.status_code)
await message.channel.send(content=msg)
return
@client.event # the on_ready event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
client.run(token)
| 3.03125 | 3 |
pytglib/api/types/address.py | iTeam-co/pytglib | 6 | 12790927 | <reponame>iTeam-co/pytglib
from ..utils import Object
class Address(Object):
"""
Describes an address
Attributes:
ID (:obj:`str`): ``Address``
Args:
country_code (:obj:`str`):
A two-letter ISO 3166-1 alpha-2 country code
state (:obj:`str`):
State, if applicable
city (:obj:`str`):
City
street_line1 (:obj:`str`):
First line of the address
street_line2 (:obj:`str`):
Second line of the address
postal_code (:obj:`str`):
Address postal code
Returns:
Address
Raises:
:class:`telegram.Error`
"""
ID = "address"
def __init__(self, country_code, state, city, street_line1, street_line2, postal_code, **kwargs):
self.country_code = country_code # str
self.state = state # str
self.city = city # str
self.street_line1 = street_line1 # str
self.street_line2 = street_line2 # str
self.postal_code = postal_code # str
@staticmethod
def read(q: dict, *args) -> "Address":
country_code = q.get('country_code')
state = q.get('state')
city = q.get('city')
street_line1 = q.get('street_line1')
street_line2 = q.get('street_line2')
postal_code = q.get('postal_code')
return Address(country_code, state, city, street_line1, street_line2, postal_code)
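# Usage sketch (added example; the field values below are hypothetical):
#
#   addr = Address(country_code="US", state="CA", city="Mountain View",
#                  street_line1="1600 Amphitheatre Pkwy", street_line2="",
#                  postal_code="94043")
#   print(addr.city)  # -> "Mountain View"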
| 2.75 | 3 |
world-codesprint-5/algorithms/strings/string-construction/solution.py | bitnot/hackerrank-solutions | 0 | 12790928 | #!/bin/python3
import sys
n = int(input().strip())
for a0 in range(n):
s = input().strip()
letters = set()
cost = 0
for ch in s:
        if ch not in letters:  # first time this character appears
            letters.add(ch)
            cost += 1  # a brand-new character must be appended for $1; repeats can be copied for free
print(cost) | 3.09375 | 3 |
src/010_fib/pyx_fib2.py | xupingmao/benchmark | 0 | 12790929 | <filename>src/010_fib/pyx_fib2.py
# -*- coding:utf-8 -*-
# @author xupingmao
# @since 2022/01/26 16:35:51
# @modified 2022/01/26 16:36:46
# @filename pyx_fib2.py
import pyximport
pyximport.install()
import fib2
fib2.run()
| 1.15625 | 1 |
lfsearch.py | isi-metaphor/mokujin | 0 | 12790930 | <gh_stars>0
#!/usr/bin/env python
# coding: utf-8
# Copyright (C) USC Information Sciences Institute
# Author: <NAME> <<EMAIL>>
# URL: <http://nlg.isi.edu/>
# For more information, see README.md
# For license information, see LICENSE
import sys
import logging
import argparse
import cPickle as pickle
from mokujin.index import SimpleObjectIndex
from createlfindex import sent_to_terms
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--index", default="lfindex", help="LF sentences index directory", type=str)
parser.add_argument("-o", "--output", default=None, type=str)
parser.add_argument("-q", "--query", default=None, type=str)
args = parser.parse_args()
    i_dir = args.index  # fix: argparse stores "-i/--index" under "index", not "inputdir"
o_file = open(args.output, "w") if args.output is not None else sys.stdout
query_term = args.query
logging.info("INPUT DIR: %r" % i_dir)
logging.info("OUT FILE: %r" % o_file)
logging.info("QUERY: %s" % query_term)
obj_to_terms = sent_to_terms
obj_to_str = pickle.dumps
str_to_obj = pickle.loads
index = SimpleObjectIndex(i_dir, obj_to_terms, obj_to_str, str_to_obj)
index.load_all()
results = index.find(query_terms=(query_term,))
o_file.write("FOUND (%d):\n" % len(results))
for sent in results:
        o_file.write(sent.raw_text)  # fix: o_file is opened in text mode, so write str, not bytes
o_file.write("\n") | 2.5 | 2 |
siga/prospeccao/views.py | JenniferAmaral/DjangoSpike | 0 | 12790931 | from django.shortcuts import render
from django.views import generic
from django.urls import reverse_lazy
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from .models import DesafioInovacao
from .models import InovacaoAberta
# Desafios de Inovação
class DesafioInovacaoListView(generic.ListView):
    # renamed from "DesafioInovacao": the view used to shadow the imported model,
    # so the classes below bound `model` to the view instead of the model
    model = DesafioInovacao
    context_object_name = 'desafioInovacao_list'
    template_name = 'prospeccao/desafioInovacao_list.html'
class DesafioInovacaoDetailView(generic.DetailView):
model = DesafioInovacao
class DesafioInovacaoCreate(CreateView):
model = DesafioInovacao
fields = '__all__'
success_url = reverse_lazy('desafioInovacao')
class DesafioInovacaoUpdate(UpdateView):
model = DesafioInovacao
fields = '__all__'
success_url = reverse_lazy('desafioInovacao')
class DesafioInovacaoDelete(DeleteView):
    model = DesafioInovacao  # fix: was InovacaoAberta, a copy-paste slip
    success_url = reverse_lazy('desafioInovacao')
# Ação de Inovação Aberta
class InovacaoAbertaListView(generic.ListView):
    # renamed from "InovacaoAberta" for the same shadowing reason as above
    model = InovacaoAberta
    context_object_name = 'inovacaoAberta_list'
    template_name = 'prospeccao/inovacaoAberta_list.html'
class InovacaoAbertaDetailView(generic.DetailView):
model = InovacaoAberta
class InovacaoAbertaCreate(CreateView):
model = InovacaoAberta
fields = '__all__'
success_url = reverse_lazy('inovacaoAberta')
class InovacaoAbertaUpdate(UpdateView):
model = InovacaoAberta
fields = '__all__'
success_url = reverse_lazy('inovacaoAberta')
class InovacaoAbertaDelete(DeleteView):
model = InovacaoAberta
success_url = reverse_lazy('inovacaoAberta')
| 2.265625 | 2 |
Algorithms_medium/0750. Number Of Corner Rectangles.py | VinceW0/Leetcode_Python_solutions | 4 | 12790932 | """
0750. Number Of Corner Rectangles
Medium
Given a grid where each entry is only 0 or 1, find the number of corner rectangles.
A corner rectangle is 4 distinct 1s on the grid that form an axis-aligned rectangle. Note that only the corners need to have the value 1. Also, all four 1s used must be distinct.
Example 1:
Input: grid =
[[1, 0, 0, 1, 0],
[0, 0, 1, 0, 1],
[0, 0, 0, 1, 0],
[1, 0, 1, 0, 1]]
Output: 1
Explanation: There is only one corner rectangle, with corners grid[1][2], grid[1][4], grid[3][2], grid[3][4].
Example 2:
Input: grid =
[[1, 1, 1],
[1, 1, 1],
[1, 1, 1]]
Output: 9
Explanation: There are four 2x2 rectangles, four 2x3 and 3x2 rectangles, and one 3x3 rectangle.
Example 3:
Input: grid =
[[1, 1, 1, 1]]
Output: 0
Explanation: Rectangles must have four distinct corners.
Note:
The number of rows and columns of grid will each be in the range [1, 200].
Each grid[i][j] will be either 0 or 1.
The number of 1s in the grid will be at most 6000.
"""
from typing import List
class Solution:
def countCornerRectangles(self, grid: List[List[int]]) -> int:
if not grid or not grid[0]:
return 0
dp_set = []
res = 0
for y in range(len(grid)):
dp_set.append(set(idx for idx,val in enumerate(grid[y]) if val))
for prev in range(y):
matches = len(dp_set[y] & dp_set[prev])
if matches >= 2:
res += matches * (matches-1) // 2
return res
class Solution:
def countCornerRectangles(self, grid: List[List[int]]) -> int:
if not grid or not grid[0]:
return 0
h, w = len(grid), len(grid[0])
res = 0
for i in range(h -1):
for j in range(i+1, h):
count = 0
for c in range(w):
if grid[i][c] and grid[j][c]:
res += count
count += 1
return res
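# Quick check (added example) against Example 1 from the problem statement:
if __name__ == "__main__":
    grid = [[1, 0, 0, 1, 0],
            [0, 0, 1, 0, 1],
            [0, 0, 0, 1, 0],
            [1, 0, 1, 0, 1]]
    print(Solution().countCornerRectangles(grid))  # 1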
| 3.953125 | 4 |
tests/test_parse_timedelta.py | stanford-rc/globus-timer-cli | 0 | 12790933 | from datetime import timedelta
import pytest
from timer_cli.main import _parse_timedelta
timedelta_test_cases = [
("", timedelta(seconds=0)),
(" ", timedelta(seconds=0)),
("0", timedelta(seconds=0)),
("0s", timedelta(seconds=0)),
("0 s", timedelta(seconds=0)),
("10", timedelta(seconds=10)),
("100", timedelta(seconds=100)),
("5m 10s", timedelta(minutes=5, seconds=10)),
("5m 100s", timedelta(minutes=5, seconds=100)),
("2h 3m 4s", timedelta(hours=2, minutes=3, seconds=4)),
("2h3m4s", timedelta(hours=2, minutes=3, seconds=4)),
("2h 3m 4s", timedelta(hours=2, minutes=3, seconds=4)),
("10h", timedelta(hours=10)),
("1d 2h 3m 4s", timedelta(days=1, hours=2, minutes=3, seconds=4)),
("4w", timedelta(days=28)),
("4w 1d", timedelta(days=29)),
]
@pytest.mark.parametrize("s, d", timedelta_test_cases)
def test_parse_timedelta(s, d):
assert _parse_timedelta(s) == d
| 2.578125 | 3 |
exact.py | SajedeNick1999/Exact-and-Inexact-Solver-For-a-NP-hard-problem | 1 | 12790934 | from minizinc import Instance, Model, Solver
gecode = Solver.lookup("gecode")
max=0
trivial = Model()
FileName="small"
with open(FileName+".txt") as f:
file=f.readlines()
f.close()
minizinc=""
file = [x.strip() for x in file]
file = [x.split(" ") for x in file]
#file = [x.split("\t") for x in file]
print(file)
for x in file:
for y in x:
if int(y)>max:
max=int(y)
for y in range(0,max+1):
minizinc=minizinc+"var 0..1:x"+str(y)+";\n"
minizinc=minizinc+"\n"
minizinc=minizinc+"var int: a;\n\n"
minizinc=minizinc+"\n constraint x0=0;\n"
for x in file:
minizinc=minizinc+"constraint ("
for y in x:
minizinc=minizinc+"x"+y+"+"
minizinc=minizinc[:-1]
minizinc=minizinc+") mod 2=0 ;\n"
minizinc2="a = "
for i in range(1,max+1):
minizinc2=minizinc2+"x"+str(i)+"+"
minizinc2=minizinc2[:-1]
minizinc+="\n"+minizinc2+";\n\n"
minizinc+="\nconstraint a!=0 ;\n"
minizinc+="\nsolve minimize a;\n"
print(max)
print(minizinc)
sum=0;
trivial.add_string(minizinc)
instance = Instance(gecode, trivial)
# Find and print all intermediate solutions
result = instance.solve(intermediate_solutions=True)
f = open(FileName+"_solution.txt", "w")
for j in range(1,max+1):
#print("x"+str(j)+" = ")
print(result[len(result)-1, "x"+str(j)])
f.write("x"+str(j)+"=")
f.write(str(result[len(result)-1, "x"+str(j)] )+"\n")
sum+=result[len(result)-1, "x"+str(j)]
f.write("\nnumber = "+str(sum))
print(sum)
f.close()
| 3.328125 | 3 |
app/assess/routes.py | communitiesuk/funding-service-design-assessment | 0 | 12790935 | from app.assess.data import *
from app.config import APPLICATION_STORE_API_HOST_PUBLIC
from app.config import ASSESSMENT_HUB_ROUTE
from flask import abort
from flask import Blueprint
from flask import render_template
from flask import request
assess_bp = Blueprint(
"assess_bp",
__name__,
url_prefix=ASSESSMENT_HUB_ROUTE,
template_folder="templates",
)
@assess_bp.route("/", methods=["GET"])
def funds():
"""
Page showing available funds
from fund store
:return:
"""
funds = get_funds()
return render_template("funds.html", funds=funds)
@assess_bp.route("/landing/", methods=["GET"])
def landing():
"""
Landing page for assessors
Provides a summary of available applications
with a keyword searchable and filterable list
of applications and their statuses
"""
# Initialise empty search params
search_params = {
"id_contains": "",
"order_by": "",
"order_rev": "",
"status_only": "",
}
# Add request arg search params to dict
for key, value in request.args.items():
if key in search_params:
search_params.update({key: value})
applications = get_applications(params=search_params)
todo_summary = get_todo_summary()
return render_template(
"landing.html",
applications=applications,
search_params=search_params,
todo_summary=todo_summary,
applications_endpoint="".join(
[
APPLICATION_STORE_API_HOST_PUBLIC,
APPLICATION_SEARCH_ENDPOINT.replace("{params}", ""),
]
),
)
@assess_bp.route("/application/<application_id>", methods=["GET"])
def application(application_id):
"""
Application summary page
Shows information about the fund, application ID
and all the application questions and their assessment status
:param application_id:
:return:
"""
application = get_application_status(application_id=application_id)
if not application:
abort(404)
fund = get_fund(application.fund_id)
if not fund:
abort(404)
return render_template(
"application.html", application=application, fund=fund
)
"""
Legacy
The following routes serve information relating to
individual funds and fund rounds and are not shown in the assessor views
"""
@assess_bp.route("/<fund_id>/", methods=["GET"])
def fund(fund_id: str):
"""
Page showing available rounds for a given fund
from round store
:param fund_id:
:return:
"""
fund = get_fund(fund_id)
if not fund:
abort(404)
rounds = get_rounds(fund_id)
return render_template("fund.html", fund=fund, rounds=rounds)
@assess_bp.route("/<fund_id>/<round_id>/", methods=["GET"])
def fund_round(fund_id: str, round_id: str):
"""
Page showing available applications
from a given fund_id and round_id
from the application store
:param fund_id:
:param round_id:
:return:
"""
fund = get_fund(fund_id)
if not fund:
abort(404)
fund_round = get_round_with_applications(
fund_id=fund_id, round_id=round_id
)
if not fund_round:
abort(404)
return render_template("round.html", fund=fund, round=fund_round)
| 2.515625 | 3 |
AI/recommendation/data_update.py | osamhack2021/AI_APP_handylib_devlib | 1 | 12790936 | <reponame>osamhack2021/AI_APP_handylib_devlib<filename>AI/recommendation/data_update.py<gh_stars>1-10
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))))
from main.models import database
import pandas as pd
import numpy as np
def update():
'''
books = database.Book.objects()
isbn = []
for b in books:
isbn.append(b.isbn)
'''
    file_path = '/var/www/python_flask/main/recommendation/'  # adjust to your environment
    books_file_name = 'API_test_books.csv'  # adjust to your environment
df_books = pd.read_csv(file_path + books_file_name)
isbn = list(np.array(df_books['isbn']).tolist())
    dummy_file_name = 'dummy_users.csv'  # adjust to your environment
df_dummy_user = pd.read_csv(file_path + dummy_file_name)
users = database.User.objects()
name = []
email = []
password = []
user_id = []
like = []
rank = []
unit = []
for u in users:
t_list = []
for l in u.like:
t = l.rstrip('/')
try:
t = isbn.index(t)
except:
continue
t_list.append(t)
like.append(t_list)
name.append(u.name)
email.append(u.email)
password.append(<PASSWORD>)
user_id.append(u.user_id)
rank.append(u.rank)
unit.append(u.unit)
| 2.59375 | 3 |
quicktest/test.py | 0xf0f/quicktest | 0 | 12790937 | <reponame>0xf0f/quicktest<gh_stars>0
class Test:
def __init__(self):
self.name = '<NAME>'
self.method = lambda: None
| 1.609375 | 2 |
docs/examples/fig6p24.py | uluturki/Mathematics-of-Epidemics-on-Networks | 136 | 12790938 | import networkx as nx
import EoN
from collections import defaultdict
import matplotlib.pyplot as plt
import numpy as np  # replaces "import scipy": the scipy.exp/random/linspace aliases were removed from SciPy
import random
colors = ['#5AB3E6','#FF2000','#009A80','#E69A00', '#CD9AB3', '#0073B3','#F0E442']
rho = 0.01
Nbig=500000
Nsmall = 5000
tau =0.4
gamma = 1.
def poisson():
    return np.random.poisson(5)
def PsiPoisson(x):
    return np.exp(-5*(1-x))
def DPsiPoisson(x):
    return 5*np.exp(-5*(1-x))
bimodalPk = {8:0.5, 2:0.5}
def PsiBimodal(x):
return (x**8 +x**2)/2.
def DPsiBimodal(x):
return(8*x**7 + 2*x)/2.
def homogeneous():
return 5
def PsiHomogeneous(x):
return x**5
def DPsiHomogeneous(x):
return 5*x**4
PlPk = {}
exponent = 1.418184432
kave = 0
for k in range(1,81):
    PlPk[k] = k**(-exponent)*np.exp(-k*1./40)
kave += k*PlPk[k]
normfact= sum(PlPk.values())
for k in PlPk:
PlPk[k] /= normfact
#def trunc_pow_law():
# r = random.random()
# for k in PlPk:
# r -= PlPk[k]
# if r<0:
# return k
def PsiPowLaw(x):
#print PlPk
rval = 0
for k in PlPk:
rval += PlPk[k]*x**k
return rval
def DPsiPowLaw(x):
rval = 0
for k in PlPk:
rval += k*PlPk[k]*x**(k-1)
return rval
def get_G(N, Pk):
while True:
ks = []
for ctr in range(N):
r = random.random()
for k in Pk:
if r<Pk[k]:
break
else:
r-= Pk[k]
ks.append(k)
if sum(ks)%2==0:
break
G = nx.configuration_model(ks)
return G
report_times = np.linspace(0, 20, 41)
def process_degree_distribution(Gbig, Gsmall, color, Psi, DPsi, symbol):
t, S, I, R = EoN.fast_SIR(Gsmall, tau, gamma, rho=rho)
plt.plot(t, I*1./Gsmall.order(), ':', color = color)
t, S, I, R = EoN.fast_SIR(Gbig, tau, gamma, rho=rho)
plt.plot(t, I*1./Gbig.order(), color = color)
N= Gbig.order()#N is arbitrary, but included because our implementation of EBCM assumes N is given.
t, S, I, R = EoN.EBCM(N, lambda x: (1-rho)*Psi(x), lambda x: (1-rho)*DPsi(x), tau, gamma, 1-rho)
I = EoN.subsample(report_times, t, I)
plt.plot(report_times, I/N, symbol, color = color, markeredgecolor='k')
#<NAME>
Gsmall = nx.fast_gnp_random_graph(Nsmall, 5./(Nsmall-1))
Gbig = nx.fast_gnp_random_graph(Nbig, 5./(Nbig-1))
process_degree_distribution(Gbig, Gsmall, colors[0], PsiPoisson, DPsiPoisson, '^')
#Bimodal
Gsmall = get_G(Nsmall, bimodalPk)
Gbig = get_G(Nbig, bimodalPk)
process_degree_distribution(Gbig, Gsmall, colors[1], PsiBimodal, DPsiBimodal, 'o')
#Homogeneous
Gsmall = get_G(Nsmall, {5:1.})
Gbig = get_G(Nbig, {5:1.})
process_degree_distribution(Gbig, Gsmall, colors[2], PsiHomogeneous, DPsiHomogeneous, 's')
#Powerlaw
Gsmall = get_G(Nsmall, PlPk)
Gbig = get_G(Nbig, PlPk)
process_degree_distribution(Gbig, Gsmall, colors[3], PsiPowLaw, DPsiPowLaw, 'd')
plt.axis(xmin=0, ymin=0, xmax = 20, ymax = 0.2)
plt.xlabel('$t$')
plt.ylabel('Proportion Infected')
plt.savefig('fig6p24.png') | 2.3125 | 2 |
calabro/widgets/__init__.py | CarlosGabaldon/calabro | 2 | 12790939 | <reponame>CarlosGabaldon/calabro
from widgets import * | 0.976563 | 1 |
modules/2.79/bpy/types/LimitRotationConstraint.py | cmbasnett/fake-bpy-module | 0 | 12790940 | <reponame>cmbasnett/fake-bpy-module
class LimitRotationConstraint:
max_x = None
max_y = None
max_z = None
min_x = None
min_y = None
min_z = None
use_limit_x = None
use_limit_y = None
use_limit_z = None
use_transform_limit = None
| 1.195313 | 1 |
laboratorio/migrations/0001_initial.py | alejandroquintero/proyecto_laboratorio | 0 | 12790941 | <reponame>alejandroquintero/proyecto_laboratorio
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-14 20:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='Perfil',
fields=[
('group_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='auth.Group')),
('descripcion', models.TextField(blank=True)),
],
options={
'ordering': ['name'],
'verbose_name_plural': 'Perfiles',
},
bases=('auth.group',),
),
migrations.CreateModel(
name='Usuario',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=50)),
('perfil', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='laboratorio.Perfil')),
],
),
]
| 1.59375 | 2 |
src/secml/ml/scalers/tests/c_scaler_testcases.py | zangobot/secml | 63 | 12790942 | from secml.testing import CUnitTest
from secml.array import CArray
from secml.ml.tests import CModuleTestCases
class CScalerTestCases(CModuleTestCases):
"""Unittests interface for Normalizers."""
def _compare_scalers(self, scaler, scaler_sklearn,
array, convert_to_dense=False):
"""Compare wrapped scikit-learn scaler to the unwrapped scaler.
Parameters
----------
        scaler : CScaler
            A wrapped CScaler.
        scaler_sklearn
            Scikit-learn normalizer.
        array : CArray
            Input array used to fit both scalers.
convert_to_dense : bool, optional
If True the data used by the SkLearn scaler will be converted
to dense.
Returns
-------
scaler_sklearn
Trained Scikit-learn normalizer (from `sklearn.preprocessing`).
scaler : CScaler
Trained normalizer.
"""
self.logger.info("Original array is:\n{:}".format(array))
array_sk = array.get_data() if convert_to_dense is False \
else array.tondarray()
# Sklearn normalizer
scaler_sklearn.fit(array_sk, None)
transform_sklearn = CArray(scaler_sklearn.transform(array_sk))
# Our normalizer
scaler._fit(array)
transform = scaler.forward(array)
self.logger.info("sklearn result is:\n{:}".format(transform_sklearn))
self.logger.info("Our result is:\n{:}".format(transform))
self.assert_array_almost_equal(transform_sklearn, transform)
return scaler, scaler_sklearn
def _test_chain(self, x, class_type_list, kwargs_list, y=None):
"""Tests if preprocess chain and manual chaining yield same result."""
x_chain = super(CScalerTestCases, self)._test_chain(
x, class_type_list, kwargs_list, y)
self.assertEqual((self.array_dense.shape[0],
self.array_dense.shape[1] - 1), x_chain.shape)
return x_chain
def _test_chain_gradient(self, x, class_type_list, kwargs_list, y=None):
"""Tests if gradient preprocess chain and
gradient of manual chaining yield same result."""
grad_chain = super(CScalerTestCases, self)._test_chain_gradient(
x, class_type_list, kwargs_list, y)
self.assertEqual((self.array_dense.shape[1],), grad_chain.shape)
return grad_chain
if __name__ == '__main__':
CUnitTest.main()
| 2.515625 | 3 |
herramientas/zapador/tests/test_zapadorapp.py | ZR-TECDI/Framework_ZR | 4 | 12790943 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from zapador.zapadorapp import ZapadorApp
class TestZapadorApp(unittest.TestCase):
"""TestCase for ZapadorApp.
"""
def setUp(self):
self.app = ZapadorApp()
def test_name(self):
self.assertEqual(self.app.name, 'zapador')
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
| 2.578125 | 3 |
tonclient/boc.py | move-ton/ton-client-py | 28 | 12790944 | <gh_stars>10-100
from tonclient.decorators import result_as
from tonclient.module import TonModule
from tonclient.types import ParamsOfParse, ResultOfParse, \
ParamsOfParseShardstate, ParamsOfGetBocHash, ResultOfGetBocHash, \
ParamsOfGetBlockchainConfig, ResultOfGetBlockchainConfig, \
ParamsOfGetCodeFromTvc, ResultOfGetCodeFromTvc, ParamsOfBocCacheGet, \
ResultOfBocCacheGet, ParamsOfBocCacheSet, ResultOfBocCacheSet, \
ParamsOfBocCacheUnpin, ParamsOfEncodeBoc, ResultOfEncodeBoc, \
ParamsOfGetCodeSalt, ResultOfGetCodeSalt, ParamsOfSetCodeSalt, \
ResultOfSetCodeSalt, ParamsOfDecodeTvc, ResultOfDecodeTvc, \
ParamsOfEncodeTvc, ResultOfEncodeTvc, ParamsOfGetCompilerVersion, \
ResultOfGetCompilerVersion
class TonBoc(TonModule):
""" Free TON boc SDK API implementation """
@result_as(classname=ResultOfParse)
def parse_message(self, params: ParamsOfParse) -> ResultOfParse:
"""
Parses message boc into a JSON.
JSON structure is compatible with GraphQL API message object
:param params: See `types.ParamsOfParse`
:return: See `types.ResultOfParse`
"""
return self.request(method='boc.parse_message', **params.dict)
@result_as(classname=ResultOfParse)
def parse_transaction(self, params: ParamsOfParse) -> ResultOfParse:
"""
Parses transaction boc into a JSON.
JSON structure is compatible with GraphQL API transaction object
:param params: See `types.ParamsOfParse`
:return: See `types.ResultOfParse`
"""
return self.request(method='boc.parse_transaction', **params.dict)
@result_as(classname=ResultOfParse)
def parse_account(self, params: ParamsOfParse) -> ResultOfParse:
"""
Parses account boc into a JSON.
JSON structure is compatible with GraphQL API account object
:param params: See `types.ParamsOfParse`
:return: See `types.ResultOfParse`
"""
return self.request(method='boc.parse_account', **params.dict)
@result_as(classname=ResultOfParse)
def parse_block(self, params: ParamsOfParse) -> ResultOfParse:
"""
Parses block boc into a JSON.
JSON structure is compatible with GraphQL API block object
:param params: See `types.ParamsOfParse`
:return: See `types.ResultOfParse`
"""
return self.request(method='boc.parse_block', **params.dict)
@result_as(classname=ResultOfParse)
def parse_shardstate(
self, params: ParamsOfParseShardstate) -> ResultOfParse:
"""
Parses shardstate boc into a JSON.
JSON structure is compatible with GraphQL API shardstate object
:param params: See `ParamsOfParseShardstate`
:return: See `ResultOfParse`
"""
return self.request(method='boc.parse_shardstate', **params.dict)
@result_as(classname=ResultOfGetBocHash)
def get_boc_hash(self, params: ParamsOfGetBocHash) -> ResultOfGetBocHash:
"""
Calculates BOC root hash
:param params: See `ParamsOfGetBocHash`
:return: See `ResultOfGetBocHash`
"""
return self.request(method='boc.get_boc_hash', **params.dict)
@result_as(classname=ResultOfGetBlockchainConfig)
def get_blockchain_config(
self, params: ParamsOfGetBlockchainConfig
) -> ResultOfGetBlockchainConfig:
"""
Extract blockchain configuration from key block and also from
zero state
:param params: See `ParamsOfGetBlockchainConfig`
:return: See `ResultOfGetBlockchainConfig`
"""
return self.request(
method='boc.get_blockchain_config', **params.dict)
@result_as(classname=ResultOfGetCodeFromTvc)
def get_code_from_tvc(
self, params: ParamsOfGetCodeFromTvc) -> ResultOfGetCodeFromTvc:
"""
Extracts code from TVC contract image
:param params: See `types.ParamsOfGetCodeFromTvc`
:return: See `types.ResultOfGetCodeFromTvc`
"""
return self.request(method='boc.get_code_from_tvc', **params.dict)
@result_as(classname=ResultOfBocCacheGet)
def cache_get(self, params: ParamsOfBocCacheGet) -> ResultOfBocCacheGet:
"""
Get BOC from cache
:param params: See `types.ParamsOfBocCacheGet`
:return: See `types.ResultOfBocCacheGet`
"""
return self.request(method='boc.cache_get', **params.dict)
@result_as(classname=ResultOfBocCacheSet)
def cache_set(self, params: ParamsOfBocCacheSet) -> ResultOfBocCacheSet:
"""
Save BOC into cache
:param params: See `types.ParamsOfBocCacheSet`
:return: See `types.ResultOfBocCacheSet`
"""
return self.request(method='boc.cache_set', **params.dict)
def cache_unpin(self, params: ParamsOfBocCacheUnpin):
"""
Unpin BOCs with specified pin.
BOCs which don't have another pins will be removed from cache
:param params: See `types.ParamsOfBocCacheUnpin`
:return:
"""
return self.request(method='boc.cache_unpin', **params.dict)
@result_as(classname=ResultOfEncodeBoc)
def encode_boc(self, params: ParamsOfEncodeBoc) -> ResultOfEncodeBoc:
"""
Encodes bag of cells (BOC) with builder operations.
This method provides the same functionality as Solidity TvmBuilder.
Resulting BOC of this method can be passed into Solidity and C++
contracts as TvmCell type
:param params: See `types.ParamsOfEncodeBoc`
:return: See `types.ResultOfEncodeBoc`
"""
return self.request(method='boc.encode_boc', **params.dict)
@result_as(classname=ResultOfGetCodeSalt)
def get_code_salt(
self, params: ParamsOfGetCodeSalt) -> ResultOfGetCodeSalt:
"""
Returns the contract code's salt if it is present
:param params: See `types.ParamsOfGetCodeSalt`
:return: See `types.ResultOfGetCodeSalt`
"""
return self.request(method='boc.get_code_salt', **params.dict)
@result_as(classname=ResultOfSetCodeSalt)
def set_code_salt(
self, params: ParamsOfSetCodeSalt) -> ResultOfSetCodeSalt:
"""
Sets new salt to contract code.
Returns the new contract code with salt
:param params: See `types.ParamsOfSetCodeSalt`
:return: See `types.ResultOfSetCodeSalt`
"""
return self.request(method='boc.set_code_salt', **params.dict)
@result_as(classname=ResultOfDecodeTvc)
def decode_tvc(self, params: ParamsOfDecodeTvc) -> ResultOfDecodeTvc:
"""
Decodes tvc into code, data, libraries and special options
:param params: See `types.ParamsOfDecodeTvc`
:return: See `types.ResultOfDecodeTvc`
"""
return self.request(method='boc.decode_tvc', **params.dict)
@result_as(classname=ResultOfEncodeTvc)
def encode_tvc(self, params: ParamsOfEncodeTvc) -> ResultOfEncodeTvc:
"""
        Encodes tvc from code, data, libraries and special options
(see input params)
:param params: See `types.ParamsOfEncodeTvc`
:return: See `types.ResultOfEncodeTvc`
"""
return self.request(method='boc.encode_tvc', **params.dict)
@result_as(classname=ResultOfGetCompilerVersion)
def get_compiler_version(
self, params: ParamsOfGetCompilerVersion
) -> ResultOfGetCompilerVersion:
"""
Returns the compiler version used to compile the code
:param params: See `types.ParamsOfGetCompilerVersion`
:return: See `types.ResultOfGetCompilerVersion`
"""
return self.request(method='boc.get_compiler_version', **params.dict)
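# Usage sketch (added; assumes a configured TonClient exposing this module as
# `client.boc`, and a hypothetical base64 BOC string):
#
#   params = ParamsOfParse(boc="te6ccgEBAQEA...")
#   parsed = client.boc.parse_message(params=params)
#   print(parsed.parsed)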
| 2.21875 | 2 |
scripts/pygrill/board/ssrc_client.py | kins-dev/igrill | 5 | 12790945 | <gh_stars>1-10
#!/usr/bin/env python3
"""
Copyright (c) 2019: <NAME> <<EMAIL>>
(https://git.kins.dev/igrill-smoker)
License: MIT License
See the LICENSE file
"""
__author__ = "<NAME>"
__version__ = "1.4.0"
__license__ = "MIT"
import pigpio
import logging
import argparse
import configparser
import os
import time
import sys
from Pyro5.api import Proxy
from ..common.local_logging import SetupLog
from . import ssrc_daemon
from ..common.constant import SSRC, CONFIG
config = configparser.ConfigParser()
# does not throw an error, just returns the empty set if the file doesn't exist
config.read(CONFIG.BASEPATH+'/config/iGrill_config.ini')
loglevel = config.get("Logging", "LogLevel", fallback="Error")
logfile = config.get("Logging", "LogFile", fallback="")
parser = argparse.ArgumentParser(
description='Connects to TP-Link Kasa daemon for power control')
parser.add_argument(
'--cold',
dest='cold',
help='The current temp is colder than it should be',
action='store_true')
parser.add_argument(
'--hot',
dest='hot',
help='The current temp is hotter than it should be',
action='store_true')
parser.add_argument(
'--in_band',
dest='in_band',
help='The current temp is close to what it should be',
action='store_true')
# need target temp, current temp and last temp
# in band:
# if need to get warmer and getting warmer - do nothing
# if need to get colder and getting colder - do nothing
# if need to stay the same and staying the same - do nothing
# if need to get warmer and staying the same - up small amount(1%)
# if need to get warmer and getting colder - up large amount (10%)
# if need to get colder and getting warmer - down large amount (10%)
# if need to get colder and staying the same - down small amount (1%)
# out of band:
# if need to get warmer - PWM to 100%
# if need to get colder - PWM to 0%
# out of band to in band
# use previous value or pwm is set to 50%
# need to save and restore PWM value for target temp
parser.add_argument(
'--exit',
dest='shutdown',
help='Tells the daemon to shutdown',
action='store_true')
parser.add_argument(
'-l',
'--log-level',
action='store',
dest='log_level',
default=loglevel,
help='Set log level, default: \'' + loglevel + '\'')
parser.add_argument(
'-d',
'--log-destination',
action='store',
dest='log_destination',
default=logfile,
help='Set log destination (file), default: \'' + logfile + '\'')
parser.add_argument(
'--status',
dest='status',
help='Gets the SSRC status',
action='store_true')
options = parser.parse_args()
SetupLog(options.log_level, options.log_destination)
if(0 < len(vars(options))):
if(options.hot and options.cold):
print("Cannot be too hot and too cold at the same time")
sys.exit(1)
ssrcObj = Proxy(("PYRO:{}@{}:{}").format(
SSRC.DAEMON.PYRO_OBJECT_ID,
SSRC.DAEMON.PYRO_HOST,
SSRC.DAEMON.PYRO_PORT))
try:
if(options.shutdown):
ssrcObj.Exit()
elif(options.status):
print("{:.2f}".format(ssrcObj.Status()))
else:
if(options.in_band):
if(options.hot):
ssrcObj.Adjust(SSRC.TemperatureState.WARM)
logging.debug(
"Adjust down 1% to {:.2f}%".format(ssrcObj.Status()))
elif(options.cold):
ssrcObj.Adjust(SSRC.TemperatureState.COOL)
logging.debug(
"Adjust up 0.25% to {:.2f}%".format(ssrcObj.Status()))
else:
ssrcObj.Adjust(SSRC.TemperatureState.PERFECT)
logging.debug(
"Stay at {:.2f}%".format(ssrcObj.Status()))
else:
if(options.hot):
ssrcObj.Adjust(SSRC.TemperatureState.HOT)
logging.debug(
"Adjust down 20% to {:.2f}%".format(ssrcObj.Status()))
elif(options.cold):
ssrcObj.Adjust(SSRC.TemperatureState.COLD)
logging.debug(
"Adjust up 5% to {:.2f}%".format(ssrcObj.Status()))
else:
logging.info(
"Odd, ssr_client called but not in band, hot or cold, ignoring")
# something else like logging
pass
except Exception:
logging.error(
"Exception while attempting to adjust SSRC - may be a temporary issue")
finally:
# Might get an exception from a communication error (new IP)
sys.exit(0)
| 1.96875 | 2 |
beginner_contest/123/C.py | FGtatsuro/myatcoder | 0 | 12790946 | <reponame>FGtatsuro/myatcoder
import sys
input = sys.stdin.readline
sys.setrecursionlimit(10 ** 7)
n = int(input())
a = int(input())
b = int(input())
c = int(input())
d = int(input())
e = int(input())
k = min(a, b, c, d, e)  # throughput of the slowest step per minute
print((n + k - 1) // k + 4)  # ceil(n / k) minutes for the bottleneck, plus 4 to clear the rest
| 2.1875 | 2 |
app/appmodule/remote.py | linjinux/foxJobGui | 0 | 12790947 | <filename>app/appmodule/remote.py
#!/usr/bin/env python
import tkinter as tk
import os
class RemoteWindow_W(tk.Tk):
"""remote module function"""
def __init__(self):
        super(RemoteWindow_W, self).__init__()  # initialise the Tk base class
        width = 500   # window width
        height = 300  # window height
        sw = (self.winfo_screenwidth() - width) // 2    # x coordinate that centres the window
        sh = (self.winfo_screenheight() - height) // 2  # y coordinate that centres the window
        self.title("链接信息")  # "Connection info"
        self.geometry("{}x{}+{}+{}".format(width, height, sw, sh))
        self.resizable(0, 0)  # fixed-size window
        self.setup_Ui()  # build the widgets
def setup_Ui(self):
self.connect_name=tk.StringVar()
self.Label_connect_name = tk.Label(self,text='名称',width=6).place(x=10, y=10)
self.Entry_connect_name = tk.Entry(self,textvariable=self.connect_name, width=15).place(x=60, y=10)
self.connect_addr=tk.StringVar()
self.connect_port=tk.StringVar()
self.Label_connect_addr = tk.Label(self, text='地址',width=6).place(x=10, y=60)
self.Entry_connect_addr= tk.Entry(self, textvariable=self.connect_addr, width=15).place(x=60, y=60)
self.Entry_connect_port = tk.Entry(self, textvariable=self.connect_port, width=6).place(x=190, y=60)
self.user_name=tk.StringVar()
self.Label_user_name = tk.Label(self, text='用户名',width=6).place(x=10, y=110)
self.Entry_user_name = tk.Entry(self, textvariable=self.user_name, width=15).place(x=60, y=110)
self.user_passwd=tk.StringVar()
self.Label_user_passwd = tk.Label(self, text='密码',width=6).place(x=10, y=160)
self.Entry_user_passwd = tk.Entry(self, textvariable=self.user_passwd, show="*",width=15).place(x=60, y=160)
self.connect_protocol=tk.StringVar()
self.Label_connect_protocol = tk.Label(self, text='协议', width=6).place(x=10, y=210)
self.Entry_connect_protocol = tk.Entry(self, textvariable=self.connect_protocol, width=15).place(x=60, y=210)
self.Button_save = tk.Button(self, text='保存', width=10).place(x=10, y=260)
self.Button_connect = tk.Button(self, text='启动链接', width=10,command=self.connect_Remote_Var).place(x=160, y=260)
def connect_Remote_Var(self):
passwd=self.user_passwd.get()
name=self.user_name.get()
addr=self.connect_addr.get()
port=self.connect_port.get()
connect_name=self.connect_name.get()
protocol=self.connect_protocol.get()
self.start_Connect(protocol,name,addr,port,passwd,connect_name)
def start_Connect(self,protocol,name,addr,port,passwd,connect_name):
os_path=os.path.abspath('.')
os.system("start {}\\app\\share\\sysfile\\putty.exe -{} {}@{} -pw {} -P {}".format(os_path,protocol,name,addr,passwd,port))
class RemoteWindow_L(RemoteWindow_W):
    def start_Connect(self, protocol, name, addr, port, passwd, connect_name):
        # fix: keep the parent's argument order so connect_Remote_Var maps values correctly
        os.system("{} {}@{} -p {}".format(protocol, name, addr, port))
| 3.3125 | 3 |
pyeccodes/defs/grib1/mars_labeling_def.py | ecmwf/pyeccodes | 7 | 12790948 | <reponame>ecmwf/pyeccodes
import pyeccodes.accessors as _
def load(h):
h.add(_.StringCodetable('marsClass', 1, "mars/class.table"))
h.add(_.StringCodetable('marsType', 1, "mars/type.table"))
h.add(_.StringCodetable('marsStream', 2, "mars/stream.table"))
h.add(_.Ksec1expver('experimentVersionNumber', 4))
h.alias('ls.dataType', 'marsType')
h.alias('mars.class', 'marsClass')
h.alias('mars.type', 'marsType')
h.alias('mars.stream', 'marsStream')
h.alias('mars.expver', 'experimentVersionNumber')
h.alias('mars.domain', 'globalDomain')
| 1.867188 | 2 |
doc/code/basic_note_calculation.py | 6r1d/midi-message-parser | 0 | 12790949 | #!/usr/bin/env python3
"""
A program to calculate note frequencies for a number of MIDI notes.
"""
def midi_note_to_freq_basic(keynum):
return 440.0 * pow(2.0, (keynum - 69.0) / 12.0)
f_basic = [midi_note_to_freq_basic(key) for key in range(128)]
print(f_basic)
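# Sanity checks (added): concert A (MIDI 69) is 440 Hz; middle C (MIDI 60) is ~261.63 Hz.
print(midi_note_to_freq_basic(69))            # 440.0
print(round(midi_note_to_freq_basic(60), 2))  # 261.63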
| 3.640625 | 4 |
highlanderclient/tests/unit/v1/test_cli_actions.py | StephenTao/python-stephenclient | 0 | 12790950 | <filename>highlanderclient/tests/unit/v1/test_cli_actions.py<gh_stars>0
# Copyright 2014 Mirantis, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
from highlanderclient.api.v2 import actions
from highlanderclient.commands.v2 import actions as action_cmd
from highlanderclient.tests.unit import base
ACTION_DICT = {
'name': 'a',
'is_system': True,
'input': "param1",
'description': 'My cool action',
'tags': ['test'],
'created_at': '1',
'updated_at': '1'
}
ACTION_DEF = """
---
version: '2.0'
base: std.echo
base-parameters:
output: "<% $.str1 %><% $.str2 %>"
output: "<% $ %><% $ %>"
"""
ACTION_WITH_DEF_DICT = ACTION_DICT.copy()
ACTION_WITH_DEF_DICT.update({'definition': ACTION_DEF})
ACTION = actions.Action(mock, ACTION_DICT)
ACTION_WITH_DEF = actions.Action(mock, ACTION_WITH_DEF_DICT)
class TestCLIActionsV2(base.BaseCommandTest):
@mock.patch('argparse.open', create=True)
@mock.patch('highlanderclient.api.v2.actions.ActionManager.create')
def test_create(self, mock, mock_open):
mock.return_value = (ACTION,)
result = self.call(action_cmd.Create, app_args=['1.txt'])
self.assertEqual(
[('a', True, "param1", 'My cool action', 'test', '1', '1')],
result[1]
)
@mock.patch('argparse.open', create=True)
@mock.patch('highlanderclient.api.v2.actions.ActionManager.update')
def test_update(self, mock, mock_open):
mock.return_value = (ACTION,)
result = self.call(action_cmd.Update, app_args=['my_action.yaml'])
self.assertEqual(
[('a', True, "param1", 'My cool action', 'test', '1', '1')],
result[1]
)
@mock.patch('highlanderclient.api.v2.actions.ActionManager.list')
def test_list(self, mock):
mock.return_value = (ACTION,)
result = self.call(action_cmd.List)
self.assertEqual(
[('a', True, "param1", 'My cool action', 'test', '1', '1')],
result[1]
)
@mock.patch('highlanderclient.api.v2.actions.ActionManager.get')
def test_get(self, mock):
mock.return_value = ACTION
result = self.call(action_cmd.Get, app_args=['name'])
self.assertEqual(
('a', True, "param1", 'My cool action', 'test', '1', '1'),
result[1]
)
@mock.patch('highlanderclient.api.v2.actions.ActionManager.delete')
def test_delete(self, del_mock):
self.call(action_cmd.Delete, app_args=['name'])
del_mock.assert_called_once_with('name')
@mock.patch('highlanderclient.api.v2.actions.ActionManager.delete')
def test_delete_with_multi_names(self, del_mock):
self.call(action_cmd.Delete, app_args=['name1', 'name2'])
self.assertEqual(2, del_mock.call_count)
self.assertEqual(
[mock.call('name1'), mock.call('name2')],
del_mock.call_args_list
)
@mock.patch('highlanderclient.api.v2.actions.'
'ActionManager.get')
def test_get_definition(self, mock):
mock.return_value = ACTION_WITH_DEF
self.call(action_cmd.GetDefinition, app_args=['name'])
self.app.stdout.write.assert_called_with(ACTION_DEF)
| 2.25 | 2 |