Dataset schema (one record per Python source file; "nullable" marks fields shown as ⌀ in the original dump):

| field | dtype | range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 2.06M |
| ext | string | 10 classes |
| lang | string | 1 value |
| max_stars_repo_path | string | length 3 to 248 |
| max_stars_repo_name | string | length 5 to 125 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 3 to 248 |
| max_issues_repo_name | string | length 5 to 125 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 3 to 248 |
| max_forks_repo_name | string | length 5 to 125 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 5 to 2.06M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
| count_classes | int64 | 0 to 1.6M |
| score_classes | float64 | 0 to 1 |
| count_generators | int64 | 0 to 651k |
| score_generators | float64 | 0 to 1 |
| count_decorators | int64 | 0 to 990k |
| score_decorators | float64 | 0 to 1 |
| count_async_functions | int64 | 0 to 235k |
| score_async_functions | float64 | 0 to 1 |
| count_documentation | int64 | 0 to 1.04M |
| score_documentation | float64 | 0 to 1 |
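The records below follow this schema. As orientation, a minimal sketch of filtering such a dump with the Hugging Face `datasets` API; the dataset id `"user/python-code-dump"` is a placeholder, not the real name of this dataset:

```python
from datasets import load_dataset

# Placeholder dataset id; substitute the actual source of this dump.
ds = load_dataset("user/python-code-dump", split="train", streaming=True)

# Keep well-documented files from repos with at least 10 stars.
for row in ds:
    if (row["max_stars_count"] or 0) >= 10 and row["score_documentation"] > 0.3:
        print(row["max_stars_repo_name"], row["max_stars_repo_path"])
        break
```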

--- record: simple_ddl_parser/tokens.py ---
hexsha: 31003c512b10b0b0ca56e1c7ca80af3e0e177a3f | size: 2,662 | ext: py | lang: Python
repo: burakuyar/simple-ddl-parser @ f5714e0947a7cb48c202a91969b6e6d0d9a64d24 | licenses: ["MIT"]
stars: 46 (2021-03-07T02:21:29.000Z to 2022-03-24T12:45:03.000Z) | issues: 53 (2021-03-08T08:50:00.000Z to 2022-03-29T06:11:13.000Z) | forks: 12 (2021-06-21T16:14:17.000Z to 2022-03-25T13:52:35.000Z)

# statements used at the start of a definition, or in statements without columns
defenition_statements = {
"DROP": "DROP",
"CREATE": "CREATE",
"TABLE": "TABLE",
"DATABASE": "DATABASE",
"SCHEMA": "SCHEMA",
"ALTER": "ALTER",
"TYPE": "TYPE",
"DOMAIN": "DOMAIN",
"REPLACE": "REPLACE",
"OR": "OR",
"CLUSTERED": "CLUSTERED",
"SEQUENCE": "SEQUENCE",
"TABLESPACE": "TABLESPACE",
}
common_statements = {
"INDEX": "INDEX",
"REFERENCES": "REFERENCES",
"KEY": "KEY",
"ADD": "ADD",
"AS": "AS",
"CLONE": "CLONE",
"DEFERRABLE": "DEFERRABLE",
"INITIALLY": "INITIALLY",
"IF": "IF",
"NOT": "NOT",
"EXISTS": "EXISTS",
"ON": "ON",
"FOR": "FOR",
"ENCRYPT": "ENCRYPT",
"SALT": "SALT",
"NO": "NO",
"USING": "USING",
# bigquery
"OPTIONS": "OPTIONS",
}
columns_defenition = {
"DELETE": "DELETE",
"UPDATE": "UPDATE",
"NULL": "NULL",
"ARRAY": "ARRAY",
",": "COMMA",
"DEFAULT": "DEFAULT",
"COLLATE": "COLLATE",
"ENFORCED": "ENFORCED",
"ENCODE": "ENCODE",
"GENERATED": "GENERATED",
"COMMENT": "COMMENT",
}
first_liners = {
"LIKE": "LIKE",
"CONSTRAINT": "CONSTRAINT",
"FOREIGN": "FOREIGN",
"PRIMARY": "PRIMARY",
"UNIQUE": "UNIQUE",
"CHECK": "CHECK",
"WITH": "WITH",
}
common_statements.update(first_liners)
defenition_statements.update(common_statements)
after_columns_tokens = {
"PARTITIONED": "PARTITIONED",
"PARTITION": "PARTITION",
"BY": "BY",
# hql
"INTO": "INTO",
"STORED": "STORED",
"LOCATION": "LOCATION",
"ROW": "ROW",
"FORMAT": "FORMAT",
"TERMINATED": "TERMINATED",
"COLLECTION": "COLLECTION",
"ITEMS": "ITEMS",
"MAP": "MAP",
"KEYS": "KEYS",
"SERDE": "SERDE",
"CLUSTER": "CLUSTER",
"SERDEPROPERTIES": "SERDEPROPERTIES",
"TBLPROPERTIES": "TBLPROPERTIES",
"SKEWED": "SKEWED",
# oracle
"STORAGE": "STORAGE",
"TABLESPACE": "TABLESPACE",
# mssql
"TEXTIMAGE_ON": "TEXTIMAGE_ON",
}
sequence_reserved = {
"INCREMENT": "INCREMENT",
"START": "START",
"MINVALUE": "MINVALUE",
"MAXVALUE": "MAXVALUE",
"CACHE": "CACHE",
"NO": "NO",
}
tokens = tuple(
set(
["ID", "DOT", "STRING", "DQ_STRING", "LP", "RP", "LT", "RT", "COMMAT"]
+ list(defenition_statements.values())
+ list(common_statements.values())
+ list(columns_defenition.values())
+ list(sequence_reserved.values())
+ list(after_columns_tokens.values())
)
)
symbol_tokens = {
")": "RP",
"(": "LP",
}
symbol_tokens_no_check = {"<": "LT", ">": "RT"}
stats: avg_line_length 22.752137 | max_line_length 82 | alphanum_fraction 0.545079 | classes 0/0 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 1,440/0.540947 (count/score)
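These dictionaries are keyword tables of the kind a PLY-style lexer consumes (the `tokens` tuple is the token-name list such lexers require). A minimal sketch of classifying a raw word against them; the `classify` helper is hypothetical, not part of the repo:

```python
# Hypothetical helper, not part of simple_ddl_parser itself.
from simple_ddl_parser.tokens import (
    defenition_statements, columns_defenition, after_columns_tokens)

def classify(word):
    """Map a raw word to a token name, falling back to the generic ID token."""
    for table in (defenition_statements, columns_defenition, after_columns_tokens):
        if word.upper() in table:
            return table[word.upper()]
    return "ID"

print(classify("create"))    # -> "CREATE"
print(classify("my_table"))  # -> "ID"
```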

--- record: rmexp/worker.py ---
hexsha: 3100684d54c98a60b6eb64626957c2c118c7564c | size: 7,715 | ext: py | lang: Python
repo: junjuew/scalable-edge-native-applications @ 1ace9ce71bef267880049633160e7cb74c57bddb | licenses: ["Apache-2.0"]
stars: 1 (2021-05-12T12:49:15.000Z to 2021-05-12T12:49:15.000Z) | issues: null | forks: 1 (2021-11-21T08:12:19.000Z to 2021-11-21T08:12:19.000Z)

from __future__ import absolute_import, division, print_function
import json
import logging
import os
import time
import importlib
import multiprocessing
import cv2
import fire
import logzero
from logzero import logger
import numpy as np
from rmexp import config, cvutils, dbutils, gabriel_pb2, client
from rmexp.schema import models
logzero.formatter(logging.Formatter(
fmt='%(asctime)s.%(msecs)03d - %(levelname)s: %(message)s', datefmt='%H:%M:%S'))
logzero.loglevel(logging.DEBUG)
def work_loop(job_queue, app, busy_wait=None):
"""[summary]
Arguments:
job_queue {[type]} -- [description]
app {[type]} -- [description]
Keyword Arguments:
busy_wait {float} -- if not None, busy spin seconds instead of running actual app (default: {None})
"""
handler = importlib.import_module(app).Handler()
while True:
get_ts = time.time()
msg = job_queue.get()[0]
get_wait = time.time() - get_ts
if get_wait > 2e-3:
logger.warn("[pid {}] took {} ms to get a new request. Maybe waiting".format(
os.getpid(), int(1000 * get_wait)))
arrival_ts = time.time()
gabriel_msg = gabriel_pb2.Message()
gabriel_msg.ParseFromString(msg)
encoded_im, ts = gabriel_msg.data, gabriel_msg.timestamp
logger.debug("[pid {}] about to process frame {}".format(
os.getpid(), gabriel_msg.index))
cts = time.clock()
if not busy_wait:
# do real work
encoded_im_np = np.frombuffer(encoded_im, dtype=np.uint8)
img = cv2.imdecode(encoded_im_np, cv2.CV_LOAD_IMAGE_UNCHANGED)
result = handler.process(img)
else:
# busy wait fixed time
tic = time.time()
while True:
if time.time() - tic > busy_wait:
break
result = 'busy wait {}'.format(busy_wait)
finished_ts = time.time()
time_lapse = (finished_ts - ts) * 1000
cpu_proc_ms = round((time.clock() - cts) * 1000)
if gabriel_msg.reply:
reply = gabriel_pb2.Message()
reply.data = str(result)
reply.timestamp = gabriel_msg.timestamp
reply.index = gabriel_msg.index
reply.finished_ts = finished_ts
reply.arrival_ts = arrival_ts
reply.cpu_proc_ms = cpu_proc_ms
job_queue.put([reply.SerializeToString(), ])
logger.debug('[pid {}] takes {} ms (cpu: {} ms) for frame {}: {}.'.format(
os.getpid(), (time.time() - ts) * 1000, cpu_proc_ms, gabriel_msg.index, result))
class Sampler(object):
"""A Class to sample video stream. Designed to work with cam.read().
Sample once every sample_period calls
"""
def __init__(self, sample_period, sample_func=None):
super(Sampler, self).__init__()
self._sp = sample_period
assert(type(sample_period) is int and sample_period > 0)
self._sf = sample_func
self._cnt = 0
def sample(self):
while True:
self._cnt = (self._cnt + 1) % self._sp
if self._cnt == 0:
return self._sf()
self._sf()
def process_and_time(img, app_handler):
ts = time.time()
result = app_handler.process(img)
time_lapse = int(round((time.time() - ts) * 1000))
return result, time_lapse
def store(
data,
session,
store_result,
store_latency,
store_profile,
**kwargs):
name, trace, idx, result, time_lapse = data
if store_result:
rec, _ = dbutils.get_or_create(
session,
models.SS,
name=name,
index=idx,
trace=trace)
rec.val = str(result)
if store_latency:
rec, _ = dbutils.get_or_create(
session,
models.LegoLatency,
name=name,
index=idx)
rec.val = int(time_lapse)
if store_profile:
rec = kwargs
rec.update(
{'trace': trace,
'index': idx,
'name': name,
'latency': time_lapse
}
)
dbutils.insert(
session,
models.ResourceLatency,
rec
)
def batch_process(video_uri,
app,
experiment_name,
trace=None,
store_result=False,
store_latency=False,
store_profile=False,
**kwargs):
"""Batch process a video. Able to store both the result and the frame processing latency.
Arguments:
video_uri {string} -- Video URI
app {string} -- Applicaiton name
experiment_name {string} -- Experiment name
Keyword Arguments:
trace {string} -- Trace id
        store_result {bool} -- whether to store results in the database (default: {False})
        store_latency {bool} -- whether to store per-frame latency in the database (default: {False})
        store_profile {bool} -- whether to store resource/latency profile records (default: {False})
cpu {string} -- No of CPUs used. Used to populate profile database
memory {string} -- No of memory used. Used to populate profile database
num_worker {int} -- No of simultaneous workers. Used to populate profile database
"""
if trace is None:
trace = os.path.basename(os.path.dirname(video_uri))
app = importlib.import_module(app)
app_handler = app.Handler()
vc = client.VideoClient(
app.__name__, video_uri, None, loop=False, random_start=False)
idx = 1
with dbutils.session_scope() as session:
for img in vc.get_frame_generator():
cpu_time_ts = time.clock()
result, time_lapse = process_and_time(img, app_handler)
logger.debug("[pid: {}] processing frame {} from {}. {} ms".format(os.getpid(),
idx, video_uri, int(time_lapse)))
logger.debug(result)
store(
(experiment_name, trace, idx, result, time_lapse),
session,
store_result,
store_latency,
store_profile,
**kwargs
)
idx += 1
def phash(video_uri):
cam = cv2.VideoCapture(video_uri)
has_frame = True
with dbutils.session_scope(dry_run=False) as sess:
trace_name = os.path.basename(os.path.dirname(video_uri))
idx = 1
while has_frame:
has_frame, img = cam.read()
if img is not None:
cur_hash = cvutils.phash(img)
sess.add(models.SS(
name='{}-f{}-phash'.format(trace_name, idx),
val=str(cur_hash),
trace=trace_name))
idx += 1
def phash_diff_adjacent_frame(video_uri, output_dir):
cam = cv2.VideoCapture(video_uri)
os.makedirs(output_dir)
has_frame = True
prev_hash = None
idx = 1
logger.debug('calculating phash diff for adjacent frames')
while has_frame:
has_frame, img = cam.read()
if img is not None:
cur_hash = cvutils.phash(img)
if prev_hash is not None:
diff = cur_hash - prev_hash
cv2.putText(img, 'diff={}'.format(
diff), (int(img.shape[1] / 3), img.shape[0] - 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), thickness=5)
cv2.imwrite(os.path.join(
output_dir, '{:010d}.jpg'.format(idx)), img)
logger.debug(diff)
prev_hash = cur_hash
idx += 1
if __name__ == "__main__":
fire.Fire()
stats: avg_line_length 31.748971 | max_line_length 125 | alphanum_fraction 0.564485 | classes 576/0.07466 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 1,541/0.199741 (count/score)
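A minimal sketch of using the `Sampler` class above in isolation; `cam` and the video path are hypothetical:

```python
# Hypothetical use of Sampler: downsample a capture stream 3:1.
cam = cv2.VideoCapture("trace.mp4")                      # path is illustrative
sampler = Sampler(sample_period=3, sample_func=cam.read)
ok, frame = sampler.sample()  # consumes 3 reads, returns the 3rd result
```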

--- record: app/requests/users.py ---
hexsha: 3101d2087b21203c5098923bc3b4dd50b2a5f5c1 | size: 616 | ext: py | lang: Python
repo: codingedward/book-a-meal-api @ 36756abc225bf7e8306330f2c3e223dc32af7869 | licenses: ["MIT"]
stars: null | issues: null | forks: 2 (2018-10-01T17:45:19.000Z to 2020-12-07T13:48:25.000Z)

from .base import JsonRequest
class PostRequest(JsonRequest):
@staticmethod
def rules():
return {
'email': 'required|email|unique:User,email',
'password': 'required|string|confirmed|least_string:6',
'username': 'required|alpha|least_string:3',
'role': 'integer|positive|found_in:1,2',
}
class PutRequest(JsonRequest):
@staticmethod
def rules():
return {
'password': 'string|confirmed|least_string:6',
'username': 'alpha|least_string:3',
'role': 'integer|positive|found_in:1,2',
}
stats: avg_line_length 26.782609 | max_line_length 67 | alphanum_fraction 0.586039 | classes 580/0.941558 | generators 0/0 | decorators 509/0.826299 | async_functions 0/0 | documentation 283/0.459416 (count/score)
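The rule strings above read like a Laravel-style validation DSL. A sketch of a payload that should satisfy `PostRequest.rules()`; the `password_confirmation` key assumes the usual meaning of the `confirmed` rule, which is not defined in this file:

```python
# Hypothetical payload accepted by PostRequest.rules() above.
payload = {
    "email": "[email protected]",            # required|email|unique
    "password": "secret1",               # required, at least 6 chars
    "password_confirmation": "secret1",  # assumed by the `confirmed` rule
    "username": "jane",                  # alpha, at least 3 chars
    "role": 2,                           # integer, positive, in {1, 2}
}
```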

--- record: kmp.py ---
hexsha: 3101d908081b56ffdf8894a573c86a99a80dea01 | size: 1,650 | ext: py | lang: Python
repo: mutux/kmp @ 9fa87296a79716a22344314d1a19f53833df5b41 | licenses: ["MIT"]
stars: 3 (2017-09-23T09:59:23.000Z to 2022-02-25T16:59:05.000Z) | issues: null | forks: 2 (2019-04-24T04:30:39.000Z to 2019-12-03T14:05:33.000Z)

def kmp(P, T):
    # Build the failure table K: for each prefix length t of P, K[t] is the
    # length of the longest proper suffix of P[:t] that is also a prefix of P,
    # i.e. the position in P to resume matching from after a mismatch at t.
    K = []
t = -1 # K's length is len(P) + 1, the first element is set to be -1, corresponding to no elements in P.
K.append(t) # Add the first element, keep t = -1.
for k in range(1, len(P) + 1):
# traverse all the elemtn in P, calculate the corresponding value for each element.
while(t >= 0 and P[t] != P[k - 1]): # if t=-1, then let t = 0, if t>=0 and current suffix doesn't match, then try a shorter suffix
t = K[t]
t = t + 1 # If it matches, then the matching position should be one character ahead.
K.append(t) # record the matching postion for k
print(K)
# Match the String T with P
m = 0 # Record the current matching position in P when compared with T
for i in range(0, len(T)): # traverse T one-by-one
while (m >= 0 and P[m] != T[i]): # if mismatch happens at position m, move P forward with K[m] characters and restart comparison
m = K[m]
m = m + 1 # if position m matches, move P forward to next position
if m == len(P): # if m is already the end of K (or P), the a fully match is found. Continue comparison by move P forward K[m] characters
print (i - m + 1, i)
m = K[m]
if __name__ == "__main__":
kmp('abcbabca', 'abcbabcabcbabcbabcbabcabcbabcbabca')
kmp('abab', 'ababcabababc')
stats: avg_line_length 55 | max_line_length 145 | alphanum_fraction 0.621212 | classes 0/0 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 1,152/0.698182 (count/score)
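A worked check of the failure table built by `kmp` for the first demo pattern above:

```python
# For P = "abcbabca" the loop produces
#   K = [-1, 0, 0, 0, 0, 1, 2, 3, 1]
# e.g. K[8] = 1 because "a" is the longest proper prefix of "abcbabca"
# that is also one of its suffixes.
```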

--- record: weeklypedia/publish.py ---
hexsha: 310233a1e9f02803dc17a9f40655a8b55df02a4a | size: 1,657 | ext: py | lang: Python
repo: Nintendofan885/weeklypedia @ 512be3814a693d7ba3044bda7965e7a5d3d137fd | licenses: ["Unlicense"]
stars: null | issues: null | forks: null

# -*- coding: utf-8 -*-
import os
import json
from os.path import dirname
from argparse import ArgumentParser
from clastic.render import AshesRenderFactory
from common import DEBUG, DEBUG_LIST_ID, SENDKEY
from web import (comma_int,
ISSUE_TEMPLATES_PATH)
from bake import (Issue,
bake_latest_issue,
render_index,
SUPPORTED_LANGS)
_CUR_PATH = dirname(os.path.abspath(__file__))
LIST_ID_MAP = json.load(open(os.path.join(_CUR_PATH, 'secrets.json'))).get('list_ids')
def send_issue(lang, is_dev=False):
if is_dev:
list_id = DEBUG_LIST_ID
else:
list_id = LIST_ID_MAP[lang]
cur_issue = Issue(lang, include_dev=is_dev)
return cur_issue.send(list_id, SENDKEY)
def get_argparser():
desc = 'Bake and send Weeklypedia issues. (Please fetch first)'
prs = ArgumentParser(description=desc)
prs.add_argument('--lang', default=None)
prs.add_argument('--bake_all', default=False, action='store_true')
prs.add_argument('--debug', default=DEBUG, action='store_true')
return prs
if __name__ == '__main__':
issue_ashes_env = AshesRenderFactory(ISSUE_TEMPLATES_PATH,
filters={'ci': comma_int}).env
parser = get_argparser()
args = parser.parse_args()
debug = args.debug
if args.bake_all:
for lang in SUPPORTED_LANGS:
bake_latest_issue(issue_ashes_env, lang=lang, include_dev=debug)
if args.lang in SUPPORTED_LANGS:
lang = args.lang
        print(bake_latest_issue(issue_ashes_env, lang=lang, include_dev=debug))
        print(send_issue(lang, debug))
stats: avg_line_length 31.264151 | max_line_length 86 | alphanum_fraction 0.674713 | classes 0/0 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 170/0.102595 (count/score)
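Hypothetical invocations, given `get_argparser()` above (run from the repo root; the exact entry point may differ):

```
python weeklypedia/publish.py --bake_all         # bake the latest issue for every supported language
python weeklypedia/publish.py --lang en --debug  # bake and send the English issue to the debug list
```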

--- record: scripts/words_gen.py ---
hexsha: 310343fbd38310545ea02277eee138c31da84d40 | size: 292 | ext: py | lang: Python
repo: andybui01/Bloom @ 20cc1bbb03f84c6f96a191f92e596013c9ac2da9 | licenses: ["MIT"]
stars: null | issues: null | forks: null

import random
import sys
# usage: python3 words_gen.py > list.txt
N = int(sys.argv[1]) # how many words should be in the resulting list
with open("scripts/words.txt", "r") as f:
words = f.readlines()
for i in range(N):
        print(words[random.randint(0, len(words) - 1)].rstrip())  # pick uniformly from the loaded word list
stats: avg_line_length 22.461538 | max_line_length 69 | alphanum_fraction 0.657534 | classes 0/0 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 110/0.376712 (count/score)

--- record: testing/test_input.py ---
hexsha: 3103a85a9b3f81384b162df662d084203f7e48e2 | size: 3,037 | ext: py | lang: Python
repo: arunumd/Rover @ 622ed4775827514a5add787ffb25b338e8faacf3 | licenses: ["MIT"]
stars: 1 (2020-11-22T12:01:25.000Z to 2020-11-22T12:01:25.000Z) | issues: null | forks: 2 (2020-05-16T13:41:35.000Z to 2021-12-30T10:28:04.000Z)

import unittest
from modules.Input import *
class CommandInputTest(unittest.TestCase):
def setUp(self):
self.field_a_locations = ["gOtOFieldAroW8",
" go to fieLDA RoW 18 ",
"gOTOfield A rOW88"]
self.field_b_locations = ["gOtOFieldBroW8",
" go to fieLDB RoW 18 ",
"gOTOfield B rOW88"]
self.charger_locations = ["gotocharGeR",
" goTo cHaRgeR ",
" go toCHARGER "]
self.planting_locations = ["plantPotaToes inFieLDA rOW7",
" plANt caRRottsin fieldBRow 88"]
def tearDown(self):
self.field_a_locations = []
self.field_b_locations = []
self.charger_locations = []
self.planting_locations = []
def test_field_a_rows(self):
self.assertEqual(user_input(self.field_a_locations[0])[0], "FAR8")
self.assertEqual(user_input(self.field_a_locations[0])[1], "N/A")
self.assertEqual(user_input(self.field_a_locations[1])[0], "FAR18")
self.assertEqual(user_input(self.field_a_locations[1])[1], "N/A")
self.assertEqual(user_input(self.field_a_locations[2])[0], "FAR88")
self.assertEqual(user_input(self.field_a_locations[2])[1], "N/A")
def test_field_b_rows(self):
self.assertEqual(user_input(self.field_b_locations[0])[0], "FBR8")
self.assertEqual(user_input(self.field_b_locations[0])[1], "N/A")
self.assertEqual(user_input(self.field_b_locations[1])[0], "FBR18")
self.assertEqual(user_input(self.field_b_locations[1])[1], "N/A")
self.assertEqual(user_input(self.field_b_locations[2])[0], "FBR88")
self.assertEqual(user_input(self.field_b_locations[2])[1], "N/A")
def test_charger(self):
self.assertEqual(user_input(self.charger_locations[0])[0], "Charger")
self.assertEqual(user_input(self.charger_locations[0])[1], "N/A")
self.assertEqual(user_input(self.charger_locations[1])[0], "Charger")
self.assertEqual(user_input(self.charger_locations[1])[1], "N/A")
self.assertEqual(user_input(self.charger_locations[2])[0], "Charger")
self.assertEqual(user_input(self.charger_locations[2])[1], "N/A")
def test_bad_input(self):
self.assertEqual(user_input("")[0], "Invalid task")
self.assertEqual(user_input("")[1], "N/A")
self.assertEqual(user_input("fg73f37g")[0], "Invalid task")
self.assertEqual(user_input("fg73f37g")[1], "N/A")
def test_planting_instructions(self):
self.assertEqual(user_input(self.planting_locations[0])[0], "FAR7")
self.assertEqual(user_input(self.planting_locations[0])[1], "PotaToes")
self.assertEqual(user_input(self.planting_locations[1])[0], "FBR88")
self.assertEqual(user_input(self.planting_locations[1])[1], "caRRotts")
if __name__ == '__main__':
unittest.main()
stats: avg_line_length 46.723077 | max_line_length 79 | alphanum_fraction 0.622654 | classes 2,940/0.968061 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 450/0.148173 (count/score)

--- record: rmgpy/kinetics/chebyshevTest.py ---
hexsha: 31042f7232eebc872883e4144e0716776054ead0 | size: 14,601 | ext: py | lang: Python
repo: pm15ma/RMG-Py @ ca2f663c711ec45012afc911138716aaf0049296 | licenses: ["MIT"]
stars: 1 (2020-10-14T12:01:47.000Z to 2020-10-14T12:01:47.000Z) | issues: null | forks: null

#!/usr/bin/env python3
###############################################################################
# #
# RMG - Reaction Mechanism Generator #
# #
# Copyright (c) 2002-2020 Prof. William H. Green ([email protected]), #
# Prof. Richard H. West ([email protected]) and the RMG Team ([email protected]) #
# #
# Permission is hereby granted, free of charge, to any person obtaining a #
# copy of this software and associated documentation files (the 'Software'), #
# to deal in the Software without restriction, including without limitation #
# the rights to use, copy, modify, merge, publish, distribute, sublicense, #
# and/or sell copies of the Software, and to permit persons to whom the #
# Software is furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in #
# all copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING #
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER #
# DEALINGS IN THE SOFTWARE. #
# #
###############################################################################
"""
This script contains unit tests of the :mod:`rmgpy.kinetics.chebyshev` module.
"""
import unittest
import numpy as np
from rmgpy.exceptions import KineticsError
from rmgpy.kinetics.chebyshev import Chebyshev
################################################################################
class TestChebyshev(unittest.TestCase):
"""
Contains unit tests of the Chebyshev class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.Tmin = 300.
self.Tmax = 2000.
self.Pmin = 0.01
self.Pmax = 100.
self.coeffs = np.array([
[11.67723, 0.729281, -0.11984, 0.00882175],
[-1.02669, 0.853639, -0.0323485, -0.027367],
[-0.447011, 0.244144, 0.0559122, -0.0101723],
[-0.128261, 0.0111596, 0.0281176, 0.00604353],
[-0.0117034, -0.0235646, 0.00061009, 0.00401309],
[0.0155433, -0.0136846, -0.00463048, -0.000261353],
])
self.comment = """acetyl + O2 -> acetylperoxy"""
self.chebyshev = Chebyshev(
coeffs=self.coeffs,
kunits="cm^3/(mol*s)",
Tmin=(self.Tmin, "K"),
Tmax=(self.Tmax, "K"),
Pmin=(self.Pmin, "bar"),
Pmax=(self.Pmax, "bar"),
comment=self.comment,
)
def test_coeffs(self):
"""
Test that the Chebyshev coeffs property was properly set.
"""
self.assertEqual(self.chebyshev.coeffs.value.shape, self.coeffs.shape)
for i in range(self.chebyshev.coeffs.value.shape[0]):
for j in range(self.chebyshev.coeffs.value.shape[1]):
C0 = float(self.coeffs[i, j])
C = float(self.chebyshev.coeffs.value_si[i, j])
if i == 0 and j == 0: C0 -= 6 # Unit conversion from cm^3/(mol*s) to m^3/(mol*s)
self.assertAlmostEqual(C0, C, delta=1e-6 * C0)
def test_temperature_min(self):
"""
Test that the Chebyshev Tmin property was properly set.
"""
self.assertAlmostEqual(self.chebyshev.Tmin.value_si, self.Tmin, 6)
def test_temperature_max(self):
"""
Test that the Chebyshev Tmax property was properly set.
"""
self.assertAlmostEqual(self.chebyshev.Tmax.value_si, self.Tmax, 6)
def test_pressure_min(self):
"""
Test that the Chebyshev Pmin property was properly set.
"""
self.assertAlmostEqual(self.chebyshev.Pmin.value_si * 1e-5, self.Pmin, 6)
def test_pressure_max(self):
"""
Test that the Chebyshev Pmax property was properly set.
"""
self.assertAlmostEqual(self.chebyshev.Pmax.value_si * 1e-5, self.Pmax, 6)
def test_comment(self):
"""
Test that the Chebyshev comment property was properly set.
"""
self.assertEqual(self.chebyshev.comment, self.comment)
def test_is_pressure_dependent(self):
"""
Test the Chebyshev.is_pressure_dependent() method.
"""
self.assertTrue(self.chebyshev.is_pressure_dependent())
def test_get_rate_coefficient(self):
"""
Test the Chebyshev.get_rate_coefficient() method.
"""
Tlist = np.array([300, 500, 1000, 1500])
Plist = np.array([1e4, 1e5, 1e6])
Kexp = np.array([
[2.29100e+06, 2.58452e+06, 2.57204e+06],
[1.10198e+06, 2.04037e+06, 2.57428e+06],
[4.37919e+04, 2.36481e+05, 8.57727e+05],
[5.20144e+03, 4.10123e+04, 2.50401e+05],
])
for t in range(Tlist.shape[0]):
for p in range(Plist.shape[0]):
Kact = self.chebyshev.get_rate_coefficient(Tlist[t], Plist[p])
self.assertAlmostEqual(Kact / Kexp[t, p], 1.0, 4, '{0} != {1} within 4 places'.format(Kexp[t, p], Kact))
def test_fit_to_data(self):
"""
Test the Chebyshev.fit_to_data() method.
"""
Tdata = np.array(
[300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200, 1300, 1400, 1500, 1600, 1700, 1800, 1900, 2000])
Pdata = np.array([3e3, 1e4, 3e4, 1e5, 3e5, 1e6, 3e7])
nT = len(Tdata)
nP = len(Pdata)
kdata = np.zeros((nT, nP))
for t in range(nT):
for p in range(nP):
kdata[t, p] = self.chebyshev.get_rate_coefficient(Tdata[t], Pdata[p]) * 1e6
chebyshev = Chebyshev().fit_to_data(Tdata, Pdata, kdata, kunits="cm^3/(mol*s)", degreeT=6, degreeP=4, Tmin=300,
Tmax=2000, Pmin=0.1, Pmax=10.)
for t in range(nT):
for p in range(nP):
kfit = chebyshev.get_rate_coefficient(Tdata[t], Pdata[p]) * 1e6
self.assertAlmostEqual(kfit, kdata[t, p], delta=1e-4 * kdata[t, p])
def test_fit_to_data2(self):
"""
Test the Chebyshev.fit_to_data() method throws error without enough degrees of freedom.
Here only 3 temperatures are given, but the polynomial desired has 6 parameters.
"""
Tdata = np.array([300, 1200, 2000])
Pdata = np.array([1e5, 3e5, 1e6, 3e7])
nT = len(Tdata)
nP = len(Pdata)
kdata = np.zeros((nT, nP))
for t in range(nT):
for p in range(nP):
kdata[t, p] = self.chebyshev.get_rate_coefficient(Tdata[t], Pdata[p])
with self.assertRaises(KineticsError):
Chebyshev().fit_to_data(Tdata, Pdata, kdata, kunits="cm^3/(mol*s)", degreeT=12, degreeP=8, Tmin=300,
Tmax=2000, Pmin=0.1, Pmax=10.)
def test_pickle(self):
"""
Test that a Chebyshev object can be pickled and unpickled with no loss
of information.
"""
import pickle
chebyshev = pickle.loads(pickle.dumps(self.chebyshev, -1))
self.assertEqual(self.chebyshev.coeffs.value.shape[0], chebyshev.coeffs.value.shape[0])
self.assertEqual(self.chebyshev.coeffs.value.shape[1], chebyshev.coeffs.value.shape[1])
for i in range(self.chebyshev.coeffs.value.shape[0]):
for j in range(self.chebyshev.coeffs.value.shape[1]):
C0 = self.chebyshev.coeffs.value_si[i, j]
C = chebyshev.coeffs.value_si[i, j]
self.assertAlmostEqual(C0, C, delta=1e-4 * C0)
self.assertAlmostEqual(self.chebyshev.Tmin.value, chebyshev.Tmin.value, 4)
self.assertEqual(self.chebyshev.Tmin.units, chebyshev.Tmin.units)
self.assertAlmostEqual(self.chebyshev.Tmax.value, chebyshev.Tmax.value, 4)
self.assertEqual(self.chebyshev.Tmax.units, chebyshev.Tmax.units)
self.assertAlmostEqual(self.chebyshev.Pmin.value, chebyshev.Pmin.value, 4)
self.assertEqual(self.chebyshev.Pmin.units, chebyshev.Pmin.units)
self.assertAlmostEqual(self.chebyshev.Pmax.value, chebyshev.Pmax.value, 4)
self.assertEqual(self.chebyshev.Pmax.units, chebyshev.Pmax.units)
self.assertEqual(self.chebyshev.comment, chebyshev.comment)
def test_repr(self):
"""
Test that a Chebyshev object can be reconstructed from its repr()
output with no loss of information.
"""
namespace = {}
exec('chebyshev = {0!r}'.format(self.chebyshev), globals(), namespace)
self.assertIn('chebyshev', namespace)
chebyshev = namespace['chebyshev']
self.assertEqual(self.chebyshev.coeffs.value.shape[0], chebyshev.coeffs.value.shape[0])
self.assertEqual(self.chebyshev.coeffs.value.shape[1], chebyshev.coeffs.value.shape[1])
for i in range(self.chebyshev.coeffs.value.shape[0]):
for j in range(self.chebyshev.coeffs.value.shape[1]):
C0 = self.chebyshev.coeffs.value[i, j]
C = chebyshev.coeffs.value[i, j]
self.assertAlmostEqual(C0, C, delta=1e-4 * C0)
self.assertAlmostEqual(self.chebyshev.Tmin.value, chebyshev.Tmin.value, 4)
self.assertEqual(self.chebyshev.Tmin.units, chebyshev.Tmin.units)
self.assertAlmostEqual(self.chebyshev.Tmax.value, chebyshev.Tmax.value, 4)
self.assertEqual(self.chebyshev.Tmax.units, chebyshev.Tmax.units)
self.assertAlmostEqual(self.chebyshev.Pmin.value, chebyshev.Pmin.value, 4)
self.assertEqual(self.chebyshev.Pmin.units, chebyshev.Pmin.units)
self.assertAlmostEqual(self.chebyshev.Pmax.value, chebyshev.Pmax.value, 4)
self.assertEqual(self.chebyshev.Pmax.units, chebyshev.Pmax.units)
self.assertEqual(self.chebyshev.comment, chebyshev.comment)
def test_change_rate(self):
"""
Test the Chebyshev.change_rate() method.
"""
Tlist = np.array([300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200, 1300, 1400, 1500])
k0list = np.array([self.chebyshev.get_rate_coefficient(T, 1e5) for T in Tlist])
self.chebyshev.change_rate(2)
for T, kexp in zip(Tlist, k0list):
kact = self.chebyshev.get_rate_coefficient(T, 1e5)
self.assertAlmostEqual(2 * kexp, kact, delta=1e-6 * kexp)
def test_is_identical_to(self):
"""
Test the Chebyshev.is_identical_to() method.
"""
# Trivial case, compare to a KineticsModel
from rmgpy.kinetics.model import KineticsModel
self.assertFalse(self.chebyshev.is_identical_to(KineticsModel()))
# Compare to identical Chebyshev
new_chebyshev = Chebyshev(
coeffs=self.coeffs,
kunits="cm^3/(mol*s)",
Tmin=(self.Tmin, "K"),
Tmax=(self.Tmax, "K"),
Pmin=(self.Pmin, "bar"),
Pmax=(self.Pmax, "bar"),
comment=self.comment,
)
self.assertTrue(self.chebyshev.is_identical_to(new_chebyshev))
# Compare to Chebyshev with different Tmin/Tmax
new_chebyshev = Chebyshev(
coeffs=self.coeffs,
kunits="cm^3/(mol*s)",
Tmin=(200, "K"),
Tmax=(self.Tmax, "K"),
Pmin=(self.Pmin, "bar"),
Pmax=(self.Pmax, "bar"),
comment=self.comment,
)
self.assertFalse(self.chebyshev.is_identical_to(new_chebyshev))
new_chebyshev = Chebyshev(
coeffs=self.coeffs,
kunits="cm^3/(mol*s)",
Tmin=(self.Tmin, "K"),
Tmax=(2500, "K"),
Pmin=(self.Pmin, "bar"),
Pmax=(self.Pmax, "bar"),
comment=self.comment,
)
self.assertFalse(self.chebyshev.is_identical_to(new_chebyshev))
# Compare to Chebyshev with different degreeT/degreeP
new_chebyshev = Chebyshev(
coeffs=self.coeffs[0:-1, :], # Remove one T dimension
kunits="cm^3/(mol*s)",
Tmin=(self.Tmin, "K"),
Tmax=(self.Tmax, "K"),
Pmin=(self.Pmin, "bar"),
Pmax=(self.Pmax, "bar"),
comment=self.comment,
)
self.assertFalse(self.chebyshev.is_identical_to(new_chebyshev))
new_chebyshev = Chebyshev(
coeffs=self.coeffs[:, 0:-1], # Remove one P dimension
kunits="cm^3/(mol*s)",
Tmin=(self.Tmin, "K"),
Tmax=(self.Tmax, "K"),
Pmin=(self.Pmin, "bar"),
Pmax=(self.Pmax, "bar"),
comment=self.comment,
)
self.assertFalse(self.chebyshev.is_identical_to(new_chebyshev))
# Compare to Chebyshev with different units
new_chebyshev = Chebyshev(
coeffs=self.coeffs,
kunits="m^3/(mol*s)",
Tmin=(self.Tmin, "K"),
Tmax=(self.Tmax, "K"),
Pmin=(self.Pmin, "bar"),
Pmax=(self.Pmax, "bar"),
comment=self.comment,
)
self.assertFalse(self.chebyshev.is_identical_to(new_chebyshev))
# Compare to Chebyshev with slightly different coefficients
new_chebyshev = Chebyshev(
coeffs=np.copy(self.coeffs) * 0.01,
kunits="cm^3/(mol*s)",
Tmin=(self.Tmin, "K"),
Tmax=(self.Tmax, "K"),
Pmin=(self.Pmin, "bar"),
Pmax=(self.Pmax, "bar"),
comment=self.comment,
)
self.assertFalse(self.chebyshev.is_identical_to(new_chebyshev))
################################################################################
if __name__ == '__main__':
unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
stats: avg_line_length 43.455357 | max_line_length 120 | alphanum_fraction 0.562496 | classes 12,018/0.823094 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 4,500/0.308198 (count/score)
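For reference, the rate form exercised by these tests is the standard Chebyshev expansion: log10 k(T, P) is a double sum of coefficients times Chebyshev polynomials in the inverse-reduced temperature and log-reduced pressure. A self-contained NumPy sketch of that formula (independent of RMG's internal implementation, which remains the reference):

```python
import numpy as np

def chebyshev_rate(T, P, coeffs, Tmin, Tmax, Pmin, Pmax):
    """Evaluate log10 k = sum_ij C_ij * T_i(Tred) * T_j(Pred).

    T in K; P, Pmin, Pmax in the same pressure unit (e.g. bar).
    Sketch only -- Chebyshev.get_rate_coefficient is the reference.
    """
    Tred = (2.0 / T - 1.0 / Tmin - 1.0 / Tmax) / (1.0 / Tmax - 1.0 / Tmin)
    Pred = ((2.0 * np.log10(P) - np.log10(Pmin) - np.log10(Pmax))
            / (np.log10(Pmax) - np.log10(Pmin)))
    cheb = lambda n, x: np.cos(n * np.arccos(x))  # Chebyshev T_n on [-1, 1]
    nT, nP = coeffs.shape
    logk = sum(coeffs[i, j] * cheb(i, Tred) * cheb(j, Pred)
               for i in range(nT) for j in range(nP))
    return 10.0 ** logk
```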

--- record: SceneDistribution_Blender/Source/__init__.py ---
hexsha: 31048c01335723cf99267bce9b58d406c5d4fced | size: 3,230 | ext: py | lang: Python
repo: FilmakademieRnd/v-p-e-t @ d7dd8efb6d4aa03784e1bb4f941d2bcef919f28b | licenses: ["MIT"]
stars: 62 (2016-10-12T17:29:37.000Z to 2022-02-27T01:24:48.000Z) | issues: 75 (2017-01-05T12:02:43.000Z to 2021-04-06T19:07:50.000Z) | forks: 16 (2016-10-12T17:29:42.000Z to 2021-12-01T17:27:33.000Z)

"""
-----------------------------------------------------------------------------
This source file is part of VPET - Virtual Production Editing Tools
http://vpet.research.animationsinstitut.de/
http://github.com/FilmakademieRnd/VPET
Copyright (c) 2021 Filmakademie Baden-Wuerttemberg, Animationsinstitut R&D Lab
This project has been initiated in the scope of the EU funded project
Dreamspace under grant agreement no 610005 in the years 2014, 2015 and 2016.
http://dreamspaceproject.eu/
Post Dreamspace the project has been further developed on behalf of the
research and development activities of Animationsinstitut.
The VPET component Blender Scene Distribution is intended for research and development
purposes only. Commercial use of any kind is not permitted.
There is no support by Filmakademie. Since the Blender Scene Distribution is available
for free, Filmakademie shall only be liable for intent and gross negligence;
warranty is limited to malice. The Blender Scene Distribution may under no circumstances
be used for racist, sexual or any illegal purposes. In all non-commercial
productions, scientific publications, prototypical non-commercial software tools,
etc. using the Blender Scene Distribution Filmakademie has to be named as follows:
“VPET-Virtual Production Editing Tool by Filmakademie Baden-Württemberg,
Animationsinstitut (http://research.animationsinstitut.de)“.
In case a company or individual would like to use the Blender Scene Distribution in
a commercial surrounding or for commercial purposes, software based on these
components or any part thereof, the company/individual will have to contact
Filmakademie (research<at>filmakademie.de).
-----------------------------------------------------------------------------
"""
bl_info = {
"name" : "VPET Blender",
"author" : "Tonio Freitag",
"description" : "",
"blender" : (2, 92, 2),
"version" : (0, 5, 0),
"location" : "VIEW3D",
"warning" : "",
"category" : "Animationsinstitut"
}
from typing import Set
import bpy
from .bl_op import DoDistribute
from .bl_op import StopDistribute
from .bl_op import SetupScene
from .bl_op import InstallZMQ
from .bl_panel import VPET_PT_Panel
from .tools import initialize
from .settings import VpetData
from .settings import VpetProperties
# imported classes to register
classes = (DoDistribute, StopDistribute, SetupScene, VPET_PT_Panel, VpetProperties, InstallZMQ)
## Register classes and VpetSettings
#
def register():
bpy.types.WindowManager.vpet_data = VpetData()
from bpy.utils import register_class
for cls in classes:
try:
register_class(cls)
print(f"Registering {cls.__name__}")
except Exception as e:
print(f"{cls.__name__} "+ str(e))
bpy.types.Scene.vpet_properties = bpy.props.PointerProperty(type=VpetProperties)
initialize()
print("Registered VPET Addon")
## Unregister for removal of Addon
#
def unregister():
del bpy.types.WindowManager.vpet_data
from bpy.utils import unregister_class
for cls in classes:
try:
unregister_class(cls)
except Exception as e:
print(f"{cls.__name__} "+ str(e))
print("Unregistered VPET Addon") | 37.55814 | 96 | 0.71517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,105 | 0.650696 |

--- record: defences/CIFAR10/standard_training.py ---
hexsha: 310520cb4491a27df4f8da00ca88e83d5b43b0ea | size: 3,453 | ext: py | lang: Python
repo: calinbiberea/imperial-individual-project @ 86f224f183b8348d21b4c7a4aed408cd1ca41df1 | licenses: ["MIT"]
stars: null | issues: null | forks: null

# Unlike the other datasets, CIFAR-10 uses ResNet and suffers from
# a variety of problems, including exploding gradients
import torch
import torch.nn as nn
from tqdm.notebook import tnrange, tqdm
# For loading model sanely
import os.path
import sys
# This here actually adds the path
sys.path.append("../../")
import models.resnet as resnet
# Define the `device` PyTorch will be running on, please hope it is CUDA
device = "cuda" if torch.cuda.is_available() else "cpu"
print("Notebook will use PyTorch Device: " + device.upper())
# Helps adjust learning rate for better results
def adjust_learning_rate(optimizer, epoch, learning_rate, long_training):
actual_learning_rate = learning_rate
if long_training:
first_update_threshold = 100
second_update_threshold = 150
else:
first_update_threshold = 20
second_update_threshold = 25
if epoch >= first_update_threshold:
actual_learning_rate = 0.01
if epoch >= second_update_threshold:
actual_learning_rate = 0.001
for param_group in optimizer.param_groups:
param_group["lr"] = actual_learning_rate
# This method creates a new model and also trains it
def standard_training(
trainSetLoader,
long_training=True,
load_if_available=False,
load_path="../models_data/CIFAR10/cifar10_standard"
):
# Number of epochs is decided by training length
if long_training:
epochs = 200
else:
epochs = 30
learning_rate = 0.1
# Network parameters
loss_function = nn.CrossEntropyLoss()
model = resnet.ResNet18()
model = model.to(device)
model = nn.DataParallel(model)
model.train()
# Consider using ADAM here as another gradient descent algorithm
optimizer = torch.optim.SGD(
model.parameters(), lr=learning_rate, momentum=0.9, weight_decay=0.0002
)
# If a trained model already exists, give up the training part
if load_if_available and os.path.isfile(load_path):
print("Found already trained model...")
model = torch.load(load_path)
print("... loaded!")
else:
print("Training the model...")
# Use a pretty progress bar to show updates
for epoch in tnrange(epochs, desc="Training Progress"):
# Print loss results
total_epoch_loss = 0
# Adjust the learning rate
adjust_learning_rate(optimizer, epoch, learning_rate, long_training)
for _, (images, labels) in enumerate(tqdm(trainSetLoader, desc="Batches")):
# Cast to proper tensors
images, labels = images.to(device), labels.to(device)
# Clean the gradients
optimizer.zero_grad()
# Predict
logits = model(images)
# Calculate loss
loss = loss_function(logits, labels)
# Gradient descent
loss.backward()
# Add total accumulated loss
total_epoch_loss += loss.item()
# Also clip the gradients (ReLU leads to vanishing or
# exploding gradients)
torch.nn.utils.clip_grad_norm_(model.parameters(), 10)
optimizer.step()
print("Loss at epoch {} is {}".format(epoch, total_epoch_loss))
print("... done!")
# Make sure the model is in eval mode before returning
model.eval()
    return model
stats: avg_line_length 30.026087 | max_line_length 87 | alphanum_fraction 0.645815 | classes 0/0 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 1,110/0.32146 (count/score)
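A sketch of wiring up a CIFAR-10 loader and calling `standard_training` above; it assumes torchvision is available, and the transform and batch size are illustrative only:

```python
import torchvision
import torchvision.transforms as T

transform = T.Compose([T.ToTensor()])
train_set = torchvision.datasets.CIFAR10(
    root="./data", train=True, download=True, transform=transform)
trainSetLoader = torch.utils.data.DataLoader(
    train_set, batch_size=128, shuffle=True, num_workers=2)
model = standard_training(trainSetLoader, long_training=False)
```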

--- record: src/romt/manifest.py ---
hexsha: 31052f39ded0bd3a3116064ecf6b648965c83185 | size: 4,510 | ext: py | lang: Python
repo: hehaoqian/romt @ 02d5d3f10f22883e277a319e19c199e349b1c8e5 | licenses: ["MIT"]
stars: 29 (2020-05-09T17:06:06.000Z to 2022-03-30T16:18:01.000Z) | issues: 12 (2020-05-23T13:10:51.000Z to 2022-03-27T21:48:18.000Z) | forks: 9 (2020-10-15T17:07:13.000Z to 2022-03-13T18:47:25.000Z)

#!/usr/bin/env python3
# coding=utf-8
import copy
from pathlib import Path
from typing import (
Any,
Generator,
Iterable,
List,
MutableMapping,
Optional,
)
import toml
from romt import error
def target_matches_any(target: str, expected_targets: Iterable[str]) -> bool:
if target == "*":
return True
for expected in expected_targets:
if target == expected or expected == "*":
return True
return False
class Package:
def __init__(
self, name: str, target: str, details: MutableMapping[str, Any]
):
self.name = name
self.target = target
self.available = details["available"]
self.xz_url = details.get("xz_url", "")
@property
def has_rel_path(self) -> bool:
return self.xz_url != ""
@property
def rel_path(self) -> str:
if not self.has_rel_path:
raise ValueError(
"Package {}/{} missing xz_url".format(self.name, self.target)
)
url = self.xz_url
prefix = "/dist/"
return url[url.index(prefix) + len(prefix) :]
class Manifest:
def __init__(self, raw_dict: MutableMapping[str, Any]):
self._dict = raw_dict
@staticmethod
def from_toml_path(toml_path: Path) -> "Manifest":
return Manifest(toml.load(toml_path))
def clone(self) -> "Manifest":
return Manifest(copy.deepcopy(self._dict))
@property
def _rust_src_version(self) -> str:
version = self._dict["pkg"]["rust-src"]["version"]
# Sample version lines found below [pkg.rust-src]:
# version = "1.43.0-beta.5 (934ae7739 2020-04-06)"
# version = "1.44.0-nightly (42abbd887 2020-04-07)"
# version = "1.42.0 (b8cedc004 2020-03-09)"
return version
@property
def channel(self) -> str:
version = self._rust_src_version
if "-beta" in version:
channel = "beta"
elif "-nightly" in version:
channel = "nightly"
else:
channel = "stable"
return channel
@property
def version(self) -> str:
version = self._rust_src_version
# version = "1.44.0-nightly (42abbd887 2020-04-07)"
# version = "1.42.0 (b8cedc004 2020-03-09)"
return version.split("-")[0].split()[0]
@property
def date(self) -> str:
return self._dict["date"]
@property
def spec(self) -> str:
return "{}-{}".format(self.channel, self.date)
@property
def ident(self) -> str:
return "{}({})".format(self.spec, self.version)
def set_package_available(
self, package_name: str, target: str, available: bool = True
) -> None:
details = self._dict["pkg"][package_name]["target"][target]
if available and "xz_url" not in details:
raise error.AbortError(
"package {}/{} set available but missing xz_url".format(
package_name, target
)
)
details["available"] = available
def get_package(self, package_name: str, target: str) -> Package:
details = self._dict["pkg"][package_name]["target"][target]
return Package(package_name, target, details)
def gen_packages(self) -> Generator[Package, None, None]:
"""Generate Package for all (name, target) in manifest."""
for name, package_dict in self._dict["pkg"].items():
for target in package_dict["target"].keys():
yield self.get_package(name, target)
def gen_available_packages(
self, *, targets: Optional[Iterable[str]] = None
) -> Generator[Package, None, None]:
"""gen_packages() for available packages matching targets."""
for package in self.gen_packages():
if package.available:
if targets is None or target_matches_any(
package.target, targets
):
yield package
def available_packages(self) -> List[Package]:
return list(self.gen_available_packages())
def _targets_from_packages(self, packages: Iterable[Package]) -> List[str]:
targets = set(p.target for p in packages)
targets.discard("*")
return sorted(targets)
def all_targets(self) -> List[str]:
return self._targets_from_packages(self.gen_packages())
def available_targets(self) -> List[str]:
return self._targets_from_packages(self.gen_available_packages())
stats: avg_line_length 30.268456 | max_line_length 79 | alphanum_fraction 0.595787 | classes 4,035/0.894678 | generators 723/0.16031 | decorators 1,625/0.36031 | async_functions 0/0 | documentation 727/0.161197 (count/score)
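A sketch of driving `Manifest` directly; the manifest filename is hypothetical, and `pkg.rel_path` is the dist-relative artifact path:

```python
from pathlib import Path

# Hypothetical channel manifest on disk.
m = Manifest.from_toml_path(Path("channel-rust-stable.toml"))
print(m.ident)  # e.g. "stable-2020-05-07(1.43.1)"
for pkg in m.gen_available_packages(targets=["x86_64-unknown-linux-gnu"]):
    if pkg.has_rel_path:
        print(pkg.rel_path)
```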

--- record: examples/unread_sms.py ---
hexsha: 3105dd47d27bbfccfce90631a437c8b92d2e3fa8 | size: 212 | ext: py | lang: Python
repo: MikeRixWolfe/pygooglevoice @ 1da84249e4ee3ecda921e7a20495074ff89a99cd | licenses: ["BSD-3-Clause"]
stars: 1 (2015-08-19T18:22:10.000Z to 2015-08-19T18:22:10.000Z) | issues: null | forks: null

from googlevoice import Voice
voice = Voice()
voice.login()
for message in voice.sms().messages:
#if not message.isRead:
print(message.id, message.phoneNumber, message.messageText)
#message.mark(1)
stats: avg_line_length 21.2 | max_line_length 63 | alphanum_fraction 0.726415 | classes 0/0 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 39/0.183962 (count/score)

--- record: libra/handlers/user.py ---
hexsha: 310702fdf551ae6fbf1e07ce937cc37a28affac0 | size: 233 | ext: py | lang: Python
repo: pitomba/libra @ 8a2e4008f5a6038995ed03ea29d1cbf3dc3c589d | licenses: ["MIT"]
stars: 2 (2015-03-22T09:44:36.000Z to 2015-06-04T06:30:13.000Z) | issues: null | forks: null

# coding: utf-8
from tornado.web import RequestHandler
from libra.handlers.base import authenticated
class UserHandler(RequestHandler):
@authenticated
def post(self, user, **kwargs):
self.write({"msg": "Success"})
stats: avg_line_length 21.181818 | max_line_length 45 | alphanum_fraction 0.716738 | classes 129/0.553648 | generators 0/0 | decorators 89/0.381974 | async_functions 0/0 | documentation 29/0.124464 (count/score)

--- record: plextraktsync/commands/unmatched.py ---
hexsha: 31079504b1f3b5dee16171b9a8d5ac6fd490a1d8 | size: 1,327 | ext: py | lang: Python
repo: RoyXiang/PlexTraktSync @ 3f8d42448a3ba335d547e317dc8ca40c9ef6313e | licenses: ["MIT"]
stars: null | issues: null | forks: null

import click
from plextraktsync.commands.login import ensure_login
from plextraktsync.factory import factory
from plextraktsync.walker import WalkConfig, Walker
def unmatched(no_progress_bar: bool, local: bool):
config = factory.run_config().update(progressbar=not no_progress_bar)
ensure_login()
plex = factory.plex_api()
trakt = factory.trakt_api()
mf = factory.media_factory()
pb = factory.progressbar(config.progressbar)
wc = WalkConfig()
walker = Walker(plex, trakt, mf, wc, progressbar=pb)
if not wc.is_valid():
click.echo("Nothing to scan, this is likely due conflicting options given.")
return
failed = []
if local:
for pm in walker.get_plex_movies():
if pm.guids[0].provider == 'local':
failed.append(pm)
else:
for pm in walker.get_plex_movies():
movie = mf.resolve_any(pm)
if not movie:
failed.append(pm)
for pm in failed:
p = pm.item
url = plex.media_url(pm)
print("=" * 80)
print(f"No match: {pm}")
print(f"URL: {url}")
print(f"Title: {p.title}")
print(f"Year: {p.year}")
print(f"Updated At: {p.updatedAt}")
for l in p.locations:
print(f"Location: {l}")
print("")
stats: avg_line_length 28.847826 | max_line_length 84 | alphanum_fraction 0.602864 | classes 0/0 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 186/0.140166 (count/score)

--- record: montepython/likelihoods/covfefe/__init__.py ---
hexsha: 31084212e6f6b3216d28d48c58d75e7c0f29d92d | size: 1,940 | ext: py | lang: Python
repo: Maquiner/Monte_Python_2CCL @ 900d362def8f0b3607645f911b2fac0f102d6700 | licenses: ["MIT"]
stars: 1 (2018-04-29T06:48:35.000Z to 2018-04-29T06:48:35.000Z) | issues: null | forks: 2 (2019-10-11T09:46:35.000Z to 2019-12-05T14:55:04.000Z)

import os
import numpy as np
from montepython.likelihood_class import Likelihood
import montepython.io_mp as io_mp
import warnings
import ccl_tools as tools
import pyccl as ccl
class covfefe(Likelihood):
# initialization routine
def __init__(self, path, data, command_line):
Likelihood.__init__(self, path, data, command_line)
self.nb = data.cosmo_arguments['n_bins']
self.cm = data.cosmo_arguments['cov']
n_sims = 20000
# Load Covariance matrix
fn = 'cov_{}_{}.npz'.format(self.cm,self.nb)
self.cov = np.load(os.path.join(self.data_directory, fn))['arr_0']
if self.cm=='sim':
factor = (n_sims-self.cov.shape[0]-2.)/(n_sims-1.)
else:
factor = 1.
self.icov = factor*np.linalg.inv(self.cov)
# Load ell bandpowers
self.ell_bp = np.load(os.path.join(self.data_directory, 'ell_bp.npz'))['lsims'].astype(int)
self.nl = len(self.ell_bp)
# Load photo_z
fn = 'z_{}.npz'.format(self.nb)
self.z = np.load(os.path.join(self.data_directory, fn))['arr_0']
fn = 'pz_{}.npz'.format(self.nb)
self.pz = np.load(os.path.join(self.data_directory, fn))['arr_0']
fn = 'bz_{}.npz'.format(self.nb)
self.bz = np.load(os.path.join(self.data_directory, fn))['arr_0']
# Load data
fn = 'data_{}.npz'.format(self.nb)
self.data = np.load(os.path.join(self.data_directory, fn))['arr_0']
# end of initialization
# compute likelihood
def loglkl(self, cosmo, data):
# Get theory Cls
cosmo_ccl = tools.get_cosmo_ccl(cosmo.pars)
tracers = tools.get_tracers_ccl(cosmo_ccl, self.z, self.pz, self.bz)
theory = tools.get_cls_ccl(cosmo_ccl, tracers, self.ell_bp)
# Get chi2
chi2 = (self.data-theory).dot(self.icov).dot(self.data-theory)
lkl = - 0.5 * chi2
return lkl
stats: avg_line_length 29.393939 | max_line_length 99 | alphanum_fraction 0.615464 | classes 1,759/0.906701 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 295/0.152062 (count/score)
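Two details of `loglkl` and `__init__` above, written out: the Gaussian log-likelihood, and the `factor` applied when `cov == 'sim'`, which matches the Hartlap et al. (2007) debiasing of an inverse covariance estimated from a finite number of realizations:

```latex
\chi^2 = (d - t)^{\top}\,\hat{C}^{-1}\,(d - t), \qquad
\ln\mathcal{L} = -\tfrac{1}{2}\,\chi^2, \qquad
\hat{C}^{-1} = \frac{n_{\mathrm{sims}} - n_{\mathrm{bins}} - 2}{n_{\mathrm{sims}} - 1}\; C^{-1}.
```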

--- record: tests/integration/web3/conftest.py ---
hexsha: 3109cfe9dfb7d000bea9fd6f0dead3f4122daff9 | size: 1,156 | ext: py | lang: Python
repo: cducrest/eth-tester-rpc @ f34dcce2b4110010e3b54531a5cd8add4df43beb | licenses: ["MIT"]
stars: 3 (2018-08-09T08:33:30.000Z to 2021-10-06T15:05:57.000Z) | issues: 11 (2018-09-15T18:58:24.000Z to 2020-11-30T17:00:46.000Z) | forks: 3 (2018-09-24T13:47:23.000Z to 2020-11-25T16:39:08.000Z)

from eth_utils import (
to_bytes,
)
from eth_utils.toolz import (
identity,
)
import pytest
from web3._utils.module_testing.emitter_contract import (
CONTRACT_EMITTER_ABI,
CONTRACT_EMITTER_CODE,
)
from web3._utils.module_testing.math_contract import (
MATH_ABI,
MATH_BYTECODE,
)
from web3._utils.module_testing.revert_contract import (
_REVERT_CONTRACT_ABI,
REVERT_CONTRACT_BYTECODE,
)
@pytest.fixture(scope="module", params=[lambda x: to_bytes(hexstr=x), identity])
def address_conversion_func(request):
return request.param
@pytest.fixture(scope="module")
def math_contract_factory(web3):
contract_factory = web3.eth.contract(abi=MATH_ABI, bytecode=MATH_BYTECODE)
return contract_factory
@pytest.fixture(scope="module")
def emitter_contract_factory(web3):
contract_factory = web3.eth.contract(abi=CONTRACT_EMITTER_ABI, bytecode=CONTRACT_EMITTER_CODE)
return contract_factory
@pytest.fixture(scope="module")
def revert_contract_factory(web3):
contract_factory = web3.eth.contract(
abi=_REVERT_CONTRACT_ABI,
bytecode=REVERT_CONTRACT_BYTECODE
)
return contract_factory
stats: avg_line_length 25.130435 | max_line_length 98 | alphanum_fraction 0.772491 | classes 0/0 | generators 0/0 | decorators 726/0.628028 | async_functions 0/0 | documentation 32/0.027682 (count/score)
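Because `address_conversion_func` is parametrized over a hex-string identity and `to_bytes`, any test requesting it runs once per address representation. A hypothetical test body illustrating the intent (the address value and assertion are illustrative):

```python
def test_factory_accepts_both_address_forms(math_contract_factory,
                                            address_conversion_func):
    # Executed twice: once with a hex string, once with raw bytes.
    address = address_conversion_func("0x" + "00" * 20)
    contract = math_contract_factory(address=address)
    assert contract.address is not None
```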

--- record: planning/domains/depth/depthGenerator.py ---
hexsha: 310bacdf46c233952b779b59db8e0cb5aa5c4604 | size: 5,068 | ext: py | lang: Python
repo: xlbandy/fape @ 8a00f9d4c20f722930c11d88b60e0e82f523a439 | licenses: ["BSD-2-Clause"]
stars: 14 (2017-01-09T23:25:12.000Z to 2022-02-16T12:08:48.000Z) | issues: 7 (2018-05-18T08:27:03.000Z to 2022-03-23T06:39:42.000Z) | forks: 8 (2016-12-09T13:31:43.000Z to 2022-02-16T12:08:50.000Z)

from __future__ import division
import itertools
import json
import math
import os
import random
import shutil
import subprocess
import sys
durationA = str(5)
durationB = str(4)
durationC = str(1)
def main():
if len(sys.argv) > 1:
nbDepth = int(sys.argv[1])
        if nbDepth < 2:
            nbDepth = 2
    else:
        nbDepth = 2
mainFolder = "depth"
if not os.path.exists(mainFolder):
subprocess.call(["mkdir", mainFolder])
generateDomain("depth", nbDepth)
#print "Every file has been written. Exiting"
def generateDomain(folderName, nbDepth):
domainFilename = folderName + "/" + folderName + "-flat" + str(nbDepth) + ".dom.anml"
printDomainToFile(domainFilename, nbDepth)
domainFilename = folderName + "/" + folderName + "-hier" + str(nbDepth) + ".dom.anml"
printDomainHierToFile(domainFilename, nbDepth)
def printDomainToFile(domainFilename, nbDepth):
with open(domainFilename, "w") as f:
for i in range(0, nbDepth):
f.write("predicate a" + str(i+1) +"();\n")
f.write("predicate b" + str(i+1) +"();\n")
f.write("predicate c" + str(i+1) +"();\n")
f.write("predicate d" + str(i+1) +"();\n")
f.write("predicate e" + str(i+1) +"();\n")
f.write("\naction An" + str(i+1) + " () {\n")
f.write("\tduration := " + durationA + ";\n")
if i > 0:
f.write("\t[start] {\n")
f.write("\t\tb"+ str(i) +" == true;\n")
f.write("\t\td"+ str(i) +" == true;\n")
f.write("\t\te"+ str(i) +" == true;\n")
f.write("\t};\n")
f.write("\t[start] a" + str(i+1) + " := true;\n")
f.write("\t[end] {\n")
f.write("\t\ta" + str(i+1) + " := false;\n")
f.write("\t\tb" + str(i+1) + " := true;\n")
f.write("\t\td" + str(i+1) + " := false;\n")
f.write("\t};\n")
f.write("};\n")
f.write("\naction Bn" + str(i+1) + " () {\n")
f.write("\tduration := " + durationB + ";\n")
f.write("\t[start] a" + str(i+1) + " == true;\n")
f.write("\t[start] c" + str(i+1) + " := true;\n")
f.write("\t[end] {\n")
f.write("\t\tc" + str(i+1) + " := false;\n")
f.write("\t\td" + str(i+1) + " := true;\n")
f.write("\t};\n")
f.write("};\n")
f.write("\naction Cn" + str(i+1) + " () {\n")
f.write("\tduration := " + durationC + ";\n")
f.write("\t[start] c" + str(i+1) + " == true;\n")
f.write("\t[end] {\n")
f.write("\t\tb" + str(i+1) + " := false;\n")
f.write("\t\te" + str(i+1) + " := true;\n")
f.write("\t};\n")
f.write("};\n")
######################## problem ###############
f.write("\n/*******Problem************/\n")
f.write("[all] contains{\n")
f.write("\tCn" + str(nbDepth) +"();\n")
f.write("};")
def printDomainHierToFile(domainFilename, nbDepth):
with open(domainFilename, "w") as f:
for i in range(0, nbDepth):
if i == 0:
f.write("\naction An" + str(i+1) + " () {\n")
f.write("\tmotivated;\n")
f.write("\tduration := " + durationA + ";\n")
f.write("};\n")
else:
f.write("\naction An" + str(i+1) + " () {\n")
f.write("\tmotivated;\n")
f.write("\tduration := " + durationA + ";\n")
f.write("\ta : ABC" + str(i) + "();\n")
f.write("\t end(a) < start;\n")
f.write("};\n")
f.write("\naction Bn" + str(i+1) + " () {\n")
f.write("\tduration := " + durationB + ";\n")
f.write("\tmotivated;\n")
f.write("};\n")
f.write("\naction Cn" + str(i+1) + " () {\n")
f.write("\tduration := " + durationC + ";\n")
f.write("\tmotivated;\n")
f.write("};\n")
f.write("\naction ABC" + str(i+1) + " () {\n")
f.write("\t[all] contains {\n")
f.write("\t\t b" + str(i+1) + " : An" + str(i+1) + "();\n")
f.write("\t\t d" + str(i+1) + " : Bn" + str(i+1) + "();\n")
f.write("\t\t e" + str(i+1) + " : Cn" + str(i+1) + "();\n")
f.write("\t};\n")
f.write("\tstart(b" + str(i+1) + ") < start(d" + str(i+1) + ");\n")
f.write("\tend(d" + str(i+1) + ") < end(b" + str(i+1) + ");\n")
f.write("\tstart(d" + str(i+1) + ") < start(e" + str(i+1) + ");\n")
f.write("\tend(e" + str(i+1) + ") < end(d" + str(i+1) + ");\n")
f.write("};\n")
#################### problem #############
f.write("\n/*******Problem************/\n")
f.write("[all] contains{\n")
f.write("\tCn" + str(nbDepth) +"();\n")
f.write("};")
if __name__ == "__main__":
main()
stats: avg_line_length 36.992701 | max_line_length 89 | alphanum_fraction 0.414759 | classes 0/0 | generators 0/0 | decorators 0/0 | async_functions 0/0 | documentation 1,568/0.309392 (count/score)
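For orientation, tracing the `f.write` calls in `printDomainToFile` shows the flat domain it emits for i = 0 (sample output, trimmed to the first action):

```
predicate a1();
predicate b1();
predicate c1();
predicate d1();
predicate e1();

action An1 () {
	duration := 5;
	[start] a1 := true;
	[end] {
		a1 := false;
		b1 := true;
		d1 := false;
	};
};
```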

--- record: idc/settings.py ---
hexsha: 310c2ef28341d61c8889cc1642f625b5908d2dd0 | size: 24,120 | ext: py | lang: Python
repo: fedorov/IDC-WebApp @ a2e2ef492d6d227c1b617e0da2a77a710e5415a6 | licenses: ["Apache-2.0", "BSD-3-Clause"]
stars: null | issues: null | forks: null

###
# Copyright 2015-2020, Institute for Systems Biology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from __future__ import print_function
from builtins import str
from builtins import object
import os
import re
import datetime
from os.path import join, dirname, exists
import sys
import dotenv
from socket import gethostname, gethostbyname
SECURE_LOCAL_PATH = os.environ.get('SECURE_LOCAL_PATH', '')
if not exists(join(dirname(__file__), '../{}.env'.format(SECURE_LOCAL_PATH))):
print("[ERROR] Couldn't open .env file expected at {}!".format(
join(dirname(__file__), '../{}.env'.format(SECURE_LOCAL_PATH)))
)
print("[ERROR] Exiting settings.py load - check your Pycharm settings and secure_path.env file.")
exit(1)
dotenv.read_dotenv(join(dirname(__file__), '../{}.env'.format(SECURE_LOCAL_PATH)))
APP_ENGINE_FLEX = 'aef-'
APP_ENGINE = 'Google App Engine/'
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep
SHARED_SOURCE_DIRECTORIES = [
'IDC-Common'
]
# Add the shared Django application subdirectory to the Python module search path
for directory_name in SHARED_SOURCE_DIRECTORIES:
sys.path.append(os.path.join(BASE_DIR, directory_name))
DEBUG = (os.environ.get('DEBUG', 'False') == 'True')
CONNECTION_IS_LOCAL = (os.environ.get('DATABASE_HOST', '127.0.0.1') == 'localhost')
IS_CIRCLE = (os.environ.get('CI', None) is not None)
DEBUG_TOOLBAR = ((os.environ.get('DEBUG_TOOLBAR', 'False') == 'True') and CONNECTION_IS_LOCAL)
IMG_QUOTA = os.environ.get('IMG_QUOTA', '137')
print("[STATUS] DEBUG mode is {}".format(str(DEBUG)), file=sys.stdout)
RESTRICT_ACCESS = (os.environ.get('RESTRICT_ACCESS', 'True') == 'True')
RESTRICTED_ACCESS_GROUPS = os.environ.get('RESTRICTED_ACCESS_GROUPS', '').split(',')
if RESTRICT_ACCESS:
print("[STATUS] Access to the site is restricted to members of the {} group(s).".format(", ".join(RESTRICTED_ACCESS_GROUPS)), file=sys.stdout)
else:
print("[STATUS] Access to the site is NOT restricted!", file=sys.stdout)
# Theoretically Nginx allows us to use '*' for ALLOWED_HOSTS but...
ALLOWED_HOSTS = list(set(os.environ.get('ALLOWED_HOST', 'localhost').split(',') + ['localhost', '127.0.0.1', '[::1]', gethostname(), gethostbyname(gethostname()),]))
#ALLOWED_HOSTS = ['*']
SSL_DIR = os.path.abspath(os.path.dirname(__file__))+os.sep
ADMINS = ()
MANAGERS = ADMINS
GCLOUD_PROJECT_ID = os.environ.get('GCLOUD_PROJECT_ID', '')
GCLOUD_PROJECT_NUMBER = os.environ.get('GCLOUD_PROJECT_NUMBER', '')
BIGQUERY_PROJECT_ID = os.environ.get('BIGQUERY_PROJECT_ID', GCLOUD_PROJECT_ID)
BIGQUERY_DATA_PROJECT_ID = os.environ.get('BIGQUERY_DATA_PROJECT_ID', GCLOUD_PROJECT_ID)
# Deployment module
CRON_MODULE = os.environ.get('CRON_MODULE')
# Log Names
WEBAPP_LOGIN_LOG_NAME = os.environ.get('WEBAPP_LOGIN_LOG_NAME', 'local_dev_logging')
BASE_URL = os.environ.get('BASE_URL', 'https://idc-dev.appspot.com')
BASE_API_URL = os.environ.get('BASE_API_URL', 'https://api-dot-idc-dev.appspot.com')
API_HOST = os.environ.get('API_HOST', 'api-dot-idc-dev.appspot.com')
# Compute services - Should not be necessary in webapp
PAIRWISE_SERVICE_URL = os.environ.get('PAIRWISE_SERVICE_URL', None)
# Data Buckets
GCLOUD_BUCKET = os.environ.get('GOOGLE_STORAGE_BUCKET')
# BigQuery cohort storage settings
BIGQUERY_COHORT_DATASET_ID = os.environ.get('BIGQUERY_COHORT_DATASET_ID', 'cohort_dataset')
BIGQUERY_COHORT_TABLE_ID = os.environ.get('BIGQUERY_COHORT_TABLE_ID', 'developer_cohorts')
BIGQUERY_IDC_TABLE_ID = os.environ.get('BIGQUERY_IDC_TABLE_ID', '')
MAX_BQ_INSERT = int(os.environ.get('MAX_BQ_INSERT', '500'))
USER_DATA_ON = bool(os.environ.get('USER_DATA_ON', 'False') == 'True')
database_config = {
'default': {
'ENGINE': os.environ.get('DATABASE_ENGINE', 'django.db.backends.mysql'),
'HOST': os.environ.get('DATABASE_HOST', '127.0.0.1'),
'NAME': os.environ.get('DATABASE_NAME', 'dev'),
'USER': os.environ.get('DATABASE_USER', 'django-user'),
'PASSWORD': os.environ.get('DATABASE_PASSWORD')
}
}
# On the build system, we need to use build-system specific database information
if os.environ.get('CI', None) is not None:
database_config = {
'default': {
'ENGINE': os.environ.get('DATABASE_ENGINE', 'django.db.backends.mysql'),
'HOST': os.environ.get('DATABASE_HOST_BUILD', '127.0.0.1'),
'NAME': os.environ.get('DATABASE_NAME_BUILD', ''),
'PORT': 3306,
'USER': os.environ.get('DATABASE_USER_BUILD'),
'PASSWORD': os.environ.get('MYSQL_ROOT_PASSWORD_BUILD')
}
}
DATABASES = database_config
DB_SOCKET = database_config['default']['HOST'] if 'cloudsql' in database_config['default']['HOST'] else None
IS_DEV = (os.environ.get('IS_DEV', 'False') == 'True')
IS_APP_ENGINE_FLEX = os.getenv('GAE_INSTANCE', '').startswith(APP_ENGINE_FLEX)
IS_APP_ENGINE = os.getenv('SERVER_SOFTWARE', '').startswith(APP_ENGINE)
VERSION = "{}.{}".format("local-dev", datetime.datetime.now().strftime('%Y%m%d%H%M'))
if exists(join(dirname(__file__), '../version.env')):
dotenv.read_dotenv(join(dirname(__file__), '../version.env'))
else:
if IS_DEV:
import git
repo = git.Repo(path="/home/vagrant/www/",search_parent_directories=True)
VERSION = "{}.{}.{}".format("local-dev", datetime.datetime.now().strftime('%Y%m%d%H%M'),
str(repo.head.object.hexsha)[-6:])
APP_VERSION = os.environ.get("APP_VERSION", VERSION)
DEV_TIER = bool(DEBUG or re.search(r'^dev\.',APP_VERSION))
# If this is a GAE-Flex deployment, we don't need to specify SSL; the proxy will take
# care of that for us
if 'DB_SSL_CERT' in os.environ and not IS_APP_ENGINE_FLEX:
DATABASES['default']['OPTIONS'] = {
'ssl': {
'ca': os.environ.get('DB_SSL_CA'),
'cert': os.environ.get('DB_SSL_CERT'),
'key': os.environ.get('DB_SSL_KEY')
}
}
# Default to localhost for the site ID
SITE_ID = 2
if IS_APP_ENGINE_FLEX or IS_APP_ENGINE:
print("[STATUS] AppEngine Flex detected.", file=sys.stdout)
SITE_ID = 3
def get_project_identifier():
return BIGQUERY_PROJECT_ID
# Set cohort table here
if BIGQUERY_COHORT_TABLE_ID is None:
raise Exception("Developer-specific cohort table ID is not set.")
BQ_MAX_ATTEMPTS = int(os.environ.get('BQ_MAX_ATTEMPTS', '10'))
API_USER = os.environ.get('API_USER', 'api_user')
API_AUTH_KEY = os.environ.get('API_AUTH_KEY', 'Token')
# TODO Remove duplicate class.
#
# This class is retained here, as it is required by bq_data_access/v1.
# bq_data_access/v2 uses the class from the bq_data_access/bigquery_cohorts module.
class BigQueryCohortStorageSettings(object):
def __init__(self, dataset_id, table_id):
self.dataset_id = dataset_id
self.table_id = table_id
def GET_BQ_COHORT_SETTINGS():
return BigQueryCohortStorageSettings(BIGQUERY_COHORT_DATASET_ID, BIGQUERY_COHORT_TABLE_ID)
USE_CLOUD_STORAGE = bool(os.environ.get('USE_CLOUD_STORAGE', 'False') == 'True')
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_SECURE = bool(os.environ.get('CSRF_COOKIE_SECURE', 'True') == 'True')
SESSION_COOKIE_SECURE = bool(os.environ.get('SESSION_COOKIE_SECURE', 'True') == 'True')
SECURE_SSL_REDIRECT = bool(os.environ.get('SECURE_SSL_REDIRECT', 'True') == 'True')
SECURE_REDIRECT_EXEMPT = []
if SECURE_SSL_REDIRECT:
# Exempt the health check so it can go through
SECURE_REDIRECT_EXEMPT = [r'^_ah/(vm_)?health$', ]
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_FOLDER = os.environ.get('MEDIA_FOLDER', 'uploads/')
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), '..', '..', MEDIA_FOLDER)
MEDIA_ROOT = os.path.normpath(MEDIA_ROOT)
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = 'static_collex'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = os.environ.get('STATIC_URL', '/static/')
GCS_STORAGE_URI = os.environ.get('GCS_STORAGE_URI', 'https://storage.googleapis.com/')
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(BASE_DIR, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', '')
SECURE_HSTS_INCLUDE_SUBDOMAINS = (os.environ.get('SECURE_HSTS_INCLUDE_SUBDOMAINS','True') == 'True')
SECURE_HSTS_PRELOAD = (os.environ.get('SECURE_HSTS_PRELOAD','True') == 'True')
SECURE_HSTS_SECONDS = int(os.environ.get('SECURE_HSTS_SECONDS','3600'))
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'idc.checkreqsize_middleware.CheckReqSize',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'adminrestrict.middleware.AdminPagesRestrictMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'idc.team_only_middleware.TeamOnly',
    # Simple clickjacking protection:
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'offline.middleware.OfflineMiddleware',
]
ROOT_URLCONF = 'idc.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'idc.wsgi.application'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.admindocs',
'anymail',
'idc',
'data_upload',
'sharing',
'cohorts',
'idc_collections',
'offline',
'adminrestrict'
)
#############################
# django-session-security #
#############################
INSTALLED_APPS += ('session_security',)
SESSION_SECURITY_WARN_AFTER = int(os.environ.get('SESSION_SECURITY_WARN_AFTER','540'))
SESSION_SECURITY_EXPIRE_AFTER = int(os.environ.get('SESSION_SECURITY_EXPIRE_AFTER','600'))
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
MIDDLEWARE.append(
# for django-session-security -- must go *after* AuthenticationMiddleware
'session_security.middleware.SessionSecurityMiddleware',
)
###############################
# End django-session-security #
###############################
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue'
},
},
'formatters': {
'verbose': {
'format': '[%(levelname)s] @%(asctime)s in %(module)s/%(process)d/%(thread)d - %(message)s'
},
'simple': {
'format': '[%(levelname)s] @%(asctime)s in %(module)s: %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console_dev': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'console_prod': {
'level': 'DEBUG',
'filters': ['require_debug_false'],
'class': 'logging.StreamHandler',
'formatter': 'simple',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'main_logger': {
'handlers': ['console_dev', 'console_prod'],
'level': 'DEBUG',
'propagate': True,
},
'allauth': {
'handlers': ['console_dev', 'console_prod'],
'level': 'DEBUG',
'propagate': True,
},
'google_helpers': {
'handlers': ['console_dev', 'console_prod'],
'level': 'DEBUG',
'propagate': True,
},
'data_upload': {
'handlers': ['console_dev', 'console_prod'],
'level': 'DEBUG',
'propagate': True,
},
},
}
# Force allauth to only use https
ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'
# ...but not if this is a local dev build
if IS_DEV:
ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'http'
##########################
# Start django-allauth #
##########################
LOGIN_REDIRECT_URL = '/extended_login/'
INSTALLED_APPS += (
'accounts',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'rest_framework.authtoken'
)
# Template Engine Settings
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# add any necessary template paths here
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
os.path.join(BASE_DIR, 'templates', 'accounts'),
],
'APP_DIRS': True,
'OPTIONS': {
# add any context processors here
'context_processors': (
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.tz',
'finalware.context_processors.contextify',
'idc.context_processor.additional_context',
),
# add any loaders here; if using the defaults, we can comment it out
# 'loaders': (
# 'django.template.loaders.filesystem.Loader',
# 'django.template.loaders.app_directories.Loader'
# ),
'debug': DEBUG,
},
},
]
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
"allauth.account.auth_backends.AuthenticationBackend",
)
SOCIALACCOUNT_PROVIDERS = \
{ 'google':
{ 'SCOPE': ['profile', 'email'],
'AUTH_PARAMS': { 'access_type': 'online' }
}
}
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_USERNAME_REQUIRED = bool(os.environ.get('ACCOUNT_USERNAME_REQUIRED', 'False') == 'True')
ACCOUNT_EMAIL_VERIFICATION = os.environ.get('ACCOUNT_EMAIL_VERIFICATION', 'mandatory').lower()
ACCOUNT_EMAIL_SUBJECT_PREFIX = "[Imaging Data Commons] "
ACCOUNTS_PASSWORD_EXPIRATION = os.environ.get('ACCOUNTS_PASSWORD_EXPIRATION',120) # Max password age in days
ACCOUNTS_PASSWORD_HISTORY = os.environ.get('ACCOUNTS_PASSWORD_HISTORY', 5) # Max password history kept
ACCOUNTS_ALLOWANCES = list(set(os.environ.get('ACCOUNTS_ALLOWANCES','').split(',')))
##########################
# End django-allauth #
##########################
##########################
# Django local auth #
##########################
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS': {
'min_length': 16,
}
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'idc.validators.PasswordComplexityValidator',
'OPTIONS': {
'min_length': 16,
'special_char_list': '!@#$%^&*+:;?'
}
},
{
'NAME': 'idc.validators.PasswordReuseValidator'
}
]
#########################################
# MailGun Email Settings for requests #
#########################################
#
# These settings allow use of MailGun as a simple API call
EMAIL_SERVICE_API_URL = os.environ.get('EMAIL_SERVICE_API_URL', '')
EMAIL_SERVICE_API_KEY = os.environ.get('EMAIL_SERVICE_API_KEY', '')
NOTIFICATION_EMAIL_FROM_ADDRESS = os.environ.get('NOTIFICATION_EMAIL_FROM_ADDRESS', '[email protected]')
#########################
# django-anymail #
#########################
#
# Anymail lets us use the Django mail system with mailgun (eg. in local account email verification)
ANYMAIL = {
"MAILGUN_API_KEY": EMAIL_SERVICE_API_KEY,
"MAILGUN_SENDER_DOMAIN": 'mg.canceridc.dev', # your Mailgun domain, if needed
}
EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend"
DEFAULT_FROM_EMAIL = NOTIFICATION_EMAIL_FROM_ADDRESS
SERVER_EMAIL = "[email protected]"
GOOGLE_APPLICATION_CREDENTIALS = join(dirname(__file__), '../{}{}'.format(SECURE_LOCAL_PATH,os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', '')))
OAUTH2_CLIENT_ID = os.environ.get('OAUTH2_CLIENT_ID', '')
OAUTH2_CLIENT_SECRET = os.environ.get('OAUTH2_CLIENT_SECRET', '')
if not exists(GOOGLE_APPLICATION_CREDENTIALS):
print("[ERROR] Google application credentials file wasn't found! Provided path: {}".format(GOOGLE_APPLICATION_CREDENTIALS))
exit(1)
#################################
# For NIH/eRA Commons login #
#################################
GOOGLE_GROUP_ADMIN = os.environ.get('GOOGLE_GROUP_ADMIN', '')
SUPERADMIN_FOR_REPORTS = os.environ.get('SUPERADMIN_FOR_REPORTS', '')
##############################
# Start django-finalware #
##############################
#
# This should only be done on a local system which is running against its own VM, or during CircleCI testing.
# Deployed systems will already have a site superuser so this would simply overwrite that user.
# NEVER ENABLE this in production!
#
if (IS_DEV and CONNECTION_IS_LOCAL) or IS_CIRCLE:
INSTALLED_APPS += (
'finalware',)
SITE_SUPERUSER_USERNAME = os.environ.get('SUPERUSER_USERNAME', '')
SITE_SUPERUSER_EMAIL = ''
SITE_SUPERUSER_PASSWORD = os.environ.get('SUPERUSER_PASSWORD')
#
############################
# End django-finalware #
############################
CONN_MAX_AGE = 60
############################
# CUSTOM TEMPLATE CONTEXT
############################
############################
# METRICS SETTINGS
############################
SITE_GOOGLE_ANALYTICS = bool(os.environ.get('SITE_GOOGLE_ANALYTICS_TRACKING_ID', None) is not None)
SITE_GOOGLE_ANALYTICS_TRACKING_ID = os.environ.get('SITE_GOOGLE_ANALYTICS_TRACKING_ID', '')
##############################################################
# MAXes to prevent size-limited events from causing errors
##############################################################
# Google App Engine has a response size limit of 32M. ~65k entries from the cohort_filelist view will
# equal just under the 32M limit. If each individual listing is ever lengthened or shortened this
# number should be adjusted
MAX_FILE_LIST_REQUEST = 65000
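# (illustrative sanity check, assuming a 32 MiB cap: 32 * 1024 * 1024 / 65000 ≈ 516 bytes per listing)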
MAX_BQ_RECORD_RESULT = int(os.environ.get('MAX_BQ_RECORD_RESULT', '5000'))
# Rough max file size to allow for eg. barcode list upload, to prevent triggering RequestDataTooBig
FILE_SIZE_UPLOAD_MAX = 1950000
#################################
# DICOM Viewer settings
#################################
DICOM_VIEWER = os.environ.get('DICOM_VIEWER', None)
#################################
# SOLR settings
#################################
SOLR_URI = os.environ.get('SOLR_URI', '')
SOLR_LOGIN = os.environ.get('SOLR_LOGIN', '')
SOLR_PASSWORD = os.environ.get('SOLR_PASSWORD', '')
SOLR_CERT = join(dirname(dirname(__file__)), "{}{}".format(SECURE_LOCAL_PATH, os.environ.get('SOLR_CERT', '')))
DEFAULT_FETCH_COUNT = os.environ.get('DEFAULT_FETCH_COUNT', 10)
# Explicitly check for known problems in descriptions and names provided by users
BLACKLIST_RE = r'((?i)<script>|(?i)</script>|!\[\]|!!\[\]|\[\]\[\".*\"\]|(?i)<iframe>|(?i)</iframe>)'
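# Illustrative behaviour of the pattern above (example inputs only):
# re.search(BLACKLIST_RE, '<script>alert(1)</script>') -> matches
# re.search(BLACKLIST_RE, 'My cohort #1') -> None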
if DEBUG and DEBUG_TOOLBAR:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware',)
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
SHOW_TOOLBAR_CALLBACK = True
INTERNAL_IPS = (os.environ.get('INTERNAL_IP', ''),)
##################
# OHIF_SETTINGS
##################
#
# Django's default is to append a trailing '/' to URLs, i.e. /callback becomes /callback/. OHIF does not accept /callback/!
APPEND_SLASH = False
DICOM_STORE_PATH=os.environ.get('DICOM_STORE_PATH','')
# Log the version of our app
print("[STATUS] Application Version is {}".format(APP_VERSION))
| 37.628705 | 165 | 0.651368 | 160 | 0.006633 | 0 | 0 | 0 | 0 | 0 | 0 | 13,855 | 0.57442 |
310c8eff631db50cd8a05c87d1793446b7ad450c | 4,065 | py | Python | examples/fixed_play.py | wwxFromTju/malib | 7cd2a4af55cf1f56da8854e26ea7a4f3782ceea2 | ["MIT"] | 6 | 2021-05-19T10:25:36.000Z | 2021-12-27T03:30:33.000Z | examples/fixed_play.py | wwxFromTju/malib | 7cd2a4af55cf1f56da8854e26ea7a4f3782ceea2 | ["MIT"] | 1 | 2021-05-29T04:51:37.000Z | 2021-05-30T06:18:10.000Z | examples/fixed_play.py | ying-wen/malib_deprecated | 875338b81c4d87064ad31201f461ef742db05f25 | ["MIT"] | 1 | 2021-05-31T16:16:12.000Z | 2021-05-31T16:16:12.000Z |
# Created by yingwen at 2019-03-16
from multiprocessing import Process
from malib.agents.agent_factory import *
from malib.environments import DifferentialGame
from malib.logger.utils import set_logger
from malib.samplers.sampler import MASampler
from malib.trainers import MATrainer
from malib.utils.random import set_seed
def get_agent_by_type(type_name, i, env, hidden_layer_sizes, max_replay_buffer_size):
if type_name == "SAC":
return get_sac_agent(
env,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
)
elif type_name == "ROMMEO":
return get_rommeo_agent(
env,
agent_id=i,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
)
elif type_name == "ROMMEO-UNI":
return get_rommeo_agent(
env,
agent_id=i,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
uniform=True,
)
elif type_name == "DDPG-OM":
return get_ddpgom_agent(
env,
agent_id=i,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
)
elif type_name == "DDPG-TOM":
return get_ddpgtom_agent(
env,
agent_id=i,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
)
elif type_name == "DDPG":
return get_ddpg_agent(
env,
agent_id=i,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
)
elif type_name == "MADDPG":
return get_maddpg_agent(
env,
agent_id=i,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
)
elif type_name == "MFAC":
return get_maddpg_agent(
env,
agent_id=i,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
)
def train_fixed(seed, agent_setting, game_name="ma_softq"):
set_seed(seed)
suffix = f"fixed_play1/{game_name}/{agent_setting}/{seed}"
set_logger(suffix)
batch_size = 512
training_steps = 2000
exploration_steps = 100
max_replay_buffer_size = 1e5
hidden_layer_sizes = (128, 128)
max_path_length = 1
agent_num = 2
env = DifferentialGame(game_name, agent_num)
agents = []
agent_types = agent_setting.split("_")
assert len(agent_types) == agent_num
for i, agent_type in enumerate(agent_types):
agents.append(
get_agent_by_type(
agent_type,
i,
env,
hidden_layer_sizes=hidden_layer_sizes,
max_replay_buffer_size=max_replay_buffer_size,
)
)
sampler = MASampler(
agent_num, batch_size=batch_size, max_path_length=max_path_length
)
sampler.initialize(env, agents)
trainer = MATrainer(
env=env,
agents=agents,
sampler=sampler,
steps=training_steps,
exploration_steps=exploration_steps,
training_interval=1,
extra_experiences=["annealing", "recent_experiences"],
batch_size=batch_size,
)
trainer.run()
def main():
settings = [
"ROMMEO_ROMMEO",
]
game = "ma_softq"
for setting in settings:
processes = []
for e in range(1):
seed = 1 + int(23122134 / (e + 1))
def train_func():
train_fixed(seed, setting, game)
            # Awkward, hacky per-run processes, because TensorFlow does not like being re-initialized within a single process.
p = Process(target=train_func, args=tuple())
p.start()
processes.append(p)
for p in processes:
p.join()
if __name__ == "__main__":
main()
| 28.229167 | 85 | 0.613284 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 294 | 0.072325 |
310e8a0bf4712762b03a5c5b70b449b48e5c9b02 | 776 | py | Python | hypha/apply/projects/templatetags/payment_request_tools.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | ["BSD-3-Clause"] | 16 | 2020-01-24T11:52:46.000Z | 2021-02-02T22:21:04.000Z | hypha/apply/projects/templatetags/payment_request_tools.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | ["BSD-3-Clause"] | 538 | 2020-01-24T08:27:13.000Z | 2021-04-05T07:15:01.000Z | hypha/apply/projects/templatetags/payment_request_tools.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | ["BSD-3-Clause"] | 17 | 2020-02-07T14:55:54.000Z | 2021-04-04T19:32:38.000Z |
import decimal
from django import template
register = template.Library()
@register.simple_tag
def can_change_status(payment_request, user):
return payment_request.can_user_change_status(user)
@register.simple_tag
def can_delete(payment_request, user):
return payment_request.can_user_delete(user)
@register.simple_tag
def can_edit(payment_request, user):
return payment_request.can_user_edit(user)
@register.simple_tag
def percentage(value, total):
if not total:
return decimal.Decimal(0)
unrounded_total = (value / total) * 100
# round using Decimal since we're dealing with currency
rounded_total = unrounded_total.quantize(
decimal.Decimal('0.0'),
rounding=decimal.ROUND_DOWN,
)
return rounded_total
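# Template usage sketch (illustrative; the variable names below are assumptions,
# not taken from this file):
# {% load payment_request_tools %}
# {% percentage paid_value total_value as paid_percentage %}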
| 20.972973 | 59 | 0.748711 | 0 | 0 | 0 | 0 | 689 | 0.887887 | 0 | 0 | 60 | 0.07732 |
310f1e6f71fa93dea9a16a0f58d9908f5ecbe8c1 | 15,870 | py | Python | pylabnet/hardware/counter/swabian_instruments/qudi/slow_ctr.py | wi11dey/pylabnet | a6e3362f727c45aaa60e61496e858ae92e85574d | ["MIT"] | 10 | 2020-01-07T23:28:49.000Z | 2022-02-02T19:09:17.000Z | pylabnet/hardware/counter/swabian_instruments/qudi/slow_ctr.py | wi11dey/pylabnet | a6e3362f727c45aaa60e61496e858ae92e85574d | ["MIT"] | 249 | 2019-12-28T19:38:49.000Z | 2022-03-28T16:45:32.000Z | pylabnet/hardware/counter/swabian_instruments/qudi/slow_ctr.py | wi11dey/pylabnet | a6e3362f727c45aaa60e61496e858ae92e85574d | ["MIT"] | 5 | 2020-11-17T19:45:10.000Z | 2022-01-04T18:07:04.000Z |
""" pylabnet measurement and service classes for Swabian Instruments TimeTagger
which implements qudi's SlowCounter interface.
This file contains pylabnet wrapper and service classes that allow qudi to
access a Swabian Instruments TimeTagger through the pylabnet network as a SlowCounter.
Steps:
- instantiate TimeTagger
- instantiate pylabnet-SlowCtrWrap (pass ref to TimeTagger as tagger)
- instantiate pylabnet-SlowCtrService and assign module to the created wrapper
- start pylabnet-server for SlowCtrService
- in qudi, instantiate SlowCtrClient as one of the hardware modules
"""
from pylabnet.network.core.service_base import ServiceBase
import TimeTagger as TT
import time
import copy
import pickle
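# Minimal wiring sketch following the steps in the module docstring (illustrative:
# assumes a TimeTagger device is attached; channel numbers, clock frequency and the
# server setup are example values, not verified against a device):
#
# tagger = TT.createTimeTagger()
# wrapper = Wrap(tagger, channel_list=[1, 2], clock_frequency=50, buffer_size=100)
# service = Service()
# service.assign_module(module=wrapper) # then serve it with a pylabnet server
# # in qudi, a SlowCtrClient pointed at that server acts as the hardware module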
class Wrap:
""" Measurement instance which implements qudi's SlowCounter interface.
"""
def __init__(self, tagger, channel_list, clock_frequency, buffer_size):
# References to the device and to TT.Counter measurement
self._tagger = tagger
self._counter = None
# Counter parameters
self._channel_list = channel_list
self._clock_frequency = clock_frequency
self._buffer_size = buffer_size
self._bin_width = 0
self._bin_width_sec = 0
def set_up_clock(self, clock_frequency=None, clock_channel=None):
"""
Sets sample clock frequency for the Counter measurement.
:param clock_frequency: (float) sample clock frequency. If not given,
configuration value is used
:param clock_channel: ignored (internal timebase is used to generate
sample clock signal)
:return: (int) operation status code: 0 - OK
-1 - Error
"""
# Use config value, if no clock_frequency is specified
if clock_frequency is None:
clock_frequency = self._clock_frequency
# Calculate final bin width
bin_width = int(1e12 / clock_frequency) # in picoseconds, for device
bin_width_sec = bin_width * 1e-12 # is seconds, for software timing
# Set new values param to internal variables
self._bin_width = bin_width
self._bin_width_sec = bin_width_sec
return 0
def set_up_counter(self,
counter_channels=None,
sources=None,
clock_channel=None,
counter_buffer=None):
"""
Configures the actual counter with a given clock.
        The list of channels to count clicks on (list of int) and the buffer size
        are taken from the config values unless overridden below.
        :param counter_buffer: (int) [optional] size of the memory buffer.
        If not given, config value is used.
        :param counter_channels: ignored
        This argument should not be used. The Counter GUI initializes its set of plot
        curves self.curves during its on_activate() method. It basically calls
        counter_hardware.get_counter_channels() and uses this list to init self.curves.
        Only after that can the user click the "Start" button, which calls
        set_up_counter(). And since the GUI has already initialized its set of curves,
        the set of channels must not be modified here! It will cause the GUI to fail.
:param sources: ignored
:param clock_channel: ignored
:return: (int) operation status code: 0 - OK
-1 - Error
"""
# Set counter channels
if counter_channels is not None:
channel_list = counter_channels
else:
channel_list = self._channel_list
# apply counter channel change
self.set_counter_channels(channel_list=channel_list)
# Set buffer size
if counter_buffer is not None:
buffer_size = counter_buffer
else:
buffer_size = self._buffer_size
# sanity check:
if not isinstance(buffer_size, int) or buffer_size <= 0:
# self.log.error('set_up_counter(): invalid parameter value counter_buffer = {}.'
# 'This parameter must be a positive integer.'
# ''.format(buffer_size))
return -1
# apply buffer size change
self._buffer_size = buffer_size
# Create instance of Counter measurement
try:
self._counter = TT.Counter(
tagger=self._tagger,
channels=self._channel_list,
binwidth=self._bin_width,
n_values=self._buffer_size
)
# handle initialization error (TT functions always produce NotImplementedError)
except NotImplementedError:
self._counter = None
# self.log.error('set_up_counter(): failed to instantiate TT.Counter measurement')
return -1
# Start Counter
# (TT.Counter measurement starts running immediately after instantiation,
# so it is necessary to erase all counts collected since instantiation)
self._counter.stop()
self._counter.clear()
self._counter.start()
return 0
def close_clock(self):
"""
Closes the clock.
:return: (int) error code: 0 - OK
-1 - Error
"""
# self._bin_width = 0
# self._bin_width_sec = 0
return 0
def close_counter(self):
"""
Closes the counter and cleans up afterwards.
:return: (int) error code: 0 - OK
-1 - Error
"""
# Try stopping and clearing TT.Counter measurement
try:
self._counter.stop()
self._counter.clear()
# Handle the case of exception in TT function call (NotImplementedError)
# and the case of self._ctr = None (AttributeError)
except (NotImplementedError, AttributeError):
pass
# Remove reference to the counter
# self._ctr = None
# Clear counter parameters
# self._buffer_size = []
# Do not clear channel list:
# Counter GUI inits its list of curves self.curves
# by calling counter_hardware.get_counter_channels() before
# calling counter_hardware.set_up_counter()
# If one clears _channel_list here, GUI will fail at the next
# "Start" button click after reloading.
#
# self._channel_list = []
return 0
def get_counter(self, samples=1):
"""
Returns the current counts per second of the counter.
:param samples: (int) [optional] number of samples to read in one go
(default is one sample)
:return: numpy.array((samples, uint32), dtype=np.uint32)
array of count rate [counts/second] arrays of length samples for each click channel
Empty array [] is returned in the case of error.
"""
# Sanity check: samples has valid value
if samples != 1:
if not isinstance(samples, int) or samples <= 0:
# self.log.error('get_counter(): invalid argument samples={0}. This argument must be a positive integer'
# ''.format(samples))
return []
# MORE SOPHISTICATED VERSION
# (WORKS TOO SLOWLY: PROBABLY BECAUSE OF SLOW INTEGER DIVISION OF LARGE INTEGERS)
#
# start_time = time.time()
# while time.time() - start_time < self._timeout:
# new_complete_bins = self._ctr.getCaptureDuration() // self._bin_width - self._last_read_bin
#
# self._overflow = new_complete_bins
# # self.log.error('new_complete_bins = {}'.format(new_complete_bins))
#
# if new_complete_bins < samples:
# time.sleep(self._bin_width_sec/2)
# continue
# elif new_complete_bins == samples:
# self._last_read_bin += new_complete_bins
# break
# else:
# # self.log.warn('Counter is overflowing. \n'
# # 'Software pulls data in too slowly and counter bins are too short, '
# # 'such that some bins are lost. \n'
# # 'Try reducing sampling rate or increasing oversampling')
# self._last_read_bin += new_complete_bins
# break
# Wait for specified number of samples (samples parameter) to be accumulated
#
# This approach is very naive and is more or less accurate for
# clock frequency below 50 Hz.
#
# For higher frequencies, the actual time sampling interval is determined
# by software delays (about 1 ms). Counter measurement overflows
# (most of the samples are over-written before software reads them in)
# but does not fail. The only problem here is that time axis on the count-trace
# graph is no longer accurate:
# the difference between consecutive tick labels is much smaller than the actual
# time interval between measured samples (about 1 ms)
time.sleep(samples * self._bin_width_sec)
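        # (illustrative: with clock_frequency=50 Hz, self._bin_width_sec is 0.02 s,
        # so samples=1 sleeps for 0.02 s)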
# read-in most recent 'samples' samples
try:
count_array = self._counter.getData()[:, -samples:]
except NotImplementedError:
# self.log.error('get_counter() reading operation failed')
return []
except AttributeError:
# self.log.error('get_counter(): counter was not initialized')
return []
# Calculate count rate [count/sec]
count_rate_array = count_array / self._bin_width_sec
return count_rate_array
def get_counter_channels(self):
"""
Returns the list of click channel numbers.
:return: (list of int) list of click channel numbers
"""
return copy.deepcopy(self._channel_list)
def set_counter_channels(self, channel_list=None):
"""
Set click channel list.
Notice that this method only modifies internal variable _channel_list.
To apply the change to the counter, one has to call set_up_counter() again.
:param channel_list: (list of int) list of channels to count clicks on
:return: (list of int) actual list of click channels
"""
if channel_list is None:
return self.get_counter_channels()
# Sanity check:
all_channels = self._get_all_channels()
if not set(channel_list).issubset(set(all_channels)):
# self.log.error('set_counter_channels(): requested list of channels is invalid: '
# 'some channels are not present on the device.'
# 'requested list: {0} \n'
# 'available channels: {1}'
# ''.format(channel_list, all_channels))
return self.get_counter_channels()
# Apply changes to internal variable self._channel_list
self._channel_list = channel_list
# Sort channel numbers, such that channel order does not depend
# on order of numbers in the config file
self._channel_list.sort()
return self.get_counter_channels()
def _get_all_channels(self):
"""
Return list of all channels available on the device.
Positive/negative values correspond to rising/falling edge detection.
For example:
1 means 'rising edge on connector 1'
-1 means 'falling edge on connector 1
:return: (list of int) list of all available channel numbers,
including edge sign.
"""
try:
available_channel_tuple = list(
self._tagger.getChannelList(TT.TT_CHANNEL_RISING_AND_FALLING_EDGES)
)
# handle exception in the call (TT functions normally produce NotImplementedError)
except NotImplementedError:
# self.log.error('_get_all_channels(): communication with the device failed')
return []
# handle the case of self._tagger = None
except AttributeError:
# self.log.error('_get_all_channels(): _tagger is None. Initialize device first')
return []
return list(available_channel_tuple)
class Service(ServiceBase):
def exposed_set_up_clock(self, clock_frequency=None, clock_channel=None):
"""
Sets sample clock frequency for the Counter measurement.
:param clock_frequency: (float) sample clock frequency. If not given,
configuration value is used
:param clock_channel: ignored (internal timebase is used to generate
sample clock signal)
:return: (int) operation status code: 0 - OK
-1 - Error
"""
return self._module.set_up_clock(
clock_frequency=clock_frequency,
clock_channel=clock_channel
)
def exposed_set_up_counter(self, counter_channels=None, sources=None, clock_channel=None, counter_buffer=None):
"""
Configures the actual counter with a given clock.
        The list of channels to count clicks on (list of int) and the buffer size
        are taken from the config values unless overridden below.
        :param counter_buffer: (int) [optional] size of the memory buffer.
        If not given, config value is used.
        :param counter_channels: ignored
        This argument should not be used. The Counter GUI initializes its set of plot
        curves self.curves during its on_activate() method. It basically calls
        counter_hardware.get_counter_channels() and uses this list to init self.curves.
        Only after that can the user click the "Start" button, which calls
        set_up_counter(). And since the GUI has already initialized its set of curves,
        the set of channels must not be modified here! It will cause the GUI to fail.
:param sources: ignored
:param clock_channel: ignored
:return: (int) operation status code: 0 - OK
-1 - Error
"""
return self._module.set_up_counter(
counter_channels=counter_channels,
sources=sources,
clock_channel=clock_channel,
counter_buffer=counter_buffer
)
def exposed_close_clock(self):
"""
Closes the clock.
:return: (int) error code: 0 - OK
-1 - Error
"""
return self._module.close_clock()
def exposed_close_counter(self):
"""
Closes the counter and cleans up afterwards.
:return: (int) error code: 0 - OK
-1 - Error
"""
        return self._module.close_counter()
def exposed_get_counter(self, samples=1):
"""
Returns the current counts per second of the counter.
:param samples: (int) [optional] number of samples to read in one go
(default is one sample)
:return: numpy.array((samples, uint32), dtype=np.uint32)
array of count rate [counts/second] arrays of length samples for each click channel
Empty array [] is returned in the case of error.
"""
res = self._module.get_counter(samples=samples)
return pickle.dumps(res)
def exposed_get_counter_channels(self):
"""
Returns the list of click channel numbers.
:return: (list of int) list of click channel numbers
"""
res = self._module.get_counter_channels()
return pickle.dumps(res)
| 36.65127 | 120 | 0.60189 | 15,173 | 0.956081 | 0 | 0 | 0 | 0 | 0 | 0 | 10,330 | 0.650914 |
310f4d83fe0811735557a3547fef59abd4752a52 | 174,167 | py | Python | dev0s/classes/defaults/files.py | vandenberghinc/dev0s | 1c9629e2a81ad111fd3e74929aaeb05c97be0276 | ["MIT"] | 1 | 2021-03-09T20:14:29.000Z | 2021-03-09T20:14:29.000Z | dev0s/classes/defaults/files.py | vandenberghinc/dev0s | 1c9629e2a81ad111fd3e74929aaeb05c97be0276 | ["MIT"] | null | null | null | dev0s/classes/defaults/files.py | vandenberghinc/dev0s | 1c9629e2a81ad111fd3e74929aaeb05c97be0276 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Must still be recoded with some cleaner code.
"""
# imports.
from dev0s.classes.config import *
from dev0s.classes import utils
from dev0s.classes.defaults.color import color, symbol
from dev0s.classes import console
from dev0s.classes.defaults.exceptions import Exceptions
# pip.
from datetime import datetime, timezone
import shutil, math
from PIL import Image as _Image_
"""
Notes.
All default files & formats must exact the same as the default dict, bool, list etc in the native sense.
There are lots additionals though. But a dict and Dictionary should be able to be used universally as if the user would not know the difference (which could very quickly in some instances).
"""
# the format classes.
class Formats():
# variables.
digits = [0,1,2,3,4,5,6,7,8,9,]
str_digits = ["0","1","2","3","4","5","6","7","8","9"]
alphabet, capitalized_alphabet = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"], []
for i in alphabet: capitalized_alphabet.append(i.upper())
special_characters = ["±","§","!","@","€","#","£","$","¢","%","∞","^","&","ª","(",")","–","_","+","=","{","}","[","]",";",":","'",'"',"|","\\","//","?",">",".",",","<"]
# check & get format / instance.
def check(
nones=None,
booleans=None,
none_allowed_booleans=None,
strings=None,
none_allowed_strings=None,
integers=None,
none_allowed_integers=None,
bytes_=None,
none_allowed_bytes=None,
arrays=None,
none_allowed_arrays=None,
dictionaries=None,
none_allowed_dictionaries=None,
):
if nones != None:
for key,value in nones.items():
if value == None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [!null].")
if booleans != None:
for key,value in booleans.items():
if not isinstance(value, bool): raise ValueError(f"Invalid [{key}] format [{value}], required format is [bool].")
if none_allowed_booleans != None:
for key,value in none_allowed_booleans.items():
if not isinstance(value, bool) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [bool].")
if strings != None:
for key,value in strings.items():
if not isinstance(value, str): raise ValueError(f"Invalid [{key}] format [{value}], required format is [str].")
if none_allowed_strings != None:
for key,value in none_allowed_strings.items():
if not isinstance(value, str) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [str].")
if integers != None:
for key,value in integers.items():
if not isinstance(value, int): raise ValueError(f"Invalid [{key}] format [{value}], required format is [int].")
if none_allowed_integers != None:
for key,value in none_allowed_integers.items():
if not isinstance(value, int) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [int].")
if bytes_ != None:
for key,value in bytes_.items():
if not isinstance(value, bytes): raise ValueError(f"Invalid [{key}] format [{value}], required format is [bytes].")
if none_allowed_bytes != None:
for key,value in none_allowed_bytes.items():
if not isinstance(value, bytes) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [bytes].")
if arrays != None:
for key,value in arrays.items():
if not isinstance(value, list): raise ValueError(f"Invalid [{key}] format [{value}], required format is [list].")
if none_allowed_arrays != None:
for key,value in none_allowed_arrays.items():
if not isinstance(value, list) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [list].")
if dictionaries != None:
for key,value in dictionaries.items():
if not isinstance(value, dict): raise ValueError(f"Invalid [{key}] format [{value}], required format is [dict].")
if none_allowed_dictionaries != None:
for key,value in none_allowed_dictionaries.items():
if not isinstance(value, dict) and value != None: raise ValueError(f"Invalid [{key}] format [{value}], required format is [dict].")
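    # e.g. Formats.check(strings={"name": "x"}, integers={"port": 8080}) passes silently,
    # while Formats.check(integers={"port": "8080"}) raises a ValueError.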
def get(value, serialize=False):
if value == None: return None
elif isinstance(value, bool):
if not serialize: return bool
else: return "bool"
elif isinstance(value, str):
if not serialize: return str
else: return "str"
elif isinstance(value, int):
if not serialize: return int
else: return "int"
elif isinstance(value, bytes):
if not serialize: return bytes
else: return "bytes"
elif isinstance(value, list):
if not serialize: return list
else: return "list"
elif isinstance(value, dict):
if not serialize: return dict
else: return "dict"
elif isinstance(value, Boolean) or value.__class__.__name__ == "Boolean":
if not serialize: return Boolean
else: return "Boolean"
elif isinstance(value, String) or value.__class__.__name__ == "String":
if not serialize: return String
else: return "String"
elif isinstance(value, Integer) or value.__class__.__name__ == "Integer":
if not serialize: return Integer
else: return "Integer"
elif isinstance(value, Bytes) or value.__class__.__name__ == "Bytes":
if not serialize: return Bytes
else: return "Bytes"
elif isinstance(value, Array) or value.__class__.__name__ == "Array":
if not serialize: return Array
else: return "Array"
elif isinstance(value, Dictionary) or value.__class__.__name__ == "Dictionary":
if not serialize: return Dictionary
else: return "Dictionary"
elif isinstance(value, FilePath) or value.__class__.__name__ == "FilePath":
if not serialize: return FilePath
else: return "FilePath"
elif isinstance(value, object):
if not serialize: return object
else: return "object"
else: raise ValueError(f"Unknown format [{value}].")
#
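    # e.g. Formats.get(True) -> bool, Formats.get(True, serialize=True) -> "bool".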
    # try to parse a variable to a format; when parsing fails it returns the default (with safe enabled) or raises a ParseError.
def parse(
# the variable to parse (required) (#1).
variable,
# the expected format (required) (#2).
format=None,
# with safe disabled it throws a ParseError when the variable can't be parsed to the expected format.
safe=True,
# the default return value for when safe is enabled.
default=None,
):
if format in [bool, "bool", Boolean, "Boolean"]:
try:
return bool(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a bool from ({variable.__class__.__name__}) [{variable}].")
elif format in [int, "int"]:
try:
return int(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a int from ({variable.__class__.__name__}) [{variable}].")
elif format in [float, "float", Integer, "Integer"]:
try:
return float(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a float from ({variable.__class__.__name__}) [{variable}].")
elif format in [str, "str", String, "String"]:
try:
return str(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a str from ({variable.__class__.__name__}) [{variable}].")
elif format in [list, "list", Array, "Array"]:
if isinstance(variable, (list,Array)):
return variable
elif not isinstance(variable, (str, String)):
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse an array from ({variable.__class__.__name__}) [{variable}].")
try:
return ast.literal_eval(variable)
except:
try:
return json.loads(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse an array from ({variable.__class__.__name__}) [{variable}].")
elif format in [dict, "dict", Dictionary, "Dictionary"]:
if isinstance(variable, (dict,Dictionary)):
return variable
elif not isinstance(variable, (str, String)):
raise Exceptions.ParseError(f"Unable to parse a dict from ({variable.__class__.__name__}) [{variable}].")
try:
return ast.literal_eval(variable)
except:
try:
return json.loads(variable)
except:
if safe:
return default
else:
raise Exceptions.ParseError(f"Unable to parse a dict from ({variable.__class__.__name__}) [{variable}].")
else:
raise Exceptions.InvalidUsage(f"Specified format [{format}] is not a valid format option.")
#
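    # e.g. Formats.parse("[1, 2, 3]", format=list) -> [1, 2, 3],
    # and Formats.parse("oops", format=int, safe=True, default=0) -> 0.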
# initialize from default format to dev0s format.
def initialize(variable, file_paths=True):
if variable.__class__.__name__ in ["str","String"]:
if file_paths and "/" in variable and Files.exists(variable):
return FilePath(variable)
else:
return String(variable)
elif variable.__class__.__name__ in ["bool","Boolean"]:
return Boolean(variable)
elif variable.__class__.__name__ in ["int","float","Integer"]:
return Integer(variable)
elif variable.__class__.__name__ in ["dict","Dictionary"]:
return Dictionary(variable)
elif variable.__class__.__name__ in ["list","Array"]:
return Array(variable)
else:
return variable
#
# denitialize from dev0s formats to default format.
def denitialize(variable, file_paths=True):
if variable.__class__.__name__ in ["String"]:
return str(variable)
elif variable.__class__.__name__ in ["FilePath"]:
return str(variable)
elif variable.__class__.__name__ in ["Boolean"]:
return bool(variable)
elif variable.__class__.__name__ in ["Integer"]:
return variable.value
elif variable.__class__.__name__ in ["Dictionary", "ResponseObject", "OutputObject", "dict"]:
new = {}
for key,value in variable.items():
new[key] = Formats.denitialize(value, file_paths=file_paths)
return new
elif variable.__class__.__name__ in ["Array", "list"]:
new = []
for value in variable:
new.append(Formats.denitialize(value, file_paths=file_paths))
return new
else:
return variable
#
# the file path object class.
class FilePath(object):
def __init__(self, path, default=False, check=False, load=False):
# docs.
DOCS = {
"module":"FilePath",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
self.path = str(self.clean(path=str(path), raw=True))
if check == False and default == False and path != False:
if Files.directory(self.path) and self.path[len(self.path)-1] != '/': self.path += '/'
if check and os.path.exists(self.path) == False: raise FileNotFoundError(f"Path [{self.path}] does not exist.")
self.ownership = self.Ownership(path=self.path, load=load)
self.permission = self.Permission(path=self.path, load=load)
#
# - info:
def join(self, name=None, type="/"):
if type not in ["", "/"] and "." not in type:
type = "." + type
path = self.path
if path[len(path)-1] != "/": path += '/'
return FilePath("{}{}{}".format(path, name, type))
def name(self, path=None, remove_extension=False,):
if path == None: path = self.path
if path in [False, None]: return None
x = 1
if path[len(path)-1] == '/': x += 1
name = path.split('/')[len(path.split('/'))-x]
if remove_extension:
count = len(name.split("."))
if count > 1:
c, s = 0, None
for i in name.split("."):
if c < count-1:
if s == None: s = i
else: s += "."+i
c += 1
name = s
return name
def extension(self, name=None, path=None):
if path == None: path = self.path
# - check directory:
extension = None
if name == None and Files.directory(path): extension = 'dir'
else:
# - get extension:
try:
if name == None: name = self.name(path=path)
extension = name.split('.')[len(name.split('.'))-1]
except:
try:
name = self.name(path=path)
extension = name.split('.')[len(name.split('.'))-1]
except: extension = None
# - check image & video:
if extension in ["jpg", "png", "gif", "webp", "tiff", "psd", "raw", "bmp", "heig", "indd", "jpeg", "svg", "ai", "eps", "pdf"]: extension = "img"
elif extension in ["mp4", "m4a", "m4v", "f4v", "f4a", "m4b", "m4r", "f4b", "mov", "3gp", "3gp2", "3g2", "3gpp", "3gpp2", "h.263", "h.264", "hevc", "mpeg4", "theora", "3gp", "windows media 8", "quicktime", "mpeg-4", "vp8", "vp6", "mpeg1", "mpeg2", "mpeg-ts", "mpeg", "dnxhd", "xdcam", "dv", "dvcpro", "dvcprohd", "imx", "xdcam", "hd", "hd422"]: extension = "video"
return extension
def base(self,
# the path (leave None to use self.path) (param #1).
path=None,
# the dirs back.
back=1,
):
if path == None: path = self.path
return Files.base(path=path, back=back)
#
def basename(self, back=1, path=None):
if path == None: path = self.path
return self.name(path=self.base(back=back, path=path))
def size(self, format=str, mode="auto", path=None, options=["auto", "bytes", "kb", "mb", "gb", "tb"]):
def __size__(path):
total = 0
try:
# print("[+] Getting the size of", directory)
for entry in os.scandir(path):
if entry.is_file():
# if it's a file, use stat() function
total += entry.stat().st_size
elif entry.is_dir():
# if it's a directory, recursively call this function
total += __size__(entry.path)
except NotADirectoryError:
# if `directory` isn't a directory, get the file size then
return os.path.getsize(path)
except PermissionError:
# if for whatever reason we can't open the folder, return 0
return 0
return total
#
if path == None: path = self.path
if path != None: path = str(path)
return self.convert_bytes(__size__(path), format=format, mode=mode)
def space(self, format=str, mode="auto", path=None, options=["auto", "bytes", "kb", "mb", "gb", "tb"]):
if path == None: path = self.path
total, used, free = shutil.disk_usage(path)
total, used, free = self.convert_bytes(total, format=format, mode=mode), self.convert_bytes(used, format=format, mode=mode), self.convert_bytes(free, format=format, mode=mode)
return {
"total":total,
"used":used,
"free":free,
}
def convert_bytes(self, bytes:int, format=str, mode="auto", options=["auto", "bytes", "kb", "mb", "gb", "tb"]):
if format in [float, "float", "integer", "Integer", Integer]:
format = float
if (mode == "bytes" or mode == "bytes".upper()):
return float(bytes)
elif format in [int, "int", "integer", "Integer", Integer]:
format = int
if (mode == "bytes" or mode == "bytes".upper()):
return int(round(bytes,0))
if mode == "auto":
if int(bytes/1024**4) >= 10:
bytes = round(bytes/1024**4,2)
if format not in [int, float]:
bytes = '{:,} TB'.format(bytes)#.replace(',', '.')
elif int(bytes/1024**3) >= 10:
bytes = round(bytes/1024**3,2)
if format not in [int, float]:
bytes = '{:,} GB'.format(bytes)#.replace(',', '.')
elif int(bytes/1024**2) >= 10:
bytes = round(bytes/1024**2,2)
if format not in [int, float]:
bytes = '{:,} MB'.format(bytes)#.replace(',', '.')
elif int(bytes/1024) >= 10:
bytes = round(bytes/1024,2)
if format not in [int, float]:
bytes = '{:,} KB'.format(bytes)#.replace(',', '.')
else:
bytes = int(round(bytes,0))
if format not in [int, float]:
bytes = '{:,} Bytes'.format(bytes)#.replace(',', '.')
elif (mode == "bytes" or mode == "bytes".upper()):
bytes = int(round(bytes,0))
if format not in [int, float]:
bytes = '{:,} Bytes'.format(bytes)#.replace(',', '.')
elif mode == "kb" or mode == "kb".upper():
bytes = round(bytes/1024,2)
if format not in [int, float]:
bytes = '{:,} KB'.format(bytes)#.replace(',', '.')
elif mode == "mb" or mode == "mb".upper():
bytes = round(bytes/1024**2,2)
if format not in [int, float]:
bytes = '{:,} MB'.format(bytes)#.replace(',', '.')
elif mode == "gb" or mode == "gb".upper():
bytes = round(bytes/1024**3,2)
if format not in [int, float]:
bytes = '{:,} GB'.format(bytes)#.replace(',', '.')
elif mode == "tb" or mode == "tb".upper():
bytes = round(bytes/1024**4,2)
if format not in [int, float]:
bytes = '{:,} TB'.format(bytes)#.replace(',', '.')
else: raise Exceptions.InvalidUsage(f"Selected an invalid size format [{format}], options {options}.")
return bytes
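        # e.g. convert_bytes(3481272) -> '3,399.68 KB' (auto mode stops at the first
        # unit whose value reaches 10).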
def exists(self,
# the path (leave None to use self.path) (#1).
path=None,
# root permission required.
sudo=False,
):
if path == None: path = self.path
path = gfp.clean(path=path, remove_double_slash=True, remove_last_slash=True)
path = str(path)
if not sudo:
return os.path.exists(str(path))
else:
try:
output = utils.__execute__(["sudo", "ls","-ld",path])
if "No such file or directory" in str(output):
return False
else: return True
except: return False
#
def mount(self,
# the path (leave None to use self.path) (#1).
path=None,
):
if path == None: path = self.path
path = gfp.clean(path=path, remove_double_slash=True, remove_last_slash=True)
return os.path.ismount(path)
#
def directory(self,
# the path (leave None to use self.path) (#1).
path=None,
):
if path == None: path = self.path
return Files.directory(path)
#
def mtime(self, format='%d-%m-%y %H:%M.%S', path=None):
if path == None: path = self.path
fname = pathlib.Path(path)
try: mtime = fname.stat().st_mtime
            except: mtime = fname.stat().st_ctime
if format in ['s', "seconds"]:
return mtime
else:
return Formats.Date().from_seconds(mtime, format=format)
def clean(self,
# the path (leave None to use self.path) (param #1).
path=None,
# the clean options.
remove_double_slash=True,
remove_first_slash=False,
remove_last_slash=False,
ensure_first_slash=False,
ensure_last_slash=False,
# return the path as a raw string.
raw=False,
):
if path == None: path = self.path
if not isinstance(path, (str, String)):
return path
path = str(path).replace("~",HOME)
while True:
if remove_double_slash and "//" in path: path = path.replace("//","/")
elif remove_first_slash and len(path) > 0 and path[0] == "/": path = path[1:]
elif remove_last_slash and len(path) > 0 and path[len(path)-1] == "/": path = path[:-1]
elif ensure_first_slash and len(path) > 0 and path[0] != "/": path = "/"+path
elif ensure_last_slash and len(path) > 0 and path[len(path)-1] != "/": path += "/"
else: break
if raw:
return path
else:
return FilePath(path)
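        # e.g. gfp.clean("//tmp//dir/", remove_last_slash=True, raw=True) -> "/tmp/dir"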
def absolute(self,
# the path (leave None to use self.path) (param #1).
path=None,
):
if path == None: path = self.path
return FilePath(os.path.abspath(path))
# path to python module path.
def module(self, path=None):
if path == None: path = self.path
return gfp.clean(path=path, remove_double_slash=True, remove_last_slash=True, remove_first_slash=True).replace("/",".").replace(".py","").replace(".__init__", "").replace("__init__", "")
# serialize a requirements file.
def requirements(self, path=None, format="pip", include_version=True):
if format in ["pip3"]: format = "pip"
if format not in ["pip"]: raise ValueError(f"Invalid usage, format [{format}] is not a valid option, options: [pip].")
# pip requirements.
if format == "pip":
requirements = []
for i in Files.load(path).split("\n"):
if len(i) > 0 and i[0] != "#" and i not in [""," "]:
while True:
if len(i) > 0 and i[len(i)-1] in [" "]: i = i[:-1]
else: break
if " " not in i:
sid = None
for lid in ["==", ">=", "<="]:
if lid in i: sid = lid ; break
if sid != None:
if include_version:
requirements.append(i)
else:
requirements.append(i.split(sid)[0])
else:
requirements.append(i)
return requirements
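	# usage sketch (hedged; the file name below is an assumption):
	#   gfp.requirements(path="requirements.txt")                         # -> ["requests==2.25.1", ...]
	#   gfp.requirements(path="requirements.txt", include_version=False)  # -> ["requests", ...]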
# - commands:
def delete(self,
# the path (leave None to use self.path) (param #1).
path=None,
# the options.
forced=False,
sudo=False,
silent=False,
):
if path == None: path = self.path
if silent: silent = ' 2> /dev/null'
else: silent = ""
if sudo: sudo = "sudo "
else: sudo = ""
options = " "
if forced:
options = " -f "
if Files.directory(path): options = " -fr "
elif Files.directory(path): options = " -r "
os.system(f"{sudo}rm{options}{path}{silent}")
	def move(self,
		# the to path (#1).
		path=None,
		# root permission required.
		sudo=False,
		# the active log level.
		log_level=0,
	):
		response = Files.move(
			# the from & to path (#1 & #2).
			self.path, path,
			# root permission required.
			sudo=sudo,
			# the active log level.
			log_level=log_level,
		)
		self.path = gfp.clean(path=path)
		return response
def copy(self,
# the to path (#1).
path=None,
# root permission required.
sudo=False,
# the active log level.
log_level=0,
# the exclude patterns.
exclude=[],
# update deleted files.
delete=True,
):
return Files.copy(
# the from & to path (#1 & #2).
self.path, path,
# root permission required.
sudo=sudo,
# the active log level.
log_level=log_level,
# the exclude patterns.
exclude=exclude,
# update deleted files.
delete=delete,)
def open(self, sudo=False):
if sudo: sudo = "sudo "
else: sudo = ""
if OS in ["macos"]:
os.system(f"{sudo}open {self.path}")
elif OS in ["linux"]:
os.system(f"{sudo}nautulis {self.path}")
else: raise Exceptions.InvalidOperatingSystem(f"Unsupported operating system [{OS}].")
def create(self,
# Option 1: (creating a directory)
# - boolean format:
directory=False,
# Option 2: (creating any file extension)
# - string format:
data="",
# Options:
# - integer format:
permission=None,
# - string format:
owner=None,
group=None,
# - boolean format:
sudo=False,
):
# - option 1:
if directory:
if sudo: os.system('sudo mkdir -p '+self.path)
else: os.system('mkdir -p '+self.path)
# - option 2:
elif data != None:
if sudo:
f = Files.File(path='/tmp/tmp_file', data=data)
f.save()
os.system(f"sudo mv {f.file_path.path} {self.path}")
else:
Files.File(path=self.path, data=data).save()
#with open
# - invalid option:
		else: raise ValueError("Invalid option, either enable the [directory] boolean to create a directory, or specify [path] and [data] to create any sort of file.")
# - default:
if owner != None or group != None: self.ownership.set(owner=owner, group=group, sudo=sudo)
if permission != None: self.permission.set(permission, sudo=sudo)
#
def check(self,
# Option 1: (creating a directory)
# - boolean format:
directory=False,
# Option 2: (creating any file extension)
# - string format:
data="",
# Options:
# - integer format:
permission=None,
# - string format:
owner=None,
group=None,
# - boolean format:
sudo=False,
silent=False,
recursive=False, # for directories only (for permission & ownership check)
):
# - option 1:
if not self.exists(sudo=sudo):
self.create(directory=directory, data=data, permission=permission, owner=owner, group=group, sudo=sudo)
else:
# - default:
self.ownership.check(owner=owner, group=group, sudo=sudo, silent=silent, recursive=recursive)
self.permission.check(permission=permission, sudo=sudo, silent=silent, recursive=recursive)
#
# support default str functions.
def split(self, path):
return Files.Array(self.path.split(str(path)))
def count(self, path):
return Formats.Integer(self.path.count(str(path)))
def replace(self, from_, to_):
return self.path.replace(str(from_), str(to_))
	def lower(self, path=None):
		return self.path.lower()
	def upper(self, path=None):
		return self.path.upper()
	# support subscripting.
	def __getitem__(self, index):
		return self.path[Formats.denitialize(index)]
	def __setitem__(self, index, value):
		# str does not support item assignment; rebuild the path (integer indexes only).
		index = Formats.denitialize(index)
		self.path = self.path[:index] + str(value) + self.path[index+1:]
# support "+" & "-" .
def __add__(self, path):
if isinstance(path, str):
a=1
elif isinstance(path, self.__class__):
path = path.path
elif not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {path.__class__}.")
return self.path + path
def __sub__(self, path):
if isinstance(path, str):
a=1
elif isinstance(path, self.__class__):
path = path.path
elif not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {path.__class__}.")
return self.path.replace(path, "")
# support +.
def __concat__(self, path):
if isinstance(path, str):
a=1
elif isinstance(path, self.__class__):
path = path.path
elif not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {path.__class__}.")
return self.path + path
# support default iteration.
def __iter__(self):
return iter(self.path)
# support '>=' & '>' operator.
def __gt__(self, path):
if not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {path.__class__}.")
return len(self.path) > len(path.path)
def __ge__(self, path):
if not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {path.__class__}.")
return len(self.path) >= len(path.path)
# support '<=' & '<' operator.
def __lt__(self, path):
if not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {path.__class__}.")
return len(self.path) < len(path.path)
def __le__(self, path):
if not isinstance(path, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {path.__class__}.")
return len(self.path) <= len(path.path)
# support '==' & '!=' operator.
def __eq__(self, path):
if isinstance(path, str):
return self.path == path
elif not isinstance(path, self.__class__):
return False
return self.path == path.path
def __ne__(self, path):
if isinstance(path, str):
return self.path != path
elif not isinstance(path, self.__class__):
return True
return self.path != path.path
# support 'in' operator.
def __contains__(self, path):
if isinstance(path, (list, Files.Array)):
for i in path:
if i in self.path:
return True
return False
else:
return path in self.path
#
	# object representation.
def __repr__(self):
return str(self)
# str representation.
def __str__(self):
return str(self.path)
# int representation.
def __int__(self):
return int(self.path)
# float representation.
def __float__(self):
return float(self.path)
# bool representation.
def __bool__(self):
if self.path in [1.0, 1, "true", "True", "TRUE", True]:
return True
elif self.path in [0, 0.0, "false", "False", "FALSE", False]:
return False
else:
raise Exceptions.FormatError(f"Could not parse a bool from {self.__id__()}.")
# content count.
def __len__(self):
return len(self.path)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "FilePath"
#
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, path, load=False):
if isinstance(path, self.__class__):
path = path.path
self.path = gfp.clean(path=path)
self.ownership = self.Ownership(path=self.path, load=load)
self.permission = self.Permission(path=self.path, load=load)
return self
# return raw data.
def raw(self):
return self.path
# - objects:
class Ownership(object):
def __init__(self, path=None, load=False):
# docs.
DOCS = {
"module":"FilePath.Ownership",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
self.path = path
self.owner = None
self.group = None
if load:
				self.owner, self.group = self.get()
# - info:
def get(self, path=None):
if path == None: path = self.path
owner = pwd.getpwuid(os.stat(path).st_uid).pw_name
try:
group = grp.getgrgid(os.stat(path).st_gid).gr_name
except KeyError: # unknown group likely from different os / machine.
group = os.stat(path).st_gid
except Exception as e:
raise ValueError(f"Unable to retrieve the group of file {path}, error: {e}.")
return owner, group
def set(self,
# the permission (str) (#1).
owner=None,
# the group (str) (optional) (#2).
group=None,
# the path (optional) (overwrites self.path) (#3).
path=None,
# root permission required.
sudo=False,
# recursive.
recursive=False,
# silent.
silent=False,
):
if path == None: path = self.path
if group == None:
if OS in ["macos"]: group = "wheel"
elif OS in ["linux"]: group = "root"
else: raise ValueError("Unsupported operating system [{}].".format(OS))
silent_option = ""
if silent: silent_option = ' 2> /dev/null'
			if recursive:
				if sudo: os.system("sudo chown -R {} {} {}".format(owner+":"+group, path, silent_option))
				else: os.system("chown -R {} {} {}".format(owner+":"+group, path, silent_option))
			else:
				if sudo: os.system("sudo chown {} {} {}".format(owner+":"+group, path, silent_option))
				else: os.system("chown {} {} {}".format(owner+":"+group, path, silent_option))
		def check(self, owner=None, group=None, sudo=False, silent=False, iterate=False, recursive=False, path=None): # combine [recursive] and [iterate] to walk all files in a directory and check them against the given ownership.
if path == None: path = self.path
if group == None:
if OS in ["macos"]: group = "wheel"
elif OS in ["linux"]: group = "root"
else: raise ValueError("Unsupported operating system [{}].".format(OS))
_owner_, _group_ = self.get(path=path)
if _owner_ != owner or _group_ != group:
self.set(owner=owner, group=group, sudo=sudo, silent=silent, recursive=recursive, path=path)
if recursive and iterate and Files.directory(self.path):
for dirpath, subdirs, files in os.walk(self.path):
for path in subdirs:
#print("DIRECTORY:",path)
#print("> FULL PATH NAME:",dirpath+"/"+path)
if path not in ["lost+found"]:
file_path = Formats.FilePath(dirpath+"/"+path)
file_path.ownership.check(owner=owner, group=group, sudo=sudo, silent=silent)
for path in files:
#print("FILE NAME:",path)
#print("> FULL PATH:",dirpath+"/"+path)
file_path = Formats.FilePath(dirpath+"/"+path)
file_path.ownership.check(owner=owner, group=group, sudo=sudo, silent=silent)
class Permission(object):
def __init__(self, path=None, load=False):
# docs.
DOCS = {
"module":"FilePath.Permission",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# defaults.
#self.__class__.__name__ = "Permission"
# init.
self.path = path
self.permission = None
if load: self.permission = self.get()
# - info:
def get(self, path=None):
if path == None: path = self.path
status = os.stat(path)
permission = oct(status.st_mode)[-3:]
return permission
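		# usage sketch (hedged; the path is illustrative): get() returns the octal permission string, e.g.
		#   Formats.FilePath("/tmp/example").permission.get()   # -> "644"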
def set(self,
# the permission (int) (#1).
permission=None,
# the path (optional) (overwrites self.path) (#2).
path=None,
# root permission required.
sudo=False,
# recursive.
recursive=False,
# silent.
silent=False,
):
if path == None: path = self.path
silent_option = ""
if silent: silent_option = ' 2> /dev/null'
if recursive:
if sudo: os.system("sudo chmod -R {} {} {}".format(permission, path, silent_option))
else: os.system("chmod -R {} {} {}".format(permission, path, silent_option))
else:
if sudo: os.system("sudo chmod {} {} {}".format(permission, path, silent_option))
else: os.system("chmod {} {} {}".format(permission, path, silent_option))
		def check(self, permission=None, sudo=False, silent=False, iterate=False, recursive=False, path=None): # combine [recursive] and [iterate] to walk all files in a directory and check them against the given permission.
if path == None: path = self.path
if self.get(path=path) != permission:
self.set(permission=permission, sudo=sudo, silent=silent, recursive=recursive, path=path)
if recursive and iterate and Files.directory(path):
for dirpath, subdirs, files in os.walk(path):
for path in subdirs:
#print("DIR NAME:",path)
#print("> FULL PATH:",dirpath+"/"+path)
if path not in ["lost+found"]:
file_path = Formats.FilePath(dirpath+"/"+path)
file_path.permission.check(permission=permission, sudo=sudo, silent=silent)
for path in files:
#print("FILE NAME:",path)
#print("> FULL PATH:",dirpath+"/"+path)
file_path = Formats.FilePath(dirpath+"/"+path)
file_path.permission.check(permission=permission, sudo=sudo, silent=silent)
#
# the string object class.
class String(object):
def __init__(self,
# the string's value (str) (#1).
string="",
# the path (str, FilePath) (param #2).
path=False,
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"String",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
self.string = str(string)
# path.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical).
else: self.file_path = self.fp = Formats.FilePath(path)
		if self.file_path != None and default != None and not Files.exists(self.file_path.path): self.save(string=default)
if load: self.load()
#
def save(self, string=None, path=None, sudo=False):
if string == None: string = self.string
if path == None: path = self.file_path.path
utils.__check_memory_only__(path)
self.string = str(string)
return Files.save(path, str(string), format="str", sudo=sudo)
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
self.string = Files.load(self.file_path.path, format="str", sudo=sudo)
return self.string
def is_numerical(self):
for i in ["q", "w", "e", "r", "t", "y", "u", "i", "o", "p", "a", "s", "d", "f", "g", "h", "j", "k", "l", "z", "x", "c", "v", "b", "n", "m"]:
if i in self.string.lower(): return False
return True
	def bash(self):
		# escape shell special characters with a backslash.
		a = self.string
		for char in ['(', ')', "'", ' ', '$', '!', '?', '@', '%', '^', '&', '*', '"']:
			a = a.replace(char, "\\"+char)
		return a
def identifier(self):
x = self.string.lower().replace(' ','-')
return x
def variable_format(self,
exceptions={
"smart_card":"smartcard",
"smart_cards":"smartcards" ,
"web_server":"webserver" ,
},
):
s, c = "", 0
for i in self.string:
try:
n = self.string[c+1]
except:
n = "none"
try:
p = self.string[c-1]
except:
p = "none"
if s != "" and i.lower() != i and str(n).lower() == str(n) and str(p).lower() == str(p):
s += "_"
s += i.lower()
c += 1
if s in list(exceptions.keys()):
return exceptions[s]
else:
return s
def class_format(self):
s, next_capital = "", False
for i in self.string:
if i == "_":
next_capital = True
elif next_capital:
s += i.upper()
else:
s += i
return s
def capitalized_scentence(self):
x = self.string.split(" ")
cap = [y.capitalize() for y in x]
return " ".join(cap)
def capitalized_word(self):
try:
new = self.string[0].upper()
c = 0
for i in self.string:
if c > 0: new += i
c += 1
return new
except IndexError: return self.string
def generate(self,
# the length of the generated string.
length=6,
# include digits.
digits=False,
# include capital letters.
capitalize=False,
# include special characters.
special=False,
):
charset = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]
if capitalize:
for i in ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]: charset.append(i.upper())
if digits: digits = ["1","2","3","4","5","6","7","8","9","0"]
else: digits = []
if special: special = ["!", "?", "&", "#","@", "*"]
else: special = []
s = ""
for i in range(length):
if len(digits) > 0 and random.randrange(1,101) <= 40:
s += digits[random.randrange(0, len(digits))]
elif len(special) > 0 and random.randrange(1,101) <= 10:
s += special[random.randrange(0, len(special))]
else:
s += charset[random.randrange(0, len(charset))]
return s
#
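	# usage sketch (hedged): generate() draws per position from the enabled charsets,
	# digits with ~40% weight and specials with ~10% weight when enabled, e.g.
	#   String().generate(length=8, digits=True, capitalize=True)   # -> e.g. "a3Fk9qB1"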
	# iterate a string (backwards) to check the first occurrence of a specified charset.
	def first_occurence(self, charset=[" ", "\n"], reversed=False, string=None):
		if string == None: string = self.string
		if reversed:
			c = len(string)-1
			for _ in string:
				char = string[c]
				if char in charset:
					for i in charset:
						if i == char: return i
				c -= 1
			return None
		else:
			c = 0
			for _ in string:
				char = string[c]
				if char in charset:
					for i in charset:
						if i == char: return i
				c += 1
			return None
	# splice a string into before/after by a first occurrence.
	# if include is True and both include_before and include_after are False, the slicer is included at before.
	def before_after_first_occurence(self, slicer=" ", include=True, include_before=False, include_after=False, string=None):
if isinstance(slicer, list):
first = self.first_occurence(charset=slicer, string=string)
return self.before_after_first_occurence(slicer=first, include=include, include_before=include_before, include_after=include_after, string=string)
else:
if string == None: string = self.string
before, after, slice_count, slices, _last_ = "", "", string.count(slicer), 0, ""
for char in string:
if len(_last_) >= len(slicer): _last_ = _last_[1:]
_last_ += char
if _last_ == slicer:
slices += 1
if include:
if slices != slice_count or include_before:
before += char
elif include_after:
after += char
else:
before += char
elif slices > 0:
after += char
else:
before += char
return before, after
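	# usage sketch (hedged): splits on the first occurrence of the slicer, e.g.
	#   String("key: value").before_after_first_occurence(slicer=": ")
	#   # -> ("key: ", "value") with include=True (slicer kept at before).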
# splice a string into before/selected/after by a first occurence.
def before_selected_after_first_occurence(self, slicer=" ", string=None):
if string == None: string = self.string
before, selected, after, slice_count, open, _last_ = "", "", "", string.count(slicer), False, ""
selected_sliced_count = 0
for char in string:
if isinstance(slicer, str) and len(_last_) >= len(slicer): _last_ = _last_[1:]
elif isinstance(slicer, list) and len(_last_) >= len(slicer[selected_sliced_count]): _last_ = _last_[1:]
_last_ += char
if (isinstance(slicer, str) and _last_ == slicer) or (isinstance(slicer, list) and _last_ == slicer[selected_sliced_count]):
selected_sliced_count += 1
selected += char
				open = not open
elif open:
after += char
else:
before += char
return before, selected, after
	# splice a string into before/after by a last occurrence.
	# if include is True and both include_before and include_after are False, the slicer is included at before.
	def before_after_last_occurence(self, slicer=" ", include=True, include_before=False, include_after=False, string=None):
if string == None: string = self.string
before, after, slice_count, slices, _last_ = "", "", string.count(slicer), 0, ""
for char in string:
if len(_last_) >= len(slicer): _last_ = _last_[1:]
_last_ += char
if _last_ == slicer:
slices += 1
if include:
if slices != slice_count or include_before:
before += char
elif include_after:
after += char
else:
before += char
elif slices == slice_count:
after += char
else:
before += char
return before, after
# splice a string into before/selected/after by a last occurence.
def before_selected_after_last_occurence(self, slicer=" ", string=None):
if string == None: string = self.string
before, selected, after, slice_count, slices, _last_ = "", "", "", string.count(slicer), 0, ""
for char in string:
if len(_last_) >= len(slicer): _last_ = _last_[1:]
_last_ += char
if _last_ == slicer:
slices += 1
selected += char
elif slices == slice_count:
after += char
else:
before += char
return before, selected, after
	# get the first text between 2 string identifiers [start, end] by depth.
	# identifiers must be parameter number 1.
def between(self, identifiers=["{","}"], depth=1, include=True, string=None):
# vars.
if string == None: string = self.string
keep_last = [len(identifiers[0]), len(identifiers[1])]
last = ["", ""]
unadded = ""
s, open, opened, first_open = "", 0, False, False
# iterate.
for i in string:
# set last & unadded.
unadded += i
last[0] += i
last[1] += i
if len(last[0]) > keep_last[0]:
last[0] = str(String(last[0]).remove_first(1))
if len(last[1]) > keep_last[1]:
last[1] = str(String(last[1]).remove_first(1))
# check ids.
if last[0] == identifiers[0]:
open += 1
first_open = True
elif last[1] == identifiers[1]:
open -= 1
if open >= depth:
if include or open == depth:
if include and first_open:
s += identifiers[0]
unadded = ""
first_open = False
else:
s += unadded
unadded = ""
opened = True
if opened and open < depth:
if include:
s += unadded
unadded = ""
break
# remainders.
if unadded != "" and opened and open < depth:
if include:
s += unadded
unadded = ""
# handler.
return Formats.String(s)
#
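	# usage sketch (hedged): between() returns the first block enclosed by the identifiers at the given depth, e.g.
	#   String("func(a, (b))").between(identifiers=["(", ")"], depth=1)
	#   # -> "(a, (b))" (include=True keeps the identifiers themselves).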
	# get the text between the identifiers & replace the enclosed content with a new str.
def replace_between(self,
# the between identifiers (list) (#1).
identifiers=["{","}"],
# the new string (str) (#2).
to="",
# the identifiers depth.
depth=1,
# the optional string.
string=None,
):
update = False
if string == None:
update = True
string = self.string
sliced = self.between(identifiers, depth=depth, include=True, string=string)
string = string.replace(str(sliced), to)
if update:
self.string = string
return string
#
# increase version.
def increase_version(self):
# version 2.
#
path = "/tmp/increase_version"
Files.save(path, f"""version='{self.string}"""+"""' && echo $version | awk -F. -v OFS=. 'NF==1{print ++$NF}; NF>1{if(length($NF+1)>length($NF))$(NF-1)++; $NF=sprintf("%0*d", length($NF), ($NF+1)%(10^length($NF))); print}'""")
return subprocess.check_output([f"bash", path]).decode().replace("\n","")
		# version 1 (legacy, unreachable; kept for reference only).
		#
old_version = self.string
base, _base_= [], old_version.split(".")
increase = True
for i in _base_:
base.append(int(i))
count = len(base)-1
for i in range(len(base)):
if increase:
if base[count] >= 9:
if count > 0:
base[count-1] += 1
base[count] = 0
increase = False
else:
base[count] += 1
break
else:
base[count] += 1
break
else:
if count > 0 and int(base[count]) >= 10:
base[count-1] += 1
base[count] = 0
increase = False
elif count == 0: break
count -= 1
version = ""
for i in base:
if version == "": version = str(i)
else: version += "."+str(i)
return version
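	# usage sketch (hedged): increase_version() bumps the least significant segment and carries over at 9, e.g.
	#   String("1.2.3").increase_version()   # -> "1.2.4"
	#   String("1.0.9").increase_version()   # -> "1.1.0"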
# slice dict from string.
# get the first {} from the string by depth.
def slice_dict(self, depth=1):
return self.between(["{", "}"], depth=depth)
# slice array from string.
# get the first [] from the string by depth.
def slice_array(self, depth=1):
return self.between(["[", "]"], depth=depth)
# slice tuple from string.
# get the first () from the string by depth.
def slice_tuple(self, depth=1):
return self.between(["(", ")"], depth=depth)
# iterate chars.
# > for charcount, char in String.iterate_chars()
def iterate_chars(self):
charcount, items = 0, []
for char in self.string:
items.append([charcount, char])
charcount += 1
return items
def iterate_characters(self):
return self.iterate_chars()
# iterate lines.
# > for linecount, line in String.iterate_lines()
def iterate_lines(self):
linecount, items = 0, []
for line in self.string.split("\n"):
items.append([linecount, line])
linecount += 1
return items
# slice indent from string.
	# get the content between the \n{indent}.
def indent(self, indent=4):
s = ""
for i in range(indent): s += " "
return s
def line_indent(self, line=""):
# get line indent.
		line = line.replace("\t", "    ") # normalize tabs to 4 spaces (assumed intent; the original replace was a no-op).
if len(line) > 0 and " " in line:
line_indent = 0
for c in line:
if c in [" "]: line_indent += 1
else: break
else: line_indent = 0
return Formats.Integer(line_indent)
def slice_indent(self, indent=4, depth=1, string=None, remove_indent=True):
if string == None: string = self.string
		string = string.replace("\t", "    ") # normalize tabs to 4 spaces (assumed intent; the original replace was a no-op).
s, open, opened, d = "", 0, False, 0
for line in string.split("\n"):
# get line indent.
if len(line) > 0 and " " in line:
line_indent = 0
for c in line:
if c in [" "]: line_indent += 1
else: break
else: line_indent = 0
# check indent match.
if (not opened and line_indent == indent) or (opened and line_indent >= indent):
if d >= depth:
if remove_indent:
s += line[indent:]+"\n"
else:
s += line+"\n"
opened = True
#elif len(line) > 0 and not opened and line_indent == indent:
# d += 1
elif len(line) > 0 and line_indent <= indent:
if opened:
break
else:
d += 1
return s
# get the first / last n characters of the string.
def first(self, count):
if isinstance(count, (int, float, Integer)):
count = int(count)
else:
count = len(count)
return self.string[:count]
def last(self, count):
if isinstance(count, (int, float, Integer)):
count = int(count)
else:
count = len(count)
		if len(self.string) >= count:
			return self.string[-count:]
		else:
			return None
#
# remove first / last n characters of the string.
def remove_first(self, count):
if isinstance(count, (int, float, Integer)):
count = int(count)
else:
count = len(count)
removed = self.first(count)
self.string = self.string[count:]
return self.string
def remove_last(self, count):
if isinstance(count, (int, float, Integer)):
count = int(count)
else:
count = len(count)
removed = self.last(count)
self.string = self.string[:-count]
return self.string
#
# support default str functions.
def split(self, string):
if isinstance(string, (list, Array)):
if isinstance(string, Array): array = string.array
else: array = string
new, last, next_start = [], "", None
for i in self.string:
last += i
newslice = False
#l_next_start = None
for test in array:
if test in last:
if str(last[-len(test):]) == str(test):
#l_next_start = last[:-len(test)]
last = last[:-len(test)]
newslice = True
break
if newslice:
new.append(last)
last = ""
#if next_start == None: new.append(last)
#elif include:
# new.append(next_start+last)
# next_start = None
#if include and l_next_start != None:
# next_start = l_next_start
if last != "":
new.append(last)
return new
else:
return Files.Array(self.string.split(str(string)))
def count(self, string):
return Formats.Integer(self.string.count(str(string)))
def replace(self, from_, to_):
return self.string.replace(str(from_), str(to_))
	def lower(self, string=None):
		return self.string.lower()
	def upper(self, string=None):
		return self.string.upper()
# support "+" & "-" .
def __add__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {string.__class__}.")
return self.string + string
def __iadd__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {string.__class__}.")
self.string = self.string + string
return self
def __sub__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {string.__class__}.")
return self.string.replace(string, "")
def __isub__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {string.__class__}.")
self.string = self.string.replace(string, "")
return self
	# support subscripting.
	def __getitem__(self, index):
		return self.string[Formats.denitialize(index)]
	def __setitem__(self, index, value):
		# str does not support item assignment; rebuild the string (integer indexes only).
		index = Formats.denitialize(index)
		self.string = self.string[:index] + str(value) + self.string[index+1:]
# support default iteration.
def __iter__(self):
return iter(self.string)
# support '>=' & '>' operator.
def __gt__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self.string) > len(string)
def __ge__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self.string) >= len(string)
# support '<=' & '<' operator.
def __lt__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self.string) < len(string)
def __le__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
return len(self.string) <= len(string)
# support '==' & '!=' operator.
def __eq__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
return False
return self.string == string
def __ne__(self, string):
if isinstance(string, str):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
return True
return self.string != string
# support +.
def __concat__(self, string):
if isinstance(string, (str)):
a=1
elif isinstance(string, self.__class__):
string = string.string
		elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not concat object {self.__class__} & {string.__class__}.")
return self.string + string
# support 'in' operator.
def __contains__(self, string):
if isinstance(string, (list, Files.Array)):
for i in string:
if str(i) in str(self.string):
return True
return False
else:
return str(string) in str(self.string)
#
# representation.
def __repr__(self):
return str(self)
# str representation.
def __str__(self):
return str(self.string)
# int representation.
def __int__(self):
return int(self.string)
# float representation.
def __float__(self):
return float(self.string)
# bool representation.
def __bool__(self):
return len(self.string) > 0
#if self.string in [1.0, 1, "true", "True", "TRUE", True]:
# return True
#elif self.string in [0, 0.0, "false", "False", "FALSE", False]:
# return False
#else:
# raise Exceptions.FormatError(f"Could not parse a bool from {self.__id__()}.")
# content count.
def __len__(self):
return len(self.string)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
	# object instance.
def instance(self):
return "String"
#
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, string):
if isinstance(string, (int, float)):
a=1
elif isinstance(string, self.__class__):
string = string.string
elif not isinstance(string, self.__class__):
raise Exceptions.FormatError(f"Can not assign object {self.__class__} & {string.__class__}.")
self.string = str(string)
return self
# return raw data.
def raw(self):
		return self.string
#
# the boolean object class.
class Boolean(object):
def __init__(self,
# the boolean's value (bool) (#1).
boolean=False,
# the path (str, FilePath) (param #2).
path=False,
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"Boolean",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(boolean, Formats.Boolean):
boolean = boolean.bool
# init.
self.bool = boolean
if self.bool in ["true", "True", "TRUE", True]: self.bool = True
else: self.bool = False
# path.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical).
else: self.file_path = self.fp = Formats.FilePath(path)
		if self.file_path != None and default != None and not Files.exists(self.file_path.path): self.save(bool=default)
if load: self.load()
#
def save(self, bool=None, path=None, sudo=False):
		if bool == None: bool = self.bool
if path == None: path = self.file_path.path
utils.__check_memory_only__(path)
self.bool = bool
return Files.save(path, str(bool), format="str", sudo=sudo)
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
		self.bool = str(Files.load(self.file_path.path, format="str", sudo=sudo)) in ["true", "True", "TRUE"]
return self.bool
def string(self, true="True", false="False"):
if self.bool:
return true
else:
return false
# native support.
def __index__(self):
return int(self)
# support '==' & '!=' operator.
def __eq__(self, boolean):
if isinstance(boolean, bool):
return self.bool == boolean
elif not isinstance(boolean, self.__class__):
return False
return self.bool == boolean.bool
def __ne__(self, boolean):
if isinstance(boolean, bool):
return self.bool != boolean
elif not isinstance(boolean, self.__class__):
return True
return self.bool != boolean.bool
# support default iteration.
def __iter__(self):
return iter(str(self.bool))
# support 'in' operator.
def __contains__(self, string):
return string in str(self.bool)
#
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(self.bool)
# int representation.
def __int__(self):
if self.bool:
return 1
else:
return 0
# float representation.
def __float__(self):
if self.bool:
return 1.0
else:
return 0.0
# bool representation.
def __bool__(self):
return self.bool
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Boolean"
#
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, boolean):
if isinstance(boolean, (int, float)):
a=1
		elif isinstance(boolean, self.__class__):
boolean = boolean.bool
elif not isinstance(boolean, self.__class__):
raise Exceptions.FormatError(f"Can not assign object {self.__class__} & {boolean.__class__}.")
self.bool = boolean
return self
# return raw data.
def raw(self):
return self.bool
#
# the integer object class.
class Integer(object):
def __init__(self,
# the integers value (int, float) (param #1).
value=0,
# the path (str, FilePath) (param #2).
path=False,
# the integer format (str) (param #3).
format="auto",
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"Integer",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(value, Formats.Integer):
if "." in str(value):
value = value.float
else:
value = value.int
# init.
if "." in str(value):
self.format = "float"
self.value = float(value)
else:
self.format = "int"
self.value = int(value)
self.int = int(value)
self.float = float(value)
# path.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical).
else: self.file_path = self.fp = Formats.FilePath(path)
		if self.file_path != None and default != None and not Files.exists(self.file_path.path): self.save(data=default)
if load: self.load()
#
def save(self, data=None, path=None, sudo=False):
		if data == None: data = self.raw()
if path == None: path = self.file_path.path
utils.__check_memory_only__(path)
if data != self.raw():
self.assign(data)
return Files.save(path, str(data), format="str", sudo=sudo)
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
		data = Files.load(self.file_path.path, format="str", sudo=sudo)
		if "." in str(data): data = float(data)
		else: data = int(data)
		self.assign(data)
		return data
def increase_version(self):
# version 1.
#
old_version = self.value
		base, _base_ = [], str(old_version).split(".")
increase = True
for i in _base_:
base.append(int(i))
count = len(base)-1
for i in range(len(base)):
if increase:
if base[count] >= 9:
if count > 0:
base[count-1] += 1
base[count] = 0
increase = False
else:
base[count] += 1
break
else:
base[count] += 1
break
else:
if count > 0 and int(base[count]) >= 10:
base[count-1] += 1
base[count] = 0
increase = False
elif count == 0: break
count -= 1
version = ""
for i in base:
if version == "": version = str(i)
else: version += "."+str(i)
return version
	def round(self, decimals):
		"""
		Returns the value rounded to a specific number of decimal places.
		"""
		if not isinstance(decimals, int):
			raise TypeError("decimal places must be an integer")
		else: return round(self.value, decimals)
def round_down(self, decimals):
"""
Returns a value rounded down to a specific number of decimal places.
"""
if not isinstance(decimals, int):
raise TypeError("decimal places must be an integer")
elif decimals < 0:
raise ValueError("decimal places has to be 0 or more")
		elif decimals == 0:
			return math.floor(self.value)
factor = 10 ** decimals
return math.floor(self.value * factor) / factor
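	# usage sketch (hedged): round_down() floors at the given precision, round() rounds normally, e.g.
	#   Integer(3.789).round_down(2)   # -> 3.78
	#   Integer(3.789).round(2)        # -> 3.79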
#
def generate(self, length=6):
return utils.generate.pincode(length=length)
#
# int format.
	def __index__(self):
		return int(self.value)
# support "+, -, *, %, @, /, //, **" .
def __add__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value + value)
def __sub__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not sub object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value - value)
def __iadd__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {value.__class__}.")
self.value += value
return self
def __isub__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not sub object {self.__class__} & {value.__class__}.")
self.value -= value
return self
def __mod__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mod object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value % value)
def __mul__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value * value)
def __pow__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value ** value)
def __div__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value / value)
def __truediv__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value / value)
def __floordiv__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value // value)
def __concat__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value + value)
# support "+=" & "-=".
def __pos__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not mul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value + value)
def __matmul__(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not matmul object {self.__class__} & {value.__class__}.")
return Formats.Integer(self.value @ value)
# support //.
#def __floordiv__(a, b)
# return a // b.
# support default iteration.
def __iter__(self):
return iter(str(self.value))
# support '>=' & '>' operator.
def __gt__(self, integer):
if isinstance(integer, (int,float)):
integer = integer
elif not isinstance(integer, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {integer.__class__}.")
else:
integer = integer.value
return self.value > integer
def __ge__(self, integer):
if isinstance(integer, (int,float)):
integer = integer
elif not isinstance(integer, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {integer.__class__}.")
else:
integer = integer.value
return self.value >= integer
# support '<=' & '<' operator.
def __lt__(self, integer):
if isinstance(integer, (int,float)):
integer = integer
elif not isinstance(integer, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {integer.__class__}.")
else:
integer = integer.value
return self.value < integer
def __le__(self, integer):
if isinstance(integer, (int,float)):
integer = integer
elif not isinstance(integer, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {integer.__class__}.")
else:
integer = integer.value
return self.value <= integer
# support '==' & '!=' operator.
def __eq__(self, integer):
if isinstance(integer, (int,float)):
return self.value == integer
elif not isinstance(integer, self.__class__):
return False
return self.value == integer.value
def __ne__(self, integer):
if isinstance(integer, (int,float)):
return self.value != integer
elif not isinstance(integer, self.__class__):
return True
return self.value != integer.value
# support 'in' operator.
	def __contains__(self, integer):
		if isinstance(integer, (list, Files.Array)):
			for i in integer:
				if str(i) in str(self.value):
					return True
			return False
		else:
			return str(integer) in str(self.value)
#
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(self.value)
# int representation.
def __int__(self):
return self.int
# float representation.
def __float__(self):
if self.format == "float":
return self.float
else:
return float(self.int)
# bool representation.
def __bool__(self):
if self.value in [1.0, 1]:
return True
elif self.value in [0, 0.0]:
return False
else:
raise Exceptions.FormatError(f"Could not parse a bool from {self.__id__()}.")
# content count.
def __len__(self):
return len(str(self.value))
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Integer"
#
# support self assignment.
def assign(self, value):
if isinstance(value, (int, float)):
a=1
elif isinstance(value, self.__class__):
value = value.value
elif not isinstance(value, self.__class__):
raise Exceptions.FormatError(f"Can not assign object {self.__class__} & {value.__class__}.")
self.value = value
return self
# return raw data.
def raw(self):
return self.value
#
# the date object class.
class Date(object):
def __init__(self,
#
# Leave all parameters None to initialize a Date() object with the current date.
# Pass another Date object, str repr or timestamp in seconds to initialize a Date object from that timestamp.
#
# the date parameter (str, int, Date) (optional) (#1).
date=None,
# the format for the date (leave None to parse the date format automatically) (str).
format=None,
):
# docs.
DOCS = {
"module":"Date",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# formats.
self.default_format = "%d-%m-%y %H:%M:%S" # is Date() str repr
self.seconds_format = '%S'
self.minute_format = '%M'
self.hour_format = '%H'
self.day_format = '%d'
self.day_name_format = '%A'
self.week_format = '%V'
self.month_format = '%m'
self.month_name_format = '%h'
self.year_format = '%Y'
self.date_format = '%d-%m-%y'
self.timestamp_format = '%d-%m-%y %H:%M'
self.shell_timestamp_format = '%d_%m_%y-%H_%M'
self.seconds_timestamp_format = '%d-%m-%y %H:%M:%S'
self.shell_seconds_timestamp_format = '%d_%m_%y-%H_%M_%S'
self.formats = [
self.shell_seconds_timestamp_format,
self.seconds_timestamp_format,
self.shell_timestamp_format,
self.timestamp_format,
self.date_format,
self.year_format,
self.seconds_format,
self.minute_format,
self.hour_format,
self.day_format,
self.day_name_format,
self.week_format,
self.month_format,
self.month_name_format,
]
# assign
if date == None:
self.initialize()
else:
self.assign(date, format=format)
#
def initialize(self,
#
# Leave all parameters None to initialize a Date() object with the current date.
#
# Initialize a future / previous date.
# option 1:
# specify the timestamp to initialize a previous / future date (format required).
timestamp=None,
# the timestamp format (leave None to parse).
format=None,
# options 2:
# initialize by seconds.
seconds=None,
# option 3:
# define the datetime object.
datetime_obj=None,
):
# defaults.
#self.__class__.__name__ = "Date"
# by datetime_obj
if datetime_obj != None:
seconds = time.mktime(datetime_obj.timetuple())
today = datetime.fromtimestamp(float(seconds))
# by timestamp & format.
elif timestamp != None:
if format == None:
format = self.parse_format(timestamp)
if format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from timestamp [{timestamp}]. Find out what the required format is and request a commit that updates the Date().parse_format() function with the required format (https://github.com/vandenberghinc/dev0s/).")
seconds = time.mktime(datetime.strptime(str(timestamp), str(format)).timetuple())
today = datetime.fromtimestamp(float(seconds))
# by seconds.
elif seconds != None:
today = datetime.fromtimestamp(float(seconds))
# by current.
else:
today = datetime.today()
# attributes.
self.seconds = str(today.strftime(self.seconds_format))
self.minute = str(today.strftime(self.minute_format))
self.hour = str(today.strftime(self.hour_format))
self.day = str(today.strftime(self.day_format))
self.day_name = str(today.strftime(self.day_name_format))
self.week = str(today.strftime(self.week_format))
self.month = str(today.strftime(self.month_format))
self.month_name = str(today.strftime(self.month_name_format))
self.year = str(today.strftime(self.year_format))
self.date = str(today.strftime(self.date_format))
self.timestamp = str(today.strftime(self.timestamp_format))
self.shell_timestamp = str(today.strftime(self.shell_timestamp_format))
self.seconds_timestamp = str(today.strftime(self.seconds_timestamp_format))
self.shell_seconds_timestamp = str(today.strftime(self.shell_seconds_timestamp_format))
self.time = self.hour + ":" + self.minute
return self
def compare(self, comparison=None, current=None, format=None):
if current == None: current = str(self)
if isinstance(comparison, Formats.Date):
comparison = str(comparison)
if isinstance(current, Formats.Date):
current = str(current)
if format == None:
comparison_format = self.parse_format(comparison)
if comparison_format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from comparison [{comparison}].")
else:
comparison_format = format
comparison = self.to_seconds(comparison, format=comparison_format)
if format == None:
current_format = self.parse_format(current)
if current_format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from current [{current}].")
else:
current_format = format
current = self.to_seconds(current, format=current_format)
		if comparison > current:
			return "future"
		elif comparison < current:
			return "past"
		else:
			return "present"
def increase(self, string=None, weeks=0, days=0, hours=0, minutes=0, seconds=0, format=None):
if string == None: string = str(self)
if isinstance(string, Formats.Date):
string = str(string)
if format == None:
format = self.parse_format(string)
if format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from string [{string}].")
seconds += 60*minutes
seconds += 3600*hours
seconds += 3600*24*days
seconds += 3600*24*7*weeks
s = self.to_seconds(string, format=format)
s += seconds
return self.from_seconds(s, format=format)
def decrease(self, string=None, weeks=0, days=0, hours=0, minutes=0, seconds=0, format=None):
if string == None: string = str(self)
if isinstance(string, Formats.Date):
string = str(string)
if format == None:
format = self.parse_format(string)
if format == None:
raise Exceptions.ParseError(f"Unable to parse the date format from string [{string}].")
seconds += 60*minutes
seconds += 3600*hours
seconds += 3600*24*days
seconds += 3600*24*7*weeks
s = self.to_seconds(string, format=format)
s -= seconds
return self.from_seconds(s, format=format)
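	# usage sketch (hedged; the timestamps below are illustrative):
	#   Date("01-01-21 12:00:00").increase(hours=1)   # -> "01-01-21 13:00:00"
	#   Date("01-01-21 12:00:00").decrease(days=1)    # -> "31-12-20 12:00:00"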
def to_seconds(self, string=None, format=None):
if string == None: string = str(self)
if isinstance(string, Formats.Date):
string = str(string)
if format == None:
format = self.default_format
return time.mktime(datetime.strptime(str(string), str(format)).timetuple())
#
def from_seconds(self, seconds, format=None):
if isinstance(seconds, (str,String,Integer)):
seconds = float(seconds)
if format == None:
format = self.default_format
return Date(datetime.fromtimestamp(float(seconds)).strftime(format))
#
def convert(self, string=None, datetime_obj=None, input=None, output="%Y%m%d"):
if datetime_obj == None:
if string == None: string = str(self)
if isinstance(string, Formats.Date):
string = str(string)
if input == None:
input = self.parse_format(string)
datetime_obj = datetime.strptime(str(string), str(input))
return datetime_obj.strftime(str(output))
def parse_format(self, string):
if isinstance(string, Formats.Date):
return self.default_format
elif isinstance(string, (int,float,Integer)):
return self.seconds_format
formats = []
if "-" in str(string):
formats += [
self.shell_seconds_timestamp_format,
self.seconds_timestamp_format,
self.shell_timestamp_format,
self.timestamp_format,
self.date_format,
]
else:
formats += [
self.year_format,
self.seconds_format,
#self.minute_format,
#self.hour_format,
#self.day_format,
#self.day_name_format,
#self.week_format,
#self.month_format,
#self.month_name_format,
]
# plus some custom formats.
formats += [
"%d-%m-%y %H:%M.%S", # old default.
"%Y-%m-%d %H:%M:%S", # stock market
"%d-%m-%Y", # dd-mm-yyyy.
"%d-%m-%y %H:%M:%S", # dd-mm-yy hh:mm:ss.
"%d-%m-%Y %H:%M:%S", # dd-mm-yyyy hh:mm:ss.
"%Y-%m-%dT%H:%M:%SZ", # rfc-3339.
"%Y-%m-%d",
]
for format in formats:
try:
datetime.strptime(str(string), str(format))
return format
except Exception as e:
a=1
return None
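	# usage sketch (hedged): parse_format() guesses the strptime format from the value, e.g.
	#   Date().parse_format("01-01-2021")            # -> "%d-%m-%Y"
	#   Date().parse_format("2021-01-01 12:00:00")   # -> "%Y-%m-%d %H:%M:%S"
	#   Date().parse_format(1612108800)              # -> "%S" (plain seconds).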
def assign(self, string, format=None):
if isinstance(string, Formats.Date):
self = string
return self
else:
if format == None:
format = self.parse_format(string)
if format == None:
raise Exceptions.ParseError(f"Unable to parse a Date() object from string [{string}].")
if format == self.seconds_format:
self.initialize(seconds=float(string))
else:
self.initialize(timestamp=string, format=format)
return self
# normalize seconds to 10s or 1m etc.
def normalize_seconds(self, seconds:(int,float)):
if seconds < 0:
raise ValueError("Can not normalize negative seconds.")
if seconds < 0.01:
return f'{int(seconds*1000)}ms'
elif seconds <= 60:
return f'{int(seconds)}s'
elif seconds <= 60*60:
return f'{round(seconds/60,1)}m'
elif seconds <= 60*60*24:
return f'{round(seconds/(60*60),1)}h'
elif seconds <= 60*60*24*30:
return f'{round(seconds/(60*60*24),1)}d'
elif seconds <= 60*60*24*30*12:
			return f'{round(seconds/(60*60*24*30),1)}mo' # months; "mo" avoids a clash with the minutes suffix.
else:
return f'{round(seconds/(60*60*24*30*12),1)}y'
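	# usage sketch (hedged): normalize_seconds() picks a human-readable unit, e.g.
	#   Date().normalize_seconds(0.004)   # -> "4ms"
	#   Date().normalize_seconds(90)      # -> "1.5m"
	#   Date().normalize_seconds(7200)    # -> "2.0h"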
# convert to datetime object.
	def datetime(self, timestamp=None):
		# set defaults.
		if timestamp == None: timestamp = str(self)
		# by seconds.
		if isinstance(timestamp, (int, float)):
			return datetime.fromtimestamp(float(timestamp))
		# by timestamp & format.
		format = self.parse_format(timestamp)
		if format == None:
			raise Exceptions.ParseError(f"Unable to parse the date format from timestamp [{timestamp}]. Find out what the required format is and request a commit that updates the Date().parse_format() function with the required format (https://github.com/vandenberghinc/dev0s/).")
		seconds = time.mktime(datetime.strptime(str(timestamp), str(format)).timetuple())
		return datetime.fromtimestamp(float(seconds))
# convert to rfc_3339 format.
def rfc_3339(self, timestamp=None):
# convert.
return self.datetime(timestamp=timestamp).isoformat('T') + "Z"
#
# convert to utc format.
def utc(self, timestamp=None):
# convert.
return self.datetime(timestamp=timestamp).replace(tzinfo=timezone.utc)
#
# support default iteration.
def __iter__(self):
		return iter([self.year, self.month, self.week, self.hour, self.minute, self.seconds])
# support '>=' & '>' operator.
def __gt__(self, date):
if not isinstance(date, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {date.__class__}.")
return float(self) > float(date)
def __ge__(self, date):
if not isinstance(date, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {date.__class__}.")
return float(self) >= float(date)
# support '<=' & '<' operator.
def __lt__(self, date):
if not isinstance(date, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {date.__class__}.")
return float(self) < float(date)
def __le__(self, date):
if not isinstance(date, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {date.__class__}.")
return float(self) <= float(date)
# support '==' & '!=' operator.
def __eq__(self, date):
if not isinstance(date, self.__class__):
return False
return float(self) == float(date)
def __ne__(self, date):
if not isinstance(date, self.__class__):
return True
return float(self) != float(date)
# support 'in' operator.
def __contains__(self, string):
if isinstance(string, (list, Files.Array)):
for i in string:
if i in str(self):
return True
return False
else:
return string in str(self)
# support "+", -, =-, =+" .
def __add__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
		elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {add.__class__}.")
return Date(self.to_seconds() + add)
def __iadd__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not iadd object {self.__class__} & {add.__class__}.")
self = Date(self.to_seconds() + add)
return self
def __sub__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not sub object {self.__class__} & {add.__class__}.")
return Date(self.to_seconds() - add)
def __isub__(self, add):
if isinstance(add, (int,float)):
add = float(add)
elif isinstance(add, self.__class__):
add = add.to_seconds()
elif not isinstance(add, self.__class__):
raise Exceptions.FormatError(f"Can not isub object {self.__class__} & {add.__class__}.")
self = Date(self.to_seconds() - add)
return self
	# support +.
	def __concat__(self, add):
		if isinstance(add, (int,float)):
			add = float(add)
		elif isinstance(add, self.__class__):
			add = add.to_seconds()
		elif not isinstance(add, self.__class__):
			raise Exceptions.FormatError(f"Can not concat object {self.__class__} & {add.__class__}.")
		return Date(self.to_seconds() + add)
# representation.
def __repr__(self):
return str(self)
#
# int representation.
def __int__(self):
return int(self.to_seconds(self.seconds_timestamp, format=self.seconds_timestamp_format))
# float representation.
def __float__(self):
return float(self.to_seconds(self.seconds_timestamp, format=self.seconds_timestamp_format))
# str representation.
def __str__(self):
return str(self.seconds_timestamp)
# content count.
def __len__(self):
return len(self.seconds_timestamp)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Date"
#
#
# the files class.
class Files():
#
# functions.
def join(path=None, name=None, type=""):
if type not in ["", "/"] and "." not in type:
type = "." + type
path = str(path)
if os.path.exists(path) and Files.directory(path) and path[len(path)-1] != "/": path += '/'
return gfp.clean("{}{}{}".format(path, name, type), remove_double_slash=True, remove_last_slash=False)
def load(path, data="not to be used", format="str", raw=False, sudo=False): # keep data as second param to prevent save load errors.
# correct format.
if format in [str, String, "String", "string", "file"]: format = "str"
if format in [dict, Dictionary, "Dictionary", "dict", "array", "Array"]: format = "json"
if format in [bytes, Bytes, "Bytes"]: format = "bytes"
#format = str(format)
# match format.
path = str(path)
data = None
# sudo.
if sudo:
data = utils.__execute__(["sudo", "cat", path])
if "No such file or directory" in data: raise FileNotFoundError(f"File [{path}] does not exist.")
# proceed.
if format == "str":
if not sudo:
file = open(path,mode='rb')
data = file.read().decode()
file.close()
elif format == "json":
if not sudo:
try:
with open(path, 'r+') as json_file:
data = json.load(json_file)
except json.decoder.JSONDecodeError as e:
try:
data = ast.literal_eval(Files.load(path=path, format="str", raw=True, sudo=sudo))
except:
e = f"Unable to decode file [{path}] (sudo: {sudo}), error: {e}."
raise Exceptions.JSONDecodeError(e)
else:
try:
data = json.loads(data)
except json.decoder.JSONDecodeError as e:
try:
data = ast.literal_eval(Files.load(path=path, format="str", raw=True, sudo=sudo))
except:
e = f"Unable to decode file [{path}] (sudo: {sudo}), error: {e}."
raise Exceptions.JSONDecodeError(e)
elif format == "bytes":
if not sudo:
with open(path, "rb") as file:
data = file.read()
else:
data = data.encode()
else: raise ValueError(f"Unknown format {format}.")
if raw: return data
else: return Formats.initialize(data)
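	# a minimal usage sketch of Files.load (hypothetical paths; the format is
	# normalized above, so str / dict aliases may be passed as well):
	#   content = Files.load("/tmp/example.txt")                   # file content.
	#   config = Files.load("/tmp/example.json", format="json")    # parsed json.
	#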
def save(
# the path (str) (#1).
path,
# the data (str, dict, list) (#2).
data,
# the file format, options: [str, bytes, json].
format="str",
# root permission required.
sudo=False,
# json options.
indent=4,
ensure_ascii=False,
# create backups.
backups=False,
		# warning: safe True keeps infinitely retrying to save the file when a KeyboardInterrupt is raised by the user.
safe=True,
# system functions.
__loader__=None,
__checks__=True,
__keyboard_interrupt__=False,
__attempt__=1,
__real_path__=None,
):
if __checks__:
# correct format.
if format in [str, String, "String", "string", "file"]: format = "str"
if format in [dict, Dictionary, "Dictionary", "dict", "array"]: format = "json"
if format in [bytes, Bytes, "Bytes"]: format = "bytes"
#format = str(format)
# match format.
		if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
		path = gfp.clean(str(path), remove_double_slash=True, remove_last_slash=False)
		if sudo:
			__real_path__ = str(path)
			tmp_path = path = f"/tmp/{String().generate(length=12)}"
		data = Formats.denitialize(data)
		path = str(path)
if format == "str":
file = open(path, "w+")
file.write(data)
file.close()
elif format == "json":
if __checks__:
try:
test = json.dumps(data)
except:
raise Exceptions.JSONDecodeError(f"Unable to dump expected json data: {data}")
try:
with open(path, 'w+') as json_file:
json.dump(data, json_file, ensure_ascii=ensure_ascii, indent=indent)
except PermissionError:
with open(path, 'w') as json_file:
json.dump(data, json_file, ensure_ascii=ensure_ascii, indent=indent)
except KeyboardInterrupt as e:
if __loader__ == None:
__loader__ = console.Loader(f"&RED&Do not interrupt!&END& Saving file [{path}] (attempt: {__attempt__}).")
if __attempt__ >= 100:
__loader__.stop(success=False)
raise KeyboardInterrupt(e)
return Files.save(
path, data,
format=format,
sudo=sudo,
indent=indent,
ensure_ascii=ensure_ascii,
backups=False,
safe=safe,
__loader__=__loader__,
__checks__=False,
__keyboard_interrupt__=str(e),
__attempt__=__attempt__+1,
__real_path__=__real_path__,)
elif format == "bytes":
with open(path, "wb") as file:
file.write(data)
else: raise ValueError(f"Unknown format {format}.")
if sudo:
if Files.directory(path) and path[len(path)-1] != "/":
path += "/"
if __real_path__[len(__real_path__)-1] != "/": __real_path__ += "/"
os.system(f"sudo rsync -aq {gfp.clean(path)} {gfp.clean(__real_path__)} && rm -fr {tmp_path}")
#print(f"sudo mv {gfp.clean(path)} {gfp.clean(__real_path__}")
#os.system(f"sudo mv {gfp.clean(path)} {gfp.clean(__real_path__}")
# os.system(f"sudo rsync -aq {gfp.clean(path)} {gfp.clean(__real_path__} && rm -fr {tmp_path}")
#else:
# os.system(f"sudo rsync -ogq {gfp.clean(path)} {gfp.clean(__real_path__} && rm -fr {tmp_path}")
if __keyboard_interrupt__ != False:
if __loader__ != None:
__loader__.stop()
raise KeyboardInterrupt(__keyboard_interrupt__)
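	# a minimal usage sketch of Files.save (hypothetical paths; the format
	# follows the same normalization as Files.load):
	#   Files.save("/tmp/example.txt", "Hello World!")
	#   Files.save("/tmp/example.json", {"a": 1}, format="json")
	#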
def delete(
# the path (param #1).
path=None,
# root permission required.
sudo=False,
# forced mode.
forced=False,
# hide logs.
silent=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
path = str(path)
return gfp.delete(path=path, forced=forced, sudo=sudo, silent=silent)
def chmod(
# the path (param #1).
path=None,
# the new permission.
permission=None,
# recursive for entire dir.
recursive=False,
# root permission required.
sudo=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
if permission == None: raise Exceptions.InvalidUsage("Define parameter: permission.")
path = str(path)
return gfp.permission.set(path=path, permission=permission, recursive=recursive, sudo=sudo)
def chown(
# the path (param #1).
path=None,
# the new owner.
owner=None,
# the new group (optional).
group=None,
# recursive for entire dir.
recursive=False,
# root permission required.
sudo=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
if owner == None: raise Exceptions.InvalidUsage("Define parameter: owner.")
path = str(path)
return gfp.ownership.set(path=path, owner=owner, group=group, recursive=recursive, sudo=sudo)
def exists(path=None, sudo=False):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
return gfp.exists(path=path, sudo=sudo)
#
def clean(
# the path (leave None to use self.path) (param #1).
path=None,
# the clean options.
remove_double_slash=True,
remove_first_slash=False,
remove_last_slash=False,
ensure_first_slash=False,
ensure_last_slash=False,
):
if path == None:
raise ValueError("Define parameter: path.")
path = str(path).replace("~",HOME)
while True:
if remove_double_slash and "//" in path: path = path.replace("//","/")
elif remove_first_slash and len(path) > 0 and path[0] == "/": path = path[1:]
elif remove_last_slash and len(path) > 0 and path[len(path)-1] == "/": path = path[:-1]
elif ensure_first_slash and len(path) > 0 and path[0] != "/": path = "/"+path
elif ensure_last_slash and len(path) > 0 and path[len(path)-1] != "/": path += "/"
else: break
return path
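	# a minimal usage sketch of Files.clean (hypothetical path; "~" is
	# expanded to the home directory):
	#   Files.clean("~/dir//file")   # -> "$HOME/dir/file"
	#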
def directory(
# the path (#1).
path=None,
# root permission required.
sudo=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
path = Files.clean(path=path, remove_double_slash=True, remove_last_slash=True)
path = str(path)
return os.path.isdir(path)
#
def mounted(
# the path (#1).
path=None,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
path = gfp.clean(path=path, remove_double_slash=True, remove_last_slash=True)
path = str(path)
return os.path.ismount(path)
#
def create(
# the path to the file (str) (required) (#1).
path=None,
# the data (str) (optional).
data=None,
# path is directory (bool).
directory=False,
# the owner (str) (optional).
owner=None,
# the group (str) (optional).
group=None,
# the permission (int) (optional).
permission=None,
# root permission required.
sudo=False,
):
if path == None: raise Exceptions.InvalidUsage("Define parameter: path.")
		elif Files.exists(path, sudo=sudo): raise Exceptions.DuplicateError(f"Path [{path}] already exists.")
sudo_str = Boolean(sudo).string(true="sudo ", false="")
if directory:
os.system(f"{sudo_str}mkdir -p {path}")
else:
if isinstance(data, (list, Array, dict, Dictionary)):
if isinstance(data, (Dictionary,Array)):
data = data.raw()
Files.save(path=path, data=data, format="json", sudo=sudo, )
else:
Files.save(path=path, data=str(data), sudo=sudo)
if not Files.exists(path, sudo=sudo):
raise ValueError(f"Unable to create {Boolean(directory).string(true='directory', false='file')} [{path}] (sudo: {sudo}).")
if permission != None:
Files.chmod(path=path, permission=permission, sudo=sudo)
if owner != None:
Files.chown(path=path, owner=owner, group=group, sudo=sudo)
def copy(
# the from & to path (#1 & #2).
from_, to_,
# root permission required.
sudo=False,
# the active log level.
log_level=0,
# the exclude patterns.
exclude=[],
# update deleted files.
delete=True,
):
if not Files.exists(from_, sudo=sudo):
raise FileNotFoundError(f"Specified copy path [{from_}] does not exist.")
directory = False
if Files.directory(from_, sudo=sudo):
directory = True
from_ += "/"
to_ += "/"
from_ = gfp.clean(from_)
to_ = gfp.clean(to_)
if not Files.exists(gfp.base(to_), sudo=sudo): Files.create(gfp.base(to_), sudo=sudo, directory=directory)
exclude_str = ""
for i in exclude: exclude_str += f" --exclude '{i}'"
os.system(f"{Boolean(sudo).string(true='sudo ', false='')}rsync -azt{Boolean(log_level >= 1).string(true='P',false='')} {from_} {to_} {Boolean(delete).string(true='--delete', false='')}{exclude_str}")
def move(
# the from & to path (#1 & #2).
from_, to_,
# root permission required.
sudo=False,
# root permission required.
log_level=0,
):
if not Files.exists(from_, sudo=sudo):
raise FileNotFoundError(f"Specified move path [{from_}] does not exist.")
directory = False
if Files.directory(from_, sudo=sudo):
directory = True
from_ += "/"
to_ += "/"
from_ = gfp.clean(from_)
to_ = gfp.clean(to_)
if not Files.exists(gfp.base(to_), sudo=sudo): Files.create(gfp.base(to_), sudo=sudo, directory=directory)
os.system(f"{Boolean(sudo).string(true='sudo ', false='')}mv {from_} {to_}")
def base(
# the path (str, FilePath) (#1).
path=None,
# the dirs back.
back=1,
):
if path == None: raise ValueError("Define parameter: path:str.")
path = str(path)
base = path.replace('//','/')
if base[len(base)-1] == '/': base = base[:-1]
if len(base.split("/")) <= 1: raise ValueError("Path [{}] has no base.".format(base))
		startslash = True
		if base[0] != "/":
			startslash = False
		base = base.split("/")
		m, c, s = len(base), 0, ""
		for i in base:
			if c >= m-back: break
			if i != "":
				s += f"{i}/"
			c += 1
		if startslash:
			return "/"+s
		else:
			return s
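	# a minimal usage sketch of Files.base (hypothetical paths):
	#   Files.base("/a/b/c")           # -> "/a/b/"
	#   Files.base("/a/b/c", back=2)   # -> "/a/"
	#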
#
#
# the file object class.
class File(object):
def __init__(self, path=None, data=None, load=False, default=None):
# docs.
DOCS = {
"module":"File",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(data, Files.File):
data = data.data
# init.
			if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
self.data = data
if default != None and not os.path.exists(self.file_path.path):
self.save(data=default)
if load: self.load()
# can be filled with executing [self.x = x()]:
def load(self, default=None, sudo=False):
utils.__check_memory_only__(str(self.file_path.path))
if not os.path.exists(str(self.file_path.path)) and default != None:
self.save(data=default, sudo=sudo)
self.data = Files.load(self.file_path.path, format=str, sudo=sudo)
return self.data
def load_line(self, line_number, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(str(default), self.file_path.path, sudo=sudo)
data = Files.load(self.file_path.path, format=str, sudo=sudo)
return data.split('\n')[line_number]
def save(self, data=None, path=None, overwrite_duplicates=True, sudo=False):
if path == None: path = self.file_path.path
if data == None: data = self.data
utils.__check_memory_only__(path)
if overwrite_duplicates:
self.data = data
return Files.save(path, data, sudo=sudo)
else:
				file_name, original_path = Formats.FilePath(path).name(), path
				extension = "."+file_name.split('.')[-1]
				file_name_without_extension = file_name[:-len(extension)]
				index = 1
				while True:
					if not os.path.exists(path): break
					path = original_path.replace(file_name, file_name_without_extension+'-'+str(index)+extension)
					index += 1
				self.data = data
				return Files.save(path, data, sudo=sudo)
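		# a minimal usage sketch of File.save (hypothetical path; with
		# overwrite_duplicates=False an indexed copy such as "file-1.txt" is created):
		#   file = Files.File("/tmp/file.txt", data="Hello World!")
		#   file.save()
		#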
def check(self, default=None, save=True):
if default != None and isinstance(default, (str, String)):
if not self.fp.exists():
self.data = default
if save:
self.save(data=default)
# support default iteration.
def __iter__(self):
return iter(self.data)
# support '>=' & '>' operator.
		def __gt__(self, string):
			if isinstance(string, str):
				return len(self) > len(string)
			elif not isinstance(string, self.__class__):
				raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
			return len(self) > len(string.data)
		def __ge__(self, string):
			if isinstance(string, str):
				return len(self) >= len(string)
			elif not isinstance(string, self.__class__):
				raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
			return len(self) >= len(string.data)
		# support '<=' & '<' operator.
		def __lt__(self, string):
			if isinstance(string, str):
				return len(self) < len(string)
			elif not isinstance(string, self.__class__):
				raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
			return len(self) < len(string.data)
		def __le__(self, string):
			if isinstance(string, str):
				return len(self) <= len(string)
			elif not isinstance(string, self.__class__):
				raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {string.__class__}.")
			return len(self) <= len(string.data)
		# support '==' & '!=' operator.
		def __eq__(self, string):
			if isinstance(string, str):
				return self.data == string
			elif not isinstance(string, self.__class__):
				return False
			return self.data == string.data
		def __ne__(self, string):
			if isinstance(string, str):
				return self.data != string
			elif not isinstance(string, self.__class__):
				return True
			return self.data != string.data
# support 'in' operator.
def __contains__(self, key):
if isinstance(key, (list, Files.Array)):
for i in key:
if i in self.data:
return True
return False
else:
return key in self.data
# str representation.
def __str__(self):
return str(self.data)
# content count.
def __len__(self):
return len(self.data)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "File"
#
# support self assignment.
def assign(self, data):
if isinstance(data, self.__class__):
data = data.data
self.data = data
return self
# return raw data.
def raw(self):
return self.data
#
# the array object class.
class Array(object):
def __init__(self,
# the array (param #1).
array=[],
# the path (param #2).
path=False,
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"Array",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(array, Files.Array):
array = array.array
elif not isinstance(array, list):
raise Exceptions.InstanceError(f"Parameter [{self.__class__.__name__}.array] must be a [Array] or [list], not [{array.__class__.__name__}].")
# initialize dictionary recursive.
#new = []
#for i in array: new.append(Formats.initialize(i))
#array = new
#if isinstance(array, Array):
# array = array.array
# init.
if path in [False, None]:
				self.file_path = self.fp = None # used in local memory (not physical)
self.__path__ = None
else:
self.file_path = self.fp = Formats.FilePath(path)
self.__path__ = self.file_path.path
self.array = array
if default != None and self.file_path != None and not os.path.exists(self.file_path.path):
self.save(array=default)
self.array = default
if load: self.load()
#
# save to file.
def save(self, array=None, path=None, ensure_ascii=False, indent=4, sudo=False):
			if array == None: array = self.array
if path == None: path = self.file_path.path
utils.__check_memory_only__(path)
self.array = array
return Files.save(path, Formats.denitialize(array), format="json", indent=indent, ensure_ascii=ensure_ascii, sudo=sudo)
# load from file.
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
self.array = Files.load(self.file_path.path, format="json", sudo=sudo)
return self.array
# convert to string.
def string(self, joiner=" ", sum_first=False):
string = ""
for x in self.array:
if sum_first and string == "": string = joiner + str(x)
elif string == '': string = str(x)
else: string += joiner + str(x)
return str(string)
# divide into several arrays.
def divide(self, into=2):
avg = len(self.array) / float(into)
out = []
last = 0.0
while last < len(self.array):
out.append(self.array[int(last):int(last + avg)])
last += avg
if len(out) > into:
while len(out) > into:
last = out.pop(len(out)-1)
out[len(out)-1] += last
return out
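		# a minimal usage sketch of Array.divide (hypothetical values):
		#   Array([1, 2, 3, 4]).divide(into=2)   # -> [[1, 2], [3, 4]]
		#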
		# remove indexes or values.
		def remove(self, indexes=[], values=[]):
			array = list(self.array)
			for i in indexes:
				try: array.pop(i)
				except: pass
if values != []:
new = []
for v in array:
if v not in values: new.append(v)
array = new
return Array(array, path=self.__path__)
		# default list functions.
		def append(self, var):
			array = list(self.array)
			array.append(var)
			return Array(array, path=self.__path__)
		def pop(self, index):
			array = list(self.array)
			array.pop(index)
			return Array(array, path=self.__path__)
def count(self, item=None):
if item == None:
return Formats.Integer(len(self.array))
elif isinstance(item, (str, Formats.String)):
c = 0
for i in self:
if i == item: c += 1
return Formats.Integer(c)
elif isinstance(item, (list, Files.Array)):
c = 0
for x in self:
for y in item:
if x == y: c += 1
return Formats.Integer(c)
else: raise Exceptions.InstanceError("Parameter [item] must either be None, String or Array.")
# check.
def check(self, default=None, save=True):
if default != None and isinstance(default, (list, Array)):
if not self.fp.exists():
self.array = default
if save:
						self.save(array=default)
else:
for i in default:
if i not in self.array:
self.array.append(i)
if save:
self.save()
# clean content.
def clean(self,
# the string replacements.
# example:
# { "Hello":"hello" }
# [ ["Hello", "hello"] ]
replacements={},
# the first characters to remove (String & Array).
remove_first=[],
# the last characters to remove (String & Array).
remove_last=[],
# the first characters that are ensured (String & Array) (List: check is one of the list is ensured).
ensure_first=[],
# the last characters that are ensured (String & Array) (List: check is one of the list is ensured).
ensure_last=[],
# remove all values within the list from the array.
remove_values=[],
# update the self array.
update=True,
# the dicionary (leave None to use self.array).
array=None,
):
if array == None: array = list(self.array)
if isinstance(remove_first, (str, Formats.String)):
remove_first = [remove_first]
if isinstance(remove_last, (str, Formats.String)):
remove_last = [remove_last]
if isinstance(ensure_first, (str, Formats.String)):
ensure_first = [ensure_first]
if isinstance(ensure_last, (str, Formats.String)):
ensure_last = [ensure_last]
new = []
for item in list(array):
if item not in remove_values:
while True:
edits = False
for i in remove_first:
if len(item) >= len(i) and item[:len(i)] == i:
item = item[len(i):]
edits = True
						for i in remove_last:
							if len(item) >= len(i) and item[-len(i):] == i:
								item = item[:-len(i)]
								edits = True
						for i in ensure_first:
							if len(item) >= len(i) and item[:len(i)] != i:
								item = i+item
								edits = True
						for i in ensure_last:
							if len(item) >= len(i) and item[-len(i):] != i:
								item += i
								edits = True
for from_, to_ in replacements.items():
if isinstance(item, (str, Formats.String)) and from_ in item:
item = item.replace(from_, to_)
edits = True
if not edits: break
new.append(item)
return Array(new, path=self.__path__)
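		# a minimal usage sketch of Array.clean (hypothetical values):
		#   Array([" a", " b"]).clean(remove_first=[" "], ensure_last=["/"])
		#   # -> Array(["a/", "b/"])
		#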
# iterations.
def iterate(self, sorted=False, reversed=False, array=None):
if array == None: array = list(self.array)
return self.items(reversed=reversed, sorted=sorted, array=array)
# iterate items.
def items(self, sorted=False, reversed=False, array=None):
if array == None: array = list(self.array)
if sorted: array = self.sort(array=array)
if reversed: return self.reversed(array=array)
else: return Array(array, path=self.__path__)
		# reverse array.
def reversed(self, array=None):
if array == None: array = self.array
reversed_keys = []
c = len(array)-1
for _ in range(len(array)):
reversed_keys.append(array[c])
c -= 1
return Array(reversed_keys, path=self.__path__)
# sort array.
def sort(self, reversed=False, array=None):
if array == None: array = self.array
return Array(sorted(array, reverse=reversed), path=self.__path__)
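		# a minimal usage sketch of Array.sort (hypothetical values):
		#   Array([3, 1, 2]).sort()                # -> Array([1, 2, 3])
		#   Array([3, 1, 2]).sort(reversed=True)   # -> Array([3, 2, 1])
		#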
# dump json string.
def json(self, sorted=False, reversed=False, indent=4, array=None, ):
#return json.dumps(Formats.denitialize(self), indent=indent)
if array == None: array = self.array
return json.dumps(self.serialize(json=False, sorted=sorted, reversed=reversed, array=array), indent=indent)
# serialize array.
def serialize(self, sorted=False, reversed=False, json=False, array=None):
if array == None: array = self.array
if isinstance(array, Files.Array):
array = array.array
			if sorted:
				items = self.items(reversed=reversed, array=self.sort(array=array))
else:
items = self.items(reversed=reversed, array=array)
new = []
for value in items:
				if isinstance(value, (dict, Files.Dictionary)):
					value = Files.Dictionary().serialize(json=json, dictionary=value)
				elif isinstance(value, (list, Files.Array)):
					value = self.serialize(json=json, sorted=sorted, reversed=reversed, array=value)
				elif isinstance(value, (str, bool)) or value == None:
					if value in [True, "True", "true"]:
						if json:
							value = "true"
						else:
							value = True
					elif value in [False, "False", "false"]:
						if json:
							value = "false"
						else:
							value = False
					elif value in [None, "None", "none"]:
						if json:
							value = "null"
						else:
							value = None
				else:
					value = str(value)
new.append(value)
return new
# randomize the content of the array always non recursive.
def randomize(self,
# optionally pass the array (leave None to use self.array).
array=None,
):
if array == None: array = list(self.array)
randomized = []
while len(array) > 0:
index = random.randrange(0, len(array))
item = array.pop(index)
randomized.append(item)
return Array(randomized, path=self.__path__)
#
# limit the content of the array.
def limit(self,
# limit to the number of samples.
limit:int,
# the index to start from.
start=0,
# optionally pass the array (leave None to use self.array).
array=None,
):
if array == None: array = list(self.array)
return Array(array[start:start+limit], path=self.__path__)
# min of numerical array.
def min(self):
min = self.array[0]
for item in self.array:
if item < min:
min = item
return min
# max of numerical array.
def max(self):
max = self.array[0]
for item in self.array:
if item > max:
max = item
return max
# sum numerical array.
def sum(self):
return sum(self.array)
# mean of numerical array.
def mean(self, window=None):
return self.sum() / len(self.array)
#
# variance of numerical array.
def variance(self):
mean = self.mean()
deviations = []
for x in self.array:
deviations.append((x - mean) ** 2)
return sum(deviations) / len(self.array)
# standard deviation of numerical array.
def stdev(self):
return math.sqrt(self.variance())
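		# a minimal usage sketch of the numerical helpers (hypothetical values;
		# variance / stdev are the population variants):
		#   a = Array([1.0, 2.0, 3.0])
		#   a.sum()        # 6.0
		#   a.mean()       # 2.0
		#   a.variance()   # ~0.667
		#   a.stdev()      # ~0.816
		#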
# copy.
def copy(self):
return Files.Array(self.array, path=self.__path__)
#
# support "+", -, =-, =+" .
def __add__(self, array):
if isinstance(array, list):
				pass
elif isinstance(array, self.__class__):
array = array.array
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {array.__class__}.")
return Array(self.array + array)
		def __iadd__(self, array):
			if isinstance(array, list):
				pass
			elif isinstance(array, self.__class__):
				array = array.array
			elif not isinstance(array, self.__class__):
				raise Exceptions.FormatError(f"Can not iadd object {self.__class__} & {array.__class__}.")
			self.array += array
			return self
		def __sub__(self, array):
			if isinstance(array, list):
				pass
			elif isinstance(array, self.__class__):
				array = array.array
			elif not isinstance(array, self.__class__):
				raise Exceptions.FormatError(f"Can not sub object {self.__class__} & {array.__class__}.")
new = []
for i in self.array:
if i not in array:
new.append(i)
return Array(new)
		def __isub__(self, array):
			if isinstance(array, list):
				pass
			elif isinstance(array, self.__class__):
				array = array.array
			elif not isinstance(array, self.__class__):
				raise Exceptions.FormatError(f"Can not isub object {self.__class__} & {array.__class__}.")
			new = []
			for i in self.array:
				if i not in array:
					new.append(i)
			self.array = new
			return self
# support +.
def __concat__(self, array):
if isinstance(array, list):
				pass
elif isinstance(array, self.__class__):
array = array.array
elif not isinstance(array, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {array.__class__}.")
return Array(self.array + array)
# support default iteration.
def __iter__(self):
return iter(self.array)
		# support '>=' & '>' operator.
		def __gt__(self, array):
			if isinstance(array, list):
				return len(self.array) > len(array)
			elif not isinstance(array, self.__class__):
				raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {array.__class__}.")
			return len(self.array) > len(array.array)
		def __ge__(self, array):
			if isinstance(array, list):
				return len(self.array) >= len(array)
			elif not isinstance(array, self.__class__):
				raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {array.__class__}.")
			return len(self.array) >= len(array.array)
		# support '<=' & '<' operator.
		def __lt__(self, array):
			if isinstance(array, list):
				return len(self.array) < len(array)
			elif not isinstance(array, self.__class__):
				raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {array.__class__}.")
			return len(self.array) < len(array.array)
		def __le__(self, array):
			if isinstance(array, list):
				return len(self.array) <= len(array)
			elif not isinstance(array, self.__class__):
				raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {array.__class__}.")
			return len(self.array) <= len(array.array)
		# support '==' & '!=' operator.
		def __eq__(self, array):
			if isinstance(array, list):
				return self.array == array
			elif not isinstance(array, self.__class__):
				return False
			return self.array == array.array
		def __ne__(self, array):
			if isinstance(array, list):
				return self.array != array
			elif not isinstance(array, self.__class__):
				return True
			return self.array != array.array
# support 'in' operator.
def __contains__(self, key):
if isinstance(key, (list, Files.Array)):
for i in key:
if i in self.array:
return True
return False
else:
return key in self.array
# support '*' operator.
def __mul__(self, value):
if isinstance(value, int):
				pass
else:
raise Exceptions.FormatError(f"Can not mul object {self.__class__.__name__} & {value.__class__.__name__}.")
return Array(self.array * value)
# support '/' operator.
def __div__(self, value):
if isinstance(value, int):
				pass
else:
raise Exceptions.FormatError(f"Can not div object {self.__class__.__name__} & {value.__class__.__name__}.")
return Array(self.divide(into=value))
# support item assignment.
def __setitem__(self, index, value):
#if "/" in item
try:
self.array[Formats.denitialize(index)] = value
except IndexError:
self.array.append(value)
def __getitem__(self, index):
return self.array[Formats.denitialize(index)]
def __delitem__(self, index):
#if "/" in item
return self.array.pop(Formats.denitialize(index))
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(Formats.denitialize(self.array))
# content count.
def __len__(self):
return len(self.array)
# object id.
def __id__(self):
			if len(self.array) > 10:
				return f"({self.instance()}:[{self.array[0]}, {self.array[1]}, {self.array[2]}, ... {self.array[len(self.array)-3]}, {self.array[len(self.array)-2]}, {self.array[len(self.array)-1]}])"
			else:
				return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Array"
#
# support self assignment.
def assign(self, array):
if isinstance(array, self.__class__):
array = array.array
self.array = array
# assign a new path.
def assign_path(self, path):
			self.file_path = Formats.FilePath(path)
self.__path__ = path
# return raw data.
def raw(self):
return self.array
#
# the dictionary object class.
class Dictionary(object):
def __init__(self,
# the dictionary (param #1).
dictionary={},
# the file path (param #2).
path=False,
# load the file path dictionary on init.
load=False,
# specify default to check & create the dict.
default=None,
):
# docs.
DOCS = {
"module":"Dictionary",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(dictionary, Files.Dictionary):
dictionary = dictionary.dictionary
elif not isinstance(dictionary, dict):
raise Exceptions.InstanceError(f"Parameter [{self.__class__.__name__}.dictionary] must be a [Dictionary] or [dict], not [{dictionary.__class__.__name__}].")
"""elif not isinstance(dictionary, dict):
max_attempts = 2
for attempt in range(max_attempts):
try:
if 1+attempt == 1:
dictionary = dictionary.dict()
break
elif 1+attempt == 2:
dictionary = dictionary.json()
break
except:
if 1+attempt >= max_attempts:
raise Exceptions.InstanceError(f"Parameter [{self.__class__.__name__}.dictionary] must be a [Dictionary] or [dict], not [{dictionary.__class__.__name__}].")
"""
# initialize dictionary recursive.
#for key in list(dictionary.keys()):
# dictionary[key] = Formats.initialize(dictionary[key])
#if isinstance(dictionary, Dictionary):
# dictionary = dictionary.dictionary
# arguments.
self.dictionary = dictionary
			self.path = gfp.clean(path=path) if path not in [False, None] else None
self.default = default
self.file_path = self.fp = None
self.__path__ = None
# checks.
if path not in [False, None]:
self.file_path = self.fp = Formats.FilePath(path)
self.__path__ = self.file_path.path
if self.default != None:
self.dictionary = self.check(default=self.default).dictionary
if self.file_path != None and not self.file_path.exists(): self.save()
if load: self.load()
#
# save to file.
def save(self, dictionary=None, path=None, ensure_ascii=False, indent=4, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if dictionary == None: dictionary = dict(self.dictionary)
if path == None: path = self.file_path.path
return Files.save(path, Formats.denitialize(dictionary), format="json", indent=indent, ensure_ascii=ensure_ascii, sudo=sudo)
# load from file.
def load(self, default=None, sudo=False):
utils.__check_memory_only__(self.file_path.path)
if not os.path.exists(self.file_path.path) and default != None:
self.save(default, sudo=sudo)
self.dictionary = Files.load(self.file_path.path, format="json", sudo=sudo)
return self.dictionary
# load a single line from file.
def load_line(self, line_number, sudo=False):
utils.__check_memory_only__(self.file_path.path)
			data = Files.load(str(self.file_path.path), sudo=sudo)
return data.split('\n')[line_number]
# check the dictionary.
def check(self,
# Option 1:
key=None, # check a certain key, it appends if not present
value=None, # check a certain key, append the value if not present (no format check)
# Option 2:
default=None, # check based on a default dictionary, it appends it not present.
# Optionals:
dictionary=None, # overwrite the start dictionary, leave None to use self.dictionary.
save=False, # saves the output & and sets the output to self.dictionary.
):
# functions.
def __iterate_dict__(dictionary, default):
#print("\niterating new dictionary: [{}] & default [{}]\n".format(dictionary, default))
for identifier, item in default.items():
if isinstance(item, (dict,Dictionary)):
try: dictionary[str(identifier)] = __iterate_dict__(dictionary[str(identifier)], item)
except KeyError: dictionary[str(identifier)] = dict(item)
elif isinstance(item, (list,Array)):
if isinstance(item, (list)): item = list(item)
elif isinstance(item, (Array)): item = item.array
try: dictionary[str(identifier)]
except KeyError: dictionary[str(identifier)] = item
else:
try: dictionary[str(identifier)]
except KeyError: dictionary[str(identifier)] = item
return dictionary
# init.
if dictionary == None: dictionary = dict(self.dictionary)
if not isinstance(dictionary, (dict, Dictionary)):
raise Exceptions.InvalidUsage(f"<Dictionary.check> parameter [dicionary] requires to be a [dict, Dictionary] not [{dictionary.__class__.__name__}].")
# - option 1:
if key == None and value != None: raise ValueError("Define both parameters: [key & value].")
elif value == None and key != None: raise ValueError("Define both parameters: [key & value].")
if key != None and value != None:
try: dictionary[key]
except KeyError: dictionary[key] = value
				return Dictionary(dictionary, path=self.__path__)
# - option 2:
if default == None: default = self.default
if default == None: raise ValueError("Define both parameters: [key & value] or parameter [default].")
dictionary = __iterate_dict__(dictionary, default)
return Dictionary(dictionary, path=self.__path__)
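		# a minimal usage sketch of Dictionary.check (hypothetical values;
		# missing keys are filled from the default, existing keys are kept):
		#   d = Dictionary({"a": 1})
		#   d.check(default={"a": 0, "b": 2}).dictionary   # -> {"a": 1, "b": 2}
		#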
#
# divide dictionary into multiple arrays.
def divide(self, into=2):
return_list = [dict() for idx in range(into)]
idx = 0
for k,v in self.dictionary.items():
return_list[idx][k] = v
if idx < into-1: # indexes start at 0
idx += 1
else:
idx = 0
return Array(return_list)
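		# a minimal usage sketch of Dictionary.divide (hypothetical values):
		#   Dictionary({"a": 1, "b": 2, "c": 3}).divide(into=2)
		#   # -> Array([{"a": 1, "c": 3}, {"b": 2}])
		#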
# append to dict.
def append(self,
# by default it only overwrites if a key does not exist and sums the key if it is a str / int.
#
# a dictionary to append.
dictionary,
# the overwrite formats (add "*" for all).
overwrite=[],
# the sum formats (add "*" for all).
sum=["int", "float"],
# the banned dictionary keys.
banned=[],
# do not use.
dictionary_=None,
):
			if dictionary_ == None: dictionary_ = dict(self.dictionary)
			if dictionary == dictionary_: return Dictionary(dictionary, path=self.__path__)
			if dictionary_ == {}: return Dictionary(dictionary, path=self.__path__)
for key, value in dictionary.items():
if key not in banned:
if isinstance(value, (dict, Dictionary)):
found = True
try: dictionary_[key]
except: found = False
if found:
							dictionary_[key] = self.append(value, overwrite=overwrite, sum=sum, banned=banned, dictionary_=dictionary_[key]).dictionary
else:
dictionary_[key] = value
else:
format = value.__class__.__name__
if "*" in sum or format in sum:
if format in ["str", "int", "float", "list", "Array"]:
try: dictionary_[key] += value
except KeyError: dictionary_[key] = value
else: # cant be summed.
dictionary_[key] = value
elif "*" in overwrite or format in overwrite:
dictionary_[key] = value
else:
try: dictionary_[key]
except KeyError: dictionary_[key] = value
return Dictionary(dictionary_, path=self.__path__)
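		# a minimal usage sketch of Dictionary.append (hypothetical values;
		# int / float values are summed by default, new keys are added):
		#   d = Dictionary({"a": 1})
		#   d.append({"a": 2, "b": 3}).dictionary   # -> {"a": 3, "b": 3}
		#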
# edit.
def edit(self,
# the dictionary (leave None to use self.dictionary).
dictionary=None,
# the edits (dict).
# adds / replaces the current (except the exceptions).
edits={},
# the edits key Exceptions.
exceptions=[],
# the edits value Exceptions.
value_exceptions=[None],
# the instances to overwrite (list[str]) (missing stands for the keys that are missing in the dictionary).
overwite=["missing"],
# the instances to combine (list[str]) (dict is always recursive).
combine=["int", "float", "Integer", "list", "Array"],
# the log level.
log_level=-1,
):
def edit_dict(dictionary={}, edits={}):
c = 0
for key, value in edits.items():
found = True
try: dictionary[key]
except KeyError: found = False
# recursive.
if key not in exceptions and value not in value_exceptions and isinstance(value, (dict, Dictionary)):
if isinstance(value, (Dictionary)):
value = value.dictionary
if found:
dictionary[key], lc = edit_dict(dictionary=dictionary[key], edits=value)
c += lc
else:
if log_level >= 0:
print(f"Editing {alias} config {key}: {value}.")
dictionary[key] = value
c += 1
elif key not in exceptions and value not in value_exceptions and not found and "missing" in overwrite:
if log_level >= 0:
print(f"Editing {alias} config {key}: {value}.")
dictionary[key] = value
c += 1
elif key not in exceptions and value not in value_exceptions and found and value.__class__.__name__ in combine:
if log_level >= 0:
print(f"Editing {alias} config {key}: {value}.")
dictionary[key] = dictionary[key] + value
c += 1
return dictionary, c
# check specific.
if dictionary == None: dictionary = dict(self.dictionary)
dictionary, c = edit_dict(dictionary=dictionary, edits=edits)
return Dictionary(dictionary, path=self.__path__)
# unpack attribute(s).
def unpack(self,
# the key / keys / defaults parameter (#1).
# str instance:
# unpack the str key
# list instance:
# unpack all keys in the list.
# dict instance:
# unpack all keys from the dict & when not present return the key's value as default.
keys,
):
defaults_ = {}
if isinstance(keys, (dict, Files.Dictionary)):
if isinstance(keys, dict):
defaults_ = dict(keys)
keys = list(keys.keys())
				else:
					defaults_ = dict(keys.raw())
					keys = list(keys.keys())
elif isinstance(keys, str):
keys = [keys]
unpacked = []
for key in keys:
value, set = None, True
try:
value = self.dictionary[key]
except KeyError:
try:
value = defaults_[key]
except KeyError:
set = False
if not set:
raise Exceptions.UnpackError(f"Dictionary does not contain attribute [{key}].")
unpacked.append(value)
if len(unpacked) == 1:
return unpacked[0]
else:
return unpacked
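		# a minimal usage sketch of Dictionary.unpack (hypothetical values):
		#   d = Dictionary({"a": 1, "b": 2})
		#   a, b = d.unpack(["a", "b"])   # -> 1, 2
		#   c = d.unpack({"c": 3})        # missing key, falls back to default 3.
		#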
# remove.
def remove(self, keys=[], values=[], update=True, save=False, dictionary=None):
if dictionary == None:
dictionary = dict(self.dictionary)
for i in list(keys):
try: del dictionary[i]
				except: pass
if values != []:
new = {}
for k,v in dictionary.items():
if v not in values: new[k] = v
dictionary = new
return Dictionary(dictionary, path=self.__path__)
# count keys or values.
def count(self, item=None, values=False):
if item == None:
return Formats.Integer(len(self.dictionary))
elif isinstance(item, (str, Formats.String)):
c, array = 0, []
if values:
array = self.values()
else:
array = self.keys()
for i in array:
if i == item: c += 1
return Formats.Integer(c)
elif isinstance(item, (list, Files.Array)):
c, array = 0, []
if values:
array = self.values()
else:
array = self.keys()
for x in array:
for y in item:
if x == y: c += 1
return Formats.Integer(c)
else: raise Exceptions.InstanceError(f"Parameter [item] must either be [None], [String] or [Array], not [{item.__class__}].")
# insert new keys & values.
def insert(self, dictionary={}, __dictionary__=None):
if __dictionary__ == None:
__dictionary__ = dict(self.dictionary)
for key,value in dictionary.items():
if isinstance(value, (dict, Dictionary)):
if key in __dictionary__:
__dictionary__[key] = self.insert(value, __dictionary__=__dictionary__[key])
else:
__dictionary__[key] = value
elif isinstance(value, (list, Array)):
if key in __dictionary__:
for i in value:
if i not in __dictionary__[key]: __dictionary__[key].append(i)
else:
__dictionary__[key] = value
else:
__dictionary__[key] = value
return Dictionary(__dictionary__, path=self.__path__)
# iterate keys and values.
def iterate(self, sorted=False, reversed=False, dictionary=None):
if dictionary == None: dictionary = self.dictionary
return self.items(reversed=reversed, sorted=sorted, dictionary=dictionary)
def items(self, sorted=False, reversed=False, dictionary=None):
if dictionary == None: dictionary = self.dictionary
if sorted: dictionary = self.sort(dictionary=dictionary)
if reversed: return self.reversed(dictionary=dictionary).items()
else: return dictionary.items()
# iterate keys.
def keys(self, dictionary=None):
if dictionary == None: dictionary = self.dictionary
return Array(list(dictionary.keys()))
# iterate values.
def values(self, dictionary=None):
if dictionary == None: dictionary = dict(self.dictionary)
values = []
for key, value in dictionary.items():
values.append(value)
return Array(values)
# reverse dictionary.
def reversed(self, dictionary=None):
if dictionary == None: dictionary = dict(self.dictionary)
keys = list(dictionary.keys())
reversed_keys = []
c = len(keys)-1
for _ in range(len(keys)):
reversed_keys.append(keys[c])
c -= 1
reversed_dict = {}
for key in reversed_keys:
reversed_dict[key] = dictionary[key]
return Dictionary(reversed_dict, path=self.__path__)
# sort ascending dictionary.
def sort(self,
# reverse ascending to descending.
reversed=False,
# sort the keys or sort the values.
sort="keys",
# system parameters.
dictionary=None,
):
if dictionary == None: dictionary = dict(self.dictionary)
if sort == "values":
new = {}
for key in sorted(dictionary, key=dictionary.get, reverse=reversed):
new[key] = dictionary[key]
elif sort == "keys":
new = {}
for key in sorted(dictionary, reverse=reversed):
new[key] = dictionary[key]
else: raise ValueError(f"Selected an invalid sort mode [{sort}].")
return Dictionary(new, path=self.__path__)
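		# a minimal usage sketch of Dictionary.sort (hypothetical values):
		#   Dictionary({"b": 2, "a": 1}).sort()                # -> {"a": 1, "b": 2}
		#   Dictionary({"a": 2, "b": 1}).sort(sort="values")   # -> {"b": 1, "a": 2}
		#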
# dump json string.
def json(self, indent=4, dictionary=None, ):
if dictionary == None: dictionary = self.dictionary
return json.dumps(self.serialize(json=False, dictionary=dictionary), indent=indent)
# serialize dict.
def serialize(self, json=False, dictionary=None):
if dictionary == None: dictionary = dict(self.dictionary)
if isinstance(dictionary, Files.Dictionary):
dictionary = dictionary.dictionary
items = self.items(dictionary=dictionary)
dictionary = {}
for key, value in items:
				if isinstance(value, (dict, Files.Dictionary)):
					value = self.serialize(json=json, dictionary=value)
				elif isinstance(value, (list, Files.Array)):
					value = Files.Array(value).serialize(json=json)
				elif isinstance(value, (str, bool)) or value == None:
					if value in [True, "True", "true"]:
						if json:
							value = "true"
						else:
							value = True
					elif value in [False, "False", "false"]:
						if json:
							value = "false"
						else:
							value = False
					elif value in [None, "None", "none"]:
						if json:
							value = "null"
						else:
							value = None
				else:
					value = str(value)
dictionary[key] = value
return dictionary
# copy.
def copy(self):
return Files.Dictionary(self.dictionary, path=self.__path__)
#
# system functions.
def __reverse_keys_and_values__(self, dictionary=None):
if dictionary == None: dictionary = self.dictionary
new = {}
for key,value in dictionary.items():
new[value] = key
return new
def __serialize_string__(self, string, banned_characters=["@"]):
c, s, l = 0, "", False
for char in string:
if char not in banned_characters:
# regular letter.
if char.lower() == char:
s += char.lower()
l = False
# capital letter.
else:
if c == 0:
s += char.lower()
else:
if l:
s += char.lower()
else:
s += "_"+char.lower()
l = True
c += 1
return s
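		# a minimal usage sketch of __serialize_string__ (hypothetical value;
		# camelCase is converted to snake_case):
		#   self.__serialize_string__("myKeyName")   # -> "my_key_name"
		#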
def __serialize_dictionary__(self, response):
_response_ = {}
for key,value in response.items():
s_key = self.__serialize_string__(key)
if isinstance(value, dict):
_response_[s_key] = self.__serialize_dictionary__(value)
				elif isinstance(value, str):
					try:
						_response_[s_key] = int(value)
					except ValueError:
						if value in ["false", "False", "FALSE", "DISABLED"]:
							_response_[s_key] = False
						elif value in ["true", "True", "TRUE", "ENABLED"]:
							_response_[s_key] = True
						else:
							_response_[s_key] = value
else:
_response_[s_key] = value
return _response_
# support "+", -, =-, =+" .
def __add__(self, dictionary):
if isinstance(dictionary, dict):
				pass
elif isinstance(dictionary, self.__class__):
dictionary = dictionary.dictionary
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {dictionary.__class__}.")
return self.append(dictionary=dictionary, overwrite=["*"], sum=[])
		def __iadd__(self, dictionary):
			if isinstance(dictionary, dict):
				pass
			elif isinstance(dictionary, self.__class__):
				dictionary = dictionary.dictionary
			elif not isinstance(dictionary, self.__class__):
				raise Exceptions.FormatError(f"Can not iadd object {self.__class__} & {dictionary.__class__}.")
			self.dictionary = self.append(dictionary=dictionary, overwrite=["*"], sum=[]).dictionary
			return self
def __sub__(self, dictionary):
if isinstance(dictionary, dict):
keys = list(dictionary.keys())
elif isinstance(dictionary, list):
keys = dictionary
elif isinstance(dictionary, Files.Array):
keys = dictionary.array
elif isinstance(dictionary, self.__class__):
keys = dictionary.keys()
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {dictionary.__class__}.")
return self.remove(keys=keys)
		def __isub__(self, dictionary):
			if isinstance(dictionary, dict):
				keys = list(dictionary.keys())
			elif isinstance(dictionary, list):
				keys = dictionary
			elif isinstance(dictionary, Files.Array):
				keys = dictionary.array
			elif isinstance(dictionary, self.__class__):
				keys = dictionary.keys()
			elif not isinstance(dictionary, self.__class__):
				raise Exceptions.FormatError(f"Can not isub object {self.__class__} & {dictionary.__class__}.")
			self.dictionary = self.remove(keys=keys, update=True).dictionary
			return self
# support +.
def __concat__(self, string):
if isinstance(dictionary, dict):
a=1
elif isinstance(dictionary, self.__class__):
dictionary = dictionary.dictionary
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not add object {self.__class__} & {dictionary.__class__}.")
return self.append(dictionary=dictionary, sum=[], overwrite=["*"])
# support default iteration.
def __iter__(self):
return iter(self.dictionary)
# support '>=' & '>' operator.
def __gt__(self, dictionary):
if isinstance(dictionary, dict):
return len(self.dictionary) > len(dictionary)
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {dictionary.__class__}.")
return len(self.dictionary) > len(dictionary.dictionary)
def __ge__(self, dictionary):
if isinstance(dictionary, dict):
return len(self.dictionary) >= len(dictionary)
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {dictionary.__class__}.")
return len(self.dictionary) >= len(dictionary.dictionary)
# support '<=' & '<' operator.
def __lt__(self, dictionary):
if isinstance(dictionary, dict):
return len(self.dictionary) < len(dictionary)
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {dictionary.__class__}.")
return len(self.dictionary) < len(dictionary.dictionary)
def __le__(self, dictionary):
if isinstance(dictionary, dict):
return len(self.dictionary) <= len(dictionary)
elif not isinstance(dictionary, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {dictionary.__class__}.")
return len(self.dictionary) <= len(dictionary.dictionary)
# support '==' & '!=' operator.
def __eq__(self, dictionary):
if isinstance(dictionary, dict):
return str(self.sort()) == str(Dictionary(dictionary).sort())
elif isinstance(dictionary, Dictionary):
return str(self.sort()) == str(dictionary.sort())
else:
try:
return str(self.sort()) == str(dictionary.sort())
except:
return False
def __ne__(self, dictionary):
if isinstance(dictionary, dict):
return str(self.sort()) != str(Dictionary(dictionary).sort())
elif isinstance(dictionary, Dictionary):
return str(self.sort()) != str(dictionary.sort())
else:
try:
return str(self.sort()) != str(dictionary.sort())
except:
return False
# support 'in' operator.
def __contains__(self, key):
keys = list(self.dictionary.keys())
if isinstance(key, (list, Files.Array)):
for i in key:
if i in keys:
return True
return False
else:
return key in keys
# support item assignment.
def __setitem__(self, key, value):
if isinstance(key, (int, Integer)):
key = self.keys()[key]
self.dictionary[Formats.denitialize(key)] = value
def __getitem__(self, key):
if isinstance(key, slice):
raise ValueError("Coming soon.")
elif isinstance(key, (int, Integer)):
key = self.keys()[key]
return self.dictionary[Formats.denitialize(key)]
#
def __delitem__(self, key):
if isinstance(key, (int, Integer)):
key = self.keys()[key]
del self.dictionary[Formats.denitialize(key)]
def __splitkey__(self, key):
if key in self:
return [key]
return gfp.clean(path=key, remove_last_slash=True, remove_double_slash=True, remove_first_slash=True).split("/")
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(Formats.denitialize(self.dictionary))
# content count.
def __len__(self):
return len(self.dictionary)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self, serialize=False):
return "Dictionary"
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, dictionary):
if isinstance(dictionary, self.__class__):
dictionary = dictionary.dictionary
self.dictionary = dictionary
# assign a new path.
def assign_path(self, path):
			self.file_path = Formats.FilePath(path)
self.__path__ = path
# return raw data.
def raw(self):
return self.dictionary
#
#
# the directory object class.
class Directory(object):
def __init__(self,
# the dirs file path (param #1).
path=None,
# the hierarchy to check / create.
hierarchy={},
# load the content.
#load=False,
# load recursive.
#recursive=False,
):
# docs.
DOCS = {
"module":"Directory",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(path, Files.Directory):
path = path.fp.path
# init.
			if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else:
if path[len(path)-1] != "/": path += "/"
self.file_path = self.fp = Formats.FilePath(path)
self.hierarchy = hierarchy
if self.hierarchy != {}:
self.check(hierarchy=hierarchy)
# load.
#self.content = {}
#if load:
# self.content = {}
# can be filled with executing [self.x = x()]:
# executable functions.
# actions.
def create(self, file_paths=[], path=None, sudo=False, owner=None, group=None, permission=None):
# - init:
if path == None: path = self.file_path.path
# - create dir:
if not os.path.exists(path):
if sudo: os.system('sudo mkdir -p '+path)
else: os.system('mkdir -p '+path)
# - copy files:
commands = []
for l_path in file_paths:
if sudo:
command = None
if Files.directory(l_path): command = 'sudo cp -r {0} {1} '.format(l_path, path+Formats.FilePath(l_path).name())
else: command = 'sudo cp {0} {1}'.format(l_path, path+Formats.FilePath(l_path).name())
commands.append(command)
else:
command = None
if Files.directory(l_path): command = 'cp -r {0} {1} '.format(l_path, path+Formats.FilePath(l_path).name())
else: command = 'cp {0} {1}'.format(l_path, path+Formats.FilePath(l_path).name())
commands.append(command)
if len(commands) > 0:
if sudo:
					script = Files.ShellScript(
						data=Files.Array(array=commands, path=False).string(joiner=" \n "),
						path='/tmp/shell_script-'+str(random.randrange(23984792,23427687323))+'.sh'
)
script.save()
script.setPermission(755)
script.execute(sudo=sudo)
script.delete()
else: os.system(Files.Array(array=commands,path=False).string(joiner=" \n "))
if owner != None or group!=None: self.file_path.ownership.set(owner=owner, group=group, sudo=sudo)
if permission != None: self.file_path.permission.set(permission=permission, sudo=sudo)
def delete(self, forced=False):
if forced: os.system('rm -fr {}'.format(self.file_path.path))
else: os.system('rm -r {}'.format(self.file_path.path))
def check(self,
# Required:
# - dictionary format:
hierarchy=None,
# Optionals:
# - string format:
owner=None,
group=None,
# - boolean format:
sudo=False,
# - integer format:
permission=None, # (octal format)
recursive=False, # for permission/ownership
silent=False,
):
format = {
"my_directory_name":{
# Required:
"path":"my_directory_name/",
# Optionals:
"permission":755,
"owner":"daanvandenbergh",
"group":None,
"sudo":False,
"directory":True,
"recursive":False, # for permission & ownership (directories).
"default_data":None, # makes it a file
"default":None, # makes it a dictionary
}
}
def checkPermissionOwnership(file_path, dictionary, silent=False, recursive=False):
if dictionary["permission"] != None and dictionary["permission"] != file_path.permission.permission:
#print("editing file [{}] permission [{}] to [{}]...".format(file_path.path, file_path.permission.permission, dictionary["permission"]))
file_path.permission.set(permission=dictionary["permission"], sudo=dictionary["sudo"], recursive=recursive, silent=silent)
if dictionary["owner"] != None and dictionary["owner"] != file_path.ownership.owner:
#print("editing file [{}] owner [{}] to [{}]...".format(file_path.path, file_path.ownership.owner, dictionary["owner"]))
file_path.ownership.set(owner=dictionary["owner"], group=file_path.ownership.group, sudo=dictionary["sudo"], recursive=recursive, silent=silent)
#print("file [{}] current group [{}] wanted group [{}]".format(file_path.path, file_path.ownership.group, dictionary["group"]))
if dictionary["group"] != None and dictionary["group"] != file_path.ownership.group:
#print("editing file [{}] group [{}] to [{}]...".format(file_path.path, file_path.ownership.group, dictionary["group"]))
file_path.ownership.set(owner=file_path.ownership.owner, group=dictionary["group"], sudo=dictionary["sudo"], recursive=recursive, silent=silent)
if hierarchy == None: hierarchy = self.hierarchy
#if owner == None: owner = self.owner
#if group == None: group = self.group
#if permission == None: permission = self.permission
file_path = Formats.FilePath(self.file_path.path)
if file_path.exists(sudo=sudo) == False:
file_path.create(
directory=True,
permission=permission,
group=group,
owner=owner,
sudo=sudo)
elif group != None or owner != None or permission != None:
file_path.permission.permission = file_path.permission.get()
_owner_,_group_ = file_path.ownership.get()
file_path.ownership.group = _group_
file_path.ownership.owner = _owner_
checkPermissionOwnership(file_path, {"sudo":sudo, "owner":owner, "group":group, "permission":permission}, recursive=recursive, silent=silent)
if hierarchy == None: raise ValueError("Define dictionary parameter: hierarchy")
for identifier, dictionary in hierarchy.items():
# - check:
try: dictionary["path"] = self.file_path.path + dictionary["path"]
except: raise ValueError("Invalid hierarchy item [{} : {}]. Specify the [path].".format(identifier, "?"))
try: dictionary["permission"]
except KeyError: dictionary["permission"] = None
try: dictionary["owner"]
except KeyError: dictionary["owner"] = None
try: dictionary["group"]
except KeyError: dictionary["group"] = None
try: dictionary["directory"]
except KeyError: dictionary["directory"] = False
try: dictionary["sudo"]
except KeyError: dictionary["sudo"] = False
try: dictionary["default_data"]
except KeyError: dictionary["default_data"] = None
try: dictionary["default"]
except KeyError: dictionary["default"] = None
try: dictionary["recursive"]
except KeyError: dictionary["recursive"] = False
# - directory:
if dictionary["directory"]:
file_path = Formats.FilePath(dictionary["path"])
if file_path.exists(sudo=dictionary["sudo"]) == False:
file_path.create(
directory=True,
permission=dictionary["permission"],
group=dictionary["group"],
owner=dictionary["owner"],
sudo=dictionary["sudo"],)
else:
file_path.permission.permission = file_path.permission.get()
_owner_,_group_ = file_path.ownership.get()
file_path.ownership.group = _group_
file_path.ownership.owner = _owner_
#if 'back_up_requests/requests' in file_path.path:
# print("file: {}, owner: {}, group: {}, permission: {}".format(file_path.path, file_path.ownership.owner, file_path.ownership.group, file_path.permission.permission))
checkPermissionOwnership(file_path, dictionary, silent=silent, recursive=dictionary["recursive"])
# - file:
elif dictionary["default_data"] != None:
file = Files.File(path=dictionary["path"])
if file.file_path.exists(sudo=dictionary["sudo"]) == False:
file.file_path.create(
data=dictionary["default_data"],
permission=dictionary["permission"],
group=dictionary["group"],
owner=dictionary["owner"],
sudo=dictionary["sudo"])
				else:
					file.file_path.permission.permission = file.file_path.permission.get()
					_owner_,_group_ = file.file_path.ownership.get()
					file.file_path.ownership.group = _group_
					file.file_path.ownership.owner = _owner_
checkPermissionOwnership(file.file_path, dictionary, silent=silent)
# - dictionary:
elif dictionary["default"] != None:
file = Files.Dictionary(path=dictionary["path"])
if file.file_path.exists(sudo=dictionary["sudo"]) == False:
file.save(dictionary["default"])
file.file_path.permission.check(
permission=dictionary["permission"],
sudo=dictionary["sudo"])
file.file_path.ownership.check(
group=dictionary["group"],
owner=dictionary["owner"],
sudo=dictionary["sudo"])
else:
				file.file_path.permission.permission = file.file_path.permission.get()
				_owner_,_group_ = file.file_path.ownership.get()
file.file_path.ownership.group = _group_
file.file_path.ownership.owner = _owner_
checkPermissionOwnership(file.file_path, dictionary, silent=silent)
				file.check(default=dictionary["default"], save=True)
else:
raise ValueError("Invalid hierarchy item [{} : {}]. Either [directory] must be enabled, or [default_data / default] must be specified.".format(identifier, dictionary["path"]))
#
# load & save sub paths.
def load(self, path=None, format=str, default=None, sudo=False):
return Files.load(path=self.fullpath(path), format=format, sudo=sudo)
def save(self, path=None, data=None, format=str, sudo=False):
return Files.save(path=self.fullpath(path), data=data, format=format, sudo=sudo)
# returnable functions.
def paths(self,
# get recursively (bool).
recursive=False,
# get files only (bool).
files_only=False,
		# get dirs only (bool).
dirs_only=False,
# also get empty dirs (bool).
empty_dirs=True,
# the banned full paths (list).
banned=[],
# the banned names (list).
banned_names=[".DS_Store"],
		# the banned base names (list).
banned_basenames=["__pycache__"],
# the allowed extensions (list).
extensions=["*"],
# the path (leave None to use self.path) (str, FilePath).
path=None,
):
		if dirs_only and files_only: raise ValueError("Both parameters dirs_only & files_only are True.")
if path == None: path = self.file_path.path
path = str(path)
if not Files.exists(path): return []
if isinstance(extensions, str): extensions = [extensions]
if len(banned) > 0:
l_banned = []
for i in banned:
l_banned.append(gfp.clean(f"{path}/{i}"))
banned = l_banned
paths = []
if recursive:
# does only work with recursive.
for root, dirs, files in os.walk(path):
if not dirs_only:
for name in files:
if name not in banned_names and ("*" in extensions or gfp.extension(name=name) in extensions ):
l_path = gfp.clean(path=f"{root}/{name}")
l_banned = False
for i in banned_basenames:
if f"/{i}/" in l_path: l_banned = True ; break
if l_path not in banned and not l_banned and l_path+"/" not in banned:
paths.append(l_path)
if not files_only:
for name in dirs:
if name not in banned_names and (dirs_only or "*" in extensions or "dir" in extensions ):
l_path = gfp.clean(path=f"{root}/{name}/")
l_banned = False
for i in banned_basenames:
if f"/{i}/" in l_path: l_banned = True ; break
if l_path not in banned and not l_banned and l_path+"/" not in banned:
paths.append(l_path)
						# note: os.walk already descends into subdirectories, so no extra
						# self.paths() recursion is needed here (it would duplicate every
						# nested path in the result).
else:
for name in os.listdir(path):
l_path = gfp.clean(path=f"{path}/{name}")
if not dirs_only and not Files.directory(l_path):
if name not in banned_names and ("*" in extensions or gfp.extension(name=name) in extensions ):
l_banned = False
for i in banned_basenames:
if f"/{i}/" in l_path: l_banned = True ; break
if l_path not in banned and not l_banned and l_path+"/" not in banned:
paths.append(l_path)
if not files_only and Files.directory(l_path):
l_path += "/"
if name not in banned_names and (dirs_only or "*" in extensions or "dir" in extensions ):
l_banned = False
for i in banned_basenames:
if f"/{i}/" in l_path: l_banned = True ; break
if l_path not in banned and not l_banned and l_path+"/" not in banned:
paths.append(l_path)
return paths
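	# a minimal usage sketch (assuming such a directory exists):
	#   directory = Files.Directory("/tmp/example")
	#   python_files = directory.paths(recursive=True, files_only=True, extensions=["py"])
	#   subdirs = directory.paths(dirs_only=True)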
def names(self,
# get recursively (bool).
recursive=False,
# get files only (bool).
files_only=False,
		# get dirs only (bool).
dirs_only=False,
# also get empty dirs (bool).
empty_dirs=True,
# remove the extension names (bool).
remove_extensions=False,
# the banned full paths (list).
banned=[],
# the banned names (list).
banned_names=[".DS_Store"],
# the banend base names (list).
banned_basenames=["__pycache__"],
# the allowed extensions (list).
extensions=["*"],
# the path (leave None to use self.path) (str, FilePath).
path=None,
):
names = []
for _path_ in self.paths(dirs_only=dirs_only, files_only=files_only, empty_dirs=empty_dirs, recursive=recursive, path=path, banned=banned, banned_names=banned_names, extensions=extensions):
if remove_extensions:
name = gfp.name(path=_path_)
names.append(name[:-len(gfp.extension(name=name))])
else:
names.append(gfp.name(path=_path_))
return names
def oldest(self):
files = []
for i in os.listdir(self.file_path.path):
if i not in [".DS_Store"]:
path = f'{self.file_path.path}/{i}'.replace("//",'/')
files.append(path)
if len(files) == 0: return False
return min(files, key=os.path.getctime) # oldest is min (this is not a code error)
def newest(self):
files = []
for i in os.listdir(self.file_path.path):
if i not in [".DS_Store"]:
path = f'{self.file_path.path}/{i}'.replace("//",'/')
files.append(path)
if len(files) == 0: return False
return max(files, key=os.path.getctime) # newest is max (this is not a code error)
def random(self):
files = []
for i in os.listdir(self.file_path.path):
if i not in [".DS_Store"]:
path = f'{self.file_path.path}/{i}'.replace("//",'/')
files.append(path)
if len(files) == 0: return False
return files[random.randrange(0, len(files))]
def generate(self, length=24, type="/"):
path, paths = None, self.paths()
for x in range(1000):
path = self.join(utils.generate.shell_string(length=length), type)
if path not in paths:
break
if path == None: __error__("Failed to generate a new random path inside directory [{}].".format(self.file_path.path))
return path
def structured_join(self, name, type="", structure="alphabetical", create_base=False, sudo=False, owner=None, group=None, permission=None):
if type not in ["/", ""]:
type = "."+type
if structure == "alphabetical":
alphabetical = None
try: alphabetical = name[0].upper()
except: alphabetical = "SPECIAL"
if str(alphabetical) not in ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Z","0","1","2","3","4","5","6","7","8","9"]: aplhabetical = "SPECIAL"
base = self.file_path.path + "/" + alphabetical + "/"
if create_base and os.path.exists(base) == False:
self.create(path=base, sudo=sudo, owner=owner, group=group, permission=permission)
alph_dir = base + name + type
return alph_dir
else: raise ValueError("Invalid usage, parameter structure [{}], valid options: {}".format(structure, ["alphabetical"]))
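	# a minimal sketch of the alphabetical structure: structured_join("report", type="txt")
	# resolves to "<dir>/R/report.txt"; names that do not start with A-Z/0-9 land in "SPECIAL".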
def contains(self, name=None, type="/", recursive=False):
return self.join(name, type) in self.paths(recursive=recursive)
#
def subpath(self, fullpath):
return self.fp.clean(path=fullpath.replace(self.fp.path, ""), remove_double_slash=True)
def fullpath(self, subpath):
return self.fp.clean(path=f"{self.fp.path}/{subpath}", remove_double_slash=True)
# set the icon.
def set_icon(self,
# the path to the .png / .jpg icon.
icon=None,
# the directory path (leave None to use self.fp.path).
path=None,
):
if icon == None: raise Exceptions.InvalidUsage("Define parameter: icon.")
if path == None: path = self.fp.path
if OS in ["osx", "macos"]:
utils.__execute_script__(f"""
#!/bin/bash
# settings.
icon="{icon}"
dest="{path}"
# check inputs
if [ ! -f $icon ]; then
echo "ERROR: File $1 does not exists"
exit 1
elif [[ ! $icon =~ .*\.(png|PNG|jpg|JPG) ]]; then
echo "ERROR: Icon must be a .png|.jpg file"
exit 1
elif [ -f $dest ]; then
folder=false
elif [ -d $dest ]; then
folder=true
else
echo 'ERROR: File|Folder destination does not exists'
exit 1
fi
# create icns icon
sips -i $icon > /dev/null
DeRez -only icns $icon > /tmp/tmpicns.rsrc
# set icon
if [ "$folder" = true ]; then
Rez -append /tmp/tmpicns.rsrc -o $dest$'/Icon\r'
SetFile -a C $dest
SetFile -a V $dest$'/Icon\r'
else
Rez -append /tmp/tmpicns.rsrc -o $dest
SetFile -a C $dest
fi
# clean up
rm /tmp/tmpicns.rsrc
exit 0
""")
else:
raise OSError("Unsupported operating system.")
# index the content.
def index(self,
# the wanted options.
metrics=[],
options=["size", "mtime", "content", "name", "basename", "extension", "mount", "directory"],
# optional path (leave None to use self.path).
path=None,
):
def process(path):
info = {}
if "mtime" in metrics:
info["mtime"] = gfp.mtime(path=path, format="seconds")
if "size" in metrics:
info["size"] = gfp.size(path=path, format=int)
directory = None
if "directory" in metcics:
directory = info["directory"] = Files.directory(str(path))
if "content" in metrics:
if directory == None: raise Exceptions.InvalidUsage("Metric [directory] is required when obtaining metric [content].")
if not directory:
info["content"] = Files.load(path)
else:
info["content"] = None
if "mount" in metrics:
info["mount"] = os.path.ismount(str(path))
if "name" in metrics:
info["name"] = gfp.name(path=path)
if "extension" in metrics:
info["name"] = gfp.extension(path=path)
if "basename" in metrics:
info["basename"] = gfp.basename(path=path)
return info
#
if path == None: path = self.path
if metrics == []:
raise Exceptions.InvalidUsage(f'No metrics are specified, metric options: [{Array(options).string(joiner=" ")}].')
for i in metrics:
if i not in options:
raise Exceptions.InvalidUsage(f'Metric [{i}] is not a valid metric option, options: [{Array(options).string(joiner=" ")}].')
indexed, dir, ids = Dictionary(path=False, dictionary={}), Files.Directory(path=path), []
for _path_ in dir.paths(recursive=True, files_only=True, banned=[gfp.clean(f"{path}/Icon\r")], banned_names=[".DS_Store", "__pycache__"]):
if _path_ not in ids and "/__pycache__/" not in _path_ and "/.DS_Store" not in _path_:
indexed[_path_] = process(_path_)
ids.append(_path_)
for _path_ in dir.paths(recursive=True, dirs_only=True, banned=[gfp.clean(f"{path}/Icon\r")], banned_names=[".DS_Store", "__pycache__"]):
if _path_ not in ids and "/__pycache__/" not in _path_ and "/.DS_Store" not in _path_:
indexed[_path_] = process(_path_)
ids.append(_path_)
return indexed.sort(alphabetical=True)
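	# a minimal usage sketch (metric names must come from the options listed above):
	#   info = directory.index(metrics=["size", "mtime", "directory"])
	#   for subpath, metrics in info.items(): ...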
# open for desktop.
def open(self, path=None, sudo=False):
if path == None: path = self.fp.path
if sudo: sudo = "sudo "
else: sudo = ""
if OS in ["macos"]:
os.system(f"{sudo}open {path}")
elif OS in ["linux"]:
os.system(f"{sudo}nautulis {path}")
else: raise Exceptions.InvalidOperatingSystem(f"Unsupported operating system [{OS}].")
# return references of each file that includes one of the matches.
def find(self, matches:list, path=None, recursive=False, log_level=0):
if path == None: path = self.path
gfp = Formats.FilePath("")
c, references = 0, {}
for string in matches:
if not os.path.exists(path):
raise ValueError(f"Path {path} does not exist.")
elif not Files.directory(path):
raise ValueError(f"Path {path} is not a directory.")
for i_path in self.paths(recursive=recursive, files_only=True, banned_names=[".DS_Store", ".git"], path=path):
data = None
try:
data = Files.load(i_path)
except:
try:
data = f"{Files.load(i_path, format=bytes)}"
except: data = None
if data != None and string in data:
if log_level >= 0:
print("")
print(f"{i_path}:")
lines, linecount = data.split("\n"), 0
for _ in lines:
if string in lines[linecount]:
try: before = lines[linecount-1]
except: before = None
try: after = lines[linecount+1]
except: after = None
if log_level >= 0:
if before != None: print(" * "+before)
print(" * "+lines[linecount])
if after != None: print(" * "+after)
references[i_path] = lines[linecount]
linecount += 1
c += 1
if log_level >= 0 and c > 0: print("")
return references
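	# a minimal usage sketch: returns {file path: first matching line}; pass a negative
	# log_level to suppress the printed context lines.
	#   refs = directory.find(["TODO"], recursive=True, log_level=-1)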
# replace str within all files.
def replace(self, replacements:list, path=None, recursive=False, log_level=0):
if path == None: path = self.path
gfp = Formats.FilePath("")
c, updates = 0, []
for from_, to in replacements:
if not os.path.exists(path):
raise ValueError(f"Path {path} does not exist.")
elif not Files.directory(path):
raise ValueError(f"Path {path} is not a directory.")
			for i_path in self.paths(recursive=recursive, banned_names=[".DS_Store", ".git"], path=path):
				if not Files.directory(i_path):
					try:
						data = Files.load(i_path)
					except UnicodeDecodeError: continue # skip binary files instead of reusing stale data
					if from_ in data:
						if log_level >= 0:
							loader = console.Loader(f"Updating file {i_path}.")
						Files.save(i_path, data.replace(from_, to))
						if log_level >= 0:
							loader.stop()
						updates.append(i_path)
						c += 1
return updates
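	# a minimal usage sketch: replacements is a list of (old, new) pairs.
	#   updated = directory.replace([("foo", "bar")], recursive=True, log_level=-1)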
# filepath shortcuts.
def join(self, name=None, type=""):
return self.file_path.join(name, type)
def name(self):
return self.file_path.name()
def base(self):
return self.file_path.base()
def basename(self):
return self.file_path.basename()
# support default iteration.
def __iter__(self):
return iter(self.paths())
# support '>=' & '>' operator.
def __gt__(self, directory):
if not isinstance(directory, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {directory.__class__}.")
return len(self.paths()) > len(directory.paths())
def __ge__(self, directory):
if not isinstance(directory, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {directory.__class__}.")
return len(self.paths()) >= len(directory.paths())
# support '<=' & '<' operator.
def __lt__(self, directory):
if not isinstance(directory, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {directory.__class__}.")
return len(self.paths()) < len(directory.paths())
def __le__(self, directory):
if not isinstance(directory, self.__class__):
raise Exceptions.FormatError(f"Can not compare object {self.__class__} & {directory.__class__}.")
return len(self.paths()) <= len(directory.paths())
# support '==' & '!=' operator.
def __eq__(self, directory):
if not isinstance(directory, self.__class__):
return False
return len(self.paths()) == len(directory.paths())
def __ne__(self, directory):
if not isinstance(directory, self.__class__):
return True
return len(self.paths()) != len(directory.paths())
# support 'in' operator.
def __contains__(self, path):
paths = self.paths()
if isinstance(path, (list, Files.Array)):
for i in path:
if i in paths:
return True
return False
else:
return path in paths
# representation.
def __repr__(self):
return str(self)
#
# system functions.
def __str__(self):
return str(self.fp.path)
# content count.
def __len__(self):
return len(self.paths())
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Directory"
#
@property
def __name__(self):
return self.instance()
# return raw data.
def raw(self):
return self.fp.path
#
# the image object class.
class Image(object):
def __init__(self, path=None, image=None, load=False):
# docs.
DOCS = {
"module":"Image",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
self.image = image
if load: self.load()
#
def load(self, path=None):
if path == None: path = self.file_path.path
		self.image = _Image_.open(path) # use the PIL alias; plain Image here would refer to this wrapper class
	def edit_pixel(self, pixel=[0, 0], new_pixel_tuple=None):
		pixels = self.image.load()
		pixels[pixel[0], pixel[1]] = new_pixel_tuple
		self.image.save(self.file_path.path)
def convert(self,
		# the output path (str, FilePath) (#1).
output=None,
# the input path (str, FilePath) (leave None to use self.fp.path)
input=None,
):
if input == None: input = self.fp.path
if output == None:
raise Exceptions.InvalidUsage("Define parameter: [output].")
img = _Image_.open(str(input))
img.save(str(output))
print(f"Successfully converted image {input} to {output}.")
def replace_pixels(self, input_path=None, output_path=None, input_hex=None, output_hex=None):
img = _Image_.open(input_path)
pixels = img.load()
input_rgb, output_rgb = input_hex, output_hex # self.hex_to_rgb(input_hex), self.hex_to_rgb(output_hex)
for i in range(img.size[0]):
for j in range(img.size[1]):
print(pixels[i,j], "VS", input_rgb)
if pixels[i,j] == input_rgb:
pixels[i,j] = output_rgb
img.save(output_path)
def replace_colors(self, input_path=None, output_path=None, hex=None):
img = _Image_.open(input_path)
pixels = img.load()
rgb = hex #self.hex_to_rgb(hex)
for i in range(img.size[0]):
for j in range(img.size[1]):
if pixels[i,j] != rgb and pixels[i,j] != (0, 0, 0, 0):
pixels[i,j] = rgb
img.save(output_path)
def rgb_to_hex(self, tuple):
return '#%02x%02x%02x' % tuple
def hex_to_rgb(self, _hex_):
return tuple(int(_hex_[i:i+2], 16) for i in (0, 2, 4))
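	# a minimal sketch of the color helpers:
	#   rgb_to_hex((255, 0, 0))  -> '#ff0000'
	#   hex_to_rgb("ff0000")     -> (255, 0, 0)   # expects the hex string without a leading '#'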
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Image"
#
@property
def __name__(self):
return self.instance()
# return raw data.
def raw(self):
return self.fp.path
	# support eq.
def __eq__(self, var):
if var.__class__.__name__ in ["NoneType"]:
return False
else:
return str(var) == str(self)
def __ne__(self, var):
if var.__class__.__name__ in ["NoneType"]:
return True
else:
return str(var) != str(self)
# repr.
def __str__(self):
return str(self.fp)
def __repr__(self):
return str(self)
#
#
# the zip object class.
class Zip(object):
def __init__(self, path=None, check=False):
# docs.
DOCS = {
"module":"Zip",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# init.
self.file_path = self.fp = Formats.FilePath(path, check=check)
#
def create(self,
# source can either be a string or an array.
source=None,
# remove the source file(s).
remove=False,
# sudo required to move/copy source files.
sudo=False,
):
# create tmp dir.
name = self.file_path.name().replace('.encrypted.zip','').replace("."+self.file_path.extension(),'')
tmp = Formats.FilePath(f'/tmp/zip-{utils.generate.shell_string(24)}')
tmp_content = Formats.FilePath(tmp.join(name, ""))
if tmp.exists(): tmp.delete(forced=True)
if os.path.exists(tmp.path):os.system(f"rm -fr {tmp.path}")
os.system(f"mkdir -p {tmp.path}")
if isinstance(source, str):
target = Formats.FilePath(source)
name = target.name().replace('.encrypted.zip','').replace("."+target.extension(),'')
if remove: target.move(tmp_content.path, sudo=sudo)
else: target.copy(tmp_content.path, sudo=sudo)
elif isinstance(source, list):
tmp_content.create(directory=True)
for path in source:
file_path = Formats.FilePath(path)
if remove: file_path.move("/"+tmp_content.join('/'+file_path.name(),"/"), sudo=sudo)
else: file_path.copy("/"+tmp_content.join('/'+file_path.name(),"/"), sudo=sudo)
else: raise ValueError("Parameter [source] must either be a str or list.")
# write out zip.
base = self.file_path.base()
format = self.file_path.extension()
archive_from = os.path.dirname(tmp_content.path)
archive_to = os.path.basename(tmp_content.path.strip(os.sep))
zip_path = shutil.make_archive(name, format, archive_from, archive_to)
os.system(f'mv {zip_path} {self.file_path.path}')
tmp.delete(forced=True, sudo=sudo)
#
def extract(self,
# the base extract directory.
base=None,
# remove the zip after extraction.
remove=False,
# if sudo required for removing file path.
sudo=False,):
# extract.
if base == None:
base = self.file_path.base()
with zipfile.ZipFile(self.file_path.path, 'r') as zip_ref:
zip_ref.extractall(base)
if remove: self.file_path.delete(forced=True, sudo=sudo)
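	# a minimal usage sketch:
	#   zip = Files.Zip("/tmp/archive.zip")
	#   zip.create(source="/tmp/some_dir")    # pack a file, directory, or list of paths
	#   zip.extract(base="/tmp/unpacked")     # defaults to the zip's own base directory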
#
# representation.
def __repr__(self):
return str(self)
#
# system functions.
def __str__(self):
return self.fp.path
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Zip"
#
@property
def __name__(self):
return self.instance()
# return raw data.
def raw(self):
return self.fp.path
#
#
# the bytes object class.
class Bytes(object):
def __init__(self,
# the bytes (param #1).
data=b"",
# the path (str, FilePath) (param #2).
path=False,
# load the data on initialization.
load=False,
# the default array (will be created if file path does not exist).
default=None,
):
# docs.
DOCS = {
"module":"Bytes",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# check self instance.
if isinstance(data, Files.Bytes):
data = data.bytes
# bytes.
		self.bytes = data # store the passed data; "bytes" here would be the builtin type
# path.
		if path == False: self.file_path = self.fp = None # used in local memory (not physical)
else: self.file_path = self.fp = Formats.FilePath(path)
		if default != None and not Files.exists(self.file_path.path): self.save(bytes=default)
if load: self.load()
#
def load(self, sudo=False):
bytes = Files.load(self.file_path.path, format="bytes", sudo=sudo)
self.bytes = bytes
return bytes
def save(self, bytes=None, sudo=False):
if bytes == None: bytes = self.bytes
bytes = Formats.denitialize(bytes)
self.bytes = bytes
return Files.save(self.fp.path, bytes, format="bytes", sudo=sudo)
	# support default iteration.
def __iter__(self):
return iter(self.bytes)
# support '==' & '!=' operator.
def __eq__(self, bytes_):
if isinstance(bytes_, bytes):
return self.bytes == bytes_
elif not isinstance(bytes_, self.__class__):
return False
return self.bytes == bytes_.bytes
def __ne__(self, bytes_):
if isinstance(bytes_, bytes):
return self.bytes != bytes_
elif not isinstance(bytes_, self.__class__):
return True
return self.bytes != bytes_.bytes
# support 'in' operator.
def __contains__(self, bytes_):
if isinstance(bytes_, (list, Files.Array)):
for i in bytes_:
if i == self.bytes:
return True
return False
else:
return bytes_ in self.bytes
#
# representation.
def __repr__(self):
return str(self)
#
# str representation.
def __str__(self):
return str(self.bytes)
# content count.
def __len__(self):
return len(self.bytes)
# object id.
def __id__(self):
return f"({self.instance()}:{str(self)})"
# object instance.
def instance(self):
return "Bytes"
#
@property
def __name__(self):
return self.instance()
# support self assignment.
def assign(self, b):
if isinstance(b, self.__class__):
b = b.bytes
self.bytes = b
return self
# return raw data.
def raw(self):
return self.bytes
#
#
#
#
# some default classes.
class Classes():
# the speed class.
class Speed():
# the mark function, returns a timestamp used for calculation.
def mark():
return time.time()
#
# calculate the difference between the marked timestamp & the current.
def calculate(
# the marked timestamp from Speed.mark.
stamp,
# the current timestamp (leave None to use Speed.mark)
current=None,
# round to decimals (Leave None to ignore).
decimals=None,
# normalize seconds.
normalize=False,
):
if current == None: current = Speed.mark()
diff = current - stamp
if decimals != None:
diff = round(diff, decimals)
if normalize:
diff = Speed.normalize_seconds(diff)
return diff
# normalize seconds to 10s or 1m etc.
def normalize_seconds(seconds:(int,float), decimals=1):
if seconds < 0:
raise ValueError("Can not normalize negative seconds.")
if seconds < 0.01:
return f'{int(seconds*1000)}ms'
elif seconds <= 60:
return f'{int(seconds)}s'
elif seconds <= 60*60:
return f'{round(seconds/60, decimals)}m'
elif seconds <= 60*60*24:
return f'{round(seconds/(60*60), decimals)}h'
elif seconds <= 60*60*24*30:
return f'{round(seconds/(60*60*24), decimals)}d'
elif seconds <= 60*60*24*30*12:
			return f'{round(seconds/(60*60*24*30), decimals)}mo' # "mo" so months are not confused with minutes
else:
return f'{round(seconds/(60*60*24*30*12), decimals)}y'
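		# a minimal usage sketch:
		#   stamp = Classes.Speed.mark()
		#   ... do some work ...
		#   print(Classes.Speed.calculate(stamp, decimals=1, normalize=True))  # e.g. "2.5m"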
# some default objects.
class Objects():
# the generate object class.
class Generate(object):
def __init__(self):
# docs.
DOCS = {
"module":"Generate",
"initialized":False,
"description":[],
"chapter": "Defaults", }
#
def int(self, length=6):
charset = Array(Formats.digits).string(joiner="")
return ''.join(random.choice(charset) for x in range(length))
#
def string(self, length=6, capitalize=True, digits=True):
charset = Array(Formats.alphabet).string(joiner="")
if capitalize: charset += Array(Formats.capitalized_alphabet).string(joiner="")
if digits: charset += Array(Formats.digits).string(joiner="")
return ''.join(random.choice(charset) for x in range(length))
#
# the interval object class.
class Interval(object):
def __init__(self,
# the sleep time.
sleeptime=1,
# the timeout.
timeout=60,
):
# docs.
DOCS = {
"module":"Interval",
"initialized":False,
"description":[],
"chapter": "Defaults", }
# attributes.
self.sleeptime = sleeptime
self.timeout = timeout
#
def __int__(self):
return int(self.sleeptime)
def __iter__(self):
l = []
for _ in range(int(self.timeout/self.sleeptime)):
l.append(self)
return iter(l)
def sleep(self, chapters=1):
for _ in range(chapters):
time.sleep(int(self)/chapters)
#
#for interval in Interval(sleeptime=60, timeout=3600):
# ...
# interval.sleep()
#
# shortcuts.
FilePath = Formats.FilePath
String = Formats.String
Boolean = Formats.Boolean
Integer = Formats.Integer
Date = Formats.Date
File = Files.File
Directory = Files.Directory
Zip = Files.Zip
Image = Files.Image
Bytes = Files.Bytes
Dictionary = Files.Dictionary
Array = Files.Array
Speed = Classes.Speed
Generate = Objects.Generate
Interval = Objects.Interval
# initialized objects.
gfp = Formats.FilePath("") # is required (do not remove).
gd = gdate = Formats.Date()
#
# ============================================================
# object_torus.py (KeerthanBhat/pygame-Search-the-Key, MIT)
# ============================================================
import pygame
from pygame.locals import *
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
def main():
pygame.init()
glutInit()
display = (800,600)
pygame.display.set_mode(display, DOUBLEBUF|OPENGL)
gluPerspective(45, (display[0]/display[1]), 0.1, 50.0)
glTranslatef(0.0, 0.0, -5)
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
glRotatef(1, 0, 1, 0)
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
glColor3f(0.0, 1.0, 0.0)
glutWireTorus(0.2, 0.8, 50, 50)
pygame.display.flip()
pygame.time.wait(10)
if __name__ == '__main__':
	main()
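# glutWireTorus(innerRadius, outerRadius, nsides, rings) draws a wireframe torus; as a
# sketch, glutWireTorus(0.4, 1.2, 30, 30) would render a thicker ring with a coarser mesh.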
# ============================================================
# samples/snippets/translate_v3_batch_translate_text_with_glossary_and_model.py
# (renovate-bot/python-translate, Apache-2.0)
# ============================================================
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START translate_v3_batch_translate_text_with_glossary_and_model]
from google.cloud import translate
def batch_translate_text_with_glossary_and_model(
input_uri="gs://YOUR_BUCKET_ID/path/to/your/file.txt",
output_uri="gs://YOUR_BUCKET_ID/path/to/save/results/",
project_id="YOUR_PROJECT_ID",
model_id="YOUR_MODEL_ID",
glossary_id="YOUR_GLOSSARY_ID",
):
"""
Batch translate text with Glossary and Translation model
"""
client = translate.TranslationServiceClient()
# Supported language codes: https://cloud.google.com/translate/docs/languages
location = "us-central1"
target_language_codes = ["ja"]
gcs_source = {"input_uri": input_uri}
# Optional. Can be "text/plain" or "text/html".
mime_type = "text/plain"
input_configs_element = {"gcs_source": gcs_source, "mime_type": mime_type}
input_configs = [input_configs_element]
gcs_destination = {"output_uri_prefix": output_uri}
output_config = {"gcs_destination": gcs_destination}
parent = f"projects/{project_id}/locations/{location}"
model_path = "projects/{}/locations/{}/models/{}".format(
project_id, "us-central1", model_id
)
models = {"ja": model_path}
glossary_path = client.glossary_path(
project_id, "us-central1", glossary_id # The location of the glossary
)
glossary_config = translate.TranslateTextGlossaryConfig(glossary=glossary_path)
glossaries = {"ja": glossary_config} # target lang as key
operation = client.batch_translate_text(
request={
"parent": parent,
"source_language_code": "en",
"target_language_codes": target_language_codes,
"input_configs": input_configs,
"output_config": output_config,
"models": models,
"glossaries": glossaries,
}
)
print("Waiting for operation to complete...")
response = operation.result()
# Display the translation for each input text provided
print("Total Characters: {}".format(response.total_characters))
print("Translated Characters: {}".format(response.translated_characters))
# [END translate_v3_batch_translate_text_with_glossary_and_model]
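# a minimal invocation sketch (every ID below is a placeholder, not a real resource):
#   batch_translate_text_with_glossary_and_model(
#       input_uri="gs://my-bucket/input.txt",
#       output_uri="gs://my-bucket/results/",
#       project_id="my-project",
#       model_id="my-model-id",
#       glossary_id="my-glossary-id",
#   )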
# ============================================================
# movo_common/movo_third_party/executive_smach/smach_ros/test/concurrence.py
# (zkytony/kinova-movo, BSD-3-Clause)
# ============================================================
#!/usr/bin/env python
import roslib; roslib.load_manifest('smach_ros')
import rospy
import rostest
import unittest
from actionlib import *
from actionlib.msg import *
from smach import *
from smach_ros import *
from smach_msgs.msg import *
# Static goals
g1 = TestGoal(1) # This goal should succeed
g2 = TestGoal(2) # This goal should abort
g3 = TestGoal(3) # This goal should be rejected
### Custom state classes
class Setter(State):
"""State that sets the key 'a' in its userdata"""
def __init__(self):
State.__init__(self,['done'],[],['a'])
def execute(self,ud):
ud.a = 'A'
rospy.loginfo("Added key 'a'.")
return 'done'
class Getter(State):
"""State that grabs the key 'a' from userdata, and sets 'b'"""
def __init__(self):
State.__init__(self,['done','preempted'],['a'],['b'])
def execute(self,ud):
while 'a' not in ud:
rospy.loginfo("Waiting for key 'a' to appear. ")
rospy.sleep(0.1)
ud.b = ud.a
rospy.sleep(1.0)
if self.preempt_requested():
return 'preempted'
return 'done'
### Test harness
class TestStateMachine(unittest.TestCase):
def test_concurrence(self):
"""Test concurrent container."""
sm = StateMachine(['done','succeeded'])
with sm:
cc = Concurrence(['succeeded','done'],
default_outcome = 'done',
outcome_map = {'succeeded':{'SETTER':'done'}})
sm.add('CONCURRENT',cc)
with cc:
Concurrence.add('SETTER', Setter())
Concurrence.add('GETTER', Getter())
outcome = sm.execute()
assert outcome == 'succeeded'
assert 'a' in cc.userdata
assert 'b' in cc.userdata
assert cc.userdata.a == 'A'
assert cc.userdata.b == 'A'
def test_preempt(self):
"""Test concurrent container that preempts siblings."""
cc = Concurrence(['succeeded','done'],
default_outcome = 'done',
child_termination_cb = lambda so: True,
outcome_map = {'succeeded':{'SETTER':'done', 'GETTER':'preempted'}})
with cc:
Concurrence.add('SETTER', Setter())
Concurrence.add('GETTER', Getter())
outcome = cc.execute()
assert outcome == 'succeeded'
assert 'a' in cc.userdata
assert 'b' in cc.userdata
assert cc.userdata.a == 'A'
assert cc.userdata.b == 'A'
def test_no_preempt(self):
"""Test concurrent container that doesnt preempt siblings."""
cc = Concurrence(['succeeded','done'],
default_outcome = 'done',
child_termination_cb = lambda so: False,
outcome_map = {
'succeeded':{
'SETTER':'done',
'GETTER':'done'}})
with cc:
Concurrence.add('SETTER', Setter())
Concurrence.add('GETTER', Getter())
outcome = cc.execute()
assert outcome == 'succeeded'
assert 'a' in cc.userdata
assert 'b' in cc.userdata
assert cc.userdata.a == 'A'
assert cc.userdata.b == 'A'
def test_outcome_cb(self):
"""Test concurrent container that doesnt preempt siblings."""
cc = Concurrence(['succeeded','done'],
default_outcome = 'done',
child_termination_cb = lambda so: False,
outcome_cb = lambda so: list(set(so.values()))[0])
with cc:
Concurrence.add('SETTER', Setter())
Concurrence.add('GETTER', Getter())
outcome = cc.execute()
assert outcome == 'done'
assert 'a' in cc.userdata
assert 'b' in cc.userdata
assert cc.userdata.a == 'A'
assert cc.userdata.b == 'A'
def main():
rospy.init_node('concurrence_test',log_level=rospy.DEBUG)
rostest.rosrun('smach', 'concurrence_test', TestStateMachine)
if __name__=="__main__":
main();
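# note on the outcome maps above: a smach Concurrence returns a container outcome when
# every child listed in its outcome_map entry finished with the required outcome, e.g.
# {'succeeded': {'SETTER': 'done', 'GETTER': 'preempted'}}; otherwise default_outcome is used.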
# ============================================================
# KaratAPP/models.py (MHuiG/Karat-Django-Backend, MIT)
# ============================================================
from django.db import models
# Create your models here.
##########################################################################
# Vote (投票)
class Vote(models.Model):
data=models.CharField(max_length=255)
##########################################################################
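# a minimal usage sketch (run inside a Django shell; the field value is made up):
#   Vote.objects.create(data="option-a")
#   Vote.objects.count()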
# ============================================================
# arguments.py (nudles/a2c, MIT)
# ============================================================
import argparse
import torch
def get_args():
parser = argparse.ArgumentParser(description='RL')
parser.add_argument('--algo', default='a2c',
help='algorithm to use: a2c | ppo ')
parser.add_argument('--lr', type=float, default=7e-5,
                        help='learning rate (default: 7e-5)')
parser.add_argument('--eps', type=float, default=1e-5,
help='RMSprop optimizer epsilon (default: 1e-5)')
parser.add_argument('--alpha', type=float, default=0.99,
                        help='RMSprop optimizer alpha (default: 0.99)')
parser.add_argument('--gamma', type=float, default=0.99,
help='discount factor for rewards (default: 0.99)')
parser.add_argument('--max-grad-norm', type=float, default=0.5,
                        help='max norm of gradients (default: 0.5)')
parser.add_argument('--seed', type=int, default=1,
help='random seed (default: 1)')
parser.add_argument('--num-processes', type=int, default=1,
                        help='how many training CPU processes to use (default: 1)')
parser.add_argument('--num-steps', type=int, default=32,
                        help='number of forward steps in A2C (default: 32)')
parser.add_argument('--clip-param', type=float, default=0.2,
help='clip parameter (default: 0.2)')
parser.add_argument('--log-interval', type=int, default=50,
                        help='log interval, one log per n updates (default: 50)')
parser.add_argument('--num-frames', type=int, default=80000,
                        help='number of frames to train (default: 80000)')
parser.add_argument('--cuda', action='store_true', default=False,
                        help='enables CUDA training')
parser.add_argument('--obs_size', type=int, default=200,
help='observation vector size')
parser.add_argument('--cycle_len', type=int, default=500,
                        help='cycle length')
parser.add_argument('--debug', action='store_true', default=False,
help='whether to record the logfile')
parser.add_argument('--num_models', type=int, default=3,
help='number of the model to use')
parser.add_argument('--beta', type=float, default=1,
help='balance the accuracy and latency when calculate the reward')
parser.add_argument('--tau', type=float, default=2,
help='max waiting time for enqueue')
parser.add_argument('--max_latency', type=float, default=16,
help='accept latency for each request')
parser.add_argument('--policy', choices=['async', 'sync'], default='async', help='policy')
args = parser.parse_args()
print("cuda: %s" % str(args.cuda))
if args.cuda:
assert torch.cuda.is_available(), 'CUDA is not available in this machine!'
return args
if __name__ == '__main__':
    get_args()
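# a minimal CLI sketch using the flags defined above:
#   python arguments.py --algo a2c --lr 7e-5 --num-steps 32 --cuda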
# ============================================================
# vega/algorithms/nas/__init__.py (wnov/vega, MIT)
# ============================================================
from .backbone_nas import *
from .adelaide_ea import *
from .sr_ea import *
from .esr_ea import *
from .darts_cnn import *
from .cars import *
from .fis import *
from .auto_lane import *
from .mfkd import *
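# note: these wildcard imports pull every NAS algorithm package in at import time
# (presumably so their classes register with vega's factory); removing a line
# disables the corresponding algorithm.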
# ============================================================
# ai.py (s18mbbustorff/AI_Hanabi_Assignment, CNRI-Python)
# ============================================================
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 9 12:27:15 2021
@author: kuba
"""
import copy
import numpy as np
w = {"H0": {"H0":0.2,"H1": 0.2, "P0": 0.5, "P1": 0.1},
"H1": {"H0":0.2,"H1": 0.2, "P0": 0.1, "P1": 0.5},
"P0": {"H0":0.3,"H1": 0.3, "P0": 0.2, "P1": 0.2},
"P1": {"H0":0.3,"H1": 0.3, "P0": 0.2, "P1": 0.2}
}
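# a reading of w (inferred from weighted_value below, so treat it as an assumption):
# w[previous_action][next_action] weights the likely follow-up action, where "H"/"P"
# stand for Hint/Play and the digit is the hand side (0 = left, 1 = right).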
class BeliefSpace:
def __init__(self, state):
        self.states = []
        # note: the original returned self.states here, but __init__ must not return a value
class State:
def __init__(self,player,cards1,cards2,table,deck, parent):
self.parent = parent
self.depth = 0
self.value = 0
self.player = player #player that has the turn, either 1 or 2 (int)
self.cards1 = cards1 #list of cards in player one's hand (Card list) 2cards that need to be created with the Card object
self.cards2 = cards2 #list of cards in AI 's hand (Card list) 2cards that need to be created with the Card object
self.table = table #list of card numbers in the table (int list)
#/!\initial table should contain a 0 for the Play action to work
self.deck = deck #number of cards left in the deck (int)
tableCards =[]
for nb in table:
tableCards.append(Card(nb))
self.discoveredCards = cards1+cards2+tableCards #list of all the cards that are out of the deck (list)
class Card():
#-----------------------------
#-----Initialization functions
#-----------------------------
def __init__(self,color,number):
self.color = color
self.number = number
self.colorHinted = False
self.numberHinted = False
class Actions:
def Hint(initialstate,side):
newstate = copy.deepcopy(initialstate) #side is an integer, 0 = left, 1 = right
newstate.parent = initialstate
newstate.depth = initialstate.depth+1
if initialstate.player == 1:
newstate.cards2[side].known=True
newstate.player = 2
elif initialstate.player == 2:
newstate.cards1[side].known=True
newstate.player = 1
return [newstate]
def Play(initialstate,side): #side is an integer, 0 = left, 1 = right
newstate = copy.deepcopy(initialstate)
newstate.parent = initialstate
newstate.depth = initialstate.depth+1
#------------------------
#if no cards left in deck
if initialstate.deck == 0:
if initialstate.player == 1:
playedcard = initialstate.cards1[side]
if playedcard.number == (max(initialstate.table)+1): #check if it is a correct card
newstate.table.append(playedcard.number) #it is added to the table of the new state
newstate.cards1[side] = None #remove card from hand
newstate.player = 2 #change player turn
elif initialstate.player == 2:
playedcard = initialstate.cards2[side]
if playedcard.number == (max(initialstate.table)+1): #if it is a correct card
newstate.table.append(playedcard.number) #it is added to the table of the new state
newstate.cards2[side] = None
newstate.player = 1
return [newstate]
#----------------------------
#if there are cards left in the deck, we need to make a new state for each possibility of a new card
#the function will return a list of new states
else:
#initializing the list of newstates
nbCardsLeft = initialstate.deck
newstates = [None] * nbCardsLeft
for i in range(nbCardsLeft):
newstates[i] = copy.deepcopy(newstate)
#making a list of all the possible numbers left
discoveredNumbers = []
for card in initialstate.discoveredCards:
discoveredNumbers.append(card.number)
allNumbers = [1,2,3,4,5]
numbersLeft = [x for x in allNumbers if x not in discoveredNumbers]
#updating all the new states with all possible new cards
#then removing the played card (add its number to table if correct)
if initialstate.player == 1:
playedcard = initialstate.cards1[side]
for i in range(nbCardsLeft):
newstates[i].cards1[side] = Card(numbersLeft[i]) #old card that was played gets replaced by new card
if playedcard.number == (max(initialstate.table)+1): #if it is a correct card
newstates[i].table.append(playedcard.number) #it is added to the table of the new state
newstates[i].player = 2
elif initialstate.player == 2:
playedcard = initialstate.cards2[side]
for i in range(nbCardsLeft):
newstates[i].cards2[side] = Card(numbersLeft[i]) #old card that was played gets replaced by new card
if playedcard.number == (max(initialstate.table)+1): #if it is a correct card
                        newstates[i].table.append(playedcard.number) #its number is added to the table of the new state
newstates[i].player = 1
for state in newstates:
state.deck = initialstate.deck-1
return newstates
class Solver:
def __init__(self,max_depth, hand_size,actions):
self.max_depth = max_depth
self.hand_size = hand_size
self.actions = actions
def utility(self, state):
return 10 * len(state.table)
"""
def forward(self, beliefspace, actions):
visited = []
queue = []
terminal_nodes = []
for state in beliefspace:
visited.append(state)
queue.append(state)
while queue:
s = queue.pop(0)
if s.depth < self.max_depth:
for action in actions:
for side in [0, 1]:
children = action(s, side)
for child in children:
queue.append(child)
visited.append(child)
print(child.depth)
if child.depth == self.max_depth:
terminal_nodes.append(child)
return terminal_nodes
"""
"""
def forward2(self, beliefspace, actions):
results = []
for state in beliefspace:
children = [(self.weighted_value(action(state, pos)[0], a_id + str(pos)),a_id, pos) for (action,a_id) in actions for pos in np.arange(self.hand_size)]
print(children)
results.append(sorted(children, key=lambda tup: tup[0])[-1])
return results
def max_value(self, state):
global w
if state.depth >= self.max_depth:
return self.utility(state)
v = - np.inf
for (a,a_id) in actions:
for s in range(2):
v = np.amax(v,self.weighted_value(a(state,s)[0],a_id + str(s)))
return v
def weighted_value(self, state, act_id):
global w
weights = w[act_id]
if state.depth >= self.max_depth:
return self.utility(state)
v = 0
for (a,a_id) in actions:
for s in range(2):
v = v + weights[a_id+str(s)]*self.max_value(a(state,s)[0])
return v
"""
def evaluate(self, beliefspace, actions):
results = []
for state in beliefspace:
children = [(self.weighted_value(action(state, pos)[0], a_id + str(pos)),a_id, pos) for (action,a_id) in actions for pos in np.arange(self.hand_size)]
print(children)
results.append(sorted(children, key=lambda tup: tup[0])[-1])
return results
def max_value(self, state):
global w
if state.depth >= self.max_depth:
return self.utility(state)
v = - np.inf
for (a,a_id) in actions:
for s in range(2):
                v = max(v, self.weighted_value(a(state,s)[0], a_id + str(s))) # builtin max; np.amax would treat the second argument as an axis
return v
def weighted_value(self, state, act_id):
global w
weights = w[act_id]
if state.depth >= self.max_depth:
return self.utility(state)
v = 0
for (a,a_id) in actions:
for s in range(2):
v = v + weights[a_id+str(s)]*self.max_value(a(state,s)[0])
return v
if __name__ == "__main__":
c1 = Card(1)
c2 = Card(2)
c3 = Card(3)
c4 = Card(4)
c5 = Card(5)
cards1 = [c5, c1]
cards2 = [c2, c4]
table = [0]
deck = 1
parent = None
player = 2
state = State(player,cards1,cards2,table,deck, parent)
initial_belief_states = [state]
    actions = [(Actions.Play, "P"), (Actions.Hint, "H")]
    solver = Solver(max_depth=2, hand_size=2, actions=actions) # Solver needs all three constructor arguments
    terminal = solver.evaluate(initial_belief_states, actions) # forward2 only exists in the commented-out block above
"""
print("Some tests to see the Actions funtioning:")
print("0.Initial state with cards: player1: (1,2), player2: (3,4)")
state1 = State(1,[Card(1),Card(2)],[Card(4),Card(5)],[0],1,None)
print("")
print("1.Making a Hint of the 2nd player right card:")
state2 = Actions.Hint(state1,1)
#check that the card is now "known" and that the player becomes "2"
print("Is the card known? {}. What player turn is it after the action? {}.".format(state2[0].cards2[1].known,state2[0].player))
print("")
print("2. Playing the correct card from player 1's left (the 1):")
state2b = Actions.Play(state1,0)
print("New size of deck: {}. New card on the left for player 1: {}. New table: {}. Amount of new states created: {}".format(state2b[0].deck,state2b[0].cards1[0].number,state2b[0].table,len(state2b)))
print(state2[0].depth)
state3 = Actions.Hint(state2[0],1)
print(state3[0].depth)
state4 = Actions.Hint(state3[0],1)
print(state4[0].depth)
"""
# ============================================================
# (19.12.06) Culminating/sprites.py (bly852/ICS3U1, MIT)
# ============================================================
# course: ICS3U1 2019
# exercise: Culminating Activity
# date: 2019-12-06
# student number: 340926187
# name: Brandon Ly
# description: Two players (Mr Chun & Mr Pileggi) running around the school
# collecting food for the food drive.
# sprite classes
import pygame
import random
import math
import os
from settings import *
class Player(pygame.sprite.Sprite):
"""
player class that contains all data and functions related to the player
"""
def __init__(self, game, x, y, playerNum):
"""
initalizes a player sprite when an instance is created in the game
parameter, at the x and y paramters, and with the player number
"""
self.playerNum = playerNum
self.groups = game.all_sprites, game.players
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
# image selection for each player
if self.playerNum == 1:
self.image = pygame.transform.rotate(self.game.player1_image, 90)
else:
self.image = pygame.transform.rotate(self.game.player2_image, 90)
self.rect = self.image.get_rect()
# setting the players base movement velocity
self.velX, self.velY = 0, 0
# setting the players position on the grid
self.x = x * tileSize - tileSize
self.y = y * tileSize - tileSize
# players starting score
self.score = 0
# if joysticks are connected, enable joystick controls for the player
self.joystick_count = pygame.joystick.get_count()
if self.joystick_count > 0:
self.joystick_enabled = True
else:
self.joystick_enabled = False
def get_keys(self):
"""
checks for all keys pressed and changes the players velocity on that
axis to the player speed varaiable
"""
self.velX, self.velY = 0, 0
keys = pygame.key.get_pressed()
# player 1 controls
if self.playerNum == 1:
if keys[pygame.K_a]:
self.velX = -player_speed
if keys[pygame.K_d]:
self.velX = player_speed
if keys[pygame.K_w]:
self.velY = -player_speed
if keys[pygame.K_s]:
self.velY = player_speed
# player 2 controls
else:
if keys[pygame.K_LEFT]:
self.velX = -player_speed
if keys[pygame.K_RIGHT]:
self.velX = player_speed
if keys[pygame.K_UP]:
self.velY = -player_speed
if keys[pygame.K_DOWN]:
self.velY = player_speed
# if moving diagonally reduce the speed
if self.velX > 0 and self.velY > 0:
self.velX = player_speed * 0.701
self.velY = player_speed * 0.701
elif self.velX < 0 and self.velY < 0:
self.velX = player_speed * -0.701
self.velY = player_speed * -0.701
def get_joystick_axis(self):
"""
changes the velocity of the character in the x and y based on joystick
input
"""
# joystick controls for two seperate controllers
if self.joystick_count == 2:
# joystick control for player 1
if self.playerNum == 1:
# joystick initialization
joystick = pygame.joystick.Joystick(1)
joystick.init()
# different joystick settings for Xbox controllers
if joystick.get_name() == 'Xbox Wireless Controller' or 'Controller (Xbox One For Windows)':
# checks for axis movement and changes velX and velY
if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0:
self.velX += joystick.get_axis(0) * player_speed
self.velY += joystick.get_axis(1) * player_speed
else:
if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0:
self.velX += joystick.get_axis(1) * player_speed
self.velY -= joystick.get_axis(0) * player_speed
# joystick control for player 2
elif self.playerNum == 2:
# joystick initialization
joystick = pygame.joystick.Joystick(0)
joystick.init()
# Different joystick settings for Xbox controllers
if joystick.get_name() == 'Xbox Wireless Controller' or 'Controller (Xbox One For Windows)':
# checks for axis movement and changes velX and velY
if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0:
self.velX += joystick.get_axis(0) * player_speed
self.velY += joystick.get_axis(1) * player_speed
else:
if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0:
self.velX += joystick.get_axis(1) * player_speed
self.velY -= joystick.get_axis(0) * player_speed
# joystick controls for a single controller
elif self.joystick_count == 1:
# joystick control for player 1
if self.playerNum == 1:
# joystick initialization
joystick = pygame.joystick.Joystick(0)
joystick.init()
# different joystick settings for Xbox controllers
if joystick.get_name() == 'Xbox Wireless Controller' or 'Controller (Xbox One For Windows)':
# checks for axis movement and changes velX and velY
if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0:
self.velX += joystick.get_axis(0) * player_speed
self.velY += joystick.get_axis(1) * player_speed
else:
if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0:
self.velX += joystick.get_axis(1) * player_speed
self.velY -= joystick.get_axis(0) * player_speed
# joystick control for player 2
elif self.playerNum == 2:
# joystick initialization
joystick = pygame.joystick.Joystick(0)
joystick.init()
# different joystick settings for Xbox controllers
if joystick.get_name() == 'Xbox Wireless Controller' or 'Controller (Xbox One For Windows)':
# checks for axis movement and changes velX and velY
if round(joystick.get_axis(4)) != 0 or round(joystick.get_axis(3)) != 0:
self.velX += joystick.get_axis(4) * player_speed
self.velY += joystick.get_axis(3) * player_speed
else:
if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0:
self.velX += joystick.get_axis(2) * player_speed
self.velY -= joystick.get_axis(3) * player_speed
def direction(self):
"""
rotates the player sprite based on the current direction and new
direction
"""
# player 1 rotation
if self.playerNum == 1:
if self.velX > 100:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player1_image, 45)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player1_image, -45)
else:
self.image = pygame.transform.rotate(self.game.player1_image, 0)
elif self.velX < -100:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player1_image, 135)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player1_image, -135)
else:
self.image = pygame.transform.rotate(self.game.player1_image, 180)
else:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player1_image, 90)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player1_image, -90)
# player 2 rotation
else:
if self.velX > 100:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player2_image, 45)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player2_image, -45)
else:
self.image = pygame.transform.rotate(self.game.player2_image, 0)
elif self.velX < -100:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player2_image, 135)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player2_image, -135)
else:
self.image = pygame.transform.rotate(self.game.player2_image, 180)
else:
if self.velY < -100:
self.image = pygame.transform.rotate(self.game.player2_image, 90)
elif self.velY > 100:
self.image = pygame.transform.rotate(self.game.player2_image, -90)
def wall_collision(self, axis):
"""
checks for player collision with the all wall sprites on the axis
given and prevents player movement onto it
"""
if axis == 'x':
collides = pygame.sprite.spritecollide(self, self.game.walls, False)
if collides:
if self.velX > 0:
self.x = collides[0].rect.left - self.rect.width
if self.velX < 0:
self.x = collides[0].rect.right
self.velX = 0
self.rect.x = self.x
if axis == 'y':
collides = pygame.sprite.spritecollide(self, self.game.walls, False)
if collides:
if self.velY > 0:
self.y = collides[0].rect.top - self.rect.height
if self.velY < 0:
self.y = collides[0].rect.bottom
self.velY = 0
self.rect.y = self.y
def player_collision(self, axis):
"""
checks for player collision with the all wall sprites on the axis
given and prevents player movement onto it
"""
# checks for player 1 collision to player 2
if self.playerNum == 1:
if axis == 'x':
if self.rect.colliderect(self.game.player2):
if self.velX > 0:
self.x = self.game.player2.rect.left - self.rect.width
if self.velX < 0:
self.x = self.game.player2.rect.right
self.velX = 0
self.rect.x = self.x
if axis == 'y':
if self.rect.colliderect(self.game.player2):
if self.velY > 0:
self.y = self.game.player2.rect.top - self.rect.height
if self.velY < 0:
self.y = self.game.player2.rect.bottom
self.velY = 0
self.rect.y = self.y
# checks for player 2 collision to player 1
else:
if axis == 'x':
if self.rect.colliderect(self.game.player1):
if self.velX > 0:
self.x = self.game.player1.rect.left - self.rect.width
if self.velX < 0:
self.x = self.game.player1.rect.right
self.velX = 0
self.rect.x = self.x
if axis == 'y':
if self.rect.colliderect(self.game.player1):
if self.velY > 0:
self.y = self.game.player1.rect.top - self.rect.height
if self.velY < 0:
self.y = self.game.player1.rect.bottom
self.velY = 0
self.rect.y = self.y
def food_collision(self):
"""
checks for player collision with all food sprites killing any sprites it comes collides with and adding 1 to the players score value
"""
collides = pygame.sprite.spritecollide(self, self.game.food, True)
if collides:
self.score += 1
def update(self):
"""
updates the players position
"""
self.get_keys()
if self.joystick_enabled == True:
self.get_joystick_axis()
self.direction()
self.x += self.velX * self.game.dt
self.y += self.velY * self.game.dt
self.rect.x = self.x
self.wall_collision('x')
self.player_collision('x')
self.rect.y = self.y
self.wall_collision('y')
self.player_collision('y')
self.food_collision()
class Wall(pygame.sprite.Sprite):
"""
class to contain all the data for wall sprites
"""
def __init__(self, game, x, y):
"""
initalizes a wall sprite when an instance is create in the game
parameter, at the x and y paramters
"""
self.groups = game.all_sprites, game.walls
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.wall_image
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * tileSize
self.rect.y = y * tileSize
class Floor(pygame.sprite.Sprite):
"""
class to contain all the data for floor sprites
"""
def __init__(self, game, x, y):
"""
initalizes a floor sprite when an instance is created in the game
parameter, at the x and y paramters
"""
self.groups = game.all_sprites, game.floor
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.floor_image
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * tileSize
self.rect.y = y * tileSize
class Food(pygame.sprite.Sprite):
"""
class to contain all the data for food sprites
"""
def __init__(self, game, x, y):
"""
        initializes a food sprite when an instance is created in the game
        parameter, at the x and y parameters
"""
self.groups = game.all_sprites, game.food
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
# picks random image for the sprite
self.image = pygame.image.load(os.path.join(food_folder, (random.choice(food_images)))).convert_alpha()
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * tileSize
self.rect.y = y * tileSize
# checks if the sprite is allowed to spawn in the x and y
self.spawnable = False
collided = pygame.sprite.spritecollide(self, self.game.floor, False)
for sprite in collided:
if self.x == sprite.x and self.y == sprite.y:
self.spawnable = True
        if not self.spawnable:
self.kill()
# === app/schemas/usage_logs.py (repo: wiki-yu/fastapi-algorithm-library, MIT) ===
from typing import Optional, List
from pydantic import BaseModel
class UsageLog(BaseModel):
api_key: str
is_active: bool
never_expire: bool
expiration_date: str
latest_query_date: Optional[str]
total_queries: int
class UsageLogs(BaseModel):
logs: List[UsageLog]
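# --- Hedged usage sketch (not part of the original module) ---
# Field values are made up; assumes the pydantic v1-style .json() API.
if __name__ == "__main__":
    log = UsageLog(
        api_key="abc123",
        is_active=True,
        never_expire=False,
        expiration_date="2022-01-01",
        latest_query_date=None,
        total_queries=42,
    )
    print(UsageLogs(logs=[log]).json())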
# === dstf/core.py (repo: anthonydugois/dstf, MIT) ===
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from math import inf
from typing import Iterator, Any, List, Dict, Type, Optional
EPSILON = 1e-4
class Error(Exception):
pass
class ConstraintError(Error):
pass
class Constraint(metaclass=ABCMeta):
@abstractmethod
def isvalid(self, schedule: "Schedule", chunk: "Chunk") -> bool:
pass
def geterror(self, schedule: "Schedule", chunk: "Chunk") -> str:
return "'{}' constraint is not met".format(type(self).__name__)
class Property(metaclass=ABCMeta):
@abstractmethod
def get(self, schedule: "Schedule") -> Any:
pass
class Operator(metaclass=ABCMeta):
@abstractmethod
def apply(self, schedule: "Schedule") -> Any:
pass
class Task:
def __init__(self, name: str):
self.name = name
self.constraints = OrderedDict()
def __contains__(self, constraint_cls: Type["Constraint"]) -> bool:
return constraint_cls in self.constraints
def __iter__(self) -> Iterator[Type["Constraint"]]:
return iter(self.constraints)
def __getitem__(self, constraint_cls: Type["Constraint"]) -> "Constraint":
return self.constraints[constraint_cls]
def __getattr__(self, attr: str):
for ctr in self.constraints.values():
if attr in ctr.__dict__:
return ctr.__dict__[attr]
raise AttributeError("'{}' task has no attribute '{}'".format(self.name, attr))
def set(self, constraint: "Constraint") -> "Task":
self.constraints[type(constraint)] = constraint
return self
class Chunk:
def __init__(self, task: "Task", start_time: float, proctimes: Dict[Any, float]):
self.task = task
self.start_time = start_time
self.proctimes = proctimes
def completion_time(self, node: Any) -> float:
if node in self.proctimes:
return self.start_time + self.proctimes[node]
else:
return inf
def isvalid(self, schedule: "Schedule") -> bool:
for ctr in self.task.constraints.values():
if not ctr.isvalid(schedule, self):
return False
return True
def append_to(self, schedule: "Schedule"):
for ctr in self.task.constraints.values():
if not ctr.isvalid(schedule, self):
raise ConstraintError(ctr.geterror(schedule, self))
if self.task in schedule.taskmap:
schedule.taskmap[self.task].append(self)
else:
schedule.taskmap[self.task] = [self]
for node in self.proctimes:
if node in schedule.nodemap:
schedule.nodemap[node].add(self)
else:
schedule.nodemap[node] = ChunkTree(node).add(self)
def remove_from(self, schedule: "Schedule"):
schedule.taskmap[self.task].remove(self)
for node in self.proctimes:
schedule.nodemap[node].remove(self)
class ChunkNode:
def __init__(self, chunk: "Chunk"):
self.chunk = chunk
self.height = 1
self.hi = -inf
self.left = None
self.right = None
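# ChunkTree is an AVL-balanced interval tree: nodes are ordered by chunk
# start_time and augmented with `hi`, the maximum completion time in the
# subtree, which lets at()/over() prune whole branches during queries.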
class ChunkTree:
def __init__(self, node: Any):
self.node = node
self.root = None
def __iter__(self) -> Optional[Iterator["ChunkNode"]]:
return self._iter_from(self.root)
def _iter_from(self, root: Optional["ChunkNode"]) -> Optional[Iterator["ChunkNode"]]:
if root is None:
return None
else:
yield from self._iter_from(root.left)
yield root
yield from self._iter_from(root.right)
def at(self, time: float) -> List["ChunkNode"]:
nodes = []
self._at_from(self.root, time, nodes)
return nodes
def _at_from(self, root: Optional["ChunkNode"], time: float, nodes: List["ChunkNode"]):
if root is not None:
if root.left is not None and time < root.left.hi:
self._at_from(root.left, time, nodes)
if root.chunk.start_time <= time < root.chunk.completion_time(self.node):
nodes.append(root)
self._at_from(root.right, time, nodes)
def over(self, lo: float, hi: float) -> List["ChunkNode"]:
nodes = []
self._over_from(self.root, lo, hi, nodes)
return nodes
def _over_from(self, root: Optional["ChunkNode"], lo: float, hi: float, nodes: List["ChunkNode"]):
if root is not None:
if root.left is not None and lo < root.left.hi:
self._over_from(root.left, lo, hi, nodes)
if lo < root.chunk.completion_time(self.node) and root.chunk.start_time < hi:
nodes.append(root)
self._over_from(root.right, lo, hi, nodes)
def add(self, chunk: "Chunk") -> "ChunkTree":
self.root = self._add_from(self.root, chunk)
return self
def _add_from(self, root: Optional["ChunkNode"], chunk: "Chunk") -> "ChunkNode":
if root is None:
treenode = ChunkNode(chunk)
treenode.hi = chunk.completion_time(self.node)
return treenode
else:
if chunk.start_time < root.chunk.start_time:
root.left = self._add_from(root.left, chunk)
else:
root.right = self._add_from(root.right, chunk)
root.height = 1 + max(self._height(root.left), self._height(root.right))
root.hi = max(self._hi(root), chunk.completion_time(self.node))
return self._rotate(root)
def remove(self, chunk: "Chunk") -> "ChunkTree":
self.root = self._remove_from(self.root, chunk)
return self
def _remove_from(self, root: Optional["ChunkNode"], chunk: "Chunk") -> Optional["ChunkNode"]:
if root is None:
return None
else:
if chunk.start_time < root.chunk.start_time:
root.left = self._remove_from(root.left, chunk)
elif chunk.start_time > root.chunk.start_time:
root.right = self._remove_from(root.right, chunk)
else:
if root.left is None:
return root.right
elif root.right is None:
return root.left
else:
successor = self._min_from(root.right)
root.chunk = successor.chunk
root.right = self._remove_from(root.right, successor.chunk)
root.height = 1 + max(self._height(root.left), self._height(root.right))
root.hi = max(root.chunk.completion_time(self.node), self._hi(root.left), self._hi(root.right))
return self._rotate(root)
def _rotate(self, root: "ChunkNode") -> "ChunkNode":
balance = self._balance(root)
if balance > 1 and self._balance(root.left) >= 0:
return self._rotate_right(root)
elif balance > 1 and self._balance(root.left) < 0:
root.left = self._rotate_left(root.left)
return self._rotate_right(root)
elif balance < -1 and self._balance(root.right) <= 0:
return self._rotate_left(root)
elif balance < -1 and self._balance(root.right) > 0:
root.right = self._rotate_right(root.right)
return self._rotate_left(root)
else:
return root
def _rotate_left(self, root: "ChunkNode") -> "ChunkNode":
pivot = root.right
child = pivot.left
pivot.left = root
root.right = child
root.height = 1 + max(self._height(root.left), self._height(root.right))
root.hi = max(root.chunk.completion_time(self.node), self._hi(root.left), self._hi(root.right))
pivot.height = 1 + max(self._height(pivot.left), self._height(pivot.right))
pivot.hi = max(pivot.chunk.completion_time(self.node), self._hi(pivot.left), self._hi(pivot.right))
return pivot
def _rotate_right(self, root: "ChunkNode") -> "ChunkNode":
pivot = root.left
child = pivot.right
pivot.right = root
root.left = child
root.height = 1 + max(self._height(root.left), self._height(root.right))
root.hi = max(root.chunk.completion_time(self.node), self._hi(root.left), self._hi(root.right))
pivot.height = 1 + max(self._height(pivot.left), self._height(pivot.right))
pivot.hi = max(pivot.chunk.completion_time(self.node), self._hi(pivot.left), self._hi(pivot.right))
return pivot
def _balance(self, root: "ChunkNode") -> int:
if root is None:
return 0
else:
return self._height(root.left) - self._height(root.right)
def _height(self, root: "ChunkNode") -> int:
if root is None:
return 0
else:
return root.height
def _hi(self, root: Optional["ChunkNode"]) -> float:
if root is None:
return -inf
else:
return root.hi
def min(self) -> Optional["ChunkNode"]:
return self._min_from(self.root)
def _min_from(self, root: "ChunkNode") -> Optional["ChunkNode"]:
if root is None:
return None
else:
current = root
while current.left is not None:
current = current.left
return current
def max(self) -> Optional["ChunkNode"]:
return self._max_from(self.root)
def _max_from(self, root: "ChunkNode") -> Optional["ChunkNode"]:
if root is None:
return None
else:
current = root
while current.right is not None:
current = current.right
return current
class Schedule:
def __init__(self):
self.taskmap = {}
self.nodemap = {}
def tasks(self) -> Iterator["Task"]:
return iter(self.taskmap)
def hastask(self, task: "Task") -> bool:
return task in self.taskmap
def task(self, task: "Task") -> Optional[List["Chunk"]]:
if task in self.taskmap:
return self.taskmap[task]
else:
return None
def nodes(self) -> Iterator[Any]:
return iter(self.nodemap)
def hasnode(self, node: Any) -> bool:
return node in self.nodemap
def node(self, node: Any) -> Optional["ChunkTree"]:
if node in self.nodemap:
return self.nodemap[node]
else:
return None
# def copy(self):
# chunk_map = self.taskmap.copy()
#
# for tsk in chunk_map:
# chunk_map[tsk] = chunk_map[tsk].copy()
#
# return Schedule(chunk_map)
def get(self, prop: "Property") -> Any:
return prop.get(self)
def apply(self, operator: "Operator") -> Any:
return operator.apply(self)
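# --- Hedged usage sketch (not part of the original module) ---
# Schedules a single chunk of a task on a hypothetical node "cpu0" and
# queries which chunks run there at t=1.0.
if __name__ == "__main__":
    task = Task("t1")
    schedule = Schedule()
    Chunk(task, start_time=0.0, proctimes={"cpu0": 2.5}).append_to(schedule)
    print([n.chunk.task.name for n in schedule.node("cpu0").at(1.0)])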
# === 2018/2018_06a.py (repo: davidxiao93/Advent-of-Code, MIT) ===
input = """154, 159
172, 84
235, 204
181, 122
161, 337
305, 104
128, 298
176, 328
146, 71
210, 87
341, 195
50, 96
225, 151
86, 171
239, 68
79, 50
191, 284
200, 122
282, 240
224, 282
327, 74
158, 289
331, 244
154, 327
317, 110
272, 179
173, 175
187, 104
44, 194
202, 332
249, 197
244, 225
52, 127
299, 198
123, 198
349, 75
233, 72
284, 130
119, 150
172, 355
147, 314
58, 335
341, 348
236, 115
185, 270
173, 145
46, 288
214, 127
158, 293
237, 311"""
from collections import namedtuple
Point = namedtuple("Point", ["id", "x", "y"])
points = set()
for id, line in enumerate(input.splitlines()):
words = line.split(",")
x, y = [int(a) for a in words]
points.add(Point(id, x, y))
# get bounds
a_point = next(iter(points))
left_bound = a_point.x
right_bound = a_point.x
up_bound = a_point.y
down_bound = a_point.y
for p in points:
if p.x < left_bound:
left_bound = p.x
if p.x > right_bound:
right_bound = p.x
if p.y < up_bound:
up_bound = p.y
if p.y > down_bound:
down_bound = p.y
# Find closest points within the bounds
# Anything outside the bounds is uninteresting as it just leads off into infinite space
def distance(p, q):
return abs(p.x - q.x) + abs(p.y - q.y)
def find_closest(p, points):
closest_dist = None
closest = set()
for q in points:
dist = distance(p, q)
if closest_dist == None or dist < closest_dist:
closest = {q.id}
closest_dist = dist
elif dist == closest_dist:
closest.add(q.id)
return closest
grid = [
[0] * (right_bound - left_bound + 1) for i in range(down_bound - up_bound + 1)
]
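# each grid cell holds the id of its closest point, or -1 when two or more tie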
for y in range(up_bound, down_bound + 1):
for x in range(left_bound, right_bound + 1):
closest_points = find_closest(Point(id=None, x=x, y=y), points)
if len(closest_points) > 1:
grid[y-up_bound][x-left_bound] = -1
elif len(closest_points) == 0:
print("wtf")
exit(1)
else:
grid[y - up_bound][x - left_bound] = closest_points.pop()
# We have our grid, we can remove any point ids that lie on the edge as they
# will continue off to infinity
candidate_ids = {p.id for p in points}
for y in [0, down_bound - up_bound]:
for x in [0, right_bound - left_bound]:
if grid[y][x] in candidate_ids:
candidate_ids.remove(grid[y][x])
# we have our contenders
# now find which has the smallest finite space
ids_to_count = {}
for y in range(0, down_bound - up_bound + 1):
for x in range(0, right_bound - left_bound + 1):
if grid[y][x] in candidate_ids:
if grid[y][x] not in ids_to_count:
ids_to_count[grid[y][x]] = 0
ids_to_count[grid[y][x]] += 1
print(max(ids_to_count.values()))
# === tests/test_backup.py (repo: KonstantinPankratov/Backupy, MIT) ===
import os
from Backupy import Backupy
def test_backup():
backup = Backupy()
backup.add_directory('./')
backup.start()
assert os.path.exists(backup.filename)
os.remove(backup.filename)
# === tweetf0rm/process/crawler_process.py (repo: amaurywalbert/mytweetf0rm, MIT) ===
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
import logging
logger = logging.getLogger(__name__)
import multiprocessing as mp
import tweetf0rm.handler
from tweetf0rm.redis_helper import CrawlerQueue
#MAX_QUEUE_SIZE = 32767
class CrawlerProcess(mp.Process):
def __init__(self, node_id, crawler_id, redis_config, handlers):
super(CrawlerProcess, self).__init__()
self.node_id = node_id
self.crawler_id = crawler_id
self.redis_config = redis_config
#self.queue = mp.Queue(maxsize=MAX_QUEUE_SIZE)
self.crawler_queue = CrawlerQueue(node_id, crawler_id, redis_config=redis_config)
self.crawler_queue.clear()
#self.lock = mp.Lock()
self.handlers = handlers
logger.debug("number of handlers attached: %d"%(len(handlers)))
def get_crawler_id(self):
return self.crawler_id
def enqueue(self, request):
#self.queue.put(request, block=True)
self.crawler_queue.put(request)
return True
def get_cmd(self):
#return self.queue.get(block=True)
return self.crawler_queue.get(block=True)
def get_queue_size(self):
		return self.crawler_queue.qsize()
def run(self):
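		# stub; concrete crawler implementations presumably override run()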
pass
# === cloudify_aws/ec2/resources/dhcp.py (repo: marrowne/cloudify-aws-plugin, Apache-2.0) ===
# Copyright (c) 2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
EC2.DhcpOptions
~~~~~~~~~~~~~~
AWS EC2 DhcpOptions interface
"""
# Boto
from botocore.exceptions import ClientError
# Cloudify
from cloudify_aws.common import decorators, utils
from cloudify_aws.ec2 import EC2Base
from cloudify_aws.common.constants import EXTERNAL_RESOURCE_ID
RESOURCE_TYPE = 'EC2 Dhcp Options'
DHCPOPTIONS = 'DhcpOptions'
DHCPOPTIONS_ID = 'DhcpOptionsId'
DHCPOPTIONS_IDS = 'DhcpOptionsIds'
VPC_ID = 'VpcId'
VPC_TYPE = 'cloudify.nodes.aws.ec2.Vpc'
VPC_TYPE_DEPRECATED = 'cloudify.aws.nodes.Vpc'
class EC2DHCPOptions(EC2Base):
"""
EC2 DhcpOptions interface
"""
def __init__(self, ctx_node, resource_id=None, client=None, logger=None):
EC2Base.__init__(self, ctx_node, resource_id, client, logger)
self.type_name = RESOURCE_TYPE
@property
def properties(self):
"""Gets the properties of an external resource"""
params = {DHCPOPTIONS_IDS: [self.resource_id]}
try:
resources = \
self.client.describe_dhcp_options(**params)
except ClientError:
pass
else:
return resources.get(DHCPOPTIONS)[0] if resources else None
def create(self, params):
"""
Create a new AWS EC2 DhcpOptions.
"""
return self.make_client_call('create_dhcp_options', params)
def delete(self, params=None):
"""
Deletes an existing AWS EC2 DhcpOptions.
"""
self.logger.debug('Deleting %s with parameters: %s'
% (self.type_name, params))
res = self.client.delete_dhcp_options(**params)
self.logger.debug('Response: %s' % res)
return res
def attach(self, params):
'''
Attach an AWS EC2 DhcpOptions to a VPC.
'''
self.logger.debug('Attaching %s with: %s'
% (self.type_name, params.get(VPC_ID, None)))
res = self.client.associate_dhcp_options(**params)
self.logger.debug('Response: %s' % res)
return res
def detach(self, params):
'''
Detach an AWS EC2 VPN Gateway from a VPC.
'''
self.logger.debug('Detaching %s from: %s'
% (self.type_name, params.get(VPC_ID, None)))
self.logger.debug('Attaching default %s'
% (self.type_name))
res = self.client.associate_dhcp_options(**params)
self.logger.debug('Response: %s' % res)
return res
@decorators.aws_resource(EC2DHCPOptions, resource_type=RESOURCE_TYPE)
def prepare(ctx, resource_config, **_):
"""Prepares an AWS EC2 DhcpOptions"""
# Save the parameters
ctx.instance.runtime_properties['resource_config'] = resource_config
@decorators.aws_resource(EC2DHCPOptions, RESOURCE_TYPE)
def create(ctx, iface, resource_config, **_):
"""Creates an AWS EC2 DhcpOptions"""
# Create a copy of the resource config for clean manipulation.
params = \
dict() if not resource_config else resource_config.copy()
# Actually create the resource
create_response = iface.create(params)[DHCPOPTIONS]
ctx.instance.runtime_properties['create_response'] = \
utils.JsonCleanuper(create_response).to_dict()
dhcp_options_id = create_response.get(DHCPOPTIONS_ID, '')
iface.update_resource_id(dhcp_options_id)
utils.update_resource_id(ctx.instance, dhcp_options_id)
@decorators.aws_resource(EC2DHCPOptions, RESOURCE_TYPE,
ignore_properties=True)
def delete(ctx, iface, resource_config, **_):
"""Deletes an AWS EC2 DhcpOptions"""
# Create a copy of the resource config for clean manipulation.
params = \
dict() if not resource_config else resource_config.copy()
dhcp_options_id = params.get(DHCPOPTIONS_ID)
if not dhcp_options_id:
params[DHCPOPTIONS_ID] = \
iface.resource_id or \
ctx.instance.runtime_properties.get(EXTERNAL_RESOURCE_ID)
iface.delete(params)
@decorators.aws_resource(EC2DHCPOptions, RESOURCE_TYPE)
def attach(ctx, iface, resource_config, **_):
'''Attaches an AWS EC2 DhcpOptions to a VPC'''
params = dict() if not resource_config else resource_config.copy()
dhcp_options_id = params.get(DHCPOPTIONS_ID)
if not dhcp_options_id:
dhcp_options_id = iface.resource_id
params.update({DHCPOPTIONS_ID: dhcp_options_id})
params.pop('DhcpConfigurations')
vpc_id = params.get(VPC_ID)
if not vpc_id:
targ = \
utils.find_rel_by_node_type(ctx.instance, VPC_TYPE) or \
utils.find_rel_by_node_type(ctx.instance, VPC_TYPE_DEPRECATED)
# Attempt to use the VPC ID from parameters.
# Fallback to connected VPC.
params[VPC_ID] = \
vpc_id or \
targ.target.instance.runtime_properties.get(EXTERNAL_RESOURCE_ID)
    # Store the resolved VPC ID so detach can find it later.
    ctx.instance.runtime_properties['vpc_id'] = params[VPC_ID]
    # Actually attach the resources
iface.attach(params)
@decorators.aws_resource(EC2DHCPOptions, RESOURCE_TYPE,
ignore_properties=True)
def detach(ctx, iface, resource_config, **_):
'''Detach an AWS EC2 DhcpOptions from a VPC'''
params = dict() if not resource_config else resource_config.copy()
params.update({DHCPOPTIONS_ID: 'default'})
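    # EC2 has no disassociate call for DHCP options; detaching means
    # associating the special 'default' options set back onto the VPC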
vpc_id = params.get(VPC_ID) or ctx.instance.runtime_properties['vpc_id']
if not vpc_id:
targ = \
utils.find_rel_by_node_type(ctx.instance, VPC_TYPE) or \
utils.find_rel_by_node_type(ctx.instance, VPC_TYPE_DEPRECATED)
# Attempt to use the VPC ID from parameters.
# Fallback to connected VPC.
params[VPC_ID] = \
vpc_id or \
targ.target.instance.runtime_properties.get(EXTERNAL_RESOURCE_ID)
else:
params.update({VPC_ID: vpc_id})
iface.detach(params)
# === tests/testing_support/sample_applications.py (repo: douglasfarinelli/newrelic-python-agent, Apache-2.0) ===
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from urllib2 import urlopen # Py2.X
except ImportError:
from urllib.request import urlopen # Py3.X
import sqlite3 as db
from newrelic.api.time_trace import record_exception
from newrelic.api.transaction import (add_custom_parameter,
get_browser_timing_header, get_browser_timing_footer,
record_custom_event)
from newrelic.api.wsgi_application import wsgi_application
_custom_parameters = {
'user' : 'user-name',
'account' : 'account-name',
'product' : 'product-name',
'bytes' : b'bytes-value',
'string' : 'string-value',
'unicode' : u'unicode-value',
'integer' : 1,
'float' : 1.0,
'invalid-utf8' : b'\xe2',
'multibyte-utf8' : b'\xe2\x88\x9a',
'multibyte-unicode' : b'\xe2\x88\x9a'.decode('utf-8'),
'list' : [],
'tuple' : (),
'dict' : {},
}
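# these values deliberately cover coercion edge cases (bytes, invalid UTF-8,
# empty containers) to exercise attribute handling in tests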
_err_param = {
'err-param' : 'value'
}
def user_attributes_added():
"""Expected values when the custom parameters in this file are added as user
attributes
"""
user_attributes = _custom_parameters.copy()
user_attributes['list'] = '[]'
user_attributes['tuple'] = '()'
user_attributes['dict'] = '{}'
return user_attributes
def error_user_params_added():
return _err_param.copy()
@wsgi_application()
def fully_featured_app(environ, start_response):
status = '200 OK'
path = environ.get('PATH_INFO')
use_user_attrs = environ.get('record_attributes', 'TRUE') == 'TRUE'
if use_user_attrs:
for attr, val in _custom_parameters.items():
add_custom_parameter(attr, val)
if 'db' in environ and int(environ['db']) > 0:
connection = db.connect(":memory:")
for i in range(int(environ['db']) - 1):
connection.execute("create table test_db%d (a, b, c)" % i)
if 'external' in environ:
for i in range(int(environ['external'])):
r = urlopen('http://www.python.org')
r.read(10)
if 'err_message' in environ:
n_errors = int(environ.get('n_errors', 1))
for i in range(n_errors):
try:
# append number to stats engine to get unique errors, so they
# don't immediately get filtered out.
raise ValueError(environ['err_message'] + str(i))
except ValueError:
if use_user_attrs:
record_exception(params=_err_param)
else:
record_exception()
text = '<html><head>%s</head><body><p>RESPONSE</p>%s</body></html>'
output = (text % (get_browser_timing_header(),
get_browser_timing_footer())).encode('UTF-8')
response_headers = [('Content-type', 'text/html; charset=utf-8'),
('Content-Length', str(len(output)))]
start_response(status, response_headers)
return [output]
@wsgi_application()
def simple_exceptional_app(environ, start_response):
start_response('500 :(',[])
raise ValueError('Transaction had bad value')
@wsgi_application()
def simple_app(environ, start_response):
status = '200 OK'
start_response(status, response_headers=[])
return []
@wsgi_application()
def simple_custom_event_app(environ, start_response):
params = {'snowman': u'\u2603', 'foo': 'bar'}
record_custom_event('SimpleAppEvent', params)
start_response(status='200 OK', response_headers=[])
return []
# === spydrnet_tmr/transformation/replication/nmr.py (repo: byuccl/spydrnet-tmr, BSD-3-Clause) ===
from spydrnet.ir import Port, Instance, InnerPin
from spydrnet_tmr.transformation.util import add_suffix_to_name
IN = Port.Direction.IN
OUT = Port.Direction.OUT
INOUT = Port.Direction.INOUT
def apply_nmr(ports_and_instances_to_replicate, degree, name_suffix='NMR', rename_original=True):
"""
Replicate the selected ports and instances to the n-th degree.
    :param ports_and_instances_to_replicate: the ports and instances to copy
    :param degree: number of total copies
    :param name_suffix: string to append to each replicated element (e.g. 'TMR' or 'DWC')
    :param rename_original: whether to rename the original domain
:type rename_original: bool
:return: A map from an original element to its replicas
"""
nmr_agent = NMR.from_originals_degree_suffix_and_rename(ports_and_instances_to_replicate, degree, name_suffix,
rename_original)
replicas = nmr_agent.apply()
return replicas
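# --- Hedged usage sketch (not part of the original module) ---
# Triplicates every instance of a netlist parsed with SpyDrNet; the file
# name "design.edf" is made up for illustration.
#
#   import spydrnet as sdn
#   netlist = sdn.parse("design.edf")
#   instances = list(netlist.get_instances())
#   replicas = apply_nmr(instances, degree=3, name_suffix="TMR")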
class NMR:
@staticmethod
def from_originals_degree_suffix_and_rename(originals, degree, suffix, rename):
nmr_agent = NMR(originals, degree, suffix, rename)
return nmr_agent
def __init__(self, originals, degree, suffix, rename):
# Internal state
self._applied = False
self._wires_to_replicate = None
self._additional_ports_to_replicate = None
self._wiremap = None
self._replicas = dict()
# Inputs
for original in originals:
# if isinstance(original, HRef):
# original = original.item
if isinstance(original, (Port, Instance)):
self._replicas[original] = None
self.replication_degree = degree
self.name_suffix = suffix
self.rename_original = rename
def apply(self):
#self._validate_inputs()
self._identify_additional_wires_and_ports_to_replicate()
self._replicate_ports_and_instances()
self._replicate_wires()
self._connect_wires()
return self._replicas
def _identify_additional_wires_and_ports_to_replicate(self):
        src_pins, snk_pins = self._identify_src_and_snk_pins_that_will_be_replicated()
wires_to_replicate = self.identify_additional_wires_to_replicate(src_pins, snk_pins)
ports_to_replicate = self.identify_additional_ports_to_replicate(wires_to_replicate)
self._wires_to_replicate = wires_to_replicate
self._replicas.update((port, None) for port in ports_to_replicate)
@staticmethod
def identify_additional_ports_to_replicate(wires_to_replicate):
ports_to_replicate = set()
inner_pins = set()
outer_pins = set()
for wire in wires_to_replicate:
for pin in wire.pins:
if isinstance(pin, InnerPin):
inner_pins.add(pin)
else:
outer_pins.add(pin)
for outer_pin in outer_pins:
inner_pin = outer_pin.inner_pin
if inner_pin in inner_pins:
port = inner_pin.port
ports_to_replicate.add(port)
for pin in port.pins:
inner_pins.discard(pin)
return ports_to_replicate
@staticmethod
def identify_additional_wires_to_replicate(src_pins, snk_pins):
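        # Depth-first search from every source pin; whenever a sink pin is
        # reached, all wires still open on the search stack lie on a
        # src -> snk path and must be replicated.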
wires_to_replicate = set()
wires_found = set()
for src_pin in src_pins:
wire = src_pin.wire
if not wire or wire in wires_found:
continue
wires_found.add(wire)
search_stack = [(wire, False)]
while search_stack:
wire, visited = search_stack.pop()
if visited:
continue
search_stack.append((wire, True))
for pin in wire.pins:
if pin in snk_pins:
for path_member, part_of_path in reversed(search_stack):
if part_of_path is True:
if path_member not in wires_to_replicate:
wires_to_replicate.add(path_member)
else:
break
elif pin not in src_pins:
other_wires = pin.get_wires(selection='OUTSIDE' if isinstance(pin, InnerPin) else 'INSIDE')
for other_wire in other_wires:
if other_wire not in wires_found:
wires_found.add(other_wire)
search_stack.append((other_wire, False))
return wires_to_replicate
    def _identify_src_and_snk_pins_that_will_be_replicated(self):
src_pins = set()
snk_pins = set()
for original in self._replicas.keys():
if isinstance(original, Port):
direction = original.direction
if direction in {IN, INOUT}:
src_pins.update(original.get_pins(selection='INSIDE'))
snk_pins.update(original.get_pins(selection='OUTSIDE'))
if direction in {OUT, INOUT}:
src_pins.update(original.get_pins(selection='OUTSIDE'))
snk_pins.update(original.get_pins(selection='INSIDE'))
else:
reference = original.reference
for port in reference.ports:
direction = port.direction
if direction in {IN, INOUT}:
snk_pins.update(map(original.pins.get, port.pins))
if direction in {OUT, INOUT}:
src_pins.update(map(original.pins.get, port.pins))
return src_pins, snk_pins
def _replicate_ports_and_instances(self):
for original in self._replicas.keys():
if isinstance(original, Port):
self._replicate_port(original)
else:
self._replicate_instance(original)
self._reorder_ports_for_readability()
self._reorder_instances_for_readability()
def _replicate_port(self, port):
replicas = list()
for ii in range(1, self.replication_degree):
port_clone = port.clone()
add_suffix_to_name(port_clone, self.name_suffix + '_' + str(ii))
replicas.append(port_clone)
port.definition.add_port(port_clone)
if self.rename_original:
add_suffix_to_name(port, self.name_suffix + '_' + '0')
self._replicas[port] = replicas
def _replicate_instance(self, inst):
replicas = list()
for ii in range(1, self.replication_degree):
inst_clone = inst.clone()
add_suffix_to_name(inst_clone, self.name_suffix + '_' + str(ii))
replicas.append(inst_clone)
inst.parent.add_child(inst_clone)
if self.rename_original:
add_suffix_to_name(inst, self.name_suffix + '_' + '0')
self._replicas[inst] = replicas
def _reorder_ports_for_readability(self):
reordered_definitions = set()
for original in self._replicas.keys():
if isinstance(original, Port):
definition = original.definition
if definition not in reordered_definitions:
reordered_definitions.add(definition)
new_order = list()
def_ports = definition.ports
def_ports_len = len(def_ports)
for def_port in def_ports:
new_order.append(def_port)
if def_port in self._replicas:
new_order += self._replicas[def_port]
if len(new_order) == def_ports_len:
break
definition.ports = new_order
def _reorder_instances_for_readability(self):
reordered_definitions = set()
for original in self._replicas:
if isinstance(original, Instance):
definition = original.parent
if definition not in reordered_definitions:
reordered_definitions.add(definition)
new_order = list()
def_children = definition.children
def_children_len = len(def_children)
for def_child in def_children:
new_order.append(def_child)
if def_child in self._replicas:
new_order += self._replicas[def_child]
if len(new_order) == def_children_len:
break
definition.children = new_order
def _replicate_wires(self):
self._wiremap = dict()
replicated_cables = set()
for wire in self._wires_to_replicate:
cable = wire.cable
if cable not in replicated_cables:
replicated_cables.add(cable)
for ii in range(1, self.replication_degree):
cable_clone = cable.clone()
add_suffix_to_name(cable_clone, self.name_suffix + '_' + str(ii))
for wire_index, cable_wire in enumerate(cable.wires):
if cable_wire in self._wires_to_replicate:
if cable_wire not in self._wiremap:
self._wiremap[cable_wire] = list()
self._wiremap[cable_wire].append(cable_clone.wires[wire_index])
cable.definition.add_cable(cable_clone)
if self.rename_original:
add_suffix_to_name(cable, self.name_suffix + '_' + '0')
        self._reorder_cables_for_readability()
    def _reorder_cables_for_readability(self):
reordered_definitions = set()
for wire in self._wiremap:
definition = wire.cable.definition
if definition not in reordered_definitions:
reordered_definitions.add(definition)
new_order = list()
visited_cables = set()
def_cables = definition.cables
for def_cable in def_cables:
if def_cable in visited_cables:
continue
visited_cables.add(def_cable)
new_order.append(def_cable)
for wire in def_cable.wires:
if wire in self._wiremap:
other_cables = list(other_wire.cable for other_wire in self._wiremap[wire])
for other_cable in other_cables:
if other_cable not in visited_cables:
visited_cables.add(other_cable)
new_order.append(other_cable)
definition.cables = new_order
def _connect_wires(self):
self._connect_replicated_wires()
self._connect_non_replicated_wires_to_replicated_pins()
def _connect_replicated_wires(self):
for wire, other_wires in self._wiremap.items():
for pin in wire.pins:
if isinstance(pin, InnerPin):
port = pin.port
if port in self._replicas:
other_ports = self._replicas[port]
pin_index = port.pins.index(pin)
for ii in range(self.replication_degree - 1):
other_wires[ii].connect_pin(other_ports[ii].pins[pin_index])
else:
inner_pin = pin.inner_pin
instance = pin.instance
if instance in self._replicas:
other_instances = self._replicas[instance]
for ii in range(self.replication_degree - 1):
other_wires[ii].connect_pin(other_instances[ii].pins[inner_pin])
else: # TODO: if move this outside of the if does it do what we would expect?
port = inner_pin.port
if port in self._replicas:
other_ports = self._replicas[port]
pin_index = port.pins.index(inner_pin)
for ii in range(self.replication_degree - 1):
other_wires[ii].connect_pin(instance.pins[other_ports[ii].pins[pin_index]])
def _connect_non_replicated_wires_to_replicated_pins(self):
pinmap = dict()
for original in self._replicas:
if isinstance(original, Instance):
inst = original
other_instances = self._replicas[inst]
for pin in inst.pins:
if pin.inner_pin.port.direction in {IN, INOUT}:
wire = pin.wire
if wire and wire not in self._wiremap:
inner_pin = pin.inner_pin
pinmap[pin] = list()
for ii in range(self.replication_degree - 1):
other_pin = other_instances[ii].pins[inner_pin]
pinmap[pin].append(other_pin)
wire.connect_pin(other_pin)
elif isinstance(original, Port):
port = original
other_ports = self._replicas[port]
for pin in port.pins:
if port.direction in {OUT, INOUT}:
wire = pin.wire
if wire and wire not in self._wiremap:
pin_index = pin.port.pins.index(pin)
pinmap[pin] = list()
for ii in range(self.replication_degree - 1):
other_pin = other_ports[ii].pins[pin_index]
pinmap[pin].append(other_pin)
wire.connect_pin(other_pin)
        self._reorder_pins_for_readability(pinmap)
    @staticmethod
    def _reorder_pins_for_readability(pinmap):
reordered_wires = set()
for pin in pinmap:
wire = pin.wire
if wire not in reordered_wires:
reordered_wires.add(wire)
new_order = list()
wire_pins = wire.pins
wire_pins_len = len(wire_pins)
for wire_pin in wire_pins:
new_order.append(wire_pin)
if wire_pin in pinmap:
new_order += pinmap[wire_pin]
if len(new_order) == wire_pins_len:
break
wire.pins = new_order
# === pyspark/example/spark_core/4.7_spark_prog.py (repo: chiliangpi/hellobi, Apache-2.0) ===
import os
import numpy as np
import sys
import logging
LOG_PATH = os.environ['log']
spark_home = os.environ['SPARK_HOME']
sys.path.insert(0, os.path.join(spark_home, 'python'))
sys.path.insert(0, os.path.join(spark_home, 'python/lib/py4j-0.10.4-src.zip'))
from pyspark.sql import SparkSession
spark = SparkSession.builder.appName("test") \
.getOrCreate()
logger = logging.getLogger(__name__)
logger.addHandler(logging.FileHandler(LOG_PATH))
def main(*args):
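    # computes the top-N movies by mean rating from ratings.csv and logs them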
top = int(args[0][0])
data = spark.read.csv("hdfs:///tmp/ratings.csv", sep = ',', header= True)
result = (data
.groupBy("movieid")
.agg({'rating': 'mean'})
.withColumnRenamed("avg(rating)", "avg_ratings")
.dropna()
.orderBy(['avg_ratings'], ascending=[0])
.limit(top))
logger.info("result: {}".format(result.toPandas()))
#spark.stop()
if __name__ == '__main__':
logging.basicConfig(format='[%(levelname)s] %(asctime)s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO)
main(sys.argv[1:])
# === infra_macros/fbcode_macros/tests/shell_test.py (repo: xw285cornell/buckit, BSD-3-Clause) ===
# Copyright 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import, division, print_function, unicode_literals
import shlex
import tests.utils
class ShellTest(tests.utils.TestCase):
includes = [("@fbcode_macros//build_defs:shell.bzl", "shell")]
@tests.utils.with_project()
def test_split_works_like_shlex_split(self, root):
test_strings = [
r"",
r"FOO BAR",
" foo \t\nbar\n baz",
r'foo -D"bar"',
r'foo -D"\"something quoted\"" last\ string',
r'foo -D"\n contains backslash still" ',
r"""foo -D'something something \"dark side\"'""",
r"""-DFOO -D"\ B'A'R=\"something here\""'something" else' -D\ BAZ -D\\some""",
r'''-DFOO -DBAR="baz \"\\\"lots of quotes\\\"\""''',
]
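        # shell.split must tokenize each string exactly like shlex.split does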
commands = ["shell.split(%r)" % s.encode("ascii") for s in test_strings]
expected = [shlex.split(s) for s in test_strings]
result = root.runUnitTests(self.includes, commands)
self.assertSuccess(result)
self.assertEqual(
expected, [[x.encode("utf-8") for x in line] for line in result.debug_lines]
)
# === bash/src/_func_storage.py (repo: BillGatesCat/yf, Apache-2.0) ===
class _FuncStorage:
def __init__(self):
self._function_map = {}
def insert_function(self, name, function):
self._function_map[name] = function
def get_all_functions(self):
return self._function_map
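# --- Hedged usage sketch (not part of the original module) ---
if __name__ == "__main__":
    storage = _FuncStorage()
    storage.insert_function("greet", lambda: "hello")
    print(storage.get_all_functions()["greet"]())  # -> hello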
# === tests/unit/core/SubdomainTest.py (repo: edgargmartinez/OpenPNM, MIT) ===
import openpnm as op
import scipy as sp
import pytest
class SubdomainTest:
def setup_class(self):
ws = op.Workspace()
ws.settings['local_data'] = True
self.net = op.network.Cubic(shape=[3, 3, 3])
self.geo = op.geometry.GenericGeometry(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.geo['pore.diameter'] = sp.rand(self.net.Np)
self.geo.add_model(propname='pore.volume',
model=op.models.geometry.pore_volume.sphere)
self.geo['throat.diameter'] = sp.rand(self.net.Nt)
self.geo.add_model(propname='throat.area',
model=op.models.geometry.throat_area.cylinder)
self.geo.regenerate_models()
self.phase1 = op.phases.GenericPhase(network=self.net)
self.phase2 = op.phases.GenericPhase(network=self.net)
self.phys1 = op.physics.GenericPhysics(network=self.net,
geometry=self.geo,
phase=self.phase1)
self.phys1['pore.blah'] = 1.0
self.phys2 = op.physics.GenericPhysics(network=self.net,
geometry=self.geo,
phase=self.phase2)
self.phys2['pore.blah'] = 2.0
def teardown_class(self):
ws = op.Workspace()
ws.clear()
def test_drop_locations_from_geom_successively_with_single_geometry(self):
assert self.geo.Np == 27
assert self.geo.Nt == 54
self.geo._drop_locations(pores=[0, 1, 2], throats=[0, 1, 2])
assert self.geo.Np == 24
assert self.geo.Nt == 51
self.geo._drop_locations(pores=[3, 4], throats=[3, 4])
assert self.geo.Np == 22
assert self.geo.Nt == 49
self.geo._add_locations(pores=[0, 1, 2, 3, 4], throats=[0, 1, 2, 3, 4])
assert self.geo.Np == 27
assert self.geo.Nt == 54
def test_drop_locations_from_physics_successively_with_two_physics(self):
assert self.phys1.Np == 27
assert self.phys1.Nt == 54
self.phys1._drop_locations(pores=[0, 1], throats=[0, 1])
assert self.phys1.Np == 25
assert self.phys1.Nt == 52
self.phys1._drop_locations(pores=[3, 4], throats=[3, 4])
assert self.phys1.Np == 23
assert self.phys1.Nt == 50
self.phys1._add_locations(pores=[0, 1, 3, 4], throats=[0, 1, 3, 4])
assert self.phys1.Np == 27
assert self.phys1.Nt == 54
def test_drop_locations_all_but_not_complete(self):
assert self.phys1.Np == 27
assert self.phys1.Nt == 54
assert 'pore.'+self.phys1.name in self.phase1.keys()
assert 'throat.'+self.phys1.name in self.phase1.keys()
self.phys1._drop_locations(pores=self.net.Ps)
assert 'pore.'+self.phys1.name in self.phase1.keys()
assert self.phase1.num_pores(self.phys1.name) == 0
assert 'throat.'+self.phys1.name in self.phase1.keys()
self.phys1._drop_locations(throats=self.net.Ts)
assert 'throat.'+self.phys1.name in self.phase1.keys()
assert self.phase1.num_throats(self.phys1.name) == 0
self.phys1._add_locations(pores=self.net.Ps, throats=self.net.Ts)
    def test_writing_subdict_names_across_subdomains(self):
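        # 'pore.foo' and 'pore.foo.bar' are mutually exclusive keys, both on
        # one object and across objects that share a project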
ws = op.Workspace()
proj = ws.new_project()
pn = op.network.Cubic(shape=[10, 10, 10], spacing=1e-4, project=proj)
Ps = pn['pore.coords'][:, 0] < pn['pore.coords'][:, 0].mean()
Ts = pn.find_neighbor_throats(pores=Ps, mode='xnor')
geo1 = op.geometry.StickAndBall(network=pn, pores=Ps, throats=Ts)
Ps = pn['pore.coords'][:, 0] >= pn['pore.coords'][:, 0].mean()
Ts = pn.find_neighbor_throats(pores=Ps, mode='or')
geo2 = op.geometry.StickAndBall(network=pn, pores=Ps, throats=Ts)
pn['pore.foo'] = 1
# Can't create a subdict below foo
with pytest.raises(Exception):
pn['pore.foo.bar'] = 1
# Can create a subdict directly
pn['pore.baz.bar'] = 2
# Can't create a new item already used as subdict
with pytest.raises(Exception):
pn['pore.baz'] = 2
# Also works on subdomains
geo1['pore.blah'] = 1
with pytest.raises(Exception):
geo1['pore.blah.boo'] = 1
geo1['pore.bee.bop'] = 1
with pytest.raises(Exception):
geo1['pore.bee'] = 1
# Now start looking across objects
with pytest.raises(Exception):
geo1['pore.foo'] = 1 # Already exists on pn
with pytest.raises(Exception):
geo1['pore.foo.bar'] = 1 # pore.foo already exists on pn
with pytest.raises(Exception):
geo1['pore.baz'] = 1 # pore.baz.bar already exists on pn
# Now start looking across objects
geo2['pore.blah'] = 1
geo2['pore.bee.bop'] = 1
with pytest.raises(Exception):
geo1['pore.bee'] = 1
with pytest.raises(Exception):
pn['pore.bee'] = 1
with pytest.raises(Exception):
pn['pore.bee.bop'] = 1
if __name__ == '__main__':
t = SubdomainTest()
self = t
t.setup_class()
for item in t.__dir__():
if item.startswith('test'):
print('running test: '+item)
t.__getattribute__(item)()
# === loci/io.py (repo: SLIPO-EU/loci, Apache-2.0) ===
import pandas as pd
from shapely.geometry import Point
import geopandas as gpd
import math
import osmnx
import requests
from io import BytesIO
from zipfile import ZipFile
def read_poi_csv(input_file, col_id='id', col_name='name', col_lon='lon', col_lat='lat', col_kwds='kwds', col_sep=';',
kwds_sep=',', source_crs='EPSG:4326', target_crs='EPSG:4326', keep_other_cols=False):
"""Creates a POI GeoDataFrame from an input CSV file.
Args:
input_file (string): Path to the input csv file.
col_id (string): Name of the column containing the POI id (default: `id`).
col_name (string): Name of the column containing the POI name (default: `name`).
col_lon (string): Name of the column containing the POI longitude (default: `lon`).
col_lat (string): Name of the column containing the POI latitude (default: `lat`).
col_kwds (string): Name of the column containing the POI keywords (default: `kwds`).
col_sep (string): Column delimiter (default: `;`).
kwds_sep (string): Keywords delimiter (default: `,`).
source_crs (string): Coordinate Reference System of input data (default: `EPSG:4326`).
target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
keep_other_cols (bool): Whether to keep the rest of the columns in the csv file (default: `False`).
Returns:
A POI GeoDataFrame with columns `id`, `name` and `kwds`.
"""
def lon_lat_to_point(row, c_lon, c_lat):
try:
x_lon = float(row[c_lon])
y_lat = float(row[c_lat])
if math.isnan(x_lon) is False and math.isnan(y_lat) is False:
return Point(x_lon, y_lat)
else:
return float('NaN')
        except Exception:
return float('NaN')
pois = pd.read_csv(input_file, delimiter=col_sep, error_bad_lines=False)
init_poi_size = pois.index.size
columns = list(pois)
subset_cols = []
# Columns to Check for N/A, Nulls
if keep_other_cols:
subset_cols.extend(columns)
else:
subset_cols = [col_id, col_lon, col_lat]
if col_name in columns:
subset_cols.append(col_name)
if col_kwds in columns:
subset_cols.append(col_kwds)
# Geometry Column(Uncleaned)
pois['geometry'] = pois.apply(lambda row: lon_lat_to_point(row, col_lon, col_lat), axis=1)
subset_cols.append('geometry')
# Drop Columns Not in subset Columns.
drop_columns = set(columns) - set(subset_cols)
pois.drop(drop_columns, inplace=True, axis=1)
# Drop all N/A, Null rows from DataFrame.
pois.dropna(inplace=True)
if init_poi_size - pois.index.size > 0:
print("Skipped", (init_poi_size - pois.index.size), "rows due to errors.")
if col_kwds in columns:
pois[col_kwds] = pois[col_kwds].map(lambda s: s.split(kwds_sep))
source_crs = {'init': source_crs}
target_crs = {'init': target_crs}
pois = gpd.GeoDataFrame(pois, crs=source_crs, geometry=pois['geometry']).to_crs(target_crs).drop(columns=[col_lon,
col_lat])
print('Loaded ' + str(len(pois.index)) + ' POIs.')
return pois
def import_osmnx(bound, target_crs='EPSG:4326'):
"""Creates a POI GeoDataFrame from POIs retrieved by OSMNX (https://github.com/gboeing/osmnx).
Args:
bound (polygon): A polygon to be used as filter.
target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
Returns:
A POI GeoDataFrame with columns `id`, `name` and `kwds`.
"""
# retrieve pois
pois = osmnx.pois.pois_from_polygon(bound)
if len(pois.index) > 0:
# filter pois
pois = pois[pois.amenity.notnull()]
pois_filter = pois.element_type == 'node'
pois = pois[pois_filter]
# restructure gdf
subset_cols = ['osmid', 'amenity', 'name', 'geometry']
columns = list(pois)
drop_columns = set(columns) - set(subset_cols)
pois.drop(drop_columns, inplace=True, axis=1)
pois = pois.reset_index(drop=True)
pois = pois.rename(columns={'osmid': 'id', 'amenity': 'kwds'})
pois['kwds'] = pois['kwds'].map(lambda s: [s])
if target_crs != 'EPSG:4326':
target_crs = {'init': target_crs}
pois = pois.to_crs(target_crs)
print('Loaded ' + str(len(pois.index)) + ' POIs.')
return pois
def import_osmwrangle(osmwrangle_file, target_crs='EPSG:4326', bound=None):
"""Creates a POI GeoDataFrame from a file produced by OSMWrangle (https://github.com/SLIPO-EU/OSMWrangle).
Args:
osmwrangle_file (string): Path or URL to the input csv file.
target_crs (string): Coordinate Reference System of the GeoDataFrame to be created (default: `EPSG:4326`).
bound (polygon): A polygon to be used as filter.
Returns:
A POI GeoDataFrame with columns `id`, `name` and `kwds`.
"""
def lon_lat_to_point(row, c_lon, c_lat):
x_lon = float(row[c_lon])
y_lat = float(row[c_lat])
if math.isnan(x_lon) is False and math.isnan(y_lat) is False:
return Point(x_lon, y_lat)
else:
return float('NaN')
col_sep = '|'
col_id = 'ID'
col_lon = 'LON'
col_lat = 'LAT'
col_name = 'NAME'
col_cat = 'CATEGORY'
col_subcat = 'SUBCATEGORY'
source_crs = {'init': 'EPSG:4326'}
# Load the file
if osmwrangle_file.startswith('http') and osmwrangle_file.endswith('.zip'):
response = requests.get(osmwrangle_file)
zip_file = ZipFile(BytesIO(response.content))
with zip_file.open(zip_file.namelist()[0]) as csvfile:
pois = pd.read_csv(csvfile, delimiter=col_sep, error_bad_lines=False)
else:
pois = pd.read_csv(osmwrangle_file, delimiter=col_sep, error_bad_lines=False)
init_poi_size = pois.index.size
columns = list(pois)
subset_cols = [col_id, col_name, 'kwds', col_lon, col_lat]
# Geometry Column(Uncleaned)
pois['geometry'] = pois.apply(lambda row: lon_lat_to_point(row, col_lon, col_lat), axis=1)
subset_cols.append('geometry')
pois['kwds'] = pois[col_cat] + ',' + pois[col_subcat]
pois['kwds'] = pois['kwds'].map(lambda s: s.split(','))
# Drop Columns Not in subset Columns.
drop_columns = set(columns) - set(subset_cols)
pois.drop(drop_columns, inplace=True, axis=1)
# Drop all N/A, Null rows from DataFrame.
pois.dropna(inplace=True)
if init_poi_size - pois.index.size > 0:
print("Skipped", (init_poi_size - pois.index.size), "rows due to errors.")
pois = pois.rename(columns={col_id: 'id', col_name: 'name'})
pois = gpd.GeoDataFrame(pois, crs=source_crs, geometry=pois['geometry']).drop(columns=[col_lon, col_lat])
# Check whether location filter should be applied
if bound is not None:
spatial_filter = pois.geometry.intersects(bound)
pois = pois[spatial_filter]
if target_crs != 'EPSG:4326':
target_crs = {'init': target_crs}
pois = pois.to_crs(target_crs)
print('Loaded ' + str(len(pois.index)) + ' POIs.')
return pois
def retrieve_osm_loc(name, buffer_dist=0):
"""Retrieves a polygon from an OSM location.
Args:
name (string): Name of the location to be resolved.
buffer_dist (numeric): Buffer distance in meters.
Returns:
A polygon.
"""
geom = osmnx.core.gdf_from_place(name, buffer_dist=buffer_dist)
if len(geom.index) > 0:
geom = geom.iloc[0].geometry
else:
geom = None
return geom
def to_geojson(gdf, output_file):
"""Exports a GeoDataFrame to a GeoJSON file.
Args:
gdf (GeoDataFrame): The GeoDataFrame object to be exported.
output_file (string): Path to the output file.
"""
gdf.to_file(output_file, driver='GeoJSON')
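# --- Hedged usage sketch (not part of the original module) ---
# File name, location and buffer distance are made up for illustration.
#
#   pois = read_poi_csv('pois.csv', col_sep='|')
#   bound = retrieve_osm_loc('Athens, Greece', buffer_dist=500)
#   pois = pois[pois.geometry.within(bound)]
#   to_geojson(pois, 'athens_pois.geojson')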
# === src/roles/wolf.py (repo: timson622222/lykos, BSD-2-Clause) ===
import re
import random
from collections import defaultdict
import src.settings as var
from src.utilities import *
from src import debuglog, errlog, plog
from src.decorators import cmd, event_listener
from src.messages import messages
from src.events import Event
KILLS = {} # type: Dict[str, List[str]]
@cmd("kill", chan=False, pm=True, playing=True, phases=("night",))
def wolf_kill(cli, nick, chan, rest):
"""Kills one or more players as a wolf."""
role = get_role(nick)
# eventually cub will listen on targeted_command and block kills that way
if role not in var.WOLF_ROLES - {"wolf cub"}:
return
if nick in var.SILENCED:
pm(cli, nick, messages["silenced"])
return
if var.DISEASED_WOLVES:
pm(cli, nick, messages["ill_wolves"])
return
# eventually crow will listen on targeted_command and block kills that way
# (or more likely, that restriction will be lifted and crow can do both)
if role == "werecrow" and var.OBSERVED.get(nick):
pm(cli, nick, messages["werecrow_transformed_nokill"])
return
pieces = re.split(" +", rest)
victims = []
orig = []
num_kills = 1
if var.ANGRY_WOLVES:
num_kills = 2
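        # angry wolves may pick two distinct victims, e.g. "kill alice and bob"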
i = 0
extra = 0
while i < num_kills + extra:
try:
victim = pieces[i]
except IndexError:
break
if victim.lower() == "and":
extra += 1
i += 1
victim = pieces[i]
victim = get_victim(cli, nick, victim, False)
if not victim:
return
if victim == nick:
pm(cli, nick, messages["no_suicide"])
return
if in_wolflist(nick, victim):
pm(cli, nick, messages["wolf_no_target_wolf"])
return
orig.append(victim)
evt = Event("targeted_command", {"target": victim, "misdirection": True, "exchange": True})
evt.dispatch(cli, var, "kill", nick, victim, frozenset({"detrimental"}))
if evt.prevent_default:
return
victim = evt.data["target"]
victims.append(victim)
i += 1
if len(set(victims)) < len(victims):
pm(cli, nick, messages["wolf_must_target_multiple"])
return
KILLS[nick] = victims
if len(orig) > 1:
# need to expand this eventually
msg = messages["wolf_target_multiple"].format(orig[0], orig[1])
pm(cli, nick, messages["player"].format(msg))
debuglog("{0} ({1}) KILL: {2} ({3}) and {4} ({5})".format(nick, role, victims[0], get_role(victims[0]), victims[1], get_role(victims[1])))
else:
msg = messages["wolf_target"].format(orig[0])
pm(cli, nick, messages["player"].format(msg))
if num_kills > 1:
pm(cli, nick, messages["wolf_target_second"])
debuglog("{0} ({1}) KILL: {2} ({3})".format(nick, role, victims[0], get_role(victims[0])))
if in_wolflist(nick, nick):
relay_wolfchat_command(cli, nick, messages["wolfchat"].format(nick, msg), var.WOLF_ROLES, is_wolf_command=True, is_kill_command=True)
chk_nightdone(cli)
@cmd("retract", "r", chan=False, pm=True, playing=True, phases=("night",))
def wolf_retract(cli, nick, chan, rest):
"""Removes a wolf's kill selection."""
if nick not in KILLS:
return
del KILLS[nick]
pm(cli, nick, messages["retracted_kill"])
relay_wolfchat_command(cli, nick, messages["wolfchat_retracted_kill"].format(nick), var.WOLF_ROLES, is_wolf_command=True, is_kill_command=True)
@event_listener("del_player")
def on_del_player(evt, cli, var, nick, nickrole, nicktpls, death_triggers):
for a,b in list(KILLS.items()):
for n in b:
if n == nick:
KILLS[a].remove(nick)
if a == nick or len(KILLS[a]) == 0:
del KILLS[a]
@event_listener("rename_player")
def on_rename(evt, cli, var, prefix, nick):
kvp = []
for a,b in KILLS.items():
nl = []
for n in b:
if n == prefix:
n = nick
nl.append(n)
if a == prefix:
a = nick
kvp.append((a,nl))
KILLS.update(kvp)
if prefix in KILLS:
del KILLS[prefix]
@event_listener("night_acted")
def on_acted(evt, cli, var, nick, sender):
if nick in KILLS:
evt.data["acted"] = True
@event_listener("transition_day", priority=1)
def on_transition_day(evt, cli, var):
# figure out wolf target
found = defaultdict(int)
# split off into event + wolfcub.py
num_kills = 1
if var.ANGRY_WOLVES:
num_kills = 2
for v in KILLS.values():
for p in v:
if p:
# kill target starting with ! is invalid
# right now nothing does this, but monster eventually will
if p[0] == "!":
continue
found[p] += 1
for i in range(num_kills):
maxc = 0
dups = []
for v, c in found.items():
if c > maxc:
maxc = c
dups = [v]
elif c == maxc:
dups.append(v)
if maxc and dups:
victim = random.choice(dups)
evt.data["victims"].append(victim)
evt.data["bywolves"].add(victim)
evt.data["onlybywolves"].add(victim)
# special key to let us know to randomly select a wolf in case of retribution totem
evt.data["killers"][victim].append("@wolves")
del found[victim]
# this should be moved to an event in kill, where monster prefixes their nick with !
# and fallen angel subsequently removes the ! prefix
if len(var.ROLES["fallen angel"]) == 0:
for monster in var.ROLES["monster"]:
            if monster in evt.data["victims"]:
evt.data["victims"].remove(monster)
evt.data["bywolves"].discard(monster)
evt.data["onlybywolves"].discard(monster)
@event_listener("exchange_roles")
def on_exchange(evt, cli, var, actor, nick, actor_role, nick_role):
if actor in KILLS:
del KILLS[actor]
if nick in KILLS:
del KILLS[nick]
@event_listener("chk_nightdone", priority=3)
def on_chk_nightdone(evt, cli, var):
if not var.DISEASED_WOLVES:
evt.data["actedcount"] += len(KILLS)
# eventually wolf cub will remove itself from nightroles in wolfcub.py
evt.data["nightroles"].extend(list_players(var.WOLF_ROLES - {"wolf cub"}))
@event_listener("chk_nightdone", priority=20)
def on_chk_nightdone2(evt, cli, var):
if not evt.prevent_default and not var.DISEASED_WOLVES:
# flatten KILLS
kills = set()
for ls in KILLS.values():
kills.update(ls)
# check if wolves are actually agreeing
# allow len(kills) == 0 through as that means that crow was dumb and observed instead
if not var.ANGRY_WOLVES and len(kills) > 1:
evt.data["actedcount"] -= 1
elif var.ANGRY_WOLVES and (len(kills) == 1 or len(kills) > 2):
evt.data["actedcount"] -= 1
@event_listener("transition_night_end", priority=2)
def on_transition_night_end(evt, cli, var):
ps = list_players()
wolves = list_players(var.WOLFCHAT_ROLES)
# roles in wolfchat (including those that can only listen in but not speak)
wcroles = var.WOLFCHAT_ROLES
# roles allowed to talk in wolfchat
talkroles = var.WOLFCHAT_ROLES
# condition imposed on talking in wolfchat (only during day/night, or None if talking is disabled)
wccond = ""
if var.RESTRICT_WOLFCHAT & var.RW_DISABLE_NIGHT:
if var.RESTRICT_WOLFCHAT & var.RW_DISABLE_DAY:
wccond = None
else:
wccond = " during day"
elif var.RESTRICT_WOLFCHAT & var.RW_DISABLE_DAY:
wccond = " during night"
if var.RESTRICT_WOLFCHAT & var.RW_REM_NON_WOLVES:
if var.RESTRICT_WOLFCHAT & var.RW_TRAITOR_NON_WOLF:
wcroles = var.WOLF_ROLES
talkroles = var.WOLF_ROLES
else:
wcroles = var.WOLF_ROLES | {"traitor"}
talkroles = var.WOLF_ROLES | {"traitor"}
elif var.RESTRICT_WOLFCHAT & var.RW_WOLVES_ONLY_CHAT:
if var.RESTRICT_WOLFCHAT & var.RW_TRAITOR_NON_WOLF:
talkroles = var.WOLF_ROLES
else:
talkroles = var.WOLF_ROLES | {"traitor"}
for wolf in wolves:
# should make the cursed information an event that cursedvillager can then add to
# (e.g. an event to change what prefixes are sent with the role message, and a
# 2nd event to change information in parens in player list)
normal_notify = wolf in var.PLAYERS and not is_user_simple(wolf)
role = get_role(wolf)
cursed = "cursed " if wolf in var.ROLES["cursed villager"] and role in wcroles else ""
if normal_notify:
msg = "{0}_notify".format(role.replace(" ", "_"))
cmsg = "cursed_" + msg
try:
if cursed:
try:
pm(cli, wolf, messages[cmsg])
except KeyError:
pm(cli, wolf, messages[msg].format(cursed))
else:
pm(cli, wolf, messages[msg].format(cursed))
except KeyError:
# catchall in case we forgot something above
an = 'n' if role.startswith(("a", "e", "i", "o", "u")) else ""
pm(cli, wolf, messages["undefined_role_notify"].format(an, role))
if len(wolves) > 1 and wccond is not None and role in talkroles:
pm(cli, wolf, messages["wolfchat_notify"].format(wccond))
else:
an = "n" if cursed == "" and role.startswith(("a", "e", "i", "o", "u")) else ""
pm(cli, wolf, messages["wolf_simple"].format(an, cursed, role)) # !simple
pl = ps[:]
random.shuffle(pl)
pl.remove(wolf) # remove self from list
if role in wcroles:
for i, player in enumerate(pl):
prole = get_role(player)
if prole in wcroles:
cursed = ""
if player in var.ROLES["cursed villager"]:
cursed = "cursed "
pl[i] = "\u0002{0}\u0002 ({1}{2})".format(player, cursed, prole)
elif player in var.ROLES["cursed villager"]:
pl[i] = player + " (cursed)"
elif role == "warlock":
for i, player in enumerate(pl):
if player in var.ROLES["cursed villager"]:
pl[i] = player + " (cursed)"
pm(cli, wolf, "Players: " + ", ".join(pl))
if role in var.WOLF_ROLES - {"wolf cub"} and var.DISEASED_WOLVES:
pm(cli, wolf, messages["ill_wolves"])
# TODO: split the following out into their own files (mystic, cub and alpha)
if role == "wolf mystic":
# if adding this info to !myrole, you will need to save off this count so that they can't get updated info until the next night
# # of special villagers = # of players - # of villagers - # of wolves - # of neutrals
numvills = len(ps) - len(list_players(var.WOLFTEAM_ROLES)) - len(list_players(("villager", "vengeful ghost", "time lord", "amnesiac", "lycan"))) - len(list_players(var.TRUE_NEUTRAL_ROLES))
pm(cli, wolf, messages["wolf_mystic_info"].format("are" if numvills != 1 else "is", numvills, "s" if numvills != 1 else ""))
if not var.DISEASED_WOLVES and var.ANGRY_WOLVES and role in var.WOLF_ROLES - {"wolf cub"}:
pm(cli, wolf, messages["angry_wolves"])
if var.ALPHA_ENABLED and role == "alpha wolf" and wolf not in var.ALPHA_WOLVES:
pm(cli, wolf, messages["wolf_bite"])
@event_listener("begin_day")
def on_begin_day(evt, cli, var):
KILLS.clear()
@event_listener("reset")
def on_reset(evt, var):
KILLS.clear()
# vim: set sw=4 expandtab:
| 39.269737 | 200 | 0.590467 | 0 | 0 | 0 | 0 | 11,580 | 0.970012 | 0 | 0 | 3,026 | 0.253476 |
31268631bfa9305773df79c3ef1137b982360dd1 | 2,878 | py | Python | BOG.py | punyajoy/biosbias | bedca0b8605e3e99d2a2b56c78a5b98c9839a77b | [
"MIT"
]
| null | null | null | BOG.py | punyajoy/biosbias | bedca0b8605e3e99d2a2b56c78a5b98c9839a77b | [
"MIT"
]
| null | null | null | BOG.py | punyajoy/biosbias | bedca0b8605e3e99d2a2b56c78a5b98c9839a77b | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 28 13:52:20 2020
@author: midas
"""
import os
import glob
import pandas as pd
import numpy as np
all_filenames=['Data/Train.csv', 'Data/Test.csv']
combined_csv = pd.concat([pd.read_csv(f) for f in all_filenames ])
combined_csv.to_csv( "combined_csv.csv", index=False, encoding='utf-8-sig')
from tqdm import tqdm_notebook,tqdm
from sklearn import preprocessing
train_data=pd.read_csv("Data/Train.csv")
test_data=pd.read_csv("Data/Test.csv")
train_wo_g=[]
train_w_g=[]
test_wo_g=[]
test_w_g=[]
combined_csv
for index,row in tqdm(combined_csv.iterrows()):
try:
index_to_start=int(row['start_pos'])
except:
continue
tuple1= [row['raw'][index_to_start:],row['title'],row['gender']]
tuple2= [row['bio'][index_to_start:],row['title'],row['gender']]
train_w_g.append(tuple1)
train_wo_g.append(tuple2)
TrainTestWithGen = pd.DataFrame(train_w_g, columns =['Text', 'title', 'gender'])
TrainTestWithoutGen= pd.DataFrame(train_wo_g, columns =['Text', 'title', 'gender'])
# Cleaning the texts
import re
import nltk
nltk.download('stopwords')
from nltk.corpus import stopwords
from nltk.stem.porter import PorterStemmer
corpus = []
for i in range(0, len(TrainTestWithGen)):
review = re.sub('[^a-zA-Z]', ' ', TrainTestWithGen['Text'][i])
review = review.lower()
review = review.split()
ps = PorterStemmer()
review = [ps.stem(word) for word in review if not word in set(stopwords.words('english'))]
review = ' '.join(review)
corpus.append(review)
# Creating the Bag of Words model
from sklearn.feature_extraction.text import CountVectorizer
cv = CountVectorizer(max_features = 30000)
X = cv.fit_transform(corpus).toarray()
X_all=pd.DataFrame(X)
X_all['title']=TrainTestWithGen['title']
X_all['gender']=TrainTestWithGen['gender']
X_Train=X_all[:53754]
X_Test=X_all[53754:]
X_Train.to_csv('Train_With_Gen.csv')
X_Test.to_csv('Test_With_Gen.csv')
#Without Gender
corpus2 = []
for i in range(0, len(TrainTestWithGen)):
review = re.sub('[^a-zA-Z]', ' ', TrainTestWithGen['Text'][i])
review = review.lower()
review = review.split()
ps = PorterStemmer()
review = [ps.stem(word) for word in review if not word in set(stopwords.words('english'))]
review = ' '.join(review)
corpus2.append(review)
# Creating the Bag of Words model
from sklearn.feature_extraction.text import CountVectorizer
cv2 = CountVectorizer(max_features = 30000)
X2 = cv2.fit_transform(corpus2).toarray()
X_all2=pd.DataFrame(X2)
X_all2['title']=TrainTestWithoutGen['title']
X_all2['gender']=TrainTestWithoutGen['gender']
X_Train2=X_all2[:53754]
X_Test2=X_all2[53754:]
X_Train2.to_csv('Train_WithOut_Gen.csv')
X_Test2.to_csv('Test_WithOut_Gen.csv')
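def _train_title_classifier_example():
    # Editor's sketch (not part of the original script): one plausible way to
    # consume the CSVs written above -- fit a simple occupation classifier.
    # LogisticRegression and the 'title' target are assumptions, not something
    # the original author specified.
    from sklearn.linear_model import LogisticRegression
    train = pd.read_csv('Train_With_Gen.csv')
    test = pd.read_csv('Test_With_Gen.csv')
    feature_cols = [c for c in train.columns
                    if c not in ('Unnamed: 0', 'title', 'gender')]
    clf = LogisticRegression(max_iter=1000)
    clf.fit(train[feature_cols], train['title'])
    return clf.score(test[feature_cols], test['title'])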
| 24.184874 | 95 | 0.683113 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 595 | 0.206741 |
31274ba4d0ae0b5f6147828e52210073924bc1c5 | 12,388 | py | Python | src/ploomber/jupyter/manager.py | idomic/ploomber | 89b5e544b0540cf2cbb6bcd09946537198115d17 | [
"Apache-2.0"
]
| null | null | null | src/ploomber/jupyter/manager.py | idomic/ploomber | 89b5e544b0540cf2cbb6bcd09946537198115d17 | [
"Apache-2.0"
]
| null | null | null | src/ploomber/jupyter/manager.py | idomic/ploomber | 89b5e544b0540cf2cbb6bcd09946537198115d17 | [
"Apache-2.0"
]
| null | null | null | """
Module for the jupyter extension
"""
import sys
import datetime
import os
import contextlib
from pprint import pprint
from pathlib import Path
from jupytext.contentsmanager import TextFileContentsManager
from ploomber.sources.notebooksource import (_cleanup_rendered_nb, inject_cell)
from ploomber.spec.dagspec import DAGSpec
from ploomber.exceptions import DAGSpecInitializationError
from ploomber.cli import parsers
from ploomber.jupyter.dag import JupyterDAGManager
@contextlib.contextmanager
def chdir(directory):
old_dir = os.getcwd()
try:
os.chdir(str(directory))
yield
finally:
os.chdir(old_dir)
def resolve_path(parent, path):
"""
    Resolves paths to make the {source} -> {task} mapping
    work even when `jupyter notebook` is initialized from a subdirectory
of pipeline.yaml
"""
try:
# FIXME: remove :linenumber
return Path(parent,
path).relative_to(Path('.').resolve()).as_posix().strip()
except ValueError:
return None
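def _resolve_path_example():
    # Editor's sketch (not from the original source): concrete behaviour of
    # resolve_path. The file names are hypothetical; the second call returns
    # None because the file lives outside the current working directory.
    inside = resolve_path(Path(os.getcwd(), 'nb'), 'clean.py')    # 'nb/clean.py'
    outside = resolve_path(Path('/somewhere/else'), 'clean.py')   # None
    return inside, outside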
def check_metadata_filter(log, model):
try:
cell_metadata_filter = (
model['content']['metadata']['jupytext']['cell_metadata_filter'])
except Exception:
cell_metadata_filter = None
if cell_metadata_filter == '-all':
log.warning('Your notebook has filter that strips out '
'cell metadata when saving it from the Jupyter notebook '
'app. This will prevent you from tagging your '
'"parameters" cell. It is possible that this comes '
'from jupytext defaults, either add the tag by '
'editing the notebook in a text editor or enable '
'metadata in the Jupyter app: File -> Jupytext -> '
'Include metadata')
class PloomberContentsManager(TextFileContentsManager):
"""
Ploomber content manager subclasses jupytext TextFileContentsManager
to keep jupytext features of opening .py files as notebooks but adds
a feature that automatically injects parameters in notebooks if they
are part of a pipeline defined in pipeline.yaml, these injected parameters
are deleted before saving the file
"""
restart_msg = (' Fix the issue and and restart "jupyter notebook"')
def load_dag(self, starting_dir=None):
if self.dag is None or self.spec['meta']['jupyter_hot_reload']:
self.log.info('[Ploomber] Loading dag...')
            msg = ('[Ploomber] An error occurred when trying to initialize '
                   'the pipeline. Cells won\'t be injected until your '
'pipeline processes correctly. See error details below.')
if self.spec and not self.spec['meta']['jupyter_hot_reload']:
msg += self.restart_msg
env_var = os.environ.get('ENTRY_POINT')
try:
if env_var:
(self.spec, self.dag,
self.path) = parsers.load_entry_point(env_var)
else:
hot_reload = (self.spec
and self.spec['meta']['jupyter_hot_reload'])
(self.spec, self.dag,
self.path) = DAGSpec._auto_load(starting_dir=starting_dir,
reload=hot_reload)
except DAGSpecInitializationError:
self.reset_dag()
self.log.exception(msg)
else:
if self.dag is not None:
current = os.getcwd()
if self.spec['meta'][
'jupyter_hot_reload'] and current not in sys.path:
# jupyter does not add the current working dir by
# default, if using hot reload and the dag loads
# functions from local files, importlib.reload will
# fail
# NOTE: might be better to only add this when the dag
# is actually loading from local files but that means
# we have to run some logic and increases load_dag
# running time, which we need to be fast
sys.path.append(current)
base_path = Path(self.path).resolve()
with chdir(base_path):
# this dag object won't be executed, forcing speeds
# rendering up
self.dag.render(force=True)
if self.spec['meta']['jupyter_functions_as_notebooks']:
self.manager = JupyterDAGManager(self.dag)
else:
self.manager = None
tuples = [(resolve_path(base_path, t.source.loc), t)
for t in self.dag.values()
if t.source.loc is not None]
self.dag_mapping = {
t[0]: t[1]
for t in tuples if t[0] is not None
}
self.log.info('[Ploomber] Initialized dag from '
'pipeline.yaml at'
': {}'.format(base_path))
self.log.info('[Ploomber] Pipeline mapping: {}'.format(
pprint(self.dag_mapping)))
else:
# no pipeline.yaml found...
self.log.info('[Ploomber] No pipeline.yaml found, '
'skipping DAG initialization...')
self.dag_mapping = None
def reset_dag(self):
self.spec = None
self.dag = None
self.path = None
self.dag_mapping = None
self.manager = None
def __init__(self, *args, **kwargs):
"""
Initialize the content manger, look for a pipeline.yaml file in the
current directory, if there is one, load it, if there isn't one
don't do anything
"""
self.reset_dag()
# try to automatically locate the dag spec
self.load_dag()
return super(PloomberContentsManager, self).__init__(*args, **kwargs)
def get(self, path, content=True, type=None, format=None):
"""
This is called when a file/directory is requested (even in the list
view)
"""
# FIXME: reloading inside a (functions) folder causes 404
if content:
self.load_dag()
if self.manager and path in self.manager:
return self.manager.get(path, content)
model = super(PloomberContentsManager, self).get(path=path,
content=content,
type=type,
format=format)
# user requested directory listing, check if there are task functions
# defined here
if model['type'] == 'directory' and self.manager:
if model['content']:
model['content'].extend(self.manager.get_by_parent(path))
check_metadata_filter(self.log, model)
# if opening a file (ignore file listing), load dag again
if (model['content'] and model['type'] == 'notebook'):
# Look for the pipeline.yaml file from the file we are rendering
# and search recursively. This is required to cover the case when
# pipeline.yaml is in a subdirectory from the folder where the
# user executed "jupyter notebook"
# FIXME: we actually don't need to reload the dag again, we just
# have to rebuild the mapping to make _model_in_dag work
self.load_dag(starting_dir=Path(os.getcwd(), model['path']).parent)
if self._model_in_dag(model):
self.log.info('[Ploomber] Injecting cell...')
inject_cell(model=model,
params=self.dag_mapping[model['path']]._params)
return model
def save(self, model, path=""):
"""
This is called when a file is saved
"""
if self.manager and path in self.manager:
out = self.manager.overwrite(model, path)
return out
else:
check_metadata_filter(self.log, model)
# not sure what's the difference between model['path'] and path
# but path has leading "/", _model_in_dag strips it
key = self._model_in_dag(model, path)
if key:
self.log.info(
'[Ploomber] Cleaning up injected cell in {}...'.format(
model.get('name') or ''))
model['content'] = _cleanup_rendered_nb(model['content'])
self.log.info("[Ploomber] Deleting product's metadata...")
self.dag_mapping[key].product.metadata.delete()
return super(PloomberContentsManager, self).save(model, path)
def _model_in_dag(self, model, path=None):
"""Determine if the model is part of the pipeline
"""
model_in_dag = False
if path is None:
path = model['path']
else:
path = path.strip('/')
if self.dag:
if ('content' in model and model['type'] == 'notebook'):
if path in self.dag_mapping:
# NOTE: not sure why sometimes the model comes with a
# name and sometimes it doesn't
self.log.info(
'[Ploomber] {} is part of the pipeline... '.format(
model.get('name') or ''))
model_in_dag = True
else:
self.log.info('[Ploomber] {} is not part of the pipeline, '
'skipping...'.format(
model.get('name') or ''))
return path if model_in_dag else False
def list_checkpoints(self, path):
if not self.manager or path not in self.manager:
return self.checkpoints.list_checkpoints(path)
def create_checkpoint(self, path):
if not self.manager or path not in self.manager:
return self.checkpoints.create_checkpoint(self, path)
else:
return {
'id': 'checkpoint',
'last_modified': datetime.datetime.now()
}
def _load_jupyter_server_extension(app):
"""
This function is called to configure the new content manager, there are a
lot of quirks that jupytext maintainers had to solve to make it work so
we base our implementation on theirs:
https://github.com/mwouts/jupytext/blob/bc1b15935e096c280b6630f45e65c331f04f7d9c/jupytext/__init__.py#L19
"""
    if issubclass(app.contents_manager_class, PloomberContentsManager):
app.log.info("[Ploomber] NotebookApp.contents_manager_class "
"is a subclass of PloomberContentsManager already - OK")
return
# The server extension call is too late!
# The contents manager was set at NotebookApp.init_configurables
# Let's change the contents manager class
app.log.info('[Ploomber] setting content manager '
'to PloomberContentsManager')
app.contents_manager_class = PloomberContentsManager
try:
# And re-run selected init steps from:
# https://github.com/jupyter/notebook/blob/
# 132f27306522b32fa667a6b208034cb7a04025c9/notebook/notebookapp.py#L1634-L1638
app.contents_manager = app.contents_manager_class(parent=app,
log=app.log)
app.session_manager.contents_manager = app.contents_manager
app.web_app.settings["contents_manager"] = app.contents_manager
except Exception:
error = """[Ploomber] An error occured. Please
deactivate the server extension with "jupyter serverextension disable ploomber"
and configure the contents manager manually by adding
c.NotebookApp.contents_manager_class = "ploomber.jupyter.PloomberContentsManager"
to your .jupyter/jupyter_notebook_config.py file.
""" # noqa
app.log.error(error)
raise
| 40.220779 | 109 | 0.56813 | 8,733 | 0.704956 | 142 | 0.011463 | 169 | 0.013642 | 0 | 0 | 4,742 | 0.38279 |
3129453a0038e24bbee80e8d29bce23d328268df | 4,114 | py | Python | quake_reporter/quake_datafeed.py | shandozer/quake_reporter | 4e1eed5180b2f7dc3662b61ef32ef0b69c0fae01 | [
"MIT"
]
| null | null | null | quake_reporter/quake_datafeed.py | shandozer/quake_reporter | 4e1eed5180b2f7dc3662b61ef32ef0b69c0fae01 | [
"MIT"
]
| null | null | null | quake_reporter/quake_datafeed.py | shandozer/quake_reporter | 4e1eed5180b2f7dc3662b61ef32ef0b69c0fae01 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
"""
__author__ = Shannon T. Buckley, 10/8/16
Python 2.7.x
"""
import json
import urllib2
import datetime
import argparse
VERSION = '0.2.1'
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('-m', '--magnitude', action="store", type=float,
help='Please enter minimum magnitude desired: 1.0, 2.5, or 4.5', default=2.5)
parser.add_argument('-t', '--timeframe', action="store", choices=['hour', 'day', 'week', 'month'],
help='Collect data over the last hour, day, week, or month.')
parser.add_argument('-s', '--savejson', action="store_true",
help='Use this flag to save output to a .json')
return parser
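# Editor's note (not part of the original script): how the parser above
# resolves a typical command line; the argument values are arbitrary examples.
#   args = get_parser().parse_args(['-m', '4.5', '-t', 'week', '-s'])
#   args.magnitude == 4.5, args.timeframe == 'week', args.savejson is True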
def get_data_from_api(url):
page = urllib2.urlopen(url)
data = page.read()
return data
def save_json_data(data, req_details):
with open('quake_request_{}_{:%Y_%m_%d_%H:%M}.json'.format(req_details, datetime.datetime.now()), 'wb') as f:
json.dump(data, f)
def print_results(data, magnitude):
json_data = json.loads(data)
if 'title' in json_data['metadata']:
print json_data['metadata']['title']
count = json_data['metadata']['count']
print '\n--> {} events found in the {}\n'.format(str(count), json_data['metadata']['title'].split(', ')[1])
tsunami_quakes = [quake for quake in json_data['features'] if quake['properties']['tsunami'] == 1]
tsunami_count = len(tsunami_quakes)
if tsunami_count > 0:
print "\t{} of these caused TSUNAMI\n".format(tsunami_count)
sorted_json = sorted(json_data['features'], key=lambda k: k['properties'].get('time', 0), reverse=True)
for i in sorted_json:
print '*' * 18 + '\n'
if i['properties']['time']:
local_quake_time = i['properties']['time']
quake_date = datetime.datetime(1970, 1, 1) + datetime.timedelta(milliseconds=local_quake_time)
print 'Date of Quake: {}'.format(quake_date.strftime('%m-%d-%Y %H:%M:%S'))
            # NOTE: the elapsed-time value below is computed but never used
            time_since_quake = datetime.timedelta() - datetime.timedelta(days=-quake_date.day,
hours=quake_date.hour,
minutes=quake_date.minute,
seconds=quake_date.second)
if i['properties']['tsunami'] == 1:
print "\n\t_/*~~~ TSUNAMI CREATED! ~~~*\_\n"
if i['properties']['mag']:
print '%2.1f' % i['properties']['mag'] + ',', i['properties']['place'], '\n'
print 'Depth: ' + str(i['geometry']['coordinates'][2]) + 'km'
print '*' * 20
def main():
parser = get_parser()
args = parser.parse_args()
intro_statement = '\n\nSearching for Global Earthquake Events'
if args.timeframe:
t = args.timeframe
intro_statement += ' within the last {}...'.format(t)
else:
intro_statement += ' (No timespan selected, using default: 1 week)'
t = 'week'
print intro_statement
if args.magnitude:
mag = args.magnitude
print '\nMagnitude requested: {}'.format(mag)
if mag >= 4.5:
mag = 4.5
elif mag > 2.5:
mag = 2.5
else:
mag = 1.0 # anything less than 2.5 gets the 1.0+ range
else:
print '\nNo Magnitude requested, using default... (2.5+)'
mag = 2.5 # a medium sized default
# Now grab your data
api_url = 'http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/{}_{}.geojson'.format(mag, t)
try:
data = get_data_from_api(api_url)
except urllib2.URLError:
print '\nUH OH! We were unable to extract any data! \n\n\t-->Check your Internet/WiFi Access? '
exit(1)
if data and args.savejson:
request_params = '{}mag-1{}'.format(mag, t)
save_json_data(data, request_params)
elif data:
print_results(data, mag)
if __name__ == '__main__':
main()
| 25.395062 | 113 | 0.564657 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,304 | 0.316966 |
312a1142332c9f2c1bc137f8c71e0ab19ca1463f | 2,792 | py | Python | tests/visualization/data_sources/test_satellite.py | openclimatefix/nowcasting_utils | 7a45e9d24ce29693d96fd9c75a34ca1d205b64bc | [
"MIT"
]
| 2 | 2021-12-20T15:56:40.000Z | 2022-02-09T07:31:27.000Z | tests/visualization/data_sources/test_satellite.py | openclimatefix/nowcasting_utils | 7a45e9d24ce29693d96fd9c75a34ca1d205b64bc | [
"MIT"
]
| 37 | 2021-09-08T14:36:00.000Z | 2022-02-16T20:05:24.000Z | tests/visualization/data_sources/test_satellite.py | openclimatefix/nowcasting_utils | 7a45e9d24ce29693d96fd9c75a34ca1d205b64bc | [
"MIT"
]
| null | null | null | """ Tests to plot satellite data """
import os
import plotly.graph_objects as go
from nowcasting_dataset.data_sources.fake.batch import satellite_fake
from nowcasting_dataset.geospatial import osgb_to_lat_lon
from nowcasting_utils.visualization.data_sources.plot_satellite import (
make_animation_all_channels,
make_animation_one_channels,
make_traces_one_channel,
make_traces_one_channel_one_time,
)
from nowcasting_utils.visualization.utils import make_buttons
def test_make_traces_one_channel_one_time(configuration):
"""Test 'make_traces_one_channel_one_time' functions"""
satellite = satellite_fake(configuration=configuration)
example_index = 1
trace = make_traces_one_channel_one_time(
satellite=satellite, example_index=example_index, channel_index=0, time_index=1
)
fig = go.Figure(trace)
x = satellite.x[example_index].mean()
y = satellite.y[example_index].mean()
lat, lon = osgb_to_lat_lon(x=x, y=y)
fig.update_layout(
mapbox_style="carto-positron", mapbox_zoom=7, mapbox_center={"lat": lat, "lon": lon}
)
if "CI" not in os.environ.keys():
fig.show(renderer="browser")
def test_make_traces_one_channel(configuration):
"""Test 'make_traces_one_channel' functions"""
satellite = satellite_fake(configuration=configuration)
example_index = 1
traces = make_traces_one_channel(
satellite=satellite, example_index=example_index, channel_index=0
)
x = satellite.x[example_index].mean()
y = satellite.y[example_index].mean()
lat, lon = osgb_to_lat_lon(x=x, y=y)
frames = []
for i, trace in enumerate(traces[1:]):
frames.append(go.Frame(data=trace, name=f"frame{i+1}"))
fig = go.Figure(
data=traces[0],
layout=go.Layout(
title="Start Title",
),
frames=frames,
)
fig.update_layout(updatemenus=[make_buttons()])
fig.update_layout(
mapbox_style="carto-positron", mapbox_zoom=7, mapbox_center={"lat": lat, "lon": lon}
)
if "CI" not in os.environ.keys():
fig.show(renderer="browser")
def test_make_animation_one_channels(configuration):
"""Test 'make_animation_one_channels' functions"""
satellite = satellite_fake(configuration=configuration)
fig = make_animation_one_channels(satellite=satellite, example_index=1, channel_index=0)
if "CI" not in os.environ.keys():
fig.show(renderer="browser")
def test_make_animation_all_channels(configuration):
"""Test 'make_animation_all_channels' functions"""
satellite = satellite_fake(configuration=configuration)
fig = make_animation_all_channels(satellite=satellite, example_index=0)
if "CI" not in os.environ.keys():
fig.show(renderer="browser")
| 29.389474 | 92 | 0.716691 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 367 | 0.131447 |
312a2c813f2b31f42294655c9412690a04165f37 | 200 | py | Python | owtf/__main__.py | Udbhavbisarya23/owtf | 27623937677caf975569f8de8af7983ca57611bc | [
"BSD-3-Clause"
]
| 1,514 | 2015-01-15T18:42:58.000Z | 2022-03-25T08:14:40.000Z | owtf/__main__.py | justdvnsh/owtf | 3a543b4eb2a7ad67155eb96dd2d99efbc181498d | [
"BSD-3-Clause"
]
| 652 | 2015-01-09T18:27:37.000Z | 2022-03-21T18:41:01.000Z | owtf/__main__.py | justdvnsh/owtf | 3a543b4eb2a7ad67155eb96dd2d99efbc181498d | [
"BSD-3-Clause"
]
| 506 | 2015-01-02T09:28:47.000Z | 2022-03-10T23:27:27.000Z | """
owtf.__main__
~~~~~~~~~~~~~
A __main__ method for OWTF so that internal services can be called as Python modules.
"""
import sys
from owtf.core import main
if __name__ == "__main__":
main()
| 16.666667 | 85 | 0.675 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 131 | 0.655 |
312a37dbf8253fa5df799a76f2660e8811afe2b8 | 1,823 | py | Python | sympyosis/logger.py | ZechCodes/sympyosis | 0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1 | [
"MIT"
]
| null | null | null | sympyosis/logger.py | ZechCodes/sympyosis | 0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1 | [
"MIT"
]
| null | null | null | sympyosis/logger.py | ZechCodes/sympyosis | 0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1 | [
"MIT"
]
| null | null | null | from enum import IntEnum
from typing import Type, TypeVar
import logging
T = TypeVar("T")
class LogLevel(IntEnum):
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL
@classmethod
def get(cls: Type[T], name: str) -> T:
return getattr(cls, name.upper())
class Logger:
def __init__(
self, name: str, level: LogLevel, *, parent: logging.Logger | None = None
):
self._name = name
self._level = level
self._parent = parent
if parent:
self._logger = self._parent.getChild(self._name)
else:
self._logger = logging.getLogger(name)
self.set_level(self._level)
def log(self, message: str, level: LogLevel, *args, **kwargs):
self._logger.log(level, message, *args, **kwargs)
def debug(self, message: str, *args, **kwargs):
self.log(message, LogLevel.DEBUG, *args, **kwargs)
def info(self, message: str, *args, **kwargs):
self.log(message, LogLevel.INFO, *args, **kwargs)
def warning(self, message: str, *args, **kwargs):
self.log(message, LogLevel.WARNING, *args, **kwargs)
def error(self, message: str, *args, **kwargs):
self.log(message, LogLevel.ERROR, *args, **kwargs)
def critical(self, message: str, *args, **kwargs):
self.log(message, LogLevel.CRITICAL, *args, **kwargs)
def set_level(self, level: LogLevel):
self._level = level
self._logger.setLevel(level)
def create_child_logger(self, name: str, level: LogLevel | None = None):
        return Logger(name, level or self._level, parent=self._logger)
@staticmethod
def initialize_loggers(level: LogLevel = LogLevel.ERROR):
logging.basicConfig(level=level)
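def _logger_usage_example():
    # Editor's sketch (not from the original module): minimal intended usage
    # of the wrapper above. The logger names are arbitrary examples.
    Logger.initialize_loggers(LogLevel.DEBUG)
    root = Logger("sympyosis", LogLevel.INFO)
    child = root.create_child_logger("plugins")
    child.warning("hello from %s", "child")
    return child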
| 28.484375 | 81 | 0.633571 | 1,725 | 0.946242 | 0 | 0 | 213 | 0.11684 | 0 | 0 | 3 | 0.001646 |
312a5215e0e355ad2b4d5e01dca1809280fd23f6 | 647 | py | Python | peframe/modules/apialert.py | ki1556ki/MJUOpenSource | 4087db825bbc7c460f8275428703e5c7066a84ae | [
"MIT"
]
| null | null | null | peframe/modules/apialert.py | ki1556ki/MJUOpenSource | 4087db825bbc7c460f8275428703e5c7066a84ae | [
"MIT"
]
| null | null | null | peframe/modules/apialert.py | ki1556ki/MJUOpenSource | 4087db825bbc7c460f8275428703e5c7066a84ae | [
"MIT"
]
| 1 | 2020-07-14T03:39:06.000Z | 2020-07-14T03:39:06.000Z | # -*- coding: utf-8 -*-
# Import for working with the JSON format
import json
# get(): via the nested loops, inserts matching API names into apialert_found, then returns them sorted as a list.
def get(pe, strings_match):
alerts = strings_match['apialert']
apialert_found = []
    # Check whether pe has an attribute named DIRECTORY_ENTRY_IMPORT; true if it exists, false otherwise.
if hasattr(pe, 'DIRECTORY_ENTRY_IMPORT'):
for lib in pe.DIRECTORY_ENTRY_IMPORT:
for imp in lib.imports:
for alert in alerts:
if alert: # remove 'null'
                        # If imp.name starts with the alert string, append imp.name to the end of apialert_found
if str(imp.name).startswith(alert):
apialert_found.append(imp.name)
return sorted(set(apialert_found))
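def _example_scan():
    # Editor's sketch (not part of the original module): how this helper is
    # meant to be driven. The sample path and the alert list are arbitrary
    # examples; pefile is the library peframe builds on.
    import pefile
    pe = pefile.PE('sample.exe')  # hypothetical input binary
    strings_match = {'apialert': ['CreateRemoteThread', 'VirtualAlloc']}
    return get(pe, strings_match)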
| 32.35 | 75 | 0.693972 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 469 | 0.56711 |
312bfac4cf2875d133c13b3a00e0ae85f3c76c44 | 2,084 | py | Python | tests/conftest.py | Ninjagod1251/ape | 9b40ef15f25362ddb83cb6d571d60cab041fce4a | [
"Apache-2.0"
]
| null | null | null | tests/conftest.py | Ninjagod1251/ape | 9b40ef15f25362ddb83cb6d571d60cab041fce4a | [
"Apache-2.0"
]
| null | null | null | tests/conftest.py | Ninjagod1251/ape | 9b40ef15f25362ddb83cb6d571d60cab041fce4a | [
"Apache-2.0"
]
| null | null | null | import shutil
from pathlib import Path
from tempfile import mkdtemp
import pytest
from click.testing import CliRunner
import ape
# NOTE: Ensure that we don't use local paths for these
ape.config.DATA_FOLDER = Path(mkdtemp()).resolve()
ape.config.PROJECT_FOLDER = Path(mkdtemp()).resolve()
@pytest.fixture(scope="session")
def config():
yield ape.config
@pytest.fixture(scope="session")
def data_folder(config):
yield config.DATA_FOLDER
@pytest.fixture(scope="session")
def plugin_manager():
yield ape.networks.plugin_manager
@pytest.fixture(scope="session")
def accounts():
yield ape.accounts
@pytest.fixture(scope="session")
def compilers():
yield ape.compilers
@pytest.fixture(scope="session")
def networks():
yield ape.networks
@pytest.fixture(scope="session")
def chain():
yield ape.chain
@pytest.fixture(scope="session")
def project_folder(config):
yield config.PROJECT_FOLDER
@pytest.fixture(scope="session")
def project(config):
yield ape.Project(config.PROJECT_FOLDER)
@pytest.fixture
def keyparams():
# NOTE: password is 'a'
return {
"address": "7e5f4552091a69125d5dfcb7b8c2659029395bdf",
"crypto": {
"cipher": "aes-128-ctr",
"cipherparams": {"iv": "7bc492fb5dca4fe80fd47645b2aad0ff"},
"ciphertext": "43beb65018a35c31494f642ec535315897634b021d7ec5bb8e0e2172387e2812",
"kdf": "scrypt",
"kdfparams": {
"dklen": 32,
"n": 262144,
"r": 1,
"p": 8,
"salt": "4b127cb5ddbc0b3bd0cc0d2ef9a89bec",
},
"mac": "6a1d520975a031e11fc16cff610f5ae7476bcae4f2f598bc59ccffeae33b1caa",
},
"id": "ee424db9-da20-405d-bd75-e609d3e2b4ad",
"version": 3,
}
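@pytest.fixture
def decrypted_keyparams_example(keyparams):
    # Editor's sketch (not part of the original conftest): the dict above is a
    # standard V3 keystore, so eth-account can decrypt it; the password "a"
    # comes from the fixture's own comment. eth-account being importable here
    # is an assumption.
    from eth_account import Account
    return Account.decrypt(keyparams, "a")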
@pytest.fixture
def temp_accounts_path(config):
path = Path(config.DATA_FOLDER) / "accounts"
path.mkdir(exist_ok=True, parents=True)
yield path
if path.exists():
shutil.rmtree(path)
@pytest.fixture
def runner(project):
yield CliRunner()
| 21.265306 | 93 | 0.65739 | 0 | 0 | 651 | 0.31238 | 1,756 | 0.84261 | 0 | 0 | 580 | 0.278311 |
312c17d992442c57e3032d03093f0ff6832854f9 | 1,053 | py | Python | recipes/Python/576543_Prime_Number_Generator_Checker/recipe-576543.py | tdiprima/code | 61a74f5f93da087d27c70b2efe779ac6bd2a3b4f | [
"MIT"
]
| 2,023 | 2017-07-29T09:34:46.000Z | 2022-03-24T08:00:45.000Z | recipes/Python/576543_Prime_Number_Generator_Checker/recipe-576543.py | unhacker/code | 73b09edc1b9850c557a79296655f140ce5e853db | [
"MIT"
]
| 32 | 2017-09-02T17:20:08.000Z | 2022-02-11T17:49:37.000Z | recipes/Python/576543_Prime_Number_Generator_Checker/recipe-576543.py | unhacker/code | 73b09edc1b9850c557a79296655f140ce5e853db | [
"MIT"
]
| 780 | 2017-07-28T19:23:28.000Z | 2022-03-25T20:39:41.000Z | #
# prime number generator
# This program gets two number as input
# and prints
# Prime numbers in the range
# Actual number of primes in the range
# and Estimation based on formula
# n
# pi(n)= -------
# log(n)
# pi(n)=number of primes less than n
#
from math import *
def isPrime(n):
if n%2==0 and n!=2:return False #if number is EVEN AND it is NOT 2
    k = n**0.5 ; m = ceil(k) #if number is a PERFECT SQUARE it cannot be prime
    if k==m:return False     #(also covers the exclusive xrange bound below, e.g. 9 and 25)
for i in xrange(3,int(m),2): #divisibility test ODDS ONLY
if n%i==0:return False
return True #otherwise it is PRIME
if __name__=='__main__':
s = input('Enter Start: ')
e = input('Enter End: ')
    s|=1 #if s%2==0:s+=1 # ODDS only (note: the prime 2 itself is never tested)
list = [x for x in range(s,e,2) if isPrime(x)]
print list,'\n',len(list),'\n',int(ceil(e/log(e)-s/log(s)))
#prints list of primes , length of list , estimate using the formula
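# Editor's note (not part of the original recipe): a concrete check of the
# pi(n) ~ n/log(n) estimate quoted in the header. For n = 1000 the formula
# gives 1000/log(1000) ~ 144.8, while the true count of primes below 1000
# is 168, so the estimate undershoots by roughly 14% at this scale.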
| 30.970588 | 73 | 0.531814 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 568 | 0.539411 |
312cb34d34abecdfef42214150394d17f2b7b90e | 118 | py | Python | Practica 1 E4.py | pardo13/python | 3d15c9a0414a240588da4d24184f63370b736d55 | [
"MIT"
]
| null | null | null | Practica 1 E4.py | pardo13/python | 3d15c9a0414a240588da4d24184f63370b736d55 | [
"MIT"
]
| null | null | null | Practica 1 E4.py | pardo13/python | 3d15c9a0414a240588da4d24184f63370b736d55 | [
"MIT"
]
| null | null | null | A=int(input("dame int"))
B=int(input("dame int"))
if(A>B):
print("A es mayor")
else:
print("B es mayor")
| 14.75 | 24 | 0.559322 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.372881 |
312d87f76ce3d3d748ca0ec5f49e7654fc2b52fb | 11,520 | py | Python | selenium_driver_updater/_phantomJS.py | Svinokur/selenium_driver_updater | f773d5c4dc064e512c92cf58016843007ba3e6e3 | [
"MIT"
]
| 8 | 2021-04-18T17:50:00.000Z | 2022-01-01T18:01:34.000Z | selenium_driver_updater/_phantomJS.py | Svinokur/selenium_driver_updater | f773d5c4dc064e512c92cf58016843007ba3e6e3 | [
"MIT"
]
| 1 | 2021-11-02T22:05:36.000Z | 2021-11-03T07:58:00.000Z | selenium_driver_updater/_phantomJS.py | Svinokur/selenium_driver_updater | f773d5c4dc064e512c92cf58016843007ba3e6e3 | [
"MIT"
]
| 1 | 2021-08-04T12:45:30.000Z | 2021-08-04T12:45:30.000Z | #pylint: disable=logging-fstring-interpolation
#Standart library imports
import shutil
import os
import time
from typing import Tuple
from pathlib import Path
import re
from shutil import copyfile
import wget
# Local imports
from selenium_driver_updater.util.logger import logger
from selenium_driver_updater.util.exceptions import DriverVersionInvalidException
from selenium_driver_updater.driver_base import DriverBase
class PhantomJS(DriverBase):
"Class for working with Selenium phantomjs binary"
_repo_name = 'ariya/phantomjs'
_tmp_folder_path = 'tmp'
def __init__(self, **kwargs):
kwargs.update(repo_name=PhantomJS._repo_name)
DriverBase.__init__(self, **kwargs)
self.system_name = ''
#assign of specific os
specific_system = str(kwargs.get('system_name', ''))
specific_system = specific_system.replace('linux64', 'linux-x86_64')
specific_system = specific_system.replace('linux32', 'linux-i686').replace('macos', 'macosx')
if specific_system:
self.system_name = "phantomjs-{}-" + f"{specific_system}"
if 'win' in specific_system:
self.system_name = "phantomjs-{}-windows"
if 'linux' in specific_system:
self.system_name = self.system_name + '.tar.bz2'
else:
self.system_name = self.system_name + '.zip'
self.phantomjs_path = self.driver_path
def _get_latest_version_phantomjs(self) -> str:
"""Gets latest phantomjs version
Returns:
str
latest_version (str) : Latest version of phantomjs.
"""
latest_version : str = ''
repo_name = PhantomJS._repo_name
latest_version = self.github_viewer.get_latest_release_tag_by_repo_name(repo_name=repo_name)
logger.info(f'Latest version of phantomjs: {latest_version}')
return latest_version
def _compare_current_version_and_latest_version_phantomjs(self) -> Tuple[bool, str, str]:
"""Compares current version of phantomjs to latest version
Returns:
Tuple of bool, str and str
is_driver_up_to_date (bool) : It true the driver is up to date. Defaults to False.
current_version (str) : Current version of the driver.
latest_version (str) : Latest version of the driver.
"""
is_driver_up_to_date : bool = False
current_version : str = ''
latest_version : str = ''
current_version = super()._get_current_version_driver()
if not current_version:
return is_driver_up_to_date, current_version, latest_version
latest_version = self._get_latest_version_phantomjs()
if current_version == latest_version:
is_driver_up_to_date = True
message = ('Your existing phantomjs is up to date.'
f'current_version: {current_version} latest_version: {latest_version}')
logger.info(message)
return is_driver_up_to_date, current_version, latest_version
def _check_if_phantomjs_is_up_to_date(self) -> str:
"""Сhecks for the latest version, downloads or updates phantomjs binary
Returns:
str
driver_path (str) : Path where phantomjs was downloaded or updated.
"""
driver_path : str = ''
if self.check_driver_is_up_to_date and not self.system_name:
is_driver_up_to_date, current_version, latest_version = self._compare_current_version_and_latest_version_phantomjs()
if is_driver_up_to_date:
return self.phantomjs_path
driver_path = self._download_driver()
if self.check_driver_is_up_to_date and not self.system_name:
is_driver_up_to_date, current_version, latest_version = self._compare_current_version_and_latest_version_phantomjs()
if not is_driver_up_to_date:
message = ('Problem with updating phantomjs'
f'current_version: {current_version} latest_version: {latest_version}')
logger.error(message)
message = 'Trying to download previous latest version of phantomjs'
logger.info(message)
driver_path = self._download_driver(previous_version=True)
return driver_path
def __rename_driver(self, archive_folder_path : str, archive_driver_path : str) -> None:
"""Renames phantomjs if it was given
Args:
archive_folder_path (str) : Path to the main folder
archive_driver_path (str) : Path to the phantomjs archive
"""
renamed_driver_path : str = ''
new_path = archive_folder_path + os.path.sep + self.filename if not archive_folder_path.endswith(os.path.sep) else archive_folder_path + self.filename
if Path(new_path).exists():
Path(new_path).unlink()
os.rename(archive_driver_path, new_path)
renamed_driver_path = self.path + self.filename
if Path(renamed_driver_path).exists():
Path(renamed_driver_path).unlink()
copyfile(new_path, renamed_driver_path)
def main(self) -> str:
"""Main function, checks for the latest version, downloads or updates phantomjs binary or
downloads specific version of phantomjs.
Returns:
str
driver_path (str) : Path where phantomjs was downloaded or updated.
"""
driver_path : str = ''
if not self.version:
driver_path = self._check_if_phantomjs_is_up_to_date()
else:
driver_path = self._download_driver(version=self.version)
return driver_path
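    # Editor's note (not from the original module): minimal intended driving of
    # this class, e.g.
    #   driver_path = PhantomJS(path='drivers/', upgrade=True, chmod=True,
    #                           check_driver_is_up_to_date=True).main()
    # The constructor keywords shown mirror the attributes the methods above
    # read (path, upgrade, chmod, ...), but their exact names in DriverBase are
    # an assumption here.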
def _get_latest_previous_version_phantomjs_via_requests(self) -> str:
"""Gets previous latest phantomjs version
Returns:
str
latest_version_previous (str) : Latest previous version of phantomjs.
"""
latest_previous_version : str = ''
all_versions = []
url = self.setting["PhantomJS"]["LinkAllReleases"]
json_data = self.requests_getter.get_result_by_request(url=url, is_json=True)
values = json_data.get('values')
for value in values:
value_name = value.get('name')
if not 'beta' in value_name:
find_string = re.findall(self.setting["Program"]["wedriverVersionPattern"], value_name)
version = find_string[0] if len(find_string) > 0 else ''
all_versions.append(version)
all_versions = list(set(all_versions))
all_versions.sort(key=lambda s: list(map(int, s.split('.'))))
latest_previous_version = all_versions[len(all_versions)-2]
logger.info(f'Latest previous version of phantomjs: {latest_previous_version}')
return latest_previous_version
def _check_if_version_is_valid(self, url : str) -> None:
"""Checks the specified version for existence.
Args:
url (str) : Full download url of chromedriver.
"""
archive_name : str = url.split("/")[len(url.split("/"))-1]
url_releases : str = self.setting["PhantomJS"]["LinkAllReleases"]
is_found : bool = False
while is_found is False:
json_data = self.requests_getter.get_result_by_request(url=url_releases, is_json=True)
for data in json_data.get('values'):
if data.get('name') == archive_name:
is_found = True
break
url_releases = json_data.get('next')
if not url_releases:
break
if not is_found:
message = f'Wrong version or system_name was specified. archive_name: {archive_name} url: {url}'
raise DriverVersionInvalidException(message)
def _download_driver(self, version : str = '', previous_version : bool = False) -> str:
"""Function to download, delete or upgrade current phantomjs
Args:
version (str) : Specific phantomjs version to download. Defaults to empty string.
previous_version (boll) : If true, phantomjs latest previous version will be downloaded. Defaults to False.
Returns:
str
driver_path (str) : Path to unzipped driver.
"""
url : str = ''
latest_version : str = ''
latest_previous_version : str = ''
driver_path : str = ''
if self.upgrade:
super()._delete_current_driver_for_current_os()
if version:
logger.info(f'Started download phantomjs specific_version: {version}')
url = self.setting["PhantomJS"]["LinkLastReleaseFile"].format(version)
elif previous_version:
latest_previous_version = self._get_latest_previous_version_phantomjs_via_requests()
logger.info(f'Started download phantomjs latest_previous_version: {latest_previous_version}')
url = self.setting["PhantomJS"]["LinkLastReleaseFile"].format(latest_previous_version)
else:
latest_version = self._get_latest_version_phantomjs()
logger.info(f'Started download phantomjs latest_version: {latest_version}')
url = self.setting["PhantomJS"]["LinkLastReleaseFile"].format(latest_version)
if self.system_name:
url = url.replace(url.split("/")[-1], '')
version = [value for key,value in locals().items() if 'version' in key and value][0]
url = url + self.system_name.format(version)
            logger.info(f'Started downloading phantomjs for specific system: {self.system_name}')
if any([version, self.system_name ,latest_previous_version]):
self._check_if_version_is_valid(url=url)
archive_name = url.split("/")[-1]
out_path = self.path + archive_name
if Path(out_path).exists():
Path(out_path).unlink()
logger.info(f'Started download phantomjs by url: {url}')
if self.info_messages:
archive_path = wget.download(url=url, out=out_path)
else:
archive_path = wget.download(url=url, out=out_path, bar=None)
time.sleep(2)
logger.info(f'PhantomJS was downloaded to path: {archive_path}')
out_path = self.path
parameters = dict(archive_path=archive_path, out_path=out_path)
self.extractor.extract_and_detect_archive_format(**parameters)
platform : str = self.setting["PhantomJS"]["LastReleasePlatform"]
archive_path_folder = self.path + url.split("/")[-1].replace('.zip', '').replace(".tar.bz2", '') + os.path.sep
archive_path_folder_bin = archive_path_folder + 'bin' + os.path.sep
driver_archive_path = archive_path_folder_bin + platform
if not self.filename:
copyfile(driver_archive_path, self.path + platform)
else:
parameters = dict(archive_folder_path=archive_path_folder_bin, archive_driver_path=driver_archive_path)
self.__rename_driver(**parameters)
if Path(archive_path_folder).exists():
shutil.rmtree(archive_path_folder)
driver_path = self.phantomjs_path
logger.info(f'PhantomJS was successfully unpacked by path: {driver_path}')
if self.chmod:
super()._chmod_driver()
return driver_path
| 33.198847 | 158 | 0.642795 | 11,096 | 0.963111 | 0 | 0 | 0 | 0 | 0 | 0 | 3,505 | 0.304227 |
312ecd011c5a15ca58383960f9caea10321bad18 | 1,779 | py | Python | src/ychaos/settings.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
]
| 8 | 2021-07-21T15:37:48.000Z | 2022-03-03T14:43:09.000Z | src/ychaos/settings.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
]
| 102 | 2021-07-20T16:08:29.000Z | 2022-03-25T07:28:37.000Z | src/ychaos/settings.py | vanderh0ff/ychaos | 5148c889912b744ee73907e4dd30c9ddb851aeb3 | [
"Apache-2.0"
]
| 8 | 2021-07-20T13:37:46.000Z | 2022-02-18T01:44:52.000Z | # Copyright 2021, Yahoo
# Licensed under the terms of the Apache 2.0 license. See the LICENSE file in the project root for terms
from pathlib import Path
from typing import Optional, Union
from pydantic import BaseModel
class ApplicationSettings(BaseModel):
"""
Defines the Global Settings that are consistent in both Development &
Production scenarios
"""
APP = "YChaos"
APP_DESC = "YChaos, The resilience testing framework"
PROG = "ychaos"
COMMAND_IDENTIFIER = "_cmd.{}"
LOG_FILE_PATH: Optional[Path] = None
@classmethod
def get_instance(cls):
return cls()
@classmethod
def get_version(cls):
import pkg_resources
return pkg_resources.get_distribution(cls.get_instance().PROG).version
class DevSettings(ApplicationSettings):
"""
Defines the Development settings for YChaos Application.
"""
CONFIG = "dev"
class ProdSettings(DevSettings):
"""
Defines the Production settings for YChaos Application
Prod Settings overrides the Dev Settings class and redefines all
the constants defined in DevSettings that can be used in the production scenario
"""
CONFIG = "prod"
class Settings:
__instance: Optional[Union[DevSettings, ProdSettings]] = None
@classmethod
def get_instance(cls) -> Union[DevSettings, ProdSettings]:
if cls.__instance is None:
cls(config="prod")
assert cls.__instance is not None
return cls.__instance
def __init__(self, config):
if config == "dev":
self.__class__.__instance = DevSettings()
elif config == "prod":
self.__class__.__instance = ProdSettings()
else:
raise AttributeError("Unknown configuration found")
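def _settings_usage_example():
    # Editor's sketch (not from the original module): intended use of the
    # singleton above -- select a configuration once, then fetch it anywhere.
    Settings(config="prod")
    settings = Settings.get_instance()
    return settings.APP, settings.CONFIG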
| 25.782609 | 105 | 0.681282 | 1,544 | 0.867903 | 0 | 0 | 420 | 0.236088 | 0 | 0 | 660 | 0.370995 |
312efdddb68056b4177eee4701aa3c39ea0d5fe6 | 6,656 | py | Python | tests/func/test_pipeline.py | kacmak7/dvc | 7f92cc3be31f55a1d47c56fc5a39896dd5d1e313 | [
"Apache-2.0"
]
| null | null | null | tests/func/test_pipeline.py | kacmak7/dvc | 7f92cc3be31f55a1d47c56fc5a39896dd5d1e313 | [
"Apache-2.0"
]
| null | null | null | tests/func/test_pipeline.py | kacmak7/dvc | 7f92cc3be31f55a1d47c56fc5a39896dd5d1e313 | [
"Apache-2.0"
]
| null | null | null | import logging
from dvc.main import main
from tests.basic_env import TestDvc
from tests.func.test_repro import TestRepro
from tests.func.test_repro import TestReproChangedDeepData
class TestPipelineShowSingle(TestDvc):
def setUp(self):
super().setUp()
self.stage = "foo.dvc"
ret = main(["add", self.FOO])
self.assertEqual(ret, 0)
def test(self):
ret = main(["pipeline", "show", self.stage])
self.assertEqual(ret, 0)
def test_commands(self):
ret = main(["pipeline", "show", self.stage, "--commands"])
self.assertEqual(ret, 0)
def test_outs(self):
ret = main(["pipeline", "show", self.stage, "--outs"])
self.assertEqual(ret, 0)
def test_dot(self):
ret = main(["pipeline", "show", "--dot", self.stage])
self.assertEqual(ret, 0)
def test_tree(self):
ret = main(["pipeline", "show", "--tree", self.stage])
self.assertEqual(ret, 0)
def test_ascii_outs(self):
ret = main(["pipeline", "show", "--ascii", self.stage, "--outs"])
self.assertEqual(ret, 0)
def test_dot_commands(self):
ret = main(["pipeline", "show", "--dot", self.stage, "--commands"])
self.assertEqual(ret, 0)
def test_dot_outs(self):
ret = main(["pipeline", "show", "--dot", self.stage, "--outs"])
self.assertEqual(ret, 0)
def test_not_dvc_file(self):
ret = main(["pipeline", "show", self.FOO])
self.assertNotEqual(ret, 0)
def test_non_existing(self):
ret = main(["pipeline", "show", "non-existing"])
self.assertNotEqual(ret, 0)
def test_single_ascii(repo_dir, dvc_repo):
dvc_repo.add(repo_dir.FOO)
assert main(["pipeline", "show", "--ascii", "foo.dvc"]) == 0
def test_single_ascii_commands(repo_dir, dvc_repo):
dvc_repo.add(repo_dir.FOO)
assert main(["pipeline", "show", "--ascii", "foo.dvc", "--commands"]) == 0
class TestPipelineShow(TestRepro):
def test(self):
ret = main(["pipeline", "show", self.file1_stage])
self.assertEqual(ret, 0)
def test_commands(self):
ret = main(["pipeline", "show", self.file1_stage, "--commands"])
self.assertEqual(ret, 0)
def test_ascii(self):
ret = main(["pipeline", "show", "--ascii", self.file1_stage])
self.assertEqual(ret, 0)
def test_dot(self):
ret = main(["pipeline", "show", "--dot", self.file1_stage])
self.assertEqual(ret, 0)
def test_ascii_commands(self):
ret = main(
["pipeline", "show", "--ascii", self.file1_stage, "--commands"]
)
self.assertEqual(ret, 0)
def test_ascii_outs(self):
ret = main(["pipeline", "show", "--ascii", self.file1_stage, "--outs"])
self.assertEqual(ret, 0)
def test_dot_commands(self):
ret = main(
["pipeline", "show", "--dot", self.file1_stage, "--commands"]
)
self.assertEqual(ret, 0)
def test_print_locked_stages(repo_dir, dvc_repo, caplog):
dvc_repo.add("foo")
dvc_repo.add("bar")
dvc_repo.lock_stage("foo.dvc")
caplog.clear()
with caplog.at_level(logging.INFO, logger="dvc"):
assert main(["pipeline", "show", "foo.dvc", "--locked"]) == 0
assert "foo.dvc" in caplog.text
assert "bar.dvc" not in caplog.text
def test_dot_outs(repo_dir, dvc_repo):
dvc_repo.add(repo_dir.FOO)
dvc_repo.run(
outs=["file"],
deps=[repo_dir.FOO, repo_dir.CODE],
cmd="python {} {} {}".format(repo_dir.CODE, repo_dir.FOO, "file"),
)
assert main(["pipeline", "show", "--dot", "file.dvc", "--outs"]) == 0
class TestPipelineShowOuts(TestRepro):
def setUp(self):
super().setUp()
def test_outs(self):
ret = main(["pipeline", "show", self.file1_stage, "--outs"])
self.assertEqual(ret, 0)
class TestPipelineShowDeep(TestReproChangedDeepData):
def test(self):
ret = main(["pipeline", "show", self.file1_stage])
self.assertEqual(ret, 0)
def test_commands(self):
ret = main(["pipeline", "show", self.file1_stage, "--commands"])
self.assertEqual(ret, 0)
def test_outs(self):
ret = main(["pipeline", "show", self.file1_stage, "--outs"])
self.assertEqual(ret, 0)
def test_ascii(self):
ret = main(["pipeline", "show", "--ascii", self.file1_stage])
self.assertEqual(ret, 0)
def test_dot(self):
ret = main(["pipeline", "show", "--dot", self.file1_stage])
self.assertEqual(ret, 0)
def test_ascii_commands(self):
ret = main(
["pipeline", "show", "--ascii", self.file1_stage, "--commands"]
)
self.assertEqual(ret, 0)
def test_ascii_outs(self):
ret = main(["pipeline", "show", "--ascii", self.file1_stage, "--outs"])
self.assertEqual(ret, 0)
def test_dot_commands(self):
ret = main(
["pipeline", "show", "--dot", self.file1_stage, "--commands"]
)
self.assertEqual(ret, 0)
def test_dot_outs(self):
ret = main(["pipeline", "show", "--dot", self.file1_stage, "--outs"])
self.assertEqual(ret, 0)
class TestPipelineListEmpty(TestDvc):
def test(self):
ret = main(["pipeline", "list"])
self.assertEqual(ret, 0)
class TestPipelineListSingle(TestPipelineShowDeep):
def test(self):
ret = main(["pipeline", "list"])
self.assertEqual(ret, 0)
class TestDvcRepoPipeline(TestDvc):
def test_no_stages(self):
pipelines = self.dvc.pipelines
self.assertEqual(len(pipelines), 0)
def one_pipeline(self):
self.dvc.add("foo")
self.dvc.run(deps=["foo"], outs=["bar"], cmd="")
self.dvc.run(deps=["bar"], outs=["baz"], cmd="echo baz > baz")
pipelines = self.dvc.pipelines
self.assertEqual(len(pipelines), 1)
self.assertEqual(pipelines[0].nodes, 3)
self.assertEqual(pipelines[0].edges, 2)
def two_pipelines(self):
self.dvc.add("foo")
self.dvc.run(deps=["foo"], outs=["bar"], cmd="")
self.dvc.run(deps=["bar"], outs=["baz"], cmd="echo baz > baz")
self.dvc.add("code.py")
pipelines = self.dvc.pipelines
self.assertEqual(len(pipelines), 2)
self.assertEqual(pipelines[0].nodes, 3)
self.assertEqual(pipelines[0].edges, 2)
self.assertEqual(pipelines[1].nodes, 1)
self.assertEqual(pipelines[1].edges, 0)
def locked_stage(self):
self.dvc.add("foo")
self.dvc.lock_stage("foo.dvc")
pipelines = self.dvc.pipelines
self.assertEqual(len(pipelines), 0)
| 29.847534 | 79 | 0.594351 | 5,473 | 0.822266 | 0 | 0 | 0 | 0 | 0 | 0 | 1,115 | 0.167518 |
31305c3885e35daac8ecf91b5ede410dc7a3d63d | 5,497 | py | Python | my_modes/ChaseLoop.py | mjocean/T2Game | d85d1a9b9adb1e3836548ea60befac02b0907f6f | [
"MIT"
]
| null | null | null | my_modes/ChaseLoop.py | mjocean/T2Game | d85d1a9b9adb1e3836548ea60befac02b0907f6f | [
"MIT"
]
| null | null | null | my_modes/ChaseLoop.py | mjocean/T2Game | d85d1a9b9adb1e3836548ea60befac02b0907f6f | [
"MIT"
]
| null | null | null | import procgame.game
from procgame.game import AdvancedMode
import logging
class ChaseLoop(procgame.game.AdvancedMode):
"""
Example of T2 "Chase Loop" functionality
(described in the rules PDF on page J)
TODO: Sound effects, other visual feedback??
"""
def __init__(self, game):
super(ChaseLoop, self).__init__(game=game, priority=30, mode_type=AdvancedMode.Game)
# useful to set-up a custom logger so it's easier to track debugging messages for this mode
self.logger = logging.getLogger('ChaseLoop')
# the names of the progress lamps as a list for easier code
# via indexing, later
self.chase_lamps = ["twofiftyK", "fivehunK", "sevenfiftyK",
"oneMil", "threeMil", "fiveMil"]
self.collected = 0 # the number the player already has
self.loop_seq = [False, False, False, False]
self.awards = [250000, 500000, 750000, 1000000, 3000000, 5000000] # the list of awards
pass
def evt_player_added(self, player):
player.setState('chase_current',0)
def evt_ball_starting(self):
self.cancel_delayed(name="disabler")
self.mid_switches = [False, False, False]
self.collected = 0 # progress resets on new ball
self.sync_lamps_to_progress()
self.loop_seq = [False, False, False, False]
def evt_ball_ending(self, (shoot_again, last_ball)):
self.cancel_delayed(name="disabler")
def debug(self):
# self.logger.info("escL: %d, escH: %d, clH:%d, clL:%d" % (self.game.switches.escapeL.hw_timestamp, self.game.switches.escapeH.hw_timestamp, self.game.switches.chaseLoopHigh.hw_timestamp, self.game.switches.chaseLoopLow.hw_timestamp))
self.logger.info("collected = %d" % (self.collected))
def sw_chaseLoopLow_active(self, sw):
self.seq_handler(0)
def sw_chaseLoopHigh_active(self, sw):
self.seq_handler(1)
def sw_escapeH_active(self, sw):
self.seq_handler(2)
def sw_escapeL_active(self, sw):
if(self.seq_handler(3)):
# loop complete
self.chase_loop_award()
self.loop_seq = [False, False, False, False]
def seq_handler(self, num):
self.cancel_delayed(name="clear_%d" % num)
# if a previous switch is False, no sequence
if(False in self.loop_seq[0:num]):
self.logger.info("saw later switch -- sequence destroyed")
for i in range(0,num):
self.reset_switch_memory(i)
self.loop_seq[num] = False
self.logger.info("hit %d | Sequence: %s" % (num, self.loop_seq))
return False
self.loop_seq[num] = True
# clear later switches
for i in range(num+1,4):
self.reset_switch_memory(i)
self.logger.info("hit %d | Sequence: %s" % (num, self.loop_seq))
if(num!=3):
self.delay(name="clear_%d" % num, delay=4.0, handler=self.reset_switch_memory, param=num)
return True
def reset_switch_memory(self, switch_num):
self.cancel_delayed(name="clear_%d" % switch_num)
if(self.loop_seq[switch_num] == False):
return # nothing to do
self.loop_seq[switch_num] = False
self.logger.info("RESET %d | Sequence: %s" % (switch_num, self.loop_seq))
def OFF_sw_escapeL_active(self, sw):
self.debug()
if(self.game.switches.chaseLoopLow.hw_timestamp == None):
return procgame.game.SwitchContinue
if (((self.game.switches.escapeL.hw_timestamp - self.game.switches.chaseLoopLow.hw_timestamp) < 2000) and
(self.game.switches.escapeL.hw_timestamp > self.game.switches.escapeH.hw_timestamp) and
(self.game.switches.escapeH.hw_timestamp > self.game.switches.chaseLoopHigh.hw_timestamp) and
(self.game.switches.chaseLoopHigh.hw_timestamp > self.game.switches.chaseLoopLow.hw_timestamp)):
self.chase_loop_award()
return procgame.game.SwitchStop
else:
return procgame.game.SwitchContinue
def chase_loop_award(self):
self.sync_lamps_to_progress(special=self.collected)
self.game.displayText("Chase Loop " + str(self.awards[self.collected]))
self.game.score(self.awards[self.collected])
if(self.collected < len(self.chase_lamps)-1):
self.collected += 1
else:
# already got them all
pass
self.debug()
self.delay(name="lamp_sync", delay=1.0, handler=self.sync_lamps_to_progress)
def disable_progress_lamps(self):
for l in self.chase_lamps:
self.game.lamps[l].disable()
def set_lamp(self, lamp_name, state):
l = self.game.lamps[lamp_name]
if(state==0):
l.disable()
elif(state==1):
l.enable()
elif(state==2):
l.schedule(0xff00ff00)
elif(state==3):
l.schedule(0xf0f0f0f0)
def sync_lamps_to_progress(self, special=None):
self.cancel_delayed(name="lamp_sync")
for i in range(0, len(self.chase_lamps)):
l_state = 0
if(special is not None and i==special):
l_state=3
elif(self.collected>i):
l_state = 1
elif(self.collected==i):
l_state = 2
self.logger.info("setting " + self.chase_lamps[i] + " to " + str(l_state))
self.set_lamp(self.chase_lamps[i], l_state)
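

# A minimal standalone sketch of the ordered-sequence idea used by
# seq_handler() above (illustrative only -- it ignores the timed resets):
#
#   def detect_loops(hits, n=4):
#       seen = [False] * n
#       loops = 0
#       for h in hits:
#           if all(seen[:h]):       # every earlier switch already fired
#               seen[h] = True
#               if h == n - 1:      # last switch completes one loop
#                   loops += 1
#                   seen = [False] * n
#           else:                   # out-of-order hit breaks the sequence
#               seen = [False] * n
#       return loops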
| 36.646667 | 242 | 0.62525 | 5,418 | 0.985629 | 0 | 0 | 0 | 0 | 0 | 0 | 1,072 | 0.195015 |
313105ee1f0beaa4963e8ca27411e52ee4288019 | 130 | py | Python | app/dists/admin.py | ariashahverdi/Backend | ea8976f1eec4e75eba895f467d157f0f1345b2b7 | [
"MIT"
]
| null | null | null | app/dists/admin.py | ariashahverdi/Backend | ea8976f1eec4e75eba895f467d157f0f1345b2b7 | [
"MIT"
]
| null | null | null | app/dists/admin.py | ariashahverdi/Backend | ea8976f1eec4e75eba895f467d157f0f1345b2b7 | [
"MIT"
]
| null | null | null | from django.contrib import admin
from .models import Distribution
admin.site.register(Distribution)
# Register your models here.
| 21.666667 | 33 | 0.823077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.215385 |
313142ab6ce549a139115eb0b45ced16e5e3b5d9 | 1,671 | py | Python | examples_ltnw/binary_classifier.py | gilbeckers/logictensornetworks | c4cc3628db91030230c78d3b964c26304a3b452b | [
"MIT"
]
| null | null | null | examples_ltnw/binary_classifier.py | gilbeckers/logictensornetworks | c4cc3628db91030230c78d3b964c26304a3b452b | [
"MIT"
]
| null | null | null | examples_ltnw/binary_classifier.py | gilbeckers/logictensornetworks | c4cc3628db91030230c78d3b964c26304a3b452b | [
"MIT"
]
| 1 | 2019-05-19T01:28:04.000Z | 2019-05-19T01:28:04.000Z | # -*- coding: utf-8 -*-
import logging; logging.basicConfig(level=logging.INFO)
import numpy as np
import matplotlib.pyplot as plt
import logictensornetworks_wrapper as ltnw
nr_samples=500
data=np.random.uniform([0,0],[1.,1.],(nr_samples,2)).astype(np.float32)
data_A=data[np.where(np.sum(np.square(data-[.5,.5]),axis=1)<.09)]
data_not_A=data[np.where(np.sum(np.square(data-[.5,.5]),axis=1)>=.09)]
ltnw.variable("?data_A",data_A)
ltnw.variable("?data_not_A",data_not_A)
ltnw.variable("?data",data)
ltnw.predicate("A",2)
ltnw.axiom("forall ?data_A: A(?data_A)")
ltnw.axiom("forall ?data_not_A: ~A(?data_not_A)")
ltnw.initialize_knowledgebase(initial_sat_level_threshold=.1)
sat_level=ltnw.train(track_sat_levels=1000,sat_level_epsilon=.99)
plt.figure(figsize=(12,8))
result=ltnw.ask("A(?data)")
plt.subplot(2,2,1)
plt.scatter(data[:,0],data[:,1],c=result.squeeze())
plt.colorbar()
plt.title("A(x) - training data")
result=ltnw.ask("~A(?data)")
plt.subplot(2,2,2)
plt.scatter(data[:,0],data[:,1],c=result.squeeze())
plt.colorbar()
plt.title("~A(x) - training data")
data_test=np.random.uniform([0,0],[1.,1.],(500,2)).astype(np.float32)
ltnw.variable("?data_test",data_test)
result=ltnw.ask("A(?data_test)")
plt.subplot(2,2,3)
plt.title("A(x) - test")
plt.scatter(data_test[:,0],data_test[:,1],c=result.squeeze())
plt.colorbar()
plt.title("A(x) - test data")
result=ltnw.ask("~A(?data_test)")
plt.subplot(2,2,4)
plt.scatter(data_test[:,0],data_test[:,1],c=result.squeeze())
plt.title("~A(x) - test data")
plt.show()
ltnw.constant("a",[0.25,.5])
ltnw.constant("b",[1.,1.])
print("a is in A: %s" % ltnw.ask("A(a)"))
print("b is in A: %s" % ltnw.ask("A(b)"))
| 27.85 | 71 | 0.689408 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 327 | 0.195691 |
31319d47ec8ad06ca44bd80af1576e1016d0086b | 837 | py | Python | leetcode/0015_3Sum/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
]
| null | null | null | leetcode/0015_3Sum/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
]
| null | null | null | leetcode/0015_3Sum/result.py | theck17/notes | f32f0f4b8f821b1ed38d173ef0913efddd094b91 | [
"MIT"
]
| null | null | null | # !/usr/bin/env python3
# Author: C.K
# Email: [email protected]
# DateTime:2021-03-15 00:07:14
# Description:
from typing import List


class Solution:
def threeSum(self, nums: List[int]) -> List[List[int]]:
result = set()
for i in range(0, len(nums) - 1):
# Reduce the problem to two sum(0)
two_sum = -nums[i]
cache = set()
for num in nums[i + 1:]:
remaining = two_sum - num
if remaining in cache:
#sorting to create unique tuples
triplet = tuple(sorted([nums[i], remaining, num]))
# using tuple in a set will eliminate duplicates combinations
result.add(triplet)
else:
cache.add(num)
return result
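

# Illustrative check (hypothetical input, not part of the original solution):
#   Solution().threeSum([-1, 0, 1, 2, -1, -4])
#   -> {(-1, 0, 1), (-1, -1, 2)}  # unique triples as sorted tuples in a set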
if __name__ == "__main__":
pass
| 28.862069 | 81 | 0.51135 | 687 | 0.820789 | 0 | 0 | 0 | 0 | 0 | 0 | 241 | 0.287933 |
313299b92381faa8b71fda04a1042894c5a9f593 | 2,937 | py | Python | customer/views.py | lautarianoo/django_shop | 9bc575df8b7af5452bd15cc3cf4fb375be6384bd | [
"MIT"
]
| null | null | null | customer/views.py | lautarianoo/django_shop | 9bc575df8b7af5452bd15cc3cf4fb375be6384bd | [
"MIT"
]
| null | null | null | customer/views.py | lautarianoo/django_shop | 9bc575df8b7af5452bd15cc3cf4fb375be6384bd | [
"MIT"
]
| null | null | null | from django.contrib.auth import authenticate, login
from django.shortcuts import render, redirect
from cart.models import Cart
from django.views import View
from .forms import LoginForm, RegistrationForm, CreateCompanyForm
from customer.models import Customer, ShippingAddress
from src.utils.mixins import CustomerMixin
from checkout.models import ApplyOrganization
class LoginView(CustomerMixin, View):
def get(self, request, *args, **kwargs):
if request.user.is_authenticated:
return redirect('catalog')
form = LoginForm()
return render(request, 'customer/login.html', {'form': form})
def post(self, request, *args, **kwargs):
form = LoginForm(request.POST or None)
if form.is_valid():
email = form.cleaned_data['email']
password = form.cleaned_data['password']
user = authenticate(request, email=email, password=password)
if user:
login(request, user)
return redirect('catalog')
return render(request, 'customer/login.html', {'form': form})
class RegistrationView(View):
def get(self, request, *args, **kwargs):
if request.user.is_authenticated:
return redirect('catalog')
form = RegistrationForm()
return render(request, 'customer/register.html', {'form': form})
def post(self, request, *args, **kwargs):
form = RegistrationForm(request.POST or None, request.FILES or None)
if form.is_valid():
new_user = form.save(commit=False)
customer = Customer.objects.create(user=new_user, status="Unrecognized")
customer.save()
cart = Cart.objects.create(customer=customer)
cart.save()
address = ShippingAddress.objects.create(customer=customer)
address.save()
new_user.set_password(form.cleaned_data['password1'])
new_user.save()
return redirect('login')
return render(request, 'customer/register.html', {'form': form})
class CreateCompany(View):
def get(self, request, *args, **kwargs):
if request.user.is_authenticated and request.user.STATUS_AUTH == "Recognized":
form = CreateCompanyForm()
return render(request, 'customer/create_company.html', {'form': form})
return redirect('catalog')
def post(self, request, *args, **kwargs):
if request.user.is_authenticated and request.user.STATUS_AUTH == "Recognized":
form = CreateCompanyForm(request.POST or None, request.FILES or None)
if form.is_valid():
new_company = form.save(commit=False)
new_company.STATUS_COMPANY = "No verify"
new_company.user = request.user
new_company.save()
return redirect('catalog')
return render(request, 'customer/register.html', {'form': form})
| 41.957143 | 86 | 0.640449 | 2,565 | 0.87334 | 0 | 0 | 0 | 0 | 0 | 0 | 309 | 0.105209 |
3132c447ee024e396e1324d642bb304d3461295b | 1,771 | py | Python | kickstarter/app.py | Annapurnaj91/kickstarter3 | 372b2fd53b0b2b35dad210a7d6d54baff1a16204 | [
"MIT"
]
| null | null | null | kickstarter/app.py | Annapurnaj91/kickstarter3 | 372b2fd53b0b2b35dad210a7d6d54baff1a16204 | [
"MIT"
]
| null | null | null | kickstarter/app.py | Annapurnaj91/kickstarter3 | 372b2fd53b0b2b35dad210a7d6d54baff1a16204 | [
"MIT"
]
| null | null | null | from flask import Flask, render_template, request
# from .recommendation import *
# import pickle
import pandas as pd
import numpy as np
# import keras
# from keras.models import load_model
import pickle
def create_app():
# initializes our app
APP = Flask(__name__)
@APP.route('/')
def form():
return render_template('base.html')
@APP.route('/data/', methods=['GET', 'POST'])
def data():
if request.method == 'POST':
# Get form data
name = request.form.get('name')
blurb = request.form.get('blurb', 'default')
country = request.form.get('country', 'default')
backers_count = request.form.get('backers_count', 'default')
prediction = preprocessDataAndPredict(name, blurb, country,
backers_count)
# print(prediction[0])
            return render_template('data.html', prediction=prediction[0])
        # plain GET: just show the form page again
        return render_template('base.html')
def preprocessDataAndPredict(name, blurb, country, backers_count):
# test_data = (blurb)
test_data = (name, blurb, country, backers_count)
# print(test_data)
test_data = np.array(test_data)
dftest = pd.DataFrame(test_data).T
dftest.columns = ['name', 'blurb', 'country', 'backers_count']
print(dftest)
print(dftest.shape)
# test_data = test_data.reshape(1, -1)
# print(test_data)
#file = open("model.pkl", "wb")
model = pickle.load(
open('model_knn', 'rb'))
# model = pickle.load(
# open('Kickstarter2/kickstarter/kick_model(1)', 'rb'))
prediction = model.predict(dftest)
# print(prediction)
return prediction
# return prediction
return APP | 29.032787 | 73 | 0.59345 | 0 | 0 | 0 | 0 | 668 | 0.377188 | 0 | 0 | 566 | 0.319593 |
3132c79a2cad6d01993855975464a0c7d164ed0d | 898 | py | Python | src/apiron/service/discoverable.py | tushar-deepsource/apiron | 6b542d498e1e2a76d5f8a2d086d237be43d09bc3 | [
"MIT"
]
| 109 | 2018-10-01T19:38:36.000Z | 2022-03-10T05:28:34.000Z | src/apiron/service/discoverable.py | tushar-deepsource/apiron | 6b542d498e1e2a76d5f8a2d086d237be43d09bc3 | [
"MIT"
]
| 39 | 2018-10-01T20:51:49.000Z | 2022-03-07T15:38:32.000Z | src/apiron/service/discoverable.py | tushar-deepsource/apiron | 6b542d498e1e2a76d5f8a2d086d237be43d09bc3 | [
"MIT"
]
| 10 | 2018-10-02T06:54:40.000Z | 2020-05-28T14:30:12.000Z | from typing import List, Type
from apiron.service.base import ServiceBase
class DiscoverableService(ServiceBase):
"""
A Service whose hosts are determined via a host resolver.
A host resolver is any class with a :func:`resolve` method
that takes a service name as its sole argument
and returns a list of host names that correspond to that service.
"""
host_resolver_class: Type
service_name: str
@classmethod
def get_hosts(cls) -> List[str]:
return cls.host_resolver_class.resolve(cls.service_name)
def __str__(self) -> str:
return self.service_name
def __repr__(self) -> str:
klass = self.__class__
return "{klass}(service_name={service_name}, host_resolver={host_resolver})".format(
klass=klass.__name__, service_name=klass.service_name, host_resolver=klass.host_resolver_class.__name__
)
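

# Illustrative usage (added sketch; the names below are hypothetical):
#
#   class StaticResolver:
#       @staticmethod
#       def resolve(service_name):
#           return ["host1.example.com", "host2.example.com"]
#
#   class MyService(DiscoverableService):
#       service_name = "my-service"
#       host_resolver_class = StaticResolver
#
#   MyService.get_hosts()  # -> ["host1.example.com", "host2.example.com"]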
| 30.965517 | 115 | 0.707127 | 820 | 0.91314 | 0 | 0 | 114 | 0.126949 | 0 | 0 | 326 | 0.363029 |
3133622434c8873a4080869ebe2b301b8b8641a5 | 2,765 | py | Python | src/plotman/plot_util.py | rafaelsteil/plotman | b7b4d97a12b60c31f66e18764ff60469930e3b37 | [
"Apache-2.0"
]
| null | null | null | src/plotman/plot_util.py | rafaelsteil/plotman | b7b4d97a12b60c31f66e18764ff60469930e3b37 | [
"Apache-2.0"
]
| null | null | null | src/plotman/plot_util.py | rafaelsteil/plotman | b7b4d97a12b60c31f66e18764ff60469930e3b37 | [
"Apache-2.0"
]
| null | null | null | import math
import os
import re
import shutil
from plotman import job
GB = 1_000_000_000
def df_b(d):
'Return free space for directory (in bytes)'
usage = shutil.disk_usage(d)
return usage.free
def get_k32_plotsize():
return 108 * GB
def is_valid_plot_dst(d, sched_cfg, all_jobs):
if sched_cfg.stop_when_dst_full:
space = df_b(d)
# Subtract space for current jobs which will be moved to the dir
        # Note: This underestimates the free space available when a
# job is in phase 4 since the plot is partially moved to dst,
# once phase 4 is complete a new plot will eventually kick off
jobs_to_dstdir = job.job_phases_for_dstdir(d, all_jobs)
space -= len(jobs_to_dstdir) * get_k32_plotsize()
return enough_space_for_k32(space)
return True
def enough_space_for_k32(b):
'Determine if there is enough space for a k32 given a number of free bytes'
return b > 1.2 * get_k32_plotsize()
def human_format(num, precision):
magnitude = 0
while abs(num) >= 1000:
magnitude += 1
num /= 1000.0
return (('%.' + str(precision) + 'f%s') %
(num, ['', 'K', 'M', 'G', 'T', 'P'][magnitude]))
def time_format(sec):
if sec is None:
return '-'
if sec < 60:
return '%ds' % sec
else:
return '%d:%02d' % (int(sec / 3600), int((sec % 3600) / 60))
def tmpdir_phases_str(tmpdir_phases_pair):
tmpdir = tmpdir_phases_pair[0]
phases = tmpdir_phases_pair[1]
phase_str = ', '.join(['%d:%d' % ph_subph for ph_subph in sorted(phases)])
return ('%s (%s)' % (tmpdir, phase_str))
def split_path_prefix(items):
if not items:
return ('', [])
prefix = os.path.commonpath(items)
if prefix == '/':
return ('', items)
else:
remainders = [ os.path.relpath(i, prefix) for i in items ]
return (prefix, remainders)
def list_k32_plots(d):
'List completed k32 plots in a directory (not recursive)'
plots = []
for plot in os.listdir(d):
if re.match(r'^plot-k32-.*plot$', plot):
plot = os.path.join(d, plot)
try:
if os.stat(plot).st_size > (0.95 * get_k32_plotsize()):
plots.append(plot)
except FileNotFoundError:
continue
return plots
def column_wrap(items, n_cols, filler=None):
'''Take items, distribute among n_cols columns, and return a set
of rows containing the slices of those columns.'''
rows = []
n_rows = math.ceil(len(items) / n_cols)
for row in range(n_rows):
row_items = items[row : : n_rows]
# Pad and truncate
rows.append( (row_items + ([filler] * n_cols))[:n_cols] )
return rows
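

# Illustrative behaviour of column_wrap (added example):
#   column_wrap([1, 2, 3, 4, 5], n_cols=2)
#   -> [[1, 4], [2, 5], [3, None]]   # 3 rows, short column padded with filler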
| 30.384615 | 79 | 0.609403 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 667 | 0.24123 |
313400b0bdd90b560bc3c6b261a67708f28e5651 | 608 | py | Python | src/config.py | forkedbranch/femm-opt | 78d9cd56510f781fe2643da4c2dadae002537d64 | [
"Apache-2.0"
]
| 1 | 2016-05-03T13:58:00.000Z | 2016-05-03T13:58:00.000Z | src/config.py | forkedbranch/femm-opt | 78d9cd56510f781fe2643da4c2dadae002537d64 | [
"Apache-2.0"
]
| null | null | null | src/config.py | forkedbranch/femm-opt | 78d9cd56510f781fe2643da4c2dadae002537d64 | [
"Apache-2.0"
]
| null | null | null | # (c) Copyright 2016 forkedbranch (http://forkedbranch.eu/)
# Licensed under the Apache License, Version 2.0
import configparser
config = configparser.ConfigParser()
config.read('config.ini')
def get_input_folder():
return config['DEFAULT']['InputFolder']
def get_output_folder():
return config['DEFAULT']['OutputForlder']
def get_femm_exe():
return config['DEFAULT']['FemmExe']
def get_ffmpeg_exe():
return config['DEFAULT']['FfmpegExe']
def get_femm_scr_templ():
return config['DEFAULT']['FemmScrTempl']
def get_femm_scr_lib():
return config['DEFAULT']['FemmScrLib'] | 24.32 | 59 | 0.717105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 247 | 0.40625 |
3134461b8aadf2c623046676e642ba5bc115a4e1 | 4,091 | py | Python | vespene/workers/registration.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
]
| 680 | 2018-10-29T12:12:10.000Z | 2019-04-27T09:52:58.000Z | vespene/workers/registration.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
]
| 110 | 2018-10-29T12:33:34.000Z | 2019-02-14T02:31:43.000Z | vespene/workers/registration.py | Conan-Kudo/vespene | 9e9977523f45586e1326ccd77d8cc0cb10591a07 | [
"Apache-2.0"
]
| 92 | 2018-10-29T12:21:12.000Z | 2019-06-08T11:08:08.000Z | # Copyright 2018, Michael DeHaan LLC
# License: Apache License Version 2.0
# -------------------------------------------------------------------------
# registration.py - updates the database to say who is building something
# and what the current settings are, which is used by the file serving
# code to see if it is ok to serve up files in the buildroot. But also
# for record keeping.
# --------------------------------------------------------------------------
from datetime import datetime
import random
import fcntl
import subprocess
import os
from django.utils import timezone
from django.conf import settings
from vespene.common.logger import Logger
from vespene.models.worker import Worker
LOG = Logger()
WORKER_ID_FILE = "/etc/vespene/worker_id"
# =============================================================================
class RegistrationManager(object):
def __init__(self, builder, build):
self.builder = builder
self.build = build
self.project = self.build.project
def create_worker_id(self):
wid = ''.join(random.SystemRandom().choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50))
fd = open(WORKER_ID_FILE, "w+")
fd.write(wid)
fd.close()
return wid
def get_worker_id(self, fd):
return fd.readlines()[0].strip()
def get_worker_record(self, worker_id):
qs = Worker.objects.filter(worker_uid=worker_id)
if not qs.exists():
return None
return qs.first()
# worker_pool = models.ForeignKey('WorkerPool', null=False, on_delete=models.SET_NULL)
# hostname = models.CharField(max_length=1024, null=True)
# port = models.IntField(null=False, default=8080)
# working_dir = models.CharField(max_length=1024, null=True)
# first_checkin = models.DateTimeField(null=True, blank=True)
# last_checkin = models.DateTimeField(null=True, blank=True)
# fileserving_enabled = models.BooleanField(null=False, default=False)
def get_hostname(self):
if settings.FILESERVING_HOSTNAME:
return settings.FILESERVING_HOSTNAME
return self.guess_hostname()
def guess_hostname(self):
return subprocess.check_output("hostname").decode('utf-8').strip()
def get_port(self):
if settings.FILESERVING_PORT:
return settings.FILESERVING_PORT
else:
return 8000
def get_build_root(self):
return settings.BUILD_ROOT
def get_fileserving_enabled(self):
return settings.FILESERVING_ENABLED
def create_worker_record(self, worker_id):
now = datetime.now(tz=timezone.utc)
obj = Worker(
worker_uid = worker_id,
hostname = self.get_hostname(),
port = self.get_port(),
build_root = self.get_build_root(),
first_checkin = now,
last_checkin = now,
fileserving_enabled = self.get_fileserving_enabled()
)
obj.save()
return obj
def update_worker_record(self, worker):
now = datetime.now(tz=timezone.utc)
worker.hostname = self.get_hostname()
worker.port = self.get_port()
worker.build_root = self.get_build_root()
worker.last_checkin = now
worker.fileserving_enabled = self.get_fileserving_enabled()
worker.save()
return worker
def go(self):
"""
        Register this worker, creating or updating its Worker record,
        and attach that record to the current build.
"""
if not os.path.exists(WORKER_ID_FILE):
worker_id = self.create_worker_id()
fd = open(WORKER_ID_FILE, "r")
fcntl.flock(fd, fcntl.LOCK_EX)
worker_id = self.get_worker_id(fd)
worker_record = self.get_worker_record(worker_id)
if not worker_record:
worker_record = self.create_worker_record(worker_id)
else:
worker_record = self.update_worker_record(worker_record)
self.build.worker = worker_record
self.build.save()
fcntl.flock(fd, fcntl.LOCK_UN)
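
# The flock() calls above take an advisory exclusive lock on the worker-id
# file, so concurrent builds on one host serialize their registration:
# LOCK_EX blocks until no other build holds the lock; LOCK_UN releases it
# for the next build.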
| 32.728 | 124 | 0.620142 | 3,236 | 0.791005 | 0 | 0 | 0 | 0 | 0 | 0 | 1,170 | 0.285994 |
313582b593f74c9cfe2f0d1c30d9930aec3b40a3 | 12,957 | py | Python | src/robustness.py | mathigatti/sota-music-tagging-models | b4331b07fe45902af96830f2821926ab86e17d42 | [
"MIT"
]
| null | null | null | src/robustness.py | mathigatti/sota-music-tagging-models | b4331b07fe45902af96830f2821926ab86e17d42 | [
"MIT"
]
| null | null | null | src/robustness.py | mathigatti/sota-music-tagging-models | b4331b07fe45902af96830f2821926ab86e17d42 | [
"MIT"
]
| null | null | null | # coding: utf-8
'''
Deformation codes are borrowed from MUDA
McFee et al., A software framework for musical data augmentation, 2015
https://github.com/bmcfee/muda
'''
import os
import time
import subprocess
import tempfile
import numpy as np
import pandas as pd
import datetime
import tqdm
import csv
import fire
import argparse
import pickle
from sklearn import metrics
import pandas as pd
import librosa
import soundfile as psf
import torch
import torch.nn as nn
from torch.autograd import Variable
from solver import skip_files
from sklearn.preprocessing import LabelBinarizer
import model as Model
TAGS = ['genre---downtempo', 'genre---ambient', 'genre---rock', 'instrument---synthesizer',
        'genre---atmospheric', 'genre---indie', 'instrument---electricpiano', 'genre---newage',
        'instrument---strings', 'instrument---drums', 'instrument---drummachine', 'genre---techno',
        'instrument---guitar', 'genre---alternative', 'genre---easylistening', 'genre---instrumentalpop',
        'genre---chillout', 'genre---metal', 'mood/theme---happy', 'genre---lounge',
        'genre---reggae', 'genre---popfolk', 'genre---orchestral', 'instrument---acousticguitar',
        'genre---poprock', 'instrument---piano', 'genre---trance', 'genre---dance',
        'instrument---electricguitar', 'genre---soundtrack', 'genre---house', 'genre---hiphop',
        'genre---classical', 'mood/theme---energetic', 'genre---electronic', 'genre---world',
        'genre---experimental', 'instrument---violin', 'genre---folk', 'mood/theme---emotional',
        'instrument---voice', 'instrument---keyboard', 'genre---pop', 'instrument---bass',
        'instrument---computer', 'mood/theme---film', 'genre---triphop', 'genre---jazz',
        'genre---funk', 'mood/theme---relaxing']
def read_file(tsv_file):
tracks = {}
with open(tsv_file) as fp:
reader = csv.reader(fp, delimiter='\t')
next(reader, None) # skip header
for row in reader:
track_id = row[0]
tracks[track_id] = {
'path': row[3].replace('.mp3', '.npy'),
'tags': row[5:],
}
return tracks
class Predict(object):
def __init__(self, config):
self.model_type = config.model_type
self.model_load_path = config.model_load_path
self.dataset = config.dataset
self.data_path = config.data_path
self.batch_size = config.batch_size
self.is_cuda = torch.cuda.is_available()
self.build_model()
self.get_dataset()
self.mod = config.mod
self.rate = config.rate
self.PRESETS = {
"radio": ["0.01,1", "-90,-90,-70,-70,-60,-20,0,0", "-5"],
"film standard": ["0.1,0.3", "-90,-90,-70,-64,-43,-37,-31,-31,-21,-21,0,-20", "0", "0", "0.1"],
"film light": ["0.1,0.3", "-90,-90,-70,-64,-53,-47,-41,-41,-21,-21,0,-20", "0", "0", "0.1"],
"music standard": ["0.1,0.3", "-90,-90,-70,-58,-55,-43,-31,-31,-21,-21,0,-20", "0", "0", "0.1"],
"music light": ["0.1,0.3", "-90,-90,-70,-58,-65,-53,-41,-41,-21,-21,0,-11", "0", "0", "0.1"],
"speech": ["0.1,0.3", "-90,-90,-70,-55,-50,-35,-31,-31,-21,-21,0,-20", "0", "0", "0.1"]
}
self.preset_dict = {1: "radio",
2: "film standard",
3: "film light",
4: "music standard",
5: "music light",
6: "speech"}
def get_model(self):
if self.model_type == 'fcn':
self.input_length = 29 * 16000
return Model.FCN()
elif self.model_type == 'musicnn':
self.input_length = 3 * 16000
return Model.Musicnn(dataset=self.dataset)
elif self.model_type == 'crnn':
self.input_length = 29 * 16000
return Model.CRNN()
elif self.model_type == 'sample':
self.input_length = 59049
return Model.SampleCNN()
elif self.model_type == 'se':
self.input_length = 59049
return Model.SampleCNNSE()
elif self.model_type == 'short':
self.input_length = 59049
return Model.ShortChunkCNN()
elif self.model_type == 'short_res':
self.input_length = 59049
return Model.ShortChunkCNN_Res()
elif self.model_type == 'attention':
self.input_length = 15 * 16000
return Model.CNNSA()
elif self.model_type == 'hcnn':
self.input_length = 5 * 16000
return Model.HarmonicCNN()
else:
            print('model_type has to be one of [fcn, musicnn, crnn, sample, se, short, short_res, attention, hcnn]')
def build_model(self):
self.model = self.get_model()
# cuda
if self.is_cuda:
self.model.cuda()
# load model
self.load(self.model_load_path)
def get_dataset(self):
if self.dataset == 'mtat':
self.test_list = np.load('./../split/mtat/test.npy')
self.binary = np.load('./../split/mtat/binary.npy')
if self.dataset == 'msd':
test_file = os.path.join('./../split/msd','filtered_list_test.cP')
test_list = pickle.load(open(test_file,'rb'), encoding='bytes')
self.test_list = [value for value in test_list if value.decode() not in skip_files]
id2tag_file = os.path.join('./../split/msd', 'msd_id_to_tag_vector.cP')
self.id2tag = pickle.load(open(id2tag_file,'rb'), encoding='bytes')
if self.dataset == 'jamendo':
test_file = os.path.join('./../split/mtg-jamendo', 'autotagging_top50tags-test.tsv')
self.file_dict= read_file(test_file)
self.test_list= list(self.file_dict.keys())
self.mlb = LabelBinarizer().fit(TAGS)
if self.dataset == 'jamendo-mood':
test_file = os.path.join('./../split/mtg-jamendo-mood', 'autotagging_moodtheme-test.tsv')
self.file_dict= read_file(test_file)
self.test_list= list(self.file_dict.keys())
self.mlb = LabelBinarizer().fit(TAGS)
def load(self, filename):
S = torch.load(filename)
self.model.load_state_dict(S)
def to_var(self, x):
if torch.cuda.is_available():
x = x.cuda()
return Variable(x)
def get_tensor(self, fn):
# load audio
if self.dataset == 'mtat':
npy_path = os.path.join(self.data_path, 'mtat', 'npy', fn.split('/')[1][:-3]) + 'npy'
elif self.dataset == 'msd':
msid = fn.decode()
filename = '{}/{}/{}/{}.npy'.format(msid[2], msid[3], msid[4], msid)
npy_path = os.path.join(self.data_path, filename)
elif self.dataset == 'jamendo':
filename = self.file_dict[fn]['path']
npy_path = os.path.join(self.data_path, filename)
elif self.dataset == 'jamendo-mood':
filename = self.file_dict[fn]['path']
npy_path = os.path.join(self.data_path, filename)
raw = np.load(npy_path, mmap_mode='r')
raw = self.modify(raw, self.rate, self.mod)
# split chunk
length = len(raw)
hop = (length - self.input_length) // self.batch_size
x = torch.zeros(self.batch_size, self.input_length)
for i in range(self.batch_size):
x[i] = torch.Tensor(raw[i*hop:i*hop+self.input_length]).unsqueeze(0)
return x
def modify(self, x, mod_rate, mod_type):
if mod_type == 'time_stretch':
return self.time_stretch(x, mod_rate)
elif mod_type == 'pitch_shift':
return self.pitch_shift(x, mod_rate)
elif mod_type == 'dynamic_range':
return self.dynamic_range_compression(x, mod_rate)
elif mod_type == 'white_noise':
return self.white_noise(x, mod_rate)
else:
print('choose from [time_stretch, pitch_shift, dynamic_range, white_noise]')
def time_stretch(self, x, rate):
'''
[2 ** (-.5), 2 ** (.5)]
'''
return librosa.effects.time_stretch(x, rate)
def pitch_shift(self, x, rate):
'''
[-1, 1]
'''
return librosa.effects.pitch_shift(x, 16000, rate)
def dynamic_range_compression(self, x, rate):
'''
[4, 6]
Music standard & Speech
'''
return self.sox(x, 16000, "compand", *self.PRESETS[self.preset_dict[rate]])
@staticmethod
def sox(x, fs, *args):
assert fs > 0
fdesc, infile = tempfile.mkstemp(suffix=".wav")
os.close(fdesc)
fdesc, outfile = tempfile.mkstemp(suffix=".wav")
os.close(fdesc)
psf.write(infile, x, fs)
try:
arguments = ["sox", infile, outfile, "-q"]
arguments.extend(args)
subprocess.check_call(arguments)
x_out, fs = psf.read(outfile)
x_out = x_out.T
if x.ndim == 1:
x_out = librosa.to_mono(x_out)
finally:
os.unlink(infile)
os.unlink(outfile)
return x_out
def white_noise(self, x, rate):
'''
[0.1, 0.4]
'''
n_frames = len(x)
noise_white = np.random.RandomState().randn(n_frames)
noise_fft = np.fft.rfft(noise_white)
values = np.linspace(1, n_frames * 0.5 + 1, n_frames // 2 + 1)
colored_filter = np.linspace(1, n_frames / 2 + 1, n_frames // 2 + 1) ** 0
noise_filtered = noise_fft * colored_filter
noise = librosa.util.normalize(np.fft.irfft(noise_filtered)) * (x.max())
if len(noise) < len(x):
x = x[:len(noise)]
return (1 - rate) * x + (noise * rate)
def get_auc(self, est_array, gt_array):
roc_aucs = metrics.roc_auc_score(gt_array, est_array, average='macro')
pr_aucs = metrics.average_precision_score(gt_array, est_array, average='macro')
return roc_aucs, pr_aucs
def test(self):
roc_auc, pr_auc, loss = self.get_test_score()
print('loss: %.4f' % loss)
print('roc_auc: %.4f' % roc_auc)
print('pr_auc: %.4f' % pr_auc)
def get_test_score(self):
self.model = self.model.eval()
est_array = []
gt_array = []
losses = []
reconst_loss = nn.BCELoss()
for line in tqdm.tqdm(self.test_list):
if self.dataset == 'mtat':
ix, fn = line.split('\t')
elif self.dataset == 'msd':
fn = line
if fn.decode() in skip_files:
continue
elif self.dataset == 'jamendo':
fn = line
elif self.dataset == 'jamendo-mood':
fn = line
# load and split
x = self.get_tensor(fn)
# ground truth
if self.dataset == 'mtat':
ground_truth = self.binary[int(ix)]
elif self.dataset == 'msd':
ground_truth = self.id2tag[fn].flatten()
elif self.dataset == 'jamendo':
ground_truth = np.sum(self.mlb.transform(self.file_dict[fn]['tags']), axis=0)
elif self.dataset == 'jamendo-mood':
ground_truth = np.sum(self.mlb.transform(self.file_dict[fn]['tags']), axis=0)
# forward
x = self.to_var(x)
y = torch.tensor([ground_truth.astype('float32') for i in range(self.batch_size)]).cuda()
out = self.model(x)
loss = reconst_loss(out, y)
losses.append(float(loss.data))
out = out.detach().cpu()
# estimate
estimated = np.array(out).mean(axis=0)
est_array.append(estimated)
gt_array.append(ground_truth)
est_array, gt_array = np.array(est_array), np.array(gt_array)
loss = np.mean(losses)
roc_auc, pr_auc = self.get_auc(est_array, gt_array)
return roc_auc, pr_auc, loss
if __name__ == '__main__':
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--num_workers', type=int, default=0)
parser.add_argument('--dataset', type=str, default='mtat', choices=['mtat', 'msd', 'jamendo','jamendo-mood'])
parser.add_argument('--model_type', type=str, default='fcn',
choices=['fcn', 'musicnn', 'crnn', 'sample', 'se', 'short', 'short_res', 'attention', 'hcnn'])
parser.add_argument('--batch_size', type=int, default=16)
parser.add_argument('--model_load_path', type=str, default='.')
parser.add_argument('--data_path', type=str, default='./data')
parser.add_argument('--mod', type=str, default='time_stretch')
parser.add_argument('--rate', type=float, default=0)
config = parser.parse_args()
p = Predict(config)
p.test()
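
    # Example invocation (illustrative values, not from the original repo):
    #   python robustness.py --dataset mtat --model_type fcn \
    #       --model_load_path ./models/best_model.pth --data_path ./data \
    #       --mod pitch_shift --rate 0.5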
| 39.027108 | 1,080 | 0.56317 | 10,011 | 0.772633 | 0 | 0 | 655 | 0.050552 | 0 | 0 | 3,063 | 0.236397 |
31359392b650301014dc6082de0814c668402f3a | 742 | py | Python | tests/test_db_mathes_ui.py | AlexRovan/Python_training | 4878be4a7d354bd93e0e33b6e40a23a9592de213 | [
"Apache-2.0"
]
| null | null | null | tests/test_db_mathes_ui.py | AlexRovan/Python_training | 4878be4a7d354bd93e0e33b6e40a23a9592de213 | [
"Apache-2.0"
]
| null | null | null | tests/test_db_mathes_ui.py | AlexRovan/Python_training | 4878be4a7d354bd93e0e33b6e40a23a9592de213 | [
"Apache-2.0"
]
| null | null | null | from model.group import Group
from model.contact import Contact
def test_group_list(app,db):
ui_group = app.group.get_groups_list()
def clean(group):
return Group(id=group.id, name=group.name.strip())
bd_group = map(clean,db.get_group_list())
assert sorted(ui_group,key=Group.id_or_max) == sorted(bd_group, key=Group.id_or_max)
def test_contact_list(app,db):
ui_contact = app.contact.get_contacts_list()
def clean(contact):
return Contact(id=contact.id,firstname = ''.join(contact.firstname.split(' ')),lastname = ''.join(contact.lastname.split(' ')))
bd_contact = map(clean,db.get_contact_list())
assert sorted(ui_contact,key=Contact.id_or_max) == sorted(bd_contact, key=Contact.id_or_max)
| 43.647059 | 135 | 0.725067 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.013477 |
3136541ef58415903ff6578bb2e900d003116de0 | 1,065 | py | Python | 10 Days of Statistics/Day 5 - Normal Distribution I.py | sohammanjrekar/HackerRank | 1f5010133a1ac1e765e855a086053c97d9e958be | [
"MIT"
]
| null | null | null | 10 Days of Statistics/Day 5 - Normal Distribution I.py | sohammanjrekar/HackerRank | 1f5010133a1ac1e765e855a086053c97d9e958be | [
"MIT"
]
| null | null | null | 10 Days of Statistics/Day 5 - Normal Distribution I.py | sohammanjrekar/HackerRank | 1f5010133a1ac1e765e855a086053c97d9e958be | [
"MIT"
]
| null | null | null | """
Day 5: Normal Distribution I
In a certain plant, the time taken to assemble a car is a random variable X having a normal
distribution with a mean of 20 hours and a standard deviation of 2 hours. What is the
probability that a car can be assembled at this plant in:
1. Less than 19.5 hours?
2. Between 20 and 22 hours?
Author: Eda AYDIN
"""
import math
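
# Both helpers below evaluate the normal CDF in closed form:
#   F(x) = 0.5 * (1 + erf((x - mean) / (std * sqrt(2))))
# so P(X < 19.5) = F(19.5) and P(20 <= X <= 22) = F(22) - F(20).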
# less than 19.5 hours
def cumulative1(mean, std, less):
print(round(0.5 * (1 + math.erf((less - mean) / (std * (2 ** 0.5)))), 3))
# Between 20 and 22 hours
def cumulative2(mean, std, lower_range, upper_range):
print(round(0.5 * (1 + math.erf((upper_range - mean) / (std * (2 ** 0.5)))) -
0.5 * (1 + math.erf((lower_range - mean) / (std * (2 ** 0.5)))), 3))
values = list(map(float, input().split()))
mean = values[0]
std = values[1]
less = float(input())
boundaries = list(map(float, input().split()))
lower_range = boundaries[0]
upper_range = boundaries[1]
cumulative1(mean, std, less)
cumulative2(mean, std, lower_range, upper_range) | 27.307692 | 104 | 0.638498 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 406 | 0.381221 |
3136a4ea2bcd00c8806e57580d4c6c2d114d3d2d | 68 | py | Python | 11024/11024.py3.py | isac322/BOJ | 35959dd1a63d75ebca9ed606051f7a649d5c0c7b | [
"MIT"
]
| 14 | 2017-05-02T02:00:42.000Z | 2021-11-16T07:25:29.000Z | 11024/11024.py3.py | isac322/BOJ | 35959dd1a63d75ebca9ed606051f7a649d5c0c7b | [
"MIT"
]
| 1 | 2017-12-25T14:18:14.000Z | 2018-02-07T06:49:44.000Z | 11024/11024.py3.py | isac322/BOJ | 35959dd1a63d75ebca9ed606051f7a649d5c0c7b | [
"MIT"
]
| 9 | 2016-03-03T22:06:52.000Z | 2020-04-30T22:06:24.000Z | for _ in range(int(input())):
print(sum(map(int, input().split()))) | 34 | 38 | 0.632353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
3136edb9ff4f65fcd40844fb2d3465f8582e6c6f | 4,325 | py | Python | tests/test_contact_form.py | LaudateCorpus1/apostello | 1ace89d0d9e1f7a1760f6247d90a60a9787a4f12 | [
"MIT"
]
| 69 | 2015-10-03T20:27:53.000Z | 2021-04-06T05:26:18.000Z | tests/test_contact_form.py | LaudateCorpus1/apostello | 1ace89d0d9e1f7a1760f6247d90a60a9787a4f12 | [
"MIT"
]
| 73 | 2015-10-03T17:53:47.000Z | 2020-10-01T03:08:01.000Z | tests/test_contact_form.py | LaudateCorpus1/apostello | 1ace89d0d9e1f7a1760f6247d90a60a9787a4f12 | [
"MIT"
]
| 29 | 2015-10-23T22:00:13.000Z | 2021-11-30T04:48:06.000Z | import pytest
from apostello import models
@pytest.mark.slow
@pytest.mark.django_db
class TestContactForm:
"""Test the sending of SMS."""
def test_number_permissions_staff_exception(self, recipients, users):
"""Test sending a message now."""
calvin = recipients["calvin"]
# check good post:
prof = users["staff"].profile
prof.can_see_contact_nums = False
prof.save()
r = users["c_staff"].post(
f"/api/v2/recipients/{calvin.pk}/",
{
"pk": calvin.pk,
"first_name": calvin.first_name,
"last_name": calvin.last_name,
"number": "+447900000000",
"do_not_reply": calvin.do_not_reply,
},
)
assert r.status_code == 200
calvin.refresh_from_db()
assert calvin.number == "+447900000000"
def test_number_permissions_no_perm(self, recipients, users):
calvin = recipients["calvin"]
r = users["c_in"].post(
f"/api/v2/recipients/{calvin.pk}/",
{
"pk": calvin.pk,
"first_name": calvin.first_name,
"last_name": calvin.last_name,
"number": "+447900000000",
"do_not_reply": calvin.do_not_reply,
},
)
assert r.status_code == 400
assert "You do not have permission to change the number field." in r.json()["errors"]["__all__"]
def test_number_permissions_with_perm(self, recipients, users):
calvin = recipients["calvin"]
# check good post:
prof = users["notstaff2"].profile
prof.can_see_contact_nums = True
prof.save()
r = users["c_in"].post(
f"/api/v2/recipients/{calvin.pk}/",
{
"pk": calvin.pk,
"first_name": calvin.first_name,
"last_name": calvin.last_name,
"number": "+447900000001",
"do_not_reply": calvin.do_not_reply,
},
)
assert r.status_code == 200
calvin.refresh_from_db()
assert calvin.number == "+447900000001"
def test_notes_permissions_staff_exception(self, recipients, users):
"""Test sending a message now."""
calvin = recipients["calvin"]
# check good post:
prof = users["staff"].profile
prof.can_see_contact_notes = False
prof.save()
r = users["c_staff"].post(
f"/api/v2/recipients/{calvin.pk}/",
{
"pk": calvin.pk,
"first_name": calvin.first_name,
"last_name": calvin.last_name,
"number": calvin.number,
"do_not_reply": calvin.do_not_reply,
"notes": "hi there",
},
)
assert r.status_code == 200
calvin.refresh_from_db()
assert calvin.notes == "hi there"
def test_notes_permissions_no_perm(self, recipients, users):
calvin = recipients["calvin"]
r = users["c_in"].post(
f"/api/v2/recipients/{calvin.pk}/",
{
"pk": calvin.pk,
"first_name": calvin.first_name,
"last_name": calvin.last_name,
"do_not_reply": calvin.do_not_reply,
"notes": "hi there",
},
)
assert r.status_code == 400
assert "You do not have permission to change the notes field." in r.json()["errors"]["__all__"]
calvin.refresh_from_db()
assert not (calvin.notes == "hi there")
def test_notes_permissions_with_perm(self, recipients, users):
calvin = recipients["calvin"]
# check good post:
prof = users["notstaff2"].profile
prof.can_see_contact_notes = True
prof.save()
r = users["c_in"].post(
f"/api/v2/recipients/{calvin.pk}/",
{
"pk": calvin.pk,
"first_name": calvin.first_name,
"last_name": calvin.last_name,
"do_not_reply": calvin.do_not_reply,
"notes": "something something",
},
)
assert r.status_code == 200
calvin.refresh_from_db()
assert calvin.notes == "something something"
| 34.6 | 104 | 0.536647 | 4,237 | 0.979653 | 0 | 0 | 4,278 | 0.989133 | 0 | 0 | 1,099 | 0.254104 |
31383059d386bdab69fab697d6aece922f099277 | 1,236 | py | Python | tests/unit/records/format_hints.py | cwegrzyn/records-mover | e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2 | [
"Apache-2.0"
]
| 36 | 2020-03-17T11:56:51.000Z | 2022-01-19T16:03:32.000Z | tests/unit/records/format_hints.py | cwegrzyn/records-mover | e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2 | [
"Apache-2.0"
]
| 60 | 2020-03-02T23:13:29.000Z | 2021-05-19T15:05:42.000Z | tests/unit/records/format_hints.py | cwegrzyn/records-mover | e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2 | [
"Apache-2.0"
]
| 4 | 2020-08-11T13:17:37.000Z | 2021-11-05T21:11:52.000Z | bluelabs_format_hints = {
'field-delimiter': ',',
'record-terminator': "\n",
'compression': 'GZIP',
'quoting': None,
'quotechar': '"',
'doublequote': False,
'escape': '\\',
'encoding': 'UTF8',
'dateformat': 'YYYY-MM-DD',
'timeonlyformat': 'HH24:MI:SS',
'datetimeformattz': 'YYYY-MM-DD HH:MI:SSOF',
'datetimeformat': 'YYYY-MM-DD HH24:MI:SS',
'header-row': False,
}
csv_format_hints = {
'field-delimiter': ',',
'record-terminator': "\n",
'compression': 'GZIP',
'quoting': 'minimal',
'quotechar': '"',
'doublequote': True,
'escape': None,
'encoding': 'UTF8',
'dateformat': 'MM/DD/YY',
'timeonlyformat': 'HH24:MI:SS',
'datetimeformattz': 'MM/DD/YY HH24:MI',
'datetimeformat': 'MM/DD/YY HH24:MI',
'header-row': True,
}
vertica_format_hints = {
'field-delimiter': '\001',
'record-terminator': '\002',
'compression': None,
'quoting': None,
'quotechar': '"',
'doublequote': False,
'escape': None,
'encoding': 'UTF8',
'dateformat': 'YYYY-MM-DD',
'timeonlyformat': 'HH24:MI:SS',
'datetimeformat': 'YYYY-MM-DD HH:MI:SS',
'datetimeformattz': 'YYYY-MM-DD HH:MI:SSOF',
'header-row': False,
}
| 25.75 | 48 | 0.572006 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 796 | 0.644013 |
313a0495ccbff8beabc442ab70ce74da829e195f | 1,829 | py | Python | CONST.py | Bobobert/DQN-Vanilla | a793021692b58f9b8249819302cb2cd08ad57edb | [
"MIT"
]
| null | null | null | CONST.py | Bobobert/DQN-Vanilla | a793021692b58f9b8249819302cb2cd08ad57edb | [
"MIT"
]
| null | null | null | CONST.py | Bobobert/DQN-Vanilla | a793021692b58f9b8249819302cb2cd08ad57edb | [
"MIT"
]
| null | null | null | # FROM THE OP PAPER-ISH
MINI_BATCH_SIZE = 32
MEMORY_SIZE = 10**6
BUFFER_SIZE = 100
LHIST = 4
GAMMA = 0.99
UPDATE_FREQ_ONlINE = 4
UPDATE_TARGET = 2500 # This was 10**4, but it is measured in actor steps, so it is divided by UPDATE_FREQ_ONlINE
TEST_FREQ = 5*10**4 # Measure in updates
TEST_STEPS = 10**4
LEARNING_RATE = 0.00025
G_MOMENTUM = 0.95
EPSILON_INIT = 1.0
EPSILON_FINAL = 0.1
EPSILON_TEST = 0.05
EPSILON_LIFE = 10**6
REPLAY_START = 5*10**4
NO_OP_MAX = 30
UPDATES = 5*10**6
CLIP_REWARD = 1.0
CLIP_ERROR = 1.0
# MISC
PLAY_STEPS = 3000
BUFFER_SAMPLES = 20
CROP = (0, -1)
FRAMESIZE = [84,84]
FRAMESIZETP = (84,84)
#DROPS = [0.0,0.15,0.1,0.0]
DROPS = [0.0, 0.0, 0.0, 0.0]
Games = ['air_raid', 'alien', 'amidar', 'assault', 'asterix', 'asteroids', 'atlantis',
'bank_heist', 'battle_zone', 'beam_rider', 'bowling', 'boxing', 'breakout', 'carnival',
'centipede', 'chopper_command', 'crazy_climber', 'demon_attack', 'double_dunk',
'enduro', 'fishing_derby', 'freeway', 'frostbite', 'gopher', 'gravitar',
'hero', 'ice_hockey', 'jamesbond', 'kangaroo', 'krull', 'kung_fu_master',
'montezuma_revenge', 'ms_pacman', 'name_this_game', 'pong',
'private_eye', 'qbert', 'riverraid', 'road_runner', 'robotank', 'seaquest',
'space_invaders', 'star_gunner', 'tennis', 'time_pilot', 'tutankham', 'up_n_down',
'venture', 'video_pinball', 'wizard_of_wor', 'zaxxon']
GamesExtras = ['defender','phoenix','berzerk','skiing','yars_revenge','solaris','pitfall',]
ACTION_MEANING = {
0: "NOOP",
1: "FIRE",
2: "UP",
3: "RIGHT",
4: "LEFT",
5: "DOWN",
6: "UPRIGHT",
7: "UPLEFT",
8: "DOWNRIGHT",
9: "DOWNLEFT",
10: "UPFIRE",
11: "RIGHTFIRE",
12: "LEFTFIRE",
13: "DOWNFIRE",
14: "UPRIGHTFIRE",
15: "UPLEFTFIRE",
16: "DOWNRIGHTFIRE",
17: "DOWNLEFTFIRE",
} | 29.5 | 104 | 0.640241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 958 | 0.523783 |
313a4f7ab87e42cd98ccf9595c3af6b5e740cfb1 | 6,031 | py | Python | milieu/paper/methods/milieu.py | seyuboglu/milieu | dfcd23ecb4449e6fa5debf8ed371fbc5d5c22e8e | [
"Apache-2.0"
]
| 1 | 2020-07-15T18:52:29.000Z | 2020-07-15T18:52:29.000Z | milieu/paper/methods/milieu.py | seyuboglu/milieu | dfcd23ecb4449e6fa5debf8ed371fbc5d5c22e8e | [
"Apache-2.0"
]
| null | null | null | milieu/paper/methods/milieu.py | seyuboglu/milieu | dfcd23ecb4449e6fa5debf8ed371fbc5d5c22e8e | [
"Apache-2.0"
]
| null | null | null |
import os
import json
import logging
from collections import defaultdict
import numpy as np
import networkx as nx
import parse
import torch
from torch.utils.data import DataLoader
from torch.optim import Adam
from tqdm import tqdm
from milieu.util.util import place_on_cpu, place_on_gpu
from milieu.paper.methods.method import DPPMethod
class MilieuMethod(DPPMethod):
""" GCN method class
"""
def __init__(self, network, diseases, params):
super().__init__(network, diseases, params)
self.dir = params["dir"]
self.adjacency = self.network.adj_matrix
self.diseases = diseases
self.params = params
print(self.params)
if self.params.get("load", False):
self.load_method()
else:
self.train_method(diseases)
self.curr_fold = None
def load_method(self):
"""
"""
logging.info("Loading Params...")
with open(os.path.join(self.dir, "params.json")) as f:
params = json.load(f)["process_params"]["method_params"]
params.update(self.params)
self.params = params
logging.info("Loading Models...")
self.folds_to_models = {}
for model_file in os.listdir(os.path.join(self.dir, "models")):
split = parse.parse("model_{}.tar", model_file)[0]
self.folds_to_models[split] = os.path.join(self.dir,
"models",
model_file)
def train_method(self, diseases):
"""
"""
logging.info("Training Models...")
folds_to_diseases = defaultdict(set)
for disease in diseases.values():
if disease.split == "none":
continue
folds_to_diseases[disease.split].add(disease)
self.folds_to_models = {}
if not(os.path.exists(os.path.join(self.dir, "models"))):
os.mkdir(os.path.join(self.dir, "models"))
for test_fold in folds_to_diseases.keys():
logging.info("Training model for test {}".format(test_fold))
val_fold = str((int(test_fold) - 1) % len(folds_to_diseases))
test_dataset = DiseaseDataset([disease
for disease in folds_to_diseases[test_fold]],
self.network)
val_dataset = DiseaseDataset([disease
for disease in folds_to_diseases[val_fold]],
self.network)
train_dataset = DiseaseDataset([disease
for fold, diseases in folds_to_diseases.items()
if fold != test_fold and fold != val_fold
for disease in diseases],
self.network)
# ensure no data leakage
assert(not set.intersection(*[test_dataset.get_ids(),
train_dataset.get_ids()]))
assert(not set.intersection(*[val_dataset.get_ids(),
train_dataset.get_ids()]))
model = self.train_model(train_dataset, val_dataset)
path = os.path.join(self.dir, "models/model_{}.tar".format(test_fold))
torch.save(model.state_dict(), path)
self.folds_to_models[test_fold] = path
def train_model(self, train_dataset, val_dataset):
""" Trains the underlying model
"""
train_dl = DataLoader(train_dataset,
batch_size=self.params["batch_size"],
shuffle=True,
num_workers=self.params["num_workers"],
pin_memory=self.params["cuda"])
dev_dl = DataLoader(val_dataset,
batch_size=self.params["batch_size"],
shuffle=True,
num_workers=self.params["num_workers"],
pin_memory=self.params["cuda"])
if self.params["model_class"] == "LCIEmbModule":
model = LCIEmbModule(self.params["model_args"], self.network)
else:
model = LCIModule(self.params, self.adjacency)
if self.params["cuda"]:
model = model.cuda()
optimizer = Adam(model.parameters(), lr=self.params["learning_rate"],
weight_decay=self.params["weight_decay"])
logging.info("Starting training for {} epoch(s)".format(self.params["num_epochs"]))
model.train()
train_and_evaluate(
model,
train_dl,
dev_dl,
optimizer,
bce_loss,
metrics,
self.params,
self.dir
)
model.eval()
return model.cpu()
def compute_scores(self, train_pos, disease):
""" Compute the scores predicted by GCN.
Args:
"""
val_pos = None
# Adjacency: Get sparse representation of ppi_adj
N, _ = self.adjacency.shape
X = torch.zeros(1, N)
X[0, train_pos] = 1
if self.params["cuda"]:
X = X.cuda()
if disease.split != self.curr_fold:
if self.params["model_class"] == "LCIEmbModule":
model = LCIEmbModule(self.params["model_args"], self.network)
else:
model = LCIModule(self.params, self.adjacency)
model.load_state_dict(torch.load(self.folds_to_models[disease.split]))
model.eval()
model.cuda()
self.curr_model = model
self.curr_fold = disease.split
Y = self.curr_model(X)
scores = Y.cpu().detach().numpy().squeeze()
return scores
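
# Note: compute_scores() seeds the model with a multi-hot indicator over the
# network nodes (X[0, train_pos] = 1); the fold-specific model propagates
# that seed set and returns one association score per node.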
| 37.459627 | 92 | 0.521472 | 5,699 | 0.944951 | 0 | 0 | 0 | 0 | 0 | 0 | 696 | 0.115404 |
313cc87d19724030ef67c4661d1e5a9c641aaae1 | 5,185 | py | Python | scripts/telegram_bot.py | luigi311/ArBluna | bc65cb1be30339c83699f06f520ed8cb00048abf | [
"MIT"
]
| 18 | 2021-08-21T05:42:40.000Z | 2022-03-16T21:00:56.000Z | scripts/telegram_bot.py | luigi311/ArBluna | bc65cb1be30339c83699f06f520ed8cb00048abf | [
"MIT"
]
| 4 | 2021-08-21T06:05:04.000Z | 2021-12-07T02:23:05.000Z | scripts/telegram_bot.py | luigi311/ArBluna | bc65cb1be30339c83699f06f520ed8cb00048abf | [
"MIT"
]
| 5 | 2021-08-24T08:29:51.000Z | 2022-03-04T17:14:49.000Z | import os
import distutils.util
from telegram import Update
from telegram.ext import Updater, CommandHandler, Filters, CallbackContext
from dotenv import load_dotenv
from scripts.get_info import get_ratio
from scripts.terra import get_balances, execute_swap
load_dotenv(override=True)
notify_telegram = bool(distutils.util.strtobool(os.getenv("NOTIFY_TELEGRAM")))
if notify_telegram:
telegram_chat_id = int(os.getenv("TELEGRAM_CHAT_ID"))
token = os.getenv("TELEGRAM_TOKEN")
def ping_command(update: Update, context: CallbackContext) -> None:
"""Send a message when the command /ping is issued."""
update.message.reply_text("pong")
def help_command(update: Update, context: CallbackContext) -> None:
"""Send list of commands when /help is issued."""
update.message.reply_text(
"Commands:\n/ping check if thebot is online\n/luna get the bluna -> luna ratio\n/bluna get the luna -> bluna ratio\n/ust get the ust ratio\n/balance get the balances\n/swap_to_bluna_command to force a swap from luna to bluna\n/swap_to_luna_command to force a swap from bluna to luna"
)
def bluna_command(update: Update, context: CallbackContext) -> None:
"""Send the current luna to bluna ratio."""
luna_balance, bluna_balance, ust_balance = get_balances(notify_balance=False)
bluna_price = get_ratio("bluna", luna_balance)
update.message.reply_text(f"Luna -> bLuna ratio: {bluna_price}")
def luna_command(update: Update, context: CallbackContext) -> None:
"""Send the current luna to bluna ratio."""
luna_balance, bluna_balance, ust_balance = get_balances(notify_balance=False)
bluna_price = get_ratio("luna", bluna_balance)
update.message.reply_text(f"bLuna -> Luna ratio: {bluna_price}")
def ust_command(update: Update, context: CallbackContext) -> None:
"""Send the current luna to bluna ratio."""
luna_balance, bluna_balance, ust_balance = get_balances(notify_balance=False)
ust_price = get_ratio("ust", luna_balance)
update.message.reply_text(f"Luna -> UST price: {ust_price}")
def balance_command(update: Update, context: CallbackContext) -> None:
"""Send the current balances of the account."""
get_balances()
def swap_to_bluna_command(update: Update, context: CallbackContext) -> None:
"""Force swap to bluna."""
luna_balance, bluna_balance, ust_balance = get_balances()
price = get_ratio("bluna", luna_balance)
if luna_balance > 0 and ust_balance > 0.15:
execute_swap(luna_balance, "bluna", price)
else:
raise Exception(f"Not enough Luna {luna_balance} or UST {ust_balance}")
def swap_to_luna_command(update: Update, context: CallbackContext) -> None:
"""Force swap to luna."""
luna_balance, bluna_balance, ust_balance = get_balances()
price = get_ratio("luna", bluna_balance)
if bluna_balance > 0 and ust_balance > 0.15:
execute_swap(bluna_balance, "luna", price)
else:
raise Exception(f"Not enough bLuna {bluna_balance} or UST {ust_balance}")
def setup_bot() -> None:
print("Starting up telegram bot")
try:
# Create the Updater and pass it your bot's token.
updater = Updater(token, use_context=True)
# Get the dispatcher to register handlers
dispatcher = updater.dispatcher
# on different commands - answer in Telegram
dispatcher.add_handler(
CommandHandler(
"help", help_command, filters=Filters.chat(chat_id=telegram_chat_id)
)
)
dispatcher.add_handler(
CommandHandler(
"ping", ping_command, filters=Filters.chat(chat_id=telegram_chat_id)
)
)
dispatcher.add_handler(
CommandHandler(
"bluna", bluna_command, filters=Filters.chat(chat_id=telegram_chat_id)
)
)
dispatcher.add_handler(
CommandHandler(
"luna", luna_command, filters=Filters.chat(chat_id=telegram_chat_id)
)
)
dispatcher.add_handler(
CommandHandler(
"ust", ust_command, filters=Filters.chat(chat_id=telegram_chat_id)
)
)
dispatcher.add_handler(
CommandHandler(
"balance",
balance_command,
filters=Filters.chat(chat_id=telegram_chat_id),
)
)
dispatcher.add_handler(
CommandHandler(
"balances",
balance_command,
filters=Filters.chat(chat_id=telegram_chat_id),
)
)
dispatcher.add_handler(
CommandHandler(
"swap_to_bluna",
swap_to_bluna_command,
filters=Filters.chat(chat_id=telegram_chat_id),
)
)
dispatcher.add_handler(
CommandHandler(
"swap_to_luna",
swap_to_luna_command,
filters=Filters.chat(chat_id=telegram_chat_id),
)
)
# Start the Bot
updater.start_polling()
except Exception as e:
raise Exception(f"Telegram bot error: {e}")
| 34.798658 | 291 | 0.652652 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,212 | 0.233751 |
313ce62aeca2701889c38df68499f25c82820cc8 | 3,179 | py | Python | envs/mujoco/utils/download_meshes.py | hzm2016/assistive-gym-robosuite | 5c529f4444cc386383618bfa584341740a8468f9 | [
"MIT"
]
| 1 | 2021-11-22T07:45:28.000Z | 2021-11-22T07:45:28.000Z | envs/mujoco/utils/download_meshes.py | hzm2016/assistive-gym-robosuite | 5c529f4444cc386383618bfa584341740a8468f9 | [
"MIT"
]
| null | null | null | envs/mujoco/utils/download_meshes.py | hzm2016/assistive-gym-robosuite | 5c529f4444cc386383618bfa584341740a8468f9 | [
"MIT"
]
| null | null | null | import os
import zipfile
import requests
def check_and_download(name, google_id, files=None, force_download=False):
"""
Checks if the meshes folder exists in the xml directory
If not it will ask the user if they want to download them
to be able to proceed
Parameters
----------
name: string
the file or directory to download
google_id: string
the google id that points to the location of the zip file.
This should be stored in the xml or config file
force_download: boolean, Optional (Default: False)
True to skip checking if the file or folder exists
"""
files_missing = False
if force_download:
files_missing = True
else:
# check if the provided name is a file or folder
if not os.path.isfile(name) and not os.path.isdir(name):
print("Checking for mesh files in : ", name)
files_missing = True
elif files is not None:
mesh_files = [
f for f in os.listdir(name) if os.path.isfile(os.path.join(name, f))
]
# files_missing = all(elem in sorted(mesh_files) for elem in sorted(files))
files_missing = set(files).difference(set(mesh_files))
if files_missing:
print("Checking for mesh files in : ", name)
print("The following files are missing: ", files_missing)
if files_missing:
yes = ["y", "yes"]
no = ["n", "no"]
answered = False
question = "Download mesh and texture files to run sim? (y/n): "
while not answered:
reply = str(input(question)).lower().strip()
if reply[0] in yes:
print("Downloading files...")
name = name.split("/")
name = "/".join(s for s in name[:-1])
download_files(google_id, name + "/tmp")
print("Sim files saved to %s" % name)
answered = True
elif reply[0] in no:
raise Exception("Please download the required files to run the demo")
else:
question = "Please Enter (y/n) "
def download_files(google_id, destination):
def _get_confirm_token(response):
for key, value in response.cookies.items():
if key.startswith("download_warning"):
return value
return None
def _save_response_content(response, destination):
CHUNK_SIZE = 32768
with open(destination, "wb") as f:
for chunk in response.iter_content(CHUNK_SIZE):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
def _extract_zip_files(zip_file):
zip_file = "%s" % zip_file
zipball = zipfile.ZipFile(zip_file)
zipball.extractall(zip_file.split("tmp")[0])
zipball.close()
os.remove(zip_file)
URL = "https://docs.google.com/uc?export=download"
session = requests.Session()
response = session.get(URL, params={"id": google_id}, stream=True)
print(response)
token = _get_confirm_token(response)
if token:
params = {"id": google_id, "confirm": token}
response = session.get(URL, params=params, stream=True)
_save_response_content(response, destination)
_extract_zip_files(destination)
if __name__ == "__main__":
check_and_download('/home/zhimin/code/6_all_old_code/0_abr_control/abr_control/arms/',
'1doam-DgkW7OSPnwWZQM84edzX84ot-GK', files=None, force_download=True)
# download_files('1SjWRUl-D1FZ5fB2cy4jF4X9wTsQ5LWzo', '/home/zhimin/code/6_all_old_code/0_abr_control/abr_control/arms')
| 30.27619 | 121 | 0.709343 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,232 | 0.387543 |
313da56284119f917128f140877f55d791f270c7 | 2,188 | py | Python | wagtail/users/models.py | originell/wagtail | d2b02dff00edbd24bfaf971ae0a0d1fa71ea5df4 | [
"BSD-3-Clause"
]
| null | null | null | wagtail/users/models.py | originell/wagtail | d2b02dff00edbd24bfaf971ae0a0d1fa71ea5df4 | [
"BSD-3-Clause"
]
| null | null | null | wagtail/users/models.py | originell/wagtail | d2b02dff00edbd24bfaf971ae0a0d1fa71ea5df4 | [
"BSD-3-Clause"
]
| null | null | null | import os
import uuid
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
def upload_avatar_to(instance, filename):
filename, ext = os.path.splitext(filename)
return os.path.join(
'avatar_images',
'avatar_{uuid}_{filename}{ext}'.format(
uuid=uuid.uuid4(), filename=filename, ext=ext)
)
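

# e.g. (illustrative): upload_avatar_to(profile, "me.png") resolves to
# "avatar_images/avatar_<uuid4>_me.png"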
class UserProfile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='wagtail_userprofile'
)
submitted_notifications = models.BooleanField(
verbose_name=_('submitted notifications'),
default=True,
help_text=_("Receive notification when a page is submitted for moderation")
)
approved_notifications = models.BooleanField(
verbose_name=_('approved notifications'),
default=True,
help_text=_("Receive notification when your page edit is approved")
)
rejected_notifications = models.BooleanField(
verbose_name=_('rejected notifications'),
default=True,
help_text=_("Receive notification when your page edit is rejected")
)
preferred_language = models.CharField(
verbose_name=_('preferred language'),
max_length=10,
help_text=_("Select language for the admin"),
default=''
)
current_time_zone = models.CharField(
verbose_name=_('current time zone'),
max_length=40,
help_text=_("Select your current time zone"),
default=''
)
avatar = models.ImageField(
verbose_name=_('profile picture'),
upload_to=upload_avatar_to,
blank=True,
)
@classmethod
def get_for_user(cls, user):
return cls.objects.get_or_create(user=user)[0]
def get_preferred_language(self):
return self.preferred_language or settings.LANGUAGE_CODE
def get_current_time_zone(self):
return self.current_time_zone or settings.TIME_ZONE
def __str__(self):
return self.user.get_username()
class Meta:
verbose_name = _('user profile')
verbose_name_plural = _('user profiles')
| 28.415584 | 94 | 0.675503 | 1,790 | 0.818099 | 0 | 0 | 100 | 0.045704 | 0 | 0 | 461 | 0.210695 |
313df0c7ff39a3b7b860f162c9978fb31e8f55a5 | 248 | py | Python | python/simulator.py | chongdashu/puzzlescript-analyze | 0dfada4584428f7690e717d317d5396487e1f8d0 | ["MIT"] | 1 | 2016-02-10T14:06:43.000Z | 2016-02-10T14:06:43.000Z | python/simulator.py | chongdashu/puzzlescript-analyze | 0dfada4584428f7690e717d317d5396487e1f8d0 | ["MIT"] | null | null | null | python/simulator.py | chongdashu/puzzlescript-analyze | 0dfada4584428f7690e717d317d5396487e1f8d0 | ["MIT"] | null | null | null |
__author__ = 'Chong-U Lim, [email protected]'
import uinput
class Simulator(object):

    def __init__(self):
        pass

    def test1(self):
        # needs the uinput kernel module loaded and write access to /dev/uinput
        device = uinput.Device([uinput.KEY_E, uinput.KEY_H, uinput.KEY_L, uinput.KEY_O])
        device.emit_click(uinput.KEY_H)
| 11.809524 | 82 | 0.701613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.112903 |
313e16f482043a4d9b63143c96c69a32c7de5ed4 | 4,898 | py | Python | PRESUBMIT.py | oneumyvakin/catapult | 69d05ea70e77998c86149ac78044470e3ca364ad | ["BSD-3-Clause"] | null | null | null | PRESUBMIT.py | oneumyvakin/catapult | 69d05ea70e77998c86149ac78044470e3ca364ad | ["BSD-3-Clause"] | null | null | null | PRESUBMIT.py | oneumyvakin/catapult | 69d05ea70e77998c86149ac78044470e3ca364ad | ["BSD-3-Clause"] | null | null | null |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for catapult.
See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
import re
import sys
_EXCLUDED_PATHS = (
r'(.*[\\/])?\.git[\\/].*',
r'.+\.png$',
r'.+\.svg$',
r'.+\.skp$',
r'.+\.gypi$',
r'.+\.gyp$',
r'.+\.gn$',
r'.*\.gitignore$',
r'.*codereview.settings$',
r'.*AUTHOR$',
r'^CONTRIBUTORS\.md$',
r'.*LICENSE$',
r'.*OWNERS$',
r'.*README\.md$',
r'^dashboard[\\/]dashboard[\\/]api[\\/]examples[\\/].*.js',
r'^dashboard[\\/]dashboard[\\/]templates[\\/].*',
r'^experimental[\\/]heatmap[\\/].*',
r'^experimental[\\/]trace_on_tap[\\/]third_party[\\/].*',
r'^perf_insights[\\/]test_data[\\/].*',
r'^perf_insights[\\/]third_party[\\/].*',
r'^telemetry[\\/]third_party[\\/].*',
r'^third_party[\\/].*',
r'^tracing[\\/]\.allow-devtools-save$',
r'^tracing[\\/]bower\.json$',
r'^tracing[\\/]\.bowerrc$',
r'^tracing[\\/]tracing_examples[\\/]string_convert\.js$',
r'^tracing[\\/]test_data[\\/].*',
r'^tracing[\\/]third_party[\\/].*',
r'^py_vulcanize[\\/]third_party[\\/].*',
r'^common/py_vulcanize[\\/].*', # TODO(hjd): Remove after fixing long lines.
)
_GITHUB_BUG_ID_RE = re.compile(r'#[1-9]\d*')
_MONORAIL_BUG_ID_RE = re.compile(r'[1-9]\d*')
_MONORAIL_PROJECT_NAMES = frozenset({'chromium', 'v8', 'angleproject'})
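# A well-formed description line, per the checks below, looks like:
#   Bug: catapult:#1234, chromium:567890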
def CheckChangeLogBug(input_api, output_api):
if not input_api.change.issue:
# If there is no change issue, there won't be a bug yet. Skip the check.
return []
# Show a presubmit message if there is no Bug line or an empty Bug line.
if not input_api.change.BugsFromDescription():
return [output_api.PresubmitNotifyResult(
'If this change has associated bugs on GitHub or Monorail, add a '
'"Bug: <bug>(, <bug>)*" line to the patch description where <bug> can '
'be one of the following: catapult:#NNNN, ' +
', '.join('%s:NNNNNN' % n for n in _MONORAIL_PROJECT_NAMES) + '.')]
# Check that each bug in the BUG= line has the correct format.
error_messages = []
catapult_bug_provided = False
for index, bug in enumerate(input_api.change.BugsFromDescription()):
# Check if the bug can be split into a repository name and a bug ID (e.g.
# 'catapult:#1234' -> 'catapult' and '#1234').
bug_parts = bug.split(':')
if len(bug_parts) != 2:
error_messages.append('Invalid bug "%s". Bugs should be provided in the '
'"<project-name>:<bug-id>" format.' % bug)
continue
project_name, bug_id = bug_parts
if project_name == 'catapult':
if not _GITHUB_BUG_ID_RE.match(bug_id):
error_messages.append('Invalid bug "%s". Bugs in the Catapult '
'repository should be provided in the '
'"catapult:#NNNN" format.' % bug)
catapult_bug_provided = True
elif project_name in _MONORAIL_PROJECT_NAMES:
if not _MONORAIL_BUG_ID_RE.match(bug_id):
error_messages.append('Invalid bug "%s". Bugs in the Monorail %s '
'project should be provided in the '
'"%s:NNNNNN" format.' % (bug, project_name,
project_name))
else:
error_messages.append('Invalid bug "%s". Unknown repository "%s".' % (
bug, project_name))
  return [output_api.PresubmitError(m) for m in error_messages]
def CheckChange(input_api, output_api):
results = []
try:
sys.path += [input_api.PresubmitLocalPath()]
from catapult_build import bin_checks
from catapult_build import html_checks
from catapult_build import js_checks
from catapult_build import repo_checks
results += input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS)
results += input_api.RunTests(
input_api.canned_checks.CheckVPythonSpec(input_api, output_api))
results += CheckChangeLogBug(input_api, output_api)
results += js_checks.RunChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS)
results += html_checks.RunChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS)
results += repo_checks.RunChecks(input_api, output_api)
results += bin_checks.RunChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS)
finally:
sys.path.remove(input_api.PresubmitLocalPath())
return results
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api)
| 37.676923 | 80 | 0.638424 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,140 | 0.436913 |
313f2defc3a370222bbd7525f5b1756e0fdd639e | 1,454 | py | Python | exam_at_home/2/boolean_expression.py | jamie-jjd/110_spring_IDS | 7f15c0c73b9d663373b791b9ddcc836957dcc3d2 | ["MIT"] | 2 | 2022-02-21T10:37:22.000Z | 2022-03-02T01:43:30.000Z | exam_at_home/2/boolean_expression.py | jamie-jjd/110_spring_IDS | 7f15c0c73b9d663373b791b9ddcc836957dcc3d2 | ["MIT"] | null | null | null | exam_at_home/2/boolean_expression.py | jamie-jjd/110_spring_IDS | 7f15c0c73b9d663373b791b9ddcc836957dcc3d2 | ["MIT"] | 3 | 2022-02-21T05:06:19.000Z | 2022-03-27T07:58:11.000Z |
# author: jamie
# email: [email protected]
def Priority (c):
if c == '&': return 3
elif c == '|': return 2
elif c == '^': return 1
elif c == '(': return 0
def InfixToPostfix (infix, postfix):
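  # shunting-yard conversion: operands go straight to the output list, while an
  # operator first pops every stacked operator of equal or higher priority
  # ('&' binds tightest, then '|', then '^')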
stack = []
for c in infix:
if c == '(':
stack.append('(')
elif c == ')':
while stack[-1] != '(':
postfix.append(stack.pop())
stack.pop()
elif c == '&' or c == '|' or c == '^':
while len(stack) and Priority(c) <= Priority(stack[-1]):
postfix.append(stack.pop())
stack.append(c)
else:
postfix.append(c)
while len(stack):
postfix.append(stack.pop())
def Evaluate (postfix, value):
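  # stack-based evaluation: '0'/'1' literals and A-Z variables push a bit,
  # operators pop two operands and push the result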
stack = []
for c in postfix:
if c == '&' or c == '|' or c == '^':
rhs = stack.pop()
lhs = stack.pop()
if c == '&': stack.append(lhs & rhs)
elif c == '|': stack.append(lhs | rhs)
elif c == '^': stack.append(lhs ^ rhs)
elif c == '1' or c == '0':
stack.append(ord(c) - ord('0'))
else:
stack.append(value[ord(c) - ord('A')])
return stack.pop()
if __name__ == "__main__":
infix = input()
T = int(input())
for _ in range(T):
value = list(map(int, input().split()))
postfix = []
InfixToPostfix(infix, postfix)
    print(Evaluate(postfix, value))
| 29.673469 | 68 | 0.462861 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 121 | 0.083219 |
313fe3ae0a54054320169a34676d7ed8d2ac4692 | 203 | py | Python | workoutlog/workout/admin.py | michaelrodgers/itc172_final | b71f25a5cbffab00b06c60c8816f339d169d9dc1 | ["Apache-2.0"] | null | null | null | workoutlog/workout/admin.py | michaelrodgers/itc172_final | b71f25a5cbffab00b06c60c8816f339d169d9dc1 | ["Apache-2.0"] | null | null | null | workoutlog/workout/admin.py | michaelrodgers/itc172_final | b71f25a5cbffab00b06c60c8816f339d169d9dc1 | ["Apache-2.0"] | null | null | null |
from django.contrib import admin
from .models import Target, Exercise, Workout
# Register your models here.
admin.site.register(Target)
admin.site.register(Exercise)
admin.site.register(Workout)
| 25.375 | 46 | 0.783251 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 29 | 0.142857 |
31416eaefbae7d0e56e7b72dee697c6ec5ef4bc8 | 1,015 | py | Python | ciclo.py | BeltetonJosue96/Ejercicio3Python | 2ee1584be1b1f1a096d9793a581013b1303feec4 | ["CC0-1.0"] | null | null | null | ciclo.py | BeltetonJosue96/Ejercicio3Python | 2ee1584be1b1f1a096d9793a581013b1303feec4 | ["CC0-1.0"] | null | null | null | ciclo.py | BeltetonJosue96/Ejercicio3Python | 2ee1584be1b1f1a096d9793a581013b1303feec4 | ["CC0-1.0"] | null | null | null |
class Ciclo:
    def __init__(self):
        self.cicloNew = ()
        self.respu = ()
        self.a = ()
        self.b = ()
        self.c = ()

    def nuevoCiclo(self):
        cicloNew = []
        print(" ")
        print("Cycle registration form")
        print("-----------------------------------")
        respu = input("Do you want to register a cycle? (Y/N): ")
        while respu == "Y" or respu == "y":
            print("Enter the semester number (1 or 2): ")
            a = int(input())
            print("Enter the year: ")
            b = int(input())
            cicloNew.append((a, b))
            respu = input("Do you want to register another cycle? (Y/N): ")
        print(" ")
        print("Saved data")
        print("-----------------------------------")
        for x in range(len(cicloNew)):
            print("[Semester number: ", cicloNew[x][0], "] [year: ", cicloNew[x][1], "]")
        print(" ")
        print(" ")
        print(" ")
        return None
Ciclo().nuevoCiclo()
| 31.71875 | 90 | 0.429557 | 997 | 0.97841 | 0 | 0 | 0 | 0 | 0 | 0 | 319 | 0.313052 |
3143940a1c35677273eb448afe16a887f6eec384 | 4,997 | py | Python | timeglass.py | mountwebs/timeglass | 2f527e190cb90199cdd3a29ea7625e1f561fe01c | ["MIT"] | 110 | 2020-05-07T12:00:28.000Z | 2022-03-05T17:52:13.000Z | timeglass.py | mountwebs/timeglass | 2f527e190cb90199cdd3a29ea7625e1f561fe01c | ["MIT"] | 4 | 2020-05-07T12:01:21.000Z | 2021-01-01T14:23:59.000Z | timeglass.py | mountwebs/timeglass | 2f527e190cb90199cdd3a29ea7625e1f561fe01c | ["MIT"] | 6 | 2020-05-07T16:16:53.000Z | 2021-11-10T02:42:00.000Z |
import rumps
import sys
import icon_manager
from datetime import timedelta
import timekeeper
import os
# pyinstaller --onefile -w --add-data "Icons/:Icons" --icon="Icons/timeglass.png" --clean timeglass.spec
# rumps.debug_mode(True)
class TimerApp(rumps.App):
def __init__(self, initial_seconds):
super(TimerApp, self).__init__("")
self.mode = "hourglass"
self.timekeeper = timekeeper.Timer(initial_seconds)
self.template = True
self.im = icon_manager.Icon_manager(initial_seconds)
self.change_icon()
self.remaining_sec = rumps.MenuItem(self.timekeeper.get_remaining_string())
self.menu = [self.remaining_sec]
self.next_icon_change = self.im.icon_interval
self.rumps_timer = rumps.Timer(self.tick,0.5)
self.rumps_timer.callback(self.tick)
self.invert_counter = 0
self.notified = False
self.sound = True
def change_icon(self):
print("frame:", self.im.icon_counter)
self.icon = self.im.get_icon_path()
def change_remaining(self):
self.remaining_sec.title = self.timekeeper.get_remaining_string()
def tick(self, _):
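        # runs every 0.5 s via rumps.Timer: advance the clock, refresh the menu
        # label, step the icon animation, and flash the icon / notify when done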
if self.timekeeper.tick():
self.notDone = True
self.invert_counter = 0
self.change_remaining()
if self.timekeeper.elapsed >= self.next_icon_change:
self.im.icon_counter = int(self.timekeeper.elapsed/self.im.icon_interval) + 1 #1-89
self.change_icon()
self.next_icon_change += self.im.icon_interval
if self.timekeeper.done:
self.im.active = False
self.change_icon()
if not self.notified:
self.notify()
self.notified = True
if self.notDone:
self.icon = self.im.invert()
self.invert_counter += 1
if self.invert_counter > 5:
self.notDone = False
self.rumps_timer.stop()
self.reset()
def notify(self):
title = "Time is up!"
text = ""
sound = "Glass"
try:
if self.sound:
os.system("""osascript -e 'display notification "{}" with title "{}" sound name "{}"'""".format(text, title, sound))
else:
                os.system("""osascript -e 'display notification "{}" with title "{}"'""".format(text, title))
except:
print("Could not send notification")
@rumps.clicked("Start", key="s")
def pause(self, sender):
if sender.title == "Pause":
self.timekeeper.pause_timer()
self.rumps_timer.stop()
sender.title = "Start"
elif sender.title == "Start":
self.timekeeper.start()
self.im.active = True
self.change_icon()
self.rumps_timer.start()
sender.title = "Pause"
@rumps.clicked("Reset", key="r")
def reset_button(self, sender):
self.reset()
self.menu["Start"].title = "Start"
def reset(self):
self.timekeeper.reset()
self.rumps_timer.stop()
self.im.active = False
self.im.reset()
self.change_icon()
self.change_remaining()
self.next_icon_change = self.im.icon_interval
self.menu["Start"].title = "Start"
self.notified = False
def string_to_sec(self, text):
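        # parse "h:m:s" (also "m:s" or plain "s"): reversing the fields puts
        # seconds at index 0, so field i contributes 60**i * value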
nums = text.split(":")
nums.reverse()
seconds = 0
for i,n in enumerate(nums):
if i == 0:
seconds += int(n)
else:
seconds += (60**i) * int(n)
                print((60**i) * int(n))
return seconds
def validate_input(self, text):
texts = text.split(":")
if len(texts)>3: return False
for s in texts:
try:
int(s)
except:
return False
return True
@rumps.clicked("Set time", key="t")
def set_time(self, _):
self.timekeeper.pause_timer()
response = rumps.Window("Enter time: (hours:minutes:seconds)").run()
if response.clicked:
if not self.validate_input(response.text):
skip = True
rumps.alert("Does not compute! Please try again.")
else:
seconds = self.string_to_sec(response.text)
print(seconds)
skip = False
if not skip:
self.rumps_timer.stop()
self.timekeeper.set_time(seconds)
self.im.set_icon_interval(seconds)
self.im.reset()
self.im.active = False
self.next_icon_change = self.im.icon_interval
self.change_icon()
self.change_remaining()
self.menu["Start"].title = "Start"
if __name__ == "__main__":
    default_seconds = 60 * 60
    TimerApp(default_seconds).run()
| 33.092715 | 132 | 0.553932 | 4,664 | 0.93336 | 0 | 0 | 1,474 | 0.294977 | 0 | 0 | 538 | 0.107665 |
3143e4df394889222436d2c1bdb781765f3da6bd | 882 | py | Python | setup.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | ["MIT"] | 5 | 2021-07-05T18:03:45.000Z | 2021-07-16T09:31:02.000Z | setup.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | ["MIT"] | null | null | null | setup.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | ["MIT"] | null | null | null |
import setuptools
from discord.ext.commands import Bot
from Flare import Flare
bot = Bot("~~")
bot.add_cog(Flare(bot))
@bot.command("ping")
async def ping_pong(ctx):
ctx.send("pong")
bot.run(os.environ.get("BOT_TOKEN"))
| 13.117647 | 36 | 0.695067 | 0 | 0 | 0 | 0 | 67 | 0.300448 | 46 | 0.206278 | 27 | 0.121076 |
31449da74730870d929a003f3070a25b350f24be | 1,869 | py | Python | utils/logger.py | huangxd-/BTC-ISMIR19 | cc2df75de9aba9af1c240a243ca167b1a8b07f7b | [
"MIT"
]
| 82 | 2019-07-04T06:12:15.000Z | 2022-03-07T06:47:42.000Z | utils/logger.py | sungbohsun/b19 | 8d376c4284bd843664d81e3602d6e3e106d23cd4 | [
"MIT"
]
| 4 | 2020-01-28T01:47:24.000Z | 2021-05-21T16:45:39.000Z | utils/logger.py | sungbohsun/b19 | 8d376c4284bd843664d81e3602d6e3e106d23cd4 | [
"MIT"
]
| 13 | 2019-10-12T04:34:38.000Z | 2022-01-03T02:13:31.000Z | import logging
import os
import sys
import time
project_name = os.getcwd().split('/')[-1]
_logger = logging.getLogger(project_name)
_logger.addHandler(logging.StreamHandler())
def _log_prefix():
# Returns (filename, line number) for the stack frame.
def _get_file_line():
# pylint: disable=protected-access
# noinspection PyProtectedMember
f = sys._getframe()
# pylint: enable=protected-access
our_file = f.f_code.co_filename
f = f.f_back
while f:
code = f.f_code
if code.co_filename != our_file:
return code.co_filename, f.f_lineno
f = f.f_back
return '<unknown>', 0
# current time
now = time.time()
now_tuple = time.localtime(now)
now_millisecond = int(1e3 * (now % 1.0))
# current filename and line
filename, line = _get_file_line()
basename = os.path.basename(filename)
s = '%02d-%02d %02d:%02d:%02d.%03d %s:%d] ' % (
now_tuple[1], # month
now_tuple[2], # day
now_tuple[3], # hour
now_tuple[4], # min
now_tuple[5], # sec
now_millisecond,
basename,
line)
return s
def logging_verbosity(verbosity=0):
_logger.setLevel(verbosity)
def debug(msg, *args, **kwargs):
_logger.debug('D ' + project_name + ' ' + _log_prefix() + msg, *args, **kwargs)
def info(msg, *args, **kwargs):
_logger.info('I ' + project_name + ' ' + _log_prefix() + msg, *args, **kwargs)
def warn(msg, *args, **kwargs):
_logger.warning('W ' + project_name + ' ' + _log_prefix() + msg, *args, **kwargs)
def error(msg, *args, **kwargs):
_logger.error('E ' + project_name + ' ' + _log_prefix() + msg, *args, **kwargs)
def fatal(msg, *args, **kwargs):
_logger.fatal('F ' + project_name + ' ' + _log_prefix() + msg, *args, **kwargs)
| 25.60274 | 85 | 0.594971 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 310 | 0.165864 |
3144e6270af5c5cebfce71812937eeb79f014048 | 882 | py | Python | setup.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | [
"MIT"
]
| 5 | 2021-07-05T18:03:45.000Z | 2021-07-16T09:31:02.000Z | setup.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | [
"MIT"
]
| null | null | null | setup.py | VNOpenAI/OpenControl | 0087408c57bc77f34f524b28f8c4363b116700bb | [
"MIT"
]
| null | null | null | import setuptools
ver = {}
with open('OpenControl/_version.py') as fd:
exec(fd.read(), ver)
version = ver.get('__version__')
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="OpenControl",
version=version,
author="VNOpenAI",
author_email="[email protected]",
description="A python control systems package",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://opencontrol.readthedocs.io/en/latest/",
project_urls={
"Bug Tracker": "https://github.com/VNOpenAI/OpenControl/issues",
},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
packages=setuptools.find_packages(),
python_requires=">=3.7",
) | 29.4 | 72 | 0.659864 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 377 | 0.427438 |
31462c4ba41af6d515b3c86cd2798c8806f3724f | 65,264 | py | Python | ismore/plants.py | DerekYJC/bmi_python | 7b9cf3f294a33688db24b0863c1035e9cc6999ea | ["Apache-2.0"] | null | null | null | ismore/plants.py | DerekYJC/bmi_python | 7b9cf3f294a33688db24b0863c1035e9cc6999ea | ["Apache-2.0"] | null | null | null | ismore/plants.py | DerekYJC/bmi_python | 7b9cf3f294a33688db24b0863c1035e9cc6999ea | ["Apache-2.0"] | null | null | null |
'''See the shared Google Drive documentation for an inheritance diagram that
shows the relationships between the classes defined in this file.
'''
import numpy as np
import socket
import time
from riglib import source
from ismore import settings, udp_feedback_client
import ismore_bmi_lib
from utils.constants import *
#import armassist
#import rehand
from riglib.filter import Filter
from riglib.plants import Plant
import os
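# Minimal usage sketch (hypothetical wiring -- the real tasks configure plants
# through the ismore_bmi_lib state-space models and the BMI task classes):
#
#     plant = ArmAssistPlantUDP()
#     plant.init()     # register the feedback DataSource with the sink manager
#     plant.start()    # begin streaming sensor packets over UDP
#     plant.send_vel(np.array([1., 0., 0.]))   # [cm/s, cm/s, rad/s]
#     plant.stop()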
class BasePlantUDP(Plant):
'''
Common UDP interface for the ArmAssist/ReHand
'''
debug = 0
sensor_data_timeout = 1 # seconds. if this number of seconds has passed since sensor data was received, velocity commands will not be sent
lpf_vel = 0
# define in subclasses!
ssm_cls = None
addr = None
feedback_data_cls = None
data_source_name = None
n_dof = None
blocking_joints = None
safety_grid = None
feedback_str = ''
def __init__(self, *args, **kwargs):
self.source = source.DataSource(self.feedback_data_cls, bufferlen=5, name=self.data_source_name)
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # used only for sending
ssm = self.ssm_cls()
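        # cache the state-space indices once, so drive()/send_vel() can slice
        # position/velocity vectors without re-scanning the state list each cycle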
self.pos_state_names = [s.name for s in ssm.states if s.order == 0]
self.vel_state_names = [s.name for s in ssm.states if s.order == 1]
self.aa_xy_ix = [i for i, j in enumerate(ssm.states) if j.name in ['aa_px', 'aa_py']]
self.aa_psi_ix = [i for i, j in enumerate(ssm.states) if j.name == 'aa_ppsi']
self.rh_pron_ix = [i for i, j in enumerate(ssm.states) if j.name == 'rh_pprono']
self.rh_pfings = [(i, j.name) for i, j in enumerate(ssm.states) if j.name in ['rh_pthumb', 'rh_pindex', 'rh_pfing3']]
self.drive_velocity_raw = np.zeros((len(self.vel_state_names),))
self.drive_velocity_raw_fb_gain = np.zeros((len(self.vel_state_names),))
self.drive_velocity_sent = np.zeros((len(self.vel_state_names),))
self.drive_velocity_sent_pre_safety = np.zeros((len(self.vel_state_names),))
self.pre_drive_state = np.zeros((len(self.vel_state_names), ))
# low-pass filters to smooth out command velocities
# from scipy.signal import butter
# b, a = butter(5, 0.1) # fifth order, 2 Hz bandpass (assuming 10 Hz update rate)
#omega, H = signal.freqz(b, a)
#plt.figure()
#plt.plot(omega/np.pi, np.abs(H))
# self.vel_command_lpfs = [None] * self.n_dof
# for k in range(self.n_dof):
# self.vel_command_lpfs[k] = Filter(b=b, a=a)
# self.last_sent_vel = np.ones(self.n_dof) * np.nan
# calculate coefficients for a 4th-order Butterworth LPF at 1.5 Hz for kinematic data received from the exo
# fs_synch = 20 #Frequency at which emg and kin data are synchronized
# nyq = 0.5 * fs_synch
# cuttoff_freq = 1.5 / nyq
# bpf_kin_coeffs = butter(4, cuttoff_freq, btype='low')
# self.pos_filt = [None] * self.n_dof
# for k in range(self.n_dof):
# self.pos_filt[k] = Filter(bpf_kin_coeffs[0], bpf_kin_coeffs[1])
def init(self):
from riglib import sink
sink.sinks.register(self.source)
def start(self):
# only start this DataSource after it has been registered with
# the SinkManager singleton (sink.sinks) in the call to init()
self.source.start()
self.ts_start_data = time.time()
def stop(self):
# send a zero-velocity command
self._send_command('SetSpeed %s %s\r' % (self.plant_type, self.pack_vel(np.zeros(self.n_dof))))
self.source.stop()
self.feedback_file.close()
def last_data_ts_arrival(self):
return self.source.read(n_pts=1)['ts_arrival'][0]
def _send_command(self, command):
self.sock.sendto(command, self.addr)
def pack_vel(self, vel):
format_str = "%f " * self.n_dof
return format_str % tuple(vel)
def send_vel(self, vel):
assert len(vel) == self.n_dof
vel = vel.copy()
vel *= self.vel_gain # change the units of the velocity, if necessary
self.last_sent_vel = vel
#command_vel is already fitlered at the task level, no need to filter it again.
#self.last_sent_vel = filt_vel = np.array([self.vel_command_lpfs[k](vel[k]) for k in range(self.n_dof)]).ravel()
if all(v <= 0.00000001 for v in abs(self.last_sent_vel)):
print ('last sent vel')
print (self.last_sent_vel)
        if (self.last_data_ts_arrival() == 0) or ((time.time() - self.last_data_ts_arrival()) > self.sensor_data_timeout):
print ("sensor data not received for %s recently enough, not sending velocity command!" % self.plant_type)
return
        # squash any velocities which would take joints outside of the rectangular bounding box
        current_pos = self.get_pos() * self.vel_gain
        projected_pos = current_pos + vel * 0.1
        max_reached, = np.nonzero((projected_pos > self.max_pos_vals) * (vel > 0))
        min_reached, = np.nonzero((projected_pos < self.min_pos_vals) * (vel < 0))
        vel[max_reached] = 0
        vel[min_reached] = 0

        # clamp to the max speed limits *before* sending, so the limited values
        # are what actually go out on the wire (the single send happens below)
        faster_than_max_speed, = np.nonzero(np.abs(vel) > self.max_speed)
        vel[faster_than_max_speed] = self.max_speed[faster_than_max_speed] * np.sign(vel[faster_than_max_speed])
if self.debug:
print ("input vel")
print (vel)
print ("vel sent to %s" % self.plant_type)
print (vel)
print ("current_pos")
print (current_pos)
print ("projected_pos")
print (projected_pos)
print ("actual velocity")
print (self.get_vel())
if self.lpf_vel:
# squash any velocities which would take joints outside of the rectangular bounding box
current_pos = self.get_pos() * self.vel_gain
projected_pos = current_pos + vel * (1.0/20)
max_reached, = np.nonzero((projected_pos > self.max_pos_vals) * (vel > 0))
min_reached, = np.nonzero((projected_pos < self.min_pos_vals) * (vel < 0))
vel[max_reached] = 0
vel[min_reached] = 0
# set max speed limts
faster_than_max_speed, = np.nonzero(np.abs(vel) > self.max_speed)
vel[faster_than_max_speed] = self.max_speed[faster_than_max_speed] * np.sign(vel[faster_than_max_speed])
            if len(faster_than_max_speed) > 0:
                print ('faster_than_max_speed')
                print (faster_than_max_speed)
if self.debug:
print ("input vel")
print (vel)
print ("vel sent to %s" % self.plant_type)
print (vel)
#print "current_pos"
#print current_pos
#print "projected_pos"
#print projected_pos
#print "actual velocity"
#print self.get_vel()
self._send_command('SetSpeed %s %s\r' % (self.plant_type, self.pack_vel(vel)))
else:
self._send_command('SetSpeed %s %s\r' % (self.plant_type, self.pack_vel(vel)))
# def get_pos(self):
# # udp_feedback_client takes care of converting sensor data to cm or rad, as appropriate for the DOF
# return np.array(tuple(self.source.read(n_pts=1)['data'][self.pos_state_names][0]))
def drive(self, decoder):
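        # take the decoded velocity, zero any blocked joints, clamp commands
        # that would leave the safety grid, send the rest, and write the
        # measured position back into the decoder state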
vel = decoder['qdot']
vel_bl = vel.copy()
feedback_str = ''
if self.blocking_joints is not None:
vel_bl[self.blocking_joints] = 0
if self.safety_grid is not None:
#If the next position is outside of safety then damp velocity to only go to limit:
pos_pred = decoder['q'] + 0.1*vel_bl
#Make sure predicted AA PX, AA PY within bounds:
xy_change = True
if len(self.aa_xy_ix) > 0:
if self.safety_grid.is_valid_pos(pos_pred[self.aa_xy_ix]) is False:
#If not, make their velocity zero:
vel_bl[self.aa_xy_ix] = 0
xy_change = False
feedback_str = feedback_str+ ' stopping xy from moving'
else:
xy_change = False
# Make sure AA Psi within bounds:
if len(self.aa_psi_ix) > 0:
# If X/Y ok
if xy_change:
mn, mx = self.safety_grid.get_minmax_psi(pos_pred[self.aa_xy_ix])
# If x/y not ok:
else:
mn, mx = self.safety_grid.get_minmax_psi(decoder['q'][self.aa_xy_ix])
# Set psi velocity :
if np.logical_and(pos_pred[self.aa_psi_ix] >= mn, pos_pred[self.aa_psi_ix] <= mx):
pass
else:
vel_bl[self.aa_psi_ix] = 0
feedback_str = feedback_str+ 'stopping psi'
# Make sure RH Prono within bounds (if SSM is only RH, use settings.starting_pos for AAPX, AAPY)
if len(self.rh_pron_ix) > 0:
# If X/Y ok
if xy_change:
mn, mx = self.safety_grid.get_minmax_prono(pos_pred[self.aa_xy_ix])
# If x/y not ok or not moving bc not part of state pace :
else:
if len(self.aa_xy_ix) > 0:
mn, mx = self.safety_grid.get_minmax_prono(decoder['q'][self.aa_xy_ix])
else:
mn, mx = self.safety_grid.get_minmax_prono(settings.starting_pos['aa_px'], settings.starting_pos['aa_py'])
# Set prono velocity :
if np.logical_and(pos_pred[self.rh_pron_ix] >= mn, pos_pred[self.rh_pron_ix] <= mx):
pass
else:
vel_bl[self.rh_pron_ix] = 0
feedback_str = feedback_str+ 'stopping prono'
# Assure RH fingers are within range:
if len(self.rh_pfings) > 0:
for i, (ix, nm) in enumerate(self.rh_pfings):
mn, mx = self.safety_grid.get_rh_minmax(nm)
if np.logical_and(pos_pred[ix] >= mn, pos_pred[ix] <= mx):
pass
else:
vel_bl[ix] = 0
feedback_str = feedback_str+ 'stopping rh fings'
self.feedback_str = feedback_str
self.drive_velocity = vel_bl
self.send_vel(vel_bl)
decoder['q'] = self.get_pos()
def write_feedback(self):
pos_vel = [str(i) for i in np.hstack(( self.get_pos(), self.get_vel() )) ]
#self.feedback_file.write(','.join(pos_vel)+'\n')
if self.feedback_str != '':
self.feedback_file.write(self.feedback_str+ time.ctime() + '\n')
class ArmAssistPlantUDP(BasePlantUDP):
'''Sends velocity commands and receives feedback over UDP. Can be used
with either the real or simulated ArmAssist.
'''
ssm_cls = ismore_bmi_lib.StateSpaceArmAssist
addr = settings.ARMASSIST_UDP_SERVER_ADDR
feedback_data_cls = udp_feedback_client.ArmAssistData
data_source_name = 'armassist'
n_dof = 3
plant_type = 'ArmAssist'
vel_gain = np.array([cm_to_mm, cm_to_mm, rad_to_deg]) # convert units to: [mm/s, mm/s, deg/s]
max_pos_vals = np.array([np.inf, np.inf, np.inf])
min_pos_vals = np.array([-np.inf, -np.inf, -np.inf])
max_speed = np.array([np.inf, np.inf, np.inf])
feedback_file = open(os.path.expandvars('$HOME/code/bmi3d/log/armassist.txt'), 'w')
#max_speed = np.array([40, 60, 20]) # in mm/s and deg/s
#max_speed = np.array([60, 80, 50]) # in mm/s and deg/s
#parameters for kinematics low-pass filtering
from scipy.signal import butter, lfilter
from ismore.filter import Filter
fs_synch = 25 #Frequency at which emg and kin data are synchronized
nyq = 0.5 * fs_synch
cuttoff_freq = 1.5 / nyq
bpf_kin_coeffs = butter(2, cuttoff_freq, btype='low')
n_dof = 3
vel_filter = [None] * n_dof
for k in range(n_dof):
vel_filter[k] = Filter(bpf_kin_coeffs[0], bpf_kin_coeffs[1])
n_getpos_iter= 0
def __init__(self, *args, **kwargs):
super(ArmAssistPlantUDP, self).__init__(*args, **kwargs)
def set_pos_control(self): # position control with global reference system
self._send_command('SetControlMode ArmAssist Position')
def set_global_control(self): #velocity control with global reference system
self._send_command('SetControlMode ArmAssist Global')
def set_trajectory_control(self): #trajectory control with global reference system
self._send_command('SetControlMode ArmAssist Trajectory')
def send_vel(self, vel):
vel = vel.copy()
# units of vel should be: [cm/s, cm/s, rad/s]
assert len(vel) == self.n_dof
# convert units to: [mm/s, mm/s, deg/s] to send them through UDP to the ArmAssist application
vel[0] *= cm_to_mm
vel[1] *= cm_to_mm
vel[2] *= rad_to_deg
# set max speed limts
faster_than_max_speed, = np.nonzero(np.abs(vel) > self.max_speed)
vel[faster_than_max_speed] = self.max_speed[faster_than_max_speed] * np.sign(vel[faster_than_max_speed])
self.debug = True
if self.debug:
# print "vel sent to armassist"
# print vel
            if len(faster_than_max_speed) > 0:
print ('faster_than_max_speed')
print (faster_than_max_speed)
print ("speed set to: ")
print (vel)
self._send_command('SetSpeed ArmAssist %f %f %f\r' % tuple(vel))
# get raw position
def get_pos_raw(self):
# udp_feedback_client takes care of converting sensor data to cm or rad, as appropriate for the DOF
#get the last poitns of data of the armassist and low-pass filter
return np.array(tuple(self.source.read(n_pts=1)['data'][self.pos_state_names][0]))
# get filtered position
def get_pos(self):
return np.array(tuple(self.source.read(n_pts=1)['data_filt'][self.pos_state_names][0]))
# calculate vel from raw position
def get_vel_raw(self):
recent_pos_data = self.source.read(n_pts=2)
pos = recent_pos_data['data'][self.pos_state_names]
ts = recent_pos_data['ts']
delta_pos = np.array(tuple(pos[1])) - np.array(tuple(pos[0]))
delta_ts = ts[1] - ts[0]
vel = delta_pos / delta_ts
#filt_vel = np.array([self.vel_command_lpfs[k](vel[k]) for k in range(self.n_dof)]).ravel() #nerea --> to test!
if ts[0] != 0 and any(np.isnan(v) for v in vel):
print ("WARNING -- delta_ts = 0 in AA vel calculation:", vel)
for i in range(3):
if np.isnan(vel[i]):
vel[i] = 0
return vel
#calculate vel from raw position and filter
def get_vel(self):
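        # velocity = backward difference of the two most recent position samples,
        # then a 2nd-order Butterworth low-pass (1.5 Hz cutoff) to tame sensor noise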
recent_pos_data = self.source.read(n_pts=2)
pos = recent_pos_data['data'][self.pos_state_names]
ts = recent_pos_data['ts']
delta_pos = np.array(tuple(pos[1])) - np.array(tuple(pos[0]))
delta_ts = ts[1] - ts[0]
vel = delta_pos / delta_ts
if ts[0] != 0 and any(np.isnan(v) for v in vel):
print ("WARNING -- delta_ts = 0 in AA vel calculation:", vel)
for i in range(3):
if np.isnan(vel[i]):
vel[i] = 0
        # skip filtering while the velocity still contains NaNs (e.g. around the
        # first sample): once a NaN enters the filter state, every subsequent
        # filtered value would be NaN as well
if np.any(np.isnan(vel)):
self.n_getpos_iter = self.n_getpos_iter +1
vel_filt = vel
else:
vel_filt = np.array([self.vel_filter[k](vel[k]) for k in range(self.n_dof)]).ravel()
return vel_filt
def send_pos(self, pos, time):
pos = pos.copy()
# units of vel should be: [cm/s, cm/s, rad/s]
assert len(pos) == 3
# convert units to: [mm/s, mm/s, deg/s]
pos[0] *= cm_to_mm
pos[1] *= cm_to_mm
pos[2] *= rad_to_deg
# mode 1: the forearm angle (psi) stays the same as it is. mode 2: psi will move according to the determined value
mode = 2
pos_command = np.zeros(5)
pos_command[0] = pos[0]
pos_command[1] = pos[1]
pos_command[2] = pos[2]
pos_command[3] = time
pos_command[4] = mode
print ("pos")
print (pos)
print ("time")
print (time)
self._send_command('SetPosition ArmAssist %f %f %f %f %f\r' % tuple(pos_command))
def enable(self):
self._send_command('SetControlMode ArmAssist Global\r')
def disable(self):
self._send_command('SetControlMode ArmAssist Disable\r')
def enable_watchdog(self, timeout_ms):
print ('ArmAssist watchdog not enabled, doing nothing')
def send_traj(self, pos_vel):
pos_vel = pos_vel.copy()
# units of vel should be: [cm/s, cm/s, rad/s]
assert len(pos_vel) == 6
# units to are alread in [mm/s, mm/s, rad/s]
# convert values to integers to reduce noise
#pos_vel_int = np.rint(pos_vel)
pos_vel_int = pos_vel
print ("trajectory sent to AA")
print ("x y psi vx vy vpsi")
print (pos_vel_int)
traj_command = np.zeros(6)
traj_command[0] = pos_vel_int[0]
traj_command[1] = pos_vel_int[1]
traj_command[2] = pos_vel_int[2]
traj_command[3] = pos_vel_int[3]
traj_command[4] = pos_vel_int[4]
traj_command[5] = pos_vel_int[5]
self._send_command('SetTrajectory ArmAssist %d %d %d %d %d %d\r' % tuple(traj_command))
class DummyPlantUDP(object):
drive_velocity_raw = np.array([0,0,0])
drive_velocity_sent = np.array([0,0,0])
drive_velocity_sent_pre_safety = np.array([0,0,0])
pre_drive_state = np.array([0, 0, 0])
def init(self):
pass
def enable(self):
pass
def start(self):
pass
def stop(self):
pass
def write_feedback(self):
pass
def get_pos_raw(self):
return np.array([0,0,0])
def get_pos(self):
return np.array([0,0,0])
def get_vel_raw(self):
return np.array([0,0,0])
def get_vel(self):
return np.array([0,0,0])
class ReHandPlantUDP(BasePlantUDP):
'''Sends velocity commands and receives feedback over UDP. Can be used
with either the real or simulated ReHand.
'''
ssm_cls = ismore_bmi_lib.StateSpaceReHand
addr = settings.REHAND_UDP_SERVER_ADDR
feedback_data_cls = udp_feedback_client.ReHandData
data_source_name = 'rehand'
n_dof = 4
plant_type = 'ReHand'
vel_gain = np.array([rad_to_deg, rad_to_deg, rad_to_deg, rad_to_deg])
max_pos_vals = np.array([60, 60, 60, 90], dtype=np.float64) # degrees
min_pos_vals = np.array([25, 25, 25, 25], dtype=np.float64) # degrees
max_speed = np.array([np.inf, np.inf, np.inf, np.inf], dtype=np.float64) # degrees/sec
#max_speed = np.array([15., 15., 15., 15.], dtype=np.float64) # degrees/sec
feedback_file = open(os.path.expandvars('$HOME/code/bmi3d/log/rehand.txt'), 'w')
def send_vel(self, vel):
vel = vel.copy()
# units of vel should be: [rad/s, rad/s, rad/s, rad/s]
assert len(vel) == self.n_dof
# convert units to: [deg/s, deg/s, deg/s, deg/s]
vel *= rad_to_deg
#filt_vel = np.array([self.vel_command_lpfs[k](vel[k]) for k in range(self.n_dof)]).ravel()
# set max speed limts
faster_than_max_speed, = np.nonzero(np.abs(vel) > self.max_speed)
vel[faster_than_max_speed] = self.max_speed[faster_than_max_speed] * np.sign(vel[faster_than_max_speed])
self.debug = True
if self.debug:
# print 'filt_vel in plants in degrees'
# print filt_vel #*np.array([deg_to_rad, deg_to_rad, deg_to_rad, deg_to_rad])
            if len(faster_than_max_speed) > 0:
print ('faster_than_max_speed')
print (faster_than_max_speed)
print ("speed set to: ")
print (vel)
# self.plant.enable() #when we send vel commands always enable the rehand motors
# self._send_command('SystemEnable ReHand\r')
self._send_command('SetSpeed ReHand %f %f %f %f\r' % tuple(vel))
def get_vel_raw(self):
return np.array(tuple(self.source.read(n_pts=1)['data'][self.vel_state_names][0]))
def get_vel(self):
return np.array(tuple(self.source.read(n_pts=1)['data_filt'][self.vel_state_names][0]))
def enable(self):
self._send_command('SystemEnable ReHand\r')
def disable(self):
self._send_command('SystemDisable ReHand\r')
def diff_enable(self,DoFs):
self._send_command('DiffEnable ReHand %i %i %i %i\r' % tuple(DoFs))
def get_enable_state(self):
self._send_command('GetEnableState ReHand\r')
def enable_watchdog(self, timeout_ms):
self._send_command('WatchDogEnable ReHand %d\r' % timeout_ms)
def get_pos_raw(self):
# udp_feedback_client takes care of converting sensor data to cm or rad, as appropriate for the DOF
return np.array(tuple(self.source.read(n_pts=1)['data'][self.pos_state_names][0]))
#get pos filtered
def get_pos(self):
return np.array(tuple(self.source.read(n_pts=1)['data_filt'][self.pos_state_names][0]))
################################################
class BasePlantIsMore(Plant):
# define in subclasses!
aa_plant_cls = None
rh_plant_cls = None
safety_grid = None
both_feedback_str = ''
def __init__(self, *args, **kwargs):
self.aa_plant = self.aa_plant_cls()
self.rh_plant = self.rh_plant_cls()
self.drive_velocity_raw = np.zeros((7,))
self.drive_velocity_sent= np.zeros((7,))
self.drive_velocity_sent_pre_safety = np.zeros((7, ))
self.pre_drive_state = np.zeros((7, ))
self.prev_vel_bl_aa = np.zeros((3, ))*np.NaN
self.prev_vel_bl_rh = np.zeros((4, ))*np.NaN
self.accel_lim_armassist = np.inf #0.8
self.accel_lim_psi = np.inf #0.16
self.accel_lim_rehand = np.inf #0.16
def init(self):
self.aa_plant.init()
self.rh_plant.init()
def start(self):
self.aa_plant.start()
self.rh_plant.start()
self.ts_start_data = time.time()
def stop(self):
self.aa_plant.stop()
self.rh_plant.stop()
def last_data_ts_arrival(self):
return {
'ArmAssist': self.aa_plant.last_data_ts_arrival(),
'ReHand': self.rh_plant.last_data_ts_arrival(),
}
def send_vel(self, vel):
self.aa_plant.send_vel(vel[0:3])
self.rh_plant.send_vel(vel[3:7])
def get_pos_raw(self):
aa_pos = self.aa_plant.get_pos_raw()
rh_pos = self.rh_plant.get_pos_raw()
return np.hstack([aa_pos, rh_pos])
def get_pos(self):
aa_pos = self.aa_plant.get_pos()
rh_pos = self.rh_plant.get_pos()
return np.hstack([aa_pos, rh_pos])
def get_vel_raw(self):
aa_vel = self.aa_plant.get_vel_raw()
rh_vel = self.rh_plant.get_vel_raw()
return np.hstack([aa_vel, rh_vel])
def get_vel(self):
aa_vel = self.aa_plant.get_vel()
rh_vel = self.rh_plant.get_vel()
return np.hstack([aa_vel, rh_vel])
def enable(self):
self.aa_plant.enable()
self.rh_plant.enable()
def disable(self):
self.aa_plant.disable()
self.rh_plant.disable()
def drive(self, decoder):
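        # pipeline: decoded velocity -> attractor pull toward a safe posture ->
        # per-state low-pass filtering -> safety-grid clamping -> task-state
        # overrides -> hard velocity caps -> split send to ArmAssist and ReHand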
# print self.aa_plant.aa_xy_ix: [0, 1]
# print self.aa_plant.aa_psi_ix: [2]
# print self.rh_plant.rh_pfings: [0, 1, 2]
# print self.rh_plant.rh_pron_ix: [3]
vel = decoder['qdot']
vel_bl = vel.copy()
current_state = self.get_pos()
self.pre_drive_state = current_state.copy()
self.drive_velocity_raw = vel_bl.copy()
if self.blocking_joints is not None:
vel_bl[self.blocking_joints] = 0
vel_bl_aa0 = vel_bl[0:3].copy()
vel_bl_rh0 = vel_bl[3:7].copy()
### Accel Limit Velocitites ###
# if not np.all(np.isnan(np.hstack((self.prev_vel_bl_aa, self.prev_vel_bl_rh)))):
# aa_output_accel = vel_bl_aa - self.prev_vel_bl_aa
# rh_output_accel = vel_bl_rh - self.prev_vel_bl_rh
# ### AA XY ###
# for i in np.arange(2):
# if aa_output_accel[i] > self.accel_lim_armassist:
# vel_bl_aa[i] = self.prev_vel_bl_aa[i] + self.accel_lim_armassist
# elif aa_output_accel[i] < -1*self.accel_lim_armassist:
# vel_bl_aa[i] = self.prev_vel_bl_aa[i] - self.accel_lim_armassist
# ### AA PSI ###
# if aa_output_accel[2] > self.accel_lim_psi:
# vel_bl_aa[2] = self.prev_vel_bl_aa[2] + self.accel_lim_psi
# elif aa_output_accel[2] < -1*self.accel_lim_psi:
# vel_bl_aa[2] = self.prev_vel_bl_aa[2] - self.accel_lim_psi
# ### RH All ###
# for i in np.arange(4):
# if rh_output_accel[i] > self.accel_lim_rehand:
# vel_bl_rh[i] = self.prev_vel_bl_rh[i] + self.accel_lim_rehand
# elif rh_output_accel[i] < -1*self.accel_lim_rehand:
# vel_bl_rh[i] = self.prev_vel_bl_rh[i] - self.accel_lim_rehand
### Add Attractor ###
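        # the attractor acts like a weak spring toward safety_grid.attractor_point:
        # it biases the command back toward a known-safe posture, with gain
        # attractor_speed_const applied per 50 ms update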
if self.safety_grid is not None:
attractor_point_aa = self.safety_grid.attractor_point[:3]
attractor_point_rh = self.safety_grid.attractor_point[3:]
vel_bl_aa_pull = self.attractor_speed_const*(attractor_point_aa - current_state[:3])/0.05
vel_bl_rh_pull = self.attractor_speed_const*(attractor_point_rh - current_state[3:])/0.05
vel_bl_aa = vel_bl_aa0 + vel_bl_aa_pull.copy()
vel_bl_rh = vel_bl_rh0 + vel_bl_rh_pull.copy()
else:
vel_bl_aa = vel_bl_aa0
vel_bl_rh = vel_bl_rh0
### LPF Filter Velocities ###
for s, state in enumerate(['aa_vx', 'aa_vy', 'aa_vpsi']):
vel_bl_aa[s] = self.command_lpfs[state](vel_bl_aa[s])
if np.isnan(vel_bl_aa[s]):
vel_bl_aa[s] = 0
for s, state in enumerate(['rh_vthumb', 'rh_vindex', 'rh_vfing3', 'rh_vprono']):
vel_bl_rh[s] = self.command_lpfs[state](vel_bl_rh[s])
if np.isnan(vel_bl_rh[s]):
vel_bl_rh[s] = 0
self.drive_velocity_sent_pre_safety = np.hstack(( vel_bl_aa.copy(), vel_bl_rh.copy()))
#If the next position is outside of safety then damp velocity to only go to limit:
pos_pred = current_state + 0.05*np.hstack((vel_bl_aa, vel_bl_rh))
pos_pred_aa = pos_pred[0:3]
pos_pred_rh = pos_pred[3:7]
both_feedback_str = ''
if self.safety_grid is not None:
if len(self.aa_plant.aa_xy_ix) > 0:
x_tmp = self.safety_grid.is_valid_pos(pos_pred_aa[self.aa_plant.aa_xy_ix])
if x_tmp == False:
current_pos = current_state[self.aa_plant.aa_xy_ix]
pos_valid = attractor_point_aa[self.aa_plant.aa_xy_ix]
#d_to_valid, pos_valid = self.safety_grid.dist_to_valid_point(current_pos)
vel_bl_aa[self.aa_plant.aa_xy_ix] = self.attractor_speed*(pos_valid - current_pos)/0.05
pos_pred_aa[self.aa_plant.aa_xy_ix] = current_pos + 0.05*vel_bl_aa[self.aa_plant.aa_xy_ix]
#print 'plant adjust: ', vel_bl_aa[self.aa_plant.aa_xy_ix], pos_pred_aa[self.aa_plant.aa_xy_ix]
xy_change = True
# Make sure AA Psi within bounds:
if len(self.aa_plant.aa_psi_ix) > 0:
mn, mx = self.safety_grid.get_minmax_psi(pos_pred_aa[self.aa_plant.aa_xy_ix])
predx, predy= pos_pred_aa[[0, 1]]
# Set psi velocity :
psi_ok = False
if np.logical_and(pos_pred_aa[self.aa_plant.aa_psi_ix] >= mn, pos_pred_aa[self.aa_plant.aa_psi_ix] <= mx):
# Test if globally ok:
global_ok = self.safety_grid.global_hull.hull3d.find_simplex(np.array([predx, predy, pos_pred_aa[2]])) >=0
if global_ok:
psi_ok = True
if psi_ok == False:
# Move psi back to attractor pos:
psi_neutral = attractor_point_aa[self.aa_plant.aa_psi_ix]
vel_bl_aa[self.aa_plant.aa_psi_ix] = self.attractor_speed*(psi_neutral-current_state[self.aa_plant.aa_psi_ix])/0.05
# Make sure RH Prono within bounds (if SSM is only RH, use settings.starting_pos for AAPX, AAPY)
if len(self.rh_plant.rh_pron_ix) > 0:
mn, mx = self.safety_grid.get_minmax_prono(pos_pred_aa[self.aa_plant.aa_xy_ix])
# Set prono velocity :
if np.logical_and(pos_pred_rh[self.rh_plant.rh_pron_ix] >= mn, pos_pred_rh[self.rh_plant.rh_pron_ix] <= mx):
pass
else:
tmp_pos = pos_pred_rh[self.rh_plant.rh_pron_ix]
prono_neutral = attractor_point_rh[self.rh_plant.rh_pron_ix]
vel_bl_rh[self.rh_plant.rh_pron_ix] = self.attractor_speed*(prono_neutral-tmp_pos)/0.05
# Assure RH fingers are within range:
if len(self.rh_plant.rh_pfings) > 0:
for i, (ix, nm) in enumerate(self.rh_plant.rh_pfings):
mn, mx = self.safety_grid.get_rh_minmax(nm)
if np.logical_and(pos_pred_rh[ix] >= mn, pos_pred_rh[ix] <= mx):
pass
else:
tmp_ = pos_pred_rh[ix]
neutral = attractor_point_rh[ix]
vel_bl_rh[ix] = self.attractor_speed*(neutral - tmp_)/0.05
# If in the rest state -- block the arm:
if self.task_state in ['rest', 'prep', 'baseline_check']:
vel_bl_aa[:] = 0
vel_bl_rh[:] = 0
elif self.task_state == 'emg_rest':
scaling = self.rest_emg_output
if scaling <= 0.5:
scaling = 0
else:
scaling = 0.5*scaling
vel_bl_aa = scaling*vel_bl_aa
vel_bl_rh = scaling*vel_bl_rh
max_vel_xy = 10.
vel_bl_aa[vel_bl_aa>max_vel_xy] = max_vel_xy
vel_bl_aa[vel_bl_aa<-1*max_vel_xy] = -1*max_vel_xy
max_vel_ang = 2.
if vel_bl_aa[2] > max_vel_ang:
vel_bl_aa[2] = max_vel_ang
elif vel_bl_aa[2] < -1*max_vel_ang:
vel_bl_aa[2] = -1*max_vel_ang
vel_bl_rh[vel_bl_rh>max_vel_ang] = max_vel_ang
vel_bl_rh[vel_bl_rh<-1*max_vel_ang] = -1*max_vel_ang
if self.blocking_joints is not None:
for j in [0, 1, 2]:
if j in self.blocking_joints:
vel_bl_aa[j] = 0
#print 'blocking vel_bl_aa: ', j
for j in [3, 4, 5, 6]:
if j in self.blocking_joints:
vel_bl_rh[j-3] = 0
#print 'blocking vel_bl_rh: ', j-3
self.both_feedback_str = both_feedback_str
self.aa_plant.send_vel(vel_bl_aa)
self.rh_plant.send_vel(vel_bl_rh)
self.prev_vel_bl_aa = vel_bl_aa.copy()
self.prev_vel_bl_rh = vel_bl_rh.copy()
self.drive_velocity_sent = np.hstack(( vel_bl_aa.copy(), vel_bl_rh.copy()))
decoder['q'] = self.get_pos()
class IsMorePlantUDP(BasePlantIsMore):
'''Sends velocity commands and receives feedback over UDP. Can be used
with either the real or simulated ArmAssist+ReHand.
'''
aa_plant_cls = ArmAssistPlantUDP
rh_plant_cls = ReHandPlantUDP
def write_feedback(self):
self.aa_plant.feedback_str = self.both_feedback_str
self.aa_plant.write_feedback()
#self.rh_plant.write_feedback()
class IsMorePlantEMGControl(IsMorePlantUDP): # Plant used for the pure EMG control task
def drive(self):
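        # pure EMG control: same attractor/LPF/safety pipeline as the other
        # drive() methods, but the input velocity comes straight from
        # drive_velocity_raw (set by the task from the EMG decoder)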
vel_bl = self.drive_velocity_raw
current_state = self.get_pos()
self.pre_drive_state = current_state.copy()
if self.blocking_joints is not None:
vel_bl[self.blocking_joints] = 0
vel_bl_aa0 = vel_bl[0:3].copy()
vel_bl_rh0 = vel_bl[3:7].copy()
### Accel Limit Velocitites ###
# if not np.all(np.isnan(np.hstack((self.prev_vel_bl_aa, self.prev_vel_bl_rh)))):
# aa_output_accel = vel_bl_aa - self.prev_vel_bl_aa
# rh_output_accel = vel_bl_rh - self.prev_vel_bl_rh
# ### AA XY ###
# for i in np.arange(2):
# if aa_output_accel[i] > self.accel_lim_armassist:
# vel_bl_aa[i] = self.prev_vel_bl_aa[i] + self.accel_lim_armassist
# elif aa_output_accel[i] < -1*self.accel_lim_armassist:
# vel_bl_aa[i] = self.prev_vel_bl_aa[i] - self.accel_lim_armassist
# ### AA PSI ###
# if aa_output_accel[2] > self.accel_lim_psi:
# vel_bl_aa[2] = self.prev_vel_bl_aa[2] + self.accel_lim_psi
# elif aa_output_accel[2] < -1*self.accel_lim_psi:
# vel_bl_aa[2] = self.prev_vel_bl_aa[2] - self.accel_lim_psi
# ### RH All ###
# for i in np.arange(4):
# if rh_output_accel[i] > self.accel_lim_rehand:
# vel_bl_rh[i] = self.prev_vel_bl_rh[i] + self.accel_lim_rehand
# elif rh_output_accel[i] < -1*self.accel_lim_rehand:
# vel_bl_rh[i] = self.prev_vel_bl_rh[i] - self.accel_lim_rehand
### Add Attractor ###
attractor_point_aa = self.safety_grid.attractor_point[:3]
attractor_point_rh = self.safety_grid.attractor_point[3:]
vel_bl_aa_pull = self.attractor_speed_const*(attractor_point_aa - current_state[:3])/0.05
vel_bl_rh_pull = self.attractor_speed_const*(attractor_point_rh - current_state[3:])/0.05
vel_bl_aa = vel_bl_aa0 + vel_bl_aa_pull.copy()
vel_bl_rh = vel_bl_rh0 + vel_bl_rh_pull.copy()
### LPF Filter Velocities ###
for s, state in enumerate(['aa_vx', 'aa_vy', 'aa_vpsi']):
vel_bl_aa[s] = self.command_lpfs[state](vel_bl_aa[s])
if np.isnan(vel_bl_aa[s]):
vel_bl_aa[s] = 0
for s, state in enumerate(['rh_vthumb', 'rh_vindex', 'rh_vfing3', 'rh_vprono']):
vel_bl_rh[s] = self.command_lpfs[state](vel_bl_rh[s])
if np.isnan(vel_bl_rh[s]):
vel_bl_rh[s] = 0
self.drive_velocity_sent_pre_safety = np.hstack(( vel_bl_aa.copy(), vel_bl_rh.copy()))
#If the next position is outside of safety then damp velocity to only go to limit:
pos_pred = current_state + 0.05*np.hstack((vel_bl_aa, vel_bl_rh))
pos_pred_aa = pos_pred[0:3]
pos_pred_rh = pos_pred[3:7]
both_feedback_str = ''
if self.safety_grid is not None:
if len(self.aa_plant.aa_xy_ix) > 0:
x_tmp = self.safety_grid.is_valid_pos(pos_pred_aa[self.aa_plant.aa_xy_ix])
if x_tmp == False:
current_pos = current_state[self.aa_plant.aa_xy_ix]
pos_valid = attractor_point_aa[self.aa_plant.aa_xy_ix]
#d_to_valid, pos_valid = self.safety_grid.dist_to_valid_point(current_pos)
vel_bl_aa[self.aa_plant.aa_xy_ix] = self.attractor_speed*(pos_valid - current_pos)/0.05
pos_pred_aa[self.aa_plant.aa_xy_ix] = current_pos + 0.05*vel_bl_aa[self.aa_plant.aa_xy_ix]
#print 'plant adjust: ', vel_bl_aa[self.aa_plant.aa_xy_ix], pos_pred_aa[self.aa_plant.aa_xy_ix]
xy_change = True
# Make sure AA Psi within bounds:
if len(self.aa_plant.aa_psi_ix) > 0:
mn, mx = self.safety_grid.get_minmax_psi(pos_pred_aa[self.aa_plant.aa_xy_ix])
predx, predy= pos_pred_aa[[0, 1]]
# Set psi velocity :
psi_ok = False
if np.logical_and(pos_pred_aa[self.aa_plant.aa_psi_ix] >= mn, pos_pred_aa[self.aa_plant.aa_psi_ix] <= mx):
# Test if globally ok:
global_ok = self.safety_grid.global_hull.hull3d.find_simplex(np.array([predx, predy, pos_pred_aa[2]])) >=0
if global_ok:
psi_ok = True
if psi_ok == False:
# Move psi back to attractor pos:
psi_neutral = attractor_point_aa[self.aa_plant.aa_psi_ix]
vel_bl_aa[self.aa_plant.aa_psi_ix] = self.attractor_speed*(psi_neutral-current_state[self.aa_plant.aa_psi_ix])/0.05
# Make sure RH Prono within bounds (if SSM is only RH, use settings.starting_pos for AAPX, AAPY)
if len(self.rh_plant.rh_pron_ix) > 0:
mn, mx = self.safety_grid.get_minmax_prono(pos_pred_aa[self.aa_plant.aa_xy_ix])
# Set prono velocity :
if np.logical_and(pos_pred_rh[self.rh_plant.rh_pron_ix] >= mn, pos_pred_rh[self.rh_plant.rh_pron_ix] <= mx):
pass
else:
tmp_pos = pos_pred_rh[self.rh_plant.rh_pron_ix]
prono_neutral = attractor_point_rh[self.rh_plant.rh_pron_ix]
vel_bl_rh[self.rh_plant.rh_pron_ix] = self.attractor_speed*(prono_neutral-tmp_pos)/0.05
# Assure RH fingers are within range:
if len(self.rh_plant.rh_pfings) > 0:
for i, (ix, nm) in enumerate(self.rh_plant.rh_pfings):
mn, mx = self.safety_grid.get_rh_minmax(nm)
if np.logical_and(pos_pred_rh[ix] >= mn, pos_pred_rh[ix] <= mx):
pass
else:
tmp_ = pos_pred_rh[ix]
neutral = attractor_point_rh[ix]
vel_bl_rh[ix] = self.attractor_speed*(neutral - tmp_)/0.05
# If in the rest state -- block the arm:
if self.task_state in ['rest', 'prep']:
vel_bl_aa[:] = 0
vel_bl_rh[:] = 0
elif self.task_state == 'emg_rest':
scaling = self.rest_emg_output
if scaling <= 0.5:
scaling = 0
else:
scaling = 0.5*scaling
vel_bl_aa = scaling*vel_bl_aa
vel_bl_rh = scaling*vel_bl_rh
max_vel_xy = 10.
vel_bl_aa[vel_bl_aa>max_vel_xy] = max_vel_xy
vel_bl_aa[vel_bl_aa<-1*max_vel_xy] = -1*max_vel_xy
max_vel_ang = 2.
if vel_bl_aa[2] > max_vel_ang:
vel_bl_aa[2] = max_vel_ang
elif vel_bl_aa[2] < -1*max_vel_ang:
vel_bl_aa[2] = -1*max_vel_ang
vel_bl_rh[vel_bl_rh>max_vel_ang] = max_vel_ang
vel_bl_rh[vel_bl_rh<-1*max_vel_ang] = -1*max_vel_ang
if self.blocking_joints is not None:
for j in [0, 1, 2]:
if j in self.blocking_joints:
vel_bl_aa[j] = 0
#print 'blocking vel_bl_aa: ', j
for j in [3, 4, 5, 6]:
if j in self.blocking_joints:
vel_bl_rh[j-3] = 0
#print 'blocking vel_bl_rh: ', j-3
self.both_feedback_str = both_feedback_str
self.aa_plant.send_vel(vel_bl_aa)
self.rh_plant.send_vel(vel_bl_rh)
self.prev_vel_bl_aa = vel_bl_aa.copy()
self.prev_vel_bl_rh = vel_bl_rh.copy()
self.drive_velocity_sent = np.hstack(( vel_bl_aa.copy(), vel_bl_rh.copy()))
class IsMorePlantHybridBMI(IsMorePlantUDP): # Plant used for the hybrid (EMG + brain) BMI task.
def __init__(self, *args, **kwargs):
self.drive_velocity_raw_brain = np.zeros((7,))
self.emg_vel_raw_scaled = np.zeros((7,))
super(IsMorePlantHybridBMI, self).__init__(*args, **kwargs)
def drive(self, decoder):
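        # hybrid control: EMG-decoded and brain-decoded velocities are blended
        # per DOF group (base, fingers, prono) before the usual safety pipeline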
vel = decoder['qdot']
vel_brain = vel.copy()
vel_brain_aa = vel_brain[[0, 1, 2]]
vel_brain_fingers = vel_brain[[3, 4, 5]]
vel_brain_prono = vel_brain[[6]]
self.drive_velocity_raw_brain = vel_brain.copy()
# Use EMG scaled array to scale the output:
vel_emg = self.emg_vel.copy()
vel_emg_scaled = []
for i in range(7):
vel_emg_scaled.append(vel_emg[i]*self.scale_emg_pred_arr[i])
vel_emg_scaled = np.hstack((vel_emg_scaled))
self.emg_vel_raw_scaled = vel_emg_scaled.copy()
vel_emg_aa = vel_emg_scaled[[0, 1, 2]]
vel_emg_fingers = vel_emg_scaled[[3, 4, 5]]
vel_emg_prono = vel_emg_scaled[[6]]
vel_bl_aa = vel_emg_aa*self.emg_weight_aa + vel_brain_aa*(1-self.emg_weight_aa)
vel_bl_fingers = vel_emg_fingers*self.emg_weight_fingers + vel_brain_fingers*(1-self.emg_weight_fingers)
vel_bl_prono = vel_emg_prono*self.emg_weight_prono + vel_brain_prono*(1-self.emg_weight_prono)
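        # i.e. v = w*v_emg + (1 - w)*v_brain per group -- a convex combination,
        # so each blended component stays between the two source velocities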
vel_bl = np.hstack((vel_bl_aa, vel_bl_fingers, vel_bl_prono))
# Fuse velocities from EMG and neural decoders
#vel_bl = vel_emg*self.emg_weight + vel_brain*(1-self.emg_weight)
self.drive_velocity_raw = vel_bl.copy()
vel_bl_fb_gain = []
for i in range(7):
vel_bl_fb_gain.append(vel_bl[i]*self.fb_vel_gain[i])
vel_bl_fb_gain = np.hstack((vel_bl_fb_gain))
self.drive_velocity_raw_fb_gain = vel_bl_fb_gain.copy()
current_state = self.get_pos()
self.pre_drive_state = current_state.copy()
if self.blocking_joints is not None:
print ('self.blocking_joints --> ', self.blocking_joints)
vel_bl_fb_gain[self.blocking_joints] = 0
vel_bl_aa0 = vel_bl_fb_gain[0:3].copy()
vel_bl_rh0 = vel_bl_fb_gain[3:7].copy()
### Accel Limit Velocitites ###
# if not np.all(np.isnan(np.hstack((self.prev_vel_bl_aa, self.prev_vel_bl_rh)))):
# aa_output_accel = vel_bl_aa - self.prev_vel_bl_aa
# rh_output_accel = vel_bl_rh - self.prev_vel_bl_rh
# ### AA XY ###
# for i in np.arange(2):
# if aa_output_accel[i] > self.accel_lim_armassist:
# vel_bl_aa[i] = self.prev_vel_bl_aa[i] + self.accel_lim_armassist
# elif aa_output_accel[i] < -1*self.accel_lim_armassist:
# vel_bl_aa[i] = self.prev_vel_bl_aa[i] - self.accel_lim_armassist
# ### AA PSI ###
# if aa_output_accel[2] > self.accel_lim_psi:
# vel_bl_aa[2] = self.prev_vel_bl_aa[2] + self.accel_lim_psi
# elif aa_output_accel[2] < -1*self.accel_lim_psi:
# vel_bl_aa[2] = self.prev_vel_bl_aa[2] - self.accel_lim_psi
# ### RH All ###
# for i in np.arange(4):
# if rh_output_accel[i] > self.accel_lim_rehand:
# vel_bl_rh[i] = self.prev_vel_bl_rh[i] + self.accel_lim_rehand
# elif rh_output_accel[i] < -1*self.accel_lim_rehand:
# vel_bl_rh[i] = self.prev_vel_bl_rh[i] - self.accel_lim_rehand
### Add Attractor ###
attractor_point_aa = self.safety_grid.attractor_point[:3]
attractor_point_rh = self.safety_grid.attractor_point[3:]
vel_bl_aa_pull = self.attractor_speed_const*(attractor_point_aa - current_state[:3])/0.05
vel_bl_rh_pull = self.attractor_speed_const*(attractor_point_rh - current_state[3:])/0.05
vel_bl_aa = vel_bl_aa0 + vel_bl_aa_pull.copy()
vel_bl_rh = vel_bl_rh0 + vel_bl_rh_pull.copy()
### LPF Filter Velocities ###
for s, state in enumerate(['aa_vx', 'aa_vy', 'aa_vpsi']):
vel_bl_aa[s] = self.command_lpfs[state](vel_bl_aa[s])
if np.isnan(vel_bl_aa[s]):
vel_bl_aa[s] = 0
for s, state in enumerate(['rh_vthumb', 'rh_vindex', 'rh_vfing3', 'rh_vprono']):
vel_bl_rh[s] = self.command_lpfs[state](vel_bl_rh[s])
if np.isnan(vel_bl_rh[s]):
vel_bl_rh[s] = 0
self.drive_velocity_sent_pre_safety = np.hstack(( vel_bl_aa.copy(), vel_bl_rh.copy()))
#If the next position is outside of safety then damp velocity to only go to limit:
pos_pred = current_state + 0.05*np.hstack((vel_bl_aa, vel_bl_rh))
pos_pred_aa = pos_pred[0:3]
pos_pred_rh = pos_pred[3:7]
both_feedback_str = ''
if self.safety_grid is not None:
if len(self.aa_plant.aa_xy_ix) > 0:
x_tmp = self.safety_grid.is_valid_pos(pos_pred_aa[self.aa_plant.aa_xy_ix])
if x_tmp == False:
print ('false position')
current_pos = current_state[self.aa_plant.aa_xy_ix]
pos_valid = attractor_point_aa[self.aa_plant.aa_xy_ix]
#d_to_valid, pos_valid = self.safety_grid.dist_to_valid_point(current_pos)
vel_bl_aa[self.aa_plant.aa_xy_ix] = self.attractor_speed*(pos_valid - current_pos)/0.05
pos_pred_aa[self.aa_plant.aa_xy_ix] = current_pos + 0.05*vel_bl_aa[self.aa_plant.aa_xy_ix]
#print 'plant adjust: ', vel_bl_aa[self.aa_plant.aa_xy_ix], pos_pred_aa[self.aa_plant.aa_xy_ix]
xy_change = True
# Make sure AA Psi within bounds:
if len(self.aa_plant.aa_psi_ix) > 0:
mn, mx = self.safety_grid.get_minmax_psi(pos_pred_aa[self.aa_plant.aa_xy_ix])
                predx, predy = pos_pred_aa[[0, 1]]
# Set psi velocity :
psi_ok = False
if np.logical_and(pos_pred_aa[self.aa_plant.aa_psi_ix] >= mn, pos_pred_aa[self.aa_plant.aa_psi_ix] <= mx):
# Test if globally ok:
#global_ok = self.safety_grid.global_hull.hull3d.find_simplex(np.array([predx, predy, pos_pred_aa[2]])) >=0
global_ok = True
if global_ok:
psi_ok = True
if psi_ok == False:
# Move psi back to attractor pos:
psi_neutral = attractor_point_aa[self.aa_plant.aa_psi_ix]
vel_bl_aa[self.aa_plant.aa_psi_ix] = self.attractor_speed*(psi_neutral-current_state[self.aa_plant.aa_psi_ix])/0.05
# Make sure RH Prono within bounds (if SSM is only RH, use settings.starting_pos for AAPX, AAPY)
if len(self.rh_plant.rh_pron_ix) > 0:
mn, mx = self.safety_grid.get_minmax_prono(pos_pred_aa[self.aa_plant.aa_xy_ix])
# Set prono velocity :
if np.logical_and(pos_pred_rh[self.rh_plant.rh_pron_ix] >= mn, pos_pred_rh[self.rh_plant.rh_pron_ix] <= mx):
pass
else:
tmp_pos = pos_pred_rh[self.rh_plant.rh_pron_ix]
prono_neutral = attractor_point_rh[self.rh_plant.rh_pron_ix]
vel_bl_rh[self.rh_plant.rh_pron_ix] = self.attractor_speed*(prono_neutral-tmp_pos)/0.05
# Assure RH fingers are within range:
if len(self.rh_plant.rh_pfings) > 0:
for i, (ix, nm) in enumerate(self.rh_plant.rh_pfings):
mn, mx = self.safety_grid.get_rh_minmax(nm)
if np.logical_and(pos_pred_rh[ix] >= mn, pos_pred_rh[ix] <= mx):
pass
else:
tmp_ = pos_pred_rh[ix]
neutral = attractor_point_rh[ix]
vel_bl_rh[ix] = self.attractor_speed*(neutral - tmp_)/0.05
# print 'safely adjusting fingers! ', nm, 'min: ', mn, ' max: ', mx, ' pred: ', pos_pred_rh[ix]
# If in the rest state -- block the arm:
if self.task_state in ['rest', 'prep', 'baseline_check']:
vel_bl_aa[:] = 0
vel_bl_rh[:] = 0
elif self.task_state == 'emg_rest':
scaling = self.rest_emg_output
if scaling <= 0.5:
scaling = 0
else:
scaling = 0.5*scaling
vel_bl_aa = scaling*vel_bl_aa
vel_bl_rh = scaling*vel_bl_rh
elif self.task_state == 'rest_back':
vel_bl_aa = vel_bl_aa_pull/self.attractor_speed_const*self.rest_back_attractor_speed
vel_bl_rh = vel_bl_rh_pull/self.attractor_speed_const*self.rest_back_attractor_speed
elif self.task_state in ['drive_to_start', 'drive_to_rest']:
vel_bl_aa = self.back_to_target_speed*(self.drive_to_start_target[:3] - current_state[:3])/0.05
vel_bl_rh = self.back_to_target_speed*(self.drive_to_start_target[3:] - current_state[3:])/0.05
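        # Hard output clamps follow: vel_bl_aa is limited elementwise to
        # +/-max_vel_xy, its psi component is then tightened to +/-max_vel_ang,
        # and vel_bl_rh is limited to +/-max_vel_ang. np.clip(vel_bl_rh,
        # -max_vel_ang, max_vel_ang) would be an equivalent one-line form.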
max_vel_xy = 10.
vel_bl_aa[vel_bl_aa>max_vel_xy] = max_vel_xy
vel_bl_aa[vel_bl_aa<-1*max_vel_xy] = -1*max_vel_xy
max_vel_ang = 2.
if vel_bl_aa[2] > max_vel_ang:
vel_bl_aa[2] = max_vel_ang
elif vel_bl_aa[2] < -1*max_vel_ang:
vel_bl_aa[2] = -1*max_vel_ang
vel_bl_rh[vel_bl_rh>max_vel_ang] = max_vel_ang
vel_bl_rh[vel_bl_rh<-1*max_vel_ang] = -1*max_vel_ang
if self.blocking_joints is not None:
for j in [0, 1, 2]:
if j in self.blocking_joints:
vel_bl_aa[j] = 0
#print 'blocking vel_bl_aa: ', j
for j in [3, 4, 5, 6]:
if j in self.blocking_joints:
vel_bl_rh[j-3] = 0
#print 'blocking vel_bl_rh: ', j-3
self.both_feedback_str = both_feedback_str
self.aa_plant.send_vel(vel_bl_aa)
self.rh_plant.send_vel(vel_bl_rh)
self.prev_vel_bl_aa = vel_bl_aa.copy()
self.prev_vel_bl_rh = vel_bl_rh.copy()
self.drive_velocity_sent = np.hstack(( vel_bl_aa.copy(), vel_bl_rh.copy()))
decoder['q'] = self.get_pos()
class IsMorePlantHybridBMISoftSafety(IsMorePlantHybridBMI):
def drive(self, decoder):
vel = decoder['qdot']
vel_brain = vel.copy()
vel_brain_aa = vel_brain[[0, 1, 2]]
vel_brain_fingers = vel_brain[[3, 4, 5]]
vel_brain_prono = vel_brain[[6]]
self.drive_velocity_raw_brain = vel_brain.copy()
# Use EMG scaled array to scale the output:
vel_emg = self.emg_vel.copy()
vel_emg_scaled = []
for i in range(7):
vel_emg_scaled.append(vel_emg[i]*self.scale_emg_pred_arr[i])
vel_emg_scaled = np.hstack((vel_emg_scaled))
self.emg_vel_raw_scaled = vel_emg_scaled.copy()
vel_emg_aa = vel_emg_scaled[[0, 1, 2]]
vel_emg_fingers = vel_emg_scaled[[3, 4, 5]]
vel_emg_prono = vel_emg_scaled[[6]]
vel_bl_aa = vel_emg_aa*self.emg_weight_aa + vel_brain_aa*(1-self.emg_weight_aa)
vel_bl_fingers = vel_emg_fingers*self.emg_weight_fingers + vel_brain_fingers*(1-self.emg_weight_fingers)
vel_bl_prono = vel_emg_prono*self.emg_weight_prono + vel_brain_prono*(1-self.emg_weight_prono)
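        # Per-group convex blend of the two decoders: a weight of 1 drives the
        # group purely from EMG, 0 purely from the brain decoder.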
vel_bl = np.hstack((vel_bl_aa, vel_bl_fingers, vel_bl_prono))
# Fuse velocities from EMG and neural decoders
#vel_bl = vel_emg*self.emg_weight + vel_brain*(1-self.emg_weight)
self.drive_velocity_raw = vel_bl.copy()
vel_bl_fb_gain = []
for i in range(7):
vel_bl_fb_gain.append(vel_bl[i]*self.fb_vel_gain[i])
vel_bl_fb_gain = np.hstack((vel_bl_fb_gain))
self.drive_velocity_raw_fb_gain = vel_bl_fb_gain.copy()
current_state = self.get_pos()
self.pre_drive_state = current_state.copy()
if self.blocking_joints is not None:
vel_bl_fb_gain[self.blocking_joints] = 0
vel_bl_aa0 = vel_bl_fb_gain[0:3].copy()
vel_bl_rh0 = vel_bl_fb_gain[3:7].copy()
        ### Accel Limit Velocities ###
# if not np.all(np.isnan(np.hstack((self.prev_vel_bl_aa, self.prev_vel_bl_rh)))):
# aa_output_accel = vel_bl_aa - self.prev_vel_bl_aa
# rh_output_accel = vel_bl_rh - self.prev_vel_bl_rh
# ### AA XY ###
# for i in np.arange(2):
# if aa_output_accel[i] > self.accel_lim_armassist:
# vel_bl_aa[i] = self.prev_vel_bl_aa[i] + self.accel_lim_armassist
# elif aa_output_accel[i] < -1*self.accel_lim_armassist:
# vel_bl_aa[i] = self.prev_vel_bl_aa[i] - self.accel_lim_armassist
# ### AA PSI ###
# if aa_output_accel[2] > self.accel_lim_psi:
# vel_bl_aa[2] = self.prev_vel_bl_aa[2] + self.accel_lim_psi
# elif aa_output_accel[2] < -1*self.accel_lim_psi:
# vel_bl_aa[2] = self.prev_vel_bl_aa[2] - self.accel_lim_psi
# ### RH All ###
# for i in np.arange(4):
# if rh_output_accel[i] > self.accel_lim_rehand:
# vel_bl_rh[i] = self.prev_vel_bl_rh[i] + self.accel_lim_rehand
# elif rh_output_accel[i] < -1*self.accel_lim_rehand:
# vel_bl_rh[i] = self.prev_vel_bl_rh[i] - self.accel_lim_rehand
### Add Attractor ###
attractor_point_aa = self.safety_grid.attractor_point[:3]
attractor_point_rh = self.safety_grid.attractor_point[3:]
vel_bl_aa_pull = self.attractor_speed_const*(attractor_point_aa - current_state[:3])/0.05
vel_bl_rh_pull = self.attractor_speed_const*(attractor_point_rh - current_state[3:])/0.05
vel_bl_aa = vel_bl_aa0 + vel_bl_aa_pull.copy()
vel_bl_rh = vel_bl_rh0 + vel_bl_rh_pull.copy()
### LPF Filter Velocities ###
for s, state in enumerate(['aa_vx', 'aa_vy', 'aa_vpsi']):
vel_bl_aa[s] = self.command_lpfs[state](vel_bl_aa[s])
if np.isnan(vel_bl_aa[s]):
vel_bl_aa[s] = 0
for s, state in enumerate(['rh_vthumb', 'rh_vindex', 'rh_vfing3', 'rh_vprono']):
vel_bl_rh[s] = self.command_lpfs[state](vel_bl_rh[s])
if np.isnan(vel_bl_rh[s]):
vel_bl_rh[s] = 0
self.drive_velocity_sent_pre_safety = np.hstack(( vel_bl_aa.copy(), vel_bl_rh.copy()))
#If the next position is outside of safety then damp velocity to only go to limit:
pos_pred = current_state + 0.05*np.hstack((vel_bl_aa, vel_bl_rh))
pos_pred_aa = pos_pred[0:3]
pos_pred_rh = pos_pred[3:7]
both_feedback_str = ''
if self.safety_grid is not None:
if len(self.aa_plant.aa_xy_ix) > 0:
x_tmp = self.safety_grid.is_valid_pos(pos_pred_aa[self.aa_plant.aa_xy_ix])
if x_tmp == False:
# Find the closest point on the boundary of the safety grid and set velocity in same
# direction, but at 90% of way to get to the edge of the safety grid:
current_pos = current_state[self.aa_plant.aa_xy_ix]
### loop through percentages of velocity and check validity of point:
valid_scale = False
scale = 1.0
while valid_scale is False:
scale -= 0.05
pos_pred_xy = current_pos + 0.05*(vel_bl_aa[self.aa_plant.aa_xy_ix]*scale)
valid_scale = self.safety_grid.is_valid_pos(pos_pred_xy)
if scale < -1.0:
scale = 0.0
break
#d_to_valid, pos_valid = self.safety_grid.dist_to_valid_point(current_pos)
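                    # Backtracking line search over the command magnitude:
                    # scale starts at 1.0 and is reduced in 0.05 steps until
                    # the predicted XY position re-enters the safety grid, so
                    # it tries 0.95, 0.90, ..., may go negative (reversing the
                    # command back toward the valid region), and is zeroed if
                    # nothing works by scale = -1.0.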
vel_bl_aa[self.aa_plant.aa_xy_ix] = vel_bl_aa[self.aa_plant.aa_xy_ix]*scale
pos_pred_aa[self.aa_plant.aa_xy_ix] = current_pos + 0.05*vel_bl_aa[self.aa_plant.aa_xy_ix]
#print 'plant adjust: ', vel_bl_aa[self.aa_plant.aa_xy_ix], pos_pred_aa[self.aa_plant.aa_xy_ix]
xy_change = True
# Make sure AA Psi within bounds:
if len(self.aa_plant.aa_psi_ix) > 0:
mn, mx = self.safety_grid.get_minmax_psi(pos_pred_aa[self.aa_plant.aa_xy_ix])
                predx, predy = pos_pred_aa[[0, 1]]
# Set psi velocity :
psi_ok = False
if np.logical_and(pos_pred_aa[self.aa_plant.aa_psi_ix] >= mn, pos_pred_aa[self.aa_plant.aa_psi_ix] <= mx):
# Test if globally ok:
#global_ok = self.safety_grid.global_hull.hull3d.find_simplex(np.array([predx, predy, pos_pred_aa[2]])) >=0
global_ok = True
if global_ok:
psi_ok = True
if psi_ok == False:
valid_scale_psi = False
scale = 1.0
while valid_scale_psi is False:
scale -= 0.05
psi_pred = current_state[self.aa_plant.aa_psi_ix] + 0.05*(scale*vel_bl_aa[self.aa_plant.aa_psi_ix])
if np.logical_and(psi_pred >= mn, psi_pred <= mx):
valid_scale_psi = True
if scale < -1.0:
scale = 0.0
break
vel_bl_aa[self.aa_plant.aa_psi_ix] = scale*vel_bl_aa[self.aa_plant.aa_psi_ix]
# Make sure RH Prono within bounds (if SSM is only RH, use settings.starting_pos for AAPX, AAPY)
if len(self.rh_plant.rh_pron_ix) > 0:
mn, mx = self.safety_grid.get_minmax_prono(pos_pred_aa[self.aa_plant.aa_xy_ix])
# Set prono velocity :
if np.logical_and(pos_pred_rh[self.rh_plant.rh_pron_ix] >= mn, pos_pred_rh[self.rh_plant.rh_pron_ix] <= mx):
pass
else:
valid_scale_prono = False
scale = 1.0
while valid_scale_prono is False:
scale -= 0.05
pron_pred = pos_pred_rh[self.rh_plant.rh_pron_ix] + 0.05*(scale*vel_bl_rh[self.rh_plant.rh_pron_ix])
if np.logical_and(pron_pred >= mn, pron_pred <= mx):
valid_scale_prono = True
if scale < -1.0:
scale = 0.
break
vel_bl_rh[self.rh_plant.rh_pron_ix] = scale*vel_bl_rh[self.rh_plant.rh_pron_ix]
# Assure RH fingers are within range:
if len(self.rh_plant.rh_pfings) > 0:
for i, (ix, nm) in enumerate(self.rh_plant.rh_pfings):
mn, mx = self.safety_grid.get_rh_minmax(nm)
if np.logical_and(pos_pred_rh[ix] >= mn, pos_pred_rh[ix] <= mx):
pass
else:
finger_scale = False
scale = 1.0
while finger_scale is False:
scale -= 0.05
fing_pred = pos_pred_rh[ix] + 0.05*(scale*vel_bl_rh[ix])
if np.logical_and(fing_pred >= mn, fing_pred<= mx):
finger_scale = True
if scale < -1.0:
scale = 0.0
break
vel_bl_rh[ix] = scale*vel_bl_rh[ix]
# If in the rest state -- block the arm:
if self.task_state in ['rest', 'prep', 'baseline_check', 'wait']:
vel_bl_aa[:] = 0
vel_bl_rh[:] = 0
elif self.task_state == 'emg_rest':
scaling = self.rest_emg_output
if scaling <= 0.5:
scaling = 0
else:
scaling = 0.5*scaling
vel_bl_aa = scaling*vel_bl_aa
vel_bl_rh = scaling*vel_bl_rh
elif self.task_state == 'rest_back':
vel_bl_aa = vel_bl_aa_pull/self.attractor_speed_const*self.rest_back_attractor_speed
vel_bl_rh = vel_bl_rh_pull/self.attractor_speed_const*self.rest_back_attractor_speed
elif self.task_state in ['drive_to_start', 'drive_to_rest']:
vel_bl_aa = self.back_to_target_speed*(self.drive_to_start_target[:3] - current_state[:3])/0.05
vel_bl_rh = self.back_to_target_speed*(self.drive_to_start_target[3:] - current_state[3:])/0.05
max_vel_xy = 10.
vel_bl_aa[vel_bl_aa>max_vel_xy] = max_vel_xy
vel_bl_aa[vel_bl_aa<-1*max_vel_xy] = -1*max_vel_xy
max_vel_ang = 2.
if vel_bl_aa[2] > max_vel_ang:
vel_bl_aa[2] = max_vel_ang
elif vel_bl_aa[2] < -1*max_vel_ang:
vel_bl_aa[2] = -1*max_vel_ang
vel_bl_rh[vel_bl_rh>max_vel_ang] = max_vel_ang
vel_bl_rh[vel_bl_rh<-1*max_vel_ang] = -1*max_vel_ang
if self.blocking_joints is not None:
for j in [0, 1, 2]:
if j in self.blocking_joints:
vel_bl_aa[j] = 0
#print 'blocking vel_bl_aa: ', j
for j in [3, 4, 5, 6]:
if j in self.blocking_joints:
vel_bl_rh[j-3] = 0
#print 'blocking vel_bl_rh: ', j-3
self.both_feedback_str = both_feedback_str
self.aa_plant.send_vel(vel_bl_aa)
self.rh_plant.send_vel(vel_bl_rh)
self.prev_vel_bl_aa = vel_bl_aa.copy()
self.prev_vel_bl_rh = vel_bl_rh.copy()
self.drive_velocity_sent = np.hstack(( vel_bl_aa.copy(), vel_bl_rh.copy()))
decoder['q'] = self.get_pos()
UDP_PLANT_CLS_DICT = {
'ArmAssist': ArmAssistPlantUDP,
'ReHand': ReHandPlantUDP,
'IsMore': IsMorePlantUDP,
'IsMoreEMGControl': IsMorePlantEMGControl,
'IsMoreHybridControl': IsMorePlantHybridBMI,
'IsMorePlantHybridBMISoftSafety': IsMorePlantHybridBMISoftSafety,
'DummyPlant': DummyPlantUDP,
}
###########################
##### Deprecated code #####
###########################
class BasePlant(object):
def __init__(self, *args, **kwargs):
raise NotImplementedError('Implement in subclasses!')
def init(self):
raise NotImplementedError('Implement in subclasses!')
def start(self):
raise NotImplementedError('Implement in subclasses!')
def stop(self):
raise NotImplementedError('Implement in subclasses!')
def last_data_ts_arrival(self):
raise NotImplementedError('Implement in subclasses!')
def send_vel(self, vel):
raise NotImplementedError('Implement in subclasses!')
def get_pos(self):
raise NotImplementedError('Implement in subclasses!')
def get_vel(self):
raise NotImplementedError('Implement in subclasses!')
def enable(self):
        '''Enable the device's motor drivers.'''
raise NotImplementedError('Implement in subclasses!')
def disable(self):
'''Disable the device's motor drivers.'''
raise NotImplementedError('Implement in subclasses!')
def enable_watchdog(self, timeout_ms):
raise NotImplementedError('Implement in subclasses!')
def get_intrinsic_coordinates(self):
return self.get_pos()
| 40.637609 | 155 | 0.58412 | 64,335 | 0.985766 | 0 | 0 | 0 | 0 | 0 | 0 | 16,450 | 0.252053 |
3146c14380ad5914b64e35f3048435f94f9e6ee7 | 22,089 | py | Python | catalog/client/services/catalog.py | eoss-cloud/madxxx_catalog_api | ef37374a36129de4f0a6fe5dd46b5bc2e2f01d1d | ["MIT"] | null | null | null | catalog/client/services/catalog.py | eoss-cloud/madxxx_catalog_api | ef37374a36129de4f0a6fe5dd46b5bc2e2f01d1d | ["MIT"] | null | null | null | catalog/client/services/catalog.py | eoss-cloud/madxxx_catalog_api | ef37374a36129de4f0a6fe5dd46b5bc2e2f01d1d | ["MIT"] | null | null | null |
#-*- coding: utf-8 -*-
""" EOSS catalog system
functionality for the catalog endpoint
"""
from utilities.web_utils import remote_file_exists
__author__ = "Thilo Wehrmann, Steffen Gebhardt"
__copyright__ = "Copyright 2016, EOSS GmbH"
__credits__ = ["Thilo Wehrmann", "Steffen Gebhardt"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Thilo Wehrmann"
__email__ = "[email protected]"
__status__ = "Production"
import datetime
import ujson
import time
import dateparser
import falcon
try:
import cStringIO as StringIO
except ImportError:
import StringIO
import csv
from xlsxwriter import Workbook
from dateutil.parser import parse
import numpy
from sqlalchemy import and_
import logging
from collections import defaultdict
from model.orm import Catalog_Dataset, Spatial_Reference
from api import General_Structure
from .db_calls import Persistance
from . import getKeysFromDict
from .tools import get_base_url, can_zip_response, compress_body, serialize, make_GeoJson
from api_logging import logger
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
raise TypeError
GRID_SYSTEMS = {'Sentinel - 2A': 10,
'LANDSAT_ETM': 11,
'LANDSAT_ETM_SLC_OFF': 11,
'OLI_TIRS': 11,
'TIRS': 11}
class Catalog(object):
"""
EOSS catalog class from web API
"""
def __init__(self):
self.logger = logging.getLogger('eoss.' + __name__)
self.aggregations = defaultdict(list)
for agg in Persistance().get_all_sensor_aggregations():
self.aggregations[agg.aggregation_name.lower()].append(agg)
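        # self.aggregations maps a lower-cased aggregation name to the rows that
        # expand it into concrete (sensor, level) pairs, e.g. (hypothetical
        # values) {'landsat': [<agg sensor='LANDSAT_ETM', level=''>, ...]}.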
def _query_(self, areas, dates, sensors, clouds):
sensors_filter = list()
grid_list = defaultdict(set)
for sensor_grid in set(GRID_SYSTEMS.values()):
if 'ref_group' in areas[0].keys():
ref_type_id, ref_id = areas[0]['ref_group'], areas[0]['ref_id']
spatial_query = Persistance().get_reference_by_sensorgrid(ref_id, ref_type_id, sensor_grid)
elif 'aoi' in areas[0].keys():
aoi = areas[0]['aoi']
spatial_query = Persistance().get_referencebyaoi(aoi, sensor_grid)
for grid in spatial_query.all():
grid_list[sensor_grid].add(grid)
if len(grid_list) == 0:
description = 'Please specify valid reference object for data. (type:%s, id:%s)' \
% (ref_type_id, ref_id)
raise falcon.HTTPBadRequest('SensorGrid', description,
href='http://docs.example.com/auth')
joint_gridset = grid_list[10] | grid_list[11] # TODO: better grid system handling from extra table?
for item in sensors:
sensor, level = item['sensor_name'], item['level']
if len(sensor) > 0 and len(level) > 0:
sensors_filter.append(and_(Catalog_Dataset.level == level, Catalog_Dataset.sensor == sensor))
elif len(sensor) == 0 and len(level) > 0:
sensors_filter.append(Catalog_Dataset.level == level)
elif len(sensor) > 0 and len(level) == 0:
sensors_filter.append(Catalog_Dataset.sensor == sensor)
dates_filter = list()
for item in dates:
            # ExtJS POST requests provide a unicode body
if type(item["start_date"]) is unicode:
item["start_date"] = parse(item["start_date"])
if type(item["end_date"]) is unicode:
item["end_date"] = parse(item["end_date"])
dates_filter.append(
and_(Catalog_Dataset.acq_time >= item["start_date"].isoformat(), Catalog_Dataset.acq_time <= item["end_date"].isoformat()))
query = Persistance().find_dataset(dates_filter, sensors_filter, grid_list, joint_gridset, clouds)
return query
def _get_datasets(self, query):
query_result = list()
for ds in query:
values = dict()
types = dict()
for k, v in ds.__dict__.iteritems():
if '_' != k[0]:
values[k] = v
types[k] = type(v)
x = General_Structure(values, types)
x.__class__.__name__ = 'Catalog_Dataset'
query_result.append(serialize(x, as_json=False)['data'])
return query_result
# TODO: tiles list as input - only first will be returned or exception thrown !
def _query_tile_geom(self, tiles):
tile_objs = Persistance().get_tile_geom(tiles)
return tile_objs.all()
def _export_query(self, found_dataset):
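        # Builds the row set for the CSV/XLSX exports: the first row is the
        # header ['tile_identifier', 'entity_id', 'acq_time', 'clouds',
        # 'resources metadata', 'resources quicklook', 'data']; each following
        # row carries those values plus the best available download link.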
row_keys = ['tile_identifier', 'entity_id', 'acq_time', 'clouds']
resources = [('resources', 'metadata'), ('resources', 'quicklook')]
row = list()
rows = list()
for k in row_keys:
row.append(k)
for k in resources:
row.append(' '.join(k))
row.append('data')
rows.append(row)
for ds in found_dataset:
row = list()
for k in row_keys:
row.append(ds.get(k))
for k in resources:
row.append(getKeysFromDict(ds, k))
if ds.get('sensor') in ['LANDSAT_TM', 'LANDSAT_ETM', 'LANDSAT_ETM_SLC_OFF']:
if 'google' in ds.get('resources').keys():
row.append(getKeysFromDict(ds, ('resources', 'google', 'link')))
elif 'usgs' in ds.get('resources').keys():
row.append(getKeysFromDict(ds, ('resources', 'usgs', 'link')))
else:
row.append('?')
elif ds.get('sensor') in ['OLI_TIRS', 'OLI', 'TIRS']:
if 's3public' in ds.get('resources').keys():
row.append(getKeysFromDict(ds, ('resources', 's3public', 'zip')))
elif 'google' in ds.get('resources').keys():
row.append(getKeysFromDict(ds, ('resources', 'google', 'link')))
elif ds.get('sensor') in ['Sentinel-2A']:
if 's3public' in ds.get('resources').keys():
if getKeysFromDict(ds, ('resources', 's3public')) != None:
row.append(getKeysFromDict(ds, ('resources', 's3public', 'zip')))
else:
row.append('?')
else:
row.append('?')
rows.append(row)
return rows
class CatalogApi(Catalog):
def __init__(self, my_router):
Catalog.__init__(self)
self.router = my_router
def on_get(self, req, resp, format, check_resources=False):
"""Handles GET requests
http://localhost:8000/catalog/search/result.json?from_date=2016-05-01&to_date=2016-06-02&sensor=sentinel2&ref_group=9&ref_id=73&clouds=50
"""
BASE_URL = get_base_url(req.url)
start_time = time.time()
query_filter = req.params
results = dict()
results['action'] = 'catalog search'
results['action-time'] = str(datetime.datetime.now())
results.update({'query': query_filter})
dates = list()
sensor_list = list()
try:
for date_string in ['from_date', 'to_date']:
date = dateparser.parse(req.params[date_string])
if date is None:
description = 'Please format date propery, used %s:%s.' % (date_string, date)
raise falcon.HTTPBadRequest('DateFormat', description,
href='http://docs.example.com/auth')
else:
dates.append(date)
if dates[0] == dates[1]:
                description = "Given dates don't cover a date range. Please correct the date span. (%s-%s)" \
% (req.params['from_date'], req.params['to_date'])
raise falcon.HTTPBadRequest('DateFormat', description,
href='http://docs.example.com/auth')
elif dates[0] > dates[1]:
description = 'Given end date is before start date. Please reverse dates. (%s-%s)' \
% (req.params['from_date'], req.params['to_date'])
raise falcon.HTTPBadRequest('DateFormat', description,
href='http://docs.example.com/auth')
if not req.params['sensor'].lower() in self.aggregations.keys():
description = 'Sensor label is unknown in aggregation table, use %s' % str(map(str, self.aggregations.keys()))
raise falcon.HTTPBadRequest('DateFormat', description,
href='http://docs.example.com/auth')
for agg in self.aggregations[req.params['sensor'].lower()]:
sensor_list.append({"sensor_name": agg.sensor, "level": agg.level})
ref_group, ref_id, clouds = int(req.params['ref_group']), int(req.params['ref_id']), int(req.params['clouds'])
except KeyError, e:
description = 'Search key: %s missing in query.' % e
raise falcon.HTTPBadRequest('KeyError', description,
href='http://docs.example.com/auth')
except ValueError, e:
            description = 'Given parameters contain bad values: %s' % str(e)
raise falcon.HTTPBadRequest('KeyError', description,
href='http://docs.example.com/auth')
query = self._query_([{"ref_group": ref_group, "ref_id": ref_id}],
[{"start_date": dates[0], "end_date": dates[1]}],
sensor_list, clouds)
query_struct = {'area':[{"ref_group": ref_group, "ref_id": ref_id}],
'dates':[{"start_date": dates[0], "end_date": dates[1]}],
'sensors':sensor_list, 'clouds':clouds
}
found_dataset = self._get_datasets(query)
logger.info('[GET] /catalog/search/result.%s' % format, extra={x:str(y) for x,y in query_struct.iteritems()})
if check_resources:
for ds in found_dataset:
if 's3public' in ds['resources'].keys():
if 'zip' in ds['resources']['s3public'].keys():
if not remote_file_exists( ds['resources']['s3public']['zip']):
print '%s missing' % ds['resources']['s3public']['zip']
if format.lower() == 'json':
if 'search/count' in req.url:
results['count'] = query.count()
else:
results['count'] = query.count()
results['found_dataset'] = found_dataset
results['found_tiles'] = sorted(list(set([x['tile_identifier'] for x in found_dataset])))
results['found_resources'] = [BASE_URL + self.router.reverse('dataset_entity', entity_id=x['entity_id'])
for x in results['found_dataset']]
results['processing_time'] = time.time() - start_time
elif format.lower() == 'geojson':
tilegrids = defaultdict(lambda: defaultdict(list))
geoms, attrs = list(), list()
for x in found_dataset:
tilegrids[x['tile_identifier']]['acq_time'].append(x['acq_time'])
# tilegrids[x['tile_identifier']]['acq_time_js'].append(
# int(time.mktime(dateparser.parse(x['acq_time']).timetuple())) * 1000)
tilegrids[x['tile_identifier']]['tile_identifier'].append(x['tile_identifier'])
tilegrids[x['tile_identifier']]['clouds'].append(x['clouds'])
for tile_id in tilegrids.keys():
tilegrids[tile_id]['count'] = len(tilegrids[tile_id]['clouds'])
tilegrids[tile_id]['tile_identifier'] = tilegrids[tile_id]['tile_identifier'][0]
tiles_dict = dict()
if len(tilegrids.keys()) > 0:
for ref_name, geom in self._query_tile_geom(tilegrids.keys()):
tiles_dict[ref_name] = geom
for tile_id in tilegrids.keys():
geoms.append(ujson.loads(tiles_dict[tile_id]))
attrs.append(tilegrids[tile_id])
results = make_GeoJson(geoms, attrs)
elif format.lower() == 'csv':
rows = self._export_query(found_dataset)
si = StringIO.StringIO()
cw = csv.writer(si, delimiter='\t')
for row in rows:
cw.writerow(row)
results = si.getvalue().strip('\r\n')
elif format.lower() == 'xlsx':
rows = self._export_query(found_dataset)
strIO = StringIO.StringIO()
workbook = Workbook(strIO, {'in_memory': True, 'constant_memory': True})
bold = workbook.add_format({'bold': True})
big_bold = workbook.add_format({'bold': True, 'size': 20})
italic = workbook.add_format({'italic': True})
worksheet = workbook.add_worksheet(name='EOSS analysis')
worksheet.write(0, 0, 'EOSS data analysis', big_bold)
ref_obj = Persistance().get_reference(query_filter.get('ref_id'), query_filter.get('ref_group')).one()
query_filter['reference_name'] = ref_obj.ref_name
query_filter['reference_type'] = ref_obj.referencetype.name
# {'clouds': '60', 'ref_id': '5502', 'from_date': '09/07/2016', 'to_date': '10/07/2016', 'ref_group': '12', 'sensor': 'Sentinel2'}
r = 3
worksheet.write(r - 1, 0, 'query filter:', big_bold)
for c, k in enumerate(['sensor', 'from_date', 'to_date', 'clouds', 'reference_name', 'reference_type']):
worksheet.write(r + c, 0, k, bold)
worksheet.write(r + c, 1, query_filter[k])
r = 13
worksheet.write(r - 2, 0, 'query set:', big_bold)
for c, k in enumerate(rows[0]):
worksheet.write(r - 1, c, k, bold)
for values in rows[1:]:
for c, v in enumerate(values):
worksheet.write(r, c, v)
r += 1
workbook.close()
strIO.seek(0)
results = strIO.read()
elif format.lower() == 'hist':
found_tiles = sorted(list(set([x['tile_identifier'] for x in found_dataset])))
result_list = []
first = dict()
first['tile_identifier'] = 'percentagelabel'
first['span'] = 100
result_list.append(first)
data = numpy.zeros((len(found_dataset)))
tileslist = []
i = 0
for x in found_dataset:
tileslist.append(x['tile_identifier'])
data[i] = float(x['clouds'])
i = i + 1
for t in found_tiles:
ix = numpy.array(tileslist) == t
subset_clouds = data[ix]
num_scenes = sum(ix)
hist_abs = numpy.histogram(subset_clouds, bins=[-1] + range(0, 120, 20))
hist_rel = hist_abs[0] * 1.0 / num_scenes
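                # With bins = [-1, 0, 20, 40, 60, 80, 100] numpy.histogram
                # returns six counts: [-1, 0) collects clouds == -1 (apparently
                # used as a no-data flag), then [0, 20), [20, 40), [40, 60),
                # [60, 80) and [80, 100], matching the six scenes_* fields
                # filled below.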
hist_struct = dict()
hist_struct['tile_identifier'] = t
hist_struct['span'] = 100
hist_struct['scenes_perc_-1'] = hist_rel[0]
hist_struct['scenes_perc_20'] = hist_rel[1]
hist_struct['scenes_perc_40'] = hist_rel[2]
hist_struct['scenes_perc_60'] = hist_rel[3]
hist_struct['scenes_perc_80'] = hist_rel[4]
hist_struct['scenes_perc_100'] = hist_rel[5]
hist_struct['scenes_abs_-1'] = hist_abs[0][0]
hist_struct['scenes_abs_20'] = hist_abs[0][1]
hist_struct['scenes_abs_40'] = hist_abs[0][2]
hist_struct['scenes_abs_60'] = hist_abs[0][3]
hist_struct['scenes_abs_80'] = hist_abs[0][4]
hist_struct['scenes_abs_100'] = hist_abs[0][5]
result_list.append(hist_struct)
results['found_tiles'] = result_list
resp.status = falcon.HTTP_200
if can_zip_response(req.headers):
if format.lower() in ['hist', 'json', 'geojson']:
resp.set_header('Content-Type', 'application/json')
resp.set_header('Content-Encoding', 'gzip')
resp.body = compress_body(ujson.dumps(results))
elif format.lower() == 'csv':
resp.set_header('Content-Type', 'text/csv')
resp.set_header('Content-disposition', 'attachment;filename=%s;' % self.create_output_name('csv'))
resp.set_header('Content-Encoding', 'gzip')
resp.body = compress_body(results)
elif format.lower() == 'xlsx':
resp.set_header('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
resp.set_header('Content-disposition', 'attachment;filename=%s;' % self.create_output_name('xlsx'))
resp.set_header('Content-Encoding', 'gzip')
resp.body = compress_body(results)
else:
if format.lower() in ['hist', 'json', 'geojson']:
resp.set_header('Content-Type', 'application/json')
resp.body = ujson.dumps(results)
elif format.lower() == 'csv':
resp.set_header('Content-Type', 'text/csv')
resp.set_header('Content-disposition', 'attachment;filename=%s;' % self.create_output_name('csv'))
resp.body = results
elif format.lower() == 'xlsx':
resp.set_header('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
resp.set_header('Content-disposition', 'attachment;filename=%s;' % self.create_output_name('xlsx'))
resp.body = results
def create_output_name(self, extension):
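        # e.g. 'EOSS_analysis_2016-09-14T12:34:56.789012.xlsx'; the ISO
        # timestamp makes generated file names sort chronologically.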
return 'EOSS_analysis_%s.%s' % (datetime.datetime.now().isoformat(), extension)
def on_post(self, req, resp, format):
"""Handles POST requests
{
"daterange": [{
"start_date": "05/31/2000",
"end_date": "07/02/2003"
}],
"clouds": 1,
"sensors": [
{"sensor_name": "LANDSAT_ETM", "level": "" }],
"areas": [{
"ref_group": 12,
"ref_id": 6208
}]
}
{"clouds":20,"daterange":[{"start_date":"09/02/2015","end_date":"09/14/2016"}],
"sensors":[{"name":"landsat"}],
"areas":[{"ref_id":362,"ref_group":"9"}]}
"""
# TODO: loop over areas
sensor_list = list()
results = dict()
start_time = time.time()
output = StringIO.StringIO()
while True:
chunk = req.stream.read(4096)
if not chunk:
break
output.write(chunk)
body = output.getvalue()
output.close()
try:
struct = ujson.loads(body.decode('utf-8'))
except ValueError, e:
# try decode x-www-form-urlencoded
query_str = falcon.util.uri.decode(body.decode('utf-8'))
query_str = query_str[query_str.find('{'):query_str.rfind('}') + 1]
try:
struct = ujson.loads(query_str)
except ValueError, e:
                description = 'Given request is neither valid JSON nor a urlencoded POST body.'
raise falcon.HTTPUnsupportedMediaType(description,
href='http://docs.example.com/auth')
try:
for s in struct['sensors']:
if 'sensor_name' in s.keys() and 'level' in s.keys():
sensor_list.append(s)
elif 'name' in s.keys():
if not s['name'].lower() in self.aggregations.keys():
description = 'Sensor label is unknown in aggregation table'
raise falcon.HTTPBadRequest('Catalog', description,
href='http://docs.example.com/auth')
for agg in self.aggregations[s['name'].lower()]:
sensor_list.append({"sensor_name": agg.sensor, "level": agg.level})
else:
description = 'Sensor is not specified in query'
raise falcon.HTTPBadRequest('Catalog', description,
href='http://docs.example.com/auth')
query = self._query_(struct['areas'], struct['daterange'], sensor_list, struct['clouds'])
query_struct = {'area': struct['areas'],
'dates': struct['daterange'],
'sensors': sensor_list, 'clouds': struct['clouds']
}
logger.info('[POST] /catalog/search/result.%s' % format, extra={x:str(y) for x,y in query_struct.iteritems()})
except KeyError, e:
description = 'Search key: %s missing in query.' % e
raise falcon.HTTPBadRequest('KeyError', description,
href='http://docs.example.com/auth')
results['count'] = query.count()
found_dataset = self._get_datasets(query)
results['found_dataset'] = found_dataset
results['found_tiles'] = sorted(list(set([x['tile_identifier'] for x in found_dataset])))
# results.update({'query': struct})
resp.body = ujson.dumps(results)
resp.status = falcon.HTTP_200
results['processing_time'] = time.time() - start_time
if can_zip_response(req.headers):
resp.set_header('Content-Type', 'application/json')
resp.set_header('Content-Encoding', 'gzip')
resp.body = compress_body(ujson.dumps(results))
else:
resp.set_header('Content-Type', 'application/json')
resp.body = ujson.dumps(results)
| 43.740594 | 145 | 0.552175 | 20,725 | 0.93825 | 0 | 0 | 0 | 0 | 0 | 0 | 5,450 | 0.246729 |
3146dc4305a3e628e2c3b3c5cb8a0e5989294e0f | 8,919 | py | Python | tests/ui/test_pvc_ui.py | MeridianExplorer/ocs-ci | a33d5116128b88f176f5eff68a3ef805125cdba1 | [
"MIT"
]
| null | null | null | tests/ui/test_pvc_ui.py | MeridianExplorer/ocs-ci | a33d5116128b88f176f5eff68a3ef805125cdba1 | [
"MIT"
]
| null | null | null | tests/ui/test_pvc_ui.py | MeridianExplorer/ocs-ci | a33d5116128b88f176f5eff68a3ef805125cdba1 | [
"MIT"
]
| null | null | null | import logging
import pytest
from ocs_ci.framework.testlib import tier1, skipif_ui_not_support, ui
from ocs_ci.ocs.ui.pvc_ui import PvcUI
from ocs_ci.framework.testlib import skipif_ocs_version
from ocs_ci.framework.pytest_customization.marks import green_squad
from ocs_ci.ocs.resources.pvc import get_all_pvc_objs, get_pvc_objs
from ocs_ci.ocs import constants
from ocs_ci.helpers import helpers
from ocs_ci.helpers.helpers import wait_for_resource_state, create_unique_resource_name
from ocs_ci.utility.utils import get_ocp_version
from ocs_ci.ocs.ui.views import locators
from ocs_ci.ocs.resources.pod import get_fio_rw_iops
logger = logging.getLogger(__name__)
@ui
@skipif_ocs_version("<4.6")
@skipif_ui_not_support("pvc")
@green_squad
class TestPvcUserInterface(object):
"""
Test PVC User Interface
"""
@tier1
@pytest.mark.parametrize(
argnames=["sc_name", "access_mode", "pvc_size", "vol_mode"],
argvalues=[
pytest.param(
"ocs-storagecluster-cephfs",
"ReadWriteMany",
"2",
"Filesystem",
),
pytest.param(
"ocs-storagecluster-ceph-rbd",
"ReadWriteMany",
"3",
"Block",
),
pytest.param(
"ocs-storagecluster-cephfs",
"ReadWriteOnce",
"10",
"Filesystem",
),
pytest.param(
"ocs-storagecluster-ceph-rbd",
"ReadWriteOnce",
"11",
"Block",
),
pytest.param(
"ocs-storagecluster-ceph-rbd",
"ReadWriteOnce",
"13",
"Filesystem",
),
],
)
def test_create_resize_delete_pvc(
self,
project_factory,
teardown_factory,
setup_ui,
sc_name,
access_mode,
pvc_size,
vol_mode,
):
"""
Test create, resize and delete pvc via UI
"""
# Creating a test project via CLI
pro_obj = project_factory()
project_name = pro_obj.namespace
pvc_ui_obj = PvcUI(setup_ui)
# Creating PVC via UI
pvc_name = create_unique_resource_name("test", "pvc")
pvc_ui_obj.create_pvc_ui(
project_name, sc_name, pvc_name, access_mode, pvc_size, vol_mode
)
pvc_objs = get_all_pvc_objs(namespace=project_name)
pvc = [pvc_obj for pvc_obj in pvc_objs if pvc_obj.name == pvc_name]
assert pvc[0].size == int(pvc_size), (
f"size error| expected size:{pvc_size} \n "
f"actual size:{str(pvc[0].size)}"
)
assert pvc[0].get_pvc_access_mode == access_mode, (
f"access mode error| expected access mode:{access_mode} "
f"\n actual access mode:{pvc[0].get_pvc_access_mode}"
)
assert pvc[0].backed_sc == sc_name, (
f"storage class error| expected storage class:{sc_name} "
f"\n actual storage class:{pvc[0].backed_sc}"
)
assert pvc[0].get_pvc_vol_mode == vol_mode, (
f"volume mode error| expected volume mode:{vol_mode} "
f"\n actual volume mode:{pvc[0].get_pvc_vol_mode}"
)
# Verifying PVC via UI
logger.info("Verifying PVC Details via UI")
pvc_ui_obj.verify_pvc_ui(
pvc_size=pvc_size,
access_mode=access_mode,
vol_mode=vol_mode,
sc_name=sc_name,
pvc_name=pvc_name,
project_name=project_name,
)
logger.info("PVC Details Verified via UI..!!")
# Creating Pod via CLI
logger.info("Creating Pod")
if sc_name in (constants.DEFAULT_STORAGECLASS_RBD,):
interface_type = constants.CEPHBLOCKPOOL
else:
interface_type = constants.CEPHFILESYSTEM
new_pod = helpers.create_pod(
interface_type=interface_type,
pvc_name=pvc_name,
namespace=project_name,
raw_block_pv=vol_mode == constants.VOLUME_MODE_BLOCK,
)
logger.info(f"Waiting for Pod: state= {constants.STATUS_RUNNING}")
wait_for_resource_state(resource=new_pod, state=constants.STATUS_RUNNING)
# Calling the Teardown Factory Method to make sure Pod is deleted
teardown_factory(new_pod)
# Expanding the PVC
logger.info("Pvc Resizing")
new_size = int(pvc_size) + 3
pvc_ui_obj.pvc_resize_ui(
pvc_name=pvc_name, new_size=new_size, project_name=project_name
)
assert new_size > int(
pvc_size
), f"New size of the PVC cannot be less than existing size: new size is {new_size})"
ocp_version = get_ocp_version()
self.pvc_loc = locators[ocp_version]["pvc"]
# Verifying PVC expansion
logger.info("Verifying PVC resize")
expected_capacity = f"{new_size} GiB"
pvc_resize = pvc_ui_obj.verify_pvc_resize_ui(
project_name=project_name,
pvc_name=pvc_name,
expected_capacity=expected_capacity,
)
assert pvc_resize, "PVC resize failed"
logger.info(
"Pvc resize verified..!!"
f"New Capacity after PVC resize is {expected_capacity}"
)
# Running FIO
logger.info("Execute FIO on a Pod")
if vol_mode == constants.VOLUME_MODE_BLOCK:
storage_type = constants.WORKLOAD_STORAGE_TYPE_BLOCK
else:
storage_type = constants.WORKLOAD_STORAGE_TYPE_FS
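        # run_io forwards these options to an fio job; assuming standard fio
        # semantics, size (presumably GiB) stays 1 below the resized capacity
        # for headroom, invalidate=0 skips page-cache invalidation and
        # rate="1000m" caps the I/O bandwidth.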
new_pod.run_io(storage_type, size=(new_size - 1), invalidate=0, rate="1000m")
get_fio_rw_iops(new_pod)
logger.info("FIO execution on Pod successfully completed..!!")
# Checking if the Pod is deleted or not
new_pod.delete(wait=True)
new_pod.ocp.wait_for_delete(resource_name=new_pod.name)
# Deleting the PVC via UI
logger.info(f"Delete {pvc_name} pvc")
pvc_ui_obj.delete_pvc_ui(pvc_name, project_name)
pvc[0].ocp.wait_for_delete(pvc_name, timeout=120)
pvc_objs = get_all_pvc_objs(namespace=project_name)
pvcs = [pvc_obj for pvc_obj in pvc_objs if pvc_obj.name == pvc_name]
        assert len(pvcs) == 0, f"PVC {pvc_name} was not deleted"
@tier1
@pytest.mark.parametrize(
argnames=["sc_name", "access_mode", "clone_access_mode"],
argvalues=[
pytest.param(
"ocs-storagecluster-ceph-rbd",
constants.ACCESS_MODE_RWO,
constants.ACCESS_MODE_RWO,
),
pytest.param(
"ocs-storagecluster-cephfs",
constants.ACCESS_MODE_RWX,
constants.ACCESS_MODE_RWO,
),
],
)
def test_clone_pvc(
self,
project_factory,
teardown_factory,
setup_ui,
sc_name,
access_mode,
clone_access_mode,
):
"""
Test to verify PVC clone from UI
"""
pvc_size = "1"
vol_mode = constants.VOLUME_MODE_FILESYSTEM
# Creating a project from CLI
pro_obj = project_factory()
project_name = pro_obj.namespace
pvc_ui_obj = PvcUI(setup_ui)
# Creating PVC from UI
pvc_name = create_unique_resource_name("test", "pvc")
pvc_ui_obj.create_pvc_ui(
project_name, sc_name, pvc_name, access_mode, pvc_size, vol_mode
)
teardown_factory(get_pvc_objs(pvc_names=[pvc_name], namespace=project_name)[0])
# Verifying PVC details in UI
logger.info("Verifying PVC details in UI")
pvc_ui_obj.verify_pvc_ui(
pvc_size=pvc_size,
access_mode=access_mode,
vol_mode=vol_mode,
sc_name=sc_name,
pvc_name=pvc_name,
project_name=project_name,
)
logger.info("Verified PVC details in UI")
# Clone PVC from UI
clone_pvc_name = f"{pvc_name}-clone"
pvc_ui_obj.pvc_clone_ui(
project_name=project_name,
pvc_name=pvc_name,
cloned_pvc_access_mode=clone_access_mode,
cloned_pvc_name=clone_pvc_name,
)
teardown_factory(
get_pvc_objs(pvc_names=[clone_pvc_name], namespace=project_name)[0]
)
# Verifying cloned PVC details in UI
logger.info("Verifying cloned PVC details in UI")
pvc_ui_obj.verify_pvc_ui(
pvc_size=pvc_size,
access_mode=clone_access_mode,
vol_mode=vol_mode,
sc_name=sc_name,
pvc_name=clone_pvc_name,
project_name=project_name,
)
logger.info("Verified cloned PVC details in UI")
| 31.40493 | 92 | 0.595582 | 8,173 | 0.916358 | 0 | 0 | 8,248 | 0.924767 | 0 | 0 | 2,088 | 0.234107 |
3146f37afae0ee7bf1ffcaaddfa9a23cd4051a59 | 1,955 | py | Python | feature_generation/datasets/CSCW.py | s0lvang/ideal-pancake | f7a55f622b02b03a987d74cfdff1c51288bfb657 | [
"MIT"
]
| 6 | 2020-09-22T06:54:51.000Z | 2021-03-25T05:38:05.000Z | feature_generation/datasets/CSCW.py | s0lvang/ideal-pancake | f7a55f622b02b03a987d74cfdff1c51288bfb657 | [
"MIT"
]
| 12 | 2020-09-21T13:20:49.000Z | 2021-04-07T08:01:12.000Z | feature_generation/datasets/CSCW.py | s0lvang/ideal-pancake | f7a55f622b02b03a987d74cfdff1c51288bfb657 | [
"MIT"
]
| null | null | null | import pandas as pd
from feature_generation.datasets.Timeseries import Timeseries
from os.path import basename
class CSCW(Timeseries):
def __init__(self):
super().__init__("cscw")
self.column_name_mapping = {
"id": self.column_names["subject_id"],
"Fixation Start [ms]": self.column_names["time"],
"Position X": self.column_names["x"],
"Position Y": self.column_names["y"],
"Average Pupil Size [px] X": self.column_names["pupil_diameter"],
"Fixation Duration [ms]": self.column_names["duration"],
"Fixation End [ms]": self.column_names["fixation_end"],
}
self.label = "Posttest.Score"
def prepare_files(self, file_references, metadata_references):
labels = pd.DataFrame()
dataset = []
with metadata_references[0].open("r") as f:
metadata_file = pd.read_csv(f, sep=";")
for file_reference in file_references:
dataset, labels = self.prepare_file(
file_reference, metadata_file, dataset, labels
)
labels = labels.T
return dataset, labels
def prepare_file(self, file_reference, metadata_file, dataset, labels):
participant_name_array = basename(file_reference.reference).split("_")
participant_name = "_".join(participant_name_array[0:3])
participant_name_with_type = "_".join(participant_name_array[0:4])
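        # Example with a hypothetical file name "P01_sess1_run2_groupA_fix.csv":
        # participant_name is "P01_sess1_run2" and participant_name_with_type
        # is "P01_sess1_run2_groupA".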
with file_reference.open("r") as f:
csv = pd.read_csv(f)
csv = csv.rename(columns=self.column_name_mapping)
csv[self.column_names["subject_id"]] = participant_name_with_type
dataset.append(csv)
print(participant_name)
labels[participant_name_with_type] = metadata_file[
metadata_file["Participant"] == participant_name
].iloc[0]
return dataset, labels
def __str__(self):
return super().__str__()
| 39.897959 | 78 | 0.636317 | 1,841 | 0.941688 | 0 | 0 | 0 | 0 | 0 | 0 | 248 | 0.126854 |
314716be771983df86f570f640cfc57d97707513 | 16,103 | py | Python | reactics-smt/rs/reaction_system_with_concentrations.py | arturmeski/reactics | a565b5bf5ec671ccad4bbdab38ad264b9d8369cc | ["MIT"] | 2 | 2019-03-04T08:51:00.000Z | 2019-11-04T10:42:13.000Z | reactics-smt/rs/reaction_system_with_concentrations.py | arturmeski/reactics | a565b5bf5ec671ccad4bbdab38ad264b9d8369cc | ["MIT"] | null | null | null | reactics-smt/rs/reaction_system_with_concentrations.py | arturmeski/reactics | a565b5bf5ec671ccad4bbdab38ad264b9d8369cc | ["MIT"] | null | null | null |
from sys import exit
from colour import *
from rs.reaction_system import ReactionSystem
class ReactionSystemWithConcentrations(ReactionSystem):
def __init__(self):
self.reactions = []
self.meta_reactions = dict()
self.permanent_entities = dict()
self.background_set = []
self.context_entities = [] # legacy. to be removed
self.reactions_by_prod = None
self.max_concentration = 0
self.max_conc_per_ent = dict()
def add_bg_set_entity(self, e):
name = ""
def_max_conc = -1
if type(e) is tuple and len(e) == 2:
name, def_max_conc = e
elif type(e) is str:
name = e
print("\nWARNING: no maximal concentration level specified for:", e, "\n")
else:
raise RuntimeError(
"Bad entity type when adding background set element")
self.assume_not_in_bgset(name)
self.background_set.append(name)
if def_max_conc != -1:
ent_id = self.get_entity_id(name)
self.max_conc_per_ent.setdefault(ent_id, 0)
if self.max_conc_per_ent[ent_id] < def_max_conc:
self.max_conc_per_ent[ent_id] = def_max_conc
if self.max_concentration < def_max_conc:
self.max_concentration = def_max_conc
def get_max_concentration_level(self, e):
if e in self.max_conc_per_ent:
return self.max_conc_per_ent[e]
else:
return self.max_concentration
def is_valid_entity_with_concentration(self, e):
"""Sanity check for entities with concentration"""
if type(e) is tuple:
if len(e) == 2 and type(e[1]) is int:
return True
if type(e) is list:
if len(e) == 2 and type(e[1]) is int:
return True
print("FATAL. Invalid entity+concentration: {:s}".format(e))
exit(1)
return False
def get_state_ids(self, state):
"""Returns entities of the given state without levels"""
return [e for e, c in state]
def has_non_zero_concentration(self, elem):
if elem[1] < 1:
raise RuntimeError(
"Unexpected concentration level in state: " + str(elem))
def process_rip(self, R, I, P, ignore_empty_R=False):
"""Chcecks concentration levels and converts entities names into their ids"""
if R == [] and not ignore_empty_R:
raise RuntimeError("No reactants defined")
reactants = []
for r in R:
self.is_valid_entity_with_concentration(r)
self.has_non_zero_concentration(r)
entity, level = r
reactants.append((self.get_entity_id(entity), level))
if self.max_concentration < level:
self.max_concentration = level
inhibitors = []
for i in I:
self.is_valid_entity_with_concentration(i)
self.has_non_zero_concentration(i)
entity, level = i
inhibitors.append((self.get_entity_id(entity), level))
if self.max_concentration < level:
self.max_concentration = level
products = []
for p in P:
self.is_valid_entity_with_concentration(p)
self.has_non_zero_concentration(p)
entity, level = p
products.append((self.get_entity_id(entity), level))
return reactants, inhibitors, products
def add_reaction(self, R, I, P):
"""Adds a reaction"""
if P == []:
raise RuntimeError("No products defined")
reaction = self.process_rip(R, I, P)
self.reactions.append(reaction)
def add_reaction_without_reactants(self, R, I, P):
"""Adds a reaction"""
if P == []:
raise RuntimeError("No products defined")
reaction = self.process_rip(R, I, P, ignore_empty_R=True)
self.reactions.append(reaction)
def add_reaction_inc(self, incr_entity, incrementer, R, I):
"""Adds a macro/meta reaction for increasing the value of incr_entity"""
reactants, inhibitors, products = self.process_rip(
R, I, [], ignore_empty_R=True)
incr_entity_id = self.get_entity_id(incr_entity)
self.meta_reactions.setdefault(incr_entity_id, [])
self.meta_reactions[incr_entity_id].append(
("inc", self.get_entity_id(incrementer), reactants, inhibitors))
def add_reaction_dec(self, decr_entity, decrementer, R, I):
"""Adds a macro/meta reaction for decreasing the value of incr_entity"""
reactants, inhibitors, products = self.process_rip(
R, I, [], ignore_empty_R=True)
decr_entity_id = self.get_entity_id(decr_entity)
self.meta_reactions.setdefault(decr_entity_id, [])
self.meta_reactions[decr_entity_id].append(
("dec", self.get_entity_id(decrementer), reactants, inhibitors))
def add_permanency(self, ent, I):
"""Sets entity to be permanent unless it is inhibited"""
ent_id = self.get_entity_id(ent)
if ent_id in self.permanent_entities:
raise RuntimeError(
"Permanency for {0} already defined.".format(ent))
inhibitors = self.process_rip([], I, [], ignore_empty_R=True)[1]
self.permanent_entities[ent_id] = inhibitors
def set_context_entities(self, entities):
raise NotImplementedError
def entities_names_set_to_str(self, entities):
s = ""
for entity in entities:
s += entity + ", "
s = s[:-2]
return s
def entities_ids_set_to_str(self, entities):
s = ""
for entity in entities:
s += self.get_entity_name(entity) + ", "
s = s[:-2]
return s
def state_to_str(self, state):
s = ""
for ent, level in state:
s += self.get_entity_name(ent) + "=" + str(level) + ", "
s = s[:-2]
return s
def show_background_set(self):
print(
C_MARK_INFO + " Background set: {" + self.entities_names_set_to_str(self.background_set) + "}")
def show_meta_reactions(self):
print(C_MARK_INFO + " Meta reactions:")
for param_ent, reactions in self.meta_reactions.items():
for r_type, command, reactants, inhibitors in reactions:
if r_type == "inc" or r_type == "dec":
print(" - [ Type=" + repr(r_type) + " Operand=( " + self.get_entity_name(param_ent) + " ) Command=( " + self.get_entity_name(
command) + " ) ] -- ( R={" + self.state_to_str(reactants) + "}, I={" + self.state_to_str(inhibitors) + "} )")
else:
raise RuntimeError(
"Unknown meta-reaction type: " + repr(r_type))
def show_max_concentrations(self):
print(
C_MARK_INFO +
" Maximal allowed concentration levels (for optimized translation to RS):")
for e, max_conc in self.max_conc_per_ent.items():
print(" - {0:^20} = {1:<6}".format(self.get_entity_name(e), max_conc))
def show_permanent_entities(self):
print(C_MARK_INFO + " Permanent entities:")
for e, inhibitors in self.permanent_entities.items():
print(" - {0:^20}{1:<6}".format(self.get_entity_name(e) + ": ",
"I={" + self.state_to_str(inhibitors) + "}"))
def show(self, soft=False):
self.show_background_set()
self.show_reactions(soft)
self.show_permanent_entities()
self.show_meta_reactions()
self.show_max_concentrations()
def get_reactions_by_product(self):
"""Sorts reactions by their products and returns a dictionary of products"""
        if self.reactions_by_prod is not None:
return self.reactions_by_prod
producible_entities = set()
for reaction in self.reactions:
product_entities = [e for e, c in reaction[2]]
producible_entities = producible_entities.union(
set(product_entities))
reactions_by_prod = {}
for p_e in producible_entities:
reactions_by_prod[p_e] = []
rcts_for_p_e = reactions_by_prod[p_e]
for r in self.reactions:
product_entities = [e for e, c in r[2]]
if p_e in product_entities:
reactants = r[0]
inhibitors = r[1]
products = [(e, c) for e, c in r[2] if e == p_e]
prod_conc = products[0][1]
insert_place = None
# we need to order the reactions w.r.t. the concentration levels produced (increasing order)
for i in range(0, len(rcts_for_p_e)):
checked_conc = rcts_for_p_e[i][2][0][1]
if prod_conc <= checked_conc:
insert_place = i
break
                    if insert_place is None:  # empty, or all existing elements are smaller than the one being added
# we append (to the end)
rcts_for_p_e.append((reactants, inhibitors, products))
else:
rcts_for_p_e.insert(
insert_place, (reactants, inhibitors, products))
# save in cache
self.reactions_by_prod = reactions_by_prod
return reactions_by_prod
def get_reaction_system(self):
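        # Flattens the concentration-aware system into a plain ReactionSystem by
        # encoding each level as its own background entity "e#k". Producing
        # concentration c of e yields all of e#1 .. e#c, so "e at level >= k"
        # reduces to ordinary set membership of e#k; reactant and inhibitor
        # thresholds translate the same way.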
rs = ReactionSystem()
for reactants, inhibitors, products in self.reactions:
new_reactants = []
new_inhibitors = []
new_products = []
for ent, conc in reactants:
n = self.get_entity_name(ent) + "#" + str(conc)
rs.ensure_bg_set_entity(n)
new_reactants.append(n)
for ent, conc in inhibitors:
n = self.get_entity_name(ent) + "#" + str(conc)
rs.ensure_bg_set_entity(n)
new_inhibitors.append(n)
for ent, conc in products:
for i in range(1, conc+1):
n = self.get_entity_name(ent) + "#" + str(i)
rs.ensure_bg_set_entity(n)
new_products.append(n)
rs.add_reaction(new_reactants, new_inhibitors, new_products)
for param_ent, reactions in self.meta_reactions.items():
for r_type, command, reactants, inhibitors in reactions:
param_ent_name = self.get_entity_name(param_ent)
new_reactants = []
new_inhibitors = []
for ent, conc in reactants:
n = self.get_entity_name(ent) + "#" + str(conc)
rs.ensure_bg_set_entity(n)
new_reactants.append(n)
for ent, conc in inhibitors:
n = self.get_entity_name(ent) + "#" + str(conc)
rs.ensure_bg_set_entity(n)
new_inhibitors.append(n)
max_cmd_c = self.max_concentration
if command in self.max_conc_per_ent:
max_cmd_c = self.max_conc_per_ent[command]
else:
print(
"WARNING:\n\tThere is no maximal concentration level defined for "
+ self.get_entity_name(command))
print("\tThis is a very bad idea -- expect degraded performance\n")
for l in range(1, max_cmd_c+1):
cmd_ent = self.get_entity_name(command) + "#" + str(l)
rs.ensure_bg_set_entity(cmd_ent)
if r_type == "inc":
# pre_conc -- predecessor concentration
# succ_conc -- successor concentration concentration
for i in range(1, self.max_concentration):
pre_conc = param_ent_name + "#" + str(i)
rs.ensure_bg_set_entity(pre_conc)
new_products = []
succ_value = i+l
for j in range(1, succ_value+1):
if j > self.max_concentration:
break
new_p = param_ent_name + "#" + str(j)
rs.ensure_bg_set_entity(new_p)
new_products.append(new_p)
if new_products != []:
rs.add_reaction(
set(new_reactants + [pre_conc, cmd_ent]),
set(new_inhibitors),
set(new_products))
elif r_type == "dec":
for i in range(1, self.max_concentration+1):
pre_conc = param_ent_name + "#" + str(i)
rs.ensure_bg_set_entity(pre_conc)
new_products = []
succ_value = i-l
for j in range(1, succ_value+1):
if j > self.max_concentration:
break
new_p = param_ent_name + "#" + str(j)
rs.ensure_bg_set_entity(new_p)
new_products.append(new_p)
if new_products != []:
rs.add_reaction(
set(new_reactants + [pre_conc, cmd_ent]),
set(new_inhibitors),
set(new_products))
else:
raise RuntimeError(
"Unknown meta-reaction type: " + repr(r_type))
for ent, inhibitors in self.permanent_entities.items():
max_c = self.max_concentration
if ent in self.max_conc_per_ent:
max_c = self.max_conc_per_ent[ent]
else:
print(
"WARNING:\n\tThere is no maximal concentration level defined for "
+ self.get_entity_name(ent))
print("\tThis is a very bad idea -- expect degraded performance\n")
def e_value(i):
return self.get_entity_name(ent) + "#" + str(i)
for value in range(1, max_c+1):
new_reactants = []
new_inhibitors = []
new_products = []
new_reactants = [e_value(value)]
for e_inh, conc in inhibitors:
n = self.get_entity_name(e_inh) + "#" + str(conc)
rs.ensure_bg_set_entity(n)
new_inhibitors.append(n)
for i in range(1, value+1):
new_products.append(e_value(i))
rs.add_reaction(new_reactants, new_inhibitors, new_products)
return rs
class ReactionSystemWithAutomaton(object):
def __init__(self, reaction_system, context_automaton):
self.rs = reaction_system
self.ca = context_automaton
def show(self, soft=False):
self.rs.show(soft)
self.ca.show()
def is_with_concentrations(self):
if not isinstance(self.rs, ReactionSystemWithConcentrations):
return False
if not isinstance(self.ca, ContextAutomatonWithConcentrations):
return False
return True
def sanity_check(self):
pass
def get_ordinary_reaction_system_with_automaton(self):
if not self.is_with_concentrations():
raise RuntimeError("Not RS/CA with concentrations")
ors = self.rs.get_reaction_system()
oca = self.ca.get_automaton_with_flat_contexts(ors)
return ReactionSystemWithAutomaton(ors, oca)
# EOF
| 37.103687 | 145 | 0.541949 | 16,000 | 0.993604 | 0 | 0 | 0 | 0 | 0 | 0 | 1,821 | 0.113085 |
3147807fd05cce08c68a76d2267bf8400ec93917 | 9,241 | py | Python | src/models/cnn_train.py | zh272/AIGOGO | 0255cf8c4776358b73ee6b1792325a151a0cfa78 | ["BSD-3-Clause"] | null | null | null | src/models/cnn_train.py | zh272/AIGOGO | 0255cf8c4776358b73ee6b1792325a151a0cfa78 | ["BSD-3-Clause"] | null | null | null | src/models/cnn_train.py | zh272/AIGOGO | 0255cf8c4776358b73ee6b1792325a151a0cfa78 | ["BSD-3-Clause"] | null | null | null |
import os
import time
import fire
import torch
import random
import numpy as np
import pandas as pd
import torch.nn.functional as F
## to detach from monitor
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from trainer import Trainer
from model import ConvNet1D
from helpers import get_dataset, test_epoch, ready, save_obj, load_obj
def get_submission(
X_train, X_valid, y_train, y_valid, X_test, model=ConvNet1D, max_epoch=200, base_lr=0.1,
momentum=0.9, weight_decay=0.0001, batch_size = 128, train_params={}, plot=True,
test_along=False, optimizer='sgd', hyper={}, save=False, load=False, mdl_name='cnn.pt'
):
train_set, valid_set, X_test_np, X_train_np, X_valid_np, _ = get_dataset(
X_train.values, y_train.values, X_test.values, X_valid.values, y_valid.values
)
PATH = './saved_model'
if not os.path.isdir(PATH): os.makedirs(PATH)
start_time = time.time()
end_time = start_time
if load:
trainer = Trainer(
torch.load(os.path.join(PATH, mdl_name)), train_set=train_set, loss_fn=F.l1_loss, hyper=hyper,
valid_set=valid_set, batch_size=batch_size, epochs=max_epoch, optimizer=optimizer
)
else:
trainer = Trainer(
model(**train_params), train_set=train_set, loss_fn=F.l1_loss, hyper=hyper,
valid_set=valid_set, batch_size=batch_size, epochs=max_epoch, optimizer=optimizer
)
valid_hist = []
for epochs in range(max_epoch):
trainer.train_epoch()
temp_lr = trainer.optimizer.param_groups[0]['lr']
if test_along:
temp_valid = trainer.loss_epoch()
valid_hist.append(temp_valid)
print('Epoch {:3}: Training MAE={:8.2f}, Valid MAE={:8.2f}, lr={}'.format(epochs, trainer.eval(), temp_valid, temp_lr))
else:
print('Epoch {:3}: Training MAE={:8.2f}, lr={}'.format(epochs, trainer.eval(), temp_lr))
end_time = time.time()
if plot:
t_step = np.arange(0, max_epoch, 1)
train_hist = trainer.evaluator.hist
fig_path = 'figures'
if not os.path.isdir(fig_path): os.makedirs(fig_path)
plt.figure()
plt.plot(t_step, train_hist, 'r', ls='-', label='training MAE')
if test_along:
plt.plot(t_step, valid_hist, 'b', ls='--', label='validation MAE')
plt.legend(loc='best')
plt.xlabel('steps')
plt.title('Training and Validation MAE')
plt.grid()
plt.savefig(os.path.join(fig_path, 'training_plot.png'))
plt.close()
if save:
torch.save(trainer.model, os.path.join(PATH, mdl_name))
train_loss = trainer.loss_epoch(load='train')
valid_loss = trainer.loss_epoch(load='valid')
state_dict = trainer.model.state_dict()
if torch.cuda.device_count() > 1:
input_weights = state_dict['module.regressor.fc0.weight'].cpu().numpy()
else:
input_weights = state_dict['regressor.fc0.weight'].cpu().numpy()
# assume std deviation of each feature is 1
avg_w = np.mean(np.abs(input_weights), axis=0)
feature_importances = avg_w
feature_names = X_train.columns.values
sorted_idx = np.argsort(feature_importances*-1) # descending order
    summary = '====== {} Training Summary ======\n'.format(model.__name__)
summary += '>>> epochs={}, lr={}, momentum={}, weight_decay={}\n'.format(max_epoch,base_lr,momentum,weight_decay)
summary += '>>> schedule={}\n'.format(hyper['lr_schedule'])
    summary += '>>> train_params={}, optimizer="{}", batch_size={}\n'.format(train_params, optimizer, batch_size)
for idx in sorted_idx:
summary += '[{:<25s}] {:<10.4f}\n'.format(feature_names[idx], feature_importances[idx])
summary += '>>> training_time={:10.2f}min\n'.format((end_time-start_time)/60)
summary += '>>> Final MAE: {:10.4f}(Training), {:10.4f}(Validation)\n'.format(train_loss,valid_loss)
# Generate submission
test_output = trainer.predict(torch.FloatTensor(X_test_np)).cpu().data.numpy()
submission = pd.DataFrame(data=test_output,index=X_test.index, columns=['Next_Premium'])
train_output = trainer.predict(torch.FloatTensor(X_train_np)).cpu().data.numpy()
submission_train = pd.DataFrame(data=train_output,index=X_train.index, columns=['Next_Premium'])
valid_output = trainer.predict(torch.FloatTensor(X_valid_np)).cpu().data.numpy()
submission_valid = pd.DataFrame(data=valid_output,index=X_valid.index, columns=['Next_Premium'])
return {
'model': trainer, 'submission': submission,
'submission_train':submission_train, 'submission_valid':submission_valid,
'valid_loss':valid_loss, 'summary':summary
}
def read_interim_data(file_name, index_col='Policy_Number'):
'''
In: file_name
Out: interim_data
Description: read data from directory /data/interim
'''
    # set the path of the interim data
interim_data_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), os.path.pardir, os.path.pardir, 'data', 'interim'
)
file_path = os.path.join(interim_data_path, file_name)
interim_data = pd.read_csv(file_path, index_col=index_col)
    return interim_data
def write_processed_data(df, suffix=None):
    '''
    In:
        DataFrame(df),
        str(suffix),
    Out:
        None
    Description:
        Write sample data to directory /data/processed
    '''
    processed_data_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), os.path.pardir, os.path.pardir, 'data', 'processed'
    )
    if suffix is None:
        file_name = 'testing-set.csv'
    else:
        file_name = 'testing-set_{}.csv'.format(suffix)
    write_sample_path = os.path.join(processed_data_path, file_name)
    df.to_csv(write_sample_path)
# empirical scale: weight_decay=0.0001
def demo(
epochs=100, base_lr=0.0001, momentum=0.9, weight_decay=0,
batch_size=128, optimizer='sgd', dropout=False, seed=None,
get_train=False, get_test=False, save=False, load=False
):
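    """Train ConvNet1D on the interim feature files and print a training
    summary; optionally save/load the model and write submission CSVs
    (get_test / get_train)."""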
if seed is not None:
rand_reset(seed)
# X_train = read_interim_data('X_train_prefs.csv')
# y_train = read_interim_data('y_train_prefs.csv')
# X_valid = read_interim_data('X_valid_prefs.csv')
# y_valid = read_interim_data('y_valid_prefs.csv')
# X_test = read_interim_data('X_test_prefs.csv')
X_train = read_interim_data('X_train_new.csv')
y_train = read_interim_data('y_train_new.csv')
X_valid = read_interim_data('X_valid_new.csv')
y_valid = read_interim_data('y_valid_new.csv')
X_test = read_interim_data('X_test_new.csv')
feature_list = [feature for feature in X_train.columns.values if 'cat_' not in feature]
num_features = len(feature_list)
print('Number of features: {}'.format(num_features))
# Filter features
X_train = X_train[feature_list]
X_valid = X_valid[feature_list]
X_test = X_test[feature_list]
### Fill Missing Values
X_train = X_train.apply(lambda x:x.fillna(-1))
X_valid = X_valid.apply(lambda x:x.fillna(-1))
X_test = X_test.apply(lambda x:x.fillna(-1))
# begin training
# n_input = X_train.shape[1]
train_params = {
'num_cv_filter': [1,40,80],
'num_fc_neuron': [20,5,1],
'dropout': dropout
}
optim_hyper = {
'lr':base_lr,
'momentum':momentum,
'weight_decay':weight_decay,
'lr_schedule':{
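            # assumed Trainer semantics: keys are epoch thresholds, values are
            # the learning rate to use from that point on (step decay at 1/4,
            # 1/2, and 3/4 of training)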
epochs//4:base_lr,
epochs//2:base_lr/5,
epochs//4*3:base_lr/50,
epochs: base_lr/200
}
}
model_output = get_submission(
X_train, X_valid, y_train, y_valid, X_test,
model=ConvNet1D, max_epoch=epochs, base_lr=base_lr,
momentum=momentum, weight_decay=weight_decay,
batch_size = batch_size, train_params=train_params,
test_along=True, optimizer=optimizer, hyper=optim_hyper,
save=save, load=load
)
summary = model_output['summary']
summary += '>>> random seed: {}\n'.format(seed)
print(summary)
# generate submission
if get_test:
        write_processed_data(model_output['submission'], suffix='mlptest{}'.format(int(model_output['valid_loss'])))
with open('summary_mlp{}.txt'.format(int(model_output['valid_loss'])), 'w') as f:
f.write(summary)
if get_train:
        write_processed_data(model_output['submission_train'], suffix='mlptrain')
        write_processed_data(model_output['submission_valid'], suffix='mlpvalid')
def rand_reset(seed):
random.seed(seed)
torch.manual_seed(random.randint(0,1000))
torch.cuda.manual_seed_all(random.randint(0,1000))
np.random.seed(random.randint(0,1000))
if __name__ == '__main__':
# Example usage: "python nn_train.py --epochs 100"
    fire.Fire(demo)
| 37.872951 | 136 | 0.634455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,060 | 0.22292 |
31489fbab4ef2b5a4414a0a8b4a377eb59e53266 | 730 | py | Python | tests/fetchers/test_hvdcLineCktOwnersFetcher.py | rohit98077/mis_outages_ingest | 1f563962ea8f93ed1825a7e07bde231b67240bcb | ["MIT"] | null | null | null | tests/fetchers/test_hvdcLineCktOwnersFetcher.py | rohit98077/mis_outages_ingest | 1f563962ea8f93ed1825a7e07bde231b67240bcb | ["MIT"] | 5 | 2020-09-19T08:52:48.000Z | 2021-06-02T03:29:38.000Z | tests/fetchers/test_hvdcLineCktOwnersFetcher.py | rohit98077/mis_outages_ingest | 1f563962ea8f93ed1825a7e07bde231b67240bcb | ["MIT"] | 2 | 2020-09-19T08:45:47.000Z | 2020-11-07T12:00:24.000Z |
import unittest
from src.fetchers.hvdcLineCktOwnersFetcher import getOwnersForHvdcLineCktIds
from src.appConfig import getConfig
class TestHvdcLineCktOwnersFetcher(unittest.TestCase):
appConfig: dict = {}
def setUp(self):
self.appConfig = getConfig()
def test_run(self) -> None:
"""tests the function that fetches the owners of
HvdcLineCkts from reporting software
"""
elemIds = [12, 13]
ownersDict = getOwnersForHvdcLineCktIds(
self.appConfig['reportsConStr'], elemIds)
expectedDict = {
12: "POWERGRID-WR1 (PGCIL)", 13: "POWERGRID-SR,POWERGRID-WR1 (PGCIL)"}
        self.assertEqual(ownersDict, expectedDict)
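# standard unittest entry point (assumed addition so the file runs directly)
if __name__ == '__main__':
    unittest.main()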
| 31.73913 | 82 | 0.680822 | 576 | 0.789041 | 0 | 0 | 0 | 0 | 0 | 0 | 180 | 0.246575 |
314a3567674f4832f50804842163798ba6755e31 | 1,322 | py | Python | 3rdParty/boost/1.71.0/libs/python/test/iterator.py | rajeev02101987/arangodb | 817e6c04cb82777d266f3b444494140676da98e2 | ["Apache-2.0"] | 12,278 | 2015-01-29T17:11:33.000Z | 2022-03-31T21:12:00.000Z | 3rdParty/boost/1.71.0/libs/python/test/iterator.py | rajeev02101987/arangodb | 817e6c04cb82777d266f3b444494140676da98e2 | ["Apache-2.0"] | 9,469 | 2015-01-30T05:33:07.000Z | 2022-03-31T16:17:21.000Z | 3rdParty/boost/1.71.0/libs/python/test/iterator.py | rajeev02101987/arangodb | 817e6c04cb82777d266f3b444494140676da98e2 | ["Apache-2.0"] | 892 | 2015-01-29T16:26:19.000Z | 2022-03-20T07:44:30.000Z |
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from __future__ import print_function
'''
>>> from iterator_ext import *
>>> from input_iterator import *
>>> x = list_int()
>>> x.push_back(1)
>>> x.back()
1
>>> x.push_back(3)
>>> x.push_back(5)
>>> for y in x:
... print(y)
1
3
5
>>> z = range(x)
>>> for y in z:
... print(y)
1
3
5
Range2 wraps a transform_iterator which doubles the elements it
traverses. This proves we can wrap input iterators
>>> z2 = range2(x)
>>> for y in z2:
... print(y)
2
6
10
>>> l2 = two_lists()
>>> for y in l2.primes:
... print(y)
2
3
5
7
11
13
>>> for y in l2.evens:
... print(y)
2
4
6
8
10
12
>>> ll = list_list()
>>> ll.push_back(x)
>>> x.push_back(7)
>>> ll.push_back(x)
>>> for a in ll: #doctest: +NORMALIZE_WHITESPACE
... for b in a:
... print(b, end='')
... print('')
...
1 3 5
1 3 5 7
'''
def run(args = None):
import sys
import doctest
if args is not None:
sys.argv = args
return doctest.testmod(sys.modules.get(__name__))
if __name__ == '__main__':
print("running...")
import sys
status = run()[0]
if (status == 0): print("Done.")
sys.exit(status)
| 16.734177 | 71 | 0.599849 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 998 | 0.754917 |
314a37a6a50187301c229c5c2b0248cf18eb14ce | 5,995 | py | Python | py/locationdb/geonames.py | acorg/locationdb | f3a70e254c3b81e12b8ddbbd02bb4baf7b1395b7 | ["MIT"] | null | null | null | py/locationdb/geonames.py | acorg/locationdb | f3a70e254c3b81e12b8ddbbd02bb4baf7b1395b7 | ["MIT"] | null | null | null | py/locationdb/geonames.py | acorg/locationdb | f3a70e254c3b81e12b8ddbbd02bb4baf7b1395b7 | ["MIT"] | null | null | null |
# -*- Python -*-
# license
# license.
# ======================================================================
"""Looks name up in the [geonames database](http://www.geonames.org/).
[GeoNames Search Webservice API](http://www.geonames.org/export/geonames-search.html)
"""
import sys, os, urllib.request, json, time
from pathlib import Path
import logging; module_logger = logging.getLogger(__name__)
from .utilities import is_chinese
# ======================================================================
def geonames(name):
if not name:
return name
if is_chinese(name):
r = _lookup_chinese(name=name)
else:
r = _lookup("search", isNameRequired="true", name=name)
return r
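# Illustrative call and result shape (field values are examples only; real
# output depends on the live geonames.org service):
#   geonames("Cambridge")
#   -> [{"name": "Cambridge", "province": "England", "country": "United Kingdom",
#        "latitude": "52.2", "longitude": "0.11"}, ...]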
# ----------------------------------------------------------------------
def _lookup(feature, **args):
def make(entry):
if entry.get("fcl") in ["A", "P"]:
return {
# "local_name": entry[],
"name": entry["toponymName"],
"province": entry["adminName1"],
"country": entry["countryName"],
"latitude": entry["lat"],
"longitude": entry["lng"],
}
else:
return None
return _get(feature, make, args)
# ----------------------------------------------------------------------
def _get(feature, result_maker, args):
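    # the free geonames.org tier is rate-limited per hour; on an hourly-limit
    # message the loop below sleeps and retries, any other failure is raised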
args.update({"username": "acorg", "type": "json"})
url = "http://api.geonames.org/{}?{}".format(feature, urllib.parse.urlencode(args))
# module_logger.debug('_lookup {!r}'.format(url))
while True:
rj = json.loads(urllib.request.urlopen(url=url).read().decode("utf-8"))
try:
return [e2 for e2 in (result_maker(e1) for e1 in rj["geonames"]) if e2]
except KeyError:
if "the hourly limit of" in rj.get("status", {}).get("message"):
print(f"WARNING: {rj['status']['message']}", file=sys.stderr)
seconds_to_wait = 120
print(f"WARNING: about to wait {seconds_to_wait} seconds", file=sys.stderr)
time.sleep(seconds_to_wait)
else:
print(f"ERROR: {rj}", file=sys.stderr)
raise RuntimeError(str(rj))
except Exception as err:
print(f"ERROR: {rj}: {err}", file=sys.stderr)
raise RuntimeError(f"{rj}: {err}")
# ----------------------------------------------------------------------
def _lookup_chinese(name):
if len(name) > 3:
r = []
if provinces := _find_chinese_province(name):
province = provinces[0]
            county = _find_chinese_county(name, province)
if county:
r = [{
"local_name": name,
"name": _make_chinese_name(province, county),
"province": _make_province_name(province),
"country": province["countryName"],
"latitude": county["lat"],
"longitude": county["lng"],
}]
else:
def make(entry):
province_name = _make_province_name(entry)
return {
"local_name": name,
"name": province_name,
"province": province_name,
"country": entry["countryName"],
"latitude": entry["lat"],
"longitude": entry["lng"],
}
r = [make(e) for e in _find_chinese_province(name)]
return r
# ----------------------------------------------------------------------
def _find_chinese_province(name):
r = _get("search", lambda e: e if e["name"] == name[:2] else None, {"isNameRequired": "true", "name_startsWith": name[:2], "fclass": "A", "fcode": "ADM1", "lang": "cn"})
# module_logger.debug('name: {!r} : {!r}'.format(name[:2], r))
if not r: # Inner Mongolia is written using 3 Hanzi
r = _get("search", lambda e: e if e["name"] == name[:3] else None, {"isNameRequired": "true", "name_startsWith": name[:3], "fclass": "A", "fcode": "ADM1", "lang": "cn"})
return r
# ----------------------------------------------------------------------
def _make_province_name(entry):
r = entry["toponymName"].upper()
space_pos = r.find(' ', 6 if r[:6] == "INNER " else 0)
if space_pos >= 0:
r = r[:space_pos]
    return r
# ----------------------------------------------------------------------
def _find_chinese_county(full_name, province):
name = full_name[len(province["name"]):]
r = _get("search", lambda e: e, {"isNameRequired": "true", "name_startsWith": name, "fclass": "A", "fcode": "ADM3", "adminCode1": province["adminCode1"], "lang": "cn"})
if not r:
r = _get("search", lambda e: e, {"isNameRequired": "true", "name_startsWith": name, "adminCode1": province["adminCode1"], "lang": "cn"})
# module_logger.debug('_find_chinese_county {}'.format(r))
return r[0] if r else None
# ----------------------------------------------------------------------
def _make_chinese_name(province, county):
return _make_province_name(province) + " " + _make_county_name(county)
# ----------------------------------------------------------------------
def _make_county_name(county):
def remove_suffix(source, suffix):
if source[-len(suffix):] == suffix:
source = source[:-len(suffix)]
return source
def remove_apostrophe(source):
return source.replace("’", "")
r = county["toponymName"].upper()
r1 = remove_suffix(r, " ZIZHIXIAN")
if r1 != r:
r = remove_suffix(r1, "ZU")
else:
for s in [" QU", " XIAN", " SHI"]:
r2 = remove_suffix(r, s)
if r2 != r:
r = r2
break
r = remove_apostrophe(r)
return r
# ======================================================================
### Local Variables:
### eval: (if (fboundp 'eu-rename-buffer) (eu-rename-buffer))
### End:
| 37.006173 | 177 | 0.481568 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,290 | 0.381858 |
314a686b41ad5ce98c4fc71d262791e5baa688cc | 117 | py | Python | python/testData/completion/notImportedQualifiedName/UseImportPriorityWhenAddingImport/main.py | 06needhamt/intellij-community | 63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b | ["Apache-2.0"] | null | null | null | python/testData/completion/notImportedQualifiedName/UseImportPriorityWhenAddingImport/main.py | 06needhamt/intellij-community | 63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b | ["Apache-2.0"] | null | null | null | python/testData/completion/notImportedQualifiedName/UseImportPriorityWhenAddingImport/main.py | 06needhamt/intellij-community | 63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b | ["Apache-2.0"] | null | null | null |
import subprocess
import sys
import django.conf
import django.utils.encoding
subprocess.Popen
sys.argv
plt.<caret>
| 11.7 | 28 | 0.820513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
314b449f6ae2a854d067dfd6393a546317c348b5 | 2,921 | py | Python | Array/271EncodeDecodeStrings.py | john-the-dev/leetcode | f1038a5357c841a0d3c8aca1ae1a7d0387f77545 | ["Apache-2.0"] | null | null | null | Array/271EncodeDecodeStrings.py | john-the-dev/leetcode | f1038a5357c841a0d3c8aca1ae1a7d0387f77545 | ["Apache-2.0"] | null | null | null | Array/271EncodeDecodeStrings.py | john-the-dev/leetcode | f1038a5357c841a0d3c8aca1ae1a7d0387f77545 | ["Apache-2.0"] | null | null | null |
# 271. Encode and Decode Strings
'''
Design an algorithm to encode a list of strings to a string. The encoded string is then sent over the network and is decoded back to the original list of strings.
Machine 1 (sender) has the function:
string encode(vector<string> strs) {
// ... your code
return encoded_string;
}
Machine 2 (receiver) has the function:
vector<string> decode(string s) {
//... your code
return strs;
}
So Machine 1 does:
string encoded_string = encode(strs);
and Machine 2 does:
vector<string> strs2 = decode(encoded_string);
strs2 in Machine 2 should be the same as strs in Machine 1.
Implement the encode and decode methods.
Note:
The string may contain any possible characters out of 256 valid ascii characters. Your algorithm should be generalized enough to work on any possible characters.
Do not use class member/global/static variables to store states. Your encode and decode algorithms should be stateless.
Do not rely on any library method such as eval or serialize methods. You should implement your own encode/decode algorithm.
'''
from common import *
'''
Encode to numbers and decode from numbers.
O(N) runtime for both encode and decode, in which N is total # of characters in strs. O(N) storage.
Beat 5% runtime, 29% storage of all Leetcode submissions.
'''
class Codec:
def toNum(self, s):
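        # pack s into one big integer, 8 bits per character; leading chr(0)
        # characters would be lost as leading zero bytes, so count them
        # separately (restored later by toStr)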
num,zero,i,n = 0,0,0,len(s)
while i < n and ord(s[i]) == 0:
zero += 1
i += 1
while i < n:
num = num << 8
num += ord(s[i])
i += 1
return [zero,num]
def toStr(self, zero, num):
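        # unpack least-significant byte first, re-attach the counted run of
        # leading chr(0) characters, then reverse into the original order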
s = []
while num > 0:
s.append(chr(num % 256))
num = num >> 8
s.extend([chr(0)]*zero)
return ''.join(s[::-1])
def encode(self, strs: [str]) -> str:
"""Encodes a list of strings to a single string.
"""
out = []
for s in strs:
zero,num = self.toNum(s)
out.append('{}:{}'.format(zero,num))
return ','.join(out)
def decode(self, s: str) -> [str]:
"""Decodes a single string to a list of strings.
"""
out = []
strs = s.split(',') if len(s) > 0 else []
for s in strs:
zero,num = s.split(':')
out.append(self.toStr(int(zero),int(num)))
return out
# Your Codec object will be instantiated and called as such:
# codec = Codec()
# codec.decode(codec.encode(strs))
# Tests.
codec = Codec()
strs = ['Great','Nice']
encoded = codec.encode(strs)
assert(codec.decode(encoded) == strs)
strs = ['{}leading'.format(chr(0)),'Nice']
encoded = codec.encode(strs)
assert(codec.decode(encoded) == strs)
strs = ['{}l:eadi.ng'.format(chr(0)),'{}leading,{}'.format(chr(0),chr(1))]
encoded = codec.encode(strs)
assert(codec.decode(encoded) == strs)
strs = []
encoded = codec.encode(strs)
assert(codec.decode(encoded) == strs)
| 30.113402 | 162 | 0.624444 | 1,050 | 0.359466 | 0 | 0 | 0 | 0 | 0 | 0 | 1,599 | 0.547415 |
314bff70a566f2c7c9e67a0b7e9a88c99668d8c8 | 857 | py | Python | ois_api_client/v2_0/deserialization/deserialize_invoice_number_query.py | peterkulik/ois_api_client | 51dabcc9f920f89982c4419bb058f5a88193cee0 | ["MIT"] | 7 | 2020-10-22T08:15:29.000Z | 2022-01-27T07:59:39.000Z | ois_api_client/v2_0/deserialization/deserialize_invoice_number_query.py | peterkulik/ois_api_client | 51dabcc9f920f89982c4419bb058f5a88193cee0 | ["MIT"] | null | null | null | ois_api_client/v2_0/deserialization/deserialize_invoice_number_query.py | peterkulik/ois_api_client | 51dabcc9f920f89982c4419bb058f5a88193cee0 | ["MIT"] | null | null | null |
from typing import Optional
import xml.etree.ElementTree as ET
from ...xml.XmlReader import XmlReader as XR
from ..namespaces import API
from ...deserialization.create_enum import create_enum
from ..dto.InvoiceNumberQuery import InvoiceNumberQuery
from ..dto.InvoiceDirection import InvoiceDirection
def deserialize_invoice_number_query(element: ET.Element) -> Optional[InvoiceNumberQuery]:
if element is None:
return None
result = InvoiceNumberQuery(
invoice_number=XR.get_child_text(element, 'invoiceNumber', API),
invoice_direction=create_enum(InvoiceDirection, XR.get_child_text(element, 'invoiceDirection', API)),
batch_index=XR.get_child_int(element, 'batchIndex', API),
supplier_tax_number=XR.get_child_text(element, 'supplierTaxNumber', API),
)
return result
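# Illustrative usage (the parent element here is an assumption, not part of
# this module):
#   element = parent.find('invoiceNumberQuery')  # an ET.Element, or None
#   result = deserialize_invoice_number_query(element)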
| 37.26087 | 109 | 0.771295 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.074679 |
314c834bc34744a5153b8449b8d6ede84e3fa535 | 1,614 | py | Python | scripts/markov_rulesets.py | takuyakanbr/covfefe | 8d6a88c838945fc8c8b8c88d19b775ec48a998b7 | ["BSD-3-Clause"] | null | null | null | scripts/markov_rulesets.py | takuyakanbr/covfefe | 8d6a88c838945fc8c8b8c88d19b775ec48a998b7 | ["BSD-3-Clause"] | 4 | 2019-12-02T17:39:27.000Z | 2019-12-02T17:43:49.000Z | scripts/markov_rulesets.py | takuyakanbr/covfefe | 8d6a88c838945fc8c8b8c88d19b775ec48a998b7 | ["BSD-3-Clause"] | null | null | null |
# Script to generate the necessary grammar rules for the
# markov generator output type
# Dataset:
# http://www.drmaciver.com/2009/12/i-want-one-meelyun-sentences/
import re
ALPHA = ' abcdefghijklmnopqrstuvwxyz'
# read data from file
with open('sentences', 'r', encoding="utf8") as f:
content = f.read().splitlines()
n = len(content)
freq = {}
# process sentences
for i in range(n):
content[i] = re.sub('[^a-z]+', ' ', content[i].lower())
for word in content[i].split(' '):
if len(word) < 1: continue
word = ' ' + word + ' '
# sum up next-letter frequencies
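        # context c is the current char, extended to a bigram (previous char
        # plus current char) whenever the previous char is not a word boundary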
pc = ''
for j in range(len(word) - 1):
c = word[j]
if pc != ' ': c = pc + c
nc = word[j+1]
if c not in freq:
freq[c] = {}
for a in ALPHA:
freq[c][a] = 0
freq[c][nc] += 1
pc = word[j]
# normalize frequencies
for c, d in freq.items():
sum_ = sum(d.values())
for nc in d:
d[nc] /= sum_
# helper functions for printing rulesets
def make_name(c):
if c == ' ': return '@mstart'
return '@m' + c
def make_option(pc, c, nc):
if nc == ' ': return pc + c + '|'
if c == ' ': return '@m' + nc + '|'
if len(pc) == 0: return '@m' + c + nc + '|'
return pc + ',@m' + c + nc + '|'
# print rulesets
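# Each printed rule has the form "@m<context>=<option>|<option>|...":
# '@mstart' chains into a first-letter state; other options emit a literal
# character and/or chain into the next bigram state, and terminal options
# close the word. More probable transitions are duplicated (mult) so that a
# uniform choice over options approximates the measured distribution.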
for c, d in freq.items():
rule = make_name(c) + '='
pc = c[:-1]
c = c[-1]
for nc in d:
if d[nc] <= 0.0055: continue
mult = max(1, int(d[nc] / 0.01))
rule += make_option(pc, c, nc) * mult
print(rule[:-1])
| 24.454545 | 64 | 0.502478 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 436 | 0.270136 |