hexsha (string, len 40) | size (int64, 5 to 2.06M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, len 3 to 248) | max_stars_repo_name (string, len 5 to 125) | max_stars_repo_head_hexsha (string, len 40 to 78) | max_stars_repo_licenses (list, len 1 to 10) | max_stars_count (int64, 1 to 191k, nullable) | max_stars_repo_stars_event_min_datetime (string, len 24, nullable) | max_stars_repo_stars_event_max_datetime (string, len 24, nullable) | max_issues_repo_path (string, len 3 to 248) | max_issues_repo_name (string, len 5 to 125) | max_issues_repo_head_hexsha (string, len 40 to 78) | max_issues_repo_licenses (list, len 1 to 10) | max_issues_count (int64, 1 to 67k, nullable) | max_issues_repo_issues_event_min_datetime (string, len 24, nullable) | max_issues_repo_issues_event_max_datetime (string, len 24, nullable) | max_forks_repo_path (string, len 3 to 248) | max_forks_repo_name (string, len 5 to 125) | max_forks_repo_head_hexsha (string, len 40 to 78) | max_forks_repo_licenses (list, len 1 to 10) | max_forks_count (int64, 1 to 105k, nullable) | max_forks_repo_forks_event_min_datetime (string, len 24, nullable) | max_forks_repo_forks_event_max_datetime (string, len 24, nullable) | content (string, len 5 to 2.06M) | avg_line_length (float64, 1 to 1.02M) | max_line_length (int64, 3 to 1.03M) | alphanum_fraction (float64, 0 to 1) | count_classes (int64, 0 to 1.6M) | score_classes (float64, 0 to 1) | count_generators (int64, 0 to 651k) | score_generators (float64, 0 to 1) | count_decorators (int64, 0 to 990k) | score_decorators (float64, 0 to 1) | count_async_functions (int64, 0 to 235k) | score_async_functions (float64, 0 to 1) | count_documentation (int64, 0 to 1.04M) | score_documentation (float64, 0 to 1)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f203d39ef60e2f5a44270e70b2db9a749876f722 | 5,788 | py | Python | 4USCityEmotion/get_flickr_photos.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | ["MIT"] | 3 | 2021-07-15T15:58:52.000Z | 2021-07-16T13:22:47.000Z | 4USCityEmotion/get_flickr_photos.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | ["MIT"] | null | null | null | 4USCityEmotion/get_flickr_photos.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | ["MIT"] | 4 | 2017-08-04T12:41:06.000Z | 2019-01-31T14:55:10.000Z | # coding:utf-8
# version:python3.5.1
# author:kyh
import flickrapi
import datetime
import psycopg2
import time
# Flickr photo class
class flickr_photo(object):
def __init__(self, photo_id, photo_city, photo_url):
self.id = photo_id
self.city = photo_city
self.url = photo_url
    # Insert this photo into the database
def insert_db(self, db_connection, db_cursor):
try:
sql_command_insert = "INSERT INTO photo(id,url,city) VALUES({0},'{1}','{2}')".format(self.id,
self.url,
self.city
)
db_cursor.execute(sql_command_insert)
db_connection.commit()
return True
except Exception as e:
with open('log.txt','a') as log:
log.writelines(str(e))
db_connection.rollback()
return False
# Connect to the database
def db_connect():
try:
connection = psycopg2.connect(database="PlaceEmotion", user="postgres",
password="postgres", host="127.0.0.1", port="5432")
cursor = connection.cursor()
print("Database Connection has been opened completely!")
return connection, cursor
except Exception as e:
with open('log.txt','a') as log:
log.writelines(str(e))
# Query a location whose data still needs to be mined
def query_location(db_connection, db_cursor):
sql_command_select = "SELECT id, city_name, lat, lon FROM location WHERE start_query='FALSE' LIMIT 1"
db_cursor.execute(sql_command_select)
db_connection.commit()
location = db_cursor.fetchone()
    # If such a location exists, record its coordinates for mining
if location is not None:
location_id = location[0]
city = location[1]
lat = location[2]
lon = location[3]
sql_command_update = "UPDATE location SET start_query='TRUE' WHERE id ='{0}'".format(location_id)
db_cursor.execute(sql_command_update)
db_connection.commit()
return city, lat, lon
    # No such location left, so every location has already been mined
else:
return None, None, None
# Flickr API credentials
def query_api(db_connection, db_cursor):
sql_command_select = "SELECT key, secret FROM API WHERE type = 'flickr' AND start_use = FALSE LIMIT 1"
db_cursor.execute(sql_command_select)
db_connection.commit()
api = db_cursor.fetchone()
    # If an unused API key exists, record it for mining
if api is not None:
key = api[0]
secret = api[1]
sql_command_update = "UPDATE API SET start_use='TRUE' WHERE key='{0}'".format(key)
db_cursor.execute(sql_command_update)
db_connection.commit()
api_key = u'{0}'.format(key)
api_secret = u'{0}'.format(secret)
flickr = flickrapi.FlickrAPI(api_key, api_secret, cache=True)
print("API:", api_key, api_secret)
return flickr, key
    # No such API key left, so mining is finished
else:
return None, None
# Compute time windows
def compute_time(db_connection, db_cursor, location, latitude, longitude, flickr_api):
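    # Walk the calendar in 10-day windows from 2012-01-01 through November 2018, fetching photos for each window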
DATE=datetime.date(2012,1,1)
while(True):
DATE2=DATE+datetime.timedelta(days=10)
datemin ="{0}-{1}-{2}".format(DATE.year,DATE.month,DATE.day)
datemax ="{0}-{1}-{2}".format(DATE2.year,DATE2.month,DATE2.day)
DATE=DATE+datetime.timedelta(days=10)
#print(datemin,datemax)
get_photo_from_location(db_connection, db_cursor, location, latitude, longitude, datemin, datemax, flickr_api)
if DATE.year==2018 and DATE.month==11:
break
# Fetch photos
def get_photo_from_location(db_connection, db_cursor, location, latitude, longitude, datemin, datemax, flickr):
    # Fetch all photos
try:
time.sleep(2)
#latitude = 48.8584
#longitude = 2.2945
photos = flickr.walk(lat=latitude, lon=longitude, radius=1,
min_taken_date=datemin, max_taken_date=datemax, per_page=500, extras='url_c')
    except Exception as e:
        with open('log.txt','a') as log:
            log.writelines(str(e))
        return  # bail out: photos is undefined if the search failed
    # Process each photo
try:
for photo_url in photos:
url = photo_url.get('url_c')
print(url)
            # If the url is not empty, insert this photo into the database
if url is not None:
photo_id = int(photo_url.get('id'))
photo = flickr_photo(photo_id, location, url)
if photo.insert_db(db_connection, db_cursor):
print("Success! Photo id:" + str(photo_id) + "\tPhoto url:" + url)
except Exception as e:
with open('log.txt','a') as log:
log.writelines(str(e))
def release_api(db_connection, db_cursor, api_key):
try:
sql_command_update = "UPDATE API SET start_use = FALSE WHERE key = '{0}'".format(api_key)
db_cursor.execute(sql_command_update)
db_connection.commit()
except Exception as e:
db_connection.rollback()
# Close the database connection
def close_connection(connection):
try:
connection.close()
print("Database Connection has been closed completely!")
return True
except Exception as e:
with open('log.txt','a') as log:
log.writelines(str(e))
# Main workflow
if __name__ == '__main__':
db_connection, db_cursor = db_connect()
flickr, api_key = query_api(db_connection, db_cursor)
location, lat, lon= query_location(db_connection, db_cursor)
while location is not None:
compute_time(db_connection, db_cursor, location, lat, lon, flickr)
location, lat, lon= query_location(db_connection, db_cursor)
print("All locations have been recorded!")
release_api(db_connection, db_cursor, api_key)
close_connection(db_connection)
| 35.292683 | 118 | 0.604008 | 972 | 0.160237 | 0 | 0 | 0 | 0 | 0 | 0 | 1,314 | 0.216617 |
f204e6f22d0c9b479799a0897aaa41e742212566 | 5,767 | py | Python | Lianjia/LianjiaErShouFang.py | Detailscool/YHSpider | ab1276c9167f70fed3ccff17e02fb62d51e4a469 | ["MIT"] | 1 | 2017-05-04T08:10:34.000Z | 2017-05-04T08:10:34.000Z | Lianjia/LianjiaErShouFang.py | Detailscool/YHSpider | ab1276c9167f70fed3ccff17e02fb62d51e4a469 | ["MIT"] | null | null | null | Lianjia/LianjiaErShouFang.py | Detailscool/YHSpider | ab1276c9167f70fed3ccff17e02fb62d51e4a469 | ["MIT"] | null | null | null | # -*- coding:utf-8 -*-
import requests
from bs4 import BeautifulSoup
import sys
import csv
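# Python 2 idiom: reload(sys) re-exposes setdefaultencoding so all strings default to UTF-8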
reload(sys)
sys.setdefaultencoding('utf-8')
def not_empty(str):
return str and str.strip()
if __name__ == '__main__':
url_main = 'http://gz.lianjia.com'
f = open(u'广州二手房.csv', 'wb')
    f.write(unicode('\xEF\xBB\xBF', 'utf-8')) # UTF-8 BOM so spreadsheet apps detect the encoding
writer = csv.writer(f)
writer.writerow(['区域', '小区名称', '户型', '面积', '价格(万)', '单价(元/平米)',
'性质', '朝向', '装修', '是否有电梯', '楼层', '建筑年代', '楼型'])
res = requests.get(url_main+'ershoufang')
res = res.text.encode(res.encoding).decode('utf-8')
soup = BeautifulSoup(res, 'html.parser')
# print soup.prettify()
districts = soup.find(name='div', attrs={'data-role':'ershoufang'}) # <div data-role="ershoufang">
# soup.select()
for district in districts.find_all(name='a'):
print district['title']
district_name = district.text # '东城', '西城', '朝阳', '海淀'......
url = '%s%s' % (url_main, district['href'])
# print url
res = requests.get(url)
res = res.text.encode(res.encoding).decode('utf-8')
soup = BeautifulSoup(res,'html.parser')
# print soup.prettify()
page = soup.find('div', {'class':'page-box house-lst-page-box'})
        if not page: # Pinggu district has no listings, skip it
continue
        total_pages = dict(eval(page['page-data']))['totalPage'] # total number of pages
# print total_pages
for j in range(1, total_pages+1):
url_page = '%spg%d/' % (url, j)
res = requests.get(url_page)
res = res.text.encode(res.encoding).decode('utf-8')
soup = BeautifulSoup(res, 'html.parser')
# print soup.prettify()
sells = soup.find(name='ul', attrs={'class':'sellListContent', 'log-mod':'list'})
if not sells:
continue
# <a class="title" data-bl="list" data-el="ershoufang" data-log_index="1" href="XX" target="_blank">
titles = soup.find_all(name='a', attrs={'class':'title', 'data-bl':'list', 'data-el':'ershoufang'})
# <a data-el="region" data-log_index="1" href="X" target="_blank">
regions = sells.find_all(name='a', attrs={'data-el':'region'})
infos = sells.find_all(name='div', class_='houseInfo') # <div class="houseInfo">
infos2 = sells.find_all(name='div', class_='positionInfo') # <div class="positionInfo">
prices = sells.find_all(name='div', class_='totalPrice') # <div class="totalPrice">
unit_prices = sells.find_all(name='div', class_='unitPrice') # <div class="unitPrice" data-hid="X" data-price="X" data-rid="X">
subways = sells.find_all(name='span', class_='subway') # <span class="subway">
taxs = sells.find_all(name='span', class_='taxfree') # <span class="taxfree">
N = max(len(titles), len(regions), len(prices), len(unit_prices), len(subways), len(taxs), len(infos), len(infos2))
# for title, region, price, unit_price, subway, tax, info, info2 in zip(titles, regions, prices, unit_prices, subways, taxs, infos, infos2):
for i in range(N):
room_type = area = orientation = decoration = elevator = floor = year = slab_tower = None
title = titles[i] if len(titles) > i else None
region = regions[i] if len(regions) > i else None
price = prices[i] if len(prices) > i else None
unit_price = unit_prices[i] if len(unit_prices) > i else None
subway = subways[i] if len(subways) > i else None
tax = taxs[i] if len(taxs) > i else None
info = infos[i] if len(infos) > i else None
info2 = infos2[i] if len(infos2) > i else None
if title:
print 'Title: ', title.text
if region:
region = region.text
if price:
price = price.text
price = price[:price.find('万')]
if unit_price:
unit_price = unit_price.span.text.strip()
unit_price = unit_price[:unit_price.find('元/平米')]
if unit_price.find('单价') != -1:
unit_price = unit_price[2:]
if subway:
subway = subway.text.strip()
if tax:
tax = tax.text.strip()
if info:
info = info.text.split('|')
                    room_type = info[1].strip() # layout: rooms and halls
                    area = info[2].strip() # floor area
                    area = area[:area.find('平米')]
                    orientation = info[3].strip().replace(' ', '') # orientation
                    decoration = '-'
                    if len(info) > 4: # this field is empty when the listing is a parking space
                        decoration = info[4].strip() # decoration type: basic, medium, fine, luxury, other
                    elevator = '无' # default: no elevator
                    if len(info) > 5:
                        elevator = info[5].strip() # whether there is an elevator: yes or no
if info2:
info2 = filter(not_empty, info2.text.split(' '))
floor = info2[0].strip()
info2 = info2[1]
year = info2[:info2.find('年')]
slab_tower = info2[info2.find('建')+1:]
print district_name, region, room_type, area, price, unit_price, tax, orientation, decoration, elevator, floor, year, slab_tower
writer.writerow([district_name, region, room_type, area, price, unit_price, tax, orientation, decoration, elevator, floor, year, slab_tower])
# break
# break
# break
f.close()
| 50.147826 | 157 | 0.521935 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,657 | 0.275112 |
f20509812fdd5299a74f5fce1649f221aa576b8e | 1,165 | py | Python | test/test_main.py | bluesheeptoken/PyGolf | 421117d1fa1c197b475112e5655fbf7693d475a2 | ["MIT"] | 7 | 2020-04-25T19:54:01.000Z | 2022-03-10T21:54:51.000Z | test/test_main.py | bluesheeptoken/PyGolf | 421117d1fa1c197b475112e5655fbf7693d475a2 | ["MIT"] | 1 | 2020-04-28T08:13:12.000Z | 2020-04-28T08:19:00.000Z | test/test_main.py | bluesheeptoken/PyGolf | 421117d1fa1c197b475112e5655fbf7693d475a2 | ["MIT"] | 1 | 2020-04-25T20:35:57.000Z | 2020-04-25T20:35:57.000Z | import argparse
import tempfile
import unittest
from pygolf.__main__ import get_arguments_warning, read_input_code, shorten
class TestMain(unittest.TestCase):
def test_reduce(self):
self.assertEqual(shorten("print( 1 + 2 )"), "print(1+2)")
self.assertEqual(shorten("not valid code"), None)
def test_read_input_code(self):
name_space = argparse.Namespace()
name_space.code = None
name_space.clipboard = None
name_space.input_file = None
name_space.code = "print('code')"
self.assertEqual(read_input_code(name_space), "print('code')")
name_space.code = None
with tempfile.NamedTemporaryFile("w+") as fp:
fp.write("print('input_file')")
fp.flush()
name_space.input_file = fp.name
self.assertEqual(read_input_code(name_space), "print('input_file')")
name_space.input_file = None
def test_get_arguments_warning(self):
name_space = argparse.Namespace()
name_space.input_file = None
name_space.output_file = "path"
self.assertEqual(len(list(get_arguments_warning(name_space))), 1)
| 33.285714 | 80 | 0.661803 | 1,037 | 0.890129 | 0 | 0 | 0 | 0 | 0 | 0 | 126 | 0.108155 |
f205b580166717e0b19f49119e8357e063a3858d | 545 | py | Python | Session_01/koch.py | UP-RS-ESP/GEW-DAP05-2018 | 04ca0327b4a4ea5b6869e3e985672639651771e8 | ["MIT"] | 2 | 2018-11-16T12:44:33.000Z | 2021-12-20T06:34:22.000Z | Session_01/koch.py | UP-RS-ESP/GEW-DAP05-2018 | 04ca0327b4a4ea5b6869e3e985672639651771e8 | ["MIT"] | null | null | null | Session_01/koch.py | UP-RS-ESP/GEW-DAP05-2018 | 04ca0327b4a4ea5b6869e3e985672639651771e8 | ["MIT"] | null | null | null | import sys
import numpy as np
from matplotlib import pyplot as pl
def koch(x0, y0, rho, phi, order):
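    # Recursively split a segment of length rho at angle phi into the four Koch sub-segments; at order 0, append the endpoint to the global xr, yr lists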
global xr, yr
x1, y1 = x0 + rho * np.cos(phi), y0 + rho * np.sin(phi)
if order:
x, y = x0, y0
for angle in [0, np.pi/3, 5*np.pi/3, 0]:
x, y = koch(x, y, rho / 3.0, phi + angle, order - 1)
else:
xr.append(x1)
yr.append(y1)
return (x1, y1)
xr = [1,]
yr = [1,]
koch(xr[0], yr[0], 1, 0, 5)
pl.plot(xr, yr, 'r.-', lw = 0.5)
ax = pl.gca()
ax.set_aspect('equal')
pl.grid()
pl.show()
| 20.185185 | 64 | 0.519266 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.022018 |
f206882462f0a5905d5255d1814f64fdc9855a48 | 2,995 | py | Python | core/views.py | xuhang57/atmosphere | f53fea2a74ee89ccc8852906799b1d9a7e9178b7 | ["BSD-3-Clause"] | null | null | null | core/views.py | xuhang57/atmosphere | f53fea2a74ee89ccc8852906799b1d9a7e9178b7 | ["BSD-3-Clause"] | null | null | null | core/views.py | xuhang57/atmosphere | f53fea2a74ee89ccc8852906799b1d9a7e9178b7 | ["BSD-3-Clause"] | null | null | null | # -*- coding: utf-8 -*-
"""
Core views to provide custom operations
"""
import uuid
from datetime import datetime
from django.http import HttpResponseRedirect
from threepio import logger
from atmosphere import settings
from django_cyverse_auth.decorators import atmo_login_required
from django_cyverse_auth.models import Token as AuthToken
from core.models import AtmosphereUser as DjangoUser
@atmo_login_required
def emulate_request(request, username=None):
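    # Lets a logged-in user impersonate another account by issuing a fresh token; calling again without a username restores the original emulator session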
try:
logger.info("Emulate attempt: %s wants to be %s"
% (request.user, username))
logger.info(request.session.__dict__)
if not username and 'emulator' in request.session:
logger.info("Clearing emulation attributes from user")
username = request.session['emulator']
orig_token = request.session['emulator_token']
request.session['username'] = username
request.session['token'] = orig_token
del request.session['emulator']
del request.session['emulator_token']
# Allow user to fall through on line below
return HttpResponseRedirect(settings.REDIRECT_URL + "/api/v1/profile")
try:
user = DjangoUser.objects.get(username=username)
except DjangoUser.DoesNotExist:
logger.info("Emulate attempt failed. User <%s> does not exist"
% username)
return HttpResponseRedirect(
settings.REDIRECT_URL +
"/api/v1/profile")
logger.info("Emulate success, creating tokens for %s" % username)
token = AuthToken(
user=user,
key=str(uuid.uuid4()),
issuedTime=datetime.now(),
remote_ip=request.META['REMOTE_ADDR'],
api_server_url=settings.API_SERVER_URL
)
token.save()
# Keep original emulator+token if it exists, or use the last known username+token
if 'emulator' not in request.session:
original_emulator = request.session['username']
request.session['emulator'] = original_emulator
logger.info("Returning user %s - Emulated as user %s - to api profile "
% (original_emulator, username))
if 'emulator_token' not in request.session:
original_token = request.session['token']
request.session['emulator_token'] = original_token
# # Set the username to the user to be emulated
# # to whom the token also belongs
request.session['username'] = username
request.session['token'] = token.key
request.session.save()
logger.info(request.session.__dict__)
logger.info(request.user)
return HttpResponseRedirect(settings.REDIRECT_URL + "/api/v1/profile")
except Exception as e:
logger.warn("Emulate request failed")
logger.exception(e)
return HttpResponseRedirect(settings.REDIRECT_URL + "/api/v1/profile")
| 39.933333 | 89 | 0.642738 | 0 | 0 | 0 | 0 | 2,596 | 0.866778 | 0 | 0 | 771 | 0.257429 |
f207285596d5e7ef8253ffc7d5fe5e11b93828ce | 1,714 | py | Python | indico/util/serializer.py | jgrigera/indico | b5538f2755bc38a02313d079bac831ee3dfb44ab | ["MIT"] | 1 | 2018-11-12T21:29:26.000Z | 2018-11-12T21:29:26.000Z | indico/util/serializer.py | jgrigera/indico | b5538f2755bc38a02313d079bac831ee3dfb44ab | ["MIT"] | 9 | 2020-09-08T09:25:57.000Z | 2022-01-13T02:59:05.000Z | indico/util/serializer.py | jgrigera/indico | b5538f2755bc38a02313d079bac831ee3dfb44ab | ["MIT"] | 3 | 2020-07-20T09:09:44.000Z | 2020-10-19T00:29:49.000Z | # This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from enum import Enum
from indico.core.errors import IndicoError
from indico.core.logger import Logger
class Serializer(object):
__public__ = []
def to_serializable(self, attr='__public__', converters=None):
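        # Walk the attribute list named by attr (entries may be (key, alias) tuples), calling callables and recursively serializing nested Serializer, list, dict, and Enum values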
serializable = {}
if converters is None:
converters = {}
for k in getattr(self, attr):
try:
if isinstance(k, tuple):
k, name = k
else:
k, name = k, k
v = getattr(self, k)
if callable(v): # to make it generic, we can get rid of it by properties
v = v()
if isinstance(v, Serializer):
v = v.to_serializable()
elif isinstance(v, list):
v = [e.to_serializable() for e in v]
elif isinstance(v, dict):
v = dict((k, vv.to_serializable() if isinstance(vv, Serializer) else vv)
for k, vv in v.iteritems())
elif isinstance(v, Enum):
v = v.name
if type(v) in converters:
v = converters[type(v)](v)
serializable[name] = v
except Exception:
msg = 'Could not retrieve {}.{}.'.format(self.__class__.__name__, k)
Logger.get('Serializer{}'.format(self.__class__.__name__)).exception(msg)
raise IndicoError(msg)
return serializable
| 35.708333 | 92 | 0.524504 | 1,393 | 0.812719 | 0 | 0 | 0 | 0 | 0 | 0 | 316 | 0.184364 |
f2082b7572a268703ff36753a9f8e86b4e7ec828 | 814 | py | Python | step/lambdas/get_image_status.py | mbeacom/cloudendure-python | b854b1b2ea47c18a8ef03908abcdd653b77684ac | ["MIT"] | 7 | 2019-06-28T23:30:47.000Z | 2019-08-23T16:57:12.000Z | step/lambdas/get_image_status.py | mbeacom/cloudendure-python | b854b1b2ea47c18a8ef03908abcdd653b77684ac | ["MIT"] | 27 | 2019-06-14T20:39:10.000Z | 2019-08-30T17:20:40.000Z | step/lambdas/get_image_status.py | mbeacom/cloudendure-python | b854b1b2ea47c18a8ef03908abcdd653b77684ac | ["MIT"] | 2 | 2019-08-23T16:50:20.000Z | 2019-08-30T18:22:23.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Check the state of an AWS AMI."""
from __future__ import annotations
import json
from typing import Any, Dict
import boto3
print("Loading function get_image_status")
ec2_client = boto3.client("ec2")
# {
# "instance_id": "i-identifier",
# "kms_id": "KMS ID",
# "account": "account_number",
# "instance_status": "should be there if in loop"
# "migrated_ami_id": "ami-identifier"
# }
def lambda_handler(event: Dict[str, Any], context: Any) -> str:
"""Handle signaling and entry into the AWS Lambda."""
print("Received event: " + json.dumps(event, indent=2))
migrated_ami_id: str = event["migrated_ami_id"]
ami_state: Dict[str, Any] = ec2_client.describe_images(ImageIds=[migrated_ami_id])
return ami_state["Images"][0]["State"]
| 24.666667 | 86 | 0.683047 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 408 | 0.501229 |
f2095b25bea143e9b82c7fbfb9522beac7c96f69 | 344 | py | Python | starfish/types.py | kne42/starfish | 78b348c9756f367221dcca725cfa5107e5520b33 | ["MIT"] | null | null | null | starfish/types.py | kne42/starfish | 78b348c9756f367221dcca725cfa5107e5520b33 | ["MIT"] | null | null | null | starfish/types.py | kne42/starfish | 78b348c9756f367221dcca725cfa5107e5520b33 | ["MIT"] | null | null | null | # constants
from starfish.core.types import ( # noqa: F401
Axes,
Clip,
Coordinates,
CORE_DEPENDENCIES,
Features,
LOG,
OverlapStrategy,
PHYSICAL_COORDINATE_DIMENSION,
PhysicalCoordinateTypes,
STARFISH_EXTRAS_KEY,
TransformType,
)
from starfish.core.types import CoordinateValue, Number # noqa: F401
| 21.5 | 69 | 0.715116 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 35 | 0.101744 |
f209fda8f0cfe43f72b6eb3a30447ef4d992f64f | 6,764 | py | Python | python/alertsActor/rules/dangerKey.py | sdss/twistedAlertsActor | 857588f6da39b7716263f8bd8e3f1be8bb4ce0f7 | ["BSD-3-Clause"] | null | null | null | python/alertsActor/rules/dangerKey.py | sdss/twistedAlertsActor | 857588f6da39b7716263f8bd8e3f1be8bb4ce0f7 | ["BSD-3-Clause"] | null | null | null | python/alertsActor/rules/dangerKey.py | sdss/twistedAlertsActor | 857588f6da39b7716263f8bd8e3f1be8bb4ce0f7 | ["BSD-3-Clause"] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
#
# dangerKey.py
#
# Created by John Donor on 10 April 2019
import re, time
from yaml import YAMLObject
from alertsActor import log
class diskCheck(YAMLObject):
"""evaluate a disk keyword
"""
def __init__(self):
pass
def __call__(self, keyState):
"""The keyval is an enum ('Ok','Warning','Serious','Critical')
and the amount of free space (GB)
"""
keyval = keyState.keyword
if (keyval[0]).upper() == 'OK':
return "ok"
elif (keyval[0]).upper() == 'WARNING':
return "warn"
elif (keyval[0]).upper() == 'SERIOUS':
return "serious"
elif (keyval[0]).upper() == 'CRITICAL':
return "critical"
else:
return "info"
class doNothing(object):
"""camcheck alerts can't check themselves
dummy class to facilitate that
"""
def __init__(self):
pass
def __call__(self, keyState):
return keyState.severity
class camCheck(YAMLObject):
"""evaluate a camCheck alert
"""
def __init__(self):
# NEVER GETS CALLED!!!! -_-
pass
def generateCamCheckAlert(self, key, severity):
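        # Register (or refresh) a monitored alert key for a fault reported by camCheck and tag the affected BOSS instruments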
inst = key[:3]
side = key[3]
key = "camCheck." + key
instruments = ["boss"]
# most keywords will be SP[12][RB]
# check if they are and assign appropriate instruments
if inst in ["SP1", "SP2"]:
instruments.append("boss.{}".format(inst))
if side in ["R", "B"]:
instruments.append("boss.{}.{}".format(inst, side))
if severity in ["critical", "serious"]:
selfClear = False
addresses = self.emailAddresses
else:
selfClear = True
addresses = None
if key not in self.triggered:
self.triggered.append(key)
if key not in self.alertsActor.monitoring:
dumbCheck = doNothing()
self.alertsActor.addKey(key, severity=severity, checkAfter=120,
selfClear=selfClear, checker=dumbCheck,
keyword="'Reported by camCheck'",
instruments=instruments, emailAddresses=addresses,
emailDelay=0)
if self.alertsActor.monitoring[key].active:
self.alertsActor.monitoring[key].stampTime()
else:
self.alertsActor.monitoring[key].setActive(severity)
def __call__(self, keyState):
keyval = keyState.keyword
if self.alertsActor is None:
print("setting alertsActor for camCheck!!")
self.alertsActor = keyState.alertsActorReference
# do this only once hopefully
for i in ["boss.SP1", "boss.SP2", "boss.SP1.R", "boss.SP2.R",
"boss.SP1.B", "boss.SP2.B"]:
self.alertsActor.instrumentDown[i] = False
# print("CAMCHECK, len {}, type {}, key: {}".format(len(keyval), type(keyval), keyval))
log.info('CAMCHECK reported {}'.format(keyval))
if type(keyval) == str:
# could possibly try to fix this in hubModel casts, but easier here
keyval = [keyval]
if len(keyval) == 1 and keyval[0] == "None": # this is a bug somewhere upstream
keyval = []
for k in keyval:
if re.search(r"SP[12][RB][0-3]?CCDTemp", k):
self.generateCamCheckAlert(k, "critical")
elif re.search(r"SP[12]SecondaryDewarPress", k):
self.generateCamCheckAlert(k, "critical")
elif re.search(r"SP[12](DAQ|Mech|Micro)NotTalking", k):
self.generateCamCheckAlert(k, "critical")
elif re.search(r"DACS_SET", k):
self.generateCamCheckAlert(k, "critical")
elif re.search(r"SP[12]LN2Fill", k):
self.generateCamCheckAlert(k, "serious")
elif re.search(r"SP[12](Exec|Phase)Boot", k):
self.generateCamCheckAlert(k, "serious")
else:
self.generateCamCheckAlert(k, "warn")
for k in self.triggered:
if k.split(".")[-1] not in keyval: # b/c we know its camCheck already
self.alertsActor.monitoring[k].severity = "ok"
# now it can check itself and find out its cool
# and then decide to disappear if its acknowledged, etc etc
self.alertsActor.monitoring[k].checkKey()
self.triggered.remove(k)
# never flag camCheck, always monitored keys
return "ok"
class heartbeatCheck(YAMLObject):
"""check a heartbeat.
"""
def __init__(self):
pass
def __call__(self, keyState):
if time.time() - keyState.lastalive < keyState.checkAfter:
return "ok"
elif time.time() - keyState.lastalive > 5*keyState.checkAfter:
return "critical"
else:
return keyState.defaultSeverity
class above(YAMLObject):
"""literally: is the value too high
"""
def __init__(self):
pass
def __call__(self, keyState):
if keyState.keyword > keyState.dangerVal:
return keyState.defaultSeverity
else:
return "ok"
class below(YAMLObject):
"""literally: is the value too low
"""
def __init__(self):
pass
def __call__(self, keyState):
if keyState.keyword < keyState.dangerVal:
return keyState.defaultSeverity
else:
return "ok"
class neq(YAMLObject):
"""literally: is the value too low
"""
def __init__(self):
pass
def __call__(self, keyState):
if keyState.keyword != keyState.dangerVal:
return keyState.defaultSeverity
else:
return "ok"
class inList(YAMLObject):
"""is any value in the list "True", e.g. flagged
"""
def __init__(self):
pass
def __call__(self, keyState):
if [k for k in keyState.keyword if k]:
return keyState.defaultSeverity
else:
return "ok"
class firstElem(YAMLObject):
"""is any value in the list "True", e.g. flagged
"""
def __init__(self):
pass
def __call__(self, keyState):
if keyState.keyword[0] == keyState.dangerVal:
return keyState.defaultSeverity
else:
return "ok"
class default(object):
"""check equality to a dangerval
"""
def __init__(self):
pass
def __call__(self, keyState):
if keyState.keyword == keyState.dangerVal:
return keyState.defaultSeverity
else:
return "ok"
| 29.797357 | 95 | 0.563128 | 6,560 | 0.96984 | 0 | 0 | 0 | 0 | 0 | 0 | 1,742 | 0.25754 |
f20a036a9143b93d4e11c864b212d417d5d17645 | 22,382 | py | Python | jsonsubschema/old/_jsonschema.py | lukeenterprise/json-subschema | f273d62ed1517f5a83a57abf148232ed927a771a | ["Apache-2.0"] | 1 | 2019-08-01T15:28:26.000Z | 2019-08-01T15:28:26.000Z | jsonsubschema/old/_jsonschema.py | lukeenterprise/json-subschema | f273d62ed1517f5a83a57abf148232ed927a771a | ["Apache-2.0"] | null | null | null | jsonsubschema/old/_jsonschema.py | lukeenterprise/json-subschema | f273d62ed1517f5a83a57abf148232ed927a771a | ["Apache-2.0"] | null | null | null | '''
Created on June 24, 2019
@author: Andrew Habib
'''
import copy
import json
import sys
import math
import numbers
import intervals as I
from abc import ABC, abstractmethod
from greenery.lego import parse
from intervals import inf as infinity
import config
import _constants
from canoncalization import canoncalize_object
from _normalizer import lazy_normalize
from _utils import (
validate_schema,
print_db,
is_sub_interval_from_optional_ranges,
is_num,
is_list,
is_dict,
is_empty_dict_or_none,
is_dict_or_true,
one
)
class JSONschema(dict):
kw_defaults = {}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# self.validate()
self.updateKeys()
# self.canoncalize()
if self.isUninhabited():
sys.exit("Found an uninhabited type at: " + str(self))
def __getattr__(self, name):
if name in self:
return self[name]
else:
raise AttributeError("No such attribute: ", name)
def __setattr__(self, name, value):
self[name] = value
def __delattr__(self, name):
if name in self:
del self[name]
else:
raise AttributeError("No such attribute: ", name)
def validate(self):
validate_schema(self)
def updateKeys(self):
for k, v in self.kw_defaults.items():
if k == "items":
k = "items_"
if k not in self.keys():
self[k] = v
def isBoolean(self):
return self.keys() & _constants.Jconnectors
def isUninhabited(self):
return self._isUninhabited()
def _isUninhabited(self):
pass
def meet(self, s2):
pass
def join(self, s2):
pass
def isSubtype(self, s2):
if s2 == {} or s2 == True or self == s2:
return True
return self._isSubtype(s2)
def isSubtype_handle_rhs(self, s2, isSubtype_cb):
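        # When the right-hand schema is a boolean connective (anyOf/allOf/oneOf/not), recurse into it; otherwise fall back to the type-specific callback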
if s2.isBoolean():
# TODO revisit all of this. They are wrong.
if "anyOf" in s2:
return any(self.isSubtype(s) for s in s2["anyOf"])
elif "allOf" in s2:
return all(self.isSubtype(s) for s in s2["allOf"])
elif "oneOf" in s2:
return one(self.isSubtype(s) for s in s2["oneOf"])
elif "not" in s2:
# TODO
print("No handling of not yet.")
return None
else:
print_db("cb on rhs")
return isSubtype_cb(self, s2)
class JSONTypeString(JSONschema):
kw_defaults = {"minLength": 0, "maxLength": infinity, "pattern": ".*"}
def __init__(self, s):
super().__init__(s)
def _isUninhabited(self):
return self.minLength > self.maxLength
def meet(self, s):
pass
def _isSubtype(self, s2):
def _isStringSubtype(self, s2):
if s2.type != "string":
return False
is_sub_interval = is_sub_interval_from_optional_ranges(
self.minLength, self.maxLength, s2.minLength, s2.maxLength)
if not is_sub_interval:
return False
#
# at this point, length is compatible,
# so we should now worry about pattern only.
if s2.pattern == None or s2.pattern == "":
return True
elif self.pattern == None or self.pattern == "":
return False
elif self.pattern == s2.pattern:
return True
else:
regex = parse(self.pattern)
regex2 = parse(s2.pattern)
result = regex & regex2.everythingbut()
if result.empty():
return True
else:
return False
return super().isSubtype_handle_rhs(s2, _isStringSubtype)
def JSONNumericFactory(s):
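    # Canonicalize numeric schemas: a "number" whose multipleOf is integer-valued is really an "integer", so round its bounds accordingly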
if s.get("type") == "number":
if s.get("multipleOf") and float(s.get("multipleOf")).is_integer():
s["type"] = "integer"
if s.get("minimum") != None: # -I.inf:
s["minimum"] = math.floor(s.get("minimum")) if s.get(
"exclusiveMinimum") else math.ceil(s.get("minimum"))
if s.get("maximum") != None: # I.inf:
s["maximum"] = math.ceil(s.get("maximum")) if s.get(
"exclusiveMaximum") else math.floor(s.get("maximum"))
return JSONTypeInteger(s)
else:
return JSONTypeNumber(s)
else:
return JSONTypeInteger(s)
class JSONTypeInteger(JSONschema):
kw_defaults = {"minimum": -infinity, "maximum": infinity,
"exclusiveMinimum": False, "exclusiveMaximum": False, "multipleOf": None}
def __init__(self, s):
super().__init__(s)
def build_interval_draft4(self):
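        # Draft-4 exclusiveMinimum/exclusiveMaximum are booleans; for integers they tighten the closed interval by one on each exclusive side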
if self.exclusiveMinimum and self.exclusiveMaximum:
self.interval = I.closed(self.minimum+1, self.maximum-1)
elif self.exclusiveMinimum:
self.interval = I.closed(self.minimum+1, self.maximum)
elif self.exclusiveMaximum:
self.interval = I.closed(self.minimum, self.maximum-1)
else:
self.interval = I.closed(self.minimum, self.maximum)
def _isUninhabited(self):
self.build_interval_draft4()
return self.interval.is_empty() or \
(self.multipleOf != None and self.multipleOf not in self.interval)
def meet(self, s):
pass
def _isSubtype(self, s2):
def _isIntegerSubtype(self, s2):
if s2.type not in ["integer", "number"]:
return False
#
is_sub_interval = self.interval in s2.interval
if not is_sub_interval:
print_db("num__00")
return False
#
if (self.multipleOf == s2.multipleOf) \
or (self.multipleOf != None and s2.multipleOf == None) \
or (self.multipleOf != None and s2.multipleOf != None and self.multipleOf % s2.multipleOf == 0) \
or (self.multipleOf == None and s2.multipleOf == 1):
print_db("num__02")
return True
if self.multipleOf == None and s2.multipleOf != None:
return False
return super().isSubtype_handle_rhs(s2, _isIntegerSubtype)
class JSONTypeNumber(JSONschema):
kw_defaults = {"minimum": -infinity, "maximum": infinity,
"exclusiveMinimum": False, "exclusiveMaximum": False, "multipleOf": None}
def __init__(self, s):
super().__init__(s)
def build_interval_draft4(self):
if self.exclusiveMinimum and self.exclusiveMaximum:
self.interval = I.open(self.minimum, self.maximum)
elif self.exclusiveMinimum:
self.interval = I.openclosed(self.minimum, self.maximum)
elif self.exclusiveMaximum:
self.interval = I.closedopen(self.minimum, self.maximum)
else:
self.interval = I.closed(self.minimum, self.maximum)
def _isUninhabited(self):
self.build_interval_draft4()
return self.interval.is_empty() or \
(self.multipleOf != None and self.multipleOf not in self.interval)
def meet(self, s):
pass
def _isSubtype(self, s2):
def _isNumberSubtype(self, s2):
if s2.type != "number":
return False
#
is_sub_interval = self.interval in s2.interval
if not is_sub_interval:
print_db("num__00")
return False
#
if self.type == "number" and s2.type == "integer":
print_db("num__01")
return False
#
if (self.multipleOf == s2.multipleOf) \
or (self.multipleOf != None and s2.multipleOf == None) \
or (self.multipleOf != None and s2.multipleOf != None and self.multipleOf % s2.multipleOf == 0) \
or (self.multipleOf == None and s2.multipleOf == 1):
print_db("num__02")
return True
return super().isSubtype_handle_rhs(s2, _isNumberSubtype)
class JSONTypeBoolean(JSONschema):
kw_defaults = {}
def __init__(self, s):
super().__init__(s)
def _isSubtype(self, s2):
def _isBooleanSubtype(self, s2):
if s2.type == "boolean":
return True
else:
return False
return super().isSubtype_handle_rhs(s2, _isBooleanSubtype)
class JSONTypeNull(JSONschema):
kw_defaults = {}
def __init__(self, s):
super().__init__(s)
def _isSubtype(self, s2):
def _isNullSubtype(self, s2):
if s2.type == "null":
return True
else:
return False
return super().isSubtype_handle_rhs(s2, _isNullSubtype)
class JSONTypeObject(JSONschema):
kw_defaults = {"properties": {}, "additionalProperties": {}, "required": [
], "minProperties": 0, "maxProperties": infinity, "dependencies": {}, "patternProperties": {}}
def __init__(self, s):
super().__init__(s)
def meet(self, s2):
pass
def _isSubtype(self, s2):
def _isObjectSubtype(self, s2):
pass
return super().isSubtype_handle_rhs(s2, _isObjectSubtype)
class JSONTypeArray(JSONschema):
kw_defaults = {"minItems": 0, "maxItems": infinity,
"items": JSONTypeObject({}), "additionalItems": JSONTypeObject({}), "uniqueItems": False}
def __init__(self, s):
super().__init__(s)
def _isUninhabited(self):
return (self.minItems > self.maxItems) or \
(is_list(self.items) and self.additionalItems ==
False and self.minItems > len(self.items))
def meet(self, s2):
pass
def _isSubtype(self, s2):
def _isArraySubtype(self, s2):
print_db("in array subtype")
if s2.type != "array":
return False
#
#
# self = JsonArray(self)
# s2 = JsonArray(s2)
#
# uninhabited = handle_uninhabited_types(self, s2)
# if uninhabited != None:
# return uninhabited
#
# -- minItems and maxItems
is_sub_interval = is_sub_interval_from_optional_ranges(
self.minItems, self.maxItems, s2.minItems, s2.maxItems)
# also takes care of {'items' = [..], 'additionalItems' = False}
if not is_sub_interval:
print_db("__01__")
return False
#
            # -- uniqueItems
# TODO Double-check. Could be more subtle?
if not self.uniqueItems and s2.uniqueItems:
print_db("__02__")
return False
#
# -- items = {not empty}
# no need to check additionalItems
if is_dict(self.items_):
if is_dict(s2.items_):
print_db(self.items_)
print_db(s2.items_)
# if subschemachecker.Checker.is_subtype(self.items_, s2.items_):
if self.items_.isSubtype(s2.items_):
print_db("__05__")
return True
else:
print_db("__06__")
return False
elif is_list(s2.items_):
if s2.additionalItems == False:
print_db("__07__")
return False
elif s2.additionalItems == True:
for i in s2.items_:
# if not subschemachecker.Checker.is_subtype(self.items_, i):
if not self.items_.isSubtype(i):
print_db("__08__")
return False
print_db("__09__")
return True
elif is_dict(s2.additionalItems):
for i in s2.items_:
# if not subschemachecker.Checker.is_subtype(self.items_, i):
if not self.items_.isSubtype(i):
print_db("__10__")
return False
# if subschemachecker.Checker.is_subtype(self.items_, s2.additionalItems):
if self.items_.isSubtype(s2.additionalItems):
print_db("__11__")
return True
else:
print_db("__12__")
return False
#
elif is_list(self.items_):
print_db("lhs is list")
if is_dict(s2.items_):
if self.additionalItems == False:
for i in self.items_:
# if not subschemachecker.Checker.is_subtype(i, s2.items_):
if not i.isSubtype(s2.items_):
print_db("__13__")
return False
print_db("__14__")
return True
elif self.additionalItems == True:
for i in self.items_:
# if not subschemachecker.Checker.is_subtype(i, s2.items_):
if not i.isSubtype(s2.items_):
return False
return True
elif is_dict(self.additionalItems):
for i in self.items_:
# if not subschemachecker.Checker.is_subtype(i, s2.items_):
if not i.isSubtype(s2.items_):
return False
# if subschemachecker.Checker.is_subtype(self.additionalItems, s2.items_):
if self.additionalItems.isSubtype(s2.items_):
return True
else:
return False
# now lhs and rhs are lists
elif is_list(s2.items_):
print_db("lhs & rhs are lists")
len1 = len(self.items_)
len2 = len(s2.items_)
for i, j in zip(self.items_, s2.items_):
# if not subschemachecker.Checker.is_subtype(i, j):
if not i.isSubtype(j):
return False
if len1 == len2:
print_db("len1 == len2")
if self.additionalItems == s2.additionalItems:
return True
elif self.additionalItems == True and s2.additionalItems == False:
return False
elif self.additionalItems == False and s2.additionalItems == True:
return True
else:
# return subschemachecker.Checker.is_subtype(self.additionalItems, s2.additionalItems)
return self.additionalItems.isSubtype(s2.additionalItems)
elif len1 > len2:
diff = len1 - len2
for i in range(len1-diff, len1):
# if not subschemachecker.Checker.is_subtype(self.items_[i], s2.additionalItems):
if not self.items_[i].isSubtype(s2.additionalItems):
print_db("9999")
return False
print_db("8888")
return True
else: # len2 > len 1
# if self.additionalItems:
diff = len2 - len1
for i in range(len2 - diff, len2):
print_db("self.additionalItems",
self.additionalItems)
print_db(i, s2.items_[i])
# if not subschemachecker.Checker.is_subtype(self.additionalItems, s2.items_[i]):
if not self.additionalItems.isSubtype(s2.items_[i]):
print_db("!!!")
return False
# return subschemachecker.Checker.is_subtype(self.additionalItems, s2.additionalItems)
return self.additionalItems.isSubtype(s2.additionalItems)
return super().isSubtype_handle_rhs(s2, _isArraySubtype)
class JSONanyOf(JSONschema):
def meet(self, s):
pass
def _isSubtype(self, s2):
def _isAnyofSubtype(self, s2):
for s in self.anyOf:
if not s.isSubtype(s2):
return False
return True
return super().isSubtype_handle_rhs(s2, _isAnyofSubtype)
class JSONallOf(JSONschema):
def meet(self, s):
pass
def _isSubtype(Self, s2):
def _isAllOfSubtype(self, s2):
for s in self.allOf:
if not s.isSubtype(s2):
return False
return True
return super().isSubtype_handle_rhs(s2, _isAllOfSubtype)
class JSONoneOf(JSONschema):
def meet(self, s):
pass
def _isSubtype(self, s2):
sys.exit("onOf on the lhs is not supported yet.")
class JSONnot(JSONschema):
def meet(self, s):
pass
def _isSubtype(self, s):
pass
typeToConstructor = {
"string": JSONTypeString,
"integer": JSONNumericFactory,
"number": JSONNumericFactory,
"boolean": JSONTypeBoolean,
"null": JSONTypeNull,
"array": JSONTypeArray,
"object": JSONTypeObject
}
boolToConstructor = {
"anyOf": JSONanyOf,
"allOf": JSONallOf,
"oneOf": JSONoneOf,
"not": JSONnot
}
class JSONSchemaSubtypeFactory(json.JSONDecoder):
def __init__(self, *args, **kwargs):
json.JSONDecoder.__init__(
self, object_hook=self.object_hook, *args, **kwargs)
def object_hook(self, d):
print_db("object before canon.", d)
# return JSONSchemaSubtypeFactory.canoncalize_object(d)
return canoncalize_object(d)
# @staticmethod
# def canoncalize_object(d):
# validate_schema(d)
# if d == {}:
# return d
# t = d.get("type")
# if isinstance(t, list):
# return JSONSchemaSubtypeFactory.canoncalize_list_of_types(d)
# elif isinstance(t, str):
# return JSONSchemaSubtypeFactory.canoncalize_single_type(d)
# else:
# connectors = set(d.keys()) & set(_constants.Jconnectors)
# if connectors:
# return JSONSchemaSubtypeFactory.canoncalize_connectors(d)
# else:
# d["type"] = _constants.Jtypes
# return JSONSchemaSubtypeFactory.canoncalize_list_of_types(d)
# @staticmethod
# def canoncalize_list_of_types(d):
# t = d.get("type")
# choices = []
# for t_i in t:
# if t_i in typeToConstructor.keys():
# s_i = copy.deepcopy(d)
# s_i["type"] = t_i
# s_i = JSONSchemaSubtypeFactory.canoncalize_single_type(s_i)
# choices.append(s_i)
# else:
# print("Unknown schema type {} at:".format(t))
# print(d)
# print("Exiting...")
# sys.exit(1)
# d = {"anyOf": choices}
# # TODO do we need to return JSONanyOf ?
# return boolToConstructor.get("anyOf")(d)
# @staticmethod
# def canoncalize_single_type(d):
# t = d.get("type")
# # check type is known
# if t in typeToConstructor.keys():
# # remove irrelevant keywords
# tmp = copy.deepcopy(d)
# for k in tmp.keys():
# if k not in _constants.Jcommonkw and k not in _constants.JtypesToKeywords.get(t):
# d.pop(k)
# return typeToConstructor[t](d)
# else:
# print("Unknown schema type {} at:".format(t))
# print(d)
# print("Exiting...")
# sys.exit(1)
# @staticmethod
# def canoncalize_connectors(d):
# # TODO
# connectors = set(d.keys()) & set(_constants.Jconnectors)
# if len(connectors) == 1:
# return boolToConstructor[connectors.pop()](d)
# elif len(connectors) > 1:
# return boolToConstructor["allOf"]({"allOf": list({k: v} for k, v in d.items())})
# else:
# print("Something went wrong")
class JSONSubtypeChecker:
def __init__(self, s1, s2):
# validate_schema(s1)
# validate_schema(s2)
self.s1 = self.canoncalize_json(s1)
self.s2 = self.canoncalize_json(s2)
def canoncalize_json(self, obj):
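        # JSON primitives and lists pass through unchanged; only dict (schema) nodes are canonicalized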
if isinstance(obj, str) or isinstance(obj, numbers.Number) or isinstance(obj, bool) or isinstance(obj, type(None)) or isinstance(obj, list):
return obj
elif isinstance(obj, dict):
# return JSONSchemaSubtypeFactory.canoncalize_object(obj)
return canoncalize_object(obj)
def isSubtype(self):
return self.s1.isSubtype(self.s2)
if __name__ == "__main__":
s1_file = sys.argv[1]
s2_file = sys.argv[2]
print("Loading json schemas from:\n{}\n{}\n".format(s1_file, s2_file))
#######################################
with open(s1_file, 'r') as f1:
s1 = json.load(f1, cls=JSONSchemaSubtypeFactory)
with open(s2_file, 'r') as f2:
s2 = json.load(f2, cls=JSONSchemaSubtypeFactory)
print(s1)
print(s2)
print("Usage scenario 1:", s1.isSubtype(s2))
#######################################
with open(s1_file, 'r') as f1:
s1 = json.load(f1)
with open(s2_file, 'r') as f2:
s2 = json.load(f2)
print(s1)
print(s2)
print("Usage scenario 2:", JSONSubtypeChecker(s1, s2).isSubtype()) | 33.556222 | 148 | 0.524752 | 19,995 | 0.893352 | 0 | 0 | 0 | 0 | 0 | 0 | 5,161 | 0.230587 |
f20a9c6a0a0f41308a9f256ea4ec3d2997af5cd5 | 6,388 | py | Python | eruditio/shared_apps/django_community/utils.py | genghisu/eruditio | 5f8f3b682ac28fd3f464e7a993c3988c1a49eb02 | ["BSD-3-Clause", "MIT"] | null | null | null | eruditio/shared_apps/django_community/utils.py | genghisu/eruditio | 5f8f3b682ac28fd3f464e7a993c3988c1a49eb02 | ["BSD-3-Clause", "MIT"] | null | null | null | eruditio/shared_apps/django_community/utils.py | genghisu/eruditio | 5f8f3b682ac28fd3f464e7a993c3988c1a49eb02 | ["BSD-3-Clause", "MIT"] | null | null | null | """
Various utility functions used by django_community and
other apps to perform authentication related tasks.
"""
import hashlib, re
import django.forms as forms
from django.core.exceptions import ObjectDoesNotExist
from django.forms import ValidationError
import django.http as http
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth import logout as auth_logout
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from django_community.models import UserOpenID, UserProfile
def openid_logout(request):
"""
Clears session which effectively logs out the current
OpenId user.
"""
request.session.flush()
def handle_logout(request):
"""
Log out.
"""
auth_logout(request)
def get_logged_user(request):
"""
    Returns the current user who is logged in, checking for an OpenID user first,
    then for a regular user; returns None if no user is currently logged in.
"""
if settings.OPENID_ENABLED and hasattr(request, 'openid'):
user = UserOpenID.objects.get_for_openid(request, request.openid)
if not user:
user = request.user
return user
def handle_login(request, data):
"""
Logs the user in based on form data from django_community.LoginForm.
"""
user = authenticate(username = data.get('username', None),
password = data.get('password', None))
user_object = User.objects.get(username = data.get('username', None))
if user is not None:
login(request, user)
return user
def handle_signup(request, data):
"""
Signs a user up based on form data from django_community.SignupForm.
"""
from django.contrib.auth.models import get_hexdigest
username = data.get('username', None)
email = data.get('email', None)
password = data.get('password', None)
try:
user = User.objects.get(username = username, email = email)
except ObjectDoesNotExist:
user = User(username = username, email = email)
user.save()
user.set_password(password)
user_profile = UserProfile.objects.get_user_profile(user)
user = authenticate(username = username, password = password)
login(request, user)
return user
def get_or_create_from_openid(openid):
"""
Returns an User with the given openid or
creates a new user and associates openid with that user.
"""
try:
user = User.objects.get(username = openid)
except ObjectDoesNotExist:
password = hashlib.sha256(openid).hexdigest()
user = User(username = openid, email = '', password = password)
user.save()
user.display_name = "%s_%s" % ('user', str(user.id))
user.save()
return user
def generate_random_user_name():
"""
Generates a random user name user_{user_id}_{salt}
to be used for creating new users.
"""
import random
current_users = User.objects.all().order_by('-id')
if current_users:
next_id = current_users[0].id + 1
else:
next_id = 1
random_salt = random.randint(1, 5000)
return 'user_%s_%s' % (str(next_id), str(random_salt))
def create_user_from_openid(request, openid):
"""
Creates a new User object associated with the given
openid.
"""
from django_community.config import OPENID_FIELD_MAPPING
from django_utils.request_helpers import get_ip
username = generate_random_user_name()
profile_attributes = {}
for attribute in OPENID_FIELD_MAPPING.keys():
mapped_attribute = OPENID_FIELD_MAPPING[attribute]
if openid.sreg and openid.sreg.get(attribute, ''):
profile_attributes[mapped_attribute] = openid.sreg.get(attribute, '')
new_user = User(username = username)
new_user.save()
new_openid = UserOpenID(openid = openid.openid, user = new_user)
new_openid.save()
new_user_profile = UserProfile.objects.get_user_profile(new_user)
for filled_attribute in profile_attributes.keys():
setattr(new_user, filled_attribute, profile_attributes[filled_attribute])
new_user_profile.save()
return new_user
def get_anon_user(request):
"""
    Returns an anonymous user corresponding to this IP address if one exists.
Else create an anonymous user and return it.
"""
try:
anon_user = User.objects.get(username = generate_anon_user_name(request))
except ObjectDoesNotExist:
anon_user = create_anon_user(request)
return anon_user
def create_anon_user(request):
"""
Creates a new anonymous user based on the ip provided by the request
object.
"""
anon_user_name = generate_anon_user_name(request)
anon_user = User(username = anon_user_name)
anon_user.save()
user_profile = UserProfile(user = anon_user, display_name = 'anonymous')
user_profile.save()
return anon_user
def generate_anon_user_name(request):
"""
    Generate an anonymous user name based on an IP address.
"""
from django_utils.request_helpers import get_ip
ip = get_ip(request)
return "anon_user_%s" % (str(ip))
def is_anon_user(user):
"""
    Determine if a user is anonymous or not.
"""
return user.username[0:10] == 'anon_user_'
def is_random(name):
"""
Determine if a user has a randomly generated display name.
"""
    if len(name.split('_')) > 1 and name.startswith('user'):
return True
else:
return False
def process_ax_data(user, ax_data):
"""
Process OpenID AX data.
"""
import django_openidconsumer.config
emails = ax_data.get(django_openidconsumer.config.URI_GROUPS.get('email').get('type_uri', ''), '')
display_names = ax_data.get(django_openidconsumer.config.URI_GROUPS.get('alias').get('type_uri', ''), '')
if emails and not user.email.strip():
user.email = emails[0]
user.save()
if not user.profile.display_name.strip() or is_random(user.profile.display_name):
if display_names:
user.profile.display_name = display_names[0]
elif emails:
user.profile.display_name = emails[0].split('@')[0]
        user.profile.save()
| 32.262626 | 109 | 0.681277 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,526 | 0.238885 |
f20ad7ae21fec4c62f9a2ffdfad7aa4815cb96a9 | 1,909 | py | Python | launch/test_motion.launch.py | RoboJackets/robocup-software | ae2920b8b98213e625d0565dd67005e7a8595fac | ["Apache-2.0"] | 200 | 2015-01-26T01:45:34.000Z | 2022-03-19T13:05:31.000Z | launch/test_motion.launch.py | RoboJackets/robocup-software | ae2920b8b98213e625d0565dd67005e7a8595fac | ["Apache-2.0"] | 1,254 | 2015-01-03T01:57:35.000Z | 2022-03-16T06:32:21.000Z | launch/test_motion.launch.py | RoboJackets/robocup-software | ae2920b8b98213e625d0565dd67005e7a8595fac | ["Apache-2.0"] | 206 | 2015-01-21T02:03:18.000Z | 2022-02-01T17:57:46.000Z | import os
from pathlib import Path
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import IncludeLaunchDescription, SetEnvironmentVariable, Shutdown
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch_ros.actions import Node
def generate_launch_description():
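    # Bring up the simulated motion stack: grSim plus radio, control, and config-server nodes, the vision receiver/filter launch files, and an internal referee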
bringup_dir = Path(get_package_share_directory('rj_robocup'))
launch_dir = bringup_dir / 'launch'
stdout_linebuf_envvar = SetEnvironmentVariable(
'RCUTILS_CONSOLE_STDOUT_LINE_BUFFERED', '1')
grsim = Node(package='rj_robocup', executable='grSim', arguments=[])
radio = Node(package='rj_robocup',
executable='sim_radio_node',
output='screen',
on_exit=Shutdown())
control = Node(package='rj_robocup',
executable='control_node',
output='screen',
on_exit=Shutdown())
config_server = Node(package='rj_robocup',
executable='config_server',
output='screen',
on_exit=Shutdown())
vision_receiver_launch_path = str(launch_dir / "vision_receiver.launch.py")
vision_receiver = IncludeLaunchDescription(
PythonLaunchDescriptionSource(vision_receiver_launch_path))
ref_receiver = Node(package='rj_robocup',
executable='internal_referee_node',
output='screen',
on_exit=Shutdown())
vision_filter_launch_path = str(launch_dir / "vision_filter.launch.py")
vision_filter = IncludeLaunchDescription(
PythonLaunchDescriptionSource(vision_filter_launch_path))
return LaunchDescription([
grsim, stdout_linebuf_envvar, config_server, radio, control,
vision_receiver, vision_filter, ref_receiver
])
| 36.711538 | 85 | 0.677842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 280 | 0.146674 |
f20b33fbb1accee4936549d7e876ab92878ab6ba | 1,856 | py | Python | demo.py | nikp29/eDensiometer | e85a861c2faefb1911cf7b9cf10ee180afe85f13 | ["FSFAP"] | 2 | 2020-03-23T20:08:21.000Z | 2021-06-06T21:15:40.000Z | demo.py | nikp29/eDensiometer | e85a861c2faefb1911cf7b9cf10ee180afe85f13 | ["FSFAP"] | 1 | 2022-02-12T03:46:54.000Z | 2022-02-12T03:46:54.000Z | demo.py | nikp29/eDensiometer | e85a861c2faefb1911cf7b9cf10ee180afe85f13 | ["FSFAP"] | null | null | null | # A Rapid Proof of Concept for the eDensiometer
# Copyright 2018, Nikhil Patel. All Rights Reserved. Created with contributions from Billy Pierce.
# Imports
from PIL import Image
from pprint import pprint
import numpy as np
import time as time_
def millis(): # from https://stackoverflow.com/questions/5998245/get-current-time-in-milliseconds-in-python/6000198#6000198
return int(round(time_.time() * 1000))
start = millis()
# Constants
# BRIGHT_CUTOFF = 175
RED_CUTOFF = 200
GREEN_CUTOFF = 150
BLUE_CUTOFF = 200
# Pull from test.jpg image in local directory
temp = np.asarray(Image.open('test.jpg'))
print(temp.shape)
# Variable Initialization
result = np.zeros((temp.shape[0], temp.shape[1], temp.shape[2]))
temp_bright = np.zeros((temp.shape[0], temp.shape[1]))
count_total = 0
count_open = 0
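# A pixel is counted as open sky only when all three RGB channels exceed their cutoffs; the plain brightness test is left commented out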
# Cycle through image
for row in range(0, temp.shape[0]):
for element in range(0, temp.shape[1]):
count_total += 1
temp_bright[row, element] = (int(temp[row][element][0]) + int(temp[row][element][1]) + int(temp[row][element][2]))/3
# bright = temp_bright[row][element] > BRIGHT_CUTOFF
red_enough = temp[row][element][0] > RED_CUTOFF
green_enough = temp[row][element][1] > GREEN_CUTOFF
blue_enough = temp[row][element][2] > BLUE_CUTOFF
if red_enough and green_enough and blue_enough:
# print(temp[row, element])
count_open += 1
result[row, element] = [255, 255, 255]
# Save filtered image as final.jpg
final = Image.fromarray(result.astype('uint8'), 'RGB')
final.save('final.jpg')
# Return/Print Percent Coverage
percent_open = count_open/count_total
percent_cover = 1 - percent_open
end = millis()
print("Percent Open: " + str(percent_open))
print("Percent Cover: " + str(percent_cover))
runtime = end-start
print("Runtime in MS: " + str(runtime)) | 30.933333 | 124 | 0.696659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 613 | 0.33028 |
f20c450c0dce05186c845a952d08081cb7846ab5 | 1,833 | py | Python | chart/script/provenance_ycsb_thruput.py | RUAN0007/nusthesis | 932367195171da2d1c82870cc5b96c0e760b4ca8 | ["MIT"] | null | null | null | chart/script/provenance_ycsb_thruput.py | RUAN0007/nusthesis | 932367195171da2d1c82870cc5b96c0e760b4ca8 | ["MIT"] | null | null | null | chart/script/provenance_ycsb_thruput.py | RUAN0007/nusthesis | 932367195171da2d1c82870cc5b96c0e760b4ca8 | ["MIT"] | null | null | null | import sys
import os
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import matplotlib as mpl
import config
def main():
if 1 < len(sys.argv) :
diagram_path = sys.argv[1]
else:
diagram_path = ""
curDir = os.path.dirname(os.path.realpath(__file__))
data_path = os.path.join(curDir, "data", "provenance", "ycsb_thruput")
x_axis, series_names, series = config.parse(data_path)
# print x_axis
# print series_names
# print series
blk_sizes = x_axis
xlabels = [str(int(x)/100) for x in blk_sizes]
series_count = len(series_names)
width, offsets = config.compute_width_offsets(series_count)
f, (ax) = plt.subplots()
# # f.set_size_inches(, 4)
for i, series_name in enumerate(series_names):
series_data = series[series_name]
series_offsets = [offsets[i]] * len(series_data)
base_xticks = range(len(series_data))
xticks = config.sum_list(base_xticks, series_offsets)
# print xticks
# print series_name
# print series_data
ax.bar(xticks, series_data, width=width, color=config.colors[series_name], edgecolor='black',align='center', label=series_name)
# ax.set_title("Throughput")
ax.set(xlabel=r'# of txns per block (x100)', ylabel='tps')
ax.set_xticks(base_xticks)
ax.set_xticklabels(xlabels)
ax.set_ylim([0, 2500])
handles, labels = ax.get_legend_handles_labels()
f.legend(handles, labels,
loc='upper center', ncol=1, bbox_to_anchor=(0.47, 0.90),
columnspacing=1, handletextpad=1, fontsize=20)
if diagram_path == "":
plt.tight_layout()
plt.show()
else:
f.tight_layout()
f.savefig(diagram_path, bbox_inches='tight')
if __name__ == "__main__":
    sys.exit(main())
| 30.55 | 135 | 0.651391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 270 | 0.1473 |
f20d43c8664dcca2ef65c9dd2e88a696d94a4ea3 | 3,157 | py | Python | core/handlers/filters_chat.py | Smashulica/nebula8 | 010df165e3cc61e0154d20310fa972482ec0e7be | ["Apache-2.0"] | null | null | null | core/handlers/filters_chat.py | Smashulica/nebula8 | 010df165e3cc61e0154d20310fa972482ec0e7be | ["Apache-2.0"] | null | null | null | core/handlers/filters_chat.py | Smashulica/nebula8 | 010df165e3cc61e0154d20310fa972482ec0e7be | ["Apache-2.0"] | null | null | null | from core.utilities.functions import delete_message
from core.utilities.message import message
from core.database.repository.group import GroupRepository
"""
This function determines the MIME type of a file attached to a
Telegram message and filters it according to the group's settings
"""
def init(update, context):
apk = 'application/vnd.android.package-archive'
doc = 'application/msword'
docx = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
exe = 'application/x-ms-dos-executable'
    gif = 'video/mp4'  # Telegram delivers animated GIFs as MP4 documents
jpg = 'image/jpeg'
mp3 = 'audio/mpeg'
pdf = 'application/pdf'
py = 'text/x-python'
svg = 'image/svg+xml'
txt = 'text/plain'
targz = 'application/x-compressed-tar'
wav = 'audio/x-wav'
xml = 'application/xml'
filezip = 'application/zip'
msg = update.effective_message
chat = update.effective_message.chat_id
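    # per-group filter toggles (apk_filter, exe_filter, ...) come from the group's database record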
group = GroupRepository().getById(chat)
if msg.document is not None:
#No APK Allowed
if msg.document.mime_type == apk and group['apk_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No APK Allowed!</b>")
#No DOC/DOCX Allowed
        if msg.document.mime_type in (doc, docx) and group['docx_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No DOC/DOCX Allowed!</b>")
#No EXE Allowed
if msg.document.mime_type == exe and group['exe_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No EXE Allowed!</b>")
#No GIF Allowed
if msg.document.mime_type == gif and group['gif_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No GIF Allowed!</b>")
#No JPG Allowed
if msg.document.mime_type == jpg and group['jpg_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No JPG Allowed!</b>")
#No TARGZ Allowed
if msg.document.mime_type == targz and group['targz_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No TARGZ Allowed!</b>")
#No ZIP Allowed
if msg.document.mime_type == filezip and group['zip_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No ZIP Allowed!</b>")
if msg.document.mime_type == wav:
print("NO WAV ALLOWED")
if msg.document.mime_type == xml:
print("NO XML ALLOWED")
if msg.document.mime_type == mp3:
print("NO MP3 ALLOWED")
if msg.document.mime_type == pdf:
print("NO PDF ALLOWED")
if msg.document.mime_type == py:
print("NO PY ALLOWED")
if msg.document.mime_type == svg:
print("NO SVG ALLOWED")
if msg.document.mime_type == txt:
print("NO TXT ALLOWED") | 44.464789 | 105 | 0.631929 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,124 | 0.356034 |
f20eb1617a65a8d8e7031e114930d28913b16142 | 4,287 | py | Python | mipsplusplus/parser.py | alexsocha/mipsplusplus | ee7f87605682fe0b219f754069bf11da80c0312a | [
"MIT"
]
| 1 | 2021-01-04T09:35:50.000Z | 2021-01-04T09:35:50.000Z | mipsplusplus/parser.py | alexsocha/mipsplusplus | ee7f87605682fe0b219f754069bf11da80c0312a | [
"MIT"
]
| null | null | null | mipsplusplus/parser.py | alexsocha/mipsplusplus | ee7f87605682fe0b219f754069bf11da80c0312a | [
"MIT"
]
| null | null | null | from mipsplusplus import utils
from mipsplusplus import operations
OPERATOR_ORDERING = [
['addressof', 'not', 'neg'],
['*', '/', '%'],
['+', '-'],
['<<', '>>', '<<<', '>>>'],
['<', '>', '<=', '>='],
['==', '!='],
['and', 'or', 'xor', 'nor'],
['as']
]
EXPR_OPERATORS = set([op for ops in OPERATOR_ORDERING for op in ops] + ['(', ')'])
def splitExpression(expression):
squareBracketDepth = 0
isSingleQuote = False
isDoubleQuote = False
funcBracketDepth = 0
# Split expression on whitespace or single operators,
# given it isn't in single quotes, double quotes, square brackets,
  # or within a function such as alloc(...)
tokenList = ['']
tokenIdx = 0
i = 0
while i < len(expression):
char = expression[i]
if char == '\'': isSingleQuote = not isSingleQuote
if char == '"': isDoubleQuote = not isDoubleQuote
if isSingleQuote == False and isDoubleQuote == False:
if funcBracketDepth == 0:
if char == '[': squareBracketDepth += 1
elif char == ']': squareBracketDepth -= 1
elif char == '(':
isSysFunc = False
for func in utils.SYS_FUNCTIONS:
if tokenList[tokenIdx] == func: isSysFunc = True
if isSysFunc: funcBracketDepth += 1
if funcBracketDepth == 0 and squareBracketDepth == 0:
nextOperator = None
for op in EXPR_OPERATORS:
spacedOp = ' {} '.format(op) if op.isalnum() else op
if expression[i:].startswith(spacedOp):
if nextOperator is None or len(spacedOp) > len(nextOperator):
nextOperator = spacedOp
if char.isspace() or nextOperator is not None:
if tokenList[tokenIdx] != '':
tokenList += ['']
tokenIdx += 1
if nextOperator is not None:
tokenList[tokenIdx] += nextOperator.strip()
tokenList += ['']
tokenIdx += 1
i += len(nextOperator)-1
i += 1
while i < len(expression):
if not expression[i].isspace(): break
else: i += 1
continue
else:
if char == '(': funcBracketDepth += 1
elif char == ')': funcBracketDepth -= 1
tokenList[tokenIdx] += char
i += 1
if len(tokenList) > 0 and tokenList[-1] == '':
tokenList = tokenList[:-1]
# Convert minus sign to negative e.g. ['+', '-', '8'] => ['+', '-8']
newTokenList = []
tokenIdx = 0
while tokenIdx < len(tokenList):
if tokenList[tokenIdx] == '-' and tokenIdx < len(tokenList)-1:
if tokenIdx == 0 or tokenList[tokenIdx-1] in EXPR_OPERATORS:
newTokenList.append('-' + tokenList[tokenIdx+1])
tokenIdx += 2
continue
newTokenList.append(tokenList[tokenIdx])
tokenIdx += 1
return newTokenList
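# e.g. splitExpression('1 + 2 * x') yields ['1', '+', '2', '*', 'x'], while quoted
# strings, [...]-indexed operands and alloc(...)-style calls stay single tokens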
def infixToPostfix(tokenList, getToken = lambda item: item):
# Get priorities from ordering
priorities = {}
for (level, ops) in enumerate(OPERATOR_ORDERING):
priorities = {**priorities, **{op: len(OPERATOR_ORDERING)-level for op in ops}}
# Convert expression to reverse polish (postfix) notation
stack = []
output = []
for item in tokenList:
token = getToken(item)
if token not in EXPR_OPERATORS:
output.append(item)
elif token == '(':
stack.append(item)
elif token == ')':
while stack and getToken(stack[-1]) != '(':
output.append(stack.pop())
stack.pop()
else:
while stack and getToken(stack[-1]) != '(' and priorities[token] <= priorities[getToken(stack[-1])]:
output.append(stack.pop())
stack.append(item)
while stack: output.append(stack.pop())
return output
def isInBrackets(string, idx, b1='(', b2=')'):
  bracketTier = 0
  for i, ch in enumerate(string):
    if ch == b1: bracketTier += 1
    if ch == b2: bracketTier -= 1
    if i == idx: return bracketTier > 0
def isTopLevel(string, idx):
if isInBrackets(string, idx, '(', ')'): return False
if isInBrackets(string, idx, '[', ']'): return False
isSingleQuote = False
isDoubleQuote = False
for i, ch in enumerate(string):
if ch == '\'': isSingleQuote = not isSingleQuote
if ch == '"': isDoubleQuote = not isDoubleQuote
if i == idx: return not isSingleQuote and not isDoubleQuote
| 32.233083 | 106 | 0.585258 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 504 | 0.117565 |
f20f24fc882a6bfe17d609d7a92bcf0cfdf1dd3a | 464 | py | Python | learn-to-code-with-python/10-Lists-Iteration/iterate-in-reverse-with-the-reversed-function.py | MaciejZurek/python_practicing | 0a426f2aed151573e1f8678e0239ff596d92bbde | [
"MIT"
]
| null | null | null | learn-to-code-with-python/10-Lists-Iteration/iterate-in-reverse-with-the-reversed-function.py | MaciejZurek/python_practicing | 0a426f2aed151573e1f8678e0239ff596d92bbde | [
"MIT"
]
| null | null | null | learn-to-code-with-python/10-Lists-Iteration/iterate-in-reverse-with-the-reversed-function.py | MaciejZurek/python_practicing | 0a426f2aed151573e1f8678e0239ff596d92bbde | [
"MIT"
]
| null | null | null | the_simpsons = ["Homer", "Marge", "Bart", "Lisa", "Maggie"]
print(the_simpsons[::-1])
for char in the_simpsons[::-1]:
print(f"{char} has a total of {len(char)} characters.")
print(reversed(the_simpsons))
print(type(reversed(the_simpsons))) # generator object
for char in reversed(the_simpsons): # laduje za kazda iteracja jeden element listy, a nie cala liste od razu, dobre przy duzych listach
print(f"{char} has a total of {len(char)} characters.")
| 33.142857 | 135 | 0.709052 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 247 | 0.532328 |
f20f3b3cdb095ea301a3efa6ea5c8c922e9be8db | 640 | py | Python | ghiaseddin/scripts/download-dataset-lfw10.py | yassersouri/ghiaseddin | a575f2375729e7586ae7c682f8505dbb7619e622 | [
"MIT"
]
| 44 | 2016-09-07T11:04:10.000Z | 2022-03-14T07:38:17.000Z | ghiaseddin/scripts/download-dataset-lfw10.py | yassersouri/ghiaseddin | a575f2375729e7586ae7c682f8505dbb7619e622 | [
"MIT"
]
| 1 | 2016-09-06T23:33:54.000Z | 2016-09-06T23:33:54.000Z | ghiaseddin/scripts/download-dataset-lfw10.py | yassersouri/ghiaseddin | a575f2375729e7586ae7c682f8505dbb7619e622 | [
"MIT"
]
| 13 | 2016-09-17T15:31:06.000Z | 2021-05-22T07:28:46.000Z | from subprocess import call
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
import settings
data_zip_path = os.path.join(settings.lfw10_root, "LFW10.zip")
data_url = "http://cvit.iiit.ac.in/images/Projects/relativeParts/LFW10.zip"
# Downloading the data zip and extracting it
call(["wget",
"--continue", # do not download things again
"--tries=0", # try many times to finish the download
"--output-document=%s" % data_zip_path, # save it to the appropriate place
data_url])
call(["unzip -d %s %s" % (settings.lfw10_root, data_zip_path)], shell=True)
| 33.684211 | 85 | 0.714063 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 289 | 0.451563 |
f210443ae14873f6d0154e4872180eb345a39221 | 9,874 | py | Python | hops/dist_allreduce.py | Limmen/hops-util-py | 99263edcd052dbb554f0cde944fbdc748dc95f06 | [
"Apache-2.0"
]
| null | null | null | hops/dist_allreduce.py | Limmen/hops-util-py | 99263edcd052dbb554f0cde944fbdc748dc95f06 | [
"Apache-2.0"
]
| null | null | null | hops/dist_allreduce.py | Limmen/hops-util-py | 99263edcd052dbb554f0cde944fbdc748dc95f06 | [
"Apache-2.0"
]
| null | null | null | """
Utility functions for launching distributed AllReduce (Horovod over mpirun) TensorFlow jobs on the Hops platform.
These utils facilitate development by hiding the complexity of coordinating Spark executors, GPUs and MPI.
"""
import pydoop.hdfs
import subprocess
import os
import stat
import sys
import threading
import time
import socket
from hops import hdfs as hopshdfs
from hops import tensorboard
from hops import devices
from hops import util
import coordination_server
run_id = 0
def launch(spark_session, notebook):
""" Run notebook pointed to in HopsFS as a python file in mpirun
Args:
:spark_session: SparkSession object
:notebook: The path in HopsFS to the notebook
"""
global run_id
print('\nStarting TensorFlow job, follow your progress on TensorBoard in Jupyter UI! \n')
sys.stdout.flush()
sc = spark_session.sparkContext
app_id = str(sc.applicationId)
conf_num = int(sc._conf.get("spark.executor.instances"))
#Each TF task should be run on 1 executor
nodeRDD = sc.parallelize(range(conf_num), conf_num)
server = coordination_server.Server(conf_num)
server_addr = server.start()
#Force execution on executor, since GPU is located on executor
nodeRDD.foreachPartition(prepare_func(app_id, run_id, notebook, server_addr))
print('Finished TensorFlow job \n')
print('Make sure to check /Logs/TensorFlow/' + app_id + '/runId.' + str(run_id) + ' for logfile and TensorBoard logdir')
def get_logdir(app_id):
global run_id
return hopshdfs.project_path() + '/Logs/TensorFlow/' + app_id + '/horovod/run.' + str(run_id)
def prepare_func(app_id, run_id, nb_path, server_addr):
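    """Return the per-partition function run on each Spark executor; executor 0 drives mpirun."""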
def _wrapper_fun(iter):
for i in iter:
executor_num = i
client = coordination_server.Client(server_addr)
node_meta = {'host': get_ip_address(),
'executor_cwd': os.getcwd(),
'cuda_visible_devices_ordinals': devices.get_minor_gpu_device_numbers()}
client.register(node_meta)
t_gpus = threading.Thread(target=devices.print_periodic_gpu_utilization)
if devices.get_num_gpus() > 0:
t_gpus.start()
# Only spark executor with index 0 should create necessary HDFS directories and start mpirun
# Other executors simply block until index 0 reports mpirun is finished
clusterspec = client.await_reservations()
#pydoop.hdfs.dump('', os.environ['EXEC_LOGFILE'], user=hopshdfs.project_user())
#hopshdfs.init_logger()
#hopshdfs.log('Starting Spark executor with arguments')
gpu_str = '\n\nChecking for GPUs in the environment\n' + devices.get_gpu_info()
#hopshdfs.log(gpu_str)
print(gpu_str)
mpi_logfile_path = os.getcwd() + '/mpirun.log'
if os.path.exists(mpi_logfile_path):
os.remove(mpi_logfile_path)
mpi_logfile = open(mpi_logfile_path, 'w')
py_runnable = localize_scripts(nb_path, clusterspec)
# non-chief executor should not do mpirun
if not executor_num == 0:
client.await_mpirun_finished()
else:
hdfs_exec_logdir, hdfs_appid_logdir = hopshdfs.create_directories(app_id, run_id, param_string='Horovod')
tb_hdfs_path, tb_pid = tensorboard.register(hdfs_exec_logdir, hdfs_appid_logdir, 0)
mpi_cmd = 'HOROVOD_TIMELINE=' + tensorboard.logdir() + '/timeline.json' + \
' TENSORBOARD_LOGDIR=' + tensorboard.logdir() + \
' mpirun -np ' + str(get_num_ps(clusterspec)) + ' --hostfile ' + get_hosts_file(clusterspec) + \
' -bind-to none -map-by slot ' + \
' -x LD_LIBRARY_PATH ' + \
' -x HOROVOD_TIMELINE ' + \
' -x TENSORBOARD_LOGDIR ' + \
' -x NCCL_DEBUG=INFO ' + \
' -mca pml ob1 -mca btl ^openib ' + \
os.environ['PYSPARK_PYTHON'] + ' ' + py_runnable
mpi = subprocess.Popen(mpi_cmd,
shell=True,
stdout=mpi_logfile,
stderr=mpi_logfile,
preexec_fn=util.on_executor_exit('SIGTERM'))
t_log = threading.Thread(target=print_log)
t_log.start()
mpi.wait()
client.register_mpirun_finished()
if devices.get_num_gpus() > 0:
t_gpus.do_run = False
t_gpus.join()
return_code = mpi.returncode
if return_code != 0:
cleanup(tb_hdfs_path)
t_log.do_run = False
t_log.join()
raise Exception('mpirun FAILED, look in the logs for the error')
cleanup(tb_hdfs_path)
t_log.do_run = False
t_log.join()
return _wrapper_fun
def print_log():
mpi_logfile_path = os.getcwd() + '/mpirun.log'
mpi_logfile = open(mpi_logfile_path, 'r')
t = threading.currentThread()
while getattr(t, "do_run", True):
where = mpi_logfile.tell()
line = mpi_logfile.readline()
if not line:
time.sleep(1)
mpi_logfile.seek(where)
else:
            print(line)
# Get the last outputs
line = mpi_logfile.readline()
while line:
where = mpi_logfile.tell()
        print(line)
line = mpi_logfile.readline()
mpi_logfile.seek(where)
def cleanup(tb_hdfs_path):
hopshdfs.log('Performing cleanup')
handle = hopshdfs.get()
if not tb_hdfs_path == None and not tb_hdfs_path == '' and handle.exists(tb_hdfs_path):
handle.delete(tb_hdfs_path)
hopshdfs.kill_logger()
def get_ip_address():
"""Simple utility to get host IP address"""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
return s.getsockname()[0]
def get_hosts_string(clusterspec):
hosts_string = ''
for host in clusterspec:
        hosts_string = hosts_string + ' ' + host['host'] + ':' + str(len(host['cuda_visible_devices_ordinals']))
    return hosts_string
def get_num_ps(clusterspec):
num = 0
for host in clusterspec:
num += len(host['cuda_visible_devices_ordinals'])
return num
def get_hosts_file(clusterspec):
hf = ''
host_file = os.getcwd() + '/host_file'
for host in clusterspec:
hf = hf + '\n' + host['host'] + ' ' + 'slots=' + str(len(host['cuda_visible_devices_ordinals']))
with open(host_file, 'w') as hostfile: hostfile.write(hf)
return host_file
def find_host_in_clusterspec(clusterspec, host):
for h in clusterspec:
        if h['host'] == host:  # cluster entries are keyed 'host' (see node_meta), not 'name'
return h
# The code generated by this function will be called in an eval, which changes the working_dir and cuda_visible_devices for process running mpirun
def generate_environment_script(clusterspec):
import_script = 'import os \n' \
'from hops import util'
export_script = ''
for host in clusterspec:
export_script += 'def export_workdir():\n' \
' if util.get_ip_address() == \"' + find_host_in_clusterspec(clusterspec, host['host'])['host'] + '\":\n' \
                         '        os.chdir(\"' + host['executor_cwd'] + '\")\n' \
' os.environ["CUDA_DEVICE_ORDER"]=\"PCI_BUS_ID\" \n' \
' os.environ["CUDA_VISIBLE_DEVICES"]=\"' + ",".join(str(x) for x in host['cuda_visible_devices_ordinals']) + '\"\n'
return import_script + '\n' + export_script
def localize_scripts(nb_path, clusterspec):
# 1. Download the notebook as a string
fs_handle = hopshdfs.get_fs()
fd = fs_handle.open_file(nb_path, flags='r')
note = fd.read()
fd.close()
path, filename = os.path.split(nb_path)
f_nb = open(filename,"w+")
f_nb.write(note)
f_nb.flush()
f_nb.close()
# 2. Convert notebook to py file
jupyter_runnable = os.path.abspath(os.path.join(os.environ['PYSPARK_PYTHON'], os.pardir)) + '/jupyter'
conversion_cmd = jupyter_runnable + ' nbconvert --to python ' + filename
conversion = subprocess.Popen(conversion_cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
conversion.wait()
stdout, stderr = conversion.communicate()
print(stdout)
print(stderr)
# 3. Prepend script to export environment variables and Make py file runnable
py_runnable = os.getcwd() + '/' + filename.split('.')[0] + '.py'
notebook = 'with open("generate_env.py", "r") as myfile:\n' \
' data=myfile.read()\n' \
' exec(data)\n'
with open(py_runnable, 'r') as original: data = original.read()
with open(py_runnable, 'w') as modified: modified.write(notebook + data)
st = os.stat(py_runnable)
os.chmod(py_runnable, st.st_mode | stat.S_IEXEC)
# 4. Localize generate_env.py script
environment_script = generate_environment_script(clusterspec)
generate_env_path = os.getcwd() + '/generate_env.py'
f_env = open(generate_env_path, "w+")
f_env.write(environment_script)
f_env.flush()
f_env.close()
# 5. Make generate_env.py runnable
st = os.stat(generate_env_path)
os.chmod(py_runnable, st.st_mode | stat.S_IEXEC)
return py_runnable | 36.435424 | 295 | 0.594896 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,707 | 0.274154 |
f21153cc2731f7ee87405f5ca13164bed51c9656 | 713 | py | Python | api/migrations/versions/e956985ff509_.py | SnSation/Pokemart | c91dcd155ae3abe343781b3d26211d2463d41ff3 | [
"MIT"
]
| null | null | null | api/migrations/versions/e956985ff509_.py | SnSation/Pokemart | c91dcd155ae3abe343781b3d26211d2463d41ff3 | [
"MIT"
]
| null | null | null | api/migrations/versions/e956985ff509_.py | SnSation/Pokemart | c91dcd155ae3abe343781b3d26211d2463d41ff3 | [
"MIT"
]
| null | null | null | """empty message
Revision ID: e956985ff509
Revises: 4b471bbc0004
Create Date: 2020-12-02 22:47:08.536332
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e956985ff509'
down_revision = '4b471bbc0004'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('national_pokemon', 'description')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('national_pokemon', sa.Column('description', sa.VARCHAR(length=500), autoincrement=False, nullable=True))
# ### end Alembic commands ###
| 24.586207 | 123 | 0.71108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 422 | 0.591865 |
f2125363bb0906c29b0070780b0f856daaf2354c | 926 | py | Python | tests/features/steps/ahk_steps.py | epth/ahk | 3a09830b10bf93d6dabda5f055665024570ff6c8 | [
"MIT"
]
| 1 | 2021-02-16T14:16:58.000Z | 2021-02-16T14:16:58.000Z | tests/features/steps/ahk_steps.py | epth/ahk | 3a09830b10bf93d6dabda5f055665024570ff6c8 | [
"MIT"
]
| null | null | null | tests/features/steps/ahk_steps.py | epth/ahk | 3a09830b10bf93d6dabda5f055665024570ff6c8 | [
"MIT"
]
| null | null | null | from behave.matchers import RegexMatcher
from ahk import AHK
from behave_classy import step_impl_base
Base = step_impl_base()
class AHKSteps(AHK, Base):
@Base.given(u'the mouse position is ({xpos:d}, {ypos:d})')
def given_mouse_move(self, xpos, ypos):
self.mouse_move(x=xpos, y=ypos)
@Base.when(u'I move the mouse (UP|DOWN|LEFT|RIGHT) (\d+)px', matcher=RegexMatcher)
def move_direction(self, direction, px):
px = int(px)
if direction in ('UP', 'DOWN'):
axis = 'y'
else:
axis = 'x'
if direction in ('LEFT', 'UP'):
px = px * -1
kwargs = {axis: px, 'relative': True}
self.mouse_move(**kwargs)
@Base.then(u'I expect the mouse position to be ({xpos:d}, {ypos:d})')
def check_position(self, xpos, ypos):
x, y = self.mouse_position
assert x == xpos
assert y == ypos
AHKSteps().register() | 29.870968 | 86 | 0.596112 | 774 | 0.835853 | 0 | 0 | 731 | 0.789417 | 0 | 0 | 186 | 0.200864 |
f2160ab0d4f01e332dfeaf82b6dd74a2d6cbaae5 | 1,703 | py | Python | snakewm/apps/games/pong/bat.py | sigmaister/snakeware_os | 4a821c2a0dc7762c4ab35053286f5e23125386d0 | [
"MIT"
]
| 1,621 | 2020-05-29T06:49:27.000Z | 2022-03-15T08:20:08.000Z | snakewm/apps/games/pong/bat.py | sigmaister/snakeware_os | 4a821c2a0dc7762c4ab35053286f5e23125386d0 | [
"MIT"
]
| 119 | 2020-05-29T17:10:45.000Z | 2021-12-19T23:43:17.000Z | snakewm/apps/games/pong/bat.py | sigmaister/snakeware_os | 4a821c2a0dc7762c4ab35053286f5e23125386d0 | [
"MIT"
]
| 125 | 2020-05-29T07:43:22.000Z | 2022-03-18T22:13:08.000Z | import pygame
from pygame.locals import *
class ControlScheme:
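    """Key bindings used to move a bat; defaults to the arrow keys."""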
def __init__(self):
self.up = K_UP
self.down = K_DOWN
class Bat:
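    """A paddle that moves vertically in response to key events, clamped to the court."""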
def __init__(self, start_pos, control_scheme, court_size):
self.control_scheme = control_scheme
self.move_up = False
self.move_down = False
self.move_speed = 450.0
self.court_size = court_size
self.length = 30.0
self.width = 5.0
self.position = [float(start_pos[0]), float(start_pos[1])]
self.rect = pygame.Rect((start_pos[0], start_pos[1]), (self.width, self.length))
self.colour = pygame.Color("#FFFFFF")
def process_event(self, event):
if event.type == KEYDOWN:
if event.key == self.control_scheme.up:
self.move_up = True
if event.key == self.control_scheme.down:
self.move_down = True
if event.type == KEYUP:
if event.key == self.control_scheme.up:
self.move_up = False
if event.key == self.control_scheme.down:
self.move_down = False
def update(self, dt):
if self.move_up:
self.position[1] -= dt * self.move_speed
if self.position[1] < 10.0:
self.position[1] = 10.0
self.rect.y = self.position[1]
if self.move_down:
self.position[1] += dt * self.move_speed
if self.position[1] > self.court_size[1] - self.length - 10:
self.position[1] = self.court_size[1] - self.length - 10
self.rect.y = self.position[1]
def render(self, screen):
pygame.draw.rect(screen, self.colour, self.rect)
| 28.383333 | 88 | 0.570757 | 1,655 | 0.971814 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 0.005285 |
f21845fdd846667effc17afb28dfd50fe6d29229 | 3,527 | py | Python | mimic/model/rackspace_image_store.py | ksheedlo/mimic | c84b6a0d336e8a37a685b5d71537aec5e44d9a8f | [
"Apache-2.0"
]
| 141 | 2015-01-07T19:28:31.000Z | 2022-02-11T06:04:13.000Z | mimic/model/rackspace_image_store.py | ksheedlo/mimic | c84b6a0d336e8a37a685b5d71537aec5e44d9a8f | [
"Apache-2.0"
]
| 575 | 2015-01-04T20:23:08.000Z | 2019-10-04T08:20:04.000Z | mimic/model/rackspace_image_store.py | ksheedlo/mimic | c84b6a0d336e8a37a685b5d71537aec5e44d9a8f | [
"Apache-2.0"
]
| 63 | 2015-01-09T20:39:41.000Z | 2020-07-06T14:20:56.000Z | """
An image store representing Rackspace specific images
"""
from __future__ import absolute_import, division, unicode_literals
import attr
from six import iteritems
from mimic.model.rackspace_images import (RackspaceWindowsImage,
RackspaceCentOSPVImage, RackspaceCentOSPVHMImage,
RackspaceCoreOSImage, RackspaceDebianImage,
RackspaceFedoraImage, RackspaceFreeBSDImage,
RackspaceGentooImage, RackspaceOpenSUSEImage,
RackspaceRedHatPVImage, RackspaceRedHatPVHMImage,
RackspaceUbuntuPVImage, RackspaceUbuntuPVHMImage,
RackspaceVyattaImage, RackspaceScientificImage,
RackspaceOnMetalCentOSImage, RackspaceOnMetalCoreOSImage,
RackspaceOnMetalDebianImage, RackspaceOnMetalFedoraImage,
RackspaceOnMetalUbuntuImage)
from mimic.model.rackspace_images import create_rackspace_images
@attr.s
class RackspaceImageStore(object):
"""
A store for images to share between nova_api and glance_api
:var image_list: list of Rackspace images
"""
image_list = attr.ib(default=attr.Factory(list))
def create_image_store(self, tenant_id):
"""
Generates the data for each image in each image class
"""
image_classes = [RackspaceWindowsImage, RackspaceCentOSPVImage,
RackspaceCentOSPVHMImage, RackspaceCoreOSImage, RackspaceDebianImage,
RackspaceFedoraImage, RackspaceFreeBSDImage, RackspaceGentooImage,
RackspaceOpenSUSEImage, RackspaceRedHatPVImage, RackspaceRedHatPVHMImage,
RackspaceUbuntuPVImage, RackspaceUbuntuPVHMImage, RackspaceVyattaImage,
RackspaceScientificImage, RackspaceOnMetalCentOSImage,
RackspaceOnMetalCoreOSImage, RackspaceOnMetalDebianImage,
RackspaceOnMetalFedoraImage, RackspaceOnMetalUbuntuImage]
if len(self.image_list) < 1:
for image_class in image_classes:
for image, image_spec in iteritems(image_class.images):
image_name = image
image_id = image_spec['id']
minRam = image_spec['minRam']
minDisk = image_spec['minDisk']
image_size = image_spec['OS-EXT-IMG-SIZE:size']
image = image_class(image_id=image_id, tenant_id=tenant_id,
image_size=image_size, name=image_name, minRam=minRam,
minDisk=minDisk)
if 'com.rackspace__1__ui_default_show' in image_spec:
image.set_is_default()
self.image_list.append(image)
self.image_list.extend(create_rackspace_images(tenant_id))
return self.image_list
def get_image_by_id(self, image_id):
"""
Get an image by its id
"""
for image in self.image_list:
if image_id == image.image_id:
return image
def add_image_to_store(self, image):
"""
Add a new image to the list of images
"""
self.image_list.append(image)
| 48.986111 | 99 | 0.588602 | 2,320 | 0.657783 | 0 | 0 | 2,328 | 0.660051 | 0 | 0 | 444 | 0.125886 |
f218482525c6f07411100d66a18c105ea0a2d6c8 | 926 | py | Python | samples/noxfile_config.py | ikuleshov/python-analytics-admin | f3d6fa78292878e7470806be0c116c6ca589eec5 | [
"Apache-2.0"
]
| null | null | null | samples/noxfile_config.py | ikuleshov/python-analytics-admin | f3d6fa78292878e7470806be0c116c6ca589eec5 | [
"Apache-2.0"
]
| null | null | null | samples/noxfile_config.py | ikuleshov/python-analytics-admin | f3d6fa78292878e7470806be0c116c6ca589eec5 | [
"Apache-2.0"
]
| null | null | null | TEST_CONFIG_OVERRIDE = {
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
"gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
"envs": {
"GA_TEST_PROPERTY_ID": "276206997",
"GA_TEST_ACCOUNT_ID": "199820965",
"GA_TEST_USER_LINK_ID": "103401743041912607932",
"GA_TEST_PROPERTY_USER_LINK_ID": "105231969274497648555",
"GA_TEST_ANDROID_APP_DATA_STREAM_ID": "2828100949",
"GA_TEST_IOS_APP_DATA_STREAM_ID": "2828089289",
"GA_TEST_WEB_DATA_STREAM_ID": "2828068992",
},
}
| 46.3 | 70 | 0.712743 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 757 | 0.817495 |
f219263481489e692def3950e22f5330890ee7b8 | 476 | py | Python | exercises/fr/test_01_09.py | tuanducdesign/spacy-course | f8d092c5fa2997fccb3f367d174dce8667932b3d | [
"MIT"
]
| null | null | null | exercises/fr/test_01_09.py | tuanducdesign/spacy-course | f8d092c5fa2997fccb3f367d174dce8667932b3d | [
"MIT"
]
| null | null | null | exercises/fr/test_01_09.py | tuanducdesign/spacy-course | f8d092c5fa2997fccb3f367d174dce8667932b3d | [
"MIT"
]
| null | null | null | def test():
assert "for ent in doc.ents" in __solution__, "Itères-tu sur les entités ?"
assert x_pro.text == "X Pro", "Es-tu certain que x_pro contient les bons tokens ?"
__msg__.good(
"Parfait ! Bien sur, tu n'as pas besoin de faire cela manuellement à chaque fois."
"Dans le prochain exercice, tu vas découvrir le matcher à base de règles de spaCy, "
"qui peut t'aider à trouver des mots et des phrases spécifiques dans un texte."
)
| 47.6 | 92 | 0.676471 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 362 | 0.747934 |
f219a6a5d2eba5cb99ad3a1f9f919f6e65e608c6 | 77 | py | Python | settings/channel_archiver/NIH.pressure_downstream_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
]
| null | null | null | settings/channel_archiver/NIH.pressure_downstream_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
]
| 1 | 2019-10-22T21:28:31.000Z | 2019-10-22T21:39:12.000Z | settings/channel_archiver/NIH.pressure_downstream_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
]
| 2 | 2019-06-06T15:06:46.000Z | 2020-07-20T02:03:22.000Z | filename = '//mx340hs/data/anfinrud_1903/Archive/NIH.pressure_downstream.txt' | 77 | 77 | 0.831169 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 66 | 0.857143 |
f219b4c368dddd54cf8c1f93b4bad8299a4df851 | 17,181 | py | Python | sfepy/terms/terms_navier_stokes.py | vondrejc/sfepy | 8e427af699c4b2858eb096510057abb3ae7e28e8 | [
"BSD-3-Clause"
]
| null | null | null | sfepy/terms/terms_navier_stokes.py | vondrejc/sfepy | 8e427af699c4b2858eb096510057abb3ae7e28e8 | [
"BSD-3-Clause"
]
| null | null | null | sfepy/terms/terms_navier_stokes.py | vondrejc/sfepy | 8e427af699c4b2858eb096510057abb3ae7e28e8 | [
"BSD-3-Clause"
]
| 2 | 2019-01-14T03:12:34.000Z | 2021-05-25T11:44:50.000Z | import numpy as nm
from sfepy.linalg import dot_sequences
from sfepy.terms.terms import Term, terms
class DivGradTerm(Term):
r"""
Diffusion term.
:Definition:
.. math::
\int_{\Omega} \nu\ \nabla \ul{v} : \nabla \ul{u} \mbox{ , }
\int_{\Omega} \nu\ \nabla \ul{u} : \nabla \ul{w} \\
\int_{\Omega} \nabla \ul{v} : \nabla \ul{u} \mbox{ , }
\int_{\Omega} \nabla \ul{u} : \nabla \ul{w}
:Arguments 1:
- material : :math:`\nu` (viscosity, optional)
- virtual : :math:`\ul{v}`
- state : :math:`\ul{u}`
:Arguments 2:
- material : :math:`\nu` (viscosity, optional)
- parameter_1 : :math:`\ul{u}`
- parameter_2 : :math:`\ul{w}`
"""
name = 'dw_div_grad'
arg_types = (('opt_material', 'virtual', 'state'),
('opt_material', 'parameter_1', 'parameter_2'))
arg_shapes = {'opt_material' : '1, 1', 'virtual' : ('D', 'state'),
'state' : 'D', 'parameter_1' : 'D', 'parameter_2' : 'D'}
modes = ('weak', 'eval')
function = staticmethod(terms.term_ns_asm_div_grad)
def d_div_grad(self, out, grad1, grad2, mat, vg, fmode):
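        # flatten each (dim x dim) gradient to a vector so the double contraction grad1 : grad2 becomes a dot product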
sh = grad1.shape
g1 = grad1.reshape((sh[0], sh[1], sh[2] * sh[3]))
g2 = grad2.reshape((sh[0], sh[1], sh[2] * sh[3]))
aux = mat * dot_sequences(g1[..., None], g2, 'ATB')[..., None]
if fmode == 2:
out[:] = aux
status = 0
else:
status = vg.integrate(out, aux, fmode)
return status
def get_fargs(self, mat, virtual, state,
mode=None, term_mode=None, diff_var=None, **kwargs):
vg, _ = self.get_mapping(state)
if mat is None:
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(state)
mat = nm.ones((1, n_qp, 1, 1), dtype=nm.float64)
if mode == 'weak':
if diff_var is None:
grad = self.get(state, 'grad').transpose((0, 1, 3, 2))
sh = grad.shape
grad = grad.reshape((sh[0], sh[1], sh[2] * sh[3], 1))
fmode = 0
else:
grad = nm.array([0], ndmin=4, dtype=nm.float64)
fmode = 1
return grad, mat, vg, fmode
elif mode == 'eval':
grad1 = self.get(virtual, 'grad')
grad2 = self.get(state, 'grad')
fmode = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
return grad1, grad2, mat, vg, fmode
else:
raise ValueError('unsupported evaluation mode in %s! (%s)'
% (self.name, mode))
def get_eval_shape(self, mat, virtual, state,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(state)
return (n_el, 1, 1, 1), state.dtype
def set_arg_types(self):
if self.mode == 'weak':
self.function = terms.term_ns_asm_div_grad
else:
self.function = self.d_div_grad
class ConvectTerm(Term):
r"""
Nonlinear convective term.
:Definition:
.. math::
\int_{\Omega} ((\ul{u} \cdot \nabla) \ul{u}) \cdot \ul{v}
:Arguments:
- virtual : :math:`\ul{v}`
- state : :math:`\ul{u}`
"""
name = 'dw_convect'
arg_types = ('virtual', 'state')
arg_shapes = {'virtual' : ('D', 'state'), 'state' : 'D'}
function = staticmethod(terms.term_ns_asm_convect)
def get_fargs(self, virtual, state,
mode=None, term_mode=None, diff_var=None, **kwargs):
vg, _ = self.get_mapping(state)
grad = self.get(state, 'grad').transpose((0, 1, 3, 2)).copy()
val_qp = self.get(state, 'val')
fmode = diff_var is not None
return grad, val_qp, vg, fmode
class LinearConvectTerm(Term):
r"""
Linearized convective term.
:Definition:
.. math::
\int_{\Omega} ((\ul{b} \cdot \nabla) \ul{u}) \cdot \ul{v}
.. math::
((\ul{b} \cdot \nabla) \ul{u})|_{qp}
:Arguments:
- virtual : :math:`\ul{v}`
- parameter : :math:`\ul{b}`
- state : :math:`\ul{u}`
"""
name = 'dw_lin_convect'
arg_types = ('virtual', 'parameter', 'state')
arg_shapes = {'virtual' : ('D', 'state'), 'parameter' : 'D', 'state' : 'D'}
function = staticmethod(terms.dw_lin_convect)
def get_fargs(self, virtual, parameter, state,
mode=None, term_mode=None, diff_var=None, **kwargs):
vg, _ = self.get_mapping(state)
val_qp = self.get(parameter, 'val')
if mode == 'weak':
if diff_var is None:
grad = self.get(state, 'grad').transpose((0, 1, 3, 2)).copy()
fmode = 0
else:
grad = nm.array([0], ndmin=4, dtype=nm.float64)
fmode = 1
return grad, val_qp, vg, fmode
elif mode == 'qp':
grad = self.get(state, 'grad').transpose((0, 1, 3, 2)).copy()
fmode = 2
return grad, val_qp, vg, fmode
else:
raise ValueError('unsupported evaluation mode in %s! (%s)'
% (self.name, mode))
class StokesTerm(Term):
r"""
Stokes problem coupling term. Corresponds to weak forms of gradient and
divergence terms. Can be evaluated.
:Definition:
.. math::
\int_{\Omega} p\ \nabla \cdot \ul{v} \mbox{ , }
\int_{\Omega} q\ \nabla \cdot \ul{u}
\mbox{ or }
\int_{\Omega} c\ p\ \nabla \cdot \ul{v} \mbox{ , }
\int_{\Omega} c\ q\ \nabla \cdot \ul{u}
:Arguments 1:
- material : :math:`c` (optional)
- virtual : :math:`\ul{v}`
- state : :math:`p`
:Arguments 2:
- material : :math:`c` (optional)
- state : :math:`\ul{u}`
- virtual : :math:`q`
:Arguments 3:
- material : :math:`c` (optional)
- parameter_v : :math:`\ul{u}`
- parameter_s : :math:`p`
"""
name = 'dw_stokes'
arg_types = (('opt_material', 'virtual', 'state'),
('opt_material', 'state', 'virtual'),
('opt_material', 'parameter_v', 'parameter_s'))
arg_shapes = [{'opt_material' : '1, 1',
'virtual/grad' : ('D', None), 'state/grad' : 1,
'virtual/div' : (1, None), 'state/div' : 'D',
'parameter_v' : 'D', 'parameter_s' : 1},
{'opt_material' : None}]
modes = ('grad', 'div', 'eval')
@staticmethod
def d_eval(out, coef, vec_qp, div, vvg):
out_qp = coef * vec_qp * div
status = vvg.integrate(out, out_qp)
return status
def get_fargs(self, coef, vvar, svar,
mode=None, term_mode=None, diff_var=None, **kwargs):
if self.mode == 'grad':
qp_var, qp_name = svar, 'val'
else:
qp_var, qp_name = vvar, 'div'
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(vvar)
if coef is None:
coef = nm.ones((1, n_qp, 1, 1), dtype=nm.float64)
if mode == 'weak':
vvg, _ = self.get_mapping(vvar)
svg, _ = self.get_mapping(svar)
if diff_var is None:
val_qp = self.get(qp_var, qp_name)
fmode = 0
else:
val_qp = nm.array([0], ndmin=4, dtype=nm.float64)
fmode = 1
return coef, val_qp, svg, vvg, fmode
elif mode == 'eval':
vvg, _ = self.get_mapping(vvar)
div = self.get(vvar, 'div')
vec_qp = self.get(svar, 'val')
return coef, vec_qp, div, vvg
else:
raise ValueError('unsupported evaluation mode in %s! (%s)'
% (self.name, mode))
def get_eval_shape(self, coef, vvar, svar,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(vvar)
return (n_el, 1, 1, 1), vvar.dtype
def set_arg_types(self):
self.function = {
'grad' : terms.dw_grad,
'div' : terms.dw_div,
'eval' : self.d_eval,
}[self.mode]
class GradTerm(Term):
r"""
Evaluate gradient of a scalar or vector field.
Supports 'eval', 'el_avg' and 'qp' evaluation modes.
:Definition:
.. math::
\int_{\Omega} \nabla p \mbox{ or } \int_{\Omega} \nabla \ul{w}
.. math::
\mbox{vector for } K \from \Ical_h: \int_{T_K} \nabla p /
\int_{T_K} 1 \mbox{ or } \int_{T_K} \nabla \ul{w} /
\int_{T_K} 1
.. math::
(\nabla p)|_{qp} \mbox{ or } \nabla \ul{w}|_{qp}
:Arguments:
- parameter : :math:`p` or :math:`\ul{w}`
"""
name = 'ev_grad'
arg_types = ('parameter',)
arg_shapes = [{'parameter' : 1}, {'parameter' : 'D'}]
@staticmethod
def function(out, grad, vg, fmode):
if fmode == 2:
out[:] = grad
status = 0
else:
status = vg.integrate(out, grad, fmode)
return status
def get_fargs(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
vg, _ = self.get_mapping(parameter)
grad = self.get(parameter, 'grad')
fmode = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
return grad, vg, fmode
def get_eval_shape(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(parameter)
if mode != 'qp':
n_qp = 1
return (n_el, n_qp, dim, n_c), parameter.dtype
class DivTerm(Term):
r"""
Evaluate divergence of a vector field.
Supports 'eval', 'el_avg' and 'qp' evaluation modes.
:Definition:
.. math::
\int_{\Omega} \nabla \cdot \ul{u}
.. math::
\mbox{vector for } K \from \Ical_h:
\int_{T_K} \nabla \cdot \ul{u} / \int_{T_K} 1
.. math::
(\nabla \cdot \ul{u})|_{qp}
:Arguments:
- parameter : :math:`\ul{u}`
"""
name = 'ev_div'
arg_types = ('parameter',)
arg_shapes = {'parameter' : 'D'}
@staticmethod
def function(out, div, vg, fmode):
if fmode == 2:
out[:] = div
status = 0
else:
status = vg.integrate(out, div, fmode)
return status
def get_fargs(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
vg, _ = self.get_mapping(parameter)
div = self.get(parameter, 'div')
fmode = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
return div, vg, fmode
def get_eval_shape(self, parameter,
mode=None, term_mode=None, diff_var=None, **kwargs):
n_el, n_qp, dim, n_en, n_c = self.get_data_shape(parameter)
if mode != 'qp':
n_qp = 1
return (n_el, n_qp, 1, 1), parameter.dtype
class DivOperatorTerm(Term):
r"""
Weighted divergence term of a test function.
:Definition:
.. math::
\int_{\Omega} \nabla \cdot \ul{v} \mbox { or } \int_{\Omega} c \nabla
\cdot \ul{v}
:Arguments:
- material : :math:`c` (optional)
- virtual : :math:`\ul{v}`
"""
name = 'dw_div'
arg_types = ('opt_material', 'virtual')
arg_shapes = [{'opt_material' : '1, 1', 'virtual' : ('D', None)},
{'opt_material' : None}]
@staticmethod
def function(out, mat, vg):
div_bf = vg.bfg
n_el, n_qp, dim, n_ep = div_bf.shape
div_bf = div_bf.reshape((n_el, n_qp, dim * n_ep, 1))
div_bf = nm.ascontiguousarray(div_bf)
if mat is not None:
status = vg.integrate(out, mat * div_bf)
else:
status = vg.integrate(out, div_bf)
return status
def get_fargs(self, mat, virtual,
mode=None, term_mode=None, diff_var=None, **kwargs):
vg, _ = self.get_mapping(virtual)
return mat, vg
class GradDivStabilizationTerm(Term):
r"""
Grad-div stabilization term ( :math:`\gamma` is a global stabilization
parameter).
:Definition:
.. math::
\gamma \int_{\Omega} (\nabla\cdot\ul{u}) \cdot (\nabla\cdot\ul{v})
:Arguments:
- material : :math:`\gamma`
- virtual : :math:`\ul{v}`
- state : :math:`\ul{u}`
"""
name = 'dw_st_grad_div'
arg_types = ('material', 'virtual', 'state')
arg_shapes = {'material' : '1, 1', 'virtual' : ('D', 'state'),
'state' : 'D'}
function = staticmethod(terms.dw_st_grad_div)
def get_fargs(self, gamma, virtual, state,
mode=None, term_mode=None, diff_var=None, **kwargs):
vg, _ = self.get_mapping(state)
if diff_var is None:
div = self.get(state, 'div')
fmode = 0
else:
div = nm.array([0], ndmin=4, dtype=nm.float64)
fmode = 1
return div, gamma, vg, fmode
from sfepy.terms.terms_diffusion import LaplaceTerm
class PSPGPStabilizationTerm(LaplaceTerm):
r"""
PSPG stabilization term, pressure part ( :math:`\tau` is a local
stabilization parameter), alias to Laplace term dw_laplace.
:Definition:
.. math::
\sum_{K \in \Ical_h}\int_{T_K} \tau_K\ \nabla p \cdot \nabla q
:Arguments:
- material : :math:`\tau_K`
- virtual : :math:`q`
- state : :math:`p`
"""
name = 'dw_st_pspg_p'
class PSPGCStabilizationTerm(Term):
r"""
PSPG stabilization term, convective part ( :math:`\tau` is a local
stabilization parameter).
:Definition:
.. math::
\sum_{K \in \Ical_h}\int_{T_K} \tau_K\ ((\ul{b} \cdot \nabla) \ul{u})
\cdot \nabla q
:Arguments:
- material : :math:`\tau_K`
- virtual : :math:`q`
- parameter : :math:`\ul{b}`
- state : :math:`\ul{u}`
"""
name = 'dw_st_pspg_c'
arg_types = ('material', 'virtual', 'parameter', 'state')
arg_shapes = {'material' : '1, 1', 'virtual' : (1, None),
'parameter' : 'D', 'state' : 'D'}
function = staticmethod(terms.dw_st_pspg_c)
def get_fargs(self, tau, virtual, parameter, state,
mode=None, term_mode=None, diff_var=None, **kwargs):
sap, svg = self.get_approximation(virtual)
vap, vvg = self.get_approximation(state)
val_qp = self.get(parameter, 'val')
conn = vap.get_connectivity(self.region, self.integration)
if diff_var is None:
fmode = 0
else:
fmode = 1
return val_qp, state(), tau, svg, vvg, conn, fmode
class SUPGPStabilizationTerm(Term):
r"""
SUPG stabilization term, pressure part ( :math:`\delta` is a local
stabilization parameter).
:Definition:
.. math::
\sum_{K \in \Ical_h}\int_{T_K} \delta_K\ \nabla p\cdot ((\ul{b} \cdot
\nabla) \ul{v})
:Arguments:
- material : :math:`\delta_K`
- virtual : :math:`\ul{v}`
- parameter : :math:`\ul{b}`
- state : :math:`p`
"""
name = 'dw_st_supg_p'
arg_types = ('material', 'virtual', 'parameter', 'state')
arg_shapes = {'material' : '1, 1', 'virtual' : ('D', None),
'parameter' : 'D', 'state' : 1}
function = staticmethod(terms.dw_st_supg_p)
def get_fargs(self, delta, virtual, parameter, state,
mode=None, term_mode=None, diff_var=None, **kwargs):
vvg, _ = self.get_mapping(virtual)
svg, _ = self.get_mapping(state)
val_qp = self.get(parameter, 'val')
if diff_var is None:
grad = self.get(state, 'grad')
fmode = 0
else:
grad = nm.array([0], ndmin=4, dtype=nm.float64)
fmode = 1
return val_qp, grad, delta, vvg, svg, fmode
class SUPGCStabilizationTerm(Term):
r"""
SUPG stabilization term, convective part ( :math:`\delta` is a local
stabilization parameter).
:Definition:
.. math::
\sum_{K \in \Ical_h}\int_{T_K} \delta_K\ ((\ul{b} \cdot \nabla)
\ul{u})\cdot ((\ul{b} \cdot \nabla) \ul{v})
:Arguments:
- material : :math:`\delta_K`
- virtual : :math:`\ul{v}`
- parameter : :math:`\ul{b}`
- state : :math:`\ul{u}`
"""
name = 'dw_st_supg_c'
arg_types = ('material', 'virtual', 'parameter', 'state')
arg_shapes = {'material' : '1, 1', 'virtual' : ('D', 'state'),
'parameter' : 'D', 'state' : 'D'}
function = staticmethod(terms.dw_st_supg_c)
def get_fargs(self, delta, virtual, parameter, state,
mode=None, term_mode=None, diff_var=None, **kwargs):
ap, vg = self.get_approximation(virtual)
val_qp = self.get(parameter, 'val')
conn = ap.get_connectivity(self.region, self.integration)
if diff_var is None:
fmode = 0
else:
fmode = 1
return val_qp, state(), delta, vg, conn, fmode
| 28.635 | 79 | 0.521623 | 17,004 | 0.989698 | 0 | 0 | 978 | 0.056923 | 0 | 0 | 6,618 | 0.385193 |
f21a51bd13a2f891e2303ec8e105009193f93ecb | 422 | py | Python | saleor/unurshop/crawler/migrations/0013_auto_20210921_0452.py | nlkhagva/saleor | 0d75807d08ac49afcc904733724ac870e8359c10 | [
"CC-BY-4.0"
]
| null | null | null | saleor/unurshop/crawler/migrations/0013_auto_20210921_0452.py | nlkhagva/saleor | 0d75807d08ac49afcc904733724ac870e8359c10 | [
"CC-BY-4.0"
]
| 1 | 2022-02-15T03:31:12.000Z | 2022-02-15T03:31:12.000Z | saleor/unurshop/crawler/migrations/0013_auto_20210921_0452.py | nlkhagva/ushop | abf637eb6f7224e2d65d62d72a0c15139c64bb39 | [
"CC-BY-4.0"
]
| null | null | null | # Generated by Django 3.1.1 on 2021-09-21 04:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('crawler', '0012_auto_20210921_0451'),
]
operations = [
migrations.AlterField(
model_name='crawlerline',
name='ustatus',
field=models.PositiveIntegerField(blank=True, default=1, null=True),
),
]
| 22.210526 | 80 | 0.618483 | 329 | 0.779621 | 0 | 0 | 0 | 0 | 0 | 0 | 103 | 0.244076 |
f21a701d87cd77a1cae7afc78f9f781cba559ff4 | 2,713 | py | Python | src/cli/examples/oss-fuzz-target.py | gdhuper/onefuzz | 6aca32ed9c8318aa81887eeaacff03c406c0e98c | [
"MIT"
]
| 1 | 2020-10-27T08:05:57.000Z | 2020-10-27T08:05:57.000Z | src/cli/examples/oss-fuzz-target.py | gdhuper/onefuzz | 6aca32ed9c8318aa81887eeaacff03c406c0e98c | [
"MIT"
]
| 1 | 2021-02-15T00:38:32.000Z | 2021-02-15T00:38:32.000Z | src/cli/examples/oss-fuzz-target.py | gdhuper/onefuzz | 6aca32ed9c8318aa81887eeaacff03c406c0e98c | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import logging
import os
import sys
import tempfile
from subprocess import PIPE, CalledProcessError, check_call # nosec
from typing import List, Optional
from onefuzztypes.models import NotificationConfig
from onefuzztypes.primitives import PoolName
from onefuzz.api import Command, Onefuzz
from onefuzz.cli import execute_api
SANITIZERS = ["address", "dataflow", "memory", "undefined"]
class Ossfuzz(Command):
def build(self, project: str, sanitizer: str) -> None:
""" Build the latest oss-fuzz target """
self.logger.info("building %s:%s", project, sanitizer)
cmd = [
"docker",
"run",
"--rm",
"-ti",
"-e",
"SANITIZER=%s" % sanitizer,
"--mount",
"src=%s,target=/out,type=bind" % os.getcwd(),
"gcr.io/oss-fuzz/%s" % project,
"compile",
]
check_call(cmd, stderr=PIPE, stdout=PIPE)
def fuzz(
self,
project: str,
build: str,
pool: PoolName,
sanitizers: Optional[List[str]] = None,
notification_config: Optional[NotificationConfig] = None,
) -> None:
""" Build & Launch all of the libFuzzer targets for a given project """
if sanitizers is None:
sanitizers = SANITIZERS
for sanitizer in sanitizers:
with tempfile.TemporaryDirectory() as tmpdir:
os.chdir(tmpdir)
try:
self.build(project, sanitizer)
except CalledProcessError:
self.logger.warning("building %s:%s failed", project, sanitizer)
continue
self.logger.info("launching %s:%s build:%s", project, sanitizer, build)
self.onefuzz.template.ossfuzz.libfuzzer(
project,
"%s:%s" % (sanitizer, build),
pool,
max_target_count=0,
sync_inputs=True,
notification_config=notification_config,
)
def stop(self, project: str) -> None:
for job in self.onefuzz.jobs.list():
if job.config.project != project:
continue
if job.config.build != "base":
continue
self.logger.info("stopping %s: %s", job.job_id, job.state)
self.onefuzz.jobs.delete(job.job_id)
def main() -> int:
return execute_api(
Ossfuzz(Onefuzz(), logging.getLogger("ossfuzz")), [Command], "0.0.1"
)
if __name__ == "__main__":
sys.exit(main())
| 29.48913 | 87 | 0.557317 | 2,042 | 0.752672 | 0 | 0 | 0 | 0 | 0 | 0 | 480 | 0.176926 |
f21c03303c0e86780d94fa0daa72a6287b00df39 | 3,721 | py | Python | stubs/workspaces.py | claytonbrown/troposphere | bf0f1e48b14f578de0221d50f711467ad716ca87 | [
"BSD-2-Clause"
]
| null | null | null | stubs/workspaces.py | claytonbrown/troposphere | bf0f1e48b14f578de0221d50f711467ad716ca87 | [
"BSD-2-Clause"
]
| null | null | null | stubs/workspaces.py | claytonbrown/troposphere | bf0f1e48b14f578de0221d50f711467ad716ca87 | [
"BSD-2-Clause"
]
| null | null | null | from . import AWSObject, AWSProperty
from .validators import *
from .constants import *
# -------------------------------------------
class WorkSpacesWorkspace(AWSObject):
"""# AWS::WorkSpaces::Workspace - CloudFormationResourceSpecification version: 1.4.0
{
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html",
"Properties": {
"BundleId": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-bundleid",
"PrimitiveType": "String",
"Required": true,
"UpdateType": "Conditional"
},
"DirectoryId": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-directoryid",
"PrimitiveType": "String",
"Required": true,
"UpdateType": "Conditional"
},
"RootVolumeEncryptionEnabled": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-rootvolumeencryptionenabled",
"PrimitiveType": "Boolean",
"Required": false,
"UpdateType": "Conditional"
},
"UserName": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-username",
"PrimitiveType": "String",
"Required": true,
"UpdateType": "Immutable"
},
"UserVolumeEncryptionEnabled": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-uservolumeencryptionenabled",
"PrimitiveType": "Boolean",
"Required": false,
"UpdateType": "Conditional"
},
"VolumeEncryptionKey": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-volumeencryptionkey",
"PrimitiveType": "String",
"Required": false,
"UpdateType": "Conditional"
}
}
}
"""
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True, 'Conditional', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-bundleid'),
'DirectoryId': (basestring, True, 'Conditional', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-directoryid'),
'RootVolumeEncryptionEnabled': (boolean, False, 'Conditional', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-rootvolumeencryptionenabled'),
'UserName': (basestring, True, 'Immutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-username'),
'UserVolumeEncryptionEnabled': (boolean, False, 'Conditional', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-uservolumeencryptionenabled'),
'VolumeEncryptionKey': (basestring, False, 'Conditional', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-workspaces-workspace.html#cfn-workspaces-workspace-volumeencryptionkey')
}
| 56.378788 | 228 | 0.685837 | 3,581 | 0.962376 | 0 | 0 | 0 | 0 | 0 | 0 | 3,341 | 0.897877 |
f21e91816629e68e43e8282cecfca50b522c0148 | 5,718 | py | Python | hystrix/command.py | grofers/hystrix-py | 9876b39980bc8dcb334fcb0ee8c15d6949112203 | [
"Apache-2.0"
]
| 93 | 2015-01-29T10:10:49.000Z | 2021-12-05T08:45:04.000Z | hystrix/command.py | grofers/hystrix-py | 9876b39980bc8dcb334fcb0ee8c15d6949112203 | [
"Apache-2.0"
]
| 1 | 2018-01-30T00:32:37.000Z | 2018-01-30T00:32:37.000Z | hystrix/command.py | grofers/hystrix-py | 9876b39980bc8dcb334fcb0ee8c15d6949112203 | [
"Apache-2.0"
]
| 20 | 2015-09-18T02:04:24.000Z | 2020-03-25T10:31:07.000Z | """
Used to wrap code that will execute potentially risky functionality
(typically meaning a service call over the network) with fault and latency
tolerance, statistics and performance metrics capture, circuit breaker and
bulkhead functionality.
"""
from __future__ import absolute_import
import logging
import six
from hystrix.group import Group
from hystrix.command_metrics import CommandMetrics
from hystrix.command_properties import CommandProperties
log = logging.getLogger(__name__)
# TODO: Change this to an AbstractCommandMetaclass
class CommandMetaclass(type):
__blacklist__ = ('Command', 'CommandMetaclass')
def __new__(cls, name, bases, attrs):
# Command key initialization
command_key = attrs.get('command_key') or name
new_class = type.__new__(cls, command_key, bases, attrs)
if name in cls.__blacklist__:
return new_class
# TODO: Check instance CommandProperties here?
command_properties_defaults = attrs.get('command_properties_defaults')
if command_properties_defaults is None:
command_properties_defaults = CommandProperties.setter()
# Properties initialization
properties_strategy = attrs.get('properties_strategy')
if properties_strategy is None:
properties_strategy = CommandProperties(
command_key, command_properties_defaults)
setattr(new_class, 'properties', properties_strategy)
# Pool key
# This defines which pool this command should run on.
# It uses the pool_key if provided, then defaults to use Group key.
# It can then be overridden by a property if defined so it can be
# changed at runtime.
pool_key = attrs.get('pool_key')
# Group key initialization
group_key = attrs.get('group_key') or '{}Group'.format(command_key)
NewGroup = type(group_key, (Group,),
dict(group_key=group_key, pool_key=pool_key))
setattr(new_class, 'group', NewGroup())
setattr(new_class, 'group_key', group_key)
setattr(new_class, 'command_key', command_key)
# Metrics initialization
command_metrics_key = '{}CommandMetrics'.format(command_key)
# TODO: Check instance CommandMetrics here?
metrics = attrs.get('metrics')
if metrics is None:
NewCommandMetrics = type(
command_metrics_key, (CommandMetrics,),
dict(command_metrics_key=command_metrics_key,
group_key=group_key, pool_key=pool_key))
metrics = NewCommandMetrics(properties=properties_strategy)
setattr(new_class, 'metrics', metrics)
return new_class
# TODO: Change this to inherit from an AbstractCommand
class Command(six.with_metaclass(CommandMetaclass, object)):
command_key = None
group_key = None
def __init__(self, group_key=None, command_key=None,
pool_key=None, circuit_breaker=None, pool=None,
command_properties_defaults=None,
pool_properties_defaults=None, metrics=None,
fallback_semaphore=None, execution_semaphore=None,
properties_strategy=None, execution_hook=None, timeout=None):
self.timeout = timeout
def run(self):
raise NotImplementedError('Subclasses must implement this method.')
def fallback(self):
raise NotImplementedError('Subclasses must implement this method.')
def cache(self):
raise NotImplementedError('Subclasses must implement this method.')
def execute(self, timeout=None):
timeout = timeout or self.timeout
future = self.group.pool.submit(self.run)
try:
return future.result(timeout)
except Exception:
log.exception('exception calling run for {}'.format(self))
            log.info('run raised {}'.format(future.exception()))
try:
log.info('trying fallback for {}'.format(self))
future = self.group.pool.submit(self.fallback)
return future.result(timeout)
except Exception:
log.exception('exception calling fallback for {}'.format(self))
                log.info('fallback raised {}'.format(future.exception()))
log.info('trying cache for {}'.format(self))
future = self.group.pool.submit(self.cache)
return future.result(timeout)
def observe(self, timeout=None):
timeout = timeout or self.timeout
return self.__async(timeout=timeout)
def queue(self, timeout=None):
timeout = timeout or self.timeout
return self.__async(timeout=timeout)
def __async(self, timeout=None):
timeout = timeout or self.timeout
future = self.group.pool.submit(self.run)
try:
# Call result() to check for exception
future.result(timeout)
return future
except Exception:
log.exception('exception calling run for {}'.format(self))
            log.info('run raised {}'.format(future.exception()))
try:
log.info('trying fallback for {}'.format(self))
future = self.group.pool.submit(self.fallback)
# Call result() to check for exception
future.result(timeout)
return future
except Exception:
log.exception('exception calling fallback for {}'.format(self))
                log.info('fallback raised {}'.format(future.exception()))
log.info('trying cache for {}'.format(self))
return self.group.pool.submit(self.cache)
| 38.635135 | 79 | 0.64603 | 5,114 | 0.894369 | 0 | 0 | 0 | 0 | 0 | 0 | 1,445 | 0.252711 |
f21f243b3b146cb9c4185deea25898637e21bb4c | 12,079 | py | Python | glue/viewers/table/qt/data_viewer.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
]
| 550 | 2015-01-08T13:51:06.000Z | 2022-03-31T11:54:47.000Z | glue/viewers/table/qt/data_viewer.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
]
| 1,362 | 2015-01-03T19:15:52.000Z | 2022-03-30T13:23:11.000Z | glue/viewers/table/qt/data_viewer.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
]
| 142 | 2015-01-08T13:08:00.000Z | 2022-03-18T13:25:57.000Z | import os
from functools import lru_cache
import numpy as np
from qtpy.QtCore import Qt
from qtpy import QtCore, QtGui, QtWidgets
from matplotlib.colors import ColorConverter
from glue.utils.qt import get_qapp
from glue.config import viewer_tool
from glue.core import BaseData, Data
from glue.utils.qt import load_ui
from glue.viewers.common.qt.data_viewer import DataViewer
from glue.viewers.common.qt.toolbar import BasicToolbar
from glue.viewers.common.tool import CheckableTool
from glue.viewers.common.layer_artist import LayerArtist
from glue.core.subset import ElementSubsetState
from glue.utils.colors import alpha_blend_colors
from glue.utils.qt import mpl_to_qt_color, messagebox_on_error
from glue.core.exceptions import IncompatibleAttribute
from glue.viewers.table.compat import update_table_viewer_state
try:
import dask.array as da
DASK_INSTALLED = True
except ImportError:
DASK_INSTALLED = False
__all__ = ['TableViewer', 'TableLayerArtist']
COLOR_CONVERTER = ColorConverter()
class DataTableModel(QtCore.QAbstractTableModel):
def __init__(self, table_viewer):
super(DataTableModel, self).__init__()
if table_viewer.data.ndim != 1:
raise ValueError("Can only use Table widget for 1D data")
self._table_viewer = table_viewer
self._data = table_viewer.data
self.show_coords = False
self.order = np.arange(self._data.shape[0])
self._update_visible()
def data_changed(self):
top_left = self.index(0, 0)
bottom_right = self.index(self.columnCount(), self.rowCount())
self._update_visible()
self.data_by_row_and_column.cache_clear()
self.dataChanged.emit(top_left, bottom_right)
self.layoutChanged.emit()
@property
def columns(self):
if self.show_coords:
return self._data.components
else:
return self._data.main_components + self._data.derived_components
def columnCount(self, index=None):
return len(self.columns)
def rowCount(self, index=None):
# Qt bug: Crashes on tables bigger than this
return min(self.order_visible.size, 71582788)
def headerData(self, section, orientation, role):
if role != Qt.DisplayRole:
return None
if orientation == Qt.Horizontal:
column_name = self.columns[section].label
units = self._data.get_component(self.columns[section]).units
if units != '':
column_name += "\n{0}".format(units)
return column_name
elif orientation == Qt.Vertical:
return str(self.order_visible[section])
def data(self, index, role):
if not index.isValid():
return None
return self.data_by_row_and_column(index.row(), index.column(), role)
    # The data() method gets called many times, often with the same
    # parameters, for example when bringing the window to the
    # foreground/background or shifting up/down/left/right by one cell. This
    # can be very slow when e.g. dask columns are present, so we cache the
    # most recent 65536 calls, which should have a reasonably sensible memory
    # footprint.
@lru_cache(maxsize=65536)
def data_by_row_and_column(self, row, column, role):
if role == Qt.DisplayRole:
c = self.columns[column]
idx = self.order_visible[row]
comp = self._data[c]
value = comp[idx]
if isinstance(value, bytes):
return value.decode('ascii')
else:
if DASK_INSTALLED and isinstance(value, da.Array):
return str(value.compute())
else:
return str(comp[idx])
elif role == Qt.BackgroundRole:
idx = self.order_visible[row]
# Find all subsets that this index is part of
colors = []
for layer_artist in self._table_viewer.layers[::-1]:
if isinstance(layer_artist.layer, BaseData):
continue
if layer_artist.visible:
subset = layer_artist.layer
try:
if subset.to_mask(view=slice(idx, idx + 1))[0]:
colors.append(subset.style.color)
except IncompatibleAttribute as exc:
# Only disable the layer if enabled, as otherwise we
# will recursively call clear and _refresh, causing
# an infinite loop and performance issues.
if layer_artist.enabled:
layer_artist.disable_invalid_attributes(*exc.args)
else:
layer_artist.enabled = True
# Blend the colors using alpha blending
if len(colors) > 0:
color = alpha_blend_colors(colors, additional_alpha=0.5)
color = mpl_to_qt_color(color)
return QtGui.QBrush(color)
    def sort(self, column, order):
        # Note: Qt passes a Qt.SortOrder value as the second argument of
        # QAbstractItemModel.sort(), not a boolean "ascending" flag.
        c = self.columns[column]
        comp = self._data.get_component(c)
        self.order = np.argsort(comp.data)
        if order == Qt.DescendingOrder:
            self.order = self.order[::-1]
        self._update_visible()
        self.data_by_row_and_column.cache_clear()
        self.layoutChanged.emit()
def _update_visible(self):
"""
Given which layers are visible or not, convert order to order_visible.
"""
self.data_by_row_and_column.cache_clear()
# First, if the data layer is visible, show all rows
for layer_artist in self._table_viewer.layers:
if layer_artist.visible and isinstance(layer_artist.layer, BaseData):
self.order_visible = self.order
return
# If not then we need to show only the rows with visible subsets
visible = np.zeros(self.order.shape, dtype=bool)
for layer_artist in self._table_viewer.layers:
if layer_artist.visible:
mask = layer_artist.layer.to_mask()
if DASK_INSTALLED and isinstance(mask, da.Array):
mask = mask.compute()
visible |= mask
self.order_visible = self.order[visible]
class TableLayerArtist(LayerArtist):
def __init__(self, table_viewer, viewer_state, layer_state=None, layer=None):
self._table_viewer = table_viewer
super(TableLayerArtist, self).__init__(viewer_state,
layer_state=layer_state,
layer=layer)
self.redraw()
def _refresh(self):
self._table_viewer.model.data_changed()
def redraw(self):
self._refresh()
def update(self):
self._refresh()
def clear(self):
self._refresh()
@viewer_tool
class RowSelectTool(CheckableTool):
tool_id = 'table:rowselect'
icon = 'glue_row_select'
action_text = 'Select row(s)'
tool_tip = ('Select rows by clicking on rows and pressing enter '
'once the selection is ready to be applied')
status_tip = ('CLICK to select, press ENTER to finalize selection, '
'ALT+CLICK or ALT+UP/DOWN to apply selection immediately')
def __init__(self, viewer):
super(RowSelectTool, self).__init__(viewer)
self.deactivate()
def activate(self):
self.viewer.ui.table.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
def deactivate(self):
# Don't do anything if the viewer has already been closed
if self.viewer is None:
return
self.viewer.ui.table.setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
self.viewer.ui.table.clearSelection()
class TableViewWithSelectionSignal(QtWidgets.QTableView):
selection_changed = QtCore.Signal()
def selectionChanged(self, *args, **kwargs):
self.selection_changed.emit()
super(TableViewWithSelectionSignal, self).selectionChanged(*args, **kwargs)
class TableViewer(DataViewer):
LABEL = "Table Viewer"
_toolbar_cls = BasicToolbar
_data_artist_cls = TableLayerArtist
_subset_artist_cls = TableLayerArtist
inherit_tools = False
tools = ['table:rowselect']
def __init__(self, session, state=None, parent=None, widget=None):
super(TableViewer, self).__init__(session, state=state, parent=parent)
self.ui = load_ui('data_viewer.ui',
directory=os.path.dirname(__file__))
self.setCentralWidget(self.ui)
hdr = self.ui.table.horizontalHeader()
hdr.setStretchLastSection(True)
hdr.setSectionResizeMode(hdr.Interactive)
hdr = self.ui.table.verticalHeader()
hdr.setSectionResizeMode(hdr.Interactive)
self.data = None
self.model = None
self.ui.table.selection_changed.connect(self.selection_changed)
self.state.add_callback('layers', self._on_layers_changed)
self._on_layers_changed()
def selection_changed(self):
app = get_qapp()
if app.queryKeyboardModifiers() == Qt.AltModifier:
self.finalize_selection(clear=False)
def keyPressEvent(self, event):
if self.toolbar.active_tool is self.toolbar.tools['table:rowselect']:
if event.key() in [Qt.Key_Enter, Qt.Key_Return]:
self.finalize_selection()
super(TableViewer, self).keyPressEvent(event)
def finalize_selection(self, clear=True):
model = self.ui.table.selectionModel()
selected_rows = [self.model.order_visible[x.row()] for x in model.selectedRows()]
subset_state = ElementSubsetState(indices=selected_rows, data=self.data)
mode = self.session.edit_subset_mode
mode.update(self._data, subset_state, focus_data=self.data)
if clear:
# We block the signals here to make sure that we don't update
# the subset again once the selection is cleared.
self.ui.table.blockSignals(True)
self.ui.table.clearSelection()
self.ui.table.blockSignals(False)
def _on_layers_changed(self, *args):
for layer_state in self.state.layers:
if isinstance(layer_state.layer, BaseData):
break
else:
return
self.data = layer_state.layer
self.setUpdatesEnabled(False)
self.model = DataTableModel(self)
self.ui.table.setModel(self.model)
self.setUpdatesEnabled(True)
@messagebox_on_error("Failed to add data")
def add_data(self, data):
with self._layer_artist_container.ignore_empty():
self.state.layers[:] = []
return super(TableViewer, self).add_data(data)
@messagebox_on_error("Failed to add subset")
def add_subset(self, subset):
if self.data is None:
self.add_data(subset.data)
self.state.layers[0].visible = False
elif subset.data != self.data:
raise ValueError("subset parent data does not match existing table data")
return super(TableViewer, self).add_subset(subset)
@property
def window_title(self):
if len(self.state.layers) > 0:
return 'Table: ' + self.state.layers[0].layer.label
else:
return 'Table'
def closeEvent(self, event):
"""
On close, Qt seems to scan through the entire model
if the data set is big. To sidestep that,
we swap out with a tiny data set before closing
"""
super(TableViewer, self).closeEvent(event)
if self.model is not None:
self.model._data = Data(x=[0])
event.accept()
def get_layer_artist(self, cls, layer=None, layer_state=None):
return cls(self, self.state, layer=layer, layer_state=layer_state)
@staticmethod
def update_viewer_state(rec, context):
return update_table_viewer_state(rec, context)
| 35.215743 | 92 | 0.637304 | 11,040 | 0.913983 | 0 | 0 | 3,871 | 0.320474 | 0 | 0 | 1,703 | 0.140988 |
f21fb8769d6f7f12c55b09713729ab92490aa213 | 1,781 | py | Python | azure-servicefabric/azure/servicefabric/models/restore_partition_description.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
]
| 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-servicefabric/azure/servicefabric/models/restore_partition_description.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
]
| 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-servicefabric/azure/servicefabric/models/restore_partition_description.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
]
| 1 | 2018-10-16T13:08:23.000Z | 2018-10-16T13:08:23.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RestorePartitionDescription(Model):
"""Specifies the parameters needed to trigger a restore of a specific
partition.
All required parameters must be populated in order to send to Azure.
:param backup_id: Required. Unique backup ID.
:type backup_id: str
:param backup_location: Required. Location of the backup relative to the
backup storage specified/ configured.
:type backup_location: str
:param backup_storage: Location of the backup from where the partition
will be restored.
:type backup_storage: ~azure.servicefabric.models.BackupStorageDescription
"""
_validation = {
'backup_id': {'required': True},
'backup_location': {'required': True},
}
_attribute_map = {
'backup_id': {'key': 'BackupId', 'type': 'str'},
'backup_location': {'key': 'BackupLocation', 'type': 'str'},
'backup_storage': {'key': 'BackupStorage', 'type': 'BackupStorageDescription'},
}
def __init__(self, **kwargs):
super(RestorePartitionDescription, self).__init__(**kwargs)
self.backup_id = kwargs.get('backup_id', None)
self.backup_location = kwargs.get('backup_location', None)
self.backup_storage = kwargs.get('backup_storage', None)
| 37.893617 | 87 | 0.638967 | 1,265 | 0.710275 | 0 | 0 | 0 | 0 | 0 | 0 | 1,279 | 0.718136 |
f2222e7a4067aa3e2de0115ba3b31e143ef1fc7b | 5,438 | py | Python | psq/queue.py | Tomesco/bookshelf-demo-project | 9d422f3aa04edbb3312d3e177caf699653ed6a73 | [
"Apache-2.0"
]
| 210 | 2015-07-29T16:50:01.000Z | 2022-03-02T15:24:52.000Z | psq/queue.py | Tomesco/bookshelf-demo-project | 9d422f3aa04edbb3312d3e177caf699653ed6a73 | [
"Apache-2.0"
]
| 60 | 2015-12-03T23:15:57.000Z | 2021-01-21T09:25:42.000Z | psq/queue.py | Tomesco/bookshelf-demo-project | 9d422f3aa04edbb3312d3e177caf699653ed6a73 | [
"Apache-2.0"
]
| 47 | 2015-12-21T06:09:36.000Z | 2021-09-04T13:20:21.000Z | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from contextlib import contextmanager
import functools
import logging
from uuid import uuid4
import google.cloud.exceptions
from .globals import queue_context
from .storage import Storage
from .task import Task, TaskResult
from .utils import dumps, measure_time, unpickle, UnpickleError
logger = logging.getLogger(__name__)
PUBSUB_OBJECT_PREFIX = 'psq'
class Queue(object):
def __init__(self, publisher_client, subscriber_client, project,
name='default', storage=None, extra_context=None,
asynchronous=True):
self._async = asynchronous
self.name = name
self.project = project
if self._async:
self.publisher_client = publisher_client
self.subscriber_client = subscriber_client
self.topic_path = self._get_or_create_topic()
self.storage = storage or Storage()
self.subscription = None
self.extra_context = extra_context if extra_context else dummy_context
def _get_topic_path(self):
topic_name = '{}-{}'.format(PUBSUB_OBJECT_PREFIX, self.name)
return self.publisher_client.topic_path(self.project, topic_name)
def _get_or_create_topic(self):
topic_path = self._get_topic_path()
try:
self.publisher_client.get_topic(topic_path)
except google.cloud.exceptions.NotFound:
logger.info("Creating topic {}".format(topic_path))
try:
self.publisher_client.create_topic(topic_path)
except google.cloud.exceptions.Conflict:
# Another process created the topic before us, ignore.
pass
return topic_path
def _get_or_create_subscription(self):
"""Workers all share the same subscription so that tasks are
distributed across all workers."""
topic_path = self._get_topic_path()
subscription_name = '{}-{}-shared'.format(
PUBSUB_OBJECT_PREFIX, self.name)
subscription_path = self.subscriber_client.subscription_path(
self.project, subscription_name)
try:
self.subscriber_client.get_subscription(subscription_path)
except google.cloud.exceptions.NotFound:
logger.info("Creating shared subscription {}".format(
subscription_name))
try:
self.subscriber_client.create_subscription(
subscription_path, topic=topic_path)
except google.cloud.exceptions.Conflict:
# Another worker created the subscription before us, ignore.
pass
return subscription_path
def enqueue(self, f, *args, **kwargs):
"""Enqueues a function for the task queue to execute."""
task = Task(uuid4().hex, f, args, kwargs)
self.storage.put_task(task)
return self.enqueue_task(task)
def enqueue_task(self, task):
"""Enqueues a task directly. This is used when a task is retried or if
a task was manually created.
Note that this does not store the task.
"""
data = dumps(task)
if self._async:
self.publisher_client.publish(self.topic_path, data=data)
logger.info('Task {} queued.'.format(task.id))
else:
unpickled_task = unpickle(data)
logger.info(
'Executing task {} synchronously.'.format(unpickled_task.id)
)
with measure_time() as summary, self.queue_context():
unpickled_task.execute(queue=self)
summary(unpickled_task.summary())
return TaskResult(task.id, self)
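    # Usage sketch (assumption -- `q` and `slow_add` are illustrative names,
    # not part of this module):
    #
    #     def slow_add(a, b):
    #         return a + b
    #
    #     r = q.enqueue(slow_add, 1, 2)  # pickles a Task and publishes it
    #     r.result(timeout=30)           # waits on Storage for the result
    #
    # Task functions must be importable by the workers, since the Task is
    # pickled here and unpickled in a different process.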
@staticmethod
def _pubsub_message_callback(task_callback, message):
message.ack()
try:
task = unpickle(message.data)
task_callback(task)
except UnpickleError:
logger.exception('Failed to unpickle task {}.'.format(message))
def listen(self, callback):
if not self.subscription:
self.subscription = self._get_or_create_subscription()
message_callback = functools.partial(
self._pubsub_message_callback, callback)
return self.subscriber_client.subscribe(
self.subscription, callback=message_callback)
def cleanup(self):
"""Does nothing for this queue, but other queues types may use this to
perform clean-up after listening for tasks."""
pass
def queue_context(self):
"""
Returns a context manager that sets this queue as the current_queue
global. Similar to flask's app.app_context. This is used by the workers
to make the global available inside of task functions.
"""
return queue_context(self)
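    # Usage sketch (assumption): a worker wraps task execution in this
    # context manager so code running inside the task can look up the
    # active queue through the module-level globals:
    #
    #     with q.queue_context():
    #         task.execute(queue=q)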
@contextmanager
def dummy_context():
yield
| 34.417722 | 79 | 0.658698 | 4,412 | 0.811328 | 30 | 0.005517 | 333 | 0.061236 | 0 | 0 | 1,529 | 0.28117 |
f22234640a48085e7e67ec5bc155d8fda74563b6 | 2,453 | py | Python | mac/google-cloud-sdk/lib/surface/access_context_manager/levels/update.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
]
| null | null | null | mac/google-cloud-sdk/lib/surface/access_context_manager/levels/update.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
]
| 4 | 2020-07-21T12:51:46.000Z | 2022-01-22T10:29:25.000Z | mac/google-cloud-sdk/lib/surface/access_context_manager/levels/update.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
]
| 1 | 2020-07-25T18:17:57.000Z | 2020-07-25T18:17:57.000Z | # -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud access-context-manager levels update` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.accesscontextmanager import levels as levels_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.accesscontextmanager import levels
from googlecloudsdk.command_lib.accesscontextmanager import policies
@base.ReleaseTracks(base.ReleaseTrack.GA)
class UpdateLevelsGA(base.UpdateCommand):
"""Update an existing access level."""
_API_VERSION = 'v1'
@staticmethod
def Args(parser):
UpdateLevelsGA.ArgsVersioned(parser, version='v1')
@staticmethod
def ArgsVersioned(parser, version='v1'):
levels.AddResourceArg(parser, 'to update')
levels.AddLevelArgs(parser, version=version)
levels.AddLevelSpecArgs(parser, version=version)
def Run(self, args):
client = levels_api.Client(version=self._API_VERSION)
level_ref = args.CONCEPTS.level.Parse()
policies.ValidateAccessPolicyArg(level_ref, args)
mapper = levels.GetCombineFunctionEnumMapper(version=self._API_VERSION)
combine_function = mapper.GetEnumForChoice(args.combine_function)
return client.Patch(
level_ref,
description=args.description,
title=args.title,
combine_function=combine_function,
basic_level_conditions=args.basic_level_spec)
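    # Example invocation (illustrative -- flag names are inferred from the
    # Args wiring above; verify against
    # `gcloud access-context-manager levels update --help`):
    #
    #     gcloud access-context-manager levels update LEVEL \
    #         --policy=POLICY --title="New title" --description="..."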
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class UpdateLevelsBeta(UpdateLevelsGA):
_API_VERSION = 'v1beta'
@staticmethod
def Args(parser):
UpdateLevelsGA.ArgsVersioned(parser, version='v1beta')
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class UpdateLevelsAlpha(UpdateLevelsGA):
_API_VERSION = 'v1alpha'
@staticmethod
def Args(parser):
UpdateLevelsGA.ArgsVersioned(parser, version='v1alpha')
| 32.706667 | 76 | 0.772523 | 1,264 | 0.515287 | 0 | 0 | 1,395 | 0.568691 | 0 | 0 | 762 | 0.31064 |
f2235aff62f649f7be3dedbcbc6809a427c1c2ca | 775 | py | Python | convert.py | lordcodingsound/autodj | dc43c8a8bd07006d02a5a7d5d2ae74d2eb9bf685 | [
"MIT"
]
| null | null | null | convert.py | lordcodingsound/autodj | dc43c8a8bd07006d02a5a7d5d2ae74d2eb9bf685 | [
"MIT"
]
| null | null | null | convert.py | lordcodingsound/autodj | dc43c8a8bd07006d02a5a7d5d2ae74d2eb9bf685 | [
"MIT"
]
| null | null | null | import wave
import struct
import subprocess
import os
import opusenc
import base64
import zlib
import sys
# Transcode the input to 48 kHz stereo WAV with ffmpeg, the sample rate and
# channel layout the Opus encoder expects.
tmp = sys.argv[1] + ".wav"
subprocess.Popen(["ffmpeg", "-i", sys.argv[1], "-ar", "48000", "-ac", "2", "-y", tmp], stdout=subprocess.PIPE, stderr=subprocess.PIPE).wait()
f = open(sys.argv[2], "wb")
# zlib stream for compressing the base64 text output (level 9 = maximum).
e = zlib.compressobj(9)
c = 0   # frames buffered since the last flush
b = ""  # base64-encoded Opus frames, one per line
# opusenc is a project-local encoder module; 256000 is assumed to be the
# target bitrate in bits per second.
opusenc.initialize(256000)
wf = wave.open(tmp)
while True:
    # 480 samples at 48 kHz is one 10 ms frame; 480 frames * 2 channels *
    # 2 bytes per sample = 1920 bytes, so anything shorter is a partial
    # final read and is dropped.
    rc = wf.readframes(480)
    if len(rc) != 1920:
        break
    opus = opusenc.encode(rc)
    b += base64.b64encode(opus).decode("utf-8") + "\n"
    c += 1
    if c >= 100:
        # Every 100 frames, compress the buffered text and sync-flush so the
        # output remains decodable incrementally.
        c = 0
        f.write(e.compress(b.encode()) + e.flush(zlib.Z_SYNC_FLUSH))
        b = ""
# Flush whatever is left in the buffer.
f.write(e.compress(b.encode()) + e.flush(zlib.Z_SYNC_FLUSH))
f.close()
wf.close()
os.remove(tmp)
| 20.394737 | 142 | 0.616774 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 61 | 0.07871 |
f2243459193cb30ca1ec87a1cec0d50174acfaea | 170 | py | Python | polliwog/tri/__init__.py | lace/polliwog | 7744ce171738e4739e391fcff4f4689d9f177196 | [
"BSD-2-Clause"
]
| 18 | 2019-05-03T02:08:12.000Z | 2022-03-24T11:49:59.000Z | polliwog/tri/__init__.py | lace/polliwog | 7744ce171738e4739e391fcff4f4689d9f177196 | [
"BSD-2-Clause"
]
| 76 | 2019-04-03T15:24:01.000Z | 2022-03-01T14:07:04.000Z | polliwog/tri/__init__.py | lace/polliwog | 7744ce171738e4739e391fcff4f4689d9f177196 | [
"BSD-2-Clause"
]
| 3 | 2019-11-04T16:22:07.000Z | 2022-03-09T08:50:52.000Z | from . import functions as _functions
from .functions import * # noqa: F401,F403
from .quad_faces import quads_to_tris
__all__ = _functions.__all__ + ["quads_to_tris"]
| 28.333333 | 48 | 0.776471 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 32 | 0.188235 |
f224a1293fdc148ee28c3d5f42f88c489aa0c477 | 10,961 | py | Python | packages/pegasus-api/src/Pegasus/api/replica_catalog.py | spxiwh/pegasus | ebe3e205ae34c1721c540465712da557979c7437 | [
"Apache-2.0"
]
| null | null | null | packages/pegasus-api/src/Pegasus/api/replica_catalog.py | spxiwh/pegasus | ebe3e205ae34c1721c540465712da557979c7437 | [
"Apache-2.0"
]
| null | null | null | packages/pegasus-api/src/Pegasus/api/replica_catalog.py | spxiwh/pegasus | ebe3e205ae34c1721c540465712da557979c7437 | [
"Apache-2.0"
]
| null | null | null | from collections import OrderedDict
from pathlib import Path
from typing import Dict, Optional, Set, Union
from ._utils import _chained
from .errors import DuplicateError
from .mixins import MetadataMixin
from .writable import Writable, _filter_out_nones
PEGASUS_VERSION = "5.0"
__all__ = ["File", "ReplicaCatalog"]
class _PFN:
"""A physical file name comprising site and path"""
def __init__(self, site, pfn):
self.site = site
self.pfn = pfn
def __eq__(self, other):
if isinstance(other, _PFN):
return self.site == other.site and self.pfn == other.pfn
return False
def __hash__(self):
return hash((self.site, self.pfn))
def __repr__(self):
return "<_PFN site: {}, pfn: {}>".format(self.site, self.pfn)
def __json__(self):
return {"site": self.site, "pfn": self.pfn}
class File(MetadataMixin):
"""
A workflow File. This class is used to represent the inputs and outputs of a
:py:class:`~Pegasus.api.workflow.Job`.
.. code-block:: python
# Example
input_file = File("data.txt").add_metadata(creator="ryan")
"""
def __init__(self, lfn: str, size: Optional[int] = None):
"""
:param lfn: a unique logical filename
:type lfn: str
:param size: size in bytes, defaults to None
:type size: int
"""
if not isinstance(lfn, str):
raise TypeError(
"invalid lfn: {lfn}; lfn must be of type str".format(lfn=lfn)
)
self.metadata = dict()
self.lfn = lfn
self.size = size
if size:
self.metadata["size"] = size
def __str__(self):
return self.lfn
def __hash__(self):
return hash(self.lfn)
def __eq__(self, other):
if isinstance(other, File):
return self.lfn == other.lfn
return False
def __repr__(self):
return "<{} {}>".format(self.__class__.__name__, self.lfn)
def __json__(self):
return _filter_out_nones(
{
"lfn": self.lfn,
"metadata": self.metadata if len(self.metadata) > 0 else None,
"size": self.size,
}
)
class _ReplicaCatalogEntry:
def __init__(
self,
lfn: str,
pfns: Set[_PFN],
checksum: Optional[Dict[str, str]] = None,
metadata: Optional[Dict[str, Union[int, str, float]]] = None,
regex: bool = False,
):
self.lfn = lfn
self.pfns = pfns
self.checksum = checksum or dict()
self.metadata = metadata or dict()
self.regex = regex
def __json__(self):
return _filter_out_nones(
{
"lfn": self.lfn,
"pfns": [pfn for pfn in self.pfns],
"checksum": self.checksum if len(self.checksum) > 0 else None,
"metadata": self.metadata if len(self.metadata) > 0 else None,
"regex": self.regex if self.regex else None,
}
)
class ReplicaCatalog(Writable):
"""Maintains a mapping of logical filenames to physical filenames. Any input
files to the workflow are specified here so that Pegasus knows where to
obtain them.
.. code-block:: python
# Example
if1 = File("if")
if2 = File("if2")
rc = ReplicaCatalog()\\
.add_replica("local", if1, "/nfs/u2/ryan/data.csv")\\
.add_replica("local", "if2", "/nfs/u2/ryan/data2.csv")\\
.write()
"""
_DEFAULT_FILENAME = "replicas.yml"
_SUPPORTED_CHECKSUMS = {"sha256"}
def __init__(self):
# Using key = (<lfn or pattern>, <is_regex>) to preserve insertion
# order of entries while distinguishing between regex and
# non regex entries
self.entries = OrderedDict()
@_chained
def add_regex_replica(
self,
site: str,
pattern: str,
pfn: Union[str, Path],
metadata: Optional[Dict[str, Union[int, str, float]]] = None,
):
r"""
add_regex_replica(self, site: str, pattern: str, pfn: Union[str, Path], metadata: Optional[Dict[str, Union[int, str, float]]] = None)
Add an entry to this replica catalog using a regular expression pattern.
Note that regular expressions should follow Java regular expression syntax
as the underlying code that handles this catalog is Java based.
.. code-block:: python
# Example 1: Match f<any-character>a i.e. faa, f.a, f0a, etc.
rc.add_regex_replica("local", "f.a", "/Volumes/data/input/f.a")
# Example 2: Using groupings
rc.add_regex_replica("local", "alpha\.(csv|txt|xml)", "/Volumes/data/input/[1]/[0]")
# If the file being looked up is alpha.csv, the pfn for the file will be
# generated as /Volumes/data/input/csv/alpha.csv
# Example 3: Specifying a default location for all lfns that don't match any
# regular expressions. Note that this should be the last entry into the replica
# catalog if used.
rc.add_regex_replica("local", ".*", Path("/Volumes/data") / "input/[0]")
:param site: the site at which this replica (file) resides
:type site: str
:param pattern: regular expression used to match a file
:type pattern: str
:param pfn: path to the file (may also be a pattern as shown in the example above)
:type pfn: Union[str, Path]
:param metadata: any metadata to be associated with the matched files, for example: :code:`{"creator": "pegasus"}`, defaults to None
:type metadata: Optional[Dict[str, Union[int, str, float]]]
:raises DuplicateError: Duplicate patterns with different PFNs are currently not supported
"""
metadata = metadata or dict()
# restricting pattern to single pfn (may be relaxed in future release)
if (pattern, True) in self.entries:
raise DuplicateError(
"Pattern: {} already exists in this replica catalog".format(pattern)
)
# handle Path obj if given for pfn
if isinstance(pfn, Path):
if not pfn.is_absolute():
raise ValueError(
"Invalid pfn: {}, the given pfn must be an absolute path".format(
pfn
)
)
pfn = str(pfn)
self.entries[(pattern, True)] = _ReplicaCatalogEntry(
lfn=pattern, pfns={_PFN(site, pfn)}, metadata=metadata, regex=True
)
@_chained
def add_replica(
self,
site: str,
lfn: Union[str, File],
pfn: Union[str, Path],
checksum: Optional[Dict[str, str]] = None,
metadata: Optional[Dict[str, Union[int, str, float]]] = None,
):
"""
add_replica(self, site: str, lfn: Union[str, File], pfn: Union[str, Path], checksum: Optional[Dict[str, str]] = None, metadata: Optiona[Dict[str, Union[int, str, float]]] = None)
Add an entry to this replica catalog.
.. code-block:: python
# Example 1
f = File("in.txt").add_metadata(creator="pegasus")
rc.add_replica("local", f, Path(".").resolve() / "in.txt")
# Example 2: Adding metadata and a checksum
rc.add_replica(
"local",
"in.txt",
"/home/ryan/wf/in.txt",
checksum={"sha256": "abc123"},
metadata={"creator": "pegasus"}
)
# Example 3: Adding multiple pfns for the same lfn (metadata and checksum will be
# updated for that lfn if given.
rc.add_replica("local", "in.txt", Path(".").resolve() / "in.txt")
rc.add_replica("condorpool", "in.txt", "/path/to/file/in.txt")
:param site: the site at which this replica (file) resides
:type site: str
:param lfn: logical file name
:type lfn: Union[str, File]
:param pfn: physical file name such as :code:`Path("f.txt").resolve()`, :code:`/home/ryan/file.txt`, or :code:`http://pegasus.isi.edu/file.txt`
:type pfn: Union[str, Path]
:param checksum: Dict containing checksums for this file. Currently only sha256 is given. This should be entered as :code:`{"sha256": <value>}`, defaults to None
:type checksum: Optional[Dict[str, str]], optional
:param metadata: metadata key value pairs associated with this lfn such as :code:`{"created": "Thu Jun 18 22:18:36 PDT 2020", "owner": "pegasus"}`, defaults to None
:type metadata: Optional[Dict[str, Union[int, str, float]]], optional
:raises ValueError: if pfn is given as a :code:`pathlib.Path`, it must be an absolute path
:raises ValueError: an unsupported checksum type was given
"""
# handle Path obj if given for pfn
if isinstance(pfn, Path):
if not pfn.is_absolute():
raise ValueError(
"Invalid pfn: {}, the given path must be an absolute path".format(
str(pfn)
)
)
pfn = str(pfn)
metadata = metadata or dict()
checksum = checksum or dict()
# File might contain metadata that should be included
if isinstance(lfn, File):
if lfn.metadata:
metadata.update(lfn.metadata)
lfn = lfn.lfn
# ensure supported checksum type given
if len(checksum) > 0:
for checksum_type in checksum:
if checksum_type.lower() not in ReplicaCatalog._SUPPORTED_CHECKSUMS:
raise ValueError(
"Invalid checksum: {}, supported checksum types are: {}".format(
checksum_type, ReplicaCatalog._SUPPORTED_CHECKSUMS
)
)
# if an entry with the given lfn already exists, update it
# else create and add a new one
if (lfn, False) in self.entries:
self.entries[(lfn, False)].pfns.add(_PFN(site, pfn))
self.entries[(lfn, False)].checksum.update(checksum)
self.entries[(lfn, False)].metadata.update(metadata)
else:
self.entries[(lfn, False)] = _ReplicaCatalogEntry(
lfn,
{_PFN(site, pfn)},
checksum=checksum,
metadata=metadata,
regex=False,
)
def __json__(self):
return OrderedDict(
[
("pegasus", PEGASUS_VERSION),
("replicas", [v for _, v in self.entries.items()]),
]
)
| 35.244373 | 186 | 0.560624 | 10,630 | 0.969802 | 0 | 0 | 6,820 | 0.622206 | 0 | 0 | 5,777 | 0.52705 |
f225855419247f9e8048a49c1d9c71b3af0a2082 | 4,513 | py | Python | words.py | ashutoshkrris/Terminal-Wordle | edafc99a6adb12824495e53dd0c5be6dc89b8839 | [
"MIT"
]
| null | null | null | words.py | ashutoshkrris/Terminal-Wordle | edafc99a6adb12824495e53dd0c5be6dc89b8839 | [
"MIT"
]
| null | null | null | words.py | ashutoshkrris/Terminal-Wordle | edafc99a6adb12824495e53dd0c5be6dc89b8839 | [
"MIT"
]
| null | null | null | word_list = ['ABOUT', 'ABOVE', 'ABUSE', 'ACTOR', 'ACUTE', 'ADMIT', 'ADOPT', 'ADULT', 'AFTER', 'AGAIN', 'AGENT', 'AGREE', 'AHEAD', 'ALARM', 'ALBUM', 'ALERT', 'ALIKE', 'ALIVE', 'ALLOW', 'ALONE', 'ALONG', 'ALTER', 'AMONG', 'ANGER', 'ANGLE', 'ANGRY', 'APART', 'APPLE', 'APPLY', 'ARENA', 'ARGUE', 'ARISE', 'ARRAY', 'ASIDE', 'ASSET', 'AUDIO', 'AUDIT', 'AVOID', 'AWARD', 'AWARE', 'BADLY', 'BAKER', 'BASES', 'BASIC', 'BASIS', 'BEACH', 'BEGAN', 'BEGIN', 'BEGUN', 'BEING', 'BELOW', 'BENCH', 'BILLY', 'BIRTH', 'BLACK', 'BLAME', 'BLIND', 'BLOCK', 'BLOOD', 'BOARD', 'BOOST', 'BOOTH', 'BOUND', 'BRAIN', 'BRAND', 'BREAD', 'BREAK', 'BREED', 'BRIEF', 'BRING', 'BROAD', 'BROKE', 'BROWN', 'BUILD', 'BUILT', 'BUYER', 'CABLE', 'CALIF', 'CARRY', 'CATCH', 'CAUSE', 'CHAIN', 'CHAIR', 'CHART', 'CHASE', 'CHEAP', 'CHECK', 'CHEST', 'CHIEF', 'CHILD', 'CHINA', 'CHOSE', 'CIVIL', 'CLAIM', 'CLASS', 'CLEAN', 'CLEAR', 'CLICK', 'CLOCK', 'CLOSE', 'COACH', 'COAST', 'COULD', 'COUNT', 'COURT', 'COVER', 'CRAFT', 'CRASH', 'CREAM', 'CRIME', 'CROSS', 'CROWD', 'CROWN', 'CURVE', 'CYCLE', 'DAILY', 'DANCE', 'DATED', 'DEALT', 'DEATH', 'DEBUT', 'DELAY', 'DEPTH', 'DOING', 'DOUBT', 'DOZEN', 'DRAFT', 'DRAMA', 'DRAWN', 'DREAM', 'DRESS', 'DRILL', 'DRINK', 'DRIVE', 'DROVE', 'DYING', 'EAGER', 'EARLY', 'EARTH', 'EIGHT', 'ELITE', 'EMPTY', 'ENEMY', 'ENJOY', 'ENTER', 'ENTRY', 'EQUAL', 'ERROR', 'EVENT', 'EVERY', 'EXACT', 'EXIST', 'EXTRA', 'FAITH', 'FALSE', 'FAULT', 'FIBER', 'FIELD', 'FIFTH', 'FIFTY', 'FIGHT', 'FINAL', 'FIRST', 'FIXED', 'FLASH', 'FLEET', 'FLOOR', 'FLUID', 'FOCUS', 'FORCE', 'FORTH', 'FORTY', 'FORUM', 'FOUND', 'FRAME', 'FRANK', 'FRAUD', 'FRESH', 'FRONT', 'FRUIT', 'FULLY', 'FUNNY', 'GIANT', 'GIVEN', 'GLASS', 'GLOBE', 'GOING', 'GRACE', 'GRADE', 'GRAND', 'GRANT', 'GRASS', 'GREAT', 'GREEN', 'GROSS', 'GROUP', 'GROWN', 'GUARD', 'GUESS', 'GUEST', 'GUIDE', 'HAPPY', 'HARRY', 'HEART', 'HEAVY', 'HENCE', 'HENRY', 'HORSE', 'HOTEL', 'HOUSE', 'HUMAN', 'IDEAL', 'IMAGE', 'INDEX', 'INNER', 'INPUT', 'ISSUE', 'JAPAN', 'JIMMY', 'JOINT', 'JONES', 'JUDGE', 'KNOWN', 'LABEL', 'LARGE', 'LASER', 'LATER', 'LAUGH', 'LAYER', 'LEARN', 'LEASE', 'LEAST', 'LEAVE', 'LEGAL', 'LEVEL', 'LEWIS', 'LIGHT', 'LIMIT', 'LINKS', 'LIVES', 'LOCAL', 'LOGIC', 'LOOSE', 'LOWER', 'LUCKY', 'LUNCH', 'LYING', 'MAGIC', 'MAJOR', 'MAKER', 'MARCH', 'MARIA', 'MATCH', 'MAYBE', 'MAYOR', 'MEANT', 'MEDIA', 'METAL', 'MIGHT', 'MINOR', 'MINUS', 'MIXED', 'MODEL', 'MONEY', 'MONTH', 'MORAL', 'MOTOR', 'MOUNT', 'MOUSE', 'MOUTH', 'MOVIE', 'MUSIC', 'NEEDS', 'NEVER', 'NEWLY', 'NIGHT', 'NOISE', 'NORTH', 'NOTED', 'NOVEL', 'NURSE', 'OCCUR', 'OCEAN', 'OFFER', 'OFTEN', 'ORDER', 'OTHER', 'OUGHT', 'PAINT', 'PANEL', 'PAPER', 'PARTY', 'PEACE', 'PETER', 'PHASE', 'PHONE', 'PHOTO', 'PIECE', 'PILOT', 'PITCH', 'PLACE', 'PLAIN', 'PLANE', 'PLANT', 'PLATE', 'POINT', 'POUND', 'POWER', 'PRESS', 'PRICE', 'PRIDE', 'PRIME', 'PRINT', 'PRIOR', 'PRIZE', 'PROOF', 'PROUD', 'PROVE', 'QUEEN', 'QUICK', 'QUIET', 'QUITE', 'RADIO', 'RAISE', 'RANGE', 'RAPID', 'RATIO', 'REACH', 'READY', 'REFER', 'RIGHT', 'RIVAL', 'RIVER', 'ROBIN', 'ROGER', 'ROMAN', 'ROUGH', 'ROUND', 'ROUTE', 'ROYAL', 'RURAL', 'SCALE', 'SCENE', 'SCOPE', 'SCORE', 'SENSE', 'SERVE', 'SEVEN', 'SHALL', 'SHAPE', 'SHARE', 'SHARP', 'SHEET', 'SHELF', 'SHELL', 'SHIFT', 'SHIRT', 'SHOCK', 'SHOOT', 'SHORT', 'SHOWN', 'SIGHT', 'SINCE', 'SIXTH', 'SIXTY', 'SIZED', 'SKILL', 'SLEEP', 'SLIDE', 'SMALL', 'SMART', 'SMILE', 'SMITH', 'SMOKE', 'SOLID', 'SOLVE', 'SORRY', 'SOUND', 'SOUTH', 'SPACE', 'SPARE', 'SPEAK', 'SPEED', 'SPEND', 'SPENT', 'SPLIT', 'SPOKE', 'SPORT', 'STAFF', 'STAGE', 'STAKE', 
'STAND', 'START', 'STATE', 'STEAM', 'STEEL', 'STICK', 'STILL', 'STOCK', 'STONE', 'STOOD', 'STORE', 'STORM', 'STORY', 'STRIP', 'STUCK', 'STUDY', 'STUFF', 'STYLE', 'SUGAR', 'SUITE', 'SUPER', 'SWEET', 'TABLE', 'TAKEN', 'TASTE', 'TAXES', 'TEACH', 'TEETH', 'TERRY', 'TEXAS', 'THANK', 'THEFT', 'THEIR', 'THEME', 'THERE', 'THESE', 'THICK', 'THING', 'THINK', 'THIRD', 'THOSE', 'THREE', 'THREW', 'THROW', 'TIGHT', 'TIMES', 'TIRED', 'TITLE', 'TODAY', 'TOPIC', 'TOTAL', 'TOUCH', 'TOUGH', 'TOWER', 'TRACK', 'TRADE', 'TRAIN', 'TREAT', 'TREND', 'TRIAL', 'TRIED', 'TRIES', 'TRUCK', 'TRULY', 'TRUST', 'TRUTH', 'TWICE', 'UNDER', 'UNDUE', 'UNION', 'UNITY', 'UNTIL', 'UPPER', 'UPSET', 'URBAN', 'USAGE', 'USUAL', 'VALID', 'VALUE', 'VIDEO', 'VIRUS', 'VISIT', 'VITAL', 'VOICE', 'WASTE', 'WATCH', 'WATER', 'WHEEL', 'WHERE', 'WHICH', 'WHILE', 'WHITE', 'WHOLE', 'WHOSE', 'WOMAN', 'WOMEN', 'WORLD', 'WORRY', 'WORSE', 'WORST', 'WORTH', 'WOULD', 'WOUND', 'WRITE', 'WRONG', 'WROTE', 'YIELD', 'YOUNG', 'YOUTH']
| 2,256.5 | 4,512 | 0.555949 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,500 | 0.775537 |
f225fb07e45aa0d878182c25737de8508c76dbb0 | 6,759 | py | Python | importanize/groups.py | xiachufang/importanize | 594e33b7827a9619c15aaacbe03b8cdf42a5c7a0 | [
"MIT"
]
| null | null | null | importanize/groups.py | xiachufang/importanize | 594e33b7827a9619c15aaacbe03b8cdf42a5c7a0 | [
"MIT"
]
| null | null | null | importanize/groups.py | xiachufang/importanize | 594e33b7827a9619c15aaacbe03b8cdf42a5c7a0 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import itertools
import operator
from collections import OrderedDict, defaultdict
from functools import reduce
import six
from .formatters import DEFAULT_FORMATTER, DEFAULT_LENGTH
from .utils import is_site_package, is_std_lib
@six.python_2_unicode_compatible
class BaseImportGroup(object):
def __init__(self, config=None, **kwargs):
self.config = config or {}
self.statements = kwargs.get("statements", [])
self.file_artifacts = kwargs.get("file_artifacts", {})
@property
def unique_statements(self):
return sorted(list(set(self.merged_statements)))
@property
def merged_statements(self):
"""
Merge statements with the same import stems
"""
leafless_counter = defaultdict(list)
counter = defaultdict(list)
for statement in self.statements:
if statement.leafs:
counter[statement.stem].append(statement)
else:
leafless_counter[statement.stem].append(statement)
merged_statements = list(itertools.chain(*leafless_counter.values()))
def merge(statements):
_special = []
_statements = []
for i in statements:
if i.leafs and i.leafs[0].name == "*":
_special.append(i)
else:
_statements.append(i)
_reduced = []
if _statements:
_reduced = [reduce(lambda a, b: a + b, _statements)]
return _special + _reduced
for statements in counter.values():
merged_statements.extend(merge(statements))
return merged_statements
def all_line_numbers(self):
return sorted(
list(
set(
list(
itertools.chain(
*map(
operator.attrgetter("line_numbers"),
self.statements,
)
)
)
)
)
)
def should_add_statement(self, statement):
raise NotImplementedError
def add_statement(self, statement):
if self.should_add_statement(statement):
self.statements.append(statement)
return True
return False
def as_string(self):
sep = self.file_artifacts.get("sep", "\n")
return sep.join(
map(operator.methodcaller("as_string"), self.unique_statements)
)
def formatted(self, formatter=DEFAULT_FORMATTER, length=DEFAULT_LENGTH):
sep = self.file_artifacts.get("sep", "\n")
return sep.join(
map(
operator.methodcaller(
"formatted", formatter=formatter, length=length
),
self.unique_statements,
)
)
def __str__(self):
return self.as_string()
class StdLibGroup(BaseImportGroup):
def should_add_statement(self, statement):
return is_std_lib(statement.root_module)
class SitePackagesGroup(BaseImportGroup):
def should_add_statement(self, statement):
return is_site_package(statement.root_module)
class PackagesGroup(BaseImportGroup):
def __init__(self, *args, **kwargs):
super(PackagesGroup, self).__init__(*args, **kwargs)
if "packages" not in self.config:
msg = (
'"package" config must be supplied ' "for packages import group"
)
raise ValueError(msg)
def should_add_statement(self, statement):
return statement.root_module in self.config.get("packages", [])
class LocalGroup(BaseImportGroup):
def should_add_statement(self, statement):
return statement.stem.startswith(".")
class RemainderGroup(BaseImportGroup):
def should_add_statement(self, statement):
return True
# -- RemainderGroup goes last and catches everything left over
GROUP_MAPPING = OrderedDict(
(
("stdlib", StdLibGroup),
("sitepackages", SitePackagesGroup),
("packages", PackagesGroup),
("local", LocalGroup),
("remainder", RemainderGroup),
)
)
def sort_groups(groups):
return sorted(
groups, key=lambda i: list(GROUP_MAPPING.values()).index(type(i))
)
@six.python_2_unicode_compatible
class ImportGroups(list):
def __init__(self, *args, **kwargs):
super(ImportGroups, self).__init__(*args)
self.file_artifacts = kwargs.get("file_artifacts", {})
def all_line_numbers(self):
return sorted(
list(
set(
list(
itertools.chain(
*map(
operator.methodcaller("all_line_numbers"), self
)
)
)
)
)
)
def add_group(self, config):
if "type" not in config:
msg = '"type" must be specified in ' "import group config"
raise ValueError(msg)
if config["type"] not in GROUP_MAPPING:
msg = '"{}" is not supported import group'.format(config["type"])
raise ValueError(msg)
self.append(GROUP_MAPPING[config["type"]](config))
def add_statement_to_group(self, statement):
groups_by_priority = sort_groups(self)
added = False
for group in groups_by_priority:
if group.add_statement(statement):
added = True
break
if not added:
msg = (
"Import statement was not added into "
"any of the import groups. "
"Perhaps you can consider adding "
'"remaining" import group which will '
"catch all remaining import statements."
)
raise ValueError(msg)
def as_string(self):
sep = self.file_artifacts.get("sep", "\n") * 2
return sep.join(
filter(None, map(operator.methodcaller("as_string"), self))
)
def formatted(self, formatter=DEFAULT_FORMATTER, length=DEFAULT_LENGTH):
sep = self.file_artifacts.get("sep", "\n") * 2
return sep.join(
filter(
None,
map(
operator.methodcaller(
"formatted", formatter=formatter, length=length
),
self,
),
)
)
def __str__(self):
return self.as_string()
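# Usage sketch (assumption -- the config dicts and `parsed_statements` are
# illustrative):
#
#     groups = ImportGroups()
#     groups.add_group({'type': 'stdlib'})
#     groups.add_group({'type': 'packages', 'packages': ['django']})
#     groups.add_group({'type': 'remainder'})
#     for statement in parsed_statements:
#         groups.add_statement_to_group(statement)
#     print(groups.formatted())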
| 28.884615 | 80 | 0.557627 | 5,927 | 0.876905 | 0 | 0 | 5,037 | 0.745229 | 0 | 0 | 736 | 0.108892 |
f22667e27e25306a81ed4197a3f3283e37b3daea | 846 | py | Python | NLP4CCB/migrations/0005_auto_20170415_2236.py | rossmechanic/know_your_nyms | 805ca845121fa93a38088f09cd0a430ddb9f95cf | [
"BSD-3-Clause"
]
| 1 | 2020-02-12T13:24:15.000Z | 2020-02-12T13:24:15.000Z | NLP4CCB/migrations/0005_auto_20170415_2236.py | rossmechanic/know_your_nyms | 805ca845121fa93a38088f09cd0a430ddb9f95cf | [
"BSD-3-Clause"
]
| null | null | null | NLP4CCB/migrations/0005_auto_20170415_2236.py | rossmechanic/know_your_nyms | 805ca845121fa93a38088f09cd0a430ddb9f95cf | [
"BSD-3-Clause"
]
| 1 | 2017-10-25T11:24:51.000Z | 2017-10-25T11:24:51.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-04-15 22:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("NLP4CCB", "0004_auto_20170330_0255")]
operations = [
migrations.RenameField(
model_name="userstat", old_name="index", new_name="meronyms_index"
),
migrations.AddField(
model_name="userstat",
name="antonyms_index",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="userstat",
name="hyponyms_index",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="userstat",
name="synonyms_index",
field=models.IntegerField(default=0),
),
]
| 26.4375 | 78 | 0.582742 | 727 | 0.859338 | 0 | 0 | 0 | 0 | 0 | 0 | 216 | 0.255319 |
f226b3e74e0c07da106f197b5ad2bd3632fb47b8 | 2,198 | py | Python | synchCams/start_server.py | ateshkoul/synchCams | 3f73cf593e27d57b72f65d453d13cc535646e86d | [
"MIT"
]
| null | null | null | synchCams/start_server.py | ateshkoul/synchCams | 3f73cf593e27d57b72f65d453d13cc535646e86d | [
"MIT"
]
| null | null | null | synchCams/start_server.py | ateshkoul/synchCams | 3f73cf593e27d57b72f65d453d13cc535646e86d | [
"MIT"
]
| null | null | null | import socket
import json
import pdb
import copy
def dict_to_bytes(the_dict):
string = json.dumps(the_dict).encode('utf-8')
return(string)
def bytes_to_dict(string):
the_dict = json.loads(string.decode('utf-8'))
return(the_dict)
class server_con():
def __init__(self,host='',port=30):
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.bind((host, port))
def server_read(self):
# pdb.set_trace()
print("Waiting to read ...")
self.s.listen(1)
self.conn, self.addr = self.s.accept()
print('Connected by', self.addr)
return_data = {}
try:
in_data = self.conn.recv(1024)
# pdb.set_trace()
if in_data: return_data = copy.deepcopy(in_data)
# if not in_data: break
print("Client Says: "+return_data.decode("utf-8"))
# self.conn.sendall(b"Server Says:hi")
except socket.error:
print("Error Occured.")
# self.conn.close()
return(bytes_to_dict(return_data))
def server_write(self,data,host="151.100.55.63",port=30):
# pdb.set_trace()
print('Writing values ...')
self.conn.sendall(dict_to_bytes(data))
# def server_read(host='',port=30):
# # host = '' # Symbolic name meaning all available interfaces
# # port = 30 # Arbitrary non-privileged port
# s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# s.bind((host, port))
# print(host , port)
# s.listen(1)
# conn, addr = s.accept()
# print('Connected by', addr)
# return_data = {}
# while True:
# try:
# in_data = conn.recv(1024)
# # pdb.set_trace()
# if in_data: return_data = copy.deepcopy(in_data)
# if not in_data: break
# print("Client Says: "+return_data.decode("utf-8"))
# conn.sendall(b"Server Says:hi")
# except socket.error:
# print("Error Occured.")
# break
# conn.close()
# return(bytes_to_dict(return_data))
# # return(return_data) | 27.475 | 73 | 0.556415 | 1,032 | 0.469518 | 0 | 0 | 0 | 0 | 0 | 0 | 1,104 | 0.502275 |
f226f9e28b1182a033e88cc3340054c8eee83b4e | 2,243 | py | Python | 9.part2.py | elp2/advent_of_code_2020 | 71e12e25769aa7d5154213077ffae595ad9a4019 | [
"Apache-2.0"
]
| 1 | 2021-12-02T15:19:36.000Z | 2021-12-02T15:19:36.000Z | 2020/9.part2.py | elp2/advent_of_code | 600e2db9a7d5b576937c9b39c5c6805db406f57b | [
"Apache-2.0"
]
| null | null | null | 2020/9.part2.py | elp2/advent_of_code | 600e2db9a7d5b576937c9b39c5c6805db406f57b | [
"Apache-2.0"
]
| null | null | null | from collections import defaultdict
def return_default():
return 0
def dd():
return defaultdict(return_default)
CHALLENGE_DAY = "9"
REAL = open(CHALLENGE_DAY + ".txt").read()
assert len(REAL) > 1
SAMPLE = open(CHALLENGE_DAY + ".sample.txt").read()
SAMPLE_EXPECTED = 127
# SAMPLE_EXPECTED =
def parse_lines(raw):
# Groups.
# split = raw.split("\n\n")
# return list(map(lambda group: group.split("\n"), split))
split = raw.split("\n")
# return split # raw
# return list(map(lambda l: l.split(" "), split)) # words.
return list(map(int, split))
# return list(map(lambda l: l.strip(), split)) # beware leading / trailing WS
def lastnums(nums, last, sumsto):
f = last - 25
to = last
print("considering ", f, to)
for j in range(f, to):
for k in range(f, to):
if j == k:
continue
if nums[j] + nums[k] == sumsto:
return True
return False
def pream(nums, last):
at = last
for i in range(last, len(nums)):
print(i)
if not lastnums(nums, i, nums[i]):
return nums[i]
else:
print("Not", nums[i])
def solve(raw):
parsed = parse_lines(raw)
# Debug here to make sure parsing is good.
TARGET=1639024365
for i in range(len(parsed)):
for j in range(i, (len(parsed))):
arr = parsed[i:j]
here = sum(arr)
if here == TARGET:
return min(arr) + max(arr)
return ret
def test_parsing(lines):
if isinstance(lines, list):
for i in range(min(5, len(lines))):
print(lines[i])
elif isinstance(lines, dict) or isinstance(lines, defaultdict):
nd = {}
for k in list(lines.keys())[0: 5]:
print("\"" + k + "\": " + str(lines[k]))
test_parsing(parse_lines(SAMPLE))
print("^^^^^^^^^PARSED SAMPLE SAMPLE^^^^^^^^^")
# sample = solve(SAMPLE)
# if SAMPLE_EXPECTED is None:
# print("*** SKIPPING SAMPLE! ***")
# else:
# assert sample == SAMPLE_EXPECTED
# print("*** SAMPLE PASSED ***")
solved = solve(REAL)
print("SOLUTION: ", solved)
# assert solved
| 25.202247 | 82 | 0.543914 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 621 | 0.276861 |
f227f1a6050eb38656085af87d1b77f4623a92c4 | 3,091 | py | Python | exif_address_finder/ExifAddressFinderManager.py | jonathanlurie/ExifAddressFinder | ddb3e526040a80534f2f72246f1b9f96c9c5d0b0 | [
"MIT"
]
| null | null | null | exif_address_finder/ExifAddressFinderManager.py | jonathanlurie/ExifAddressFinder | ddb3e526040a80534f2f72246f1b9f96c9c5d0b0 | [
"MIT"
]
| null | null | null | exif_address_finder/ExifAddressFinderManager.py | jonathanlurie/ExifAddressFinder | ddb3e526040a80534f2f72246f1b9f96c9c5d0b0 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
'''
Author : Jonathan Lurie
Email : [email protected]
Version : 0.1
Licence : MIT
description : The entry point to the library.
'''
import GeoToolbox
import exifread
import piexif
from IFD_KEYS_REFERENCE import *
import exifWriter
import os
class ExifAddressFinderManager:
_geotoolbox = None
def __init__(self):
self._geotoolbox = GeoToolbox.GeoToolbox()
# return a dictionnary {"lat": yy.yyy, "lon": xx.xxx}
# or None if not found
def _getGpsCoordinates(self, fileAddress):
f = open(fileAddress, 'rb')
# Return Exif tags
tags = exifread.process_file(f)
# add positionning
if('EXIF GPS GPSLatitude' in tags.keys() and 'EXIF GPS GPSLongitude' in tags.keys()):
# dealing with latitutes
latValues = tags["EXIF GPS GPSLatitude"].values
latRef = tags["EXIF GPS GPSLatitudeRef"]
latInt = float(latValues[0].num)
latDec = float(latValues[1].num) / float(latValues[1].den) / 60. + float(latValues[2].num) / float(latValues[2].den) / 3600.
lat = latInt + latDec
if(latRef.values != 'N'):
lat = lat * (-1)
# dealing with longitudes
lonValues = tags["EXIF GPS GPSLongitude"].values
lonRef = tags["EXIF GPS GPSLongitudeRef"]
lonInt = float(lonValues[0].num)
lonDec = float(lonValues[1].num) / float(lonValues[1].den) / 60. + float(lonValues[2].num) / float(lonValues[2].den) / 3600.
lon = lonInt + lonDec
if(lonRef.values != 'E'):
lon = lon * (-1)
return {"lat": lat, "lon": lon}
else:
return None
# return the address if found
# returns None if not retrieve
def _retrieveAddress(self, latitude, longitude):
address = self._geotoolbox.getAddress(latitude=latitude, longitude=longitude)
# if the address was well retrieve
if(address["status"]):
return address["address"]
else:
return None
# update the EXIF Decription field with the real postal address
def _updateDescription(self, fileAddress, locationAddress, addToFormer=False):
# reading exif
exifDict = piexif.load(fileAddress)
newDict = exifWriter.writeField(exifDict, DESCRIPTION_FIELD, locationAddress, addToFormer)
exifWriter.writeExifToFile(newDict, fileAddress)
def addAddressToImage(self, fileAddress, prefix="", suffix="", addToFormer=False):
coordinates = self._getGpsCoordinates(fileAddress)
if(not coordinates):
print("\tERROR: "+ os.path.basename(fileAddress) +" is not geo tagged")
return None
postalAddress = self._retrieveAddress(coordinates["lat"], coordinates["lon"])
if(not postalAddress):
print("\tERROR: The address was impossible to retrieve")
return None
self._updateDescription(fileAddress, prefix + postalAddress + suffix, addToFormer)
return 1
| 30.60396 | 136 | 0.620835 | 2,802 | 0.906503 | 0 | 0 | 0 | 0 | 0 | 0 | 774 | 0.250404 |
1ee6c5cf51fc01113d2c8df3b5c4886a89607d63 | 1,402 | py | Python | src/anaplan_api/Model.py | jeswils-ap/anaplan-api | e08ea75828a60e96024d596b2f30184c18fa31d3 | [
"BSD-2-Clause"
]
| 2 | 2021-09-23T08:49:40.000Z | 2022-03-28T08:40:02.000Z | src/anaplan_api/Model.py | jeswils-ap/anaplan-api | e08ea75828a60e96024d596b2f30184c18fa31d3 | [
"BSD-2-Clause"
]
| 3 | 2021-11-06T09:58:03.000Z | 2021-11-11T14:00:40.000Z | src/anaplan_api/Model.py | jeswils-ap/anaplan-api | e08ea75828a60e96024d596b2f30184c18fa31d3 | [
"BSD-2-Clause"
]
| 1 | 2022-02-13T15:59:42.000Z | 2022-02-13T15:59:42.000Z | import json
import logging
import requests
from typing import List
from requests.exceptions import HTTPError, ConnectionError, SSLError, Timeout, ConnectTimeout, ReadTimeout
from .User import User
from .ModelDetails import ModelDetails
logger = logging.getLogger(__name__)
class Model(User):
def get_models(self) -> List[ModelDetails]:
model_details_list = [ModelDetails]
model_list = {}
url = ''.join([super().get_url(), super().get_id(), "/models"])
authorization = super().get_conn().get_auth().get_auth_token()
get_header = {
"Authorization": authorization,
"Content-Type": "application/json"
}
logger.info(f"Fetching models for {super().get_id()}")
try:
model_list = json.loads(requests.get(url, headers=get_header, timeout=(5, 30)).text)
except (HTTPError, ConnectionError, SSLError, Timeout, ConnectTimeout, ReadTimeout) as e:
logger.error(f"Error getting models list: {e}", exc_info=True)
raise Exception(f"Error getting model list {e}")
except ValueError as e:
logger.error(f"Error loading model list {e}", exc_info=True)
raise Exception(f"Error loading model list {e}")
if 'models' in model_list:
models = model_list['models']
logger.info("Finished fetching models.")
for item in models:
model_details_list.append(ModelDetails(item))
return model_details_list
else:
raise AttributeError("Models not found in response.")
| 32.604651 | 106 | 0.733951 | 1,125 | 0.802425 | 0 | 0 | 0 | 0 | 0 | 0 | 299 | 0.213267 |
1ee821fc6ac5eced71be163ce2b2c80f9de72828 | 1,757 | py | Python | reproduction/Summarization/BertSum/model.py | KuNyaa/fastNLP | 22f9b87c54a4eebec7352c7ff772cd24685c7186 | [
"Apache-2.0"
]
| 1 | 2019-10-05T06:02:44.000Z | 2019-10-05T06:02:44.000Z | reproduction/Summarization/BertSum/model.py | awesomemachinelearning/fastNLP | 945b30bb6174751130744231aa26119bf9bb2601 | [
"Apache-2.0"
]
| 1 | 2019-12-09T06:34:44.000Z | 2019-12-09T06:34:44.000Z | reproduction/Summarization/BertSum/model.py | awesomemachinelearning/fastNLP | 945b30bb6174751130744231aa26119bf9bb2601 | [
"Apache-2.0"
]
| 2 | 2020-04-21T06:17:59.000Z | 2020-05-05T11:22:11.000Z | import torch
from torch import nn
from torch.nn import init
from fastNLP.modules.encoder.bert import BertModel
class Classifier(nn.Module):
def __init__(self, hidden_size):
super(Classifier, self).__init__()
self.linear = nn.Linear(hidden_size, 1)
self.sigmoid = nn.Sigmoid()
def forward(self, inputs, mask_cls):
h = self.linear(inputs).squeeze(-1) # [batch_size, seq_len]
sent_scores = self.sigmoid(h) * mask_cls.float()
return sent_scores
class BertSum(nn.Module):
def __init__(self, hidden_size=768):
super(BertSum, self).__init__()
self.hidden_size = hidden_size
self.encoder = BertModel.from_pretrained('/path/to/uncased_L-12_H-768_A-12')
self.decoder = Classifier(self.hidden_size)
def forward(self, article, segment_id, cls_id):
# print(article.device)
# print(segment_id.device)
# print(cls_id.device)
input_mask = 1 - (article == 0)
mask_cls = 1 - (cls_id == -1)
assert input_mask.size() == article.size()
assert mask_cls.size() == cls_id.size()
bert_out = self.encoder(article, token_type_ids=segment_id, attention_mask=input_mask)
bert_out = bert_out[0][-1] # last layer
sent_emb = bert_out[torch.arange(bert_out.size(0)).unsqueeze(1), cls_id]
sent_emb = sent_emb * mask_cls.unsqueeze(-1).float()
assert sent_emb.size() == (article.size(0), cls_id.size(1), self.hidden_size) # [batch_size, seq_len, hidden_size]
sent_scores = self.decoder(sent_emb, mask_cls) # [batch_size, seq_len]
assert sent_scores.size() == (article.size(0), cls_id.size(1))
return {'pred': sent_scores, 'mask': mask_cls}
| 33.788462 | 122 | 0.647126 | 1,639 | 0.93284 | 0 | 0 | 0 | 0 | 0 | 0 | 211 | 0.120091 |
1ee8df8ae43bb7100b118b6ba8aa926ea9cbaa1d | 2,226 | py | Python | p4p2p/dht/constants.py | ntoll/p4p2p | 189a35ae964bef7e6db094283f3ead79c6356a6c | [
"MIT"
]
| 8 | 2015-03-08T11:02:58.000Z | 2020-04-20T15:36:24.000Z | p4p2p/dht/constants.py | ntoll/p4p2p | 189a35ae964bef7e6db094283f3ead79c6356a6c | [
"MIT"
]
| null | null | null | p4p2p/dht/constants.py | ntoll/p4p2p | 189a35ae964bef7e6db094283f3ead79c6356a6c | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""
Defines constants used by P4P2P. Usually these are based upon concepts from
the Kademlia DHT and where possible naming is derived from the original
Kademlia paper as are the suggested default values.
"""
#: Represents the degree of parallelism in network calls.
ALPHA = 3
#: The maximum number of contacts stored in a bucket. Must be an even number.
K = 20
#: The default maximum time a NodeLookup is allowed to take (in seconds).
LOOKUP_TIMEOUT = 600
#: The timeout for network connections (in seconds).
RPC_TIMEOUT = 5
#: The timeout for receiving complete message once a connection is made (in
#: seconds). Ensures there are no stale deferreds in the node's _pending
#: dictionary.
RESPONSE_TIMEOUT = 1800 # half an hour
#: How long to wait before an unused bucket is refreshed (in seconds).
REFRESH_TIMEOUT = 3600 # 1 hour
#: How long to wait before a node replicates any data it stores (in seconds).
REPLICATE_INTERVAL = REFRESH_TIMEOUT
#: How long to wait before a node checks whether any buckets need refreshing or
#: data needs republishing (in seconds).
REFRESH_INTERVAL = int(REFRESH_TIMEOUT / 6) # Every 10 minutes.
#: The number of failed remote procedure calls allowed for a peer node. If this
#: is equalled or exceeded then the contact is removed from the routing table.
ALLOWED_RPC_FAILS = 5
#: The number of nodes to attempt to use to store a value in the network.
DUPLICATION_COUNT = K
#: The duration (in seconds) that is added to a value's creation time in order
#: to work out its expiry timestamp. -1 denotes no expiry point.
EXPIRY_DURATION = -1
#: Defines the errors that can be reported between nodes in the network.
ERRORS = {
# The message simply didn't make any sense.
1: 'Bad message',
# The message was parsed but not recognised.
2: 'Unknown message type',
# The message was parsed and recognised but the node encountered a problem
# when dealing with it.
3: 'Internal error',
# The message was too big for the node to handle.
4: 'Message too big',
# Unsupported version of the protocol.
5: 'Unsupported protocol',
# The message could not be cryptographically verified.
6: 'Unverifiable provenance'
}
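# A hedged illustration (not part of the original module): pairing an error
# code with its description when building a reply.
def describe_error(code):
    """Return a human readable string for an error code (sketch only)."""
    return '{}: {}'.format(code, ERRORS.get(code, 'Unknown error'))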
| 35.903226 | 79 | 0.737646 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,841 | 0.827044 |
1eea6f3a21c71f86ef8549a937b8ac0d9d222692 | 600 | py | Python | turtle-crossing/car_manager.py | twbm/Git-Learning-Thingy | 7dce0d4f1329df911e1e7008800f843217a5a9a2 | [
"MIT"
]
| 1 | 2022-03-20T17:00:32.000Z | 2022-03-20T17:00:32.000Z | turtle-crossing/car_manager.py | Theodor45/Projects | e311e4a3ae047d6d01d24b3b868ee05ac595f391 | [
"MIT"
]
| null | null | null | turtle-crossing/car_manager.py | Theodor45/Projects | e311e4a3ae047d6d01d24b3b868ee05ac595f391 | [
"MIT"
]
| null | null | null | from turtle import Turtle
import random
COLORS = ["red", "orange", "yellow", "green", "blue", "purple"]
STARTING_MOVE_DISTANCE = 5
MOVE_INCREMENT = 10
class CarManager(Turtle):
def __del__(self):
print(f"Deleted: {self}")
def __init__(self):
super().__init__('square')
self.speed('fast')
self.hideturtle()
self.setheading(180)
self.shapesize(1, 2.5)
self.color(random.choice(COLORS))
self.penup()
self.goto(320, random.randint(-250, 250))
self.showturtle()
def move(self):
self.forward(10)
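# Hedged demo (not part of the original file); assumes a display is available.
if __name__ == "__main__":
    from turtle import Screen
    screen = Screen()
    car = CarManager()
    for _ in range(30):
        car.move()
    screen.exitonclick()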
| 22.222222 | 63 | 0.598333 | 445 | 0.741667 | 0 | 0 | 0 | 0 | 0 | 0 | 74 | 0.123333 |
1eec0296b555ebcc98cbc7b360b616946e53db82 | 941 | py | Python | manabi/apps/flashcards/permissions.py | aehlke/manabi | 1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b | [
"MIT"
]
| 14 | 2015-10-03T07:34:28.000Z | 2021-09-20T07:10:29.000Z | manabi/apps/flashcards/permissions.py | aehlke/manabi | 1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b | [
"MIT"
]
| 23 | 2019-10-25T08:47:23.000Z | 2022-01-30T02:00:45.000Z | manabi/apps/flashcards/permissions.py | aehlke/manabi | 1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b | [
"MIT"
]
| 7 | 2016-10-04T08:10:36.000Z | 2021-09-20T07:10:33.000Z | from django.shortcuts import get_object_or_404
from rest_framework import permissions
from manabi.apps.flashcards.models import Deck
WRITE_ACTIONS = ['create', 'update', 'partial_update', 'delete']
class DeckSynchronizationPermission(permissions.BasePermission):
message = "You don't have permission to add this deck to your library."
def has_permission(self, request, view):
if view.action in WRITE_ACTIONS:
upstream_deck = get_object_or_404(
Deck, pk=request.data['synchronized_with'])
return upstream_deck.shared
return True
class IsOwnerPermission(permissions.BasePermission):
message = "You don't own this."
def has_object_permission(self, request, view, obj):
if view.action in WRITE_ACTIONS:
return (
request.user.is_authenticated and
obj.owner.pk == request.user.pk
)
return True
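# --- Hedged usage sketch (not part of the original module) ------------------
# The viewset and serializer below are illustrative assumptions showing how
# these permission classes would typically be attached in DRF.
from rest_framework import serializers, viewsets


class _DeckSerializer(serializers.ModelSerializer):
    """Minimal serializer for the sketch below."""
    class Meta:
        model = Deck
        fields = '__all__'


class DeckViewSet(viewsets.ModelViewSet):
    queryset = Deck.objects.all()
    serializer_class = _DeckSerializer
    permission_classes = [DeckSynchronizationPermission, IsOwnerPermission]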
| 30.354839 | 75 | 0.679065 | 734 | 0.780021 | 0 | 0 | 0 | 0 | 0 | 0 | 141 | 0.149841 |
1eef8337b3089adedce496c555766805e7a14c76 | 365 | py | Python | Scripts/create_phone_number.py | yogeshwaran01/Mini-Projects | c1a8790079d904405d49c71d6903ca4daaa77b38 | [
"MIT"
]
| 4 | 2020-09-30T17:18:13.000Z | 2021-06-11T21:02:10.000Z | Scripts/create_phone_number.py | yogeshwaran01/Mini-Projects | c1a8790079d904405d49c71d6903ca4daaa77b38 | [
"MIT"
]
| null | null | null | Scripts/create_phone_number.py | yogeshwaran01/Mini-Projects | c1a8790079d904405d49c71d6903ca4daaa77b38 | [
"MIT"
]
| 1 | 2021-04-02T14:51:00.000Z | 2021-04-02T14:51:00.000Z | """
Function convert lists of 10 elements
into in the format of phone number
Example,
(123) 456-789
"""
def create_phone_number(n: list) -> str:
"""
>>> create_phone_number([1,2,3,4,5,6,7,8,9,0])
'(123) 456-7890'
"""
return "({}{}{}) {}{}{}-{}{}{}{}".format(*n)
if __name__ == "__main__":
import doctest
doctest.testmod()
| 15.869565 | 50 | 0.556164 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 232 | 0.635616 |
1eefb7ddb845bc64282cda5039ab52bf01d96b1d | 1,539 | py | Python | pointscan/scan.py | gtfierro/point_label_sharing | add0db472ec0bade566c3c1bf9428786c759d980 | [
"BSD-3-Clause"
]
| 5 | 2019-08-19T10:16:49.000Z | 2021-12-19T17:18:18.000Z | pointscan/scan.py | gtfierro/point_label_sharing | add0db472ec0bade566c3c1bf9428786c759d980 | [
"BSD-3-Clause"
]
| null | null | null | pointscan/scan.py | gtfierro/point_label_sharing | add0db472ec0bade566c3c1bf9428786c759d980 | [
"BSD-3-Clause"
]
| 1 | 2019-10-11T15:48:42.000Z | 2019-10-11T15:48:42.000Z | import click
import logging
import pandas as pd
from pathlib import Path
@click.group()
def main():
pass
@main.command(help="Scan for BACnet devices on your network")
@click.option("--ip", help="source IP to use (interface)")
@click.option("--dest", default=".", help="destination of scraped points")
def scan(ip, dest):
import BAC0
BAC0.log_level('error')
c = BAC0.connect(ip=ip)
c.discover()
points = []
for dev in c.devices:
logging.info(f"Scanning BACnet device {dev}")
devname = f"{dev[0]}-{dev[1]}-{dev[2]}-{dev[3]}.csv"
device = BAC0.device(dev[2], dev[3], c, history_size=1)
for point in device.points:
try:
d = {
'name': getattr(point.properties, 'name', None),
'units': getattr(point.properties, 'units', None),
'description': getattr(point.properties,
'description', None),
}
points.append(d)
except Exception as e:
logging.error(point)
logging.error(e)
c.disconnect()
df = pd.DataFrame.from_records(points)
df.to_csv(Path(dest) / Path(devname.replace(' ', '_')), index=False)
@main.command(help="Run webserver to clean/publish datasets")
@click.option("--port", default=5000, help="webserver port")
def web(port):
from pointscan.app import app
app.run(host='0.0.0.0', port=port, debug=True)
if __name__ == '__main__':
main()
| 28.5 | 74 | 0.57245 | 0 | 0 | 0 | 0 | 1,417 | 0.920728 | 0 | 0 | 341 | 0.221572 |
1ef0038cb2a91e1c8d60b3c9d94b61a72a9905a8 | 569 | py | Python | var/spack/repos/builtin/packages/py-jdatetime/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
]
| 1 | 2019-09-15T23:55:48.000Z | 2019-09-15T23:55:48.000Z | var/spack/repos/builtin/packages/py-jdatetime/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
]
| null | null | null | var/spack/repos/builtin/packages/py-jdatetime/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
]
| 1 | 2017-01-21T17:19:32.000Z | 2017-01-21T17:19:32.000Z | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJdatetime(PythonPackage):
"""jdatetime is Jalali implementation of Python's datetime module"""
homepage = "https://github.com/slashmili/python-jalali"
url = "https://pypi.io/packages/source/j/jdatetime/jdatetime-3.6.2.tar.gz"
version('3.6.2', sha256='a589e35f0dab89283c1a3de9d70ed6cf657932aaed8e8ce1b0e5801aaab1da67')
| 35.5625 | 95 | 0.755712 | 348 | 0.611599 | 0 | 0 | 0 | 0 | 0 | 0 | 446 | 0.783831 |
1ef103ee8055d6489e89b6cf03c9f9136b33632a | 646 | py | Python | interview/leet/147_Insertion_Sort_List_Challenge.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | [
"MIT"
]
| 1 | 2020-10-12T13:33:29.000Z | 2020-10-12T13:33:29.000Z | interview/leet/147_Insertion_Sort_List_Challenge.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | [
"MIT"
]
| null | null | null | interview/leet/147_Insertion_Sort_List_Challenge.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | [
"MIT"
]
| 1 | 2016-11-09T07:28:45.000Z | 2016-11-09T07:28:45.000Z | #!/usr/bin/env python
from linklist import *
class Solution:
    def insertionSortList(self, head):
        # Value-based insertion sort: node links never change; values are
        # rotated into place instead.
        dumm = head
        while head:
            val, head, prev = head.val, head.next, dumm
            # Scan the sorted prefix for the insertion point. Strict '>' is
            # required here: '>=' would walk past the current node into the
            # unsorted tail and eventually off the end of the list.
            while val > prev.val:
                prev = prev.next
            # Shift values one slot to the right up to the current node,
            # dropping val into the freed position.
            while prev != head:
                prev.val, prev, val = val, prev.next, prev.val
        return dumm
sol = Solution()
nodeStringList = [
'[4,2,1,3]',
'[-1,5,3,4,0]',
'[3,2]',
'[23]',
'[]'
]
for nodeString in nodeStringList:
head = linkListBuilder(nodeString)
traverse(head)
traverse(sol.insertionSortList(head))
| 22.275862 | 62 | 0.53096 | 333 | 0.51548 | 0 | 0 | 0 | 0 | 0 | 0 | 63 | 0.097523 |
1ef14d5232899017df5a28aea662b5304b5bbc53 | 976 | py | Python | robocrm/migrations/0020_auto_20141027_0145.py | CMU-Robotics-Club/roboticsclub.org | 5f2ad4a15dc62160c6d03c87c121e934cacb8228 | [
"MIT"
]
| null | null | null | robocrm/migrations/0020_auto_20141027_0145.py | CMU-Robotics-Club/roboticsclub.org | 5f2ad4a15dc62160c6d03c87c121e934cacb8228 | [
"MIT"
]
| 16 | 2015-01-01T03:42:36.000Z | 2016-06-21T05:14:16.000Z | robocrm/migrations/0020_auto_20141027_0145.py | CMU-Robotics-Club/roboticsclub.org | 5f2ad4a15dc62160c6d03c87c121e934cacb8228 | [
"MIT"
]
| 2 | 2015-07-23T14:37:16.000Z | 2021-09-11T01:23:25.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('robocrm', '0019_auto_20141021_1157'),
]
operations = [
migrations.RemoveField(
model_name='robouser',
name='sec_major_one',
),
migrations.RemoveField(
model_name='robouser',
name='sec_major_two',
),
migrations.AlterField(
model_name='robouser',
name='cell',
field=models.DecimalField(help_text='Cell Phone # if you wish to provide it to Officers', blank=True, decimal_places=0, null=True, max_digits=10),
),
migrations.AlterField(
model_name='robouser',
name='magnetic',
field=models.CharField(help_text='9 Character Magnetic Card ID(found on Student ID)', max_length=9, null=True, blank=True),
),
]
| 29.575758 | 158 | 0.599385 | 867 | 0.88832 | 0 | 0 | 0 | 0 | 0 | 0 | 246 | 0.252049 |
1ef2ba31fbb403bcb4ce6125ac2b8a6fd53306d0 | 527 | py | Python | src/tests/flow.py | SeleSchaefer/super_resolution | bf28a959fb150ceeadbd9f0bcfc12f3025cf82f4 | [
"MIT"
]
| 5 | 2019-11-11T10:01:52.000Z | 2020-12-08T11:56:33.000Z | src/tests/flow.py | SeleSchaefer/super_resolution | bf28a959fb150ceeadbd9f0bcfc12f3025cf82f4 | [
"MIT"
]
| 1 | 2020-06-13T06:39:44.000Z | 2020-06-13T06:39:44.000Z | src/tests/flow.py | SeleSchaefer/super_resolution | bf28a959fb150ceeadbd9f0bcfc12f3025cf82f4 | [
"MIT"
]
| 1 | 2020-07-16T23:07:28.000Z | 2020-07-16T23:07:28.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import cv2
import imageio
import numpy as np
from tar.miscellaneous import convert_flow_to_color
prev = imageio.imread("ressources/1_1.png")
prev = cv2.cvtColor(prev, cv2.COLOR_RGB2GRAY)
curr = imageio.imread("ressources/1_2.png")
curr = cv2.cvtColor(curr, cv2.COLOR_RGB2GRAY)
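# Farneback arguments below, in order: pyr_scale=0.9, levels=15, winsize=20,
# iterations=100, poly_n=10, poly_sigma=1.5, with a Gaussian window.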
flow = cv2.calcOpticalFlowFarneback(prev, curr, None, 0.9, 15, 20, 100, 10, 1.5, cv2.OPTFLOW_FARNEBACK_GAUSSIAN)
rgb = convert_flow_to_color(flow)
imageio.imsave("/Users/sele/Desktop/test.png", rgb)
| 29.277778 | 112 | 0.759013 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 115 | 0.218216 |
1ef394d030837a85a23ff0b3c23491f9f879dcc0 | 998 | py | Python | assignment4/utils.py | nicedi/ML_course_projects | 136a18ec8615ae72bb60b4d60e920beb77728115 | [
"Apache-2.0"
]
| null | null | null | assignment4/utils.py | nicedi/ML_course_projects | 136a18ec8615ae72bb60b4d60e920beb77728115 | [
"Apache-2.0"
]
| null | null | null | assignment4/utils.py | nicedi/ML_course_projects | 136a18ec8615ae72bb60b4d60e920beb77728115 | [
"Apache-2.0"
]
| null | null | null | # -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
def plot_loss(model, n_iter):
plt.figure()
plt.plot(model.trainloss, 'b-', model.validloss, 'r-')
plt.xlim(0, n_iter)
plt.xlabel('iteration')
plt.ylabel('loss')
plt.title('learning curve')
plt.legend(['training loss', 'validation loss'])
plt.show()
def plot_F1(model, n_iter):
plt.figure()
plt.plot(model.trainF1, 'b-', model.validF1, 'r-')
plt.xlim(0, n_iter)
plt.xlabel('iteration')
plt.ylabel('F1 score')
plt.title('F1 metric curve')
plt.legend(['training F1', 'validation F1'], loc='lower right')
plt.show()
def confusion_matrix(threshold, y_hat, y_target):
    # Task 2: implement this function; it should return TP, FP, FN, TN.
    # Predictions above the threshold become 1, otherwise 0.
    # Hint: compare the values in y_hat and y_target to count the
    # true/false positives and negatives.
    pred = (y_hat > threshold).astype(np.int32)
    target = y_target.astype(np.int32)
    TP = int(np.sum((pred == 1) & (target == 1)))
    FP = int(np.sum((pred == 1) & (target == 0)))
    FN = int(np.sum((pred == 0) & (target == 1)))
    TN = int(np.sum((pred == 0) & (target == 0)))
    return TP, FP, FN, TN
| 27.722222 | 69 | 0.625251 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 459 | 0.421875 |
1ef4bd40a0edef859ca09644504d0ac02de309a6 | 746 | py | Python | post/migrations/0009_auto_20171207_2320.py | silvareal/personal-blog | 9ed8ac48864510cd5b3227b7b0f7d335beb648de | [
"MIT"
]
| 2 | 2018-03-15T16:53:11.000Z | 2020-01-17T15:56:33.000Z | post/migrations/0009_auto_20171207_2320.py | silvareal/personal-blog | 9ed8ac48864510cd5b3227b7b0f7d335beb648de | [
"MIT"
]
| null | null | null | post/migrations/0009_auto_20171207_2320.py | silvareal/personal-blog | 9ed8ac48864510cd5b3227b7b0f7d335beb648de | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-12-07 22:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('post', '0008_auto_20171207_2256'),
]
operations = [
migrations.RemoveField(
model_name='post',
name='category',
),
migrations.AddField(
model_name='post',
name='category',
field=models.CharField(choices=[('frontend', 'Frontend'), ('backend', 'Backend'), ('interview', 'Interview'), ('devop', 'Devop')], default='backend', max_length=15),
),
migrations.DeleteModel(
name='Category',
),
]
| 26.642857 | 177 | 0.577748 | 590 | 0.790885 | 0 | 0 | 0 | 0 | 0 | 0 | 225 | 0.301609 |
1ef4eeb144b92d317488e7746cdc05ddecffcf45 | 3,018 | py | Python | tests/test_utils_project.py | FingerCrunch/scrapy | 3225de725720bba246ba8c9845fe4b84bc0c82e7 | [
"BSD-3-Clause"
]
| 41,267 | 2015-01-01T07:39:25.000Z | 2022-03-31T20:09:40.000Z | tests/test_utils_project.py | FingerCrunch/scrapy | 3225de725720bba246ba8c9845fe4b84bc0c82e7 | [
"BSD-3-Clause"
]
| 4,420 | 2015-01-02T09:35:38.000Z | 2022-03-31T22:53:32.000Z | tests/test_utils_project.py | FingerCrunch/scrapy | 3225de725720bba246ba8c9845fe4b84bc0c82e7 | [
"BSD-3-Clause"
]
| 11,080 | 2015-01-01T18:11:30.000Z | 2022-03-31T15:33:19.000Z | import unittest
import os
import tempfile
import shutil
import contextlib
from pytest import warns
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.project import data_path, get_project_settings
@contextlib.contextmanager
def inside_a_project():
prev_dir = os.getcwd()
project_dir = tempfile.mkdtemp()
try:
os.chdir(project_dir)
with open('scrapy.cfg', 'w') as f:
            # create an empty scrapy.cfg
            pass
yield project_dir
finally:
os.chdir(prev_dir)
shutil.rmtree(project_dir)
class ProjectUtilsTest(unittest.TestCase):
def test_data_path_outside_project(self):
self.assertEqual(
os.path.join('.scrapy', 'somepath'),
data_path('somepath')
)
abspath = os.path.join(os.path.sep, 'absolute', 'path')
self.assertEqual(abspath, data_path(abspath))
def test_data_path_inside_project(self):
with inside_a_project() as proj_path:
expected = os.path.join(proj_path, '.scrapy', 'somepath')
self.assertEqual(
os.path.realpath(expected),
os.path.realpath(data_path('somepath'))
)
abspath = os.path.join(os.path.sep, 'absolute', 'path')
self.assertEqual(abspath, data_path(abspath))
@contextlib.contextmanager
def set_env(**update):
modified = set(update.keys()) & set(os.environ.keys())
update_after = {k: os.environ[k] for k in modified}
remove_after = frozenset(k for k in update if k not in os.environ)
try:
os.environ.update(update)
yield
finally:
os.environ.update(update_after)
for k in remove_after:
os.environ.pop(k)
class GetProjectSettingsTestCase(unittest.TestCase):
def test_valid_envvar(self):
value = 'tests.test_cmdline.settings'
envvars = {
'SCRAPY_SETTINGS_MODULE': value,
}
with set_env(**envvars), warns(None) as warnings:
settings = get_project_settings()
assert not warnings
assert settings.get('SETTINGS_MODULE') == value
def test_invalid_envvar(self):
envvars = {
'SCRAPY_FOO': 'bar',
}
with set_env(**envvars), warns(None) as warnings:
get_project_settings()
assert len(warnings) == 1
assert warnings[0].category == ScrapyDeprecationWarning
assert str(warnings[0].message).endswith(': FOO')
def test_valid_and_invalid_envvars(self):
value = 'tests.test_cmdline.settings'
envvars = {
'SCRAPY_FOO': 'bar',
'SCRAPY_SETTINGS_MODULE': value,
}
with set_env(**envvars), warns(None) as warnings:
settings = get_project_settings()
assert len(warnings) == 1
assert warnings[0].category == ScrapyDeprecationWarning
assert str(warnings[0].message).endswith(': FOO')
assert settings.get('SETTINGS_MODULE') == value
| 30.795918 | 70 | 0.629556 | 2,017 | 0.668323 | 714 | 0.236581 | 768 | 0.254473 | 0 | 0 | 321 | 0.106362 |
1ef590187d92be6eb1062c6742984e4a21a536f0 | 212 | py | Python | trainer/__init__.py | Greeser/gate-decorator-pruning | 1069fc89099100091412b6f89ead0519d382c518 | [
"Apache-2.0"
]
| 192 | 2019-09-18T10:02:16.000Z | 2022-03-24T16:31:18.000Z | trainer/__init__.py | pawopawo/gate-decorator-pruning | d89021802fa56b1eba97921db3d8cadcacdd2073 | [
"Apache-2.0"
]
| 25 | 2019-09-24T10:53:51.000Z | 2022-01-18T07:13:52.000Z | trainer/__init__.py | pawopawo/gate-decorator-pruning | d89021802fa56b1eba97921db3d8cadcacdd2073 | [
"Apache-2.0"
]
| 33 | 2019-09-19T02:21:58.000Z | 2022-03-31T10:04:20.000Z | from trainer.normal import NormalTrainer
from config import cfg
def get_trainer():
pair = {
'normal': NormalTrainer
}
assert (cfg.train.trainer in pair)
return pair[cfg.train.trainer]()
| 19.272727 | 40 | 0.679245 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0.037736 |
1ef715bb94a229f05900f1d0c867b3d0fe21f76d | 776 | py | Python | Tasks/Community/ts_scriptExamples/pythonLogging.py | nneul/Velocity-assets | 9be7cd6f483754871c5a541d0083fbe933dfb456 | [
"MIT"
]
| 4 | 2019-05-27T23:36:34.000Z | 2020-11-12T17:08:04.000Z | Tasks/Community/ts_scriptExamples/pythonLogging.py | nneul/Velocity-assets | 9be7cd6f483754871c5a541d0083fbe933dfb456 | [
"MIT"
]
| 12 | 2019-04-17T02:47:25.000Z | 2021-04-02T09:15:37.000Z | Tasks/Community/ts_scriptExamples/pythonLogging.py | nneul/Velocity-assets | 9be7cd6f483754871c5a541d0083fbe933dfb456 | [
"MIT"
]
| 15 | 2018-04-26T05:18:12.000Z | 2021-11-06T04:44:58.000Z | #!/usr/bin/python
import logging
# create logger
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create file handler and set its level to warning
fh = logging.FileHandler('pythonLogging.log')
fh.setLevel(logging.WARNING)
# create formatter
formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s")
# add formatter to ch and fh
ch.setFormatter(formatter)
fh.setFormatter(formatter)
# add ch and fh to logger
logger.addHandler(ch)
logger.addHandler(fh)
# "application" code
logger.debug("debug message")
logger.info("info message")
logger.warn("warn message")
logger.error("error message")
logger.critical("critical message")
print('\nDone')
| 25.866667 | 72 | 0.76933 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 364 | 0.469072 |
1ef7b25dff5a6ddf0729f1a5e0bea3ab89df1ed3 | 3,565 | py | Python | google/datalab/commands/_datalab.py | freyrsae/pydatalab | 9aba1ac6bbe8e1384e7a4b07c5042af84348797d | [
"Apache-2.0"
]
| 198 | 2016-07-14T19:47:52.000Z | 2022-03-15T08:45:21.000Z | google/datalab/commands/_datalab.py | freyrsae/pydatalab | 9aba1ac6bbe8e1384e7a4b07c5042af84348797d | [
"Apache-2.0"
]
| 534 | 2016-07-15T19:12:43.000Z | 2022-03-11T23:11:39.000Z | google/datalab/commands/_datalab.py | freyrsae/pydatalab | 9aba1ac6bbe8e1384e7a4b07c5042af84348797d | [
"Apache-2.0"
]
| 86 | 2016-07-13T17:39:05.000Z | 2021-11-03T03:39:41.000Z | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Platform library - datalab cell magic."""
from __future__ import absolute_import
from __future__ import unicode_literals
try:
import IPython
import IPython.core.display
import IPython.core.magic
except ImportError:
raise Exception('This module can only be loaded in ipython.')
import google.datalab.utils.commands
@IPython.core.magic.register_line_cell_magic
def datalab(line, cell=None):
"""Implements the datalab cell magic for ipython notebooks.
Args:
    line: the contents of the datalab magic line.
    cell: the contents of the cell body, if any.
Returns:
The results of executing the cell.
"""
parser = google.datalab.utils.commands.CommandParser(
prog='%datalab',
description="""
Execute operations that apply to multiple Datalab APIs.
Use "%datalab <command> -h" for help on a specific command.
""")
config_parser = parser.subcommand(
'config', help='List or set API-specific configurations.')
config_sub_commands = config_parser.add_subparsers(dest='command')
# %%datalab config list
config_list_parser = config_sub_commands.add_parser(
'list', help='List configurations')
config_list_parser.set_defaults(func=_config_list_fn)
# %%datalab config set -n <NAME> -v <VALUE>
config_set_parser = config_sub_commands.add_parser(
'set', help='Set configurations')
config_set_parser.add_argument(
'-n', '--name',
help='The name of the configuration value', required=True)
config_set_parser.add_argument(
'-v', '--value', help='The value to set', required=True)
config_set_parser.set_defaults(func=_config_set_fn)
project_parser = parser.subcommand(
'project', help='Get or set the default project ID')
project_sub_commands = project_parser.add_subparsers(dest='command')
# %%datalab project get
project_get_parser = project_sub_commands.add_parser(
'get', help='Get the default project ID')
project_get_parser.set_defaults(func=_project_get_fn)
# %%datalab project set -p <PROJECT_ID>
project_set_parser = project_sub_commands.add_parser(
'set', help='Set the default project ID')
project_set_parser.add_argument(
'-p', '--project', help='The default project ID', required=True)
project_set_parser.set_defaults(func=_project_set_fn)
return google.datalab.utils.commands.handle_magic_line(line, cell, parser)
def _config_list_fn(args, cell):
ctx = google.datalab.Context.default()
return google.datalab.utils.commands.render_dictionary([ctx.config])
def _config_set_fn(args, cell):
name = args['name']
value = args['value']
ctx = google.datalab.Context.default()
ctx.config[name] = value
return google.datalab.utils.commands.render_dictionary([ctx.config])
def _project_get_fn(args, cell):
ctx = google.datalab.Context.default()
return google.datalab.utils.commands.render_text(ctx.project_id)
def _project_set_fn(args, cell):
project = args['project']
ctx = google.datalab.Context.default()
ctx.set_project_id(project)
return
| 33.317757 | 76 | 0.748387 | 0 | 0 | 0 | 0 | 1,976 | 0.554278 | 0 | 0 | 1,482 | 0.415708 |
1ef7c90725f50f509ebf7ce67bf02498f0dcedf7 | 181 | py | Python | src/server/__main__.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | [
"MIT"
]
| 1 | 2021-06-20T05:47:53.000Z | 2021-06-20T05:47:53.000Z | src/server/__main__.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | [
"MIT"
]
| null | null | null | src/server/__main__.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | [
"MIT"
]
| null | null | null | from sys import argv
from server.AServer import AServer
if '--old' in argv:
from server.server import Server
Server()
else:
AServer( websocket='--websocket' in argv ).Start()
| 16.454545 | 51 | 0.729282 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 20 | 0.110497 |
1ef930c42df2781ea2ef6709774093b794cfc83e | 3,081 | py | Python | testing/tests/registers.py | Wynjones1/gbvhdl | 46cef04cef308967ea4764eeeaf7d611dc783ae4 | [
"MIT"
]
| null | null | null | testing/tests/registers.py | Wynjones1/gbvhdl | 46cef04cef308967ea4764eeeaf7d611dc783ae4 | [
"MIT"
]
| null | null | null | testing/tests/registers.py | Wynjones1/gbvhdl | 46cef04cef308967ea4764eeeaf7d611dc783ae4 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python2.7
from common import *
from random import randint, choice
registers = {\
"a" : int("0000", 2),
"f" : int("0001", 2),
"b" : int("0010", 2),
"c" : int("0011", 2),
"d" : int("0100", 2),
"e" : int("0101", 2),
"h" : int("0110", 2),
"l" : int("0111", 2),
"af" : int("1000", 2),
"bc" : int("1001", 2),
"de" : int("1010", 2),
"hl" : int("1011", 2),
"sp" : int("1100", 2),
"pc" : int("1101", 2),
}
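# Each emitted line is one test vector for the VHDL register file:
# <write sel (4b)> <read sel (4b)> <we> <write data (16b)> <expected read (16b)> #<names>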
def output_line(fp, reg_write, reg_read, we,
write_data, read_data, reg_w_name, reg_r_name):
fp.write("%s %s %s %s %s #%s %s\n" %
(to_bin(reg_write, 4),
to_bin(reg_read, 4),
"1" if we else "0",
to_bin(write_data, 16),
to_bin(read_data, 16),
reg_w_name,
reg_r_name))
class Registers(object):
def __init__(self):
self.regs = [0] * 8
self.sp = 0
self.pc = 0
def write(self, reg, value):
if reg == "af":
self.regs[registers["a"]] = (value >> 8) & 0xff
self.regs[registers["f"]] = (value >> 0) & 0xff
elif reg == "bc":
self.regs[registers["b"]] = (value >> 8) & 0xff
self.regs[registers["c"]] = (value >> 0) & 0xff
elif reg == "de":
self.regs[registers["d"]] = (value >> 8) & 0xff
self.regs[registers["e"]] = (value >> 0) & 0xff
elif reg == "hl":
self.regs[registers["h"]] = (value >> 8) & 0xff
self.regs[registers["l"]] = (value >> 0) & 0xff
elif reg == "sp":
self.sp = value
elif reg == "pc":
self.pc = value
else:
self.regs[registers[reg]] = (value) & 0xff
def read(self, reg):
if reg == "af":
return self.regs[registers["a"]] << 8 | self.regs[registers["f"]];
elif reg == "bc":
return self.regs[registers["b"]] << 8 | self.regs[registers["c"]];
elif reg == "de":
return self.regs[registers["d"]] << 8 | self.regs[registers["e"]];
elif reg == "hl":
return self.regs[registers["h"]] << 8 | self.regs[registers["l"]];
elif reg == "sp":
return self.sp
elif reg == "pc":
return self.pc
else:
return self.regs[registers[reg]];
def random_op(self):
we = randint(0, 1)
reg_write = choice(registers.keys())
reg_read = choice(registers.keys())
write_data = randint(0, 0xffff)
read_data = self.read(reg_read)
if we:
self.write(reg_write, write_data)
return (registers[reg_write], registers[reg_read],
we, write_data, read_data, reg_write, reg_read)
def main():
fp = open("registers.txt", "w")
reg = Registers()
m = 1000000
for i in xrange(m):
if i % 10000 == 0:
f = 100 * float(i) / float(m)
print("%s" % f)
output_line(fp, *reg.random_op())
if __name__ == "__main__":
main()
| 31.121212 | 79 | 0.477118 | 1,935 | 0.628043 | 0 | 0 | 0 | 0 | 0 | 0 | 315 | 0.10224 |
1efa2e6d895702b8d443cbba288ae926b3327dee | 290 | py | Python | DiscordRPC/__init__.py | EterNomm/discord-rpc | 86bdf35a75df9ab8971763042d19f2f820e08a51 | [
"Apache-2.0"
]
| 4 | 2021-12-13T13:26:00.000Z | 2022-02-20T17:11:19.000Z | DiscordRPC/__init__.py | LyQuid12/discord-rpc | 86bdf35a75df9ab8971763042d19f2f820e08a51 | [
"Apache-2.0"
]
| null | null | null | DiscordRPC/__init__.py | LyQuid12/discord-rpc | 86bdf35a75df9ab8971763042d19f2f820e08a51 | [
"Apache-2.0"
]
| null | null | null | from .presence import *
from .button import button
from .exceptions import *
# from .get_current_app import GCAR  (disabled due to a bug)
__title__ = "Discord-RPC"
__version__ = "3.5"
__authors__ = "LyQuid"
__license__ = "Apache License 2.0"
__copyright__ = "Copyright 2021-present LyQuid"
| 26.363636 | 59 | 0.762069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 136 | 0.468966 |
1efa6932e5014bf06e1e937b4bacbf01a0d855e1 | 11,392 | py | Python | brax/training/ars.py | benelot/brax | 6b74009face5a12ae3e47b87cdb1abc45181040e | [
"Apache-2.0"
]
| 1 | 2021-09-27T18:38:49.000Z | 2021-09-27T18:38:49.000Z | brax/training/ars.py | benelot/brax | 6b74009face5a12ae3e47b87cdb1abc45181040e | [
"Apache-2.0"
]
| null | null | null | brax/training/ars.py | benelot/brax | 6b74009face5a12ae3e47b87cdb1abc45181040e | [
"Apache-2.0"
]
| 1 | 2021-09-27T18:38:57.000Z | 2021-09-27T18:38:57.000Z | # Copyright 2021 The Brax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Augmented Random Search training.
See: https://arxiv.org/pdf/1803.07055.pdf
"""
import time
from typing import Any, Callable, Dict, Optional
from absl import logging
from brax import envs
from brax.training import env
from brax.training import networks
from brax.training import normalization
import flax
import jax
import jax.numpy as jnp
import optax
Params = Any
@flax.struct.dataclass
class TrainingState:
"""Contains training state for the learner."""
key: jnp.ndarray
normalizer_params: Params
policy_params: Params
def make_ars_model(act_size: int, obs_size: int):
return networks.FeedForwardModel(
init=lambda _: jnp.zeros((obs_size, act_size)),
apply=lambda m, o: jnp.matmul(o, m))
def get_policy_head(head_type):
def head(params):
if not head_type:
return params
if head_type == 'clip':
return jnp.clip(params, -1, 1)
if head_type == 'tanh':
return jnp.tanh(params)
assert f'policy head type {head_type} is not known'
return head
def train(
environment_fn: Callable[..., envs.Env],
num_timesteps: int = 100,
log_frequency: int = 1,
episode_length: int = 1000,
action_repeat: int = 1,
num_eval_envs: int = 128,
seed: int = 0,
normalize_observations: bool = False,
step_size: float = 0.015,
max_devices_per_host: Optional[int] = None,
number_of_directions: int = 60,
exploration_noise_std: float = 0.025,
top_directions: int = 20,
head_type: str = '',
reward_shift: float = 0.0,
progress_fn: Optional[Callable[[int, Dict[str, Any]], None]] = None,
):
"""ARS."""
# TODO: pmap it
max_devices_per_host = 1
xt = time.time()
top_directions = min(top_directions, number_of_directions)
  num_envs = number_of_directions * 2  # antithetic: each direction sampled with +noise and -noise
epochs = 1 + num_timesteps // episode_length // num_envs
log_frequency = min(log_frequency, epochs)
process_count = jax.process_count()
process_id = jax.process_index()
local_device_count = jax.local_device_count()
local_devices_to_use = local_device_count
if max_devices_per_host:
local_devices_to_use = min(local_devices_to_use, max_devices_per_host)
logging.info(
'Device count: %d, process count: %d (id %d), local device count: %d, '
'devices to be used count: %d',
jax.device_count(), process_count, process_id, local_device_count,
local_devices_to_use)
key = jax.random.PRNGKey(seed)
key, key_model, key_env, key_eval = jax.random.split(key, 4)
core_env = environment_fn(
action_repeat=action_repeat,
batch_size=num_envs // local_devices_to_use // process_count,
episode_length=episode_length)
first_state, step_fn = env.wrap(core_env, key_env)
core_eval_env = environment_fn(
action_repeat=action_repeat,
batch_size=num_eval_envs,
episode_length=episode_length)
eval_first_state, eval_step_fn = env.wrap(core_eval_env, key_eval)
_, obs_size = eval_first_state.core.obs.shape
policy_model = make_ars_model(core_env.action_size, obs_size)
policy_head = get_policy_head(head_type)
normalizer_params, obs_normalizer_update_fn, obs_normalizer_apply_fn = (
normalization.create_observation_normalizer(
obs_size, normalize_observations, num_leading_batch_dims=1,
apply_clipping=False))
policy_params = policy_model.init(key_model)
def do_one_step_eval(carry, unused_target_t):
state, policy_params, normalizer_params = carry
obs = obs_normalizer_apply_fn(normalizer_params, state.core.obs)
actions = policy_head(policy_model.apply(policy_params, obs))
nstate = eval_step_fn(state, actions)
return (nstate, policy_params, normalizer_params), ()
@jax.jit
def run_eval(state, policy_params, normalizer_params) -> env.EnvState:
(state, _, _), _ = jax.lax.scan(
do_one_step_eval, (state, policy_params, normalizer_params), (),
length=episode_length // action_repeat)
return state
@jax.vmap
def training_inference(params, obs):
return policy_model.apply(params, obs)
def do_one_step(carry, unused_target_t):
state, policy_params, cumulative_reward, normalizer_params = carry
obs = obs_normalizer_apply_fn(normalizer_params, state.core.obs)
actions = policy_head(training_inference(policy_params, obs))
nstate = step_fn(state, actions)
cumulative_reward = cumulative_reward + nstate.core.reward - reward_shift
return (nstate, policy_params, cumulative_reward,
normalizer_params), state.core.obs
def run_ars_eval(state, params, normalizer_params):
cumulative_reward = jnp.zeros(state.core.obs.shape[0])
(state, _, cumulative_reward, _), obs = jax.lax.scan(
do_one_step, (state, params, cumulative_reward, normalizer_params),
(), length=episode_length // action_repeat)
return cumulative_reward, obs, state
def add_noise(params, key):
noise = jax.random.normal(key, shape=params.shape, dtype=params.dtype)
params_with_noise = params + noise * exploration_noise_std
    anti_params_with_noise = params - noise * exploration_noise_std
    return params_with_noise, anti_params_with_noise, noise
def ars_one_epoch(carry, unused_t):
state, training_state = carry
params = jnp.repeat(jnp.expand_dims(training_state.policy_params, axis=0),
num_envs // 2, axis=0)
key, key_petr = jax.random.split(training_state.key)
# generate perturbations
params_with_noise, params_with_anti_noise, noise = add_noise(
params, key_petr)
pparams = jnp.concatenate([params_with_noise, params_with_anti_noise],
axis=0)
eval_scores, obs, state = run_ars_eval(
state, pparams, training_state.normalizer_params)
obs = jnp.reshape(obs, [-1] + list(obs.shape[2:]))
normalizer_params = obs_normalizer_update_fn(
training_state.normalizer_params, obs)
reward_plus, reward_minus = jnp.split(eval_scores, 2, axis=0)
reward_max = jnp.maximum(reward_plus, reward_minus)
reward_rank = jnp.argsort(jnp.argsort(-reward_max))
reward_weight = jnp.where(reward_rank < top_directions, 1, 0)
reward_weight_double = jnp.concatenate([reward_weight, reward_weight],
axis=0)
reward_std = jnp.std(eval_scores, where=reward_weight_double)
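    # ARS update: sum the top-performing directions weighted by their reward
    # difference (r+ - r-), then step by step_size / (top_directions * reward_std).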
noise = jnp.sum(jnp.transpose(jnp.transpose(noise) * reward_weight *
(reward_plus - reward_minus)), axis=0)
policy_params = (training_state.policy_params +
step_size / (top_directions * reward_std) * noise)
metrics = {
'params_norm': optax.global_norm(policy_params),
'eval_scores_mean': jnp.mean(eval_scores),
'eval_scores_std': jnp.std(eval_scores),
'reward_std': reward_std,
'weights': jnp.mean(reward_weight),
}
return (state,
TrainingState(
key=key,
normalizer_params=normalizer_params,
policy_params=policy_params)), metrics
epochs_per_step = (epochs + log_frequency - 1) // log_frequency
@jax.jit
def run_ars(state, training_state):
(state, training_state), metrics = jax.lax.scan(
ars_one_epoch, (state, training_state), (), length=epochs_per_step)
return state, training_state, jax.tree_map(jnp.mean, metrics)
training_state = TrainingState(key=key,
normalizer_params=normalizer_params,
policy_params=policy_params)
training_walltime = 0
eval_walltime = 0
sps = 0
eval_sps = 0
metrics = {}
summary = {}
state = first_state
for it in range(log_frequency + 1):
logging.info('starting iteration %s %s', it, time.time() - xt)
t = time.time()
if process_id == 0:
eval_state = run_eval(eval_first_state,
training_state.policy_params,
training_state.normalizer_params)
eval_state.completed_episodes.block_until_ready()
eval_walltime += time.time() - t
eval_sps = (
episode_length * eval_first_state.core.reward.shape[0] /
(time.time() - t))
avg_episode_length = (
eval_state.completed_episodes_steps / eval_state.completed_episodes)
metrics = dict(
dict({
f'eval/episode_{name}': value / eval_state.completed_episodes
for name, value in eval_state.completed_episodes_metrics.items()
}),
**dict({
f'train/{name}': value for name, value in summary.items()
}),
**dict({
'eval/completed_episodes': eval_state.completed_episodes,
'eval/episode_length': avg_episode_length,
'speed/sps': sps,
'speed/eval_sps': eval_sps,
'speed/training_walltime': training_walltime,
'speed/eval_walltime': eval_walltime,
'speed/timestamp': training_walltime,
}))
logging.info('Step %s metrics %s',
int(training_state.normalizer_params[0]) * action_repeat,
metrics)
if progress_fn:
progress_fn(int(training_state.normalizer_params[0]) * action_repeat,
metrics)
if it == log_frequency:
break
t = time.time()
# optimization
state, training_state, summary = run_ars(state, training_state)
jax.tree_map(lambda x: x.block_until_ready(), training_state)
sps = episode_length * num_envs * epochs_per_step / (
time.time() - t)
training_walltime += time.time() - t
_, inference = make_params_and_inference_fn(core_env.observation_size,
core_env.action_size,
normalize_observations,
head_type)
params = training_state.normalizer_params, training_state.policy_params
return (inference, params, metrics)
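# Hedged usage sketch (not part of the original module); assumes a brax
# environment factory such as `envs.create_fn('ant')` is available:
#
#   from brax import envs
#   inference_fn, params, _ = train(environment_fn=envs.create_fn('ant'),
#                                   num_timesteps=1_000_000,
#                                   episode_length=1000, seed=0)
#   action = inference_fn(params, obs, key)  # key is unused by this policy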
def make_params_and_inference_fn(observation_size, action_size,
normalize_observations, head_type=None):
"""Creates params and inference function for the ES agent."""
obs_normalizer_params, obs_normalizer_apply_fn = normalization.make_data_and_apply_fn(
observation_size, normalize_observations, apply_clipping=False)
policy_head = get_policy_head(head_type)
policy_model = make_ars_model(action_size, observation_size)
def inference_fn(params, obs, unused_rng):
normalizer_params, policy_params = params
obs = obs_normalizer_apply_fn(normalizer_params, obs)
action = policy_head(policy_model.apply(policy_params, obs))
return action
params = (obs_normalizer_params, policy_model.init(jax.random.PRNGKey(0)))
return params, inference_fn
| 36.512821 | 88 | 0.684077 | 140 | 0.012289 | 0 | 0 | 751 | 0.065923 | 0 | 0 | 1,280 | 0.11236 |
1efb1e54ed275e79479018453c75c13bf653026c | 330 | py | Python | docs/api/conf.py | kagemeka/selext | 1882e518f8698f6d257549cdb36c79e05e801d39 | [
"MIT"
]
| 1 | 2022-02-15T12:02:02.000Z | 2022-02-15T12:02:02.000Z | docs/api/conf.py | kagemeka/filesystem-python | 565beb128326f5ee41a5bb2b3a751788d4a02e4c | [
"MIT"
]
| 6 | 2022-01-05T09:15:54.000Z | 2022-01-09T05:48:43.000Z | docs/api/conf.py | kagemeka/python-algorithms | dface89b8c618845cf524429aa8e97c4b2b10ceb | [
"MIT"
]
| null | null | null | import os
import sys
def find_docs_root() -> str:
filepath = os.path.abspath(__file__)
path_chunks = filepath.split(os.path.sep)
while path_chunks[-1] != "docs":
path_chunks.pop()
return os.path.sep.join(path_chunks)
sys.path.append(find_docs_root())
from _rtd_conf import *
from _sphinx_conf import *
| 20.625 | 45 | 0.70303 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0.018182 |
1efbf1d335ee13e467149f16bec6b633d71434fe | 1,314 | py | Python | src/graph/cli/server.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
]
| null | null | null | src/graph/cli/server.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
]
| null | null | null | src/graph/cli/server.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
]
| null | null | null | import socket
import click
import uvicorn # type: ignore
def get_address(default: str = "127.0.0.1") -> str:
try:
ip_address = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect(("8.8.8.8", 1))
ip_address = s.getsockname()[0]
except socket.gaierror:
ip_address = default
finally:
s.close()
return ip_address
@click.group()
@click.pass_context
def server(ctx):
pass
@server.command()
@click.option("--host", default=None, help="Specify application host")
@click.option("--port", default=5000, help="Specify application port")
@click.pass_context
def run(ctx, host, port):
try:
port = int(port)
        if port < 1024 or port > 65535:
raise RuntimeError("Port should be from 1024 to 65535")
except ValueError:
raise RuntimeError("Port should be numeric")
if not host:
host = "127.0.0.1"
address = "127.0.0.1"
else:
address = get_address()
uvicorn.run(
"graph:init",
host=address,
port=port,
access_log=False,
log_level="info",
log_config=None,
loop="uvloop",
factory=True,
)
| 22.655172 | 70 | 0.590563 | 0 | 0 | 0 | 0 | 811 | 0.617199 | 0 | 0 | 209 | 0.159056 |
1efec6ae65507f91537c6a7a371e02ca57452f0d | 175 | py | Python | settings/libs.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
]
| null | null | null | settings/libs.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
]
| null | null | null | settings/libs.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
]
| null | null | null | # grappelli
GRAPPELLI_ADMIN_TITLE = 'pangolin - Administration panel'
# rest framework
# REST_FRAMEWORK = {
# 'PAGINATE_BY_PARAM': 'limit',
# 'SEARCH_PARAM': 'q'
# }
| 19.444444 | 57 | 0.68 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 143 | 0.817143 |
1efed2c2a7cb93434e5d67d1db9954f3a5ff1653 | 1,543 | py | Python | kubespawner/clients.py | moskiGithub/spawner_test | 405f088041054080f53b620b68fe040e5e0b091a | [
"BSD-3-Clause"
]
| null | null | null | kubespawner/clients.py | moskiGithub/spawner_test | 405f088041054080f53b620b68fe040e5e0b091a | [
"BSD-3-Clause"
]
| null | null | null | kubespawner/clients.py | moskiGithub/spawner_test | 405f088041054080f53b620b68fe040e5e0b091a | [
"BSD-3-Clause"
]
| null | null | null | """Shared clients for kubernetes
avoids creating multiple kubernetes client objects,
each of which spawns an unused max-size thread pool
"""
from unittest.mock import Mock
import weakref
import kubernetes.client
from kubernetes.client import api_client
# FIXME: remove when instantiating a kubernetes client
# doesn't create N-CPUs threads unconditionally.
# monkeypatch threadpool in kubernetes api_client
# to avoid instantiating ThreadPools.
# This is known to work for kubernetes-4.0
# and may need updating with later kubernetes clients
_dummy_pool = Mock()
api_client.ThreadPool = lambda *args, **kwargs: _dummy_pool
_client_cache = {}
def shared_client(ClientType, *args, **kwargs):
"""Return a single shared kubernetes client instance
A weak reference to the instance is cached,
so that concurrent calls to shared_client
will all return the same instance until
all references to the client are cleared.
"""
kwarg_key = tuple((key, kwargs[key]) for key in sorted(kwargs))
cache_key = (ClientType, args, kwarg_key)
client = None
if cache_key in _client_cache:
# resolve cached weakref
# client can still be None after this!
client = _client_cache[cache_key]()
if client is None:
Client = getattr(kubernetes.client, ClientType)
client = Client(*args, **kwargs)
# cache weakref so that clients can be garbage collected
_client_cache[cache_key] = weakref.ref(client)
return client
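# Hedged usage sketch (not part of the original module); `CoreV1Api` is the
# usual client type name in the kubernetes package:
#
#   v1 = shared_client('CoreV1Api')
#   pods = v1.list_namespaced_pod('default')
#
# Repeated calls with the same arguments return the same instance for as long
# as a strong reference to it is alive.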
| 32.829787 | 68 | 0.711601 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 806 | 0.522359 |
1effabe25e75813c61f0401ae397020afd635812 | 8,956 | py | Python | src/syncgitlab2msproject/gitlab_issues.py | lcv3/SyncGitlab2MSProject | 4a81191b7deb6974e893d44f3b04fcfc1da36571 | [
"MIT"
]
| null | null | null | src/syncgitlab2msproject/gitlab_issues.py | lcv3/SyncGitlab2MSProject | 4a81191b7deb6974e893d44f3b04fcfc1da36571 | [
"MIT"
]
| null | null | null | src/syncgitlab2msproject/gitlab_issues.py | lcv3/SyncGitlab2MSProject | 4a81191b7deb6974e893d44f3b04fcfc1da36571 | [
"MIT"
]
| null | null | null | import dateutil.parser
from datetime import datetime
from functools import lru_cache
from gitlab import Gitlab
from gitlab.v4.objects import Project
from logging import getLogger
from typing import Dict, List, Optional, Union
from .custom_types import GitlabIssue, GitlabUserDict
from .exceptions import MovedIssueNotDefined
from .funcions import warn_once
logger = getLogger(f"{__package__}.{__name__}")
def get_user_identifier(user_dict: GitlabUserDict) -> str:
"""
Return the user identifier
keep as separate function to allow easier changes later if required
"""
return str(user_dict["name"])
class Issue:
"""
Wrapper class around Group/Project Issues
"""
# The issue object itself is not dynamic only the contained obj is!
__slots__ = [
"obj",
"_moved_reference",
"_fixed_group_id",
]
def __init__(self, obj: GitlabIssue, fixed_group_id: Optional[int] = None):
"""
:param obj:
:param fixed_group_id: Do not extract the group_id from the
Gitlab issue but assume it is fixed
"""
self._fixed_group_id = fixed_group_id
self.obj: GitlabIssue = obj
self._moved_reference: Optional[Issue] = None
def __getattr__(self, item: str):
"""Default to get the values from the original objext"""
return getattr(self.obj, item)
@property
def moved_reference(self) -> Optional["Issue"]:
"""
get the reference to the moved issue if defined
:exceptions MovedIssueNotDefined
"""
if self.moved_to_id is None:
return None
else:
if self._moved_reference is None:
raise MovedIssueNotDefined(
"The issue is marked as moved but was not referenced "
"in the loaded issues, so tracking is not possible."
)
else:
return self._moved_reference
@moved_reference.setter
def moved_reference(self, value: "Issue"):
if not isinstance(value, Issue):
raise ValueError("Can only set an Issue object as moved reference!")
self._moved_reference = value
def __str__(self):
return f"'{self.title}' (ID: {self.id})"
# **************************************************************
# *** Define some default properties to allow static typing ***
# **************************************************************
@property
def id(self) -> int:
"""
The id of an issue - it seems to be unique within an installation
"""
return self.obj.id
@property
def iid(self) -> int:
return self.obj.iid
@property
def project_id(self) -> int:
return self.obj.project_id
@property
def group_id(self) -> Optional[int]:
"""
        Return the group id; a negative value denotes a user id instead.

        The group ID is either taken from the issue itself or, if a project
        is given, assumed to be fixed (see issue #7).
not fail, as it isn't required to have the sync working. Only the
issue id or weblink is used to find the related issue.
See `sync.py`
* `get_issue_ref_from_task`
* `IssueFinder`
"""
if self._fixed_group_id is not None:
return self._fixed_group_id
try:
return self.obj.group_id
except AttributeError:
warn_once(
logger,
"Could not extract group_id from Issue. "
"This is not required for syncing, so I will continue.",
)
return None
@property
def has_tasks(self) -> bool:
return self.obj.has_tasks
@property
def is_closed(self) -> bool:
return str(self.obj.state).lower().strip().startswith("closed")
@property
def is_open(self):
return not self.is_closed
@property
def percentage_tasks_done(self) -> int:
"""
Percentage of tasks done, 0 if no tasks are defined and not closed.
By definition always 100 if issue is closed (and not moved)
:exceptions MovedIssueNotDefined
"""
if self.is_closed:
if self.moved_to_id is not None:
                # Needed for static type checkers: a set moved_to_id implies
                # the moved reference has been resolved.
assert self._moved_reference is not None
return self._moved_reference.percentage_tasks_done
return 100
if not self.has_tasks:
return 0
task = self.task_completion_status
return round(task["completed_count"] / task["count"] * 100)
@property
def moved_to_id(self) -> Optional[int]:
return self.obj.moved_to_id
@property
def title(self) -> str:
return self.obj.title
@property
def description(self) -> str:
return self.obj.description
@property
def closed_at(self) -> Optional[datetime]:
if (val := self.obj.closed_at) is not None:
return dateutil.parser.parse(val)
return None
@property
def created_at(self) -> Optional[datetime]:
if (val := self.obj.created_at) is not None:
return dateutil.parser.parse(val)
return None
@property
def due_date(self) -> Optional[datetime]:
if (val := self.obj.due_date) is not None:
return dateutil.parser.parse(val)
return None
@property
def closed_by(self) -> Optional[str]:
if (val := self.obj.closed_by) is not None:
return get_user_identifier(val)
return None
def _get_from_time_stats(self, key) -> Optional[float]:
"""
Somehow the python-gitlab API seems to be not 100% fixed,
see issue #9
:param key: key to query from time stats
:return: the value if existing or none
"""
query_dict: Dict[str, float]
if callable(self.obj.time_stats):
query_dict = self.obj.time_stats()
else:
query_dict = self.obj.time_stats
return query_dict.get(key, None)
@property
def time_estimated(self) -> Optional[float]:
"""
Time estimated in minutes
"""
if (time_estimate := self._get_from_time_stats("time_estimate")) is not None:
return time_estimate / 60
else:
logger.warning("Time Estimate is None")
return None
@property
def time_spent_total(self) -> Optional[float]:
"""
Total time spent in minutes
"""
if (time_spend := self._get_from_time_stats("total_time_spent")) is not None:
return time_spend / 60
else:
logger.warning("Time spend is None")
return None
@property
def assignees(self) -> List[str]:
"""
list of Gitlab Assignees.
Note in the community edition only one assignee is possible
"""
return [get_user_identifier(user) for user in self.obj.assignees]
@property
def labels(self) -> List[str]:
"""
list of labels
"""
return self.obj.labels
@property
def full_ref(self) -> str:
"""
give the full reference through which the issue can be accessed
"""
return self.obj.attributes['references']['full']
@property
def web_url(self) -> str:
"""
give the url from which the issue can be accessed
"""
return self.obj.web_url
@lru_cache(10)
def get_group_id_from_gitlab_project(project: Project) -> Optional[int]:
"""
    Get the group id from a gitlab project.
    If the namespace of the project is a user, a negative
    value is returned
:param project:
"""
try:
namespace: Dict[str, Union[int, str]] = project.namespace
except AttributeError:
logger.warning(
f"Could not extract name space for project '{project.get_id()}' - "
"This error will be ignored."
)
return None
if str(namespace["kind"]).lower() == "user":
return -int(namespace["id"])
else:
return int(namespace["id"])
def get_gitlab_class(server: str, personal_token: Optional[str] = None) -> Gitlab:
if personal_token is None:
return Gitlab(server, ssl_verify=False)
else:
return Gitlab(server, private_token=personal_token, ssl_verify=False)
def get_group_issues(gitlab: Gitlab, group_id: int) -> List[Issue]:
group = gitlab.groups.get(group_id, lazy=True)
return [Issue(issue) for issue in group.issues.list(all=True)]
def get_project_issues(gitlab: Gitlab, project_id: int) -> List[Issue]:
project = gitlab.projects.get(project_id)
return [
Issue(issue, fixed_group_id=get_group_id_from_gitlab_project(project))
for issue in project.issues.list(all=True)
]
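# Hedged usage sketch (not part of the original module); server URL, token and
# project id below are placeholders:
#
#   gl = get_gitlab_class('https://gitlab.example.com', personal_token='<token>')
#   for issue in get_project_issues(gl, 123):
#       print(issue, issue.percentage_tasks_done)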
| 30.154882 | 85 | 0.600715 | 6,971 | 0.778361 | 0 | 0 | 5,927 | 0.661791 | 0 | 0 | 2,951 | 0.3295 |
48026c0db5235d96fe4f5a2a24dc36b8317b3710 | 964 | py | Python | pytest/track_test.py | Sergej91/TheiaSfM | e603e16888456c3e565a2c197fa9f8643c176175 | [
"BSD-3-Clause"
]
| null | null | null | pytest/track_test.py | Sergej91/TheiaSfM | e603e16888456c3e565a2c197fa9f8643c176175 | [
"BSD-3-Clause"
]
| null | null | null | pytest/track_test.py | Sergej91/TheiaSfM | e603e16888456c3e565a2c197fa9f8643c176175 | [
"BSD-3-Clause"
]
| null | null | null | import pytheia as pt
import os
import numpy as np
def test_track_set_descriptor_read_write():
recon = pt.sfm.Reconstruction()
view_id1 = recon.AddView("0",0.0)
m_view1 = recon.MutableView(view_id1)
m_view1.IsEstimated = True
view_id2 = recon.AddView("1",1.0)
m_view2 = recon.MutableView(view_id2)
m_view2.IsEstimated = True
t_id = recon.AddTrack()
m_track = recon.MutableTrack(t_id)
m_track.AddView(view_id1)
m_track.AddView(view_id2)
m_track.IsEstimated = True
desc = np.asarray([100,200,300,400])
m_track.SetReferenceDescriptor(desc)
assert (m_track.ReferenceDescriptor() == desc).all()
# read write
pt.io.WriteReconstruction(recon,"test")
recon_loaded = pt.io.ReadReconstruction("test")[1]
s_track = recon_loaded.Track(t_id)
assert (s_track.ReferenceDescriptor() == desc).all()
os.remove("test")
if __name__ == "__main__":
test_track_set_descriptor_read_write() | 27.542857 | 56 | 0.701245 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 46 | 0.047718 |
4805394e98503f43fbc6141c4232f0ba1a824264 | 1,732 | py | Python | jayk/util.py | alekratz/jayk | 87dc1aa4fd7be9ee1757ddee066dffb1bd7df09b | [
"ISC"
]
| 1 | 2018-04-06T23:06:21.000Z | 2018-04-06T23:06:21.000Z | jayk/util.py | alekratz/jayk | 87dc1aa4fd7be9ee1757ddee066dffb1bd7df09b | [
"ISC"
]
| 5 | 2017-10-12T12:13:11.000Z | 2018-05-15T22:32:04.000Z | jayk/util.py | alekratz/jayk | 87dc1aa4fd7be9ee1757ddee066dffb1bd7df09b | [
"ISC"
]
| 5 | 2017-10-10T21:59:18.000Z | 2019-06-28T13:28:10.000Z | """Common utilities used through this codebase."""
import logging
import logging.config
class LogMixin:
"""
A logging mixin class, which provides methods for writing log messages.
"""
def __init__(self, logger_name: str):
"""
Creates the logger with the specified name.
:param logger_name: the name for this logger. When in doubt, use MyType.__name__.
"""
self.__logger = logging.getLogger(logger_name)
def critical(self, message, *args, **kwargs):
"""
Passes a critical logging message on to the internal logger.
"""
self.__logger.critical(message, *args, **kwargs)
def error(self, message, *args, **kwargs):
"""
Passes an error logging message on to the internal logger.
"""
self.__logger.error(message, *args, **kwargs)
def warning(self, message, *args, **kwargs):
"""
Passes an warning logging message on to the internal logger.
"""
self.__logger.warning(message, *args, **kwargs)
def info(self, message, *args, **kwargs):
"""
Passes an info logging message on to the internal logger.
"""
self.__logger.info(message, *args, **kwargs)
def debug(self, message, *args, **kwargs):
"""
Passes a debug logging message on to the internal logger.
"""
self.__logger.debug(message, *args, **kwargs)
def exception(self, message, *args, **kwargs):
"""
Passes an exception logging message on to the internal logger. This should only be called
when in the "except" clause of an exception handler.
"""
self.__logger.exception(message, *args, **kwargs)
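# Hedged usage sketch (not part of the original module).
class _ExampleBot(LogMixin):
    """Minimal subclass showing how the mixin is used."""

    def __init__(self):
        super().__init__(_ExampleBot.__name__)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    _ExampleBot().info("LogMixin demo message")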
| 30.928571 | 97 | 0.612587 | 1,639 | 0.946305 | 0 | 0 | 0 | 0 | 0 | 0 | 881 | 0.508661 |
48065a4ff80756ba525a0a9808129ee5012b319d | 224 | py | Python | experimentation/trap/statistics_calculator.py | GruppoPBDMNG-10/AIExam | 22fd4dad20bcff689deeae227f179267b92a60d8 | [
"MIT"
]
| null | null | null | experimentation/trap/statistics_calculator.py | GruppoPBDMNG-10/AIExam | 22fd4dad20bcff689deeae227f179267b92a60d8 | [
"MIT"
]
| 5 | 2018-09-24T13:13:19.000Z | 2018-09-24T18:34:22.000Z | experimentation/trap/statistics_calculator.py | GruppoPBDMNG-10/AIExam | 22fd4dad20bcff689deeae227f179267b92a60d8 | [
"MIT"
]
| null | null | null | import experimentation.statistics.statistics as statistics
intersection = statistics.find_matches_from_file('result/experimentation/hmm/anomalous.json', 'result/experimentation/rnn/anomalous.json')
print(len(intersection)) | 44.8 | 138 | 0.852679 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 86 | 0.383929 |
4807121b7fee14846de82fc2e6158a386836a6aa | 13,056 | py | Python | objects/CSCG/_3d/exact_solutions/status/incompressible_Navier_Stokes/Sin_Cos.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
]
| 1 | 2020-10-14T12:48:35.000Z | 2020-10-14T12:48:35.000Z | objects/CSCG/_3d/exact_solutions/status/incompressible_Navier_Stokes/Sin_Cos.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
]
| null | null | null | objects/CSCG/_3d/exact_solutions/status/incompressible_Navier_Stokes/Sin_Cos.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""
@author: Yi Zhang.
Department of Aerodynamics
Faculty of Aerospace Engineering
TU Delft, Delft, Netherlands
"""
from numpy import sin, cos, pi
from objects.CSCG._3d.exact_solutions.status.incompressible_Navier_Stokes.base import incompressible_NavierStokes_Base
from objects.CSCG._3d.fields.vector.main import _3dCSCG_VectorField
# noinspection PyAbstractClass
class SinCosRebholz_Conservation(incompressible_NavierStokes_Base):
"""
    The sin cos test case for conservation; see Section 5.2 of the paper:
[An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
equations, Leo G. Rebholz, 2007]
"""
def __init__(self, es):
super(SinCosRebholz_Conservation, self).__init__(es, 0)
@property
def valid_time(self):
return 'valid_only_at_its_first_instant'
def u(self, t, x, y, z): return cos(2 * pi * z)
def u_x(self, t, x, y, z): return 0 * x
def u_y(self, t, x, y, z): return 0 * x
def u_z(self, t, x, y, z): return -2 * pi * sin(2 * pi * z)
def v(self, t, x, y, z): return sin(2 * pi * z)
def v_x(self, t, x, y, z): return 0 * x
def v_y(self, t, x, y, z): return 0 * x
def v_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * z)
def w(self, t, x, y, z): return sin(2 * pi * x)
def w_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * x)
def w_y(self, t, x, y, z): return 0 * x
def w_z(self, t, x, y, z): return 0 * x
def fx(self, t, x, y, z): return 0 * x # can not name it by _fx_
def fy(self, t, x, y, z): return 0 * x # can not name it by _fy_
def fz(self, t, x, y, z): return 0 * x # can not name it by _fz_
@property
def body_force(self):
"""This makes body force valid at all time instants."""
if self._bodyForce_ is None:
self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
return self._bodyForce_
class SinCosRebholz_Dissipation(incompressible_NavierStokes_Base):
"""
The sin cos test case for the conservation, see Section 5.3 of paper:
[An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
equations, Leo G. Rebholz, 2007]
"""
def __init__(self, es, nu=1):
super(SinCosRebholz_Dissipation, self).__init__(es, nu)
def u(self, t, x, y, z): return (2 - t) * cos(2 * pi * z)
def u_x(self, t, x, y, z): return 0 * x
def u_y(self, t, x, y, z): return 0 * x
def u_z(self, t, x, y, z): return - 2 * pi * (2 - t) * sin(2 * pi * z)
def u_t(self, t, x, y, z): return - cos(2 * pi * z)
def u_xx(self, t, x, y, z): return 0 * x
def u_yy(self, t, x, y, z): return 0 * y
def u_zz(self, t, x, y, z): return -4 * pi ** 2 * (2 - t) * cos(2 * pi * z)
def v(self, t, x, y, z): return (1 + t) * sin(2 * pi * z)
def v_x(self, t, x, y, z): return 0 * x
def v_y(self, t, x, y, z): return 0 * x
def v_z(self, t, x, y, z): return 2 * pi * (1 + t) * cos(2 * pi * z)
def v_t(self, t, x, y, z): return sin(2 * pi * z)
def v_xx(self, t, x, y, z): return 0 * x
def v_yy(self, t, x, y, z): return 0 * x
def v_zz(self, t, x, y, z): return - 4 * pi ** 2 * (1 + t) * sin(2 * pi * z)
def w(self, t, x, y, z): return (1 - t) * sin(2 * pi * x)
def w_x(self, t, x, y, z): return 2 * pi * (1 - t) * cos(2 * pi * x)
def w_y(self, t, x, y, z): return 0 * x
def w_z(self, t, x, y, z): return 0 * x
def w_t(self, t, x, y, z): return - sin(2 * pi * x)
def w_xx(self, t, x, y, z): return - 4 * pi ** 2 * (1 - t) * sin(2 * pi * x)
def w_yy(self, t, x, y, z): return 0 * x
def w_zz(self, t, x, y, z): return 0 * x
def p(self, t, x, y, z): return sin(2 * pi * (x + y + z + t))
def p_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
def p_y(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
def p_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
class SinCos_Modified_Dissipation(incompressible_NavierStokes_Base):
"""A modified case that the solution along t is not linear."""
def __init__(self, es, nu=1):
super(SinCos_Modified_Dissipation, self).__init__(es, nu)
def u(self, t, x, y, z): return (1 - sin(2*pi*t)) * cos(2 * pi * z)
def u_x(self, t, x, y, z): return 0 * x
def u_y(self, t, x, y, z): return 0 * x
def u_z(self, t, x, y, z): return - 2 * pi * (1 - sin(2*pi*t)) * sin(2 * pi * z)
def u_t(self, t, x, y, z): return - 2*pi*cos(2*pi*t) * cos(2 * pi * z)
def u_xx(self, t, x, y, z): return 0 * x
def u_yy(self, t, x, y, z): return 0 * y
def u_zz(self, t, x, y, z): return -4 * pi ** 2 * (1 - sin(2*pi*t)) * cos(2 * pi * z)
def v(self, t, x, y, z): return (1 + cos(2*pi*t)) * sin(2 * pi * z)
def v_x(self, t, x, y, z): return 0 * x
def v_y(self, t, x, y, z): return 0 * x
def v_z(self, t, x, y, z): return 2 * pi * (1 + cos(2*pi*t)) * cos(2 * pi * z)
def v_t(self, t, x, y, z): return -2*pi*sin(2*pi*t) * sin(2 * pi * z)
def v_xx(self, t, x, y, z): return 0 * x
def v_yy(self, t, x, y, z): return 0 * x
def v_zz(self, t, x, y, z): return - 4 * pi ** 2 * (1 + cos(2*pi*t)) * sin(2 * pi * z)
def w(self, t, x, y, z): return (1 - sin(2*pi*t)) * sin(2 * pi * x)
def w_x(self, t, x, y, z): return 2 * pi * (1 - sin(2*pi*t)) * cos(2 * pi * x)
def w_y(self, t, x, y, z): return 0 * x
def w_z(self, t, x, y, z): return 0 * x
def w_t(self, t, x, y, z): return - 2*pi*cos(2*pi*t) * sin(2 * pi * x)
def w_xx(self, t, x, y, z): return - 4 * pi ** 2 * (1 - sin(2*pi*t)) * sin(2 * pi * x)
def w_yy(self, t, x, y, z): return 0 * x
def w_zz(self, t, x, y, z): return 0 * x
def p(self, t, x, y, z): return sin(2 * pi * (x + y + z + t))
def p_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
def p_y(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
def p_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
# noinspection PyAbstractClass
class SinCos_Conservation_Conservative_Body_Force(incompressible_NavierStokes_Base):
"""
The sin cos test case for the conservation, see Section 5.2 of paper:
[An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
equations, Leo G. Rebholz, 2007]
"""
def __init__(self, es):
super(SinCos_Conservation_Conservative_Body_Force, self).__init__(es, 0)
@property
def valid_time(self):
return 'valid_only_at_its_first_instant'
def u(self, t, x, y, z): return cos(2 * pi * z)
def u_x(self, t, x, y, z): return 0 * x
def u_y(self, t, x, y, z): return 0 * x
def u_z(self, t, x, y, z): return -2 * pi * sin(2 * pi * z)
def v(self, t, x, y, z): return sin(2 * pi * z)
def v_x(self, t, x, y, z): return 0 * x
def v_y(self, t, x, y, z): return 0 * x
def v_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * z)
def w(self, t, x, y, z): return sin(2 * pi * x)
def w_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * x)
def w_y(self, t, x, y, z): return 0 * x
def w_z(self, t, x, y, z): return 0 * x
# varphi(t,x,y,z) = t * sin(2 * pi * x) * sin(2 * pi * y) * sin(2 * pi * z)
def fx(self, t, x, y, z): return 2 * pi * t * cos(2 * pi * x) * sin(2 * pi * y) * sin(2 * pi * z)
def fy(self, t, x, y, z): return 2 * pi * t * sin(2 * pi * x) * cos(2 * pi * y) * sin(2 * pi * z)
def fz(self, t, x, y, z): return 2 * pi * t * sin(2 * pi * x) * sin(2 * pi * y) * cos(2 * pi * z)
@property
def body_force(self):
"""This makes body force valid at all time instants."""
if self._bodyForce_ is None:
self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
return self._bodyForce_
# noinspection PyAbstractClass
class SinCos_Conservation_Conservative_Body_Force1(incompressible_NavierStokes_Base):
"""
The sin cos test case for the conservation, see Section 5.2 of paper:
[An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
equations, Leo G. Rebholz, 2007]
"""
def __init__(self, es):
super(SinCos_Conservation_Conservative_Body_Force1, self).__init__(es, 0)
@property
def valid_time(self):
return 'valid_only_at_its_first_instant'
def u(self, t, x, y, z): return cos(2 * pi * z)
def u_x(self, t, x, y, z): return 0 * x
def u_y(self, t, x, y, z): return 0 * x
def u_z(self, t, x, y, z): return -2 * pi * sin(2 * pi * z)
def v(self, t, x, y, z): return sin(2 * pi * z)
def v_x(self, t, x, y, z): return 0 * x
def v_y(self, t, x, y, z): return 0 * x
def v_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * z)
def w(self, t, x, y, z): return sin(2 * pi * x)
def w_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * x)
def w_y(self, t, x, y, z): return 0 * x
def w_z(self, t, x, y, z): return 0 * x
# varphi(t,x,y,z) = sin(2 * pi * x) * sin(2 * pi * y) * sin(2 * pi * z)
def fx(self, t, x, y, z): return 2 * pi * cos(2 * pi * x) * sin(2 * pi * y) * sin(2 * pi * z)
def fy(self, t, x, y, z): return 2 * pi * sin(2 * pi * x) * cos(2 * pi * y) * sin(2 * pi * z)
def fz(self, t, x, y, z): return 2 * pi * sin(2 * pi * x) * sin(2 * pi * y) * cos(2 * pi * z)
@property
def body_force(self):
"""This makes body force valid at all time instants."""
if self._bodyForce_ is None:
self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
return self._bodyForce_
# noinspection PyAbstractClass
class SinCos_Conservation_Conservative_Body_Force_POLYNOMIALS(incompressible_NavierStokes_Base):
"""
The sin cos test case for the conservation, see Section 5.2 of paper:
[An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
equations, Leo G. Rebholz, 2007]
"""
def __init__(self, es):
super(SinCos_Conservation_Conservative_Body_Force_POLYNOMIALS, self).__init__(es, 0)
@property
def valid_time(self):
return 'valid_only_at_its_first_instant'
def u(self, t, x, y, z): return cos(2 * pi * z)
def u_x(self, t, x, y, z): return 0 * x
def u_y(self, t, x, y, z): return 0 * x
def u_z(self, t, x, y, z): return -2 * pi * sin(2 * pi * z)
def v(self, t, x, y, z): return sin(2 * pi * z)
def v_x(self, t, x, y, z): return 0 * x
def v_y(self, t, x, y, z): return 0 * x
def v_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * z)
def w(self, t, x, y, z): return sin(2 * pi * x)
def w_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * x)
def w_y(self, t, x, y, z): return 0 * x
def w_z(self, t, x, y, z): return 0 * x
# phi(t,x,y,z) = t * (x**3/3 - x**2/2 + y**3/3 - y**2/2 + z**3/3 - z**2/2)
def fx(self, t, x, y, z): return t * x * (x-1)
def fy(self, t, x, y, z): return t * y * (y-1)
def fz(self, t, x, y, z): return t * z * (z-1)
@property
def body_force(self):
"""This makes body force valid at all time instants."""
if self._bodyForce_ is None:
self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
return self._bodyForce_
# noinspection PyAbstractClass
class SinCos_Conservation_Conservative_Body_Force_CONSTANT(incompressible_NavierStokes_Base):
"""
The sin cos test case for the conservation, see Section 5.2 of paper:
[An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
equations, Leo G. Rebholz, 2007]
"""
def __init__(self, es):
super(SinCos_Conservation_Conservative_Body_Force_CONSTANT, self).__init__(es, 0)
@property
def valid_time(self):
return 'valid_only_at_its_first_instant'
def u(self, t, x, y, z): return cos(2 * pi * z)
def u_x(self, t, x, y, z): return 0 * x
def u_y(self, t, x, y, z): return 0 * x
def u_z(self, t, x, y, z): return -2 * pi * sin(2 * pi * z)
def v(self, t, x, y, z): return sin(2 * pi * z)
def v_x(self, t, x, y, z): return 0 * x
def v_y(self, t, x, y, z): return 0 * x
def v_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * z)
def w(self, t, x, y, z): return sin(2 * pi * x)
def w_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * x)
def w_y(self, t, x, y, z): return 0 * x
def w_z(self, t, x, y, z): return 0 * x
# phi(t,x,y,z) = x
def fx(self, t, x, y, z): return 1 + 0 * x * y * z
def fy(self, t, x, y, z): return 0 + 0 * x * y * z
def fz(self, t, x, y, z): return 0 + 0 * x * y * z
@property
def body_force(self):
"""This makes body force valid at all time instants."""
if self._bodyForce_ is None:
self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
return self._bodyForce_
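# A minimal numerical sanity check (sketch, not part of the original module):
# verify the closed-form derivative u_z = -2*pi*sin(2*pi*z) of u = cos(2*pi*z)
# used throughout the classes above, via central finite differences.
if __name__ == '__main__':
    import numpy as np
    z = np.linspace(0, 1, 101)
    h = 1e-6
    fd = (cos(2 * pi * (z + h)) - cos(2 * pi * (z - h))) / (2 * h)
    assert np.allclose(fd, -2 * pi * sin(2 * pi * z), atol=1e-4)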
| 30.72 | 118 | 0.555913 | 12,487 | 0.956419 | 0 | 0 | 1,715 | 0.131357 | 0 | 0 | 2,422 | 0.185509 |
4807de81e5cd93efaec1325cded4f4d3e15bd5c9 | 93 | py | Python | aaem_summaries/components/transmission/__init__.py | gina-alaska/alaska_affordable_energy_model | 96fed0137152985ce280ea37e0affec131e3087f | [
"MIT-feh"
]
| 1 | 2022-01-23T07:18:36.000Z | 2022-01-23T07:18:36.000Z | aaem_summaries/components/transmission/__init__.py | gina-alaska/alaska_affordable_energy_model | 96fed0137152985ce280ea37e0affec131e3087f | [
"MIT-feh"
]
| 5 | 2017-07-14T21:56:46.000Z | 2017-07-14T21:59:15.000Z | aaem_summaries/components/transmission/__init__.py | gina-alaska/alaska_affordable_energy_model | 96fed0137152985ce280ea37e0affec131e3087f | [
"MIT-feh"
]
| 2 | 2020-04-28T18:12:55.000Z | 2021-01-13T01:56:57.000Z | """
__init__.py
summary for Transmission Line in a community
"""
from summary import *
| 11.625 | 32 | 0.698925 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 70 | 0.752688 |
4808f358b59d11b04181bc6422be2976a2eb690f | 2,344 | py | Python | setup.py | doconce/preprocess | c09c61ca7571699c6ed8d93eaf4fac13ee614409 | [
"MIT"
]
| 5 | 2020-05-23T14:56:13.000Z | 2021-07-30T15:26:27.000Z | setup.py | doconce/preprocess | c09c61ca7571699c6ed8d93eaf4fac13ee614409 | [
"MIT"
]
| 2 | 2018-02-19T10:12:50.000Z | 2020-11-15T15:27:17.000Z | setup.py | doconce/preprocess | c09c61ca7571699c6ed8d93eaf4fac13ee614409 | [
"MIT"
]
| 2 | 2018-06-10T11:51:52.000Z | 2020-09-02T20:41:46.000Z | #!/usr/bin/env python
# Copyright (c) 2002-2005 ActiveState Software Ltd.
"""preprocess: a multi-language preprocessor
There are millions of templating systems out there (most of them
developed for the web). This isn't one of those, though it does share
some basics: a markup syntax for templates that are processed to give
resultant text output. The main difference with `preprocess.py` is
that its syntax is hidden in comments (whatever the syntax for comments
maybe in the target filetype) so that the file can still have valid
syntax. A comparison with the C preprocessor is more apt.
`preprocess.py` is targetted at build systems that deal with many
types of files. Languages for which it works include: C++, Python,
Perl, Tcl, XML, JavaScript, CSS, IDL, TeX, Fortran, PHP, Java, Shell
scripts (Bash, CSH, etc.) and C#. Preprocess is usable both as a
command line app and as a Python module.
"""
import os
import sys
import distutils
import re
from setuptools import setup
version = '.'.join(re.findall('__version_info__ = \((\d+), (\d+), (\d+)\)',
open('lib/preprocess.py', 'r').read())[0])
classifiers = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python
Operating System :: OS Independent
Topic :: Software Development :: Libraries :: Python Modules
Topic :: Text Processing :: Filters
"""
if sys.version_info < (2, 3):
# Distutils before Python 2.3 doesn't accept classifiers.
_setup = setup
def setup(**kwargs):
if kwargs.has_key("classifiers"):
del kwargs["classifiers"]
_setup(**kwargs)
doclines = __doc__.split("\n")
setup(
name="preprocess",
version=version,
author="Trent Mick",
author_email="[email protected]",
maintainer="Kristian Gregorius Hustad",
maintainer_email="[email protected]",
url="http://github.com/doconce/preprocess/",
license="http://www.opensource.org/licenses/mit-license.php",
platforms=["any"],
py_modules=["preprocess"],
package_dir={"": "lib"},
entry_points={'console_scripts': ['preprocess = preprocess:main']},
install_requires=['future'],
description=doclines[0],
classifiers=filter(None, classifiers.split("\n")),
long_description="\n".join(doclines[2:]),
)
| 34.470588 | 75 | 0.700939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,609 | 0.686433 |
48098c0ad42aef816d1cc33d9f2b4cf7db1cf4ab | 1,504 | py | Python | bubblebbs/config.py | kawa-kokosowa/bubblebbs | 2b70fd66c27f717ed009cbe5efc0d0d5433f3423 | [
"MIT"
]
| 7 | 2019-01-03T01:21:32.000Z | 2020-09-03T01:52:09.000Z | bubblebbs/config.py | lily-mayfield/bubblebbs | 2b70fd66c27f717ed009cbe5efc0d0d5433f3423 | [
"MIT"
]
| 109 | 2018-05-04T10:32:23.000Z | 2018-06-24T03:35:01.000Z | bubblebbs/config.py | kawa-kokosowa/bubblebbs | 2b70fd66c27f717ed009cbe5efc0d0d5433f3423 | [
"MIT"
]
| 3 | 2018-05-14T15:10:03.000Z | 2018-05-19T01:13:03.000Z | import os
BEHIND_REVERSE_PROXY = bool(os.environ.get('BBBS_BEHIND_REVERSE_PROXY', False))
POSTS_PER_PAGE = 25
TEMPLATES_AUTO_RELOAD = True
RECAPTCHA_ENABLED = os.environ.get('BBBS_RECAPTCHA_ENABLED', False)
RECAPTCHA_SITE_KEY = os.environ.get('BBBS_RECAPTCHA_SITE_KEY', 'CHANGEGME')
RECAPTCHA_SECRET_KEY = os.environ.get('BBS_RECAPTCHA_SECRET_KEY', 'CHANGEME')
SECRET_KEY = os.environ.get('BBBS_SECRET_KEY', 'PLEASE CHANGE ME')
SECRET_SALT = os.environ.get('BBBS_SECRET_SALT', 'CHANGEME')
SQLALCHEMY_DATABASE_URI = os.environ.get('BBBS_DB_STRING', 'sqlite:///test.db')
SITE_TAGLINE = os.environ.get('BBBS_SITE_TAGLINE', 'some tagline')
SITE_TITLE = os.environ.get('BBBS_SITE_TAGLINE', 'super title')
SITE_FOOTER = os.environ.get(
'BBBS_SITE_FOOTER',
'<a href="https://github.com/kawa-kokosowa/bubblebbs">Powered by BubbleBBS</a>',
)
RATELIMIT_STORAGE_URL = os.environ.get('BBBS_RATELIMIT_STORAGE_URL', 'redis://localhost:6379/1')
RATELIMIT_DEFAULT = "400 per day, 100 per hour"
RATELIMIT_ENABLED = True
RATELIMIT_LIST_THREADS = "20 per minute, 1 per second"
RATELIMIT_VIEW_SPECIFIC_POST = "20 per minute, 1 per second"
RATELIMIT_NEW_REPLY = "20 per hour, 1 per second, 2 per minute"
RATELIMIT_VIEW_TRIP_META = "50 per hour, 15 per minute"
RATELIMIT_EDIT_TRIP_META = "60 per hour, 1 per second, 4 per minute"
RATELIMIT_MANAGE_COOKIE = '60 per hour, 1 per second, 7 per minute'
RATELIMIT_CREATE_THREAD = '700 per hour, 100 per minute'
RATELIMIT_NEW_THREAD_FORM = '60 per hour, 1 per second'
| 42.971429 | 96 | 0.776596 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 730 | 0.485372 |
480a52a59f5e6ca79a9056130cb2d9abb336a9ed | 11,497 | py | Python | sim_user/mailLib.py | silicom-hub/IS_simulator | 4d134a8051c3604a94c2552503ff24015a3e86ee | [
"MIT"
]
| 4 | 2021-11-24T10:58:51.000Z | 2022-03-11T15:13:22.000Z | sim_user/mailLib.py | silicom-hub/IS_simulator | 4d134a8051c3604a94c2552503ff24015a3e86ee | [
"MIT"
]
| 1 | 2021-11-24T09:16:08.000Z | 2021-11-30T16:19:41.000Z | sim_user/mailLib.py | silicom-hub/IS_simulator | 4d134a8051c3604a94c2552503ff24015a3e86ee | [
"MIT"
]
| 1 | 2021-11-24T11:10:38.000Z | 2021-11-24T11:10:38.000Z | import os
import wget
import time
import glob
import getpass
import tarfile
import subprocess
import email.mime.multipart
import email.mime.text
import email.mime.image
import email.mime.audio
from datetime import datetime
from pprint import pprint
from colorama import Style, Fore
from smtplib import SMTP, SMTP_SSL
from imaplib import IMAP4_SSL, IMAP4
def smtp_connect(smtp_server, verbose=True):
""" Conection to smtp server.
smtp_server_ip (str): This value is the smtp server's ip.
verbose (boolean): Print information about function progress.
Returns:
None
"""
try:
smtp = SMTP_SSL(host=smtp_server)
smtp.ehlo()
if verbose:
print(Fore.GREEN+ " ==> [smtp_connect] with SSL" +Style.RESET_ALL)
return smtp
except:
try:
smtp = SMTP(host=smtp_server)
smtp.ehlo()
if verbose:
print(Fore.GREEN+ " ==> [smtp_connect] without SSL" +Style.RESET_ALL)
return smtp
except:
print(Fore.RED+ " ==> [smtp_connect] failed!" +Style.RESET_ALL)
return 1
def imap_connect(imap_server, username, password, verbose=True):
""" Connection to imp server.
imap_server_ip (str): This value is the imap server's ip.
verbose (boolean): Print information about function progress.
Returns:
None
"""
try:
imap = IMAP4_SSL(imap_server)
imap.login(username, password)
if verbose:
print(Fore.GREEN+ " ==> [imap_connect] with SSL" +Style.RESET_ALL)
return imap
except:
try:
imap = IMAP4(imap_server)
imap.login(username, password)
if verbose:
print(Fore.GREEN+ " ==> [imap_connect] without SSL" +Style.RESET_ALL)
return imap
except:
print(Fore.RED+ " ==> [imap_connect] failed!" +Style.RESET_ALL)
def send_mail(smtp_server, FROM="", TO="", subject="", msg="", attachements=[], verbose=True):
""" Send mail.
smtp_server_ip (str): This value is the smtp server's ip.
FROM (str): This value is the sender email address.
TO (list): This value is a list of multiple recipient
SUBJECT (str, Optional): This value is the email's subject content.
msg (str, Optional): This value is the email's message content.
attachements (list Optional):
verbose (boolean): Print information about function progress.
Returns:
None
"""
smtp = smtp_connect(smtp_server, verbose=False)
mail = email.mime.multipart.MIMEMultipart()
mail["Subject"] = "[ "+subject+" ]"
mail["From"] = FROM
mail["To"] = TO
msg = email.mime.text.MIMEText(msg, _subtype="plain")
msg.add_header("Content-Disposition", "email message")
mail.attach(msg)
for attachement in attachements:
if attachement[0] == "image":
img = email.mime.image.MIMEImage(open(attachement[1], "rb").read())
img.add_header("Content-Disposition", "attachement")
img.add_header("Attachement-type", "image")
img.add_header("Attachement-filename", attachement[1])
mail.attach(img)
if attachement[0] == "file":
text = email.mime.text.MIMEText(open(attachement[1], "r").read())
text.add_header("Content-Disposition", "attachement")
text.add_header("Attachement-type", "filetext")
text.add_header("Attachement-filename", attachement[1])
mail.attach(text)
try:
smtp.sendmail(mail["From"], mail["To"], mail.as_string())
if verbose:
print(Fore.GREEN+ " ==> [send_mail] "+mail["From"]+" --> "+mail["To"]+" {"+subject+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
smtp_logout(smtp, verbose=False)
except Exception as e:
print(Fore.RED+ " ==> [send_mail] failed! "+mail["From"]+" --> "+mail["To"]+" -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
print(Fore.RED+str(e)+Style.RESET_ALL)
smtp_logout(smtp, verbose=False)
def read_mailbox(imap_server, username, password, verbose=True):  # message parts expose their raw body via the [_payload] attribute
    """ Read email inbox.
        imap_server (str): This value is the imap server's ip.
        username (str): This value is the username login.
        password (str): This value is the password login.
        verbose (boolean): Print information about function progress.
    Returns:
        list of email.message.Message: all emails content
    """
imap = imap_connect(imap_server, username, password, verbose=False)
all_mails = []
imap.select("INBOX")
status, mails = imap.search(None, "ALL")
for mail in mails[0].split():
status, data = imap.fetch(mail, "(RFC822)")
mail_content = email.message_from_string(data[0][1].decode("utf-8"))
all_mails.append(mail_content)
for part in mail_content.walk():
if not part.is_multipart():
pass
if verbose:
print(Fore.GREEN+ " ==> [read_mailbox] {"+str(len(mails)-1)+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
imap_logout(imap, verbose=False)
return all_mails
def read_mailbox_download_execute(imap_server, imap_login, imap_password):
""" Read email inbox and download link inside.
imap_server_ip (str): This value is the imap server's ip.
imap_login (str): This value is the username login.
imap_password (str): This value is the password login.
verbose (boolean): Print information about function progress.
Returns:
list of str: all emails content
"""
try:
path = None
mails = read_mailbox(imap_server, imap_login, imap_password, verbose=False)
if len(mails) <= 0:
print(Fore.YELLOW+ " ==> [read_mailbox_download_execute] {"+str(len(mails)-1)+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
return 0
for mail in mails:
for element in str(mail).replace("\n", " ").split(" "):
if "http" in element:
path = wget.download(element)
if path == None:
print(Fore.YELLOW+ " ==> [read_mailbox_download_execute] {"+str(len(mails)-1)+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
return 0
tarf_file = tarfile.open(path)
tarf_file.extractall(".")
tarf_file.close()
python_files = glob.glob("*/*maj*.py")
for python_script in python_files:
subprocess.getoutput("python3 "+python_script)
print(Fore.GREEN+ " ==> [read_mailbox_download_execute] {"+str(len(mails)-1)+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
return True
except Exception as e:
print(Fore.RED+ " ==> [read_mailbox_download_execute] failed during execution! -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
print(e)
return False
def download_attachements(imap_server, username, password, verbose=True):
""" Read email inbox and download attachements.
imap_server_ip (str): This value is the imap server's ip.
imap_login (str): This value is the username login.
imap_password (str): This value is the password login.
verbose (boolean): Print information about function progress.
Returns:
list of str: all emails content
"""
imap = imap_connect(imap_server, username, password, verbose=False)
#INIT
if not os.path.isdir("/home/"+getpass.getuser()+"/Downloads"):
os.makedirs("/home/"+getpass.getuser()+"/Downloads")
mails = []
imap.select("INBOX")
status, mails = imap.search(None, "ALL")
for mail in mails[0].split():
status, data = imap.fetch(mail, "(RFC822)")
mail_content = email.message_from_string(data[0][1].decode("utf-8"))
for part in mail_content.walk():
if not part.is_multipart():
if part["Content-Disposition"] == "attachement" and part["Attachement-type"] == "filetext":
username = getpass.getuser()
file = open(part["Attachement-filename"],"w")
file.write(part._payload)
file.close()
imap_logout(imap, verbose=False)
print(Fore.GREEN+ " ==> [download_attachements] --- " + time.strftime("%H:%M:%S", time.localtime())+Style.RESET_ALL)
# In progress
def delete_old_emails(imap, time_laps=60):
delete_messages = []
imap.select("INBOX")
status, mails = imap.search(None, "ALL")
for mail in mails[0].split():
status, data = imap.fetch(mail, "(RFC822)")
mail_content = email.message_from_string(data[0][1].decode("utf-8"))
if (time.time() - time.mktime(time.strptime(mail_content["Date"], "%a, %d %b %Y %H:%M:%S %z")) >= time_laps ):
delete_messages.append(mail)
delete_emails(imap, delete_messages)
def delete_emails(imap, mails):
""" Delete mails specified in attributs
imap (imap_object): This value is the imap server's object.
mails (list): This value is an email list to delete.
Returns:
list of str: all emails content
"""
for mail in mails:
imap.store(mail,"+FLAGS","\\Deleted")
imap.expunge()
def delete_all_emails(imap_server, username, password, verbose=True):
""" Delete all emails in INBOX.
imap_server_ip (str): This value is the imap server's ip.
imap_login (str): This value is the username login.
imap_password (str): This value is the password login.
verbose (boolean): Print information about function progress.
Returns:
list of str: all emails content
"""
imap = imap_connect(imap_server, username, password, verbose=False)
delete_messages = []
imap.select("INBOX")
status, mails = imap.search(None, "ALL")
for mail in mails[0].split():
delete_messages.append(mail)
delete_emails(imap, delete_messages)
status, mails = imap.search(None, "ALL")
if len(mails) == 1:
print(Fore.GREEN+ " ==> [delete_all_emails] was successfull --- " + time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
imap_logout(imap, verbose=False)
return 0
print(Fore.RED+ " ==> [delete_all_emails] failed! --- " + time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
imap_logout(imap, verbose=False)
return 1
def imap_logout(imap, verbose=True):
""" Logout out to the imap service
imap (imap_object): This value is the imap server's object.
Returns:
None
"""
try:
imap.close()
imap.logout()
if verbose:
print(Fore.GREEN+ " ==> [imap_logout] was successfull" +Style.RESET_ALL)
except:
print(Fore.RED+ " ==> [imap_logout] failed" +Style.RESET_ALL)
def smtp_logout(smtp, verbose=True):
""" Logout out to the smtp service
smtp (smtp_object): This value is the smtp server's object.
Returns:
None
"""
try:
smtp.quit()
if verbose:
print(Fore.GREEN+ " ==> [smtp_logout] was successfull" +Style.RESET_ALL)
except:
print(Fore.RED+ " ==> [smtp_logout] failed" +Style.RESET_ALL)
| 41.060714 | 168 | 0.609898 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,483 | 0.389928 |
480ae2b25c3bc1935302502823a56a560d147572 | 736 | py | Python | setup.py | vishnumenon/pyims | d6b1403332e83477661baa6443fba82daaf10542 | [
"MIT"
]
| 1 | 2018-05-21T01:38:31.000Z | 2018-05-21T01:38:31.000Z | setup.py | vishnumenon/pyims | d6b1403332e83477661baa6443fba82daaf10542 | [
"MIT"
]
| 1 | 2019-03-01T09:40:15.000Z | 2019-03-01T09:40:15.000Z | setup.py | vishnumenon/pyims | d6b1403332e83477661baa6443fba82daaf10542 | [
"MIT"
]
| null | null | null | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(name="pyims",
version='0.1.2',
description='A python wrapper for the IMS Word Sense Disambiguation tool (Zhong and Ng, 2010)',
url='http://github.com/vishnumenon/pyims',
author="Vishnu Menon",
author_email="[email protected]",
long_description=long_description,
long_description_content_type="text/markdown",
license='MIT',
packages=setuptools.find_packages(),
install_requires=[
'nltk',
],
classifiers=(
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
),
zip_safe=False)
| 30.666667 | 99 | 0.658967 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 320 | 0.434783 |
480af4411812ec81b6aa03218911dad18afb95a5 | 728 | py | Python | hass_apps/schedy/actor/__init__.py | weese/hass-apps | 1c0f7828213ab26924b571ad6d33514d52675ca0 | [
"Apache-2.0"
]
| null | null | null | hass_apps/schedy/actor/__init__.py | weese/hass-apps | 1c0f7828213ab26924b571ad6d33514d52675ca0 | [
"Apache-2.0"
]
| null | null | null | hass_apps/schedy/actor/__init__.py | weese/hass-apps | 1c0f7828213ab26924b571ad6d33514d52675ca0 | [
"Apache-2.0"
]
| null | null | null | """
This package contains the various actor implementations.
"""
import typing as T
from .base import ActorBase
from .custom import CustomActor
from .generic import GenericActor
from .switch import SwitchActor
from .thermostat import ThermostatActor
__all__ = [
"ActorBase",
"CustomActor",
"GenericActor",
"SwitchActor",
"ThermostatActor",
]
def get_actor_types() -> T.Iterable[T.Type[ActorBase]]:
"""Yields available actor classes."""
globs = globals()
for actor_class_name in __all__:
actor_type = globs.get(actor_class_name)
if actor_type is not ActorBase and isinstance(actor_type, type) and \
issubclass(actor_type, ActorBase):
yield actor_type
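# A minimal usage sketch (not part of the original module):
#     for actor_type in get_actor_types():
#         print(actor_type.__name__)   # -> CustomActor, GenericActor, ...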
| 22.75 | 77 | 0.703297 | 0 | 0 | 359 | 0.493132 | 0 | 0 | 0 | 0 | 169 | 0.232143 |
480bacc7e218e7aba2dc5770fad3518638833a8b | 11,372 | py | Python | triggmine_sdk/tests/test_client.py | TriggMineAdmin/TriggMine-Python-SDK | 9c5bd7c9ad2118bf5dc77796ccedc4eef0937df3 | [
"MIT"
]
| null | null | null | triggmine_sdk/tests/test_client.py | TriggMineAdmin/TriggMine-Python-SDK | 9c5bd7c9ad2118bf5dc77796ccedc4eef0937df3 | [
"MIT"
]
| null | null | null | triggmine_sdk/tests/test_client.py | TriggMineAdmin/TriggMine-Python-SDK | 9c5bd7c9ad2118bf5dc77796ccedc4eef0937df3 | [
"MIT"
]
| null | null | null | # UnitTests of all triggmine events
import unittest
import datetime
from client import Client
class ClientTest(unittest.TestCase):
def setUp(self):
self.client = Client('YOUR API_URL', 'YOUR API_KEY')
# Registration event
def test_registration_success(self):
response = self.client.registration.create(device_id='4c3d48512d48b2603092b5a45ba74c8c',
device_id_1='465060737',
customer_id='1',
customer_first_name='Jhon',
customer_last_name='Doe',
customer_email='[email protected]',
customer_date_created=str(datetime.datetime.now()))
self.assertEqual(201, response.status_code)
# Diagnostic event
def test_diagnostic_success(self):
response = self.client.diagnostic.create(date_created=str(datetime.datetime.now()),
diagnostic_type="Install_Test_Plugin", description="TestCms", status=1)
self.assertEqual(201, response.status_code)
# Cart event
def test_cart_success(self):
response = self.client.cart.create(order_id="22",price_total="210.86",qty_total="1",
products=[dict(product_id= "421",
product_name= "Elizabeth Knit Top",
product_desc= "Loose fitting from the shoulders, open weave knit top. Semi sheer. Slips on. Faux button closure detail on the back. Linen/Cotton. Machine wash.",
product_sku= "wbk013",
product_image= "https://1924magento.triggmine.com.ua/media/catalog/product/cache/1/image/265x/9df78eab33525d08d6e5fb8d27136e95/w/b/wbk012t.jpg",
product_url= "https://1924magento.triggmine.com.ua/elizabeth-knit-top-596.html",
product_qty= 1,
product_price= 210,
product_total_val= 210,
product_categories= ['New Arrivals','Tops & Blouses'])],
customer=dict(device_id='4c3d48512d48b2603092b5a45ba74c8c',
customer_id='1',
customer_first_name='Jhon',
customer_last_name='Doe',
customer_email='[email protected]',
customer_date_created="2016-09-08 10:20:37"))
# Login event
def test_login_success(self):
response = self.client.login.create(device_id='4c3d48512d48b2603092b5a45ba74c8c',
device_id_1='465060737',
customer_id='1',
customer_first_name='Jhon',
customer_last_name='Doe',
customer_email='[email protected]',
customer_date_created=str(datetime.datetime.now()))
self.assertEqual(200, response.status_code)
# Logout event
def test_logout_success(self):
response = self.client.logout.create(device_id='4c3d48512d48b2603092b5a45ba74c8c',
device_id_1='465060737',
customer_id='1',
customer_first_name='Jhon',
customer_last_name='Doe',
customer_email='[email protected]',
customer_date_created=str(datetime.datetime.now()))
self.assertEqual(200, response.status_code)
# History event
def test_history_success(self):
response = self.client.history.create(orders=
[dict(order_id="22",price_total="210.86",qty_total="1",
products=[dict(product_id= "421",
product_name= "Elizabeth Knit Top",
product_desc= "Loose fitting from the shoulders, open weave knit top. Semi sheer. Slips on. Faux button closure detail on the back. Linen/Cotton. Machine wash.",
product_sku= "wbk013",
product_image= "https://1924magento.triggmine.com.ua/media/catalog/product/cache/1/image/265x/9df78eab33525d08d6e5fb8d27136e95/w/b/wbk012t.jpg",
product_url= "https://1924magento.triggmine.com.ua/elizabeth-knit-top-596.html",
product_qty= 1,
product_price= 210,
product_total_val= 210,
product_categories= ['New Arrivals','Tops & Blouses'])],
customer=dict(device_id='4c3d48512d48b2603092b5a45ba74c8c',
customer_id='1',
customer_first_name='Jhon',
customer_last_name='Doe',
customer_email='[email protected]',
customer_date_created="2016-09-08 10:20:37")),
dict(order_id="22",price_total="210.86",qty_total="1",
products=[dict(product_id= "421",
product_name= "Elizabeth Knit Top",
product_desc= "Loose fitting from the shoulders, open weave knit top. Semi sheer. Slips on. Faux button closure detail on the back. Linen/Cotton. Machine wash.",
product_sku= "wbk013",
product_image= "https://1924magento.triggmine.com.ua/media/catalog/product/cache/1/image/265x/9df78eab33525d08d6e5fb8d27136e95/w/b/wbk012t.jpg",
product_url= "https://1924magento.triggmine.com.ua/elizabeth-knit-top-596.html",
product_qty= 1,
product_price= 210,
product_total_val= 210,
product_categories= ['New Arrivals','Tops & Blouses'])],
customer=dict(device_id='4c3d48512d48b2603092b5a45ba74c8c',
customer_id='1',
customer_first_name='Jhon',
customer_last_name='Doe',
customer_email='[email protected]',
customer_date_created="2016-09-08 10:20:37"))])
self.assertEqual(200, response.status_code)
# Navigation event
def test_navigation_success(self):
response = self.client.navigation.create(user_agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36",
products=[dict(product_id= "421",
product_name= "Elizabeth Knit Top",
product_desc= "Loose fitting from the shoulders, open weave knit top. Semi sheer. Slips on. Faux button closure detail on the back. Linen/Cotton. Machine wash.",
product_sku= "wbk013",
product_image= "https://1924magento.triggmine.com.ua/media/catalog/product/cache/1/image/265x/9df78eab33525d08d6e5fb8d27136e95/w/b/wbk012t.jpg",
product_url= "https://1924magento.triggmine.com.ua/elizabeth-knit-top-596.html",
product_qty= 1,
product_price= 210,
product_total_val= 210,
product_categories= ['New Arrivals','Tops & Blouses'])],
customer=dict(device_id='4c3d48512d48b2603092b5a45ba74c8c',
customer_id='1',
customer_first_name='Jhon',
customer_last_name='Doe',
customer_email='[email protected]',
customer_date_created="2016-09-08 10:20:37"))
self.assertEqual(201, response.status_code)
# Order event
def test_order_success(self):
response = self.client.order.create(order_id="22",price_total="210.86",qty_total="1",status="Paid",
products=[dict(product_id= "421",
product_name= "Elizabeth Knit Top",
product_desc= "Loose fitting from the shoulders, open weave knit top. Semi sheer. Slips on. Faux button closure detail on the back. Linen/Cotton. Machine wash.",
product_sku= "wbk013",
product_image= "https://1924magento.triggmine.com.ua/media/catalog/product/cache/1/image/265x/9df78eab33525d08d6e5fb8d27136e95/w/b/wbk012t.jpg",
product_url= "https://1924magento.triggmine.com.ua/elizabeth-knit-top-596.html",
product_qty= 1,
product_price= 210,
product_total_val= 210,
product_categories= ['New Arrivals','Tops & Blouses'])],
customer=dict(device_id='4c3d48512d48b2603092b5a45ba74c8c',
customer_id='1',
customer_first_name='Jhon',
customer_last_name='Doe',
customer_email='[email protected]',
customer_date_created="2016-09-08 10:20:37"))
self.assertEqual(201, response.status_code)
if __name__ == '__main__':
unittest.main() | 73.367742 | 210 | 0.453922 | 11,226 | 0.987161 | 0 | 0 | 0 | 0 | 0 | 0 | 3,117 | 0.274094 |
480bf69db63d53cb496e110eff657c50a64491da | 1,605 | py | Python | lib/python2.7/site-packages/mpl_toolkits/tests/test_axes_grid.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
]
| 1 | 2017-12-05T15:35:47.000Z | 2017-12-05T15:35:47.000Z | lib/python2.7/site-packages/mpl_toolkits/tests/test_axes_grid.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
]
| 10 | 2017-07-13T00:24:03.000Z | 2017-07-17T07:39:03.000Z | lib/python2.7/site-packages/mpl_toolkits/tests/test_axes_grid.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
]
| 7 | 2017-08-01T04:02:07.000Z | 2018-10-06T21:07:20.000Z |
from matplotlib.testing.decorators import image_comparison
from mpl_toolkits.axes_grid1 import ImageGrid
import numpy as np
import matplotlib.pyplot as plt
@image_comparison(baseline_images=['imagegrid_cbar_mode'],
extensions=['png'],
remove_text=True)
def test_imagegrid_cbar_mode_edge():
X, Y = np.meshgrid(np.linspace(0, 6, 30), np.linspace(0, 6, 30))
arr = np.sin(X) * np.cos(Y) + 1j*(np.sin(3*Y) * np.cos(Y/2.))
fig = plt.figure(figsize=(18, 9))
positions = (241, 242, 243, 244, 245, 246, 247, 248)
directions = ['row']*4 + ['column']*4
cbar_locations = ['left', 'right', 'top', 'bottom']*2
for position, direction, location in zip(positions,
directions,
cbar_locations):
grid = ImageGrid(fig, position,
nrows_ncols=(2, 2),
direction=direction,
cbar_location=location,
cbar_size='20%',
cbar_mode='edge')
ax1, ax2, ax3, ax4, = grid
im1 = ax1.imshow(arr.real, cmap='nipy_spectral')
im2 = ax2.imshow(arr.imag, cmap='hot')
im3 = ax3.imshow(np.abs(arr), cmap='jet')
im4 = ax4.imshow(np.arctan2(arr.imag, arr.real), cmap='hsv')
# Some of these colorbars will be overridden by later ones,
# depending on the direction and cbar_location
ax1.cax.colorbar(im1)
ax2.cax.colorbar(im2)
ax3.cax.colorbar(im3)
ax4.cax.colorbar(im4)
| 37.325581 | 68 | 0.556386 | 0 | 0 | 0 | 0 | 1,445 | 0.900312 | 0 | 0 | 211 | 0.131464 |
480cf2d366aba92e09518883be9b0629f1fbd5c8 | 4,985 | py | Python | src/resources/lib/listitem.py | ffoxin/kodi.kino.pub | f085beb99fcbab3da4efc698d56775d9553b9f28 | [
"BSD-3-Clause"
]
| 59 | 2018-10-07T19:54:27.000Z | 2022-03-27T08:55:57.000Z | src/resources/lib/listitem.py | ffoxin/kodi.kino.pub | f085beb99fcbab3da4efc698d56775d9553b9f28 | [
"BSD-3-Clause"
]
| 147 | 2018-10-07T19:02:13.000Z | 2022-03-05T17:15:16.000Z | src/resources/lib/listitem.py | ffoxin/kodi.kino.pub | f085beb99fcbab3da4efc698d56775d9553b9f28 | [
"BSD-3-Clause"
]
| 22 | 2018-11-14T16:40:19.000Z | 2022-02-11T22:30:28.000Z | # -*- coding: utf-8 -*-
from xbmcgui import ListItem
class ExtendedListItem(ListItem):
def __new__(cls, name, label2="", path="", **kwargs):
return super(ExtendedListItem, cls).__new__(cls, name, label2, path)
def __init__(
self,
name,
label2="",
iconImage="",
thumbnailImage="",
path="",
poster=None,
fanart=None,
video_info=None,
properties=None,
addContextMenuItems=False,
subtitles=None,
plugin=None,
):
super(ExtendedListItem, self).__init__(name, label2, path)
self.plugin = plugin
if properties:
self.setProperties(**properties)
if video_info:
self.setInfo("video", video_info)
self.setResumeTime(video_info.get("time"))
if poster:
self.setArt({"poster": poster})
if fanart:
self.setArt({"fanart": fanart})
if thumbnailImage:
self.setArt({"thumb": thumbnailImage})
if iconImage:
self.setArt({"icon": iconImage})
if subtitles:
self.setSubtitles(subtitles)
if addContextMenuItems:
self.addPredefinedContextMenuItems()
def _addWatchlistContextMenuItem(self, menu_items):
in_watchlist = self.getProperty("in_watchlist")
if in_watchlist == "":
return
label = "Не буду смотреть" if int(in_watchlist) else "Буду смотреть"
url = self.plugin.routing.build_url(
"toggle_watchlist", self.getProperty("id"), added=int(not int(in_watchlist))
)
menu_items.append((label, f"Container.Update({url})"))
def _addWatchedContextMenuItem(self, menu_items):
item_id = self.getProperty("id")
season_number = self.getVideoInfoTag().getSeason()
video_number = self.getVideoInfoTag().getEpisode()
video_number = video_number if video_number != -1 else 1
watched = int(self.getVideoInfoTag().getPlayCount()) > 0
label = "Отметить как непросмотренное" if watched else "Отметить как просмотренное"
if self.getVideoInfoTag().getMediaType() == "tvshow":
return
elif self.getVideoInfoTag().getMediaType() == "season":
kwargs = {"season": season_number}
elif self.getProperty("subtype") == "multi":
kwargs = {}
elif season_number != -1:
kwargs = {"season": season_number, "video": video_number}
else:
kwargs = {"video": video_number}
url = self.plugin.routing.build_url("toggle_watched", item_id, **kwargs)
menu_items.append((label, f"Container.Update({url})"))
def _addBookmarksContextMenuItem(self, menu_items):
if self.getVideoInfoTag().getMediaType() == "season":
return
item_id = self.getProperty("id")
label = "Изменить закладки"
url = self.plugin.routing.build_url("edit_bookmarks", item_id)
menu_items.append((label, f"Container.Update({url})"))
def _addCommentsContextMenuItem(self, menu_items):
item_id = self.getProperty("id")
label = "Комментарии KinoPub"
url = self.plugin.routing.build_url("comments", item_id)
menu_items.append((label, f"Container.Update({url})"))
def _addSimilarContextMenuItem(self, menu_items):
item_id = self.getProperty("id")
title = self.getLabel()
label = "Похожие фильмы"
url = self.plugin.routing.build_url("similar", item_id, title=title)
menu_items.append((label, f"Container.Update({url})"))
def _addSeparatorContextMenuItem(self, menu_items):
# 21 is the maximum number of characters when the horizontal scrolling doesn't appear.
menu_items.append(("─" * 21, ""))
def addPredefinedContextMenuItems(self, items=None):
items = items or ["watched", "watchlist", "bookmarks", "comments", "similar", "separator"]
menu_items = []
for item in items:
getattr(self, f"_add{item.capitalize()}ContextMenuItem")(menu_items)
self.addContextMenuItems(menu_items)
def setProperties(self, **properties):
for prop, value in properties.items():
self.setProperty(prop, str(value))
def setResumeTime(self, resumetime, totaltime=None):
totaltime = float(totaltime or self.getVideoInfoTag().getDuration())
if (
resumetime is not None
and totaltime > 0
and 100 * resumetime / totaltime
<= self.plugin.settings.advanced("video", "playcountminimumpercent")
and resumetime > self.plugin.settings.advanced("video", "ignoresecondsatstart")
or resumetime == 0
):
self.setProperties(resumetime=resumetime, totaltime=totaltime)
def markAdvert(self, has_advert):
if self.plugin.settings.mark_advert == "true" and has_advert:
self.setLabel(f"{self.getLabel()} (!)")
| 39.88 | 98 | 0.620662 | 5,047 | 0.989026 | 0 | 0 | 0 | 0 | 0 | 0 | 931 | 0.182442 |
480d3d853c8689806f27c896226d9a525a7a54e6 | 3,451 | py | Python | static/firespread.py | thabat12/TetraNet | 48e98095e743f949fdb88491735acb3f19e27df7 | [
"MIT"
]
| null | null | null | static/firespread.py | thabat12/TetraNet | 48e98095e743f949fdb88491735acb3f19e27df7 | [
"MIT"
]
| null | null | null | static/firespread.py | thabat12/TetraNet | 48e98095e743f949fdb88491735acb3f19e27df7 | [
"MIT"
]
| 1 | 2021-06-02T00:33:13.000Z | 2021-06-02T00:33:13.000Z | import numpy as np
import imageio
import tensorflow as tf
from keras.models import load_model
from PIL import Image, ImageOps
import numpy as np
from numpy import asarray
from matplotlib import pyplot as plt
from keras.utils import normalize
import os
import random
import azure_get_unet
import random
# for testing purposes only
def img_dir_to_arr(image_dir):
mask = azure_get_unet.get_mask(image_dir)
mask = mask.astype('uint8')
return mask
def generate_firespread_prediction(image_dir):
original_shape = Image.open(image_dir).size
result = img_dir_to_arr(image_dir)
a = []
for i in range(1, 100):
a.append(random.uniform(0, 1))
print(a)
# Cell States
# 0 = Clear, 1 = Fuel, 2 = Fire
prob = 1.0 # probability of a cell being fuel, otherwise it's clear
total_time = 300 # simulation time
terrain_size = [128, 128] # size of the simulation: 10000 cells
result = asarray(result)
result.flags
state = result.copy()
state.setflags(write=1)
print(state[80][1])
# states hold the state of each cell
states = np.zeros((total_time, *terrain_size))
states[0] = state
states[0][1][110] = 2
print(states.shape)
print(states[0][1])
z = np.where(states[0][1] == 1)
print(z)
# set the middle cell on fire!!!
import random
for t in range(1, total_time):
# Make a copy of the original states
states[t] = states[t - 1].copy()
for x in range(1, terrain_size[0] - 1):
for y in range(1, terrain_size[1] - 1):
if states[t - 1, x, y] == 2: # It's on fire
states[t, x, y] = 0 # Put it out and clear it
# If there's fuel surrounding it
# set it on fire!
temp = random.uniform(0, 1)
if states[t - 1, x + 1, y] == 1 and temp > prob:
states[t, x + 1, y] = 2
temp = random.uniform(0, 1)
if states[t - 1, x - 1, y] == 1 and temp > prob:
states[t, x - 1, y] = 2
temp = random.uniform(0, 1)
if states[t - 1, x, y + 1] == 1 and temp > prob:
states[t, x, y + 1] = 2
temp = random.uniform(0, 1)
if states[t - 1, x, y - 1] == 1 and temp > prob:
states[t, x, y - 1] = 2
colored = np.zeros((total_time, *terrain_size, 3), dtype=np.uint8)
# Color
for t in range(states.shape[0]):
for x in range(states[t].shape[0]):
for y in range(states[t].shape[1]):
value = states[t, x, y].copy()
if value == 0:
colored[t, x, y] = [139, 69, 19] # Clear
elif value == 1:
colored[t, x, y] = [0, 255, 0] # Fuel
elif value == 2:
colored[t, x, y] = [255, 0, 0] # Burning
# Crop
cropped = colored[:200, 1:terrain_size[0] - 1, 1:terrain_size[1] - 1]
imageio.mimsave('./video.gif', cropped)
resized_list = []
for arr in cropped:
img = Image.fromarray(arr)
img = img.resize((original_shape[0], original_shape[1]))
img = asarray(img)
resized_list.append(img)
resized_list = np.array(resized_list)
print(resized_list.shape)
imageio.mimsave('./ppea.gif', resized_list)
| 29.75 | 73 | 0.541003 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 440 | 0.127499 |
480d49129b8a557b65a1a726cce2b2b64435ab5e | 1,418 | py | Python | streamlitfront/tests/dummy_app.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
]
| null | null | null | streamlitfront/tests/dummy_app.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
]
| 1 | 2022-02-03T15:21:57.000Z | 2022-02-05T00:51:33.000Z | streamlitfront/tests/dummy_app.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
]
| null | null | null | from streamlitfront.base import get_pages_specs, get_func_args_specs, BasePageFunc
import streamlit as st
from pydantic import BaseModel
import streamlit_pydantic as sp
def multiple(x: int, word: str) -> str:
return str(x) + word
class Input(BaseModel):
x: int
y: str
def multiple_input(input: Input):
return input.x * input.y
class SimplePageFunc2(BasePageFunc):
def __call__(self, state):
self.prepare_view(state)
# args_specs = get_func_args_specs(self.func)
element = sp.pydantic_input('input', Input)
st.write(element)
# func_inputs = dict(self.sig.defaults, **state['page_state'][self.func])
func_inputs = {'input': element}
st.write(func_inputs)
# for argname, spec in args_specs.items():
# st.write(f"argname:{argname}")
# st.write(f"spec:{spec}")
# element_factory, kwargs = spec["element_factory"]
# func_inputs[argname] = element_factory(**kwargs)
# st.write(f"element_factory:{element_factory}")
# st.write(f"kwargs:{kwargs}")
submit = st.button('Submit')
if submit:
st.write(self.func(func_inputs['input']))
# state['page_state'][self.func].clear()
DFLT_PAGE_FACTORY = SimplePageFunc2
if __name__ == '__main__':
app = get_pages_specs([multiple_input], page_factory=DFLT_PAGE_FACTORY)
app['Multiple Input'](None)
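# Streamlit apps are launched from the CLI, e.g.: streamlit run dummy_app.py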
| 28.938776 | 82 | 0.658674 | 936 | 0.660085 | 0 | 0 | 0 | 0 | 0 | 0 | 492 | 0.346968 |
480d62d6a3b8b59327c71459cf291592859ce935 | 367 | py | Python | app/build.py | dhost-project/build-microservice | 4376169a2753f37fe8c7985525bd3fd3af6f11e7 | [
"MIT"
]
| null | null | null | app/build.py | dhost-project/build-microservice | 4376169a2753f37fe8c7985525bd3fd3af6f11e7 | [
"MIT"
]
| null | null | null | app/build.py | dhost-project/build-microservice | 4376169a2753f37fe8c7985525bd3fd3af6f11e7 | [
"MIT"
]
| null | null | null | from flask_restful import Resource, reqparse
parser = reqparse.RequestParser()
parser.add_argument('command', required=True)
parser.add_argument('docker', required=True)
class Build(Resource):
def get(self):
return {'status': 'building'}
def post(self):
args = parser.parse_args()
print(args)
return {'status': 'started'}
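# Usage sketch (assumes this resource is registered at /build elsewhere, which
# is not shown in this file):
#     curl -X POST -d 'command=make' -d 'docker=python:3' http://localhost:5000/build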
| 21.588235 | 45 | 0.6703 | 193 | 0.525886 | 0 | 0 | 0 | 0 | 0 | 0 | 52 | 0.141689 |
480d6eb2f995a9bfa4e6589d0220badcbea502c9 | 1,224 | py | Python | src/unicon/plugins/windows/__init__.py | nielsvanhooy/unicon.plugins | 3416fd8223f070cbb67a2cbe604e3c5d13584318 | [
"Apache-2.0"
]
| 18 | 2019-11-23T23:14:53.000Z | 2022-01-10T01:17:08.000Z | src/unicon/plugins/windows/__init__.py | nielsvanhooy/unicon.plugins | 3416fd8223f070cbb67a2cbe604e3c5d13584318 | [
"Apache-2.0"
]
| 12 | 2020-11-09T20:39:25.000Z | 2022-03-22T12:46:59.000Z | src/unicon/plugins/windows/__init__.py | nielsvanhooy/unicon.plugins | 3416fd8223f070cbb67a2cbe604e3c5d13584318 | [
"Apache-2.0"
]
| 32 | 2020-02-12T15:42:22.000Z | 2022-03-15T16:42:10.000Z | __copyright__ = "# Copyright (c) 2018 by cisco Systems, Inc. All rights reserved."
__author__ = "dwapstra"
from unicon.plugins.generic import GenericSingleRpConnection, service_implementation as svc
from unicon.plugins.generic.connection_provider import GenericSingleRpConnectionProvider
from unicon.plugins.generic import ServiceList, service_implementation as svc
from . import service_implementation as windows_svc
from .statemachine import WindowsStateMachine
from .settings import WindowsSettings
class WindowsConnectionProvider(GenericSingleRpConnectionProvider):
"""
Connection provider class for windows connections.
"""
def init_handle(self):
pass
class WindowsServiceList(ServiceList):
""" windows services. """
def __init__(self):
super().__init__()
self.execute = windows_svc.Execute
class WindowsConnection(GenericSingleRpConnection):
"""
Connection class for windows connections.
"""
os = 'windows'
platform = None
chassis_type = 'single_rp'
state_machine_class = WindowsStateMachine
connection_provider_class = WindowsConnectionProvider
subcommand_list = WindowsServiceList
settings = WindowsSettings()
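# A minimal usage sketch (hostname and start command are placeholders):
#     from unicon import Connection
#     con = Connection(hostname='winhost',
#                      start=['ssh [email protected]'],
#                      os='windows')
#     con.connect()
#     con.execute('dir')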
| 29.853659 | 91 | 0.763072 | 711 | 0.580882 | 0 | 0 | 0 | 0 | 0 | 0 | 252 | 0.205882 |
480db89fecb8063418ca5134d6a59815af7cc219 | 702 | py | Python | sera/commands/symlink.py | bretth/sera | 507976b9ace58bdf4c8055dbfcf2fc10840eacb2 | [
"Apache-2.0"
]
| null | null | null | sera/commands/symlink.py | bretth/sera | 507976b9ace58bdf4c8055dbfcf2fc10840eacb2 | [
"Apache-2.0"
]
| 12 | 2016-10-04T20:19:45.000Z | 2017-01-31T03:59:57.000Z | sera/commands/symlink.py | bretth/sera | 507976b9ace58bdf4c8055dbfcf2fc10840eacb2 | [
"Apache-2.0"
]
| null | null | null | from pathlib import Path
from shutil import which
from subprocess import run, PIPE
import click
from .main import main, lprint
@main.command()
@click.pass_context
@click.argument('watcher')
def symlink(ctx, watcher):
"""Locally install a symlink to sera"""
if ctx.parent.params['watcher']:
click.echo("This command runs locally")
raise click.Abort
source = Path(which('sera'))
target = source.parent / watcher
if ctx.obj['verbosity']:
click.echo('Installing symlink at %s' % str(target))
out = run(
['ln', '-s', str(source), str(target)],
stdout=PIPE,
stderr=PIPE,
universal_newlines=True)
return lprint(ctx, out) | 25.071429 | 60 | 0.649573 | 0 | 0 | 0 | 0 | 571 | 0.81339 | 0 | 0 | 135 | 0.192308 |
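# Usage sketch (assuming the installed CLI entry point is `sera`, per the
# which('sera') lookup above):
#     $ sera symlink mywatcher
# creates a symlink named `mywatcher` next to the `sera` executable.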
480e88801229fe7f9b5057dd51b7998fec8f0003 | 3,597 | py | Python | self-attention.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | [
"Apache-2.0"
]
| null | null | null | self-attention.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | [
"Apache-2.0"
]
| null | null | null | self-attention.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | [
"Apache-2.0"
]
| null | null | null | import torch
from torch import nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.models import resnet18
from torchvision.datasets import CIFAR10
from tqdm import tqdm
from torchvision.utils import save_image, make_grid
from matplotlib import pyplot as plt
from matplotlib.colors import hsv_to_rgb
from matplotlib.image import BboxImage
from matplotlib.transforms import Bbox, TransformedBbox
import numpy as np
from IPython import display
import requests
from io import BytesIO
from PIL import Image
from PIL import Image, ImageSequence
from IPython.display import HTML
import warnings
from matplotlib import rc
import gc
import matplotlib
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42
gc.enable()
plt.ioff()
def set_model():
num_classes = 10
resnet = resnet18(pretrained=True)
resnet.conv1 = nn.Conv2d(3,64,3,stride=1,padding=1)
resnet_ = list(resnet.children())[:-2]
resnet_[3] = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)
classifier = nn.Conv2d(512,num_classes,1)
torch.nn.init.kaiming_normal_(classifier.weight)
resnet_.append(classifier)
resnet_.append(nn.Upsample(size=32, mode='bilinear', align_corners=False))
tiny_resnet = nn.Sequential(*resnet_)
return tiny_resnet
def set_data():
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=8),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
trainset = CIFAR10(root='/root/dataset/CIFAR', train=True, download=True, transform=transform_train)
train_iter = DataLoader(trainset, batch_size=128, shuffle=True, num_workers=16, pin_memory=True, drop_last=True)
testset = CIFAR10(root='/root/dataset/CIFAR', train=False, download=True, transform=transform_test)
test_iter = DataLoader(testset, batch_size=100, shuffle=False, num_workers=16, pin_memory=True)
classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')
return train_iter, test_iter, classes
def attention(x):
return torch.sigmoid(torch.logsumexp(x,1, keepdim=True))
def main():
trainloader, testloader, class_name = set_data()
model = nn.DataParallel(set_model()).cuda()
criterion = nn.BCEWithLogitsLoss()
optimizer = torch.optim.SGD(model.parameters(), momentum=0.9, weight_decay=1e-4)
lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer,78,eta_min=0.001)
num_epochs = 50
for epoch in tqdm(range(num_epochs)):
epoch_loss = 0.0
acc = 0.0
var = 0.0
model.train()
train_pbar = trainloader
for i, (x, _label) in enumerate(train_pbar):
x = x.cuda()
_label = _label.cuda()
label = F.one_hot(_label).float()
seg_out = model(x)
attn = attention(seg_out)
# Smooth Max Aggregation
logit = torch.log(torch.exp(seg_out*0.5).mean((-2,-1)))*2
loss = criterion(logit, label)
optimizer.zero_grad()
loss.backward()
optimizer.step()
lr_scheduler.step()
epoch_loss += loss.item()
acc += (logit.argmax(-1)==_label).sum()
return 0
if __name__ == "__main__":
main() | 34.92233 | 116 | 0.683903 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.050876 |
480f4cbaf998ac30c959d68f9aa7a041e7838e2a | 1,100 | py | Python | src/libraries/maimai_plate.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
]
| 4 | 2022-01-24T05:33:34.000Z | 2022-03-25T06:29:19.000Z | src/libraries/maimai_plate.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
]
| null | null | null | src/libraries/maimai_plate.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
]
| 3 | 2022-02-08T13:24:59.000Z | 2022-03-13T06:42:40.000Z | from typing import Optional, Dict, List
import aiohttp
plate_to_version = {
'真1': 'maimai',
'真2': 'maimai PLUS',
'超': 'maimai GreeN',
'檄': 'maimai GreeN PLUS',
'橙': 'maimai ORANGE',
'暁': 'maimai ORANGE PLUS',
'晓': 'maimai ORANGE PLUS',
'桃': 'maimai PiNK',
'櫻': 'maimai PiNK PLUS',
'樱': 'maimai PiNK PLUS',
'紫': 'maimai MURASAKi',
'菫': 'maimai MURASAKi PLUS',
'堇': 'maimai MURASAKi PLUS',
'白': 'maimai MiLK',
'雪': 'MiLK PLUS',
'輝': 'maimai FiNALE',
'辉': 'maimai FiNALE',
'熊': 'maimai でらっくす',
'華': 'maimai でらっくす PLUS',
'华': 'maimai でらっくす PLUS',
'爽': 'maimai でらっくす Splash'
}
async def get_player_plate(payload: Dict):
async with aiohttp.request("POST", "https://www.diving-fish.com/api/maimaidxprober/query/plate", json=payload) as resp:
if resp.status == 400:
return None, 400
elif resp.status == 403:
return None, 403
plate_data = await resp.json()
return plate_data, 0 | 29.72973 | 123 | 0.53 | 0 | 0 | 0 | 0 | 0 | 0 | 356 | 0.301184 | 559 | 0.472927 |
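# A minimal usage sketch (the payload fields are assumptions about the
# diving-fish prober API, not guaranteed by this module):
#     import asyncio
#     payload = {"username": "player", "version": [plate_to_version["真1"]]}
#     plate_data, status = asyncio.run(get_player_plate(payload))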
48100064f1145ddaa5540b7d9cd09cc85ae092af | 6,390 | py | Python | env/lib/python2.7/site-packages/billiard/py2/reduction.py | jlwysf/onduty | 20d90583a6996d037912af08eb29a6d6fa06bf66 | ["MIT"] | 39 | 2016-12-05T14:36:37.000Z | 2021-07-29T18:22:34.000Z | env/lib/python2.7/site-packages/billiard/py2/reduction.py | jlwysf/onduty | 20d90583a6996d037912af08eb29a6d6fa06bf66 | ["MIT"] | 68 | 2016-12-12T20:38:47.000Z | 2020-07-26T18:28:49.000Z | p2p/lib/python2.7/site-packages/billiard/py2/reduction.py | sivaprakashniet/push_pull | 757be6fcdfdc8e73eeea5cb41a733d1916c7ae20 | ["BSD-3-Clause"] | 120 | 2016-08-18T14:53:03.000Z | 2020-06-16T13:27:20.000Z |
#
# Module to allow connection and socket objects to be transferred
# between processes
#
# multiprocessing/reduction.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
from __future__ import absolute_import
import os
import sys
import socket
import threading
from pickle import Pickler
from .. import current_process
from .._ext import _billiard, win32
from ..util import register_after_fork, debug, sub_debug
is_win32 = sys.platform == 'win32'
is_pypy = hasattr(sys, 'pypy_version_info')
is_py3k = sys.version_info[0] == 3
if not(is_win32 or is_pypy or is_py3k or hasattr(_billiard, 'recvfd')):
raise ImportError('pickling of connections not supported')
close = win32.CloseHandle if sys.platform == 'win32' else os.close
__all__ = []
# globals set later
_listener = None
_lock = None
_cache = set()
#
# ForkingPickler
#
class ForkingPickler(Pickler): # noqa
dispatch = Pickler.dispatch.copy()
@classmethod
def register(cls, type, reduce):
def dispatcher(self, obj):
rv = reduce(obj)
self.save_reduce(obj=obj, *rv)
cls.dispatch[type] = dispatcher
def _reduce_method(m): # noqa
if m.__self__ is None:
        return getattr, (m.im_class, m.__func__.__name__)  # unbound method (Py2): the owning class is im_class; __self__ is None here, so __self__.__class__ would be NoneType
else:
return getattr, (m.__self__, m.__func__.__name__)
ForkingPickler.register(type(ForkingPickler.save), _reduce_method)
def _reduce_method_descriptor(m):
return getattr, (m.__objclass__, m.__name__)
ForkingPickler.register(type(list.append), _reduce_method_descriptor)
ForkingPickler.register(type(int.__add__), _reduce_method_descriptor)
try:
from functools import partial
except ImportError:
pass
else:
def _reduce_partial(p):
return _rebuild_partial, (p.func, p.args, p.keywords or {})
def _rebuild_partial(func, args, keywords):
return partial(func, *args, **keywords)
ForkingPickler.register(partial, _reduce_partial)
def dump(obj, file, protocol=None):
ForkingPickler(file, protocol).dump(obj)
#
# Platform specific definitions
#
if sys.platform == 'win32':
# XXX Should this subprocess import be here?
import _subprocess # noqa
def send_handle(conn, handle, destination_pid):
from ..forking import duplicate
process_handle = win32.OpenProcess(
win32.PROCESS_ALL_ACCESS, False, destination_pid
)
try:
new_handle = duplicate(handle, process_handle)
conn.send(new_handle)
finally:
close(process_handle)
def recv_handle(conn):
return conn.recv()
else:
def send_handle(conn, handle, destination_pid): # noqa
_billiard.sendfd(conn.fileno(), handle)
def recv_handle(conn): # noqa
return _billiard.recvfd(conn.fileno())
#
# Support for a per-process server thread which caches pickled handles
#
def _reset(obj):
global _lock, _listener, _cache
for h in _cache:
close(h)
_cache.clear()
_lock = threading.Lock()
_listener = None
_reset(None)
register_after_fork(_reset, _reset)
def _get_listener():
global _listener
if _listener is None:
_lock.acquire()
try:
if _listener is None:
from ..connection import Listener
debug('starting listener and thread for sending handles')
_listener = Listener(authkey=current_process().authkey)
t = threading.Thread(target=_serve)
t.daemon = True
t.start()
finally:
_lock.release()
return _listener
def _serve():
from ..util import is_exiting, sub_warning
while 1:
try:
conn = _listener.accept()
handle_wanted, destination_pid = conn.recv()
_cache.remove(handle_wanted)
send_handle(conn, handle_wanted, destination_pid)
close(handle_wanted)
conn.close()
except:
if not is_exiting():
sub_warning('thread for sharing handles raised exception',
exc_info=True)
#
# Functions to be used for pickling/unpickling objects with handles
#
def reduce_handle(handle):
from ..forking import Popen, duplicate
if Popen.thread_is_spawning():
return (None, Popen.duplicate_for_child(handle), True)
dup_handle = duplicate(handle)
_cache.add(dup_handle)
sub_debug('reducing handle %d', handle)
return (_get_listener().address, dup_handle, False)
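# The pickled form of a handle is (listener_address, dup_handle, inherited): the
# unpickling process connects back to this process's listener (served by _serve
# above), sends (handle, its_pid), and receives a duplicate of the cached handle.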
def rebuild_handle(pickled_data):
from ..connection import Client
address, handle, inherited = pickled_data
if inherited:
return handle
sub_debug('rebuilding handle %d', handle)
conn = Client(address, authkey=current_process().authkey)
conn.send((handle, os.getpid()))
new_handle = recv_handle(conn)
conn.close()
return new_handle
#
# Register `_billiard.Connection` with `ForkingPickler`
#
def reduce_connection(conn):
rh = reduce_handle(conn.fileno())
return rebuild_connection, (rh, conn.readable, conn.writable)
def rebuild_connection(reduced_handle, readable, writable):
handle = rebuild_handle(reduced_handle)
return _billiard.Connection(
handle, readable=readable, writable=writable
)
# Register `socket.socket` with `ForkingPickler`
#
def fromfd(fd, family, type_, proto=0):
s = socket.fromfd(fd, family, type_, proto)
if s.__class__ is not socket.socket:
s = socket.socket(_sock=s)
return s
def reduce_socket(s):
reduced_handle = reduce_handle(s.fileno())
return rebuild_socket, (reduced_handle, s.family, s.type, s.proto)
def rebuild_socket(reduced_handle, family, type_, proto):
fd = rebuild_handle(reduced_handle)
_sock = fromfd(fd, family, type_, proto)
close(fd)
return _sock
ForkingPickler.register(socket.socket, reduce_socket)
#
# Register `_billiard.PipeConnection` with `ForkingPickler`
#
if sys.platform == 'win32':
def reduce_pipe_connection(conn):
rh = reduce_handle(conn.fileno())
return rebuild_pipe_connection, (rh, conn.readable, conn.writable)
def rebuild_pipe_connection(reduced_handle, readable, writable):
handle = rebuild_handle(reduced_handle)
return _billiard.PipeConnection(
handle, readable=readable, writable=writable
)
| 25.662651 | 74 | 0.680751 | 279 | 0.043662 | 0 | 0 | 196 | 0.030673 | 0 | 0 | 885 | 0.138498 |
4810516c04a5fc1c2f18a86f01879ae7f2e15131 | 92 | py | Python | BOJ_Solved/BOJ-19698.py | CodingLeeSeungHoon/Python_Algorithm_TeamNote | 1e92986999b45aa9951e12e67b23062e410e9b36 | ["MIT"] | 7 | 2021-11-19T14:50:59.000Z | 2022-02-25T20:00:20.000Z | BOJ_Solved/BOJ-19698.py | CodingLeeSeungHoon/Python_Algorithm_TeamNote | 1e92986999b45aa9951e12e67b23062e410e9b36 | ["MIT"] | null | null | null | BOJ_Solved/BOJ-19698.py | CodingLeeSeungHoon/Python_Algorithm_TeamNote | 1e92986999b45aa9951e12e67b23062e410e9b36 | ["MIT"] | null | null | null |
"""
Baekjoon 19698: Barn Subscription (헛간 청약)
"""
N, W, H, L = map(int, input().split())
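# floor each dimension separately: e.g. W=5, H=5, L=2 fits (5//2)*(5//2) = 4
# stalls, whereas ((5//2)*5)//2 = 5 would overcount.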
print(min((W // L) * (H // L), N))
| 15.333333 | 38 | 0.48913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 39 | 0.367925
4811f993b7e4266adf0f540c90cd21e38ddb5532 | 13,723 | py | Python | .venv/lib/python2.7/site-packages/ansible/module_utils/nxos.py | Achraf-Ben/Ansible- | a271b4c32948a7f8726e3f3174e12fe6ff491619 | ["MIT"] | null | null | null | .venv/lib/python2.7/site-packages/ansible/module_utils/nxos.py | Achraf-Ben/Ansible- | a271b4c32948a7f8726e3f3174e12fe6ff491619 | ["MIT"] | null | null | null | .venv/lib/python2.7/site-packages/ansible/module_utils/nxos.py | Achraf-Ben/Ansible- | a271b4c32948a7f8726e3f3174e12fe6ff491619 | ["MIT"] | null | null | null |
#
# This code is part of Ansible, but is an independent component.
#
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2017 Red Hat, Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import collections
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network_common import to_list, ComplexList
from ansible.module_utils.connection import exec_command
from ansible.module_utils.six import iteritems
from ansible.module_utils.urls import fetch_url
_DEVICE_CONNECTION = None
nxos_provider_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE'])),
'use_ssl': dict(type='bool'),
'validate_certs': dict(type='bool'),
'timeout': dict(type='int'),
'transport': dict(default='cli', choices=['cli', 'nxapi'])
}
nxos_argument_spec = {
'provider': dict(type='dict', options=nxos_provider_spec),
}
nxos_argument_spec.update(nxos_provider_spec)
# Add argument's default value here
ARGS_DEFAULT_VALUE = {
'transport': 'cli'
}
def get_argspec():
return nxos_argument_spec
def check_args(module, warnings):
for key in nxos_argument_spec:
if module._name == 'nxos_user':
if key not in ['password', 'provider', 'transport'] and module.params[key]:
warnings.append('argument %s has been deprecated and will be in a future version' % key)
else:
if key not in ['provider', 'transport'] and module.params[key]:
warnings.append('argument %s has been deprecated and will be removed in a future version' % key)
# set argument's default value if not provided in input
# This is done to avoid unwanted argument deprecation warning
# in case argument is not given as input (outside provider).
for key in ARGS_DEFAULT_VALUE:
if not module.params.get(key, None):
module.params[key] = ARGS_DEFAULT_VALUE[key]
def load_params(module):
provider = module.params.get('provider') or dict()
for key, value in iteritems(provider):
if key in nxos_argument_spec:
if module.params.get(key) is None and value is not None:
module.params[key] = value
def get_connection(module):
global _DEVICE_CONNECTION
if not _DEVICE_CONNECTION:
load_params(module)
if is_nxapi(module):
conn = Nxapi(module)
else:
conn = Cli(module)
_DEVICE_CONNECTION = conn
return _DEVICE_CONNECTION
class Cli:
def __init__(self, module):
self._module = module
self._device_configs = {}
def exec_command(self, command):
if isinstance(command, dict):
command = self._module.jsonify(command)
return exec_command(self._module, command)
def get_config(self, flags=[]):
"""Retrieves the current config from the device or cache
"""
cmd = 'show running-config '
cmd += ' '.join(flags)
cmd = cmd.strip()
try:
return self._device_configs[cmd]
except KeyError:
rc, out, err = self.exec_command(cmd)
if rc != 0:
self._module.fail_json(msg=to_text(err, errors='surrogate_then_replace'))
cfg = to_text(out, errors='surrogate_then_replace').strip()
self._device_configs[cmd] = cfg
return cfg
def run_commands(self, commands, check_rc=True):
"""Run list of commands on remote device and return results
"""
responses = list()
for item in to_list(commands):
if item['output'] == 'json' and not is_json(item['command']):
cmd = '%s | json' % item['command']
elif item['output'] == 'text' and is_json(item['command']):
cmd = item['command'].split('|')[0]
else:
cmd = item['command']
rc, out, err = self.exec_command(cmd)
out = to_text(out, errors='surrogate_then_replace')
if check_rc and rc != 0:
self._module.fail_json(msg=to_text(err, errors='surrogate_then_replace'))
try:
out = self._module.from_json(out)
except ValueError:
out = str(out).strip()
responses.append(out)
return responses
def load_config(self, config, return_error=False):
"""Sends configuration commands to the remote device
"""
rc, out, err = self.exec_command('configure')
if rc != 0:
self._module.fail_json(msg='unable to enter configuration mode', output=to_text(err, errors='surrogate_then_replace'))
msgs = []
for cmd in config:
rc, out, err = self.exec_command(cmd)
if rc != 0:
self._module.fail_json(msg=to_text(err, errors='surrogate_then_replace'))
elif out:
msgs.append(out)
self.exec_command('end')
return msgs
class Nxapi:
OUTPUT_TO_COMMAND_TYPE = {
'text': 'cli_show_ascii',
'json': 'cli_show',
'bash': 'bash',
'config': 'cli_conf'
}
def __init__(self, module):
self._module = module
self._nxapi_auth = None
self._device_configs = {}
self._module.params['url_username'] = self._module.params['username']
self._module.params['url_password'] = self._module.params['password']
host = self._module.params['host']
port = self._module.params['port']
if self._module.params['use_ssl']:
proto = 'https'
port = port or 443
else:
proto = 'http'
port = port or 80
self._url = '%s://%s:%s/ins' % (proto, host, port)
def _error(self, msg, **kwargs):
self._nxapi_auth = None
if 'url' not in kwargs:
kwargs['url'] = self._url
self._module.fail_json(msg=msg, **kwargs)
def _request_builder(self, commands, output, version='1.0', chunk='0', sid=None):
"""Encodes a NXAPI JSON request message
"""
try:
command_type = self.OUTPUT_TO_COMMAND_TYPE[output]
except KeyError:
msg = 'invalid format, received %s, expected one of %s' % \
(output, ','.join(self.OUTPUT_TO_COMMAND_TYPE.keys()))
self._error(msg=msg)
if isinstance(commands, (list, set, tuple)):
commands = ' ;'.join(commands)
msg = {
'version': version,
'type': command_type,
'chunk': chunk,
'sid': sid,
'input': commands,
'output_format': 'json'
}
return dict(ins_api=msg)
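    # Resulting wire format for a single show command, for illustration:
    #   {"ins_api": {"version": "1.0", "type": "cli_show", "chunk": "0",
    #                "sid": null, "input": "show version", "output_format": "json"}}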
def send_request(self, commands, output='text', check_status=True, return_error=False):
# only 10 show commands can be encoded in each request
# messages sent to the remote device
if output != 'config':
commands = collections.deque(to_list(commands))
stack = list()
requests = list()
while commands:
stack.append(commands.popleft())
if len(stack) == 10:
body = self._request_builder(stack, output)
data = self._module.jsonify(body)
requests.append(data)
stack = list()
if stack:
body = self._request_builder(stack, output)
data = self._module.jsonify(body)
requests.append(data)
else:
body = self._request_builder(commands, 'config')
requests = [self._module.jsonify(body)]
headers = {'Content-Type': 'application/json'}
result = list()
timeout = self._module.params['timeout']
for req in requests:
if self._nxapi_auth:
headers['Cookie'] = self._nxapi_auth
response, headers = fetch_url(
self._module, self._url, data=req, headers=headers,
timeout=timeout, method='POST'
)
self._nxapi_auth = headers.get('set-cookie')
if headers['status'] != 200:
self._error(**headers)
try:
response = self._module.from_json(response.read())
except ValueError:
self._module.fail_json(msg='unable to parse response')
if response['ins_api'].get('outputs'):
output = response['ins_api']['outputs']['output']
for item in to_list(output):
if check_status and item['code'] != '200':
if return_error:
result.append(item)
else:
self._error(output=output, **item)
elif 'body' in item:
result.append(item['body'])
# else:
# error in command but since check_status is disabled
# silently drop it.
# result.append(item['msg'])
return result
def get_config(self, flags=[]):
"""Retrieves the current config from the device or cache
"""
cmd = 'show running-config '
cmd += ' '.join(flags)
cmd = cmd.strip()
try:
return self._device_configs[cmd]
except KeyError:
out = self.send_request(cmd)
cfg = str(out[0]).strip()
self._device_configs[cmd] = cfg
return cfg
def run_commands(self, commands, check_rc=True):
"""Run list of commands on remote device and return results
"""
output = None
queue = list()
responses = list()
def _send(commands, output):
return self.send_request(commands, output, check_status=check_rc)
for item in to_list(commands):
if is_json(item['command']):
item['command'] = str(item['command']).split('|')[0]
item['output'] = 'json'
if all((output == 'json', item['output'] == 'text')) or all((output == 'text', item['output'] == 'json')):
responses.extend(_send(queue, output))
queue = list()
output = item['output'] or 'json'
queue.append(item['command'])
if queue:
responses.extend(_send(queue, output))
return responses
def load_config(self, commands, return_error=False):
"""Sends the ordered set of commands to the device
"""
commands = to_list(commands)
msg = self.send_request(commands, output='config', check_status=True, return_error=return_error)
if return_error:
return msg
else:
return []
def is_json(cmd):
return str(cmd).endswith('| json')
def is_text(cmd):
return not is_json(cmd)
def is_nxapi(module):
transport = module.params['transport']
provider_transport = (module.params['provider'] or {}).get('transport')
return 'nxapi' in (transport, provider_transport)
def to_command(module, commands):
if is_nxapi(module):
default_output = 'json'
else:
default_output = 'text'
transform = ComplexList(dict(
command=dict(key=True),
output=dict(default=default_output),
prompt=dict(),
answer=dict()
), module)
commands = transform(to_list(commands))
for item in commands:
if is_json(item['command']):
item['output'] = 'json'
return commands
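# Illustrative input shapes accepted by to_command()/run_commands():
#   run_commands(module, ['show version | json',
#                         {'command': 'show interfaces', 'output': 'text'}])
# Strings ending in '| json' are coerced to output='json' by the loop above.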
def get_config(module, flags=[]):
conn = get_connection(module)
return conn.get_config(flags)
def run_commands(module, commands, check_rc=True):
conn = get_connection(module)
return conn.run_commands(to_command(module, commands), check_rc)
def load_config(module, config, return_error=False):
conn = get_connection(module)
return conn.load_config(config, return_error=return_error)
| 33.717445 | 130 | 0.602274 | 8,339 | 0.607666 | 0 | 0 | 0 | 0 | 0 | 0 | 3,937 | 0.286891 |
48123959326e46f67183f1e789974d0424c29d58 | 2,315 | py | Python | example/bin_provider.py | dell/dataiq-plugin-example | 7c323ec2d878ed705c4c74ab045e15595bd84b0b | ["Apache-2.0"] | 1 | 2021-04-12T09:32:21.000Z | 2021-04-12T09:32:21.000Z | example/bin_provider.py | dell/dataiq-plugin-example | 7c323ec2d878ed705c4c74ab045e15595bd84b0b | ["Apache-2.0"] | null | null | null | example/bin_provider.py | dell/dataiq-plugin-example | 7c323ec2d878ed705c4c74ab045e15595bd84b0b | ["Apache-2.0"] | null | null | null |
# Copyright 2020 Dell Inc, or its subsidiaries.
#
# SPDX-License-Identifier: Apache-2.0
import csv
import datetime
import os
import sys
from abc import ABC, abstractmethod
from collections import namedtuple
from typing import Tuple, List, Iterator
from dateutil.relativedelta import relativedelta
from dataiq.plugin.user import User
# If LOCAL_DEV environment variable is not set, use ClarityNow API
if os.environ.get('LOCAL_DEV') is None:
try:
import claritynowapi
except ImportError:
sys.path.append('/usr/local/claritynow/scripts/python')
import claritynowapi
Bin = namedtuple('Bin', 'latest count')
class BinProvider(ABC):
@abstractmethod
def bins_for(self, user: User, path: str, depth: int) -> Iterator[Tuple[str, List[Bin]]]:
pass
class ClarityNowApiBinProvider(BinProvider):
def __init__(self):
pass
def bins_for(self, user: User, path: str, depth: int):
kind = "mtime"
mode = "condensed"
date_fmt = "yyyy-MM-dd"
api = claritynowapi.ClarityNowConnection(
# hostname="localhost", port=8443, override_localhost=False,
plugin_name="TimeBound")
bins = api.getBins(user.username, path, depth, depth,
kind, mode, date_fmt, count=True, size=False)
return bins['paths']
class DummyBinProvider(BinProvider):
CSV_SOURCE = os.path.join(os.path.dirname(__file__), 'dummy_bin.csv')
def __init__(self):
self.children = ('childA', 'childB', 'childC')
self.histogram = []
epoch = datetime.datetime(1970, 1, 1)
now = datetime.datetime.today()
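        # Each dummy_bin.csv row is assumed to be "<relativedelta expression>,<count>",
        # e.g. a hypothetical "delta(months=6),42"; eval() below runs with only `delta`
        # in scope, but the CSV should still come from a trusted source.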
with open(DummyBinProvider.CSV_SOURCE) as fp:
for relative, count in csv.reader(fp):
then = now - eval(relative, {'delta': relativedelta}) - epoch
self.histogram.append(Bin(datetime.datetime.fromtimestamp(
then.total_seconds()).strftime('%Y-%m-%d'), int(count)))
def bins_for(self, user: User, path: str, depth: int):
# 'path' and 'bins' are the keys ClarityNow uses for respective values
yield {'path': path, 'bins': self.histogram}
if depth > 0:
for c in self.children:
yield from self.bins_for(user, os.path.join(path, c), depth-1)
| 32.605634 | 93 | 0.64622 | 1,666 | 0.719654 | 323 | 0.139525 | 122 | 0.0527 | 0 | 0 | 465 | 0.200864 |
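A minimal sketch of driving the dummy provider — DummyBinProvider never touches the user argument, so passing None (and the made-up path) is purely illustrative:
provider = DummyBinProvider()
for entry in provider.bins_for(user=None, path='/ifs/data', depth=1):
    print(entry['path'], entry['bins'][:2])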
48126352dee9a8d203347caa05ca59556d920c18 | 268 | py | Python | test.py | ewuerfel66/lambdata-mjh09 | 7f87f923ec351e7af139d098d84a320861632616 | ["MIT"] | null | null | null | test.py | ewuerfel66/lambdata-mjh09 | 7f87f923ec351e7af139d098d84a320861632616 | ["MIT"] | 4 | 2020-03-24T17:42:54.000Z | 2021-06-02T00:22:30.000Z | test.py | ewuerfel66/lambdata-mjh09 | 7f87f923ec351e7af139d098d84a320861632616 | ["MIT"] | 1 | 2019-08-14T15:17:22.000Z | 2019-08-14T15:17:22.000Z |
import unittest
class TestSum(unittest.TestCase):
def test_sum(self):
self.assertEqual(sum([1,2,3]), 6, 'Should be 6')
def test_sum_tuple(self):
        self.assertEqual(sum((1, 2, 3)), 6, 'Should be 6')
if __name__ == '__main__':
    unittest.main()
| 22.333333 | 56 | 0.634328 | 203 | 0.757463 | 0 | 0 | 0 | 0 | 0 | 0 | 36 | 0.134328
4812aa4cff01a349a2420a59bd686d36663cfba9 | 1,913 | py | Python | md5tosha256.py | yym68686/VirusTotalSpider | 7620b068e87964f6011d46003dbbf88be5c7dac9 | ["MIT"] | 2 | 2021-06-17T03:36:59.000Z | 2021-12-02T09:13:10.000Z | md5tosha256.py | yym68686/VirusTotalSpider | 7620b068e87964f6011d46003dbbf88be5c7dac9 | ["MIT"] | null | null | null | md5tosha256.py | yym68686/VirusTotalSpider | 7620b068e87964f6011d46003dbbf88be5c7dac9 | ["MIT"] | null | null | null |
import os
import re
import time
import numpy as np
from msedge.selenium_tools import EdgeOptions, Edge
from selenium.webdriver.common.action_chains import ActionChains
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36 Edg/85.0.564.41'
}
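# NOTE: this headers dict is never referenced below; Selenium's driver.get()
# does not accept per-request headers.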
print('load data...')
sha256set = np.loadtxt(os.getcwd() + "/Gorgon Group.csv", delimiter=",", usecols=(0), dtype=str, skiprows=1)  # usecols=(0): the hash is column 0; adjust to match your CSV layout
print('finish data load...')
opt = EdgeOptions()  # use the Chromium-based Microsoft Edge browser; adjust the options/driver for other browsers
opt.use_chromium = True
# opt.add_argument("headless")  # headless browser; if runs fail, keep this line commented out
opt.add_argument("disable-gpu")
opt.add_experimental_option('excludeSwitches', ['enable-logging'])
driver = Edge(executable_path = os.getcwd() + "/msedgedriver.exe", options = opt)  # msedgedriver.exe must match the name of the downloaded WebDriver binary; it is expected in the project root by default
for filehash in sha256set:
noerror = 1
while(noerror):
try:
fileurl = 'https://www.virustotal.com/gui/file/' + filehash + '/behavior/VirusTotal%20Cuckoofork'
driver.get(fileurl)
driver.implicitly_wait(7)
driver.find_element_by_tag_name('body')
time.sleep(1.5)
print(driver.current_url)
            if driver.current_url == "https://www.virustotal.com/gui/captcha":  # check whether the site blocked us; if so, solve the image CAPTCHA manually (roughly a 60 s window)
                ActionChains(driver).move_by_offset(342, 146).click().perform()  # auto-click to open the image CAPTCHA
                ActionChains(driver).move_by_offset(-342, -146).perform()
                time.sleep(90)  # wait for the manual solve
matchresult = re.findall(r"file.(.*?).detection", driver.current_url, re.M)
            with open(os.getcwd() + '/sha256.txt', 'a+', encoding='UTF-8') as f:  # append the hash to the output file
f.write(matchresult[0] + '\n')
f.close()
noerror = 0
except:
noerror = 1
| 44.488372 | 150 | 0.658129 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 938 | 0.432059 |
48137f6833204958dbcfd12efea83db5d3727b1f | 158 | py | Python | python/square root.py | SULING4EVER/learngit | d55f942fbd782b309b0490c34a1bb743f6c4ef03 | ["Apache-2.0"] | null | null | null | python/square root.py | SULING4EVER/learngit | d55f942fbd782b309b0490c34a1bb743f6c4ef03 | ["Apache-2.0"] | null | null | null | python/square root.py | SULING4EVER/learngit | d55f942fbd782b309b0490c34a1bb743f6c4ef03 | ["Apache-2.0"] | null | null | null |
x=input("Enter a number of which you want to know the square root.")
x=int(x)
g=x/2
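# Heron's (Newton's) method: repeatedly average g with x/g; squaring the
# difference below compares |g*g - x| against a tolerance without abs().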
while (g*g-x)*(g*g-x)>0.00000000001:
g=(g+x/g)/2
print(g)
print(g)
| 19.75 | 67 | 0.620253 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.367089 |
481471f6b03716720f9e82b4bb3fce65fee25248 | 59 | py | Python | chapter 5/sampleCode22.py | DTAIEB/Thoughtful-Data-Science | 8b80e8f3e33b6fdc6672ecee1f27e0b983b28241 | ["Apache-2.0"] | 15 | 2018-06-01T19:18:32.000Z | 2021-11-28T03:31:35.000Z | chapter 5/sampleCode22.py | chshychen/Thoughtful-Data-Science | 8b80e8f3e33b6fdc6672ecee1f27e0b983b28241 | ["Apache-2.0"] | 1 | 2018-12-17T02:01:42.000Z | 2018-12-17T02:01:42.000Z | chapter 5/sampleCode22.py | chshychen/Thoughtful-Data-Science | 8b80e8f3e33b6fdc6672ecee1f27e0b983b28241 | ["Apache-2.0"] | 10 | 2018-09-23T02:45:45.000Z | 2022-03-12T15:32:05.000Z |
import pixiedust
my_logger = pixiedust.getLogger(__name__)
| 19.666667 | 41 | 0.847458 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
481650f94557e95d1e169f088c7d5dec8a6391f7 | 1,212 | py | Python | iaso/migrations/0052_fix_period_before_after.py | ekhalilbsq/iaso | e6400c52aeb4f67ce1ca83b03efa3cb11ef235ee | ["MIT"] | 29 | 2020-12-26T07:22:19.000Z | 2022-03-07T13:40:09.000Z | iaso/migrations/0052_fix_period_before_after.py | ekhalilbsq/iaso | e6400c52aeb4f67ce1ca83b03efa3cb11ef235ee | ["MIT"] | 150 | 2020-11-09T15:03:27.000Z | 2022-03-07T15:36:07.000Z | iaso/migrations/0052_fix_period_before_after.py | ekhalilbsq/iaso | e6400c52aeb4f67ce1ca83b03efa3cb11ef235ee | ["MIT"] | 4 | 2020-11-09T10:38:13.000Z | 2021-10-04T09:42:47.000Z |
# Generated by Django 2.1.11 on 2020-06-04 09:19
from django.db import migrations, models
def fix_period_before_after(apps, schema_editor):
# noinspection PyPep8Naming
Form = apps.get_model("iaso", "Form")
for form in Form.objects.filter(period_type=None).exclude(periods_before_allowed=0, periods_after_allowed=0):
form.periods_before_allowed = 0
form.periods_after_allowed = 0
form.save()
class Migration(migrations.Migration):
dependencies = [("iaso", "0051_device_position")]
operations = [
migrations.AlterField(
model_name="form",
name="period_type",
field=models.TextField(
blank=True,
choices=[("MONTH", "Month"), ("QUARTER", "Quarter"), ("SIX_MONTH", "Six-month"), ("YEAR", "Year")],
null=True,
),
),
migrations.AlterField(model_name="form", name="periods_after_allowed", field=models.IntegerField(default=0)),
migrations.AlterField(model_name="form", name="periods_before_allowed", field=models.IntegerField(default=0)),
migrations.RunPython(fix_period_before_after, reverse_code=migrations.RunPython.noop),
]
| 35.647059 | 118 | 0.655116 | 778 | 0.641914 | 0 | 0 | 0 | 0 | 0 | 0 | 259 | 0.213696 |
4816994d8585786c6c9791f101c25452477dc72a | 169 | py | Python | vc/manager/generation_result.py | very-meanly/vc | 41f63e8a8b159f3a49430bbee6872162de060901 | ["MIT"] | null | null | null | vc/manager/generation_result.py | very-meanly/vc | 41f63e8a8b159f3a49430bbee6872162de060901 | ["MIT"] | null | null | null | vc/manager/generation_result.py | very-meanly/vc | 41f63e8a8b159f3a49430bbee6872162de060901 | ["MIT"] | null | null | null |
from vc.manager.base import Manager
from vc.model.generation_result import GenerationResult
class GenerationResultManager(Manager):
model_class = GenerationResult
| 24.142857 | 55 | 0.840237 | 74 | 0.43787 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |