seq_id | text | repo_name | sub_path | file_name | file_ext | file_size_in_byte | program_lang | lang | doc_type | stars | dataset | pt |
---|---|---|---|---|---|---|---|---|---|---|---|---|
25585522305
|
import os
import requests
import re
import yaml
from packaging import version
# Update the avalanchego_vms_list variable in roles/node/vars
# with new VM versions available and their compatibility with AvalancheGo
GITHUB_RAW_URL = 'https://raw.githubusercontent.com'
GITHUB_API_URL = 'https://api.github.com'
VARS_YAML_PATH = '../roles/node/vars/main.yml'
VARS_YAML_HEADER_SIZE = 3
VMS_REPOS = {
'subnet-evm': 'ava-labs/subnet-evm',
}
MIN_AVAX_VERSION = '1.9.6'
vms_versions_comp = {}
# For each VM, fetch AvalancheGo compatibility info from README
for vm, repo in VMS_REPOS.items():
repo_info = requests.get(f'{GITHUB_API_URL}/repos/{repo}')
default_branch = repo_info.json()['default_branch']
readme_url = f'{GITHUB_RAW_URL}/{repo}/{default_branch}/README.md'
readme_raw = requests.get(readme_url)
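    # Illustrative (hypothetical) examples of README compatibility lines this regex matches:
    #   [v0.4.0-v0.4.2] AvalancheGo@v1.9.6-v1.9.7
    #   [v0.4.3] AvalancheGo@v1.9.8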
compatibility_specs = list(
re.finditer(
r'^\[v(?P<vm_start_ver>\d+\.\d+\.\d+)-?v?(?P<vm_end_ver>\d+\.\d+\.\d+)?\] '
r'AvalancheGo@v(?P<avax_start_ver>\d+\.\d+\.\d+)-?v?(?P<avax_end_ver>\d+\.\d+\.\d+)?',
readme_raw.text,
flags=re.MULTILINE,
)
)
# Iterate on all versions
versions_comp = {}
for c in compatibility_specs:
vm_start_ver = version.parse(c.group('vm_start_ver'))
vm_end_ver = version.parse(c.group('vm_end_ver') or c.group('vm_start_ver'))
for major in range(vm_start_ver.major, vm_end_ver.major + 1):
for minor in range(vm_start_ver.minor, vm_end_ver.minor + 1):
for micro in range(vm_start_ver.micro, vm_end_ver.micro + 1):
if version.parse(c.group('avax_start_ver')) >= version.parse(
MIN_AVAX_VERSION
):
versions_comp.update(
{
f'{major}.{minor}.{micro}': {
'ge': c.group('avax_start_ver'),
'le': c.group('avax_end_ver')
or c.group('avax_start_ver'),
}
}
)
vms_versions_comp.update({vm: versions_comp})
vars_yaml_abs_path = os.path.join(
os.path.abspath(os.path.dirname(__file__)), VARS_YAML_PATH
)
with open(vars_yaml_abs_path) as vars_yaml:
    vars_header = ''.join(vars_yaml.readline() for _ in range(VARS_YAML_HEADER_SIZE))
vars_obj = yaml.load(vars_yaml, Loader=yaml.CLoader)
# Enrich the avalanchego_vms_list with updated versions_comp
for vm, v_comp in vms_versions_comp.items():
vars_obj['avalanchego_vms_list'][vm]['versions_comp'] = v_comp
with open(vars_yaml_abs_path + '.updated', 'w') as vars_yaml:
vars_yaml.write(vars_header + yaml.dump(vars_obj, Dumper=yaml.CDumper))
|
AshAvalanche/ansible-avalanche-collection
|
scripts/update_vm_versions.py
|
update_vm_versions.py
|
py
| 2,832 |
python
|
en
|
code
| 10 |
github-code
|
6
|
28923395310
|
from enums import PositionX, PositionY
from constants import AmmoIndicator as Properties
from functions import get_surface
from models.GameObject import PositionalGameObject
import pygame as pg
class AmmoIndicator(PositionalGameObject):
GROUP_NAME = 'ammo_indicator'
def __init__(self, scene, *groups, position_x=PositionX.RIGHT, position_y=PositionY.BOTTOM):
super().__init__(scene, position_x, position_y, *groups)
self.max = 0
self.current = 0
self.full_image = pg.image.load(Properties.FULL_IMAGE_PATH)
self.empty_image = pg.image.load(Properties.EMPTY_IMAGE_PATH)
self.column_count = 0
self.row_count = 0
self.set_current(self.max)
def attach_weapon(self, weapon):
self.max = weapon.CAPACITY
self.column_count = weapon.AMMO_INDICATOR_COLUMN_SIZE
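        # Ceiling division: number of rows needed for "max" rounds at "column_count" per row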
self.row_count = (self.max + self.column_count - 1) // self.column_count
self.width = Properties.WIDTH * min(self.max, self.column_count)
self.width += Properties.OFFSET_X * (min(self.max, self.column_count) - 1) * int(self.max != 0)
self.height = Properties.HEIGHT * self.row_count
self.height += Properties.OFFSET_Y * (self.row_count - 1) * int(self.max != 0)
self.set_current(weapon.get_remaining())
def update_surface(self):
image = get_surface(self.width, self.height)
for i in range(self.max):
if i < self.current:
img = self.full_image
else:
img = self.empty_image
x = (Properties.WIDTH + Properties.OFFSET_X) * (i % self.column_count)
y = (Properties.HEIGHT + Properties.OFFSET_Y) * (i // self.column_count)
image.blit(img, (x, y))
self.set_image(image)
self.update_position()
def set_current(self, value):
if 0 <= value <= self.max:
self.current = value
self.update_surface()
|
Thavin2147483648/shoot_platform
|
objects/AmmoIndicator.py
|
AmmoIndicator.py
|
py
| 1,946 |
python
|
en
|
code
| 0 |
github-code
|
6
|
38633504444
|
def tetration(value, tetronent):
"""
>>> print(tetration(3,3)) # 3 ** 3 ** 3, or 3^(3^3)
7625597484987
"""
if tetronent == 1:
return value
else:
return value ** tetration(value, tetronent-1)
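# Note: evaluation is right-associative, e.g. tetration(2, 4) == 2 ** (2 ** (2 ** 2)) == 65536;
# a tetronent < 1 is not handled and recurses until RecursionError.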
number = int(input('Number: '))
tetronent_value = int(input('Tetronent: '))
print(tetration(number, tetronent_value))
|
mthezeng/hello-world
|
tetration.py
|
tetration.py
|
py
| 350 |
python
|
en
|
code
| 0 |
github-code
|
6
|
37970241909
|
import cv2
import numpy as np
#frame = np.full((360, 480, 3), 0, dtype=int)
frame = cv2.imread("/home/pi/Pictures/2020-07-20_1439.jpg")
cv2.imshow("Frame", frame)
while True:
key = cv2.waitKey(1)
if key != -1:
print("Key", key)
if key == ord("q"): # up key
break
|
webbhm/FlaskExperiment
|
python/test_key.py
|
test_key.py
|
py
| 295 |
python
|
en
|
code
| 0 |
github-code
|
6
|
19773599717
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
"""
Runs IBEIS gui
"""
from __future__ import absolute_import, division, print_function
import multiprocessing
import utool as ut
import ibeis # NOQA
import sys
CMD = ut.get_argflag('--cmd')
# For Pyinstaller
#from ibeis.all_imports import * # NOQA
def dependencies_for_myprogram():
""" Let pyintaller find these modules
References:
http://stackoverflow.com/questions/18596410/importerror-no-module-named-mpl-toolkits-with-maptlotlib-1-3-0-and-py2exe
"""
from guitool.__PYQT__ import QtCore, QtGui # Pyinstaller hacks # NOQA
from PyQt4 import QtCore, QtGui # NOQA
#from PyQt4 import QtCore, QtGui # NOQA
from scipy.sparse.csgraph import _validation # NOQA
from scipy.special import _ufuncs_cxx # NOQA
from mpl_toolkits.axes_grid1 import make_axes_locatable # NOQA
#import lru # NOQA
# Workaround for mpl_toolkits
import importlib
importlib.import_module('mpl_toolkits').__path__
def run_ibeis():
r"""
CommandLine:
python -m ibeis
python -m ibeis find_installed_tomcat
python -m ibeis get_annot_groundtruth:1
"""
#ut.set_process_title('IBEIS_main')
#main_locals = ibeis.main()
#ibeis.main_loop(main_locals)
#ut.set_process_title('IBEIS_main')
cmdline_varags = ut.get_cmdline_varargs()
if len(cmdline_varags) > 0 and cmdline_varags[0] == 'rsync':
from ibeis.scripts import rsync_ibeisdb
rsync_ibeisdb.rsync_ibsdb_main()
sys.exit(0)
if ut.get_argflag('--devcmd'):
# Hack to let devs mess around when using an installer version
# TODO: add more hacks
#import utool.tests.run_tests
#utool.tests.run_tests.run_tests()
ut.embed()
# Run the tests of other modules
elif ut.get_argflag('--run-utool-tests'):
import utool.tests.run_tests
retcode = utool.tests.run_tests.run_tests()
print('... exiting')
sys.exit(retcode)
elif ut.get_argflag('--run-vtool-tests'):
import vtool.tests.run_tests
retcode = vtool.tests.run_tests.run_tests()
print('... exiting')
sys.exit(retcode)
elif ut.get_argflag(('--run-ibeis-tests', '--run-tests')):
from ibeis.tests import run_tests
retcode = run_tests.run_tests()
print('... exiting')
sys.exit(retcode)
if ut.get_argflag('-e'):
"""
ibeis -e print -a default -t default
"""
# Run dev script if -e given
import ibeis.dev # NOQA
ibeis.dev.devmain()
print('... exiting')
sys.exit(0)
    # Attempt to run a test using the function name alone
# with the --tf flag
import ibeis.tests.run_tests
import ibeis.tests.reset_testdbs
ignore_prefix = [
#'ibeis.tests',
'ibeis.control.__SQLITE3__',
'_autogen_explicit_controller']
ignore_suffix = ['_grave']
func_to_module_dict = {
'demo_bayesnet': 'ibeis.algo.hots.demobayes',
}
ut.main_function_tester('ibeis', ignore_prefix, ignore_suffix,
func_to_module_dict=func_to_module_dict)
#if ut.get_argflag('-e'):
# import ibeis
# expt_kw = ut.get_arg_dict(ut.get_func_kwargs(ibeis.run_experiment),
# prefix_list=['--', '-'])
# ibeis.run_experiment(**expt_kw)
# sys.exit(0)
doctest_modname = ut.get_argval(
('--doctest-module', '--tmod', '-tm', '--testmod'),
type_=str, default=None, help_='specify a module to doctest')
if doctest_modname is not None:
"""
        Allow any doctest to be run from the main ibeis script
python -m ibeis --tmod utool.util_str --test-align:0
python -m ibeis --tmod ibeis.algo.hots.pipeline --test-request_ibeis_query_L0:0 --show
python -m ibeis --tf request_ibeis_query_L0:0 --show
./dist/ibeis/IBEISApp --tmod ibeis.algo.hots.pipeline --test-request_ibeis_query_L0:0 --show # NOQA
./dist/ibeis/IBEISApp --tmod utool.util_str --test-align:0
./dist/IBEIS.app/Contents/MacOS/IBEISApp --tmod utool.util_str --test-align:0
./dist/IBEIS.app/Contents/MacOS/IBEISApp --run-utool-tests
./dist/IBEIS.app/Contents/MacOS/IBEISApp --run-vtool-tests
"""
print('[ibeis] Testing module')
mod_alias_list = {
'exptdraw': 'ibeis.expt.experiment_drawing'
}
doctest_modname = mod_alias_list.get(doctest_modname, doctest_modname)
module = ut.import_modname(doctest_modname)
(nPass, nTotal, failed_list, error_report_list) = ut.doctest_funcs(module=module)
retcode = 1 - (len(failed_list) == 0)
#print(module)
sys.exit(retcode)
import ibeis
main_locals = ibeis.main()
execstr = ibeis.main_loop(main_locals)
# <DEBUG CODE>
if 'back' in main_locals and CMD:
#from ibeis.all_imports import * # NOQA
back = main_locals['back']
front = getattr(back, 'front', None) # NOQA
#front = back.front
#ui = front.ui
ibs = main_locals['ibs'] # NOQA
exec(execstr)
# </DEBUG CODE>
if __name__ == '__main__':
multiprocessing.freeze_support() # for win32
run_ibeis()
|
smenon8/ibeis
|
ibeis/__main__.py
|
__main__.py
|
py
| 5,256 |
python
|
en
|
code
| null |
github-code
|
6
|
72474544829
|
import sys
import cwiid
from onewii import oneWii
class WiimoteControl:
def __init__(self, newWiimote, wid, wnum ):
self.wiimote = newWiimote
self.wiimote.led = wnum
self.wid = wid
self.setupDataRead()
self.cal = max( self.wiimote.get_acc_cal(cwiid.EXT_NONE) )
self.eventdetector = oneWii()
def __del__(self):
self.wiimote.close()
def setupDataRead(self):
self.wiimote.enable(cwiid.FLAG_MESG_IFC)
reportMode = 0
reportMode ^= cwiid.RPT_BTN
reportMode ^= cwiid.RPT_ACC
self.wiimote.rpt_mode = reportMode
self.wiimote.mesg_callback = self.testForEvents
def testForEvents(self,mesg_list,timestamp):
for mesg in mesg_list:
if mesg[0] == cwiid.MESG_ACC:
# this appears backward but it isn't...
x = self.cal[0] - mesg[1][cwiid.X]
y = mesg[1][cwiid.Y] - self.cal[1]
z = mesg[1][cwiid.Z] - self.cal[2]
self.eventdetector(timestamp,self.wid,xyz=(x,y,z))
elif mesg[0] == cwiid.MESG_BTN:
self.eventdetector(timestamp,self.wid,buttons=mesg[1])
|
cloew/WiiCanDoIt-Framework
|
src/ProtocolGame/wiis/wiireader.py
|
wiireader.py
|
py
| 1,008 |
python
|
en
|
code
| 2 |
github-code
|
6
|
32124013729
|
import os
from gpt_interaction import process_content_with_gpt
from web_scraping import scrape_web_content
from utils import read_file, convert_pdf_to_text, convert_docx_to_text, convert_excel_to_csv
def process_files(input_dir, tmp_dir, output_dir):
for root, dirs, files in os.walk(input_dir):
for file in files:
file_path = os.path.join(root, file)
# Handle different file types
if file.endswith('.pdf'):
text_content = convert_pdf_to_text(file_path, tmp_dir)
elif file.endswith('.docx'):
text_content = convert_docx_to_text(file_path, tmp_dir)
elif file.endswith('.xlsx'):
text_content = convert_excel_to_csv(file_path, tmp_dir)
else:
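                # Anything else is read as plain text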
text_content = read_file(file_path)
# Process text content with GPT-4 API
json_content, json_filename = process_content_with_gpt(text_content)
# Save JSON content to output directory
with open(os.path.join(output_dir, json_filename), 'w') as json_file:
json_file.write(json_content)
def process_links(links_file, tmp_dir, output_dir):
with open(links_file, 'r') as file:
urls = file.readlines()
for url in urls:
text_content = scrape_web_content(url.strip(), tmp_dir)
# Process text content with GPT-4 API
json_content, json_filename = process_content_with_gpt(text_content)
# Save JSON content to output directory
with open(os.path.join(output_dir, json_filename), 'w') as json_file:
json_file.write(json_content)
|
vontainment/v-openai-data2json
|
file_handling.py
|
file_handling.py
|
py
| 1,643 |
python
|
en
|
code
| 0 |
github-code
|
6
|
6757765764
|
from django.shortcuts import render
import subprocess
import glob
def index(request):
if request.method == "POST":
link = request.POST["link"]
cont = request.POST["cont"]
        # Download the torrent
        subprocess.run(["transmission-cli", "-w", "./", link])
        # Convert files to MP4; subprocess does not expand shell wildcards,
        # so glob the matching files explicitly
        for ext in ("webm", "mkv", "avi", "ts"):
            for source in glob.glob(f"*.{ext}"):
                subprocess.run(["ffmpeg", "-i", source, "-c:v", "copy", "-c:a", "copy", f"{ext}.mp4"])
        # Rename the final file
        for final in glob.glob("*.mp4"):
            subprocess.run(["mv", final, f"{cont}.mp4"])
        # Move the final file to the Apache2 directory
        subprocess.run(["sudo", "mv", f"{cont}.mp4", "/var/www/html/"])
        # Add the success message to the template
        success_message = f"Torrent downloaded successfully! Final file: {cont}.mp4"
return render(request, "index.html", {'success_message': success_message})
return render(request, "index.html")
|
SrTristeSad/Download-torrent
|
views.py
|
views.py
|
py
| 1,169 |
python
|
vi
|
code
| 0 |
github-code
|
6
|
12177475498
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.homepage, name='home'),
path('id<int:id>', views.profile, name='profile'),
path('friends<int:user_id>', views.FriendsView.as_view(), name='friends'),
path('edit', views.edit_profile, name='edit_profile'),
path('friendship_request/<int:id>', views.friendship_request, name='friendship_request'),
path('telegraph', views.DialogsView.as_view(), name='dialogs'),
path('telegraph/dialogue/start/<user_id>', views.CreateDialogView.as_view(), name='start_dialog'),
path('telegraph/dialogue/<chat_id>', views.MessagesView.as_view(), name='messages'),
path('logout/', views.LogoutView.as_view(), name='logout'),
# api
# user posts
path('api/add_post', views.PostAPIAdd.as_view(), name='post_create'),
path('post_<int:postid>/update/<post_text>', views.PostAPIUpdate.as_view(), name='post_update'),
path('post_<int:postid>/remove', views.PostAPIRemove.as_view(), name='post_remove'),
path('post_<int:postid>/like', views.PostLikeAPIToggle.as_view(), name='post_like'),
path('post_<int:postid>/comment', views.PostAPIAddComment.as_view(), name='post_comment'),
# user
path('user_create/<username>_<email>_<password>_<first_name>_<last_name>', views.UserAPICreate.as_view(), name='create_user'),
path('user_update', views.UserAPIUpdate.as_view(), name='update_user'),
path('user_update_status', views.UserAPIUpdateStatus.as_view(), name='update_status_user'),
path('user_friendlist', views.UserAPIFriends.as_view(), name='friends_user'),
# chat
path('api/read/<chat_id>', views.ChatAPIMessagesRead.as_view(), name='api_read_messages'),
path('api/get_unreaded/<chat_id>', views.ChatAPIMessagesUnreaded.as_view(), name='api_unreaded_messages'),
path('api/send_message', views.ChatAPIMessagesSend.as_view(), name='api_send_message'),
path('api/remove_message/<message_id>', views.ChatAPIMessagesRemove.as_view(), name='api_remove_message'),
path('api/get_last_unreaded_message', views.ChatAPIMessagesGetUnreaded.as_view(), name='api_getlast_message'),
#path('get_mark_status/<int:id>', views.get_mark_status),
#path('get_marks_count/<int:id>', views.get_marks_count),
#path('/mail<int:id>', views.profile, name='mail'),
]
|
synchro123/LetsTalk
|
social/apps/main/urls.py
|
urls.py
|
py
| 2,226 |
python
|
en
|
code
| 0 |
github-code
|
6
|
11844491154
|
#!/usr/bin/python3
#======================exec.py=====================#
#---------------------Encoding---------------------#
# -*- coding: utf-8 -*-
#--------------------------------------------------#
#---------------------Imports----------------------#
import sys
#--------------------------------------------------#
#--------------------Description-------------------#
# This file handles potential errors in the code
# passed to the program as a parameter, allowing
# the program to run correctly.
#--------------------------------------------------#
#----------------------Authors---------------------#
# Sébastien HERT
# Dejan PARIS
#--------------------------------------------------#
#----------------Global variables------------------#
#--------------------------------------------------#
#---------------------Methods----------------------#
def checkBooleen(identTable, name):
"""
Description : Vérifie que le paramètre / la variable "name" n'est pas un booléen (pour get()).
Paramètres :
- identTable : table des identificateurs
- name : nom du paramètre / de la variable à tester
Retour : None
Auteurs :
- Sébastien HERT
- Dejan PARIS
"""
for e in identTable[::-1] :
if e[0] == name :
if e[2] == "boolean" :
sys.exit("Erreur : l'argument " + name + " de get() ne peut pas être un booléen\n")
def checkType(identTable, name, scope, type):
"""
Description : Vérifie que le paramètre / la variable "name" est utilisée conformément à son type.
Paramètres :
- identTable : table des identificateurs
- name : nom du paramètre / de la variable à tester
- type : type supposé du paramètre / de la variable
Retour : None
Appelle :
- getType
Auteurs :
- Sébastien HERT
- Dejan PARIS
"""
expectedType = getType(identTable, name, scope)
if expectedType != type :
sys.exit("Erreur : " + name + " est déclaré comme " + translate(expectedType) + ", mais utilisé comme " + translate(type) + " !\n")
def checkReturnType(identTable, scope, type):
"""
Description : Vérifie que "return" est utilisé dans une fonction, et que le type du retour est correct.
Paramètres :
- identTable : table des identificateurs
- scope : niveau d'indentation de la fonction
- type : type supposé du retour
Retour : None
Auteur :
- Dejan PARIS
"""
low_scope = scope
for e in identTable[::-1] :
if e[1] < low_scope :
low_scope = e[1]
if e[1] == low_scope and e[3] != "null":
if e[3] != type :
sys.exit("Erreur : la fonction " + e[0] + " doit retourner un " + translate(e[3]) + " mais retourne un " + translate(type) + " !\n")
return
sys.exit("Erreur : la commande 'return' est utilisée en dehors d'une fonction !\n")
def checkDoubleDeclaOp(identTable, name):
"""
    Description: Checks that the declaration of a parameter / variable is not a duplicate
    Parameters:
    - identTable: identifier table
    - name: name of the parameter / variable to check
    Return: None
    Authors:
- Sébastien HERT
"""
for e in identTable[::-1] :
if e[0] == name :
sys.exit("Erreur : " + name + " est déclaré plusieurs fois\n")
def checkDoubleDeclaVar(identTable, name, scope):
"""
    Description: Checks that the declaration of a parameter / variable is not a duplicate
    Parameters:
    - identTable: identifier table
    - name: name of the parameter / variable to check
    - scope: scope of the variable / parameter
    Return: None
    Authors:
- Sébastien HERT
"""
low_scope = scope
for e in identTable[::-1] :
if e[1] < low_scope :
low_scope = e[1]
if e[1] == low_scope :
if e[0] == name :
sys.exit("Erreur : " + name + " est déclaré plusieurs fois\n")
def checkNoDeclaOp(identTable, name):
"""
    Description: Checks that a procedure / function has been declared
    Parameters:
    - identTable: identifier table
    - name: name of the procedure / function to check
    Return: None
    Authors:
- Sébastien HERT
"""
defined = False
for e in identTable[::-1] :
if e[0] == name :
defined = True
if not defined :
sys.exit("Erreur : " + name + " n'est pas déclaré\n")
def checkNoDeclaVar(identTable, name, scope):
"""
    Description: Checks that a parameter / variable has been declared
    Parameters:
    - identTable: identifier table
    - name: name of the parameter / variable to check
    - scope: scope of the variable / parameter
    Return: None
    Authors:
- Sébastien HERT
"""
defined = False
low_scope = scope
for e in identTable[::-1] :
if e[1] < low_scope :
low_scope = e[1]
if e[1] == low_scope :
if e[0] == name :
defined = True
if not defined :
sys.exit("Erreur : " + name + " n'est pas déclaré\n")
def getType(identTable, name, scope):
"""
    Description: Returns the type of a variable / parameter if it exists
    Parameters:
    - identTable: identifier table
    - name: name of the parameter / variable to check
    - scope: scope of the variable / parameter
    Return:
    - e[2]: type of the variable / parameter recorded in identTable
    Calls:
    - checkNoDeclaVar
    Authors:
- Dejan PARIS
"""
low_scope = scope
for e in identTable[::-1] :
if e[1] < low_scope :
low_scope = e[1]
if e[1] == low_scope :
if e[0] == name :
return e[2]
checkNoDeclaVar(identTable, name, scope)
def translate(type):
"""
    Description: Translates type names into French for error messages.
    Parameters:
    - type: type name to translate
    Return:
    - the French translation
    Authors:
- Dejan PARIS
"""
if type == "integer" :
return "entier"
if type == "boolean" :
return "booléen"
|
Darius1325/Project_compil
|
src/erreur.py
|
erreur.py
|
py
| 6,261 |
python
|
fr
|
code
| null |
github-code
|
6
|
39784628011
|
from unittest.mock import Mock
import pytest
from juju.action import Action
from juju_verify.utils.action import data_from_action
@pytest.mark.parametrize(
"data, key, exp_value",
[
({"results": {"host": "compute.0", "test": "test"}}, "host", "compute.0"),
({"results": {"test": "test"}}, "host", "default"),
({"results": {"ids": "[1, 2, 3]", "test": "test"}}, "ids", "[1, 2, 3]"),
({"test": "test"}, "host", "default"),
],
)
def test_data_from_action(data, key, exp_value):
"""Test helper function that parses data from Action.data.results dict."""
action = Mock(spec_set=Action)
action.data = data
output = data_from_action(action, key, "default")
assert output == exp_value
|
canonical/juju-verify
|
tests/unit/utils/test_action.py
|
test_action.py
|
py
| 746 |
python
|
en
|
code
| 2 |
github-code
|
6
|
32094636352
|
T = int(input())  # number of test cases
print(T)
for t in range(1, T+1):
    N = int(input())  # number of input lines for this case
print(N)
for s in range(1, N+1):
numbers = list(map(int,input().split()))
for i in numbers:
print(i, end=' ')
print('')
|
doll2gom/TIL
|
KDT/week3/01.11/practice/06.py
|
06.py
|
py
| 277 |
python
|
ko
|
code
| 2 |
github-code
|
6
|
3126066655
|
from unittest import TestCase
from player import Player
from item import Item
from direction import Direction
from location import Location
from game import Game
class TestPlayer(TestCase):
def setUp(self):
self.north = Direction('north')
self.south = Direction('south')
self.west = Direction('west')
self.pool_table = Item('pool table', 'A felt lined pool table', 100, 10)
self.room1 = Location("Ballroom", "A well furnished ballroom")
self.room1.add_item(self.pool_table)
self.tv = Item('TV', 'The family television', 10, 50)
self.couch = Item('couch', 'A comfy couch', 20, 100)
self.room2 = Location("Family Room", "A well furnished family room")
self.room2.add_item(self.couch)
self.room2.add_item(self.tv)
self.room1.add_exit(self.north, self.room2)
self.room2.add_exit(self.south, self.room1)
self.player = Player(location=self.room1, game=Game())
def test_go_success(self):
self.assertEqual(self.player.location, self.room1)
r = self.player.go(self.north)
self.assertTrue(r.success)
self.assertEqual(self.player.location, self.room2)
def test_go_failure(self):
self.assertEqual(self.player.location, self.room1)
r = self.player.go(self.south)
self.assertFalse(r.success)
self.assertEqual(self.player.location, self.room1)
def test_get_success(self):
self.player.location.add_item(self.tv)
self.assertFalse(self.tv.id in self.player.inventory.items)
r = self.player.get(self.tv)
self.assertTrue(r.success)
self.assertTrue(self.tv.id in self.player.inventory.items)
def test_get_failure_item_too_heavy(self):
self.assertFalse(self.pool_table in self.player.inventory.items)
self.player.inventory.capacity = self.pool_table.size - 1
r = self.player.get(self.pool_table)
self.assertFalse(r.success)
self.assertFalse(self.pool_table in self.player.inventory.items)
def test_get_failure_item_not_here(self):
self.assertFalse(self.tv in self.player.inventory.items)
r = self.player.get(self.tv)
self.assertFalse(r.success)
self.assertFalse(self.tv in self.player.inventory.items)
def test_drop_success(self):
self.player.inventory.add_item(self.tv)
self.player.location.remove_item(self.tv)
r = self.player.drop(self.tv)
self.assertTrue(r.success)
self.assertFalse(self.tv.id in self.player.inventory.items)
self.assertTrue(self.tv.id in self.player.location.items)
def test_drop_failure_do_not_have(self):
r = self.player.drop(self.tv)
self.assertFalse(r.success)
def test_look_at_location(self):
r = self.player.look()
self.assertEqual(str(r),
"\n----Ballroom----\n\n"
+ "A well furnished ballroom\n\n"
+ "Exits lead north\n"
+ "There is a pool table here")
def test_check_inventory_empty(self):
r = self.player.list_inventory()
self.assertEqual(str(r), "You're not carrying anything")
def test_check_inventory_items(self):
self.player.inventory.add_item(self.tv)
r = self.player.list_inventory()
self.assertEqual(str(r), "You are carrying:\n\tThe family television")
|
ccastiglione/adventure
|
test_player.py
|
test_player.py
|
py
| 3,437 |
python
|
en
|
code
| 0 |
github-code
|
6
|
12811416970
|
# plus, minus, multiply, divide
import operator
d = {
'plus': operator.add,
'minus': operator.sub,
'multiply': operator.mul,
'divide': operator.truediv
}
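# Expected input format: "<int> <operation> <int>", e.g. "40 plus 2" prints 42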
inp = input().strip().split(' ')
res = d[inp[1]](int(inp[0]), int(inp[2]))
print(res)
|
sergeymong/Python
|
Stepik Python tasks/Math interp.py
|
Math interp.py
|
py
| 285 |
python
|
en
|
code
| 0 |
github-code
|
6
|
5061101951
|
from ctypes import c_int, create_string_buffer
import json
import platform
if platform.system() == "Linux" :
from ctypes import cdll
else :
from ctypes import windll
ID_TIPO_COMPROBANTE_TIQUET = c_int( 1 ).value # "83" Tique
ID_TIPO_COMPROBANTE_TIQUE_FACTURA = c_int( 2 ).value # "81" Tique Factura A, "82" Tique Factura B, "111" Tique Factura C, "118" Tique Factura M
ID_TIPO_COMPROBANTE_TIQUE_NOTA_DE_CREDITO = c_int( 3 ).value # "110" Tique Nota de Credito, "112" Tique Nota de Credito A, "113" Tique Nota de Credito B, "114" Tique Nota de Credito C, "119" Tique Nota de Credito M
ID_TIPO_COMPROBANTE_TIQUE_NOTA_DE_DEBITO = c_int( 4 ).value # "115" Tique Nota de Debito A, "116" Tique Nota de Debito B, "117" Tique Nota de Debito C, "120" Tique Nota de Debito M
ID_TIPO_COMPROBANTE_NO_FISCAL = c_int( 21 ).value
ID_TIPO_DOCUMENTO_NINGUNO = c_int( 0 ).value
ID_TIPO_DOCUMENTO_DNI = c_int( 1 ).value
ID_TIPO_DOCUMENTO_CUIL = c_int( 2 ).value
ID_TIPO_DOCUMENTO_CUIT = c_int( 3 ).value
ID_TIPO_DOCUMENTO_CEDULA_IDENTIDAD = c_int( 4 ).value
ID_TIPO_DOCUMENTO_PASAPORTE = c_int( 5 ).value
ID_TIPO_DOCUMENTO_LIB_CIVICA = c_int( 6 ).value
ID_TIPO_DOCUMENTO_LIB_ENROLAMIENTO = c_int( 7 ).value
ID_RESPONSABILIDAD_IVA_NINGUNO = c_int( 0 ).value
ID_RESPONSABILIDAD_IVA_RESPONSABLE_INSCRIPTO = c_int( 1 ).value
ID_RESPONSABILIDAD_IVA_NO_RESPONSABLE = c_int( 3 ).value
ID_RESPONSABILIDAD_IVA_MONOTRIBUTISTA = c_int( 4 ).value
ID_RESPONSABILIDAD_IVA_CONSUMIDOR_FINAL = c_int( 5 ).value
ID_RESPONSABILIDAD_IVA_EXENTO = c_int( 6 ).value
ID_RESPONSABILIDAD_IVA_NO_CATEGORIZADO = c_int( 7 ).value
ID_RESPONSABILIDAD_IVA_MONOTRIBUTISTA_SOCIAL = c_int( 8 ).value
ID_RESPONSABILIDAD_IVA_CONTRIBUYENTE_EVENTUAL = c_int( 9 ).value
ID_RESPONSABILIDAD_IVA_CONTRIBUYENTE_EVENTUAL_SOCIAL = c_int( 10 ).value
ID_RESPONSABILIDAD_IVA_MONOTRIBUTO_INDEPENDIENTE_PROMOVIDO = c_int( 11 ).value
ID_MODIFICADOR_AGREGAR_ITEM = c_int( 200 ).value
ID_MODIFICADOR_ANULAR_ITEM = c_int( 201 ).value
ID_MODIFICADOR_AGREGAR_ITEM_RETORNO_ENVASES = c_int( 202 ).value
ID_MODIFICADOR_ANULAR_ITEM_RETORNO_ENVASES = c_int( 203 ).value
ID_MODIFICADOR_AGREGAR_ITEM_BONIFICACION = c_int( 204 ).value
ID_MODIFICADOR_ANULAR_ITEM_BONIFICACION = c_int( 205 ).value
ID_MODIFICADOR_AGREGAR_ITEM_DESCUENTO = c_int( 206 ).value
ID_MODIFICADOR_ANULAR_ITEM_DESCUENTO = c_int( 207 ).value
ID_MODIFICADOR_AGREGAR_ITEM_ANTICIPO = c_int( 208 ).value
ID_MODIFICADOR_ANULAR_ITEM_ANTICIPO = c_int( 209 ).value
ID_MODIFICADOR_AGREGAR_ITEM_DESCUENTO_ANTICIPO = c_int( 210 ).value
ID_MODIFICADOR_ANULAR_ITEM_DESCUENTO_ANTICIPO = c_int( 211 ).value
ID_MODIFICADOR_DESCUENTO = c_int( 400 ).value
ID_MODIFICADOR_AJUSTE = c_int( 401 ).value
ID_MODIFICADOR_AJUSTE_NEGATIVO = c_int( 402 ).value
ID_MODIFICADOR_AUDITORIA_DETALLADA = c_int( 500 ).value
ID_MODIFICADOR_AUDITORIA_RESUMIDA = c_int( 501 ).value
ID_MODIFICADOR_AGREGAR = ID_MODIFICADOR_AGREGAR_ITEM
ID_MODIFICADOR_ANULAR = ID_MODIFICADOR_ANULAR_ITEM
ID_TASA_IVA_NINGUNO = c_int( 0 ).value
ID_TASA_IVA_EXENTO = c_int( 1 ).value
ID_TASA_IVA_10_50 = c_int( 4 ).value
ID_TASA_IVA_21_00 = c_int( 5 ).value
ID_TASA_IVA_27_00 = c_int( 6 ).value
ID_IMPUESTO_NINGUNO = c_int( 0 ).value
ID_IMPUESTO_INTERNO_FIJO = c_int( 1 ).value
ID_IMPUESTO_INTERNO_PORCENTUAL = c_int( 2 ).value
ID_CODIGO_INTERNO = c_int( 1 ).value
ID_CODIGO_MATRIX = c_int( 2 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_SIN_DESCRIPCION = c_int( 0 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_KILOGRAMO = c_int( 1 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_METROS = c_int( 2 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_METRO_CUADRADO = c_int( 3 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_METRO_CUBICO = c_int( 4 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_LITROS = c_int( 5 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_UNIDAD = c_int( 7 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_PAR = c_int( 8 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_DOCENA = c_int( 9 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_QUILATE = c_int( 10 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MILLAR = c_int( 11 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MEGA_U_INTER_ACT_ANTIB = c_int( 12 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_UNIDAD_INT_ACT_INMUNG = c_int( 13 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_GRAMO = c_int( 14 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MILIMETRO = c_int( 15 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MILIMETRO_CUBICO = c_int( 16 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_KILOMETRO = c_int( 17 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_HECTOLITRO = c_int( 18 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MEGA_UNIDAD_INT_ACT_INMUNG = c_int( 19 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_CENTIMETRO = c_int( 20 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_KILOGRAMO_ACTIVO = c_int( 21 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_GRAMO_ACTIVO = c_int( 22 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_GRAMO_BASE = c_int( 23 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_UIACTHOR = c_int( 24 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_JGO_PQT_MAZO_NAIPES = c_int( 25 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MUIACTHOR = c_int( 26 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_CENTIMETRO_CUBICO = c_int( 27 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_UIACTANT = c_int( 28 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_TONELADA = c_int( 29 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_DECAMETRO_CUBICO = c_int( 30 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_HECTOMETRO_CUBICO = c_int( 31 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_KILOMETRO_CUBICO = c_int( 32 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MICROGRAMO = c_int( 33 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_NANOGRAMO = c_int( 34 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_PICOGRAMO = c_int( 35 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MUIACTANT = c_int( 36 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_UIACTIG = c_int( 37 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MILIGRAMO = c_int( 41 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MILILITRO = c_int( 47 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_CURIE = c_int( 48 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MILICURIE = c_int( 49 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MICROCURIE = c_int( 50 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_U_INTER_ACT_HORMONAL = c_int( 51 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MEGA_U_INTER_ACT_HORMONAL = c_int( 52 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_KILOGRAMO_BASE = c_int( 53 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_GRUESA = c_int( 54 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_MUIACTIG = c_int( 55 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_KILOGRAMO_BRUTO = c_int( 61 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_PACK = c_int( 62 ).value
AFIP_CODIGO_UNIDAD_MEDIDA_HORMA = c_int( 63 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_IMPUESTOS_NACIONALES = c_int( 1 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_IMPUESTOS_PROVINCIAL = c_int( 2 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_IMPUESTO_MUNICIPAL = c_int( 3 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_IMPUESTO_INTERNOS = c_int( 4 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_INGRESOS_BRUTOS = c_int( 5 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_PERCEPCION_DE_IVA = c_int( 6 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_PERCEPCION_DE_INGRESOS_BRUTOS = c_int( 7 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_PERCEPCION_POR_IMPUESTOS_MUNICIPALES = c_int( 8 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_OTRAS_PERCEPCIONES = c_int( 9 ).value
AFIP_CODIGO_OTROS_TRIBUTOS_OTROS = c_int( 99 ).value
AFIP_CODIGO_FORMA_DE_PAGO_CARTA_DE_CREDITO_DOCUMENTARIO = c_int( 1 ).value
AFIP_CODIGO_FORMA_DE_PAGO_CARTAS_DE_CREDITO_SIMPLE = c_int( 2 ).value
AFIP_CODIGO_FORMA_DE_PAGO_CHEQUE = c_int( 3 ).value
AFIP_CODIGO_FORMA_DE_PAGO_CHEQUES_CANCELATORIOS = c_int( 4 ).value
AFIP_CODIGO_FORMA_DE_PAGO_CREDITO_DOCUMENTARIO = c_int( 5 ).value
AFIP_CODIGO_FORMA_DE_PAGO_CUENTA_CORRIENTE = c_int( 6 ).value
AFIP_CODIGO_FORMA_DE_PAGO_DEPOSITO = c_int( 7 ).value
AFIP_CODIGO_FORMA_DE_PAGO_EFECTIVO = c_int( 8 ).value
AFIP_CODIGO_FORMA_DE_PAGO_ENDOSO_DE_CHEQUE = c_int( 9 ).value
AFIP_CODIGO_FORMA_DE_PAGO_FACTURA_DE_CREDITO = c_int( 10 ).value
AFIP_CODIGO_FORMA_DE_PAGO_GARANTIAS_BANCARIAS = c_int( 11 ).value
AFIP_CODIGO_FORMA_DE_PAGO_GIROS = c_int( 12 ).value
AFIP_CODIGO_FORMA_DE_PAGO_LETRAS_DE_CAMBIO = c_int( 13 ).value
AFIP_CODIGO_FORMA_DE_PAGO_MEDIOS_DE_PAGO_DE_COMERCIO_EXTERIOR = c_int( 14 ).value
AFIP_CODIGO_FORMA_DE_PAGO_ORDEN_DE_PAGO_DOCUMENTARIA = c_int( 15 ).value
AFIP_CODIGO_FORMA_DE_PAGO_ORDEN_DE_PAGO_SIMPLE = c_int( 16 ).value
AFIP_CODIGO_FORMA_DE_PAGO_PAGO_CONTRA_REEMBOLSO = c_int( 17 ).value
AFIP_CODIGO_FORMA_DE_PAGO_REMESA_DOCUMENTARIA = c_int( 18 ).value
AFIP_CODIGO_FORMA_DE_PAGO_REMESA_SIMPLE = c_int( 19 ).value
AFIP_CODIGO_FORMA_DE_PAGO_TARJETA_DE_CREDITO = c_int( 20 ).value
AFIP_CODIGO_FORMA_DE_PAGO_TARJETA_DE_DEBITO = c_int( 21 ).value
AFIP_CODIGO_FORMA_DE_PAGO_TICKET = c_int( 22 ).value
AFIP_CODIGO_FORMA_DE_PAGO_TRANSFERENCIA_BANCARIA = c_int( 23 ).value
AFIP_CODIGO_FORMA_DE_PAGO_TRANSFERENCIA_NO_BANCARIA = c_int( 24 ).value
AFIP_CODIGO_FORMA_DE_PAGO_OTROS_MEDIOS_DE_PAGO = c_int( 99 ).value
def cargarLibreria() :
sistema = platform.system()
if sistema == "Linux" :
return cdll.LoadLibrary("./EpsonFiscalInterface.so")
else :
if sistema == "Windows" :
return windll.LoadLibrary("./EpsonFiscalInterface.dll")
# -----------------------------------------------------------------------------
# Function: ticket
# -----------------------------------------------------------------------------
def ticket(datos_ticket):
try :
# get handle from DLL
Handle_HL = cargarLibreria()
# # connect
###Handle_HL.ConfigurarVelocidad( c_int(9600).value )
Handle_HL.ConfigurarPuerto( "0" )
ejecutarComando(Handle_HL, Handle_HL.Conectar())
# # try cancel all
ejecutarComando(Handle_HL, Handle_HL.Cancelar())
# # open
ejecutarComando(Handle_HL, Handle_HL.AbrirComprobante( ID_TIPO_COMPROBANTE_TIQUET ))
        ## At this point we also need to be able to evaluate the errors this call can raise
# # get document number
str_doc_number_max_len = 20
str_doc_number = create_string_buffer( b'\000' * str_doc_number_max_len )
error = Handle_HL.ConsultarNumeroComprobanteActual( str_doc_number, c_int(str_doc_number_max_len).value )
print("Get Doc. Number Error : "),
print(error)
print("Doc Number : "),
print(str_doc_number.value)
# # get document type
str_doc_type_max_len = 20
str_doc_type = create_string_buffer( b'\000' * str_doc_type_max_len )
print(str_doc_type)
error = Handle_HL.ConsultarTipoComprobanteActual( str_doc_type, c_int(str_doc_type_max_len).value )
print("Get Type Doc. Error : "),
print(error)
print("Doc Type : "),
print(str_doc_type.value)
# item
# imprimirItems(datos_ticket['items'], Handle_HL)
for item in datos_ticket['itemsComprobante'] :
# error = Handle_HL.ImprimirItem( ID_MODIFICADOR_AGREGAR, "Sardinas", "1", "100.1234", ID_TASA_IVA_EXENTO, ID_IMPUESTO_NINGUNO, "0", ID_CODIGO_INTERNO, "CodigoInterno4567890123456789012345678901234567890", "", AFIP_CODIGO_UNIDAD_MEDIDA_KILOGRAMO )
error = ejecutarComando(Handle_HL, Handle_HL.ImprimirItem( ID_MODIFICADOR_AGREGAR, enviar_texto(item["descripcion"]), enviar_texto(item['cantidad']), enviar_texto(item["importeOriginal"]), ID_TASA_IVA_EXENTO, ID_IMPUESTO_NINGUNO, "0", ID_CODIGO_INTERNO, enviar_texto(item["codigo"]), "", AFIP_CODIGO_UNIDAD_MEDIDA_UNIDAD))
print(str(item['cantidad'] + ' ' + item['descripcion'].ljust(40) + item['importeOriginal']))
# subtotal
ejecutarComando(Handle_HL, Handle_HL.ImprimirSubtotal())
# print(datos_ticket["total"])
print(str("IMPORTE" + " ").ljust(42) + str(datos_ticket["total"]))
# get subtotal gross amount
str_subtotal_max_len = 20
str_subtotal = create_string_buffer( b'\000' * str_subtotal_max_len )
error = Handle_HL.ConsultarSubTotalBrutoComprobanteActual( str_subtotal, c_int(str_subtotal_max_len).value )
print("Get Subtotal Gross : "),
print(error)
print("Subtotal Gross Amount : "),
print(str_subtotal.value)
        # get subtotal net amount
str_subtotal_max_len = 20
str_subtotal = create_string_buffer( b'\000' * str_subtotal_max_len )
print("como imprime:" + str(str_subtotal))
error = Handle_HL.ConsultarSubTotalNetoComprobanteActual( str_subtotal, c_int(str_subtotal_max_len).value )
print("Get Subtotal Net : "),
print(error)
print("Subtotal Net Amount : "),
print(str_subtotal.value)
# close
ejecutarComando(Handle_HL, Handle_HL.CerrarComprobante())
res = {"con_errores": 0, "descripcion": "OK", "numero": str(str_doc_number.value)[2:-1]}
except Exception as err :
res = {"con_errores": 1, "descripcion": str(err)}
finally:
ejecutarComando(Handle_HL, Handle_HL.Desconectar())
return json.dumps(res)
## Ticket data format
# ticket_str = "{'cliente': 'Martin Ramos'}"
# # , "Importe": "100.00"
# # , "Items":
# # [{ "descripcion": "Coca Cola"
# # , "importe": "120.00"}
# # ]}
def ticket_no_fiscal(datos_ticket):
try :
# get handle from DLL
Handle_HL = cargarLibreria()
# connect
###Handle_HL.ConfigurarVelocidad( c_int(9600).value )
Handle_HL.ConfigurarPuerto( "0" )
ejecutarComando(Handle_HL, Handle_HL.Conectar())
# try cancel all
ejecutarComando(Handle_HL, Handle_HL.Cancelar())
# open
ejecutarComando(Handle_HL, Handle_HL.AbrirComprobante( ID_TIPO_COMPROBANTE_NO_FISCAL ))
ejecutarComando(Handle_HL, Handle_HL.ImprimirTextoLibre(enviar_texto("Numero: " + str(datos_ticket['numero']))))
ejecutarComando(Handle_HL, Handle_HL.ImprimirTextoLibre(enviar_texto(datos_ticket['cliente'])))
imprimirItems(datos_ticket['itemsComprobante'], Handle_HL)
# subtotal
ejecutarComando(Handle_HL, Handle_HL.ImprimirTextoLibre(enviar_texto(str("IMPORTE" + " ").ljust(40) + str(datos_ticket['total']))))
# close
ejecutarComando(Handle_HL, Handle_HL.CerrarComprobante())
res = {"con_errores": 0, "descripcion": 'OK'}
except Exception as err :
res = {"con_errores": 1, "descripcion": str(err)}
finally:
ejecutarComando(Handle_HL, Handle_HL.Desconectar())
return json.dumps(res)
def enviar_texto(string) :
return string.encode('ascii')
def imprimirItems(datos_items, Handle_HL) :
for item in datos_items :
ejecutarComando(Handle_HL, Handle_HL.ImprimirTextoLibre(enviar_texto(str(item['cantidad'] + ' ' + item['descripcion'].ljust(40) + item['importeOriginal']))))
def encabezado() :
#title
print("*** Seteando Encabezado ***")
# get handle from DLL
Handle_HL = cargarLibreria()
# connect
###Handle_HL.ConfigurarVelocidad( c_int(9600).value )
Handle_HL.ConfigurarPuerto( "0")
error = Handle_HL.Conectar()
print("Connect : "),
print(hex(error))
# try cancel all
error = Handle_HL.Cancelar()
print("Cancel : "),
print(hex(error))
error = Handle_HL.EstablecerEncabezado(1, "Universidad Nacional de Quilmes")
print("Cancel : "),
print(hex(error))
# disconect
error = Handle_HL.Desconectar()
print("Disconect : "),
print(error)
def descargar_reportes() :
#title
print("*** Seteando Encabezado ***")
# get handle from DLL
Handle_HL = cargarLibreria()
# connect
###Handle_HL.ConfigurarVelocidad( c_int(9600).value )
Handle_HL.ConfigurarPuerto( "0" )
error = Handle_HL.Conectar()
print("Connect : "),
print(hex(error))
# try cancel all
error = Handle_HL.Cancelar()
print("Cancel : "),
print(hex(error))
error = Handle_HL.Descargar(enviar_texto("201021"), enviar_texto("211021"), "downloads")
print("Descargando Auditoria : "),
print(hex(error))
# disconect
error = Handle_HL.Desconectar()
print("Disconect : "),
print(error)
def cierreZ():
#title
print("*** Haciendo Cierre Z ***")
try :
        # get handle from .so
Handle_HL = cargarLibreria()
# connect
###Handle_HL.ConfigurarVelocidad( c_int(9600).value )
error = Handle_HL.ConfigurarPuerto( "0" )
ejecutarComando(Handle_HL, Handle_HL.Conectar())
# try cancel all
ejecutarComando(Handle_HL, Handle_HL.Cancelar())
ejecutarComando(Handle_HL, Handle_HL.ImprimirCierreZ())
res = {"con_errores": 0, "descripcion": 'OK'}
except Exception as err :
res = {"con_errores": 1, "descripcion": str(err)}
finally:
ejecutarComando(Handle_HL, Handle_HL.Desconectar())
return json.dumps(res)
def cierreX():
print("*** Haciendo Cierre X ***")
try :
        # get handle from .so
Handle_HL = cargarLibreria()
# connect
error = Handle_HL.ConfigurarPuerto( "0" )
ejecutarComando(Handle_HL, Handle_HL.Conectar())
# try cancel all
ejecutarComando(Handle_HL, Handle_HL.Cancelar())
ejecutarComando(Handle_HL, Handle_HL.ImprimirCierreX())
res = {"con_errores": 0, "descripcion": 'OK'}
except Exception as err :
res = {"con_errores": 1, "descripcion": str(err)}
finally:
ejecutarComando(Handle_HL, Handle_HL.Desconectar())
return json.dumps(res)
def ejecutarComando(Handle_HL, comando) :
    ### Success return codes: 0x0, 0x05000024 (83886116) and 0x0500002F (83886127)
    if not (comando == 0 or comando == 83886116 or comando == 83886127) :
raise ValueError(verificarError(Handle_HL, comando))
def verificarError(Handle_HL, error) :
descripcion_error = create_string_buffer(b'\000' * 500)
error = Handle_HL.ConsultarDescripcionDeError(error, descripcion_error, c_int(500).value)
return str(descripcion_error.value)[1:]
def reportes() :
print("*** Reportes ***")
# get handle from DLL
Handle_HL = cargarLibreria()
# connect
###Handle_HL.ConfigurarVelocidad( c_int(9600).value )
Handle_HL.ConfigurarPuerto( "0" )
error = Handle_HL.Conectar()
print("Connect : "),
print(hex(error))
# try cancel all
##error = Handle_HL.Cancelar()
print("Cancel : "),
print(hex(error))
error = Handle_HL.EnviarComando( "0970|0000|1|3")
print("Reporte : "),
print(hex(error))
def pruebaTicket(datos_ticket):
try :
Handle_HL = cargarLibreria()
for item in datos_ticket['itemsComprobante'] :
print(str(item['cantidad'] + ' ' + item['descripcion'].ljust(40) + item['importeOriginal']))
print(str("IMPORTE" + " ").ljust(42) + str(datos_ticket["total"]))
raise ValueError("Esto es un error de prueba")
res = {"con_errores": 0, "descripcion": "OK"}
except Exception as err :
res = {"con_errores": 1, "descripcion": str(err)}
finally:
return json.dumps(res)
# # -----------------------------------------------------------------------------
# # main
# # -----------------------------------------------------------------------------
# print(" ")
# print(" ")
# print("----Basic Test")
# # dll_version()
# # dll_ll_test_comm()
# # equipment_machine_version()
# # print_X_and_Z()
# # set_and_get_header_trailer()
# # set_and_get_datetime()
# # cancel_all()
# print(" ")
# print(" ")
# print("----Testing Sales")
# ##encabezado()
# # ticket_str = '{"cliente": "Martin Ramos", "items": [{"cantidad":"2", "codigo":"123456789", "descripcion": "coca cola", "importe": "120.00"}], "total": "240"}'
# # ticket(ticket_str)
# # ticket_no_fiscal(ticket_str)
# # cierreZ()
# # cierreX()
# # descargar_reportes()
# #prueba_json(ticket_str)
# cierreX()
# # ticket_from_ticket_invoice()
# # ticket_invoice()
# # ticket_invoice_B()
# # ticket_debit_note()
# # ticket_debit_note_B()
# # ticket_credit_note()
# # ticket_credit_note_B()
# print(" ")
# print(" ")
# print("----Test Close Day")
# # audit()
# # download()
|
martin-ramos/epsonfiscalproxy
|
epsonproxy.py
|
epsonproxy.py
|
py
| 21,977 |
python
|
es
|
code
| 0 |
github-code
|
6
|
21681869620
|
from datetime import date
from time import sleep
ano = int(input('Which year do you want to check? Enter 0 to check the current year: '))
print('Processing...')
sleep(2)
###################################################################################
if ano == 0:
    ano = date.today().year
if (ano % 4 == 0) and (ano % 100 != 0) or (ano % 400 == 0):
    print('The year {} IS a leap year.'.format(ano))
else:
    print('The year {} is NOT a leap year.'.format(ano))
|
VitorFidelis/Exercicios-Python
|
Desafio032.py
|
Desafio032.py
|
py
| 448 |
python
|
gl
|
code
| 2 |
github-code
|
6
|
72532883389
|
"""
Multi-scale rabbit cardiac electrophysiology models
Rabbit Soltis-Saucerman model with full b-AR signalling (Rabbit SS 1D cardiac)
$ cd examples
$ make install-ci
$ make .env
SEE https://sparc.science/datasets/4?type=dataset
"""
import os
import sys
import time
from pathlib import Path
from time import sleep
from typing import Optional
import osparc
from dotenv import load_dotenv
from osparc.models import File, JobStatus
assert osparc.__version__ == "0.4.3"
current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
data_dir = current_dir / "data_rabbit_cardiac"
load_dotenv()
cfg = osparc.Configuration(
host=os.environ.get("OSPARC_API_URL", "http://127.0.0.1:8006"),
username=os.environ["OSPARC_API_KEY"],
password=os.environ["OSPARC_API_SECRET"],
)
print("Entrypoint", cfg.host)
with osparc.ApiClient(cfg) as api_client:
# Upload init states file.
files_api = osparc.FilesApi(api_client)
initial_wtstates_file = files_api.upload_file(
str(data_dir / "initial_WTstates.txt")
)
# Create our simulation.
solvers_api = osparc.SolversApi(api_client)
solver = solvers_api.get_solver_release(
"simcore/services/comp/rabbit-ss-1d-cardiac-model", "1.0.0"
)
# SEE data_rabbit_cardiac/ss1d_meta.json::inputs
job = solvers_api.create_job(
solver.id,
solver.version,
osparc.JobInputs(
{
"Na": 0,
"GKr": 1,
"TotalSimulationTime": 50,
"TargetHeartRatePhase1": 60,
"TargetHeartRatePhase2": 150,
"TargetHeartRatePhase3": 60,
"cAMKII": "WT",
"tissue_size_tw": 165,
"tissue_size_tl": 165,
"Homogeneity": "homogeneous",
"num_threads": 4,
"initialWTStates": initial_wtstates_file,
}
),
)
print("Job created", job)
# Start our simulation.
status = solvers_api.start_job(solver.id, solver.version, job.id)
start_t = time.perf_counter()
# Check the status of our simulation until it has completed.
while True:
status = solvers_api.inspect_job(solver.id, solver.version, job.id)
print(
f">>> Progress: {status.progress}% ",
f"[elapsed:{time.perf_counter() - start_t:4.2f}s]...",
flush=True,
)
if status.progress == 100:
break
sleep(1)
# Retrieve our simulation outputs.
print("---------------------------------------")
last_status: JobStatus = solvers_api.inspect_job(solver.id, solver.version, job.id)
print(">>> What is the status?", last_status)
outputs = solvers_api.get_job_outputs(solver.id, solver.version, job.id)
# SEE data_rabbit_cardiac/ss1d_meta.json::outputs
for output_name, result in outputs.results.items():
print(f">>> {output_name} = {result}")
# Retrieve our simulation results.
print("---------------------------------------")
result: Optional[File]
for output_name, result in outputs.results.items():
if result is None:
print(
"Can't retrieve our simulation results {output_name}...?!",
"Failed ?",
last_status.state,
"Finished ?",
last_status.progress == 100 or not last_status.stopped_at,
)
else:
# Print out the id of our simulation results file (?).
print("---------------------------------------")
print(">>> ", result.id)
# Download our simulation results file (?).
download_path: str = files_api.download_file(result.id)
print("Downloaded to", download_path)
print("Content-Type: ", result.content_type)
if result.content_type == "text/plain":
print("Result:", Path(download_path).read_text()[:100])
print("Status: ", Path(download_path).stat())
# List all the files that are available.
print("---------------------------------------")
print(files_api.list_files())
|
ITISFoundation/osparc-simcore
|
tests/public-api/examples/rabbit_cardiac_ss1d.py
|
rabbit_cardiac_ss1d.py
|
py
| 4,184 |
python
|
en
|
code
| 35 |
github-code
|
6
|
73239039229
|
""" Program to check the given word is palindrome or not """
# check using reverse method
# def is_palindrome(str1):
# reverse_string = list(reversed(str1))
# if list(str1) == reverse_string:
# return True
# else:
# return False
def is_palindrome(str1):
"""
Function to check palindrome
Parameter:
str1(string): given string
Return:
Boolean: True or False
"""
length = len(str1)
middle = length // 2
for i in range(middle):
if str1[i] != str1[length-i-1]:
return False
return True
def main():
""" Main Function """
# user input
user_input = input('Enter the string: ')
if is_palindrome(user_input):
        print(user_input, 'is a palindrome.')
else:
        print(user_input, 'is not a palindrome.')
if __name__ == '__main__':
main()
|
danny237/Python-Assignment2
|
palindrome.py
|
palindrome.py
|
py
| 883 |
python
|
en
|
code
| 0 |
github-code
|
6
|
3013315354
|
import time
import platform
import cpuinfo
os_version = platform.system()
print('CPU: ' + cpuinfo.get_cpu_info().get('brand_raw', "Unknown"))
print('Arch: ' + cpuinfo.get_cpu_info().get('arch_string_raw', "Unknown"))
print(f'OS: {str(os_version)}')
print('\nBenchmarking: \n')
start_benchmark = 10000  # iterations per attempt; change this if you like (e.g. 1000, 5000)
repeat_benchmark = 10  # number of attempts; change this if you like (e.g. 3, 5)
average_benchmark = 0
for _ in range(0,repeat_benchmark):
start = time.time()
for _ in range(0,start_benchmark):
for x in range(1,1000):
3.141592 * 2**x
for x in range(1,10000):
float(x) / 3.141592
for x in range(1,10000):
3.141592 / x
end = time.time()
duration = (end - start)
duration = round(duration, 3)
average_benchmark += duration
print(f'Time: {str(duration)}s')
average_benchmark = round(average_benchmark / repeat_benchmark, 3)
print(f'Average (from {repeat_benchmark} repeats): {average_benchmark}s')
|
LopeKinz/raspberry_debug
|
test.py
|
test.py
|
py
| 1,056 |
python
|
en
|
code
| 0 |
github-code
|
6
|
70097358589
|
import random
class Move:
def __init__(self, name, owner, damage, chance, pp):
self.owner = owner
self.name = name
self.damage = damage
self.chance = chance
self.pp = pp
def attack(self, consolemon):
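        # A hit requires the roll to beat the miss chance and at least one PP remaining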
        if random.randint(0, 100) < self.chance and self.pp >= 1:
consolemon.hp -= self.damage
print(self.owner.name + " has used " + self.name + "! Dealt " +
str(self.damage) + " damage.")
else:
print(self.owner.name + " has missed " + self.name + "!")
self.pp -= 1
|
IanTheBean/Consolemon
|
src/classes/moves.py
|
moves.py
|
py
| 588 |
python
|
en
|
code
| 0 |
github-code
|
6
|
39688256184
|
# LC Contest 170
# Time: O(n+m), n=len(arr), m= len(queries)
# Space: O(n), O(1) excluding output
from typing import List

class Solution:
def xorQueries(self, arr: List[int], queries: List[List[int]]) -> List[int]:
cum_arr = [0]
for i in range(len(arr)):
cum_arr.append(cum_arr[i]^arr[i])
# print(cum_arr)
output = []
for l,r in queries:
output.append(cum_arr[r+1]^cum_arr[l])
return output
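# Example: arr = [1, 3, 4, 8] gives cum_arr = [0, 1, 2, 6, 14];
# query [0, 1] returns cum_arr[2] ^ cum_arr[0] = 2 (i.e. 1 ^ 3).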
|
cmattey/leetcode_problems
|
Python/lc_1310_xor_queries_subarray.py
|
lc_1310_xor_queries_subarray.py
|
py
| 448 |
python
|
en
|
code
| 4 |
github-code
|
6
|
21836856619
|
import sys
sys.stdin = open("../inputdata/swea_5189.txt", "r")
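# Depth-first search over all orderings of nodes 1..n-1, closing the cycle back to node 0:
# a brute-force minimum-cost Hamiltonian cycle over the energy matrix.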
def addEnergy(start, d, total):
if d == n-1:
total += energies[start][0]
res_list.append(total)
else:
for i in range(1, n):
if not visited[i]:
total += energies[start][i]
visited[i] = 1
addEnergy(i, d+1, total)
visited[i] = 0
total -= energies[start][i]
for test in range(int(input())):
n = int(input())
energies = [list(map(int, input().split())) for _ in range(n)]
visited = [0] * n
res_list = []
addEnergy(0, 0, 0)
print('#{} {}'.format(test+1, min(res_list)))
|
liza0525/algorithm-study
|
SWEA/swea_5189.py
|
swea_5189.py
|
py
| 680 |
python
|
en
|
code
| 0 |
github-code
|
6
|
30367005261
|
from traits.api import Any, Enum, Int, Property, Union
from enable.api import NativeScrollBar
from .chaco_traits import Optional
class PlotScrollBar(NativeScrollBar):
"""
A ScrollBar that can be wired up to anything with an xrange or yrange
and which can be attached to a plot container.
"""
# The axis corresponding to this scrollbar.
axis = Enum("index", "value")
# The renderer or Plot to attach this scrollbar to. By default, this
# is just self.component.
plot = Property
    # The mapper associated with the scrollbar. By default, this is the
# mapper on **plot** that corresponds to **axis**.
mapper = Property
# ------------------------------------------------------------------------
# Private traits
# ------------------------------------------------------------------------
# The value of the override plot to use, if any. If None, then uses
# self.component.
_plot = Any()
# The value of the override mapper to use, if any. If None, then uses the
# mapper on self.component.
_mapper = Any()
# Stores the index (0 or 1) corresponding to self.axis
_axis_index = Optional(Int)
# ----------------------------------------------------------------------
# Public methods
# ----------------------------------------------------------------------
def force_data_update(self):
"""This forces the scrollbar to recompute its range bounds. This
should be used if datasources are changed out on the range, or if
the data ranges on existing datasources of the range are changed.
"""
self._handle_dataspace_update()
def overlay(self, component, gc, view_bounds=None, mode="default"):
self.do_layout()
self._draw_mainlayer(gc, view_bounds, "default")
def _draw_plot(self, gc, view_bounds=None, mode="default"):
self._draw_mainlayer(gc, view_bounds, "default")
def _do_layout(self):
if getattr(self.plot, "_layout_needed", False):
self.plot.do_layout()
axis = self._determine_axis()
low, high = self.mapper.screen_bounds
self.bounds[axis] = high - low
self.position[axis] = low
self._widget_moved = True
def _get_abs_coords(self, x, y):
if self.container is not None:
return self.container.get_absolute_coords(x, y)
else:
return self.component.get_absolute_coords(x, y)
# ----------------------------------------------------------------------
# Scrollbar
# ----------------------------------------------------------------------
def _handle_dataspace_update(self):
        # This method responds to changes from the dataspace side, e.g.
# a change in the range bounds or the data bounds of the datasource.
# Get the current datasource bounds
range = self.mapper.range
bounds_list = [
source.get_bounds()
for source in range.sources
if source.get_size() > 0
]
mins, maxes = zip(*bounds_list)
dmin = min(mins)
dmax = max(maxes)
view = float(range.high - range.low)
# Take into account the range's current low/high and the data bounds
# to compute the total range
totalmin = min(range.low, dmin)
totalmax = max(range.high, dmax)
# Compute the size available for the scrollbar to scroll in
scrollrange = (totalmax - totalmin) - view
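        # Aim for roughly 20 scroll ticks across the scrollable range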
if round(scrollrange / 20.0) > 0.0:
ticksize = scrollrange / round(scrollrange / 20.0)
else:
ticksize = 1
        new_range = (totalmin, totalmax, view, ticksize)
        self.trait_setq(
            range=new_range,
scroll_position=max(
min(self.scroll_position, totalmax - view), totalmin
),
)
self._scroll_updated = True
self.request_redraw()
def _scroll_position_changed(self):
super()._scroll_position_changed()
# Notify our range that we've changed
range = self.mapper.range
view_width = range.high - range.low
new_scroll_pos = self.scroll_position
range.set_bounds(new_scroll_pos, new_scroll_pos + view_width)
# ----------------------------------------------------------------------
# Event listeners
# ----------------------------------------------------------------------
def _component_changed(self, old, new):
# Check to see if we're currently overriding the value of self.component
# in self.plot. If so, then don't change the event listeners.
if self._plot is not None:
return
if old is not None:
self._modify_plot_listeners(old, "detach")
if new is not None:
self._modify_plot_listeners(new, "attach")
self._update_mapper_listeners()
def __plot_changed(self, old, new):
if old is not None:
self._modify_plot_listeners(old, "detach")
elif self.component is not None:
# Remove listeners from self.component, if it exists
self._modify_plot_listeners(self.component, "detach")
if new is not None:
self._modify_plot_listeners(new, "attach")
self._update_mapper_listeners()
elif self.component is not None:
self._modify_plot_listeners(self.component, "attach")
self._update_mapper_listeners()
def _modify_plot_listeners(self, plot, action="attach"):
if action == "attach":
remove = False
else:
remove = True
plot.observe(
self._component_bounds_handler, "bounds.items", remove=remove
)
plot.observe(
self._component_pos_handler, "position.items", remove=remove
)
def _component_bounds_handler(self, event):
self._handle_dataspace_update()
self._widget_moved = True
def _component_pos_handler(self, event):
self._handle_dataspace_update()
self._widget_moved = True
def _update_mapper_listeners(self):
# if self._mapper
pass
def _handle_mapper_updated(self):
self._handle_dataspace_update()
# ------------------------------------------------------------------------
# Property getter/setters
# ------------------------------------------------------------------------
def _get_plot(self):
if self._plot is not None:
return self._plot
else:
return self.component
def _set_plot(self, val):
self._plot = val
def _get_mapper(self):
if self._mapper is not None:
return self._mapper
else:
return getattr(self.plot, self.axis + "_mapper")
def _set_mapper(self, new_mapper):
self._mapper = new_mapper
def _get_axis_index(self):
if self._axis_index is None:
return self._determine_axis()
else:
return self._axis_index
def _set_axis_index(self, val):
self._axis_index = val
# ------------------------------------------------------------------------
# Private methods
# ------------------------------------------------------------------------
def _get_axis_coord(self, event, axis="index"):
"""Returns the coordinate of the event along the axis of interest
to this tool (or along the orthogonal axis, if axis="value").
"""
event_pos = (event.x, event.y)
if axis == "index":
return event_pos[self.axis_index]
else:
return event_pos[1 - self.axis_index]
def _determine_axis(self):
"""Determines whether the index of the coordinate along this tool's
axis of interest is the first or second element of an (x,y) coordinate
tuple.
This method is only called if self._axis_index hasn't been set (or is
None).
"""
if self.axis == "index":
if self.plot.orientation == "h":
return 0
else:
return 1
else: # self.axis == "value"
if self.plot.orientation == "h":
return 1
else:
return 0
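# Hedged usage sketch (an addition): chaco's scrollbar demo attaches this
# overlay to a Plot along one axis roughly as below; apart from the scrollbar
# kwargs, every name here is illustrative.
if __name__ == "__main__":
    from numpy import linspace, sin
    from chaco.api import ArrayPlotData, Plot
    x = linspace(0, 10, 200)
    plot = Plot(ArrayPlotData(x=x, y=sin(x)))
    plot.plot(("x", "y"), type="line")
    scrollbar = PlotScrollBar(component=plot, axis="index", resizable="h", height=15)
    plot.overlays.append(scrollbar)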
|
enthought/chaco
|
chaco/plotscrollbar.py
|
plotscrollbar.py
|
py
| 8,287 |
python
|
en
|
code
| 286 |
github-code
|
6
|
25090333654
|
import torch
import torch.nn as nn
import torch.nn.functional as F
# NOTE: `weights_init` is called below but was never defined or imported in
# this file; a standard DCGAN-style initializer (an assumption) is supplied so
# the module is runnable.
def weights_init(m):
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm') != -1:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0)
class Model(nn.Module):
def __init__(self, zSize=10):
super(Model, self).__init__()
self.zSize = zSize
def create(self, opts):
        self.scale_factor = int(8 / (512 / opts.imsize))  # must be an int: it sizes the Linear/Unflatten layers below
self.nLatentDims = opts.nLatentDims
self.nChIn = opts.nChIn
self.nChOut = opts.nChOut
self.nOther = opts.nOther
self.dropoutRate = opts.dropoutRate
self.opts = opts
self.create_autoencoder()
self.create_adversary()
self.assemble()
def create_autoencoder(self):
scale = self.scale_factor
# Create encoder (generator)
self.encoder = nn.Sequential(
nn.Conv2d(self.nChIn, 64, 4, 2, 1),
nn.BatchNorm2d(64),
nn.PReLU(),
nn.Conv2d(64, 128, 4, 2, 1),
nn.BatchNorm2d(128),
nn.PReLU(),
nn.Conv2d(128, 256, 4, 2, 1),
nn.BatchNorm2d(256),
nn.PReLU(),
nn.Conv2d(256, 512, 4, 2, 1),
nn.BatchNorm2d(512),
nn.PReLU(),
nn.Conv2d(512, 1024, 4, 2, 1),
nn.BatchNorm2d(1024),
nn.PReLU(),
nn.Conv2d(1024, 1024, 4, 2, 1),
nn.BatchNorm2d(1024),
nn.PReLU(),
nn.Flatten(),
nn.PReLU(),
nn.Linear(1024 * scale * scale, self.nLatentDims),
nn.BatchNorm1d(self.nLatentDims)
)
# Create decoder
self.decoder = nn.Sequential(
nn.Linear(self.nLatentDims, 1024 * scale * scale),
nn.Unflatten(1, (1024, scale, scale)),
nn.PReLU(),
nn.ConvTranspose2d(1024, 1024, 4, 2, 1),
nn.BatchNorm2d(1024),
nn.PReLU(),
nn.ConvTranspose2d(1024, 512, 4, 2, 1),
nn.BatchNorm2d(512),
nn.PReLU(),
nn.ConvTranspose2d(512, 256, 4, 2, 1),
nn.BatchNorm2d(256),
nn.PReLU(),
nn.ConvTranspose2d(256, 128, 4, 2, 1),
nn.BatchNorm2d(128),
nn.PReLU(),
nn.ConvTranspose2d(128, 64, 4, 2, 1),
nn.BatchNorm2d(64),
nn.PReLU(),
nn.ConvTranspose2d(64, 1, 4, 2, 1),
nn.Sigmoid()
)
self.encoder.apply(weights_init)
self.decoder.apply(weights_init)
def create_adversary(self):
# Create adversary (discriminator)
noise = 0.1
self.adversary = nn.Sequential(
nn.Linear(self.nLatentDims, 1024),
nn.LeakyReLU(0.2, inplace=True),
nn.Linear(1024, 1024),
nn.BatchNorm1d(1024),
nn.LeakyReLU(0.2, inplace=True),
nn.Linear(1024, 512),
nn.BatchNorm1d(512),
nn.LeakyReLU(0.2, inplace=True),
nn.Linear(512, 1),
nn.Sigmoid()
)
self.adversary.apply(weights_init)
def assemble(self):
self.autoencoder = nn.Sequential(self.encoder, self.decoder)
    def forward(self, x):
        # Encode once; the adversary judges the latent code (its first Linear
        # expects nLatentDims features), not the raw image.
        z = self.encoder(x)
        return self.decoder(z), self.adversary(z)
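# A hedged smoke-test sketch (an addition; the opts fields are assumptions
# inferred from create()). With imsize=64, scale_factor = 8/(512/64) = 1, and
# the six stride-2 convs reduce 64 -> 1, matching the Linear/Unflatten sizes.
if __name__ == "__main__":
    from types import SimpleNamespace
    opts = SimpleNamespace(imsize=64, nLatentDims=10, nChIn=1, nChOut=1,
                           nOther=0, dropoutRate=0.0)
    model = Model(zSize=opts.nLatentDims)
    model.create(opts)
    x = torch.randn(2, 1, 64, 64)     # batch of 2 keeps BatchNorm1d happy
    recon, d_out = model(x)
    print(recon.shape, d_out.shape)   # expected: (2, 1, 64, 64) and (2, 1)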
|
TylerJost/learnPytorch
|
autoencoders/aaeGaudenz.py
|
aaeGaudenz.py
|
py
| 3,140 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71366123387
|
#!/usr/bin/env python
#-*-coding: utf-8 -*-
import numpy as np
import numpy.linalg as LA
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
def plot_regret(rewards, bestRewards, label, filename):
sumRewards = np.cumsum(rewards)
sumBestRewards = np.cumsum(bestRewards)
regret = (sumBestRewards - sumRewards)
fig = plt.figure(figsize=(7, 6))
plt.plot(np.arange(1,len(regret)+1), regret, label=label)
plt.legend()
plt.savefig('./img/'+filename+'.png')
plt.close(fig)
return
def plot_beta_estimation(betaEstimations, filename):
fig = plt.figure(figsize=(7, 6))
plt.axis([0, len(betaEstimations), 0, 1.])
    plt.plot(np.arange(1, len(betaEstimations) + 1), betaEstimations, label='loss distance')
plt.legend()
plt.savefig('./img/'+filename+'.png')
plt.close(fig)
return
def plot_contexts_and_beta(AllContexts, theta, beta_estimation, filename):
fig = plt.gcf()
plt.gca().set_xlim((0.,1.2))
plt.gca().set_ylim((0.,1.2))
    plt.gca().plot(np.array([x[0] for x in AllContexts]),  # plot context vectors
                   np.array([x[1] for x in AllContexts]),  # (list comps: np.array(map(...)) breaks on Python 3)
                   'o', color='black')
plt.gca().plot(theta[0], theta[1],'o',color='blue') # plot theta vector (hidden vector)
normalisation = LA.norm(np.array([beta_estimation[0], beta_estimation[1]]))
plt.gca().plot(beta_estimation[0] / normalisation, # plot beta estimation
beta_estimation[1] / normalisation,
'o',color='red')
fig.gca().add_artist(plt.Circle((0,0),1.,color='b',fill=False))
for i, x in enumerate(AllContexts):
fig.gca().annotate('%d' % i, xy=(x[0],x[1]), xytext=(x[0], x[1]),
arrowprops=dict(facecolor='black', shrink=0.05),
)
fig.savefig('img/'+filename+'.png')
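# A minimal demo sketch (assumed data; './img' is created because the plotting
# helpers above save there):
if __name__ == '__main__':
    import os
    os.makedirs('./img', exist_ok=True)
    rng = np.random.default_rng(0)
    rewards = rng.uniform(0.0, 1.0, size=500)
    best_rewards = np.maximum(rewards, rng.uniform(0.0, 1.0, size=500))
    plot_regret(rewards, best_rewards, label='uniform policy', filename='regret_demo')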
|
jeppe/Adaptive-Social-Search
|
linucb/plot_utils.py
|
plot_utils.py
|
py
| 1,894 |
python
|
en
|
code
| 1 |
github-code
|
6
|
15796410510
|
"""Евлампия не смогла разобраться с рекурсией! Напишите реализацию алгоритма
определения факториала числа с использованием цикла.
Формат ввода
На вход подается n - целое число в диапазоне от 0 до 22
Формат вывода
Нужно вывести число - факториал для n
Пример
Ввод
3
Вывод
2
"""
def fact(number):
multiplication = 1
i = 1
while i <= number:
multiplication = multiplication * i
i += 1
return multiplication
if __name__ == "__main__":
n = int(input())
print(fact(n))
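# Self-check sketch (an addition, not part of the original solution): the loop
# should agree with math.factorial across the allowed range 0..22.
def _test_fact():
    import math
    assert all(fact(i) == math.factorial(i) for i in range(23))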
|
Ilia-Abrosimov/Algorithms-and-data-structures
|
4. Recursion/F.py
|
F.py
|
py
| 723 |
python
|
ru
|
code
| 0 |
github-code
|
6
|
31539804976
|
from Infrastructura.repos import RepoClienti, RepoFilme, RepoInchirieri
from Domain.entitati import Film, Client, Inchiriere
from Business.services import ServiceFilme, ServiceClienti, ServiceInchirieri
from Validare.validatoare import ValidFilme, ValidClienti, ValidInchirieri
from Exceptii.exceptii import ValidError, RepoError
import unittest
class Teste():
def __test_creeaza_film(self):
film = Film(1, 'a', 'a', 'a', 0)
assert (film.get_id() == 1)
assert (film.get_titlu() == 'a')
assert (film.get_descriere() == 'a')
assert (film.get_gen() == 'a')
assert (film.get_inchirieri() == 0)
film.set_titlu('b')
assert (film.get_titlu() == 'b')
film.set_descriere('b')
assert (film.get_descriere() == 'b')
film.set_gen('b')
assert (film.get_gen() == 'b')
def __test_creeaza_client(self):
client = Client(1, 'a', 1234567890123, 0)
assert (client.get_id() == 1)
assert (client.get_nume() == 'a')
assert (client.get_cnp() == 1234567890123)
assert (client.get_inchirieri() == 0 )
client.set_nume('b')
assert (client.get_nume() == 'b')
client.set_cnp(2234567890123)
assert (client.get_cnp() == 2234567890123)
def __test_creeaza_inchiriere(self):
film = Film(1, 'a', 'a', 'a', 0)
client = Client(1, 'a', 1234567890123, 0)
inchiriere = Inchiriere(client, film)
assert (inchiriere.get_client() == client)
assert (inchiriere.get_film() == film)
assert (inchiriere.get_id_client() == client.get_id())
assert (inchiriere.get_id_film() == film.get_id())
def __test_adauga_client(self):
client = Client(1, 'a', 1234567890123, 0)
testRepoCLient = RepoClienti()
testRepoCLient.adauga_client(client)
lista = testRepoCLient.get_all()
assert (len(lista) == 1)
def __test_adauga_film(self):
film = Film(1, 'a', 'a', 'a', 0)
testRepoFilme = RepoFilme()
testRepoFilme.adauga_film(film)
lista = testRepoFilme.get_all()
assert (len(lista) == 1)
def __test_modifica_client(self):
client = Client(1, 'a', 1234567890123, 0)
testRepoCLient = RepoClienti()
testRepoCLient.adauga_client(client)
testValidClienti = ValidClienti()
testServiceClient = ServiceClienti(testRepoCLient, testValidClienti)
testServiceClient.modifica_client(1, 'b', 1231231231231)
assert (client.get_id() == 1)
assert (client.get_nume() == 'b')
assert (client.get_cnp() == 1231231231231)
def __test_modifica_film(self):
film = Film(1, 'a', 'a', 'a', 0)
testRepoFilme = RepoFilme()
testRepoFilme.adauga_film(film)
testValidFilme = ValidFilme()
testServiceFilme = ServiceFilme(testRepoFilme, testValidFilme)
testServiceFilme.modifica_film(1, 'b', 'b', 'b')
assert (film.get_id() == 1)
assert (film.get_titlu() == 'b')
assert (film.get_descriere() == 'b')
assert (film.get_gen() == 'b')
def __test_sterge_client(self):
client = Client(1, 'a', 1234567890123, 0)
testRepoCLient = RepoClienti()
testRepoCLient.adauga_client(client)
testRepoCLient.sterge_client(client)
lista = testRepoCLient.get_all()
assert (len(lista) == 0)
def __test_sterge_film(self):
film = Film(1, 'a', 'a', 'a', 0)
testRepoFilme = RepoFilme()
testRepoFilme.adauga_film(film)
testRepoFilme.sterge_film(film)
lista = testRepoFilme.get_all()
assert (len(lista) == 0)
def __test_inchiriaza(self):
film = Film(1, 'a', 'a', 'a', 0)
client = Client(1, 'a', 1234567890123, 0)
inchiriere = Inchiriere(client, film)
testRepoInchiriere = RepoInchirieri()
testRepoInchiriere.inchiriaza(inchiriere)
lista = testRepoInchiriere.get_all()
assert (len(lista) == 1)
def __test_returneaza(self):
film = Film(1, 'a', 'a', 'a', 0)
client = Client(1, 'a', 1234567890123, 0)
inchiriere = Inchiriere(client, film)
testRepoInchiriere = RepoInchirieri()
testRepoInchiriere.inchiriaza(inchiriere)
testRepoInchiriere.returneaza(inchiriere)
lista = testRepoInchiriere.get_all()
assert (len(lista) == 0)
def run_all_tests(self):
self.__test_creeaza_film()
self.__test_creeaza_client()
self.__test_creeaza_inchiriere()
self.__test_adauga_client()
self.__test_adauga_film()
self.__test_modifica_client()
self.__test_modifica_film()
self.__test_sterge_client()
self.__test_sterge_film()
self.__test_inchiriaza()
self.__test_returneaza()
class TesteUnittest(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
def tearDown(self):
unittest.TestCase.tearDown(self)
def testCreateClient(self):
client1 = Client(1, "Ion", 5001229172347, 0)
self.assertEqual(client1.get_id(), 1)
self.assertEqual(client1.get_nume(), "Ion")
self.assertEqual(client1.get_cnp(),5001229172347)
client1.set_nume("Vasile")
self.assertEqual(client1.get_nume(), "Vasile")
client1.set_cnp(1231231231231)
self.assertEqual(client1.get_cnp(), 1231231231231)
self.__client = client1
def testValidClient(self):
validClient = ValidClienti()
self.__client_id_invalid = Client(-1, "Ion", 1231231231231, 0)
self.__client_nume_invalid = Client(1, "", 1231231231231, 0)
self.__client_cnp_invalid1 = Client(1, "Ion", 1, 0)
self.__client_cnp_invalid2 = Client(1, "Ion", 12345678901234, 0)
with self.assertRaises(ValidError):
validClient.validare_client(self.__client_id_invalid)
validClient.validare_client(self.__client_nume_invalid)
validClient.validare_client(self.__client_cnp_invalid1)
validClient.validare_client(self.__client_cnp_invalid2)
self.__validClient = validClient
def testRepoClient(self):
cli = Client(1, "Ion", 5001229172347, 0)
self.__repoClient = RepoClienti()
self.__repoClient.adauga_client(cli)
client = Client(1, "Vasile", 1234567890123, 0)
client1 = Client(2, "Sorin", 1231231231231, 0)
client3 = Client(1, None, None, 0)
client4 = Client(None, "Vasile", None, 0)
with self.assertRaises(RepoError):
self.__repoClient.adauga_client(client)
self.assertEqual(len(self.__repoClient.get_all()), 1)
self.__repoClient.modifica_client(client)
self.assertEqual(cli.get_nume(), "Vasile")
self.assertEqual(cli.get_cnp(), 1234567890123)
with self.assertRaises(RepoError):
self.__repoClient.modifica_client(client1)
x = self.__repoClient.cauta_client(client3)
self.assertEqual(x, cli)
x = self.__repoClient.cauta_client(client4)
self.assertEqual(x, cli)
with self.assertRaises(RepoError):
self.__repoClient.cauta_client(client1)
self.__repoClient.sterge_client(cli)
with self.assertRaises(RepoError):
self.__repoClient.sterge_client(client1)
self.assertEqual(len(self.__repoClient.get_all()), 0)
def testCreateFilm(self):
film1 = Film(1, 'Titlu', 'Descriere', 'Gen', 0)
self.assertEqual(film1.get_id(), 1)
self.assertEqual(film1.get_titlu(), "Titlu")
self.assertEqual(film1.get_descriere(), "Descriere")
self.assertEqual(film1.get_gen(), "Gen")
film1.set_titlu("AltTitlu")
self.assertEqual(film1.get_titlu(), "AltTitlu")
film1.set_descriere("AltaDescriere")
self.assertEqual(film1.get_descriere(), "AltaDescriere")
film1.set_gen("AltGen")
self.assertEqual(film1.get_gen(), "AltGen")
self.__film = film1
def testValidFilm(self):
validFilme = ValidFilme()
self.__film_id_invalid = Film(-1, 'a', 'a','a', 0)
self.__film_titlu_invalid = Film(1, '', 'a', 'a', 0)
self.__film_descriere_invalida = Film(1, 'a', '', 'a', 0)
self.__film_gen_invalid = Film(1, 'a', 'a', '', 0)
with self.assertRaises(ValidError):
validFilme.valideaza_film(self.__film_id_invalid)
validFilme.valideaza_film(self.__film_titlu_invalid)
validFilme.valideaza_film(self.__film_descriere_invalida)
validFilme.valideaza_film(self.__film_gen_invalid)
self.__validFilme = validFilme
def testRepoFilme(self):
film = Film(1, 'Titlu', 'Descriere', 'Gen', 0)
self.__repoFilme = RepoFilme()
self.__repoFilme.adauga_film(film)
film1 = Film(1, 'b', 'b', 'b', 0)
film2 = Film(1, None, None, None, 0)
film3 = Film(2, None, None, None, 0)
with self.assertRaises(RepoError):
self.__repoFilme.adauga_film(film1)
self.assertEqual(len(self.__repoFilme.get_all()), 1)
self.__repoFilme.modifica_film(film1)
self.assertEqual(film.get_titlu(), 'b')
self.assertEqual(film.get_descriere(), 'b')
self.assertEqual(film.get_gen(), 'b')
with self.assertRaises(RepoError):
self.__repoFilme.modifica_film(film3)
x = self.__repoFilme.cauta_film(film2)
self.assertEqual(x, film)
with self.assertRaises(RepoError):
self.__repoFilme.cauta_film(film3)
with self.assertRaises(RepoError):
self.__repoFilme.sterge_film(film3)
self.__repoFilme.sterge_film(film1)
self.assertEqual(len(self.__repoFilme.get_all()), 0)
def testRepoInchirieri(self):
self.__repoInchirieri = RepoInchirieri()
film1 = Film(1, 'Titlu', 'Descriere', 'Gen', 0)
client1 = Client(1, "Ion", 5001229172347, 0)
        inchiriere = Inchiriere(client1, film1)  # constructor order is (client, film)
self.__repoInchirieri.inchiriaza(inchiriere)
self.assertEqual(len(self.__repoInchirieri.get_all()), 1)
self.__repoInchirieri.returneaza(inchiriere)
self.assertEqual(len(self.__repoInchirieri.get_all()), 0)
def runAllTests(self):
self.testCreateClient()
self.testValidClient()
self.testRepoClient()
self.testCreateFilm()
self.testValidFilm()
self.testRepoFilme()
self.testRepoInchirieri()
|
CombatFrog/facultate
|
FP/InchirieriFilme/Teste/teste.py
|
teste.py
|
py
| 10,764 |
python
|
en
|
code
| 0 |
github-code
|
6
|
72453729787
|
from django.conf.urls.defaults import *
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^lsdesign/', include('lsdesign.foo.urls')),
url(r'^$',
'django.views.generic.simple.direct_to_template',
{'template': 'homepage.html'}),
(r'^work/', include('lsdesign.portfolio.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
(r'^admin/(.*)', admin.site.root),
)
if getattr(settings, 'SERVE_STATIC_MEDIA', False):
urlpatterns += patterns('django.views.static',
(r'^%s(?P<path>.*)' % settings.MEDIA_URL, 'serve',
{'document_root': settings.MEDIA_ROOT}),
)
|
cyndi/lacey-springs-designs
|
lsdesign/urls.py
|
urls.py
|
py
| 955 |
python
|
en
|
code
| 2 |
github-code
|
6
|
20175364599
|
from decimal import Decimal
from django.conf import settings
from django.urls import reverse
from django.shortcuts import render, get_object_or_404
from core.models import Player, Developer, Payment, Order
from django.views.decorators.csrf import csrf_exempt
from hashlib import md5
from payments.forms import PaymentForm
from django.template.loader import render_to_string
from django.core.mail import EmailMessage
import secrets
import datetime
# Secret key identifying the webstore.
secret_key = "4c5e699656586b17e3775a51281cb3d0"
"""
Renders the view for successful payment and adds the game to the player's inventory.
Also sends a confirmation email to the user about the completed purchase.
"""
@csrf_exempt
def payment_done(request):
if request.GET['result'] == 'success':
pid = request.GET['pid']
payment = get_object_or_404(Payment, payment_id=pid)
order = payment.order
ref = request.GET['ref']
result = request.GET['result']
amount = '%.2f' % order.get_total_cost().quantize(Decimal('.01'))
checksumstr = "pid={}&ref={}&result={}&token={}".format(pid, ref, result, secret_key)
m = md5(checksumstr.encode("ascii"))
checksum = m.hexdigest()
if checksum == request.GET["checksum"]:
order.paid = True
order.updated = datetime.datetime.now()
order.save()
items = order.items.all()
uid = request.user.id
player = get_object_or_404(Player, user_id=uid)
games = []
for item in items:
player.games.add(item.game)
item.game.times_bought += 1
games.append(item.game)
item.game.save()
player.save()
payment.delete(keep_parents=True)
# The confirmation email.
mail_subject = 'Thank you for your purchase!'
message = render_to_string('payments/done_email.html', {
'user': request.user,
'first_name': order.first_name,
'last_name': order.last_name,
'email': order.email,
'address': order.address,
'postal_code': order.postal_code,
'city': order.city,
'games': games,
'price': order.get_total_cost()})
to_email = order.email
email = EmailMessage(mail_subject, message, to=[to_email])
email.send()
return render(request, 'payments/done.html')
else:
return render(request, 'payments/error.html')
else:
return render(request, 'payments/error.html')
"""
Renders the canceled payment page.
"""
@csrf_exempt
def payment_canceled(request):
pid = request.GET['pid']
payment = get_object_or_404(Payment, payment_id=pid)
payment.delete(keep_parents=True)
return render(request, 'payments/canceled.html')
"""
Renders the error -page when there is an error with the payment
"""
@csrf_exempt
def payment_error(request):
pid = request.GET['pid']
payment = get_object_or_404(Payment, payment_id=pid)
payment.delete(keep_parents=True)
return render(request, 'payments/error.html')
"""
Processes the payment of the order. Creates the values for the POST message
required by the mockup payment site.
"""
def payment_process(request):
order_id = request.session.get('order_id')
order = get_object_or_404(Order, id=order_id)
host = request.get_host()
pid = secrets.randbelow(1000000000)
Payment.objects.create(payment_id=pid, order=order)
sid = 'thebestgamestore'
amount = '%.2f' % order.get_total_cost().quantize(Decimal('.01'))
checksumstr = "pid={}&sid={}&amount={}&token={}".format(pid, sid, amount, secret_key)
m = md5(checksumstr.encode("ascii"))
checksum = m.hexdigest()
# Inputs for the POST -message.
payment_details = {
'pid': pid,
'sid': sid,
'amount': amount,
'success_url': 'http://{}{}'.format(host, reverse('payments:done')),
'cancel_url': 'http://{}{}'.format(host, reverse('payments:canceled')),
'error_url': 'http://{}{}'.format(host, reverse('payments:error')),
'checksum': checksum
}
form = PaymentForm(payment_details)
return render(request, 'payments/process.html', {'order': order,
'form':form})
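# A hedged helper sketch (an addition, mirroring the scheme used above): both
# views build 'k1=v1&...&token=<secret>' and md5 it, so the construction can
# be isolated for reuse and unit testing.
def compute_checksum(params, token):
    """Return the md5 hex digest of 'k1=v1&...&kn=vn&token=<token>'."""
    base = "&".join("{}={}".format(k, v) for k, v in params.items())
    return md5("{}&token={}".format(base, token).encode("ascii")).hexdigest()

# e.g. compute_checksum({"pid": pid, "sid": sid, "amount": amount}, secret_key)
# reproduces the `checksum` computed inside payment_process.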
|
vaarnaa/TheBestGameStore
|
payments/views.py
|
views.py
|
py
| 4,464 |
python
|
en
|
code
| 0 |
github-code
|
6
|
1622240991
|
"""
This file contains functions for micro table construction and encoding
"""
import unicodedata
import numpy as np
from pattern.text.en import tokenize
'''determine the type of a column: string (return False), number (return True)'''
def Is_Number_Col(col_cells):
threshold = 0.7
num_cell, non_empty = 0, 0
for cell in col_cells:
if cell.strip() != '':
non_empty += 1
if Is_Number(cell):
num_cell += 1
if non_empty == 0:
return False
else:
return float(num_cell) / float(non_empty) >= threshold
# Return whether a string is a number
def Is_Number(s):
num_flag = False
try:
float(s)
num_flag = True
except ValueError:
pass
if not num_flag:
try:
unicodedata.numeric(s)
num_flag = True
except (TypeError, ValueError):
pass
return num_flag
''' transform a cell to number (float)
return 0.0 if the cell does not have number format'''
def To_Number(cell):
if cell.lower() == 'nan' or cell.lower() == 'inf':
return 0.0
try:
v = float(cell)
return v
except ValueError:
pass
try:
v = unicodedata.numeric(cell)
return v
except (TypeError, ValueError):
pass
return 0.0
''' extract testing samples of a given column
'''
def extract_samples_by_col(columns, col, micro_table_size):
M, N = micro_table_size
samples = list()
tab_name, col_id = col.split(' ')
col_id = int(col_id)
N_col_ids = list()
for i, cells in enumerate(columns):
if i != col_id:
N_col_ids.append(i)
if len(N_col_ids) >= N:
break
''' organize the table as rows (transform),
filter out rows whose cell of target column is empty,
fill columns with 'NaN' if len(N_col_ids) < N '''
rows_filter = list()
for i in range(len(columns[0])):
if columns[col_id][i].strip() != '':
row = [columns[col_id][i]]
for N_col_id in N_col_ids:
row.append(columns[N_col_id][i])
if len(N_col_ids) < N:
row += ['NaN'] * (N - len(N_col_ids))
rows_filter.append(row)
''' slide a window in row dimension,
re-organize each table segment as dict,
append a segment whose length is less than M with rows of 'NaN' (0) '''
row_num, i = len(rows_filter), 0
while i < row_num:
seg = rows_filter[i:(i + M)] if i + M <= row_num else rows_filter[i:row_num]
seg_len = len(seg)
col_0 = [seg[j][0] for j in range(seg_len)]
col_0 += ['NaN'] * (M - seg_len)
sample = {'col_0': col_0}
for k in range(N):
col_k = [seg[j][k + 1] for j in range(seg_len)]
col_k += ['NaN'] * (M - seg_len)
sample['col_N_%d' % k] = col_k
i += 1
samples.append(sample)
return samples
''' Preprocess the cell phrase
'''
def cell_phrase_preprocess(cell):
cell_new = cell.replace('_', ' ').replace('-', ' ').replace('.', ' ').replace('/', ' '). \
replace('"', ' ').replace("'", ' ').replace('\\', ' ').replace('(', ' ').replace(')', ' ')
return cell_new
''' Transform a cell (phrase) to a vector by averaging the word vectors
'''
def cell_vector_avg(cell, w2v_model):
vector, n = np.zeros(w2v_model.vector_size), 0
if not cell == 'NaN':
ent_n = cell_phrase_preprocess(cell)
tokenized_line = ' '.join(tokenize(ent_n))
is_alpha_word_line = [word for word in tokenized_line.lower().split() if word.isalpha()]
for i, word in enumerate(is_alpha_word_line):
if word in w2v_model.wv.vocab:
w_vec = w2v_model.wv[word]
vector += w_vec.reshape(w2v_model.vector_size)
n += 1
return vector if n == 0 else vector / n
''' Transform a cell (phrase) to seq_size vectors
'''
def cell_vector(cell, w2v_model, seq_size):
vectors = np.zeros((seq_size, w2v_model.vector_size))
if not cell == 'NaN':
ent_n = cell_phrase_preprocess(cell)
tokenized_line = ' '.join(tokenize(ent_n))
is_alpha_word_line = [word for word in tokenized_line.lower().split() if word.isalpha()]
for i, word in enumerate(is_alpha_word_line):
if i >= seq_size:
break
if word in w2v_model.wv.vocab:
w_vec = w2v_model.wv[word]
vectors[i] = w_vec
return vectors
''' Embed a micro table
Each phrase cell is represented by a vector using averaging of word vectors
Each number cell is represented by the number vector
'''
def Table_Encode_WV_Avg(micro_table, table_size, w2v_model, use_surrounding_columns=True):
M, N = table_size
D = w2v_model.vector_size
emd = np.zeros((M, (N + 1), D)) if use_surrounding_columns else np.zeros((M, 1, D))
col_0 = micro_table['col_0']
for i, cell in enumerate(col_0):
emd[i][0] = cell_vector_avg(cell, w2v_model)
if use_surrounding_columns:
for k in range(N):
col_k = micro_table['col_N_%d' % k]
if Is_Number_Col(col_k):
for i, cell in enumerate(col_k):
emd[i][k + 1][0] = To_Number(cell)
else:
for i, cell in enumerate(col_k):
emd[i][k + 1] = cell_vector_avg(cell, w2v_model)
return emd
''' Embed a micro table
Each phrase cell is represented by sequence_size word vectors
Each number cell is represented by the number vector
'''
def Table_Encode_WV(micro_table, table_size, w2v_model, cell_seq_size, use_surrounding_columns=True):
M, N = table_size
D = w2v_model.vector_size
emd = np.zeros((M, (N + 1), cell_seq_size, D)) if use_surrounding_columns else np.zeros((M, 1, cell_seq_size, D))
col_0 = micro_table['col_0']
for i, cell in enumerate(col_0):
emd[i][0] = cell_vector(cell=cell, w2v_model=w2v_model, seq_size=cell_seq_size)
if use_surrounding_columns:
for k in range(N):
col_k = micro_table['col_N_%d' % k]
if Is_Number_Col(col_k):
for i, cell in enumerate(col_k):
emd[i][k + 1][0][0] = To_Number(cell)
else:
for i, cell in enumerate(col_k):
emd[i][k + 1] = cell_vector(cell=cell, w2v_model=w2v_model, seq_size=cell_seq_size)
return emd
''' Encode a micro_table by
first transforming the target column (cells) into a sequence by concatenation
then encode each word of the sequence to a vector
'''
def Synth_Column_Encode_WV(micro_table, seq_size, w2v_model):
D = w2v_model.vector_size
emd = np.zeros((seq_size, 1, D))
col_0 = micro_table['col_0']
seq = list()
for j, cell in enumerate(col_0):
ent_n = cell_phrase_preprocess(cell)
tokenized_line = ' '.join(tokenize(ent_n))
seq += [word for word in tokenized_line.lower().split() if word.isalpha()]
for j in range(seq_size):
if j < len(seq):
if seq[j] in w2v_model.wv.vocab:
emd[j, 0, :] = w2v_model.wv[seq[j]]
return emd
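# A small runnable sketch (toy data, an assumption) for extract_samples_by_col:
# `columns` are parallel cell lists and `col` is '<table_name> <column_index>'.
# (Running it still needs this module's own imports, e.g. the pattern library.)
if __name__ == '__main__':
    toy_columns = [
        ['Paris', 'Berlin', 'Rome'],      # target column 0
        ['France', 'Germany', 'Italy'],   # surrounding string column
        ['2.1', '3.6', '2.8'],            # surrounding numeric column
    ]
    for sample in extract_samples_by_col(toy_columns, 'toy_table 0', micro_table_size=(2, 2)):
        print(sample)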
|
alan-turing-institute/SemAIDA
|
IJCAI19/SemColHNN_Codes/util/util_micro_table.py
|
util_micro_table.py
|
py
| 7,240 |
python
|
en
|
code
| 37 |
github-code
|
6
|
71781169789
|
# Calculate FPS (Frames per second)
import cv2
from timeit import default_timer as timer
camera = cv2.VideoCapture(0)
frame_count = 0
total_time = 0
while camera.isOpened():
start_time = timer()
_, frame = camera.read()
frame_count += 1
elapsed_time = timer() - start_time
total_time += elapsed_time
FPS = float(frame_count / total_time)
print(f"FPS: {FPS:.3f}")
cv2.imshow('Webcam 0', frame)
# Press "q" to exit program
if cv2.waitKey(1) == ord('q'):
break
# Release the frames
camera.release()
# Destroy all windows
cv2.destroyAllWindows()
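# Note: the FPS printed above is a cumulative average since launch, so it
# reacts slowly to slowdowns. A hedged, camera-free sketch (all names are
# illustrative) of an exponentially smoothed alternative:
def ema_fps_demo(n_frames=50, alpha=0.1):
    import time
    fps_ema = None
    for _ in range(n_frames):
        start = timer()
        time.sleep(0.02)  # stand-in for camera.read() + imshow()
        inst = 1.0 / (timer() - start)
        fps_ema = inst if fps_ema is None else alpha * inst + (1 - alpha) * fps_ema
    return fps_ema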
|
yptheangel/opencv-starter-pack
|
python/basic/calculate_FPS.py
|
calculate_FPS.py
|
py
| 596 |
python
|
en
|
code
| 8 |
github-code
|
6
|
9920081347
|
from django.urls import path
from . import views
urlpatterns = [
path("", views.index, name="index"),
path("login", views.login_view, name="login"),
path("logout", views.logout_view, name="logout"),
path("register", views.register, name="register"),
path("new_listing", views.new_listing, name="new_listing"),
path("view_watchlist", views.view_watchlist, name="view_watchlist"),
path("categories", views.categories, name="categories"),
path("<int:listing_id>", views.listing, name="listing"),
path("<int:listing_id>/bid", views.bid, name="bid"),
path("<int:listing_id>/watchlist", views.watchlist, name="watchlist"),
path("<int:listing_id>/close", views.close, name="close"),
path("<int:listing_id>/comment", views.comment, name="comment"),
path("auctions/<str:category>", views.category, name="category")
]
|
SaraRayne/Commerce
|
commerce/auctions/urls.py
|
urls.py
|
py
| 862 |
python
|
en
|
code
| 0 |
github-code
|
6
|
36480780883
|
from models.db import db
# from models.role import Role
class Feedback(db.Model):
id = db.Column(db.Integer, primary_key=True)
author_id = db.Column(db.Integer, db.ForeignKey('role.id'))
publication_id = db.Column(db.Integer, db.ForeignKey('publication.id'))
feedback = db.Column(db.Text, nullable=False)
done = db.Column(db.Boolean, default=False)
def __repr__(self):
        return '<Feedback to publication {} from author {}: {}>'.format(
self.publication_id,
self.author,
self.feedback)
def __init__(self, publication_id, feedback, author_id=None):
self.publication_id = publication_id
self.feedback = feedback
if author_id is not None:
self.author_id = author_id
def format(self):
response = {
'id': self.id,
'publication': self.publication.format(),
'text': self.feedback,
'done': self.done
}
if self.author is not None:
response["author"] = self.author.format()
return response
def insert(self):
db.session.add(self)
db.session.commit()
def update(self):
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
|
doigl/publicationWorkflow
|
models/feedback.py
|
feedback.py
|
py
| 1,299 |
python
|
en
|
code
| 0 |
github-code
|
6
|
3848256387
|
class PyIfElse:
# If is odd, print Weird
# If is even and in the inclusive range of 2 to 5, print Not Weird
# If is even and in the inclusive range of 6 to 20, print Weird
# If is even and greater than 20, print Not Weird
def __call__(self, number):
if number < 1 or number > 100:
raise ValueError()
if number % 2 == 0:
if number in range(2, 6):
return "Not Weird"
elif number in range(6, 21):
return "Weird"
elif number > 20:
return "Not Weird"
else:
return "Weird"
if __name__ == '__main__':
n = int(input())
number_type = PyIfElse()
res = number_type(n)
print(res)
|
pedroinc/hackerhank
|
problems/pyifelse.py
|
pyifelse.py
|
py
| 742 |
python
|
en
|
code
| 0 |
github-code
|
6
|
2061507228
|
from sklearn.preprocessing import OneHotEncoder
import numpy as np
class CategoricalEncoder:
""" if scikit >= 0.20, better use scikit's version instead of this class """
def __init__(self, dense=True):
assert dense, "only dense output is supported"
def fit(self, X):
self._str_to_int = {}
X_int = np.empty(X.shape, dtype=np.int32)
for i, row in enumerate(X):
for j, v in enumerate(row):
int_v = self._str_to_int.get(v)
if int_v is None:
int_v = len(self._str_to_int) + 1
self._str_to_int[v] = int_v
X_int[i, j] = int_v
self._one_hot = OneHotEncoder(sparse=False).fit(X_int)
return self
def transform(self, X):
X_int = np.empty(X.shape, dtype=np.int32)
for i, row in enumerate(X):
for j, v in enumerate(row):
X_int[i, j] = self._str_to_int.get(v, 0)
return self._one_hot.transform(X_int)
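# A minimal usage sketch (toy data; assumes an older scikit-learn where
# OneHotEncoder(sparse=False) is valid, as the docstring suggests):
if __name__ == "__main__":
    X_train = np.array([["red", "S"], ["blue", "M"], ["red", "L"]])
    enc = CategoricalEncoder().fit(X_train)
    print(enc.transform(np.array([["blue", "S"]])))  # -> 1x5 one-hot matrix
    # Note: unseen values map to the reserved int 0, which the fitted
    # OneHotEncoder has never seen; transforming them would raise unless
    # handle_unknown='ignore' were added.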
|
rom1mouret/cheatmeal
|
benchmarks/preproc/categorical_encoder.py
|
categorical_encoder.py
|
py
| 1,011 |
python
|
en
|
code
| 2 |
github-code
|
6
|
11812216467
|
from typing import Optional
import torch
import torch.nn as nn
class JaccardLoss(nn.Module):
"""JaccardLoss optimize mIoU score directly.
Args:
num_classes (int): A number of unique classes.
ignore_index (Optional[int]): Class label to ignore calculating score.
eps (float): Used to prevent zero division.
"""
def __init__(
self, num_classes: int, ignore_index: Optional[int] = None,
eps: float = 1e-16
):
super(JaccardLoss, self).__init__()
self.num_classes: int = num_classes
self.ignore_index = ignore_index
self.eps = eps
def forward( # type: ignore
self,
inputs: torch.Tensor,
targets: torch.Tensor
) -> torch.Tensor:
inputs = torch.argmax(inputs, dim=1)
inputs = inputs.byte().flatten()
targets = targets.byte().flatten()
if self.ignore_index is not None:
is_not_ignore = targets != self.ignore_index
inputs = inputs[is_not_ignore]
targets = targets[is_not_ignore]
intersection = inputs[inputs == targets]
area_intersection = intersection.bincount(minlength=self.num_classes)
bincount_pred = inputs.bincount(minlength=self.num_classes)
bincount_true = targets.bincount(minlength=self.num_classes)
area_union = bincount_pred + bincount_true - area_intersection
mean_iou = torch.mean(area_intersection / (area_union + self.eps))
return mean_iou
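# A hedged usage sketch (random tensors, an assumption). Because forward()
# applies argmax, the returned mIoU carries no gradient, so in practice this
# behaves as an evaluation metric rather than a backprop-able loss.
if __name__ == "__main__":
    torch.manual_seed(0)
    logits = torch.randn(2, 3, 8, 8)           # (N, C, H, W) class scores
    targets = torch.randint(0, 3, (2, 8, 8))   # (N, H, W) integer labels
    print(float(JaccardLoss(num_classes=3)(logits, targets)))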
|
yutayamazaki/semantic-segmentation-pytorch
|
src/losses/jaccard.py
|
jaccard.py
|
py
| 1,507 |
python
|
en
|
code
| 1 |
github-code
|
6
|
6550444728
|
import RPi.GPIO as GPIO
from time import sleep
from API_PostReqs_for_pi import *
from API_PostReqs_for_PC import POST_LED_URL
from internetinfo import *
#GPIO setup
GPIO.setmode(GPIO.BCM)
LED_ZERO = 19
LED_ONE = 18
LED_TWO = 17
LED_THREE = 16
LED_FOUR = 13
LED_LIST = [LED_ZERO, LED_ONE, LED_TWO, LED_THREE, LED_FOUR]
BUTTON_ONE = 4
BUTTON_TWO = 5
GPIO.setup(LED_ZERO, GPIO.OUT)
GPIO.setup(LED_ONE, GPIO.OUT)
GPIO.setup(LED_TWO, GPIO.OUT)
GPIO.setup(LED_THREE, GPIO.OUT)
GPIO.setup(LED_FOUR, GPIO.OUT)
GPIO.setup(BUTTON_ONE, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
GPIO.setup(BUTTON_TWO, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
#API setup
TERMINATION_POST = "http://{}:{}/Terminate_Post/".format(IP, PORT)
TERMINATION_GET = "http://{}:{}/Terminate_Get/".format(IP, PORT)
TERMINATION_PUT = "http://{}:{}/Terminate_Put/".format(IP, PORT)
requests.post(url=POST_URL, params={"name":"BUTTON_ONE", "value":False})
requests.post(url=POST_URL, params={"name":"BUTTON_TWO", "value":False})
requests.post(url=POST_LED_URL, params={"value":False})
requests.post(url=TERMINATION_POST, params={"value":False})
#functions
def put_req(name:str, value:bool):
return requests.put(url=PUT_URL, params={"name":name, "value":value})
#main
button_one_active = False
button_two_active = False  # was referenced below before ever being assigned
led_status = False
terminate = False
while True:
#button one (layer mode)
"""
if GPIO.input(BUTTON_ONE) == GPIO.HIGH and button_one_active == True:
put_req("BUTTON_ONE", False)
print("MODE_OFF")
button_one_active = False
sleep(1)
"""
if GPIO.input(BUTTON_TWO) == GPIO.HIGH:
put_req("BUTTON_TWO", True)
print("MODE ON")
button_two_active = True
sleep(1)
#button two (minesweeper mode)
if GPIO.input(BUTTON_ONE) == GPIO.HIGH and button_two_active == True:
put_req("BUTTON_ONE", False)
print("MODE_OFF")
button_one_active = False
sleep(1)
if GPIO.input(BUTTON_ONE) == GPIO.HIGH and button_two_active == False:
put_req("BUTTON_ONE", True)
print("2 MODE ON")
button_one_active = True
sleep(1)
#LED
if button_one_active == True:
led_status = requests.get(url=GET_URL).json()
prev_led = led_status
if led_status != False: #json conversion mishap?
GPIO.output(LED_LIST[-int(led_status)],GPIO.HIGH)
sleep(0.5)
if button_one_active == False:
GPIO.output(LED_LIST[-int(led_status)], GPIO.LOW)
#Termination
if requests.get(url=TERMINATION_GET).json() == True:
requests.put(url=POST_URL, params={"name":"BUTTON_ONE", "value":False})
requests.put(url=POST_URL, params={"name":"BUTTON_TWO", "value":False})
requests.put(url=POST_LED_URL, params={"value":False})
requests.put(url=TERMINATION_POST, params={"value":False})
button_one_active = False
led_status = False
terminate = False
sleep(1)
#print(requests.get(url="http://{IP}:{}/GPIO_Get/", params={"name":"BUTTON_ONE"}).json()) #testing
|
TemplarOfSomething/3D-Minesweeper
|
GPIO_logic_for_pi.py
|
GPIO_logic_for_pi.py
|
py
| 3,102 |
python
|
en
|
code
| 0 |
github-code
|
6
|
41858747198
|
#############################################################################################
# Modify the prime-number program so that, when the number is not prime, it also reports   #
# which numbers it is divisible by.                                                        #
#############################################################################################
n = int(input('Enter a number: '))
para_comparar = [1, n]
divisível_por = []
for c in range(1, n + 1):
    if n % c == 0:
        divisível_por.append(c)
if divisível_por == para_comparar:
    print(f'{n} is a prime number.')
elif n == 1:
    print(f'{n} is not a prime number (a prime needs exactly two divisors).')
else:
    print(f'Since {n} is divisible by {divisível_por}, {n} is not a prime number.')
    print('A prime number is one that is divisible only by itself and by 1')
|
nralex/Python
|
3-EstruturaDeRepeticao/exercício22.py
|
exercício22.py
|
py
| 852 |
python
|
pt
|
code
| 0 |
github-code
|
6
|
5053626556
|
EXACT = 'yes'
EQUIVALENT = 'yes_m' # e.g. yes_modified
NO_MATCH = 'no'
NULL_SET = frozenset(['NULL','null','', '-', 'p.?', None])
SYN_ALIAS_SET = frozenset(['p.(=)', 'p.='])
COSMIC_NULL = 'p.?'
NON_CODING = '-'
NULL = '-'
HGVS_SYN = 'p.(=)'
DELINS = 'delins'
DEL = 'del'
INS = 'ins'
DUP = 'dup'
SUB = 'sub'
DELINS_SPLIT = 'del_ins' # for hgvs examples: c.34delTTinsAA
UNKNOWN = '?'
INVERSION = 'inv'
SYNONYMOUS = 'synonymous'
NONSENSE = 'nonsense'
MISSENSE = 'missense'
INFRAME = 'inframe'
FRAMESHIFT = 'fs'
INFRAME_INSERTION = 'inframe_insertion'
UPSTREAM = 'upstream'
START_LOST = 'start_lost'
STOP_LOST = 'stop_lost'
DINUCLEOTIDE = 'dinucleotide'
CANNOT_ASSESS = '?'
EXTENSION = 'ext'
INTRONIC = 'intronic'
MULTI_SUB = 'multi_sub'
FRAMESHIFT_NAME = 'frameshift'
DEL_SET = [DEL]
INS_SET = [DUP,INS]
SUB_SET = [NONSENSE, SYNONYMOUS, START_LOST, STOP_LOST]
CODING_START = 'c.'
PROTEIN_START = 'p.'
GENOMIC_START = 'g.'
CODING_START_LIST = ['r.','m.','c.','n.']
AMINO_ACID_SINGLETS = [
'A', 'R', 'N', 'D', 'B',
'C',
'E', 'Q', 'Z', 'G',
'H',
'I',
'L', 'K',
'M',
'F', 'P',
'S',
'T', 'W', 'Y',
'V',
'U',
'X'
]
AMINO_ACID_TRIPLETS = [
'Ala', 'Arg', 'Asn', 'Asp', 'Asx',
'Cys',
'Glu', 'Gln', 'Glx', 'Gly',
'His',
'Ile',
'Leu', 'Lys',
'Met',
'Phe', 'Pro',
'Ser',
'Thr', 'Trp', 'Tyr',
'Val',
'Sec',
'Ter'
]
INFRAME_SET = frozenset(['inframe_variant', 'inframe_deletion', 'inframe_insertion'])
MISSENSE_SET = frozenset(['non_synonymous_codon', 'missense_variant'])
UPSTREAM_SET = frozenset(['upstream_variant', 'upstream_gene_variant', '2KB_upstream_variant', '5_prime_UTR_variant'])
SYN_SET = frozenset(['synonymous_codon', 'synonymous_variant'])
INFRAME_DELETION_SET = frozenset(['disruptive_inframe_deletion', 'inframe_deletion'])
INFRAME_INSERTION_SET = frozenset(['disruptive_inframe_insertion', 'inframe_insertion'])
START_SET = frozenset(['initiator_codon_variant', 'start_lost'])
EFFECT_EQ_LIST = [INFRAME_DELETION_SET, INFRAME_SET, UPSTREAM_SET, SYN_SET, MISSENSE_SET, START_SET]
VARTYPE_DICT = {'del':'inframe_deletion',
'ins':'inframe_insertion',
'ext':'extension',
'delins': 'inframe_indel',
'dup':'duplication',
'?':''}
|
personalis/hgvslib
|
hgvslib/constants.py
|
constants.py
|
py
| 2,734 |
python
|
en
|
code
| 18 |
github-code
|
6
|
21812506780
|
import re
from src.plot_attention import plot_attention
from src.evaluate import evaluate
def translate(sentence, init_dict):
result, sentence, attention_plot = evaluate(sentence, init_dict)
print('Input: %s' % (sentence))
print('Predicted translation: {}'.format(result))
result = re.sub('<end>', '', result)
#return result
attention_plot = attention_plot[:len(result.split(' ')), :len(sentence.split(' '))]
plot_attention(attention_plot, sentence.split(' '), result.split(' '))
return result
|
sksos7/Kor_to_En_translator
|
src/translate.py
|
translate.py
|
py
| 532 |
python
|
en
|
code
| 0 |
github-code
|
6
|
14351509220
|
import mercantile
def get_blank_feature_json(lat, lon):
ft_dict = {"type": "Feature"}
geom_dict = {"type": "Point", "coordinates": [lon, lat]}
ft_dict["geometry"] = geom_dict
return ft_dict
# GET QUADHASH TILE OF A GIVEN COORDINATE
def get_quad_tile(lat, lon, precision):
ret = mercantile.tile(lon,lat,precision)
return ret
def get_quad_key_from_tile(x, y, zoom):
return mercantile.quadkey(x, y, zoom)
# GIVEN A QUAD_KEY, GET THE CORRESPONDING QUAD TILE
def get_tile_from_key(key):
return mercantile.quadkey_to_tile(key)
# GET QUADHASH STRING OF A GIVEN COORDINATE
def get_quad_key(lat, lon, zoom):
tile = get_quad_tile(lat, lon, precision=zoom)
#print(tile)
return get_quad_key_from_tile(tile.x, tile.y, tile.z)
#GIVEN A ZOOM LEVEL, WHAT IS THE MAX POSSIBLE TILE NUMBER HERE?
def get_max_possible_xy(zoom):
if zoom == 0:
return 0
return 2**zoom-1
# GIVEN A TILE, VERIFY IT IS VALID
def validate_tile(tile):
max_xy = get_max_possible_xy(tile.z)
if tile.x > max_xy or tile.x < 0 or tile.y > max_xy or tile.y < 0:
return False
return True
# GIVEN A BOX, FIND ALL TILES THAT LIE INSIDE THAT COORDINATE BOX
def find_all_inside_box(lat1, lat2, lon1, lon2, zoom):
all_tiles = []
top_left_quad_tile = get_quad_tile(lat2, lon1, zoom)
bottom_right_quad_tile = get_quad_tile(lat1, lon2, zoom)
print("TOP_LEFT & BOTTOM_RIGHT: ",top_left_quad_tile, bottom_right_quad_tile)
x1 = top_left_quad_tile.x
x2 = bottom_right_quad_tile.x
y1 = top_left_quad_tile.y
y2 = bottom_right_quad_tile.y
for i in range(x1, x2+1):
for j in range(y1,y2+1):
all_tiles.append(mercantile.Tile(x=i,y=j,z=zoom))
return all_tiles
#GIVEN A TILE, FIND THE SMALLER TILES THAT LIE INSIDE
def get_inner_tiles(tile_string):
combos = range(4)
children = []
for i in combos:
t_s = tile_string+str(i)
children.append(get_tile_from_key(t_s))
return children
#GIVEN A QUAD_TILE, GET ITS LAT-LNG BOUNDS
def get_bounding_lng_lat(tile_key):
tile = get_tile_from_key(tile_key)
bounds = mercantile.bounds(tile)
#print(tile_key, tile, bounds)
return (bounds.north, bounds.south, bounds.east, bounds.west)
if __name__ == '__main__':
tile_key = "02132333222"
tl = get_quad_tile(39.800137, -105.002746, 11)
    print(get_quad_key_from_tile(tl.x, tl.y, tl.z))  # was (tl.z, tl.y, tl.z): z passed where x belongs
'''for t in find_all_inside_box(40.33, 40.866726, -105.31, -104.96, 11):
print(get_quad_key_from_tile(t.x, t.y, t.z))'''
bounds = get_bounding_lng_lat("0231")
print(bounds)
print("BOUNDS>>", bounds[0],",",bounds[3], bounds[1],",",bounds[2])
xs = (49.2-25.86)/ (bounds[0]-bounds[1])
ys = (124.4 - 73.56) / (bounds[2] - bounds[3])
print(xs,ys)
|
InsertCoolNameHere/Quby
|
geo_utils/quadtile_utils.py
|
quadtile_utils.py
|
py
| 2,812 |
python
|
en
|
code
| 0 |
github-code
|
6
|
17772360699
|
import streamlit as st
import pandas as pd
import numpy as np
import psycopg2
from streamlit_option_menu import option_menu
#------- PAGE SETTINGS------------
page_title = "GHG Emission Calculator"
Page_icon = "🌳"
layout = "centered"
#-----------------------------------
st.set_page_config(page_title=page_title,page_icon=Page_icon,layout=layout)
st.title(page_title + " " + Page_icon)
# --- HIDE STREAMLIT STYLE ---
hide_st_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
header {visibility: hidden;}
</style>
"""
st.markdown(hide_st_style, unsafe_allow_html=True)
# --- NAVIGATION MENU ---
selected = option_menu(
menu_title=None,
options=["Data Entry", "Data Visualization"],
icons=["pencil-fill", "bar-chart-fill"], # https://icons.getbootstrap.com/
orientation="horizontal",
)
@st.cache_resource
def get_data ():
path = "Emissions.xlsx"
return pd.read_excel(path,sheet_name="Sheet2",usecols="A:I")
#----remember to remove duplicates
data = get_data()
data_na = data.dropna()
if selected == "Data Entry":
options1 = data_na.iloc[:,0].unique()
selected_option1 = st.selectbox("Select Scope:",options1)
#----filtering scope-------
filtered_data = data_na[data_na.iloc[:,0]==selected_option1]
#----get unique values for option 2-----
option2 = filtered_data.iloc[:,1].unique()
selected_option2 = st.selectbox("Select Category:",option2)
#-----filter based on option 2-----
filter_2 = filtered_data[filtered_data.iloc[:,1]==selected_option2]
option3 = filter_2.iloc[:,2].unique()
selected_option3 = st.selectbox("Select Sub Category:",option3)
#----filter based on option 3----
filter_3 = filter_2[filter_2.iloc[:,2]== selected_option3]
option4 = filter_3.iloc[:,3].unique()
selected_option4 = st.selectbox("Select Material:",option4)
#-----filter based on option 4----
filter_4 = filter_3[filter_3.iloc[:,3]==selected_option4]
option5 = filter_4["UOM"].unique()
selected_option5 = st.selectbox("Select Unit of Measure:",option5)
#----filter based on option 5-------
filter_5 = filter_4[filter_4["UOM"]== selected_option5]
option6 = filter_5["GHG/Unit"].unique()
selected_option6 = st.selectbox("Select Unit:",option6)
#-----filter based on last option-----
filter_6 = filter_5[filter_5["GHG/Unit"]== selected_option6]
option_7 = filter_6["GHG Conversion Factor 2022"].unique()
selected_option7 = st.selectbox("Emission Factor:",option_7)
#option7_int = int(selected_option7)
#----create an input field-------
with st.form("my_form", clear_on_submit=True):
values = st.number_input("Enter Amount",format="%i",min_value=0)
values_int = int(values)
#----multiplying the two columns together to find total emission----
emission = int(selected_option7 * values_int)
total = st.number_input("Total Emissions:",emission)
#---Creating the submit button-------------
submitted = st.form_submit_button("Save Data")
if submitted:
selected_option1 = selected_option1
selected_option2 = selected_option2
selected_option3 = selected_option3
selected_option4 = selected_option4
values = values
total = total
st.success("Data Saved Successfully!")
|
sforson14/Data
|
myfile.py
|
myfile.py
|
py
| 3,457 |
python
|
en
|
code
| 0 |
github-code
|
6
|
40466717976
|
import sys
from collections import deque
input = sys.stdin.readline
graph = []
for i in range(8):
graph.append(list(input().rstrip()))
answer = 0
def bfs():
direction = [[0,0],[0,-1],[0,1],[-1,0],[1,0],[-1,-1],[1,-1],[1,1],[-1,1]]
visited = [[0] * 8 for _ in range(8)]
dq = deque([7,0,0])
|
Cho-El/coding-test-practice
|
백준 문제/BFS/16954_움직이는 미로 탈출.py
|
16954_움직이는 미로 탈출.py
|
py
| 311 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71889266427
|
import json
import sys
max_buy = float('-inf')
min_sell = float('inf')
for line in sys.stdin:
rec = json.loads(line.strip())
if 'price' not in rec:
continue
if rec['side'] == 'sell':
min_sell = min(min_sell, float(rec['price']))
else:
max_buy = max(max_buy, float(rec['price']))
print('max_buy: %s, min_sell: %s' % (max_buy, min_sell))
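# A hedged refactor sketch (an addition): the same scan wrapped in a function,
# so it can be tested without piping a live feed through stdin.
def scan_prices(lines):
    max_buy, min_sell = float('-inf'), float('inf')
    for line in lines:
        rec = json.loads(line.strip())
        if 'price' not in rec:
            continue
        if rec['side'] == 'sell':
            min_sell = min(min_sell, float(rec['price']))
        else:
            max_buy = max(max_buy, float(rec['price']))
    return max_buy, min_sell

# scan_prices(['{"side": "sell", "price": "101.5"}',
#              '{"side": "buy", "price": "99.0"}'])  # -> (99.0, 101.5)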
|
fivetentaylor/intro_to_programming
|
coinbase/format_wss_feed.py
|
format_wss_feed.py
|
py
| 383 |
python
|
en
|
code
| 0 |
github-code
|
6
|
72592445948
|
import yaml
from .defaults import METADETECT_CONFIG
def load_config(config_path):
"""Load a config file and return it.
Parameters
----------
config_path : str, optional
The path to the config file.
Returns
-------
sim_config : dict
A dictionary of the sim config options.
run_config : dict
A dictionary of the run config options.
shear_meas_config : dict
A dictionary of the shear measurement options.
swap12 : bool
If True, swap the role of the 1- and 2-axes in the shear measurement.
cut_interp : bool
If True, cut objects with too much interpolation.
"""
with open(config_path, 'r') as fp:
config = yaml.load(fp, Loader=yaml.Loader)
swap12 = config.pop('swap12', False)
cut_interp = config.pop('cut_interp', False)
run_config = {
'n_patches_per_job': 200,
'n_patches': 10_000_000,
'n_jobs_per_script': 500}
run_config.update(config.get('run', {}))
shear_meas_config = config.get('shear_meas', {})
shear_meas_config.update(METADETECT_CONFIG)
return config['sim'], run_config, shear_meas_config, swap12, cut_interp
|
beckermr/metadetect-coadding-sims
|
coadd_mdetsims/config.py
|
config.py
|
py
| 1,186 |
python
|
en
|
code
| 0 |
github-code
|
6
|
32156253381
|
import hashlib
from collections import OrderedDict
post_data = {
'shop_id': 'D0F98E7D7742609DC508D86BB7500914',
'amount': 100,
'order_id': '123',
'payment_system': 16,
'currency': 'RUB',
'sign': 'e13cd755e9b4632d51ae4d5c74c2f122',
}
secret = 'GB%^&*YJni677'
request_sign = post_data['sign']
try:
post_data.pop('sign')
except KeyError:
raise Exception('Request is not signed!')
string_to_hash = '&'.join(
[f'{key}={value}' for key, value in sorted(post_data.items())])
sign = hashlib.md5((string_to_hash + secret).encode()).hexdigest()
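# Presumably the example ends by comparing the recomputed signature with the
# one sent in the request (an assumption based on the variables above):
if sign != request_sign:
    raise Exception('Request signature is invalid!')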
|
Dmitrii-Kopeikin/tegro-docs-python-examples
|
python_examples/payments/payment_notification.py
|
payment_notification.py
|
py
| 578 |
python
|
en
|
code
| 1 |
github-code
|
6
|
15042871617
|
import RPi.GPIO as GPIO
import sys
import time
# When calling the function:
# Let 1 indicate the block is in the set
# 2 indicate the block is not in the set
def inSet( clr ):
"This function will flash a green or red light if the block is or is not in the set respectively"
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(5, GPIO.OUT)
GPIO.setup(6, GPIO.OUT)
GPIO.setup(13, GPIO.OUT)
if clr==1:
# Turn the LED blue
GPIO.output(5, GPIO.LOW)
GPIO.output(6, GPIO.LOW)
GPIO.output(13, GPIO.HIGH)
elif clr==2:
# Turn the LED red
GPIO.output(5, GPIO.HIGH)
GPIO.output(6, GPIO.LOW)
GPIO.output(13, GPIO.LOW)
else:
        # Default: turn the LED green if input was received but invalid
GPIO.output(5, GPIO.LOW)
GPIO.output(6, GPIO.HIGH)
GPIO.output(13, GPIO.LOW)
time.sleep(1)
GPIO.output(5, GPIO.LOW)
GPIO.output(6, GPIO.LOW)
GPIO.output(13, GPIO.LOW)
GPIO.cleanup()
return;
inSet(int(sys.argv[1]))
|
Amanda9m/Lego-Sorter
|
source/driver/LED.py
|
LED.py
|
py
| 1,102 |
python
|
en
|
code
| 3 |
github-code
|
6
|
14706890571
|
#!/usr/bin/env python
# coding: utf-8
import requests
import pymongo
import pandas as pd
from splinter import Browser
from bs4 import BeautifulSoup
import time
# #### Open chrome driver
# open chrome driver browser
def init_browser():
executable_path = {'executable_path': 'chromedriver'}
return Browser('chrome', **executable_path, headless=False)
# ## NASA Mars News - Collect Latest News Title and Paragraph Text
def scrape():
browser = init_browser()
# define url
mars_news_url = "https://mars.nasa.gov/news/"
time.sleep(3)
browser.visit(mars_news_url)
#putting a sleep function here seems to make the flask application run
time.sleep(3)
# create beautiful soup object
html = browser.html
mars_news_soup = BeautifulSoup(html, 'html.parser')
# I added a few time.sleep(3) functions to allow the browser time to scrape the data. Hopefully that works.
# find the first news title
news_title = mars_news_soup.body.find("div", class_="content_title").text
# find the paragraph associated with the first title
news_p = mars_news_soup.body.find("div", class_="article_teaser_body").text
time.sleep(3)
mars_image_url = "https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars"
browser.visit(mars_image_url)
# create the soup item
html_image = browser.html
mars_imaging = BeautifulSoup(html_image, 'html.parser')
    # the large image is within the figure element with class = lede
    image = mars_imaging.body.find("figure", class_="lede")
    # obtaining the url for the photo; `image` is already the <figure>, so take its <a> directly
    feat_img_url = image.a['href']
featured_image_url = f'https://www.jpl.nasa.gov{feat_img_url}'
featured_image_url
# ## Mars Weather
# open url in browser
#needs time to load
time.sleep(3)
# create a soup item
# ## Mars Facts
time.sleep(3)
# define url
mars_facts_url = "https://space-facts.com/mars/"
# read html into pandas
table = pd.read_html(mars_facts_url)
# returns the value from an html table
df = table[2]
df.columns = ["Description", "Value"]
# converting data to html table
mars_facts_html=df.to_html()
mars_facts_html
# ## Mars Hemispheres
# define url and open in browser
time.sleep(3)
mars_hemispheres_url = "https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars"
browser.visit(mars_hemispheres_url)
# #### Cerberus hemisphere
# click on the link for the Cerberus hemisphere
browser.click_link_by_partial_text('Cerberus')
# click on the open button to get to enhanced picture
browser.click_link_by_partial_text('Open')
# create a soup item
hemispheres_html = browser.html
cerberus_soup = BeautifulSoup(hemispheres_html, 'html.parser')
cerberus = cerberus_soup.body.find('img', class_ = 'wide-image')
cerberus_img = cerberus['src']
hem_base_url = 'https://astrogeology.usgs.gov'
#will store url later
cerberus_url = hem_base_url + cerberus_img
# #### Schiaperelli hemisphere
# define url and open in browser
time.sleep(3)
mars_hemispheres_url = "https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars"
browser.visit(mars_hemispheres_url)
# click on the link for the Cerberus hemisphere
browser.click_link_by_partial_text('Schiaparelli')
# click on the open button to get to enhanced picture
browser.click_link_by_partial_text('Open')
#schiap html page
# create a soup item
schiap_html = browser.html
schiap_soup = BeautifulSoup(schiap_html, 'html.parser')
#obtaining the image of the schiaparelli
schiap = schiap_soup.body.find('img', class_ = 'wide-image')
schiap_img = schiap['src']
hem_base_url = 'https://astrogeology.usgs.gov'
schiap_url = hem_base_url + schiap_img
# print(schiap_url)
# #### Syrtis hemisphere
# define url and open in browser
time.sleep(3)
mars_hemispheres_url = "https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars"
browser.visit(mars_hemispheres_url)
# click on the link for the Cerberus hemisphere
browser.click_link_by_partial_text('Syrtis')
# click on the link for the Cerberus hemisphere
browser.click_link_by_partial_text('Open')
# create a soup item
syrtis_html = browser.html
syrtis_soup = BeautifulSoup(syrtis_html, 'html.parser')
syrtis = syrtis_soup.body.find('img', class_ = 'wide-image')
syrtis_img = syrtis['src']
hem_base_url = 'https://astrogeology.usgs.gov'
syrtis_url = hem_base_url + syrtis_img
# print(syrtis_url)
# #### Valles hemisphere
# define url and open in browser
time.sleep(3)
mars_hemispheres_url = "https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars"
browser.visit(mars_hemispheres_url)
# click on the link for the Valles hemisphere
browser.click_link_by_partial_text('Valles')
# click on the link for the Valles hemisphere
browser.click_link_by_partial_text('Open')
# create a soup item
valles_html = browser.html
valles_soup = BeautifulSoup(valles_html, 'html.parser')
valles = valles_soup.body.find('img', class_ = 'wide-image')
valles_img = valles['src']
hem_base_url = 'https://astrogeology.usgs.gov'
valles_url = hem_base_url + valles_img
# print(valles_url)
# #### Define list of dictionaries that include each hemisphere
hemispheres_image_urls = [
{"title": "Valles Marineris Hemisphere", "img_url": valles_url},
{"title": "Cerberus Hemisphere", "img_url": cerberus_url},
{"title": "Schiaparelli Marineris Hemisphere", "img_url": schiap_url},
{"title": "Syrtis Major Hemisphere", "img_url": syrtis_url}
]
# dictionary should be returned
mars_dict = {
'headline': news_title,
'paragraph': news_p,
'featuredimage': featured_image_url,
# 'currentweather': mars_weather,
'factstable': mars_facts_html,
"va_title": "Valles Marineris Hemisphere", "va_img_url": valles_url,
"ce_title": "Cerberus Hemisphere", "ce_img_url": cerberus_url,
"sc_title": "Schiaparelli Marineris Hemisphere", "sc_img_url": schiap_url,
"sy_title": "Syrtis Major Hemisphere", "sy_img_url": syrtis_url}
# print(mars_dictionary)
browser.quit()
return mars_dict
|
lisaweinst/web-scraping-challenge
|
scrape_mars.py
|
scrape_mars.py
|
py
| 6,535 |
python
|
en
|
code
| 0 |
github-code
|
6
|
4491664030
|
def colored(r, g, b, text):
return "\033[38;2;{};{};{}m{}\033[38;2;255;255;255m".format(r, g, b, text)
class ProgressCounter:
def __init__(
self,
string: str,
max_count: int,
precision_digits: int = 2,
flush: bool = True
):
"""
Class for handling progress counting.
Uses simple formula to calculate progress:
percentage = (internal_count * 100 / max_count)
where 0 =< internal_count < max_count
Usage:
* Initialize the class by providing init arguments.
* Update Progress by calling the object.update() method.
Note:
Don't write anything to stdout (using print or otherwise) while you're updating progress.
e.g No print statements between class initialization and update. Until the progress has finished.
Args:
            string : type<str>, A string describing what is happening, e.g. Searching, Loading, etc.
NOTE: The colon will be added by the class, just provide what's happening in the string.
e.g string = Searching
Output: Searching progress: 50.00%
            max_count : type<int>, The upper limit for progress percentage calculation. (The denominator.)
            precision_digits : type<int>, Number of digits of precision to display for the percentage. (Default 2 digits)
            flush : type<bool>, Flush the progress to stdout every time an update is made. (Default True)
Methods:
Progresscounter.update()
Updates the progress and reflects that on stdout.
Args:
                count : type<int>, Number of counts the progress should be increased by. (Default 1)
"""
self.string:str = string
self.max_count:int = max_count
self.precision_digits:int = precision_digits
self.flush:bool = flush
self.progress_count:int = 0
self._num_digits = 0
self._start()
def _start(self):
print(
f"{self.string} progress: {self.progress_count * 100 / self.max_count:.{self.precision_digits}f}%",
flush = self.flush,
end = '',
)
def update(self, count: int = 1):
"""
Updates the progress and reflects that on stdout.
Args:
            count : type<int>, Number of counts the progress should be increased by. (Default 1)
"""
        if self.max_count - self.progress_count < count:
            print("Can't update, max count would be exceeded!")
return
_length_of_progress_string = len(f"{self.progress_count * 100 / self.max_count:.{self.precision_digits}f}%")
self.progress_count += count
# Going back to start of the progress counter string.
print("\b" * _length_of_progress_string, flush=self.flush, end = '')
_current_progress_percentage = self.progress_count * 100 / self.max_count
print(
f"{_current_progress_percentage:.{self.precision_digits}f}%",
flush = self.flush,
end = '',
)
if _current_progress_percentage > 99.99:
print('', flush = self.flush)
import os
class progressTracker:
    def __init__(self, logFile):
self.logfile = logFile
self.config_list = []
if os.path.exists(self.logfile):
with open(self.logfile) as f:
self.raw_file_txt = f.read()
self.config_list = [config for config in self.raw_file_txt.split('\n') if config]
else:
            with open(logFile, 'w') as f:
                f.write('\n')
def check(self, config):
if config in self.config_list:
return True
else:
# Add the config to the file.
self.config_list.append(config)
self._flush()
return False
def _flush(self):
with open(self.logfile , 'w') as f:
f.write(
'\n'.join(self.config_list)
)
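# Minimal usage sketch for ProgressCounter (illustrative only; the work loop
# below is hypothetical and not part of the original module):
if __name__ == '__main__':
    import time
    pc = ProgressCounter("Searching", max_count=100)
    for _ in range(100):
        time.sleep(0.01)  # simulate one unit of work
        pc.update()       # advance the counter and redraw the percentage in place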
|
ubdussamad/kptemp
|
utils.py
|
utils.py
|
py
| 4,022 |
python
|
en
|
code
| 0 |
github-code
|
6
|
17510610363
|
def decodeVariations(s: str) -> int:
    def validate(lo, hi):
        # a fragment may not start with '0' and must map to a letter code 1..26
        return s[lo] != '0' and 1 <= int(s[lo:hi+1]) <= 26
    def decode(lo, tracker, res):
        # lo is where the next fragment starts; tracker stays True only while
        # every fragment chosen so far is a valid letter code.
        # (The original recursion never reached its lo == n base case, so it
        # always returned 0; this version completes splits correctly.)
        if lo == n:
            ress.append(list(res))
            return int(tracker)
        count = 0
        for hi in range(lo, n):
            res.append(s[lo:hi+1])
            count += decode(hi + 1, tracker and validate(lo, hi), res)
            res.pop()
        return count
    n = len(s)
    ress = []
    count = decode(0, True, [])
    print(ress)  # every split tried, valid or not
    return count
s = "1267"
print(decodeVariations(s))
'''
1 2 6 7
1 26 7
12 6 7
ans = 3
'''
'''
1262
.
1 2 6 2
1: 2, 6, 2
1:
'''
'''
def decodeVariations(S):
"""
@param S: str
@return: int
"""
def helper(curridx=0, oneres=None, tracker=True):
if oneres is None:
oneres = []
count = 0
for endidx in range(curridx, n):
cand = S[curridx:endidx+1]
tracker = tracker and 1 <= int(cand) <= 26
oneres.append(cand)
count += helper(endidx + 1, oneres, tracker)
oneres.pop()
if curridx == n:
results.append(list(oneres))
count = int(tracker)
return count
n = len(S)
results = []
count = helper()
#for result in results:
# print(result)
#print(count)
return count
S = '1262'
print(decodeVariations(S))
'''
'''
1262
.
1 2 6 2
1: 2, 6, 2
1:
'''
'''
def decodeVariations(S):
"""
if memo[curridx][endidx] == -1:
memo[curridx][endidx] = helper(endidx + 1, oneres, tracker)
else:
print("using memo ", curridx, endidx)
@param S: str
@return: int
"""
def helper(curridx=0, oneres=None, tracker=True):
if oneres is None:
oneres = []
count = 0
for endidx in range(curridx, n):
if True or memo[curridx][endidx] == -1:
cand = S[curridx:endidx+1]
oneres.append(cand)
print('before', curridx, endidx, memo[curridx][endidx])
memo[curridx][endidx] = helper(endidx + 1, oneres, tracker and 1 <= int(cand) <= 26)
print('after', curridx, endidx, memo[curridx][endidx])
oneres.pop()
else:
print("using memo ", curridx, endidx)
count += memo[curridx][endidx]
if curridx == n:
results.append(list(oneres))
count = int(tracker)
return count
n = len(S)
results = []
memo = [[-1 for _ in range(n)] for _ in range(n)]
count = helper()
for result in results:
print(result)
print(count)
return count
S = '1262'
print(decodeVariations(S))
'''
'''
('using memo ', 3, 3)
('using memo ', 2, 2)
('using memo ', 2, 3)
('using memo ', 3, 3)
['1', '2', '6', '2']
['1', '2', '62']
['1', '262']
['1262']
4
'''
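# A compact bottom-up DP for the same problem, added here as an illustrative
# sketch (it is not one of the original drafts above): dp[i] counts the
# decodings of s[:i]; a 1-digit step needs s[i-1] != '0', a 2-digit step
# needs 10 <= int(s[i-2:i]) <= 26.
def decode_variations_dp(s: str) -> int:
    n = len(s)
    dp = [0] * (n + 1)
    dp[0] = 1  # the empty prefix decodes exactly one way
    for i in range(1, n + 1):
        if s[i - 1] != '0':
            dp[i] += dp[i - 1]
        if i >= 2 and 10 <= int(s[i - 2:i]) <= 26:
            dp[i] += dp[i - 2]
    return dp[n]

# decode_variations_dp("1267") == 3, matching the hand trace at the top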
|
soji-omiwade/cs
|
dsa/pramp/decode_variations.py
|
decode_variations.py
|
py
| 2,798 |
python
|
en
|
code
| 0 |
github-code
|
6
|
42495755382
|
#First attempt to connect to ethereum mainnet via Infura API
import json
import web3
from web3 import Web3, HTTPProvider
try:
w3 = Web3(Web3.HTTPProvider("https://mainnet.infura.io/dPotOByPqLlLN3nx14Pq"))
print('w3 HTTPProvider call success')
except Exception:
    print('w3 HTTPProvider call failure')
block = w3.eth.getBlock('latest')
uncles = block["uncles"]
#for element in block: print(element, block[element])
blockNumber = block["number"]
txnCount = w3.eth.getBlockTransactionCount(blockNumber)
print("Block:", blockNumber, " Number of transactions:", txnCount, "Miner: ", block["miner"])
print("Number of Uncles:", len(uncles))
minerReward = 3.0
uncleList = list()
for uncle in uncles:
#print("uncle:", w3.toHex(uncle))
uBlock = w3.eth.getBlock(uncle)
    # (uncle_number + 8 - block_number) * reward / 8 is the *uncle* miner's reward formula
    minerReward += (uBlock["number"] + 8 - blockNumber) * 3 / 8
print("Miner Reward: ", minerReward)
txnHashes = block["transactions"]
# Extract cumulativeGasUsed from last transaction in the block
lastTxnHash = txnHashes[txnCount - 1]
cumTotal = 0.0
lastTxnR = w3.eth.getTransactionReceipt(lastTxnHash)
if lastTxnR != None:
cumTotal = lastTxnR["cumulativeGasUsed"]
    # approximation: prices every unit of gas at 1 gwei (hence the gwei -> ether conversion)
    gwei = w3.toWei(cumTotal, 'gwei')
    cumTotal = w3.fromWei(gwei, 'ether')
print("Total Gas Consumed", cumTotal)
minerReward += float(cumTotal)
print("Miner Reward: ", minerReward)
#for txnHash in txnHashes:
# txn = w3.eth.getTransaction(txnHash)
# wei = txn["value"]
# value = w3.fromWei(wei, 'ether')
# print(txn["from"], txn["to"], value)
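# A more accurate fee total would price each transaction's gas individually
# instead of assuming 1 gwei. Illustrative sketch only (slow: one RPC
# round-trip per transaction):
#
# feeTotal = 0
# for txnHash in txnHashes:
#     txn = w3.eth.getTransaction(txnHash)
#     receipt = w3.eth.getTransactionReceipt(txnHash)
#     feeTotal += receipt["gasUsed"] * txn["gasPrice"]
# print("Exact fees (ether):", w3.fromWei(feeTotal, 'ether'))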
|
KedarJo/ethScan
|
ethHello.py
|
ethHello.py
|
py
| 1,494 |
python
|
en
|
code
| 0 |
github-code
|
6
|
18453482266
|
# Time complexity is O(log n)
import random
def round_down(value, decimals):
factor = 1 / (10 ** decimals)
return (value // factor) * factor
def binary_search_recursive(nums, target, offset=0):
    start_pos = 0
    end_pos = len(nums)
    middle_pos = (start_pos + end_pos) // 2
    if start_pos >= end_pos:
        return -1
    print(
        f"Now search from {offset + start_pos}---->{offset + middle_pos}--->{offset + end_pos} and mid_element-->{nums[middle_pos]}")
    if nums[middle_pos] == target:
        return offset + middle_pos
    if target > nums[middle_pos]:
        # search the right half; offset keeps returned indices relative to the original list
        return binary_search_recursive(nums[middle_pos + 1:], target, offset + middle_pos + 1)
    return binary_search_recursive(nums[:middle_pos], target, offset)
def leet_code(nums, target):
start_pos = 0
end_pos = len(nums)-1
while start_pos <= end_pos:
middle_pos = int((start_pos+end_pos)//2)
print(
f"Now search from {start_pos}---->{middle_pos}--->{end_pos} and mid_element-->{nums[middle_pos]}")
if nums[middle_pos] == target:
return middle_pos
if target > nums[middle_pos]:
start_pos = middle_pos+1
elif target < nums[middle_pos]:
end_pos = middle_pos-1
return -1
def binary_search(arr, search_element):
# Sorted Array
# making the arr to ascending order
isAsc = True if arr[0] <= arr[len(arr)-1] else False
start_pos = 0
end_pos = len(arr)-1
no_of_comparions = 0
while start_pos <= end_pos:
mid_pos = int(round_down((start_pos+end_pos)/2, 0))
print(
f"Now search from {start_pos}---->{mid_pos}--->{end_pos} and mid_element-->{arr[mid_pos]}")
if arr[mid_pos] == search_element:
print("Found the search element at position:", mid_pos)
# return the arr[mid_pos] as we found the ans
break
if isAsc:
# If given array is sorted in ascending order
if search_element < arr[mid_pos]:
end_pos = mid_pos-1
else:
start_pos = mid_pos+1
else:
# If the array is sorted in descending order
if arr[mid_pos] < search_element:
end_pos = mid_pos-1
else:
start_pos = mid_pos+1
no_of_comparions += 1
# return -1 if code executes this line it means the element is not found in the array
print("NO OF COMPARISIONS FOR SEARCHING THE VARIABLE:", no_of_comparions)
if __name__ == '__main__':
range_of_list = int(input("Enter the maxium Range of List:")) or 10000000
total_elements = int(input("Enter the total_elements:")) or 5863
search_element = int(input("Enter the search element:"))
arr = random.sample(range(range_of_list), total_elements)
    arr.sort()  # binary_search_recursive assumes ascending order; use binary_search for descending lists
    binary_search_recursive(arr, search_element)
print(leet_code([-1, 0, 3, 5, 9, 12], 13))
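# For production use, the standard library's bisect module gives the same
# O(log n) lookup without a hand-rolled loop. Minimal sketch (assumes an
# ascending list, like leet_code above):
import bisect

def bisect_search(nums, target):
    i = bisect.bisect_left(nums, target)  # leftmost insertion point for target
    return i if i < len(nums) and nums[i] == target else -1

# bisect_search([-1, 0, 3, 5, 9, 12], 9) == 4; absent targets return -1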
|
beharamadhu270405/python-DS
|
Searching/binary-search.py
|
binary-search.py
|
py
| 2,898 |
python
|
en
|
code
| 0 |
github-code
|
6
|
648194677
|
import numpy as np
from ._segmentation import compute_mws_clustering, MWSGridGraph
from ..affinities import compute_affinities_with_lut
# TODO support a data backend like zarr etc.
class InteractiveMWS():
def __init__(self, affinities, offsets, n_attractive_channels=None,
strides=None, randomize_strides=False):
if len(offsets) != affinities.shape[0]:
raise ValueError("Number offsets and affinity channels do not match")
self._shape = affinities.shape[1:]
# set the state (grid graph, affinities, seeds)
self._grid_graph = MWSGridGraph(self.shape)
self._affinities = affinities
self._offsets = offsets
self._seeds = np.zeros(self.shape, dtype='uint64')
self._n_attractive = self.ndim if n_attractive_channels is None else n_attractive_channels
# strides and randomization
self.strides = [1] * self.ndim if strides is None else strides
self.randomize_strides = randomize_strides
        # compute the initial graph state (= uv-ids, mutex-uv-ids, ...)
self._update_graph()
self._locked_seeds = set()
@property
def shape(self):
return self._shape
@property
def ndim(self):
return len(self.shape)
@property
def max_seed_id(self):
return self._seeds.max()
@property
def offsets(self):
return self._offsets
#
# update the graph
#
def _update_graph(self, mask=None):
if mask is not None:
self._grid_graph.clear_mask()
self._grid_graph.set_mask(mask)
# compute the attractive edges
# we set to > 1 to make sure these are the very first in priority
self._grid_graph.add_attractive_seed_edges = True
self._uvs, self._weights = self._grid_graph.compute_nh_and_weights(1. - self._affinities[:self._n_attractive],
self._offsets[:self._n_attractive])
# compute the repulsive edges
self._grid_graph.add_attractive_seed_edges = False
(self._mutex_uvs,
self._mutex_weights) = self._grid_graph.compute_nh_and_weights(self._affinities[self._n_attractive:],
self._offsets[self._n_attractive:],
strides=self.strides,
randomize_strides=self.randomize_strides)
#
# seed functionality
#
def _update_seeds_dense(self, new_seeds, seed_offset):
if new_seeds.shape != self.shape:
raise ValueError("Dense seeds have incorrect shape")
seed_mask = new_seeds != 0
self._seeds[seed_mask] = (new_seeds[seed_mask] + seed_offset)
def _update_seeds_sparse(self, new_seeds, seed_offset):
new_seeds_array = np.zeros_like(self._seeds)
for seed_id, coords in new_seeds.items():
new_id = seed_id + seed_offset
self._seeds[coords] = new_id
new_seeds_array[coords] = new_id
return new_seeds_array
def update_seeds(self, new_seeds, seed_offset=0):
if isinstance(new_seeds, np.ndarray):
self._update_seeds_dense(new_seeds, seed_offset)
elif isinstance(new_seeds, dict):
new_seeds = self._update_seeds_sparse(new_seeds, seed_offset)
else:
raise ValueError("new_seeds must be np.ndarray or dict, got %s" % type(new_seeds))
self._grid_graph.update_seeds(new_seeds)
def clear_seeds(self):
self._grid_graph.clear_seeds()
self._seeds = np.zeros(self.shape, dtype='uint64')
def merge(self, seg, ida, idb):
seg_mask = np.isin(seg, [ida, idb])
bb = np.where(seg_mask)
bb = tuple(slice(b.min(), b.max() + 1) for b in bb)
seg_sub = seg[bb]
# computing the affmask for the bounding box of the two segment ids
keys = np.array([[min(ida, idb), max(ida, idb)]], dtype='uint64')
vals = np.array([1.], dtype='float32')
aff_mask, _ = compute_affinities_with_lut(seg_sub, self._offsets, keys, vals,
default_val=0)
aff_mask = aff_mask.astype('bool')
n_mask = aff_mask.sum()
# we only need to change the affinities if there is something in the mask
if n_mask > 0:
bb = (slice(None),) + bb
self._affinities[bb][aff_mask] = 0
return n_mask
#
# segmentation functionality
#
def __call__(self, prev_seg=None):
# if we are passed a previous segmentation, we use it
# to mask with the locked_seeds
if prev_seg is not None and self._locked_seeds:
mask = ~np.isin(prev_seg, list(self._locked_seeds))
else:
mask = None
self._update_graph(mask=mask)
n_nodes = self._grid_graph.n_nodes
seg = compute_mws_clustering(n_nodes, self._uvs, self._mutex_uvs,
self._weights, self._mutex_weights)
# retrieve the old segmentation
if mask is not None:
mask = ~mask
seg[mask.ravel()] = (prev_seg[mask] + seg.max())
seg = self._grid_graph.relabel_to_seeds(seg)
return seg.reshape(self.shape)
#
# locked segment functionality
#
@property
def locked_seeds(self):
return self._locked_seeds
def lock_seeds(self, locked_seeds):
self._locked_seeds.update(locked_seeds)
def unlock_seeds(self, unlock_seeds):
self._locked_seeds.difference_update(unlock_seeds)
@property
def affinities(self):
return self._affinities
@affinities.setter
def affinities(self, affs):
self._affinities = affs
#
# tiktorch functionality
#
# TODO support a ROI
def update_affinities(self, affinities):
if affinities.shape[1:] != self.shape:
raise ValueError("Invalid Shape")
if affinities.shape[0] != len(self._offsets):
raise ValueError("Invalid number of channels")
self._affinities = affinities
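# Minimal usage sketch (illustrative only; the shapes, offsets and seed
# coordinates below are hypothetical, not taken from the library's docs):
#
# import numpy as np
# offsets = np.array([[-1, 0], [0, -1], [-3, 0], [0, -3]])
# affs = np.random.rand(len(offsets), 100, 100).astype('float32')
# imws = InteractiveMWS(affs, offsets, n_attractive_channels=2)
# imws.update_seeds({1: (slice(10, 12), slice(10, 12))})  # sparse seed dict
# seg = imws()  # (100, 100) label image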
|
constantinpape/affogato
|
src/python/module/affogato/segmentation/interactive_mws.py
|
interactive_mws.py
|
py
| 6,247 |
python
|
en
|
code
| 9 |
github-code
|
6
|
30323152947
|
import numpy as np
import pandas as pd
def calculate_rank_probability(df):
"""
1着確率を元に、2着と3着の確率を計算する
計算方法にはベンターが提唱する計算式を使用する
Parameters
----------
df : pandas.DataFrame
レースごとの1着確率を保持しているデータフレーム。
Returns
-------
df : pandas.DataFrame
レースごとに1着 / 2着 / 3着確率を計算したデータフレーム
"""
    # these exponents are Benter's published values; re-estimating them on this data would be better
SECOND_CORRECT_VAL = 0.81
THIRD_CORRECT_VAL = 0.65
total = (
df.groupby("race_id")
.agg({"predict": np.sum})
.rename(columns={"predict": "total_proba"})
)
base = pd.merge(df, total, on=["race_id"], how="left")
base["first_proba"] = base["predict"] / base["total_proba"]
base["second_proba_source"] = base["first_proba"] ** SECOND_CORRECT_VAL
base["third_proba_source"] = base["first_proba"] ** THIRD_CORRECT_VAL
source = (
base.groupby("race_id")
.agg({"second_proba_source": np.sum, "third_proba_source": np.sum})
.rename(
columns={
"second_proba_source": "total_second_proba_source",
"third_proba_source": "total_third_proba_source",
}
)
)
base = pd.merge(base, source, on=["race_id"], how="left")
base["second_proba"] = (
base["second_proba_source"] / base["total_second_proba_source"]
)
base["third_proba"] = base["third_proba_source"] / base["total_third_proba_source"]
return base[["race_id", "horse_no", "first_proba", "second_proba", "third_proba"]]
def correct_proba_in_race(df):
"""
レース毎に勝率を補正する
Parameters
----------
df : pandas.DataFrame
データフレーム
Returns
-------
df : pandas.DataFrame
補正後のデータフレーム
"""
total = (
df.groupby("race_id")
.agg({"proba": np.sum})
.rename(columns={"proba": "total_proba"})
)
base = pd.merge(df, total, on=["race_id"], how="left")
base["correct_proba"] = base["proba"] / base["total_proba"]
return base.drop(["total_proba"], axis=1)
def create_horse_proba_array(df, proba_key):
"""
馬の予測値の配列を作る。馬番が欠番していた場合はnanで埋める。
Parameters
----------
df : pandas.DataFrame
データフレーム
proba_key : string
予測カラムの名前
Returns
-------
array : array
予測値の配列
"""
array = []
horse_numbers = df.horse_no.values
values = df[proba_key].values
current_horse_no = 1
for i in range(len(horse_numbers)):
no = horse_numbers[i]
if no == current_horse_no:
            # no gap in the horse numbers
array.append(values[i])
else:
            # gap in the horse numbers
for j in range(no - current_horse_no):
                # fill one NaN for each missing number
array.append(np.nan)
current_horse_no += 1
array.append(values[i])
current_horse_no += 1
return array
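# Minimal usage sketch (hypothetical values, not from the project's data):
#
# df = pd.DataFrame({
#     "race_id": [1, 1, 1],
#     "horse_no": [1, 2, 3],
#     "predict": [0.5, 0.3, 0.2],
# })
# ranked = calculate_rank_probability(df)
# # ranked now holds first_proba / second_proba / third_proba per horse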
|
keimii/horse-racing-tools
|
src/probability_calculation.py
|
probability_calculation.py
|
py
| 3,269 |
python
|
ja
|
code
| 0 |
github-code
|
6
|
655272067
|
import os
import napari
import z5py
def view_result(sample, checkpoint_name):
halo = [25, 512, 512]
path = f'./data/{sample}.n5'
with z5py.File(path, 'r') as f:
ds = f['raw']
bb = tuple(slice(max(sh // 2 - ha, 0),
min(sh // 2 + ha, sh))
for sh, ha in zip(ds.shape, halo))
raw = ds[bb]
ds = f['labels']
labels = ds[bb]
prefix = f'predictions/{checkpoint_name}/'
fg_key = prefix + 'foreground'
if fg_key in f:
ds = f[fg_key]
fg = ds[bb]
else:
fg = None
bd_key = prefix + 'boundaries'
aff_key = prefix + 'affinities'
if bd_key in f:
ds = f[bd_key]
boundaries = ds[bb]
elif aff_key in f:
ds = f[aff_key]
bb_affs = (slice(None),) + bb
boundaries = ds[bb_affs]
else:
boundaries = None
prefix = f'segmentation/{checkpoint_name}'
# ws_key = prefix + '/watershed'
# if ws_key in f:
# ds = f[ws_key]
# ws = ds[bb]
# else:
# ws = None
ws = None
mc_key = prefix + '/multicut_postprocessed'
if mc_key in f:
ds = f[mc_key]
mc = ds[bb]
else:
mc = None
mws_key = prefix + '/mutex_watershed_postprocessed'
if mws_key in f:
ds = f[mws_key]
mws = ds[bb]
else:
mws = None
with napari.gui_qt():
viewer = napari.Viewer()
viewer.add_image(raw)
if fg is not None:
viewer.add_image(fg)
if boundaries is not None:
viewer.add_image(boundaries)
if ws is not None:
viewer.add_labels(ws)
if mc is not None:
viewer.add_labels(mc)
if mws is not None:
viewer.add_labels(mws)
viewer.add_labels(labels)
def view_results(samples, checkpoint):
checkpoint_name = os.path.split(checkpoint)[1]
for sample in samples:
view_result(sample, checkpoint_name)
if __name__ == '__main__':
view_result('human_small', 'affinity_model_default_human_rat')
|
constantinpape/torch-em
|
experiments/unet-segmentation/mitochondria-segmentation/mito-em/challenge/check_result.py
|
check_result.py
|
py
| 2,239 |
python
|
en
|
code
| 42 |
github-code
|
6
|
38961213741
|
import functools
class Person:
def __init__(self,Eid,Ename,Desig,sal):
self.Eid=Eid
self.Ename=Ename
self.Desig=Desig
self.sal=int(sal)
def PrintValues(self):
print("Emp Id",self.Eid)
print("Emp name",self.Ename)
print("Emp Degnation",self.Desig)
print("Emp salary ",self.sal)
def __str__(self):
return self.Ename
f=open("EmpDetails","r")
empl=[]
for data in f:
data=data.rstrip("\n").split(",")
Eid=data[0]
Ename=data[1]
Desig=data[2]
sal=data[3]
obj=Person(Eid,Ename,Desig,sal)
empl.append(obj)
lst=[]
# for emp
# maxval=functools.reduce(lambda emp:max(sal),lst)
# print(maxval)
#reduce can be use in integer value
maxval=list(map(lambda obj:obj.sal,empl))
print(maxval)
m=functools.reduce(lambda sal1,sal2:sal1 if sal1>sal2 else sal2,maxval)
print(m)
maxsal=list(filter(lambda emp:emp.sal==m,empl))
for i in maxsal:
print(i)
|
Aswin2289/LuminarPython
|
LuminarPythonPrograms/Oops/empSalReduce.py
|
empSalReduce.py
|
py
| 945 |
python
|
en
|
code
| 0 |
github-code
|
6
|
17319926912
|
#!usr/bin/python
#split the sequence
sequence = open("AJ223353.fasta")
sequence = sequence.read().replace("\n","")
coding = sequence[28:409]
nocoding = sequence[0:28]+sequence[409:]
seq_local = open("plain_genomic_seq.txt").read().rstrip().upper()
print(sequence)
print(seq_local)
seq_localdna = seq_local.replace("X","").replace("S","").replace("K","").replace("L","")
print(seq_localdna)
localcds = seq_localdna[0:63]+seq_localdna[90:]
local_intron = seq_localdna[63:90]
exons_out = open("All_exons.fasta", "w")
exons_out.write(">AJ223353_exon01_length" + str(len(coding)) + "\n" + coding+ "\n")
exons_out.write(">LocalSeqcds_length" + str(len(localcds)) + "\n" + localcds + "n")
exons_out.close()
print(open("All_exons.fasta").read())
introns_out = open("All_noncodings.fasta", "w")
introns_out.write(">AJ223353_noncoding01_length" + str(len(nocoding)) + "\n" + nocoding + "\n")
introns_out.write(">LocalSeq_intron01_length" + str(len(local_intron)) + "\n" + local_intron)
introns_out.close()
print(open("All_noncodings.fasta").read())
|
DHS123456/exercise
|
Lecture11/excercise.py
|
excercise.py
|
py
| 1,043 |
python
|
en
|
code
| 0 |
github-code
|
6
|
17335569332
|
import pygame
import math
import random
class Bullet():
def __init__(self, health, direction, start, colour, rRange):
self.dims: tuple((int, int)) = (20, 20)
self.sprite = pygame.Surface((20, 20))
self.sprite.fill(colour)
self.sprite.set_colorkey(colour)
self.x, self.y = start[0], start[1]
self.body = pygame.Rect(self.pos, self.dims)
self.rRange = rRange
self.distance = 0
#health
self.hp = health
#mobility
self.speed = 10
self.direction = direction
@property
def w(self):
return self.dims[0]
@property
def h(self):
return self.dims[1]
@property
def pos(self):
return (self.x, self.y)
def update(self):
moveVecx = self.direction["chx"] * self.speed
moveVecy = self.direction["chy"] * self.speed
self.distance += math.sqrt(moveVecx**2 + moveVecy**2)
self.x += moveVecx
self.y += moveVecy
self.body.x = self.x
self.body.y = self.y
def render(self, screen, dims):
screen.blit(self.sprite, (self.x, self.y))
pygame.draw.rect(screen, (240, 2, 100), self.body)
|
andrewchu16/untitledproject
|
src/bullet.py
|
bullet.py
|
py
| 1,225 |
python
|
en
|
code
| 4 |
github-code
|
6
|
21990463619
|
from flask import Flask, g, render_template,request,redirect,session,url_for,flash
import sqlite3
app = Flask(__name__)
app.config['SECRET_KEY'] = 'dev'
db_path = input("Enter database path: ")
# =============================================================================
# /Users/Eugen/Desktop/Final/blog.db
# =============================================================================
def connect_db():
sql = sqlite3.connect(db_path)
sql.row_factory = sqlite3.Row
return sql
def get_db():
    if not hasattr(g, 'sqlite3_db'):
g.sqlite3_db = connect_db()
return g.sqlite3_db
@app.before_request
def before_request():
g.db = get_db()
if 'username' not in session:
session['username']=None
@app.teardown_appcontext
def close_db(error):
    if hasattr(g, 'sqlite3_db'):
        g.sqlite3_db.close()
@app.route('/')
def index():
if session['username'] != None:
username = session['username']
cur = g.db.execute('SELECT * FROM posts ORDER BY published_date DESC')
data = cur.fetchall()
return render_template('index.html',data=data,username=username)
else:
cur = g.db.execute('SELECT * FROM posts ORDER BY published_date DESC')
data = cur.fetchall()
return render_template('index2.html',data=data)
@app.route('/login',methods=['GET','POST'])
def login():
if request.method =='POST':
username = request.form['username']
password = request.form['password']
cur = g.db.execute('SELECT * from users')
user_data = cur.fetchall()
try:
g.db.execute('INSERT into users (username,password) values (?,?)',[username,password])
g.db.commit()
session['username'] = request.form['username']
except Exception as e:
for row in user_data:
if(row[0] == username and row[1]==password ):
session['username'] = request.form['username']
print(e)
return redirect('/dashboard')
else:
return render_template('login.html')
@app.route('/logout',methods=['GET'])
def logout():
session['username']=None
return redirect('/')
@app.route('/dashboard',methods=['GET'])
def dashboard():
username = session['username']
if username != None:
cur = g.db.execute("SELECT * FROM posts WHERE author=?",[username])
data = cur.fetchall()
return render_template('dashboard.html',data=data,username=username)
else:
return redirect('/login')
@app.route('/add',methods=['GET','POST'])
def add():
username=session['username']
if username != None:
if request.method =='GET':
return render_template('add.html',username=username)
elif request.method == 'POST':
try:
if(username==request.form['author'] or username=='admin'):
g.db.execute('INSERT into posts (title,author,content,published_date) values (?,?,?,?) ',[request.form['title'],request.form['author'],request.form['content'],request.form['published_date']])
g.db.commit()
return redirect('/')
else:
flash('You are not authorized to post to the blog hosted by {}'.format(request.form['author']))
return redirect('/add')
except Exception as e:
print(e)
flash('Duplicate Title and Author!','error')
return redirect('/add')
else:
return redirect('/')
@app.route('/delete',methods=['POST'])
def delete():
username=session['username']
if username != None:
del_title = request.form['del_title']
del_author = request.form['del_author']
g.db.execute("DELETE FROM posts WHERE title=? AND author=?",[del_title,del_author])
g.db.commit()
return redirect('/dashboard')
else:
return redirect('/')
@app.route('/edit',methods=['GET','POST'])
def edit():
    # the GET and POST branches were identical, so they are collapsed into one
    username = session['username']
    if username is None:
        return redirect('/')
    e_title = request.form['edit_title']
    e_author = request.form['edit_author']
    return redirect(url_for('update', e_title=e_title, e_author=e_author))
@app.route('/update/<e_title>/<e_author>',methods=['GET','POST'])
def update(e_title,e_author):
username=session['username']
if username != None:
if request.method == 'GET':
cur = g.db.execute("SELECT * FROM posts WHERE title=? AND author=?",[e_title,e_author])
data = cur.fetchall()
return render_template('update.html',data=data,username=username)
elif request.method == 'POST':
e_title=request.form['e_title']
e_author=request.form['e_author']
g.db.execute("UPDATE posts SET title=?,author=?,content=?,published_date=? WHERE title=? AND author=?",[request.form['title'],request.form['author'],request.form['content'],request.form['published_date'],e_title,e_author])
g.db.commit()
return redirect('/dashboard')
else:
return redirect('/')
if __name__ == '__main__':
app.run()
|
EugenMorarescu/IS211_Final
|
Final/final_project.py
|
final_project.py
|
py
| 5,821 |
python
|
en
|
code
| 0 |
github-code
|
6
|
43464163341
|
# the tests construct datetime objects directly and call Django's timezone.now()
from datetime import datetime
import pytz
from django.utils import timezone
from .send_mail_view import SendMailView
from django.test import RequestFactory
import pytest
class TestSendMailView:
# Test that sending an email with correct parameters returns a 200 OK response.
def test_send_mail_with_correct_parameters(self):
view = SendMailView()
request = RequestFactory().get('/')
response = view.get(request)
assert response.status_code == 200
# Test that sending an email with incorrect parameters returns a 400 BAD REQUEST status code.
def test_send_mail_with_incorrect_parameters_returns_bad_request(self):
view = SendMailView()
request = RequestFactory().get('/')
response = view.enviar_correo(request)
assert response.status_code == 400
# Test that the conversion of UTC time to local time in the SendMailView class returns the correct time.
def test_converting_utc_to_local_time(self):
# Create an instance of SendMailView
send_mail_view = SendMailView()
# Define a UTC time
utc_time = datetime(2022, 1, 1, 12, 0, 0, tzinfo=pytz.utc)
# Call the get_local_hour method with the UTC time
local_time = send_mail_view.get_local_hour(utc_time)
# Assert that the local time is correct
assert local_time == ('2022-01-01 07:00:00', 0, 0)
# Test that the Fibonacci series generated from the local time is correct
def test_fibonacci_series_from_local_time(self):
now = timezone.now()
view = SendMailView()
fibonacci_result = view.get_fibo_fron_local_hour(now)
assert fibonacci_result == [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89,
144, 233, 377, 610, 987, 1597, 2584, 4181, 6765, 10946, 17711, 28657]
|
segpy/technical-tests
|
prote/drf-prote-test/apps/prote_test/views/test_send_mail_view.py
|
test_send_mail_view.py
|
py
| 2,162 |
python
|
en
|
code
| 0 |
github-code
|
6
|
3438652661
|
from typing import List

class Solution:
def maximumUnits(self, boxTypes: List[List[int]], truckSize: int) -> int:
boxTypes.sort(key=lambda x:-x[1])
res = 0
count = 0
for [num_b, num_u] in boxTypes:
if num_b + count < truckSize:
count += num_b
res += num_b * num_u
else:
res += (truckSize - count) * num_u
break
return res
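# Quick check against LeetCode's example 1 (greedy takes 1*3 + 2*2 + 1*1 = 8):
# Solution().maximumUnits([[1, 3], [2, 2], [3, 1]], 4) == 8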
|
cuiy0006/Algorithms
|
leetcode/1710. Maximum Units on a Truck.py
|
1710. Maximum Units on a Truck.py
|
py
| 448 |
python
|
en
|
code
| 0 |
github-code
|
6
|
36222363296
|
# -*- coding: utf-8 -*-
import typing as T
import polars as pl
from ..importer import (
TsvGzReader,
dataframe_to_list_table,
)
from ..images import icon_by_portal, image_by_map
from .go_cmd import with_teleport_command
if T.TYPE_CHECKING:
from rstobj import Image, ListTable
def lt_list_main_city_gps_and_label_and_image() -> T.List[
T.Tuple['ListTable', str, 'Image']
]:
reader = TsvGzReader(__file__)
df = reader.read_df("main-city.tsv.gz")
df = df.select([
pl.col("zone").alias("城市"),
pl.col("zone").alias("图标"),
pl.col("loc_name").alias("地点"),
pl.col("description").alias("描述"),
pl.col("go_cmd").alias("go_cmd"),
])
df1 = with_teleport_command(df, go_cmd_col="go_cmd")
df2 = df1.with_column(pl.col("图标").apply(f=icon_by_portal))
lst = list()
for city in df2["城市"].unique(maintain_order=True):
sub_df = df2.filter(df2["城市"] == city)
image = image_by_map(city)
image.height = 480
lst.append(
(
dataframe_to_list_table(sub_df, title=f"{city}传送GM命令"),
city,
image,
)
)
return lst
|
MacHu-GWU/wotlkdoc-project
|
wotlkdoc/docs/gps/main_city.py
|
main_city.py
|
py
| 1,226 |
python
|
en
|
code
| 8 |
github-code
|
6
|
20066269029
|
import os
import pandas as pd
import properties
from audio import audio_utils as au
from files import file_utils as fu
min_fragment_duration_ms = 400
def __build_syncmap_sentences(chapter_audio, chapter_syncmap):
sentences = []
for fragment in chapter_syncmap['fragments']:
start_time = float(fragment['begin']) * 1000
end_time = float(fragment['end']) * 1000
if (end_time - start_time) > min_fragment_duration_ms:
sentences.append({
"audio": chapter_audio[start_time:end_time],
"text": fragment['lines'][0]
})
return sentences
def __export_dataset_audio_sample(audio_sample, dataset_chapter_index, syncmap_fragment_index):
audio_sample.export(
fu.build_dataset_audio_path(dataset_chapter_index, syncmap_fragment_index),
format="wav"
)
def __append_to_metadata(metadata_df, dataset_chapter_index, fragment_index, fragment_text, fragment_audio):
return metadata_df.append(
pd.DataFrame(
[{
'filename': fu.build_dataset_audio_filename(dataset_chapter_index, fragment_index),
'text': fragment_text,
'up_votes': 0,
'down_votes': 0,
'age': 0,
'gender': 'male',
'accent': '',
'duration': fragment_audio.duration_seconds
}],
columns=properties.csv_sample_columns
)
)
def __build_chapter_dataframe(dataframe, sentences, dataset_chapter_index):
for syncmap_fragment_index, sentence in enumerate(sentences):
trimmed_audio = au.trim_silence(sentence['audio'])
__export_dataset_audio_sample(trimmed_audio, dataset_chapter_index, syncmap_fragment_index)
dataframe = __append_to_metadata(dataframe,
dataset_chapter_index,
syncmap_fragment_index,
sentence['text'],
trimmed_audio)
return dataframe
def __build_metadata_and_export_audio_samples(dataframe, book_name, book_chapter_index, dataset_chapter_index):
chapter_audio = au.load_mp3_audio(book_name, book_chapter_index)
syncmap = fu.load_syncmap(book_name, book_chapter_index)
sentences = __build_syncmap_sentences(chapter_audio, syncmap)
dataframe = __build_chapter_dataframe(dataframe, sentences, dataset_chapter_index)
return dataframe
def __export_metadata(dataframe):
dataframe.to_csv(fu.build_dataset_metadata_path(),
sep='|', encoding='utf-8', index=False
)
def run():
os.makedirs(fu.build_dataset_audio_dir(), exist_ok=True)
df = pd.DataFrame(columns=properties.csv_sample_columns)
dataset_chapter_index = 1
for book in properties.book_list:
print("Exporting book \'{:s}\'.".format(book))
for book_chapter_index in range(1, properties.chapter_count_in[book] + 1):
print("Exporting chapter {:d}...".format(book_chapter_index))
df = __build_metadata_and_export_audio_samples(df, book, book_chapter_index, dataset_chapter_index)
dataset_chapter_index += 1
__export_metadata(df)
if __name__ == "__main__":
run()
|
arnasRad/speech_dataset
|
export_dataset.py
|
export_dataset.py
|
py
| 3,322 |
python
|
en
|
code
| 0 |
github-code
|
6
|
4510033934
|
class Node :
def __init__(self, data = None, next = None):
self.data = data
self.next = next
class LinkedList :
def __init__(self):
self.head = None
def insert_at_beginning(self,data):
node = Node(data,self.head)
self.head = node
return
def insert_at_end (self, data):
if self.head is None:
self.head = Node(data,None)
return
itr = self.head
while itr.next:
itr = itr.next
itr.next = Node(data,None)
def insert_values (self, data_list):
self.head = None
for data in data_list:
self.insert_at_end(data)
def get_length(self):
counter = 0
itr = self.head
while itr:
counter += 1
itr = itr.next
return counter
def remove_at (self, index):
if index<0 or index >= self.get_length():
raise Exception("Invalid index")
if index == 0:
self.head = self.head.next
return
counter = 0
itr = self.head
while itr:
if counter == index-1:
itr.next = itr.next.next
break
itr = itr.next
counter +=1
def insert_at(self,index,data):
if index < 0 or self.get_length()<index:
raise Exception("invalid index")
if index == 0:
self.insert_at_beginning(data)
return
counter = 0
itr = self.head
new_node = Node(data,None)
while itr:
if counter == index-1:
new_node.next = itr.next
itr.next = new_node
break
itr = itr.next
counter += 1
def print_linked_list(self):
if self.head is None:
print("Linked list is empty")
return
itr = self.head
lst = ""
while itr:
lst += str(itr.data) + "-->"
itr = itr.next
print(lst)
if __name__ == '__main__':
ll = LinkedList()
ll.insert_values(['a','b','c','d'])
ll.print_linked_list()
ll.insert_at(4,'z')
ll.print_linked_list()
ll.remove_at(2)
ll.print_linked_list()
|
rajat1994/LeetCode-PythonSolns
|
Linked Lists/linked_list.py
|
linked_list.py
|
py
| 2,300 |
python
|
en
|
code
| 0 |
github-code
|
6
|
12503051509
|
class Card:
suit_list = {
"Spades",
"Hearts",
"Clubs",
"Diamonds"
}
rank_list = {
"Ace": 1,
"Two": 2,
"Three": 3,
"Four": 4,
"Five": 5,
"Six": 6,
"Seven": 7,
"Eight": 8,
"Nine": 9,
"Ten": 10,
"Jack": 10,
"King": 10,
"Queen": 10
}
suit = ""
rank = ""
value = 0
    def __init__(self, rank, suit):
        if rank not in self.rank_list or suit not in self.suit_list:
            # raising keeps an invalid Card from being half-initialized
            raise ValueError("invalid rank or suit for Card")
        self.rank = rank
        self.suit = suit
        self.value = self.rank_list[rank]
def __str__(self):
return str(self.rank + " of " + self.suit)
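# Usage sketch (illustrative):
#   ace = Card("Ace", "Spades")
#   print(ace)        # -> Ace of Spades
#   print(ace.value)  # -> 1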
|
fowl-ow/Pythonjack
|
Card.py
|
Card.py
|
py
| 761 |
python
|
en
|
code
| 0 |
github-code
|
6
|
36517146950
|
# A brief description of the project
# Date: 17AUG21
# CSC221 M1HW1 – Array Manipulations
# Taylor J. Brown
# Prompt the user with a menu that has five options.
# 1) Create a 3-by-3 Array
# 2) Display cube Values for elements in array
# 3) Add 7 to every element and display result
# 4) Multiply elements by 6 and display result
# 5) Exit
# --- Option 1 must be done first. If another option is selected give user an error message and display menu again ---
# Option 1. User is prompted to enter nine integers. Display a 3x3 array without brackets.
# Option 2. Will display the cube of each element in array. (Don’t modify the array. Copy then modify).
# Option 3. Add 7 to each element in array and display. (Don’t modify the array. Copy then modify).
# Option 4. Multiply each element in array by 6 and display. (Don’t modify the array. Copy then modify).
# Option 5. Terminate program.
# If any option other than 1-5 is entered give the user an error message and display menu again.
# After every operation, menu will be displayed again until user chooses to terminate the program.
import numpy as np
import copy
def main():
array = ([])
keep_going = 0
while keep_going == 0:
print('MENU')
print('-'*25,'\n')
        print(' '*3,'1) Create a 3-by-3 Array')
        print(' '*3,'2) Display cube Values for elements in array')
        print(' '*3,'3) Add 7 to every element and display result')
        print(' '*3,'4) Multiply elements by 6 and display result')
        print(' '*3,'5) Exit')
usr_inp = int(input("Menu selection: "))
if usr_inp == 1:
array = option_1()
elif usr_inp == 2:
option_2(array)
elif usr_inp == 3:
option_3(array)
elif usr_inp == 4:
option_4(array)
elif usr_inp == 5:
keep_going +=1
else:
print("Error invalid option!")
def option_1():
L1 = []
L2 = []
L3 = []
count1 = 9
count = 0
    for _ in range(count1):
count += 1
usr_inp = int(input(f'Please enter integer {count} of 9: '))
z = len(L1)
k = len(L2)
if z < 3:
L1.append(usr_inp)
elif z == 3 and k < 3:
L2.append(usr_inp)
else:
L3.append(usr_inp)
array = np.array((L1,L2,L3), dtype = int)
new_arr = array.reshape(3,3)
printer(new_arr)
return new_arr
def option_2(array):
z = len(array)
cpy_arr = copy.deepcopy(array)
if z >= 1:
        cube_arr = cpy_arr ** 3
        printer(cube_arr)
else:
print("The array is empty! You have to create an array first!")
def option_3(array):
z = len(array)
cpy2_arr = copy.deepcopy(array)
if z >= 1:
add_arr = cpy2_arr + 7
printer(add_arr)
else:
print("The array is empty! You have to create an array first!")
def option_4(array):
z = len(array)
cpy3_arr = copy.deepcopy(array)
if z >= 1:
multi_arr = cpy3_arr * 6
printer(multi_arr)
else:
print("The array is empty! You have to create an array first!")
def printer(array):
print()
for row in array:
for col in row:
print(col, end=" ")
print()
print()
main()
|
TaylorBrown96/CSC-221
|
M1HW1_ArrayManipulation_TaylorBrown.py
|
M1HW1_ArrayManipulation_TaylorBrown.py
|
py
| 3,457 |
python
|
en
|
code
| 0 |
github-code
|
6
|
19121772365
|
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework.decorators import api_view, action
from rest_framework import viewsets, permissions, status
from django.http import Http404
from django.shortcuts import render
from leaderboard.models import leaderboard_user
from leaderboard.serializers import LeaderboardUserSerializer
def index(request):
return render(request, 'index.html')
@api_view(['GET'])
def api_root(request, format=None):
return Response({
'scoreboard_users': reverse('user_list', request=request, format=format)
})
class UserViewSet(viewsets.ModelViewSet):
"""
retrieve:
Return the given user.
list:
Return a list of all the existing users.
create:
Create a new user instance.
delete:
Delete a user instance.
update:
Update a user instance.
point_up:
Adds a point to the given user.
point_down:
Removes a point from the given user.
"""
queryset = leaderboard_user.objects.all()
serializer_class = LeaderboardUserSerializer
permission_classes = [permissions.IsAuthenticatedOrReadOnly]
@action(detail=True)
    def point_up(self, request, *args, **kwargs):
        # DRF's ModelViewSet.get_object() already raises Http404 for missing
        # users, so the unused local lookup helper has been removed
        user = self.get_object()
user.point_up()
return Response(status=status.HTTP_200_OK)
@action(detail=True)
    def point_down(self, request, *args, **kwargs):
        user = self.get_object()
user.point_down()
return Response(status=status.HTTP_200_OK)
def perform_create(self, serializer):
serializer.save()
|
alex-gmoca/spring
|
spring/leaderboard/views.py
|
views.py
|
py
| 2,128 |
python
|
en
|
code
| 0 |
github-code
|
6
|
42368009926
|
from django.contrib import admin
from sign.models import Event, Guest
# Register your models here.
# EventAdmin inherits from admin.ModelAdmin, which lets you customize how the model is presented in the admin site
class EventAdmin(admin.ModelAdmin):
    # list_display: which fields to show; they must be fields defined on the Event model
list_display = ['id', 'name', 'status', 'address', 'start_time']
    # add a search bar
search_fields = ['name']
    # add a list filter
list_filter = ['status']
class GuestAdmin(admin.ModelAdmin):
list_display = ['realname', 'phone', 'email', 'sign', 'create_time', 'event']
search_fields = ['realname', 'phone']
list_filter = ['sign']
admin.site.register(Event, EventAdmin)
admin.site.register(Guest, GuestAdmin)
|
nhan118/learn
|
guest/sign/admin.py
|
admin.py
|
py
| 786 |
python
|
en
|
code
| 0 |
github-code
|
6
|
355111105
|
"""Datasets
RegressionDataGenerator, ClassificationDataGenerator2, ClassificationDataGenerator3, load_iris, load_mnist
are implemented.
"""
import numpy as np
import os,gzip
class RegressionDataGenerator():
"""RegressionDataGenerator
Create 1-D toy data for regression
"""
def __init__(self,f):
"""__init__
Args:
f (object) : generate 1-D data which follows f(x) + gauss noise
"""
self.f = f
def __call__(self,n = 50,lower = 0,upper = 2*np.pi,std = 1):
"""Make data
Args:
n (int) : number of data
lower,upper (float) : generate data almost lower <= x <= upper
std (float) : std of gauss noise
Returns:
2-D array: explanatory variable,shape = (N_samples,1)
2-D array: target variable, shape = (N_samples,1)
"""
        X = np.random.rand(n)*(upper - lower) + lower
y = self.f(X) + np.random.randn(n)*std
return X.reshape(-1,1),y.reshape(-1,1)
class ClassificationDataGenerator2():
"""ClassificationDataGenerator2
Create 2-D toy data for classification, which has 2-class.
"""
def __init__(self,f):
"""__init__
Args:
f (object) : generate 2-D data which decision boundary is given as y = f(x)
"""
self.f = f
def __call__(self,n = 50,x_lower = 0,x_upper = 5,y_lower = 0,y_upper = 5,encoding="onehot"):
"""Make data
explanatory variable is expressed (x,y).
if encoding = "onehot", returned target variable's shape = (N_samples,2)
if encoding = "target", returned target variable's shape = (N_samples)
Args:
n (int) : number of data
x_lower,x_upper (float) : generate data almost x_lower <= x <= x_upper
y_lower,y_upper (float) : generate data almost y_lower <= y <= y_upper
encoding (str) : "onehot" or "target"
Returns:
2-D array: explanatory variable.shape = (N_samples,2) and y
"""
X1 = np.random.rand(n)*(x_upper - x_lower) + x_lower
X2 = np.random.rand(n)*(y_upper - y_lower) + y_lower
X = np.concatenate([X1.reshape(-1,1),X2.reshape(-1,1)],axis = 1)
if encoding == "onehot":
y = np.zeros((n,2))
y[X2 > self.f(X1),1] = 1
y[X2 <= self.f(X1),0] = 1
else:
y = np.zeros(n)
y[X2 > self.f(X1)] = 1
return X,y
class ClassificationDataGenerator3():
"""ClassificationDataGenerator3
Create 2-D toy data for classification, which has 3-class.
if encoding = "onehot", returned target variable's shape = (N_samples,3)
if encoding = "target", returned target variable's shape = (N_samples)
"""
def __init__(self,f1,f2):
"""
Args:
f1 (object) : generate 2-D data which first decision boundary is given as y = f1(x)
f1 (object) : generate 2-D data which second decesion boundary is given as y = f2(x)
Note:
for all x, f1(x) >= f2(x).
"""
self.f1 = f1
self.f2 = f2
def __call__(self,n = 50,x_lower = 0,x_upper = 5,y_lower = 0,y_upper = 5,encoding="onehot"):
"""Make data
explanatory variable is expressed (x,y).
Args:
n (int) : number of data
x_lower,x_upper (float) : generate data almost x_lower <= x <= x_upper
y_lower,y_upper (float) : generate data almost y_lower <= y <= y_upper
encoding (str) : "onehot" or "target"
Returns:
2-D array: explanatory variable.shape = (N_samples,2) and y
"""
X1 = np.random.rand(n)*(x_upper - x_lower) + x_lower
X2 = np.random.rand(n)*(y_upper - y_lower) + y_lower
X = np.concatenate([X1.reshape(-1,1),X2.reshape(-1,1)],axis = 1)
if encoding == "onehot":
y = np.zeros((n,3))
condition1 = (X2 > self.f1(X1))
condition2 = (X2 > self.f2(X1))
y[condition1,0] = 1
y[np.logical_and(np.logical_not(condition1),condition2),1] = 1
y[np.logical_not(condition2),2] = 1
else:
y = np.zeros(n)
condition1 = (X2 > self.f1(X1))
condition2 = (X2 > self.f2(X1))
y[condition1] = 0
y[np.logical_and(np.logical_not(condition1),condition2)] = 1
y[np.logical_not(condition2)] = 2
return X,y
def load_iris():
"""
Returns:
        2-D array: explanatory variable
1-D array: class 0,1,2
"""
dict = {
"Iris-setosa": 0,
"Iris-versicolor": 1,
"Iris-virginica": 2
}
X = []
y = []
    # rstrip strips a character set, not a suffix, and can mangle the path;
    # dirname gives the module folder reliably
    file = os.path.dirname(__file__)
    os.chdir(f"{file}/../../")
with open("data/iris.data") as f:
data = f.read()
for line in data.split("\n"):
# sepal length | sepal width | petal length | petal width
if len(line) == 0:
continue
sl,sw,pl,pw,cl = line.split(",")
rec = np.array(list(map(float,(sl,sw,pl,pw))))
cl = dict[cl]
X.append(rec)
y.append(cl)
return np.array(X),np.array(y)
def _load_label(file_name):
file_path = file_name
with gzip.open(file_path, 'rb') as f:
labels = np.frombuffer(f.read(), np.uint8, offset=8)
return labels
def _load_image(file_name,normalized):
file_path = file_name
with gzip.open(file_path, 'rb') as f:
images = np.frombuffer(f.read(), np.uint8, offset=16)
if normalized:
return images.reshape(-1,28,28)/255.0
else:
return images.reshape(-1,28,28)
def load_mnist(label=np.arange(10),normalized=True):
"""
Args:
        label (1-D array): labels of the data you want to get
normalized (bool): if image is normalized or not
Returns:
2-D array: image, shape = (N,28,28)
1-D array: target
"""
    # change directory (dirname, for the same reason as in load_iris)
    file = os.path.dirname(__file__)
    os.chdir(f"{file}/../../")
mnist_data = {
"image": "data/t10k-images-idx3-ubyte.gz",
"label": "data/t10k-labels-idx1-ubyte.gz"
}
# load image and label
image = _load_image(mnist_data["image"],normalized)
target = _load_label(mnist_data["label"])
# select image
idx = np.isin(target,label)
return image[idx],target[idx]
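# Minimal usage sketch (illustrative; load_mnist additionally assumes the
# gzipped files exist under data/):
#
# gen = RegressionDataGenerator(np.sin)
# X, y = gen(n=100, std=0.2)  # noisy samples of sin(x), shapes (100, 1)
#
# gen2 = ClassificationDataGenerator2(lambda x: x)
# X2, y2 = gen2(n=200, encoding="target")  # labels 0/1 split by y = x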
|
hedwig100/PRML
|
prml/utils/datasets.py
|
datasets.py
|
py
| 6,493 |
python
|
en
|
code
| 0 |
github-code
|
6
|
72788395069
|
import csv
bidic = {}  # bigram dictionary
count = 0
with open('./gitignored_files/taiwanese_song_info.csv', 'r') as data:
# with open('./gitignored_files/song_info.csv', 'r') as data:
reader = csv.reader(data)
song_dictionary = list(reader)
with open('./gitignored_files/taiwanese_song_bigram.csv', 'w', newline='', encoding='utf-8') as csv_file:
writer = csv.writer(csv_file, delimiter=',')
# print(song_dictionary[row][0])
for i in range(1, len(song_dictionary)) :
word = song_dictionary[i][2].replace(" ", "")
for j in range(0,len(word)-1):
bi = word[j:j+2]
# print(bi)
            if bi in bidic:  # bigram already seen
                bidic[bi][0] = bidic[bi][0] + 1
            else:  # first occurrence of this bigram
                bidic[bi] = [1, 0]
            count += 1  # count every occurrence so 'chance' is a true relative frequency
# write bigram dictionary into file
writer.writerow(["vocabulary","counts","chance"])
for key,value in bidic.items() :
value[1] = value[0] / count
writer.writerow([key, value[0], value[1]])
|
LonEdit120/testing_codes
|
bigram_test_01.py
|
bigram_test_01.py
|
py
| 1,050 |
python
|
en
|
code
| 0 |
github-code
|
6
|
14876640371
|
from django.db import models
from django.utils.text import slugify
class Pet(models.Model):
MAX_LENGTH_NAME = 30
name = models.CharField(
max_length=MAX_LENGTH_NAME,
null=False,
blank=False,
)
personal_pet_photo = models.URLField(
null=False,
blank=False,
)
pet_slug = models.SlugField(
unique=True,
editable=False,
null=False,
blank=True,
)
date_of_birth = models.DateField(
null=True,
blank=True,
)
    def save(self, *args, **kwargs):
        # first save assigns the id, which the slug needs
        super().save(*args, **kwargs)
        if not self.pet_slug:
            self.pet_slug = slugify(f"{self.id}-{self.name}")
            # save again only when the slug was just generated
            super().save(*args, **kwargs)
def __str__(self):
return f"Name={self.name} - ID={self.id}"
|
Ivo2291/petstagram
|
petstagram/pets/models.py
|
models.py
|
py
| 820 |
python
|
en
|
code
| 0 |
github-code
|
6
|
74190577788
|
from tkinter import Tk, Text
# Text creates a multiline text widget (the equivalent of a textarea in other toolkits)
root = Tk()
root.geometry("250x200")
root.resizable(False, False)
root.title("Escribe aquí")
text = Text(root, height=8)
text.pack()
# insert(index where the text should be placed, text)
text.insert("1.0","Estamos aprendiendo python")
root.mainloop()
|
alex-claradelrey/ConversorDeDivisas
|
Ejemplos/Ejemplos Tkinter/Ejemplo7.py
|
Ejemplo7.py
|
py
| 372 |
python
|
es
|
code
| 0 |
github-code
|
6
|
41061137991
|
import numpy as np
from src.core import Entity, Player
PLAYER_COLOR = (31, 120, 10) # dark green
class Ball(Entity):
def __init__(self, env_width, env_height, position, radius, color, timestep):
self.env_width = env_width
self.env_height = env_height
self.radius = radius
self.color = color
self.timestep = timestep
self.position = position
self.speed = np.array([np.random.random()*2-1, np.random.random()*2-1])
self.changed_speed = 0
self.left = self.position[0] - self.radius
self.right = self.position[0] + self.radius
self.bottom = self.position[1] + self.radius
self.top = self.position[1] - self.radius
def move(self):
self.speed += self.changed_speed
self.changed_speed = 0
delta_x = self.speed * self.timestep
self.position += delta_x
if (self.position[0] < self.radius and delta_x[0] < 0) or \
(self.position[0] > self.env_width - self.radius and delta_x[0] > 0):
self.speed[0] *= -1
if (self.position[1] < self.radius and delta_x[1] < 0) or \
(self.position[1] > self.env_height - self.radius and delta_x[1] > 0):
self.speed[1] *= -1
def update_speed(self, speed, position):
new_pos = self.position + self.speed * self.timestep
new_pos_ = position + speed * self.timestep
if np.sum((new_pos-new_pos_)**2) < np.sum((self.position - position)**2):
if not (position == self.position).all():
self.changed_speed -= (self.speed - speed) @ (self.position - position) / np.sum((self.position - position)**2) * (self.position - position)
class Debug(Player):
def __init__(self, env_width, env_height, radius, position, timestep):
self.env_width = env_width
self.env_height = env_height
self.radius = radius
self.position = position
self.color = PLAYER_COLOR
self.speed = np.zeros(2)
self.max_speed = 6
self.timestep = timestep
def valid_state(self, position, ball_list):
return True
def move(self, action, ball_list):
position = self.position + action
if self.valid_state(position, ball_list):
self.position += action * self.timestep
return True
else:
return False
def get_action(self, state):
return 0
def reset(self):
return
def load_model(self):
return
def save_model(self):
return
|
ylajaaski/reinforcement_env
|
src/envs/collision_v1/entities.py
|
entities.py
|
py
| 2,605 |
python
|
en
|
code
| 0 |
github-code
|
6
|
15687075462
|
"""Module that contains reusable functions to interact with azure."""
import os
import yaml
import json
import shutil
from typing import Tuple, List, Dict, Union, Optional
from azureml.core import Workspace, Model, Dataset, Datastore
from azureml.core.compute import ComputeTarget, AmlCompute
from azureml.core.compute_target import ComputeTargetException
from azureml.core.authentication import ServicePrincipalAuthentication, InteractiveLoginAuthentication
from sentence_transformers import SentenceTransformer, CrossEncoder
from azure.ai.ml import MLClient
from azure.keyvault.secrets import SecretClient
from azure.identity import DefaultAzureCredential
def get_sp_auth():
"""
Function that returns an authentication object that can be used to authenticate with the azure ml workspace.
Returns:
ServicePrincipalAuthentication|InteractiveLoginAuthentication: Authentication object that can be used to authenticate with the azure ml workspace.
"""
    # in case you're working on a local machine with the service principal located in the workfolder root
f_path = '.cloud/.azure/AZURE_SERVICE_PRINCIPAL.json'
if os.path.exists(f_path):
with open(f_path) as f:
cred = f.read()
os.environ['AZURE_SERVICE_PRINCIPAL'] = cred
service_principle_str = os.environ.get('AZURE_SERVICE_PRINCIPAL')
interactive_login = False
# the sp file needs to exist or the env var is already set, with codespace secrets for example
if service_principle_str is not None:
service_principle_cred = json.loads(service_principle_str)
if service_principle_cred:
print("Authenticate with environment variable.")
tenant_id = service_principle_cred["tenant"]
sp_id = service_principle_cred["appId"]
sp_pwd = service_principle_cred["password"]
else:
if os.path.exists("tenant.txt") and os.path.exists("appid.txt") and os.path.exists("password.txt"):
print("Authenticate with text files data.")
tenant_id = open("tenant.txt").read()
sp_id = open("appid.txt").read()
sp_pwd = open("password.txt").read()
else:
print("Interactive login.")
interactive_login = True
else:
interactive_login = True
if interactive_login:
return InteractiveLoginAuthentication(tenant_id="95101651-f23a-4239-a566-84eb874f75f4")
else:
sp_auth = ServicePrincipalAuthentication(
tenant_id=tenant_id, service_principal_id=sp_id, service_principal_password=sp_pwd
)
return sp_auth
def get_ws(stage="dev") -> Workspace:
"""Function that returns a workspace for the given stage.
Args:
        stage (str, optional): One of the deployment stages: dev/uat/staging/prod. Defaults to "dev".
Raises:
ValueError: In case an invalid stage name is passed.
Returns:
Workspace: _description_
"""
stages = {"dev", "uat", "staging", "prod"}
if stage not in stages:
raise ValueError("Invalid stage for workspace: got %s, should be from %s" % (stage, stages))
sp_auth = get_sp_auth()
config_path = ".cloud/.azure/{stage}_config.json".format(stage=stage)
ws = Workspace.from_config(config_path, auth=sp_auth)
return ws
def get_ml_client(stage: str = "dev"):
"""Function that returns a MLClient for the given stage.
Args:
stage (str, optional): Name of the deployment stage. Defaults to "dev".
Raises:
ValueError: In case an invalid stage is passed.
Returns:
_type_: the mlclient for the given stage that can be used to interact with the ml workspace.
"""
stages = {"dev", "uat", "staging", "prod"}
if stage not in stages:
raise ValueError("Invalid stage for workspace: got %s, should be from %s" % (stage, stages))
sp_auth = get_sp_auth()
config_path = ".cloud/.azure/{stage}_config.json".format(stage=stage)
ml_client = MLClient.from_config(credential=sp_auth, path=config_path)
return ml_client
def get_secret_client(stage: str = "dev") -> SecretClient:
"""Function that returns a secret client for the given stage.
Args:
stage (str, optional): Deployment stage. Defaults to "dev".
Raises:
ValueError: In case an invalid stage is passed.
Returns:
SecretClient: Secret client for the given stage that can be used to set/get secrets from the keyvault.
"""
stages = {"dev", "uat", "staging", "prod"}
if stage not in stages:
raise ValueError("Invalid stage for workspace: got %s, should be from %s" % (stage, stages))
# sp_auth = get_sp_auth()
# config_path = ".cloud/.azure/{stage}_config.json".format(stage=stage)
# read vault name from deployment config
with open(".cloud/.azure/resources_info.json") as f:
deployment_config = json.load(f)
vault_name = deployment_config[stage]["keyvault"]
vault_url = f"https://{vault_name}.vault.azure.net/"
credential = DefaultAzureCredential()
secret_client = SecretClient(vault_url=vault_url, credential=credential)
return secret_client
def configure_computes(ws: Workspace, clusters: List[Tuple[str, str, int]]):
'''
clusters is a list consisting of the tuples (cluster_name, vm_size, max_nodes)
e.g. cluster_names = [(cpu-cluster, STANDARD_D2_V2, 2), (gpu-cluster, Standard_NC6, 4)]
'''
made_clusters = []
print("making the clusters:", clusters)
for cluster_name, vm_size, max_nodes in clusters:
# Verify that cluster does not exist already
try:
cluster = ComputeTarget(workspace=ws, name=cluster_name)
vm_size_existing = cluster.serialize()['properties']['properties']['vmSize']
if vm_size_existing.lower() != vm_size.lower():
print(
f'WARNING: cluster {cluster_name} exists but with vm_size {vm_size_existing} instead of requested {vm_size} \nWe will still use the existing cluster'
)
else:
print(f'Found existing cluster {cluster_name}, use it.')
except ComputeTargetException:
# To use a different region for the compute, add a location='<region>' parameter
compute_config = AmlCompute.provisioning_configuration(
vm_size=vm_size,
max_nodes=max_nodes,
idle_seconds_before_scaledown=300,
)
cluster = ComputeTarget.create(ws, cluster_name, compute_config)
print(f"Creating new cluster {cluster_name} of type {vm_size} with {max_nodes} nodes")
cluster.wait_for_completion(show_output=False)
made_clusters.append(cluster)
return made_clusters
def download_model(workspace: Workspace, model: Dict[str, Union[str, int]], model_type):
"""
Function downloads the model and copies it to the models/model_type folder
:param model: Dictionary that contains the name and version of the model that needs to be downloaded
"""
print(f'download bi_encoder{model["name"]}:{model["version"]}')
model_path = Model.get_model_path(model_name=model['name'], version=model['version'], _workspace=workspace)
shutil.copytree(src=model_path, dst=f"models/{model_type}")
return model_path
def combine_models(
config_file: str = "configs/deployment_config.yaml",
language_config: str = "configs/model_languages.yaml",
bi_encoder: Tuple[str, int] = None,
):
"""
Combines 2 models that are on the model registry into 1 model and registers it again, so it can be used for inference
:config_file: Location to a config yaml file that contains info about the deployment
:language_config: Location to a config yaml file that contains info about what model to use for which language
    :param bi_encoder: (model_name, model_version) as stated in the model registry for the first model; if empty, the standard untrained model will be used
"""
ws = get_ws("dev")
with open(config_file, 'r') as file:
config = yaml.safe_load(stream=file)
with open(language_config, 'r') as file:
language_models = yaml.safe_load(stream=file)
language = config['corpus_language']
bi = language_models[language.lower()]['bi_encoder']
cross = language_models[language.lower()]['cross_encoder']
if bi_encoder:
registry_model = {"name": bi_encoder[0], "version": bi_encoder[1]}
_ = download_model(ws, registry_model, model_type="bi_encoder")
else:
bi_model = SentenceTransformer(bi)
bi_model.save("models/bi_encoder")
model = CrossEncoder(cross)
model.save("models/cross_encoder")
Model.register(
ws,
model_path="models",
model_name="bi_cross_encoders",
description="Combination of a bi- and cross-encoder that is needed to do inference"
)
shutil.rmtree('models')
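# Example call (model name and version are illustrative, not real registry entries):
# combine_models(bi_encoder=("my-trained-bi-encoder", 3))
# This registers a single "bi_cross_encoders" model containing both encoders.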
def upload_folder_to_datastore(path_on_datastore, local_data_folder, stage='dev'):
"""Function that will upload a local folder to the default datastore of the dev environment
Args:
path_on_datastore (string): Path on datastore where the folder is uploaded to
local_data_folder (string): Path to the local folder that needs to be uploaded
stage (string, optional): Name of the environment stage that the data needs to be uploaded to
"""
workspace = get_ws(stage)
# Gets the default datastore, this is where we are going to save our new data
datastore = workspace.get_default_datastore()
# Under which path do we want to save our new data
    datastore_path = path_on_datastore
# Select the directory where we put our processed data and upload it to our datastore
preprocessed = Dataset.File.upload_directory(local_data_folder, (datastore, datastore_path), overwrite=True)
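# Example call (paths are hypothetical):
# upload_folder_to_datastore("processed/corpus_v1", "data/processed", stage="dev")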
|
ReBatch-ML/AnswerSearch
|
packages/azureml_functions.py
|
azureml_functions.py
|
py
| 9,949 |
python
|
en
|
code
| 0 |
github-code
|
6
|
25006908635
|
from git import Repo
from logging import info
from pathlib import Path
from platform import system
from shutil import copyfile, rmtree
from stat import S_IWRITE
from subprocess import check_output, STDOUT, CalledProcessError
from tempfile import TemporaryDirectory
from twrpdtgen import current_path
from twrpdtgen.utils.find_package import find_package
from typing import Union
def handle_remove_readonly(func, path, _):
Path(path).chmod(S_IWRITE)
func(path)
class AIKManager:
"""
This class is responsible for dealing with AIK tasks
such as cloning, updating, and extracting recovery images.
"""
def __init__(self, is_debug):
"""
AIKManager constructor method
First, check if AIK path exists, if so, update AIK, else clone AIK.
:param aik_path: Path object of AIK directory
"""
self.is_debug = is_debug
if not self.is_debug:
self.tempdir = TemporaryDirectory()
self.path = Path(self.tempdir.name)
else:
self.path = current_path / "extract"
if self.path.is_dir():
rmtree(self.path, ignore_errors=False, onerror=handle_remove_readonly)
self.images_path = self.path / "split_img"
self.ramdisk_path = self.path / "ramdisk"
# Check whether cpio package is installed
        if system() == "Linux" and not find_package("cpio"):
raise RuntimeError("cpio package is not installed. Install it by sudo apt install cpio or sudo pacman -S cpio (Based on what package manager you're using)")
info("Cloning AIK...")
if system() == "Linux":
Repo.clone_from("https://github.com/SebaUbuntu/AIK-Linux-mirror", self.path)
elif system() == "Windows":
Repo.clone_from("https://github.com/SebaUbuntu/AIK-Windows-mirror", self.path)
def extract(self, recovery_image: Union[Path, str]) -> None:
"""
Extract an image using AIK.
:param recovery_image: recovery image string or path object
"""
new_recovery_image = self.path / "recovery.img"
copyfile(recovery_image, new_recovery_image)
if system() == "Linux":
command = [self.path / "unpackimg.sh", "--nosudo", new_recovery_image]
elif system() == "Windows":
command = [self.path / "unpackimg.bat", new_recovery_image]
else:
raise NotImplementedError(f"{system()} is not supported!")
try:
process = check_output(command, stderr=STDOUT, universal_newlines=True)
except CalledProcessError as e:
returncode = e.returncode
output = e.output
else:
returncode = 0
output = process
if returncode != 0:
if self.is_debug:
print(output)
raise RuntimeError(f"AIK extraction failed, return code {returncode}")
self.get_image_infos()
def get_image_infos(self):
self.aik_images_path_base = str(self.images_path / "recovery.img-")
kernel = self.get_extracted_info("zImage")
self.kernel = kernel if kernel.is_file() else None
dt_image = self.get_extracted_info("dt")
self.dt_image = dt_image if dt_image.is_file() else None
dtb_image = self.get_extracted_info("dtb")
self.dtb_image = dtb_image if dtb_image.is_file() else None
self.dtbo_image = None
for name in ["dtbo", "recovery_dtbo"]:
dtbo_image = self.get_extracted_info(name)
if dtbo_image.is_file():
self.dtbo_image = dtbo_image
self.base_address = self.read_recovery_file(self.get_extracted_info("base"))
self.board_name = self.read_recovery_file(self.get_extracted_info("board"))
self.cmdline = self.read_recovery_file(self.get_extracted_info("cmdline"))
header_version = self.get_extracted_info("header_version")
self.header_version = self.read_recovery_file(header_version) if header_version.exists() else "0"
self.recovery_size = self.read_recovery_file(self.get_extracted_info("origsize"))
self.pagesize = self.read_recovery_file(self.get_extracted_info("pagesize"))
self.ramdisk_compression = self.read_recovery_file(self.get_extracted_info("ramdiskcomp"))
self.ramdisk_offset = self.read_recovery_file(self.get_extracted_info("ramdisk_offset"))
self.tags_offset = self.read_recovery_file(self.get_extracted_info("tags_offset"))
# Get a usable build.prop to parse
self.buildprop = None
buildprop_locations = [self.ramdisk_path / "default.prop",
self.ramdisk_path / "vendor" / "build.prop",
self.ramdisk_path / "system" / "build.prop",
self.ramdisk_path / "system" / "etc" / "build.prop"]
for folder in buildprop_locations:
if folder.is_file():
self.buildprop = folder
break
@staticmethod
def read_recovery_file(file: Path) -> str:
"""
Read file contents
:param file: file as a Path object
:return: string of the first line of the file contents
"""
return file.read_text().splitlines()[0]
def get_extracted_info(self, file: str) -> Path:
return self.images_path / ("recovery.img-" + file)
def cleanup(self):
if not self.is_debug:
self.tempdir.cleanup()
|
DENE-dev/dene-dev
|
RQ1-data/exp2/969-lobo1978-coder@device-tree-generator-aab7df0a3c0246a5dbe524f1196bedc1b4c05e05/twrpdtgen/utils/aik_manager.py
|
aik_manager.py
|
py
| 4,787 |
python
|
en
|
code
| 0 |
github-code
|
6
|
8105420751
|
# coding=utf-8
import re
import string
def preprocessing_text(text):
    text = re.sub('\r', '', text)
    text = re.sub('\n', '', text)  # remove line breaks
    text = re.sub(' ', '', text)  # remove half-width spaces
    text = re.sub(' ', '', text)  # remove full-width spaces
    text = re.sub(r'[0-9 0−9]', '0', text)  # replace every digit with 0
    return text
# NOTE: this second definition shadows the one above, so only it takes effect.
def preprocessing_text(text):
    text = re.sub('<br />', '', text)
    for symbol in string.punctuation:
        if symbol != '.' and symbol != ',':  # delete every symbol except . and ,
            text = text.replace(symbol, '')
    text = text.replace('.', ' . ')  # pad . with spaces so it is treated as a token of its own
    text = text.replace(',', ' , ')  # same for , (otherwise a word with a trailing . would be treated as a different word)
    return text
def tokunizer_punctuation(text):
    return text.strip().split()  # symbols are padded with half-width spaces, so strip the ends and split on spaces to build the word list
def tokenizer_with_preprocessing(text):
text = preprocessing_text(text)
results = tokunizer_punctuation(text)
return results
def main():
print(tokenizer_with_preprocessing('I like cats.'))
if __name__ == '__main__':
main()
|
ys201810/pytorch_work
|
nlp/sentiment_analysis/script/preprocesser.py
|
preprocesser.py
|
py
| 1,373 |
python
|
ja
|
code
| 0 |
github-code
|
6
|
26889061222
|
"""
Author: Roman Solovyev, IPPM RAS
URL: https://github.com/ZFTurbo
Code based on: https://github.com/fizyr/keras-retinanet/blob/master/keras_retinanet/utils/eval.py
"""
import os
import numpy as np
import pandas as pd
# try:
# import pyximport
# pyximport.install(setup_args={"include_dirs": np.get_include()}, reload_support=False)
# from .compute_overlap import compute_overlap
# except:
# print("Couldn't import fast version of function compute_overlap, will use slow one. Check cython intallation")
from .compute_overlap_slow import compute_overlap
def get_real_annotations(table):
res = dict()
ids = list(map(str, table['ImageID'].values))
labels = list(map(str, table['LabelName'].values))
xmin = table['XMin'].values.astype(np.float32)
xmax = table['XMax'].values.astype(np.float32)
ymin = table['YMin'].values.astype(np.float32)
ymax = table['YMax'].values.astype(np.float32)
for i in range(len(ids)):
id = ids[i]
label = labels[i]
if id not in res:
res[id] = dict()
if label not in res[id]:
res[id][label] = []
box = [xmin[i], ymin[i], xmax[i], ymax[i]]
res[id][label].append(box)
return res
def get_detections(table):
res = dict()
ids = list(map(str, table['ImageID'].values))
labels = list(map(str, table['LabelName'].values))
scores = table['Conf'].values.astype(np.float32)
xmin = table['XMin'].values.astype(np.float32)
xmax = table['XMax'].values.astype(np.float32)
ymin = table['YMin'].values.astype(np.float32)
ymax = table['YMax'].values.astype(np.float32)
for i in range(len(ids)):
id = ids[i]
label = labels[i]
if id not in res:
res[id] = dict()
if label not in res[id]:
res[id][label] = []
box = [xmin[i], ymin[i], xmax[i], ymax[i], scores[i]]
res[id][label].append(box)
return res
def _compute_ap(recall, precision):
""" Compute the average precision, given the recall and precision curves.
Code originally from https://github.com/rbgirshick/py-faster-rcnn.
# Arguments
recall: The recall curve (list).
precision: The precision curve (list).
# Returns
The average precision as computed in py-faster-rcnn.
"""
# correct AP calculation
# first append sentinel values at the end
mrec = np.concatenate(([0.], recall, [1.]))
mpre = np.concatenate(([0.], precision, [0.]))
# compute the precision envelope
for i in range(mpre.size - 1, 0, -1):
mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
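# Worked example (illustrative): recall = [0.5, 1.0] and precision = [1.0, 0.5]
# give mrec = [0, 0.5, 1.0, 1.0] and envelope mpre = [1.0, 1.0, 0.5, 0.0],
# so ap = (0.5 - 0) * 1.0 + (1.0 - 0.5) * 0.5 = 0.75.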
def _print(msg, txt_content, verbose):
if verbose:
print(msg)
txt_content += f'{msg}\n'
return txt_content
def calculate_f1_score(num_annotations, true_positives, false_positives, scores, tp_ious, tp_confidences, confidence_threshold):
false_positives_copy = np.array(false_positives)
true_positives_copy = np.array(true_positives)
scores_copy = np.array(scores)
tp_ious_copy = np.array(tp_ious)
tp_confidences_copy = np.array(tp_confidences)
# mask
tp_mask = np.where(scores_copy > confidence_threshold, 1, 0)
true_positives_over_threshold = true_positives_copy * tp_mask
false_positives_over_threshold = false_positives_copy * tp_mask
tp_ious_copy *= tp_mask
tp_iou_sum = np.sum(tp_ious_copy)
tp_confidences_copy *= tp_mask
tp_confidence_sum = np.sum(tp_confidences_copy)
# sort by score
indices = np.argsort(-scores_copy)
false_positives_copy = false_positives_copy[indices]
true_positives_copy = true_positives_copy[indices]
obj_count = int(num_annotations)
tp = int(np.sum(true_positives_over_threshold))
fp = int(np.sum(false_positives_over_threshold))
fn = obj_count - tp
eps = 1e-7
p = tp / (tp + fp + eps)
r = tp / (obj_count + eps)
f1 = (2.0 * p * r) / (p + r + eps)
tp_iou = tp_iou_sum / (tp + eps)
tp_confidence = tp_confidence_sum / (tp + eps)
ret = {}
ret['confidence_threshold'] = confidence_threshold
ret['true_positives'] = true_positives_copy
ret['false_positives'] = false_positives_copy
ret['obj_count'] = obj_count
ret['tp_iou'] = tp_iou
ret['tp_iou_sum'] = tp_iou_sum
ret['tp_confidence'] = tp_confidence
ret['tp_confidence_sum'] = tp_confidence_sum
ret['tp'] = tp
ret['fp'] = fp
ret['fn'] = fn
ret['f1'] = f1
ret['p'] = p
ret['r'] = r
return ret
def mean_average_precision_for_boxes(ann, pred, iou_threshold=0.5, confidence_threshold_for_f1=0.25, exclude_not_in_annotations=False, verbose=True, find_best_threshold=False, classes_txt_path=''):
"""
:param ann: path to CSV-file with annotations or numpy array of shape (N, 6)
:param pred: path to CSV-file with predictions (detections) or numpy array of shape (N, 7)
:param iou_threshold: IoU between boxes which count as 'match'. Default: 0.5
:param exclude_not_in_annotations: exclude image IDs which are not exist in annotations. Default: False
:param verbose: print detailed run info. Default: True
:param classes_txt_path: class names file for show result. Default: ''
:return: tuple, where first value is mAP and second values is dict with AP for each class.
"""
class_names = []
max_class_name_len = 1
if classes_txt_path != '':
if os.path.exists(classes_txt_path) and os.path.isfile(classes_txt_path):
with open(classes_txt_path, 'rt') as f:
lines = f.readlines()
for line in lines:
class_name = line.replace('\n', '')
if len(class_name) > max_class_name_len:
max_class_name_len = len(class_name)
class_names.append(class_name)
else:
max_class_name_len = 9
if isinstance(ann, str):
valid = pd.read_csv(ann)
else:
valid = pd.DataFrame(ann, columns=['ImageID', 'LabelName', 'XMin', 'XMax', 'YMin', 'YMax'])
if isinstance(pred, str):
preds = pd.read_csv(pred)
else:
preds = pd.DataFrame(pred, columns=['ImageID', 'LabelName', 'Conf', 'XMin', 'XMax', 'YMin', 'YMax'])
ann_unique = valid['ImageID'].unique()
preds_unique = preds['ImageID'].unique()
if verbose:
print()
txt_content = ''
unique_classes = list(map(str, valid['LabelName'].unique()))
txt_content = _print(f'Unique classes: {len(unique_classes)}', txt_content, verbose)
txt_content = _print(f'Number of files in annotations: {len(ann_unique)}', txt_content, verbose)
txt_content = _print(f'Number of files in predictions: {len(preds_unique)}', txt_content, verbose)
# Exclude files not in annotations!
if exclude_not_in_annotations:
preds = preds[preds['ImageID'].isin(ann_unique)]
preds_unique = preds['ImageID'].unique()
txt_content = _print(f'Number of files in detection after reduction: {len(preds_unique)}', txt_content, verbose)
all_detections = get_detections(preds)
all_annotations = get_real_annotations(valid)
txt_content = _print(f'\nNMS iou threshold : {iou_threshold}', txt_content, verbose)
if find_best_threshold:
txt_content = _print(f'confidence threshold for tp, fp, fn calculate : best confidence policy per class', txt_content, verbose)
else:
txt_content = _print(f'confidence threshold for tp, fp, fn calculate : {confidence_threshold_for_f1}', txt_content, verbose)
total_tp_iou_sum = 0.0
total_tp_confidence_sum = 0.0
total_tp = 0
total_fp = 0
total_fn = 0
total_obj_count = 0
average_precisions = {}
for _, class_index_str in enumerate(sorted(unique_classes, key=lambda x: int(x))):
# Negative class
if str(class_index_str) == 'nan':
continue
tp_ious = []
tp_confidences = []
false_positives = []
true_positives = []
scores = []
num_annotations = 0.0
for i in range(len(ann_unique)):
detections = []
annotations = []
id = ann_unique[i]
if id in all_detections:
if class_index_str in all_detections[id]:
detections = all_detections[id][class_index_str]
if id in all_annotations:
if class_index_str in all_annotations[id]:
annotations = all_annotations[id][class_index_str]
if len(detections) == 0 and len(annotations) == 0:
continue
num_annotations += len(annotations)
detected_annotations = []
annotations = np.array(annotations, dtype=np.float64)
for d in detections:
scores.append(d[4])
if len(annotations) == 0:
false_positives.append(1)
true_positives.append(0)
tp_ious.append(0.0)
tp_confidences.append(0.0)
continue
overlaps = compute_overlap(np.expand_dims(np.array(d, dtype=np.float64), axis=0), annotations)
assigned_annotation = np.argmax(overlaps, axis=1)
max_overlap = overlaps[0, assigned_annotation]
if max_overlap >= iou_threshold and assigned_annotation not in detected_annotations:
false_positives.append(0)
true_positives.append(1)
detected_annotations.append(assigned_annotation)
tp_ious.append(max_overlap[0])
tp_confidences.append(d[4])
# print(f'conf : {d[4]:.4f}, iou : {max_overlap[0]:.4f}')
else:
false_positives.append(1)
true_positives.append(0)
tp_ious.append(0.0)
tp_confidences.append(0.0)
if num_annotations == 0:
average_precisions[class_index_str] = 0, 0
continue
ap_ret = calculate_f1_score(num_annotations, true_positives, false_positives, scores, tp_ious, tp_confidences, confidence_threshold_for_f1)
best_ret = ap_ret
if find_best_threshold:
best_f1 = 0.0
patience_count = 0
for i in range(99):
class_confidence_threshold = i / 100.0
cur_ret = calculate_f1_score(num_annotations, true_positives, false_positives, scores, tp_ious, tp_confidences, class_confidence_threshold)
cur_f1 = cur_ret['f1']
if cur_f1 == 0.0:
best_ret = cur_ret
break
if cur_f1 >= best_f1:
best_f1 = cur_f1
best_ret = cur_ret
else:
patience_count += 1
if patience_count == 5:
break
true_positives = ap_ret['true_positives'] # use ap_ret
false_positives = ap_ret['false_positives'] # use ap_ret
confidence_threshold = best_ret['confidence_threshold']
obj_count = best_ret['obj_count']
tp_iou = best_ret['tp_iou']
tp_iou_sum = best_ret['tp_iou_sum']
tp_confidence = best_ret['tp_confidence']
tp_confidence_sum = best_ret['tp_confidence_sum']
tp = best_ret['tp']
fp = best_ret['fp']
fn = best_ret['fn']
f1 = best_ret['f1']
p = best_ret['p']
r = best_ret['r']
total_obj_count += obj_count
total_tp_iou_sum += tp_iou_sum
total_tp_confidence_sum += tp_confidence_sum
total_tp += tp
total_fp += fp
total_fn += fn
# compute false positives and true positives
false_positives = np.cumsum(false_positives)
true_positives = np.cumsum(true_positives)
# compute recall and precision
recall = true_positives / num_annotations
precision = true_positives / np.maximum(true_positives + false_positives, np.finfo(np.float64).eps)
# compute average precision
average_precision = _compute_ap(recall, precision)
average_precisions[class_index_str] = average_precision, num_annotations
class_index = int(class_index_str)
class_name = f'class {class_index_str}'
if len(class_names) >= class_index + 1:
class_name = class_names[class_index]
txt_content = _print(f'{class_name:{max_class_name_len}s} AP: {average_precision:.4f}, Labels: {obj_count:6d}, TP: {tp:6d}, FP: {fp:6d}, FN: {fn:6d}, P: {p:.4f}, R: {r:.4f}, F1: {f1:.4f}, IoU: {tp_iou:.4f}, Confidence: {tp_confidence:.4f}, Threshold: {confidence_threshold:.2f}', txt_content, verbose)
present_classes = 0
precision = 0
for _, (average_precision, num_annotations) in average_precisions.items():
if num_annotations > 0:
present_classes += 1
precision += average_precision
eps = 1e-7
mean_ap = precision / (present_classes + eps)
p = total_tp / (total_tp + total_fp + eps)
r = total_tp / (total_obj_count + eps)
f1 = (2.0 * p * r) / (p + r + eps)
tp_iou = total_tp_iou_sum / (total_tp + eps)
tp_confidence = total_tp_confidence_sum / (total_tp + eps)
class_name = 'total'
txt_content = _print(f'\n{class_name:{max_class_name_len}s} AP: {mean_ap:.4f}, Labels: {total_obj_count:6d}, TP: {total_tp:6d}, FP: {total_fp:6d}, FN: {total_fn:6d}, P: {p:.4f}, R: {r:.4f}, F1: {f1:.4f}, IoU: {tp_iou:.4f}, Confidence: {tp_confidence:.4f}', txt_content, verbose)
return mean_ap, f1, tp_iou, total_tp, total_fp, total_obj_count - total_tp, tp_confidence, txt_content
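# Illustrative call (array layouts follow the docstring above):
# mean_ap, f1, tp_iou, tp, fp, fn, tp_conf, report = mean_average_precision_for_boxes(
#     ann_array, pred_array, iou_threshold=0.5, verbose=False)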
|
inzapp/sbd
|
map_boxes/__init__.py
|
__init__.py
|
py
| 13,934 |
python
|
en
|
code
| 0 |
github-code
|
6
|
25958786704
|
from django.db import models
from django.forms import ModelForm
from django.utils import timezone
class Author(models.Model):
NATION_CHOICES = (
(None, 'Nationality'),
('CH', 'China'),
('US', 'America'),
('UK', 'England'),
        ('GE', 'Germany'),
('CA', 'Canada'),
)
name = models.CharField(max_length=80, unique=False, verbose_name='Author name')
nation = models.CharField(max_length=80, unique=False, verbose_name='Nationality', choices=NATION_CHOICES)
def save(self, *args, **kwargs):
try:
old_author = Author.objects.get(name=self.name)
except Author.DoesNotExist:
super().save(*args, **kwargs)
return self
else:
return old_author
def __str__(self):
return self.name
# Create your models here.
class Article(models.Model):
title = models.CharField(max_length=80, unique=True, verbose_name='Article name')
pub_date = models.DateTimeField('date published')
author = models.ForeignKey(Author, null=True, on_delete=models.CASCADE)
context = models.TextField()
def __str__(self):
return self.title
class Comments(models.Model):
article = models.ForeignKey(Article, on_delete=models.CASCADE)
name = models.CharField(max_length=80, null=True)
body = models.TextField()
created_on = models.DateTimeField(auto_now_add=True, null=True, blank=True)
active = models.BooleanField(default=False)
class Meta:
ordering = ['created_on']
def __str__(self):
return 'Comment {} by {}'.format(self.body, self.name)
class CommentsForm(ModelForm):
class Meta:
model = Comments
fields = ['name', 'body', 'active']
class AuthorForm(ModelForm):
class Meta:
model = Author
fields = '__all__'
class ArticleForm(ModelForm):
class Meta:
model = Article
fields = ['title', 'pub_date', 'context']
|
binkesi/blogsgn
|
models.py
|
models.py
|
py
| 2,015 |
python
|
en
|
code
| 0 |
github-code
|
6
|
12689186127
|
import matplotlib.pyplot as plt
import requests
import numpy as np
# Enter Spotify web API access token credentials below
# If you don't have them you can get them here:
# https://developer.spotify.com/dashboard/applications
client_id = "YOUR_CLIENT_ID_HERE"
client_secret = "YOUR_SECRET_ID_HERE"
# The below code generates a temporary access token using the credentials
# entered above through the Spotify web API
raw_response = requests.post('https://accounts.spotify.com/api/token', {
'grant_type': 'client_credentials',
'client_id': client_id,
'client_secret': client_secret,
})
print(raw_response)
# Converts the response to json
json_response = raw_response.json()
# Checks response code and runs search if connection made,
# otherwise tries to provide useful advice.
if raw_response.status_code == 200:
print("Connection established and authorised.")
# Asks user for an artist. The artists information is then retrieved from
# the Spotify web API.
artist_name = input("Please enter an artist: ")
# Checks if nothing has been entered by user and provides default answer
if artist_name == "":
print("No artist entered, so you will be provided Justin Bieber instead.")
artist_name = "Justin Bieber"
artist_info = requests.get('https://api.spotify.com/v1/search',
headers={'authorization': "Bearer " + json_response['access_token']},
params={'q': artist_name, 'type': 'artist'})
# Converts the artist_info to json
artist_info = artist_info.json()
# Prints artists name rating and a link to them on Spotify
print("You have selected: {} \nThis artist has a popularity of {}%".format(artist_info["artists"]["items"][0]["name"], artist_info["artists"]["items"][0]["popularity"]) )
print(artist_info["artists"]["items"][0]["external_urls"]["spotify"])
# To see all json data uncomment the below...
# print(artist_info)
# Below draws a table showing the artist and popularity
fig, ax = plt.subplots()
# Gets data from converted json file about the artist and uses some sample data
# to make the results more interesting.
names = (artist_info["artists"]["items"][0]["name"], "The Beatles", "Metallica", "Dido")
y_pos = np.arange(len(names))
popularities = (artist_info["artists"]["items"][0]["popularity"], 88, 84, 75)
# Table titles and specifics listed below
ax.barh(y_pos, popularities, align='center')
ax.set_yticks(y_pos, labels=names)
ax.set_xlabel('Popularity %')
ax.set_xlim([0, 100])
ax.set_title('Artists Popularity')
# Displays table once the below is ran
plt.tight_layout()
plt.show()
elif raw_response.status_code == 400:
print("Unable to connect. This is most likely due to "
"invalid 'client_id' or 'client_secret'.")
print("For more information check the website: "
"'https://developer.spotify.com/documentation/general/guides/authorization/'")
# Any other response code grouped here, can add more to this later.
else:
print("Unable to connect. Error unknown.")
|
Oliver343/ArtistSearchAPI
|
ArtistSearch.py
|
ArtistSearch.py
|
py
| 3,149 |
python
|
en
|
code
| 0 |
github-code
|
6
|
8480767140
|
import random
import os
from shutil import copyfile
from shutil import move
import sys
random.seed(667)
def switchFiles(k, inc, out, a):
"""
k : k random samples
inc : incoming image path, abs value (relative to current directory)
out : outgoing image path, same as inc
csv : name of csv file that has the image names
a : action to take, copy or move (insert string)
"""
files = os.listdir(inc)
if len(files) < k:
print("Error: only", len(files), "images available in specified input directory",
"while", k, "were requested.")
print("Now exiting.")
sys.exit()
rand = random.sample(range(0, len(files)), k)
n_not_found = 0
for i in range(0, k):
im = files[rand[i]]
if im.endswith(".jpg") or im.endswith(".png"):
try:
if a == "copy":
copyfile(inc + "/" + im, out + "/" + im)
elif a == "move":
move(inc + "/" + im, out + "/" + im)
except FileNotFoundError:
n_not_found += 1
print("files not found (should be 0):", n_not_found)
if (len(sys.argv) != 5):
print("Usage: \"python3 getRandSample.py <n_random_samples> <path_in> <path_out> <action>\"")
else:
k = int(sys.argv[1])
inc = sys.argv[2]
out = sys.argv[3]
a = sys.argv[4]
switchFiles(k, inc, out, a)
|
lweitkamp/ImageClassificationProject
|
getRandSample.py
|
getRandSample.py
|
py
| 1,409 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71567893307
|
import re
text = input()
expression = r"(^|(?<=\s))-?([0]|[1-9][0-9]*)(.[0-9]+)?($|(?=\s))"
# ^ - старт
# | - или
# \s - празно място
# ? - провери
# ?<=\s - разгледай преди това дали има нещо, тоест преди първият знак, има ли такъв символ там.
# -? - 0 или един път, значи или има минус или, няма минус, проверка дали е отрицателно число или не е отрицателно число.
# [0]|[1-9][0-9]* - ако имам повече от една цифра,първата цифра трябва да е 1 до 9 и всички следващи,може да са от 0 до 9,
# но тогава пък аз изгубвам 1ците и трябва да сложа звезда, защото по този начин казвам или само една имам тук или ако имам
# повече от една цифра първата трябва да е не е 0 от 1-9 и следващите вече са от 0-9 * е нула или повече пъти.
# Или една цифра от 1 - 9 или повече от една цифра,като следващите може да са от 0 - 9
# \. - точката е специален символ и задълцително трябва тук да се сложи \ ред нея
# $ - край
matches = re.finditer(expression, text)
output = list()
for match in matches:
output.append(match.group())
print(" ".join(output))
# :r[\W] - matches all non-letter characters
# :r"[^A-Za-z] - if we want letters only
# :r (^|(?<=[^A-Za-z])) - catches the position where a new word starts
# # I have new BMW
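# Worked example (illustrative): with the escaped dot, the input "1 -2 0.5 007 3."
# matches "1", "-2" and "0.5"; "007" is rejected by the leading-zero rule and "3."
# is rejected because the dot must be followed by at least one digit.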
|
lorindi/SoftUni-Software-Engineering
|
Programming-Fundamentals-with-Python/9.Regular Expressions/04_match_numbers.py
|
04_match_numbers.py
|
py
| 1,835 |
python
|
bg
|
code
| 3 |
github-code
|
6
|
3987332070
|
import random
import numpy as np
from decimal import Decimal
def update_belief(belief, expectation, item_to_update, item_to_compare, teacher, item_preferred, reward_vals):
'''
Update belief distribution over reward of specified item based on specified query and label
Arguments:
belief: ((N,R_max+1) ndarray) prior over reward values for each item
expectation: ((N) array) expectation over belief for each item
item_to_update: (Item) item to update belief distribution for
item_to_compare: (Item) item that was used as comparison in teacher query
teacher: (Teacher) teacher that was queried
item_preferred: (Item) item that the teacher preferred out of (item_to_update, item_to_compare)
reward_vals: (int array) list of possible reward values
Return:
belief: ((N,R_max+1) ndarray) posterior over reward values for each item
'''
item_index_update = int(item_to_update)
item_index_compare = int(item_to_compare)
alternative_selected = 0 if item_to_update == item_preferred else 1
likelihood = [calc_likelihood(teacher.beta, r, expectation[item_index_compare], alternative_selected) for r in reward_vals]
unnormalized_posterior = np.multiply(belief[item_index_update], likelihood)
normalised_posterior = unnormalized_posterior / np.sum(unnormalized_posterior)
belief[item_index_update] = normalised_posterior
return belief
def calc_likelihood(beta, r0, r1, alternative_selected):
    val_0 = Decimal(beta * r0).exp()
    val_1 = Decimal(beta * r1).exp()
    if alternative_selected == 0:
        likelihood = float(val_0 / (val_0 + val_1))
    elif alternative_selected == 1:
        likelihood = float(val_1 / (val_0 + val_1))
    else:
        raise ValueError(f"invalid alternative: {alternative_selected}")
    return likelihood
def update_expectation(expectation, belief, reward_vals, indices_to_update):
if type(indices_to_update) != list:
# make it a list
indices_to_update = [indices_to_update]
for i in indices_to_update:
item_belief = belief[i]
expectation[i] = np.dot(item_belief, reward_vals)
return expectation
class Teacher(object):
def __init__(self, beta, seed=None):
self.beta = beta
random.seed(seed)
def get_beta(self):
return self.beta
def get_dist(self, r0, r1):
''' Return Boltzmann-rational distribution over alternatives i0 (with reward r0) and i1 (reward r1) '''
val_0 = Decimal(self.beta * r0).exp()
val_1 = Decimal(self.beta * r1).exp()
prob_0 = val_0/(val_0+val_1)
prob_1 = val_1/(val_0+val_1)
return [prob_0, prob_1]
def sample_dist(self, r0, r1):
dist = self.get_dist(r0, r1)
r = random.random()
if r < dist[0]:
return 0
return 1
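# The teacher above is Boltzmann-rational: P(choose alternative 0) =
# exp(beta * r0) / (exp(beta * r0) + exp(beta * r1)), so a larger beta gives a
# more deterministic (more rational) teacher and beta = 0 gives a uniform coin flip.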
|
RachelFreedman/B_select
|
selection.py
|
selection.py
|
py
| 2,902 |
python
|
en
|
code
| 0 |
github-code
|
6
|
42749263557
|
#!/usr/bin/env python
# coding=utf-8
# wujian@2018
import os
import argparse
import numpy as np
from libs.utils import istft, get_logger
from libs.opts import StftParser
from libs.data_handler import SpectrogramReader, WaveWriter
from libs.beamformer import DSBeamformer
logger = get_logger(__name__)
def run(args):
stft_kwargs = {
"frame_len": args.frame_len,
"frame_hop": args.frame_hop,
"window": args.window,
"center": args.center,
"transpose": False
}
topo = list(map(float, args.linear_topo.split(",")))
doa = args.doa if args.doa > 0 else 180 + args.doa
if doa < 0 or doa > 180:
raise RuntimeError(f"Illegal value for DoA: {args.doa:.2f}")
spectrogram_reader = SpectrogramReader(
args.wav_scp,
round_power_of_two=args.round_power_of_two,
**stft_kwargs)
beamformer = DSBeamformer(topo)
logger.info(f"Initialize {len(topo):d} channel DSBeamformer")
with WaveWriter(args.dst_dir, fs=args.fs) as writer:
for key, stft_src in spectrogram_reader:
stft_enh = beamformer.run(doa,
stft_src,
c=args.speed,
sample_rate=args.fs)
power = spectrogram_reader.power(key)
samps = istft(stft_enh, **stft_kwargs, power=power)
writer.write(key, samps)
logger.info(f"Processed {len(spectrogram_reader):d} utterances")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Command to apply delay and sum beamformer.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
parents=[StftParser.parser])
parser.add_argument("wav_scp",
type=str,
help="Rspecifier for multi-channel wave file")
parser.add_argument("dst_dir",
type=str,
help="Directory to dump enhanced results")
parser.add_argument("--fs",
type=int,
default=16000,
help="Sample frequency of input wave")
parser.add_argument("--speed",
type=float,
default=240,
help="Speed of sound")
parser.add_argument("--linear-topo",
type=str,
required=True,
help="Topology of linear microphone arrays")
parser.add_argument("--doa",
type=float,
default=90,
help="Given DoA for DS beamformer, in degrees")
args = parser.parse_args()
run(args)
|
Fuann/TENET
|
sptk/apply_ds_beamformer.py
|
apply_ds_beamformer.py
|
py
| 2,731 |
python
|
en
|
code
| 7 |
github-code
|
6
|
33504817634
|
from .models.user_tokens import UserTokens
from .models.sources_enabled import SourcesEnabled
from .searchers.constants import DEFAULT_PAGE_SIZE
from .models.results import Results, SourceResult
from .searchers import available_searchers
from .decorators import immutable
import logging
from collections import defaultdict
import grequests
logger = logging.getLogger(__name__)
class Link(object):
""" this is the core class and should be used outside
the package for search """
def __init__(self, user_tokens: UserTokens, sources_enabled: SourcesEnabled = None):
""" sources enabled being set to None implies all integrations for which token is set will be searched"""
self.__sources_enabled = sources_enabled
self.__user_tokens = user_tokens
if self.__sources_enabled is None:
self.__sources_enabled = SourcesEnabled(
list(self.__user_tokens.tokens.keys()))
super().__init__()
self.__page = 1
self.__pages = []
self.__results = Results()
self.__source_results = {}
self.__fetchers_modules = {}
self.__fetchers = defaultdict(list)
self.__reset()
@staticmethod
def builder(user_tokens: UserTokens, sources_enabled: SourcesEnabled = None):
return Link(user_tokens, sources_enabled)
def fetch(self):
self.__validate()
if len(self.__pages) >= self.__page:
logger.info(
"We don't have to load another page as its already been loaded")
self.__page += 1
return self.__pages[self.__page-2]
if self.__results.unfetched_results() >= self.__page_size:
self.__page += 1
output = self.__results.topk(self.__page_size)
self.__pages.append(output)
return output
if not self.__fetchers:
self.initialize_fetchers()
requests = []
for source in self.__sources_enabled.tokens:
for fetcher in self.__fetchers[source]:
request = fetcher.construct_request(
self.__page)
if request is not None:
requests.append(request)
grequests.map(requests)
self.__page += 1
output = self.__results.topk(self.__page_size)
self.__pages.append(output)
return output
def initialize_fetchers(self):
for source in self.__sources_enabled.tokens:
source_result = SourceResult(source)
for module in available_searchers[source]:
logger.debug(
f"Creating fetcher for {source} with name {module.name}")
self.__source_results[source] = source_result
self.__results.add_source_result(source_result)
self.__fetchers[source].append(
module(self.__user_tokens.tokens[source], self.__query, self.__page_size, source_result, self.__user_only))
if not self.__user_only and module.user_priority:
self.__fetchers[source].append(
module(self.__user_tokens.tokens[source], self.__query, self.__page_size, source_result, True))
def previous(self):
if self.__page < 3:
logger.info("Went too far back, this page doesn't exist")
return []
logger.info("Fetching a previous page")
self.__page -= 1
return self.__pages[self.__page-2]
@immutable("page_size", DEFAULT_PAGE_SIZE)
def page_size(self, page_size):
self.__page_size = page_size
return self
@immutable("query")
def query(self, query):
self.__query = query
return self
@immutable("user_only", False)
def user_only(self, user_only=False):
self.__user_only = user_only
return self
def __disable_all_sources(self):
self.__sources_enabled = []
def __validate(self):
assert(self.__query != None), "Query cant be None"
assert(self.__query != ""), "Query cant be empty"
assert(self.__user_tokens != None), "User Tokens cant be none"
assert(len(self.__sources_enabled.tokens) > 0), "No source enabled"
assert(set(self.__sources_enabled.tokens).issubset(
self.__user_tokens.tokens.keys())), "More sources enabled than tokens provided"
def __reset(self):
self.__page_size = DEFAULT_PAGE_SIZE
self.__query = None
self.__user_only = False
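# Illustrative builder-style usage (user_tokens construction and query are hypothetical):
# link = Link.builder(user_tokens).query("onboarding docs").page_size(10)
# first_page = link.fetch()       # repeated fetch() calls page forward
# previous_page = link.previous() # and previous() pages back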
|
h4ck3rk3y/link
|
link/core.py
|
core.py
|
py
| 4,506 |
python
|
en
|
code
| 1 |
github-code
|
6
|
11295712377
|
from datetime import datetime
import copy
filename = "input/day20input.txt"
file = open(filename, "r")
file = file.readlines()
data = []
points = []
for index, f in enumerate(file):
if index == 0:
lookup = f.replace('\n', '')
elif f != '\n':
data.append(f.replace('\n', ''))
for yindex, y in enumerate(data):
for xindex, ch in enumerate(y):
if ch == '#':
points.append((xindex, yindex))
def get_extremes(points):
maxx = 0
minx = 9999999999
maxy = 0
miny = 9999999999
for p in points:
if p[0] > maxx:
maxx = p[0]
if p[0] < minx:
minx = p[0]
if p[1] > maxy:
maxy = p[1]
if p[1] < miny:
miny = p[1]
return (minx, maxx, miny, maxy)
order = [(-1, -1), (0, -1), (1, -1), (-1, 0),
(0, 0), (1, 0), (-1, 1), (0, 1), (1, 1)]
gen = 0
while gen < 50:
# print('gen', gen)
newpoints = []
(minx, maxx, miny, maxy) = get_extremes(points)
for y in range(miny-1, maxy+2):
for x in range(minx-1, maxx+2):
newval = ''
for o in order:
get_loc = (x+o[0], y+o[1])
if (get_loc[0] < minx or get_loc[0] > maxx or get_loc[1] > maxy or get_loc[1] < miny) and gen % 2 == 0:
newval += '0'
elif (get_loc[0] < minx or get_loc[0] > maxx or get_loc[1] > maxy or get_loc[1] < miny) and gen % 2 == 1:
if lookup[0] == '.':
newval += '0'
else:
newval += '1'
elif get_loc in points:
newval += '1'
else:
newval += '0'
# print(newval)
newval = int(newval, 2)
# print(newval)
if lookup[newval] == '#':
newpoints.append((x, y))
gen += 1
points = newpoints
if (gen == 2):
print(len(points))
print(len(points))
|
mykreeve/advent-of-code-2021
|
day20.py
|
day20.py
|
py
| 1,997 |
python
|
en
|
code
| 0 |
github-code
|
6
|
40407441083
|
import sqlite3 as sql
def CreateDatabase():
coneccion = sql.connect("./Database/datos.db")
coneccion.commit()
coneccion.close()
CreateInitialTables()
print("Se ha creado la base de datos")
def SendQuery(query):
query = query
coneccion = sql.connect("./Database/datos.db")
cursor = coneccion.cursor()
cursor.execute(query)
data = cursor.fetchall()
coneccion.commit()
coneccion.close()
return data
def CreateInitialTables():
SendQuery("CREATE TABLE Chicas (cid integer primary key, nombre text not null)")
SendQuery("CREATE TABLE Asistencia (cid integer, fecha text not null, asistencia text, hora text, PRIMARY KEY (cid, fecha))")
SendQuery(" CREATE TABLE Pagos (cid integer, fecha text not null, sueldo integer,bonus integer, fichas integer, extras integer,info text,PRIMARY KEY (cid, fecha))")
def test_data():
import random
asistencia = ['si', 'no']
for i in range(10):
SendQuery(f"INSERT INTO Pagos (cid, fecha, sueldo, bonus,fichas, extras, info) VALUES ('{i+1}',date('now'),0, {random.randint(1000,99999)}, {random.randint(1000,99999)}, {random.randint(1000,99999)}, '')")
SendQuery(f"INSERT INTO Chicas (nombre) VALUES ('chica{str(i + 10)}')")
SendQuery(f"INSERT INTO Asistencia (cid, fecha, asistencia, hora) VALUES ('{i+1}',date('now'), '{random.choice(asistencia)}', '16:00')")
if __name__ == "__main__":
CreateDatabase()
test_data()
#SendQuery("SELECT nombre, fecha, sueldo, fichas, extras FROM Pagos, Chicas WHERE Chicas.cid = Pagos.cid")
|
Panconquesocl/LAS
|
Backend/DbUtils.py
|
DbUtils.py
|
py
| 1,566 |
python
|
es
|
code
| 0 |
github-code
|
6
|
41562701933
|
'''Singly linked list basics'''
#### Big O complexity ###
# - Insertion: O(1)
# - Removal - it depends: O(1) or O(N)
# - Searching: O(N)
# - Access: O(N)
### Important! #####
# 1. Singly linked lists are an excellent alternative to arrays when insertion and deletion
# at the beginning are frequently required.
# 2. A singly linked list does not use indexes; it works with nodes
class Node():
def __init__(self, val):
self.val = val
self.next = None
# Example how the node class works
first = Node('Node 1')
first.next = Node('Node 2')
first.next.next = Node('Node 3')
first.next.next.next = Node('Node 4')
class Singly_linked_list():
def __init__(self):
self.head = None
self.tail = None
self.length = 0
self.print_values()
def push(self, val):
new_node = Node(val)
if not self.head: # the list is empty
self.head = new_node
self.tail = self.head
else:
self.tail.next = new_node
self.tail = new_node
self.length += 1
return self
def pop(self):
if not self.head:
return None
current = self.head
new_tail = current
while current.next:
new_tail = current
current = current.next
self.tail = new_tail
self.tail.next = None
self.length -= 1
if self.length == 0:
self.head = None
self.tail = None
return current
def shift(self):
if not self.head:
return None
current_head = self.head
self.head = current_head.next
self.length -= 1
if self.length == 0:
self.tail = None
return current_head
def unshift(self, val):
new_node = Node(val)
if not self.head:
self.head = new_node
self.tail = new_node
else:
new_node.next = self.head
self.head = new_node
self.length += 1
return self
def get(self, idx):
if idx < 0 or idx >= self.length:
return None
current_head = self.head
for _ in range(0, idx):
current_head = current_head.next
return current_head
def set(self, idx, val):
node = self.get(idx)
if not node:
return False
node.val = val # replace the value by reference
return True
def insert(self, idx, val):
if idx < 0 or idx > self.length:
return False
if idx == self.length:
self.push(val)
return True
if (idx == 0):
self.unshift(val)
return True
new_node = Node(val)
prev_node = self.get(idx - 1)
tem_node = prev_node.next
prev_node.next = new_node
new_node.next = tem_node
self.length += 1
return True
def remove(self, idx):
if idx < 0 or idx >= self.length:
return None
        if idx == self.length - 1:  # removing the tail: delegate to pop() so self.tail is updated
            return self.pop()
if idx == 0:
return self.shift()
prev_node = self.get(idx - 1)
removed_node = prev_node.next
prev_node.next = removed_node.next
self.length -= 1
return removed_node
def reverse(self):
if not self.head:
return None
node = self.head
self.head = self.tail
self.tail = node
next_node = None
prev_node = None
for _ in range(self.length):
next_node = node.next # node=2, next=3* | node=3, next=4* | node=4, next=None* | node=None, next=None*
node.next = prev_node # node=1, next=None | node=2, next=1 | node=3, next=2 | node=4, next=3
prev_node = node # node=1, next=None | node=2, next=1 | node=3, next=2 | node=4, next=3
node = next_node # node=2 next=3 | node=3, next=4 | node=4, next=None | node=None, next=None
return self
def print_values(self):
if not self.head:
return None
current_head = self.head
for idx, _ in enumerate(range(0, self.length)):
            print('Index: #{idx}, value: '.format(idx=idx), current_head.val)
current_head = current_head.next
list = Singly_linked_list()
list.push('1')
list.push('2')
list.push('3')
list.push('4')
# list.pop()
# list.shift()
# list.unshift('6')
# list.set(1, 'item replaced')
# list.insert(1, 'insert #2 position')
# list.remove(1)
# list.reverse()
# print(list.get(2).val)
list.print_values()
|
Wainercrb/data-structures
|
singly-linked-list/main.py
|
main.py
|
py
| 4,676 |
python
|
en
|
code
| 0 |
github-code
|
6
|
37710422808
|
from azure.cognitiveservices.vision.customvision.training import training_api
from azure.cognitiveservices.vision.customvision.training.models import ImageUrlCreateEntry
from azure.cognitiveservices.vision.customvision.prediction import prediction_endpoint
from azure.cognitiveservices.vision.customvision.prediction.prediction_endpoint import models
import os
import requests
import string
def identify(image):
predictor = prediction_endpoint.PredictionEndpoint("15c7f6bd782b4fab887295c83a608f42")
with open(image, mode="rb") as test_data:
results = predictor.predict_image("ac4d0722-29ce-4116-b9d2-225b453a3df3", test_data.read())
    answer = None
    percent = 0
    for prediction in results.predictions:
        if prediction.probability > .5:
            # keep the most probable tag above the 0.5 threshold
            if answer is None or prediction.probability > percent:
                answer = prediction.tag
                percent = prediction.probability
    return answer
#takes in a list of paths to photos including row/col index
def images2Circ(photos):
#for p in photos:
# print(identify(p))
rind = 8
cind = 10
rows = 6
cols = 8
circuit = [[None for i in range(cols)] for j in range(rows)]
for pic in photos:
row = int(pic[rind])
col = int(pic[cind])
print(row,col)
gate = identify(pic)
if gate == "Corner":
circuit[row][col] = None
else:
circuit[row][col] = gate
print(circuit)
return circuit
#print(images2Circ(["pic/t3/aaa2a3.jpg","pic/t3/bbb1b4.jpg","pic/t3/ccc5c2.jpg","pic/t3/ddd5d5.jpg","pic/t3/eee0e0.jpg"]))
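# Filename convention assumed by rind/cind above (the example path is hypothetical):
# in "pic/t3/a2b3.jpg" the characters at string indices 8 and 10 are "2" and "3",
# so the recognized gate would be placed at row 2, column 3 of the circuit grid.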
|
Guptacos/tartanhacks2018
|
image_recognition.py
|
image_recognition.py
|
py
| 1,718 |
python
|
en
|
code
| 2 |
github-code
|
6
|
21011206894
|
import argparse
import pandas as pd
import cv2
import mediapipe as mp
mp_pose = mp.solutions.pose
from pose_embedder import FullBodyPoseEmbedder
from pose_classifier import PoseClassifier
import numpy as np
classifiers = {}
def run_classify(csv_path):
# initialise Pose estimator for whole video
pose = mp_pose.Pose(
min_detection_confidence=0.5,
min_tracking_confidence=0.5
)
print(f'Reading from {csv_path}')
df = pd.read_csv(csv_path)
filepaths_exercises = zip(df['filepaths'], df['exercise'], df['groundtruth'])
classifications = [classify(fname, exercise, gt, pose) for fname, exercise, gt in filepaths_exercises]
df['prediction'] = classifications
df.to_csv(csv_path, header=True, index=None)
def classify(fname, exercise, groundtruth, pose):
classifier_samples_folder = f'{exercise}_csvs_out'
# Transforms pose landmarks into embedding.
pose_embedder = FullBodyPoseEmbedder()
# Classifies give pose against database of poses.
if classifier_samples_folder in classifiers:
pose_classifier = classifiers[classifier_samples_folder]
else:
pose_classifier = PoseClassifier(
pose_samples_folder=classifier_samples_folder,
pose_embedder=pose_embedder,
top_n_by_max_distance=30,
top_n_by_mean_distance=10)
classifiers[classifier_samples_folder] = pose_classifier
print(fname)
print(exercise)
img = cv2.imread(fname)
classification_result = 0.0
results = pose.process(img)
pose_landmarks = results.pose_landmarks
if pose_landmarks:
frame_height, frame_width = img.shape[0], img.shape[1]
pose_landmarks = np.array(
[[lmk.x * frame_width, lmk.y * frame_height, lmk.z * frame_width]
for lmk in pose_landmarks.landmark],
dtype=np.float32)
assert pose_landmarks.shape == (33, 3), 'Unexpected landmarks shape: {}'.format(pose_landmarks.shape)
p_w_bar_x = {k:v/10. for k,v in sorted(pose_classifier(pose_landmarks).items(), key=lambda item: item[1], reverse=True)}
print(f'P(w|x): {p_w_bar_x}')
print(groundtruth)
gt_label = f'{exercise}_{groundtruth}'
if gt_label in p_w_bar_x:
classification_result = float(p_w_bar_x[gt_label])
return classification_result
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('csv_path')
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
print(args)
run_classify(args.csv_path)
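# Expected CSV layout, inferred from the column accesses above (example row is hypothetical):
# filepaths,exercise,groundtruth
# imgs/squat_001.jpg,squat,down
# run_classify() appends a 'prediction' column holding the classifier's score for the ground-truth label.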
|
insidedctm/pose_knn_classifier
|
classify.py
|
classify.py
|
py
| 2,471 |
python
|
en
|
code
| 0 |
github-code
|
6
|
70680774589
|
from django import forms
from crispy_forms.helper import FormHelper
from .models import Category
from crispy_forms.layout import Submit, Layout, Div, HTML, Field
class CategoryForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Div(Div(
Div(Div(
Field('kind'),
css_class='col-md-12'), css_class='row'),
Div(
Submit('submit', 'Salvar', css_class="btn btn-info btn-lg"),
HTML('<a href="{% url "dashboard" %}" class="btn btn-outline-secondary btn-lg">Voltar</a>'),
css_class='row btn-group col-md-12 d-flex justify-content-end'),
css_class='col-md-12'), css_class='row mt-5 w-100')
)
class Meta:
model = Category
fields = ('kind',)
|
ifcassianasl/movie-list
|
category/forms.py
|
forms.py
|
py
| 965 |
python
|
en
|
code
| 0 |
github-code
|
6
|
40319564097
|
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ansibleguy.opnsense.plugins.module_utils.base.handler import \
module_dependency_error, MODULE_EXCEPTIONS
try:
from ansible_collections.ansibleguy.opnsense.plugins.module_utils.helper.main import \
diff_remove_empty
from ansible_collections.ansibleguy.opnsense.plugins.module_utils.base.api import Session
from ansible_collections.ansibleguy.opnsense.plugins.module_utils.defaults.main import \
OPN_MOD_ARGS, STATE_MOD_ARG
except MODULE_EXCEPTIONS:
module_dependency_error()
DOCUMENTATION = 'https://opnsense.ansibleguy.net/en/latest/modules/_tmpl.html'
EXAMPLES = 'https://opnsense.ansibleguy.net/en/latest/modules/_tmpl.html'
def run_module():
module_args = dict(
name=dict(type='str', required=True),
description=dict(type='str', required=False, default='', aliases=['desc']),
content=dict(type='list', required=False, default=[], elements='str'),
type=dict(type='str', required=False, choices=['1', '2'], default='1'),
**STATE_MOD_ARG,
**OPN_MOD_ARGS,
)
result = dict(
changed=False,
diff={
'before': {},
'after': {},
}
)
module = AnsibleModule(
argument_spec=module_args,
supports_check_mode=True,
)
session = Session(module=module)
# do api interactions here
exists = True # check via api if the item already exists
# session.get(cnf={
# 'module': 'API-Module',
# 'controller': 'API-Controller',
# 'command': 'API-info-command',
# 'data': {'tests'}
# })
if exists:
result['diff']['before'] = 'test' # set to current value for diff-mode
if module.params['state'] == 'absent':
if exists:
result['changed'] = True
if not module.check_mode:
# remove item via api if not in check-mode
# session.post(cnf={
# 'module': 'API-Module',
# 'controller': 'API-Controller',
# 'command': 'API-delete-command',
# 'params': ['uuid'],
# })
pass
else:
if exists:
value_changed = True # compare existing item config with configured one
if value_changed:
result['diff']['after'] = 'tests' # set to configured value(s)
if not module.check_mode:
# update item via api if not in check-mode
# session.post(cnf={
# 'module': 'API-Module',
# 'controller': 'API-Controller',
# 'command': 'API-update-command',
# 'data': {'tests'},
# 'params': ['uuid'],
# })
pass
else:
result['diff']['after'] = 'tests' # set to configured value(s)
if not module.check_mode:
# create item via api if not in check-mode
# session.post(cnf={
# 'module': 'API-Module',
# 'controller': 'API-Controller',
# 'command': 'API-add-command',
# 'data': {'tests'},
# })
pass
# don't forget to call the 'reload' endpoint to activate the changes (if available/needed)
# cleanup and exit
session.close()
result['diff'] = diff_remove_empty(result['diff'])
module.exit_json(**result)
def main():
run_module()
if __name__ == '__main__':
main()
|
ansibleguy/collection_opnsense
|
plugins/modules/_tmpl_direct.py
|
_tmpl_direct.py
|
py
| 3,679 |
python
|
en
|
code
| 158 |
github-code
|
6
|
20528489084
|
import pygame
WIDTH = 600
HEIGHT = 700
class Start:
def __init__(self):
pygame.init()
self.display = pygame.display.set_mode((WIDTH, HEIGHT))
self.background = pygame.Surface(self.display.get_size()).convert()
self.words = pygame.Surface(self.display.get_size()).convert()
self.font = pygame.font.SysFont('comicsansms',30)
self.fonty = pygame.font.SysFont('lucidaconsole',70)
self.play = self.font.render('Play',True,(0,255,0))
self.title = self.fonty.render('Memorize',True,(0,0,255))
self.emoji = self.fonty.render('Emoji',True,(255,0,0))
self.tape = pygame.image.load('tape.png').convert_alpha()
self.smart = pygame.image.load('smartemoji.png').convert_alpha()
self.tape = pygame.transform.scale(self.tape,(50,50))
self.smart = pygame.transform.scale(self.smart,(150,150))
self.mouse = pygame.mouse.get_pos()
letter = 'Memorize'
self.x = 150
for c in letter:
self.text = self.fonty.render(c,True,(0,0,255))
pygame.time.delay(50)
self.display.blit(self.text,(self.x,200))
self.words.blit(self.display,(self.x,350))
self.x += 40
pygame.display.flip()
pygame.time.delay(200)
self.display.blit(self.background,(0,0))
pygame.display.flip()
self.display.blit(self.play,(400,500))
pygame.draw.rect(self.display, (200,200,200),(110,100,230,80))
self.display.blit(self.emoji,(120,100))
self.display.blit(self.tape,(315,80))
self.display.blit(self.tape,(95,145))
self.display.blit(self.title,(150,200))
self.display.blit(self.smart,(150,400))
pygame.display.flip()
def choice(self):
done = False
while not done:
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
return False
elif event.type == pygame.MOUSEMOTION:
self.mouse = pygame.mouse.get_pos()
if 400<self.mouse[0]<470 and 500<self.mouse[1]<540:
pygame.draw.rect(self.display, (255,255,255),(400,500,70,45))
self.display.blit(self.play,(400,500))
pygame.display.flip()
else:
pygame.draw.rect(self.display, (0,0,0),(400,500,70,45))
self.display.blit(self.play,(400,500))
pygame.display.flip()
elif event.type == pygame.MOUSEBUTTONDOWN:
if pygame.mouse.get_pressed()[0] and 400<self.mouse[0]<470 and 500<self.mouse[1]<550:
return True
pygame.display.flip()
pygame.quit()
|
dlam15/Emoji-Memorize
|
Start.py
|
Start.py
|
py
| 2,930 |
python
|
en
|
code
| 0 |
github-code
|
6
|
40920706579
|
from django.urls import path
from lanarce_portfolio.images.api.views import ImagesCreateListAPI, ImageUpdateDeleteAPI, CommentListAPI
app_name = "images"
urlpatterns = [
path(
"", ImagesCreateListAPI.as_view(), name="images-create-list"
),
path(
"<uuid:image_id>/", ImageUpdateDeleteAPI.as_view(), name="image-update-delete"
),
path(
"<uuid:image_id>/comment/", CommentListAPI.as_view(), name="comment-list"
),
]
|
Ari100telll/lanarce_portfolio
|
lanarce_portfolio/images/urls.py
|
urls.py
|
py
| 465 |
python
|
en
|
code
| 0 |
github-code
|
6
|
36697861075
|
import os
# ---- TIMEOUT ---- #
# The maximum number of subprocesses to run at any given time.
max_processes = 5
# The maximum time any subprocess should run, in seconds, and an operation
# to be performed when a timeout occurs.
# Make sure this is >> than the individual test timeouts
# (see pam.py and utils/defaults.py).
timeout = 100
timeout_operation = lambda: open('timedout', 'w').close()
# ---- STUDENT PROCESSING ---- #
# File containing a list of student directories to test.
# -- Each directory should be on its own line.
# -- Each entry should be a relative path from the directory
# that contains test_runner.py.
students_fname = os.path.join('pam', 'examples', 'directories.txt')
# absolute path to uam
path_to_uam = 'YOUR_PATH_TO_UAM'
# Shell command to be performed before executing tests in a directory or None.
# -- This command will be invoked from within the student's directory!
# -- Notice the use of absolute paths here.
preamble_cmd = ('''cp %s .; cp %s .; cp %s .''' %
(os.path.join(path_to_uam, 'pam', 'examples', 'test_asst.py'),
os.path.join(path_to_uam, 'pam', 'examples', 'test_2_asst.py'),
os.path.join(path_to_uam, 'pam', 'examples', 'pep8.py')))
# List of shell commands that execute the tests in a student's
# submission directory.
# Warning: Some versions of shell don't like the >& redirect, so it's safer
# to redirect stdout and then use 2>&1
# See pam.py for more documentation.
test_cmd = [('%s result.json test_asst.py test_2_asst.py' %
os.path.join(path_to_uam, 'pam', 'pam.py'))]
# Shell command to be performed after executing tests in a student's submission
# directory or None.
postamble_cmd = 'rm -rf __pycache__ test_asst.py test_2_asst.py pep8.py'
# ---- AGGREGATION AND TEMPLATING ---- #
# where are the templates? absolute path.
template_dir = os.path.join(path_to_uam, 'templates')
|
ProjectAT/uam
|
pam/examples/config.py
|
config.py
|
py
| 1,921 |
python
|
en
|
code
| 4 |
github-code
|
6
|
3774639154
|
__author__ = 'shixk'
import datetime
from SearchFiles import SearchFiles
class GetData(object):
def loadfilterdata(self, query, conf):
if query['method'] == "time":
return self.filterbydate(query, conf)
else:
return {'ERROR': 'no method'}
    def filterbydate(self, query, conf):
        sf = SearchFiles(conf)
        if 'filetype' in query.keys():
            query['filetype'] = ['.' + q for q in query['filetype'].split(',')]
            if 'start' not in query.keys():
                return sf.getfilenotime(query['filetype'])
            if 'end' not in query.keys():
                # default to a 24h window after 'start'
                query['end'] = datetime.datetime.strptime(query['start'], "%Y-%m-%d") + datetime.timedelta(hours=24)
            return sf.getfilelist(query['start'], query['end'], query['filetype'])
        else:
            if 'start' not in query.keys():
                return sf.getfileno2t()
            if 'end' not in query.keys():
                query['end'] = datetime.datetime.strptime(query['start'], "%Y-%m-%d") + datetime.timedelta(hours=24)
            return sf.getfileno_type(query['start'], query['end'])
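# Example query dict handled by loadfilterdata (values are illustrative):
# {'method': 'time', 'filetype': 'jpg,png', 'start': '2020-01-01', 'end': '2020-01-05'}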
|
shinSG/SimplePictureService
|
HttpService/GetData.py
|
GetData.py
|
py
| 1,285 |
python
|
en
|
code
| 0 |
github-code
|
6
|
5001311387
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from .models import Question
class AnswerFrom(forms.Form):
    content=forms.CharField(widget=forms.Textarea(attrs={'rows': 6}), label='Answer')
def __init__(self, *args, **kwargs):
super(AnswerFrom, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_id = 'answer'
self.helper.form_method = 'post'
self.helper.form_action = 'answer/'
        self.helper.add_input(Submit('submit', 'Answer'))
class QuestionForm(forms.Form):
title=forms.CharField(required=True, label='Question')
content=forms.CharField(label='Content', widget=forms.Textarea(), required=False)
tags=forms.CharField(label='Topics')
def clean_title(self):
title=self.cleaned_data['title'].strip()
if title[len(title)-1]!='?':
title+='?'
return title
def clean_tags(self):
tags=self.cleaned_data['tags'].strip()
tags=tags.strip(',')
tags=tags.split(',')
for i in range(len(tags)):
tags[i]=tags[i].lower().title()
return tags
def __init__(self, *args, **kwargs):
super(QuestionForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_id = 'question'
self.helper.form_method = 'post'
self.helper.form_action = 'question:add'
self.helper.add_input(Submit('submit', 'Submit'))
|
duonghau/hoidap
|
question/forms.py
|
forms.py
|
py
| 1,575 |
python
|
en
|
code
| 0 |
github-code
|
6
|
74668118906
|
from collections import Counter
from itertools import product
from operator import add
def solve(lines, cycles, dimensions):
    """Run the Conway-cube automaton in `dimensions` dimensions for `cycles`
    steps and return the number of active cells."""
    board = set()
    for row, line in enumerate(lines):
        for col, elem in enumerate(line):
            if elem == '#':
                # Embed the 2D input slice in an n-dimensional grid
                # (all extra coordinates start at 0).
                cell = dimensions * [0, ]
                cell[0], cell[1] = col, row
                board.add(tuple(cell))
    for _ in range(cycles):
        new_board = set()
        neighbour_counts = Counter()
        # Let every active cell "vote" for each of its 3^n - 1 neighbours;
        # only cells adjacent to an active cell can change state anyway.
        for cell in board:
            for delta in product(range(-1, 2), repeat=dimensions):
                if delta != dimensions * (0,):
                    neighbour_counts[tuple(map(add, cell, delta))] += 1
        # Standard life rules: exactly 3 neighbours activates a cell;
        # an already-active cell with exactly 2 stays active.
        for cell, count in neighbour_counts.items():
            if count == 3 or (cell in board and count == 2):
                new_board.add(cell)
        board = new_board
    return len(board)
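
# Sanity check on the example grid from the puzzle statement, which the
# puzzle quotes as 112 active cubes in 3D and 848 in 4D after six cycles.
assert solve(['.#.', '..#', '###'], 6, 3) == 112
assert solve(['.#.', '..#', '###'], 6, 4) == 848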
with open('input.txt') as file:
lines = file.read().splitlines()
print(solve(lines, 6, 3))
print(solve(lines, 6, 4))
|
dionyziz/advent-of-code
|
2020/17/17.py
|
17.py
|
py
| 1,006 |
python
|
en
|
code
| 8 |
github-code
|
6
|
70943711868
|
from pytsbe.data.exploration import DataExplorer
def explore_available_datasets():
"""
    Example of how to launch data exploration. For every dataset in the data
    folder, report statistics on stationary and non-stationary time series
    and create visualisations of the series.
"""
explorer = DataExplorer()
explorer.display_statistics()
# Have a look at time series
explorer.visualise_series()
if __name__ == '__main__':
explore_available_datasets()
|
ITMO-NSS-team/pytsbe
|
examples/univariate_data_exploration.py
|
univariate_data_exploration.py
|
py
| 486 |
python
|
en
|
code
| 30 |
github-code
|
6
|
36481324263
|
def spin_words(sentence):
"""
    Reverse every word of five or more letters in the given text.

    Takes in a string of one or more words, and returns the same string,
    but with all words of five or more letters reversed.
    Strings passed in will consist of only letters and spaces; spaces will
    be included only when more than one word is present.

    :return: 'str'. Modified sentence.
>>> spin_words("Hey fellow warriors")
'Hey wollef sroirraw'
>>> spin_words("Short and long words")
'trohS and long sdrow'
"""
sentence = sentence.split()
for idx, word in enumerate(sentence):
if len(word) >= 5:
sentence[idx] = word[::-1]
return " ".join(sentence)
print(spin_words("Short and long words"))
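
# Optional addition (not part of the original kata solution): run the
# doctest examples embedded in the docstring above.
if __name__ == '__main__':
    import doctest
    doctest.testmod()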
|
Djet78/Codewars_tasks
|
Python/kyu_6/stop_gninnips_my_sdrow.py
|
stop_gninnips_my_sdrow.py
|
py
| 743 |
python
|
en
|
code
| 0 |
github-code
|
6
|
10368918603
|
import asyncio
import re
from os import remove
from pyUltroid.dB import DEVLIST
try:
from tabulate import tabulate
except ImportError:
tabulate = None
from telethon import events
from telethon.errors import MessageNotModifiedError
from telethon.tl.functions.contacts import (
BlockRequest,
GetBlockedRequest,
UnblockRequest,
)
from telethon.tl.functions.messages import ReportSpamRequest
from telethon.utils import get_display_name, resolve_bot_file_id
from pyUltroid.dB.base import KeyManager
from . import *
# ========================= CONSTANTS =============================
COUNT_PM = {}
LASTMSG = {}
WARN_MSGS = {}
U_WARNS = {}
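# Legacy installs stored PMPERMIT as a single int/str user id; the block
# below wraps it in a list so KeyManager can treat the key uniformly.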
if isinstance(udB.get_key("PMPERMIT"), (int, str)):
value = [udB.get_key("PMPERMIT")]
udB.set_key("PMPERMIT", value)
keym = KeyManager("PMPERMIT", cast=list)
Logm = KeyManager("LOGUSERS", cast=list)
PMPIC = udB.get_key("PMPIC")
LOG_CHANNEL = udB.get_key("LOG_CHANNEL")
UND = get_string("pmperm_1")
UNS = get_string("pmperm_2")
NO_REPLY = get_string("pmperm_3")
UNAPPROVED_MSG = "**PMSecurity of {ON}!**\n\n{UND}\n\nYou have {warn}/{twarn} warnings!"
if udB.get_key("PM_TEXT"):
UNAPPROVED_MSG = (
"**PMSecurity of {ON}!**\n\n"
+ udB.get_key("PM_TEXT")
+ "\n\nYou have {warn}/{twarn} warnings!"
)
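# Illustration only (values invented): with the default template, wrn=1 and
# WARNS=4, an unapproved user sees roughly:
#   **PMSecurity of <owner name>!**
#   <the UND string from the language pack>
#   You have 1/4 warnings!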
WARNS = udB.get_key("PMWARNS") or 4
PMCMDS = [
f"{HNDLR}a",
f"{HNDLR}approve",
f"{HNDLR}da",
f"{HNDLR}disapprove",
f"{HNDLR}block",
f"{HNDLR}unblock",
]
_not_approved = {}
_to_delete = {}
my_bot = asst.me.username
def update_pm(userid, message, warns_given):
    # Remember the latest warning text and warning count per user.
    # (dict assignment cannot raise KeyError, so no try/except is needed.)
    WARN_MSGS[userid] = message
    U_WARNS[userid] = warns_given
async def delete_pm_warn_msgs(chat: int):
try:
await _to_delete[chat].delete()
except KeyError:
pass
# =================================================================
if udB.get_key("PMLOG"):
@ultroid_cmd(
pattern="logpm$",
)
async def _(e):
if not e.is_private:
return await e.eor("`Use me in Private.`", time=3)
if not Logm.contains(e.chat_id):
return await e.eor("`Wasn't logging msgs from here.`", time=3)
Logm.remove(e.chat_id)
return await e.eor("`Now I Will log msgs from here.`", time=3)
@ultroid_cmd(
pattern="nologpm$",
)
async def _(e):
if not e.is_private:
return await e.eor("`Use me in Private.`", time=3)
if Logm.contains(e.chat_id):
return await e.eor("`Wasn't logging msgs from here.`", time=3)
Logm.add(e.chat_id)
return await e.eor("`Now I Won't log msgs from here.`", time=3)
@ultroid_bot.on(
events.NewMessage(
incoming=True,
func=lambda e: e.is_private,
),
)
async def permitpm(event):
user = await event.get_sender()
if user.bot or user.is_self or user.verified or Logm.contains(user.id):
return
await event.forward_to(udB.get_key("PMLOGGROUP") or LOG_CHANNEL)
if udB.get_key("PMSETTING"):
if udB.get_key("AUTOAPPROVE"):
@ultroid_bot.on(
events.NewMessage(
outgoing=True,
func=lambda e: e.is_private and e.out and not e.text.startswith(HNDLR),
),
)
async def autoappr(e):
miss = await e.get_chat()
if miss.bot or miss.is_self or miss.verified or miss.id in DEVLIST:
return
if keym.contains(miss.id):
return
keym.add(miss.id)
await delete_pm_warn_msgs(miss.id)
try:
await ultroid_bot.edit_folder(miss.id, folder=0)
except BaseException:
pass
try:
await asst.edit_message(
LOG_CHANNEL,
_not_approved[miss.id],
f"#AutoApproved : <b>OutGoing Message.\nUser : {inline_mention(miss, html=True)}</b> [<code>{miss.id}</code>]",
parse_mode="html",
)
except KeyError:
await asst.send_message(
LOG_CHANNEL,
f"#AutoApproved : <b>OutGoing Message.\nUser : {inline_mention(miss, html=True)}</b> [<code>{miss.id}</code>]",
parse_mode="html",
)
except MessageNotModifiedError:
pass
@ultroid_bot.on(
events.NewMessage(
incoming=True,
func=lambda e: e.is_private
and e.sender_id not in DEVLIST
and not e.out
and not e.sender.bot
and not e.sender.is_self
and not e.sender.verified,
)
)
async def permitpm(event):
inline_pm = Redis("INLINE_PM") or False
user = event.sender
if not keym.contains(user.id) and event.text != UND:
if Redis("MOVE_ARCHIVE"):
try:
await ultroid_bot.edit_folder(user.id, folder=1)
except BaseException as er:
LOGS.info(er)
if event.media and not udB.get_key("DISABLE_PMDEL"):
await event.delete()
name = user.first_name
fullname = get_display_name(user)
username = f"@{user.username}"
mention = inline_mention(user)
count = keym.count()
try:
wrn = COUNT_PM[user.id] + 1
await asst.edit_message(
udB.get_key("LOG_CHANNEL"),
_not_approved[user.id],
f"Incoming PM from **{mention}** [`{user.id}`] with **{wrn}/{WARNS}** warning!",
buttons=[
Button.inline("Approve PM", data=f"approve_{user.id}"),
Button.inline("Block PM", data=f"block_{user.id}"),
],
)
except KeyError:
_not_approved[user.id] = await asst.send_message(
udB.get_key("LOG_CHANNEL"),
f"Incoming PM from **{mention}** [`{user.id}`] with **1/{WARNS}** warning!",
buttons=[
Button.inline("Approve PM", data=f"approve_{user.id}"),
Button.inline("Block PM", data=f"block_{user.id}"),
],
)
wrn = 1
except MessageNotModifiedError:
wrn = 1
if user.id in LASTMSG:
prevmsg = LASTMSG[user.id]
if event.text != prevmsg:
if "PMSecurity" in event.text or "**PMSecurity" in event.text:
return
await delete_pm_warn_msgs(user.id)
message_ = UNAPPROVED_MSG.format(
ON=OWNER_NAME,
warn=wrn,
twarn=WARNS,
UND=UND,
name=name,
fullname=fullname,
username=username,
count=count,
mention=mention,
)
update_pm(user.id, message_, wrn)
if inline_pm:
results = await ultroid_bot.inline_query(
my_bot, f"ip_{user.id}"
)
try:
_to_delete[user.id] = await results[0].click(
user.id, reply_to=event.id, hide_via=True
)
except Exception as e:
LOGS.info(str(e))
elif PMPIC:
_to_delete[user.id] = await ultroid_bot.send_file(
user.id,
PMPIC,
caption=message_,
)
else:
_to_delete[user.id] = await ultroid_bot.send_message(
user.id, message_
)
else:
await delete_pm_warn_msgs(user.id)
message_ = UNAPPROVED_MSG.format(
ON=OWNER_NAME,
warn=wrn,
twarn=WARNS,
UND=UND,
name=name,
fullname=fullname,
username=username,
count=count,
mention=mention,
)
update_pm(user.id, message_, wrn)
if inline_pm:
try:
results = await ultroid_bot.inline_query(
my_bot, f"ip_{user.id}"
)
_to_delete[user.id] = await results[0].click(
user.id, reply_to=event.id, hide_via=True
)
except Exception as e:
LOGS.info(str(e))
elif PMPIC:
_to_delete[user.id] = await ultroid_bot.send_file(
user.id,
PMPIC,
caption=message_,
)
else:
_to_delete[user.id] = await ultroid_bot.send_message(
user.id, message_
)
LASTMSG.update({user.id: event.text})
else:
await delete_pm_warn_msgs(user.id)
message_ = UNAPPROVED_MSG.format(
ON=OWNER_NAME,
warn=wrn,
twarn=WARNS,
UND=UND,
name=name,
fullname=fullname,
username=username,
count=count,
mention=mention,
)
update_pm(user.id, message_, wrn)
if inline_pm:
try:
results = await ultroid_bot.inline_query(
my_bot, f"ip_{user.id}"
)
_to_delete[user.id] = await results[0].click(
user.id, reply_to=event.id, hide_via=True
)
except Exception as e:
LOGS.info(str(e))
elif PMPIC:
_to_delete[user.id] = await ultroid_bot.send_file(
user.id,
PMPIC,
caption=message_,
)
else:
_to_delete[user.id] = await ultroid_bot.send_message(
user.id, message_
)
LASTMSG.update({user.id: event.text})
if user.id not in COUNT_PM:
COUNT_PM.update({user.id: 1})
else:
COUNT_PM[user.id] = COUNT_PM[user.id] + 1
if COUNT_PM[user.id] >= WARNS:
await delete_pm_warn_msgs(user.id)
_to_delete[user.id] = await event.respond(UNS)
try:
del COUNT_PM[user.id]
del LASTMSG[user.id]
except KeyError:
await asst.send_message(
udB.get_key("LOG_CHANNEL"),
"PMPermit is messed! Pls restart the bot!!",
)
return LOGS.info("COUNT_PM is messed.")
await ultroid_bot(BlockRequest(user.id))
await ultroid_bot(ReportSpamRequest(peer=user.id))
await asst.edit_message(
udB.get_key("LOG_CHANNEL"),
_not_approved[user.id],
f"**{mention}** [`{user.id}`] was Blocked for spamming.",
)
@ultroid_cmd(pattern="(start|stop|clear)archive$", fullsudo=True)
async def _(e):
x = e.pattern_match.group(1).strip()
if x == "start":
udB.set_key("MOVE_ARCHIVE", "True")
await e.eor("Now I will move new Unapproved DM's to archive", time=5)
elif x == "stop":
udB.set_key("MOVE_ARCHIVE", "False")
await e.eor("Now I won't move new Unapproved DM's to archive", time=5)
elif x == "clear":
try:
await e.client.edit_folder(unpack=1)
await e.eor("Unarchived all chats", time=5)
except Exception as mm:
await e.eor(str(mm), time=5)
@ultroid_cmd(pattern="(a|approve)(?: |$)", fullsudo=True)
async def approvepm(apprvpm):
if apprvpm.reply_to_msg_id:
user = (await apprvpm.get_reply_message()).sender
elif apprvpm.is_private:
user = await apprvpm.get_chat()
else:
return await apprvpm.edit(NO_REPLY)
if user.id in DEVLIST:
return await eor(
apprvpm,
"Lol, He is my Developer\nHe is auto Approved",
)
if not keym.contains(user.id):
keym.add(user.id)
try:
await delete_pm_warn_msgs(user.id)
await apprvpm.client.edit_folder(user.id, folder=0)
except BaseException:
pass
await eod(
apprvpm,
f"<b>{inline_mention(user, html=True)}</b> <code>approved to PM!</code>",
parse_mode="html",
)
try:
await asst.edit_message(
udB.get_key("LOG_CHANNEL"),
_not_approved[user.id],
f"#APPROVED\n\n<b>{inline_mention(user, html=True)}</b> [<code>{user.id}</code>] <code>was approved to PM you!</code>",
buttons=[
Button.inline("Disapprove PM", data=f"disapprove_{user.id}"),
Button.inline("Block", data=f"block_{user.id}"),
],
parse_mode="html",
)
except KeyError:
_not_approved[user.id] = await asst.send_message(
udB.get_key("LOG_CHANNEL"),
f"#APPROVED\n\n<b>{inline_mention(user, html=True)}</b> [<code>{user.id}</code>] <code>was approved to PM you!</code>",
buttons=[
Button.inline("Disapprove PM", data=f"disapprove_{user.id}"),
Button.inline("Block", data=f"block_{user.id}"),
],
parse_mode="html",
)
except MessageNotModifiedError:
pass
else:
await apprvpm.eor("`User may already be approved.`", time=5)
@ultroid_cmd(pattern="(da|disapprove)(?: |$)", fullsudo=True)
async def disapprovepm(e):
if e.reply_to_msg_id:
user = (await e.get_reply_message()).sender
elif e.is_private:
user = await e.get_chat()
else:
return await e.edit(NO_REPLY)
if user.id in DEVLIST:
return await eor(
e,
"`Lol, He is my Developer\nHe Can't Be DisApproved.`",
)
if keym.contains(user.id):
keym.remove(user.id)
await eod(
e,
f"<b>{inline_mention(user, html=True)}</b> <code>Disapproved to PM!</code>",
parse_mode="html",
)
try:
await asst.edit_message(
udB.get_key("LOG_CHANNEL"),
_not_approved[user.id],
f"#DISAPPROVED\n\n<b>{inline_mention(user, html=True)}</b> [<code>{user.id}</code>] <code>was disapproved to PM you.</code>",
buttons=[
Button.inline("Approve PM", data=f"approve_{user.id}"),
Button.inline("Block", data=f"block_{user.id}"),
],
parse_mode="html",
)
except KeyError:
_not_approved[user.id] = await asst.send_message(
udB.get_key("LOG_CHANNEL"),
f"#DISAPPROVED\n\n<b>{inline_mention(user, html=True)}</b> [<code>{user.id}</code>] <code>was disapproved to PM you.</code>",
buttons=[
Button.inline("Approve PM", data=f"approve_{user.id}"),
Button.inline("Block", data=f"block_{user.id}"),
],
parse_mode="html",
)
except MessageNotModifiedError:
pass
else:
await eod(
e,
f"<b>{inline_mention(user, html=True)}</b> <code>was never approved!</code>",
parse_mode="html",
)
@ultroid_cmd(pattern="block( (.*)|$)", fullsudo=True)
async def blockpm(block):
match = block.pattern_match.group(1).strip()
if block.reply_to_msg_id:
user = (await block.get_reply_message()).sender_id
elif match:
try:
user = await block.client.parse_id(match)
except Exception as er:
return await block.eor(str(er))
elif block.is_private:
user = block.chat_id
else:
return await eor(block, NO_REPLY, time=10)
await block.client(BlockRequest(user))
aname = await block.client.get_entity(user)
await block.eor(f"{inline_mention(aname)} [`{user}`] `has been blocked!`")
try:
keym.remove(user)
except AttributeError:
pass
try:
await asst.edit_message(
udB.get_key("LOG_CHANNEL"),
_not_approved[user],
f"#BLOCKED\n\n{inline_mention(aname)} [`{user}`] has been **blocked**.",
buttons=[
Button.inline("UnBlock", data=f"unblock_{user}"),
],
)
except KeyError:
_not_approved[user] = await asst.send_message(
udB.get_key("LOG_CHANNEL"),
f"#BLOCKED\n\n{inline_mention(aname)} [`{user}`] has been **blocked**.",
buttons=[
Button.inline("UnBlock", data=f"unblock_{user}"),
],
)
except MessageNotModifiedError:
pass
@ultroid_cmd(pattern="unblock( (.*)|$)", fullsudo=True)
async def unblockpm(event):
match = event.pattern_match.group(1).strip()
reply = await event.get_reply_message()
if reply:
user = reply.sender_id
elif match:
if match == "all":
msg = await event.eor(get_string("com_1"))
u_s = await event.client(GetBlockedRequest(0, 0))
count = len(u_s.users)
if not count:
return await eor(msg, "__You have not blocked Anyone...__")
for user in u_s.users:
await asyncio.sleep(1)
await event.client(UnblockRequest(user.id))
            # GetBlockedRequest returns at most 20 users per call.
if count < 20:
return await eor(msg, f"__Unblocked {count} Users!__")
while u_s.users:
u_s = await event.client(GetBlockedRequest(0, 0))
for user in u_s.users:
await asyncio.sleep(3)
await event.client(UnblockRequest(user.id))
count += len(u_s.users)
return await eor(msg, f"__Unblocked {count} users.__")
try:
user = await event.client.parse_id(match)
except Exception as er:
return await event.eor(str(er))
elif event.is_private:
user = event.chat_id
else:
return await event.eor(NO_REPLY, time=10)
try:
await event.client(UnblockRequest(user))
aname = await event.client.get_entity(user)
await event.eor(f"{inline_mention(aname)} [`{user}`] `has been UnBlocked!`")
except Exception as et:
return await event.eor(f"ERROR - {et}")
try:
await asst.edit_message(
udB.get_key("LOG_CHANNEL"),
_not_approved[user],
f"#UNBLOCKED\n\n{inline_mention(aname)} [`{user}`] has been **unblocked**.",
buttons=[
Button.inline("Block", data=f"block_{user}"),
],
)
except KeyError:
_not_approved[user] = await asst.send_message(
udB.get_key("LOG_CHANNEL"),
f"#UNBLOCKED\n\n{inline_mention(aname)} [`{user}`] has been **unblocked**.",
buttons=[
Button.inline("Block", data=f"block_{user}"),
],
)
except MessageNotModifiedError:
pass
@ultroid_cmd(pattern="listapproved$", owner=True)
async def list_approved(event):
xx = await event.eor(get_string("com_1"))
all = keym.get()
if not all:
return await xx.eor("`You haven't approved anyone yet!`", time=5)
users = []
for i in all:
try:
name = get_display_name(await ultroid_bot.get_entity(i))
except BaseException:
name = ""
users.append([name.strip(), str(i)])
with open("approved_pms.txt", "w") as list_appr:
if tabulate:
list_appr.write(
tabulate(users, headers=["UserName", "UserID"], showindex="always")
)
else:
text = "".join(f"[{user[-1]}] - {user[0]}" for user in users)
list_appr.write(text)
await event.reply(
f"List of users approved by [{OWNER_NAME}](tg://user?id={OWNER_ID})",
file="approved_pms.txt",
)
await xx.delete()
remove("approved_pms.txt")
@callback(
re.compile(
b"approve_(.*)",
),
from_users=[ultroid_bot.uid],
)
async def apr_in(event):
uid = int(event.data_match.group(1).decode("UTF-8"))
if uid in DEVLIST:
await event.edit("It's a dev! Approved!")
if not keym.contains(uid):
keym.add(uid)
try:
await ultroid_bot.edit_folder(uid, folder=0)
except BaseException:
pass
try:
user = await ultroid_bot.get_entity(uid)
except BaseException:
return await event.delete()
await event.edit(
f"#APPROVED\n\n<b>{inline_mention(user, html=True)}</b> [<code>{user.id}</code>] <code>was approved to PM you!</code>",
buttons=[
[
Button.inline("Disapprove PM", data=f"disapprove_{uid}"),
Button.inline("Block", data=f"block_{uid}"),
],
],
parse_mode="html",
)
await delete_pm_warn_msgs(uid)
await event.answer("Approved.", alert=True)
else:
await event.edit(
"`User may already be approved.`",
buttons=[
[
Button.inline("Disapprove PM", data=f"disapprove_{uid}"),
Button.inline("Block", data=f"block_{uid}"),
],
],
)
@callback(
re.compile(
b"disapprove_(.*)",
),
from_users=[ultroid_bot.uid],
)
async def disapr_in(event):
uid = int(event.data_match.group(1).decode("UTF-8"))
if keym.contains(uid):
keym.remove(uid)
try:
user = await ultroid_bot.get_entity(uid)
except BaseException:
return await event.delete()
await event.edit(
f"#DISAPPROVED\n\n<b>{inline_mention(user, html=True)}</b> [<code>{user.id}</code>] <code>was disapproved to PM you!</code>",
buttons=[
[
Button.inline("Approve PM", data=f"approve_{uid}"),
Button.inline("Block", data=f"block_{uid}"),
],
],
parse_mode="html",
)
await event.answer("Disapproved.", alert=True)
else:
await event.edit(
"`User was never approved!`",
buttons=[
[
Button.inline("Disapprove PM", data=f"disapprove_{uid}"),
Button.inline("Block", data=f"block_{uid}"),
],
],
)
@callback(
re.compile(
b"block_(.*)",
),
from_users=[ultroid_bot.uid],
)
async def blck_in(event):
uid = int(event.data_match.group(1).decode("UTF-8"))
try:
await ultroid_bot(BlockRequest(uid))
except BaseException:
pass
try:
user = await ultroid_bot.get_entity(uid)
except BaseException:
return await event.delete()
await event.edit(
f"BLOCKED\n\n<b>{inline_mention(user, html=True)}</b> [<code>{user.id}</code>] <code>was blocked!</code>",
buttons=Button.inline("UnBlock", data=f"unblock_{uid}"),
parse_mode="html",
)
await event.answer("Blocked.", alert=True)
@callback(
re.compile(
b"unblock_(.*)",
),
from_users=[ultroid_bot.uid],
)
async def unblck_in(event):
uid = int(event.data_match.group(1).decode("UTF-8"))
try:
await ultroid_bot(UnblockRequest(uid))
except BaseException:
pass
try:
user = await ultroid_bot.get_entity(uid)
except BaseException:
return await event.delete()
await event.edit(
f"#UNBLOCKED\n\n<b>{inline_mention(user, html=True)}</b> [<code>{user.id}</code>] <code>was unblocked!</code>",
buttons=Button.inline("Block", data=f"block_{uid}"),
parse_mode="html",
)
await event.answer("Unblocked.", alert=True)
@callback("deletedissht")
async def ytfuxist(e):
try:
await e.answer("Deleted.")
await e.delete()
except BaseException:
await ultroid_bot.delete_messages(e.chat_id, e.id)
@in_pattern(re.compile("ip_(.*)"), owner=True)
async def in_pm_ans(event):
from_user = int(event.pattern_match.group(1).strip())
try:
warns = U_WARNS[from_user]
except Exception as e:
LOGS.info(e)
warns = "?"
try:
msg_ = WARN_MSGS[from_user]
except KeyError:
msg_ = "**PMSecurity of {OWNER_NAME}**"
wrns = f"{warns}/{WARNS}"
buttons = [
[
Button.inline("Warns", data=f"admin_only{from_user}"),
Button.inline(wrns, data=f"don_{wrns}"),
]
]
include_media = True
mime_type, res = None, None
cont = None
try:
ext = PMPIC.split(".")[-1].lower()
except (AttributeError, IndexError):
ext = None
if ext in ["img", "jpg", "png"]:
_type = "photo"
mime_type = "image/jpg"
elif ext in ["mp4", "mkv", "gif"]:
mime_type = "video/mp4"
_type = "gif"
else:
try:
res = resolve_bot_file_id(PMPIC)
except ValueError:
pass
if res:
res = [
await event.builder.document(
res,
title="Inline PmPermit",
description="~ @TeamUltroid",
text=msg_,
buttons=buttons,
link_preview=False,
)
]
else:
_type = "article"
include_media = False
if not res:
if include_media:
cont = types.InputWebDocument(PMPIC, 0, mime_type, [])
res = [
event.builder.article(
title="Inline PMPermit.",
type=_type,
text=msg_,
description="@TeamUltroid",
include_media=include_media,
buttons=buttons,
thumb=cont,
content=cont,
)
]
await event.answer(res, switch_pm="• Ultroid •", switch_pm_param="start")
@callback(re.compile("admin_only(.*)"), from_users=[ultroid_bot.uid])
async def _admin_tools(event):
chat = int(event.pattern_match.group(1).strip())
await event.edit(
buttons=[
[
Button.inline("Approve PM", data=f"approve_{chat}"),
Button.inline("Block PM", data=f"block_{chat}"),
],
[Button.inline("« Back", data=f"pmbk_{chat}")],
],
)
@callback(re.compile("don_(.*)"))
async def _mejik(e):
data = e.pattern_match.group(1).strip().decode("utf-8").split("/")
text = "👮♂ Warn Count : " + data[0]
text += "\n🤖 Total Warn Count : " + data[1]
await e.answer(text, alert=True)
@callback(re.compile("pmbk_(.*)"))
async def edt(event):
from_user = int(event.pattern_match.group(1).strip())
try:
warns = U_WARNS[from_user]
except Exception as e:
LOGS.info(str(e))
warns = "0"
wrns = f"{warns}/{WARNS}"
await event.edit(
buttons=[
[
Button.inline("Warns", data=f"admin_only{from_user}"),
Button.inline(wrns, data=f"don_{wrns}"),
]
],
)
|
TeamUltroid/Ultroid
|
plugins/pmpermit.py
|
pmpermit.py
|
py
| 29,216 |
python
|
en
|
code
| 2,615 |
github-code
|
6
|