blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
283
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
41
| license_type
stringclasses 2
values | repo_name
stringlengths 7
96
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 58
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 12.7k
662M
⌀ | star_events_count
int64 0
35.5k
| fork_events_count
int64 0
20.6k
| gha_license_id
stringclasses 11
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 43
values | src_encoding
stringclasses 9
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 7
5.88M
| extension
stringclasses 30
values | content
stringlengths 7
5.88M
| authors
sequencelengths 1
1
| author
stringlengths 0
73
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dcf142b87156ed979564464d24b4927fc31cc95e | d1c7bb0b89f8575ccb077fe6718f9f33aa9447ee | /generate_gmail_logo/merge_logo.py | 2c8e7d7fec89620c31be1bd1e0b326453c39b65b | [] | no_license | ZeqinZheng/verified_logo_tool | d6c949523ed103475a24e8cf56c893bdc71eac1b | 05cd82ca6ee228ead7d5553e732aa4fe7380eeb6 | refs/heads/master | 2023-02-28T10:05:14.795801 | 2021-02-01T07:16:48 | 2021-02-01T07:16:48 | 330,213,866 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,150 | py | from PIL import Image, ImageChops
import json
import os
# Errors raised while opening logo images; reported at the end of the run.
li_exceptions = []
# Runtime configuration loaded from config.json by load_parameters().
config = {}
config_keys = ["input_folder_path", "output_folder_path"]
# Output canvas size and source-logo size, set by calculate_logo_size().
gmail_logo_size, verified_logo_size = None, None
# Paste offset of the logo on the canvas, set by calculate_position().
pos_x, pos_y = None, None
def load_parameters():
    """Load and validate config.json into the module-level ``config`` dict.

    Exits the process with status -1 when config.json is missing, a required
    key is absent, or either configured folder does not exist.
    """
    # os.path.isfile avoids listing the whole cwd just to test one name.
    if not os.path.isfile("config.json"):
        print("ERROR", "config.json not found")
        exit(-1)
    with open("config.json", "r") as f:
        js = json.load(f)
    for key in config_keys:
        if key not in js:
            print(f"config file does not have {key}")
            exit(-1)
        config[key] = str(js[key])
    if not os.path.isdir(config["input_folder_path"]) or not os.path.isdir(config["output_folder_path"]):
        print("Invalid input folder path OR output folder path")
        exit(-1)
def get_filenames(abs_path):
    """Return the directory listing of *abs_path* (side effect: chdir there)."""
    os.chdir(abs_path)
    return os.listdir()
def calculate_logo_size(filepath):
    """Derive output-canvas and logo sizes from the logo at *filepath*.

    Sets the module-level ``verified_logo_size`` (source logo size) and
    ``gmail_logo_size`` (fixed 1667px-wide canvas with the logo's height).
    """
    global gmail_logo_size, verified_logo_size
    # BUG FIX: Image.open keeps the file handle open; the context manager
    # closes it once the size has been read.
    with Image.open(filepath) as logo:
        verified_logo_size = logo.size
        gmail_logo_size = (1667, logo.size[1])
def calculate_position():
    """Compute the paste offset that horizontally centres the verified logo."""
    global pos_x, pos_y
    pos_x = int(gmail_logo_size[0] / 2) - int(verified_logo_size[0] / 2)
    pos_y = 0
def merge_images(file_path):
    """Paste the logo at *file_path* onto a white canvas.

    Returns the merged image, or None if the file could not be opened
    (the error text is appended to ``li_exceptions``).
    """
    # new_img.paste(template, (0, 0))
    try:
        verified_logo = Image.open(file_path)
    except Exception as e:
        li_exceptions.append(str(e))
        return None
    canvas = Image.new("RGB", gmail_logo_size, (255, 255, 255))
    canvas.paste(verified_logo, (pos_x, pos_y))
    return canvas
# --- script entry point ---------------------------------------------------
load_parameters()
file_li = get_filenames(config["input_folder_path"])
# Sizes are derived from the first logo only; assumes every logo in the
# input folder has the same dimensions -- TODO confirm.
calculate_logo_size(config["input_folder_path"] + "/" + file_li[0])
calculate_position()
for index, filename in enumerate(file_li):
    filepath = config["input_folder_path"] + "/" + filename
    img = merge_images(filepath)
    # Unreadable files are skipped; their errors are printed at the end.
    if img is None:
        continue
    img.save(config["output_folder_path"] + "/" + filename)
    print(index, filename, "saved")
print("finished")
print(li_exceptions)
| [
"[email protected]"
] | |
eab5037692f341b206eec399b2fa954fed14f0c4 | d6658ef4473a08685f89c0ad2d4d3710df89afc9 | /account/migrations/0009_auto_20210423_1228.py | 39736a51296b26dba45bdebed1d14bd2b0226d43 | [] | no_license | Jamezslim90/ChatApp | a99d357bf79e86b5dad8613792db746ac18175be | a2667163399586221ad20e1b899d281436ec9c90 | refs/heads/main | 2023-09-04T12:29:31.466113 | 2021-05-09T12:46:35 | 2021-05-09T12:46:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 420 | py | # Generated by Django 3.1.7 on 2021-04-23 12:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: relax the ``Account.bio`` field."""

    dependencies = [
        ('account', '0008_auto_20210423_1227'),
    ]

    operations = [
        migrations.AlterField(
            model_name='account',
            name='bio',
            # bio becomes optional: blank form input and NULL are both allowed.
            field=models.TextField(blank=True, default='', max_length=100, null=True),
        ),
    ]
| [
"[email protected]"
] | |
4df23062007da5083161682ebd8f2c53d0b18dca | 2c47c2357ab3e64f25ea1c3a587947049615cb02 | /conversational/views.py | a2ab43876d155e00a00d72586f571da74f6c2217 | [] | no_license | MiraRose/conversations | 9a36a9019c59a531d778137960ba9e8788a891ff | 54854c90cc45468abd34e9aff37a7d8789ba3c0b | refs/heads/master | 2023-05-15T07:32:33.424816 | 2021-06-11T11:40:48 | 2021-06-11T11:40:48 | 375,561,294 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,770 | py | from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.template import loader
from django.urls import reverse
from conversational.forms import ConversationForm, MessageForm, ThoughtForm, SearchConversationForm, SearchMessageForm
from conversational.models import Conversation, Message, Thought
def index(request):
    """Render the home page: all conversations plus the add and search forms."""
    context = {
        'conversation_list': Conversation.objects.order_by('date'),
        'form': ConversationForm(),
        'searchconversationform': SearchConversationForm(),
        'searchmessageform': SearchMessageForm(),
    }
    template = loader.get_template('conversational/index.html')
    return HttpResponse(template.render(context, request))
def detail(request, conversation_id):
    """Show one conversation with forms to add messages and thoughts."""
    try:
        conversation = Conversation.objects.get(pk=conversation_id)
    except Conversation.DoesNotExist:
        raise Http404("Conversation does not exist")
    context = {
        'conversation': conversation,
        'messageform': MessageForm(),
        'thoughtform': ThoughtForm(),
    }
    return render(request, 'conversational/detail.html', context)
def addmessage(request, conversation_id):
    """Attach a POSTed message to a conversation, then redirect to its page.

    Invalid form input is silently ignored (no message is created); the
    user is redirected back to the conversation page either way.
    """
    conversation = get_object_or_404(Conversation, pk=conversation_id)
    form = MessageForm(request.POST)
    if form.is_valid():
        create_message(form, conversation).save()
    # BUG FIX: the old code passed a context dict as a second positional
    # argument to HttpResponseRedirect, where it becomes the response *body*;
    # redirects carry no template context, so it is dropped here.
    return HttpResponseRedirect(reverse('detail', args=(conversation_id,)))
def addthought(request, conversation_id, message_id):
    """Attach a POSTed thought to a message, then redirect to the conversation.

    404s early when the conversation does not exist; invalid form input is
    silently ignored.
    """
    get_object_or_404(Conversation, pk=conversation_id)
    form = ThoughtForm(request.POST)
    if form.is_valid():
        message = get_object_or_404(Message, pk=message_id)
        create_thought(form, message).save()
    # BUG FIX: as in addmessage, the context dict previously passed to
    # HttpResponseRedirect was treated as response content and is removed.
    return HttpResponseRedirect(reverse('detail', args=(conversation_id,)))
def addconversation(request):
    """Create a conversation from POSTed form data, then go back home."""
    form = ConversationForm(request.POST)
    if form.is_valid():
        create_conversation(form).save()
    return HttpResponseRedirect(reverse('index'))
def searchconversationtitles(request):
    """Case-insensitive search over conversation titles."""
    searchconversation = SearchConversationForm(request.POST)
    if not searchconversation.is_valid():
        # BUG FIX: the old code fell through and returned None on invalid
        # input (an UnboundLocalError / HTTP 500); go back home instead.
        return HttpResponseRedirect(reverse('index'))
    searchterm = searchconversation.cleaned_data['searchterm']
    context = {
        'queryset': Conversation.objects.filter(title__icontains=searchterm),
        'form': ConversationForm(request.POST),
    }
    template = loader.get_template('conversational/conversationsearch.html')
    return HttpResponse(template.render(context, request))
def searchmessagetext(request):
    """Case-insensitive search over message text."""
    searchmessage = SearchMessageForm(request.POST)
    if not searchmessage.is_valid():
        # BUG FIX: previously returned None (HTTP 500) on invalid input.
        return HttpResponseRedirect(reverse('index'))
    searchterm = searchmessage.cleaned_data['searchterm']
    context = {
        'queryset': Message.objects.filter(text__icontains=searchterm),
        'searchmessageform': SearchMessageForm(),
    }
    template = loader.get_template('conversational/messagesearch.html')
    return HttpResponse(template.render(context, request))
def create_conversation(form):
    """Build (but do not save) a Conversation from a valid ConversationForm."""
    data = form.cleaned_data
    conversation = Conversation()
    conversation.title = data['title']
    conversation.date = data['date']
    return conversation
def create_thought(form, message):
    """Build (but do not save) a Thought attached to *message*."""
    data = form.cleaned_data
    thought = Thought()
    thought.text = data['text']
    thought.datetime = data['date']
    thought.message = message
    return thought
def create_message(form, conversation):
    """Build (but do not save) a Message attached to *conversation*."""
    data = form.cleaned_data
    message = Message()
    message.text = data['text']
    message.datetime = data['date']
    message.conversation = conversation
    return message
| [
"[email protected]"
] | |
79b694b8b4cbaa2d7a37e7a37e8d2850553d4835 | 7f25bd671edb2dbe19dc72397cd4dd4471acca02 | /code/model_part_vst.py | 0ef773b5df8bd66c681d684866d1ae800a787149 | [
"MIT"
] | permissive | Mehooz/VGSNet | 944637a4b9727b04a78e487df3a7eaa2fb93485a | 18ddae20fb3ccc440a38bd8b23cba8fcaa753518 | refs/heads/master | 2022-10-09T18:50:30.975116 | 2020-06-10T08:50:47 | 2020-06-10T08:50:47 | 241,087,000 | 8 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,281 | py | """
This file defines part point cloud VAE/AE model.
"""
import torch
import torch.nn as nn
from chamfer_distance import ChamferDistance
from collections import namedtuple
from torchvision.models import vgg16,resnet18,resnet50
from loss import *
from projection import *
import math
class PartFeatSampler(nn.Module):
    """Maps a part feature to itself (AE mode) or to a VAE sample + KLD term.

    In probabilistic mode the output is ``cat([sample, kld], dim=1)`` and is
    therefore twice the feature size.
    """

    def __init__(self, feature_size, probabilistic=True):
        super(PartFeatSampler, self).__init__()
        self.probabilistic = probabilistic
        self.mlp2mu = nn.Linear(feature_size, feature_size)
        self.mlp2var = nn.Linear(feature_size, feature_size)

    def forward(self, x):
        mu = self.mlp2mu(x)
        if not self.probabilistic:
            return mu
        logvar = self.mlp2var(x)
        std = logvar.mul(0.5).exp_()
        eps = torch.randn_like(std)
        # Per-dimension KL divergence against a unit Gaussian prior.
        kld = mu.pow(2).add_(logvar.exp()).mul_(-1).add_(1).add_(logvar)
        sample = eps.mul(std).add_(mu)
        return torch.cat([sample, kld], 1)
class PartImgSampler(nn.Module):
    """Encodes an RGB image into a part feature via a pretrained ResNet-18."""

    def __init__(self, feat_len):
        super(PartImgSampler, self).__init__()
        self.resnet = resnet18(pretrained=True)
        self.mlp = nn.Linear(1000, feat_len)

    def forward(self, img):
        # ResNet's 1000-way logits are projected down to the feature length.
        return self.mlp(self.resnet(img))
class PartEncoder(nn.Module):
    """PointNet-style point-cloud encoder fused with an image encoder.

    forward() returns the concatenation of the image feature and the
    (optionally probabilistic) point-cloud feature.
    """

    def __init__(self, feat_len, probabilistic=False):
        super(PartEncoder, self).__init__()
        self.conv1 = nn.Conv1d(3, 64, 1)
        self.conv2 = nn.Conv1d(64, 128, 1)
        self.conv3 = nn.Conv1d(128, 128, 1)
        self.conv4 = nn.Conv1d(128, feat_len, 1)

        self.bn1 = nn.BatchNorm1d(64)
        self.bn2 = nn.BatchNorm1d(128)
        self.bn3 = nn.BatchNorm1d(128)
        self.bn4 = nn.BatchNorm1d(feat_len)

        self.pc_sampler = PartFeatSampler(feature_size=feat_len, probabilistic=probabilistic)
        self.img_sampler = PartImgSampler(feat_len=feat_len)

    def forward(self, pc, img):
        # (B, N, 3) -> (B, 3, N), the layout Conv1d expects.
        net = pc.transpose(2, 1)
        for conv, bn in ((self.conv1, self.bn1), (self.conv2, self.bn2),
                         (self.conv3, self.bn3), (self.conv4, self.bn4)):
            net = torch.relu(bn(conv(net)))
        # Symmetric max-pool over points gives an order-invariant feature.
        net = net.max(dim=2)[0]
        return torch.cat((self.img_sampler(img), self.pc_sampler(net)), 1)
class PartImgDecoder(nn.Module):
    """Decode a fused 512-d latent back into a point cloud and an RGB image.

    The latent is split in half: dims [0, 256) drive the image branch and
    dims [256, 512) drive the point-cloud branch.
    """

    def __init__(self, feat_len, num_point):
        super(PartImgDecoder, self).__init__()
        self.num_point = num_point

        # Point-cloud branch: 4x8x8 latent map -> deconv stack -> 3000 xyz points.
        self.pc_deconv1 = nn.ConvTranspose2d(4, 8, (3, 3))
        self.pc_deconv2 = nn.ConvTranspose2d(8, 16, (3, 3))
        self.pc_deconv3 = nn.ConvTranspose2d(16, 32, (5, 5))
        self.pc_mlp = nn.Linear(32 * 16 * 16, 9000)

        # Image branch: 8x16x16 latent map -> deconv stack -> 3x224x224 image.
        self.img_deconv1 = nn.ConvTranspose2d(8, 16, (5, 5))
        self.img_deconv2 = nn.ConvTranspose2d(16, 32, (5, 5))
        self.img_deconv3 = nn.ConvTranspose2d(32, 48, (5, 5))
        self.img_deconv4 = nn.ConvTranspose2d(48, 48, (5, 5))
        self.img_deconv5 = nn.ConvTranspose2d(48, 48, (7, 7))
        self.img_deconv6 = nn.ConvTranspose2d(48, 48, (7, 7))
        self.img_deconv7 = nn.ConvTranspose2d(48, 48, (7, 7))
        self.img_deconv8 = nn.ConvTranspose2d(48, 48, (7, 7))
        self.img_mlp1 = nn.Linear(256, 1024)
        self.img_mlp2 = nn.Linear(1024, 2048)

        self.chamferLoss = ChamferDistance()
        self.mse = nn.MSELoss()
        # 64x64 grid-distance table; kept for the multi-view projection loss
        # that is currently disabled.
        self.gdt = grid_dist(64, 64)

    def forward(self, net):
        img = net[:, 0:256]
        pc = net[:, 256:512]

        pc = pc.view(net.shape[0], -1, 8, 8)
        pc = self.pc_deconv1(pc)
        pc = self.pc_deconv2(pc)
        pc = self.pc_deconv3(pc)
        pc = pc.view(-1, 32 * 16 * 16)
        pc = self.pc_mlp(pc)
        pc = pc.view(net.shape[0], 3000, 3)

        img = self.img_mlp1(img)
        img = self.img_mlp2(img)
        img = img.view(net.shape[0], -1, 16, 16)
        img = self.img_deconv1(img)
        img = self.img_deconv2(img)
        img = self.img_deconv3(img)
        img = self.img_deconv4(img)
        img = self.img_deconv5(img)
        img = self.img_deconv6(img)
        img = self.img_deconv7(img)
        img = self.img_deconv8(img)
        img = img.view(net.shape[0], 3, 224, 224)

        return pc, img

    def loss(self, pred_pc, gt_pc, pred_img, gt_img, n_views, batch_size, device, n_points):
        """Return (scaled symmetric chamfer loss on points, MSE loss on images).

        n_views, batch_size, device and n_points are unused while the
        multi-view projection loss is disabled; they are kept so existing
        callers do not break.
        """
        dist1, dist2 = self.chamferLoss(pred_pc, gt_pc)
        loss = (dist1.mean(dim=1) + dist2.mean(dim=1)) / 2
        avg_loss = loss.mean() * 3000
        mse_loss = self.mse(pred_img, gt_img)
        # BUG FIX: the old code drew random view angles (views_x / views_y)
        # on every call even though nothing consumed them; the dead
        # commented-out projection-loss block has also been removed.
        return avg_loss, mse_loss
if __name__ == '__main__':
    # Quick smoke test of the decoder loss on random tensors (requires CUDA).
    # pe = PartEncoder(256).to('cuda')
    # net = pe(torch.randn(1,3000,3).to('cuda'),torch.randn(1,3,224,224).to('cuda'))
    pid = PartImgDecoder(512,3000).to('cuda')
    # pid (net)
    loss = pid.loss(torch.randn(1,3000,3).to('cuda'),torch.randn(1,3000,3).to('cuda'),torch.randn(1,3,224,224).to('cuda'),torch.randn(1,3,224,224).to('cuda'),4,1,'cuda',3000)
    print(loss)
| [
"[email protected]"
] | |
7d470bd8955ad6a22916de891735d8dcec7d2b22 | a0f02a674f6a0c19effde195eb77f7ae5aba36bc | /blog/blog/settings.py | 0277d602e8dc5f0832be656b1685779c218f16e7 | [] | no_license | kdt7058/BlogCreater | cccb1ef5dd26b6765fefcd810e148f6d8e9625b7 | 798634cd008aad0f46c03982fde93fe5e04091c2 | refs/heads/main | 2023-07-16T01:22:30.328310 | 2021-08-25T16:38:12 | 2021-08-25T16:38:12 | 399,874,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,424 | py | """
Django settings for blog project.
Generated by 'django-admin startproject' using Django 3.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os # new
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'django-insecure-fzu$@sm9m44v1^2l@o)#xbo4z&e)-yxw61+i6s*9yy!cf3$7d%'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty list: only localhost is served while DEBUG is True; add real hosts
# before deploying.
ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'blog.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Project-level templates live in <repo>/templates.
        'DIRS': [ os.path.join(BASE_DIR,"templates")],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'blog.wsgi.application'


# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}


# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/

STATIC_URL = '/static/'

# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
# NOTE(review): the two comment lines above are misplaced boilerplate --
# DEFAULT_AUTO_FIELD itself is never set in this file.

STATICFILES_DIRS = [
    os.path.join(BASE_DIR,"static")
]
| [
"[email protected]"
] | |
9af28fc12492109bacc6ba4ac0a26793968264a3 | ed0cdeef4bbab433a46d7e196e63a8163c8d43cf | /create_databases.py | 5658fe7e987ed7a53e6805fec23be6a7d70cd699 | [] | no_license | Mechanio/student_perfomance_journal | c7b9309e465c9757361605908ab78969399b8f91 | 8c92bf48dc6abf6bb0b92362dfcd6e95242fc682 | refs/heads/main | 2023-02-25T06:02:55.114454 | 2021-02-12T19:55:27 | 2021-02-12T19:55:27 | 338,412,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,219 | py | import sqlite3
conn = sqlite3.connect('data.sqlite')
cur = conn.cursor()
cur.executescript('''
DROP TABLE IF EXISTS Students;
DROP TABLE IF EXISTS Teachers;
DROP TABLE IF EXISTS Classes;
DROP TABLE IF EXISTS Subjects;
DROP TABLE IF EXISTS Students_Classes;
DROP TABLE IF EXISTS Marks;
CREATE TABLE Students (
id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE,
fullname TEXT UNIQUE,
class_id INTEGER,
login TEXT UNIQUE,
password TEXT
);
CREATE TABLE Teachers (
id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE,
fullname TEXT UNIQUE,
login TEXT UNIQUE,
password TEXT,
subject_id1 INTEGER,
subject_id2 INTEGER,
subject_id3 INTEGER,
subject_id4 INTEGER
);
CREATE TABLE Classes (
id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE,
title TEXT UNIQUE
);
CREATE TABLE Subjects (
id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE,
title TEXT UNIQUE,
class_id1 INTEGER,
class_id2 INTEGER,
class_id3 INTEGER,
class_id4 INTEGER
);
CREATE TABLE Marks (
subject_id INTEGER,
teacher_id INTEGER,
student_id INTEGER,
class_id INTEGER,
dating TEXT,
mark INTEGER
);
''')
conn.commit() | [
"[email protected]"
] | |
4d1dc6663b84adb8bb576cee22ef31b75c0f2f48 | 58deef84e815e231261e7661a971606c8c5e2d4b | /projects/pool_partner_price_service/pool_partner_price_service/models.py | eb2ae9c56d716d20b46eab776faf9d641492ad70 | [] | no_license | odranreb-g/tcc_puc | 9eb876476207ffe8607d77600b7e003345959112 | 9e4ebcee5adc513b909036c41b2631ea42e2f888 | refs/heads/master | 2023-08-22T04:21:40.917955 | 2021-10-16T20:57:12 | 2021-10-16T20:57:12 | 418,117,929 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 554 | py | import uuid
from sqlalchemy import Column, Date, DateTime, Float, String
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.ext.declarative import declarative_base
from database import engine
Base = declarative_base()
class Delivery(Base):
__tablename__ = "deliveries_delivery"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
freight_price = Column(Float(precision=2))
partner_route_id = Column(UUID(as_uuid=True), nullable=True)
modified = Column(DateTime)
Base.metadata.create_all(engine)
| [
"[email protected]"
] | |
aeff10f43966e2020670336706cfc74d7bbbc391 | c5f8d5ca44dccd842b8f54ab5beb4b7877a3aaf4 | /scraper.py | 9413931a3411ab26edbb98fbcb4016232bd6772f | [] | no_license | Dennis-Cao/dc_ufc_fights_db | daf7c002f83bc99dde79855a28d1b0e17e5a943c | c1a6b13f7131e70a665394f3fc1f960ddb46f222 | refs/heads/master | 2022-07-09T17:00:22.230975 | 2019-06-07T16:33:40 | 2019-06-07T16:33:40 | 189,447,797 | 2 | 0 | null | 2022-06-21T22:03:31 | 2019-05-30T16:33:52 | Python | UTF-8 | Python | false | false | 6,491 | py | import requests
from bs4 import BeautifulSoup
import pandas as pd
import numpy as np
import string
import re
import datetime
import sqlite3
import time
import os
# Scrape accumulators -- one entry per fight, filled in by scrape_data().
all_links = []
location = []
events = []
f1 = []
f2 = []
winner = []
f1_odds = []
f2_odds = []
label = []
favourite = []
def scrape_data():
    """Scrape every UFC event page linked from betmma.tips and append one
    entry per fight to the module-level accumulator lists.

    Always returns None; the results live in the global lists. Draw results
    are skipped. Performs network requests (one per event page).
    """
    # set up page to extract table
    data = requests.get("https://www.betmma.tips/mma_betting_favorites_vs_underdogs.php?Org=1")
    soup = BeautifulSoup(data.text, 'html.parser')
    # table with 98% width
    table = soup.find('table', {'width': "98%"})
    # find all links in that table
    links = table.find_all('a', href=True)
    # append all links to a list
    for link in links:
        all_links.append("https://www.betmma.tips/"+link.get('href'))
    # test for one use case
    for link in all_links:
        print(f"Now currently scraping link: {link}")
        data = requests.get(link)
        soup = BeautifulSoup(data.text, 'html.parser')
        # polite throttle between event-page requests
        time.sleep(1)
        # specific table with the information
        rows = soup.find_all('table', {'cellspacing': "5"})
        for row in rows:
            # check for draw, if draw, then skip
            # dictionary of won and lost
            odds = row.find_all('td', {'align': "center", 'valign': "middle"})
            # to avoid taking in draws
            if odds[0].text not in ['WON', 'LOST']:
                continue
            # event name
            h1 = soup.find("h1")
            # location and date
            h2 = soup.find("h2")
            events.append(h1.text)
            location.append(h2.text)
            # cells 2/3 hold decimal odds formatted like " @1.50"
            odds_f1 = float(odds[2].text.strip(" @"))
            odds_f2 = float(odds[3].text.strip(" @"))
            f1_odds.append(odds_f1)
            f2_odds.append(odds_f2)
            # how to generate label
            odds_dict = {}
            odds_dict[odds[0].text] = odds_f1
            odds_dict[odds[1].text] = odds_f2
            # if the winner carried the higher odds the underdog won
            if odds_dict["WON"] > odds_dict["LOST"]:
                label.append("Underdog")
            else:
                label.append("Favourite")
            # lower decimal odds == favourite
            if odds_f1 > odds_f2:
                favourite.append("f2")
            else:
                favourite.append("f1")
            fighters = row.find_all('a', attrs={'href': re.compile("^fighter_profile.php")})
            f1.append(fighters[0].text)
            f2.append(fighters[1].text)
            winner.append(fighters[2].text)
    return None
def create_df():
    """Assemble the scraped per-fight lists into one labelled DataFrame."""
    df = pd.DataFrame({
        "Events": events,
        "Location": location,
        "Fighter1": f1,
        "Fighter2": f2,
        "Winner": winner,
        "fighter1_odds": f1_odds,
        "fighter2_odds": f2_odds,
        "Favourite": favourite,
        "Label": label,
    })
    print(f"Successfully scraped {df.shape[0]} fights and last fight card was {df.iloc[-1, :]['Events']} {df.iloc[-1, :]['Location']}")
    print(df["Label"].value_counts()/len(df))
    return df
# functions to compute deltas
def odds_delta(df):
    """Betting-odds gap, favourite minus underdog."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["fighter1_odds"] - df["fighter2_odds"]) if fav_is_f1 else (df["fighter2_odds"] - df["fighter1_odds"])
def reach_delta(df):
    """REACH advantage of the betting favourite over the underdog."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["REACH_x"] - df["REACH_y"]) if fav_is_f1 else (df["REACH_y"] - df["REACH_x"])
def slpm_delta(df):
    """Strikes-landed-per-minute advantage of the favourite."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["SLPM_x"] - df["SLPM_y"]) if fav_is_f1 else (df["SLPM_y"] - df["SLPM_x"])
def sapm_delta(df):
    """Strikes-absorbed-per-minute advantage of the favourite."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["SAPM_x"] - df["SAPM_y"]) if fav_is_f1 else (df["SAPM_y"] - df["SAPM_x"])
def stra_delta(df):
    """Striking-accuracy advantage of the favourite."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["STRA_x"] - df["STRA_y"]) if fav_is_f1 else (df["STRA_y"] - df["STRA_x"])
def strd_delta(df):
    """Striking-defence advantage of the favourite."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["STRD_x"] - df["STRD_y"]) if fav_is_f1 else (df["STRD_y"] - df["STRD_x"])
def td_delta(df):
    """Takedowns-landed advantage of the favourite."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["TD_x"] - df["TD_y"]) if fav_is_f1 else (df["TD_y"] - df["TD_x"])
def tda_delta(df):
    """Takedown-accuracy advantage of the favourite."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["TDA_x"] - df["TDA_y"]) if fav_is_f1 else (df["TDA_y"] - df["TDA_x"])
def tdd_delta(df):
    """Takedown-defence advantage of the favourite."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["TDD_x"] - df["TDD_y"]) if fav_is_f1 else (df["TDD_y"] - df["TDD_x"])
def suba_delta(df):
    """Submission-attempts advantage of the favourite."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["SUBA_x"] - df["SUBA_y"]) if fav_is_f1 else (df["SUBA_y"] - df["SUBA_x"])
def age_delta(df):
    """Age difference, favourite minus underdog."""
    fav_is_f1 = df["Favourite"] == "f1"
    return (df["Age_x"] - df["Age_y"]) if fav_is_f1 else (df["Age_y"] - df["Age_x"])
def merge_data(df):
    """Join the scraped fight rows with the fighter-stats DB hosted on
    morph.io and compute favourite-minus-underdog deltas per stat column.

    Requires the MORPH_API_KEY environment variable (raises KeyError when
    unset) and performs a network request to the morph.io API.
    """
    # We're always asking for json because it's the easiest to deal with
    morph_api_url = "https://api.morph.io/Dennis-Cao/dc_ufc_fighters_db/data.json"
    # Keep this key secret using morph secret variables
    morph_api_key = os.environ['MORPH_API_KEY']
    r = requests.get(morph_api_url, params={
      'key': morph_api_key,
      'query': "select * from data"
    })
    j = r.json()
    # fighters db dataset to me merged
    fighters_db = pd.DataFrame.from_dict(j)
    # NOTE(review): inner joins on exact name match silently drop fights
    # whose fighter names do not appear in the fighters DB.
    test = pd.merge(df, fighters_db, left_on=["Fighter1"], right_on=["NAME"])
    test2 = pd.merge(test, fighters_db, left_on=["Fighter2"], right_on=["NAME"])
    # _x columns come from Fighter1's stats, _y from Fighter2's.
    test2["Odds_delta"] = test2.apply(odds_delta, axis=1)
    test2["REACH_delta"] = test2.apply(reach_delta, axis=1)
    test2["SLPM_delta"] = test2.apply(slpm_delta, axis=1)
    test2["SAPM_delta"] = test2.apply(sapm_delta, axis=1)
    test2["STRA_delta"] = test2.apply(stra_delta, axis=1)
    test2["STRD_delta"] = test2.apply(strd_delta, axis=1)
    test2["TD_delta"] = test2.apply(td_delta, axis=1)
    test2["TDA_delta"] = test2.apply(tda_delta, axis=1)
    test2["TDD_delta"] = test2.apply(tdd_delta, axis=1)
    test2["SUBA_delta"] = test2.apply(suba_delta, axis=1)
    test2["AGE_delta"] = test2.apply(age_delta, axis=1)
    final_df = test2[['Events', 'Location', 'Fighter1', 'Fighter2', 'Favourite', 'Label', 'REACH_delta', 'SLPM_delta', 'SAPM_delta', 'STRA_delta', 'STRD_delta', 'TD_delta', 'TDA_delta', 'TDD_delta', 'SUBA_delta', "AGE_delta", 'Odds_delta']]
    return final_df
# --- script entry point: scrape, merge, persist to sqlite -----------------
scrape_data()
df = create_df()
df = merge_data(df)
conn = sqlite3.connect('data.sqlite')
df.to_sql('data', conn, if_exists='replace')
print('Fights Merged Db successfully constructed and saved')
conn.close()
| [
"[email protected]"
] | |
fce86f1905f3314240858989ab8a11b54a8315b3 | 34deb6cc15393765c9185b08b4b596ee5e93880e | /models/store.py | d044139c2f9f2cea6c4960c4a719c18bda428a26 | [] | no_license | honnie/test-repository | 2da5613b096934c3c0cc4a850208a6c7f4c51d16 | 48297e909998cc9c9412b6949f0538a376150a0d | refs/heads/master | 2023-06-17T14:20:37.835450 | 2021-07-16T14:43:16 | 2021-07-16T14:43:16 | 385,945,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | from db import db
class StoreModel(db.Model):
    """SQLAlchemy model for a store that owns a collection of items."""

    __tablename__ = 'stores'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80))

    # lazy='dynamic': self.items is a query object, evaluated on demand.
    items = db.relationship('ItemModel', lazy='dynamic')

    def __init__(self, name):
        self.name = name

    def json(self):
        """Serialize the store and all of its items to a plain dict."""
        return {'name': self.name, 'items': [item.json() for item in self.items.all()]}

    @classmethod
    def find_by_name(cls, name):
        """Return the first store named *name*, or None when absent."""
        # SELECT * FROM items WHERE name=name LIMIT 1
        return cls.query.filter_by(name=name).first()

    def save_to_db(self):
        """Insert or update this row in the current session."""
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Delete this row from the database."""
        db.session.delete(self)
        db.session.commit()
| [
"[email protected]"
] | |
e9c0557f54b79d2d4fd0f6930a6c985116f7288a | f89fd6b33bd65d07316e368cff7c4b22e11603bf | /fundader_crm/urls.py | 6b4c18e2fac4280bf5c794bce2db40fef1af8a70 | [] | no_license | Javier1221/crm_fundader | e53364ba0d5f13e24c595d5e43978e64fce5afea | 26c0ce30a8f5cb6e517efd6ab045b87422f495b5 | refs/heads/master | 2023-01-01T21:09:21.107814 | 2020-09-23T20:26:23 | 2020-09-23T20:26:23 | 298,085,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 833 | py | """fundader_crm URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import url,include

urlpatterns = [
    path('admin/', admin.site.urls),
    # Everything else is delegated to the course app's URLconf.
    path('',include('course.urls')),
]
| [
"[email protected]"
] | |
01bff7903cc8aa59a879b500eec5d13f743213f7 | c54c1954e4535b534f6faf6afed26be7907da113 | /dice/dice.py | b1d9e2e91c28f00208ec8dca10dfe8197f7c21dd | [] | no_license | adleida/dice | a039db9b780828e866551588606cc795a6daa456 | 5bff51489616cfffd1f1f4517524d900a14868d0 | refs/heads/master | 2018-12-30T02:48:15.017740 | 2015-06-10T10:28:54 | 2015-06-10T10:28:54 | 37,183,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 823 | py | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# File Name: dice/dice.py
# Author: Tate_fan
# mail: [email protected]
# Created Time: 2015年06月01日 星期一 15时30分46秒
# ======================================================================
import dice
import logging
import logging.config
from .flask_app import WebApp, args, cfg
from .utils import get_res_path
from .schemasstore import SchemasStore
from .datagenerator import DataGenerator
logger = logging.getLogger(__name__)
def main():
    """Configure logging from the loaded config and start the Flask app."""
    log_config = cfg.get('logging', {})
    log_config.setdefault('version', 1)
    logging.config.dictConfig(log_config)

    webapp = WebApp()
    webapp.run(
        host=cfg.get("bind", '127.0.0.1'),
        debug=cfg.get("debug", False),
        threaded=True,
    )


if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
9c4aa0ece8d68eb18ff0f56a9193f4994857b239 | d71f549663180bac506849c46cf98d2307cb7dd2 | /PYthonFiles/priceingStruct/priceStrucWickedRide.py | ca2fecd640f40a295f4b2c61318c1378d38ad5fa | [] | no_license | saurabharas/RentalProject | e5a78b31d1a983718a7af311864e54fa7808d5c5 | 297b6a45901715fdfd459458182d5c7e1060dca0 | refs/heads/master | 2020-12-31T06:09:17.366760 | 2017-04-01T08:46:26 | 2017-04-01T08:46:26 | 80,615,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,501 | py | import time
from collections import Counter
from datetime import datetime, timedelta
from selenium import webdriver
from bs4 import BeautifulSoup
from collections import defaultdict
'''
pickUpTotal=datetime.strptime('19-02-2017 12', "%d-%m-%Y %H")
dropTotal=datetime.strptime('22-02-2017 19', "%d-%m-%Y %H")
pickUpDate=pickUpTotal.strftime('%d')
dropDate=dropTotal.strftime('%d')
pickUpTime=pickUpTotal.strftime('%H')
dropTime=dropTotal.strftime('%H')
cost=wickedRideTmingStruc(int(pickUpDate),int(dropDate),int(pickUpTime),int(dropTime),164,41,55,787,1200,pickUpTotal,dropTotal)
print(cost)
'''
'''
#This will give
#weekdayInitialBaseCost=1-4hr(weekday)
#weekdayBaseRate=5-19 rate eg 41
#weekdayFinalBase=above 24 base(weekday) cost eg 787
#weekendFinalBase=above 24 base(weekend) cost eg 1200
#weekendBaseRate=>24 rate eg55
def rateScraper(pickUpDate,pickUpHr,dropDate,dropHr,count1):
#dateReq.strftime('%d-%m-%Y')
browser1=webdriver.Chrome()
browser1.get("http://www.wickedride.com/booking/choose-models?start_date="+pickUpDate.strftime("%d")+"+Feb+2017&start_time="+str(pickUpHr)+"%3A00&end_date="+dropDate.strftime("%d")+"+Feb+2017&end_time="+str(dropHr)+"%3A00&city_id=1&_token=b3TzUROqD1ZI6KpvqAcfVgQkmp3ZNnUtPlx1s7cw")
ps=browser1.page_source
soup=BeautifulSoup(ps,"html.parser")
bikeTitleUL=soup.find_all("ul",{"class":"book_bike_fleet happy_customers item"})
weekdayInitialBaseCostList1=[]
weekdayBaseRateList1=[]
weekdayFinalBaseList1=[]
weekendFinalBaseList1=[]
weekendBaseRateList1=[]
#1255-1200
weekendBaseRateList2=[]
bikeDict={}
listReturned=[]
bikeTitleUL=soup.find_all("ul",{"class":"book_bike_fleet happy_customers item"})
bikeTitleList=[]
if(count1==1):
weekdayInitialBaseCostList=soup.find_all("div",{"class":"price total-price"})
weekdayBikeNameList1=soup.find_all("div",{"class":"price total-price"})
for value in weekdayInitialBaseCostList:
a=value.text.encode("UTF-8")
a1=a.replace("RS ", "")
weekdayInitialBaseCostList1.append(int(a1))
listReturned=weekdayInitialBaseCostList1
elif(count1==2):
weekdayBaseRateList=soup.find_all("div",{"class":"price total-price"})
myDivisionHr=(24-int(pickUpHr))+int(dropHr)
for value in weekdayBaseRateList:
a=value.text.encode("UTF-8")
a1=a.replace("RS ", "")
myReqCost=int(int(a1)/myDivisionHr)
weekdayBaseRateList1.append(myReqCost)
listReturned=weekdayBaseRateList1
elif(count1==3):
weekdayFinalBaseList=soup.find_all("div",{"class":"price total-price"})
for value in weekdayFinalBaseList:
a=value.text.encode("UTF-8")
a1=a.replace("RS ", "")
weekdayFinalBaseList1.append(int(a1))
listReturned=weekdayFinalBaseList1
elif(count1==4):
weekendFinalBaseList=soup.find_all("div",{"class":"price total-price"})
for value in weekendFinalBaseList:
a=value.text.encode("UTF-8")
a1=a.replace("RS ", "")
weekendFinalBaseList1.append(int(a1))
listReturned=weekendFinalBaseList1
elif(count1==5):
weekendBaseRateList=soup.find_all("div",{"class":"price total-price"})
for value in weekendBaseRateList:
a=value.text.encode("UTF-8")
a1=a.replace("RS ", "")
weekendBaseRateList1.append(int(a1))
#for x in range(0,len(weekendBaseRateList1)-1):
#weekendBaseRateList2[x]=int(weekendBaseRateList1[x].encode("UTF-8").replace("RS ", ""))-int(weekendFinalBaseList1[x].encode("UTF-8").replace("RS ", ""))
listReturned=weekendBaseRateList1
for value in bikeTitleUL:
tempList=[]
#print(value)
for value2 in value.contents[0].contents:
tempList.append(value2)
#print(tempList[0].text,tempList[1].contents[0].get("src"))
bikeTitleList.append(tempList[0].text.encode('utf8'))
#print(len(weekendBaseRateList1))
#print(len(weekendFinalBaseList1))
#the syntax is: mydict[key] = "value"
#released["iphone 5S"] = 2013
for x in range(0,len(listReturned)-1):
bikeDict[bikeTitleList[x]]=listReturned[x]
return bikeDict
def weekDayFindOut(pickUpDate,pickUpHr,dropHr,count):
count1=0
weekdayInitialBaseCostList1={}
weekdayBaseRateList1={}
weekdayFinalBaseList1={}
d=()
pickUpHr=int(pickUpDate.strftime("%H"))
dropHr=pickUpHr+3
dropDate=pickUpDate
#print(pickUpDate.strftime("%d"),pickUpHr,dropDate.strftime("%d"),dropHr)
weekdayInitialBaseCostList1=rateScraper(pickUpDate,pickUpHr,dropDate,dropHr,1)
#print(weekdayInitialBaseCostList1)
#print("\n \n")
#print(weekdayBaseRateList1)
pickUpHr1=pickUpHr+3
dropHr1=(pickUpHr1+17)-24
pickUpDate1=pickUpDate
dropDate1=pickUpDate+timedelta(days=1)
#print(pickUpHr1,dropHr1,pickUpDate1.strftime("%d"),dropDate1.strftime("%d"))
weekdayBaseRateList1=rateScraper(pickUpDate1,pickUpHr1,dropDate1,dropHr1,2)
#print(weekdayBaseRateList1)
#print("\n \n")
#print(weekdayBaseRateList1)
#function
pickUpHr2=pickUpHr
dropHr2=(pickUpHr+21)-24
pickUpDate2=pickUpDate
dropDate2=pickUpDate+timedelta(days=1)
#print(pickUpHr2,dropHr2,pickUpDate2.strftime("%d"),dropDate2.strftime("%d"))
weekdayFinalBaseList1=rateScraper(pickUpDate2,pickUpHr2,dropDate2,dropHr2,3)
#print(weekdayFinalBaseList1)
#print("\n \n")
#function
currentDay=pickUpDate.weekday()
dayToBeadded=5-currentDay
nextDate=pickUpDate+timedelta(days=dayToBeadded)
if(count==0):
a,b=weekendFindOut(nextDate,pickUpHr,dropHr,1)
d=(weekdayInitialBaseCostList1,weekdayBaseRateList1,weekdayFinalBaseList1,a,b)
else:
d=(weekdayInitialBaseCostList1,weekdayBaseRateList1,weekdayFinalBaseList1)
return d
def weekendFindOut(pickUpDate,pickUpHr,dropHr,count):
weekendFinalBaseList1={}
weekendBaseRateList1={}
#1255-1200
weekendBaseRateList2={}
#function
d=()
if(pickUpDate.weekday()==5):
pickUpHr1=int(pickUpDate.strftime("%H"))+5
dropHr1=pickUpHr1+1
pickUpDate1=pickUpDate
dropDate1=pickUpDate
#print(pickUpHr1,dropHr1,pickUpDate1.strftime("%d"),dropDate1.strftime("%d"))
weekendFinalBaseList1=rateScraper(pickUpDate1,pickUpHr1,dropDate1,dropHr1,4)
#print(weekendFinalBaseList1)
pickUpHr2=int(pickUpDate.strftime("%H"))+5
dropHr2=(pickUpHr2+25)-24
pickUpDate2=pickUpDate
dropDate2=pickUpDate+timedelta(days=1)
#print(pickUpHr2,dropHr2,pickUpDate2.strftime("%d"),dropDate2.strftime("%d"))
weekendBaseRateList1=rateScraper(pickUpDate2,pickUpHr2,dropDate2,dropHr2,5)
elif(pickUpDate.weekday()==6):
pickUpHr1=int(pickUpDate.strftime("%H"))+5
dropHr1=pickUpHr1+1
pickUpDate1=pickUpDate+timedelta(days=6)
dropDate1=pickUpDate1
#print(pickUpHr1,dropHr1,pickUpDate1.strftime("%d"),dropDate1.strftime("%d"))
weekendFinalBaseList1=rateScraper(pickUpDate1,pickUpHr1,dropDate1,dropHr1,4)
#print(weekendFinalBaseList1)
pickUpDate2=pickUpDate+timedelta(days=6)
dropDate2=pickUpDate2+timedelta(days=1)
pickUpHr2=int(pickUpDate2.strftime("%H"))+5
dropHr2=(pickUpHr2+25)-24
#print(pickUpDate2.strftime("%d %H"),dropDate2.strftime("%d %H"))
#print(pickUpHr2,dropHr2,pickUpDate2.strftime("%d"),dropDate2.strftime("%d"))
weekendBaseRateList1=rateScraper(pickUpDate2,pickUpHr2,dropDate2,dropHr2,5)
#Counter({'a':1, 'b':2, 'c':3})
#cost2-cost1
#function
#weekendBaseRate=cost2-cost1
#print(weekendBaseRateList1)
#print("\n \n")
#print(weekendFinalBaseList1)
#print("\n \n")
#print(len(weekendBaseRateList1))
#print(len(weekendFinalBaseList1))
A=Counter(weekendBaseRateList1)
B=Counter(weekendFinalBaseList1)
C=A-B
#print(C)
#print("\n \n")
for x in range(0,len(weekendBaseRateList1)-1):
pass
a=weekendBaseRateList1[x].encode("UTF-8")
a1=a.replace("RS ", "")
b=weekendFinalBaseList1[x].encode("UTF-8")
b1=b.replace("RS ", "")
weekendBaseRateList2.append(int(a1)-int(b1))
#print(weekendBaseRateList1)
#print(weekendBaseRateList2)
#print(weekendBaseRateList1)
dayToBeadded=7-pickUpDate.weekday()
nextDate=pickUpDate+timedelta(days=dayToBeadded)
if(count==0):
a,b,c=weekDayFindOut(nextDate,pickUpHr,dropHr,1)
d=(a,b,c,weekendFinalBaseList1,C)
else:
d=(weekendFinalBaseList1,C)
return d
'''
#from datetime import datetime
#datetime_object = datetime.strptime('Jun 1 2005 1:33PM', '%b %d %Y %I:%M%p')
#datetime.now().strftime('%H')
def pricingStrucWickedRide(weekdayBaseRate,weekendBaseRate,weekdayFinalBase,weekendFinalBase,weekdayInitialBaseCost,minimumBillingHr,pickUpTotal,dropTotal,pickUpDate,dropDate,pickUpTime,dropTime):
    """Compute a WickedRide rental cost for the given pickup/drop-off window.

    Pricing parameters (taken from the per-bike lists in internalDBWickedRide):
        weekdayBaseRate        -- per-hour rate for 5-19 h weekday rentals (e.g. 41)
        weekendBaseRate        -- per-hour rate beyond 24 h on weekends (e.g. 55)
        weekdayFinalBase       -- flat base for a full weekday, ~20-24 h (e.g. 787)
        weekendFinalBase       -- flat base for a full weekend day (e.g. 1200)
        weekdayInitialBaseCost -- flat cost for a 1-4 h weekday slot (e.g. 164)
        minimumBillingHr       -- currently unused by this function
    Time parameters:
        pickUpTotal/dropTotal  -- datetime objects for pickup and drop-off
        pickUpDate/dropDate    -- day-of-month ints (callers pass int(...))
        pickUpTime/dropTime    -- hour-of-day ints

    Returns the computed cost. The print() calls are debug traces naming the
    pricing branch that was taken.

    NOTE(review): if 4 < totalHr < 5 in the weekday-weekday case, no branch
    assigns a price and cost stays 0 -- confirm whether that gap is intended.
    """
    #weekdayBaseRate=5-19 rate eg 41
    #weekendBaseRate=>24 rate eg55
    #weekdayFinalBase=above 24 base(weekday) cost eg 787
    #weekendFinalBase=above 24 base(weekend) cost eg 1200
    #weekdayInitialBaseCost=1-4hr(weekday) eg 164
    # NOTE(review): day-of-month arithmetic only; breaks across month boundaries.
    totalDays=(dropDate-pickUpDate)+1
    totalHr=0
    cost=0
    # Exact rental duration in hours (float), from the full datetimes.
    diffDate=dropTotal-pickUpTotal
    totalHr=diffDate.total_seconds()/3600
    print(totalHr)
    '''
    if(dropTime>pickUpTime):
        totalHr=(totalDays-2)*24+((dropTime)+(24-pickUpTime))
    else:
        totalHr=(totalDays-2)*24+((dropTime)+(24-pickUpTime))
    print(totalHr)
    '''
    #weekday-weekday
    if(pickUpTotal.weekday()<5 and dropTotal.weekday()<5):
        print("1")
        if(totalHr<=4):
            cost=weekdayInitialBaseCost
            print("1.1")
        if(totalHr>=5):
            print("3.2.1")
            #noOfWeekday=dropDate.weekday()-pickUpDate.weekday()
            qot=int(totalHr/24)  # number of complete 24 h days
            print(qot)
            rem=totalHr%24       # leftover hours beyond the complete days
            print(rem)
            if(rem<20):
                print("3.2.1.1")
                cost=qot*weekdayFinalBase+rem*weekdayBaseRate
            else:
                print("3.2.1.2")
                # NOTE(review): uses weekendFinalBase inside the
                # weekday-to-weekday branch -- looks like it should be
                # weekdayFinalBase; confirm against the pricing rules.
                cost=qot*weekendFinalBase+weekdayFinalBase
    #weekend-weekend
    if(pickUpTotal.weekday()>4 and dropTotal.weekday()>4):
        print("2")
        if(totalHr<=24):
            cost=weekendFinalBase
            print("2.1")
        if(totalHr>=25):
            cost=weekendFinalBase+(totalHr-24)*weekendBaseRate
            print("2.2")
    #weekday-weekend
    noOfWeekday=0
    if(pickUpTotal.weekday()<5 and dropTotal.weekday()>4):
        print("3")
        if(totalDays<=1):
            print("3.1")
            if(totalHr<=24):
                cost=weekendFinalBase
                print("3.1.1")
        if(totalDays>=2):
            print("3.2")
            if(dropTotal.weekday()==5):
                print("3.2.1")
                #noOfWeekday=dropDate.weekday()-pickUpDate.weekday()
                # First 24 h are billed as a weekend day; the rest as weekdays.
                totalWeekdayHr=totalHr-24
                qot=int(totalWeekdayHr/24)
                rem=totalWeekdayHr%24
                if(rem<20):
                    print("3.2.1.1")
                    cost=weekendFinalBase+qot*weekdayFinalBase+rem*weekdayBaseRate
                else:
                    print("3.2.1.2")
                    # NOTE(review): dropDate/pickUpDate are ints here (callers
                    # pass int day-of-month), so .weekday() would raise
                    # AttributeError if this branch runs -- likely meant
                    # dropTotal/pickUpTotal; confirm.
                    noOfWeekday=dropDate.weekday()-pickUpDate.weekday()
                    cost=weekendFinalBase+qot*weekdayFinalBase+weekdayFinalBase
            if(dropTotal.weekday()==6):
                noOfWeekday=dropTotal.weekday()-pickUpTotal.weekday()
                print("3.2.2")
                #(noOfWeekday-2)*weekdayFinalBase)
                # Partial pickup day at weekday rate + full weekdays + one
                # weekend base + partial drop day at weekend rate.
                cost=((24-pickUpTime)*weekdayBaseRate)+(noOfWeekday-2)*weekdayFinalBase+weekendFinalBase+(dropTime*weekendBaseRate)
    #weekend-weekday
    if(pickUpTotal.weekday()>4 and dropTotal.weekday()<5):
        noOfWeekday=0
        print("4.1")
        # NOTE(review): weekday() of drop minus pickup is negative here
        # (weekday index < weekend index); value is overwritten in the
        # branches below before use, except in 4.2.1.2 -- verify.
        noOfWeekday=dropTotal.weekday()-pickUpTotal.weekday()
        if(totalDays<=1):
            print("4.1")
            if(totalHr<=24):
                print("4.1.1")
                cost=weekendFinalBase
        if(totalDays>=2):
            print("4.2")
            if(pickUpTotal.weekday()==6):
                print("4.2.1")
                #noOfWeekday=dropDate.weekday()-pickUpDate.weekday()
                totalWeekdayHr=totalHr-24
                qot=int(totalWeekdayHr/24)
                rem=totalWeekdayHr%24
                if(rem<20):
                    print("4.2.1.1")
                    cost=weekendFinalBase+qot*weekdayFinalBase+rem*weekdayBaseRate
                else:
                    print("4.2.1.2")
                    noOfWeekday=dropTotal.weekday()-pickUpTotal.weekday()
                    cost=weekendFinalBase+qot*weekdayFinalBase+weekdayFinalBase
            if(pickUpTotal.weekday()==5):
                noOfWeekday=pickUpTotal.weekday()-dropTotal.weekday()
                print("4.2.2")
                cost=(24-pickUpTime)*weekendBaseRate+(noOfWeekday-1)*weekdayFinalBase+weekendFinalBase+dropTime*weekdayBaseRate
    return cost
def dateDefault(pickUpTotal,dropTotal):
    """Placeholder for the live rate-scraping workflow (not implemented).

    The original body was entirely a commented-out draft that, between 13:00
    and 16:00, scraped current WickedRide rates via ``weekDayFindOut`` /
    ``weekendFindOut`` (helpers that are themselves commented out at the top
    of this module), merged the resulting per-bike rate dictionaries with a
    ``defaultdict``, and computed a sample booking cost with
    ``wickedRideTmingStruc``. None of that code was active -- the function's
    only statement was its docstring -- so the hard-coded tables from
    ``internalDBWickedRide`` are what the rest of the module actually uses.

    Both parameters are accepted for interface compatibility and ignored.

    Returns:
        None, exactly as the original (docstring-only) body did.
    """
    return None
def internalDBWickedRide():
    """Return WickedRide's hard-coded pricing tables.

    Structure: city -> pickup location -> bike model -> list of pricing
    parameters, in this order:
        [0] deposit (500 for every bike)
        [1] weekday hourly base rate for 5-19 h rentals (e.g. 41)
        [2] weekday flat base above ~20 h (e.g. 787)
        [3] weekend hourly rate beyond 24 h (e.g. 55)
        [4] weekend flat base for a full day (e.g. 1200)
        [5] flat cost for the minimum weekday slot (e.g. 164)
        [6] minimum billing hours (e.g. 4)

    The table is also printed, preserving the original behaviour.

    NOTE(review): key casing is inconsistent ('bangalore' is lower-case,
    other cities capitalised; 'Duke 200' vs 'DUKE 200'); two entries have
    short lists (flagged inline) -- confirm the missing values upstream.
    """
    dictWickedRide = {
        'bangalore': {
            'Jayanagar': {
                'IRON 883': [500, 229, 5496, 229, 5496, 2290, 10],
                'STREET 750': [500, 133, 2553, 178, 4272, 1330, 10],
                'THUNDERBIRD - 500': [500, 51, 976, 69, 1656, 201, 4],
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                'TIGER 800 XR': [500, 343, 8232, 343, 8232, 3430, 10],
                'SCRAMBLER': [500, 237, 4550, 316, 7584, 2370, 10],
                'BULLET ELECTRA - 350': [500, 41, 787, 55, 1200, 164, 4],
                'RC 200': [500, 64, 1228, 86, 2064, 256, 4],
                'FORTY EIGHT': [500, 297, 7128, 297, 7128, 2970, 10],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
                'DOMINAR': [500, 85, 2040, 106, 2544, 850, 10],
            },
            'AECS Layout - Kundalahalli': {
                'IRON 883': [500, 229, 5496, 229, 5496, 2290, 10],
                'DESERT STORM - 500': [500, 51, 979, 69, 1656, 204, 4],
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                'NINJA 650': [500, 171, 3283, 229, 5496, 1710, 10],
                'DUKE 390': [500, 62, 1190, 83, 1992, 248, 4],
                'DUKE 200': [500, 50, 960, 67, 1608, 200, 4],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
                'RC 390': [500, 79, 1516, 106, 2544, 316, 4],
            },
            'Koramangala': {
                'IRON 883': [500, 229, 5496, 229, 5496, 2290, 10],
                'STREET 750': [500, 133, 2553, 178, 4272, 1330, 10],
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                'TNT 600 GT': [500, 216, 4147, 288, 6912, 2160, 10],
                'DUKE 390': [500, 62, 1190, 83, 1992, 248, 4],
                'Duke 200': [500, 50, 960, 67, 1608, 200, 4],
                'TNT 600I': [500, 288, 6912, 288, 6912, 2880, 10],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
                'RC 390': [500, 79, 1516, 106, 2544, 316, 4],
                'Z 250': [500, 81, 1555, 108, 2592, 810, 10],
            },
            'Indiranagar 1st Block': {
                'IRON 883': [500, 229, 5496, 229, 5496, 2290, 10],
                'STREET 750': [500, 133, 2553, 178, 4272, 1330, 10],
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                'TIGER 800 XR': [500, 343, 8232, 343, 8232, 3430, 10],
                'DUKE 390': [500, 62, 1190, 83, 1992, 248, 4],
                'RC 200': [500, 64, 1228, 86, 2064, 256, 4],
                'MOJO': [500, 51, 979, 69, 1656, 204, 4],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
                'RC 390': [500, 79, 1516, 106, 2544, 316, 4],
                'HYPERSTRADA': [500, 371, 8000, 495, 11880, 3710, 10],
            },
            'Electronic City': {
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'BULLET 350': [500, 41, 787, 55, 1200, 164, 4],
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                'BULLET ELECTRA - 350': [500, 41, 787, 55, 1200, 164, 4],
                'DUKE 390': [500, 62, 1190, 83, 1992, 248, 4],
                'DUKE 200': [500, 50, 960, 67, 1608, 200, 4],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
            },
            'Art of Living, Kanakapura Main Road': {
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
            },
            'Chandapura': {
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
            },
            'OYO Rooms - Indiranagar': {
                'CLASSIC 350': [500, 41, 787, 55, 1200, 164, 4],
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                'MOJO': [500, 51, 979, 69, 1656, 204, 4],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
                'MONSTER 821': [500, 395, 9500, 451, 10824, 3950, 10],
            },
            'OYO Rooms - Marathahalli Innovative Multiplex': {
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'NINJA 650': [500, 171, 3283, 229, 5496, 1710, 10],
                'RC 200': [500, 64, 1228, 86, 2064, 256, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
                'TNT 300': [500, 103, 1977, 138, 3312, 1030, 10],
            },
            'OYO Flagship - Kormangala Sony Signal': {
                'STREET 750': [500, 133, 2553, 178, 4272, 1330, 10],
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                # NOTE(review): short list (5 values) -- missing the last
                # two pricing fields in the original data; confirm upstream.
                'NINJA ER-6N': [500, 30, 650, 40, 800],
                'VERSYS 650': [500, 288, 6912, 288, 6912, 2880, 10],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
            },
            'Hebbal (Sahakara Nagar)': {
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'BULLET 350': [500, 41, 787, 55, 1200, 164, 4],
                'DUKE 390': [500, 62, 1190, 83, 1992, 248, 4],
                'DUKE 200': [500, 50, 960, 67, 1608, 200, 4],
                'VERSYS 650': [500, 288, 6912, 288, 6912, 2880, 10],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
            },
        },
        'Jaipur': {
            'JLN MARG': {
                'STREET 750': [500, 231, 5544, 231, 5544, 2310, 10],
                'THUNDERBIRD - 500': [500, 83, 1992, 83, 1992, 332, 4],
                'BULLET 500': [500, 83, 1593, 83, 1992, 332, 4],
                # NOTE(review): short list (4 values, no deposit/minimum
                # fields) in the original data; confirm upstream.
                'CLASSIC CHROME - 500': [83, 1992, 83, 1992],
                'CLASSIC - 350': [500, 66, 1584, 66, 1584, 264, 4],
                'THUNDERBIRD - 350': [500, 66, 1584, 66, 1584, 264, 4],
                'BULLET ELECTRA - 350': [500, 66, 1584, 66, 1584, 264, 4],
                'HIMALAYAN': [500, 106, 2544, 106, 2544, 424, 4],
                'AVENGER': [500, 55, 1056, 55, 1320, 220, 4],
            },
        },
        'Udaipur': {
            'LAKE PALACE GATE': {
                'CLASSIC - 350': [500, 66, 1584, 66, 1584, 264, 4],
                'THUNDERBIRD - 350': [500, 66, 1584, 66, 1584, 264, 4],
                'BULLET ELECTRA - 350': [500, 66, 1584, 66, 1584, 264, 4],
            },
        },
        'Mysuru': {
            'NAZARBAD': {
                'IRON 883': [500, 229, 5496, 229, 5496, 2290, 10],
                'THUNDERBIRD - 500': [500, 48, 921, 61, 1464, 192, 4],
                'CLASSIC - 350': [500, 35, 672, 44, 1056, 140, 4],
                'BULLET 350': [500, 35, 672, 44, 1056, 140, 4],
                'RC 200': [500, 64, 1228, 86, 2064, 256, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
            },
            'INFOSYS GATE-4': {
                'STREET 750': [500, 133, 2553, 178, 4272, 1330, 10],
                'CLASSIC - 350': [500, 35, 672, 44, 1056, 140, 4],
                'DUKE 200': [500, 50, 960, 67, 1608, 200, 4],
                'HIMALAYAN': [500, 68, 1305, 91, 2000, 272, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
            },
        },
        'Bhuj': {
            'TIME SQUARE BUILDING': {
                'DESERT STORM - 500': [500, 83, 1992, 83, 1992, 332, 4],
                'BULLET - 500': [500, 83, 1593, 83, 1992, 332, 4],
                'CLASSIC - 350': [500, 66, 1584, 66, 1584, 264, 4],
                'THUNDERBIRD - 350': [500, 66, 1584, 66, 1584, 264, 4],
                'CLASSIC - 500': [500, 83, 1992, 83, 1992, 332, 4],
                'BULLET ELECTRA - 350': [500, 66, 1584, 66, 1584, 264, 4],
            },
        },
        'Ahmedabad': {
            'SCIENCE CITY': {
                'CLASSIC - 350': [500, 66, 1584, 66, 1584, 264, 4],
                'THUNDERBIRD - 350': [500, 66, 1584, 66, 1584, 264, 4],
            },
        },
        'Belagavi': {
            'SHIVBASAV NAGAR': {
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'THUNDERBIRD - 350': [500, 41, 787, 55, 1200, 164, 4],
                'DUKE 390': [500, 62, 1190, 83, 1992, 248, 4],
                'DUKE 200': [500, 50, 960, 67, 1608, 200, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
            },
        },
        'Jaisalmer': {
            'JAISALMER FORT': {
                'CLASSIC - 350': [500, 139, 3336, 139, 3336, 1668, 12],
                'IMPULSE': [500, 77, 1848, 77, 1848, 924, 12],
                'CBZ': [500, 77, 1848, 77, 1848, 924, 12],
                'PULSAR': [500, 77, 1848, 77, 1848, 924, 12],
                'DISCOVER': [500, 56, 1344, 56, 1344, 672, 12],
                'HIMALAYAN': [500, 226, 5424, 226, 5424, 2712, 12],
                'AVENGER': [500, 105, 2520, 105, 2520, 1260, 12],
            },
        },
        'Manipal': {
            'HOTEL CENTRAL PARK': {
                'CLASSIC - 350': [500, 50, 1200, 50, 1200, 200, 4],
                'THUNDERBIRD - 350': [500, 50, 1200, 50, 1200, 200, 4],
                'AVENGER': [500, 33, 633, 44, 960, 132, 4],
            },
        },
        'Gokarna': {
            'HOTEL CENTRAL PARK': {
                'CLASSIC - 350': [500, 41, 787, 55, 1200, 164, 4],
                'AVENGER': [500, 36, 864, 36, 864, 864, 24],
            },
        },
    }
    print(dictWickedRide)
    # Bug fix: the original ended with ``#return dictBykeMania`` -- a
    # commented-out return that also referenced the wrong variable name.
    # Returning the table is backward compatible (the print is kept).
    return dictWickedRide


internalDBWickedRide()
'''
weekdayInitialBaseCost=1-4hr(weekday)
weekdayBaseRate=5-19 rate eg 41
weekendBaseRate=>24 rate eg55
weekdayFinalBase=above 24 base(weekday) cost eg 787
weekendFinalBase=above 24 base(weekend) cost eg 1200
steps:
1.Find current date and time from calnedar
2.find today is a weekday or a weekend
3.If weekday:
3.1 Find total rate for 1-4 hrs(weekdayInitialBaseCost)
3.2 Find hr rate from 5-19 hrs (weekdayBaseRate)
3.3 Find total rate from 20-24hr (weekdayFinalBase)
3.4 GoTo weekend and perform weekend steps
4.If weekend:
4.1 Find Total Rate for 1-24 hrs (weekendFinalBase)
4.2 find hr rate for above 24-44 hrs(weekendBaseRate)
4.3 GoTo weekday and perform weekday steps
'''
| [
"[email protected]"
] | |
244e88616bf81f1c34a8a5451509ece0937aeb6f | 3a41d31a5d6a3a217b3085e2ebcc4e1fef9f3fe1 | /LeNet.py | 4aac031cca885f55ab27316685eb87f193adcfe2 | [] | no_license | cxk1998/NoiseDefense | fb45d9d1c4f5c29eb82f55c3d835d6c29ec05e30 | 5c34d530fcf6a66d876a1170ea25c9bf4f78089f | refs/heads/master | 2022-05-23T18:02:08.977199 | 2020-04-29T06:41:06 | 2020-04-29T06:41:06 | 259,845,427 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,921 | py | # LEnet 网络
################################
# 输入 1*28*28
# conv2 6*28*28
# maxpool 6*14*14
# conv2 16*10*10
# maxpool 16*5*5
# linear 120
# linear 84
# linear 10
###################################
import torch
import torchvision as tv
import torchvision.transforms as transforms
import torch.nn as nn
import torch.optim as optim
import argparse
import platform
import copy
from torchsummary import summary
# 定义网络结构
class LeNet(nn.Module):
    """Classic LeNet-5 style CNN for 1x28x28 (MNIST) inputs.

    Layer flow:
        input 1x28x28 -> conv1 -> 6x14x14 -> conv2 -> 16x5x5
        -> fc1 (120) -> fc2 (84) -> fc3 (10 logits)
    """

    def __init__(self):
        super(LeNet, self).__init__()
        # padding=2 keeps the 28x28 spatial size; pooling then halves it.
        self.conv1 = nn.Sequential(
            nn.Conv2d(1, 6, 5, 1, 2),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2),
        )
        self.conv2 = nn.Sequential(
            nn.Conv2d(6, 16, 5),
            nn.ReLU(),
            nn.MaxPool2d(2, 2),
        )
        self.fc1 = nn.Sequential(nn.Linear(16 * 5 * 5, 120), nn.ReLU())
        self.fc2 = nn.Sequential(nn.Linear(120, 84), nn.ReLU())
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        """Return class logits of shape (batch, 10) for input x."""
        out = self.conv2(self.conv1(x))
        # nn.Linear expects a flat feature vector per sample.
        out = out.view(out.size()[0], -1)
        return self.fc3(self.fc2(self.fc1(out)))
# Training entry point: trains LeNet on MNIST (CPU) and checkpoints each epoch.
if __name__ == "__main__":
    # Device selection -- the CUDA line is intentionally disabled; CPU is forced.
    # device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    device = torch.device("cpu")
    # Command-line arguments for checkpoint paths.
    parser = argparse.ArgumentParser()
    parser.add_argument('--outf', default='./testmodel/', help='folder to output images and model checkpoints')  # model save path
    parser.add_argument('--net', default='./testmodel/net.pth', help="path to netG (to continue training)")  # model load path
    opt = parser.parse_args()
    # Hyper-parameters.
    EPOCH = 15  # number of passes over the training set
    BATCH_SIZE = 64  # mini-batch size
    LR = 0.001  # learning rate
    # Data preprocessing: convert PIL images to tensors.
    transform = transforms.ToTensor()
    # Detect the host platform to pick the MNIST data directory.
    def is_windowssystem():
        return 'Windows' in platform.system()
    def is_linuxsystem():
        return 'Linux' in platform.system()
    if is_windowssystem():
        MNIST_data = "./dataset"  # windows
    if is_linuxsystem():
        MNIST_data = "/home/yjdu/federatedlearning_DP_torch/dataset"  # linux
    # Training dataset (download=False: the data must already exist locally).
    trainset = tv.datasets.MNIST(
        root=MNIST_data,
        train=True,
        download=False,
        transform=transform)
    # Training batch loader.
    trainloader = torch.utils.data.DataLoader(
        trainset,
        batch_size=BATCH_SIZE,
        shuffle=True,
    )
    # Test dataset.
    testset = tv.datasets.MNIST(
        root=MNIST_data,
        train=False,
        download=False,
        transform=transform)
    # Test batch loader.
    testloader = torch.utils.data.DataLoader(
        testset,
        batch_size=BATCH_SIZE,
        shuffle=False,
    )
    # Loss function (cross-entropy, standard for multi-class classification)
    # and SGD optimizer with momentum.
    net = LeNet().to(device)
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(net.parameters(), lr=LR, momentum=0.9)
    summary(net, (1, 28, 28))
    for epoch in range(EPOCH):
        sum_loss = 0.0
        # Iterate over training batches.
        for i, data in enumerate(trainloader):
            inputs, labels = data
            inputs, labels = inputs.to(device), labels.to(device)
            # Zero accumulated gradients before this batch.
            optimizer.zero_grad()
            # forward + backward
            outputs = net(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            # # Save gradients (disabled federated-learning experiment code).
            # client_dict = dict()
            # params_modules = list(net.named_parameters())
            # for params_module in params_modules:
            #     (name, params) = params_module
            #     params_grad = copy.deepcopy(params.grad)
            #     client_dict[name] = params_grad
            #
            # # Zero the gradients.
            # optimizer.zero_grad()
            #
            # # Reload the saved gradients.
            # params_modules = list(net.named_parameters())
            # for params_module in params_modules:
            #     (name, params) = params_module
            #     params.grad = client_dict[name]  # overwrite the server's parameter gradients with the stored client gradients
            optimizer.step()
            # Print the average loss every 100 batches.
            sum_loss += loss.item()
            if i % 100 == 99:
                print('[%d, %d] loss: %.03f'
                      % (epoch + 1, i + 1, sum_loss / 100))
                sum_loss = 0.0
        # Evaluate accuracy on the test set after each epoch.
        with torch.no_grad():
            correct = 0
            total = 0
            for data in testloader:
                images, labels = data
                images, labels = images.to(device), labels.to(device)
                outputs = net(images)
                # Predicted class = index of the highest logit.
                _, predicted = torch.max(outputs.data, 1)
                total += labels.size(0)
                correct += (predicted == labels).sum()
            print('第%d个epoch的识别准确率为:%d%%' % (epoch + 1, (100 * correct / total)))
            # NOTE(review): assumes opt.outf already exists -- torch.save does
            # not create the directory.
            torch.save(net.state_dict(), '%s/net_%03d.pth' % (opt.outf, epoch + 1))
            print('successfully save the model to %s/net_%03d.pth' % (opt.outf, epoch + 1))
| [
"[email protected]"
] | |
c1688dd8a6ab0e6d3e78ae4cc2ca727e136cde98 | aa480d8b09dd7ad92c37c816ebcace24a35eb34c | /third-round/98.验证二叉搜索树.py | cd2da970c58b69ea6e7594bbd03f0c33dee7998b | [] | no_license | SR2k/leetcode | 7e701a0e99f9f05b21216f36d2f5ac07a079b97f | de131226159865dcb7b67e49a58d2ddc3f0a82c7 | refs/heads/master | 2023-03-18T03:37:02.916453 | 2022-09-16T01:28:13 | 2022-09-16T01:28:13 | 182,083,445 | 0 | 0 | null | 2023-03-08T05:44:26 | 2019-04-18T12:27:12 | Python | UTF-8 | Python | false | false | 1,999 | py | #
# @lc app=leetcode.cn id=98 lang=python3
#
# [98] 验证二叉搜索树
#
# https://leetcode-cn.com/problems/validate-binary-search-tree/description/
#
# algorithms
# Medium (35.59%)
# Likes: 1457
# Dislikes: 0
# Total Accepted: 449.5K
# Total Submissions: 1.3M
# Testcase Example: '[2,1,3]'
#
# 给你一个二叉树的根节点 root ,判断其是否是一个有效的二叉搜索树。
#
# 有效 二叉搜索树定义如下:
#
#
# 节点的左子树只包含 小于 当前节点的数。
# 节点的右子树只包含 大于 当前节点的数。
# 所有左子树和右子树自身必须也是二叉搜索树。
#
#
#
#
# 示例 1:
#
#
# 输入:root = [2,1,3]
# 输出:true
#
#
# 示例 2:
#
#
# 输入:root = [5,1,4,null,null,3,6]
# 输出:false
# 解释:根节点的值是 5 ,但是右子节点的值是 4 。
#
#
#
#
# 提示:
#
#
# 树中节点数目范围在[1, 10^4] 内
# -2^31 <= Node.val <= 2^31 - 1
#
#
#
from commons.Tree import TreeNode
# @lc code=start
class Solution:
    def isValidBST(self, root: TreeNode) -> bool:
        """Return True iff the tree rooted at `root` is a valid BST.

        Uses an iterative in-order traversal: a binary tree is a valid BST
        exactly when its in-order value sequence is strictly increasing.
        """
        pending = []                # explicit traversal stack
        node = root
        last_seen = -float('inf')   # value of the previously visited node
        while pending or node:
            # Descend to the leftmost unvisited node.
            if node:
                pending.append(node)
                node = node.left
                continue
            node = pending.pop()
            # In-order values must be strictly increasing.
            if node.val <= last_seen:
                return False
            last_seen = node.val
            node = node.right
        return True
# class Solution:
# def isValidBST(self, root: TreeNode) -> bool:
# def helper(node: TreeNode, min_val: int, max_val: int) -> bool:
# if not node:
# return True
# if not min_val < node.val < max_val:
# return False
# next_min, next_max = max(node.val, min_val), min(node.val, max_val)
# return helper(node.left, min_val, next_max) and helper(node.right, next_min, max_val)
# return helper(root, -float('inf'), float('inf'))
# @lc code=end
| [
"[email protected]"
] | |
76ed65cddd7842518ab08d6a5653fe73d806c8ad | dfd0566e8beed86a1708e773a0cb6b3268125384 | /lib/python2.7/site-packages/ghpythonremote/connectors.py | e107877621fc3f0097584ff3b906df054e8fc868 | [] | no_license | aldoranx/my-blog | 97b7bd58f916b9f15a0ea27d9fa0ce313f3b8a12 | a255cd7a4ad33b2102e5aa76a83ae840fe360f51 | refs/heads/master | 2022-12-31T08:34:55.130886 | 2020-10-14T09:38:09 | 2020-10-14T09:38:09 | 292,361,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,055 | py | import errno
import logging
import os
import platform
import socket
import subprocess
from time import sleep
try:
import _winreg as winreg
except ImportError:
import winreg
from ghpythonremote import rpyc
from .helpers import get_python_path, get_extended_env_path_conda
logger = logging.getLogger("ghpythonremote.connectors")
class GrasshopperToPythonRemote:
def __init__(
self,
rpyc_server_py,
python_exe=None,
location=None,
timeout=60,
max_retry=3,
port=None,
log_level=logging.WARNING,
working_dir=None,
):
if python_exe is None:
self.python_exe = get_python_path(location)
else:
if location is not None:
logger.debug(
"python_exe and env_name specified at the same time, ignoring "
"env_name."
)
self.python_exe = python_exe
self.env = get_extended_env_path_conda(self.python_exe)
self.rpyc_server_py = rpyc_server_py
self.timeout = timeout
self.retry = 0
self.max_retry = max(0, max_retry)
self.log_level = log_level
self.working_dir = working_dir
if port is None:
self.port = _get_free_tcp_port()
else:
self.port = port
self.python_popen = self._launch_python()
self.connection = self._get_connection()
self.py_remote_modules = self.connection.root.getmodule
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Cleanup the connection on error and exit.
Parameters
----------
exc_type : Error
Type of the exception that caused the __exit__.
exc_val : str
Value of the exception that caused the __exit__.
exc_tb : type
Exception log.
Returns
-------
True if the connection was successfully closed."""
try:
if exc_type:
logger.error("{!s}, {!s}, {!s}".format(exc_type, exc_val, exc_tb))
except NameError:
pass
self.close()
return True
def run_py_function(self, module_name, function_name, *nargs, **kwargs):
"""Run a specific Python function on the remote, with Python crash handling."""
remote_module = self.py_remote_modules(module_name)
function = getattr(remote_module, function_name)
function_output = kwargs.pop("function_output", None)
try:
result = function(*nargs, **kwargs)
except (socket.error, EOFError):
self._rebuild_py_remote()
return self.run_py_function(*nargs, **kwargs)
if function_output is not None:
try:
result = result[function_output]
except NameError:
pass
return result
def close(self):
if not self.connection.closed:
logger.info("Closing connection.")
self.connection.close()
if self.python_popen.poll() is None:
logger.info("Closing Python.")
self.python_popen.terminate()
def _launch_python(self):
logger.debug("Using python executable: {!s}".format(self.python_exe))
logger.debug("Using rpyc_server module: {!s}".format(self.rpyc_server_py))
logger.debug("Using port: {}".format(self.port))
logger.debug("Using log_level: {!s}".format(self.log_level))
logger.debug("Using working_dir: {!s}".format(self.working_dir))
assert self.python_exe is not "" and self.python_exe is not None
assert self.rpyc_server_py is not "" and self.rpyc_server_py is not None
assert self.port is not "" and self.port is not None
assert self.log_level is not "" and self.log_level is not None
python_call = '"{!s}" "{!s}" "{}" "{!s}"'.format(
self.python_exe, self.rpyc_server_py, self.port, self.log_level
)
cwd = self.working_dir
python_popen = subprocess.Popen(
python_call,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
cwd=cwd,
env=self.env,
)
return python_popen
def _get_connection(self):
connection = None
logger.info("Connecting...")
for i in range(self.timeout):
try:
if not connection:
logger.debug(
"Connecting. Timeout in {:d} seconds.".format(self.timeout - i)
)
connection = rpyc.utils.factory.connect(
"localhost",
self.port,
service=rpyc.core.service.ClassicService,
config={"sync_request_timeout": None},
ipv6=False,
keepalive=True,
)
else:
logger.debug(
"Found connection, testing. Timeout in {:d} seconds.".format(
self.timeout - i
)
)
connection.ping(timeout=1)
logger.debug("Connection ok, returning.")
logger.info("Connected.")
return connection
except socket.error as e:
if self.python_popen.poll() is not None:
raise RuntimeError(
"Remote python {!s} failed on launch. ".format(self.python_exe)
+ "Does the remote python have rpyc installed?"
)
if i == self.timeout - 1 or not e.errno == errno.ECONNREFUSED:
raise RuntimeError(
"Could not connect to remote python {!s}. ".format(
self.python_exe
)
+ "Does the remote python have rpyc installed?"
)
sleep(1)
except (
rpyc.core.protocol.PingError,
rpyc.core.async_.AsyncResultTimeout,
) as e:
logger.debug(str(e))
raise e
def _rebuild_py_remote(self):
    """Tear down and relaunch the remote python after a lost connection.

    Allows up to ``self.max_retry`` rebuilds in total, waiting 10 seconds
    before relaunching; past the limit a RuntimeError is raised.
    """
    if self.retry < self.max_retry:
        logger.info("Lost Rhino connection, retrying.")
        self.retry += 1
        self.close()
        # NOTE(review): resets rhino_popen/gh_remote, which look copy-pasted
        # from the Rhino variant of this class; python_popen (the attribute
        # actually relaunched below) is not cleared -- confirm intent.
        [self.rhino_popen, self.connection, self.gh_remote] = [None, None, None]
        logger.info("Waiting 10 seconds.")
        sleep(10)
        self.python_popen = self._launch_python()
        self.connection = self._get_connection()
    else:
        raise RuntimeError(
            "Lost connection to Python, and reconnection attempts limit ({:d}) "
            "reached. Exiting.".format(self.max_retry)
        )
class PythonToGrasshopperRemote:
    """Creates a remote Rhino/IronPython instance (with Grasshopper functions)
    connected to a local python engine.

    The local instance will be able to import all objects from the Rhino IronPython
    engine, as well as Grasshopper components. Rhino will appear frozen on a python
    script it is reading.

    Parameters
    ----------
    rhino_file_path : str
        Absolute file path to a Rhino .3dm file to open in the remote Rhino. Can be
        empty.
    rpyc_server_py : str
        Absolute path to the ghcompservice.py module that launches the server on the
        remote.
    rhino_ver : int
        A Rhino version to use, from 5 to 7. Overridden by rhino_exe. Defaults to 6.
    rhino_exe : str
        Absolute path to the Rhino executable. By default, fetches from the windows
        registry the Rhino install with the same bitness as the platform, and version
        given by rhino_ver.
    timeout : int
        Number of seconds to wait for Rhino and IronPython to startup.
    max_retry : int
        Number of times Rhino will be restarted if it crashes, before declaring the
        connection dead.

    Examples
    --------
    >>> ROOT = os.path.abspath(os.path.join(os.path.curdir, '..'))
    >>> rhino_file_path = os.path.join(ROOT, 'examples', 'curves.3dm')
    >>> rpyc_server_py = os.path.join(ROOT, 'ghcompservice.py')
    >>> with PythonToGrasshopperRemote(
    >>>     rhino_file_path, rpyc_server_py, rhino_ver=6, timeout=60
    >>> ) as py2gh:
    >>>     rghcomp = py2gh.gh_remote_components
    >>>     rgh = py2gh.connection
    >>>     Rhino = rgh.modules.Rhino
    >>>     rs = rgh.modules.rhinoscriptsyntax
    >>>     # Do stuff with all this
    >>>     # See CPython_to_GH.py for a longer example
    """

    def __init__(
        self,
        rhino_file_path,
        rpyc_server_py,
        rhino_ver=6,
        rhino_exe=None,
        timeout=60,
        max_retry=3,
        port=None,
    ):
        if rhino_exe is None:
            self.rhino_exe = self._get_rhino_path(version=rhino_ver)
        else:
            self.rhino_exe = rhino_exe
        self.rhino_file_path = rhino_file_path
        self.rpyc_server_py = rpyc_server_py
        self.timeout = timeout
        self.retry = 0
        self.max_retry = max(0, max_retry)
        if port is None:
            self.port = _get_free_tcp_port()
        else:
            self.port = port
        self.rhino_popen = self._launch_rhino()
        self.connection = self._get_connection()
        # Remote getters for compiled Grasshopper components and user objects.
        self.gh_remote_components = self.connection.root.ghcomp
        self.gh_remote_userobjects = self.connection.root.ghuo

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Cleanup the connection on error and exit.

        Parameters
        ----------
        exc_type : Error
            Type of the exception that caused the __exit__.
        exc_val : str
            Value of the exception that caused the __exit__.
        exc_tb : type
            Exception log.

        Returns
        -------
        True if the connection was successfully closed."""
        try:
            if exc_type:
                logger.error("{!s}, {!s}, {!s}".format(exc_type, exc_val, exc_tb))
        except NameError:
            pass
        self.close()
        # NOTE: returning True suppresses any exception raised inside the
        # `with` block; it is logged above instead of propagating.
        return True

    def run_gh_component(self, component_name, *nargs, **kwargs):
        """Run a specific Grasshopper component on the remote, with Rhino crash
        handling.
        """
        is_cluster = kwargs.pop("is_cluster", False)
        # TODO: improve ghcomp to get clusters the same way we get compiled components,
        # thus removing the need for a custom getter
        component = self.gh_remote_components(component_name, is_cluster=is_cluster)
        component_output = kwargs.pop("component_output", None)
        try:
            result = component(*nargs, **kwargs)
        except (socket.error, EOFError):
            self._rebuild_gh_remote()
            # BUG FIX: the retry used to call run_gh_component(*nargs, ...),
            # dropping component_name (and the popped keyword options), so the
            # first positional argument was misused as the component name.
            return self.run_gh_component(
                component_name,
                *nargs,
                is_cluster=is_cluster,
                component_output=component_output,
                **kwargs
            )
        if component_output is not None:
            try:
                result = result[component_output]
            except (TypeError, KeyError, IndexError):
                # BUG FIX: indexing a result raises TypeError/KeyError/
                # IndexError, not the NameError that was previously (and
                # uselessly) caught; fall back to returning the raw result.
                pass
        return result

    def close(self):
        """Close the rpyc connection and terminate Rhino if still running."""
        if not self.connection.closed:
            logger.info("Closing connection.")
            self.connection.close()
        if self.rhino_popen.poll() is None:
            logger.info("Closing Rhino.")
            self.rhino_popen.terminate()

    @staticmethod
    def _get_rhino_path(version=6, preferred_bitness="same"):
        """Look up the Rhino.exe install path for the requested version and
        bitness in the Windows registry."""
        rhino_reg_key_path = None
        version_str = "{!s}.0".format(version)
        if platform.architecture()[0] == "64bit":
            if preferred_bitness == "same" or preferred_bitness == "64":
                if version == 5:
                    # Rhino 5 64-bit registers itself as "5.0x64".
                    version_str += "x64"
                rhino_reg_key_path = r"SOFTWARE\McNeel\Rhinoceros\{}\Install".format(
                    version_str
                )
                if version < 5:
                    rhino_reg_key_path = None
            elif preferred_bitness == "32":
                rhino_reg_key_path = (
                    r"SOFTWARE\WOW6432Node\McNeel\Rhinoceros\{}\Install"
                )
                rhino_reg_key_path = rhino_reg_key_path.format(version_str)
        elif platform.architecture()[0] == "32bit":
            if preferred_bitness == "same" or preferred_bitness == "32":
                rhino_reg_key_path = r"SOFTWARE\McNeel\Rhinoceros\{}\Install".format(
                    version_str
                )
                if version > 5:
                    rhino_reg_key_path = None
        if rhino_reg_key_path is None:
            logger.error(
                "Did not understand Rhino version ({!s}) and bitness ({!s}) options "
                "for platform {!s}.".format(
                    version, preferred_bitness, platform.machine()
                )
            )
        # In Python 3, OpenKey might throw a FileNotFoundError, which is not defined in
        # Python 2. Just pretend to work around that
        try:
            FileNotFoundError
        except NameError:
            FileNotFoundError = IOError
        try:
            rhino_reg_key = winreg.OpenKey(
                winreg.HKEY_LOCAL_MACHINE, rhino_reg_key_path
            )
            rhino_path = winreg.QueryValueEx(rhino_reg_key, "Path")[0]
        except (FileNotFoundError, OSError) as e:
            logger.error(
                "Unable to find Rhino installation in registry. Are you running "
                "Windows with Rhinoceros installed?"
            )
            raise e
        return os.path.join(rhino_path, "Rhino.exe")

    def _launch_rhino(self):
        """Start Rhino with a -_RunPythonScript command that launches the
        rpyc server module, returning the Popen handle."""
        # BUG FIX: the previous `x is not ""` asserts compared identity, not
        # equality; these truthiness checks reject both None and "".
        assert self.rhino_exe, "rhino_exe must be a non-empty string"
        assert self.rpyc_server_py, "rpyc_server_py must be a non-empty string"
        assert self.port is not None and self.port != "", "port must be set"
        rhino_call = [
            '"' + self.rhino_exe + '"',
            "/nosplash",
            "/notemplate",
            '/runscript="-_RunPythonScript ""{!s}"" {!s} -_Exit "'.format(
                self.rpyc_server_py, self.port
            ),
        ]
        if self.rhino_file_path:
            rhino_call.append(self.rhino_file_path)
        # Default escaping in subprocess.line2cmd does not work here,
        # manually convert to string
        rhino_call = " ".join(rhino_call)
        rhino_popen = subprocess.Popen(
            rhino_call, stdout=subprocess.PIPE, stdin=subprocess.PIPE
        )
        return rhino_popen

    def _get_connection(self):
        """Poll the freshly launched Rhino until an rpyc connection is
        established and answers a ping, or fail after ``self.timeout`` seconds."""
        connection = None
        logger.info("Connecting...")
        for i in range(self.timeout):
            try:
                if not connection:
                    logger.debug(
                        "Connecting. Timeout in {:d} seconds.".format(self.timeout - i)
                    )
                    connection = rpyc.utils.factory.connect(
                        "localhost",
                        self.port,
                        service=rpyc.core.service.ClassicService,
                        config={"sync_request_timeout": None},
                        ipv6=False,
                        keepalive=True,
                    )
                else:
                    logger.debug(
                        "Found connection, testing. Timeout in {:d} seconds.".format(
                            self.timeout - i
                        )
                    )
                    connection.ping(timeout=1)
                    logger.debug("Connection ok, returning.")
                    logger.info("Connected.")
                    return connection
            except (
                socket.error,
                rpyc.core.protocol.PingError,
                rpyc.core.async_.AsyncResultTimeout,
            ) as e:
                # BUG FIX: `e is socket.error` compared the exception instance
                # to the class and was always False, so fatal socket errors
                # were never re-raised early and the loop never slept between
                # connection attempts.
                if isinstance(e, socket.error) and not e.errno == errno.ECONNREFUSED:
                    raise
                if i == self.timeout - 1:
                    raise
                elif isinstance(e, socket.error):
                    sleep(1)

    def _rebuild_gh_remote(self):
        """Tear down and relaunch Rhino after a lost connection, allowing up
        to ``self.max_retry`` rebuilds in total."""
        if self.retry < self.max_retry:
            logger.info("Lost Rhino connection, retrying.")
            self.retry += 1
            self.close()
            [self.rhino_popen, self.connection, self.gh_remote] = [None, None, None]
            logger.info("Waiting 10 seconds.")
            sleep(10)
            self.rhino_popen = self._launch_rhino()
            self.connection = self._get_connection()
            self.gh_remote_components = self.connection.root.get_component
        else:
            raise RuntimeError(
                "Lost connection to Rhino, and reconnection attempts limit ({:d}) "
                "reached. Exiting.".format(self.max_retry)
            )
def _get_free_tcp_port():
tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcp.bind(("", 0))
addr, port = tcp.getsockname()
tcp.close()
return port
| [
"[email protected]"
] | |
02b3788367bf8c81aefe06a9885086b46a11f04d | 2c691f89e0cbc83bd2dde3ca7f01d441a0493652 | /Experiment/habituationStim.py | f86903a8893aab4ff13bcc84f88264cb310db306 | [] | no_license | NNiehof/GVSNoise | eebe347851c1d4ff7ba39aaaa4d12ba6a38c8055 | 9269a816dc6142166a4d783a57203d67194cec14 | refs/heads/master | 2020-03-21T02:52:42.602058 | 2018-07-19T14:29:43 | 2018-07-19T14:29:43 | 138,025,814 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,132 | py | # Nynke Niehof, 2018
from Experiment.GVS import GVS
from Experiment.genNoiseStim import genStim
import matplotlib.pyplot as plt
def habituation_signal():
    """
    Generate a habituation signal with a slow ramp
    """
    amp = 1.0  # stimulus amplitude in mA (also used as the GVS voltage cap)
    duration = 25.0  # seconds of stimulation
    f_samp = 1e3  # sample rate in Hz
    buffer_size = int(duration * f_samp)
    gvs = GVS(max_voltage=amp)
    timing = {"rate": f_samp, "samps_per_chan": buffer_size}
    connected = gvs.connect("cDAQ1Mod1/ao0", **timing)
    if connected:
        # white noise with fade-in/fade-out
        make_stim = genStim(f_samp)
        make_stim.noise(25.0, amp)
        make_stim.fade(f_samp * 10.0)  # 10 s fade, given in samples
        faded_samples = make_stim.stim
        print("start galvanic stim")
        gvs.write_to_channel(faded_samples)
        print("end galvanic stim")
    gvs.quit()
    # NOTE(review): if connect() failed, faded_samples is unbound here and
    # this return raises NameError -- confirm the intended failure behaviour.
    return faded_samples
def stimulus_plot(stim, title=""):
    """Open a new figure and plot a stimulus trace.

    The x axis is the sample index, the y axis the amplitude in mA.
    """
    plt.figure()
    plt.plot(stim)
    for labeler, text in (
        (plt.xlabel, "sample"),
        (plt.ylabel, "amplitude (mA)"),
        (plt.title, title),
    ):
        labeler(text)
if __name__ == "__main__":
    # Play the habituation stimulus, then display the generated waveform.
    faded_stim = habituation_signal()
    stimulus_plot(faded_stim, title="white noise with fade in/out")
    plt.show()
| [
"[email protected]"
] | |
f5b48777983c83671e9f41d73bae623898a46a17 | 1df2491dd7208cf3cbe1410c3853de1cd7f37480 | /robocorp-code/tests/robocorp_code_tests/fixtures.py | c87b1d06775199a3c4ef337320cf72def9df9c74 | [
"Apache-2.0"
] | permissive | xylix/robotframework-lsp | c8cfa63e452be6635a7072cf4cdc7f201a286ca4 | ae21ddd460608d8e72ddcafe261c2df55b49db21 | refs/heads/master | 2023-01-23T17:47:45.633410 | 2020-11-23T12:03:45 | 2020-11-23T12:03:45 | 275,767,608 | 0 | 0 | null | 2020-06-29T07:56:06 | 2020-06-29T07:56:06 | null | UTF-8 | Python | false | false | 7,919 | py | import os
import pytest
from robocorp_ls_core.protocols import IConfigProvider
from robocorp_ls_core.robotframework_log import get_logger
from robocorp_ls_core.unittest_tools.cases_fixture import CasesFixture
from robocorp_code.protocols import IRcc, ActionResult
log = get_logger(__name__)
@pytest.fixture
def language_server_client_class():
    """Client class used by the tests to talk to the language server."""
    from robocorp_code_tests.robocode_language_server_client import (
        RobocorpLanguageServerClient,
    )

    return RobocorpLanguageServerClient
@pytest.fixture
def language_server_class():
    """Language server implementation under test."""
    from robocorp_code.robocorp_language_server import RobocorpLanguageServer

    return RobocorpLanguageServer
@pytest.fixture
def main_module():
    """Entry-point module of robocorp_code (used to start the server)."""
    from robocorp_code import __main__

    return __main__
@pytest.fixture
def rcc_location() -> str:
    """Path to a local rcc executable, downloading it on first use."""
    from robocorp_code.rcc import download_rcc
    from robocorp_code.rcc import get_default_rcc_location

    location = get_default_rcc_location()
    # force=False: reuse a previously downloaded rcc if it is present.
    download_rcc(location, force=False)
    return location
@pytest.fixture
def ci_endpoint() -> str:
    """Cloud endpoint URL taken from the CI_ENDPOINT environment variable."""
    ci_endpoint = os.environ.get("CI_ENDPOINT")
    if ci_endpoint is None:
        raise AssertionError("CI_ENDPOINT env variable must be specified for tests.")
    return ci_endpoint
@pytest.fixture
def ci_credentials() -> str:
    """Cloud credentials taken from the CI_CREDENTIALS environment variable."""
    ci_credentials = os.environ.get("CI_CREDENTIALS")
    if ci_credentials is None:
        raise AssertionError("ci_credentials env variable must be specified for tests.")
    return ci_credentials
@pytest.fixture
def rcc_config_location(tmpdir) -> str:
    """Path of a per-test rcc config file inside a fresh temp directory."""
    config_dir = tmpdir.join("config")
    os.makedirs(str(config_dir))
    # Only the path is returned; rcc creates the file itself later.
    return str(config_dir.join("config_test.yaml"))
@pytest.fixture(scope="session")
def cases(tmpdir_factory) -> CasesFixture:
    """Session-wide copy of the test resources.

    The non-ASCII directory name is deliberate, to exercise unicode paths.
    """
    basename = "res áéíóú"
    copy_to = str(tmpdir_factory.mktemp(basename))

    f = __file__
    original_resources_dir = os.path.join(os.path.dirname(f), "_resources")
    assert os.path.exists(original_resources_dir)

    return CasesFixture(copy_to, original_resources_dir)
@pytest.fixture
def config_provider(
    ws_root_path: str, rcc_location: str, ci_endpoint: str, rcc_config_location: str
):
    """Configuration provider pre-filled with the test rcc settings."""
    from robocorp_code.robocorp_config import RobocorpConfig
    from robocorp_ls_core.ep_providers import DefaultConfigurationProvider

    config = RobocorpConfig()

    config.update(
        {
            "robocorp": {
                "rcc": {
                    "location": rcc_location,
                    "endpoint": ci_endpoint,
                    "config_location": rcc_config_location,
                }
            }
        }
    )

    return DefaultConfigurationProvider(config)
@pytest.fixture
def rcc(config_provider: IConfigProvider, rcc_config_location: str) -> IRcc:
    """Rcc instance with usage tracking disabled for the test run."""
    from robocorp_code.rcc import Rcc

    rcc = Rcc(config_provider)
    # We don't want to track tests.
    for _i in range(2):
        # There's a bug in which the --do-not-track doesn't work the first time.
        result = rcc._run_rcc(
            "feedback identity --do-not-track --config".split() + [rcc_config_location],
            expect_ok=False,
        )
        assert result.success
        result_msg = result.result
        assert result_msg
        if "enabled" in result_msg:
            # Still enabled: run the command once more.
            continue
        if "disabled" in result_msg:
            break
        raise AssertionError(f"Did not expect {result_msg}")
    else:
        # Both attempts finished without tracking becoming disabled.
        raise AssertionError(f"Did not expect {result_msg}")
    return rcc
@pytest.fixture
def rcc_conda_installed(rcc: IRcc):
    """Ensure conda is available through rcc before the test runs."""
    result = rcc.check_conda_installed()
    # BUG FIX: the message was a raw (non-f) string, so the failing result
    # was never interpolated into the assertion message.
    assert result.success, f"Error: {result}"
# Canned Cloud workspace listing served by RccPatch.mock_run_rcc_default.
_WS_INFO = (
    {
        "id": "workspace_id_1",
        "name": "CI workspace",
        "orgId": "affd282c8f9fe",
        "orgName": "My Org Name",
        "orgShortName": "654321",
        "shortName": "123456",  # Can be some generated number or something provided by the user.
        "state": "active",
        "url": "http://url1",
    },
    {
        "id": "workspace_id_2",
        "name": "My Other workspace",
        "orgId": "affd282c8f9fe",
        "orgName": "My Org Name",
        "orgShortName": "1234567",
        "shortName": "7654321",
        "state": "active",
        "url": "http://url2",
    },
)

# Canned package listings: workspace 2 is empty, workspace 1 has two packages.
_PACKAGE_INFO_WS_2: dict = {}
_PACKAGE_INFO_WS_1: dict = {
    "activities": [
        {"id": "452", "name": "Package Name 1"},
        {"id": "453", "name": "Package Name 2"},
    ]
}
class RccPatch(object):
    """Monkeypatches ``Rcc._run_rcc`` with canned Cloud responses for tests."""

    def __init__(self, monkeypatch):
        from robocorp_code.rcc import Rcc

        self.monkeypatch = monkeypatch
        self._current_mock = self.mock_run_rcc_default
        self._original = Rcc._run_rcc  # kept so tests can delegate/restore
        self._package_info_ws_1 = _PACKAGE_INFO_WS_1
        # Optional hook: consulted first; a non-None return short-circuits.
        self.custom_handler = None

    def mock_run_rcc(self, args, *starargs, **kwargs) -> ActionResult:
        """Replacement for Rcc._run_rcc; delegates to the current mock."""
        return self._current_mock(args, *starargs, **kwargs)

    def mock_run_rcc_default(self, args, *sargs, **kwargs) -> ActionResult:
        """Answer the rcc command in ``args`` with canned data."""
        import json
        import copy

        if self.custom_handler is not None:
            ret = self.custom_handler(args, *sargs, **kwargs)
            if ret is not None:
                return ret

        if args[:4] == ["cloud", "workspace", "--workspace", "workspace_id_1"]:
            # List packages for workspace 1
            return ActionResult(
                success=True, message=None, result=json.dumps(self._package_info_ws_1)
            )

        if args[:4] == ["cloud", "workspace", "--workspace", "workspace_id_2"]:
            # List packages for workspace 2
            return ActionResult(
                success=True, message=None, result=json.dumps(_PACKAGE_INFO_WS_2)
            )

        if args[:3] == ["cloud", "workspace", "--config"]:
            # List workspaces
            workspace_info = _WS_INFO
            return ActionResult(
                success=True, message=None, result=json.dumps(workspace_info)
            )

        if args[:3] == ["cloud", "push", "--directory"]:
            if args[4:8] == ["--workspace", "workspace_id_1", "--robot", "2323"]:
                return ActionResult(success=True)
            if args[4:8] == ["--workspace", "workspace_id_1", "--robot", "453"]:
                return ActionResult(success=True)

        if args[:5] == ["cloud", "new", "--workspace", "workspace_id_1", "--robot"]:
            # Submit a new package to ws 1 (copy-on-write so that other tests
            # holding the original dict are unaffected).
            cp = copy.deepcopy(self._package_info_ws_1)
            cp["activities"].append({"id": "2323", "name": args[5]})
            self._package_info_ws_1 = cp
            return ActionResult(
                success=True,
                message=None,
                # BUG FIX: this was a plain string, so the robot name
                # placeholder was never interpolated into the result.
                result=f"Created new robot named {args[5]} with identity 2323.",
            )

        if args[:4] == ["config", "credentials", "-j", "--verified"]:
            return ActionResult(
                success=True,
                message=None,
                result=json.dumps(
                    [
                        {
                            "account": "robocorp-code",
                            "identifier": "001",
                            "endpoint": "https://endpoint.foo.bar",
                            "secret": "123...",
                            "verified": 1605525807,
                        }
                    ]
                ),
            )

        raise AssertionError(f"Unexpected args: {args}")

    def mock_run_rcc_should_not_be_called(self, args, *sargs, **kwargs):
        raise AssertionError(
            "This should not be called at this time (data should be cached)."
        )

    def apply(self) -> None:
        """Install the mock over ``Rcc._run_rcc``."""
        from robocorp_code.rcc import Rcc

        self.monkeypatch.setattr(Rcc, "_run_rcc", self.mock_run_rcc)

    def disallow_calls(self) -> None:
        """After this call, any rcc invocation fails the test (data must be
        served from the cache)."""
        self._current_mock = self.mock_run_rcc_should_not_be_called
@pytest.fixture
def rcc_patch(monkeypatch):
    """Fresh RccPatch helper; tests call .apply() to install the mock."""
    return RccPatch(monkeypatch)
| [
"[email protected]"
] | |
31e3ddb982ee1cffa8058bbecf325c5dbdd7093d | c117f7064b7132778bead5a8b77b67e2429a2b7a | /beteven.py | 27e35d3bb83cd426ac35de52a2b1a860ad6f518d | [] | no_license | gurudurairaj/gp | 664306f41f73f8b620ba74b048372e1c94e59bc7 | 2fce98f7428103b54b9edd075d4a83dc434c2926 | refs/heads/master | 2020-04-15T05:00:45.934019 | 2019-05-26T17:54:54 | 2019-05-26T17:54:54 | 164,405,807 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | n,m=input().split()
# n and m were read from stdin (space-separated) above; make them integers.
n=int(n)
m=int(m)
ll=[]
# Collect the even numbers strictly between n and m.
for i in range(n+1,m):
    if i%2==0:
        ll.append(i)
# Print them space-separated on one line.
print(*ll)
| [
"[email protected]"
] | |
b5ac408244b0956e4dc6ac8faa9fd9c6d0f30e69 | 5826280cde9f6eef10ec97db021a829c6a97fe51 | /python/productMange/product/urls.py | 605f847f1c15bb7f9b4cca262d04f9abca992c81 | [] | no_license | Sindahx/upload | 2faaec36b413c57402b82f58fba6a76e4a3d0478 | 1ef4a4cb9804008016eb6fb8b16e40f817ff2805 | refs/heads/master | 2023-01-29T17:29:21.191131 | 2019-07-25T12:38:46 | 2019-07-25T12:38:46 | 127,373,252 | 0 | 0 | null | 2023-01-10T23:54:01 | 2018-03-30T02:49:41 | Python | UTF-8 | Python | false | false | 183 | py |
from django.conf.urls import url

from . import views

# Route table for the product app.
urlpatterns = [
    url(r'^$', views.index),
    # NOTE(review): these two patterns are not anchored at the start (no
    # '^'), so they match any URL *ending* in pro_list/save_list -- confirm.
    url(r'pro_list$', views.pro_list),
    url(r'save_list$', views.saveProlist),
]
"[email protected]"
] | |
05b1aaa0f5ec88eeeaf57ab42afdecbe4911b1bb | c62d47765f2931926b7d0d4354aac70b3a345d6f | /src/task2_2.py | ae98e192a676acd7bb8c21432ae9d4e02f6c96bc | [] | no_license | jgongil/de_challenge | ed228387adea5cb1e4773db560c3a7c2bd7041fd | 3e614ec0c2a7c9314cea9d2610c2658161b91424 | refs/heads/master | 2023-03-27T21:56:23.056290 | 2021-04-03T16:29:37 | 2021-04-03T16:29:37 | 352,990,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | # PART 2: Spark Dataframe API
## Task 2: Creates CSV that lists the minimum price, maximum price and total row count

# NOTE(review): relies on `airbnb_df`, `os`, `output_path`, `spark` and the
# pyspark.sql type classes being defined earlier in the script -- confirm.

# (min_price, max_price, total_rows) tuple built from three aggregations.
summary = airbnb_df\
    .groupBy()\
    .min('price')\
    .collect()[0]\
    .__getitem__(0)\
    ,airbnb_df\
    .groupBy()\
    .max('price')\
    .collect()[0]\
    .__getitem__(0)\
    ,airbnb_df.count()

filename = os.path.join(output_path,"out_2_2.txt")

schema = StructType([StructField("min_price",FloatType(),True),\
                     StructField("max_price",FloatType(),True),\
                     StructField("total",IntegerType(),True)])

# Write a single-partition CSV with a header, overwriting previous output.
summary_df = spark.createDataFrame(data=[summary], schema=schema)
summary_df.coalesce(1)\
    .write\
    .mode ("overwrite")\
    .format("csv")\
    .option("header", "true")\
    .save(filename)
"[email protected]"
] | |
b0d4bf9981244b768521aa0fddccb6d57a0aa52d | 544743de026a3ee5d98dc1edc6dfc4ff9c546f55 | /quizapp/quiz/admin.py | 673d02b31654bd5e4df99ed71bbab7a132c37f39 | [] | no_license | bhansa/django-quiz | 68a4daeb6fe2a950601dc6cc0f544b21d0891a0a | 05653498a008edcfbb5042f90f15f7acae0351e7 | refs/heads/master | 2021-01-01T06:13:02.407668 | 2017-07-17T07:30:21 | 2017-07-17T07:30:21 | 97,381,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 95 | py | from django.contrib import admin
from .models import Question

# Register Question so quiz questions can be managed in the Django admin.
admin.site.register(Question)
| [
"[email protected]"
] | |
3a0929e3aa1cc9bf47f1148955a3d3260d60125c | a040fab5954fd31eedb7c303e7f56d9834dad548 | /inventory/migrations/0001_initial.py | 87d69442be14b1e1878fef481dc50aad8adf3c3d | [] | no_license | horatiorosa/firstdjangopp | 57d7ae23294e91870aba70d72716f464f3735f50 | 6c865f328e9d8f88f73527a9c42d38ed84deb018 | refs/heads/master | 2021-01-10T10:04:09.603747 | 2015-10-28T04:36:31 | 2015-10-28T04:36:31 | 45,083,797 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 670 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.dev20151024185721 on 2015-10-27 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # First migration for the inventory app: creates the Item table.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Item',
            fields=[
                # Standard auto-generated integer primary key.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('description', models.TextField()),
                ('amount', models.IntegerField()),
            ],
        ),
    ]
| [
"[email protected]"
] | |
4a33c1d637a89156009bfa5b49d2776f3074f2c3 | a8062308fb3bf6c8952257504a50c3e97d801294 | /problems/BestTimeBuySellStockWithCooldown.py | 4d7e38fb7c1273d5d2ae76bbef1f7f1e3e51a90c | [] | no_license | wan-catherine/Leetcode | 650d697a873ad23c0b64d08ad525bf9fcdb62b1b | 238995bd23c8a6c40c6035890e94baa2473d4bbc | refs/heads/master | 2023-09-01T00:56:27.677230 | 2023-08-31T00:49:31 | 2023-08-31T00:49:31 | 143,770,000 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | from math import inf
"""
d[i][0] = max{d[i-1][0], d[i-1][1] + prices[i]}
d[i][1] = max{d[i-1][1], d[i-2][0] - prices[i]}
Here we must use d[i-2][0] in the d[i][1] transition because, after selling,
you must cool down for one day; so each time you buy a stock you look back to
the "free" state from two days earlier (i - 2).
"""
class Solution:
    def maxProfit_before(self, prices):
        """
        :type prices: List[int]
        :rtype: int

        State-machine formulation: `holding` (own a share), `just_sold`
        (sold today, must cool down), `resting` (free to buy).
        """
        holding, just_sold, resting = float("-inf"), 0, 0
        for price in prices:
            previous_sold = just_sold
            just_sold = holding + price
            holding = max(holding, resting - price)
            resting = max(resting, previous_sold)
        return max(resting, just_sold)

    def maxProfit(self, prices):
        """Same DP with explicit d[i][0]/d[i][1] states; buying uses the
        "free" state from two days earlier because of the cooldown."""
        best_free = 0                 # d[i][0]: max profit, not holding
        best_holding = float(-inf)    # d[i][1]: max profit, holding a share
        free_two_days_ago = 0         # d[i-2][0]
        for price in prices:
            previous_free = best_free
            best_free = max(best_free, best_holding + price)
            best_holding = max(best_holding, free_two_days_ago - price)
            free_two_days_ago = previous_free
        return best_free
| [
"[email protected]"
] | |
ca38eb3f635946ec0dfb1e07c8eb036d07605990 | 516913d9cf0866abe498a93a01c569f4dd1dc9d3 | /concoord-1.0.2/build/lib.linux-x86_64-2.7/concoord/utils.py | e2152c743d9f169cef4de13d6b140b69e045a736 | [
"MIT",
"BSD-3-Clause"
] | permissive | milannic/expCPython | 5f5ce9354d778e310f9377334e4eed3c442e8739 | b062a11326f0a1cbaeb2609bd37f96854621da99 | refs/heads/master | 2021-01-10T20:13:18.604135 | 2014-05-19T19:24:58 | 2014-05-19T19:24:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,639 | py | '''
@author: Deniz Altinbuken, Emin Gun Sirer
@note: Utility functions for the runtime. Includes a timer module for collecting measurements.
@copyright: See LICENSE
'''
import socket
import os, sys
import time
import string
import threading
from concoord.enums import *
def findOwnIP():
    """Retrieves the hostname of the caller"""
    # Resolves the local hostname to an IPv4 address via DNS/hosts lookup.
    return socket.gethostbyname(socket.gethostname())
def load_configdict(configpath):
    """Import the python config file at *configpath* and return its public
    top-level names as a dict."""
    configfile = os.path.basename(configpath)
    configdir = os.path.dirname(configpath)
    sys.path.append(configdir)
    # [:-3] strips the ".py" suffix; level=-1 (implicit relative import)
    # is Python 2 only -- this module targets Python 2.
    configmodule = __import__(configfile[:-3], globals(), locals(), [], -1)
    config_dict = {}
    for key in dir(configmodule):
        # Skip dunder names; keep everything else.
        if key.startswith('__'):
            continue
        else:
            config_dict[key] = getattr(configmodule, key)
    return config_dict
def get_addressportpairs(group):
    """Yield (addr, port) for every peer key in *group* (Python 2 iterkeys)."""
    for peer in group.iterkeys():
        yield (peer.addr,peer.port)
def get_addresses(group):
    """Yield the address of every peer key in *group*."""
    for peer in group.iterkeys():
        yield peer.addr
# A logger will always print to the screen. It can also log to a file or to a network log daemon.
class NoneLogger():
    """No-op logger: satisfies the Logger interface while discarding input."""
    def write(self, cls, string):
        pass

    def close(self):
        pass
class Logger():
    """Logger that always prints to stdout and can additionally append to a
    file or stream to a network log daemon (Python 2 module)."""
    def __init__(self, name, filename=None, lognode=None):
        # name: prefix prepended to every log line
        # filename: if given, also append to file "concoord_log_<name>"
        # lognode: if given ("addr:port"), also stream to that TCP endpoint
        self.prefix = name
        self.log = None
        if filename is not None:
            self.log = open("concoord_log_"+name, 'w')
        if lognode is not None:
            logaddr,logport = lognode.split(':')
            try:
                self.log = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
                self.log.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
                self.log.setsockopt(socket.IPPROTO_TCP,socket.TCP_NODELAY,1)
                self.log.connect((logaddr,int(logport)))
            except IOError:
                # Log daemon unreachable: fall back to stdout-only logging.
                self.log = None
        return

    def write(self, cls, string):
        # Always echo to stdout (Python 2 print statement).
        print "[%s] %s %s: %s" % (self.prefix + '_' + threading.current_thread().name,
                                  time.time(), # time.asctime(time.localtime(time.time())),
                                  cls, string)
        if self.log is not None:
            self.log.write("[%s] %s %s: %s" % (self.prefix + '_' + threading.current_thread().name,
                                               time.time(), # time.asctime(time.localtime(time.time())),
                                               cls, string))
            self.log.flush()

    def close(self):
        if self.log is not None:
            self.log.close()
# PERFORMANCE MEASUREMENT UTILS
# Maps "timerkey-timerno" -> [start_time, stop_time]; stop_time is 0 while
# the timer is still running.
timers = {}

def starttimer(timerkey, timerno):
    """Start timer (timerkey, timerno) unless it was already started."""
    global timers
    index = "%s-%s" % (str(timerkey),str(timerno))
    # has_key is Python 2 only.
    if not timers.has_key(index):
        timers[index] = [time.time(), 0]
def endtimer(timerkey, timerno):
    """Record the stop time for timer (timerkey, timerno), once."""
    global timers
    index = "%s-%s" % (str(timerkey),str(timerno))
    try:
        # Only the first stop is kept; later calls are no-ops.
        if timers[index][1] == 0:
            timers[index][1] = time.time()
    except:
        # Timer was never started: report and continue.
        print "Can't stop timer %s %s." % (str(timerkey),str(timerno))
def dumptimers(numreplicas, numacceptors, ownertype, outputdict):
    """Write all collected timer durations to a per-node-type output file,
    falling back to the current directory if *outputdict* is unwritable."""
    global timers
    # NOTE(review): filename is unbound when ownertype is neither
    # NODE_REPLICA nor NODE_ACCEPTOR -- confirm callers never pass others.
    if ownertype == NODE_REPLICA:
        filename = "output/replica/%s-%s" % (str(numreplicas), str(numacceptors))
    elif ownertype == NODE_ACCEPTOR:
        filename = "output/acceptor/%s-%s" % (str(numreplicas), str(numacceptors))
    try:
        outputfile = open(outputdict+filename, "w")
    except:
        outputfile = open("./"+filename, "w")
    for index,numbers in timers.iteritems():
        timerkey, timerno = index.rsplit("-")
        # Skip timers that were never stopped (would yield a negative span).
        if not numbers[1]-numbers[0] < 0:
            outputfile.write("%s:\t%s\t%s\t%s\n" % (str(timerno),
                                                    str(numreplicas),
                                                    str(numacceptors),
                                                    str(numbers[1]-numbers[0])))
    outputfile.close()
def starttiming(fn):
    """
    Decorator used to start timing.
    Keeps track of the count for the first and second calls.
    """
    def new(*args, **kw):
        obj = args[0]
        # Record the wall-clock time of the first and second invocations.
        if obj.firststarttime == 0:
            obj.firststarttime = time.time()
        elif obj.secondstarttime == 0:
            obj.secondstarttime = time.time()
            # NOTE(review): profile_on is not defined/imported in this
            # module -- confirm it is provided elsewhere at runtime.
            profile_on()
        return fn(*args, **kw)
    return new
def endtiming(fn):
    """
    Decorator used to end timing.
    Keeps track of the count for the first and second calls.
    """
    # Number of timed iterations before the results are reported.
    NITER = 10000
    def new(*args, **kw):
        ret = fn(*args, **kw)
        obj = args[0]
        if obj.firststoptime == 0:
            obj.firststoptime = time.time()
        elif obj.secondstoptime == 0:
            obj.secondstoptime = time.time()
        elif obj.count == NITER:
            # Final iteration: report per-request and total time, dump
            # profiler stats, then terminate the process.
            now = time.time()
            total = now - obj.secondstarttime
            perrequest = total/NITER
            filename = "output/%s-%s" % (str(len(obj.groups[NODE_REPLICA])+1),
                                         str(len(obj.groups[NODE_ACCEPTOR])))
            outputfile = open("./"+filename, "a")
            # numreplicas #numacceptors #perrequest #total
            outputfile.write("%s\t%s\t%s\t%s\n" % (str(len(obj.groups[NODE_REPLICA])+1),
                                                   str(len(obj.groups[NODE_ACCEPTOR])),
                                                   str(perrequest), str(total)))
            outputfile.close()
            obj.count += 1
            sys.stdout.flush()
            # NOTE(review): profile_off/get_profile_stats are not defined in
            # this module -- confirm they are provided elsewhere at runtime.
            profile_off()
            profilerdict = get_profile_stats()
            # Sort profiler entries by cumulative cost (Python 2 tuple-
            # unpacking lambda), print, then exit hard.
            for key, value in sorted(profilerdict.iteritems(),
                                     key=lambda (k,v): (v[2],k)):
                print "%s: %s" % (key, value)
            time.sleep(10)
            sys.stdout.flush()
            os._exit(0)
        else:
            obj.count += 1
        return ret
    return new
def throughput_test(fn):
    """Decorator used to measure throughput."""
    def new(*args, **kw):
        ret = fn(*args, **kw)
        obj = args[0]
        obj.throughput_runs += 1
        # Time runs 100..1100 (1000 requests), then report and exit.
        if obj.throughput_runs == 100:
            obj.throughput_start = time.time()
        elif obj.throughput_runs == 1100:
            obj.throughput_stop = time.time()
            totaltime = obj.throughput_stop - obj.throughput_start
            print "********************************************"
            print "TOTAL: ", totaltime
            print "TPUT: ", 1000/totaltime, "req/s"
            print "********************************************"
            obj._graceexit(1)
        return ret
    return new
| [
"[email protected]"
] | |
ae1c3a9691da7ed4f10ee68e80335c727dd44e27 | 4436b5d11505b30807b11d647936d1dc76d28200 | /glucose/serializers.py | 56b36e4b952d4a322b8a888bd721bd803e1f4233 | [] | no_license | JulenEA/una-health-test | 5ffeeba351c5b7bef9ef24cb84e112927e02ef29 | 736f27c1408ecdf3c4ed8db61b842573a014cee0 | refs/heads/master | 2023-09-06T02:29:18.860553 | 2021-11-15T14:17:34 | 2021-11-15T14:17:34 | 428,292,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | from rest_framework import serializers
class GlucoseLevelSerializer(serializers.ModelSerializer):
    """Serializes a glucose level record, exposing only its measured value."""

    class Meta:
        # NOTE(review): DRF's ModelSerializer expects `model` to be the model
        # class itself (e.g. imported from the app's models module), not the
        # string "GlucoseLevel" -- confirm and fix the reference.
        model = "GlucoseLevel"
        fields = ["value"]
"[email protected]"
] | |
5316dfde2969474c4777ce22b026d1c674ccf87f | 356c0ef1a1560bf885bab80ee047236272380355 | /第五周的作业.py | 3f811714de09a1a7b834c1e25bd06679c5c839d9 | [] | no_license | q362052264/-4 | ee487329a432b90a5177b7a6a84e3078dcd95645 | 896377823e135563cff9bfe066daea16e1d22e32 | refs/heads/master | 2020-03-28T09:20:56.547491 | 2018-11-17T15:15:52 | 2018-11-17T15:15:52 | 148,030,501 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,351 | py | #1.
from functools import partial

def mul(x,y):
    """Return the product of x and y."""
    return x*y

# Doubling function: mul with its first argument fixed to 2.
a=partial(mul,2)
#2.
def add(x,y,z):
    """Return the sum of the three arguments."""
    return x+y+z

from functools import partial

# All three arguments pre-bound: addA() evaluates to 6.
addA=partial(add,x=1,y=2,z=3)
addA()
#2.2
def add(x,*args):
    """Sum x and any extra arguments, printing and returning the total."""
    sum1=x
    for i in args:
        sum1=sum1+i
    print(f"sum={sum1}")
    return sum1

def add1(x,*args):
    """Curried adder: each call prints the running total and returns a
    partial so the chain can continue."""
    z=add(x,*args)
    return partial(add1,z)

# Both forms print the running sums while chaining calls.
add1(1)(2)(3)
add1(1,2,3)(4)
#3.
import time  # BUG FIX: `time` is used below but was never imported.

def timer(func):
    """Decorator that prints how long each call to *func* takes."""
    def Wrapper(*args, **akg):
        start = time.time()
        x = func(*args, **akg)
        end = time.time()
        print(end - start)  # elapsed seconds
        return x
    return Wrapper

@timer
def add1(x, y):
    """Return x + y (timed)."""
    return x + y
#4.
def timer2(func):
    """Decorator that catches any exception from *func*, prints it, and
    returns None instead of raising."""
    def Wrapper(*args,**akg):
        try:
            x=func(*args,**akg)
        except Exception as e:
            print(e)
            return None
        return x
    return Wrapper

@timer2
def chu(*args,**akg):
    """Divide args[0]**2 by every positional argument in turn."""
    sum1=args[0]**2
    print(args)
    for i in args:
        sum1=sum1/i
    return sum1
#5.
import time  # BUG FIX: `time` is used below but was never imported.

def timer3(func):
    """Decorator that times *func* and prints 'Done <elapsed seconds>'."""
    def Wrapper(*args, **akg):
        start = time.time()
        x = func(*args, **akg)
        end = time.time()
        thiss = end - start
        print(f'Done {thiss}')
        return x
    return Wrapper

@timer3
def chu(*args, **akg):
    """Divide args[0]**2 by every positional argument in turn (timed)."""
    sum1 = args[0] ** 2
    print(args)
    for i in args:
        sum1 = sum1 / i
    return sum1
#1.
def timer4(func):
    """Decorator that upper-cases every string positional argument that is
    not already all upper-case before calling *func*."""
    def Wrapper(*args,**akg):
        print(type(args))
        new_args=[]
        for i in range(len(args)):
            print(type(args[i]))
            if type(args[i])==type("aa"):
                if not args[i].isupper():
                    print(args[i])
                    new_args.append(args[i].upper())
                else:
                    print(args[i])
                    new_args.append(args[i])
            # NOTE(review): non-string arguments are silently dropped from
            # new_args here -- confirm that is intended.
        print(new_args)
        x=func(*new_args,**akg)
        return x
    return Wrapper

@timer4
def add2(x,y):
    """Concatenate/add the two (upper-cased) arguments."""
    return x+y

add2("asdsad","ASDSADSA")
#2.
def t1(path):
    """Return a chunk-reader: t1(path)(nbytes) yields nbytes-sized chunks."""
    def t2(zijie):
        path1=path
        with open(path,'r') as f:
            while True:
                try:
                    t=f.read(zijie)
                except Exception as e:
                    print(e)
                    break
                yield t
        # NOTE(review): read() returns "" at EOF without raising, so the
        # loop yields empty strings forever; an `if not t: break` appears
        # to be missing -- confirm intent. (Message text is mojibake from
        # a GBK-encoded source; left byte-identical.)
        print("½áÊøÁË!")
    return t2

# Drive the generator: read the file two bytes at a time.
path="C:/new1.txt"
z=t1(path)
x=z(2)
x.send(None)
#3.
def t3():
    """Infinite counter generator: yields 1, 2, 3, ..."""
    sumkey=0
    while True:
        sumkey=sumkey+1
        yield sumkey

# Create the counter and advance it once (returns 1).
x=t3()
next(x)
#4.
# Generator expression yielding the even numbers from 1 to 50.
x=(i for i in range(1,51,1) if not i%2)
for i in x:
    print(i)
#5.
import os
from os.path import join,getsize

def x(path):
    """Yield the full path of every file under *path*, recursively."""
    for root,dirs,files in os.walk(path):
        for name in files:
            fname=join(root,name)
            yield fname

# Lazily walk the C: drive and fetch the first file path found.
z=x("c:\\")
next(z)
| [
"[email protected]"
] | |
5801a063d7176dd91cb959457aa7299976029703 | fda434f6e613d73c6e1acfe45d84130cfb37ec5f | /centriod_tracking/object_tracking.py | 73ce7dd643c6f9af3ecf191a0b1f3d3479c48e52 | [] | no_license | pengyue/computer_vision | a79588ab181393e9ff7bdc7f865030633d1cf26d | d7762e947d0d40590fc45103fa94c92b7f20cfd7 | refs/heads/master | 2023-04-20T02:10:56.692713 | 2021-05-16T18:24:29 | 2021-05-16T18:24:29 | 104,679,803 | 0 | 0 | null | 2017-09-30T19:20:33 | 2017-09-24T22:11:44 | Python | UTF-8 | Python | false | false | 3,141 | py | # import the necessary packages
from centroid_tracker.centroid_tracker import CentroidTracker
from imutils.video import VideoStream
import numpy as np
import argparse
import imutils
import time
import cv2
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--prototxt", required=True, help="path to Caffe 'deploy' prototxt file")
ap.add_argument("-m", "--model", required=True, help="path to Caffe pre-trained model")
ap.add_argument("-c", "--confidence", type=float, default=0.5, help="minimum probability to filter weak detections")
args = vars(ap.parse_args())
# initialize our centroid tracker and frame dimensions
ct = CentroidTracker()
(H, W) = (None, None)
# load our serialized model from disk
print("[INFO] loading model...")
net = cv2.dnn.readNetFromCaffe(args["prototxt"], args["model"])
# initialize the video stream and allow the camera sensor to warmup
print("[INFO] starting video stream...")
vs = VideoStream(src=0).start()
time.sleep(2.0)
# loop over the frames from the video stream
while True:
# read the next frame from the video stream and resize it
frame = vs.read()
frame = imutils.resize(frame, width=400)
# if the frame dimensions are None, grab them
if W is None or H is None:
(H, W) = frame.shape[:2]
# construct a blob from the frame, pass it through the network,
# obtain our output predictions, and initialize the list of
# bounding box rectangles
blob = cv2.dnn.blobFromImage(frame, 1.0, (W, H),
(104.0, 177.0, 123.0))
net.setInput(blob)
detections = net.forward()
rects = []
# loop over the detections
for i in range(0, detections.shape[2]):
# filter out weak detections by ensuring the predicted
# probability is greater than a minimum threshold
if detections[0, 0, i, 2] > args["confidence"]:
# compute the (x, y)-coordinates of the bounding box for
# the object, then update the bounding box rectangles list
box = detections[0, 0, i, 3:7] * np.array([W, H, W, H])
rects.append(box.astype("int"))
# draw a bounding box surrounding the object so we can
# visualize it
(startX, startY, endX, endY) = box.astype("int")
cv2.rectangle(frame, (startX, startY), (endX, endY), (0, 255, 0), 2)
# update our centroid tracker using the computed set of bounding
# box rectangles
objects = ct.update(rects)
# loop over the tracked objects
for (objectID, centroid) in objects.items():
# draw both the ID of the object and the centroid of the
# object on the output frame
text = "ID {}".format(objectID)
cv2.putText(frame, text, (centroid[0] - 10, centroid[1] - 10),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
cv2.circle(frame, (centroid[0], centroid[1]), 4, (0, 255, 0), -1)
# show the output frame
cv2.imshow("Frame", frame)
key = cv2.waitKey(1) & 0xFF
# if the `q` key was pressed, break from the loop
if key == ord("q"):
break
# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()
| [
"[email protected]"
] | |
75f6503d0e78a5ad7eadd5a4640cd36be9faab91 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_27257.py | dd5ad1d210030a0ef846632ef623d71976c3a33c | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 79 | py | # Ipython notebook caching issue
%load_ext autoreload
%autoreload 2
import ...
| [
"[email protected]"
] | |
116d66a4f7efaff3dced2521575e4fff667ad408 | 6bab6dd66e8d06cd47217a1d9c8e4176018ef381 | /ServerSide/twitter sentiment analysis/sentiment_mod.py | 7ec2016503b0d7acd616729b03c8b0d7d8ba8311 | [] | no_license | ni3aswal/OneLifeRescue | e3fad0c96c0758e6b333a5abe7057eb2bcad92b3 | a8b519665fdc703d42d1b3edf17e4306f7c6c2a0 | refs/heads/master | 2021-06-30T14:18:20.171693 | 2019-07-21T11:05:13 | 2019-07-21T11:05:13 | 151,963,028 | 0 | 0 | null | 2020-09-04T23:03:34 | 2018-10-07T16:48:54 | JavaScript | UTF-8 | Python | false | false | 2,792 | py | import nltk
import random
#from nltk.corpus import movie_reviews
from nltk.classify.scikitlearn import SklearnClassifier
import pickle
from sklearn.naive_bayes import MultinomialNB, BernoulliNB
from sklearn.linear_model import LogisticRegression, SGDClassifier
from sklearn.svm import SVC, LinearSVC, NuSVC
from nltk.classify import ClassifierI
from statistics import mode
from nltk.tokenize import word_tokenize
class VoteClassifier(ClassifierI):
def __init__(self, *classifiers):
self._classifiers = classifiers
def classify(self, features):
votes = []
for c in self._classifiers:
v = c.classify(features)
votes.append(v)
return mode(votes)
def confidence(self, features):
votes = []
for c in self._classifiers:
v = c.classify(features)
votes.append(v)
choice_votes = votes.count(mode(votes))
conf = choice_votes / len(votes)
return conf
documents_f = open("documents.pickle", "rb")
documents = pickle.load(documents_f)
documents_f.close()
word_features5k_f = open("word_features5k.pickle", "rb")
word_features = pickle.load(word_features5k_f)
word_features5k_f.close()
def find_features(document):
words = word_tokenize(document)
features = {}
for w in word_features:
features[w] = (w in words)
return features
featuresets_f = open("featuresets.pickle", "rb")
featuresets = pickle.load(featuresets_f)
featuresets_f.close()
random.shuffle(featuresets)
print(len(featuresets))
testing_set = featuresets[10000:]
training_set = featuresets[:10000]
open_file = open("originalnaivebayes5k.pickle", "rb")
classifier = pickle.load(open_file)
open_file.close()
open_file = open("MNB_classifier5k.pickle", "rb")
MNB_classifier = pickle.load(open_file)
open_file.close()
open_file = open("BernoulliNB_classifier5k.pickle", "rb")
BernoulliNB_classifier = pickle.load(open_file)
open_file.close()
open_file = open("LogisticRegression_classifier5k.pickle", "rb")
LogisticRegression_classifier = pickle.load(open_file)
open_file.close()
open_file = open("LinearSVC_classifier5k.pickle", "rb")
LinearSVC_classifier = pickle.load(open_file)
open_file.close()
open_file = open("SGDC_classifier5k.pickle", "rb")
SGDC_classifier = pickle.load(open_file)
open_file.close()
voted_classifier = VoteClassifier(
classifier,
LinearSVC_classifier,
MNB_classifier,
BernoulliNB_classifier,
LogisticRegression_classifier)
def sentiment(text):
feats = find_features(text)
return voted_classifier.classify(feats),voted_classifier.confidence(feats) | [
"[email protected]"
] | |
1e91a5ced33f3e657e50a075b92b9a3991502e26 | 68463eaf559a3063ac9d490dc36676d714f817c5 | /statsy/cache.py | aaca9dc5a1ac86124080f69509094edda091b825 | [
"MIT"
] | permissive | zhebrak/django-statsy | 6ba1e94e19da0ed8d25ed1f9f5b32f33ddafc83e | d74845e75c78842fc3890db123ab6e36fe2d3973 | refs/heads/master | 2020-05-25T13:37:02.105397 | 2019-03-29T21:59:41 | 2019-03-29T21:59:41 | 27,049,965 | 60 | 11 | MIT | 2019-03-29T21:59:42 | 2014-11-23T22:12:29 | JavaScript | UTF-8 | Python | false | false | 719 | py | # coding: utf-8
from django.core.cache import cache as django_cache
from statsy.settings import CACHE_TIMEOUT
class StatsyCache(object):
@staticmethod
def get(key):
return django_cache.get(key)
@staticmethod
def set(key, value, timeout=CACHE_TIMEOUT):
return django_cache.set(key, value, timeout)
def setdefault(self, key, default, timeout=CACHE_TIMEOUT):
value = self.get(key)
if not value:
if callable(default):
default = default()
self.set(key, default, timeout)
value = default
return value
@staticmethod
def delete(key):
return django_cache.delete(key)
cache = StatsyCache()
| [
"[email protected]"
] | |
63fa5496b2ea77bdd19fad000cb490f1f4704501 | 77db7aa2b94cac35e62ea325144e8f40d7412f58 | /standard_library/email/message_text.py | 8ca2255d3ee295022f038f4327719a1f5e6b8655 | [
"MIT"
] | permissive | eledata/PythonStudy | a9081988133b375fd484ec9f6d79078de62e12ed | 75e51d6c08e8c5464d421408cfc830fcb6720a0a | refs/heads/master | 2021-07-06T03:26:13.266239 | 2020-09-28T03:02:01 | 2020-09-28T03:02:01 | 187,608,616 | 0 | 0 | null | 2019-05-20T09:21:43 | 2019-05-20T09:21:42 | null | UTF-8 | Python | false | false | 1,009 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 17-8-28 下午4:31
# @Author : Tom.Lee
# @File : text_message.py
# @Product : PyCharm
# @Source :
"""创建并发送简单文本消息"""
import smtplib
from email.mime.text import MIMEText
# # config email
me = ''
you = ''
smtp_host = ''
smtp_port = 25
passwd = ''
textfile = 'textfile'
# Open a plain text file for reading. For this example, assume that
# the text file contains only ASCII characters.
fp = open(textfile, 'rb')
# Create a text/plain message
msg = MIMEText(fp.read(), 'text', 'utf-8')
fp.close()
# me == the sender's email address
# you == the recipient's email address
msg['Subject'] = 'The contents of %s' % textfile
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP()
s.connect(host=smtp_host, port=smtp_port)
s.set_debuglevel(1)
s.login(me, passwd)
s.sendmail(me, [you], msg.as_string())
s.quit()
| [
"[email protected]"
] | |
bfee887f18fe65fae065f030fc687ef7d3121180 | 306f0cdeabad9f14c06ec3e51998111521047368 | /EdgeDetectionSobel.py | 4568a8a10e547dba554a477cc58e2122cb5923d9 | [] | no_license | jinkaido1/Computer_Vision-Image-Processing-CSE-573 | a0817cf5c0b4289e484fb82eef88eeefffbdff86 | d8b6c614b41d678c40380ad6bbea21bb1af1e9af | refs/heads/master | 2022-01-20T11:42:02.646076 | 2019-05-27T19:54:20 | 2019-05-27T19:54:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,576 | py | import numpy as np
import cv2
import sys
import os
def create_zero_array(name_of_array):
name_of_array = [[0 for x in range(image_height)] for y in range(image_width )]
name_of_array = np.array(name_of_array)
return name_of_array
# Convolve function - takes the kernel as an argument
def Convolve(kernel,output):
for x in range(image.shape[1]):
for y in range(image.shape[0]):
output[y,x] = (kernel*image_with_padding[y:y+3,x:x+3]).sum()
return output
# Finding the max pixel value
def findmax(matrix):
max1 = 0
for i in range(matrix.shape[0]):
for j in range(matrix.shape[1]):
if(max1 < matrix[i,j]):
max1 = matrix[i,j]
return max1
def cleaning_up_image(clean_image_matrix,image,max_value):
for x in range(image.shape[1]):
for y in range(image.shape[0]):
clean_image_matrix[y,x] = abs(image[y,x])/max_value
#print(clean_sobel_x[y,x])
return clean_image_matrix
def normalize(matrix_to_be_normalized):
matrix_to_be_normalized = (matrix_to_be_normalized * 255).astype("uint8")
return matrix_to_be_normalized
#Sobel x kernel
sobelX = np.array((
[-1, 0, 1],
[-2, 0, 2],
[-1, 0, 1]), dtype="int")
#Sobel y kernel
sobelY = np.array((
[-1, -2, -1],
[0, 0, 0],
[1, 2, 1]), dtype="int")
# Read the image
image = cv2.imread(sys.argv[1],0)
cv2.imshow("Original Image",image)
image_height = image.shape[1]
image_width = image.shape[0]
# Padding the image
image_with_padding = [[0 for x in range(image_height +2)] for y in range(image_width +2)]
image_with_padding = np.array(image_with_padding)
clean_sobel_x = []
clean_sobel_x = create_zero_array(clean_sobel_x)
clean_sobel_x = clean_sobel_x.astype('double')
clean_sobel_y = []
clean_sobel_y = create_zero_array(clean_sobel_y)
clean_sobel_y = clean_sobel_y.astype('double')
# Padding Size- we need one pixel padding hence x and y are 1
x=1
y=1
image_with_padding[x:image.shape[0]+x, y:image.shape[1]+y] = image
sobel_y = []
sobel_y = create_zero_array(sobel_y)
sobel_y = Convolve(sobelY,sobel_y)
sobel_x = []
sobel_x = create_zero_array(sobel_x)
sobel_x = Convolve(sobelX,sobel_x)
cv2.imshow("Sobelx output",sobel_x.astype("uint8"))
cv2.imshow("Sobely output",sobel_y.astype("uint8"))
max1 = findmax(sobel_x)
max2 = findmax(sobel_y)
clean_sobel_x = cleaning_up_image(clean_sobel_x,sobel_x,max1)
clean_sobel_y = cleaning_up_image(clean_sobel_y,sobel_y,max2)
clean_sobel_x = normalize(clean_sobel_x)
clean_sobel_y = normalize(clean_sobel_y)
cv2.imshow("clean_sobel_x",clean_sobel_x)
cv2.imshow("clean_sobel_y",clean_sobel_y)
cv2.waitKey(0)
cv2.destroyAllWindows() | [
"[email protected]"
] | |
632eeb81e00fb2841427df6e05e7eb5104583de6 | f4c88c87cdf6994b4943852481a326c8fe5e808f | /pw1.py | b0a2e24b9691f73403ff36fb1339dcaa31176cb7 | [] | no_license | MeganWang18/homework | db41691804f608393c9d6d5d296d2e081f049c02 | ff2052022e268a2cf35a47bc5ce2f482e2197f54 | refs/heads/master | 2022-04-12T03:10:49.851607 | 2020-04-11T07:07:42 | 2020-04-11T07:07:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,534 | py | #!/Library/Frameworks/Python.framework/Versions/3.8/bin/python3
import re
from pwn import *
def getMidChar(lowChar=None, highChar=None):
midChar = chr(int((ord(lowChar) + ord(highChar))/2))
if midChar == lowChar:
midChar = chr(ord(midChar)-1)
elif midChar == highChar:
midChar = chr(ord(midChar)+1)
return(midChar)
pwLength = 20
pw = ''
r = remote("twinpeaks.cs.ucdavis.edu",30004)
for i in range(0, 19):
cnt = 0
highChar = 'z'
lowChar = 'a'
newChar = 'a'
previousPw = ''
while True:
cnt += 1
print("Password sent #%s: %s" % (cnt, pw+newChar))
input = r.send("%s\r\n" % pw+newChar)
response = r.recvline_contains(b"strcmp returned").decode("utf-8")
print("Response: %s" % response)
m = re.search("strcmp returned (.+?) instead of", response)
if m.group(1) == '-1':
lowChar = newChar
previousPw = pw+newChar
print("previousPw: " + previousPw)
newChar = getMidChar(lowChar, highChar)
elif m.group(1) == '1':
highChar = newChar
newChar = getMidChar(lowChar, highChar)
if pw+newChar == previousPw:
pw = previousPw
print('=' * 50)
break
else:
print("Password: %s" % pw)
i = 19 # force exit for loop
break
print("Low: %s Middile: %s High: %s" % (lowChar, newChar, highChar))
print('-' * 25)
r.interactive()
| [
"[email protected]"
] | |
e822734452788469381fe098b7e95cf0e104855e | c9f17f091a4f86442def1cd5b20c5ea461a970ea | /neo/Prompt/Commands/tests/test_send_commands.py | c0dbced8b11b19fa20427dc1875391e0a1997336 | [
"MIT",
"LicenseRef-scancode-free-unknown"
] | permissive | LysanderGG/neo-python | 147fc5346f7017c153edede755c6de95e6dadda8 | d6ba2f6407efadd943910814d65fc98f57093974 | refs/heads/development | 2020-04-03T02:43:57.493330 | 2019-01-16T13:59:17 | 2019-01-16T13:59:17 | 154,965,219 | 0 | 0 | MIT | 2019-01-16T14:02:16 | 2018-10-27T13:02:46 | Python | UTF-8 | Python | false | false | 25,704 | py | from neo.Utils.WalletFixtureTestCase import WalletFixtureTestCase
from neo.Wallets.utils import to_aes_key
from neo.Implementations.Wallets.peewee.UserWallet import UserWallet
from neo.Core.Blockchain import Blockchain
from neocore.UInt160 import UInt160
from neo.Prompt.Commands.WalletImport import ImportToken
from neo.Prompt.Utils import get_tx_attr_from_args
from neo.Prompt.Commands import Send, Wallet
from neo.Prompt.PromptData import PromptData
import shutil
from mock import patch
import json
from io import StringIO
from neo.Prompt.PromptPrinter import pp
class UserWalletTestCase(WalletFixtureTestCase):
    """Tests for the prompt `send` / `sendmany` wallet commands using fixture wallet 1."""

    # Script hash / address pair of the fixture wallet that acts as the sender.
    wallet_1_script_hash = UInt160(data=b'\x1c\xc9\xc0\\\xef\xff\xe6\xcd\xd7\xb1\x82\x81j\x91R\xec!\x8d.\xc0')
    wallet_1_addr = 'AJQ6FoaSXDFzA6wLnyZ1nFN7SGSN2oNTc3'
    # Watch-only address used as the receiving side in most tests.
    import_watch_addr = UInt160(data=b'\x08t/\\P5\xac-\x0b\x1c\xb4\x94tIyBu\x7f1*')
    watch_addr_str = 'AGYaEi3W6ndHPUmW7T12FFfsbQ6DWymkEm'
    # Cached UserWallet instance shared across tests (see GetWallet1).
    _wallet1 = None

    @classmethod
    def GetWallet1(cls, recreate=False):
        """Return the cached fixture wallet.

        When first requested or when `recreate` is True, the fixture wallet file
        is copied to the working destination and re-opened, giving the test a
        clean wallet state.
        """
        if cls._wallet1 is None or recreate:
            shutil.copyfile(cls.wallet_1_path(), cls.wallet_1_dest())
            cls._wallet1 = UserWallet.Open(UserWalletTestCase.wallet_1_dest(),
                                           to_aes_key(UserWalletTestCase.wallet_1_pass()))
        return cls._wallet1

    @classmethod
    def tearDown(cls):
        # Reset the globally-opened wallet after every test so tests stay independent.
        PromptData.Wallet = None
def test_send_neo(self):
with patch('sys.stdout', new=StringIO()) as mock_print:
with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
PromptData.Wallet = self.GetWallet1(recreate=True)
args = ['send', 'neo', self.watch_addr_str, '50']
res = Wallet.CommandWallet().execute(args)
self.assertTrue(res)
self.assertIn("Sending with fee: 0.0", mock_print.getvalue())
def test_send_gas(self):
with patch('sys.stdout', new=StringIO()) as mock_print:
with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
PromptData.Wallet = self.GetWallet1(recreate=True)
args = ['send', 'gas', self.watch_addr_str, '5']
res = Wallet.CommandWallet().execute(args)
self.assertTrue(res)
self.assertIn("Sending with fee: 0.0", mock_print.getvalue())
    def test_send_with_fee_and_from_addr(self):
        """Sending with explicit --from-addr and --fee yields a tx whose outputs and
        net fee match the requested values."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['send', 'neo', self.watch_addr_str, '1', '--from-addr=AJQ6FoaSXDFzA6wLnyZ1nFN7SGSN2oNTc3', '--fee=0.005']
                res = Wallet.CommandWallet().execute(args)
                self.assertTrue(res)  # verify successful tx
                json_res = res.ToJson()
                self.assertEqual(self.watch_addr_str, json_res['vout'][0]['address'])  # verify correct address_to
                self.assertEqual(self.wallet_1_addr, json_res['vout'][1]['address'])  # verify correct address_from
                self.assertEqual(json_res['net_fee'], "0.005")  # verify correct fee
                self.assertIn("Sending with fee: 0.005", mock_print.getvalue())
def test_send_no_wallet(self):
with patch('sys.stdout', new=StringIO()) as mock_print:
args = ["send", "neo", self.wallet_1_addr, '5']
Wallet.CommandWallet().execute(args)
self.assertIn("Please open a wallet", mock_print.getvalue())
    def test_send_bad_args(self):
        """Too few positional arguments prints a usage error and returns falsy."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'neo', self.watch_addr_str]  # too few args
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Please specify the required parameters", mock_print.getvalue())

    def test_send_bad_assetid(self):
        """An unknown asset name is rejected with 'Asset id not found'."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'blah', self.watch_addr_str, '12']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Asset id not found", mock_print.getvalue())

    def test_send_bad_address_to(self):
        """A destination address of the wrong length fails ToScriptHash conversion."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            address_to = 'AGYaEi3W6ndHPUmW7T12FFfsbQ6DWymkE'  # address_to is too short causing ToScriptHash to fail
            args = ['send', 'neo', address_to, '12']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Not correct Address, wrong length", mock_print.getvalue())

    def test_send_bad_address_from(self):
        """A --from-addr of the wrong length fails ToScriptHash conversion."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            address_from = '--from-addr=AJQ6FoaSXDFzA6wLnyZ1nFN7SGSN2oNTc'  # address_from is too short causing ToScriptHash to fail
            args = ['send', 'neo', self.watch_addr_str, '12', address_from]
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Not correct Address, wrong length", mock_print.getvalue())

    def test_send_negative_amount(self):
        """A negative amount is rejected as an invalid amount format."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'neo', self.watch_addr_str, '-12']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("invalid amount format", mock_print.getvalue())

    def test_send_zero_amount(self):
        """A zero amount is rejected explicitly."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'neo', self.watch_addr_str, '0']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Amount cannot be 0", mock_print.getvalue())

    def test_send_weird_amount(self):
        """A non-numeric amount string is rejected as an invalid amount format."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'neo', self.watch_addr_str, '12.abc3']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("invalid amount format", mock_print.getvalue())

    def test_send_bad_precision_amount(self):
        """A fractional NEO amount (NEO is indivisible) is rejected for wrong precision."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'neo', self.watch_addr_str, '12.01']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("incorrect amount precision", mock_print.getvalue())

    def test_send_negative_fee(self):
        """A negative --fee value is rejected as an invalid amount format."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'neo', self.watch_addr_str, '12', '--fee=-0.005']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("invalid amount format", mock_print.getvalue())

    def test_send_weird_fee(self):
        """A non-numeric --fee value is rejected as an invalid amount format."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'neo', self.watch_addr_str, '12', '--fee=0.0abc']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("invalid amount format", mock_print.getvalue())
    def test_send_token_bad(self):
        """Sending with an unknown token symbol fails contract-hash lookup."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            token_hash = 'f8d448b227991cf07cb96a6f9c0322437f1599b9'
            ImportToken(PromptData.Wallet, token_hash)
            args = ['send', 'NEP5', self.watch_addr_str, '32']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Could not find the contract hash", mock_print.getvalue())

    def test_send_token_ok(self):
        """An imported NEP-5 token (NXT4) can be sent; the transfer summary is printed."""
        with patch('neo.Prompt.Commands.Tokens.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
            with patch('sys.stdout', new=StringIO()) as mock_print:
                PromptData.Wallet = self.GetWallet1(recreate=True)
                token_hash = '31730cc9a1844891a3bafd1aa929a4142860d8d3'
                ImportToken(PromptData.Wallet, token_hash)
                args = ['send', 'NXT4', self.watch_addr_str, '30', '--from-addr=%s' % self.wallet_1_addr]
                res = Wallet.CommandWallet().execute(args)
                self.assertTrue(res)
                self.assertIn("Will transfer 30.00000000 NXT4 from AJQ6FoaSXDFzA6wLnyZ1nFN7SGSN2oNTc3 to AGYaEi3W6ndHPUmW7T12FFfsbQ6DWymkEm",
                              mock_print.getvalue())
    def test_insufficient_funds(self):
        """Requesting more GAS than the wallet holds fails with 'Insufficient funds'."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'gas', self.watch_addr_str, '72620']
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Insufficient funds", mock_print.getvalue())

    def test_bad_password(self):
        """An incorrect wallet password aborts the send."""
        with patch('neo.Prompt.Commands.Send.prompt', side_effect=['blah']):
            with patch('sys.stdout', new=StringIO()) as mock_print:
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['send', 'neo', self.watch_addr_str, '50']
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("Incorrect password", mock_print.getvalue())
    @patch.object(Send, 'gather_signatures')
    def test_owners(self, mock):
        """Supplying --owners triggers the multi-signature gathering path."""
        with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'gas', self.wallet_1_addr, '2', "--owners=['AXjaFSP23Jkbe6Pk9pPGT6NBDs1HVdqaXK','APRgMZHZubii29UXF9uFa6sohrsYupNAvx']"]
            Wallet.CommandWallet().execute(args)
            self.assertTrue(mock.called)
    def test_attributes(self):
        """A single --tx-attr JSON object is attached to the transaction."""
        with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'gas', self.watch_addr_str, '2', '--tx-attr={"usage":241,"data":"This is a remark"}']
            res = Wallet.CommandWallet().execute(args)
            self.assertTrue(res)
            self.assertEqual(2, len(
                res.Attributes))  # By default the script_hash of the transaction sender is added to the TransactionAttribute list, therefore the Attributes length is `count` + 1

    def test_multiple_attributes(self):
        """A --tx-attr JSON array attaches every listed attribute."""
        with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'gas', self.watch_addr_str, '2', '--tx-attr=[{"usage":241,"data":"This is a remark"},{"usage":242,"data":"This is a remark 2"}]']
            res = Wallet.CommandWallet().execute(args)
            self.assertTrue(res)
            self.assertEqual(3, len(res.Attributes))

    def test_bad_attributes(self):
        """A malformed --tx-attr value is ignored; only the default sender attribute remains."""
        with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['send', 'gas', self.watch_addr_str, '2', '--tx-attr=[{"usa:241"data":his is a remark"}]']
            res = Wallet.CommandWallet().execute(args)
            self.assertTrue(res)
            self.assertEqual(1, len(res.Attributes))
def test_utils_attr_str(self):
args = ["--tx-attr=[{'usa:241'data':his is a remark'}]"]
with self.assertRaises(Exception) as context:
args, txattrs = get_tx_attr_from_args(args)
self.assertTrue('could not convert object' in context.exception)
self.assertEqual(len(args), 0)
self.assertEqual(len(txattrs), 0)
def test_utilst_bad_type(self):
args = ["--tx-attr=bytearray(b'\x00\x00')"]
with self.assertRaises(Exception) as context:
args, txattr = get_tx_attr_from_args(args)
self.assertTrue('could not convert object' in context.exception)
self.assertEqual(len(args), 0)
self.assertEqual(len(txattr), 0)
    def test_fails_to_sign_tx(self):
        """If wallet signing fails, the user is told to complete signing with `sign`."""
        with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
            with patch('neo.Wallets.Wallet.Wallet.Sign', return_value=False):
                with patch('sys.stdout', new=StringIO()) as mock_print:
                    PromptData.Wallet = self.GetWallet1(recreate=True)
                    args = ['send', 'gas', self.watch_addr_str, '2']
                    res = Wallet.CommandWallet().execute(args)
                    self.assertFalse(res)
                    self.assertIn(
                        "Transaction initiated, but the signature is incomplete. Use the `sign` command with the information below to complete signing",
                        mock_print.getvalue())

    def test_fails_to_relay_tx(self):
        """If the node refuses to relay the tx, the failure is reported."""
        with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
            with patch('neo.Prompt.Commands.Send.NodeLeader.Relay', return_value=False):
                with patch('sys.stdout', new=StringIO()) as mock_print:
                    PromptData.Wallet = self.GetWallet1(recreate=True)
                    args = ['send', 'gas', self.watch_addr_str, '2']
                    res = Wallet.CommandWallet().execute(args)
                    self.assertFalse(res)
                    self.assertIn("Could not relay tx", mock_print.getvalue())

    def test_could_not_send(self):
        """An unexpected exception while building the tx is caught and reported."""
        # mocking traceback module to avoid stacktrace printing during test run
        with patch('neo.Prompt.Commands.Send.traceback'):
            with patch('sys.stdout', new=StringIO()) as mock_print:
                with patch('neo.Prompt.Commands.Send.prompt', side_effect=[UserWalletTestCase.wallet_1_pass()]):
                    with patch('neo.Wallets.Wallet.Wallet.GetStandardAddress', side_effect=[Exception]):
                        PromptData.Wallet = self.GetWallet1(recreate=True)
                        args = ['send', 'gas', self.watch_addr_str, '2']
                        res = Wallet.CommandWallet().execute(args)
                        self.assertFalse(res)
                        self.assertIn("Could not send:", mock_print.getvalue())
    def test_sendmany_good_simple(self):
        """sendmany with two interactively-entered outputs creates a tx with both transfers."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt',
                       side_effect=["neo", self.watch_addr_str, "1", "gas", self.watch_addr_str, "1", UserWalletTestCase.wallet_1_pass()]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['sendmany', '2']
                res = Wallet.CommandWallet().execute(args)
                self.assertTrue(res)  # verify successful tx
                self.assertIn("Sending with fee: 0.0", mock_print.getvalue())
                json_res = res.ToJson()
                # check for 2 transfers
                transfers = 0
                for info in json_res['vout']:
                    if info['address'] == self.watch_addr_str:
                        transfers += 1
                self.assertEqual(2, transfers)
    def test_sendmany_good_complex(self):
        """sendmany with --from-addr, --change-addr and --fee produces correct outputs,
        change value (balance - amount - fee) and net fee."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt',
                       side_effect=["neo", "AXjaFSP23Jkbe6Pk9pPGT6NBDs1HVdqaXK", "1", "gas", "AXjaFSP23Jkbe6Pk9pPGT6NBDs1HVdqaXK", "1",
                                    UserWalletTestCase.wallet_1_pass()]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['sendmany', '2', '--from-addr=%s' % self.wallet_1_addr, '--change-addr=%s' % self.watch_addr_str, '--fee=0.005']
                # Look up the sender's GAS balance beforehand so the change output can be verified.
                address_from_account_state = Blockchain.Default().GetAccountState(self.wallet_1_addr).ToJson()
                address_from_gas = next(filter(lambda b: b['asset'] == '0x602c79718b16e442de58778e148d0b1084e3b2dffd5de6b7b16cee7969282de7',
                                               address_from_account_state['balances']))
                address_from_gas_bal = address_from_gas['value']

                res = Wallet.CommandWallet().execute(args)

                self.assertTrue(res)  # verify successful tx
                json_res = res.ToJson()
                self.assertEqual("AXjaFSP23Jkbe6Pk9pPGT6NBDs1HVdqaXK", json_res['vout'][0]['address'])  # verify correct address_to
                self.assertEqual(self.watch_addr_str, json_res['vout'][2]['address'])  # verify correct change address
                self.assertEqual(float(address_from_gas_bal) - 1 - 0.005, float(json_res['vout'][3]['value']))
                self.assertEqual('0.005', json_res['net_fee'])
                self.assertIn("Sending with fee: 0.005", mock_print.getvalue())
    def test_sendmany_no_wallet(self):
        """sendmany without an open wallet prints an instructive error."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            args = ['sendmany', '2']
            Wallet.CommandWallet().execute(args)
            self.assertIn("Please open a wallet", mock_print.getvalue())

    def test_sendmany_bad_args(self):
        """sendmany without the outgoing count prints a usage error."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['sendmany']  # too few args
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Please specify the required parameter", mock_print.getvalue())

    def test_sendmany_bad_outgoing(self):
        """An outgoing count below 1 is rejected."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['sendmany', '0']  # too few outgoing
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Outgoing number must be >= 1", mock_print.getvalue())

    def test_sendmany_weird_outgoing(self):
        """A non-integer outgoing count is rejected."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            PromptData.Wallet = self.GetWallet1(recreate=True)
            args = ['sendmany', '0.5']  # weird number outgoing
            res = Wallet.CommandWallet().execute(args)
            self.assertFalse(res)
            self.assertIn("Invalid outgoing number", mock_print.getvalue())

    def test_sendmany_bad_assetid(self):
        """An unknown asset entered interactively aborts the whole sendmany."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "blah", self.watch_addr_str, "1"]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['sendmany', '2']
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("Asset id not found", mock_print.getvalue())

    def test_sendmany_token(self):
        """NEP-5 tokens are not supported by sendmany and are rejected."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "NXT4", self.watch_addr_str, "32"]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                token_hash = '31730cc9a1844891a3bafd1aa929a4142860d8d3'
                ImportToken(PromptData.Wallet, token_hash)
                args = ['sendmany', "2", '--from-addr=%s' % self.wallet_1_addr]
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("sendmany does not support NEP5 tokens", mock_print.getvalue())
    def test_sendmany_bad_address_to(self):
        """A destination address of the wrong length aborts the sendmany."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt',
                       side_effect=["neo", self.watch_addr_str, "1", "gas", "AGYaEi3W6ndHPUmW7T12FFfsbQ6DWymkE", "1"]):  # address is too short
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['sendmany', '2']
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("Not correct Address, wrong length", mock_print.getvalue())

    def test_sendmany_negative_amount(self):
        """A negative amount for any output aborts the sendmany."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "gas", self.watch_addr_str, "-1"]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['sendmany', '2']
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("invalid amount format", mock_print.getvalue())

    def test_sendmany_zero_amount(self):
        """A zero amount for any output aborts the sendmany."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "gas", self.watch_addr_str, "0"]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['sendmany', '2']
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("Amount cannot be 0", mock_print.getvalue())

    def test_sendmany_weird_amount(self):
        """A non-numeric amount for any output aborts the sendmany."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "gas", self.watch_addr_str, "5.abc3"]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['sendmany', '2']
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("invalid amount format", mock_print.getvalue())

    def test_sendmany_bad_precision_amount(self):
        """A fractional NEO amount (NEO is indivisible) aborts the sendmany."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=["gas", self.watch_addr_str, "1", "neo", self.watch_addr_str, "5.01"]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                args = ['sendmany', '2']
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("incorrect amount precision", mock_print.getvalue())

    def test_sendmany_bad_address_from(self):
        """A --from-addr of the wrong length aborts the sendmany."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "gas", self.watch_addr_str, "1"]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                address_from = '--from-addr=AJQ6FoaSXDFzA6wLnyZ1nFN7SGSN2oNTc'  # address_from is too short causing ToScriptHash to fail
                args = ['sendmany', '2', address_from]
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("Not correct Address, wrong length", mock_print.getvalue())

    def test_sendmany_bad_change_address(self):
        """A --change-addr of the wrong length aborts the sendmany."""
        with patch('sys.stdout', new=StringIO()) as mock_print:
            with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "gas", self.watch_addr_str, "1"]):
                PromptData.Wallet = self.GetWallet1(recreate=True)
                change_address = '--change-addr=AGYaEi3W6ndHPUmW7T12FFfsbQ6DWymkE'  # change address is too short causing ToScriptHash to fail
                args = ['sendmany', '2', change_address]
                res = Wallet.CommandWallet().execute(args)
                self.assertFalse(res)
                self.assertIn("Not correct Address, wrong length", mock_print.getvalue())
def test_sendmany_negative_fee(self):
with patch('sys.stdout', new=StringIO()) as mock_print:
with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "gas", self.watch_addr_str, "1"]):
PromptData.Wallet = self.GetWallet1(recreate=True)
args = ['sendmany', '2', '--fee=-0.005']
res = Wallet.CommandWallet().execute(args)
self.assertFalse(res)
self.assertIn("invalid amount format", mock_print.getvalue())
def test_sendmany_keyboard_interrupt(self):
with patch('sys.stdout', new=StringIO()) as mock_print:
with patch('neo.Prompt.Commands.Send.prompt', side_effect=["neo", self.watch_addr_str, "1", "gas", KeyboardInterrupt]):
PromptData.Wallet = self.GetWallet1(recreate=True)
args = ['sendmany', '2']
res = Wallet.CommandWallet().execute(args)
self.assertFalse(res)
self.assertIn("Transaction cancelled", mock_print.getvalue())
| [
"[email protected]"
] | |
02ab97bdf11eab35132ba94b3fa12e4782ce20cf | 41e52022ab2dcf15d3795a9a60e24229ba9c9099 | /tools/analyse-io.py | da96de86b4e4435193235f767fd13229aeb0b4ad | [] | no_license | a-yandulski/TelemetryPoC | e114b86978795260858e77738bec3085bc327b94 | a2560f0f86136ec921a4bb3beab8dc76a6237131 | refs/heads/master | 2022-07-03T21:05:38.461402 | 2020-05-14T10:31:47 | 2020-05-14T10:31:47 | 255,569,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,485 | py | import json
import requests
import sys
import csv
import datetime
import time
now = int(datetime.datetime.today().timestamp() * 1000)
lookback = 7 * 24 * 3600 * 1000 # 7 days in milliseconds
services_url = "http://localhost:16686/api/services"
dependencies_url = f"http://localhost:16686/api/dependencies?endTs={now}&lookback={lookback}"
io_requests = []
io_dependencies = []
io_requests_and_deps = []
separator = ", "
def main(args):
global io_requests
global io_dependencies
global io_requests_and_deps
services = requests.get(services_url).json()
io_requests = list(filter(lambda x: x.startswith("IntelligentOffice.") or x.startswith("/"), services["data"]))
dependencies = requests.get(dependencies_url).json()
io_dependencies = list(filter(lambda x: x["parent"] != x["child"] and not (x["parent"].startswith("Microservice.") or x["parent"].startswith("Monolith.")), dependencies["data"]))
for request in io_requests:
request_with_deps = { }
request_with_deps["page"] = request
request_with_deps["uses_db"] = False
request_with_deps["uses_api"] = False
request_with_deps["api_dependencies"] = ""
request_dependencies = list(map(lambda x: x["child"], filter(lambda y: y["parent"] == request, io_dependencies)))
if len(request_dependencies) > 0:
request_dependencies.sort()
api_deps = list(filter(lambda x: x.startswith("Microservice.") or x.startswith("Monolith."), request_dependencies))
db_deps = list(filter(lambda x: x.startswith("SQL Server"), request_dependencies))
request_with_deps["uses_db"] = True if len(db_deps) > 0 else False
request_with_deps["uses_api"] = True if len(api_deps) > 0 else False
request_with_deps["api_dependencies"] = separator.join(api_deps)
io_requests_and_deps.append(request_with_deps)
io_requests_and_deps.sort(key = lambda x: x["page"])
for io_request in io_requests_and_deps:
print(f"{io_request['page']}\t{io_request['uses_db']}\t{io_request['uses_api']}\t{io_request['api_dependencies']}")
if len(io_requests_and_deps) == 0:
return
keys = io_requests_and_deps[0].keys()
with open('io_dependencies.csv', 'w+', newline='') as output_file:
writer = csv.DictWriter(output_file, keys)
writer.writeheader()
writer.writerows(io_requests_and_deps)
return
if __name__ == "__main__":
main(sys.argv[1:]) | [
"[email protected]"
] | |
cca02e1e5eb10c14810e2b242c4ef1a28857a41e | 05e5698d9c62f7ddc9661dc59bd0e7c7fc9bf07d | /blog/admin.py | 42aa040d04f7157def4549b23452b923e9555608 | [] | no_license | andresff50/ByteDist | f38d0078050299f19e9e302d15fbd90f6f1fe5d4 | 7ca19623a15e7b9cc553c31cdd1e8a36641489f5 | refs/heads/master | 2020-07-22T11:59:09.672995 | 2020-01-12T20:51:38 | 2020-01-12T20:51:38 | 207,194,694 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,078 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import Category, Tag, Post
# Register your models here.
class PostModelAdmin(admin.ModelAdmin):
list_display = ["title", "created_date", "published_date", "author", "c_view", "status"]
list_filter = ["created_date", "published_date", "status"]
search_fields = ["title", "content"]
prepopulated_fields = {'slug': ('title',)}
#readonly_fields = ['slug']
#date_hierarchy = 'published_date'
#ordering = ['status', 'published_date']
class Meta:
model = Post
class CategoryModelAdmin(admin.ModelAdmin):
list_display = ["title","orden"]
search_fields = ["title"]
prepopulated_fields = {'slug': ('title',)}
class Meta:
model = Category
class TagModelAdmin(admin.ModelAdmin):
list_display = ["title"]
search_fields = ["title"]
class Meta:
model = Tag
admin.site.register(Category, CategoryModelAdmin)
admin.site.register(Tag, TagModelAdmin)
admin.site.register(Post, PostModelAdmin) | [
"[email protected]"
] | |
6bb0b496092ba4fd24481021b84b8eb80ca6cd79 | e3d620828aa693e5e11dd6e0f0547fb173cd3c5b | /kube-cpusets/kube-cpusets/kube_cpusets/tests/test_kube_cpusets.py | 36626e7e92507a9b6b35b4a6cb9e1b8aad58d6b8 | [
"Apache-2.0"
] | permissive | starlingx/monitoring | 57b3b2fd1c3389e018202e96d0fdf2409a7b8cdf | 0d1ac162dc5337e463a7980f8e03ea7ca3910c94 | refs/heads/master | 2023-09-04T11:04:07.994824 | 2023-08-29T17:02:07 | 2023-08-29T17:02:07 | 238,744,168 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 371 | py | #
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (c) 2020 Wind River Systems, Inc.
#
import testtools
class KubeCpusetsTestCase(testtools.TestCase):
# NOTE(jgauld): Suggest add basic cpuset parsing tests.
# /sys/devices/system/cpu/isolated
def test_isolated_cpusets_parse(self):
pass
# kube_cpusets.kube_cpusets.get_isolated_cpuset()
| [
"[email protected]"
] | |
7039823df479d2c91657c0e73ca1d4a67f8bad0d | d44af0b6315765ed94e470f5ce1aacb146299dfe | /os-tweet.py | bf06262c3a7de8a366980114cc742e4d98df4e14 | [
"Apache-2.0"
] | permissive | swoodford/twitter | df6e38752e5c754087a137b35f6e61e6bb04fd4d | c8fa1b273dadda7dfe33efc8d82bc0bb28795555 | refs/heads/master | 2021-01-10T03:12:57.780661 | 2017-08-31T22:21:06 | 2017-08-31T22:21:06 | 44,554,240 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,157 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This script will tweet your current Raspberry Pi OS version details
# Requires Twython, API credentials set as env vars
import os
import time
from twython import Twython
import twitter_api_creds
# Set Credentials from environment variables
CONSUMER_KEY = os.getenv("CONSUMER_KEY")
CONSUMER_SECRET = os.getenv("CONSUMER_SECRET")
ACCESS_KEY = os.getenv("ACCESS_KEY")
ACCESS_SECRET = os.getenv("ACCESS_SECRET")
api = Twython(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET)
cmd = 'bash os-tweet.sh'
osdetails = os.popen(cmd).readline()
cmd2 = 'date'
time = os.popen(cmd2).readline()
# Get geolocation using IP address
getlat = 'curl -s https://whatismycountry.com/ | sed -n \'s/.*Coordinates \\(.*\\)<.*/\\1/p\' | cut -d \' \' -f1'
getlong = 'curl -s https://whatismycountry.com/ | sed -n \'s/.*Coordinates \\(.*\\)<.*/\\1/p\' | cut -d \' \' -f2'
lat = os.popen(getlat).readline()
long = os.popen(getlong).readline()
lat = lat.strip()
long = long.strip()
# Tweet with OS details, date, time and ip-geolocation
api.update_status(status=osdetails + ' as of ' + time + '', lat=(lat), long=(long))
| [
"[email protected]"
] | |
19b951f099dc942c519339f10cba334910583334 | 25563b7084e44b9aeb522c8426942b077a16dd6a | /currency_info_server/api/models.py | 1dcceb4ddfc1b1f73413606ae84d670eb88c8b37 | [] | no_license | mieszkosluzewski/currency | 00c437477f436a0a8fdac929d3724daeba087605 | c18ee5aa21179b8f2b0792aeafd28670f245b69d | refs/heads/master | 2022-12-12T02:34:57.087577 | 2018-08-26T12:47:07 | 2018-08-26T12:47:07 | 145,881,802 | 0 | 0 | null | 2022-12-08T02:47:28 | 2018-08-23T16:45:13 | Python | UTF-8 | Python | false | false | 278 | py | from django.db import models
class ExchangeRate(models.Model):
"""
Model for currency exchange rate.
Exchange rate are related to EUR.
"""
date = models.DateTimeField()
currency = models.CharField(max_length=3)
exchange_rate = models.FloatField()
| [
"[email protected]"
] | |
2845418777ab64fad89574f07fd0e3696b1806e0 | 2db37525eb81169d18bfca9b66ace80293fef94a | /decisionstructure/1_two_numbers.py | b165d98764c61acc88accc54989081249c0b29d3 | [] | no_license | leosbarai/pythonbrasil | 3ec79c5fae966e4c9befc0c91da3e6d91b693f6a | 44255343bc301533f94fb06a77644a44731cf36c | refs/heads/main | 2023-02-28T10:32:14.699135 | 2021-02-05T16:47:52 | 2021-02-05T16:47:52 | 308,131,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 680 | py | def numbers():
print("**********************************")
print(" Exibe o Maior Número!")
print("**********************************")
number_one = input("Informe o primeiro número: ")
number_two = input("Informe o segundo número: ")
if number_one.isdigit() and number_two.isdigit():
if number_one > number_two:
print(f"{number_one} é maior que {number_two}!")
elif number_one < number_two:
print(f"{number_two} é maior que {number_one}!")
else:
print("Os números são iguais")
else:
print("Valor digitado não é um número!")
if __name__ == "__main__":
numbers()
| [
"[email protected]"
] | |
731347542f0c51d82fb738cde80ae938865f584c | f69f1c9180ad49d332a3033216d71da16390344e | /combined_dirichlet_architectures/LEGACY/beta_k3/caller.py | f37ebe362ae179fde14338ab9d1428232f0eb389 | [] | no_license | acevedo-oscar/MicrobiotaGAN | 447e53cfb50940f5f71992ad2fe5fef8d4e31aa3 | cc47443b7196cbbe73df6141c4f5da5b0289f46b | refs/heads/master | 2023-04-22T06:57:25.803107 | 2021-05-05T03:37:08 | 2021-05-05T03:37:08 | 149,841,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,448 | py | import pandas as pd
import numpy as np
import csv
from timeit import default_timer as timer
partiton_n = pd.read_csv('random_amounts.csv', header=None).values.flatten()
print(partiton_n)
##
import os
cwd = os.getcwd()
print(cwd)
os.chdir("../../")
##
print("Loading Train func")
from train_dirichlet_interface import train_gan
print("Finished Loading Train func")
train_ds = pd.read_csv('data/k3_dir/k3_train_set.csv', header=None).values
print(train_ds.shape)
ds_size = train_ds.shape[0]
# train_ds = train_ds[0:ds_size,:]
repetitions = 5
batch_size = 256
print("===> Training with CLR policy <===")
print("===> Training with K3 Dirichlet Dataset <===")
#partiton_n = [300, 400, 500, 700]
print("Give this experiment a name")
experiment_name = "retrain_net" #input()
print(experiment_name)
assert type(experiment_name) == str
print(partiton_n)
print(len(partiton_n))
container_path = 'data/'+ experiment_name
# Recall that os.mkdir isn't recursive, so it only makes on directoryt at a time
try:
# Create target Directory
os.mkdir(container_path)
print("Directory " , container_path , " Created ")
except FileExistsError:
print("Directory " , container_path , " already exists")
for m in range(len(partiton_n)):
for k_rep in range(repetitions):
# partiton_n = np.random.randint(batch_size,ds_size) #At least it has to fit one batch size
index_list = np.random.randint(0,ds_size, size=partiton_n[m]).tolist()
print("Calling training function")
ratio = str(k_rep+1)+"/"+str(repetitions)
ratio2 = str(m+1)+"/"+str(len(partiton_n))
telegram_message = " >> Repetition ("+ratio+") of Partiton ("+ratio2+")"
start = timer()
train_gan(train_ds, index_list, partiton_n[m], k_rep, telegram_message, experiment_name)
print("====> Finished repetition "+str(k_rep)+' of partition # '+str(m))
# repetition, partition, size of partition, time
info_log = [k_rep+1, m+1, partiton_n[m], timer()-start ]
with open(cwd+'/Time_log.csv', 'a') as csvFile:
writer = csv.writer(csvFile)
writer.writerow(info_log)
# Snippet to write a partition table
"""
partiton_n = [np.random.randint(batch_size,ds_size) for k in range(10) ]
df = pd.DataFrame(np.array(partiton_n))
save_name = 'random_amounts.csv'
with open(save_name, 'a') as f:
df.to_csv(f, header=False, index=False)
"""
| [
"Fisgon2018"
] | Fisgon2018 |
15def91348e123b84348c86c88d5732c13bd0bf3 | 5c084bd8095661797c76b73071abcdb27ec56318 | /com/migrations/0001_initial.py | 1635c458201887fc3a8b1aa179c0fdd376ba8048 | [] | no_license | Digvijai/butterpro | 1864f413a17043053d211c0523928cd020a7ba00 | 59e7d7992b1811a5e6dfc83e4f39d3d69e21a8c1 | refs/heads/master | 2023-02-07T13:28:48.084629 | 2020-12-31T14:41:02 | 2020-12-31T14:41:02 | 325,519,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | # Generated by Django 3.1.4 on 2020-12-26 15:09
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('email', models.CharField(max_length=50)),
('phone', models.CharField(max_length=50)),
('msg', models.CharField(max_length=500)),
],
),
]
| [
"[email protected]"
] | |
22708cee8f7060234de219330aa6019ee7d7e034 | 6068170443a67a5385962886f1702725ae539e2a | /model/Board.py | 4fd9d61fc726b86106fb893896ba72fe7d3481fd | [] | no_license | Timuer/BBS | 739d768914290b6fa40eb6dfead5271f8bc767b2 | 65eace466afb03411cb48bcb7190e6127f0ac863 | refs/heads/master | 2020-03-11T14:48:42.105033 | 2018-09-05T13:28:48 | 2018-09-05T13:28:48 | 130,065,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py | from model import MongoModel
class Board(MongoModel):
__fields__ = MongoModel.__fields__ + [
("title", str, ""),
("description", str, ""),
]
"""
class Board(Model):
def __init__(self, form):
self.id = form.get("id", "")
self.title = form.get("title", "")
self.description = form.get("description", "")
""" | [
"[email protected]"
] | |
16c45496bc6c18f97399be82222b4dc40de75a3a | 0ed54a31e4a3055bd6a9c293b2137a0a75e93cd7 | /priv-esc-linux.py | 650ad58fd3ac5ea88feecf9081248510ee4d79d4 | [] | no_license | sarakainimame/linux-privilege-escalation | 31805e5acc71f6fc7ee0b2bf8f3e3cfb9c229a19 | 39427e457aeb2fce40987297b1d7864d91e33f7b | refs/heads/main | 2023-04-23T17:31:49.538507 | 2021-05-08T08:18:10 | 2021-05-08T08:18:10 | 359,348,789 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,207 | py | #!/usr/bin/env/ python3
#####################################################################
# #
# This is an automation tool for Linux Privilege Escalation #
# #
# It does: #
# + OS, Release, and Kernel information gathering #
# + Root service check #
# + SUID-GUID check ('can this user sudo anything?') #
# + Check for SUID-GUID abusable binaries #
# #
#####################################################################
# Importing required libraries:
import os
# This function will check for OS, Release, and Kernel information
def OS_CHECK():
if bit != 0:
print ('[+] OS/Kernel Check Function')
# Running some commands in terminal to identify the system
os.system('uname -a')
os.system('cat /etc/os-release')
os.system('cat /etc/issue')
print ('[+] OS Check CLEARED -> Proceeding with Root Service Check')
print ('------------------------------------------------------')
print ('------------------------------------------------------')
print ('------------------------------------------------------')
ROOT_SERVICE_CHECK()
else:
print ('[+] OS/Kernel Check Function')
os.system('uname -a')
os.system('cat /etc/os-release')
os.system('cat /etc/issue')
GOTOMAIN = input ('[+] Press ENTER to return to MAIN function')
MAIN()
# This function will check for services running as root
def ROOT_SERVICE_CHECK():
if bit != 0:
print ('[+] Root Service Check Function')
os.system('ps aux | grep root')
print('[+] Root Service Check CLEARED -> Proceeding with SUID/GUID Check')
print ('------------------------------------------------------')
print ('------------------------------------------------------')
print ('------------------------------------------------------')
SUID_GUID_CHECK()
else:
print ('[+] Root Service Check Function')
os.system('ps aux | grep root')
GOTOMAIN = input ('[+] Press ENTER to return to MAIN function')
MAIN()
# This function will check for SUID/GUID abusable binaris
def SUID_GUID_CHECK():
if bit != 0:
print ('[+] SUID Check Function')
os.system('find / -perm -u=s -type f 2>/dev/null') # 's' is the SUID bit set for the file permission
print ('------------------------------------------------------')
print('[+] GUID check function')
os.system('find / -perm -g=s -type f 2>/dev/null')
print ('------------------------------------------------------')
print ('------------------------------------------------------')
print ('------------------------------------------------------')
print ('[+] SUID/GUID Check CLEARED')
print ('------------------------------------------------------')
print ('Full Scan Cleared Successfully!!!')
print ('------------------------------------------------------')
GOTOMAIN = input ('[+] Press ENTER to return to Main Menu')
MAIN()
else:
print ('[+] SUID/GUID Check Function')
os.system('find / -perm -u=s -type f 2>/dev/null') # 's' is the SUID bit set for the file permission
os.system('find / -perm -g=s -type f 2>/dev/null')
GOTOMAIN = input ('[+] Press ENTER to return to Main Menu')
MAIN()
# This is the main function that enables options
def MAIN():
global bit # this is a global variable used to process all the functions if Option 4 is selected
bit = 0
print ('[+] Please select one of the following options: ')
OPTION = input ('''
[+] 1. OS/Kernel Check
[+] 2. Root Service Check
[+] 3. SUID/GUID User Check
[+] 4. Full Scan (run all)
[+] 5. To exit
LPC >>> ''')
# Selections:
if OPTION == '1':
print ('You chose option 1. --- Proceeding to OS/Kernel Check')
OS_CHECK()
elif OPTION == '2':
print ('You chose option 2. --- Proceeding to Root Service Check')
ROOT_SERVICE_CHECK()
elif OPTION == '3':
print ('You chose option 3. --- Proceeding to SUID/GUID User Check')
SUID_GUID_CHECK()
elif OPTION == '4':
print ('You chose option 4. --- Proceeding with Full Scan')
bit = bit + 1
OS_CHECK ()
elif OPTION == '5':
print ('You chose option 4. --- Proceeding to EXIT')
else :
BAD_OPTION = input ('Invalid option. Press ENTER to continue')
MAIN()
MAIN()
| [
"[email protected]"
] | |
610b52b8b40a4fe4bdfd01b124c3f409e1b68aed | 5f1284a4f08d6bbb0491ccb4edcf2faf185f33df | /main.py | 5e390cfda45dc24c5ac0ea5c7fbf4c3bf8267bc1 | [
"MIT"
] | permissive | isavillamiel/URLShorty | 9cae31ab90e057f34af1e92ca4a67455555b188f | af71d9d5918e05ec70cbc1c4ae1e35b7e9bafe4c | refs/heads/master | 2021-08-31T14:31:35.860411 | 2017-12-21T17:55:57 | 2017-12-21T17:55:57 | 115,032,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,728 | py | from flask import Flask, request, render_template, redirect
from math import floor
from sqlite3 import OperationalError
import string
import sqlite3
try:
from urllib.parse import urlparse
str_encode = str.encode
except ImportError:
from urlparse import urlparse
str_encode = str
try:
from string import ascii_uppercase
from string import ascii_lowercase
except ImportError:
from string import lowercase as ascii_lowercase
from string import uppercase as ascii_uppercase
import base64
def table_check():
create_table = """
CREATE TABLE WEB_URL(
ID INT PRIMARY KEY AUTOINCREMENT,
URL TEXT NOT NULL
);
"""
# urls.bd better be in app root folder lol
with sqlite3.connect('myurls.db') as conn:
cursor = conn.cursor()
try:
cursor.execute(create_table)
except OperationalError:
pass
# to encode
def toBase64(num, b=64):
if b <= 0 or b > 64:
return 0
base = string.digits + string.lowercase + string.uppercase
r = num % b
res = base[r]
q = floor(num/b)
while q:
r = q % b
q = floor(q/b)
res = base[int(r)] + res
return res
# to decode
def toBase10(num, b=64):
base = string.digits + string.lowercase + string.uppercase
limit = len(num)
res = 0
for i in xrange(limit):
res = b * res + base.find(num[i])
return res
@app.route('/', methods = ['GET', 'POST'])
def home():
if request.method == 'POST':
original_url = str_encode(request.form.get('url'))
if urlparse(original_url).scheme == '':
url = 'http://' + original_url
else:
url = original_url
with sqlite3.connect('myurls.db') as conn:
cursor = conn.cursor()
res = cursor.execute(
'INSERT INTO WEB_URL (URL) VALUES(?)',
[base64.urlsafe_b64decode(url)]
)
encoded_string = toBase64(res.lastrowid)
return render_template('home.html', short_url = host+encoded_string)
return render_template('home.html')
@app.route('/<short_url>')
def redirect_short_url(short_url):
decoded = toBase10(short_url)
url = host
with sqlite3.connect('myurls.db') as conn:
cursor = conn.cursor()
res = cursor.execute('SELECT URL FROM WEB_URL WHERE ID=?', [decoded])
try:
short = res.fetchone()
if short is not None:
url = base64.urlsafe_b64decode(short[0])
except Exception as e:
print(e)
return redirect(url)
if __name__ == '__main__':
# checks if db was created or not
table_check()
app.run(debug=True)
| [
"[email protected]"
] | |
41052edd751343f504e45a45d5ff32e5d3c0b3a2 | aca4ffb711bfd8c30a6357ccb463f4ad0d8715a4 | /venv/Lib/site-packages/sqlalchemy/events.py | ff84272c6a95a8f3794b688c650f5f954a608283 | [] | no_license | zengke123/EbOps | 2f96673d999c6f7dcdc346f4f68869a6dcf49521 | 6bdfbe8e2a568819340b17b32dd1d6deb4db5a4b | refs/heads/master | 2022-12-10T17:04:49.309971 | 2019-08-02T14:31:25 | 2019-08-02T14:31:25 | 183,548,231 | 8 | 2 | null | 2022-12-08T05:57:55 | 2019-04-26T03:11:59 | Python | UTF-8 | Python | false | false | 51,639 | py | # sqlalchemy/events.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Core event interfaces."""
from . import event
from . import exc
from . import util
from .engine import Connectable
from .engine import Dialect
from .engine import Engine
from .pool import Pool
from .sql.base import SchemaEventTarget
class DDLEvents(event.Events):
    """
    Define event listeners for schema objects,
    that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget`
    subclasses, including :class:`.MetaData`, :class:`.Table`,
    :class:`.Column`.

    :class:`.MetaData` and :class:`.Table` support events
    specifically regarding when CREATE and DROP
    DDL is emitted to the database.

    Attachment events are also provided to customize
    behavior whenever a child schema element is associated
    with a parent, such as, when a :class:`.Column` is associated
    with its :class:`.Table`, when a :class:`.ForeignKeyConstraint`
    is associated with a :class:`.Table`, etc.

    Example using the ``after_create`` event::

        from sqlalchemy import event
        from sqlalchemy import Table, Column, MetaData, Integer

        m = MetaData()
        some_table = Table('some_table', m, Column('data', Integer))

        def after_create(target, connection, **kw):
            connection.execute("ALTER TABLE %s SET name=foo_%s" %
                                    (target.name, target.name))

        event.listen(some_table, "after_create", after_create)

    DDL events integrate closely with the
    :class:`.DDL` class and the :class:`.DDLElement` hierarchy
    of DDL clause constructs, which are themselves appropriate
    as listener callables::

        from sqlalchemy import DDL
        event.listen(
            some_table,
            "after_create",
            DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
        )

    The methods here define the name of an event as well
    as the names of members that are passed to listener
    functions.

    For all :class:`.DDLEvents` events, the ``propagate=True`` keyword
    argument will ensure that a given event handler is propagated to copies
    of the object, which are made when using the :meth:`.Table.tometadata`
    method::

        from sqlalchemy import DDL
        event.listen(
            some_table,
            "after_create",
            DDL("ALTER TABLE %(table)s SET name=foo_%(table)s"),
            propagate=True
        )

        new_table = some_table.tometadata(new_metadata)

    The above :class:`.DDL` object will also be associated with the
    :class:`.Table` object represented by ``new_table``.

    .. seealso::

        :ref:`event_toplevel`

        :class:`.DDLElement`

        :class:`.DDL`

        :ref:`schema_ddl_sequences`

    """

    # Placeholder class name rendered into the generated event listener
    # documentation for this event family.
    _target_class_doc = "SomeSchemaClassOrObject"

    # Events defined here may be listened for on :class:`.SchemaEventTarget`
    # and its subclasses (MetaData, Table, Column, constraints, ...).
    _dispatch_target = SchemaEventTarget

    def before_create(self, target, connection, **kw):
        r"""Called before CREATE statements are emitted.

        :param target: the :class:`.MetaData` or :class:`.Table`
         object which is the target of the event.
        :param connection: the :class:`.Connection` where the
         CREATE statement or statements will be emitted.
        :param \**kw: additional keyword arguments relevant
         to the event.  The contents of this dictionary
         may vary across releases, and include the
         list of tables being generated for a metadata-level
         event, the checkfirst flag, and other
         elements used by internal events.

        :func:`.event.listen` also accepts the ``propagate=True``
        modifier for this event; when True, the listener function will
        be established for any copies made of the target object,
        i.e. those copies that are generated when
        :meth:`.Table.tometadata` is used.

        """

    def after_create(self, target, connection, **kw):
        r"""Called after CREATE statements are emitted.

        :param target: the :class:`.MetaData` or :class:`.Table`
         object which is the target of the event.
        :param connection: the :class:`.Connection` where the
         CREATE statement or statements have been emitted.
        :param \**kw: additional keyword arguments relevant
         to the event.  The contents of this dictionary
         may vary across releases, and include the
         list of tables being generated for a metadata-level
         event, the checkfirst flag, and other
         elements used by internal events.

        :func:`.event.listen` also accepts the ``propagate=True``
        modifier for this event; when True, the listener function will
        be established for any copies made of the target object,
        i.e. those copies that are generated when
        :meth:`.Table.tometadata` is used.

        """

    def before_drop(self, target, connection, **kw):
        r"""Called before DROP statements are emitted.

        :param target: the :class:`.MetaData` or :class:`.Table`
         object which is the target of the event.
        :param connection: the :class:`.Connection` where the
         DROP statement or statements will be emitted.
        :param \**kw: additional keyword arguments relevant
         to the event.  The contents of this dictionary
         may vary across releases, and include the
         list of tables being generated for a metadata-level
         event, the checkfirst flag, and other
         elements used by internal events.

        :func:`.event.listen` also accepts the ``propagate=True``
        modifier for this event; when True, the listener function will
        be established for any copies made of the target object,
        i.e. those copies that are generated when
        :meth:`.Table.tometadata` is used.

        """

    def after_drop(self, target, connection, **kw):
        r"""Called after DROP statements are emitted.

        :param target: the :class:`.MetaData` or :class:`.Table`
         object which is the target of the event.
        :param connection: the :class:`.Connection` where the
         DROP statement or statements have been emitted.
        :param \**kw: additional keyword arguments relevant
         to the event.  The contents of this dictionary
         may vary across releases, and include the
         list of tables being generated for a metadata-level
         event, the checkfirst flag, and other
         elements used by internal events.

        :func:`.event.listen` also accepts the ``propagate=True``
        modifier for this event; when True, the listener function will
        be established for any copies made of the target object,
        i.e. those copies that are generated when
        :meth:`.Table.tometadata` is used.

        """

    def before_parent_attach(self, target, parent):
        """Called before a :class:`.SchemaItem` is associated with
        a parent :class:`.SchemaItem`.

        :param target: the target object
        :param parent: the parent to which the target is being attached.

        :func:`.event.listen` also accepts the ``propagate=True``
        modifier for this event; when True, the listener function will
        be established for any copies made of the target object,
        i.e. those copies that are generated when
        :meth:`.Table.tometadata` is used.

        """

    def after_parent_attach(self, target, parent):
        """Called after a :class:`.SchemaItem` is associated with
        a parent :class:`.SchemaItem`.

        :param target: the target object
        :param parent: the parent to which the target is being attached.

        :func:`.event.listen` also accepts the ``propagate=True``
        modifier for this event; when True, the listener function will
        be established for any copies made of the target object,
        i.e. those copies that are generated when
        :meth:`.Table.tometadata` is used.

        """

    def column_reflect(self, inspector, table, column_info):
        """Called for each unit of 'column info' retrieved when
        a :class:`.Table` is being reflected.

        The dictionary of column information as returned by the
        dialect is passed, and can be modified.  The dictionary
        is that returned in each element of the list returned
        by :meth:`.reflection.Inspector.get_columns`:

        * ``name`` - the column's name

        * ``type`` - the type of this column, which should be an instance
          of :class:`~sqlalchemy.types.TypeEngine`

        * ``nullable`` - boolean flag if the column is NULL or NOT NULL

        * ``default`` - the column's server default value.  This is
          normally specified as a plain string SQL expression, however the
          event can pass a :class:`.FetchedValue`, :class:`.DefaultClause`,
          or :func:`.sql.expression.text` object as well.

          .. versionchanged:: 1.1.6

                The :meth:`.DDLEvents.column_reflect` event allows a non
                string :class:`.FetchedValue`,
                :func:`.sql.expression.text`, or derived object to be
                specified as the value of ``default`` in the column
                dictionary.

        * ``attrs``  - dict containing optional column attributes

        The event is called before any action is taken against
        this dictionary, and the contents can be modified.
        The :class:`.Column` specific arguments ``info``, ``key``,
        and ``quote`` can also be added to the dictionary and
        will be passed to the constructor of :class:`.Column`.

        Note that this event is only meaningful if either
        associated with the :class:`.Table` class across the
        board, e.g.::

            from sqlalchemy.schema import Table
            from sqlalchemy import event

            def listen_for_reflect(inspector, table, column_info):
                "receive a column_reflect event"
                # ...

            event.listen(
                    Table,
                    'column_reflect',
                    listen_for_reflect)

        ...or with a specific :class:`.Table` instance using
        the ``listeners`` argument::

            def listen_for_reflect(inspector, table, column_info):
                "receive a column_reflect event"
                # ...

            t = Table(
                'sometable',
                autoload=True,
                listeners=[
                    ('column_reflect', listen_for_reflect)
                ])

        This because the reflection process initiated by ``autoload=True``
        completes within the scope of the constructor for :class:`.Table`.

        :func:`.event.listen` also accepts the ``propagate=True``
        modifier for this event; when True, the listener function will
        be established for any copies made of the target object,
        i.e. those copies that are generated when
        :meth:`.Table.tometadata` is used.

        """
class PoolEvents(event.Events):
    """Available events for :class:`.Pool`.

    The methods here define the name of an event as well
    as the names of members that are passed to listener
    functions.

    e.g.::

        from sqlalchemy import event

        def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
            "handle an on checkout event"

        event.listen(Pool, 'checkout', my_on_checkout)

    In addition to accepting the :class:`.Pool` class and
    :class:`.Pool` instances, :class:`.PoolEvents` also accepts
    :class:`.Engine` objects and the :class:`.Engine` class as
    targets, which will be resolved to the ``.pool`` attribute of the
    given engine or the :class:`.Pool` class::

        engine = create_engine("postgresql://scott:tiger@localhost/test")

        # will associate with engine.pool
        event.listen(engine, 'checkout', my_on_checkout)

    """

    _target_class_doc = "SomeEngineOrPool"
    _dispatch_target = Pool

    @classmethod
    def _accept_with(cls, target):
        # Normalize the listen() target: an Engine class resolves to the
        # Pool class, an Engine instance resolves to its own pool, and
        # Pool classes/instances pass through unchanged.
        if isinstance(target, type):
            if issubclass(target, Engine):
                return Pool
            elif issubclass(target, Pool):
                return target
        elif isinstance(target, Engine):
            return target.pool
        else:
            return target

    def connect(self, dbapi_connection, connection_record):
        """Called at the moment a particular DBAPI connection is first
        created for a given :class:`.Pool`.

        This event allows one to capture the point directly after which
        the DBAPI module-level ``.connect()`` method has been used in order
        to produce a new DBAPI connection.

        :param dbapi_connection: a DBAPI connection.

        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.

        """

    def first_connect(self, dbapi_connection, connection_record):
        """Called exactly once for the first time a DBAPI connection is
        checked out from a particular :class:`.Pool`.

        The rationale for :meth:`.PoolEvents.first_connect` is to determine
        information about a particular series of database connections based
        on the settings used for all connections.  Since a particular
        :class:`.Pool` refers to a single "creator" function (which in terms
        of a :class:`.Engine` refers to the URL and connection options used),
        it is typically valid to make observations about a single connection
        that can be safely assumed to be valid about all subsequent
        connections, such as the database version, the server and client
        encoding settings, collation settings, and many others.

        :param dbapi_connection: a DBAPI connection.

        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.

        """

    def checkout(self, dbapi_connection, connection_record, connection_proxy):
        """Called when a connection is retrieved from the Pool.

        :param dbapi_connection: a DBAPI connection.

        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.

        :param connection_proxy: the :class:`._ConnectionFairy` object which
          will proxy the public interface of the DBAPI connection for the
          lifespan of the checkout.

        If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
        connection will be disposed and a fresh connection retrieved.
        Processing of all checkout listeners will abort and restart
        using the new connection.

        .. seealso:: :meth:`.ConnectionEvents.engine_connect` - a similar event
           which occurs upon creation of a new :class:`.Connection`.

        """

    def checkin(self, dbapi_connection, connection_record):
        """Called when a connection returns to the pool.

        Note that the connection may be closed, and may be None if the
        connection has been invalidated.  ``checkin`` will not be called
        for detached connections.  (They do not return to the pool.)

        :param dbapi_connection: a DBAPI connection.

        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.

        """

    def reset(self, dbapi_connection, connection_record):
        """Called before the "reset" action occurs for a pooled connection.

        This event represents
        when the ``rollback()`` method is called on the DBAPI connection
        before it is returned to the pool.  The behavior of "reset" can
        be controlled, including disabled, using the ``reset_on_return``
        pool argument.

        The :meth:`.PoolEvents.reset` event is usually followed by the
        :meth:`.PoolEvents.checkin` event, except in those
        cases where the connection is discarded immediately after reset.

        :param dbapi_connection: a DBAPI connection.

        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.

        .. seealso::

            :meth:`.ConnectionEvents.rollback`

            :meth:`.ConnectionEvents.commit`

        """

    def invalidate(self, dbapi_connection, connection_record, exception):
        """Called when a DBAPI connection is to be "invalidated".

        This event is called any time the :meth:`._ConnectionRecord.invalidate`
        method is invoked, either from API usage or via "auto-invalidation",
        without the ``soft`` flag.

        The event occurs before a final attempt to call ``.close()`` on the
        connection occurs.

        :param dbapi_connection: a DBAPI connection.

        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.

        :param exception: the exception object corresponding to the reason
         for this invalidation, if any.  May be ``None``.

        .. versionadded:: 0.9.2 Added support for connection invalidation
           listening.

        .. seealso::

            :ref:`pool_connection_invalidation`

        """

    def soft_invalidate(self, dbapi_connection, connection_record, exception):
        """Called when a DBAPI connection is to be "soft invalidated".

        This event is called any time the :meth:`._ConnectionRecord.invalidate`
        method is invoked with the ``soft`` flag.

        Soft invalidation refers to when the connection record that tracks
        this connection will force a reconnect after the current connection
        is checked in.   It does not actively close the dbapi_connection
        at the point at which it is called.

        .. versionadded:: 1.0.3

        """

    def close(self, dbapi_connection, connection_record):
        """Called when a DBAPI connection is closed.

        The event is emitted before the close occurs.

        The close of a connection can fail; typically this is because
        the connection is already closed.  If the close operation fails,
        the connection is discarded.

        The :meth:`.close` event corresponds to a connection that's still
        associated with the pool. To intercept close events for detached
        connections use :meth:`.close_detached`.

        .. versionadded:: 1.1

        """

    def detach(self, dbapi_connection, connection_record):
        """Called when a DBAPI connection is "detached" from a pool.

        This event is emitted after the detach occurs.  The connection
        is no longer associated with the given connection record.

        .. versionadded:: 1.1

        """

    def close_detached(self, dbapi_connection):
        """Called when a detached DBAPI connection is closed.

        The event is emitted before the close occurs.

        The close of a connection can fail; typically this is because
        the connection is already closed.  If the close operation fails,
        the connection is discarded.

        .. versionadded:: 1.1

        """
class ConnectionEvents(event.Events):
    """Available events for :class:`.Connectable`, which includes
    :class:`.Connection` and :class:`.Engine`.

    The methods here define the name of an event as well as the names of
    members that are passed to listener functions.

    An event listener can be associated with any :class:`.Connectable`
    class or instance, such as an :class:`.Engine`, e.g.::

        from sqlalchemy import event, create_engine

        def before_cursor_execute(conn, cursor, statement, parameters, context,
                                        executemany):
            log.info("Received statement: %s", statement)

        engine = create_engine('postgresql://scott:tiger@localhost/test')
        event.listen(engine, "before_cursor_execute", before_cursor_execute)

    or with a specific :class:`.Connection`::

        with engine.begin() as conn:
            @event.listens_for(conn, 'before_cursor_execute')
            def before_cursor_execute(conn, cursor, statement, parameters,
                                            context, executemany):
                log.info("Received statement: %s", statement)

    When the methods are called with a `statement` parameter, such as in
    :meth:`.after_cursor_execute`, :meth:`.before_cursor_execute` and
    :meth:`.dbapi_error`, the statement is the exact SQL string that was
    prepared for transmission to the DBAPI ``cursor`` in the connection's
    :class:`.Dialect`.

    The :meth:`.before_execute` and :meth:`.before_cursor_execute`
    events can also be established with the ``retval=True`` flag, which
    allows modification of the statement and parameters to be sent
    to the database.  The :meth:`.before_cursor_execute` event is
    particularly useful here to add ad-hoc string transformations, such
    as comments, to all executions::

        from sqlalchemy.engine import Engine
        from sqlalchemy import event

        @event.listens_for(Engine, "before_cursor_execute", retval=True)
        def comment_sql_calls(conn, cursor, statement, parameters,
                                        context, executemany):
            statement = statement + " -- some comment"
            return statement, parameters

    .. note:: :class:`.ConnectionEvents` can be established on any
       combination of :class:`.Engine`, :class:`.Connection`, as well
       as instances of each of those classes.  Events across all
       four scopes will fire off for a given instance of
       :class:`.Connection`.  However, for performance reasons, the
       :class:`.Connection` object determines at instantiation time
       whether or not its parent :class:`.Engine` has event listeners
       established.   Event listeners added to the :class:`.Engine`
       class or to an instance of :class:`.Engine` *after* the instantiation
       of a dependent :class:`.Connection` instance will usually
       *not* be available on that :class:`.Connection` instance.  The newly
       added listeners will instead take effect for :class:`.Connection`
       instances created subsequent to those event listeners being
       established on the parent :class:`.Engine` class or instance.

    :param retval=False: Applies to the :meth:`.before_execute` and
      :meth:`.before_cursor_execute` events only.  When True, the
      user-defined event function must have a return value, which
      is a tuple of parameters that replace the given statement
      and parameters.  See those methods for a description of
      specific return arguments.

    """

    _target_class_doc = "SomeEngine"
    _dispatch_target = Connectable

    @classmethod
    def _listen(cls, event_key, retval=False):
        target, identifier, fn = (
            event_key.dispatch_target,
            event_key.identifier,
            event_key._listen_fn,
        )
        # Mark the target so the execution path knows to dispatch events.
        target._has_events = True
        if not retval:
            # For non-retval listeners of the two "before" events, wrap the
            # user function so it transparently returns the (unmodified)
            # statement/parameters expected by the dispatch machinery.
            if identifier == "before_execute":
                orig_fn = fn

                def wrap_before_execute(
                    conn, clauseelement, multiparams, params
                ):
                    orig_fn(conn, clauseelement, multiparams, params)
                    return clauseelement, multiparams, params

                fn = wrap_before_execute
            elif identifier == "before_cursor_execute":
                orig_fn = fn

                def wrap_before_cursor_execute(
                    conn, cursor, statement, parameters, context, executemany
                ):
                    orig_fn(
                        conn,
                        cursor,
                        statement,
                        parameters,
                        context,
                        executemany,
                    )
                    return statement, parameters

                fn = wrap_before_cursor_execute
        elif retval and identifier not in (
            "before_execute",
            "before_cursor_execute",
            "handle_error",
        ):
            # retval=True is only meaningful for events whose return value
            # is consumed by the execution machinery.
            raise exc.ArgumentError(
                "Only the 'before_execute', "
                "'before_cursor_execute' and 'handle_error' engine "
                "event listeners accept the 'retval=True' "
                "argument."
            )
        event_key.with_wrapper(fn).base_listen()

    def before_execute(self, conn, clauseelement, multiparams, params):
        """Intercept high level execute() events, receiving uncompiled
        SQL constructs and other objects prior to rendering into SQL.

        This event is good for debugging SQL compilation issues as well
        as early manipulation of the parameters being sent to the database,
        as the parameter lists will be in a consistent format here.

        This event can be optionally established with the ``retval=True``
        flag.  The ``clauseelement``, ``multiparams``, and ``params``
        arguments should be returned as a three-tuple in this case::

            @event.listens_for(Engine, "before_execute", retval=True)
            def before_execute(conn, clauseelement, multiparams, params):
                # do something with clauseelement, multiparams, params
                return clauseelement, multiparams, params

        :param conn: :class:`.Connection` object
        :param clauseelement: SQL expression construct, :class:`.Compiled`
         instance, or string statement passed to :meth:`.Connection.execute`.
        :param multiparams: Multiple parameter sets, a list of dictionaries.
        :param params: Single parameter set, a single dictionary.

        .. seealso::

            :meth:`.before_cursor_execute`

        """

    def after_execute(self, conn, clauseelement, multiparams, params, result):
        """Intercept high level execute() events after execute.


        :param conn: :class:`.Connection` object
        :param clauseelement: SQL expression construct, :class:`.Compiled`
         instance, or string statement passed to :meth:`.Connection.execute`.
        :param multiparams: Multiple parameter sets, a list of dictionaries.
        :param params: Single parameter set, a single dictionary.
        :param result: :class:`.ResultProxy` generated by the execution.

        """

    def before_cursor_execute(
        self, conn, cursor, statement, parameters, context, executemany
    ):
        """Intercept low-level cursor execute() events before execution,
        receiving the string SQL statement and DBAPI-specific parameter list to
        be invoked against a cursor.

        This event is a good choice for logging as well as late modifications
        to the SQL string.  It's less ideal for parameter modifications except
        for those which are specific to a target backend.

        This event can be optionally established with the ``retval=True``
        flag.  The ``statement`` and ``parameters`` arguments should be
        returned as a two-tuple in this case::

            @event.listens_for(Engine, "before_cursor_execute", retval=True)
            def before_cursor_execute(conn, cursor, statement,
                            parameters, context, executemany):
                # do something with statement, parameters
                return statement, parameters

        See the example at :class:`.ConnectionEvents`.

        :param conn: :class:`.Connection` object
        :param cursor: DBAPI cursor object
        :param statement: string SQL statement, as to be passed to the DBAPI
        :param parameters: Dictionary, tuple, or list of parameters being
         passed to the ``execute()`` or ``executemany()`` method of the
         DBAPI ``cursor``.  In some cases may be ``None``.
        :param context: :class:`.ExecutionContext` object in use.  May
         be ``None``.
        :param executemany: boolean, if ``True``, this is an ``executemany()``
         call, if ``False``, this is an ``execute()`` call.

        .. seealso::

            :meth:`.before_execute`

            :meth:`.after_cursor_execute`

        """

    def after_cursor_execute(
        self, conn, cursor, statement, parameters, context, executemany
    ):
        """Intercept low-level cursor execute() events after execution.

        :param conn: :class:`.Connection` object
        :param cursor: DBAPI cursor object.  Will have results pending
         if the statement was a SELECT, but these should not be consumed
         as they will be needed by the :class:`.ResultProxy`.
        :param statement: string SQL statement, as passed to the DBAPI
        :param parameters: Dictionary, tuple, or list of parameters being
         passed to the ``execute()`` or ``executemany()`` method of the
         DBAPI ``cursor``.  In some cases may be ``None``.
        :param context: :class:`.ExecutionContext` object in use.  May
         be ``None``.
        :param executemany: boolean, if ``True``, this is an ``executemany()``
         call, if ``False``, this is an ``execute()`` call.

        """

    @util.deprecated(
        "0.9",
        "The :meth:`.ConnectionEvents.dbapi_error` "
        "event is deprecated and will be removed in a future release. "
        "Please refer to the :meth:`.ConnectionEvents.handle_error` "
        "event.",
    )
    def dbapi_error(
        self, conn, cursor, statement, parameters, context, exception
    ):
        """Intercept a raw DBAPI error.

        This event is called with the DBAPI exception instance
        received from the DBAPI itself, *before* SQLAlchemy wraps the
        exception with its own exception wrappers, and before any
        other operations are performed on the DBAPI cursor; the
        existing transaction remains in effect as well as any state
        on the cursor.

        The use case here is to inject low-level exception handling
        into an :class:`.Engine`, typically for logging and
        debugging purposes.

        .. warning::

            Code should **not** modify
            any state or throw any exceptions here as this will
            interfere with SQLAlchemy's cleanup and error handling
            routines.  For exception modification, please refer to the
            new :meth:`.ConnectionEvents.handle_error` event.

        Subsequent to this hook, SQLAlchemy may attempt any
        number of operations on the connection/cursor, including
        closing the cursor, rolling back of the transaction in the
        case of connectionless execution, and disposing of the entire
        connection pool if a "disconnect" was detected.   The
        exception is then wrapped in a SQLAlchemy DBAPI exception
        wrapper and re-thrown.

        :param conn: :class:`.Connection` object
        :param cursor: DBAPI cursor object
        :param statement: string SQL statement, as passed to the DBAPI
        :param parameters: Dictionary, tuple, or list of parameters being
         passed to the ``execute()`` or ``executemany()`` method of the
         DBAPI ``cursor``.  In some cases may be ``None``.
        :param context: :class:`.ExecutionContext` object in use.  May
         be ``None``.
        :param exception: The **unwrapped** exception emitted directly from the
         DBAPI.  The class here is specific to the DBAPI module in use.

        """

    def handle_error(self, exception_context):
        r"""Intercept all exceptions processed by the :class:`.Connection`.

        This includes all exceptions emitted by the DBAPI as well as
        within SQLAlchemy's statement invocation process, including
        encoding errors and other statement validation errors.  Other areas
        in which the event is invoked include transaction begin and end,
        result row fetching, cursor creation.

        Note that :meth:`.handle_error` may support new kinds of exceptions
        and new calling scenarios at *any time*.  Code which uses this
        event must expect new calling patterns to be present in minor
        releases.

        To support the wide variety of members that correspond to an exception,
        as well as to allow extensibility of the event without backwards
        incompatibility, the sole argument received is an instance of
        :class:`.ExceptionContext`.   This object contains data members
        representing detail about the exception.

        Use cases supported by this hook include:

        * read-only, low-level exception handling for logging and
          debugging purposes
        * exception re-writing
        * Establishing or disabling whether a connection or the owning
          connection pool is invalidated or expired in response to a
          specific exception.

        The hook is called while the cursor from the failed operation
        (if any) is still open and accessible.   Special cleanup operations
        can be called on this cursor; SQLAlchemy will attempt to close
        this cursor subsequent to this hook being invoked.  If the connection
        is in "autocommit" mode, the transaction also remains open within
        the scope of this hook; the rollback of the per-statement transaction
        also occurs after the hook is called.

        For the common case of detecting a "disconnect" situation which
        is not currently handled by the SQLAlchemy dialect, the
        :attr:`.ExceptionContext.is_disconnect` flag can be set to True which
        will cause the exception to be considered as a disconnect situation,
        which typically results in the connection pool being invalidated::

            @event.listens_for(Engine, "handle_error")
            def handle_exception(context):
                if isinstance(context.original_exception, pyodbc.Error):
                    for code in (
                        '08S01', '01002', '08003',
                        '08007', '08S02', '08001', 'HYT00', 'HY010'):

                        if code in str(context.original_exception):
                            context.is_disconnect = True

        A handler function has two options for replacing
        the SQLAlchemy-constructed exception into one that is user
        defined.   It can either raise this new exception directly, in
        which case all further event listeners are bypassed and the
        exception will be raised, after appropriate cleanup has taken
        place::

            @event.listens_for(Engine, "handle_error")
            def handle_exception(context):
                if isinstance(context.original_exception,
                    psycopg2.OperationalError) and \
                    "failed" in str(context.original_exception):
                    raise MySpecialException("failed operation")

        .. warning::  Because the :meth:`.ConnectionEvents.handle_error`
           event specifically provides for exceptions to be re-thrown as
           the ultimate exception raised by the failed statement,
           **stack traces will be misleading** if the user-defined event
           handler itself fails and throws an unexpected exception;
           the stack trace may not illustrate the actual code line that
           failed!  It is advised to code carefully here and use
           logging and/or inline debugging if unexpected exceptions are
           occurring.

        Alternatively, a "chained" style of event handling can be
        used, by configuring the handler with the ``retval=True``
        modifier and returning the new exception instance from the
        function.  In this case, event handling will continue onto the
        next handler.   The "chained" exception is available using
        :attr:`.ExceptionContext.chained_exception`::

            @event.listens_for(Engine, "handle_error", retval=True)
            def handle_exception(context):
                if context.chained_exception is not None and \
                    "special" in context.chained_exception.message:
                    return MySpecialException("failed",
                        cause=context.chained_exception)

        Handlers that return ``None`` may be used within the chain; when
        a handler returns ``None``, the previous exception instance,
        if any, is maintained as the current exception that is passed onto the
        next handler.

        When a custom exception is raised or returned, SQLAlchemy raises
        this new exception as-is, it is not wrapped by any SQLAlchemy
        object.  If the exception is not a subclass of
        :class:`sqlalchemy.exc.StatementError`,
        certain features may not be available; currently this includes
        the ORM's feature of adding a detail hint about "autoflush" to
        exceptions raised within the autoflush process.

        :param context: an :class:`.ExceptionContext` object.  See this
         class for details on all available members.

        .. versionadded:: 0.9.7 Added the
           :meth:`.ConnectionEvents.handle_error` hook.

        .. versionchanged:: 1.1 The :meth:`.handle_error` event will now
           receive all exceptions that inherit from ``BaseException``,
           including ``SystemExit`` and ``KeyboardInterrupt``.  The setting for
           :attr:`.ExceptionContext.is_disconnect` is ``True`` in this case and
           the default for
           :attr:`.ExceptionContext.invalidate_pool_on_disconnect` is
           ``False``.

        .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is now
           invoked when an :class:`.Engine` fails during the initial
           call to :meth:`.Engine.connect`, as well as when a
           :class:`.Connection` object encounters an error during a
           reconnect operation.

        .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is
           not fired off when a dialect makes use of the
           ``skip_user_error_events`` execution option.   This is used
           by dialects which intend to catch SQLAlchemy-specific exceptions
           within specific operations, such as when the MySQL dialect detects
           a table not present within the ``has_table()`` dialect method.
           Prior to 1.0.0, code which implements :meth:`.handle_error` needs
           to ensure that exceptions thrown in these scenarios are re-raised
           without modification.

        """

    def engine_connect(self, conn, branch):
        """Intercept the creation of a new :class:`.Connection`.

        This event is called typically as the direct result of calling
        the :meth:`.Engine.connect` method.

        It differs from the :meth:`.PoolEvents.connect` method, which
        refers to the actual connection to a database at the DBAPI level;
        a DBAPI connection may be pooled and reused for many operations.
        In contrast, this event refers only to the production of a higher level
        :class:`.Connection` wrapper around such a DBAPI connection.

        It also differs from the :meth:`.PoolEvents.checkout` event
        in that it is specific to the :class:`.Connection` object, not the
        DBAPI connection that :meth:`.PoolEvents.checkout` deals with, although
        this DBAPI connection is available here via the
        :attr:`.Connection.connection` attribute.  But note there can in fact
        be multiple :meth:`.PoolEvents.checkout` events within the lifespan
        of a single :class:`.Connection` object, if that :class:`.Connection`
        is invalidated and re-established.  There can also be multiple
        :class:`.Connection` objects generated for the same already-checked-out
        DBAPI connection, in the case that a "branch" of a :class:`.Connection`
        is produced.

        :param conn: :class:`.Connection` object.
        :param branch: if True, this is a "branch" of an existing
         :class:`.Connection`.  A branch is generated within the course
         of a statement execution to invoke supplemental statements, most
         typically to pre-execute a SELECT of a default value for the purposes
         of an INSERT statement.

        .. versionadded:: 0.9.0

        .. seealso::

            :ref:`pool_disconnects_pessimistic` - illustrates how to use
            :meth:`.ConnectionEvents.engine_connect`
            to transparently ensure pooled connections are connected to the
            database.

            :meth:`.PoolEvents.checkout` the lower-level pool checkout event
            for an individual DBAPI connection

            :meth:`.ConnectionEvents.set_connection_execution_options` - a copy
            of a :class:`.Connection` is also made when the
            :meth:`.Connection.execution_options` method is called.

        """

    def set_connection_execution_options(self, conn, opts):
        """Intercept when the :meth:`.Connection.execution_options`
        method is called.

        This method is called after the new :class:`.Connection` has been
        produced, with the newly updated execution options collection, but
        before the :class:`.Dialect` has acted upon any of those new options.

        Note that this method is not called when a new :class:`.Connection`
        is produced which is inheriting execution options from its parent
        :class:`.Engine`; to intercept this condition, use the
        :meth:`.ConnectionEvents.engine_connect` event.

        :param conn: The newly copied :class:`.Connection` object

        :param opts: dictionary of options that were passed to the
         :meth:`.Connection.execution_options` method.

        .. versionadded:: 0.9.0

        .. seealso::

            :meth:`.ConnectionEvents.set_engine_execution_options` - event
            which is called when :meth:`.Engine.execution_options` is called.

        """

    def set_engine_execution_options(self, engine, opts):
        """Intercept when the :meth:`.Engine.execution_options`
        method is called.

        The :meth:`.Engine.execution_options` method produces a shallow
        copy of the :class:`.Engine` which stores the new options.  That new
        :class:`.Engine` is passed here.   A particular application of this
        method is to add a :meth:`.ConnectionEvents.engine_connect` event
        handler to the given :class:`.Engine` which will perform some per-
        :class:`.Connection` task specific to these execution options.

        :param conn: The newly copied :class:`.Engine` object

        :param opts: dictionary of options that were passed to the
         :meth:`.Connection.execution_options` method.

        .. versionadded:: 0.9.0

        .. seealso::

            :meth:`.ConnectionEvents.set_connection_execution_options` - event
            which is called when :meth:`.Connection.execution_options` is
            called.

        """

    def engine_disposed(self, engine):
        """Intercept when the :meth:`.Engine.dispose` method is called.

        The :meth:`.Engine.dispose` method instructs the engine to
        "dispose" of its connection pool (e.g. :class:`.Pool`), and
        replaces it with a new one.  Disposing of the old pool has the
        effect that existing checked-in connections are closed.  The new
        pool does not establish any new connections until it is first used.

        This event can be used to indicate that resources related to the
        :class:`.Engine` should also be cleaned up, keeping in mind that the
        :class:`.Engine` can still be used for new requests in which case
        it re-acquires connection resources.

        .. versionadded:: 1.0.5

        """

    def begin(self, conn):
        """Intercept begin() events.

        :param conn: :class:`.Connection` object

        """

    def rollback(self, conn):
        """Intercept rollback() events, as initiated by a
        :class:`.Transaction`.

        Note that the :class:`.Pool` also "auto-rolls back"
        a DBAPI connection upon checkin, if the ``reset_on_return``
        flag is set to its default value of ``'rollback'``.
        To intercept this
        rollback, use the :meth:`.PoolEvents.reset` hook.

        :param conn: :class:`.Connection` object

        .. seealso::

            :meth:`.PoolEvents.reset`

        """

    def commit(self, conn):
        """Intercept commit() events, as initiated by a
        :class:`.Transaction`.

        Note that the :class:`.Pool` may also "auto-commit"
        a DBAPI connection upon checkin, if the ``reset_on_return``
        flag is set to the value ``'commit'``.  To intercept this
        commit, use the :meth:`.PoolEvents.reset` hook.

        :param conn: :class:`.Connection` object

        """

    def savepoint(self, conn, name):
        """Intercept savepoint() events.

        :param conn: :class:`.Connection` object
        :param name: specified name used for the savepoint.

        """

    def rollback_savepoint(self, conn, name, context):
        """Intercept rollback_savepoint() events.

        :param conn: :class:`.Connection` object
        :param name: specified name used for the savepoint.
        :param context: :class:`.ExecutionContext` in use.  May be ``None``.

        """

    def release_savepoint(self, conn, name, context):
        """Intercept release_savepoint() events.

        :param conn: :class:`.Connection` object
        :param name: specified name used for the savepoint.
        :param context: :class:`.ExecutionContext` in use.  May be ``None``.

        """

    def begin_twophase(self, conn, xid):
        """Intercept begin_twophase() events.

        :param conn: :class:`.Connection` object
        :param xid: two-phase XID identifier

        """

    def prepare_twophase(self, conn, xid):
        """Intercept prepare_twophase() events.

        :param conn: :class:`.Connection` object
        :param xid: two-phase XID identifier

        """

    def rollback_twophase(self, conn, xid, is_prepared):
        """Intercept rollback_twophase() events.

        :param conn: :class:`.Connection` object
        :param xid: two-phase XID identifier
        :param is_prepared: boolean, indicates if
         :meth:`.TwoPhaseTransaction.prepare` was called.

        """

    def commit_twophase(self, conn, xid, is_prepared):
        """Intercept commit_twophase() events.

        :param conn: :class:`.Connection` object
        :param xid: two-phase XID identifier
        :param is_prepared: boolean, indicates if
         :meth:`.TwoPhaseTransaction.prepare` was called.

        """
class DialectEvents(event.Events):
"""event interface for execution-replacement functions.
These events allow direct instrumentation and replacement
of key dialect functions which interact with the DBAPI.
.. note::
:class:`.DialectEvents` hooks should be considered **semi-public**
and experimental.
These hooks are not for general use and are only for those situations
where intricate re-statement of DBAPI mechanics must be injected onto
an existing dialect. For general-use statement-interception events,
please use the :class:`.ConnectionEvents` interface.
.. seealso::
:meth:`.ConnectionEvents.before_cursor_execute`
:meth:`.ConnectionEvents.before_execute`
:meth:`.ConnectionEvents.after_cursor_execute`
:meth:`.ConnectionEvents.after_execute`
.. versionadded:: 0.9.4
"""
_target_class_doc = "SomeEngine"
_dispatch_target = Dialect
@classmethod
def _listen(cls, event_key, retval=False):
target = event_key.dispatch_target
target._has_events = True
event_key.base_listen()
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
if issubclass(target, Engine):
return Dialect
elif issubclass(target, Dialect):
return target
elif isinstance(target, Engine):
return target.dialect
else:
return target
    def do_connect(self, dialect, conn_rec, cargs, cparams):
        """Receive connection arguments before a connection is made.

        Return a DBAPI connection to halt further events from invoking;
        the returned connection will be used.

        Alternatively, the event can manipulate the cargs and/or cparams
        collections; cargs will always be a Python list that can be mutated
        in-place and cparams a Python dictionary.  Return None to
        allow control to pass to the next event handler and ultimately
        to allow the dialect to connect normally, given the updated
        arguments.

        .. versionadded:: 1.0.3

        """
def do_executemany(self, cursor, statement, parameters, context):
"""Receive a cursor to have executemany() called.
Return the value True to halt further events from invoking,
and to indicate that the cursor execution has already taken
place within the event handler.
"""
def do_execute_no_params(self, cursor, statement, context):
"""Receive a cursor to have execute() with no parameters called.
Return the value True to halt further events from invoking,
and to indicate that the cursor execution has already taken
place within the event handler.
"""
def do_execute(self, cursor, statement, parameters, context):
"""Receive a cursor to have execute() called.
Return the value True to halt further events from invoking,
and to indicate that the cursor execution has already taken
place within the event handler.
"""
def do_setinputsizes(
self, inputsizes, cursor, statement, parameters, context
):
"""Receive the setinputsizes dictionary for possible modification.
This event is emitted in the case where the dialect makes use of the
DBAPI ``cursor.setinputsizes()`` method which passes information about
parameter binding for a particular statement. The given
``inputsizes`` dictionary will contain :class:`.BindParameter` objects
as keys, linked to DBAPI-specific type objects as values; for
parameters that are not bound, they are added to the dictionary with
``None`` as the value, which means the parameter will not be included
in the ultimate setinputsizes call. The event may be used to inspect
and/or log the datatypes that are being bound, as well as to modify the
dictionary in place. Parameters can be added, modified, or removed
from this dictionary. Callers will typically want to inspect the
:attr:`.BindParameter.type` attribute of the given bind objects in
order to make decisions about the DBAPI object.
After the event, the ``inputsizes`` dictionary is converted into
an appropriate datastructure to be passed to ``cursor.setinputsizes``;
either a list for a positional bound parameter execution style,
or a dictionary of string parameter keys to DBAPI type objects for
a named bound parameter execution style.
Most dialects **do not use** this method at all; the only built-in
dialect which uses this hook is the cx_Oracle dialect. The hook here
is made available so as to allow customization of how datatypes are set
up with the cx_Oracle DBAPI.
.. versionadded:: 1.2.9
.. seealso::
:ref:`cx_oracle_setinputsizes`
"""
pass
| [
"[email protected]"
] | |
3af5bcdeb732a8276230f2d75dcdba8ebedda0ad | 1f3b92255c66a5a7fb51f1cc77a902f78ee76688 | /untitled3/ua/lviv/iot/armament/models/ApartmentType.py | 925d4e13ccd90e96fb3b8645e1668b5b28f997d1 | [] | no_license | YuliaYassnyska/Lab11 | 8475a9308183d9a7769ecb5b148b9992722edfec | fda5d5f26d1fc1a9a3c5069be719cafcae3bb94f | refs/heads/master | 2020-05-20T05:44:21.306387 | 2019-05-07T23:06:35 | 2019-05-07T23:06:35 | 185,413,900 | 0 | 0 | null | 2019-06-09T13:22:49 | 2019-05-07T14:02:19 | Python | UTF-8 | Python | false | false | 97 | py | from enum import Enum
class ApartmentType(Enum):
    """Enumeration of the armament categories used by the models package."""

    PISTOLS = 0
    GRENADES = 1
    SNIPERDEVISE = 2
"[email protected]"
] | |
d760dbf6c270bdb690105efdf7eb6c672753a781 | 1d53e2839b2fe7573f8abfad56cc93766b5ce90c | /practicepython/list-comprehensions.py | e1e0dbea26c2146e3bdc1587bfc9817cb12fea90 | [] | no_license | amitparmar01/python-learn | a25fe0b5c9d73afdc6c501de850bec47e0fca0b9 | b3224dcfeca60d2472287df89be204eb99cdf3c0 | refs/heads/master | 2021-01-25T05:34:39.832430 | 2015-06-25T17:38:57 | 2015-06-25T17:38:57 | 37,963,277 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | def main():
a = [1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
even = [num for num in a if num%2 == 0]
print(even)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
271c16554eec56e35a88f5b7354f2ceedf4be695 | 4c04bfa2b0dd91250e35bce2886954830299bb80 | /contrib/testgen/gen_base58_test_vectors.py | 0864f4bb3837b0a2025cbde5834631658d7d719a | [
"MIT"
] | permissive | thehomosapien/dclr | 6a81a5fcd761b6fb7cf21f5d1c7e64d8d6e0c941 | dba88f2ad6359addaa66f1875792cc8c4f30d686 | refs/heads/master | 2020-07-04T03:28:50.754864 | 2019-08-13T12:27:40 | 2019-08-13T12:27:40 | 202,137,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,591 | py | #!/usr/bin/env python
# Copyright (c) 2012-2018 The Bitcoin Core developers
# Copyright (c) 2017 The Raven Core developers
# Copyright (c) 2018 The DCLRcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json
gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
# key types
# Version bytes used as base58 payload prefixes for each key/address kind.
PUBKEY_ADDRESS = 0
SCRIPT_ADDRESS = 5
PUBKEY_ADDRESS_TEST = 111
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 128
PRIVKEY_TEST = 239
# Keys for the metadata dict emitted with each valid vector (see templates below).
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata
# None = N/A
((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
((PRIVKEY,), 32, (), (True, False, None, False)),
# The (1,) suffix marks a compressed private key.
((PRIVKEY,), 32, (1,), (True, False, None, True)),
((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
    """Return True when *v* base58-decodes (checksum OK) and its decoded
    form matches one of the known templates (prefix, payload size, suffix)."""
    decoded = b58decode_chk(v)
    if decoded is None:
        return False
    for prefix_bytes, payload_size, suffix_bytes, _meta in templates:
        # NOTE: Python-2 idiom -- str(bytearray(...)) yields raw byte strings.
        prefix = str(bytearray(prefix_bytes))
        suffix = str(bytearray(suffix_bytes))
        if not (decoded.startswith(prefix) and decoded.endswith(suffix)):
            continue
        if len(decoded) - len(prefix) - len(suffix) == payload_size:
            return True
    return False
def gen_valid_vectors():
    """Endlessly yield (base58_string, payload_hex, metadata) triples that
    are guaranteed to validate."""
    while True:
        for prefix_bytes, payload_size, suffix_bytes, meta in templates:
            prefix = str(bytearray(prefix_bytes))
            payload = os.urandom(payload_size)
            suffix = str(bytearray(suffix_bytes))
            encoded = b58encode_chk(prefix + payload + suffix)
            assert is_valid(encoded)
            # Drop the None entries so only applicable metadata is emitted.
            metadata = {k: v for k, v in zip(metadata_keys, meta) if v is not None}
            yield (encoded, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
    """Encode one possibly-invalid vector derived from *template*, optionally
    corrupting the prefix, the payload length, or the suffix."""
    if corrupt_prefix:
        prefix = os.urandom(1)
    else:
        prefix = str(bytearray(template[0]))
    if randomize_payload_size:
        # Exponentially distributed length, but at least 50 bytes.
        payload = os.urandom(max(int(random.expovariate(0.5)), 50))
    else:
        payload = os.urandom(template[1])
    if corrupt_suffix:
        suffix = os.urandom(len(template[2]))
    else:
        suffix = str(bytearray(template[2]))
    return b58encode_chk(prefix + payload + suffix)
def randbool(p=0.5):
    """Return True with probability *p* (uniform draw on [0, 1))."""
    draw = random.random()
    return draw < p
def gen_invalid_vectors():
    """Endlessly yield 1-tuples of strings that must NOT validate."""
    # Manual edge cases first: empty string and a single junk character.
    yield "",
    yield "x",
    while True:
        # Kinds of invalid vectors: bad prefix, bad payload length,
        # randomized suffix, or line-level corruption of the encoding.
        for template in templates:
            candidate = gen_invalid_vector(
                template, randbool(0.2), randbool(0.2), randbool(0.2))
            if random.randint(0, 10) < 1:  # occasional line corruption
                if randbool():
                    # Append a random base58 character.
                    candidate += random.choice(b58chars)
                else:
                    # Replace one character somewhere in the string.
                    pos = random.randint(0, len(candidate))
                    candidate = candidate[:pos] + random.choice(b58chars) + candidate[pos + 1:]
            if not is_valid(candidate):
                yield candidate,
# CLI entry: argv[1] selects 'valid'/'invalid', argv[2] the vector count.
# NOTE(review): indentation was lost in extraction; the body below needs
# re-indenting before this runs -- confirm against the upstream script.
if __name__ == '__main__':
import sys, json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
# An unknown mode string raises KeyError here (only IndexError is caught).
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
# Take the first `count` vectors and dump them as pretty-printed JSON.
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
| [
"[email protected]"
] | |
6d95b8e7f0f080375b6f7fbe55eb73c7ed357a40 | c7ac751f9f13f133184d7df31c016e7b3be38da7 | /url_builder.py | b3c29e749c480a15217d3545a431f1be2308daee | [] | no_license | pietro-andreoli/BB-Oracle | 84b6a38a4c19bf1413b309f94f66cf98e4825196 | 4db1b54d2402c646d1c104791ba8fa477f430e4c | refs/heads/master | 2023-02-10T09:12:15.157409 | 2020-12-27T19:31:31 | 2020-12-27T19:31:31 | 324,826,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,170 | py | class BestBuyURLBuilder():
"""
A simple URL builder specifically for BestBuy API URLs.
Example:
path_parts = ["search", "contact"]
url_obj = BestBuyURLBuilder(path_parts)
print(url_obj.url)
url_obj = BestBuyURLBuilder()
url_obj.set_path("search", "contact")
print(url_obj.url)
Properties:
PROTOCOL (str): Protocol of the URL.
API_DOMAIN (str): Domain of the URL.
Attributes:
path_components (list): Path components separated.
_url (str): The URL as a string.
"""
PROTOCOL = "https"
API_DOMAIN = "api.bestbuy.com"
def __init__(self, *args):
self.path_components = []
self.set_path(*args)
self._url = None
self._compile_url()
def set_path(self, *args):
"""
Sets the path.
"""
self.path_components = args
def _compile_url(self):
"""
Actually builds and sets the URL string.
"""
base_url = f"{BestBuyURLBuilder.PROTOCOL}://{BestBuyURLBuilder.API_DOMAIN}/"
path = '/'.join(self.path_components)
self._url = base_url + path
@property
def url(self):
"""
Getter for _url. Compiles the URL, sets it and returns it.
Returns:
str: Compiled URL.
"""
self._compile_url()
return self._url
| [
"[email protected]"
] | |
dcc5c035c0a3db9dffc08a1f4dd012a5ba7378e1 | d0cebf75d67a5d8be57bd3656684ee2dbf282c8a | /tests/test_square.py | ac369eab2428bba0b666d35e906d800b78deddc5 | [
"MIT"
] | permissive | sgill2/tutorial | e310b79fef6edbedd5e98924cbecc5a354356db9 | be436fe682896efb72fa54f43ce8b66276757bb4 | refs/heads/master | 2021-04-29T18:18:33.937197 | 2018-02-15T23:20:53 | 2018-02-15T23:20:53 | 121,690,743 | 0 | 0 | MIT | 2018-02-15T23:23:47 | 2018-02-15T22:30:10 | Python | UTF-8 | Python | false | false | 743 | py | """
Testing area package
"""
from shapes.square.area import area_square
from shapes.square.perimeter import perimeter_square
from shapes.triangle.area import area_triangle
import pytest
def test_square_area():
    """A square of side 2 has area 4.0."""
    side = 2
    assert pytest.approx(area_square(side)) == 4.0
def test_square_perimeter():
    """A square of side 2 has perimeter 8.0."""
    side = 2
    assert pytest.approx(perimeter_square(side)) == 8.0
######################
# Write a test for the triangle function
######################
def test_triangle_area():
    """A triangle with base 1 and height 1 has area 0.5."""
    assert pytest.approx(area_triangle(1, 1)) == 0.5
    print("insert test for triangle area here")


print('changed file')
| [
"[email protected]"
] | |
8456a893f9314c7395601b63bb315e7f5349d99a | 83ff6674ad1fc4ac1d9523216149d5dae82f8bbd | /accounts/urls.py | a8e0836da5ea4c3f61d766c4e173108ff8f03fe0 | [] | no_license | 4802852/Act-Agora | 8a75c4622b0e306a514096b0a4aaa3e3360ec26e | 7603444e32cebd6d5ae2d3a6f8f3a349373120d2 | refs/heads/master | 2023-04-28T19:49:18.498810 | 2021-05-10T13:44:03 | 2021-05-10T13:44:03 | 351,899,676 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,555 | py | from django.urls import path
from . import views
# Namespace used for reverse() lookups, e.g. 'accounts:login'.
app_name = 'accounts'
urlpatterns = [
# Registration flow: terms agreement, then trainer or trainee sign-up.
path('accounts/agreement/', views.AgreementView.as_view(), name='agreement'),
path('accounts/signup_t/', views.TrainerRegisterView.as_view(), name='signup_t'),
path('accounts/signup/', views.TraineeRegisterView.as_view(), name='signup'),
# Session management.
path('accounts/login/', views.LoginView.as_view(), name='login'),
path('accounts/logout/', views.logout_view, name='logout'),
path('accounts/mypage/', views.mypage, name='mypage'),
# Username (ID) recovery; the /find/ endpoint is the AJAX backend.
path('accounts/recovery/id/', views.RecoveryIdView.as_view(), name='recovery_id'),
path('accounts/recovery/id/find/', views.ajax_find_id_view, name='ajax_id'),
path('accounts/mypage/password_change/', views.password_edit_view, name='password_change'),
# Profile viewing, editing and deletion.
path('accounts/profile/', views.profile_view, name='profile'),
path('accounts/profile/update/', views.profile_update_view, name='profile_update'),
path('accounts/delete/', views.profile_delete_view, name='profile_delete'),
# E-mail activation after registration.
path('accounts/registerauth/', views.register_success, name='register_success'),
path('accounts/activate/<str:uid64>/<str:token>/', views.activate, name='activate'),
# Password recovery: request, AJAX lookup, auth-code confirm, reset.
path('accounts/recovery/pw/', views.RecoveryPwView.as_view(), name='recovery_pw'),
path('accounts/recovery/pw/find/', views.ajax_find_pw_view, name='ajax_pw'),
path('accounts/recovery/pw/auth/', views.auth_confirm_view, name='recovery_auth'),
path('accounts/recovery/pw/reset/', views.auth_pw_reset_view, name='recovery_pw_reset'),
]
| [
"[email protected]"
] | |
0075ef4446fc7ccdf9f90d6aa94669b2ff78f0b2 | 2dd895261aeb91953d98248a09c69a7a1873511f | /ember_django/backend/authenticate_user.py | fe7370d24ec382b3422a4048add6c7d5d37f7d4e | [] | no_license | KPetsas/e-science | 0245f9dab90946335adc904fc5523e5d1a566b68 | bb9f16caeaef5cad98a248f6a2a15990b0fd313c | refs/heads/master | 2021-01-16T12:19:49.605386 | 2015-01-12T14:21:47 | 2015-01-12T14:21:47 | 29,286,850 | 0 | 0 | null | 2015-01-15T07:57:58 | 2015-01-15T07:57:57 | null | UTF-8 | Python | false | false | 2,515 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
This script contains classes and functions that
authenticate escience users.
@author: Ioannis Stenos, Nick Vrionis
'''
import logging
from kamaki.clients.astakos import AstakosClient
from kamaki.clients import ClientError
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import BasePermission
from backend.models import Token
from rest_framework import exceptions
from orka.cluster_errors_constants import *
# Constants
AUTHENTICATED = 1
NOT_AUTHENTICATED = 0
# Only method that escience token authentication is not required.
SAFE_METHODS = ['POST']
class EscienceTokenAuthentication(TokenAuthentication):
    """Rest-framework token authentication backed by the local escience
    Token model (overrides the default model and credential lookup)."""

    model = Token

    def authenticate_credentials(self, key):
        """Resolve *key* to an (user, token) pair, or fail with 401."""
        try:
            token = self.model.objects.get(key=key)
        except self.model.DoesNotExist:
            raise exceptions.AuthenticationFailed('Invalid token')
        return (token.user, token)
class IsAuthenticatedOrIsCreation(BasePermission):
    """Permission class that lets the safe methods (POST only, per
    SAFE_METHODS) through without a token; every other method requires an
    authenticated user."""

    def has_permission(self, request, view):
        if request.method in SAFE_METHODS:
            return True
        return request.user and request.user.is_authenticated()
class IsAuthenticated(BasePermission):
    """Permission class for the database view: every request must carry the
    escience authentication token (authenticated user required)."""

    def has_permission(self, request, view):
        return request.user and request.user.is_authenticated()
def check_user_credentials(token, auth_url='https://accounts.okeanos.grnet.gr'
                           '/identity/v2.0'):
    """Identity,Account/Astakos. Test ~okeanos authentication credentials.

    Returns AUTHENTICATED (1) when the token is accepted by the Astakos
    endpoint, NOT_AUTHENTICATED (0) otherwise.
    """
    logging.info(' Test the credentials')
    try:
        auth = AstakosClient(auth_url, token)
        auth.authenticate()
        logging.info(' Authentication verified')
        return AUTHENTICATED
    except ClientError:
        # SECURITY FIX: log only the endpoint -- never write the raw
        # authentication token into the log stream.
        logging.error('Authentication failed with url %s', auth_url)
        return NOT_AUTHENTICATED
| [
"[email protected]"
] | |
9bfbbc13e6a41a76c9efbb66aa8b6fe6264d1f22 | ad9798fe308ec4837e1e772dad23f5d0a97e2650 | /DataStrucsAlgos/Listings/listing_3_4.py | 0277926ab54bf7c92f601dbe3d78cd26d07c7a7d | [] | no_license | vivek-x-jha/Python-Concepts | f60839924f5c22117e8866f9ac8daa555181e31f | 770d178dea637a21593f4686e1ddc4f3c296c50a | refs/heads/master | 2022-03-23T23:26:09.511309 | 2019-12-22T03:41:47 | 2019-12-22T03:41:47 | 111,546,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 713 | py | from concepts.DataStrucsAlgos.pythonds import Stack
def parChecker(symbolString):
    """Return True if every bracket in *symbolString* is balanced and
    properly nested, False otherwise.

    Any character that is not an opener '([{' is treated as a closer; a
    non-bracket character raises ValueError from str.index, matching the
    original behaviour.
    """
    # IMPROVEMENT: a plain list is a perfectly good stack (append/pop),
    # so the third-party pythonds Stack is no longer needed here.
    stack = []
    for symbol in symbolString:
        if symbol in "([{":
            stack.append(symbol)
        else:
            if not stack:
                return False  # closer with nothing open
            top = stack.pop()
            # Matching opener/closer occupy the same index in these strings.
            if "([{".index(top) != ")]}".index(symbol):
                return False
    # Balanced only if nothing is left open.
    return not stack
def matches(open, close):
    """True when *close* is the closing bracket paired with opener *open*.

    Raises ValueError (via str.index) for non-bracket characters.
    """
    return "([{".index(open) == ")]}".index(close)
| [
"[email protected]"
] | |
9c264644514c139a4eafcd14de69452cf05e2f3a | 0572de92159e99958b6d77ecea92060a7cbb4bd7 | /001_multi_armed_bandit.py | 059ceee8cfe4003ce4525b54abe6be1f072cf9b5 | [] | no_license | ironmanciti/reiforcement-lecture | e8e5cabd4cec9fd69a5fdadc86d6386c681083e3 | 9701951cbd3a0e962aa52d277b9cc0f6fd76ec9a | refs/heads/master | 2020-07-08T13:20:19.212860 | 2019-11-17T00:57:10 | 2019-11-17T00:57:10 | 203,686,378 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,649 | py | """
10-Armed Testbed (Reinforcement Learning: An Introduction, Sutton, Barto, fig 2.2)
Created by Jet-Tsyn Lee 23/02/18, last update 03/01/18
Program is to compare the greedy and e-greedy methods in a 10-armed bandit testbed, presented
in the Reinforcement Learning: An Introduction book, Sutton, Barto, fig 2.2.
"""
import numpy as np
import matplotlib.pyplot as plt
import time
################################################################
# TestBed class containing the states and actions, and the overall rules of the test
class KarmedBandit(object):
    """k-armed bandit testbed: true action values are redrawn from a normal
    distribution on every reset, and the optimal arm index is recorded."""

    def __init__(self, nArms, mean, stDev):
        self.nArms = nArms    # number of arms
        self.mean = mean      # mean of the Gaussian the arm values come from
        self.stDev = stDev    # standard deviation of that Gaussian
        self.reset()

    def reset(self):
        """Redraw all true action values and remember which arm is best."""
        self.actArr = np.random.normal(self.mean, self.stDev, self.nArms)
        self.optim = np.argmax(self.actArr)
###############################################################
# Agent Class - Controls the agents action in the environment interacting with the bandit
class Agent(object):
    """Epsilon-greedy bandit agent with sample-average action-value
    estimates (Q_t(a) = sum of rewards for a / times a was chosen)."""

    def __init__(self, nArms, epsilon=0):
        self.nArms = nArms                    # number of arms to select from
        self.epsilon = epsilon                # exploration probability (0 => greedy)
        self.timeStep = 0                     # rewards observed so far (t)
        self.lastAction = None                # index of the most recent action
        self.kAction = np.zeros(nArms)        # count of pulls per arm
        self.rSum = np.zeros(nArms)           # cumulative reward per arm
        self.valEstimates = np.zeros(nArms)   # Q_t(a) estimates

    def __str__(self):
        # Label used in the plot legends.
        if self.epsilon == 0:
            return "Greedy"
        else:
            return "Epsilon = " + str(self.epsilon)

    def action(self):
        """Select an arm with an epsilon-greedy policy and remember it."""
        if np.random.random() < self.epsilon:
            # Explore: uniformly random arm.
            a = np.random.choice(self.nArms)
        else:
            # Exploit: choose uniformly among the arms tied for the best
            # estimate.  BUG FIX: the original compared the estimates against
            # np.argmax(...) -- the *index* of the maximum, not the maximum
            # *value* -- so ties were detected incorrectly.
            best = np.max(self.valEstimates)
            candidates = np.where(self.valEstimates == best)[0]
            a = np.random.choice(candidates)
        self.lastAction = a
        return a

    def interpreter(self, reward):
        """Fold *reward* into the sample-average estimate of lastAction."""
        At = self.lastAction
        self.kAction[At] += 1
        self.rSum[At] += reward
        # New action-value: sum(rewards) / number of pulls.
        self.valEstimates[At] = self.rSum[At] / self.kAction[At]
        self.timeStep += 1

    def reset(self):
        """Clear all statistics for the next independent run."""
        self.timeStep = 0
        self.lastAction = None
        self.kAction[:] = 0
        self.rSum[:] = 0
        self.valEstimates[:] = 0
# Experiment driver: compare greedy vs epsilon-greedy agents on the testbed.
# NOTE(review): indentation was lost in extraction; the loop bodies below
# need re-indenting before this runs -- confirm against the upstream script.
nArms = 10 # n number of bandits
bandit = KarmedBandit(nArms=nArms, mean=0, stDev=3)
start_time = time.time() #store time to monitor execution
agents = [Agent(nArms=nArms), Agent(nArms=nArms, epsilon=0.1), Agent(nArms=nArms, epsilon=0.01)]
iterations = 2000 # number of repeated iterations
plays = 1000 # number of plays per iteration
# Array to store the scores, number of plays X number of agents
scoreArr = np.zeros((plays, len(agents)))
# Array to maintain optimal count, Graph 2
optimlArr = np.zeros((plays, len(agents)))
# loop for number of iterations
for iIter in range(iterations):
if (iIter%100) == 0: # Print statement after every 100 iterations
print("Completed Iterations: ", iIter)
bandit.reset() #Reset testbed and all agents
for agent in agents:
agent.reset()
# Loop for number of plays
for jPlays in range(plays):
for i, kAgent in enumerate(agents):
actionT = kAgent.action()
# Reward - normal dist (mean = Q*(a_t), variance = 1)
rewardT = np.random.normal(bandit.actArr[actionT], scale=1)
# Agent checks state
kAgent.interpreter(reward=rewardT)
# Add score in arrary, graph 1
scoreArr[jPlays, i] += rewardT
# check the optimal action, add optimal to array, graph 2
if actionT == bandit.optim:
optimlArr[jPlays, i] += 1
# Average accumulated scores/optimal counts over all iterations.
scoreAvg = scoreArr/iterations
optimlAvg = optimlArr/iterations
print("Execution time: %s seconds" % (time.time() - start_time))
#Graph 1 - Averate rewards over all plays
plt.title("10-Armed TestBed - Average Rewards")
plt.plot(scoreAvg)
plt.ylabel('Average Reward')
plt.xlabel('Plays')
plt.legend(agents, loc=4)
plt.show()
#Graph 1 - optimal selections over all plays
plt.title("10-Armed TestBed - % Optimal Action")
plt.plot(optimlAvg * 100)
plt.ylim(0, 100)
plt.ylabel('% Optimal Action')
plt.xlabel('Plays')
plt.legend(agents, loc=4)
plt.show()
| [
"[email protected]"
] | |
643685a1abf09d1c2d97cfea6dc7afee896f5e04 | 6861759f3bc7f82554d654e7223280a8a7d86157 | /data_manager/manager/TwitterStreamListener.py | 0a3f411e987bf9b19144149d547c80f17c16ce56 | [] | no_license | nweat/mental-health-research | c8c6cfd0a3a91059ce4e47328ca4d0440affe349 | 426f6bb425600f3c1a5fb9039341bc012733072d | refs/heads/master | 2023-02-12T17:42:13.914410 | 2023-02-01T04:46:06 | 2023-02-01T04:46:06 | 74,685,980 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,732 | py | import tweepy, sys, json, os, os.path
import HelperManager as helper
"""
REFERENCES:
http://stackoverflow.com/questions/23531608/how-do-i-save-streaming-tweets-in-json-via-tweepy
"""
# Python-2 tweepy stream listener: writes screen names of users passing the
# helper's "normal user" criteria to a file, up to `limit` matches.
# NOTE(review): indentation was lost in extraction -- the nesting of
# `print record` / `return True` below is ambiguous; confirm upstream.
class TwitterStreamListener(tweepy.StreamListener):
def __init__(self, limit, path, api = None):
super(TwitterStreamListener, self).__init__()
self.num_tweets = 0
self.limit = limit
self.path = path
self.helper = helper.HelperManager()
self.file = ''
# Open the output file only once the stream connection is established.
def on_connect(self):
if not os.path.exists(self.path):
self.file = open(self.path, 'w')
else:
self.file = open(self.path, 'w+')
def on_status(self, status):
# Default coordinates when the tweet carries no geo data.
lat = 'NaN'
lon = 'NaN'
if status.coordinates != None:
lon = status.coordinates['coordinates'][0] #long
lat = status.coordinates['coordinates'][1] #lat
if status.place != None:
# NOTE(review): countryCode is assigned but never used.
countryCode = status.place.country_code
record = {'Text': status.text, 'Created At': status.created_at,
'stat count': status.user.statuses_count,
'desc': status.user.description,
'screen name': status.user.screen_name,
'lang': status.user.lang,
'lat': lat,
'lon': lon }
#self.num_tweets += 1
if self.num_tweets <= self.limit:
# Record the user only when the helper's selection criteria pass.
if self.helper.criteria_normal_user_selection(status.user.statuses_count, status.user.description,status.user.screen_name,status.user.lang,'null','null',lat) == 1:
self.file.write(status.user.screen_name)
self.file.write('\n')
self.num_tweets += 1
print record
return True
else:
# Limit reached: close the file and stop the stream.
self.file.close()
return False
def on_error(self, status_code):
print('Got an error with status code: ' + str(status_code))
return True # To continue listening
def on_timeout(self):
print('Timeout...')
return True # To continue listening
| [
"[email protected]"
] | |
b83fa683a2914702d34f58b631a73c04236ea9d0 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/cirq_new/cirq_program/startCirq_pragma226.py | 876ed49c4a8c218eb909548e558be8e853f51f36 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,536 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=14
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
class Opty(cirq.PointOptimizer):
    """Point optimizer that rewrites each CZPowGate as a CZ followed by two
    X layers on both qubits (the double X cancels out)."""

    def optimization_at(
            self,
            circuit: 'cirq.Circuit',
            index: int,
            op: 'cirq.Operation'
    ) -> Optional[cirq.PointOptimizationSummary]:
        is_czpow = (isinstance(op, cirq.ops.GateOperation)
                    and isinstance(op.gate, cirq.CZPowGate))
        if not is_czpow:
            return None  # leave every other operation untouched
        return cirq.PointOptimizationSummary(
            clear_span=1,
            clear_qubits=op.qubits,
            new_operations=[
                cirq.CZ(*op.qubits),
                cirq.X.on_each(*op.qubits),
                cirq.X.on_each(*op.qubits),
            ]
        )
#thatsNoCode
def make_circuit(n: int, input_qubit):
    """Assemble the fixed 4-qubit demo circuit and measure all qubits under
    the key 'result'.  The `number=` comments preserve the generator's
    original operation ids."""
    circuit = cirq.Circuit()
    circuit.append(cirq.H.on(input_qubit[0]))                     # number=1
    circuit.append(cirq.H.on(input_qubit[1]))                     # number=2
    circuit.append(cirq.H.on(input_qubit[1]))                     # number=7
    circuit.append(cirq.X.on(input_qubit[1]))                     # number=10
    circuit.append(cirq.H.on(input_qubit[2]))                     # number=3
    circuit.append(cirq.H.on(input_qubit[3]))                     # number=4
    circuit.append(cirq.CNOT.on(input_qubit[3], input_qubit[0]))  # number=5
    circuit.append(cirq.H.on(input_qubit[0]))                     # number=11
    circuit.append(cirq.CZ.on(input_qubit[3], input_qubit[0]))    # number=12
    circuit.append(cirq.H.on(input_qubit[0]))                     # number=13
    circuit.append(cirq.SWAP.on(input_qubit[1], input_qubit[0]))  # number=8
    circuit.append(cirq.SWAP.on(input_qubit[1], input_qubit[0]))  # number=9
    circuit.append(cirq.measure(*input_qubit, key='result'))
    return circuit
def bitstring(bits):
    """Render an iterable of measurement bits as a '0'/'1' string."""
    return "".join(str(int(bit)) for bit in bits)
# Script entry: build, optimize, simulate, and dump the histogram to CSV.
# NOTE(review): indentation was lost in extraction; the body below needs
# re-indenting before this runs -- confirm against the upstream script.
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
# Compile for the Sycamore gate set before simulating.
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2820
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
# Histogram keyed by the measurement bitstring.
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq_pragma226.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
"[email protected]"
] | |
8050d61e076da223a5c16cdd9de4e8a0891e2bdf | a5064258539dc46d1381ec964896182e7f6674fa | /LinearRegression/GradDescend3D.py | f65d70654abe42612173ab7527bab7953ef28a69 | [
"Apache-2.0"
] | permissive | 50183816/lineregression | 4cdbfc7ced348bc313dd64250a2cc7aaa3428c38 | f16e5b18d74cf1fd41ce5dafe1cab5f3f3e30883 | refs/heads/master | 2020-09-30T13:33:45.680777 | 2019-12-11T07:24:34 | 2019-12-11T07:24:34 | 227,297,107 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 982 | py | # _*_ codig utf8 _*_
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import matplotlib as mpl
mpl.rcParams['font.sans-serif'] = [u'simHei']
def f(x, y):
    """Objective function being minimized: f(x, y) = x**2 + y**2."""
    return x ** 2 + y ** 2
def h(x):
    """Derivative of x**2 with respect to x (used for each coordinate)."""
    return 2 * x
# Gradient descent on f(x, y) = x^2 + y^2, recording the path for plotting.
# NOTE(review): indentation was lost in extraction; the while-loop body
# needs re-indenting before this runs -- confirm against the upstream file.
X=[]
Y=[]
Z=[]
# Starting point (2, 2) and fixed learning rate.
x=2
y=2
step=0.1
f_change=f(x,y)
f_current=f(x,y)
X.append(x)
Y.append(y)
Z.append(f_current)
# Iterate until the per-step decrease of f falls below 1e-10.
while f_change > 1e-10:
x = x - step * h(x)
y=y-step*h(y)
f_change = f_current-f(x,y)
f_current = f(x,y)
X.append(x)
Y.append(y)
Z.append(f_current)
print('结果为(%s,%s)' %(x,y))
print(X)
# 3D surface of f plus the recorded descent path.
fig = plt.figure()
ax=Axes3D(fig)
X2 = np.arange(-2,2,0.2)
Y2 = np.arange(-2,2,0.2)
X2,Y2=np.meshgrid(X2,Y2)
Z2=X2**2+Y2**2
ax.plot_surface(X2,Y2,Z2,rstride=1,cstride=1,cmap='rainbow')
ax.plot(X,Y,Z,'bo--')
#plt.plot(X2,Y2,'-',color='#666666')
#plt.plot(X,Y,'bo--')
ax.set_title('$y=x^2+y^2$函数求解最小值,最终解为(%.2f,%.2f,%.2f)'%(x,y,f_current))
plt.show()
"Administrator@WINDOWS-2251633"
] | Administrator@WINDOWS-2251633 |
6c6f5c28509fd31415bce43079b41aac9b9fa74d | 6dfc0fa80744d23b4fd33118de31af5b51589d64 | /manager.py | d0f6aa5d245ebfe608493e3cfd2c59ee701b3725 | [] | no_license | GrandpaAn/grandpaanrent | 48832ee7f08d2430dff2fbd07aa9394de8506813 | 7f716b8010c9e90fae51819ceaf63883b09574d4 | refs/heads/master | 2020-03-12T20:03:05.472216 | 2018-04-24T04:44:12 | 2018-04-24T04:44:12 | 130,797,011 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,024 | py | from werkzeug.utils import secure_filename
from flask_script import Manager
from app import create_app, db
from flask_migrate import Migrate, MigrateCommand, upgrade
from app.models import *
app = create_app()
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@manager.command
def dev():
# Run the app behind a livereload server that watches every file.
from livereload import Server
live_server = Server(app.wsgi_app)
live_server.watch('**/*.*')
live_server.serve(open_url=True)
@manager.command
def test():
# Discover and run the unit tests under ./tests with verbose output.
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
@manager.command
def deploy():
# Apply pending migrations, then seed the Role table.
from app.models import Role
upgrade()
Role.seed()
# Rebuild the database and populate it with forged (fake) sample data.
# NOTE(review): indentation was lost in extraction; re-indent before running.
@manager.command
def forged():
from forgery_py import basic, lorem_ipsum, name, internet, date
from random import randint
# DESTRUCTIVE: drops and recreates every table before seeding.
db.drop_all()
db.create_all()
Role.seed()
guests = Role.query.first()
# Factory helpers; func_author/func_post are zero-arg callables that pick
# a random user/post for each generated row.
def generate_comment(func_author, func_post):
return Comment(body=lorem_ipsum.paragraphs(),
created=date.date(past=True),
author=func_author(),
post=func_post())
def generate_post(func_author):
return Post(title=lorem_ipsum.title(),
body=lorem_ipsum.paragraphs(),
created=date.date(past=True),
author=func_author())
def generate_user():
return User(name=internet.user_name(),
email=internet.email_address(),
password=basic.text(6, at_least=6, spaces=False),
role=guests)
users = [generate_user() for i in range(0, 5)]
db.session.add_all(users)
# NOTE(review): the upper bound 4 is tied to the 5 users created above.
random_user = lambda: users[randint(0, 4)]
posts = [generate_post(random_user) for i in range(0, randint(50, 200))]
db.session.add_all(posts)
random_post = lambda: posts[randint(0, len(posts) - 1)]
comments = [generate_comment(random_user, random_post) for i in range(0, randint(2, 100))]
db.session.add_all(comments)
db.session.commit()
# Entry point: dispatch to the Flask-Script manager commands.
if __name__ == '__main__':
manager.run()
# app.run(debug=True)
# live_server = Server(app.wsgi_app)
# live_server.watch('**/*.*')
# live_server.serve(open_url=True)
| [
"[email protected]"
] | |
21a9256cade8930d41c842a62f75ffa03f5688e8 | 4ec77ff70c7e3edce07f889e7284e6e6e1df0860 | /bankAccount/bank.py | b4d8d6fc870aee8b20e83a6ef2f67ef177c8ad0a | [] | no_license | victorsemenov1980/Various_Apps | 755308b405902204635a117d69feffbb4dc954fd | 56640dbe9380b2dd9a4ac84d4d79f283caf5d77e | refs/heads/master | 2023-02-22T10:04:09.222008 | 2021-01-24T10:03:12 | 2021-01-24T10:03:12 | 263,818,467 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,804 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri May 15 16:39:57 2020
@author: user
"""
import tkinter
from tkinter import*
window=Tk()
window.wm_title('Current balance')#Title for the window
class account:
    """A trivially file-backed account balance: the backing file holds the
    balance as a plain integer string."""

    def __init__(self, file):
        # Remember the backing path, then load the stored balance.
        self.file = file
        with open(file, 'r') as handle:
            # The file contents are always a string; convert to int.
            self.balance = int(handle.read())

    def withdraw(self, amount):
        """Subtract *amount* and return the new balance (no overdraft check)."""
        self.amount = amount
        self.balance = self.balance - amount
        return self.balance

    def deposit(self, amount):
        """Add *amount* and return the new balance."""
        self.amount = amount
        self.balance = self.balance + amount
        return self.balance

    def save(self):
        """Persist the current balance back to the backing file."""
        with open(self.file, 'w') as handle:
            handle.write(str(self.balance))
# Button callback: show the current balance in the text box, then persist it.
def view():
t1.insert(END,str(current.balance)+'\n')
current.save()
# Button callback: withdraw the entered amount, report and persist.
# NOTE(review): int(e1_value.get()) raises ValueError on non-numeric input.
def withdraw():
current.withdraw(int(e1_value.get()))
t1.insert(END,'After withdrawal of '+e1_value.get()+' the leftover is '+str(current.balance)+'\n')
current.save()
def deposit():
current.deposit(int(e1_value.get()))
t1.insert(END,'After deposit of '+e1_value.get()+' the available balance is '+str(current.balance)+'\n')
current.save()
current=account('balance.txt')
'''INPUT boxes'''
e1=Label(window,text="Enter amount")
e1.grid(row=0,column=0)
e1_value=StringVar()
e1=Entry(window,textvariable=e1_value)
e1.grid(row=0,column=1)
'''OUTput box'''
t1=Text(window,height=20,width=60)
t1.grid(row=2,column=0,columnspan=6)
'''Buttons'''
b1=Button(window,text='View current balance',command=view)
b1.grid(row=3,column=0,rowspan=1)
b2=Button(window,text='Withdraw amount',command=withdraw)
b2.grid(row=3,column=1,rowspan=1)
b3=Button(window,text='Deposit amount',command=deposit)
b3.grid(row=3,column=2,rowspan=1)
window.mainloop() | [
"[email protected]"
] | |
820ffb64ce2f4d2a3837c7bfd6cdf7171e2781ad | dd29cbe2d3b4595c6d4e12362aa12d415fb5b324 | /Hydriot.PiAgent/triggers/contracts/dose_relay_abstract.py | b986b5aa3ee067f48ba5ef2cfdb40f8fc8b64952 | [
"MIT"
] | permissive | mariusvrstr/hydriot | 6e238d72ca0ff33623a76066fceca84756187787 | 4b6f3bc9533b57e84ab66802ed66ce8c9be6b9e7 | refs/heads/main | 2023-06-10T09:48:39.833881 | 2021-07-04T09:17:37 | 2021-07-04T09:17:37 | 332,140,204 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,370 | py | from abc import ABC, abstractmethod ## abstract module
from datetime import datetime, timedelta
import RPi.GPIO as GPIO
import asyncio
class DoseRelayAbstract(ABC):
name = "N/A"
is_enabled = None
_counter_sensor = None
_eligable = False
_last_time_tube_was_filled = None
_maximum_prime_time = None
_is_dosing = False
# Cooldown period after start that will now allow dosing (In case reset mid dose)
# Need schedule to check if eligable for dosage (Cooldown, elapsed time, sensor reading)
# If ap switches off stop any in-progress dosing
# Add manual dosage on schedule that also does not require TDS
# Throw exception until calibration have taken place
# During eligibility schedule check that dose_should_finish_by is not < now else switch relay off
async def _dose(self, duration_in_seconds):
self._is_dosing = True
self._switch_relay_on()
await asyncio.sleep(duration_in_seconds)
self._switch_relay_off()
self._is_dosing = False
def _switch_relay_on(self):
self._current_on_state = True
GPIO.output(self.relay_pin_pos, GPIO.LOW if self.is_low_volt_relay else GPIO.HIGH) # ON
pass
def _switch_relay_off(self):
GPIO.output(self.relay_pin_pos, GPIO.HIGH if self.is_low_volt_relay else GPIO.LOW) # OFF
pass
def __init__(self, name, is_enabled, max_prime_time):
self.name = name
self.is_enabled = is_enabled
self._maximum_prime_time = max_prime_time
self._switch_relay_off()
def check_if_switched_on(self):
gpio_status = GPIO.input(self.relay_pin_pos)
return gpio_status == 0 # relay is switched on
async def prime_tube_with_fluid(self):
avg_deviation = None
start_measurement = None
self._is_dosing = True
if self._counter_sensor is not None:
avg_deviation = self._counter_sensor.reading_deviation
start_measurement = self._counter_sensor.latest_value
self._switch_relay_on()
prime_time_end = datetime.now() + timedelta(seconds=self._maximum_prime_time)
while (datetime.now() <= prime_time_end):
await asyncio.sleep(1)
if self._counter_sensor is not None:
startingToInfluenceTds = self._counter_sensor.latest_value > (start_measurement + avg_deviation)
if startingToInfluenceTds:
break
self._switch_relay_off()
self._is_dosing = False
def dose_with_online_pid_controller(self):
if not self._eligable:
raise ValueError('Trying to dose while not in being in a correct state')
# 1. Negotiate a new dose session with online service (return with frequency)
# 2. While not complete loop to server
# a. Read TDS and check if completed (within x TDS from target)
# b. Request dosage instructions (return dose_duration_in_seconds and sleep time)
# c. Dose
# d. Sleep
pass
async def dose(self, duration_in_seconds):
await self.prime_tube_with_fluid()
await self._dose(duration_in_seconds)
def busy_dosing(self):
# Move this out of memory to accommodate use across threads
return self._is_dosing
| [
"[email protected]"
] | |
e4bc5e498dbc3c8d06ce7dbcbdc9e020bcb148fa | 59166105545cdd87626d15bf42e60a9ee1ef2413 | /test/test_tournament.py | 622fc429f4b92fd6331cb63ab602edd088ca9dc5 | [] | no_license | mosoriob/dbpedia_api_client | 8c594fc115ce75235315e890d55fbf6bd555fa85 | 8d6f0d04a3a30a82ce0e9277e4c9ce00ecd0c0cc | refs/heads/master | 2022-11-20T01:42:33.481024 | 2020-05-12T23:22:54 | 2020-05-12T23:22:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,361 | py | # coding: utf-8
"""
DBpedia
This is the API of the DBpedia Ontology # noqa: E501
The version of the OpenAPI document: v0.0.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import dbpedia
from dbpedia.models.tournament import Tournament # noqa: E501
from dbpedia.rest import ApiException
class TestTournament(unittest.TestCase):
"""Tournament unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test Tournament
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = dbpedia.models.tournament.Tournament() # noqa: E501
if include_optional :
return Tournament(
number_of_people_attending = [
56
],
end_date = [
'0'
],
description = [
'0'
],
type = [
'0'
],
silver_medalist = [
None
],
participant = [
'0'
],
duration = [
1.337
],
medalist = [
None
],
previous_event = [
None
],
champion_in_single_female = [
None
],
champion_in_double_male = [
None
],
id = '0',
following_event = [
None
],
champion_in_single_male = [
None
],
bronze_medalist = [
None
],
champion_in_mixed_double = [
None
],
caused_by = [
None
],
label = [
'0'
],
gold_medalist = [
None
],
champion_in_single = [
None
],
race_track = [
None
],
next_event = [
None
],
champion_in_double_female = [
None
],
champion_in_double = [
None
],
start_date = [
'0'
],
champion = [
None
]
)
else :
return Tournament(
)
def testTournament(self):
"""Test Tournament"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
4608141a02bdce7b7bce0c759d6721c1b8a6c4f4 | baa484828e8683d51d58d48989532e3d3ce987bc | /200227_1.py | 467b83b8a14b9be0d86308da4f81a5e998cb92d7 | [] | no_license | sungguenja/study_gui | 0fc1e17c98a9afc0a6e66a39aeefcd89c3e60f5e | b058ca900061f2bd743f8532056ecedcc6b7ce0a | refs/heads/master | 2021-01-16T16:32:28.027456 | 2020-03-17T16:54:21 | 2020-03-17T16:54:21 | 243,184,712 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 475 | py | import tkinter
win = tkinter.Tk()
win.geometry("300x300")
hello = tkinter.Label(win, text='Hello', relief='ridge')
hello.pack(side='left') #왼쪽에 할당이 된다 사용이 안되보이는 장소도 할당이 되있는 상태!
hello2 = tkinter.Label(win, text='Hello2', relief='ridge')
hello2.pack(side='bottom') # 아래쪽에 할당됨. 여기서 앵커를 걸거나 fill을 써 채울 수가 있다
# 앵커는 다음 창에서 확인해보자
win.mainloop() | [
"[email protected]"
] | |
11c946dba93dd2c1be4751a9d475370852b898c6 | 39e0ee7161cb5b07aa9e9ab779b11c55aa05ae7b | /execute/CoComain.py | fb440907f135e1f3decb8c4be9e044af49789d21 | [] | no_license | zhangyoujian/Vehicle-License-Plate-Recognition | 7468665f2afdb233ebbae2e062f87f9ea07f47fe | dbf7c9212a40cd28d5fe19fc4143a131afab6b24 | refs/heads/master | 2020-04-23T22:09:54.335757 | 2019-07-30T12:03:05 | 2019-07-30T12:03:05 | 171,492,376 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,103 | py | import os
import sys
import random
import math
import numpy as np
import skimage.io
import matplotlib
import matplotlib.pyplot as plt
from mrcnn import utils
import mrcnn.model as modellib
from mrcnn import visualize
ROOT_DIR = os.path.abspath("../")
sys.path.append(ROOT_DIR) # To find local version of the library
# Import COCO config
sys.path.append(os.path.join(ROOT_DIR, "execute/samples/coco/")) # To find local version
import samples.coco.coco as coco
# %matplotlib inline
# Directory to save logs and trained model
MODEL_DIR = os.path.join(ROOT_DIR, "logs")
# Local path to trained weights file
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "execute/samples/coco/mask_rcnn_coco.h5")
# Download COCO trained weights from Releases if needed
if not os.path.exists(COCO_MODEL_PATH):
utils.download_trained_weights(COCO_MODEL_PATH)
# Directory of images to run detection on
IMAGE_DIR = os.path.join(ROOT_DIR, "images")
class InferenceConfig(coco.CocoConfig):
# Set batch size to 1 since we'll be running inference on
# one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
GPU_COUNT = 1
IMAGES_PER_GPU = 1
config = InferenceConfig()
config.display()
model = modellib.MaskRCNN(mode="inference", model_dir=MODEL_DIR, config=config)
# Load weights trained on MS-COCO
model.load_weights(COCO_MODEL_PATH, by_name=True)
class_names = ['BG', 'person', 'bicycle', 'car', 'motorcycle', 'airplane',
'bus', 'train', 'truck', 'boat', 'traffic light',
'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird',
'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear',
'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie',
'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball',
'kite', 'baseball bat', 'baseball glove', 'skateboard',
'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup',
'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza',
'donut', 'cake', 'chair', 'couch', 'potted plant', 'bed',
'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote',
'keyboard', 'cell phone', 'microwave', 'oven', 'toaster',
'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors',
'teddy bear', 'hair drier', 'toothbrush']
file_names = next(os.walk(IMAGE_DIR))[2]
image = skimage.io.imread(os.path.join(IMAGE_DIR, random.choice(file_names)))
# showFile = '5951960966_d4e1cda5d0_z.jpg'
# image = skimage.io.imread(os.path.join(IMAGE_DIR, showFile))
# Run detection
results = model.detect([image], verbose=1)
# Visualize results
r = results[0]
matplotlib.use('TkAgg')
visualize.display_instances(image, r['rois'], r['masks'], r['class_ids'],
class_names, r['scores'])
def main():
print("Hello World")
if __name__=='__main__':
main() | [
"[email protected]"
] | |
225e892d393e25a6059a797e599f4f1e545569cd | d5d62f4b7deb6836d785a51432ac35c059e1bd3a | /train.py | 4c248421105b15bdc744f0028abe0215678ef7eb | [] | no_license | anirudhdahiya9/IRChatBot | 2d15e8bc2a312549733b726082a34c205de4acd5 | e919f4c8c080211573a34f0a0174b1add738f99c | refs/heads/master | 2021-08-12T04:57:01.287789 | 2017-11-14T12:58:32 | 2017-11-14T12:58:32 | 108,343,467 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,196 | py | from nltk import word_tokenize
import sys
import collections
with open('../train.enc') as f:
elines = f.read().decode('latin-1')
elines = elines.split('\n')
with open('../train.dec') as f:
dlines = f.read().decode('latin-1')
dlines = dlines.split('\n')
print 'data loaded'
# In[19]:
def build_dataset(words, n_words):
count = [['UNK', -1]]
count.extend(collections.Counter(words).most_common(n_words - 1))
dictionary = dict()
for word, _ in count:
dictionary[word] = len(dictionary)
#data = list()
#unk_count = 0
for word in words:
if word in dictionary:
index = dictionary[word]
else:
index = 0 # dictionary['UNK']
#unk_count += 1
#data.append(index)
#count[0][1] = unk_count
reversed_dictionary = dict(zip(dictionary.values(), dictionary.keys()))
return dictionary, reversed_dictionary
#Anything above 7 for enc or above 8 for dec is thrown away
i=0
nelines = []
ndlines = []
while i<len(elines):
elines[i] = word_tokenize(elines[i])
dlines[i] = ['<GO>'] + word_tokenize(dlines[i])
if len(elines[i])<8 and len(dlines[i])<9:
nelines += [elines[i]]
ndlines += [dlines[i]]
i+=1
del elines, dlines
elines = nelines
dlines = ndlines
total = []
for subl in elines:
total += subl
for subl in dlines:
total+=subl
#print elines[:5]
#print dlines[:5]
#print total[:30]
dictionary, reversed_dictionary = build_dataset(total, 56000)
print dictionary.keys()[:10]
print len(dictionary.keys())
print 'dataset built'
# In[21]:
max_seq_len = 7
vocab_size = len(dictionary)
def tokToId(inp, dictionary):
for iline, line in enumerate(inp):
line = [dictionary[tk] for tk in line]
inp[iline] = line
return inp
def padSeq(inp, mxlen, markEOS):
for iline, line in enumerate(inp):
#if len(line)>=mxlen:
# line = line[:mxlen - 1]
# if markEOS:
# line.append('<EOS>')
# else:
# pass
#else:
if markEOS:
line.append('<EOS>')
for _ in range(mxlen - len(line) + 2):
line.append('<PAD>')
inp[iline] = line
return inp
def epadSeq(inp, mxlen, markEOS):
for iline, line in enumerate(inp):
#if len(line)>=mxlen:
# line = line[:mxlen - 1]
for _ in range(mxlen - len(line)):
line.append('<PAD>')
inp[iline] = line[::-1]
return inp
def prep_data(enc, dec, dictionary):
dictionary['<PAD>'] = vocab_size
dictionary['<EOS>'] = vocab_size+1
enc = epadSeq(enc, max_seq_len, False)
dec = padSeq(dec, max_seq_len, True)
enc = tokToId(enc, dictionary)
dec = tokToId(dec, dictionary)
#print enc[:10]
#print dec[:10]
return enc, dec
enc, dec = prep_data(elines, dlines, dictionary)
print 'data processed'
import tensorflow as tf #I know this needs to go up
from tensorflow.contrib.legacy_seq2seq import embedding_rnn_seq2seq
vocabulary_size = len(dictionary.keys())
embedding_size = 100
batch_size = 10
lsize = 40
# In[72]:
print 'making graph'
#Graph
#with tf.variable_scope("myrnn", reuse=None) as scope:
cell = tf.contrib.rnn.BasicLSTMCell(lsize)
inputs = tf.placeholder(tf.int32, shape=(None, max_seq_len))
labels = tf.placeholder(tf.int32, shape=(None, max_seq_len+2))
w_t = tf.get_variable("proj_w",[vocabulary_size, lsize], dtype=tf.float32)
w = tf.transpose(w_t)
b = tf.get_variable("proj_b",[vocabulary_size], dtype=tf.float32)
output_projection = (w, b)
#output_projection = None
inputs_series = tf.unstack(inputs, axis=1)
labels_series = tf.unstack(labels, axis=1)
#print w_t
outputs, states = embedding_rnn_seq2seq(
inputs_series, inputs_series, cell,
vocabulary_size,
vocabulary_size,
embedding_size, output_projection=output_projection,
feed_previous=True)
#print outputs[0]
def sampled_loss(labels, inputs):
labels = tf.reshape(labels, [-1, 1])
# We need to compute the sampled_softmax_loss using 32bit floats to
# avoid numerical instabilities.
local_w_t = tf.cast(w_t, tf.float32)
local_b = tf.cast(b, tf.float32)
local_inputs = tf.cast(inputs, tf.float32)
return tf.cast(
tf.nn.sampled_softmax_loss(
weights=local_w_t,
biases=local_b,
labels=labels,
inputs=local_inputs,
num_sampled=512,
num_classes=vocabulary_size),
tf.float32)
loss = tf.reduce_mean([tf.reduce_sum(sampled_loss(label, output)) for output, label in zip(outputs, labels_series)])
train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
print 'graph ok'
init = tf.global_variables_initializer()
sess = tf.Session()
saver = tf.train.Saver()
sess.run(init)
#with tf.variable_scope("myrnn") as scope:
for ep in range(40):
#if i>0:
#scope.reuse_variables()
for i in range(len(enc)//batch_size):
inp = enc[i:i+batch_size]
label = dec[i:i+batch_size]
#print inp
#print label
try:
sess.run(train_step, {inputs: inp, labels: label})
except:
continue
saver.save(sess, 'ckpt_'+str(ep)+'.tfmodel')
print ep
| [
"[email protected]"
] | |
6367acbe75692d761377ab374ee4591f98056f1a | fcdb69b396258c1e3105dbfe1fcd50cc73f7b8cf | /VetorParEimpar.py | 7664d17b4178b6b31fda29e7e3e85344d5f5692d | [] | no_license | l0rennareis/Algoritmo | 6b7147be1bb21e084c0ccfcc77d61cedd93e13fe | f73a1cbc0ab773b755d756cc2bf8e5cc758a50b4 | refs/heads/master | 2021-03-19T07:25:50.806907 | 2017-06-23T22:50:13 | 2017-06-23T22:50:13 | 94,377,834 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | pares = 0
impares = 0
vetorPar=[]
vetorImpar=[]
vetor=[]
for i in range(0, 20):
numero = int(input('Informe um numero: '))
if (numero % 2 == 0):
vetorPar.append(numero)
vetor.append(numero)
pares += 1
else:
vetorImpar.append(numero)
vetor.append(numero)
impares += 1
print ('Vetor com todos os números ', vetor)
print ('Vetor de números pares: ', vetorPar)
print ('Vetor de números impares: ', vetorImpar)
| [
"[email protected]"
] | |
827a3a7d06b3574c0142d7df5fe8ee4173d23478 | e18cd91383fd9d630f5a92034eaa8303225597d0 | /_gcloud_vendor/apitools/base/py/exceptions.py | 55faa4970ebbcfc9983d66e728a02a57f7f95362 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | lucemia/gcloud-python | f9df8525acd2cbc769608e21c02c87ce0a1b38ee | f56abdf750035c1cb023865ff67c27e3b50d8606 | refs/heads/master | 2021-01-09T07:00:56.867364 | 2015-02-18T18:20:42 | 2015-02-18T18:20:42 | 25,643,617 | 1 | 2 | null | 2014-11-28T03:52:16 | 2014-10-23T15:20:00 | Python | UTF-8 | Python | false | false | 2,472 | py | #!/usr/bin/env python
"""Exceptions for generated client libraries."""
class Error(Exception):
"""Base class for all exceptions."""
class TypecheckError(Error, TypeError):
"""An object of an incorrect type is provided."""
class NotFoundError(Error):
"""A specified resource could not be found."""
class UserError(Error):
"""Base class for errors related to user input."""
class InvalidDataError(Error):
"""Base class for any invalid data error."""
class CommunicationError(Error):
"""Any communication error talking to an API server."""
class HttpError(CommunicationError):
"""Error making a request. Soon to be HttpError."""
def __init__(self, response, content, url):
super(HttpError, self).__init__()
self.response = response
self.content = content
self.url = url
def __str__(self):
content = self.content.decode('ascii', 'replace')
return 'HttpError accessing <%s>: response: <%s>, content <%s>' % (
self.url, self.response, content)
@property
def status_code(self):
# TODO(craigcitro): Turn this into something better than a
# KeyError if there is no status.
return int(self.response['status'])
@classmethod
def FromResponse(cls, http_response):
return cls(http_response.info, http_response.content,
http_response.request_url)
class InvalidUserInputError(InvalidDataError):
"""User-provided input is invalid."""
class InvalidDataFromServerError(InvalidDataError, CommunicationError):
"""Data received from the server is malformed."""
class BatchError(Error):
"""Error generated while constructing a batch request."""
class ConfigurationError(Error):
"""Base class for configuration errors."""
class GeneratedClientError(Error):
"""The generated client configuration is invalid."""
class ConfigurationValueError(UserError):
"""Some part of the user-specified client configuration is invalid."""
class ResourceUnavailableError(Error):
"""User requested an unavailable resource."""
class CredentialsError(Error):
"""Errors related to invalid credentials."""
class TransferError(CommunicationError):
"""Errors related to transfers."""
class TransferInvalidError(TransferError):
"""The given transfer is invalid."""
class NotYetImplementedError(GeneratedClientError):
"""This functionality is not yet implemented."""
class StreamExhausted(Error):
"""Attempted to read more bytes from a stream than were available."""
| [
"[email protected]"
] | |
d20251508eb9837040e9642fba5ecaf94f21f6c0 | 8333655862e5f90383f07b5b83bbf606e01b01a3 | /cythonized_code/setup.py | a54ff038e037db50517163828f6a0012a347bc8e | [] | no_license | cuichi23/delayCoupledDPLLnet | d0a161a0bb6b47f9ae55377d5d14b369f8df13b4 | 443cfc66f12a5fbfb5766da4d058fc644fa6f643 | refs/heads/master | 2021-07-13T05:21:53.392002 | 2020-11-26T18:32:03 | 2020-11-26T18:32:03 | 66,270,249 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | from distutils.core import setup
from Cython.Build import cythonize
setup(
ext_modules = cythonize("simulation.pyx")
)
| [
"[email protected]"
] | |
3283aec3cc51133ee7658b42d162e7449747d2ec | 0058e49536f6a98baae2c7221838fbcafbcd218c | /bookings/serializers/appointments.py | 7e04befe080d709aece1b7517149597d9510b4c8 | [] | no_license | Julian-Bio0404/Gym-Admin | b45eb65d539bf39788e440dcfd6e485365c4ef59 | 812c1527393f179ae0486d9da210da1366bbd1ee | refs/heads/main | 2023-06-26T13:50:02.766332 | 2021-07-31T02:02:03 | 2021-07-31T02:02:03 | 378,182,141 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,963 | py | """Appointments serializers."""
# Django
from django.core.validators import RegexValidator
from django.utils import timezone
# Django REST Framework
from rest_framework import serializers
# Models
from users.models import User
from bookings.models import Appointment
# Serializers
from users.serializers import UserModelSerializer
class AppointmentModelSerializer(serializers.ModelSerializer):
"""Appointment model serializer."""
user = UserModelSerializer(read_only=True)
physio = UserModelSerializer(read_only=True)
class Meta:
"""Meta class."""
model = Appointment
fields = ('__all__')
read_only_fields = ['user']
class CreateAppointmentSerializer(serializers.Serializer):
"""Create a Appointment"""
def physio_validator(physio):
"""Verify that the physio exists"""
physios = User.objects.filter(rol='physio')
physios_usernames = [physio.username for physio in physios]
if physio not in physios_usernames:
raise serializers.ValidationError('The physio does not exist')
identification_regex = RegexValidator(
regex=r"\d{6,10}$",
message='Identification number must be entered in the format: 199999999. Up to 11 digits allowed'
)
identification_number = serializers.CharField(validators=[identification_regex])
physio = serializers.CharField(validators=[physio_validator])
date = serializers.DateTimeField()
def validate(self, data):
"""Verify that the user has an active membership """
user = User.objects.get(
identification_number=data['identification_number'],
is_verified=True
)
profile = user.profile
if profile.is_active == False:
raise serializers.ValidationError('The user does not have an active membership.')
if data['date'] <= timezone.now():
raise serializers.ValidationError('Time not available.')
try:
appointment = Appointment.objects.get(user=user)
if appointment:
raise serializers.ValidationError('You already have an appointment.')
except Appointment.DoesNotExist:
try:
appointment_not_available = Appointment.objects.get(date=data['date'])
if appointment_not_available:
raise serializers.ValidationError('Time is already busy.')
except Appointment.DoesNotExist:
return data
def create(self, data):
"""Create a training reserve."""
user = User.objects.get(
identification_number=data.pop('identification_number')
)
physio = User.objects.get(
username=data.pop('physio'),
rol='physio'
)
appointment = Appointment.objects.create(
user=user,
physio=physio,
date=data['date']
)
return appointment
| [
"[email protected]"
] | |
b79d8a629800eb411e96096967862f6dbc396ea9 | cbd4744259616c44d47337cd856c03b9c7c32511 | /Planteome_annotation/IRRI/oryzaSNPjson_2GAF.py | 40f723a8c82fe5b638f23b7a7b857e1f5cfe6ff8 | [] | no_license | austinmeier/data_annotation | 6d2c22d5cdbc2bcaddefc6b8358c9a0ca30147a5 | a8e9361dff4da67e4963aa0f0dfd496e341ea904 | refs/heads/master | 2021-01-18T23:38:44.196686 | 2018-01-09T18:26:38 | 2018-01-09T18:26:38 | 52,914,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,846 | py | import time
import urllib2
import json
import re
outdir = "/Users/meiera/Documents/SVN/associations/to-associations/test_IRRI_TO/"
traitdict ={} #this is what we will use
with open("/Users/meiera/Documents/git/data_annotation/Planteome_annotation/IRRI/OryzaSNPtraitmap.csv") as infile:
for line in infile:
line1 = line.split(',')
traitdict[line1[0]] = {'TOid':line1[1].strip(),'traitname':line1[2].strip(),'evidencecode':'IDA'}
sampletraitdict ={ ### this is what the actual trait dictionary looks like
# 43718:{'TOid':'TO:0000269','traitname':'100-grain weight (gm) - cultivated','evidencecode':'IDA'},
# 43693:{'TOid':'TO:0000140','traitname':'Apiculus color at post-harvest','evidencecode':'IDA'},
# 43694:{'TOid':'TO:0000140','traitname':'Apiculus color at reproductive','evidencecode':'IDA'},
# 43695:{'TOid':'TO:0000294','traitname':'Auricle color at vegetative','evidencecode':'IDA'},
# 43696:{'TOid':'TO:0000141','traitname':'Awn color','evidencecode':'IDA'},
43766:{'TOid':'TO:0000141','traitname':'Awn color (Late observation)','evidencecode':'IDA'}
}
#########################################################################
# MAIN
#########################################################################
def main(traitdict):
for x in traitdict:
trait = traitdict[x] #returns a dictionary
outname = re.sub('[^a-zA-Z0-9 \n\.]', '', trait['traitname'])
outfile = "%s%s_OryzaSNP.assoc"%(outdir,outname.replace(" ","_"))
with open(outfile, "w") as assocfile:
assocfile.write("!gaf-version: 2.0\n")
trait_json= mk_json(x)
print trait_json
for object in trait_json:
gafline(object,trait,assocfile)
#########################################################################
# web calls
#########################################################################
def mk_json(traitnumber): #traitnumber is the ID that Oryzasnp uses, eg: 43696
phenotypeID= str(traitnumber)
webcall = "http://oryzasnp.org/iric-portal/ws/variety/phenotypes/%s" %(phenotypeID)
webcalled1=urllib2.urlopen(webcall).read()
tempjson = "/Users/meiera/Documents/git/data_annotation/Planteome_annotation/IRRI/test1.json"
TEMPJSON = open(tempjson, "w")
TEMPJSON.write(webcalled1)
TEMPJSON.close()
with open("/Users/meiera/Documents/git/data_annotation/Planteome_annotation/IRRI/test1.json") as data_file:
trait_json=json.load(data_file)
return trait_json
#########################################################################
# one run, one GAF line
#########################################################################
def gafline(phenotype_object,testtrait, outfile):
#check to make sure each column call function returns a value, if any return False, it will not write a GAF line
if col2(phenotype_object) and col3(phenotype_object) and col5(testtrait) and col6() and col7(testtrait) \
and col9() and col12() and col13(phenotype_object) and col14() and col15 and col16(phenotype_object,testtrait):
outfile.write(
#print(
col1()+"\t"+
col2(phenotype_object)+"\t"+
col3(phenotype_object)+"\t"+
col4()+"\t"+
col5(testtrait)+"\t"+
col6()+"\t"+
col7(testtrait)+"\t"+
col8(phenotype_object)+"\t"+
col9()+"\t"+
col10(phenotype_object)+"\t"+
col11(phenotype_object)+"\t"+
col12()+"\t"+
col13(phenotype_object)+"\t"+
col14()+"\t"+
col15()+"\t"+
col16(phenotype_object,testtrait)+"\t"+
"\n")
else: print("sump'n aint right with this trait")
# required
def col1():
return "IRIC"
#required
def col2(phenotype_object):
#check if the dictionary from json contains an irisId
if 'irisId' in phenotype_object:
#return the IRIS_ID
return str(phenotype_object['irisId']).replace(" ","_")
else:
print('record for\n', phenotype_object,"\nwill not be included. It is missing an IRIS-ID")
return False
#required
def col3(phenotype_object):
#check if the dictionary from json contains a name
if 'name' in phenotype_object:
#return the germplasm name (unless here is a germplasm symbol)
Name= str(phenotype_object['name']).split('::')
return Name[0]
else:
print('record for\n', phenotype_object,"\nwill not be included. It is missing a name")
return False
#not required
def col4():
return ""
#required
def col5(testtrait):
#return the TO:xxxxxxxx or CO:xxxxxxxx
#return "TO:0000141" #this is the test one, "awn color"
return testtrait['TOid']
#required
def col6():
#return IRIC (no pmid)
return "IRIC"
#required
def col7(testtrait):
#return the evidence code
return testtrait['evidencecode']
#not required
def col8(phenotype_object):
#check if the dictionary from json contains a country
if 'country' in phenotype_object:
#return the relationship "from_country" and the country of origin
country_origin = phenotype_object['country']
column8 = "from_country(%s)"%(country_origin)
return column8.replace(" ","_")
else:
return ""
#required
def col9():
#return aspect
return "T"
#not required
def col10(phenotype_object):
#check if the dictionary from json contains a name
if 'name' in phenotype_object:
#return the germplasm name (unless here is a germplasm symbol)
Name= str(phenotype_object['name']).split('::')
return Name[0]
else:
#this name is not required, so it won't return anything. However, in this case, if there isn't a name,
#it will error out on col3(), so it doesn't really matter
return ""
#not required
def col11(phenotype_object):
#check if the dictionary from json contains a iricStockPhenotypeId
if 'name' in phenotype_object:
Name= str(phenotype_object['name']).split('::')
if len(Name)>1:
return Name[1]
else: return Name[0]
else:
return ""
#required
def col12():
#return object type
return "germplasm"
#required
def col13(phenotype_object):
#return taxonID
#might need translation for subpopulation.
#if phenotype_object['subpopulation']== "indx":
# taxonID = "NCBITaxon:39946"
#return taxonID
return "NCBITaxon:4530" #this is the generic oryza stativa NCBITaxon
#required
def col14():
#return date
date=str(time.strftime('%m%d%Y'))
return date
#not required
def col15():
#return assigned_by
return "Planteome:Austin_Meier"
#not required for GAF, but required for germplasm
def col16(phenotype_object,phenotypename):
"""displays the phenotype score along with the trait name as recorded. No spaces allowed."""
if 'value' in phenotype_object:
#return the value
phenotype_value = str(phenotype_object['value']).replace(" ", "_")
return "has_phenotype_score(" + phenotypename['traitname'].replace(" ", "_") + "=" + phenotype_value +")"
#return phenotypename['traitname']+ str(phenotype_object['value'])
else:
print('record for\n', phenotype_object,"\nwill not be included. It is missing a value")
return False
#return the variable (if it exists)
#return the method, if it exists
#return the evaluation location (evaluation_location(x))
#########################################################################
# run actual code here
#########################################################################
if __name__ == "__main__":
main(traitdict)
| [
"[email protected]"
] | |
b6978e0268c1f7c52f2e5e48d60663f265ee27f5 | 5c2b0d15e06bef93dc6ef3c90c9554a61ff8eaac | /ch03/06/01.py | d942649134ade490e503f1cf6e8d341fc161ad10 | [
"MIT"
] | permissive | leehb1592/book-kiwoom-api | c6a9b4796f3e2b33a5dc0fed51572f4c76a36d7f | 377f66d77db78c70a02b0d87661961a281133a43 | refs/heads/main | 2023-04-18T02:41:14.079341 | 2021-04-29T13:47:44 | 2021-04-29T13:47:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | # 연결 상태 확인
from pykiwoom.kiwoom import *
kiwoom = Kiwoom()
kiwoom.CommConnect(block=True)
state = kiwoom.GetConnectState()
if state == 0:
print("미연결")
elif state == 1:
print("연결완료") | [
"[email protected]"
] | |
86abe6ce015ef905e010d371869f06e172e875fa | 087ec9d8a17f2107ebb70a24151e00c2b734f139 | /Mundo 2/Exercicios/Desafio064.py | b5d96f2f21cbd5ba101a12847c467e6165dcb509 | [
"MIT"
] | permissive | yWolfBR/Python-CursoEmVideo | c8bf38ec36b77dc6685256cce22f03ef47505004 | 17bab8ad3c4293daf8377c5d49242942845b3577 | refs/heads/main | 2023-06-14T09:11:00.894609 | 2021-06-24T18:37:36 | 2021-06-24T18:37:36 | 378,760,778 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py | s = c = 0
while True:
n = int(input('Digite um número [999 para finalizar]: '))
if n == 999:
break
else:
s += n
c += 1
print('Foram digitados {} números. A soma total entre eles é de {}'.format(c, s))
| [
"[email protected]"
] | |
af318256ba4b8562d185c1d373e905a5c651d3e6 | 76bed4134f2590726c20d9d74b4441baac19b856 | /scripts/setup.py | 77e94d241ce2343ef20dad3f26f26ffdad77b286 | [] | no_license | venkatmi/oncosplice | 5f55925cabeacd00c965a09ee5f9d09e4b41c75b | c7c1d4c9ca157b8a0dca087069f73cdb3ac60243 | refs/heads/master | 2021-11-09T23:07:40.390385 | 2021-11-05T01:01:13 | 2021-11-05T01:01:13 | 109,444,825 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,737 | py | #!/usr/local/bin/python2.6
import sys
import suds
_script = 'AltAnalyze.py'
_appName = "AltAnalyze"
_appVersion = '2.0.9.4'
_appDescription = "AltAnalyze is a freely available, open-source and cross-platform program that allows you to take RNASeq or "
_appDescription +="relatively raw microarray data (CEL files or normalized), identify predicted alternative splicing or alternative "
_appDescription +="promoter changes and view how these changes may affect protein sequence, domain composition, and microRNA targeting."
_authorName = 'Nathan Salomonis'
_authorEmail = '[email protected]'
_authorURL = 'http://www.altanalyze.org'
_appIcon = "AltAnalyze_W7.ico"
excludes = ['wx'] #["wxPython"] #"numpy","scipy","matplotlib"
includes = ["mpmath", "numpy","sklearn.neighbors.typedefs",'sklearn.utils.lgamma','sklearn.manifold',
'sklearn.utils.sparsetools._graph_validation','sklearn.utils.weight_vector',
'pysam.TabProxies','pysam.ctabixproxies','patsy.builtins','dbhash','anydbm']
""" By default, suds will be installed in site-packages as a .egg file (zip compressed). Make a duplicate, change to .zip and extract
here to allow it to be recognized by py2exe (must be a directory) """
matplot_exclude = [] #['MSVCP90.dll']
scipy_exclude = [] #['libiomp5md.dll','libifcoremd.dll','libmmd.dll']
""" xml.sax.drivers2.drv_pyexpat is an XML parser needed by suds that py2app fails to include. Identified by looking at the line: parser_list+self.parsers in
/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/PyXML-0.8.4-py2.7-macosx-10.6-intel.egg/_xmlplus/sax/saxexts.py
check the py2app print out to see where this file is in the future
(reported issue - may or may not apply) For mac and igraph, core.so must be copied to a new location for py2app:
sudo mkdir /System/Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/lib-dynload/igraph/
cp /Library/Python/2.6/site-packages/igraph/core.so /System/Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/lib-dynload/igraph/
"""
if sys.platform.startswith("darwin"):
### Local version: /usr/local/bin/python2.6
### example command: python setup.py py2app
from distutils.core import setup
import py2app
import lxml
import sklearn
includes+= ["pkg_resources","distutils","lxml.etree","lxml._elementpath"] #"xml.sax.drivers2.drv_pyexpat"
"""
resources = ['/System/Library/Frameworks/Python.framework/Versions/2.6/include/python2.6/pyconfig.h']
frameworks = ['/System/Library/Frameworks/Python.framework/Versions/2.6/include/python2.6/pyconfig.h']
frameworks += ['/System/Library/Frameworks/Python.framework/Versions/2.6/Extras/lib/python/pkg_resources.py']
frameworks += ['/System/Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/distutils/util.py']
frameworks += ['/System/Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/distutils/sysconfig.py']
import pkg_resources
import distutils
import distutils.sysconfig
import distutils.util
"""
options = {"py2app":
{"excludes": excludes,
"includes": includes,
#"frameworks": frameworks,
#"resources": resources,
#argv_emulation = True,
"iconfile": "altanalyze.icns"}
}
setup(name=_appName,
app=[_script],
version=_appVersion,
description=_appDescription,
author=_authorName,
author_email=_authorEmail,
url=_authorURL,
options=options,
#data_files=data_files,
setup_requires=["py2app"]
)
if sys.platform.startswith("win"):
### example command: python setup.py py2exe
from distutils.core import setup
import py2exe
import suds
import numpy
import matplotlib
import unique
import lxml
import sys
import sklearn
import pysam
import TabProxies
import ctabix
import csamtools
import cvcf
import dbhash
import anydbm
import six ### relates to a date-time dependency in matplotlib
#sys.path.append(unique.filepath("Config\DLLs")) ### This is added, but DLLs still require addition to DLL python dir
from distutils.filelist import findall
import os
excludes = []
data_files=matplotlib.get_py2exe_datafiles()
matplotlibdatadir = matplotlib.get_data_path()
matplotlibdata = findall(matplotlibdatadir)
matplotlibdata_files = []
for f in matplotlibdata:
dirname = os.path.join('matplotlibdata', f[len(matplotlibdatadir)+1:])
matplotlibdata_files.append((os.path.split(dirname)[0], [f]))
windows=[{"script":_script,"icon_resources":[(1,_appIcon)]}]
options={'py2exe':
{
"includes": 'lxml',
'includes': 'pysam',
'includes': 'TabProxies',
'includes': 'csamtools',
'includes': 'ctabix',
'includes': 'lxml.etree',
'includes': 'lxml._elementpath',
"includes": 'matplotlib',
"includes": 'mpl_toolkits',
"includes": 'matplotlib.backends.backend_tkagg',
#'includes': 'sklearn.neighbors.typedefs',
#'includes': 'sklearn.utils.lgamma',
#"includes": 'sklearn.utils.sparsetools._graph_validation',
#"includes": 'sklearn.utils.weight_vector',
#"includes": 'sklearn.manifold',
"dll_excludes": matplot_exclude+scipy_exclude,
}}
setup(
#console = windows,
windows = windows,
options = options,
version=_appVersion,
description=_appDescription,
author=_authorName,
author_email=_authorEmail,
url=_authorURL,
data_files=matplotlibdata_files+data_files,
)
if sys.platform.startswith("2linux"):
# bb_setup.py
from bbfreeze import Freezer
f = Freezer(distdir="bb-binary")
f.addScript("AltAnalyze.py")
f()
if sys.platform.startswith("2linux"):
# bb_setup.py
from bbfreeze import Freezer
f = Freezer(distdir="bb-binary")
f.addScript("AltAnalyze.py")
f()
if sys.platform.startswith("linux"):
### example command: python setup.py build
includes = ['matplotlib','mpl_toolkits','matplotlib.backends.backend_tkagg']
includefiles = []
from cx_Freeze import setup, Executable
### use to get rid of library.zip and move into the executable, along with appendScriptToLibrary and appendScriptToExe
#buildOptions = dict(create_shared_zip = False)
setup(
name = _appName,
version=_appVersion,
description=_appDescription,
author=_authorName,
author_email=_authorEmail,
url=_authorURL,
#options = dict(build_exe = buildOptions),
options = {"build_exe": {"includes":includes, "include_files": includefiles}},
executables = [Executable(_script,
#appendScriptToExe=True,
#appendScriptToLibrary=False,
#icon='goelite.ico',
compress=True)],
)
| [
"[email protected]"
] | |
8d4b95f623de9965e3967b316937dfb89e9d4188 | 70f24526e33c5d1e6280a86a449eaf4f577082ce | /src/djallauth/settings/__init__.py | a6b34a8df6a3eaf40d854db692645d4f6fa6ff34 | [] | no_license | mikenunez/Dj-AllAuth | afa2de5d00c30e9c6ac14e5f0d74225c437e43c8 | 9ceb81166d53b733e1762b08a36eecac2f4c9339 | refs/heads/master | 2021-06-10T11:47:46.264990 | 2017-01-06T22:06:19 | 2017-01-06T22:06:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | from .base_settings import *
try:
from .prod_settings import *
except:
pass
try:
from .local_settings import *
except:
pass | [
"[email protected]"
] | |
d8cd5e0c456f3ab55d88a4ac1b3ffad3ecc02d82 | 7ba48f82dac0c19d41d7da51cda3aef5173dd77c | /multi_sites/context_processors.py | a2c36ecdbaff4e4f7e46f6290a78709a96da695e | [] | no_license | saiful7/Betasmartz | 09af8b11f816adf3c2dc41ad5a70f170d6dbb981 | 337a79b59498f42294f19e53eea9cd1c8019ee48 | refs/heads/master | 2022-05-10T04:14:31.382569 | 2018-11-23T06:50:45 | 2018-11-23T06:50:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | from django.contrib.sites.shortcuts import get_current_site
def with_theme(request):
"""
Return the value you want as a dictionary.
You may add multiple values in there.
"""
site = get_current_site(request)
return {
'theme': site.site_config.safe_theme,
}
| [
"[email protected]"
] | |
659b51b2851089f1ee15701359c633374ea0899b | 64cd231e0a11bdfc93d36594f617e07250067a00 | /config.py | b58d424ea92c7d6b265d8e05a75b9635f9e16088 | [] | no_license | qianxunchen/Blog | 58c1962ac617a5aeb1b0ef5169a336ed2770f100 | d7d748bb28dc927f71d5640c01fde286c2fb92de | refs/heads/master | 2022-12-05T20:04:43.923775 | 2020-08-22T09:12:23 | 2020-08-22T09:12:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://%s:%s@%s/%s?charset=utf8' % ('root', 'xxxx', 'localhost:3306', 'blog')
SECRET_KEY = "123456"
JSON_AS_ASCII = False | [
"[email protected]"
] | |
13ddbe3ec41a22d7523cbec2421a2bbf228c7295 | e9bb78ff467f8db8259fb7a31f205881faa4ebb1 | /021_MySQL/016_prevent_SQL_injection.py | 10e7d8ba6c84ec9b855918c0dbf946948c860a40 | [] | no_license | Maopos/Basic_Python | 1913144b56bc17fbe12bebf013e8408d9e1c75e5 | 9c976b6c6e7861e431e4f6a4c630bbce982c0eec | refs/heads/main | 2023-06-08T18:30:29.573670 | 2021-06-20T23:58:56 | 2021-06-20T23:58:56 | 363,065,275 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | import mysql.connector
print('======================')
print()
mydb = mysql.connector.connect(
host = 'localhost',
user = 'root',
password = 'Python2222.',
database = 'base_datos'
)
mycursor = mydb.cursor()
sql = "SELECT * FROM customers WHERE address = %s"
adr = ("Cali Valle", )
mycursor.execute(sql, adr)
myresult = mycursor.fetchall()
for x in myresult:
print(x)
print()
print('======================') | [
"[email protected]"
] | |
bec42c4453d3bfc82e98971640f59310acce4db9 | 33a39e03acfbf39c40792b02bd2fd2a88de1f9a9 | /DisappearedNumbers.py | 887d8bd8399b993fd6fb4dbe0f15c843c4dcc5c8 | [] | no_license | sainihimanshu1999/Arrays-LeetCode | 468e72e6b15dd050c0d1ef4ae92a55d3ca157daf | 32aeb0e2c180f0949fbdb03959e032873ca5034a | refs/heads/main | 2023-04-20T03:38:58.028150 | 2021-05-09T17:27:25 | 2021-05-09T17:27:25 | 364,503,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py | '''
In this question we edit the array in-place, we mark the the numbers negative and when the numbers are
positive those index+1 numbers are missing
'''
def disappeared(self,nums):
for i in range(len(nums)):
idx = abs(nums[i])-1
nums[idx] = -abs(nums[idx])
return [i+1 for i in range(len(nums)) if nums[i]>0] | [
"[email protected]"
] | |
751a0e0b4e2ffbb3b0ab47a59a0336853cae1308 | ac5eb7df8515fb92071f9e0f64d1cf6467f4042d | /Python/regularExpressionMathching.py | cf85e148de31b73447673601351d3242b146e37e | [] | no_license | Litao439420999/LeetCodeAlgorithm | 6ea8060d56953bff6c03c95cf6b94901fbfbe395 | 9aee4fa0ea211d28ff1e5d9b70597421f9562959 | refs/heads/master | 2023-07-08T06:17:20.310470 | 2021-08-08T08:12:23 | 2021-08-08T08:12:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,614 | py | #
# LeetCodeLink : https://leetcode-cn.com/problems/regular-expression-matching
# 10. 正则表达式匹配
# 给你一个字符串 s 和一个字符规律 p,请你来实现一个支持 '.' 和 '*' 的正则表达式匹配。
# '.' 匹配任意单个字符
# '*' 匹配零个或多个前面的那一个元素
# 所谓匹配,是要涵盖 整个 字符串 s的,而不是部分字符串。
#
# 说明:
# s 可能为空,且只包含从 a-z 的小写字母。
# p 可能为空,且只包含从 a-z 的小写字母,以及字符 . 和 *。
#
# 示例 1:
# 输入:
# s = "aa"
# p = "a"
# 输出: false
# 解释: "a" 无法匹配 "aa" 整个字符串。
#
# 示例 2:
# 输入:
# s = "aa"
# p = "a*"
# 输出: true
# 解释: 因为 '*' 代表可以匹配零个或多个前面的那一个元素, 在这里前面的元素就是 'a'。因此,字符串 "aa" 可被视为 'a' 重复了一次。
#
"""
# 动态规划的一般流程三步:暴力的递归解法 -> 带备忘录的递归解法 -> 迭代的动态规划解法
# 思考流程来说, 就分为步骤:找到状态和选择 -> 明确 dp 数组/函数的定义 -> 寻找状态之间的关系
#
# 这就是思维模式的框架,按照以上的模式来解决问题,养成这种模式思维
"""
class Solution:
"""
动态规划 = 状态转移方程 + 边界条件
状态转移方程 = f[i][j] =
if(p[j] != '*') = f[i-1][j-1] matches; otherwise false;
otherwise = f[i-1][j] or f[i][j-2] matches; otherwise f[i][j-2]
边界条件 = f[0][0] = true, 即就是空串可以匹配
逐步匹配、 S.size = m、 P.size = n
时间复杂度 T(n) = big O(mn)
空间复杂度 S(n) = big O(mn)
"""
def isMatch(self, s, p):
m, n = len(s), len(p)
def matches(i, j):
if i == 0:
return False
if p[j - 1] == '.':
return True
return s[i - 1] == p[j - 1]
# 状态转移方程
f = [[False] * (n + 1) for _ in range(m + 1)]
# 边界条件
f[0][0] = True
for i in range(m + 1):
for j in range(1, n + 1):
if p[j - 1] == '*':
f[i][j] |= f[i][j - 2]
if matches(i, j - 1):
f[i][j] |= f[i - 1][j]
else:
if matches(i, j):
f[i][j] |= f[i - 1][j - 1]
return f[m][n]
if __name__ == "__main__":
# [test]
s = "aa"
p = "a"
# p = "a."
# p = "a*"
solution = Solution()
print(solution.isMatch(s, p))
| [
"[email protected]"
] | |
843de1a2886ed67c04dfc4e39f9973821e2113af | c90385e0a07392d60fbde6022419787abdd9badf | /My_HTML_IDE.py | 282c4ef1ad4aa7fb7f27d6a2a07b495aa2de159e | [] | no_license | aditi419/P160h | 577b834da589dcecd675625398b8d4831958b355 | 4965cc1a1cce9959f117c035bac62cab19fb8bc6 | refs/heads/main | 2023-08-15T14:22:25.578418 | 2021-09-25T19:56:31 | 2021-09-25T19:56:31 | 410,369,234 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 611 | py | from tkinter import *
from PIL import ImageTk,Image
root = Tk()
root.minsize(650,650)
root.maxsize(650,650)
root.configure(background='#6699CC')
open_image = ImageTk.PhotoImage(Image.open('open.png'))
save_image = ImageTk.PhotoImage(Image.open('save.png'))
exit_image = ImageTk.PhotoImage(Image.open('exit.jpg'))
label1 = Label(root,text='File Name:')
label1.place(relx=0.28,rely=0.03,anchor=CENTER)
inputFileName = Entry(root)
inputFileName.place(relx=0.5,rely=0.03,anchor=CENTER)
my_text = Text(root,height=35,width=80,bg='white',fg='black')
my_text.place(relx=0.5,rely=0.5,anchor=CENTER)
root.mainloop() | [
"[email protected]"
] | |
04ce7ce32adfb75a109e9455c76b0c5e00ef24f7 | 3bbe69481d294eba60f83639f3a9430fb8cda4d9 | /api/vacancy/views.py | 3f70edb24741e63c1e805ca90df284d9bceff490 | [] | no_license | ulugbek1999/ncd-cms | 5e288f44b01387cd66a54d2dcaf1e0d288205bc4 | dcb43bf65f0d6efcbf6481f5c9284c8c1a7c6b9d | refs/heads/master | 2022-09-06T13:22:34.637080 | 2019-12-14T09:10:26 | 2019-12-14T09:10:26 | 204,455,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,778 | py | from rest_framework.views import APIView
from .serializers import VacancySerializer
from vacancy.models import Vacancy
from rest_framework.response import Response
from rest_framework import status
from django.utils import timezone
from django.shortcuts import get_object_or_404
from django.db import connection
class VacancyCreateView(APIView):
def update_sql(self, i):
with connection.cursor() as cursor:
cursor.execute("UPDATE vacancy SET image='vacancies/default.jpg' WHERE id=%s", [i])
cursor.execute("SELECT * FROM vacancy WHERE id=%s", [i])
row = cursor.fetchone()
print(row)
return row
def post(self, request, format=None):
title_en = request.data.get("title_en")
title_uz = request.data.get("title_uz")
title_ru = request.data.get("title_ru")
title_kz = request.data.get("title_kz")
short_description_en = request.data.get("short_content_en")
short_description_uz = request.data.get("short_content_uz")
short_description_ru = request.data.get("short_content_ru")
short_description_kz = request.data.get("short_description_kz")
description_en = request.data.get("content_en")
description_uz = request.data.get("content_uz")
description_ru = request.data.get("content_ru")
description_kz = request.data.get("content_kz")
image = request.data.get("image")
location_ru = request.data.get("location_ru")
location_en = request.data.get("location_en")
location_kz = request.data.get("location_kz")
location_uz = request.data.get("location_uz")
wages_en = request.data.get("wages_en")
wages_ru = request.data.get("wages_ru")
wages_kz = request.data.get("wages_kz")
wages_uz = request.data.get("wages_uz")
status_i = request.data.get("status")
vacancy = Vacancy.objects.create(
title_en=title_en,
title_ru=title_ru,
title_kz=title_kz,
title_uz=title_uz,
short_description_en=short_description_en,
short_description_kz=short_description_kz,
short_description_ru=short_description_ru,
short_description_uz=short_description_uz,
description_en=description_en,
description_kz=description_kz,
description_ru=description_ru,
description_uz=description_uz,
location_en=location_en,
location_kz=location_kz,
location_ru=location_ru,
location_uz=location_uz,
image=image,
wages_en=wages_en,
wages_kz=wages_kz,
wages_ru=wages_ru,
wages_uz=wages_uz,
created=timezone.now(),
status=status_i,
)
if image is None:
self.update_sql(vacancy.id)
return Response(status=status.HTTP_201_CREATED)
class VacancyDeleteView(APIView):
def delete(self, request):
vacancy = get_object_or_404(Vacancy, pk=request.data.get("id"))
vacancy.delete()
return Response(status=status.HTTP_202_ACCEPTED)
class VacancyUpdateView(APIView):
def put(self, request, pk):
vacancy = Vacancy.objects.get(pk=pk)
vacancy.title_en = request.data.get("title_en")
vacancy.title_uz = request.data.get("title_uz")
vacancy.title_ru = request.data.get("title_ru")
vacancy.title_kz = request.data.get("title_kz")
vacancy.short_description_en = request.data.get("short_content_en")
vacancy.short_description_uz = request.data.get("short_content_uz")
vacancy.short_description_kz = request.data.get("short_content_kz")
vacancy.short_description_ru = request.data.get("short_content_ru")
vacancy.description_en = request.data.get("content_en")
vacancy.description_uz = request.data.get("content_uz")
vacancy.description_ru = request.data.get("content_ru")
vacancy.description_kz = request.data.get("content_kz")
if request.data.get("changed") == "true":
vacancy.image = request.data.get("image")
vacancy.location_ru = request.data.get("location_ru")
vacancy.location_en = request.data.get("location_en")
vacancy.location_kz = request.data.get("location_kz")
vacancy.location_uz = request.data.get("location_uz")
vacancy.wages_en = request.data.get("wages_en")
vacancy.wages_ru = request.data.get("wages_ru")
vacancy.wages_kz = request.data.get("wages_kz")
vacancy.wages_uz = request.data.get("wages_uz")
vacancy.status = request.data.get("status")
vacancy.save()
return Response(status=status.HTTP_202_ACCEPTED)
| [
"[email protected]"
] | |
9a98f273cf8794cb612c493669dadee98e5e6431 | b09b6ba74bae5c42b21c417596d3577de4ae79ca | /great_international/migrations/0140_delete_internationalcuratedtopiclandingpage.py | bb152e9288e16db67038d82732e071dce21ed1a7 | [
"MIT"
] | permissive | uktrade/directory-cms | 856902c4d445d5ed2a2ec7a66d62e3a098636142 | 28b1390a3f15fbb88388ec197ab76e510cccbaa6 | refs/heads/develop | 2023-08-17T15:25:48.960844 | 2023-08-14T13:42:08 | 2023-08-14T13:42:08 | 119,525,005 | 8 | 7 | MIT | 2023-09-12T15:21:44 | 2018-01-30T11:11:59 | Python | UTF-8 | Python | false | false | 455 | py | # Generated by Django 2.2.24 on 2021-11-17 15:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0059_apply_collection_ordering'),
('wagtailforms', '0004_add_verbose_name_plural'),
('great_international', '0139_auto_20211013_1536'),
]
operations = [
migrations.DeleteModel(
name='InternationalCuratedTopicLandingPage',
),
]
| [
"[email protected]"
] | |
40861d39d38515c68ba7459be7326b62d6ad687c | 449821c18203ff8823ef56735741982af74d2c59 | /calculator.py | e3bb12a4143607fdc8e902db1670b48b358d3ee5 | [] | no_license | xiedong1995/test | 745891e23845972b949511e63b59373a6b2ce7cd | a1866fae0b0c2390cc43e2647ec2c586e52023b5 | refs/heads/master | 2020-09-22T09:48:15.361434 | 2019-12-01T10:54:17 | 2019-12-01T10:54:17 | 173,248,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py |
class Calculator:
""" 用于完成两个数的加减乘除"""
def __init__(self, a, b):
self.a = int(a)
self.b = int(b)
# 加法
def add(self):
return self.a + self.b
# 减法
def sub(self):
return self.a - self.b
# 乘法
def mul(self):
return self.a * self.b
# 除法
def div(self):
return self.a / self.b
| [
"[email protected]"
] | |
3f438af3cb5db262b6cf06fe2e6ff6a4b57eb63d | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/ads/googleads/v4/googleads-py/google/ads/googleads/v4/services/services/feed_service/transports/base.py | af73b0aeafb2dac3ed16dbc2b26e95d02d348a3d | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,949 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
from google import auth
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.ads.googleads.v4.resources.types import feed
from google.ads.googleads.v4.services.types import feed_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
'google-ads-googleads',
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class FeedServiceTransport(metaclass=abc.ABCMeta):
"""Abstract transport class for FeedService."""
AUTH_SCOPES = (
'https://www.googleapis.com/auth/adwords',
)
def __init__(
self, *,
host: str = 'googleads.googleapis.com',
credentials: credentials.Credentials = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]): The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ':' not in host:
host += ':443'
self._host = host
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials is None:
credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
# Save the credentials.
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
self._prep_wrapped_messages(client_info)
def _prep_wrapped_messages(self, client_info):
# Precomputed wrapped methods
self._wrapped_methods = {
self.get_feed: gapic_v1.method.wrap_method(
self.get_feed,
default_timeout=None,
client_info=client_info,
),
self.mutate_feeds: gapic_v1.method.wrap_method(
self.mutate_feeds,
default_timeout=None,
client_info=client_info,
),
}
@property
def get_feed(self) -> typing.Callable[
[feed_service.GetFeedRequest],
feed.Feed]:
raise NotImplementedError
@property
def mutate_feeds(self) -> typing.Callable[
[feed_service.MutateFeedsRequest],
feed_service.MutateFeedsResponse]:
raise NotImplementedError
__all__ = (
'FeedServiceTransport',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
2afe88f38f33383f8c4c475b7cd9626bc8d53553 | 39ceb962075b4ed0e21ad4f9831210ea48a937b8 | /字符串/434. 字符串中的单词数.py | b95fff00baa7de3379f7e4b39ec7dd048520b7ba | [] | no_license | buidler/LeetCode | fe91fbf764b4f1ea28125a60497e67513471282a | 9763c632c35324761c53604f35b473622d145d3c | refs/heads/master | 2022-12-12T21:14:10.898611 | 2020-09-13T08:25:22 | 2020-09-13T08:25:22 | 295,146,007 | 1 | 0 | null | 2020-09-13T12:24:45 | 2020-09-13T12:24:45 | null | UTF-8 | Python | false | false | 1,720 | py | """
统计字符串中的单词个数,这里的单词指的是连续的不是空格的字符。
请注意,你可以假定字符串里不包括任何不可打印的字符。
示例:
输入: "Hello, my name is John"
输出: 5
"""
class Solution(object):
def countSegments(self, s):
"""
:type s: str
:rtype: int
"""
if len(s.split()) == 0:
return 0
# 设置计数器,初始值为1
index = 0
# 遍历字符串,如果当前遇到的字符是字母前一个字符是空格时,计数器增1
# 当然,字符串第一个字符单独考虑
for i in range(len(s)):
if (i == 0 or s[i - 1] == ' ') and s[i].isalnum():
index += 1
return index
def countSegments2(self, s):
"""
:type s: str
:rtype: int
"""
# 设置计数器,初始值为1
index = 1
# 遍历字符串,如果当前遇到的字符不是字母,那么在遇到下一个字母时,计数器增1
for i in range(len(s)):
if not s[i - 1].isalnum() and s[i].isalnum():
index += 1
return index
def countSegments3(self, s):
"""
:type s: str
:rtype: int
"""
# 测试用例每个单词后都会有一个空格
return len(s.split())
if __name__ == '__main__':
solution = Solution()
print(solution.countSegments(", , , , a, eaefa"))
print(solution.countSegments("Of all the gin joints in all the towns in all the world, "))
print(solution.countSegments("Hello, my, name, is, John"))
print(solution.countSegments("love live! mu'sic forever"))
| [
"[email protected]"
] | |
f298bf467f3d150981e177418bb563f0dd6ec91d | 599786b7f37dc6a4a0d1c5e467804be806feaed8 | /core/GetDataClass.py | fe845a93fb4fe79710a6ed17796491a8cdb58c7b | [] | no_license | Surpris/DataViewerBase | dc0a8e57e5a9c02dbe31e91400169f019dafb80b | ee82a1abe0f7f328e6fa8ea0a31356c266426593 | refs/heads/master | 2021-01-21T14:33:20.292380 | 2018-08-03T15:24:18 | 2018-08-03T15:24:18 | 95,300,279 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,406 | py | # coding: utf-8
try:
import olpy
import dbpy
except:
pass
import time
import datetime
import numpy as np
class TagDiscriminator():
"""
Class for discrimination of tags.
"""
def __init__(self, bl, chan, cycle, offset, field):
self.bl = bl
self.chan = chan
self.cycle = cycle
self.offset = offset
self.field = field
def analizePattern(self):
"""
make a pattern of tags.
"""
now = datetime.datetime.now()
print("[{}]: Analyze pattern".format(now))
numOfTags = 12
run = dbpy.read_runnumber_newest(self.bl)
run -= 0
hiTag, startTag = dbpy.read_start_tagnumber(self.bl, run)
#print(newestTag)
tagList = dbpy.read_taglist(self.bl, hiTag, startTag, startTag + numOfTags - 1)
#print(tagList)
status = dbpy.read_syncdatalist_float(self.field, hiTag, tagList)
status = np.array(status)
#print(status)
self.offset = tagList[np.nonzero(status)[0][0]]
self.startTag = startTag
def discriminate(self, tagList):
return (tagList-self.offset) % self.cycle
class GetDataClass(object):
"""Class for getting data."""
def __init__(self, detId='OPAL-234363', chan=0, bl=1, cycle=6, limNumImg=35):
"""Initialization"""
self.repRate = 60
self.tagRepRate = 60
self.waitSec = 1
self.limNumImg = limNumImg
self.chan = 0
self.cycle = 6
self.bl = 1
self.detId = detId
self.read = olpy.StorageReader(self.detId)
self.buf = olpy.StorageBuffer(self.read)
self.disc = TagDiscriminator(self.bl, self.chan, self.cycle, None,
"xfel_bl_1_shutter_1_open_valid/status")
self.startTag = -1
self.endTag = -1
self.isReset = False
self.currentRun = -1
def getData(self):
"""get data from the detector with Id `detId`. """
st = time.time()
currentRun = dbpy.read_runnumber_newest(self.bl)
#tag discriminator setup
# success_dbpy = False
if self.endTag == -1 or self.currentRun != currentRun:
runstatus = dbpy.read_runstatus(self.bl, currentRun)
if runstatus == 2: # Analyse patterns after the latest Run start running.
self.currentRun = currentRun
self.disc.analizePattern()
self.startTag = self.disc.startTag
#initialize parameters
image = [None for col in range(self.disc.cycle)]
numOfImg = np.zeros(self.disc.cycle)
#Acquire the current newest tag and image
newestTag = self.read.collect(self.buf, olpy.NEWESTTAG)
col = self.disc.discriminate(newestTag)
if image[col] is None:
image[col] = self.buf.read_det_data(self.chan)
else:
image[col] += self.buf.read_det_data(self.chan)
numOfImg[col] += 1
#repeat acquiring images backward
for i in range(1, self.limNumImg + 1):
#try to get image
tag = newestTag - int(self.tagRepRate/self.repRate)*i
if tag == self.endTag or tag < self.startTag:
break
try:
self.read.collect(self.buf, tag)
except Exception as ex:
print(tag, ":", ex)
continue
col = self.disc.discriminate(tag)
if image[col] is None:
image[col] = self.buf.read_det_data(self.chan)
else:
image[col] += self.buf.read_det_data(self.chan)
numOfImg[col] += 1
self.endTag = newestTag
run = dbpy.read_runnumber_newest(self.bl)
runstatus = dbpy.read_runstatus(self.bl, run)
if runstatus == 0: # (-1=not yet exist, 0=Stopped(ready to read), 1=Paused, 2=Running)
pass
elif runstatus != 2:
self.startTag = -1
self.endTag = -1
else:
pass
# if elapsed < self.waitSec:
# time.sleep(self.waitSec - elapsed)
return image, numOfImg, currentRun, self.startTag, self.endTag
if __name__ == "main":
pass
# getData() | [
"[email protected]"
] | |
a3bcb9efe57ad1536db2e3b6c0db3ac60c7f7f6b | f78a6e4630363c31ce152009d7df8794cb66f84f | /Juego.py | cf7a3929faa0f43354a1c9de73c7b25e84d393da | [] | no_license | TailsDoll321/Canta-O-Paga | d78da70f2d2406efcaff8ee40f69bb922871144e | b26b0ca08b00d232122bb89430f786d7b8d10a1a | refs/heads/main | 2023-04-14T16:35:43.256064 | 2021-05-04T00:41:59 | 2021-05-04T00:41:59 | 364,097,929 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,036 | py | import pygame
import random
newsong = 1
i = 176
turno = 1
jugador = 0
njug = 0
pj1 = -1
pj2 = 0
pj3 = 0
pj4 = 0
txturn = ""
txpun = ""
txwin = ""
n = 17
newpunishment = 0
pygame.init()
screen = pygame.display.set_mode((750, 500))
Title = "Canta o Paga"
pygame.display.set_caption(Title)
song = {1 : "Los Malaventurados No Lloran - PXNDX", 2 : "Starlight - Muse", 3 : "Ya Lo Veia Venir - Moderatto", 4 : "Knights of Cydonia - Muse", 5 : "Hustle Bones - Death Grips",
6 : "Renai Circulation - Kana Hanazawa", 7 : "Zombie - The Cranberries", 8 : "Clint Eastwood - Gorillaz", 9 : "In The End - Linkin Park", 10 : "Numb - Linkin Park",
11 : "Labios Rotos - Mana", 12 : "Stressed Out - Twenty One Pilots", 13 : "La Cancion - Bad Bunny X J Balvin", 14 : "Callaita - Bad Bunny", 15 : "QUE PRETENDES - Bad Bunny X J Balvin",
16 : "Chop Suey - System of a Down", 17 : "Toxicity - System of a Down", 18 : "Aerials, System of a Down", 19 : "I Don't Love You - My Chemical Romance",
20 : "Helena - My Chemical Romance", 21 : "Welcome to the Black Parade - My Chemical Romance", 22 : "That's What You Get - Paramore", 23 : "Still Into You - Paramore",
24 : "Misery Business - Paramore", 25 : "Ain´t It Fun - Paramore", 26 : "My Blood - Twenty One Pilots", 27 : "Devuelveme a mi Chica - Hombres G",
173 : "La Celula que Explota - Caifanes", 174 : "Sognare - Division Minuscula", 175 : "Anda y Ve - Jose Jose", 176 : "El Triste - Jose Jose", 177 : "Gavilan o Paloma - Jose Jose",
28 : "Blinding Lights - The Weekend", 29 : "Circles - Post Malone", 30 : "This Love - Maroon 5", 31 : "Viva la Vida - Coldplay", 32 : "La Nave del Olvido - Jose Jose",
33 : "Run - Joji", 34 : "Slow Dancing in the Dark - Joji", 35 : "I Don't Wanna Waste my Time - Joji" ,36 : "17 Años - Angeles Azules", 37 : "The Beach - The Neighborhood",
38 : "Ties - Years & Years", 39 : "Palo Santo - Years & Years", 40 : "El Amar y el Querer - Jose Jose", 41 : "40 y 20 - Jose Jose", 42 : "Te Quiero - Barney",
43 : "Baby Shark", 44 : "La Vaca Lola", 45 : "El Pollito Pio", 46 : "Mi Pollito Amarillito", 47 : "La Gallina Turuleca", 48 : "El Patito Juan", 49 :"Si Veo a tu Mama - Bad Bunny",
50 : "Sigues con El - Arcángel", 51 : "Hola Remix - Dalex", 52 : "Bellaquita - Dalex", 53 : "Girls Like You - Maroon 5", 54 : "Ride It - Regard", 55 : "Feel So Close - Calvin Harris",
56 : "Just Can´t Get Enough - Black Eyed Peas", 57 : "Meet Me Halfway - Black Eyes Peas", 58 : "Thunder - Imagine Dragons", 59 : "Amsterdam - Imagine Dragons",
60 : "Dance Monkey - Tones and I", 61 : "Jangueo - Alex Rose", 62 : "Ignorantes - Bad Bunny", 63 : "Cuando Calienta el Sol - Luis Miguel", 64 : "Prometiste - Pepe Aguilar",
65 : "Por que me Haces Llorar - Juan Gabriel", 66 : "Empire State of Mind - Jay Z", 67 : "Love the Way you Lie - Eminem", 68 : "My Way - Frank Sinatra", 69 : "Atrevetetete - Calle 13",
70 : "What A Wonderful World - Louis Armstrong", 71 : "Gasolina - Daddy Yankee", 72 : "Mis Ojos Lloran Por Ti - Big Boy", 73 : "Ven Bailalo - Angel y Khriz", 74 : "Snuff - Slipknot",
75 : "Duality - Slipknot", 76 : "Psychosocial - Slipknot", 77 : "The Animal I've Become - Three Days Grace", 78 : "Sugar, We´re Going Down - Fall Out Boy",
79 : "Time of Dying - Three Days Grace", 80 : "Smells Like a Teen Spirit - Nirvana", 81 : "Come As You Are - Nirvana", 82 : "The Man Who Sold The World - Nirvana",
83 : "Heart-Shaped Box - Nirvana", 84 : "The Ballad of Mona Lisa - Panic! at the Disco", 85 : "Blow Me Away - Breaking Benjamin", 86 : "My Immortal - Evanescence",
87 : "Bring Me To Life - Evanescence", 88 : "Tiny Little Adiantum", 89 : "Last Resort - Papa Roach", 90 : "All The Small Things - Blink 182", 91 : "What's My Age Again - Blink 182",
92 : "Can't Stop - Red Hot Chili Peppers", 93 : "Californication - Red Hot Chili Peppers", 94 : "PPAP - Pikotaro", 95 : "Otherside - Red Hot Chili Peppers",
96 : "Boss Bitch - Doja Cat", 97 : "CANDY - Doja Cat", 98 : "Electric Feel - MGMT", 99 : "Kids - MGMT", 100 : "Instant Crush - Daft Punk", 101 : "Bitchcraft - Drake Bell",
102 : "I Know - Drake Bell", 103 : "Procedimiento Para Llegar a un Comun Acuerdo - PXNDX", 104 : "Something About Us - Daft Punk", 105 : "Get Lucky - Daft Punk",
106 : "American Boy - Estelle", 107 : "Lying Is The Most Fun A Girl Can Do Without Taking Her Clothes - Panic! At The Disco", 108 : "Thnks Fr Th Mmrs - Fall Out Boy",
109 : "S.O.S - Jonas Brothers", 110 : "Mr. Brightside - The Killers", 111 : "Burnin' Up - Jonas Brothers", 112 : "Fireflies - Owl City", 113 : "Kangaroo Court - Capital Cities",
114 : "On Melancoholy Hill - Gorillaz", 115 : "Stylo - Gorillaz", 116 : "Rock The House - Gorillaz", 117 : "Stronger - Kanye West", 118 : "I Love Kanye - Kanye West",
119 : "I'm Not The Only One - Sam Smith", 120 : "Luna - Zoe", 121 : "Hurt - Johnny Cash", 122 : "Sk8er Boi - Avril Lavigne", 123 : "Complicated - Avril Lavigne",
124 : "What the Hell - Avril Lavigne", 125 : "Hey! Ya - Outkast", 126 : "Boulevard of Broken Dreams - Green Day", 127 : "21 Guns - Green Day", 128 :"Holiday - Green Day",
129 : "Know Your Enemy - Green Day", 130 : "American Idiot - Green Day", 131 : "Poker Face - Lady Gaga", 132 : "Bad Day - Daniel Powter", 133 : "Electromovimiento - Calle 13",
134 : "Bad Romance - Lady Gaga", 135 : "Billie Jean - Michael Jackson", 136 : "Don - Miranda", 137 : "The Way You Make Me Feel - Michael Jackson", 138 : "Limon Y Sal - Julieta Venegas",
139 : "Ella es Bonita - Natalia Lafourcade", 140 : "Yofo - Molotov", 141 : "Electricity - Dua Lipa", 142 : "Zodiaco - Moderatto", 143 : "Mil Demonios - Moderatto",
144 : "I Want It That Way - Backstreet Boy", 145 : "It's Gotta Be You - Backstreet Boy", 146 : "Raging - Kygo", 147 : "21 Questions - 50 Cent", 148 : "Brillas - Leon Larregui",
149 : "Azul - Zoe", 150 : "Dry Ice - Green Day", 151 : "Letterbomb - Green Day", 152 : "Llamado de Emergencia - Daddy Yankee", 153 : "Soñe - Zoe", 154 : "China - Daddy Yankee",
155 : "Desvelado - Bobby Pulido", 156 : "Enseñame - Bobby Pulido", 157 : "Amor Prohibido - Selena", 158 : "El Liston de tu Pelo - Angeles Azules", 159 : "Crei en Ti - Angeles Azules",
160 : "Donde Estas - Intocable", 161 : "Loco - Pesado", 162 : "Te Amo - Pesado", 163 : "El Ruido de Tus Zapatos - Banda Limon", 164 : "Total Eclipse of the Heart - Bonnie Tyler",
165 : "Every Breath You Take - The Police", 166 : "My Name Is - Eminem", 167 : "Houdini - Foster the People", 168 : "TUSA - Karol G", 169 : "Wake Me Up Before You Go-Go - Wham!",
170 : "Careless Whisper - George Michael", 171 : "Baby Please - Allsion", 172 : "Fragil - Allison"}
castigo = ["Cortarle la uña de un dedo con los dientes a un jugador ", "Hacer un baile de Tik-Tok", "Marcale/Envia mensaje a un/una ex",
"Comete un chile habanero", "Dar vueltas por 10 segundos y bailar payaso de rodeo con vuelta y brinco", "Come algo echado a perder",
"Di un secreto vergonzoso", "Huele la axila de otro jugador", "Chupa la axila a otro jugador", "Lambe tu propio codo",
"Hacer 3 lagartijas con aplauso seguidas", "Dile un piropo a algun desconocido", "Meter la mano en un inodoro", "Recibir una cachetada de otro jugador",
"Declaratele a algun desconocido", "Actuar como un animal, a votacion de jugadores", "Comete el moco de algun jugador",
"Marcarle a tus padres y decirle que saliste del closet"]
# Colores
black = (0, 0, 0)
grey = (87, 89, 93)
white = (255, 255, 255)
green = (26, 148, 49)
light_green = (89, 182, 91)
red = (255, 0, 0)
light_red = (244, 86, 44)
blue = (0, 0, 255)
light_blue = (63, 183, 227)
yellow = (255, 255, 0)
light_yellow = (255, 255, 102)
violet = (238,130,238)
light_violet = (221,160,221)
clock = pygame.time.Clock()
#Tipo de Letra
small = pygame.font.SysFont("Centhury Gothic", 25)
smallm = pygame.font.SysFont("Centhury Gothic", 35)
medium = pygame.font.SysFont("Centhury Gothic", 50)
large = pygame.font.SysFont("Centhury Gothic", 70)
def text_ob(text, color, size=small):
if size == "small":
textSurface = small.render(text, True, color)
if size == "medium":
textSurface = medium.render(text, True, color)
if size == "large":
textSurface = large.render(text, True, color)
if size == "smallm":
textSurface = smallm.render(text, True, color)
return textSurface, textSurface.get_rect()
def msg_screen(msg, color, y_displace = 0, x_displace = 0, size = "small"):
textSurf, textRect = text_ob(msg, color, size)
textRect.center = ((750/2) + x_displace, (500/2)+ y_displace)
screen.blit(textSurf, textRect)
def text_b(msg, color, buttonx, buttony, buttonwidth, buttonheight, size = "small"):
textSurf, textRect = text_ob(msg,color,size)
textRect.center = ((buttonx+(buttonwidth/2)), buttony+(buttonheight/2))
screen.blit(textSurf, textRect)
def boton(nombre, ancho, largo, posx, posy, color_a, color_b, acc = None):
global newsong, njug, turno, newpunishment, jugador
cur = pygame.mouse.get_pos()
# print(posy + largo)
# print(posx + ancho)
if (posx + ancho) > cur[0] > posx and (posy + largo) > cur[1] > posy:
pygame.draw.rect(screen, color_a, (posx, posy, ancho, largo))
if pygame.mouse.get_pressed()[0]:
pygame.time.delay(450)
if acc == "J":
jugadores()
if acc == "I":
instrucciones()
if acc == "Q":
pygame.quit()
quit()
if acc == "2":
#print("Have you heard the legend")
njug = 2
puntuacionpos()
#print(njug)
game()
if acc == "3":
#print("of Darth Plagueis?")
njug = 3
puntuacionpos()
game()
if acc == "4":
#print("It's not something a Jedi wold tell you")
njug = 4
puntuacionpos()
game()
if acc == "C":
puntuacionpos()
turno += 1
newsong = 1
if acc == "P":
newpunishment = 1
castigos()
if acc == "+":
puntuacionneu()
turno +=1
newsong = 1
game()
if acc == "-":
puntuacionneg()
newsong = 1
turno += 1
game()
if acc == "reset":
reset()
if acc == "eq":
equipo()
else:
pygame.draw.rect(screen, color_b, (posx, posy, ancho, largo))
def intro():
intro = True
while intro:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
screen.fill(white)
msg_screen("CANTA O PAGA", black, -150, size="large")
boton("Inicio", 400, 75, 170, 175, light_green, green, acc = "J")
boton("Instrucciones", 400, 75, 170, 280, light_blue, blue, acc = "I")
boton("Salir", 400, 75, 170, 385, light_red, red, acc = "Q")
boton("Equipo", 100, 50, 630, 440, light_violet, violet, acc = "eq")
text_b("JUGAR", black, 170, 175, 400, 75)
text_b("INSTRUCCIONES", black, 170, 280, 400, 75)
text_b("SALIR", black, 170, 385, 400, 75)
text_b("EQUIPO", black, 630, 440, 100, 50)
pygame.display.update()
def equipo():
equi = True
while equi:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
screen.fill(green)
msg_screen("EQUIPO 4 PENSAMIENTO CREATIVO M2", violet, -220, size = "medium")
msg_screen("1859114 AVILA RANGEL JAVIER ALEJANDRO", black, -180, size = "small")
msg_screen("1818283 CESEÑA MARTINEZ FERNANDO WENCESLAO", black, -155, size = "small")
msg_screen("1942654 EDGAR SALDAÑA ERO", black, -130, size = "small")
msg_screen("1634092 HERNANDEZ CORTES OSCAR ADOLFO", black, -105, size = "small")
msg_screen("1855970 JAIME REYES IVAN AZAEL", black, -80, size = "small")
msg_screen("1811025 MARTINEZ CALDERON ADOLFO", black, -55, size = "small")
msg_screen("1812990 MORENO SANDOVAL LAURA", black, -30 , size = "small")
msg_screen("1889390 PALATO HERNANDEZ CARLOS", black, -5, size = "small")
msg_screen("1722403 RODRIGUEZ PEREZ CHRISTIAN", black, 20, size = "small")
msg_screen("1793936 SALINAS RODRIGUEZ CAROLINA", black, 45, size = "small")
msg_screen("1942494 VEGA DE LA CRUZ RAUL JAVIER", black, 70, size = "small")
boton("Inicio", 150, 75, 105, 410, light_violet, violet, acc = "J")
boton("Salir", 150, 75, 505, 410, light_red, red, acc = "Q")
boton("Instrucciones", 150, 75, 305, 410, light_blue, blue, acc = "I")
text_b("INSTRUCCIONES", black, 305, 410, 150, 75)
text_b("JUGAR", black, 105, 410, 150, 75)
text_b("SALIR", black, 505, 410, 150, 75)
pygame.display.update()
def instrucciones():
instru = True
while instru:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
screen.fill(white)
msg_screen("INTRUCCIONES", blue, -190, size = "large")
msg_screen("Canta la cancion que salga en la pantalla, los demas jugadores decidiran", black, -130, size = "small")
msg_screen("si estuvo correcto o no, de ser asi se te otorgara un punto, pero si no,", black, -110, size = "small")
msg_screen("se impondra un castigo por la misma pantalla, si no lo realizas se te quita", black, -90, size = "small")
msg_screen("un punto, si se realiza, quedas neutro.", black, -70, size = "small")
msg_screen("Obten 7 puntos para ganar.", red, 0, size = "medium")
boton("Inicio", 150, 75, 105, 410, light_green, green, acc = "J")
boton("Salir", 150, 75, 505, 410, light_red, red, acc = "Q")
text_b("JUGAR", black, 105, 410, 150, 75)
text_b("SALIR", black, 505, 410, 150, 75)
pygame.display.update()
def jugadores():
jugador = True
while jugador:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
screen.fill(white)
msg_screen("Seleccione el numero de jugadores:", red, -190, size = "medium")
boton("2", 560, 130, 80, 100, light_green, green, acc = "2")
boton("3", 250, 130, 80, 300, light_yellow, yellow, acc = "3")
boton("4", 250, 130, 390, 300, light_violet, violet, acc = "4")
text_b("2", black, 80, 100, 560, 130)
text_b("3", black, 80, 300, 250, 130)
text_b("4", black, 390, 300, 250, 130)
pygame.display.update()
def puntuacionpos():
global jugador, njug, turno, pj1, pj2, pj3, pj4, txturn
if njug == 2:
if (turno % njug) == 1:
pj1 += 1
#print(pj1)
jugador = 1
#print(jugador)
#turno += 1
#print(turno)
elif (turno % njug) == 0:
pj2 += 1
#print(pj2)
jugador = 2
#print(jugador)
#turno += 1
#print(turno)
if njug == 3:
if (turno % njug) == 1:
pj1 += 1
jugador = 1
elif (turno % njug) == 2:
pj2 += 1
jugador = 2
#turno += 1
elif (turno % njug) == 0:
pj3 += 1
jugador = 3
#turno += 1
if njug == 4:
if (turno % njug) == 1:
pj1 += 1
jugador = 1
#turno += 1
elif (turno % njug) == 2:
pj2 += 1
jugador = 2
#turno += 1
elif (turno % njug) == 3:
pj3 += 1
jugador = 3
#turno += 1
elif (turno % njug) == 0:
pj4 += 1
jugador = 4
#turno += 1
def puntuacionneu():
global jugador, njug, turno, pj1, pj2, pj3, pj4, txturn
if njug == 2:
if (turno % njug) == 1:
#print(pj1)
jugador = 1
#print(jugador)
#turno += 1
#print(turno)
elif (turno % njug) == 0:
#print(pj2)
jugador = 2
#print(jugador)
#turno += 1
#print(turno)
if njug == 3:
if (turno % njug) == 1:
jugador = 1
elif (turno % njug) == 2:
jugador = 2
#turno += 1
elif (turno % njug) == 0:
jugador = 3
#turno += 1
if njug == 4:
if (turno % njug) == 1:
jugador = 1
#turno += 1
elif (turno % njug) == 2:
jugador = 2
#turno += 1
elif (turno % njug) == 3:
jugador = 3
#turno += 1
elif (turno % njug) == 0:
jugador = 4
#turno += 1
def puntuacionneg():
global jugador, njug, turno, pj1, pj2, pj3, pj4, txturn
if njug == 2:
if (turno % njug) == 1:
pj1 -= 1
#print(pj1)
jugador = 1
print(jugador)
#turno += 1
#print(turno)
elif (turno % njug) == 0:
pj2 -= 1
#print(pj2)
jugador = 2
#print(jugador)
#turno += 1
#print(turno)
if njug == 3:
#print(turno)
#print(njug)
#print(turno % njug)
#print(jugador)
if (turno % njug) == 1:
#print("hola")
pj1 -= 1
jugador = 1
elif (turno % njug) == 2:
pj2 -= 1
jugador = 2
#turno += 1
elif (turno % njug) == 0:
pj3 -= 1
jugador = 3
#turno += 1
if njug == 4:
if (turno % njug) == 1:
pj1 -= 1
jugador = 1
#turno += 1
elif (turno % njug) == 2:
pj2 -= 1
jugador = 2
#turno += 1
elif (turno % njug) == 3:
pj3 -= 1
jugador = 3
#turno += 1
elif (turno % njug) == 0:
pj4 -= 1
jugador = 4
#turno += 1
def scoreboard():
global pj1, pj2, pj3, pj4, njug, jugador
if njug == 2:
tp1 = ("Jugador 1: "+str(pj1))
tp2 = ("Jugador 2: "+str(pj2))
msg_screen(tp1, black, -230, -150, size = "small")
msg_screen(tp2, black, -230, 150, size = "small")
if njug == 3:
tp1 = ("Jugador 1: "+str(pj1))
tp2 = ("Jugador 2: "+str(pj2))
tp3 = ("Jugador 3: "+str(pj3))
msg_screen(tp1, black, -230, -120, size = "small")
msg_screen(tp2, black, -230, 0, size = "small")
msg_screen(tp3, black, -230, 120, size = "small")
if njug == 4:
tp1 = ("Jugador 1: "+str(pj1))
tp2 = ("Jugador 2: "+str(pj2))
tp3 = ("Jugador 3: "+str(pj3))
tp4 = ("Jugador 4:"+str(pj4))
msg_screen(tp1, black, -230, -300, size = "small")
msg_screen(tp2, black, -230, -100, size = "small")
msg_screen(tp3, black, -230, 100, size = "small")
msg_screen(tp4, black, -230, 300, size = "small")
def jugadortexto():
global jugador, njug, turno, txturn, txpun
if njug == 2:
if turno > 1:
if jugador == 2:
txturn = ("Jugador 1 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 1")
else:
txturn = ("Jugador 2 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 2")
else:
txturn = ("Jugador 1 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 1")
if njug == 3:
if turno > 1:
if jugador == 2:
txturn = ("Jugador 3 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 3")
elif jugador == 3:
txturn = ("Jugador 1 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 1")
elif jugador == 1:
txturn = ("Jugador 2 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 2")
else:
txturn = ("Jugador 1 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 1")
if njug == 4:
if turno > 1:
if jugador == 2:
txturn = ("Jugador 3 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 3")
elif jugador == 3:
txturn = ("Jugador 4 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 4")
elif jugador == 4:
txturn = ("Jugador 1 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 1")
elif jugador == 1:
txturn = ("Jugador 2 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 2")
else:
txturn = ("Jugador 1 canta la siguiente cancion")
txpun = ("HORA DEL CASTIGO JUGADOR 1")
def zeros():
global pj1, pj2, pj3, pj4
if pj1 < 0:
pj1 = 0
if pj2 < 0:
pj2 = 0
if pj3 < 0:
pj3 = 0
if pj4 < 0:
pj4 = 0
def wintxt():
global pj1, pj2, pj3, pj4, txwin
if pj1 == 7:
txwin = ("FELICIDADES JUGADOR 1, HAS GANADO")
if pj2 == 7:
txwin = ("FELICIDADES JUGADOR 2, HAS GANADO")
if pj3 == 7:
txwin = ("FELICIDADES JUGADOR 3, HAS GANADO")
if pj4 == 7:
txwin = ("FELICIDADES JUGADOR 4, HAS GANADO")
def winnerscreen():
global txtwin
wnnr = True
while wnnr:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
screen.fill(green)
wintxt()
msg_screen(txwin, black, size = "medium")
boton("Reset", 300, 135, 400, 300, light_green, green, acc = "reset")
boton("Bye", 300, 135, 50, 300, light_red, red, acc = "Q")
text_b("Jugar de nuevo", black, 400, 300, 300, 135)
text_b("Salir del juego", black, 50, 300, 300, 135)
pygame.display.update()
def reset():
global pj1, pj2, pj3, pj4, jugador, turno, newsong, newpunishment, njug
turno = 1
jugador = 0
njug = 0
pj1 = 0
pj2 = 0
pj3 = 0
pj4 = 0
newsong = 1
newpunishment = 0
jugadores()
def game():
global newsong, i, song, jugador, turno, txturn
game = True
while game:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
if (pj1 is 7) or (pj2 is 7) or (pj3 is 7) or (pj4 is 7):
winnerscreen()
zeros()
while newsong == 1:
screen.fill(white)
x = random.randint(1, i)
cancion = song[x]
#print(cancion)
if x == 107:
msg_screen(cancion, black, size = "small")
else:
msg_screen(cancion, black, size = "smallm")
#print(turno)
#print(jugador)
newsong = 0
scoreboard()
jugadortexto()
msg_screen(txturn, red, -175, size = "medium")
boton("Pos", 300, 135, 400, 300, light_green, green, acc = "C")
boton("Cast", 300, 135, 50, 300, light_red, red, acc = "P")
text_b("Parece que la esucucho hace rato", black, 400, 300, 300, 135)
text_b("No se la supo", black, 50, 300, 300, 135)
clock.tick(15)
pygame.display.update()
def castigos():
global jugador, n, newpunishment, txpun, castigo
punishmentl = True
while punishmentl:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
while newpunishment == 1:
screen.fill(black)
y = random.randint(1, n-1)
np = castigo[y]
if y == 4:
msg_screen(np, white, -70, size = "small")
else:
msg_screen(np, white, -70, size = "smallm")
newpunishment = 0
msg_screen(txpun, red, -175, size = "medium")
boton("Si", 300, 135, 400, 300, light_green, green, acc = "+")
boton("No", 300, 135, 50, 300, light_red, red, acc = "-")
text_b("Si lo logro", black, 400, 300, 300, 135)
text_b("No pudo hacerlo", black, 50, 300, 300, 135)
clock.tick(15)
pygame.display.update()
intro()
| [
"[email protected]"
] | |
73830c9d57873f34c328dfdbc15e09877dd4ba7c | e3c8f786d09e311d6ea1cab50edde040bf1ea988 | /Incident-Response/Tools/grr/grr/client/grr_response_client/client_actions/linux/linux_test.py | 8711fe54f31fe7cba9a5fb16ee2e8e04ac35fbb0 | [
"Apache-2.0",
"MIT"
] | permissive | foss2cyber/Incident-Playbook | d1add8aec6e28a19e515754c6ce2e524d67f368e | a379a134c0c5af14df4ed2afa066c1626506b754 | refs/heads/main | 2023-06-07T09:16:27.876561 | 2021-07-07T03:48:54 | 2021-07-07T03:48:54 | 384,988,036 | 1 | 0 | MIT | 2021-07-11T15:45:31 | 2021-07-11T15:45:31 | null | UTF-8 | Python | false | false | 2,536 | py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""Linux only tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import builtins
import glob
import os
from absl import app
from grr_response_client.client_actions.linux import linux
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr.test_lib import client_test_lib
from grr.test_lib import test_lib
class LinuxOnlyTest(client_test_lib.EmptyActionTest):
def testEnumerateUsersLinux(self):
"""Enumerate users from the wtmp file."""
def MockedOpen(requested_path, mode="rb"):
try:
fixture_path = os.path.join(self.base_path, "VFSFixture",
requested_path.lstrip("/"))
return builtins.open.old_target(fixture_path, mode)
except IOError:
return builtins.open.old_target(requested_path, mode)
with utils.MultiStubber((builtins, "open", MockedOpen),
(glob, "glob", lambda x: ["/var/log/wtmp"])):
results = self.RunAction(linux.EnumerateUsers)
found = 0
for result in results:
if result.username == "user1":
found += 1
self.assertEqual(result.last_logon, 1296552099 * 1000000)
elif result.username == "user2":
found += 1
self.assertEqual(result.last_logon, 1296552102 * 1000000)
elif result.username == "user3":
found += 1
self.assertEqual(result.last_logon, 1296569997 * 1000000)
elif result.username == "utuser":
self.assertEqual(result.last_logon, 1510318881 * 1000000)
else:
self.fail("Unexpected user found: %s" % result.username)
self.assertEqual(found, 3)
def testEnumerateFilesystemsLinux(self):
"""Enumerate filesystems."""
def MockCheckMounts(unused_filename):
del unused_filename # Unused.
device = "/dev/mapper/dhcp--100--104--9--24--vg-root"
fs_type = "ext4"
mnt_point = "/"
yield device, fs_type, mnt_point
with utils.Stubber(linux, "CheckMounts", MockCheckMounts):
results = self.RunAction(linux.EnumerateFilesystems)
expected = rdf_client_fs.Filesystem(
mount_point="/",
type="ext4",
device="/dev/mapper/dhcp--100--104--9--24--vg-root")
self.assertLen(results, 2)
for result in results:
self.assertEqual(result, expected)
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
app.run(main)
| [
"[email protected]"
] | |
932d5c09403c01e3e1f9412602e1f5cf3643ae7e | b4ee5a2986d7232630aadc284194b3af3c9ac504 | /projects/cats/tests/03.py | 22df36a7ac272ee6e61364ea1a9a930329ebf6d8 | [] | no_license | lijian12345/cs61a-sp20 | d42c855eb06fe26f7e0d37d483d1f4fa81a0e118 | 3a9aa5e922c3f8a4d31b6f197340d4828e342530 | refs/heads/master | 2023-04-03T10:36:32.192096 | 2021-04-19T11:06:50 | 2021-04-19T11:06:50 | 254,659,566 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,950 | py | test = {
'name': 'Problem 3',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> accuracy("12345", "12345") # Returns 100.0
100.0
>>> accuracy("a b c", "a b c")
100.0
>>> accuracy("a b c d", "b a c d")
50.0
>>> accuracy("a b", "c d e")
0.0
>>> accuracy("Cat", "cat") # the function is case-sensitive
0.0
>>> accuracy("a b c d", " a d ")
25.0
>>> accuracy("abc", " ")
0.0
>>> accuracy(" a b \tc" , "a b c") # Tabs don't count as words
100.0
>>> accuracy("abc", "")
0.0
>>> accuracy("", "abc")
0.0
>>> accuracy("cats.", "cats") # punctuation counts
0.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> reference_text = "Abstraction, in general, is a fundamental concept to computer science and software development. The process of abstraction can also be referred to as modeling and is closely related to the concepts of theory and design. Models can also be considered types of abstractions per their generalization of aspects of reality. Abstraction in computer science is also closely related to abstraction in mathematics due to their common focus on building abstractions as objects, but is also related to other notions of abstraction used in other fields such as art."
>>> typed_string1 = "Abstraction, in general, is a fundamental concept to computer science and software development. The process of abstraction can also be referred to as modeling and is closely related to the concepts of theory and design. Models can also be considered types of abstractions per their generalization of aspects of reality. Abstraction in computer science is also closely related to abstraction in mathematics due to their common focus on building abstractions as objects, but is also related to other notions of abstraction used in other fields such as art."
>>> typed_string2 = "Abstraction, in general, is a fundamentl concept to computer science and software development. The process of abstraction can also be referred to as modeling and is closely related to the concepts of theory and design. Models can also be considered types of abstractions per their generalization of aspects of reality. Abstraction in computer science is also closely related to abstraction in mathematics due to their common focus on building abstractions as objects, but is also related to other notions of abstraction usd in other fields such as art."
>>> typed_string3 = "Abstraction,"
>>> typed_string4 = "Abstraction, in general, is a fundamental concept to computer science and software development. The process of abstraction can also be referred to as modeling and is closely related to the concepts of theory and design. Models can also be considered types of abstractions per their generalization of aspects of reality. Abstraction in computer science is also closely related to abstraction in mathematics due to their common focus on building abstractions as objects, but is also related to other notions of abstraction used in other fields such as art. extra"
>>> typed_string5 = "Abstraction, in general, is a fundamental concept to computer science and software development. The process of abstraction can also be referred to as modeling and is closely related to the concepts of theory and design. Models can also be considered types of abstractions per their generalization of aspects of reality. Abstraction in computer science is also closely related to abstraction in mathematics due to their common focus on building abstractions as objects, but is also related to other notions of abstraction used in other fields such as art. Abstraction, in general, is a fundamental concept to computer science and software development. The process of abstraction can also be referred to as modeling and is closely related to the concepts of theory and design. Models can also be considered types of abstractions per their generalization of aspects of reality. Abstraction in computer science is also closely related to abstraction in mathematics due to their common focus on building abstractions as objects, but is also related to other notions of abstraction used in other fields such as art. art"
>>> typed_string6 = "abstraction"
>>> round(accuracy(typed_string2, reference_text), 1)
97.7
>>> round(accuracy(typed_string3, reference_text), 1)
100.0
>>> round(accuracy(typed_string4, reference_text), 1)
98.9
>>> round(accuracy(typed_string5, reference_text), 1)
49.7
>>> round(accuracy(typed_string6, reference_text), 1)
0.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('statu>tably tautit up]ti]ll a\\nhydrat(e h)arpula sidecheck }shapeless structuration bra>inlike', 'statu>tably tautit up]ti]ll a\\nhydrat(e h)arpula'), 2)
55.56
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('condi,tioning neomorph pyrazole pinche{m asce=tically turgidness appetent theatro%=phile', 'condi,tioning neomorph pyrazole pinche{m asce=tically turgidness appetent theatro%=phile \\pterygoidal'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('sp)orang,iform bu:dder ungothic wem(less minimus', 'sp)orang,iform bu:dder ungothic wem(less minimus'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('gendarme lol|lipop co*mbat lumpsuc!?ker finder_ i~m{portune erratu)m', 'gendarme lol|lipop co*mbat lumpsuc!?ker finder_'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("oviferous realleg_e grasswor\\m\\ nutt'ishnes-s foul lemoni@sh rotun]dn#ess", "oviferous realleg_e grasswor\\m\\ nutt'ishnes-s foul lemoni@sh"), 2)
85.71
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('dollier `sciogr;aphic rhopali@um retardive unli:very wenneberg}^ite a}naphroditous corresponsion', 'dollier `sciogr;aphic rhopali@um retardive unli:very'), 2)
62.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('cisplatine cf: teleobjecti"ve d_ubber in"sectol%ogy pawnage passen|ger&', 'cisplatine cf: teleobjecti"ve d_ubber in"sectol%ogy pawnage'), 2)
85.71
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("accompaniment osteochond)ropa\\thy bellower ;apocryp%hon goall'es>s ;conformal riband str%addl'e barrenness", "accompaniment osteochond)ropa\\thy bellower ;apocryp%hon goall'es>s ;conformal"), 2)
66.67
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("pruriently& polony unshaded p{reimportation ca-thepsin` 'nowel/ of sheephearte(d", "pruriently& polony unshaded p{reimportation ca-thepsin` 'nowel/ of sheephearte(d xy_lotypography"), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('southeas|ternmost" d`ustbin nontreated halakah redstart cataractal +c{omplin ungarbed pseudoangelic', 'southeas|ternmost" d`ustbin nontreated halakah redstart'), 2)
55.56
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('intercreate inkhornizer polea;xe )anaerobio*us %ins>ubordinately s,ubprefect]orial', 'intercreate inkhornizer polea;xe )anaerobio*us %ins>ubordinately s,ubprefect]orial electromagnetism barbit>urate rhi.pidistian'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('uniformitaria\\n di(scrimination {aqueousness cibation septuor trich"ophyl-lous', 'uniformitaria\\n di(scrimination {aqueousness cibation septuor'), 2)
83.33
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('pul#icidal overslaugh ectod<actylis?m [arfvedsoni,te portrayer apo=geotropically tutorials undishe}d', 'pul#icidal overslaugh ectod<actylis?m [arfvedsoni,te portrayer apo=geotropically tutorials undishe}d'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('scleromeninx epirogeny se?ric}ipary supermoro+se uneffeminate ozone a@,thletics', 'scleromeninx epirogeny se?ric}ipary supermoro+se uneffeminate'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('plumbo(sol^vent temulent;i|ve trid!ent,ate pang!en+etically morbify cystere)thism ambient `ferned', 'plumbo(sol^vent temulent;i|ve trid!ent,ate pang!en+etically morbify'), 2)
62.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('uncorked nasoethmoidal[ patrist~ic i:mpa+rl effect>ive', 'uncorked nasoethmoidal[ patrist~ic i:mpa+rl effect>ive triddler "unsighting c,a#nticle magistrat\'ic'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('kair`ine progne quotity dupery =?shisham #biog?raphies', 'kair`ine progne quotity dupery =?shisham #biog?raphies'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('orre[ry nickelous g/randpaternal mesarteritis antony~$m u}nchewed', 'orre[ry nickelous g/randpaternal mesarteritis antony~$m u}nchewed'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('i}nadequateness #f)eudist marc@hland sanidinic kratogeni$c hemimellitene{ mi|l!liner', 'i}nadequateness #f)eudist marc@hland sanidinic kratogeni$c hemimellitene{ mi|l!liner saltativen,ess'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('scorbutic an]te[meridian =dunt[ hold,fast warm(er zenog#raphy preobstruction', 'scorbutic an]te[meridian =dunt[ hold,fast warm(er'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("caulicule bra?n radio[log!ic incorrigible 'spiderle}ss ri~tualless nonresisti,ng rev,accin|ate", "caulicule bra?n radio[log!ic incorrigible 'spiderle}ss ri~tualless nonresisti,ng"), 2)
87.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('playwrighting encumberingly <yu~gada pasang cryptogrammatist* wonder', 'playwrighting encumberingly <yu~gada pasang cryptogrammatist*'), 2)
83.33
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('saltless nonf?orma,tion {marin/a ca(sh undamming stringmakin:g r=ipa bra>nsolder phlebecto+m\\y', 'saltless nonf?orma,tion {marin/a ca(sh undamming stringmakin:g r=ipa bra>nsolder phlebecto+m\\y'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('gaub unf-oxy anaphr^odis>iac completory @anaph,yte intercrust a^ct}ification cytase', 'gaub unf-oxy anaphr^odis>iac completory @anaph,yte'), 2)
62.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('u<pbid dam con`,ductivity b=urion ~bevillain somesthesia muffler{', 'u<pbid dam con`,ductivity b=urion ~bevillain somesthesia muffler{ semisq<uare'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('indiffe[rently nora|tion) highman end(ite amethodically [bolometer ph;otoshop cliqueless heterocentric', 'indiffe[rently nora|tion) highman end(ite amethodically [bolometer ph;otoshop'), 2)
77.78
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('_disregardance saw<er unexcuse?dness diacoe+l:e silhouettist', '_disregardance saw<er unexcuse?dness diacoe+l:e silhouettist -tash'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('catabaptist m]oral leiomyomat#ous belive%< sto!@ve deontol[ogist- unfixedness #metamerization', 'catabaptist m]oral leiomyomat#ous belive%< sto!@ve'), 2)
62.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('inc?ohe!rentific bicetyl geromorp)hism staphyl{o,ncus .an/thropomorphite furrow', 'inc?ohe!rentific bicetyl geromorp)hism staphyl{o,ncus .an/thropomorphite furrow'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('a!ccomplic)eship s>udiform nonprotection crinoid?al bubalis ~nas(eberry umbriferously scopti-/cal', 'a!ccomplic)eship s>udiform nonprotection crinoid?al bubalis ~nas(eberry umbriferously scopti-/cal grounder'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('e@r`ythematous mel<iorist ?endode?rmis `decentrali/st dallier c\\ondiction journalizer^) ferganite( forescene', 'e@r`ythematous mel<iorist ?endode?rmis `decentrali/st dallier c\\ondiction journalizer^) ferganite( forescene'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('bend\\i_ng bu/lldo+zer fineles<s caesaropapacy superaward<? a(nthropotomical] enkindle', 'bend\\i_ng bu/lldo+zer fineles<s caesaropapacy superaward<?'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('arboriform nucleonics wate$r^ed plu#mbism bi+se=xed diaphanometric redkne^es spor]ophyll,', 'arboriform nucleonics wate$r^ed plu#mbism bi+se=xed diaphanometric redkne^es spor]ophyll, yah:oo!'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('par*ad^octor c+al{ycate sclerodactylia saucerlike gridir;;on demigriffin dhoti tre$ading in$(framandibular', 'par*ad^octor c+al{ycate sclerodactylia saucerlike gridir;;on'), 2)
55.56
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("colle[ctorship ha,ply externall=y lenti'cularis seisma!l telephone", "colle[ctorship ha,ply externall=y lenti'cularis seisma!l telephone accounti@ng"), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('-clinometrical pulicarious gourdlike unordin}arin-ess unwakened analytically', '-clinometrical pulicarious gourdlike unordin}arin-ess unwakened analytically art@:hritis vou.ge kidnap]'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("p_idd-ler stolonate g'emmipar[ity inconglomerate hulk unmin^uted satiny{", "p_idd-ler stolonate g'emmipar[ity inconglomerate hulk"), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('sabered rid_i$cule thieving conferrable s-ubsero]sa counteraddress anc+onagra officeless', 'sabered rid_i$cule thieving conferrable s-ubsero]sa counteraddress anc+onagra officeless retroce$ssi^on'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('noggen engrained"ly\\ mis"m+atchment q|uietso.me neg~ationist brom`/iodide', 'noggen engrained"ly\\ mis"m+atchment q|uietso.me neg~ationist brom`/iodide pos`+tural zei$sm'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('unidigitate $noo`logist jesse} invernacular ?apterygot<e outdevil', 'unidigitate $noo`logist jesse} invernacular ?apterygot<e outdevil'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('di"-mmed pin>\\ked krausite symbolry* amph/ibiousne:ss per[+fectionizement nigra>niline str:adi_ne', 'di"-mmed pin>\\ked krausite symbolry* amph/ibiousne:ss'), 2)
62.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('dys;analy"te overa_rm deprivate phym\'a;toid dentinal>', 'dys;analy"te overa_rm deprivate phym\'a;toid dentinal> podium ny@ctinasti<c vagabondager'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('line(n{man pho+toelect[ricity t@ormen?table t&ripletree antenna vicissitude@ pa}lladous_ ev~angeli>stics', "line(n{man pho+toelect[ricity t@ormen?table t&ripletree antenna vicissitude@ pa}lladous_ ev~angeli>stics extrav?as'ate"), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('_cig discrepantl`y }pr\\oclamation s,`purrer morb@$ific -abysmal acetamidine libelee #bigger', '_cig discrepantl`y }pr\\oclamation s,`purrer morb@$ific'), 2)
55.56
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('rejumble peripl\\e]gmatic null.@ipore spatchco/ck oviductal >zygosperm profound |p)ed _ma=mmalogical', 'rejumble peripl\\e]gmatic null.@ipore spatchco/ck oviductal >zygosperm profound |p)ed'), 2)
88.89
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("phonogrammically pre@ambition odalisk] crownl@e^ss subhuman flyway amorphousness dext'raural", "phonogrammically pre@ambition odalisk] crownl@e^ss subhuman flyway amorphousness dext'raural"), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('her|i,ot autodi!d)act fr#act#ional insect ri{ngmaster;', 'her|i,ot autodi!d)act fr#act#ional insect ri{ngmaster; brickcroft ideas l{ymphocystosis unacceptant'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('balzarine lute thirtee<nfold p=aradi,ng permutableness', 'balzarine lute thirtee<nfold p=aradi,ng permutableness'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("plunderingly colchicine 'misph#rase rela;ta` an=thr#opolater", "plunderingly colchicine 'misph#rase rela;ta` an=thr#opolater ph^otomicrographic dome in~going"), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("gynos=?pore yourselves %o-verhardy w'orkm`aster dichotomously unsociality naphthalene", "gynos=?pore yourselves %o-verhardy w'orkm`aster dichotomously"), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("ramp}acious devalo'ka tect(a:l tournament h'ass_ock", "ramp}acious devalo'ka tect(a:l tournament h'ass_ock"), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('suble;vation postlachrymal auto&/cinesis mu+yu{sa hemo"pexis bang', 'suble;vation postlachrymal auto&/cinesis mu+yu{sa hemo"pexis'), 2)
83.33
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('{dino\\there d$r$ugshop <cicindelid unilluminating d+iy| forese%t chromatophilia toss,y irradicate', '{dino\\there d$r$ugshop <cicindelid unilluminating d+iy| forese%t chromatophilia toss,y'), 2)
88.89
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('?waverou~s acrogenous e=x&cuss gaugeable obsequien_c\\e prevari_cation', '?waverou~s acrogenous e=x&cuss gaugeable obsequien_c\\e prevari_cation /preco-nclusion bec;a:use'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('portfire voicele\'t unsumptuous relict} indiscre;\'etly exclam"ative=ly bi<fl]ex', 'portfire voicele\'t unsumptuous relict} indiscre;\'etly exclam"ative=ly'), 2)
85.71
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('proce|ed [injure lacun}ae| hypophr@enosis (transcolorati|on li.a,na stophound', 'proce|ed [injure lacun}ae| hypophr@enosis (transcolorati|on'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('relativ*enes/s psalmographer m\\yomorp:hic ,zoomechanic}s horse^"herd crassly mi*nimalism thigh;t =denatur-e', 'relativ*enes/s psalmographer m\\yomorp:hic ,zoomechanic}s horse^"herd'), 2)
55.56
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('yucker dargsman& albuminoidal #applaudably palaeoal&chemic%al', 'yucker dargsman& albuminoidal #applaudably palaeoal&chemic%al oscurrantist zeolitizatio!<n'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('prese/ntational revivement" runkly {co+emptor autovaccine funambulo \'pardoner lapstreaked', 'prese/ntational revivement" runkly {co+emptor autovaccine'), 2)
62.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('^predative ophionine accompani}mental snift?er ur,osom"e ungymnas#tic', '^predative ophionine accompani}mental snift?er ur,osom"e ungymnas#tic +atropic circumhorizontal non^speculation'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('ripe t#ruantship shar|)k fanioned palatize pitchi', 'ripe t#ruantship shar|)k fanioned palatize pitchi re$c:ure'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("dysepulotic b`rainlessl#y aut.onomasy riftless tetr>a:coralline antic>holagogu+e fa't.uism p*:urism", "dysepulotic b`rainlessl#y aut.onomasy riftless tetr>a:coralline antic>holagogu+e fa't.uism"), 2)
87.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('pneumonople>uritis marm<ose v&entpiece pho)tocopier nem!atode quinquev+alvous penury', 'pneumonople>uritis marm<ose v&entpiece pho)tocopier nem!atode'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('!b<roomwood myt{hopoem unremova_bly devitrification monarchess calamitous undet*ain(ed denationalize worth', '!b<roomwood myt{hopoem unremova_bly devitrification monarchess calamitous undet*ain(ed denationalize worth'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("overfear;ful )th|eopneustic swoo#ny% `thearchy morei+sh sh'are-ware exp,iati_on unnumberable postbrachial", "overfear;ful )th|eopneustic swoo#ny% `thearchy morei+sh sh'are-ware"), 2)
66.67
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('\'spar"ger developmentalist miscanoniz_e deformal_iz_e unerring', '\'spar"ger developmentalist miscanoniz_e deformal_iz_e unerring'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('platybrach:ycephalou.s token screen|like ga!+slighting unrebutt`able! d-o*pper draw ve}(rsemaking encompa{ss', 'platybrach:ycephalou.s token screen|like ga!+slighting unrebutt`able! d-o*pper draw ve}(rsemaking encompa{ss'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('sparrowcide* jap^ane.se croise$tte es". preconflict gla@ri*ngly justic{ial` male+volent', 'sparrowcide* jap^ane.se croise$tte es". preconflict gla@ri*ngly justic{ial`'), 2)
87.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('galvanomagnetic dividedness nonlipoidal attitude vraic unaccusto"m clup"_eine', 'galvanomagnetic dividedness nonlipoidal attitude vraic unaccusto"m clup"_eine luc\\k{ relationist'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('pneumoperitonitis p*ostc]onnubial quippishness nucleolocen+trosome semiseafaring reproachless between chlorobromi_de', 'pneumoperitonitis p*ostc]onnubial quippishness nucleolocen+trosome semiseafaring reproachless'), 2)
75.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('relieved stalk@less retroactive conducingly unvirulent filiopietistic hexadiene', 'relieved stalk@less retroactive conducingly unvirulent'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('tamelessly ~myelocyti"c muscul}e spastica!lly mucivore; uplad;der jo,unce" indigna[tor`y', 'tamelessly ~myelocyti"c muscul}e spastica!lly mucivore; uplad;der jo,unce" indigna[tor`y'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('ferry&boat} transempirica@l sneaksby week carc_ake', 'ferry&boat} transempirica@l sneaksby week carc_ake acquainted po|pu)lar suprat~emporal'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('sterneb\'er patria meningora\\ch(idian palp=at$e harangu"er cavernulous drip-', 'sterneb\'er patria meningora\\ch(idian palp=at$e harangu"er cavernulous drip-'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('toadp%ipe* private su&bsa~cral c@larif~y ir%redeem?able facetenes*s( anter(ointerio*r fre<eheartedly', 'toadp%ipe* private su&bsa~cral c@larif~y ir%redeem?able facetenes*s('), 2)
75.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy(')uncoacted #phacom<eter scolecolo|gy> ?cess+ion assoilzie', ')uncoacted #phacom<eter scolecolo|gy> ?cess+ion assoilzie'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('ra"ph:is overfold parli)amentar{ism maldigesti?on de>ca[pod debo%}sh', 'ra"ph:is overfold parli)amentar{ism maldigesti?on de>ca[pod'), 2)
83.33
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy(']vortically met-adiorite pell%[icule col+lage:n cyto^pl<asmic phlebotome o)therworld cro>sstra%ck unpunishedly', ']vortically met-adiorite pell%[icule col+lage:n cyto^pl<asmic phlebotome o)therworld cro>sstra%ck unpunishedly'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy("longicorn shadchan jour-nals$ pla'ck! me\\senchyme", "longicorn shadchan jour-nals$ pla'ck! me\\senchyme uns\\calably"), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('voluminously canthotomy monomerous\\ re:warehouse. mi?am~i sacking grayly|', 'voluminously canthotomy monomerous\\ re:warehouse. mi?am~i sacking grayly| sov;ere:ignly'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('plastog[$amy pa&ntophile cinchotoxine ultraradic{al $p{ayer phantoml!ike >pi(nyl un=congenially p{ausati]on', 'plastog[$amy pa&ntophile cinchotoxine ultraradic{al $p{ayer phantoml!ike >pi(nyl un=congenially'), 2)
88.89
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('o,bd,ormition i+nstantly ste%nocrotaphia ?a+xometry v?entrose h}y%lology `erythrogenesis improcurable', 'o,bd,ormition i+nstantly ste%nocrotaphia ?a+xometry v?entrose h}y%lology'), 2)
75.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('circum|scribable m/aterialis;tical por,tless tice& askance^ gran>dma fetishization noninfantry', 'circum|scribable m/aterialis;tical por,tless tice& askance^ gran>dma fetishization'), 2)
87.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('berrier paroarion )hept%aploid shastraik exce:pt=ious /aortoptosis talky*', 'berrier paroarion )hept%aploid shastraik exce:pt=ious'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('afterturn celadon t!hin crani"osto%sis cash^ eking archaeologist', 'afterturn celadon t!hin crani"osto%sis cash^ eking'), 2)
85.71
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('subjectiveness myr?istic caddl]"e artiod\\actyl` refectorian punner divisibil(ity', 'subjectiveness myr?istic caddl]"e artiod\\actyl` refectorian'), 2)
71.43
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('rice vibrissae enwind- influentia<lly calcareocorneous postcommissure crystallographical ?magnil<oquy quillaja', 'rice vibrissae enwind- influentia<lly calcareocorneous'), 2)
55.56
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('>phaciti!s ludicropathetic zo[odynamic lenticul>othalamic cratches azox|yphenetol^e hydropo?ni^cs revi)ewer', '>phaciti!s ludicropathetic zo[odynamic lenticul>othalamic cratches azox|yphenetol^e hydropo?ni^cs revi)ewer'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('cata+ria tress"ured; ther"ea,fter misexpl(ain tai|pa[n', 'cata+ria tress"ured; ther"ea,fter misexpl(ain tai|pa[n'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('suprasph&an,oidal (tubemaki`ng seriog<rotesqu?e ontoge["ny ent~omologize vertible< jurisprudentialist songwright;', 'suprasph&an,oidal (tubemaki`ng seriog<rotesqu?e ontoge["ny ent~omologize vertible< jurisprudentialist songwright; r|edigest'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('unharnessed !tai+lorize allozooid= malefactory rerummage veep ;dustma*n at/"miatry', 'unharnessed !tai+lorize allozooid= malefactory rerummage veep ;dustma*n'), 2)
87.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('ramp/agious a+fter{work vicety evan=escence inco<rruptn|ess temporarily| mu%corrhe|a', 'ramp/agious a+fter{work vicety evan=escence inco<rruptn|ess temporarily| mu%corrhe|a'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('b\'ovicid,e tachyglossa!l he{avyheart\\ed p,alatalism. pragmatical wonderin"g enterprisingl?y', 'b\'ovicid,e tachyglossa!l he{avyheart\\ed p,alatalism. pragmatical wonderin"g enterprisingl?y'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('@agrammatism pach?ydermia unnoticeab<le feretory attributes o&verbrigh,t t:etrahedro!n a-s>sure', '@agrammatism pach?ydermia unnoticeab<le feretory attributes o&verbrigh,t t:etrahedro!n a-s>sure chromatograph,y'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('te$gularly phylogen^y )expansi;ble a_perture unending!\\ly vernaculari_st ca/pping @.monarchian', 'te$gularly phylogen^y )expansi;ble a_perture unending!\\ly vernaculari_st ca/pping'), 2)
87.5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('tedd&er# nutp!ecker pep<s)inate atl:,antic psalmodize shel#l@', 'tedd&er# nutp!ecker pep<s)inate atl:,antic psalmodize shel#l@ pursley hemang#iosarco-ma'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('fugle ra"cinglike pedest`rian h&eteropoly# c$osmog{onist poly?phon:e taeniaci-de foregame ci~st', 'fugle ra"cinglike pedest`rian h&eteropoly# c$osmog{onist poly?phon:e taeniaci-de'), 2)
77.78
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('imb(onity riziform sodd>ing kickup wiresmith .differe|nt inflicter', 'imb(onity riziform sodd>ing kickup wiresmith .differe|nt inflicter s}ummon'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('wave"ment co#nfess* wh@eelery pendulumlik$e noctambulous charkhana catst*[one', 'wave"ment co#nfess* wh@eelery pendulumlik$e noctambulous charkhana catst*[one'), 2)
100.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> round(accuracy('stepfatherhood diplomatist autoreinfusion a\\$poxesis formal`dehyde omophagia', 'stepfatherhood diplomatist autoreinfusion a\\$poxesis formal`dehyde omophagia !rooklet- veuglai`re'), 2)
100.0
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> from cats import accuracy
""",
'teardown': '',
'type': 'doctest'
}
]
}
| [
"[email protected]"
] | |
1eaf0c5c80d21cc2f471542a39f86241d5fb8212 | aa4527f224a135ed02447a028af6f0179cd12e0f | /Python_DSA/DutchNationalFlag.py | d8ebbb03530fdf87dd12f7d951969c83b39005cf | [] | no_license | Ankitmandal1/Hactoberfest2021 | 3d5416c0721f3f3b6d4c5fccb8164e8d0d9c6833 | 7169b9b3293e5687889578938e267dfcd28e0167 | refs/heads/main | 2023-09-04T01:12:19.753420 | 2021-10-31T05:12:58 | 2021-10-31T05:12:58 | 423,058,444 | 1 | 0 | null | 2021-10-31T05:07:59 | 2021-10-31T05:07:58 | null | UTF-8 | Python | false | false | 629 | py | def swap(A, i, j):
# Exchange A[i] and A[j] in place via a temporary.
temp = A[i]
A[i] = A[j]
A[j] = temp
def threeWayPartition(A, pivot=1):
    """Dutch-national-flag partition: rearrange *A* in place.

    After the call, A reads [elements < pivot][elements == pivot]
    [elements > pivot].  The default ``pivot=1`` keeps the original
    behaviour of sorting a list containing only 0s, 1s and 2s.

    The swaps are done with tuple assignment, so no external helper
    is required.
    """
    low = mid = 0
    high = len(A) - 1
    while mid <= high:
        if A[mid] < pivot:
            # Move the small element into the low region.
            A[low], A[mid] = A[mid], A[low]
            low += 1
            mid += 1
        elif A[mid] > pivot:
            # Move the large element to the end; do not advance mid,
            # because the swapped-in element is still unexamined.
            A[mid], A[high] = A[high], A[mid]
            high -= 1
        else:
            mid += 1
if __name__ == '__main__':
    # Demo: partition a 0/1/2 list in place and print the result.
    sample = [0, 1, 2, 2, 1, 0, 0, 2, 0, 1, 1, 0]
    threeWayPartition(sample)
    print(sample)
| [
"[email protected]"
] | |
5cbcb9231cbad440e4d4c6bfcdf06cf2d3a6e571 | e10ae2c13f8e6b8ce382e3d2fbda7b2099aa4fcc | /restapi/settings.py | 8d235a4f13d157419ec3ee6ece3759f307c4fc9d | [] | no_license | vugarmammadli/ta-application | 7d58bf2db86f09e045709053d5ffb1c5c003e9d6 | 92545a4b9e7aa701223621f3920b54a7769c6437 | refs/heads/master | 2020-06-29T12:15:11.711180 | 2019-08-30T23:22:19 | 2019-08-30T23:22:19 | 200,532,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,203 | py | """
Django settings for restapi project.
Generated by 'django-admin startproject' using Django 2.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import django_heroku
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ti^tpu_*oo&6g#p#o9m0#4j!*r&9j(lllnfd9adyd-!*z@&ie5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'ta'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'restapi.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'restapi.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# Activate Django-Heroku.
django_heroku.settings(locals()) | [
"[email protected]"
] | |
b252cecb9090e7cd517c8bd8c87b6a660435e8e9 | bcabfc8e97f1172b79cf6bb0935a6eba3e402763 | /{{cookiecutter.service_name}}/src/common/setup.py | d89ba4d0db1b1b7f5799529c5659babca3066c24 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"MIT-0",
"BSD-2-Clause"
] | permissive | ServerlessOpsIO/aws-sam-python-template | 98f8d16ab649f4d0f7b27a0a783e6060596cb1b5 | 3a64cf08edfb628cae2b9f35402a193da501f152 | refs/heads/master | 2022-05-19T14:06:44.264920 | 2022-04-16T22:11:58 | 2022-04-16T22:11:58 | 250,827,340 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 892 | py | #!/usr/bin/env python
import io
import re
import os
from setuptools import setup, find_packages
# Package metadata for the service's shared "common" package.  The
# {{cookiecutter.*}} placeholders are substituted when the template is
# rendered at project-generation time.
setup(
    name='common',
    version='0.0.1',
    description='{{cookiecutter.service_name}} Service Common Code',
    author='{{cookiecutter.author_name}}',
    author_email='{{cookiecutter.author_email}}',
    license='Apache License 2.0',
    packages=find_packages(exclude=['tests.*', 'tests']),
    keywords="{{cookiecutter.service_name}} Service",
    python_requires='>={{cookiecutter.python_version}}.*',
    include_package_data=True,
    install_requires=[
        'aws_lambda_powertools',
        'boto3'
    ],
    classifiers=[
        'Environment :: Console',
        'Environment :: Other Environment',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: {{cookiecutter.python_version}}',
    ]
)
| [
"[email protected]"
] | |
0ee16c0ea586d5ac531d3a995bdb931cb79a0d19 | 3b7fe6b89a40a5fe2d8a580b09b950bdafdfb2c0 | /webservice/md5/test.py | 4034657821755f16d7758ab497f822b14d5f3ace | [] | no_license | Valeriia-arh/web-service-md5 | ee2ef391aab013b98f8ec65471c2a3174faa2b8d | 6d91f266fa8f34399cef5b6b05a9b92661ab0e52 | refs/heads/master | 2020-03-12T22:01:33.916097 | 2018-10-11T14:25:13 | 2018-10-11T14:25:13 | 130,839,120 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 699 | py | from md5 import count
import requests
from django.test import TestCase
from rest_framework.test import APIRequestFactory
#APIRequestFactory - creating test requests
test_file = "http://data.enteromics.com/robots.txt"

# Reference notes on the Django TestCase assertion helpers (kept from the
# original sketch; they are methods on django.test.TestCase and can only be
# called from inside a test case, never at module level):
#   assertContains(response, text, ...)              -- response body contains the text
#   assertTemplateUsed(response, template_name, ...) -- page was rendered with the template
#   assertRedirects(response, expected_url, ...)     -- response redirected as expected


def download_and_md5sum(url=test_file):
    """Download *url* and return the MD5 hex digest of the response body.

    Raises requests.HTTPError if the server responds with an error status.
    """
    import hashlib
    response = requests.get(url)
    response.raise_for_status()
    return hashlib.md5(response.content).hexdigest()
| [
"[email protected]"
] | |
8b0292c7d368dcfc0e81a605d46862236e1beb19 | b40c5846605fc0b0456dd99bb5e214eaefb1a067 | /python_class_3.py | 8b9748b1348db5a7cf28909f047ca1a7407e9c1a | [] | no_license | daaaaaun/python_section2 | 6153767110559c4f1abac61d4022d107566737d5 | dc5eeb1d3e00c06c8b50909927b43da99e2e989b | refs/heads/master | 2020-05-15T11:03:44.816503 | 2019-04-23T01:45:49 | 2019-04-23T01:45:49 | 182,210,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 743 | py | import sys
import io
sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.detach(), encoding='utf-8')
# Class variables vs. instance variables
class Warehouse:
    """Demonstrates a class variable shared by all instances."""

    stock_num = 0  # class variable: number of live Warehouse instances

    def __init__(self, name):
        self.name = name  # instance variable
        Warehouse.stock_num += 1

    def __del__(self):
        Warehouse.stock_num -= 1
# Create two instances; each __init__ increments the shared counter.
user1 = Warehouse('kim')
user2 = Warehouse('park')
print(user1.name)
print(user2.name)
print(user1.__dict__)
print(user2.__dict__)
print(Warehouse.__dict__) # attribute lookup checks the instance namespace first, then falls back to the class namespace
# The class variable is shared by every instance.
print(user1.stock_num)
print(user2.stock_num)
| [
"[email protected]"
] | |
35b89275ce8b64c34b2067dd74a3789ad5306781 | c8f110e2e6837004de2a2924e2b2b2736bb807ea | /writing/models.py | b865bc38847bf1c234882304d863f2301063ef6b | [] | no_license | mcnolan/hydra | 8f1915077fb057de6eeb7ef99084d8c9e2f73e3a | f83bde24f11078cd79f738335fa4b8d37039e904 | refs/heads/master | 2021-01-25T12:30:06.653221 | 2013-08-28T22:19:11 | 2013-08-28T22:19:11 | 656,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,864 | py | from django.db import models
#Book Writing Models
# Standards:
# Titles : 200 characters long
# Descriptions : 500 characters long
#
# The Author model describes the User doing the writing
class Author(models.Model):
    """The user doing the writing."""
    first_name = models.CharField(max_length=50)
    last_name = models.CharField(max_length=100)
    picture = models.ImageField(upload_to="avatarImages", null=True, blank=True)
    description = models.CharField(max_length=500, blank=True)
    # Set automatically when the row is first created.
    joined = models.DateTimeField(auto_now_add=True)

    def __unicode__(self):
        return self.first_name + " " + self.last_name
class Book(models.Model):
    """A book written by a single Author."""
    title = models.CharField(max_length=200)
    cover = models.ImageField(upload_to="covers", null=True, blank=True)
    sub_title = models.CharField(max_length=500, blank=True)
    author = models.ForeignKey(Author)
    description = models.CharField(max_length=500, blank=True)
    private = models.BooleanField()  # presumably hides the book from other users -- TODO confirm
    created = models.DateField(auto_now_add=True)

    def __unicode__(self):
        return self.title
# The Entry model describes the Entries that make up a particular book.
# This could be anything from a single paragraph to an entire chapter
class Entry(models.Model):
    """A unit of writing within a Book -- anything from a single
    paragraph to an entire chapter."""
    book = models.ForeignKey(Book)
    author = models.ForeignKey(Author)
    name = models.CharField(max_length=200)
    created = models.DateField(auto_now_add=True)
    last_updated = models.DateTimeField(auto_now_add=True, auto_now=True)
    version = models.IntegerField(default=1)
    # This will be null if the Entry is the latest
    previous_version = models.ForeignKey("self", null=True, blank=True)
    content = models.TextField()
    # In theory this group should not contain the Entry in previous version
    entries = models.ManyToManyField("self", blank=True)

    def __unicode__(self):
        return self.name
"[email protected]"
] | |
e44f659d1ee375f01cc1a261dca21404d715709a | 69ffff811386a96d22eedaa78f83c6c4333a8da7 | /pages/templatetags/pages_tags.py | 35d574f37626a9cc7cce6d04115d0466de0c2a1c | [] | no_license | bkarakus/hhac2017 | 3afe7a79fdab63e3e2e729b5871418283e388f4f | 8cd5278fd168e144dc4d4e1a1a2a7c089ca883cc | refs/heads/master | 2021-09-06T16:57:55.560079 | 2018-02-08T20:26:48 | 2018-02-08T20:26:48 | 111,285,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,175 | py | # -*- coding:utf-8 -*-
from django.template import Library, Node, TemplateSyntaxError
from django.utils.translation import ugettext as _
from pages.models import Sayfa, Menu, Image
# Tag registry; the register.tag(...) calls at the bottom attach the tags.
register = Library()
class PagesNode(Node):
    """
    Look up the Menu identified by ``menu_slug`` and store its active
    Sayfa pages in the context under ``context_var``.
    """
    def __init__(self, menu_slug, context_var):
        self.menu_slug = menu_slug
        self.context_var = context_var
    def render(self, context):
        try:
            menu = Menu.objects.get(slug=self.menu_slug)
        except (Menu.DoesNotExist, Menu.MultipleObjectsReturned):
            # Bug fix: the Python-2 form ``except A, B:`` caught only A and
            # bound it to the name B, so MultipleObjectsReturned escaped.
            # A tuple catches both exception types.
            pages = []
        else:
            pages = Sayfa.objects.filter(menu=menu, aktif=True)
        context[self.context_var] = pages
        return ''
def get_pages(parser, token):
    """
    Usage: {% get_pages menu_slug as pages %}

    Compiles to a PagesNode that stores the menu's active pages in the
    context variable named after ``as``.
    """
    try:
        bits = token.split_contents()
    except ValueError:
        # Message fix: this tag takes three arguments (slug, 'as', var),
        # not two as the old message claimed.
        raise TemplateSyntaxError(
            _('tag requires exactly three arguments'))
    if len(bits) != 4:
        raise TemplateSyntaxError(
            _('tag requires exactly three arguments'))
    if bits[2] != 'as':
        # Message fix: 'as' is the *second* argument of this tag.
        raise TemplateSyntaxError(
            _("second argument to tag must be 'as'"))
    return PagesNode(bits[1], bits[3])
class ImageNode(Node):
    """
    Template node that stores the slideshow-enabled images in the
    context under the configured variable name.
    """
    def __init__(self, context_var):
        self.context_var = context_var
    def render(self, context):
        context[self.context_var] = Image.objects.filter(slideshow=True)
        return ''
def get_slideshow_images(parser, token):
    """
    Usage: {% get_slideshow_images as pages %}

    Compiles to an ImageNode that stores slideshow images in the context
    variable named after ``as``.
    """
    try:
        bits = token.split_contents()
    except ValueError:
        raise TemplateSyntaxError(
            _('tag requires exactly two arguments'))
    if len(bits) != 3:
        # Message fix: this tag takes two arguments ('as', var), not three.
        raise TemplateSyntaxError(
            _('tag requires exactly two arguments'))
    if bits[1] != 'as':
        raise TemplateSyntaxError(
            _("first argument to tag must be 'as'"))
    return ImageNode(bits[2])
# Expose the compile functions above as template tags.
register.tag('get_pages', get_pages)
register.tag('get_slideshow_images', get_slideshow_images)
"[email protected]"
] | |
555a89a7e83b5305fd25b029e01d18d66298ed77 | fa914cf7f22c6fd4a57876729787735d65290969 | /University of Oklahoma/Text Analysis/04 Activity Police Report Extraction/normanpd/normanpd.py | 780f9c11d2797684738c6a95e765aedeb8326ad4 | [] | no_license | saeid-h/homeworks-repo | d14d7422e8dcd4f4d130ea8ab91b3d46d9ab591a | 9e3155758b1dc6ec5b835c8c1da993fb1adbca8c | refs/heads/master | 2023-04-11T13:48:28.542396 | 2021-04-28T17:01:32 | 2021-04-28T17:01:32 | 258,676,144 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,681 | py | # Norman PD
from urllib import request
import re
import PyPDF2
from PyPDF2 import PdfFileReader
from io import StringIO, BytesIO
import sqlite3
import random
# Landing page that lists the daily incident-summary PDF links.
normanpdurl = 'http://normanpd.normanok.gov/content/daily-activity'
#normanpd = 'normanpd.db'
def fetchincidents():
    """Download the Norman PD daily-activity HTML page and return it as text."""
    return request.urlopen(normanpdurl).read().decode('utf-8')
def extractincidents():
    """Scrape the daily-activity page for incident PDFs and parse them.

    Returns a list of 5-item rows, one per incident:
    [date_time, incident number, location, nature, ORI].
    Requires network access; parsing depends on the exact PDF layout.
    """
    s = fetchincidents()
    # Collect the hrefs of all incident-summary PDFs on the page.
    m = re.findall(r'(?<=href=").*Incident.*pdf',s)
    remoteFiles = list()
    memoryFiles = list()
    pdfFiles = list()
    for i in range(0, len(m)):
        # Links are relative; prefix the site origin.
        m[i] = 'http://normanpd.normanok.gov' + m[i]
        # http://stackoverflow.com/questions/9751197/opening-pdf-urls-with-pypdf
        remoteFiles.append (request.urlopen(m[i]).read())
        memoryFiles.append (BytesIO(remoteFiles[i]))
        pdfFiles.append (PdfFileReader(memoryFiles[i]))
    # http://stackoverflow.com/questions/28726255/
    # converting-a-pdf-file-consisting-of-tables-into-text-document-containings-tables
    dbString = []
    for pdf in pdfFiles:
        content = ""
        text = ""
        for page in range(0, pdf.getNumPages()):
            # Extract text from page and add to content
            content = pdf.getPage(page).extractText()
            text+=content
            tokens=content.split("\n")
            if page == 0:
                #print(tokens)
                # First page: skip the report header (5 tokens) and footer (3).
                # NOTE(review): these offsets assume the 2017-era PDF layout.
                dbString+=tokens[5:-3]
            else:
                dbString+=tokens[:-1]
    # Drop the trailing page-footer token.
    dbString=dbString[:-1]
    dblist = []
    i = 0
    # Re-group the flat token stream into 5-field rows; a location that the
    # PDF extractor split across two tokens yields a 6-token record, detected
    # by the next date/time token not appearing at offset i+5.
    while (i+5 < len(dbString)):
        if (re.match(r'.*/.*/.*:.*', dbString[i+5])):
            dblist.append(dbString[i:i+5])
            i += 5
        else:
            dblist.append([dbString[i], dbString[i+1], dbString[i+2]+" "+dbString[i+3],
                dbString[i+4], dbString[i+5]])
            i += 6
    return dblist
def createdb():
    """Create the ``incidents`` table in normanpd.db if it does not exist."""
    # https://docs.python.org/2/library/sqlite3.html
    connection = sqlite3.connect('normanpd.db')
    cursor = connection.cursor()
    # http://stackoverflow.com/questions/1601151/how-do-i-check-in-sqlite-whether-a-table-exists
    schema = (
        "CREATE TABLE if not exists incidents("
        "id INTEGER, "
        "number TEXT, "
        "date_time TEXT, "
        "location TEXT, "
        "nature TEXT, "
        "ORI TEXT)"
    )
    cursor.execute(schema)
    # Persist the schema change before closing.
    connection.commit()
    connection.close()
    return
def populatedb(incidents):
    """Insert extracted incident rows into normanpd.db.

    ``incidents`` is a list of 5-item rows ordered as
    [date_time, number, location, nature, ORI]; ids continue from the
    current maximum id already in the table.
    """
    # https://docs.python.org/2/library/sqlite3.html
    conn = sqlite3.connect('normanpd.db')
    c = conn.cursor()
    r = c.execute("select max(id) from incidents").fetchone()
    # max(id) is NULL (None) when the table is empty.
    if (r[0] == None):
        id = 1
    else:
        id = r[0]+1
    for field in incidents:
        # Bug fix: parameterized query. The old string concatenation broke
        # on values containing single quotes (e.g. "O'Connell St") and was
        # open to SQL injection.
        c.execute(
            "INSERT INTO incidents VALUES (?, ?, ?, ?, ?, ?)",
            (id, field[1], field[0], field[2], field[3], field[4]),
        )
        id += 1
    # Save (commit) the changes
    conn.commit()
    conn.close()
    return
def status():
    """Print the total row count and five random rows from normanpd.db.

    Assumes ids are contiguous starting at 1, as assigned by populatedb.
    """
    # https://docs.python.org/2/library/sqlite3.html
    conn = sqlite3.connect('normanpd.db')
    c = conn.cursor()
    r = c.execute("select count(*) from incidents").fetchone()
    max_row = r[0]
    print ("\n\nTotal number of rows are = ", max_row)
    print("\n\n5 random rows of norman.db database:\n\n")
    st = ["ID: ",
          "Incident Numer: ",
          "Date/Time: ",
          "Location: ",
          "Nature: ",
          "Incident ROI: "]
    for i in range(0,5):
        # Bug fix: ids start at 1, so sample in [1, max_row]; the old
        # randint(0, max_row) could pick the nonexistent id 0 and crash
        # when indexing the None result.
        row = random.randint(1, max_row)
        r = c.execute("select * from incidents where id = ?", (row,)).fetchone()
        # Bug fix: print all six columns; range(0, 5) skipped "Incident ROI".
        for j in range(0,6):
            print (st[j], r[j])
        print ("\n")
    conn.close()
    return
def signature():
    """Print a banner identifying the module version and its author."""
    banner_lines = [
        "\n",
        "normanpd version 2.0 imported into Python environment successfully!",
        "Code by Saeid Hosseinipoor",
        "email: [email protected]",
        "\n",
    ]
    for banner_line in banner_lines:
        print(banner_line)
    return
# Announce the module (and its author) whenever it is imported.
signature()
| [
"[email protected]"
] | |
45b7736d0301f865e9ec81f0363c55c1380b4fb3 | ecccaf0fee8d418a0e425e7f3041e53ec693caf1 | /tests/e2e/test_directives.py | be13bcf82ea23a4e59c438badccd17c41ac18bf6 | [
"MIT"
] | permissive | Krazybug/mnamer | 31cdd5f7bf3a55d2e19a06ab67c16d0862a80361 | d4e7f344053cd203a1a86561a1e822fd180375dd | refs/heads/master | 2021-01-02T06:40:23.483684 | 2020-02-07T05:08:59 | 2020-02-07T05:08:59 | 239,532,212 | 0 | 1 | MIT | 2020-02-10T14:31:58 | 2020-02-10T14:31:57 | null | UTF-8 | Python | false | false | 2,810 | py | import json
from typing import Callable
from unittest.mock import MagicMock, patch
import pytest
from mnamer.__version__ import VERSION
from mnamer.settings import Settings
from tests import *
@pytest.mark.parametrize("flag", ("-V", "--version"))
def test_version(flag: str, e2e_run: Callable):
    """Both version flags report the current version and exit cleanly."""
    result = e2e_run(flag)
    assert result.out == f"mnamer version {VERSION}"
    assert result.code == 0
@patch("mnamer.__main__.clear_cache")
def test_directives__cache_clear(
    mock_clear_cache: MagicMock, e2e_run: Callable
):
    """--no_cache clears the request cache exactly once and reports it."""
    result = e2e_run("--no_cache")
    mock_clear_cache.assert_called_once()
    assert "cache cleared" in result.out
    assert result.code == 0
@pytest.mark.parametrize("key", Settings._serializable_fields())
@patch("mnamer.utils.crawl_out")
def test_directives__config_dump(
    mock_crawl_out: MagicMock, key: str, e2e_run: Callable
):
    """--config_dump prints every serializable setting with its default."""
    # No config file on disk, so the dump contains only default values.
    mock_crawl_out.return_value = None
    result = e2e_run("--config_dump")
    assert result.code == 0
    if key.startswith("api_key"):
        # API keys are deliberately excluded from the value comparison.
        return
    json_out = json.loads(result.out)
    value = DEFAULT_SETTINGS[key]
    # Enum-valued settings serialize via their .value attribute.
    expected = getattr(value, "value", value)
    actual = json_out[key]
    assert actual == expected
@pytest.mark.usefixtures("setup_test_path")
def test_id__omdb(e2e_run: Callable):
    """--id-imdb overrides the filename-based lookup for the OMDb provider."""
    result = e2e_run(
        "--batch",
        "--movie_api",
        "omdb",
        "--id-imdb",
        "tt5580390",
        "aladdin.1992.avi",
    )
    # Consistency fix: all sibling id-override tests also assert a clean exit.
    assert result.code == 0
    assert "Shape of Water" in result.out
@pytest.mark.usefixtures("setup_test_path")
def test_id__tmdb(e2e_run: Callable):
    """--id-tmdb overrides the filename-based lookup for the TMDb provider."""
    cli_args = (
        "--batch",
        "--movie_api",
        "tmdb",
        "--id-tmdb",
        "475557",
        "Ninja Turtles (1990).mkv",
    )
    result = e2e_run(*cli_args)
    assert result.code == 0
    assert "Joker" in result.out
@pytest.mark.usefixtures("setup_test_path")
def test_id__tvdb(e2e_run: Callable):
    """--id-tvdb overrides the filename-based lookup for the TVDb provider."""
    cli_args = (
        "--batch",
        "--episode_api",
        "tvdb",
        "--id-tvdb",
        "79349",
        "game.of.thrones.01x05-eztv.mp4",
    )
    result = e2e_run(*cli_args)
    assert result.code == 0
    assert "Dexter" in result.out
@pytest.mark.usefixtures("setup_test_path")
def test_media__episode_override(e2e_run: Callable):
    """--media episode forces episode processing for a movie filename."""
    result = e2e_run("--batch", "--media", "episode", "aladdin.1992.avi")
    assert "Processing Episode" in result.out
    assert result.code == 0
@pytest.mark.usefixtures("setup_test_path")
def test_media__movie_override(e2e_run: Callable):
    """--media movie forces movie processing for an episode filename."""
    result = e2e_run("--batch", "--media", "movie", "s.w.a.t.2017.s02e01.mkv")
    assert "Processing Movie" in result.out
    assert result.code == 0
# TODO
def test_config_ignore(e2e_run: Callable):
    # Placeholder: coverage for the config-ignore directive is still missing.
    pass
def test_test(e2e_run: Callable):
    """--test runs in dry-run (testing) mode and says so."""
    output = e2e_run("--batch", "--test")
    assert output.code == 0
    assert "testing mode" in output.out
| [
"[email protected]"
] | |
1e87a933623fdffc157080af0c3a213f948be10c | fc4cde67f058da348fee2264ce57c3db054b6dcb | /figures.py | 51680fa93cfc029b93b4a61034d6fde50a33f798 | [] | no_license | MichaelShulga/tetris | b7c794e9fca2450c200d4945e30445c81ca83758 | 8e412cc4b1775e4c5c6267516fd123dd9ead191d | refs/heads/master | 2023-06-16T22:31:05.983274 | 2021-07-15T17:02:19 | 2021-07-15T17:02:19 | 384,950,978 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | import random
# Tetromino shapes; the nonzero value doubles as the piece's color index.
figure1 = [[1, 1, 1, 1]]

figure2 = [[2, 2],
           [2, 2]]

figure3 = [[0, 3, 0],
           [3, 3, 3]]

figure4 = [[4, 4, 4],
           [4, 0, 0]]

figure5 = [[5, 0, 0],
           [5, 5, 5]]

figure6 = [[6, 6, 0],
           [0, 6, 6]]

figure7 = [[0, 7, 7],
           [7, 7, 0]]

figures = [figure1, figure2, figure3, figure4, figure5, figure6, figure7]


def transposed(matrix):
    """Return the transpose of *matrix* (rows become columns of tuples)."""
    return list(zip(*matrix))


def get_figure():
    """Pick a random tetromino, transposed a random number of times.

    Bug fix: the original called ``transposed(figure)`` without keeping
    the result, so every figure was always returned in its base
    orientation.
    """
    figure = random.choice(figures)
    for _ in range(random.randint(0, 3)):
        figure = transposed(figure)
    return figure
| [
"[email protected]"
] | |
7b07c8b92846529d1c88e7451734a29c50e6786c | a77f592060cd6017da1fc2cddbc477b4a0bc1027 | /phonebook/urls.py | da7b0581f5c72063c75cf852e7b6c34abb63b10b | [] | no_license | nderitumaingi/jay | 708e971ea57c62579a42d6a259c5723b936a808a | e8f0bec8d18b2d67cb6664f9be3a44507fb55c5b | refs/heads/master | 2020-05-04T09:40:32.531110 | 2019-04-02T13:05:58 | 2019-04-02T13:05:58 | 178,173,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | from django.urls import path
from .import views
from django.contrib.auth import views as auth_views
from . import views
# URL namespace: reverse these routes as "phonebook:<name>".
app_name = 'phonebook'

urlpatterns = [
    # Built-in auth views handle the login/logout forms.
    path('login', auth_views.LoginView.as_view(), name='login'),
    path('logout', auth_views.LogoutView.as_view(), name='logout'),
    path('signup', views.signup, name='signup'),
    # Contact CRUD, backed by class-based generic views.
    path('', views.ContactListView.as_view(), name='contact-index'),
    path('add-contact', views.ContactCreateView.as_view(), name='add-contact'),
    path('<int:pk>/update-contact', views.ContactUpdateView.as_view(), name='update-contact'),
    path('<int:pk>/delete-contact', views.ContactDeleteView.as_view(), name='delete-contact'),
]
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.