blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
sequencelengths 1
1
| author
stringlengths 0
175
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
99f3dea40c103f391f5bbedf2c955812f133372f | 51253a1765ed005a8804b7bf1b6372429f94a020 | /calculate.py | 293d04cdf75a8d63db1a5b87dc0823716b7c1751 | [] | no_license | xly135846/MEGC2021 | b766d3ae295c238c305ae3f7fa0d8056f1ae9ba0 | 83bf61440aca980fb11e789dc3dfd47be78db81d | refs/heads/main | 2023-06-26T23:15:14.433211 | 2021-07-12T14:10:36 | 2021-07-12T14:10:36 | 384,604,569 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,000 | py | import numpy as np
from scipy import signal
from utils.utils import *
def cal_TP(left_count_1, label):
    """Count, for each labelled interval, how many predicted intervals
    match it with an IOU of at least 0.5.

    left_count_1 -- list of predicted intervals
    label        -- list of ground-truth intervals
    Returns a list of match counts, one entry per ground-truth interval.
    """
    return [
        sum(1 for pred in left_count_1 if cal_IOU(pred, gt) >= 0.5)
        for gt in label
    ]
def spotting_evaluation(pred, express_inter, K, P):
    """Detect expression intervals from a 1-D score sequence and evaluate
    them against the labelled intervals.

    pred          -- per-frame prediction scores
    express_inter -- ground-truth expression intervals
    K             -- half window size; each peak expands to [peak-K, peak+K]
    P             -- threshold factor between the mean and max of pred
    Returns (TP, FP, FN, pred_inter).
    """
    scores = np.array(pred)
    mean_score = np.mean(scores)
    # The peak threshold sits a fraction P of the way from the mean score
    # towards the maximum score.
    height = mean_score + P * (np.max(scores) - mean_score)
    peaks, _ = signal.find_peaks(scores, height=height, distance=K * 2)
    pred_inter = [[peak - K, peak + K] for peak in peaks]
    hits = np.array(cal_TP(pred_inter, express_inter))
    TP = int(np.count_nonzero(hits))
    # Predictions matching an already-counted interval are discounted here.
    n = len(pred_inter) - (sum(hits) - TP)
    m = len(express_inter)
    FP = n - TP
    FN = m - TP
    return TP, FP, FN, pred_inter
def spotting_evaluation_V2(pred_inter, express_inter):
    """Evaluate already-detected intervals against labelled intervals.

    pred_inter    -- predicted intervals
    express_inter -- ground-truth expression intervals
    Returns (TP, FP, FN) based on IOU matching (see cal_TP).
    """
    hits = np.array(cal_TP(pred_inter, express_inter))
    TP = int(np.count_nonzero(hits))
    # Duplicate matches of the same interval are discounted from n.
    n = len(pred_inter) - (sum(hits) - TP)
    m = len(express_inter)
    return TP, n - TP, m - TP
def cal_f1_score(TP, FP, FN):
    """Compute recall, precision and F1 score from match counts.

    Bug fix: the original implementation had the two formulas swapped
    (it computed recall as TP/(TP+FP) and precision as TP/(TP+FN)).
    The F1 value was unaffected because F1 is symmetric in the pair,
    but the individually returned recall/precision were exchanged.

    Returns (recall, precision, f1_score).
    Raises ZeroDivisionError if TP+FN, TP+FP or recall+precision is zero,
    matching the original behaviour.
    """
    recall = TP / (TP + FN)     # fraction of labelled intervals that were found
    precision = TP / (TP + FP)  # fraction of detections that are correct
    f1_score = 2 * recall * precision / (recall + precision)
    return recall, precision, f1_score
def merge(alist, blist, pred_value, K):
    # Merge neighbouring detections.  `alist` holds per-segment labels and
    # `blist` the matching [start, end] intervals.  Two adjacent segments
    # that are both labelled `pred_value` and whose intervals are at most
    # 2*K frames apart are fused into one segment/interval.
    alist_str = ""
    for i in alist:
        alist_str +=str(i)
    # The longest run of `pred_value` labels bounds how many merge passes
    # can possibly be needed; `num` is that bound.
    split_str = str(1-pred_value)
    num = max([len(i) for i in alist_str.split(split_str)])-1
    for i in range(num):
        i=0
        # Scan left to right, fusing one adjacent pair at a time.  The lists
        # are rebuilt (not mutated) so indices stay valid after each fuse.
        while i<(len(alist)-1):
            if (alist[i]==pred_value and alist[i+1]==pred_value) and abs(blist[i][1]-blist[i+1][0])<=K*2:
                clist = alist[:i]+[pred_value]+alist[i+2:]
                dlist = blist[:i]+[[blist[i][0],blist[i+1][1]]]+blist[i+2:]
                alist, blist = clist, dlist
            i+=1
    return alist,blist | [
"[email protected]"
] | |
161039098666b2a69f57aa3710a588c4f046cc99 | f3e2912c5631337985373df238d68b5c0d340add | /leetcode/p138_v2.py | ee8d872a784db1ca0d90df226fe24f0d54fe243b | [] | no_license | mrfox321/leetcode | b7eb318e6679a4a99ab0aa9b0149013bc1817dc3 | 7b355ff4514c4cb17e970e10f2338542d2435aa9 | refs/heads/master | 2021-05-12T02:48:56.391646 | 2018-01-15T21:35:24 | 2018-01-15T21:35:24 | 117,597,860 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | def vote3(nums):
count1 = 0
count2 = 0
num1 = 0
num2 = 0
if not nums:
return []
| [
"[email protected]"
] | |
dd70ce0a8d819d13d154991d54421443c6ce4260 | 5b4deaae0a06d84418d750d80ad0c954e84d954e | /documents/admin.py | 9b9639bbc4da8f8acb46ee2e5d05b9f7d18c4d79 | [] | no_license | maratovision/military_crm | d3dfdca84dad45e5c903571624057ddf16c5d9d0 | d44d9ff581b53895cef8d00b0a25a2a2e84337f7 | refs/heads/master | 2023-04-28T19:01:41.305201 | 2021-05-19T15:12:57 | 2021-05-19T15:12:57 | 362,474,704 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | from django.contrib import admin
from .models import *
admin.site.register(Document)
| [
"[email protected]"
] | |
0359e9366c572e840e6a924176a959c6c328847d | e3c8f786d09e311d6ea1cab50edde040bf1ea988 | /Incident-Response/Tools/grr/grr/server/grr_response_server/gui/selenium_tests/report_test.py | 1175096622c718b20aa9b0c66c5f1c953997a6f7 | [
"Apache-2.0",
"MIT"
] | permissive | foss2cyber/Incident-Playbook | d1add8aec6e28a19e515754c6ce2e524d67f368e | a379a134c0c5af14df4ed2afa066c1626506b754 | refs/heads/main | 2023-06-07T09:16:27.876561 | 2021-07-07T03:48:54 | 2021-07-07T03:48:54 | 384,988,036 | 1 | 0 | MIT | 2021-07-11T15:45:31 | 2021-07-11T15:45:31 | null | UTF-8 | Python | false | false | 4,588 | py | #!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from absl import app
from selenium.webdriver.common import keys
from grr_response_core.lib import rdfvalue
from grr_response_server import data_store
from grr_response_server.gui import gui_test_lib
from grr_response_server.rdfvalues import objects as rdf_objects
from grr.test_lib import test_lib
def AddFakeAuditLog(user=None, router_method_name=None):
  """Writes a minimal API audit entry to the relational datastore.

  Used by the report tests below to fabricate audit history.
  """
  data_store.REL_DB.WriteAPIAuditEntry(
      rdf_objects.APIAuditEntry(
          username=user,
          router_method_name=router_method_name,
      ))
class TestReports(gui_test_lib.GRRSeleniumTest):
  """Test the reports interface."""
  def testReports(self):
    """Test the reports interface."""
    # Two audit events eight days apart, so a timerange can separate them.
    with test_lib.FakeTime(
        rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
      AddFakeAuditLog(user="User123")
    with test_lib.FakeTime(
        rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
      AddFakeAuditLog(user="User456")
    # Make "test" user an admin.
    self.CreateAdminUser(u"test")
    self.Open("/#/stats/")
    # Go to reports.
    self.Click("css=#MostActiveUsersReportPlugin_anchor i.jstree-icon")
    self.WaitUntil(self.IsTextPresent, "Server | User Breakdown")
    # Enter a timerange that only matches one of the two fake events.
    self.Type("css=grr-form-datetime input", "2012-12-21 12:34")
    self.Click("css=button:contains('Show report')")
    # Only the event inside the timerange should appear in the report.
    self.WaitUntil(self.IsTextPresent, "User456")
    self.assertFalse(self.IsTextPresent("User123"))
  def testReportsDontIncludeTimerangesInUrlsOfReportsThatDontUseThem(self):
    """URL parameters only reflect settings the selected report consumes."""
    client_id = self.SetupClient(0)
    self.AddClientLabel(client_id, u"owner", u"bar")
    self.Open("/#/stats/")
    # Go to reports.
    self.Click("css=#MostActiveUsersReportPlugin_anchor i.jstree-icon")
    self.WaitUntil(self.IsTextPresent, "Server | User Breakdown")
    # Default values aren't shown in the url.
    self.WaitUntilNot(lambda: "start_time" in self.GetCurrentUrlPath())
    self.assertNotIn("duration", self.GetCurrentUrlPath())
    # Enter a timerange.
    self.Type("css=grr-form-datetime input", "2012-12-21 12:34")
    self.Type("css=grr-form-duration input", "2w")
    self.Click("css=button:contains('Show report')")
    # Reports that require timeranges include nondefault values in the url when
    # `Show report' has been clicked.
    self.WaitUntil(lambda: "start_time" in self.GetCurrentUrlPath())
    self.assertIn("duration", self.GetCurrentUrlPath())
    # Select a different report.
    self.Click("css=#LastActiveReportPlugin_anchor i.jstree-icon")
    self.WaitUntil(self.IsTextPresent, "Client | Last Active")
    # The default label isn't included in the url.
    self.WaitUntilNot(lambda: "bar" in self.GetCurrentUrlPath())
    # Select a client label.
    self.Select("css=grr-report select", "bar")
    self.Click("css=button:contains('Show report')")
    # Reports that require labels include them in the url after `Show report'
    # has been clicked.
    self.WaitUntil(lambda: "bar" in self.GetCurrentUrlPath())
    # Reports that dont require timeranges don't mention them in the url.
    self.assertNotIn("start_time", self.GetCurrentUrlPath())
    self.assertNotIn("duration", self.GetCurrentUrlPath())
    # Select a different report.
    self.Click("css=#GRRVersion7ReportPlugin_anchor i.jstree-icon")
    self.WaitUntil(self.IsTextPresent, "Active Clients - 7 Days Active")
    # The label is cleared when report type is changed.
    self.WaitUntilNot(lambda: "bar" in self.GetCurrentUrlPath())
    self.assertNotIn("start_time", self.GetCurrentUrlPath())
    self.assertNotIn("duration", self.GetCurrentUrlPath())
class TestDateTimeInput(gui_test_lib.GRRSeleniumTest):
  """Tests datetime-form-directive."""
  def testInputAllowsInvalidText(self):
    """The datetime input keeps partially-deleted (invalid) text."""
    # Make "test" user an admin.
    self.CreateAdminUser(u"test")
    # Open any page that shows the datetime-form-directive.
    self.Open("/#/stats/HuntApprovalsReportPlugin")
    datetime_input = self.WaitUntil(self.GetVisibleElement,
                                    "css=grr-form-datetime input")
    value = datetime_input.get_attribute("value")
    # The widget pre-fills a "YYYY-MM-DD HH:MM" timestamp.
    self.assertRegex(value, r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}")
    self.assertStartsWith(value, "20")
    datetime_input.send_keys(keys.Keys.BACKSPACE)
    self.WaitUntilNot(self.IsTextPresent, value)
    # The truncated text remains in the input instead of being reset.
    self.assertEqual(value[:-1], datetime_input.get_attribute("value"))
if __name__ == "__main__":
app.run(test_lib.main)
| [
"[email protected]"
] | |
5c6f9daae5340369857d3b1d744fe2d8252f0966 | 352aed8d9ea6b95122aaad4c738580c0d3f197af | /algorithm/recursion/TreeNode.py | b0db0e55b3418a7de408beab9fe28915b9f05dc2 | [] | no_license | zexiangzhang/algorithmAndDataStructure | 39c1fa000b8daff2076069a6316d34b699a5187f | c90b7a090d2da63cb45053d7792b44ecb03a063f | refs/heads/master | 2021-06-15T20:15:18.815648 | 2021-03-08T04:09:26 | 2021-03-08T04:09:26 | 164,974,065 | 9 | 0 | null | null | null | null | UTF-8 | Python | false | false | 701 | py | class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        # Basic binary-tree node: a payload plus optional child links.
        self.val = val      # node value
        self.left = left    # left child (TreeNode or None)
        self.right = right  # right child (TreeNode or None)
def print_tree(tree: TreeNode):
    """Print (and return) the values of `tree` in zigzag level order.

    Layers alternate direction, starting right-to-left so the first
    layer keeps the original code's behaviour.  An empty tree yields [].

    Bug fixes versus the original:
      * `count` was never incremented, so every layer was reversed
        instead of alternating;
      * `count // 2 == 0` used integer division where a parity test
        (`count % 2 == 0`) was clearly intended.
    The layer list is now also returned (previously only the empty-tree
    case returned a value), which is backward compatible.
    """
    if not tree:
        return []
    result_list = []
    cur_layer = [tree]
    count = 0
    while cur_layer:
        cur_list = []
        next_layer = []
        for node in cur_layer:
            cur_list.append(node.val)
            if node.left:
                next_layer.append(node.left)
            if node.right:
                next_layer.append(node.right)
        if count % 2 == 0:  # even layers are emitted right-to-left
            cur_list.reverse()
        result_list.append(cur_list)
        cur_layer = next_layer
        count += 1  # fix: advance the layer counter so direction alternates
    print(result_list)
    return result_list
"[email protected]"
] | |
3f06472b82640d70daa49eb6ce3f5df05671bb86 | afde7c617c377412d838cc8795fd6243ef5b329a | /djangoapp/src/djangoapp/settings.py | 913aa81a1216c482685cef63a3d853b1f5ce4c45 | [] | no_license | YangYijian/CS411_project | 1391a12aa83c356ec38b23189bc35219a6f07c8c | 04451fccfd70d18796db97ea43738f343a65fe6b | refs/heads/master | 2022-11-25T11:56:56.946681 | 2020-07-24T03:36:17 | 2020-07-24T03:36:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,289 | py | """
Django settings for djangoapp project.
Generated by 'django-admin startproject' using Django 2.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECURITY(review): this secret key is committed to source control.  Rotate it
# and load it from an environment variable (e.g. os.environ["SECRET_KEY"])
# before any production deployment.
SECRET_KEY = '&3(xspivj9*hez$t(a@5jcfr1l^@6o^&%!9h0zj=*3tq3d^3o7'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['www.gorentuiuc.web.illinois.edu', 'gorentuiuc.web.illinois.edu']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# own app
'appUser',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'djangoapp.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['./templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'djangoapp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
# SECURITY(review): database credentials are hardcoded and committed -- move
# them to environment variables or a secrets store and rotate the password.
# NOTE(review): no 'HOST' key is set, so Django defaults to localhost; port
# 8889 suggests a local MAMP-style MySQL -- confirm before deployment.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'gorentuiuc_djangoapp_mysql',
        'USER': 'gorentuiuc_hanc3',
        'PASSWORD': 'Ch19990327??',
        'PORT': '8889'
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
b92633e12fb23ac3c35e53dc6e02df1e2b10293d | 65c867757636a41827724853f32ad64103e07a6a | /lpthw/ex44e.py | bd1d22d80d9ac14d59a8d6cc741304dab93fb509 | [] | no_license | joshmarte/GH_Projects | 25799880bf6f7c637045623704244ccd4c158705 | 04977b8376bec54440da58473259dbe66cd70c3c | refs/heads/master | 2021-05-17T18:46:55.533734 | 2020-03-29T00:47:48 | 2020-03-29T00:47:48 | 250,925,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 607 | py | # Composition
class Other(object):
    """Target of composition: provides the behaviours Child delegates to."""
    def override(self):
        print("OTHER override()")
    def implicit(self):
        print("OTHER implicit()")
    def altered(self):
        print("OTHER altered()")
class Child(object):
    """Demonstrates composition: Child owns an Other instance and delegates
    to it instead of inheriting from it."""
    def __init__(self):
        # Composition: hold an Other rather than subclassing it.
        self.other = Other()
    def implicit(self):
        # Pure delegation -- forwards straight to the composed object.
        self.other.implicit()
    def override(self):
        # Fully replaces the behaviour; Other is not consulted.
        print("CHILD override()")
    def altered(self):
        # Wraps the composed object's behaviour with extra steps.
        print("CHILD, BEFORE OTHER altered()")
        self.other.altered()
        print("CHILD, AFTER OTHER altered()")
son = Child()
son.implicit()
son.override()
son.altered()
| [
"[email protected]"
] | |
e3c0dd3d0f3fb8f62c81edfefc318ee91b956d48 | 50a601c7a3ded91d7e7a989f9334594a2ea2e714 | /eComSite/eComSite/settings.py | cbcde1e78b8d403c00352f71a613dd7581c4c452 | [] | no_license | YitayalT/django-projects | 4f809347bdb5439a753a1e87d2da3664cd82296a | 987b0bfaf2d200a067304af257ffc49f4210690f | refs/heads/main | 2023-07-06T20:08:13.854384 | 2021-08-10T12:11:11 | 2021-08-10T12:11:11 | 376,379,620 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,257 | py | """
Django settings for eComSite project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECURITY(review): secret key committed to source control (note the
# "django-insecure-" prefix).  Rotate it and load it from an environment
# variable before any production deployment.
SECRET_KEY = 'django-insecure-gmf=k$*=a1izi66d00$)v8hz!kk_0u+2v38xr3enc&^q%%i7-i'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'shop',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'eComSite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'eComSite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"[email protected]"
] | |
2bc16a886262ae8cd45018ab9ec20e829eb79aed | 99652a257737e98e40e973a0ea63c797470f5e67 | /network/edit_network.py | 4dda84f679b2d96fd6b634d877c2e76dd93e52ce | [
"MIT"
] | permissive | jrg1381/sm_asr_console | 56180e6c4620e2f3969128127fc05045cf8e6099 | 47c4090075deaaa7f58e9a092423a58bc7b0a30f | refs/heads/master | 2022-12-24T17:24:28.415196 | 2019-07-25T16:52:42 | 2019-07-25T16:52:42 | 153,685,493 | 2 | 0 | MIT | 2021-06-01T22:51:44 | 2018-10-18T20:46:40 | Python | UTF-8 | Python | false | false | 2,700 | py | # encoding: utf-8
import npyscreen
import curses
from error_handler import error_handler
from swagger_client import ManagementApi
from swagger_client.models import ManagementIpAddressInfo
class NetworkSubMenuList(npyscreen.MultiLineAction):
    """Menu list that jumps to the network-configuration sub-forms."""
    def __init__(self, *args, **keywords):
        super(NetworkSubMenuList, self).__init__(*args, **keywords)
        # Map from the text on screen to the name of the sub-form
        self.form_index = {
            "Use DHCP": "NETWORK/DHCP",
            "Configure static IP": "NETWORK/STATIC_IP",
        }
        self.values = list(self.form_index.keys())
    def actionHighlighted(self, act_on_this, key_press):
        # Switch to the sub-form registered for the highlighted menu entry.
        self.parent.parentApp.switchForm(self.form_index[act_on_this])
class NetworkDhcp(npyscreen.ActionPopup):
    """Confirmation popup that switches the appliance to DHCP networking."""
    @error_handler("Management API")
    def enable_dhcp(self):
        # Delegates to the management API; error_handler surfaces failures.
        self.parentApp.management_api.set_dhcp()
    def create(self):
        self.add(npyscreen.MultiLine, values=["Choose OK to enable DHCP networking.", "The system will restart."],
                 editable=False)
    def on_ok(self):
        # OK enables DHCP, then returns to the previous form.
        self.enable_dhcp()
        self.parentApp.switchFormPrevious()
    def on_cancel(self):
        self.parentApp.switchFormPrevious()
class EditNetwork(npyscreen.ActionFormV2):
    """Top-level network menu; both OK and Cancel just go back."""
    def create(self):
        super().create()
        # Cache API handles from the application for child widgets/forms.
        self.management_api = self.parentApp.management_api
        self.licensing_api = self.parentApp.licensing_api
        self.wg_network_options = self.add(NetworkSubMenuList, rely=1)
    def on_ok(self):
        self.parentApp.switchFormPrevious()
    def on_cancel(self):
        self.parentApp.switchFormPrevious()
class NetworkStatic(npyscreen.ActionPopup):
    """Form for entering and applying a static IP configuration."""
    @error_handler("Management API")
    def _change_network(self):
        # Build the request straight from the text widgets; values are not
        # validated here -- the management API / error_handler report errors.
        request = ManagementIpAddressInfo(
            nameservers=self.wg_nameservers.value.split(' '),
            netmask=self.wg_netmask.value,
            gateway=self.wg_gateway.value,
            address=self.wg_ip_address.value)
        # NOTE(review): `response` is never inspected -- consider checking it.
        response = self.parentApp.management_api.set_manual_ip_address(request)
    def create(self):
        super().create()
        self.wg_nameservers = self.add(npyscreen.TitleText, rely=1, name="Nameservers (space separated)")
        self.wg_netmask = self.add(npyscreen.TitleText, rely=3, name="Netmask (e.g. 255.255.255.0)")
        self.wg_gateway = self.add(npyscreen.TitleText, rely=5, name="Gateway (e.g. 192.168.1.1)")
        self.wg_ip_address = self.add(npyscreen.TitleText, rely=7, name="IP address (e.g. 192.168.1.40)")
    def on_ok(self):
        # OK applies the static configuration, then returns.
        self._change_network()
        self.parentApp.switchFormPrevious()
    def on_cancel(self):
        self.parentApp.switchFormPrevious()
| [
"[email protected]"
] | |
663ab0f3c355a22523a5bd0aca9f2b4efb5dc25c | 60d72b2bd72841c0530492d5ae8f98f83629b9ab | /gestaoprocessos/migrations/0001_initial.py | 66baa355b0fee9894aab1369d200ca9f35a3155d | [] | no_license | RobertaHauser/intranet-iso9001 | e782be2a9e2bd9736fda03950d711a2400b3185b | 2396e3a20a27f91f490518664328396484b033c5 | refs/heads/master | 2023-08-20T08:46:36.588201 | 2021-10-21T00:42:18 | 2021-10-21T00:42:18 | 415,255,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,094 | py | # Generated by Django 3.2.8 on 2021-10-19 15:26
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations; avoid hand-editing --
    # create a follow-up migration for schema changes instead.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='tab_sgq_001',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('requisito_num_original', models.CharField(max_length=184, unique=True, verbose_name='N.Requisito(original)')),
                ('requisito_txt_original', models.TextField(verbose_name='Descrição do requisito (original)')),
                ('requirement_num_original', models.CharField(max_length=184, verbose_name='N.Requirement (original)')),
                ('requirement_txt_original', models.TextField(verbose_name='Requirement description (original)')),
            ],
            options={
                'verbose_name': 'TAB-SGQ-001',
                'verbose_name_plural': 'TAB-SGQ-001',
                # NOTE(review): db_table spells "sqg" while the model is
                # "sgq" -- looks like a typo, but changing it would require
                # a rename migration; confirm which is intended.
                'db_table': 'tab_sqg_001',
            },
        ),
    ]
| [
"[email protected]"
] | |
cd75f8dcb615ca8df8600ef455e587f1fa3c3a1b | 0abe2fc7fa3509a28fac6446c9f8be3fcf3a2047 | /example_vehicle.py | 0c7c999e327688810ff2953306d4db18b70be13c | [] | no_license | cheng-zilong/PyiLQR | 1bd70e5ebabdc28e63d3576101ad6a9d4e2e10bc | 211fd8793795213171676e6aa64c7bfb5bb3f9be | refs/heads/master | 2023-01-14T10:54:54.252112 | 2020-11-19T05:13:19 | 2020-11-19T05:13:19 | 306,287,914 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 17,765 | py | #%%
import numpy as np
import sympy as sp
import scipy as sci
import time as tm
from scipy import io
from iLQRSolver import DynamicModel, ObjectiveFunction, BasiciLQR, AdvancediLQR, Logger
from loguru import logger
from datetime import datetime
import torch
from torch import nn, optim
from torch.autograd.functional import jacobian
from torch.utils.tensorboard import SummaryWriter
import sys
logger.remove()
logger.add(sys.stdout, format="<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> - {message}")
class Residual(nn.Module):
    """The Residual block of ResNet.

    Pre-activation layout for 1-D features: BN -> ReLU -> Linear -> BN ->
    ReLU -> Linear on the main track, plus a skip connection, followed by
    a final ReLU.  ("shorcut" is the pre-existing spelling of "shortcut",
    kept because it is part of the public parameter/attribute names.)
    """
    def __init__(self, input_channels, output_channels, is_shorcut = True):
        # is_shorcut=True projects the skip path with a learned Linear so
        # channel counts may differ; False adds the input directly, which
        # requires input_channels == output_channels.
        super().__init__()
        self.is_shorcut = is_shorcut
        self.bn1 = nn.BatchNorm1d(input_channels)
        self.relu = nn.ReLU()
        self.linear1 = nn.Linear(input_channels, output_channels)
        self.bn2 = nn.BatchNorm1d(output_channels)
        self.linear2 = nn.Linear(output_channels, output_channels)
        self.shorcut = nn.Linear(input_channels, output_channels)
        self.main_track = nn.Sequential(self.bn1, self.relu , self.linear1, self.bn2, self.relu, self.linear2)
    def forward(self, X):
        # X: (batch, input_channels) -- BatchNorm1d implies a batch dim.
        if self.is_shorcut:
            Y = self.main_track(X) + self.shorcut(X)
        else:
            Y = self.main_track(X) + X
        return torch.nn.functional.relu(Y)
class SmallResidualNetwork(nn.Module):
    """Stack of four Residual blocks (64-32-16-8) with a final Linear head.

    Used as an approximate dynamics model mapping a state-input vector
    (in_dim) to the next state (out_dim).
    """
    def __init__(self, in_dim, out_dim):
        super().__init__()
        layer1_no=64
        layer2_no=32
        layer3_no=16
        layer4_no=8
        self.layer = nn.Sequential( Residual(in_dim, layer1_no),
                                    Residual(layer1_no, layer2_no),
                                    Residual(layer2_no, layer3_no),
                                    Residual(layer3_no, layer4_no),
                                    nn.Linear(layer4_no, out_dim))
    def forward(self, x):
        x = self.layer(x)
        return x
class SmallNetwork(nn.Module):
    """Here is a dummy network that can work well on the vehicle model
    (two hidden Linear+BatchNorm+ReLU layers of 128 and 64 units).
    """
    def __init__(self, in_dim, out_dim):
        super().__init__()
        layer1_no=128
        layer2_no=64
        self.layer = nn.Sequential( nn.Linear(in_dim, layer1_no), nn.BatchNorm1d(layer1_no), nn.ReLU(),
                                    nn.Linear(layer1_no, layer2_no), nn.BatchNorm1d(layer2_no), nn.ReLU(),
                                    nn.Linear(layer2_no, out_dim))
    def forward(self, x):
        x = self.layer(x)
        return x
class LargeNetwork(nn.Module):
    """Here is a dummy network that can work well on the vehicle model
    (same shape as SmallNetwork but with 800 and 400 hidden units).
    """
    def __init__(self, in_dim, out_dim):
        super().__init__()
        layer1_no=800
        layer2_no=400
        self.layer = nn.Sequential( nn.Linear(in_dim, layer1_no), nn.BatchNorm1d(layer1_no), nn.ReLU(),
                                    nn.Linear(layer1_no, layer2_no), nn.BatchNorm1d(layer2_no), nn.ReLU(),
                                    nn.Linear(layer2_no, out_dim))
    def forward(self, x):
        x = self.layer(x)
        return x
def vehicle_vanilla(T = 100, max_iter = 10000, is_check_stop = True):
    """Run the basic iLQR solver on the kinematic vehicle model.

    T             -- horizon length (time steps)
    max_iter      -- maximum iLQR iterations
    is_check_stop -- whether to stop early when converged
    Results are written to a timestamped log via Logger.
    """
    file_name = "vehicle_vanilla_" + datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    #################################
    ######### Dynamic Model #########
    #################################
    vehicle, x_u, n, m = DynamicModel.vehicle()
    init_state = np.asarray([0,0,0,0],dtype=np.float64).reshape(-1,1)
    init_input = np.zeros((T,m,1))
    dynamic_model = DynamicModel.DynamicModelWrapper(vehicle, x_u, init_state, init_input, T)
    #################################
    ##### Objective Function ########
    #################################
    # Quadratic tracking cost (x_u - r)' C (x_u - r); the reference drives
    # lateral position to -10 and speed to 8 while penalising inputs.
    C_matrix = np.diag([0.,1.,1.,1.,10.,10.])
    r_vector = np.asarray([0.,-10.,0.,8.,0.,0.])
    objective_function = ObjectiveFunction.ObjectiveFunctionWrapper((x_u - r_vector)@C_matrix@(x_u - r_vector), x_u)
    #################################
    ######### iLQR Solver ###########
    #################################
    logger_id = Logger.loguru_start( file_name = file_name,
                                     T=T,
                                     max_iter = max_iter,
                                     is_check_stop = is_check_stop,
                                     init_state = init_state,
                                     C_matrix = C_matrix,
                                     r_vector = r_vector)
    vehicle_example = BasiciLQR.iLQRWrapper(dynamic_model, objective_function)
    vehicle_example.solve(file_name, max_iter = max_iter, is_check_stop = is_check_stop)
    Logger.loguru_end(logger_id)
def vehicle_log_barrier(T = 100, max_iter = 10000, is_check_stop = True):
    """Run the log-barrier iLQR solver on the vehicle model with input box
    constraints and two moving elliptical obstacles.

    T             -- horizon length (time steps)
    max_iter      -- maximum iLQR iterations
    is_check_stop -- whether to stop early when converged
    """
    file_name = "vehicle_logbarrier_" + datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    logger_id = Logger.loguru_start(file_name = file_name, T=T, max_iter = max_iter, is_check_stop = is_check_stop)
    #################################
    ######### Dynamic Model #########
    #################################
    h_constant = 0.1 # step size
    vehicle, x_u, n, m = DynamicModel.vehicle(h_constant)
    init_state = np.asarray([0,0,0,4],dtype=np.float64).reshape(-1,1)
    dynamic_model = DynamicModel.DynamicModelWrapper(vehicle, x_u, init_state, np.zeros((T,m,1)), T)
    #################################
    ##### Objective Function ########
    #################################
    # box constraints (each expression must stay <= 0)
    inequality_constraint1 = x_u[5] - 8 # acceleration<=8
    inequality_constraint2 = -8 - x_u[5] # -8<=acceleration
    inequality_constraint3 = x_u[4] - 0.6 # omega<=0.6
    inequality_constraint4 = -0.6 - x_u[4] # -0.6<=omega
    # collision avoidance constraints: stay outside ellipses with
    # semi-axes 5 (x) and 2 (y) around each obstacle centre
    obs1_x, obs1_y, obs2_x, obs2_y = sp.symbols('obs1_x, obs1_y, obs2_x, obs2_y')
    inequality_constraint5 = 1 - ((x_u[0] - obs1_x)**2)/25 - ((x_u[1] - obs1_y)**2)/4
    inequality_constraint6 = 1 - ((x_u[0] - obs2_x)**2)/25 - ((x_u[1] - obs2_y)**2)/4
    ineq_constr = [ inequality_constraint1,
                    inequality_constraint2,
                    inequality_constraint3,
                    inequality_constraint4,
                    inequality_constraint5,
                    inequality_constraint6]
    # Weighting Matrices
    C_matrix = np.diag([0.,1.,0.,0.,1.,1.])
    r_vector = np.asarray([0.,4.,0.,0.,0.,0.])
    # Parameters of the obstacle (initial positions and x-velocities)
    obs1_x0 = 20
    obs1_y0 = 0
    obs1_velocity = 3
    obs2_x0 = 0
    obs2_y0 = 4
    obs2_velocity = 6
    # There are totally 5 additional variables
    # [t, obs1_x, obs1_y, obs2_x, obs2_y]
    objective_function = ObjectiveFunction.ObjectiveLogBarrier( (x_u - r_vector)@C_matrix@(x_u - r_vector),
                                                                x_u,
                                                                ineq_constr,
                                                                [obs1_x, obs1_y, obs2_x, obs2_y])
    # Pre-compute each obstacle's position along the horizon; the first
    # entry (0.5) is the barrier parameter t.
    add_param_obj = np.zeros((T, 5))
    for tau in range(T):
        add_param_obj[tau] = np.asarray((0.5, obs1_x0+h_constant*obs1_velocity*tau, obs1_y0,
                                              obs2_x0+h_constant*obs2_velocity*tau, obs2_y0),
                                        dtype = np.float64)
    objective_function.update_add_param(add_param_obj)
    #################################
    ######### iLQR Solver ###########
    #################################
    vehicle_example = AdvancediLQR.LogBarrieriLQR(dynamic_model, objective_function)
    vehicle_example.solve(file_name, max_iter = max_iter, is_check_stop = is_check_stop)
    Logger.loguru_end(logger_id)
def vehicle_NNiLQR(T = 100,
                   trial_no=100,
                   stopping_criterion = 1e-4,
                   max_iter=100,
                   max_line_search = 50,
                   decay_rate=0.99,
                   decay_rate_max_iters=300,
                   gaussian_filter_sigma = 10,
                   gaussian_noise_sigma = [[0.01], [0.1]],
                   network = "small"):
    """Run the neural-network iLQR variant: pretrain a network dynamics
    model on sampled trajectories, then solve with AdvancediLQR.NNiLQR.

    network selects the architecture: "large", "small" or "residual".
    NOTE(review): `gaussian_noise_sigma` is a mutable default argument --
    it is only read here, but binding it per-call would be safer.
    NOTE(review): an unrecognised `network` value leaves nn_dynamic_model
    unbound and raises NameError at the solve call below.
    """
    file_name = "vehicle_dd_iLQR_" + datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    #################################
    ######### Dynamic Model #########
    #################################
    vehicle, x_u, n, m = DynamicModel.vehicle()
    init_state = np.asarray([0,0,0,0],dtype=np.float64).reshape(-1,1)
    init_input = np.zeros((T,m,1))
    dynamic_model = DynamicModel.DynamicModelWrapper(vehicle, x_u, init_state, init_input, T)
    #################################
    ##### Objective Function ########
    #################################
    C_matrix = np.diag([0.,1.,1.,1.,10.,10.])
    r_vector = np.asarray([0.,-10.,0.,8.,0.,0.])
    objective_function = ObjectiveFunction.ObjectiveFunctionWrapper((x_u - r_vector)@C_matrix@(x_u - r_vector), x_u)
    #################################
    ########## Training #############
    #################################
    # Sampling bounds for [x, y, theta, v, omega, a]: states start at the
    # origin, inputs are drawn within the box below.
    x0_u_lower_bound = [0, 0, 0, 0, -1, -3]
    x0_u_upper_bound = [0, 0, 0, 0, 1, 3]
    x0_u_bound = (x0_u_lower_bound, x0_u_upper_bound)
    dataset_train = DynamicModel.DynamicModelDataSetWrapper(dynamic_model, x0_u_bound, Trial_No=trial_no)
    dataset_vali = DynamicModel.DynamicModelDataSetWrapper(dynamic_model, x0_u_bound, Trial_No=10)
    if network == "large":
        nn_dynamic_model = DynamicModel.NeuralDynamicModelWrapper(LargeNetwork(n+m, n),init_state, init_input, T)
        nn_dynamic_model.pretrain(dataset_train, dataset_vali, max_epoch=100000, stopping_criterion = stopping_criterion, lr = 0.001, model_name = "vehicle_large_5.model")
    elif network == "small":
        nn_dynamic_model = DynamicModel.NeuralDynamicModelWrapper(SmallNetwork(n+m, n),init_state, init_input, T)
        nn_dynamic_model.pretrain(dataset_train, dataset_vali, max_epoch=100000, stopping_criterion = stopping_criterion, lr = 0.001, model_name = "vehicle_small_5.model")
    elif network == "residual":
        nn_dynamic_model = DynamicModel.NeuralDynamicModelWrapper(SmallResidualNetwork(n+m, n),init_state, init_input, T)
        nn_dynamic_model.pretrain(dataset_train, dataset_vali, max_epoch=100000, stopping_criterion = stopping_criterion, lr = 0.001, model_name = "vehicle_residual_5.model")
    #################################
    ######### iLQR Solver ###########
    #################################
    logger_id = Logger.loguru_start(file_name = file_name,
                                    T=T,
                                    trial_no = trial_no,
                                    stopping_criterion = stopping_criterion,
                                    max_iter = max_iter,
                                    max_line_search = max_line_search,
                                    decay_rate = decay_rate,
                                    decay_rate_max_iters = decay_rate_max_iters,
                                    gaussian_filter_sigma = gaussian_filter_sigma,
                                    gaussian_noise_sigma = gaussian_noise_sigma,
                                    init_state = init_state,
                                    C_matrix = C_matrix,
                                    r_vector = r_vector,
                                    x0_u_lower_bound = x0_u_lower_bound,
                                    x0_u_upper_bound = x0_u_upper_bound,
                                    is_use_large_net = network)
    vehicle_example = AdvancediLQR.NNiLQR(dynamic_model, objective_function)
    vehicle_example.solve( file_name, nn_dynamic_model, dataset_train,
                           re_train_stopping_criterion=stopping_criterion,
                           max_iter=max_iter,
                           max_line_search = max_line_search,
                           decay_rate=decay_rate,
                           decay_rate_max_iters=decay_rate_max_iters,
                           gaussian_filter_sigma = gaussian_filter_sigma,
                           gaussian_noise_sigma = gaussian_noise_sigma)
    Logger.loguru_end(logger_id)
def vehicle_net_iLQR( T = 100,
trial_no=100,
stopping_criterion = 1e-4,
max_iter=100,
decay_rate=0.99,
decay_rate_max_iters=300,
gaussian_filter_sigma = 10,
gaussian_noise_sigma = [[0.01], [0.1]],
network = "small"):
file_name = "vehicle_net_iLQR_" + datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
#################################
######### Dynamic Model #########
#################################
vehicle, x_u, n, m = DynamicModel.vehicle()
init_state = np.asarray([0,0,0,4],dtype=np.float64).reshape(-1,1)
init_input = np.zeros((T,m,1))
dynamic_model = DynamicModel.DynamicModelWrapper(vehicle, x_u, init_state, init_input, T)
#################################
##### Objective Function ########
#################################
C_matrix = np.diag([0.,1.,0.,1.,10.,10.])
r_vector = np.asarray([0.,-3.,0.,8.,0.,0.])
objective_function = ObjectiveFunction.ObjectiveFunctionWrapper((x_u - r_vector)@C_matrix@(x_u - r_vector), x_u)
#################################
########## Training #############
#################################
x0_u_lower_bound = [-0, -1, -0.3, 0, -0.3, -3]
x0_u_upper_bound = [10, 1, 0.3, 8, 0.3, 3]
x0_u_bound = (x0_u_lower_bound, x0_u_upper_bound)
dataset_train = DynamicModel.DynamicModelDataSetWrapper(dynamic_model, x0_u_bound, Trial_No=trial_no)
dataset_vali = DynamicModel.DynamicModelDataSetWrapper(dynamic_model, x0_u_bound, Trial_No=10)
if network == "large":
nn_dynamic_model = DynamicModel.NeuralDynamicModelWrapper(LargeNetwork(n+m, n),init_state, init_input, T)
nn_dynamic_model.pretrain(dataset_train, dataset_vali, max_epoch=100000, stopping_criterion = stopping_criterion, lr = 0.001, model_name = "vehicle_neural_large.model")
elif network == "small":
nn_dynamic_model = DynamicModel.NeuralDynamicModelWrapper(SmallNetwork(n+m, n),init_state, init_input, T)
nn_dynamic_model.pretrain(dataset_train, dataset_vali, max_epoch=100000, stopping_criterion = stopping_criterion, lr = 0.001, model_name = "vehicle_neural_small.model")
elif network == "small_residual":
nn_dynamic_model = DynamicModel.NeuralDynamicModelWrapper(SmallResidualNetwork(n+m, n),init_state, init_input, T)
nn_dynamic_model.pretrain(dataset_train, dataset_vali, max_epoch=100000, stopping_criterion = stopping_criterion, lr = 0.001, model_name = "vehicle_neural_small_residual.model")
#################################
######### iLQR Solver ###########
#################################
logger_id = Logger.loguru_start( file_name = file_name,
T=T,
trial_no = trial_no,
stopping_criterion = stopping_criterion,
max_iter = max_iter,
decay_rate = decay_rate,
decay_rate_max_iters = decay_rate_max_iters,
gaussian_filter_sigma = gaussian_filter_sigma,
gaussian_noise_sigma = gaussian_noise_sigma,
init_state = init_state,
C_matrix = C_matrix,
r_vector = r_vector,
x0_u_lower_bound = x0_u_lower_bound,
x0_u_upper_bound = x0_u_upper_bound,
network = network)
vehicle_example = AdvancediLQR.NetiLQR(dynamic_model, objective_function)
vehicle_example.solve( file_name, nn_dynamic_model, dataset_train,
re_train_stopping_criterion=stopping_criterion,
max_iter=max_iter,
decay_rate=decay_rate,
decay_rate_max_iters=decay_rate_max_iters,
gaussian_filter_sigma = gaussian_filter_sigma,
gaussian_noise_sigma = gaussian_noise_sigma)
Logger.loguru_end(logger_id)
# %%
if __name__ == "__main__":
# vehicle_vanilla(T = 100, max_iter=500, is_check_stop = False)
# vehicle_log_barrier(T = 100, max_iter=10000, is_check_stop = True)
vehicle_NNiLQR( T = 100,
trial_no=100,
stopping_criterion = 1e-4,
max_iter=500,
max_line_search = 10,
decay_rate=0.99,
decay_rate_max_iters=300,
gaussian_filter_sigma = 5,
gaussian_noise_sigma = [[0.01], [0.1]],
# network = "large")
# network = "large")
network = "residual")
# vehicle_net_iLQR( T = 100,
# trial_no=100,
# stopping_criterion = 1e-4,
# max_iter=1000,
# decay_rate=0.99,
# decay_rate_max_iters=300,
# gaussian_filter_sigma = 5,
# gaussian_noise_sigma = [[0.01], [0.1]],
# network = "small")
# %%
| [
"[email protected]"
] | |
b4fd30ba3e9eec7528a1c2334b4650b1dacbdb00 | 6bd93b73213dd97e6e9529db6d7eecdb0504697d | /GoDigital_Python_Codes/command_line3.py | c24de0ae40d597420fe8c15ab992d2fd0ae6e462 | [] | no_license | patiltushar9820/Python_Code | 02e9558e63068823008645892e944894c1a31e62 | b8f1abc448ba738cab6763000f57ba3e9fc2a376 | refs/heads/main | 2023-08-16T13:35:05.532766 | 2021-09-24T10:57:49 | 2021-09-24T10:57:49 | 407,141,302 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84 | py | #
def f(c):
return c
#>>> c=[1,2,3]
#>>> e=f(c)
#>>> e is c
#output - True | [
"[email protected]"
] | |
c8401e8e3188c1d22ddcee1a2d85035f8bdfab43 | de0ea898d18e4faf383d230cf2542335bfa166d5 | /library/views.py | 877c529b48ed090292c6dd4c1e2631133c9a939e | [] | no_license | msadour/book_API | 86121341e66249b51835e5e1c842c8fdde26ba6c | 81477c242647c95897a05ad892bc3e11542defa7 | refs/heads/master | 2022-12-09T16:33:12.027427 | 2020-01-22T13:25:45 | 2020-01-22T13:25:45 | 231,387,598 | 0 | 0 | null | 2022-12-07T23:21:19 | 2020-01-02T13:28:36 | HTML | UTF-8 | Python | false | false | 989 | py | # -*- coding: utf-8 -*-
"""
Views.
"""
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from .permissions import IsOwnerOrReadOnly
from .models import Book
from .serializers import BookSerializer
class HelloView(APIView):
"""
Display the message 'Hello World!' if the permission allows us.
"""
permission_classes = (IsAuthenticated,)
def get(self, request):
content = {'message': 'Hello, World!'}
return Response(content)
class BookViewSet(viewsets.ModelViewSet):
"""
Display book(s) if the permission allows us.
"""
queryset = Book.objects.all()
serializer_class = BookSerializer
permission_classes = [IsAuthenticated,
permissions.IsAuthenticatedOrReadOnly,
IsOwnerOrReadOnly]
| [
"[email protected]"
] | |
73f941ce41d59714ed7a1e39befe32cf6890c32e | 2bd18a28000b13fd4e22fbc17319d777947c057c | /tools/fetch_revealjs.py | 05ef8be5acf092cef0c69e9210b802ede67b6417 | [
"MIT"
] | permissive | fabianhauser/sphinx-revealjs | ead224501addc3e727c07d578c0e1d9daf0d7f0f | 30ca019ce33352d64729689b2607b2127423f45f | refs/heads/master | 2020-12-28T08:48:22.429885 | 2019-12-22T08:14:09 | 2019-12-22T08:14:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,588 | py | #!/use/bin/env python
"""Fetch and sync reveal.js resources
"""
import shutil
import sys
from pathlib import Path
from urllib.request import urlretrieve
import tarfile
def validate_dir_state(target: Path) -> bool:
expected = [
'sphinx_revealjs',
]
actually = all([(target / e).exists() for e in expected])
return actually
def download_release(target: Path, version: str = '3.8.0') -> Path:
target.mkdir(exist_ok=True)
url = f"https://github.com/hakimel/reveal.js/archive/{version}.tar.gz"
dest = target / f"revealjs-{version}.tgz"
if not dest.exists():
urlretrieve(url, str(dest))
return dest
def extract_archive(target: Path) -> Path:
with tarfile.open(str(target)) as tr:
dir_name = tr.getmembers()[0].name
tr.extractall(str(target.parent))
return target.parent / dir_name
if __name__ == '__main__':
base_dir = Path.cwd()
valid = validate_dir_state(base_dir)
if not valid:
print('Nooo')
sys.exit(1)
downloaded = download_release(base_dir / 'var')
extracted = extract_archive(downloaded)
src_list = [
'css',
'js',
'lib',
'plugin',
'LICENSE',
]
dest_base = base_dir / 'sphinx_revealjs' \
/ 'themes' / 'sphinx_revealjs' / 'static' / 'revealjs'
for src_ in src_list:
src = extracted / src_
dest = dest_base / src_
if src.is_dir():
shutil.rmtree(dest)
shutil.copytree(src, dest)
else:
dest.unlink()
shutil.copy2(src, dest)
| [
"[email protected]"
] | |
d699aa415671a09c0d3cb6f790fbd8d199a1e504 | 7b6377050fba4d30f00e9fb5d56dfacb22d388e1 | /numericalFunctions/ptwXY/Python/Test/UnitTesting/convolution/convolution.py | 23e1f84ea78f302c6955c15e21ec6115a7eb5cc4 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | LLNL/fudge | 0a4fe8e3a68b66d58e42d1f4d209ea3f713c6370 | 6ba80855ae47cb32c37f635d065b228fadb03412 | refs/heads/master | 2023-08-16T21:05:31.111098 | 2023-08-01T22:09:32 | 2023-08-01T22:09:32 | 203,678,373 | 21 | 4 | NOASSERTION | 2023-06-28T20:51:02 | 2019-08-21T23:22:20 | Python | UTF-8 | Python | false | false | 3,194 | py | # <<BEGIN-copyright>>
# Copyright 2022, Lawrence Livermore National Security, LLC.
# See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
# <<END-copyright>>
import os
from numericalFunctions import pointwiseXY_C
if( 'CHECKOPTIONS' in os.environ ) :
options = os.environ['CHECKOPTIONS'].split( )
if( '-e' in options ) : print( __file__ )
CPATH = '../../../../Test/UnitTesting/convolution'
os.system( 'cd %s; ./convolution -v > v' % CPATH )
f = open( os.path.join( CPATH, 'v' ) )
ls = f.readlines( )
f.close( )
line = 1
def getIntegerValue( name, ls ) :
global line
s = "# %s = " % name
n = len( s )
if( ls[0][:n] != s ) : raise Exception( '%s: line at %s does not contain %s info: "%s"' % ( __file__, line, name, ls[0][:-1] ) )
value = int( ls[0].split( '=' )[1] )
line += 1
return( ls[1:], value )
def getDoubleValue( name, ls ) :
global line
s = "# %s = " % name
n = len( s )
if( ls[0][:n] != s ) : raise Exception( '%s: line at %s does not contain %s info: "%s"' % ( __file__, line, name, ls[0][:-1] ) )
value = float( ls[0].split( '=' )[1] )
line += 1
return( ls[1:], value )
def compareValues( label, i, v1, v2 ) :
sv1, sv2 = '%.12g' % v1, '%.12g' % v2
sv1, sv2 = '%.8g' % float( sv1 ), '%.8g' % float( sv2 )
if( sv1 != sv2 ) : print( '<%s> <%s>' % ( sv1, sv2 ) )
if( sv1 != sv2 ) : raise Exception( '%s: values %s %s diff by %g at %d for label = %s' % ( __file__, v1, v2, v2 - v1, i, label ) )
def getData( ls, accuracy ) :
global line
i = 0
for l in ls :
if( l.strip( ) != '' ) : break
i = i + 1
line += i
ls = ls[i:]
ls, length = getIntegerValue( 'length', ls )
data = [ list( map( float, ls[i].split( )[:2] ) ) for i in range( length ) ]
data = pointwiseXY_C.pointwiseXY_C( data, initialSize = len( data ), overflowSize = 10, accuracy = accuracy )
line += length
return( ls[length:], data )
def getDatas( ls ) :
global line
i = 0
for l in ls :
if( l.strip( ) != '' ) : break
i = i + 1
line += i
ls = ls[i:]
if( len( ls ) == 0 ) : return( ls )
if( ls[0][:9] == '# Area = ' ) : ls = ls[1:]
if( len( ls ) == 0 ) : return( ls )
label, ls = ls[0], ls[1:]
if( label[:10] != '# label = ' ) : raise Exception( '%s: invalid label = "%s"' % ( __file__, label[:-1] ) )
line += 1
label = label.split( '=' )[1].strip( )
ls, mode = getIntegerValue( 'mode', ls )
ls, accuracy = getDoubleValue( 'accuracy', ls )
ls, self = getData( ls, accuracy )
ls, other = getData( ls, accuracy )
ls, cConvolution = getData( ls, accuracy )
convolution = self.convolute( other, mode )
if( len( convolution ) != len( cConvolution ) ) : raise Exception( '%s: len( convolution ) = %d != len( cConvolution ) = %d for label "%s"' %
( __file__, len( convolution ), len( cConvolution ), label ) )
for i , dXY in enumerate( convolution ) :
gXY = cConvolution[i]
compareValues( label, i, dXY[0], gXY[0] )
compareValues( label, i, dXY[1], gXY[1] )
return( ls )
while( len( ls ) ) : ls = getDatas( ls )
| [
"[email protected]"
] | |
05a82e0894bf03670de9e8fdcc9e6f9ac9414455 | 322935e44777b28dc1153e2fb94f3b23b7669a83 | /train_values.py | 0f94cebd36a615f5e4432cc368cd028639a142bc | [
"Apache-2.0"
] | permissive | Neuraxio/seq2seq-signal-prediction | 96ac2711aac2a2d1273b80a3b0589697ef0c2faa | 740c4429d9f578a2d03b29b4ac14b6e2cc2015bc | refs/heads/master | 2021-07-15T22:12:38.492832 | 2020-05-22T18:47:20 | 2020-05-22T18:47:20 | 136,780,255 | 1 | 0 | null | 2018-06-10T04:55:01 | 2018-06-10T04:55:01 | null | UTF-8 | Python | false | false | 535 | py | def plot_metrics(metric_name, train_values, metric_validation, exercice_number):
print('last mse train: {}'.format(train_values[-1]))
print('best mse train: {}'.format(min(mse_train)))
mse_validation = 0
print('last mse validation: {}'.format(mse_validation[-1]))
print('best mse validation: {}'.format(min(mse_validation)))
plot_metric(
mse_train,
mse_validation,
xlabel='epoch',
ylabel='mse',
title='Exercice {} Model Mean Squared Error'.format(exercice_number)
) | [
"[email protected]"
] | |
054064211e3ee583ffcdc3963c1eace5582ed820 | addb660d485b60c793187ce7e6da7d095692956a | /main.py | d6e6abd3f2164a5ad1d801317de91f7a9d110ac0 | [] | no_license | RowanAI/JetsonPythonCamera | c29fc3746d7a089e3695e2f57bb63f4ae80e5a5f | 4174965914f0ba5938fe24db48b554ee484a9d41 | refs/heads/master | 2020-04-17T18:53:36.781950 | 2019-01-21T17:10:53 | 2019-01-21T17:10:53 | 166,845,571 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,121 | py | #!/usr/bin/env python
import sys
import argparse
import cv2
import numpy as np
import camera
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument("--video_device", dest="video_device",
help="Video device # of USB webcam (/dev/video?) [-1 for Jetson]",
default=-1, type=int)
arguments = parser.parse_args()
return arguments
def read_cam(video_capture):
if video_capture.isOpened():
windowName = "CannyDemo"
cv2.namedWindow(windowName, cv2.WINDOW_NORMAL)
cv2.resizeWindow(windowName, 1280, 720)
cv2.moveWindow(windowName, 0, 0)
cv2.setWindowTitle(windowName, "Canny Edge Detection")
showWindow = 3 # Show all stages
showHelp = True
font = cv2.FONT_HERSHEY_PLAIN
helpText = "'Esc' to Quit, '1' for Camera Feed, '2' for Canny Detection, '3' for All Stages. '4' to hide help"
edgeThreshold = 40
showFullScreen = False
while True:
# Check to see if the user closed the window
if cv2.getWindowProperty(windowName, 0) < 0:
# This will fail if the user closed the window; Nasties get printed to the console
break
ret_val, frame = video_capture.read()
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(hsv, (7, 7), 1.5)
edges = cv2.Canny(blur, 0, edgeThreshold)
if showWindow == 3: # Need to show the 4 stages
# Composite the 2x2 window
# Feed from the camera is RGB, the others gray
# To composite, convert gray images to color.
# All images must be of the same type to display in a window
frameRs = cv2.resize(frame, (640, 360))
hsvRs = cv2.resize(hsv, (640, 360))
vidBuf = np.concatenate(
(frameRs, cv2.cvtColor(hsvRs, cv2.COLOR_GRAY2BGR)), axis=1)
blurRs = cv2.resize(blur, (640, 360))
edgesRs = cv2.resize(edges, (640, 360))
vidBuf1 = np.concatenate((cv2.cvtColor(blurRs, cv2.COLOR_GRAY2BGR), cv2.cvtColor(
edgesRs, cv2.COLOR_GRAY2BGR)), axis=1)
vidBuf = np.concatenate((vidBuf, vidBuf1), axis=0)
if showWindow == 1: # Show Camera Frame
displayBuf = frame
elif showWindow == 2: # Show Canny Edge Detection
displayBuf = edges
elif showWindow == 3: # Show All Stages
displayBuf = vidBuf
if showHelp == True:
cv2.putText(displayBuf, helpText, (11, 20), font,
1.0, (32, 32, 32), 4, cv2.LINE_AA)
cv2.putText(displayBuf, helpText, (10, 20), font,
1.0, (240, 240, 240), 1, cv2.LINE_AA)
cv2.imshow(windowName, displayBuf)
key = cv2.waitKey(10)
if key == 27: # Check for ESC key
cv2.destroyAllWindows()
break
elif key == 49: # 1 key, show frame
cv2.setWindowTitle(windowName, "Camera Feed")
showWindow = 1
elif key == 50: # 2 key, show Canny
cv2.setWindowTitle(windowName, "Canny Edge Detection")
showWindow = 2
elif key == 51: # 3 key, show Stages
cv2.setWindowTitle(
windowName, "Camera, Gray scale, Gaussian Blur, Canny Edge Detection")
showWindow = 3
elif key == 52: # 4 key, toggle help
showHelp = not showHelp
elif key == 44: # , lower canny edge threshold
edgeThreshold = max(0, edgeThreshold-1)
print('Canny Edge Threshold Maximum: ', edgeThreshold)
elif key == 46: # , raise canny edge threshold
edgeThreshold = edgeThreshold+1
print('Canny Edge Threshold Maximum: ', edgeThreshold)
elif key == 74: # Toggle fullscreen; This is the F3 key on this particular keyboard
# Toggle full screen mode
if showFullScreen == False:
cv2.setWindowProperty(
windowName, cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)
else:
cv2.setWindowProperty(
windowName, cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_NORMAL)
showFullScreen = not showFullScreen
else:
print("camera open failed")
if __name__ == '__main__':
arguments = parse_cli_args()
print("Called with args:")
print(arguments)
print("OpenCV version: {}".format(cv2.__version__))
print("Device Number:", arguments.video_device)
if arguments.video_device == -1:
video_capture = camera.open_camera(device_number=None)
else:
video_capture = camera.open_camera(
device_number=arguments.video_device)
read_cam(video_capture)
video_capture.release()
cv2.destroyAllWindows()
| [
"[email protected]"
] | |
6b59d53ff5dca12c2cf49ecda84be12a1c60a12c | a3644ed207867df4d78a04af39ac3e26f86f9012 | /ibvp/language/symbolic/util.py | cf587104d319938fea973aba507443ccc906a896 | [
"MIT"
] | permissive | ibvp/ibvp | 006887be85a37ac4da51664d5fec9244c446cacd | c758b150cbd822bd17444499bea29c53b0606327 | refs/heads/master | 2022-05-07T02:17:46.232332 | 2022-03-20T19:34:13 | 2022-03-20T19:34:13 | 21,990,116 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,418 | py | from __future__ import division
from __future__ import absolute_import
from six.moves import range
__copyright__ = "Copyright (C) 2010-2013 Andreas Kloeckner"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import numpy as np
def pretty(expr):
from ibvp.language.symbolic.mappers import PrettyStringifyMapper
stringify_mapper = PrettyStringifyMapper()
from pymbolic.mapper.stringifier import PREC_NONE
result = stringify_mapper(expr, PREC_NONE)
splitter = "="*75 + "\n"
cse_strs = stringify_mapper.get_cse_strings()
if cse_strs:
result = "\n".join(cse_strs)+"\n"+splitter+result
return result
def join_fields(*args):
from pytools.obj_array import make_obj_array, log_shape
from pymbolic.geometric_algebra import MultiVector, bit_count
res_list = []
for arg in args:
if isinstance(arg, list):
res_list.extend(arg)
elif isinstance(arg, MultiVector):
for grade in arg.all_grades():
for bits in range(2**arg.space.dimensions):
if bit_count(bits) == grade:
res_list.append(arg.data.get(bits, 0))
elif isinstance(arg, np.ndarray):
if log_shape(arg) == ():
res_list.append(arg)
else:
res_list.extend(arg.flat)
else:
res_list.append(arg)
return make_obj_array(res_list)
| [
"[email protected]"
] | |
06683c64c9c082713d0b286d60bf3d006bef3569 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/NicolasHug_Surprise/Surprise-master/examples/grid_search_usage.py | f915af8c2eff0478eb4c7a991024a2a4e4aa1ff3 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 1,150 | py | """
This module describes how to manually train and test an algorithm without using
the evaluate() function.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from surprise import GridSearch
from surprise import SVD
from surprise import Dataset
param_grid = {'n_epochs': [5, 10], 'lr_all': [0.002, 0.005],
'reg_all': [0.4, 0.6]}
grid_search = GridSearch(SVD, param_grid, measures=['RMSE', 'FCP'])
# Prepare Data
data = Dataset.load_builtin('ml-100k')
data.split(n_folds=3)
grid_search.evaluate(data)
# best RMSE score
print(grid_search.best_score['RMSE'])
# >>> 0.96117566386
# combination of parameters that gave the best RMSE score
print(grid_search.best_params['RMSE'])
# >>> {'reg_all': 0.4, 'lr_all': 0.005, 'n_epochs': 10}
# best FCP score
print(grid_search.best_score['FCP'])
# >>> 0.702279736531
# combination of parameters that gave the best FCP score
print(grid_search.best_params['FCP'])
# >>> {'reg_all': 0.6, 'lr_all': 0.005, 'n_epochs': 10}
import pandas as pd # noqa
results_df = pd.DataFrame.from_dict(grid_search.cv_results)
print(results_df)
| [
"[email protected]"
] | |
5fce91bfbfe482e6893a7b74cbdac1ae14de5a2f | b68a11a2f0e978bce191999b9c66192abef2d47a | /src/optimize_anchor.py | 10049d5279ecbc8b7043e0c78034a973583ebb22 | [
"MIT"
] | permissive | cppxaxa/chainer-object-detection | e860349daabab29b32c3f894d2263523cb0bb1b9 | c662a9df05c969123e7eb687b4bda2ae28671b2b | refs/heads/master | 2021-06-24T21:14:27.179037 | 2017-09-06T03:43:10 | 2017-09-06T03:43:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,877 | py | import argparse
import json
import numpy as np
import os
import six
def parse_args():
parser = argparse.ArgumentParser('Optimize anchor boxes')
parser.add_argument('dataset_dir', type=str, help='Dataset directory path')
parser.add_argument('box_num', type=int, help='Number of boxes')
parser.add_argument('--category', '-c', type=str, default=None, help='Comma separated category names')
parser.add_argument('--scale', '-s', type=float, default=1, help='Scale factor of box size')
return parser.parse_args()
def find_files(root_dir, extensions):
for root, dirs, files in os.walk(root_dir):
for file_name in files:
base_name, ext = os.path.splitext(file_name)
if not ext in extensions:
continue
file_path = os.path.join(root, file_name)
yield file_path
def kmeans(ws, hs, box_num):
# initiailze anchor boxes
indices = np.random.randint(0, box_num, len(ws))
anchor_ws = []
anchor_hs = []
for i in six.moves.range(box_num):
anchor_ws.append(np.mean(ws[indices == i]))
anchor_hs.append(np.mean(hs[indices == i]))
anchor_ws = np.asarray(anchor_ws, dtype=np.float32)
anchor_hs = np.asarray(anchor_hs, dtype=np.float32)
ws = np.expand_dims(ws, 1)
hs = np.expand_dims(hs, 1)
anchor_ws = np.expand_dims(anchor_ws, 0)
anchor_hs = np.expand_dims(anchor_hs, 0)
for i in six.moves.range(100):
unions = np.maximum(ws, anchor_ws) * np.maximum(hs, anchor_hs)
intersections = np.minimum(ws, anchor_ws) * np.minimum(hs, anchor_hs)
ious = intersections / unions
indices = np.argmax(ious, axis=1)
for j in six.moves.range(box_num):
anchor_ws[0,j] = np.mean(ws[indices == j])
anchor_hs[0,j] = np.mean(hs[indices == j])
sorted_indices = np.argsort(anchor_hs[0])
return anchor_ws[0][sorted_indices], anchor_hs[0][sorted_indices]
def main():
args = parse_args()
if args.category is not None:
categories = args.category.split(',')
else:
categories = None
scale = args.scale
widths = []
heights = []
for file_path in find_files(args.dataset_dir, '.json'):
with open(file_path) as f:
annotation = json.load(f)
for region in annotation['regions']:
if categories is not None and not region['category'] in categories:
continue
x, y, w, h = region['bbox']
widths.append(w)
heights.append(h)
widths = np.asarray(widths, dtype=np.float32)
heights = np.asarray(heights, dtype=np.float32)
anchor_ws, anchor_hs = kmeans(widths, heights, args.box_num)
for i in six.moves.range(args.box_num):
print('[{0:.5f}, {1:.5f}],'.format(anchor_ws[i] * scale, anchor_hs[i] * scale))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
b58f1c2677efc0f912848a4f36547d99e1449cb4 | 680929c3bd7ccd8169f412dcd80dea9f4b10f3ed | /authentication/scripts/query.py | 522408c9f1cdc0f8b03d77d6ac95afdc0dd39e7f | [] | no_license | vivekverma080698/University-Management-System | 8e710f692b99028f46b3412743d24998635e2ce8 | 7e31ff5371b4376d0178e2d6163dbf9ad9a1720b | refs/heads/master | 2020-05-17T04:31:46.596919 | 2019-04-26T03:51:45 | 2019-04-26T03:51:45 | 181,276,691 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 308 | py | import os
import django
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'EMSystem.settings')
django.setup()
from authentication.models import AuthTable, Department, Employee, Faculty, Director, Hod, Registrar, Ccfs, Staff, Post, AssistRegistrar
def receive(request):
print(request.POST['a'])
receive() | [
"[email protected]"
] | |
38742a88ba0c5c08377e5077d759c8ccdcd8e974 | 70de99ce6c74255df7126143e18be3ebf542a215 | /src/pubmed.py | 5c981b27f1437ae096dbd69d3f146070b52cfaee | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | NETESOLUTIONS/bibliometrics | 1c9c1675a165b4facabe70c8b74d0668e43911c0 | 7bf2ef874c1573ea764cdb1a4838a66ddd083d9b | refs/heads/master | 2021-01-11T05:54:03.427396 | 2016-05-09T00:05:44 | 2016-05-09T00:05:44 | 54,979,564 | 0 | 0 | null | 2016-05-09T00:05:44 | 2016-03-29T13:49:05 | Python | UTF-8 | Python | false | false | 9,965 | py | from io import BytesIO
import re
from itertools import count
import requests
import requests_cache
import lxml.etree
import lxml.html
from util import has_keys, xpath_str, xpath_strs
def _split_range(n, m):
'''Given a range [0,m], return
an iterator of ranges ([0,n], [n,2n], [2n, 3n], ..., [in, m]).
Example: _split_range(15, 40) => ([0,15], [15, 30], [30, 40])'''
low = 0
for i in range(m / n + 1):
high = min(low + n, m)
yield (low, high)
low = high
def _ref_to_citmatch_str(ref, refkey):
'''Takes a ref (article data in a dictionary) and builds a citmatch string
(a PubMed query format). refkey is an arbitrary identifier for the given ref.'''
journal = ref.get('journal')
if journal == None: journal = ''
year = ref.get('year')
if year == None: year = ''
volume = ref.get('volume')
if volume == None: volume = ''
firstpage = ref.get('firstpage')
if firstpage == None: firstpage = ''
firstauthor = ref.get('authors', [['']])[0][0]
if firstauthor == None: firstauthor = ''
return journal + '|' + \
year + '|' + \
volume + '|' + \
firstpage + '|' + \
firstauthor + '|' + \
refkey + '|' + \
'%0D'
_pmid_re = re.compile(r'\d+')
def _ref_to_esearch_term(ref):
'''Takes a ref (article data in a dictionary) and builds an esearch term
(a PubMed query format).'''
title = ref['title']
if not 'authors' in ref or not ref['authors']:
return u'({title}[Title])'.format(title=title)
else:
author = ref['authors'][0][0]
return u'({title} [Title]) AND ({author} [Author - First])'.format(title=title, author=author)
class Client:
def __init__(self):
self.session = requests_cache.CachedSession('.req-cache')
self.session.mount('http://eutils.ncbi.nlm.nih.gov', requests.adapters.HTTPAdapter(max_retries=10))
self.session.mount('http://www.ncbi.nlm.nih.gov', requests.adapters.HTTPAdapter(max_retries=10))
self.xml_parser = lxml.etree.XMLParser(recover=True, encoding='utf-8')
self.html_parser = lxml.html.HTMLParser(recover=True, encoding='utf-8')
def _add_pmids_by_citmatch(self, refs):
'''Try to match the list of refs (dictionaries of article data) using the citmatch service.
If the ref is successfully matched, it will acquire a PMID attribute.'''
searchable_refs = [ref for ref in refs if not 'pmid' in ref and has_keys(ref, 'journal', 'year', 'volume', 'firstpage', 'authors')]
if not searchable_refs:
return
citmatch_str = '\n'.join([_ref_to_citmatch_str(ref, str(i)) for (ref, i) in zip(searchable_refs, count())])
req = self.session.get('http://eutils.ncbi.nlm.nih.gov/entrez/eutils/ecitmatch.cgi',
params={'db': 'pubmed', 'retmode': 'xml', 'bdata': citmatch_str})
pmids_raw = req.text
pmid_lines = pmids_raw.split('\n')
for pmid_line in pmid_lines:
pmid_line = pmid_line.strip()
if not pmid_line: continue
pieces = pmid_line.split('|')
pmid = pieces[-1].encode('utf-8')
index = pieces[-2]
index = int(index)
if _pmid_re.match(pmid):
searchable_refs[index]['pmid'] = pmid
def _add_pmid_by_author_title_scrape(self, ref):
'''Try to match the given ref (a dictionary of article data) by doing a standard
PubMed article search. If the match succeeds, the ref will acquire a PMID attribute.'''
esearch_term = _ref_to_esearch_term(ref)
req = self.session.get('http://www.ncbi.nlm.nih.gov/pubmed/', params={'term': esearch_term})
doc = lxml.html.document_fromstring(req.content, parser=self.html_parser)
idtag = doc.cssselect('.abstract .aux .rprtid .highlight')
if not idtag == []:
ref['pmid'] = idtag[0].text.encode('utf-8')
def _add_pmids(self, refs):
'''Takes a list of refs (dictionaries containing article data) and tries to
match their PMIDs. First it will use the citmatch method. If that fails, it will
try using the scraping method.'''
#print 'add pmids for %d refs' % len(refs)
for (lo, hi) in _split_range(50, len(refs)):
#print 'add pmids: %d to %d of %d' % (lo, hi, len(refs))
self._add_pmids_by_citmatch(refs[lo:hi])
for ref in refs:
if not 'pmid' in ref:
self._add_pmid_by_author_title_scrape(ref)
def add_pubmed_data(self, refs):
'''Takes a list of refs (dictionaries containing article data) and tries to add
as much information about them stored in PubMed.'''
self._add_pmids(refs)
refs_with_pmids = [ref['pmid'] for ref in refs if 'pmid' in ref]
if not refs_with_pmids: return #print '%d pmids found of %d refs' % (len(refs_with_pmids), len(refs))
for (lo, hi) in _split_range(100, len(refs_with_pmids)):
#print 'pubmed data: %d to %d of %d' % (lo, hi, len(refs_with_pmids))
pmids_str = ','.join(refs_with_pmids[lo:hi])
req = self.session.get('http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi',
params={'db': 'pubmed', 'id': pmids_str, 'rettype': 'xml'})
doc = lxml.etree.parse(BytesIO(req.content), self.xml_parser)
articles = doc.xpath('/PubmedArticleSet/PubmedArticle')
for article in articles:
pubmed_ref = _article_to_pubmed_ref(article)
ref = _dict_with_value(refs, 'pmid', pubmed_ref['pmid'])
ref.update(pubmed_ref)
def search_for_papers_by_author(self, author_name):
'''Return a list of refs (article data in dictionaries) written by the given author.'''
term = '"%s"[Author]' % author_name
req = self.session.get('http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi',
params={'db': 'pubmed', 'term': term, 'retmax': 100000})
doc = lxml.etree.parse(BytesIO(req.content), self.xml_parser)
pmids = doc.xpath('/eSearchResult/IdList/Id/text()')
refs = [{'pmid': unicode(pmid)} for pmid in pmids]
return refs
def num_papers_by_author(self, author_name):
'''Return the number of papers written by the given author.'''
term = '"%s"[Author]' % author_name
req = self.session.get('http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi',
params={'db': 'pubmed', 'term': term, 'retmax': 100000})
doc = lxml.etree.parse(BytesIO(req.content), self.xml_parser)
count = doc.xpath('/eSearchResult/Count/text()')
return int(count[0])
def search_for_papers(self, term):
'''Return a list of refs (article data in dictionaries) that match the given
PubMed query term.'''
req = self.session.get('http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi',
params={'db': 'pubmed', 'term': term, 'retmax': 100000})
doc = lxml.etree.parse(BytesIO(req.content), self.xml_parser)
pmids = doc.xpath('/eSearchResult/IdList/Id/text()')
refs = [{'pmid': unicode(pmid)} for pmid in pmids]
return refs
def _dict_with_value(ds, k, v):
'''Given a list of dictionaries (ds),
return the dictionary d such that d[k] == v.'''
for d in ds:
if k in d and d[k] == v:
return d
return None
def _article_to_pubmed_ref(article):
    '''Convert PubMed XML data about an article into a ref (dictionary containing the article data).
    The returned dictionary will contain this:
    {
    "pmid": a string containing the article's PMID
    "authors": a list of tuples (string, integer), where the first element is the author's name and the second is the author's affiliation if known (otherwise it's None)
    "institutions": a dictionary, where the key is an integer and value is a list of strings contanining the institutional hierarchy
    "title": the article's title
    "pubdate": an integer of the form 19850726 (i.e., 1985/07/26)
    "year": the publication year as integer
    "journal": a string
    "grantagencies": a list of strings
    "pubtypes": a list of strings specifying the publication types as per PubMed
    "meshterms": a nested list of strings specifying the MeSH terms as per PubMed
    }'''
    r = {}
    # The PMID lives in the PubmedData article-id list, tagged IdType='pubmed'.
    r['pmid'] = xpath_str(article, 'PubmedData/ArticleIdList/ArticleId[@IdType=\'pubmed\']/text()')
    institutions = {}
    authors = []
    for author in article.xpath('MedlineCitation/Article/AuthorList/Author'):
        lastname = xpath_str(author, 'LastName/text()')
        initials = xpath_str(author, 'Initials/text()')
        if lastname and initials:
            name = lastname + u' ' + initials
        else:
            # Skip collective / incomplete author entries that lack a name.
            continue
        institution_address = xpath_str(author, 'Affiliation/text()')
        # Institutions get a 1-based index; each author tuple references it.
        institution_index = len(institutions) + 1 if institution_address else None
        if institution_address:
            # NOTE(review): the stored value is a (address, None) tuple here,
            # while the docstring above promises a list of strings -- confirm
            # against consumers of r['institutions'].
            institutions[institution_index] = (institution_address, None)
        authors.append((name, institution_index))
    r['authors'] = authors
    r['institutions'] = institutions
    r['title'] = xpath_str(article, 'MedlineCitation/Article/ArticleTitle/text()')
    # Build the publication date as a YYYYMMDD string, zero-filling whatever
    # parts PubMed left out (missing day -> '00', missing month -> '0000').
    pubdate_str = u''
    pubdate_elem = article.xpath('PubmedData/History/PubMedPubDate[@PubStatus="pubmed"]')[0]
    pubdate_yr = xpath_str(pubdate_elem, 'Year/text()')
    if pubdate_yr:
        pubdate_str += pubdate_yr
        pubdate_mon = xpath_str(pubdate_elem, 'Month/text()')
        if pubdate_mon:
            pubdate_str += '%02d' % int(pubdate_mon)
            pubdate_day = xpath_str(pubdate_elem, 'Day/text()')
            if pubdate_day:
                pubdate_str += '%02d' % int(pubdate_day)
            else:
                pubdate_str += '00'
        else:
            pubdate_str += '0000'
    r['pubdate'] = int(pubdate_str) if pubdate_str else None
    # NOTE(review): this is the raw year string (or None), not an int as the
    # docstring claims -- confirm before relying on arithmetic with it.
    r['year'] = pubdate_yr
    r['journal'] = xpath_str(article, 'MedlineCitation/MedlineJournalInfo/MedlineTA/text()')
    r['grantagencies'] = xpath_strs(article, 'MedlineCitation/Article/GrantList[last()]/Grant/Agency/text()')
    r['pubtypes'] = xpath_strs(article, 'MedlineCitation/Article/PublicationTypeList/PublicationType/text()')
    # MeSH terms: one sub-list per heading (descriptor plus qualifiers).
    allterms = []
    for meshheading in article.xpath('MedlineCitation/MeshHeadingList/MeshHeading'):
        terms = xpath_strs(meshheading, 'DescriptorName/text() | QualifierName/text()')
        allterms.append(terms)
    r['meshterms'] = allterms
    return r
| [
"[email protected]"
] | |
661d6ab3aa7204d4ba70040676e93983eadf0b35 | 60ce2ef142dab58a092fc928136c6609127b2f5d | /async_instagram_private_api/core/repository.py | cd6ec09d157e2f80815b8c9360d353e88f09ee57 | [] | no_license | daymos92/async_instagram_private_api | be1270f7e89ec735411234c9e190c162fc160ce1 | 080b31cc4d74727df6df34865edbb371d31feb67 | refs/heads/master | 2022-12-10T23:01:31.169650 | 2020-09-10T18:18:54 | 2020-09-10T18:18:54 | 290,125,150 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 106 | py | from abc import ABC
class Repository(ABC):
    """Abstract base class for API repositories.

    Concrete repositories receive a shared client object and use it to
    perform their requests.
    """

    def __init__(self, client):
        # assumes `client` is the authenticated API client shared across
        # repositories -- TODO confirm against instantiating code.
        self.client = client
| [
"[email protected]"
] | |
bc42ef0c5dad5b26427a4460ec6e632112b7a11b | d11bfe38cf1e775ae39ad51ef0d8c3fbf246eea5 | /examples/data_collector.py | c2dd54f76c55a09f8bc669869a4563d55da11def | [
"MIT"
] | permissive | Bit4QU4/pyomyo | 733aff9c8de7de79f702a55f80a6d37c6f3ad09c | f5407888819324029a024451e46a80a5ead64a0a | refs/heads/main | 2023-08-24T04:23:22.757705 | 2021-10-05T14:07:44 | 2021-10-05T14:07:44 | 415,192,537 | 0 | 0 | MIT | 2021-10-09T03:35:26 | 2021-10-09T03:35:25 | null | UTF-8 | Python | false | false | 1,475 | py | # Simplistic data recording
import time
import multiprocessing
import numpy as np
import pandas as pd
from pyomyo import Myo, emg_mode
def data_worker(mode=emg_mode.FILTERED, seconds=15, filepath="data_gather.csv"):
    """Collect EMG frames from a Myo armband for `seconds` seconds and
    save them to `filepath` as an 8-channel CSV.

    :param mode: pyomyo EMG mode used to configure the device.
    :param seconds: how long to collect, in wall-clock seconds.
    :param filepath: destination CSV path.
    """
    collect = True
    # ------------ Myo Setup ---------------
    m = Myo(mode=mode)
    m.connect()
    myo_data = []
    def add_to_queue(emg, movement):
        # Called by pyomyo for every EMG frame; `movement` is unused here.
        myo_data.append(emg)
    m.add_emg_handler(add_to_queue)
    def print_battery(bat):
        print("Battery level:", bat)
    m.add_battery_handler(print_battery)
    # Its go time
    m.set_leds([0, 128, 0], [0, 128, 0])
    # Vibrate to know we connected okay
    m.vibrate(1)
    print("Data Worker started to collect")
    # Start collecting data until the time budget is exhausted.
    start_time = time.time()
    while collect:
        if (time.time() - start_time < seconds):
            # m.run() pumps one round of device events, which triggers the
            # EMG handler above.
            m.run()
        else:
            collect = False
    collection_time = time.time() - start_time
    print("Finished collecting.")
    print(f"Collection time: {collection_time}")
    print(len(myo_data), "frames collected")
    # Add columns and save to df
    myo_cols = ["Channel_1", "Channel_2", "Channel_3", "Channel_4", "Channel_5", "Channel_6", "Channel_7", "Channel_8"]
    myo_df = pd.DataFrame(myo_data, columns=myo_cols)
    myo_df.to_csv(filepath, index=False)
    print("CSV Saved at: ", filepath)
# -------- Main Program Loop -----------
if __name__ == '__main__':
    seconds = 10
    file_name = str(seconds) + "_test_emg.csv"
    # BUG FIX: the original passed args=(seconds, file_name) positionally,
    # which bound `seconds` to data_worker's `mode` parameter and
    # `file_name` to `seconds`.  Pass by keyword so each value reaches the
    # parameter it was meant for (mode keeps its default).
    p = multiprocessing.Process(target=data_worker,
                                kwargs={'seconds': seconds,
                                        'filepath': file_name})
    p.start()
"[email protected]"
] | |
e854ed4a3386c854b4fb23ef278a885098c04eaf | 2b49bf0b7b9a62eb665cb0da9a86d7c65433f8a2 | /Additional/206.Reverse Linked List.py | 06f7ef7c083a993eeb7cd52a2f6ada4422dd50d7 | [] | no_license | samuel871211/My-python-code | f7472fff671437d6181b91d36a77e24eb04678c6 | 3120cfb6ccaeade969dd0ea0ff335b4a5789ba74 | refs/heads/master | 2023-03-04T13:48:37.658549 | 2023-02-28T06:16:52 | 2023-02-28T06:16:52 | 210,172,178 | 3 | 5 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def reverseList(self, head: ListNode) -> ListNode:
a = []
while head != None:
a.append(head.val)
head = head.next
if len(a) == 0:
return None
else:
a.reverse()
newhead = ListNode(a[0])
cur = newhead
for i in range(1,len(a)):
cur.next = ListNode(a[i])
cur = cur.next
return newhead
| [
"[email protected]"
] | |
ed0e092db1e23abf296cbfe26bfe5ec704a4c85b | 220a814958d0d9ffaee98f6247d5c1a39b91e28a | /capstone/settings.py | db270d27a4c5bb3e632de58d2b8b108bc48d4bf6 | [] | no_license | MalteHildebrandt/MyRepository | f1c9e579b37a2c7f48d1344f282b9957d6088686 | 91bc1e8191db9adc470fcb0f2cb4a13596c1444f | refs/heads/master | 2021-09-28T19:37:43.416941 | 2021-04-18T16:07:00 | 2021-04-18T16:07:00 | 235,188,964 | 0 | 0 | null | 2021-09-22T19:52:32 | 2020-01-20T20:06:26 | Python | UTF-8 | Python | false | false | 3,652 | py | """
Django settings for workforce project.
Generated by 'django-admin startproject' using Django 3.1.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import django_heroku
import gunicorn
import psycopg2.extensions
import dj_database_url
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repository; rotate it and load
# it from an environment variable before any production deployment.
SECRET_KEY = '9ej)ry)*cx0)x0ox-dz85+q*8ji(((*j%v$@fgx3d^#*7q1mra'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list: with DEBUG=False Django would refuse to serve any host.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'staffplan',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# NOTE(review): the module docstring calls this the "workforce" project even
# though the settings file lives under "capstone" -- confirm the package name.
ROOT_URLCONF = 'workforce.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'workforce.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# local SQLite Config (default; overridden below when DATABASE_URL is set)
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# local Postgres Config
"""
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'workforce',
        'USER': 'postgres',
        'PASSWORD': 'Malte1824',
        'HOST': 'localhost',
        'PORT': '5432',
    }
}
"""
# production db config
# dj_database_url reads DATABASE_URL from the environment (Heroku style) and
# merges it over the SQLite default; no-op when the variable is absent.
db_from_env = dj_database_url.config(conn_max_age=600)
DATABASES['default'].update(db_from_env)
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
# Activate Django-Heroku.
# Mutates this module's globals (database, static files, logging) in place.
django_heroku.settings(locals())
"[email protected]"
] | |
23977d7878d871d7821160290ecdd4b6d6fd5e51 | 28c3a979f155bedabf72bd9e7284361a9bbdb762 | /flaskr/flaskr/flaskr.py | 8cba07b80b840050834787537326b7217e779c52 | [] | no_license | beinnor/misc-python | 33b194f6199bbef533a75de71ae5c8a6d9343e14 | 6a4409b8a20e55d6b44f5480f342cd2e8bfeb6a1 | refs/heads/master | 2021-06-04T20:58:46.955449 | 2018-04-14T23:46:28 | 2018-04-14T23:46:28 | 129,562,703 | 0 | 0 | null | 2021-04-20T17:22:54 | 2018-04-14T23:46:01 | Python | UTF-8 | Python | false | false | 2,660 | py | # all the imports
import os
import sqlite3
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
app = Flask(__name__) # create the application instance :)
app.config.from_object(__name__) # load config from this file, flaskr.py
# load default config and override-config from an environment variable
app.config.update(dict(
DATABASE=os.path.join(app.root_path, 'flaskr.db'),
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
app.config.from_envvar('FLASKR_SETTINGS', silent=True)
def connect_db():
    """Connects to the specific database."""
    connection = sqlite3.connect(app.config['DATABASE'])
    # sqlite3.Row lets templates address result columns by name.
    connection.row_factory = sqlite3.Row
    return connection
def init_db():
    """(Re)create the schema by executing schema.sql against the app database."""
    db = get_db()
    # open_resource resolves schema.sql relative to the application package.
    with app.open_resource('schema.sql', mode='r') as f:
        db.cursor().executescript(f.read())
    db.commit()
@app.cli.command('initdb')
def initdb_command():
    """Initializes the database."""
    # Exposed on the command line as `flask initdb`.
    init_db()
    print('Initialized the database.')
def get_db():
    """Opens a new database connection if there is none yet for the
    current application context.

    Returns the sqlite3 connection cached on flask.g, so repeated calls
    within one request share a single handle.
    """
    if not hasattr(g, 'sqlite_db'):
        g.sqlite_db = connect_db()
    return g.sqlite_db
@app.teardown_appcontext
def close_db(error):
    """Closes the database again at the end of the request."""
    # `error` is the unhandled exception (if any); the connection is closed
    # either way.
    if hasattr(g, 'sqlite_db'):
        g.sqlite_db.close()
@app.route('/')
def show_entries():
    """Render the index page with all entries, newest first."""
    cursor = get_db().execute('select title, text from entries order by id desc')
    return render_template('show_entries.html', entries=cursor.fetchall())
@app.route('/add', methods=['POST'])
def add_entry():
    """Insert a new entry; available only to a logged-in session."""
    if not session.get('logged_in'):
        abort(401)
    database = get_db()
    database.execute('insert into entries (title, text) values (?, ?)',
                     [request.form['title'], request.form['text']])
    database.commit()
    flash('New entry was successfully posted')
    return redirect(url_for('show_entries'))
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate against the single configured admin account."""
    error = None
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        if username != app.config['USERNAME']:
            error = 'Invalid username'
        elif password != app.config['PASSWORD']:
            error = 'Invalid password'
        else:
            session['logged_in'] = True
            flash('You were logged in')
            return redirect(url_for('show_entries'))
    # GET request, or POST with bad credentials: re-render the form.
    return render_template('login.html', error=error)
@app.route('/logout')
def logout():
    """Drop the login flag from the session and return to the index page."""
    # pop() with a default so logging out twice is harmless.
    session.pop('logged_in', None)
    flash('You were logged out')
    return redirect(url_for('show_entries'))
| [
"[email protected]"
] | |
87d8617072a506c92696bd2d28771c0581767428 | cc578cec7c485e2c1060fd075ccc08eb18124345 | /cs15211/FlattenNestedListIterator.py | a0f0bfde784f7bd127acc87b7ee70a319e0c47be | [
"Apache-2.0"
] | permissive | JulyKikuAkita/PythonPrac | 18e36bfad934a6112f727b4906a5e4b784182354 | 0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c | refs/heads/master | 2021-01-21T16:49:01.482561 | 2019-02-07T06:15:29 | 2019-02-07T06:15:29 | 91,907,704 | 1 | 1 | Apache-2.0 | 2019-02-07T06:15:30 | 2017-05-20T18:12:53 | Python | UTF-8 | Python | false | false | 5,253 | py | __source__ = 'https://leetcode.com/problems/flatten-nested-list-iterator/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/flatten-nested-list-iterator.py
# Time: O(n), n is the number of the integers.
# Space: O(h), h is the depth of the nested lists.
#
# Description: Leetcode # 341. Flatten Nested List Iterator
#
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
#class NestedInteger(object):
# def isInteger(self):
# """
# @return True if this NestedInteger holds a single integer, rather than a nested list.
# :rtype bool
# """
#
# def getInteger(self):
# """
# @return the single integer that this NestedInteger holds, if it holds a single integer
# Return None if this NestedInteger holds a nested list
# :rtype int
# """
#
# def getList(self):
# """
# @return the nested list that this NestedInteger holds, if it holds a nested list
# Return None if this NestedInteger holds a single integer
# :rtype List[NestedInteger]
# """
# Companies
# Google Facebook Twitter
# Related Topics
# Stack Design
# Similar Questions
# Flatten 2D Vector Zigzag Iterator Mini Parser Array Nesting
#
import unittest
class NestedIterator(object):
    # Lazy flattening: __depth is a stack of [nested_list, next_index] pairs,
    # one per nesting level currently being walked.  hasNext() advances the
    # stack until the top entry points at an integer; next() consumes it.
    def __init__(self, nestedList):
        """
        Initialize your data structure here.
        :type nestedList: List[NestedInteger]
        """
        self.__depth = [[nestedList, 0]]

    def next(self):
        """
        :rtype: int
        """
        # Contract: callers invoke hasNext() first, so the top of the stack
        # is guaranteed to point at an integer here.
        nestedList, i = self.__depth[-1]
        self.__depth[-1][1] += 1
        return nestedList[i].getInteger()

    def hasNext(self):
        """
        :rtype: bool
        """
        while self.__depth:
            nestedList, i = self.__depth[-1]
            if i == len(nestedList):
                # Exhausted this level; resume the parent list.
                self.__depth.pop()
            elif nestedList[i].isInteger():
                return True
            else:
                # Descend into a sub-list; mark it consumed at this level
                # before pushing so we don't revisit it after popping back.
                self.__depth[-1][1] += 1
                self.__depth.append([nestedList[i].getList(), 0])
        return False
# Your NestedIterator object will be instantiated and called as such:
# i, v = NestedIterator(nestedList), []
# while i.hasNext(): v.append(i.next())
class TestMethods(unittest.TestCase):
    """Placeholder test case; only verifies that the test harness runs."""

    def test_Local(self):
        # Trivial sanity assertion -- no real coverage of NestedIterator.
        self.assertEqual(1, 1)

if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought:
/**
* // This is the interface that allows for creating nested lists.
* // You should not implement it, or speculate about its implementation
* public interface NestedInteger {
*
* // @return true if this NestedInteger holds a single integer, rather than a nested list.
* public boolean isInteger();
*
* // @return the single integer that this NestedInteger holds, if it holds a single integer
* // Return null if this NestedInteger holds a nested list
* public Integer getInteger();
*
* // @return the nested list that this NestedInteger holds, if it holds a nested list
* // Return null if this NestedInteger holds a single integer
* public List<NestedInteger> getList();
* }
*/
# 3ms 94.48%
public class NestedIterator implements Iterator<Integer> {
private Stack<Iterator<NestedInteger>> stack;
Integer nextInteger;
public NestedIterator(List<NestedInteger> nestedList) {
stack = new Stack<>();
if(nestedList != null){
stack.push(nestedList.iterator());
}
}
@Override
public Integer next() {
return nextInteger;
}
@Override
public boolean hasNext() {
while(!stack.isEmpty()){
Iterator<NestedInteger> iter = stack.peek();
if(!iter.hasNext()){
stack.pop();
continue;
}
NestedInteger nextVal = iter.next();
if(nextVal.isInteger()){
nextInteger = nextVal.getInteger();
return true;
}else{
stack.push(nextVal.getList().iterator());
}
}
return false;
}
}
/**
* Your NestedIterator object will be instantiated and called as such:
* NestedIterator i = new NestedIterator(nestedList);
* while (i.hasNext()) v[f()] = i.next();
*/
# 2ms 100%
class NestedIterator implements Iterator<Integer> {
List<NestedInteger> nestedList;
List<Integer> list = new ArrayList<Integer>();
int index;
private void help(List<NestedInteger> input){
// List<Integer> res = new ArrayList<Integer>();
for(NestedInteger item : input){
if(item.isInteger()){
list.add(item.getInteger());
}else{
help(item.getList());
}
}
// System.out.println(res.toString());
// return res;
}
public NestedIterator(List<NestedInteger> nestedList) {
this.nestedList = nestedList;
index = 0;
help(nestedList);
}
@Override
public Integer next() {
return list.get(index++);
}
@Override
public boolean hasNext() {
if(index < list.size()){
return true;
}
return false;
}
}
''' | [
"[email protected]"
] | |
15b7110630874d11b613c384a2932880406f7171 | 8064d8f659683aef4864fc97a096fa346c1b73d3 | /main.py | 57f8c27f1b0b1b316052315793e1bf711319fa2b | [] | no_license | Mabedin00/polygons | 6546469068d8d9a72596db01f51d925b932bd19f | f886459b3f482c92e0fa6ea815945c4777ace0ed | refs/heads/master | 2021-05-17T01:55:55.976589 | 2020-03-29T16:47:11 | 2020-03-29T16:47:11 | 250,565,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | from display import *
from draw import *
from parser_s import *
from matrix import *
import math
# Set up the drawing surface and a default pen color (RGB: deep sky blue).
screen = new_screen()
color = [ 0,191,255]
# Shared geometry state the parser fills in as it executes commands.
edges = []
polygons = []
transform = new_matrix()
# Interpret the drawing commands stored in the file named 'script'.
parse_file( 'script', edges, polygons, transform, screen, color )
| [
"[email protected]"
] | |
265bacf4d9b77ac8cf00788613c415a06b5d30a5 | a8f6397d9eafbde47f0f98e10f0bdc1388c5267a | /apps/users/__init__.py | 6d50ec4556fd78576ad2bcf2ab64abe00f05ece3 | [] | no_license | wangruiyy/MxOnline | 52d5ff4d0a3488d00ba56782d63db4808af5b19f | ad93d58498e9a747173f42514e638a4920b2fc02 | refs/heads/master | 2021-04-03T06:27:06.407996 | 2018-03-10T13:14:15 | 2018-03-10T13:14:15 | 124,655,346 | 0 | 0 | null | 2018-03-10T13:14:16 | 2018-03-10T12:51:49 | null | UTF-8 | Python | false | false | 68 | py | # -*- coding:utf-8 -*-
default_app_config = 'users.apps.UsersConfig' | [
"[email protected]"
] | |
2660a093f4d3a91fac5365d58cf4d25ec3dfc0a2 | 352fc6108f4fe5a778b9d89e3a11781542ac6a5e | /buhgalteria/main.py | 09c2b3eada27990c486db7f6a8846a8d18fdde01 | [] | no_license | Iusar/pro_python_1 | 6419681fb7670fa5d4335a986282e4b6518e4b30 | 401dcabea014e5e824f23b5fb7bad9b01bc29968 | refs/heads/main | 2023-07-26T08:53:49.486999 | 2021-09-13T15:49:38 | 2021-09-13T15:49:38 | 403,737,593 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | import application.salary as sal
import application.db.people as pers
import datetime as d
# Main program
def main(number):
    """Look up employee `number` and print their salary with the query time."""
    print(f'{sal.person_salary(pers.find_person(number))} время запроса {d.datetime.now()}')


# Program entry point
if __name__ == '__main__':
    main(1)
| [
"[email protected]"
] | |
f3c90002c2a47d94fde1e5a1d4ba57adb52a7550 | 8e1e4382a7a013d6629de59c4d49746028c42e08 | /london/data/London/extras/make.py | d9d870ba727d1904a5a4b2e7ee043a74d969816f | [] | no_license | smckane/Pubtran-London | 56c48d30b1fcb170c77aeec0fc7d7df7f95a6ec9 | a2f7011cbfb19a071e02e52f80362622b30d199b | refs/heads/master | 2020-04-10T03:28:28.759720 | 2012-01-30T16:04:09 | 2012-01-30T16:04:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,254 | py | #!/usr/bin/env python
# coding=UTF-8
def findStation(station, stops):
    # Try the raw name plus the usual renaming variants, in priority order;
    # the first candidate present in `stops` wins.
    candidates = (
        station,
        station + ' Underground Station',
        station.replace(' and ', ' & '),
        station + ' (Central Line)',
        station.replace(' ', '-'),
        station.replace(' (H & C)', ' (Circle Line)'),
        station.replace('(District and Picc)', '(Dist & Pic lines)'),
    )
    for candidate in candidates:
        if candidate in stops:
            return candidate
    # Hand-maintained fixups for names that follow no general rule; these
    # are returned unconditionally, as in the original if-chain.
    overrides = {
        'Bromley-by-Bow': 'Bromley-By-Bow',
        'Heathrow Terminals 123': 'Heathrow Terminals 1-2-3 Underground Station',
        'Harrow and Wealdstone': 'Harrow & Wealdstone Underground Station',
        'St John\'s Wood': 'St.John\'s Wood',
        'Hammersmith': 'Hammersmith (H&C Line) Underground Station',
        'King\'s Cross St Pancras': 'King\'s Cross St.Pancras',
        'Edgware Road (Bakerloo)': 'Edgware Road (Bakerloo Line)',
    }
    return overrides.get(station)
lines = ['Bakerloo', 'Central', 'District', 'Hammersmith & City, Circle', 'Jubilee', 'Metropolitan', 'Northern', 'Piccadilly', 'Victoria', 'Waterloo & City']
circle = ['Aldgate', 'Baker Street', 'Barbican', 'Bayswater', 'Blackfriars', 'Cannon Street', 'Edgware Road (H & C)', 'Embankment', 'Euston Square', 'Farringdon', 'Gloucester Road', 'Great Portland Street', 'High Street Kensington', 'King\'s Cross St Pancras', 'Liverpool Street', 'Mansion House', 'Monument', 'Moorgate', 'Notting Hill Gate', 'Paddington', 'Sloane Square', 'South Kensington', 'St. James\'s Park', 'Temple', 'Tower Hill', 'Victoria', 'Westminster']
hammersmith = ['Aldgate East', 'Barbican', 'Barking', 'Bow Road', 'Bromley-by-Bow', 'East Ham', 'Edgware Road (H & C)', 'Euston Square', 'Farringdon', 'Goldhawk Road', 'Great Portland Street', 'Hammersmith', 'King\'s Cross St Pancras', 'Ladbroke Grove', 'Latimer Road', 'Liverpool Street', 'Mile End', 'Moorgate', 'Paddington', 'Plaistow', 'Royal Oak', 'Stepney Green', 'Upton Park', 'West Ham', 'Westbourne Park', 'Whitechapel']
# Parse the Trackernet station-code listing.  `code` tracks the current
# line's letter (set whenever a line-name heading is encountered) and
# `stations` maps station name -> "<3-letter code>:<line letters>".
code = ''
stations = {}
for line in open('stationCodes.txt'):
    line = line.strip()
    line = line.replace('’', '\'')
    line = line.replace('Trackernet Data Services Guide Beta 0', '')
    if line in lines: code = line[0]
    # Station rows look like "ABC Station Name": a code, a space, the name.
    if not (line[3] == ' ' and line[:2].upper() == line[:2]): continue
    if line[4:] == 'Olympia': continue
    code2 = code
    if code == 'H':
        # The 'H' heading covers both Circle ('c') and Hammersmith & City
        # ('H'); classify by the hand-maintained membership lists above.
        code2 = ''
        if line[4:] in circle: code2 = 'c'
        if line[4:] in hammersmith: code2 += 'H'
        if code2 == '': print 'ERROR - ' + line[4:]
    stations[line[4:]] = stations.get(line[4:], line[:3] + ':') + code2
# Load the GTFS stop names and report stations we cannot match.
stops = []
for line in open('../stops.txt'):
    stops.append(line.strip())
for key in stations.keys():
    if findStation(key, stops) == None: print key
# and / &, Debden, (Central Line), dashes instead of spaces, dot instead of space,
f = open('../extras.txt', 'w')
for key in stations.keys():
    station = findStation(key, stops)
    if station != None:
        f.write(station + ';' + stations[key] + '\n')
"[email protected]"
] | |
ff975e89943e61a080b93fd3e0356b80d1223b49 | 12258001571bd504223fbf4587870960fa93a46d | /mud/Spirit-0.4.7/spirit/__init__.py | a39206389c749ccbde799c5f371bf90e6be804da | [
"MIT"
] | permissive | Nik0las1984/mud-obj | 0bd71e71855a9b0f0d3244dec2c877bd212cdbd2 | 5d74280724ff6c6ac1b2d3a7c86b382e512ecf4d | refs/heads/master | 2023-01-07T04:12:33.472377 | 2019-10-11T09:10:14 | 2019-10-11T09:10:14 | 69,223,190 | 2 | 0 | null | 2022-12-26T20:15:20 | 2016-09-26T07:11:49 | Python | UTF-8 | Python | false | false | 88 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.4.7'
| [
"[email protected]"
] | |
4e6aad964d91dfc7d6221094475cf15efc7717ec | bdaf0a03348208b79b0986b00acf731293b7f297 | /SourceCodes/createPklCancer.py | d4791c64298f4681c758973baee453a7a280986f | [] | no_license | mutual-ai/Empirical-study-of-Supervised-Learning-Algorithms | 2855afa8c4820422a7018703509b21a4e90777f8 | b5f0e41940dcc68befaa3d4276b1c5ff8aa3a599 | refs/heads/master | 2020-05-27T05:53:23.970946 | 2017-02-08T23:13:45 | 2017-02-08T23:13:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,363 | py | from numpy import genfromtxt
import gzip, six.moves.cPickle as cPickle
from glob import glob
import numpy as np
import pandas as pd
import csv
import math
#Data = dir_to_dataset("C:\Users\Divya Chopra\Downloads\breast-cancer-wisconsin.csv")
# Data and labels are read.  Column 0 is the sample id, columns 1-9 the
# features and column 10 the class label (Wisconsin breast-cancer layout --
# presumably; confirm against the CSV).
Data = np.genfromtxt('C:\h\imagesFile.csv', delimiter=',', dtype='f8')[0:]
Data = Data[~np.isnan(Data).any(axis=1)]
# BUG FIX: shuffle exactly ONCE, while features and labels still share rows.
# The original shuffled Data a second time AFTER extracting y, which
# decoupled every feature row from its label.
np.random.shuffle(Data)
y = Data[:, 10]
X = Data[:, 1:10]
print("Y", y)
# 50% train+validation / 50% test; the first half is split 50/50 again.
# BUG FIX: the original sliced `[index+1:]`, silently dropping one row at
# each boundary, and took yValidation from the full label vector so the
# validation labels overlapped the test labels.
index2 = int(math.ceil(len(X) * 0.5))
index3 = int(math.ceil(index2 * 0.5))
train_set = X[:index3], y[:index3]
val_set = X[index3:index2], y[index3:index2]
test_set = X[index2:], y[index2:]
dataset = [train_set, val_set, test_set]
# Protocol 2 keeps the pickle loadable from Python 2 readers.
with gzip.open('C:\h\kfile.pkl.gz', 'wb') as f:
    cPickle.dump(dataset, f, protocol=2)
f.close() | [
"[email protected]"
] | |
c000191d786acda270210cf979a5fac23c9b5af0 | 479057ecbf4ea2885ba10175887dec025458d7e8 | /dvc/executor.py | e2becc48d330399a77f96077b8d2fab50ec45233 | [
"Apache-2.0"
] | permissive | benzei/dvc | e863859ffdad781c92a8705bdade6a62de01fc98 | fa12ef632a74e6e6b92edbd582319e03ca937738 | refs/heads/master | 2021-06-24T23:46:43.807485 | 2017-09-05T16:44:33 | 2017-09-05T16:44:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,825 | py | import subprocess
from dvc.exceptions import DvcException
class ExecutorError(DvcException):
    """Raised when an external command run by Executor fails."""

    def __init__(self, msg):
        super(ExecutorError, self).__init__(msg)
class Executor:
    """Thin wrapper around subprocess for running external commands."""

    @staticmethod
    def exec_cmd(cmd, stdout_file=None, stderr_file=None, cwd=None, shell=False):
        """Run `cmd` and return ``(returncode, stdout, stderr)``.

        stdout/stderr are captured and stripped of trailing newlines unless
        redirected to files via `stdout_file`/`stderr_file` ('-' means
        inherit the parent stream).  If the process cannot be spawned at
        all, returns ``(1, None, error message)``.
        """
        stdout, stdout_fd = Executor.output_file(stdout_file)
        stderr, stderr_fd = Executor.output_file(stderr_file)
        try:
            p = subprocess.Popen(cmd,
                                 cwd=cwd,
                                 stdout=stdout,
                                 stderr=stderr,
                                 shell=shell)
            # BUG FIX: the original called p.wait() before communicate().
            # With PIPE outputs that can deadlock once the child fills a
            # pipe buffer; communicate() both drains the pipes and waits.
            out, err = map(lambda s: s.decode().strip('\n\r') if s else '',
                           p.communicate())
            return p.returncode, out, err
        except Exception as ex:
            # Spawn failures (missing binary, bad cwd, ...) are reported as
            # a generic non-zero exit instead of propagating.
            return 1, None, str(ex)
        finally:
            if stderr_fd:
                stderr_fd.close()
            if stdout_fd:
                stdout_fd.close()

    @staticmethod
    def output_file(output_file, default_output=None):
        """Map an output spec to a Popen stdout/stderr argument.

        Returns ``(stream, fd_to_close)``: None -> capture via PIPE,
        '-' -> `default_output` (inherit by default), anything else is a
        path opened for writing whose handle the caller must close.
        """
        output_fd = None
        if output_file is not None:
            if output_file == '-':
                output = default_output
            else:
                output_fd = open(output_file, 'w')
                output = output_fd
        else:
            output = subprocess.PIPE
        return output, output_fd

    @staticmethod
    def exec_cmd_only_success(cmd, stdout_file=None, stderr_file=None, cwd=None, shell=False):
        """Run `cmd` and return its stdout; raise ExecutorError on non-zero exit."""
        code, out, err = Executor.exec_cmd(cmd, stdout_file=stdout_file,
                                           stderr_file=stderr_file, cwd=cwd, shell=shell)
        if code != 0:
            # NOTE(review): the message says "Git" for historical reasons but
            # this runs arbitrary commands; ' '.join also assumes cmd is a
            # list, not a shell string -- confirm callers.
            raise ExecutorError('Git command error ({}):\n{}'.format(' '.join(cmd), err))
        return out
return out
| [
"[email protected]"
] | |
d3d070c644f324b81f6c492f4cc9cd6582068417 | 5ac7bdec90c21a3da8fd5a1a684a80d202c30e8d | /openstack_dashboard/nikola_auth/views.py | 0f546a5d895bbcf30071ebe64326aa76c07ed578 | [
"Apache-2.0"
] | permissive | AlexOugh/horizon | 185aba38551ee15732a12f9690203d5383e03f70 | bda2a59aad7637f45211db37235ab18323e20b25 | refs/heads/master | 2021-01-16T18:45:36.289172 | 2015-02-10T23:58:16 | 2015-02-10T23:58:16 | 30,272,985 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,771 | py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import django
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.decorators import login_required # noqa
from django.contrib.auth import views as django_auth_views
from django import shortcuts
from django.utils import functional
from django.utils import http
from django.views.decorators.cache import never_cache # noqa
from django.views.decorators.csrf import csrf_protect # noqa
from django.views.decorators.debug import sensitive_post_parameters # noqa
from keystoneclient import exceptions as keystone_exceptions
from keystoneclient.v2_0 import client as keystone_client_v2
from nikola_auth import forms
# This is historic and is added back in to not break older versions of
# Horizon, fix to Horizon to remove this requirement was committed in
# Juno
from nikola_auth.forms import Login # noqa
from nikola_auth import user as auth_user
from nikola_auth import utils
try:
is_safe_url = http.is_safe_url
except AttributeError:
is_safe_url = utils.is_safe_url
LOG = logging.getLogger(__name__)
@sensitive_post_parameters()
@csrf_protect
@never_cache
def login(request, template_name=None, extra_context=None, **kwargs):
    """Logs a user in using the :class:`~nikola_auth.forms.Login` form."""
    # If the user is already authenticated, redirect them to the
    # dashboard straight away, unless the 'next' parameter is set as it
    # usually indicates requesting access to a page that requires different
    # permissions.
    if (request.user.is_authenticated() and
            auth.REDIRECT_FIELD_NAME not in request.GET and
            auth.REDIRECT_FIELD_NAME not in request.POST):
        return shortcuts.redirect(settings.LOGIN_REDIRECT_URL)
    # Get our initial region for the form.
    initial = {}
    current_region = request.session.get('region_endpoint', None)
    requested_region = request.GET.get('region', None)
    regions = dict(getattr(settings, "AVAILABLE_REGIONS", []))
    if requested_region in regions and requested_region != current_region:
        initial.update({'region': requested_region})
    if request.method == "POST":
        # NOTE(saschpe): Since https://code.djangoproject.com/ticket/15198,
        # the 'request' object is passed directly to AuthenticationForm in
        # django.contrib.auth.views#login:
        if django.VERSION >= (1, 6):
            form = functional.curry(forms.Login)
        else:
            form = functional.curry(forms.Login, request)
    else:
        form = functional.curry(forms.Login, initial=initial)
    if extra_context is None:
        extra_context = {'redirect_field_name': auth.REDIRECT_FIELD_NAME}
    # AJAX requests get the partial template rendered without chrome.
    if not template_name:
        if request.is_ajax():
            template_name = 'auth/_login.html'
            extra_context['hide'] = True
        else:
            template_name = 'auth/login.html'
    # Delegate the actual authentication flow to Django's login view.
    res = django_auth_views.login(request,
                                  template_name=template_name,
                                  authentication_form=form,
                                  extra_context=extra_context,
                                  **kwargs)
    # Set the session data here because django's session key rotation
    # will erase it if we set it earlier.
    if request.user.is_authenticated():
        auth_user.set_session_from_user(request, request.user)
        regions = dict(forms.Login.get_region_choices())
        region = request.user.endpoint
        region_name = regions.get(region)
        request.session['region_endpoint'] = region
        request.session['region_name'] = region_name
    return res
def logout(request, login_url=None, **kwargs):
    """Log out the user if they are logged in, then redirect to the log-in page.

    .. param:: login_url

      Once logged out, defines the URL where to redirect after login

    .. param:: kwargs

      see django.contrib.auth.views.logout_then_login extra parameters.
    """
    # Lazy %-style logging args: the message is only rendered when the
    # INFO level is actually enabled (the original built it eagerly).
    LOG.info('Logging out user "%(username)s".',
             {'username': request.user.username})
    endpoint = request.session.get('region_endpoint')
    token = request.session.get('token')
    # Best effort: invalidate the keystone token before the Django session
    # is dropped, so the scoped token cannot be replayed.
    if token and endpoint:
        delete_token(endpoint=endpoint, token_id=token.id)
    # FIX: the original had a stray triple-quoted string ("Securely logs a
    # user out.") sitting here as a no-op statement; folded into a comment.
    return django_auth_views.logout_then_login(request, login_url=login_url,
                                               **kwargs)
def delete_token(endpoint, token_id):
    """Invalidate *token_id* against the Identity endpoint (keystone v2 only).

    For keystone v3 this is a no-op apart from evicting the local project
    cache, because the v3 client does not expose token deletion yet.
    """
    skip_verify = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
    cacert_path = getattr(settings, "OPENSTACK_SSL_CACERT", None)
    utils.remove_project_cache(token_id)
    try:
        if utils.get_keystone_version() >= 3:
            # FIXME: KS-client does not have delete token available
            # Need to add this later when it is exposed.
            return
        client = keystone_client_v2.Client(
            endpoint=endpoint,
            token=token_id,
            insecure=skip_verify,
            cacert=cacert_path,
            debug=settings.DEBUG
        )
        client.tokens.delete(token=token_id)
        LOG.info('Deleted token %s' % token_id)
    except keystone_exceptions.ClientException:
        LOG.info('Could not delete token')
@login_required
def switch(request, tenant_id, redirect_field_name=auth.REDIRECT_FIELD_NAME):
    """Switches an authenticated user from one project to another.

    Re-scopes the user's token to *tenant_id* via keystone, deletes the
    previously scoped token (best effort), rebuilds the session user and
    finally redirects to a safe URL.
    """
    LOG.debug('Switching to tenant %s for user "%s".'
              % (tenant_id, request.user.username))
    insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
    ca_cert = getattr(settings, "OPENSTACK_SSL_CACERT", None)
    endpoint = request.user.endpoint
    try:
        # Keystone v3 endpoints must point at /v3; rewrite a /v2.0 URL once.
        if utils.get_keystone_version() >= 3:
            if not utils.has_in_url_path(endpoint, '/v3'):
                endpoint = utils.url_path_replace(endpoint, '/v2.0', '/v3', 1)
        client = utils.get_keystone_client().Client(
            tenant_id=tenant_id,
            token=request.user.token.id,
            auth_url=endpoint,
            insecure=insecure,
            cacert=ca_cert,
            debug=settings.DEBUG)
        auth_ref = client.auth_ref
        msg = 'Project switch successful for user "%(username)s".' % \
            {'username': request.user.username}
        LOG.info(msg)
    except keystone_exceptions.ClientException:
        msg = 'Project switch failed for user "%(username)s".' % \
            {'username': request.user.username}
        LOG.warning(msg)
        # auth_ref = None marks the switch as failed for the block below.
        auth_ref = None
        LOG.exception('An error occurred while switching sessions.')
    # Ensure the user-originating redirection url is safe.
    # Taken from django.contrib.auth.views.login()
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if not is_safe_url(url=redirect_to, host=request.get_host()):
        redirect_to = settings.LOGIN_REDIRECT_URL
    if auth_ref:
        # Drop the old scoped token unless it is the one we just received.
        old_endpoint = request.session.get('region_endpoint')
        old_token = request.session.get('token')
        if old_token and old_endpoint and old_token.id != auth_ref.auth_token:
            delete_token(endpoint=old_endpoint, token_id=old_token.id)
        user = auth_user.create_user_from_token(
            request, auth_user.Token(auth_ref), endpoint)
        auth_user.set_session_from_user(request, user)
    return shortcuts.redirect(redirect_to)
@login_required
def switch_region(request, region_name,
                  redirect_field_name=auth.REDIRECT_FIELD_NAME):
    """Switch the user's services region (Identity service excluded).

    The session is only updated when *region_name* is one of the regions
    available for the scoped project; otherwise the request is a no-op
    apart from the redirect.
    """
    available = request.user.available_services_regions
    if region_name in available:
        request.session['services_region'] = region_name
        LOG.debug('Switching services region to %s for user "%s".'
                  % (region_name, request.user.username))
    # Only follow user-supplied redirect targets that are safe for this host.
    target = request.REQUEST.get(redirect_field_name, '')
    if not is_safe_url(url=target, host=request.get_host()):
        target = settings.LOGIN_REDIRECT_URL
    return shortcuts.redirect(target)
| [
"[email protected]"
] | |
904ddc6a110c928eecd9ed053afa3bf80f4931a3 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/25/usersdata/98/11884/submittedfiles/av1_3.py | e38e0f0784c64456ff7dcadb762460593411b8a4 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
a=int(input('Digite o valor de a: '))
b=int(input('Digite o valor de b: '))
i=1
cont=0
c=0
while True:
if a%i==0 and b%i==0:
cont=cont+1
c=i
i=i+1
if i==a or i==b:
break | [
"[email protected]"
] | |
e50e19db7754f252118d5e3c69541abe67d0fdab | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/42/usersdata/69/21660/submittedfiles/jain.py | 34c02d431af79001b4eb9414ce0115cad59ff0fc | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,090 | py | # -*- coding: utf-8 -*-
from __future__ import division
import funcoes
'''
ENTRADA TESTE
f = 0.2
dH = 5
L = 3250
Q = 0.005
g = 9.81
v = 0.000001
e = 0.00006
k = 10
A saida para esta entrada é aproximadamente: 0.1247 (D) e 0.0224 (f)
'''
f = 0.2  # initial guess for the Darcy friction factor
dH = input('Digite a perda de carga: ')  # head loss
L = input('Digite o comprimento da tubulação: ')  # pipe length
Q = input('Digite a vazão: ')  # flow rate
g = input('Digite a gravidade: ')  # gravitational acceleration
v = input('Digite a viscosidade cinemática: ')  # kinematic viscosity
e = input('Digite a rugosidade absoluta: ')  # absolute roughness
k = 10  # maximum number of fixed-point iterations
# start here
# NOTE(review): bare input() for numbers suggests Python 2 (input() evals) —
# confirm the target interpreter.
import math
def diametro(fn, L, Q, dH):
    """Pipe diameter from the Darcy-Weisbach head-loss relation.

    D = (8 * fn * L * Q^2 / (pi^2 * g * dH)) ** (1/5); the gravitational
    acceleration ``g`` is read from module scope.
    """
    numerator = 8 * fn * L * Q * Q
    denominator = math.pi * math.pi * dH * g
    return (numerator / denominator) ** (1 / 5)
def Reynalds(Q, D, v):
    """Reynolds number for circular-pipe flow: Re = 4*Q / (pi * D * v)."""
    return (4 * Q) / (math.pi * D * v)
def atrito(Rey, E, D):
    """Explicit (Churchill-style) approximation of the Darcy friction factor.

    Combines a laminar term (64/Re)^8 with a turbulent term built from the
    relative roughness E/D and the Reynolds number ``Rey``.
    """
    relative = E / (3.7 * D) + 5.74 / (Rey ** 0.9)
    laminar = (64 / Rey) ** 8
    turbulent = (math.log(relative) - (2500 / Rey) ** 6) ** (-16)
    return (laminar + 9.5 * turbulent) ** 0.125
# Fixed-point iteration: alternate diameter and friction-factor estimates
# until the friction factor stops changing (at most k passes).
for i in range(0, k, 1):
    # BUG FIX: the original called diametro(fn, ...) but `fn` is only
    # assigned later in this loop body, so the very first iteration raised
    # NameError. The current friction-factor estimate is `f`.
    D = diametro(f, L, Q, dH)
    Rey = Reynalds(Q, D, v)
    fn = atrito(Rey, e, D)
    # Only accept the update inside the validity range of the correlation.
    if 0.000001 <= (e / D) <= 0.01 and 5000 <= Rey <= 100000000:
        if fn == f:
            break
        else:
            f = fn
print('%.10f'%f)
print('%.10f'%D)
"[email protected]"
] | |
b991927ecdb38b810321296465c42532c63cc33a | c64bafe65cfb67823e90cd78035131b7234fb356 | /LabWork2/tests/logger/TestLoggerMethods.py | 7989984af405c8a96db4c48d5ee5c21436b8ea98 | [] | no_license | AlexFridman/EHI | 7e5144953501554d89499b2edf82d9527a6a4605 | ab75bdf9d3760acd6c63bbe1968f2cd9ca140db2 | refs/heads/master | 2020-05-29T12:32:26.853925 | 2015-12-25T17:08:37 | 2015-12-25T17:08:37 | 44,743,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 819 | py | __author__ = 'AlexF'
import unittest
from LabWork2.code.logger.logger import Logger
class TestLoggerMethods(unittest.TestCase):
    """Unit tests for the call-logging mixin ``Logger``."""

    def test_logging(self):
        """A logged call is rendered with the default format string."""
        class SimpleClass(Logger):
            def test(self, x, y=5):
                return x + y

        subject = SimpleClass()
        subject.test(5)
        self.assertEqual('test([5, {}]) = 10', str(subject))

    def test_custom_logging_format(self):
        """A format_str given to Logger.__init__ overrides the default."""
        class SimpleClass(Logger):
            def __init__(self):
                super(SimpleClass, self).__init__(format_str=']{0}[ ]{1}[ = {2}')

            def test(self, x, y=5):
                return x + y

        subject = SimpleClass()
        subject.test(5)
        self.assertEqual(']test[ ][5, {}][ = 10', str(subject))
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
1c7c9c9ea04789b3d3a412a11d1d58d9df296e09 | b757f90af4e022e470e2c85461866eb9e992c7d9 | /shared_utils/load_json_data.py | 496935666a7523e54e15631d931ac0f514d9ddf1 | [] | no_license | mermi/events-pipeline | a985ddd6c857dece97a84997fc937f031cd2d83b | 737d34db1fa1188695cb37ddc22472a0109844a8 | refs/heads/main | 2023-05-01T23:03:46.027667 | 2021-05-26T20:10:04 | 2021-05-26T20:10:04 | 370,645,906 | 1 | 0 | null | 2021-05-25T12:59:44 | 2021-05-25T10:00:41 | Shell | UTF-8 | Python | false | false | 721 | py | import logging
from sqlalchemy import create_engine
import json
from pandas.io.json import json_normalize
from shared_utils.dag_utils.utils import get_query as get_query
# Module-level logger for the load steps. FIX: the original used an
# f-string with no placeholders (f'logging_steps'), which is just noise.
logger = logging.getLogger('logging_steps')
def create_table(conn, create_query):
    """Execute a DDL statement on *conn* and commit the transaction."""
    cursor = conn.cursor()
    cursor.execute(create_query)
    conn.commit()
def load_json_postgres(conn, create_query):
    """Create the target table, then bulk-load the sample events JSON into it.

    NOTE(review): ``conn`` is used both as a DBAPI connection (passed to
    create_table, which calls .cursor()) and as a SQLAlchemy URL (passed to
    create_engine) — one of the two is almost certainly wrong; confirm what
    callers actually pass in.
    """
    create_table(conn, create_query)
    with open('shared_utils/data/flink_data_engieering_sample_data.json', 'r') as data_file:
        data = json.load(data_file)
    # Flatten the nested JSON records into a tabular DataFrame.
    df = json_normalize(data)
    engine = create_engine(conn)
    # NOTE(review): the schema is embedded in the table name; pandas.to_sql
    # treats the whole string "stafing.events" as the table name unless a
    # separate schema= argument is given — and "stafing" looks like a typo
    # for "staging". Confirm against the database.
    df.to_sql("stafing.events", engine, index=False, if_exists='append')
    logger.info("table created")
| [
"[email protected]"
] | |
def solution(n, s, a, b, fares):
    """Cheapest taxi fare: ride together from s to some k, then split to a and b.

    Runs Floyd-Warshall over the undirected fare graph of n nodes (1-based)
    and minimises dist(s, k) + dist(k, a) + dist(k, b) over every split
    point k.
    """
    INF = 1e9
    dist = [[INF] * (n + 1) for _ in range(n + 1)]
    # Undirected edges: same fare in both directions.
    for u, w, cost in fares:
        dist[u][w] = cost
        dist[w][u] = cost
    for node in range(1, n + 1):
        dist[node][node] = 0
    # Floyd-Warshall all-pairs shortest paths.
    for k in range(1, n + 1):
        for i in range(1, n + 1):
            for j in range(1, n + 1):
                through = dist[i][k] + dist[k][j]
                if through < dist[i][j]:
                    dist[i][j] = through
    # Try every node as the point where the shared ride splits.
    return min(dist[s][k] + dist[k][a] + dist[k][b] for k in range(1, n + 1))
"[email protected]"
] | |
99e71c6c12890b685bb63664cdf01f707beae10a | 1e127a1f29f34bfee095235e731aff898750cd8c | /execute_bilstm_conditional.py | c7b3f12640412d4f3767dc77d901a7017f69fe7d | [] | no_license | malvika-hp/stance-detection-in-news | 2dd5c6cff18740d1d638ad6a05a4262171651d90 | f870db3c78b3cc4755eeac91cd1e54f2649ff009 | refs/heads/master | 2020-04-20T03:28:03.004676 | 2019-03-26T00:30:57 | 2019-03-26T00:30:57 | 168,599,040 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,016 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
######
# Execution script for the conditional LSTM with attention
# Based on starter code from PS3-CS224n
######
## General libraries
import tensorflow as tf
import numpy as np
import random
from tensorflow import keras
## Our Own Code
from BILSTM_conditional import LSTMCondModel
from run_text_processing import save_data_pickle, get_data
from our_util import Progbar, minibatches, pack_labels, split_data, softmax, get_performance, convertOutputs, downsample_label, split_indices
from tensorflow.python.tools import inspect_checkpoint as chkp
def run_save_data_pickle():
    """Tokenize the corpus and cache headline/body id arrays (needs NLTK)."""
    save_data_pickle(
        outfilename = '/../../glove/twitter50d_h_ids_b_ids_pickle.p',
        embedding_type = 'twitter.27B.50d',
        parserOption = 'nltk',
    )
def run_lstm_conditional(config, split = True, outputpath = '../../xp', final = False):
    """Train the conditional BiLSTM on the stance data and report dev metrics.

    Loads embeddings/data, optionally subsamples and truncates sequences,
    splits into train/dev/test, then builds and fits LSTMCondModel inside a
    fresh TF graph, checkpoints the session, and writes results to csv.
    NOTE(review): the `print '...'` statement below is Python 2 syntax — this
    module appears to target Python 2; confirm before porting.
    """
    ## Get data
    config, data_dict = get_data(config,
                                 filename_embeddings = '/../../glove/glove.twitter.27B.50d.txt',
                                 pickle_path = '/../../glove/twitter50d_h_ids_b_ids_pickle.p',
                                 concat = False)
    ## pass data into local namespace:
    y = data_dict['y']
    h = data_dict['h_np']
    b = data_dict['b_np']
    print("===========Number of head=========", len(h))
    print("===========Number of body=========", len(b))
    h_len = data_dict['h_seqlen']
    b_len = data_dict['b_seqlen']
    # Do shortening of dataset ## affects number of samples and max_len.
    if config.num_samples is not None:
        ## Random seed
        np.random.seed(1)
        # NOTE(review): random.shuffle on a range object only works on
        # Python 2 (where range returns a list) — confirm interpreter.
        ind = range(np.shape(h)[0])
        random.shuffle(ind)
        indices = ind[0:config.num_samples ]
        h = h[indices,:]
        b = b[indices,:]
        h_len = h_len[indices]
        b_len = b_len[indices]
        y = y[indices]
    # Truncate headlines and bodies
    if config.h_max_len is not None:
        h_max_len = config.h_max_len
        if np.shape(h)[1] > h_max_len:
            h = h[:, 0:h_max_len]
            h_len = np.minimum(h_len, h_max_len)
    if config.b_max_len is not None:
        b_max_len = config.b_max_len
        if np.shape(b)[1] > b_max_len:
            b = b[:, 0:b_max_len]
            b_len = np.minimum(b_len, b_max_len)
    if split:
        # Split data
        train_indices, dev_indices, test_indices = split_indices(np.shape(h)[0])
        # Divide data
        train_h = h[train_indices,:]
        train_b = b[train_indices,:]
        train_h_len = h_len[train_indices]
        train_b_len = b_len[train_indices]
        train_y = y[train_indices]
        # test
        dev_h = h[dev_indices,:]
        dev_b = b[dev_indices,:]
        dev_h_len = h_len[dev_indices]
        dev_b_len = b_len[dev_indices]
        dev_y = y[dev_indices]
        # Final run: train on train+dev and evaluate on the held-out test set.
        if final:
            # Combine train and dev
            train_dev_indices = train_indices + dev_indices
            train_h = h[train_dev_indices,:]
            train_b = b[train_dev_indices,:]
            train_h_len = h_len[train_dev_indices]
            train_b_len = b_len[train_dev_indices]
            train_y = y[train_dev_indices]
            # Set dev to test
            dev_h = h[test_indices,:]
            dev_b = b[test_indices,:]
            dev_h_len = h_len[test_indices]
            dev_b_len = b_len[test_indices]
            dev_y = y[test_indices]
    ## Passing parameter_dict to config settings
    ## Changes to config based on data shape
    assert(np.shape(train_h)[0] == np.shape(train_b)[0] == np.shape(train_y)[0] == np.shape(train_h_len)[0] == np.shape(train_b_len)[0])
    config.num_samples = np.shape(train_h)[0]
    config.h_max_len = np.shape(train_h)[1]
    config.b_max_len = np.shape(train_b)[1]
    ## Start Tensorflow!
    print('Starting TensorFlow operations')
    print 'With hidden layers: ', config.n_layers ## hidden layer?
    with tf.Graph().as_default():
        # Fixed graph seed for reproducibility.
        tf.set_random_seed(1)
        model = LSTMCondModel(config)
        # saver = tf.train.Saver()
        saver = tf.train.Saver(tf.global_variables())
        init = tf.global_variables_initializer()
        with tf.Session() as session:
            session.run(init)
            losses_ep, dev_performances_ep, dev_predicted_classes_ep, dev_predictions_ep = model.fit(session, train_h, train_b, train_h_len, train_b_len, train_y, dev_h, dev_b, dev_h_len, dev_b_len, dev_y) #M
            save_path = saver.save(session, "model/model.ckpt")
            print("Model saved in file: %s" % save_path)
    # Write results to csv
    convertOutputs(outputpath, config, losses_ep, dev_performances_ep)
    print('Losses ', losses_ep)
    print('Dev Performance ', dev_performances_ep)
    return losses_ep, dev_predicted_classes_ep, dev_performances_ep
## for debugging
# Debugging entry point.
if __name__ == "__main__":
    print('Doing something!')
    run_save_data_pickle()
    # NOTE(review): `run_bow` is not defined anywhere in this module, so this
    # line raises NameError at runtime; presumably run_lstm_conditional (with
    # a config) was intended — confirm before relying on this script.
    losses, dev_predicted_classes, dev_performance = run_bow(num_samples = 1028)
    print('Execution Complete')
| [
"[email protected]"
] | |
3110e22e2cd31de927f5d7e9bd160edb5ac1ccdf | ccfe64ba6d63f2668a7338398a020c0b64172e0e | /firestore.py | eb0e9de70e108a40fbdb3dfb8d535b82bb14096a | [] | no_license | saad277/Python-Firebase | dd31580b367647e3fe48b87299504082aa152c7a | fb1b753da808d7bcb1a5f843acf2c8c206cd4979 | refs/heads/master | 2021-05-20T09:06:52.232643 | 2020-04-28T20:00:11 | 2020-04-28T20:00:11 | 252,214,577 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | #pip install firebase_admin
import firebase_admin
from firebase_admin import credentials
from firebase_admin import firestore
# Authenticate with a local service-account key file and initialise the SDK.
cred=credentials.Certificate("service_account.json")
firebase_admin.initialize_app(cred)
db=firestore.client();
ref=db.collection("food");
# Stream every document in the "food" collection and collect them as dicts.
docs=ref.stream()
data=[]
for doc in docs:
    #print(doc.id)
    data.append(doc.to_dict())
# NOTE(review): raises IndexError if the collection is empty and KeyError if
# the first document has no 'image' field — confirm this is acceptable for a
# demo script.
print(data[0]['image'])
| [
"[email protected]"
] | |
45548b72b1271b8770482dfa3c14105e12bc21b8 | adafa4f13a450ff4205241650f842e211781322c | /products/models.py | ec8f9260fabdf2719050db253a0dd80a77358751 | [] | no_license | GladkikhAnton/WebSitePractice | b5c6f7455b9f1b3621e637e6995292f07c8c0bff | 6884d7765fd016d1fd3c79e6e3f6dbfc17e6fee6 | refs/heads/master | 2021-03-23T17:08:21.277864 | 2020-03-15T13:29:22 | 2020-03-15T13:29:22 | 247,470,727 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,959 | py | from django.db import models
class TypeOfProduct(models.Model):
    """A category that groups products (referenced by Product.type)."""
    # Optional display name; nullable, so __str__ may render as "None".
    name = models.CharField(max_length=32, blank=True, null=True, default=None)
    # Creation/modification timestamps maintained automatically by Django.
    created = models.DateTimeField(auto_now_add=True, auto_now=False)
    updated = models.DateTimeField(auto_now_add=False, auto_now=True)
    def __str__(self):
        return "%s" % self.name
    class Meta:
        # Admin display names (Russian: "Product type" / "Product types").
        verbose_name="Тип товара"
        verbose_name_plural="Типы товаров"
class Product(models.Model):
    """A sellable item, optionally categorised by TypeOfProduct."""
    name = models.CharField(max_length=32, blank=True, null=True, default=None)
    description = models.TextField(blank=True, null=True, default=None)
    # PROTECT: a type cannot be deleted while products still reference it.
    type = models.ForeignKey(TypeOfProduct, blank=True, null=True, default=None, on_delete=models.PROTECT)
    cost = models.PositiveSmallIntegerField(default=0)
    created = models.DateTimeField(auto_now_add=True, auto_now=False)
    updated = models.DateTimeField(auto_now_add=False, auto_now=True)
    # Admin customisation: human-readable object label.
    def __str__(self):
        return "%s" % self.name
    # Admin customisation: singular and plural display names.
    class Meta:
        verbose_name="Товар"
        verbose_name_plural="Товары"
class ProductImage(models.Model):
    """A photo attached to a Product (files stored under product_images/)."""
    # PROTECT: a product cannot be deleted while images still reference it.
    product= models.ForeignKey(Product, blank=True, null=True, default=None, on_delete=models.PROTECT)
    image = models.ImageField(upload_to='product_images/')
    created = models.DateTimeField(auto_now_add=True, auto_now=False)
    updated = models.DateTimeField(auto_now_add=False, auto_now=True)
    # Admin customisation: label objects by their image path.
    def __str__(self):
        return "%s" % self.image
    # Admin customisation: singular and plural display names.
    class Meta:
        verbose_name="Фотография"
        verbose_name_plural="Фотографии"
| [
"[email protected]"
] | |
edf8538fd32c1becb17b39f2cd1cc4dae63a0763 | 652b72b566a84dbd0e667a86759ec5ee793219e0 | /App/carmanager/admin.py | 76a53766630651aae2e3a31a0b00cfa2fd7d7c65 | [] | no_license | RaisaKulakovska/Some-Django-Project | 05a9b0ef376751fbe6d25f2d5d06471bfd84e6be | 9f42e8a739180fd31adca55ebd559539f59f466c | refs/heads/master | 2021-04-03T12:07:13.132940 | 2020-03-31T11:16:17 | 2020-03-31T11:16:17 | 248,351,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from django.contrib import admin
from .models import CarManager
# Expose the CarManager model in the Django admin site.
admin.site.register(CarManager)
| [
"[email protected]"
] | |
b7626825dc0410ead2b494a5a21db36f16d6b4a1 | 6088e13467554ee0dc90f7964dabfd53661b4b99 | /compare_other/darkflow_train.py | fc1709189420610caa6c38da2f880fa7b8bf6c59 | [] | no_license | chadrick-kwag/yolov2-fromscratch | 186f7da39806526e1957dc575221567b590c8c54 | 6cdbd1d75116c5aac39a5cb0dd134b26e084c832 | refs/heads/master | 2020-03-11T12:24:16.312566 | 2018-06-28T15:57:40 | 2018-06-28T15:57:40 | 129,996,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,053 | py | import tensorflow.contrib.slim as slim
import pickle
import tensorflow as tf
from ..yolo.misc import show
import numpy as np
import os
import math
def expit_tensor(x):
    """Element-wise logistic sigmoid, 1 / (1 + e^(-x)), built from TF ops."""
    exp_neg_x = tf.exp(-x)
    return 1. / (1. + exp_neg_x)
def loss(self, net_out):
    """Build the YOLOv2 training loss from the raw network output.

    Creates the feed placeholders (stored in self.placeholders), decodes
    the predicted boxes using the anchor priors, computes IOU against the
    ground-truth boxes to select the responsible anchor per grid cell, and
    finally sets self.loss to the weighted squared-error objective.
    """
    # meta
    m = self.meta
    # Per-term loss weights from the model config.
    sprob = float(m['class_scale'])
    sconf = float(m['object_scale'])
    snoob = float(m['noobject_scale'])
    scoor = float(m['coord_scale'])
    H, W, _ = m['out_size']
    B, C = m['num'], m['classes']
    HW = H * W # number of grid cells
    anchors = m['anchors']
    print('{} loss hyper-parameters:'.format(m['model']))
    print('\tH = {}'.format(H))
    print('\tW = {}'.format(W))
    print('\tbox = {}'.format(m['num']))
    print('\tclasses = {}'.format(m['classes']))
    print('\tscales = {}'.format([sprob, sconf, snoob, scoor]))
    size1 = [None, HW, B, C]
    size2 = [None, HW, B]
    # return the below placeholders
    _probs = tf.placeholder(tf.float32, size1)
    _confs = tf.placeholder(tf.float32, size2)
    _coord = tf.placeholder(tf.float32, size2 + [4])
    # weights term for L2 loss
    _proid = tf.placeholder(tf.float32, size1)
    # material calculating IOU
    _areas = tf.placeholder(tf.float32, size2)
    _upleft = tf.placeholder(tf.float32, size2 + [2])
    _botright = tf.placeholder(tf.float32, size2 + [2])
    self.placeholders = {
        'probs':_probs, 'confs':_confs, 'coord':_coord, 'proid':_proid,
        'areas':_areas, 'upleft':_upleft, 'botright':_botright
    }
    # Extract the coordinate prediction from net.out
    net_out_reshape = tf.reshape(net_out, [-1, H, W, B, (4 + 1 + C)])
    coords = net_out_reshape[:, :, :, :, :4]
    coords = tf.reshape(coords, [-1, H*W, B, 4])
    # Decode: sigmoid for the cell-relative x,y; exp * anchor (normalised by
    # grid size) for w,h — stored as sqrt so the loss penalises sqrt(w),sqrt(h).
    adjusted_coords_xy = expit_tensor(coords[:,:,:,0:2])
    adjusted_coords_wh = tf.sqrt(tf.exp(coords[:,:,:,2:4]) * np.reshape(anchors, [1, 1, B, 2]) / np.reshape([W, H], [1, 1, 1, 2]))
    coords = tf.concat([adjusted_coords_xy, adjusted_coords_wh], 3)
    # Objectness confidence (sigmoid) and class probabilities (softmax).
    adjusted_c = expit_tensor(net_out_reshape[:, :, :, :, 4])
    adjusted_c = tf.reshape(adjusted_c, [-1, H*W, B, 1])
    adjusted_prob = tf.nn.softmax(net_out_reshape[:, :, :, :, 5:])
    adjusted_prob = tf.reshape(adjusted_prob, [-1, H*W, B, C])
    adjusted_net_out = tf.concat([adjusted_coords_xy, adjusted_coords_wh, adjusted_c, adjusted_prob], 3)
    # Predicted box corners in grid units (square the stored sqrt-w/h first).
    wh = tf.pow(coords[:,:,:,2:4], 2) * np.reshape([W, H], [1, 1, 1, 2])
    area_pred = wh[:,:,:,0] * wh[:,:,:,1]
    centers = coords[:,:,:,0:2]
    floor = centers - (wh * .5)
    ceil = centers + (wh * .5)
    # calculate the intersection areas
    intersect_upleft = tf.maximum(floor, _upleft)
    intersect_botright = tf.minimum(ceil , _botright)
    intersect_wh = intersect_botright - intersect_upleft
    intersect_wh = tf.maximum(intersect_wh, 0.0)
    intersect = tf.multiply(intersect_wh[:,:,:,0], intersect_wh[:,:,:,1])
    # calculate the best IOU, set 0.0 confidence for worse boxes
    iou = tf.truediv(intersect, _areas + area_pred - intersect)
    best_box = tf.equal(iou, tf.reduce_max(iou, [2], True))
    best_box = tf.to_float(best_box)
    confs = tf.multiply(best_box, _confs)
    # take care of the weight terms
    conid = snoob * (1. - confs) + sconf * confs
    weight_coo = tf.concat(4 * [tf.expand_dims(confs, -1)], 3)
    cooid = scoor * weight_coo
    weight_pro = tf.concat(C * [tf.expand_dims(confs, -1)], 3)
    proid = sprob * weight_pro
    self.fetch += [_probs, confs, conid, cooid, proid]
    # Ground truth and per-element weights laid out like adjusted_net_out.
    true = tf.concat([_coord, tf.expand_dims(confs, 3), _probs ], 3)
    wght = tf.concat([cooid, tf.expand_dims(conid, 3), proid ], 3)
    print('Building {} loss'.format(m['model']))
    # Weighted squared error, summed per sample, averaged over the batch.
    loss = tf.pow(adjusted_net_out - true, 2)
    loss = tf.multiply(loss, wght)
    loss = tf.reshape(loss, [-1, H*W*B*(4 + 1 + C)])
    loss = tf.reduce_sum(loss, 1)
    self.loss = .5 * tf.reduce_mean(loss)
    tf.summary.scalar('{} loss'.format(m['model']), self.loss)
| [
"[email protected]"
] | |
b59043f0adb0bd245f3edc8572fc324e3dbc3732 | 2dc3904c6935db8e3a5d77a855368faeb9f0932c | /code4step3/unet_model.py | 86e8ef82573796296c521d5a8935eb8a746db4a6 | [
"MIT"
] | permissive | yukeyi/MCDS-Capstone | 470f64cd09d7332bcae4b4abee9a75c489e391e9 | f7ce48fc5d3f5f96c1f29556585ed2338683c7d2 | refs/heads/master | 2020-04-23T00:54:28.652993 | 2019-12-16T01:23:41 | 2019-12-16T01:23:41 | 170,796,148 | 0 | 0 | MIT | 2019-04-23T13:28:30 | 2019-02-15T03:27:39 | Jupyter Notebook | UTF-8 | Python | false | false | 2,570 | py | import torch
import torch.nn as nn
from model_util import conv_trans_block_3d, maxpool_3d, conv_block_2_3d, conv_block_3_3d, conv_block_4_3d
class UnetGenerator_3d(nn.Module):
    """3-level 3D U-Net: three down-sampling stages, a bridge, and three
    up-sampling stages with skip connections (torch.cat on the channel dim).

    Convolution/pooling building blocks come from model_util; channel counts
    scale as num_filter * {1, 2, 4, 8}.
    """
    def __init__(self, in_dim, out_dim, num_filter):
        super(UnetGenerator_3d, self).__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.num_filter = num_filter
        # A single ReLU module instance shared by all building blocks.
        act_fn = nn.ReLU()
        print("\n------Initiating U-Net------\n")
        # Encoder path.
        self.down_1 = conv_block_2_3d(self.in_dim, self.num_filter, act_fn)
        self.pool_1 = maxpool_3d()
        self.down_2 = conv_block_2_3d(self.num_filter, self.num_filter * 2, act_fn)
        self.pool_2 = maxpool_3d()
        self.down_3 = conv_block_2_3d(self.num_filter * 2, self.num_filter * 4, act_fn)
        self.pool_3 = maxpool_3d()
        self.bridge = conv_block_2_3d(self.num_filter * 4, self.num_filter * 8, act_fn)
        # Decoder path; up_* input channels include the concatenated skip.
        self.trans_1 = conv_trans_block_3d(self.num_filter * 8, self.num_filter * 8, act_fn)
        self.up_1 = conv_block_3_3d(self.num_filter * 12, self.num_filter * 4, act_fn)
        self.trans_2 = conv_trans_block_3d(self.num_filter * 4, self.num_filter * 4, act_fn)
        self.up_2 = conv_block_3_3d(self.num_filter * 6, self.num_filter * 2, act_fn)
        self.trans_3 = conv_trans_block_3d(self.num_filter * 2, self.num_filter * 2, act_fn)
        self.up_3 = conv_block_3_3d(self.num_filter * 3, self.num_filter * 1, act_fn)
        # NOTE(review): nn.LogSoftmax() without dim= relies on a legacy
        # default; newer torch warns — confirm the intended dimension.
        self.out = conv_block_4_3d(self.num_filter, out_dim, nn.LogSoftmax())
        self.reset_params()
    @staticmethod
    def weight_init(m):
        # Xavier weights / zero bias for every (transposed) 3D convolution.
        # NOTE(review): xavier_normal/constant are the pre-0.4 torch init
        # names (no trailing underscore); deprecated in newer versions —
        # confirm the target torch version.
        if (isinstance(m, nn.Conv3d) or isinstance(m, nn.ConvTranspose3d)):
            nn.init.xavier_normal(m.weight)
            nn.init.constant(m.bias, 0)
    def reset_params(self):
        # Apply weight_init to every submodule (non-conv modules are skipped
        # inside weight_init itself).
        for i, m in enumerate(self.modules()):
            self.weight_init(m)
    def forward(self, x):
        # Encoder with saved activations for the skip connections.
        down_1 = self.down_1(x)
        pool_1 = self.pool_1(down_1)
        down_2 = self.down_2(pool_1)
        pool_2 = self.pool_2(down_2)
        down_3 = self.down_3(pool_2)
        pool_3 = self.pool_3(down_3)
        bridge = self.bridge(pool_3)
        # Decoder: upsample, concatenate the matching encoder output, convolve.
        trans_1 = self.trans_1(bridge)
        concat_1 = torch.cat([trans_1, down_3], dim=1)
        up_1 = self.up_1(concat_1)
        trans_2 = self.trans_2(up_1)
        concat_2 = torch.cat([trans_2, down_2], dim=1)
        up_2 = self.up_2(concat_2)
        trans_3 = self.trans_3(up_2)
        concat_3 = torch.cat([trans_3, down_1], dim=1)
        up_3 = self.up_3(concat_3)
        out = self.out(up_3)
        return out
"[email protected]"
] | |
7a4118e9b6ecbc2640cb45301060a4a5dd9cd48c | b932d5b0889d4466fb605455a40ec6ca5b4285ae | /00_workflows/parsl_demo/pi_test_queue.py | 6e62a31c3ff40a18a6099b89ec245554359a857a | [] | no_license | cszhz/CompPerfWorkshop-2021 | 54cde54e4dd50fe9aed8fb1eec3f5a2c86f63885 | f070d3290d5b50d98709523f3e73e26e57ace369 | refs/heads/main | 2023-06-11T02:02:53.828553 | 2021-07-06T23:36:27 | 2021-07-06T23:36:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,489 | py | import parsl
import os
from parsl.app.app import python_app
from parsl.config import Config
from parsl.providers import CobaltProvider
from parsl.launchers import AprunLauncher
from parsl.executors import HighThroughputExecutor, ThreadPoolExecutor
from parsl.addresses import address_by_hostname
# Site-specific settings for the ALCF Theta machine (Cobalt scheduler).
MY_USER_PATH = '/home/USERNAME/.local/miniconda-3/latest/bin/'
MY_ALLOCATION = 'Comp_Perf_Workshop'
MY_QUEUE = 'comp_perf_workshop'
MY_COMPUTE_NODES = 1
MY_COMPUTE_BLOCKS = 1
MY_TIME = '00:05:00'
WORKERS_PER_NODE = 64
# Two executors: an HTEX that submits to compute nodes via Cobalt, and a
# small thread pool that runs directly on the login node.
parsl_config = Config(
    executors=[
        HighThroughputExecutor(
            label='theta-htex',
            max_workers = WORKERS_PER_NODE*MY_COMPUTE_NODES*MY_COMPUTE_BLOCKS,
            worker_debug=True,
            address=address_by_hostname(),
            provider=CobaltProvider(
                queue=MY_QUEUE,
                account=MY_ALLOCATION,
                launcher=AprunLauncher(overrides="-d 64"),
                walltime=MY_TIME,
                nodes_per_block=MY_COMPUTE_NODES,
                init_blocks=1,
                min_blocks=1,
                max_blocks=MY_COMPUTE_BLOCKS,
                # string to prepend to #COBALT blocks in the submit
                # script to the scheduler eg: '#COBALT -t 50'
                scheduler_options='',
                # Command to be run before starting a worker, such as:
                worker_init='module load miniconda-3; export PATH=$PATH:{}'.format(MY_USER_PATH),
                cmd_timeout=120,
            ),
        ),
        ThreadPoolExecutor(
            label='login-node',
            max_threads = 8
        ),
    ],
)
# Register the configuration; loading happens at import time.
parsl.load(parsl_config)
@python_app(executors=['theta-htex'])
def pi(num_points):
    """Monte-Carlo estimate of pi from num_points samples in the unit square."""
    from random import random
    hits = 0
    for _ in range(num_points):
        # A point counts when it falls inside the quarter circle of radius 1.
        if random() ** 2 + random() ** 2 < 1:
            hits += 1
    return hits * 4 / num_points
# App that averages the per-trial estimates on the login node.
@python_app(executors=['login-node'])
def mean(estimates):
    """Return the arithmetic mean of a sequence of pi estimates."""
    import numpy as np
    return np.asarray(estimates).mean()
if __name__ == '__main__':
    num_points_per_trial = 10000
    num_trials = 1280
    # Fan out independent Monte-Carlo trials; each pi() call returns an
    # AppFuture immediately.
    trials = []
    for i in range(num_trials):
        trial = pi(num_points_per_trial)
        trials.append(trial)
    # Block on every future, then average the per-trial estimates.
    trials_results = [trial.result() for trial in trials]
    # FIX: the original rebound the name `pi` here, shadowing the pi() app
    # defined above; use a distinct name for the result future.
    pi_estimate = mean(trials_results)
    print(pi_estimate.result())
| [
"[email protected]"
] | |
59e17142d32ca19041f681400af2b2f13474c48c | 9e4f426b1b09fe37433ecb2a88ac08b3bcd525a5 | /modules/roblox.py | d1a4a53c1856caa2812b3a5573b55b6d506cfc41 | [] | no_license | gusxyz/PyVer | a7e635679fbd8542c679f1107af9e45bd6f756a0 | 23e460bd50e790f322c989b5d31d243e9aee90f9 | refs/heads/master | 2023-01-22T14:36:42.766233 | 2019-12-13T07:59:57 | 2019-12-13T07:59:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,011 | py | import json,bs4,requests
class robloxUser():
    """Thin scraper around a roblox.com user profile page."""

    def __init__(self, id):
        # Roblox user id, kept as a string because it is interpolated into URLs.
        self.id = id

    def _profile_soup(self):
        # Fetch and parse the public profile page. Extracted helper: the
        # original duplicated this request/parse code in two methods.
        request = requests.get('https://roblox.com/users/' + self.id + '/profile')
        return bs4.BeautifulSoup(request.text, features='html.parser')

    def getStatus(self):
        """Return the user's status text from the hidden profile div."""
        pageData = self._profile_soup()
        hiddenData = pageData.find('div', {'data-profileuserid': self.id})
        return hiddenData['data-statustext']

    def getDescription(self):
        """Return the 'about' description text from the profile page."""
        pageData = self._profile_soup()
        return pageData.find('span', {'class': 'profile-about-content-text linkify'}).getText()

    def getImage(self, imageType):
        """Return a thumbnail URL; imageType is 'headshot' or 'bust' (any case)."""
        # Idiom fix: imageType.lower() instead of str.lower(imageType).
        kind = imageType.lower()
        if kind == 'headshot':
            return 'https://www.roblox.com/headshot-thumbnail/image?userId=' + self.id + '&width=420&height=420&format=png'
        elif kind == 'bust':
            return 'https://www.roblox.com/bust-thumbnail/image?userId=' + self.id + '&width=420&height=420&format=png'
        else:
            return 'Select Headshot or Bust'
"[email protected]"
] | |
2a4b17daaced9ba578fabcf45c20f80bffa9da09 | 9010a33df4badfcaa3f87493e4e1281a86760bb9 | /Rest/tutorial/snippets/migrations/0003_auto_20181108_1726.py | 1794f728919b0e4dfee215eed271608ef526343a | [] | no_license | DjangoJwa/About_Django | bae8cc7690d6db84653b89d567a390d82de473a4 | c249b91a2da4e7de2c498f8941fbefc9ee5c0bfe | refs/heads/master | 2020-04-04T23:08:21.658167 | 2018-11-21T11:17:01 | 2018-11-21T11:17:01 | 155,361,782 | 0 | 0 | null | 2018-10-30T09:54:04 | 2018-10-30T09:54:04 | null | UTF-8 | Python | false | false | 686 | py | # Generated by Django 2.1.2 on 2018-11-08 08:26
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 2.1.2; alters two fields on the snippets model.
    dependencies = [
        ('snippets', '0002_auto_20181108_1724'),
    ]
    operations = [
        migrations.AlterField(
            model_name='snippets',
            name='highlighted',
            field=models.TextField(),
        ),
        migrations.AlterField(
            model_name='snippets',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='snippets', to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"[email protected]"
] | |
5cfb25b5cea19947c08e86c583fd2f5d891fcf9f | 2109c75aaaad18ed202c2f258cfb42cadf402b6a | /boards/project/admin.py | 1a71238728019845b1aa582c135315a5dba5f152 | [] | no_license | Kartug/Python-projects | f23a36d5db307524f2a3e27f8235531c4bf9fa5b | 9361cdf503a06c4d14b258e0a07c718fec255714 | refs/heads/master | 2022-12-23T05:41:38.794027 | 2020-01-16T08:42:04 | 2020-01-16T08:42:04 | 187,950,399 | 0 | 0 | null | 2022-12-08T07:00:06 | 2019-05-22T02:44:42 | Python | UTF-8 | Python | false | false | 123 | py | from django.contrib import admin
from project.models import Board
# Expose the Board model in the Django admin site.
admin.site.register(Board)
# Register your models here.
| [
"[email protected]"
] | |
3d5170d0177d9b2a83b3013b2a1a0b64a23a880d | af88074068528f5f807b810f6718133d3c544ad3 | /easy/1342/numberOfSteps.py | b367e077ce784819caedd66bf70b85df980cda3f | [
"Apache-2.0"
class Solution:
    def numberOfSteps(self, num: int) -> int:
        """LeetCode 1342: number of steps to reduce *num* to zero.

        Each step halves an even number or decrements an odd one.
        """
        steps = 0
        while num > 0:
            if num % 2 == 0:
                # FIX: use floor division; the original `num /= 2` silently
                # turned num into a float.
                num //= 2
            else:
                num -= 1
            steps += 1
        return steps
# Quick manual check of the solution.
if __name__ == '__main__':
    ns = Solution()
    print(ns.numberOfSteps(14))  # expected: 6
    print(ns.numberOfSteps(8))  # expected: 4
    print(ns.numberOfSteps(123))  # expected: 12
| [
"[email protected]"
] | |
d1da48f8ef3b8c05ad37267fa85ffbd06269ed62 | 666a8807cd96c3d7e2204e5fcb2d793eb0615071 | /src/proveedores/migrations/0001_initial.py | 141ba8b7ac2da6d3807fd985bb543590cea74f65 | [] | no_license | maritza05/Mystery-Cafeteria | 4b8a714164c5063ade18269f92d04816a48179e2 | 85238303d15df0630c18d6c1b9f8b3c4ae99080b | refs/heads/master | 2021-01-10T07:14:09.902376 | 2015-12-08T03:05:01 | 2015-12-08T03:05:01 | 47,043,914 | 0 | 0 | null | 2015-11-30T04:02:53 | 2015-11-29T00:00:08 | Python | UTF-8 | Python | false | false | 624 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Proveedor (supplier)
    # table with basic contact details.

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Proveedor',
            fields=[
                # Implicit surrogate primary key.
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('organizacion', models.CharField(max_length=65)),
                ('email', models.EmailField(max_length=254)),
                ('numero_telefono', models.CharField(max_length=10)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
e65db35788ca6456a7d6e19edf98c2b01ab8fe66 | 0194a80ffb16878cd7dea79d0ec55ebdaac69b1a | /airline_booking/class_example.py | 4b243b7161a13f46b3de7aaef7b399e4d2aecfde | [] | no_license | Hancullen/Building-Python-Web-Services | 9c758420a7872113e42981f16568499d312e72bc | 0323d3077c6d651e4a3c19f362567f7a907bdcf9 | refs/heads/master | 2020-06-08T18:54:05.695283 | 2019-07-01T21:20:06 | 2019-07-01T21:20:06 | 193,286,646 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,492 | py | class Flight:
counter = 1
    def __init__(self, origin, destination, duration):
        """Create a flight and assign it the next sequential id."""
        # Each instance consumes one value from the class-level id counter.
        self.id = Flight.counter
        Flight.counter += 1
        # Passenger objects travelling on this flight.
        self.passengers = []
        # Route details.
        self.origin = origin
        self.destination = destination
        self.duration = duration
    def print_f(self):
        """Print the flight's route details followed by its passenger list."""
        print(f"Flight origin: {self.origin}")
        print(f"Flight destination: {self.destination}")
        print(f"Flight duration: {self.duration}")
        print()
        print("Passengers in this flight: ")
        for p in self.passengers:
            print(f"{p.name}")
    def add_passenger(self, pax):
        """Add *pax* to this flight and stamp the flight id on the passenger.

        Storing flight_id on the passenger is how each passenger is
        associated back to the flight they are on.
        """
        self.passengers.append(pax)
        pax.flight_id = self.id
def delay(self, amount):
self.duration += amount
class Passenger:
    """A person booked on a flight, identified by name."""

    def __init__(self, name):
        self.name = name
def main():
    """Small demo: build a flight, delay it, add two passengers, print it."""
    f = Flight(origin="Hanoi", destination="Vaasa", duration=790)
    f.delay(5)
    p1 = Passenger("Han")
    p2 = Passenger("Blue")
    f.add_passenger(p1)
    f.add_passenger(p2)
    f.print_f()


if __name__=="__main__":
    main()
| [
"[email protected]"
] | |
9110dd05442c39c54177de9edcf5e4b53bd86be6 | cf2b2ae4ee4ebdd9bbc0b5677ce29fa0c7d20256 | /scripts/clustering/snippet/make_dv.py | d1a8a1a0a77761b91cea8f10b496ee0631211532 | [] | no_license | jaewookahn/adaptive_vibe_testing | 8cd2109ba87f10d1ba967c34f4ecb2fa29519e7d | d47b4a3262781d48dbf4f79355b65f7f44bb0a8f | refs/heads/master | 2021-01-19T09:42:08.566979 | 2013-12-27T05:25:08 | 2013-12-27T05:25:08 | 18,613,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,668 | py | #!/usr/bin/python
import os, bsddb, string, sys

# Python 2 script (bsddb, dict.has_key and the bare print statement at the
# bottom do not exist in Python 3).  Builds per-document term-count vectors
# from an Indri index by shelling out to the "dumpindex" tool, storing them
# in a Berkeley DB file, and dumping them in sparse "termid:count" form.
if len(sys.argv) < 3:
    # NOTE(review): this usage message is a bare string expression, so it is
    # never actually printed before exiting.
    "Usage: command TOPICID MODE[db,gt,notes,all]"
    sys.exit(1)
TOPICID = int(sys.argv[1])
MODE = sys.argv[2]
# "db" mode (re)builds the document-vector database; the other modes read it.
if MODE == 'db':
    NEW_DV = True
else:
    NEW_DV = False
# Number of ground-truth documents per topic id.
gt_count = {40009:534, 40021:1454, 40048:733}
#
# build dv db
fp = os.popen("dumpindex index-%d s" % TOPICID)
doccount = int(fp.readlines()[1].split(":")[1].strip())
fp.close()
if NEW_DV:
    dv = bsddb.btopen("dv-%d.bsddb" % TOPICID, 'c')
    for i in range(1, doccount + 1):
        # One-line progress indicator.
        sys.stdout.write("\r%d/%d" % (i, doccount))
        sys.stdout.flush()
        fp = os.popen("dumpindex index-%d dv %d" % (TOPICID, i))
        dopass = True
        for s in fp:
            # Skip the dumpindex header until the term listing starts.
            if s.startswith("--- Terms ---"):
                dopass = False
                continue
            if dopass:
                continue
            v1, v2, term = s.strip().split()
            if term == '[OOV]':
                continue
            # Key "docid:term" maps to that term's count in the document.
            key = "%d:%s" % (i, term)
            if dv.has_key(key):
                dv[key] = str(int(dv[key]) + 1)
            else:
                dv[key] = "1"
    dv.close()
    sys.exit(0)
#
# read term id from the indri index
dv = bsddb.btopen('dv-%d.bsddb' % TOPICID, 'r')
fp = os.popen("dumpindex index-%d v" % TOPICID)
fp.readline()
terms = {}
for i, s in enumerate(fp):
    term, temp, temp = s.split()
    terms[term] = i
fp.close()
# all
if MODE == 'all':
    start = 1
    end = doccount + 1
# gt only
if MODE == 'gt':
    start = 1
    end = gt_count[TOPICID] + 1
# note only
if MODE == 'notes':
    start = gt_count[TOPICID] + 1
    end = doccount + 1
# Emit each document's sparse vector: "<nterms> termid:count termid:count ..."
for i in range(start, end):
    line = []
    for term in terms.keys():
        key = "%d:%s" % (i, term)
        if dv.has_key(key):
            line.append("%d:%s" % (terms[term], dv[key]))
    if len(line) > 0:
        print len(line), string.join(line, ' ')
"[email protected]"
] | |
35df953227d832576b310a7c674b9aa7c5a59c17 | 37c38975875857cbd2401dabcfb5269e2e89e5e8 | /prac-mlc/.backups/MCL_Lori.py-1478953425359 | 1fd0956efa07bda0eb2f518ffd1312dea2578bb6 | [] | no_license | gimaik/ICC_C333_Robotics | 28efe9856996c6dc110dc3b5487fbeec75d5df73 | c2aaa881cc42322c19af6f3330b5050009daa378 | refs/heads/master | 2021-06-08T04:00:00.071369 | 2016-11-29T21:52:49 | 2016-11-29T21:52:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,621 | #!/usr/bin/env python
# Some suitable functions and data structures for drawing a map and particles
import time
import sys
import random
import math
import brickpi
import time
#ROBOTICS
# Hardware setup: connect to the BrickPi and enable both drive motors.
interface = brickpi.Interface()
interface.initialize()
motors = [0, 1]
# PID gains shared by both wheel controllers.
k_p = 480.0
k_i = 400.0
k_d = 5.0
# Calibration constants: motor angle per unit of travel / per full turn.
LENGTH = 15.0 #15 FOR 40CM
ANGLE = 20.5 #FOR 360
AnglePerCentimeter = LENGTH / 40.0
AnglePerRadius = ANGLE / (2*math.pi)
# Scale factor applied to the left motor's angle references in rotate()/goLine()
# (compensates for asymmetry between the two motors).
left_coefficient = 1.035
D = 150
#-------------------------------Initialization----------------------------------------
interface.motorEnable(motors[0])
interface.motorEnable(motors[1])
#Left motor
motorParams0 = interface.MotorAngleControllerParameters()
motorParams0.maxRotationAcceleration = 6
motorParams0.maxRotationSpeed = 12
motorParams0.feedForwardGain = 255/20.0
motorParams0.minPWM = 18.0
motorParams0.pidParameters.minOutput = -255
motorParams0.pidParameters.maxOutput = 255
motorParams0.pidParameters.k_p = k_p
motorParams0.pidParameters.k_i = k_i
motorParams0.pidParameters.K_d = k_d
#Right motor
motorParams1 = interface.MotorAngleControllerParameters()
motorParams1.maxRotationAcceleration = 6.0
motorParams1.maxRotationSpeed = 12
motorParams1.feedForwardGain = 255/20.0
motorParams1.minPWM = 18.0
motorParams1.pidParameters.minOutput = -255
motorParams1.pidParameters.maxOutput = 255
motorParams1.pidParameters.k_p = k_p
motorParams1.pidParameters.k_i = k_i
motorParams1.pidParameters.K_d = k_d
interface.setMotorAngleControllerParameters(motors[0],motorParams0)
interface.setMotorAngleControllerParameters(motors[1],motorParams1)
# -------------------------------------Movement function-------------------------------------
def rotate(rotation):
    """Turn the robot on the spot by *rotation* radians (blocking).

    Blocks, polling every 0.1 s, until both motors reach their references.
    """
    angle = rotation * AnglePerRadius
    # Wheels turn in opposite directions; the left reference is scaled by
    # left_coefficient to compensate for motor asymmetry.
    interface.increaseMotorAngleReferences(motors, [-left_coefficient * angle, angle])
    motorAngles = interface.getMotorAngles(motors)
    #initialValues = [motorAngles[0][0], motorAngles[1][0]]
    while not interface.motorAngleReferencesReached(motors):
        time.sleep(0.1)
def goLine(distance):
    """Drive straight ahead by *distance* centimetres (blocking)."""
    angle = distance * AnglePerCentimeter
    interface.increaseMotorAngleReferences(motors, [left_coefficient * angle, angle])
    motorAngles = interface.getMotorAngles(motors)
    #initialValues = [motorAngles[0][0], motorAngles[1][0]]
    while not interface.motorAngleReferencesReached(motors):
        time.sleep(0.1)
#interface.startLogging("./log2_" + str(k_p) + ".txt"
#----------------------------------------Square test--------------------------------------------
'''
goLine(40)
rotate(90/(2*math.pi))
goLine(40)
rotate(90/(2*math.pi))
goLine(40)
rotate(90/(2*math.pi))
goLine(40)
rotate(90/(2*math.pi))
'''
particles = Particles()  # NOTE(review): Particles is defined further down this file, so this line raises NameError at import time; move it below the class.
class Dot(object):
    """A weighted pose sample: position (x, y), heading theta, weight w."""

    def __init__(self, x, y, theta, weight):
        self.x, self.y = x, y
        self.theta = theta
        self.w = weight
# ---------------------------------Waypoint navigation-----------------------------------
def compute_coord(x, y, wx, wy):
    """Return (bearing, distance) from pose (x, y) to waypoint (wx, wy).

    The bearing is the absolute heading toward the waypoint, in radians.
    """
    delta_x = wx - x
    delta_y = wy - y
    bearing = math.atan2(delta_y, delta_x)
    distance = math.sqrt(delta_x*delta_x + delta_y*delta_y)
    return bearing, distance
def compute_angle_turn(curr_angle, dest_angle):
    """Return (signed turn, dest_angle), wrapping the turn into [-pi, pi].

    The Python 2-only ``print`` statements are rewritten as single-argument
    ``print(...)`` calls, which produce identical output under both
    Python 2 and Python 3 (the original file could not even be imported
    under Python 3).
    """
    print("cur:" + str(curr_angle / (math.pi) * 180))
    print("des:" + str(dest_angle / (math.pi) * 180))
    angle_diff = dest_angle - curr_angle
    # Wrap so the robot never turns more than half a revolution.
    if angle_diff > math.pi:
        angle_diff = -(math.pi * 2 - angle_diff)
    if angle_diff < -math.pi:
        angle_diff = math.pi * 2 + angle_diff
    return angle_diff, dest_angle
def navigateToWaypoint(start_point):
    """Interactive loop: read "x,y" waypoints from stdin and drive to them.

    Typing "exit" ends the mission.  NOTE(review): start_point is never
    used; the current pose is taken from the particle filter instead.
    """
    while 1:
        inputStr = raw_input("input destination: ")  # Python 2 only (input() in Py3)
        if inputStr == "exit":
            print "mission completed"
            return
        origin = particles.computeAvgDot()
        wx, wy = inputStr.split(',')
        wx = float(wx)
        wy = float(wy)
        dist, angle_diff, destAngle = navigateToWaypointAux(wx, wy, origin)
        # NOTE(review): the three calls below look broken:
        #  - updateRotation is defined as updateRotation(self, angleDiff)
        #  - "updateStrainghtLine" is a typo for updateStraightLine(dist)
        #  - updateWeights requires a sonar reading z
        particles.updateRotation()
        particles.updateStrainghtLine()
        particles.updateWeights()
def navigateToWaypointAux(wx, wy, origin):
    """Turn toward and drive to the waypoint (wx, wy) from pose *origin*.

    Returns (distance driven, signed turn applied, destination heading in
    radians).  The original returned the undefined name ``destAngle`` (the
    local variable is ``dest_angle``), which raised NameError on every
    call; fixed here.
    """
    curr_x, curr_y, curr_theta = origin.x, origin.y, origin.theta
    (alpha, dist) = compute_coord(curr_x, curr_y, wx, wy)
    angle_diff, dest_angle = compute_angle_turn(curr_theta, alpha)
    rotate(angle_diff)
    goLine(dist)
    return dist, angle_diff, dest_angle
#webSimulation()
#origin = Dot(0,0,0,1.0)
#navigateToWaypoint(origin)
#interface.stopLogging()
import time
import random
import math
# Standard deviations for the particle filter's motion noise
# (all zero here, i.e. noise disabled).
SIGMA_E = 0
SIGMA_F = 0
SIGMA_G = 0
# Functions to generate some dummy particles data:
def calcX():
    # Depends on the module-level time variable t.
    return random.gauss(80,3) + 70*(math.sin(t)); # in cm
def calcY():
    return random.gauss(70,3) + 60*(math.sin(2*t)); # in cm
def calcW():
    return random.random();
def calcTheta():
    return random.randint(0,360);
# A Canvas class for drawing a map and particles:
# - it takes care of a proper scaling and coordinate transformation between
# the map frame of reference (in cm) and the display (in pixels)
class Canvas:
    """Maps map-frame coordinates (cm) onto display pixels and "draws" by
    printing, taking care of scaling and the Y-axis flip.

    The Python 2-only ``print`` statements are rewritten as single-argument
    ``print(...)`` calls, which produce identical output under both
    Python 2 and Python 3.
    """

    def __init__(self, map_size=210):
        self.map_size = map_size    # in cm
        self.canvas_size = 768      # in pixels
        self.margin = 0.05 * map_size
        self.scale = self.canvas_size / (map_size + 2 * self.margin)

    def drawLine(self, line):
        """Print a wall segment (x1, y1, x2, y2) in screen coordinates."""
        x1 = self.__screenX(line[0])
        y1 = self.__screenY(line[1])
        x2 = self.__screenX(line[2])
        y2 = self.__screenY(line[3])
        print("drawLine:" + str((x1, y1, x2, y2)))

    def drawParticles(self, data):
        """Print particle tuples with (x, y) converted to screen coordinates."""
        display = [(self.__screenX(d[0]), self.__screenY(d[1])) + d[2:] for d in data]
        print("drawParticles:" + str(display))

    def __screenX(self, x):
        return (x + self.margin) * self.scale

    def __screenY(self, y):
        # Screen Y grows downward, so the map Y is flipped.
        return (self.map_size + self.margin - y) * self.scale
# A Map class containing walls
class Map:
    """The set of wall segments making up the arena; renders via the
    module-level canvas."""

    def __init__(self):
        self.walls = []

    def add_wall(self, wall):
        """Append one wall segment (x1, y1, x2, y2)."""
        self.walls.append(wall)

    def clear(self):
        """Forget every wall."""
        self.walls = []

    def draw(self):
        """Draw each wall on the module-level canvas."""
        for segment in self.walls:
            canvas.drawLine(segment)
def generate_points(n):
    """Return n identical particles at the origin with uniform weight 1/n."""
    uniform_weight = 1.0 / n
    return [(0.0, 0.0, 0.0, uniform_weight) for _ in range(n)]
def calculate_likelihood(x, y, theta, z, walls=None, sigma=None):
    """Likelihood of sonar reading *z* for pose (x, y, theta).

    Casts a ray along the heading, finds the CLOSEST wall segment it hits,
    and scores the reading with a Gaussian model around that distance.

    walls/sigma default to the module-level map and sensor deviation, so
    existing four-argument callers keep working; passing them explicitly
    makes the function testable in isolation.

    Fixes over the original:
      * the Gaussian used ``m`` — whatever wall was examined LAST — instead
        of the tracked ``min_dist``;
      * the exponent ``/ 2 * sigma * sigma`` multiplied by sigma^2 instead
        of dividing by 2*sigma^2;
      * rays parallel to a wall no longer raise ZeroDivisionError.
    """
    if walls is None:
        walls = mymap.walls
    if sigma is None:
        # NOTE(review): deviation_sensor is never defined at module level in
        # this file; callers relying on the default will hit a NameError.
        sigma = deviation_sensor
    min_dist = -1
    for ax, ay, bx, by in walls:
        denom = (by - ay) * math.cos(theta) - (bx - ax) * math.sin(theta)
        if denom == 0:
            continue  # ray is parallel to this wall
        m = ((by - ay) * (ax - x) - (bx - ax) * (ay - y)) / denom
        if m < 0:
            continue  # wall is behind the robot
        intersection_x = x + m * math.cos(theta)
        intersection_y = y + m * math.sin(theta)
        # Reject hits outside the segment's extent (y-range for vertical
        # walls, x-range otherwise — the map's walls are axis-aligned).
        if ax == bx:
            if not (min(ay, by) <= intersection_y <= max(ay, by)):
                continue
        else:
            if not (min(ax, bx) <= intersection_x <= max(ax, bx)):
                continue
        if min_dist == -1 or m < min_dist:
            min_dist = m
    if min_dist == -1:
        # No wall ahead of the robot: the reading carries no information.
        return 1.0
    return math.exp(-(z - min_dist) ** 2 / (2.0 * sigma * sigma))
# Simple Particles set
class Particles:
    """A fixed-size cloud of weighted pose hypotheses.

    Each particle is an immutable tuple (x, y, theta, w).  The original
    code mutated tuple elements in place (``self.data[i][0] += ...``),
    which raises TypeError at runtime; every update now rebuilds the tuple.
    """

    def __init__(self):
        self.n = 10
        self.data = generate_points(self.n)

    def updateStraightLine(self, dist):
        """Motion update after driving *dist* cm forward, with Gaussian noise."""
        for i in range(self.n):
            x, y, theta, w = self.data[i]
            x += (dist + random.gauss(0, SIGMA_E)) * math.cos(theta)
            y += (dist + random.gauss(0, SIGMA_E)) * math.sin(theta)
            theta += random.gauss(0, SIGMA_F)
            self.data[i] = (x, y, theta, w)

    def updateWeights(self, z):
        """Measurement update: scale each weight by the sonar likelihood of z."""
        for i in range(self.n):
            x, y, theta, w = self.data[i]
            self.data[i] = (x, y, theta, w * calculate_likelihood(x, y, theta, z))

    def updateRotation(self, angleDiff):
        """Motion update after turning by angleDiff radians.

        The original touched a single, undefined index ``i``; the rotation
        (plus noise) is now applied to every particle.
        """
        for i in range(self.n):
            x, y, theta, w = self.data[i]
            self.data[i] = (x, y, theta + angleDiff + random.gauss(0, SIGMA_G), w)

    # Tuple-field accessors.
    def getX(self, point):
        return point[0]

    def getY(self, point):
        return point[1]

    def getTheta(self, point):
        return point[2]

    def getWeight(self, point):
        return point[3]

    def draw(self):
        canvas.drawParticles(self.data)

    def normalizeData(self):
        """Rescale weights so they sum to one (no longer shadows built-in sum)."""
        total = 0.0
        for i in range(self.n):
            total += self.data[i][3]
        for i in range(self.n):
            x, y, theta, w = self.data[i]
            self.data[i] = (x, y, theta, w / total)

    def copyPoint(self, point):
        """Clone a particle's pose with the uniform weight 1/n."""
        return (point[0], point[1], point[2], 1.0 / self.n)

    def regenerateParticles(self):
        """Resample n particles with probability proportional to weight."""
        running = 0.0
        cumDist = []
        for i in range(self.n):
            running += self.data[i][3]
            cumDist.append(running)
        newData = []
        for i in range(self.n):
            randomNum = random.random()
            pos = -1
            for j in range(self.n):
                if cumDist[j] > randomNum:
                    pos = j
                    break
            if pos == -1:
                pos = self.n - 1
            newData.append(self.copyPoint(self.data[pos]))
        self.data = newData

    def computeAvg(self):
        """Weighted mean pose as a 4-tuple (x, y, theta, 0.0).

        The original accumulated into an (immutable) tuple and read the
        module variable ``data`` instead of ``self.data``; both fixed.
        """
        sx = sy = st = 0.0
        for x, y, theta, w in self.data:
            sx += x * w
            sy += y * w
            st += theta * w
        return (sx, sy, st, 0.0)

    def computeAvgDot(self):
        point = self.computeAvg()
        return Dot(point[0], point[1], point[2], point[3])
canvas = Canvas();
mymap = Map();
# Definitions of walls
# a: O to A
# b: A to B
# c: C to D
# d: D to E
# e: E to F
# f: F to G
# g: G to H
# h: H to O
mymap.add_wall((0,0,0,168)); # a
mymap.add_wall((0,168,84,168)); # b
mymap.add_wall((84,126,84,210)); # c
mymap.add_wall((84,210,168,210)); # d
mymap.add_wall((168,210,168,84)); # e
mymap.add_wall((168,84,210,84)); # f
mymap.add_wall((210,84,210,0)); # g
mymap.add_wall((210,0,0,0)); # h
mymap.draw();
# NOTE(review): navigateToWaypoint is declared with a required start_point
# parameter, so this zero-argument call raises TypeError.
navigateToWaypoint()
t = 0;
#while True:
# particles.update();
# particles.draw();
# t += 0.05;
# time.sleep(0.05);
interface.terminate()
| [
"[email protected]"
] | ||
bb4ee3d130fd09e1f604855d82a3176e24481bfb | 7c084b1270e92bab5b6f96920d0f3d8ab91b114d | /116106000759_高鑫/primepath与unittest/read.py | deca86a68d16e6a432f58ade06fe7c9edb19fdcb | [] | no_license | 1206045748/homework | ee2f49c63e2ac1728ade21f45fee503c1dd05382 | 8d469d0131e8443f53b1ee5bc1d85f68974cb132 | refs/heads/master | 2021-01-25T11:28:55.476987 | 2017-06-10T09:56:24 | 2017-06-10T09:56:24 | 93,930,235 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,915 | py | def compare(p1,p2,tt):
    # Build comma-joined string forms of both paths, e.g. [1, 2] -> "1,2,".
    s1 = ""
    s2 = ""
    for i in p1:
        s1 = s1 + str(i) + ","
    for j in p2:
        s2 = s2 + str(j) + ","
    # Record p1 in tt when it is a strict sub-path of p2: its joined form
    # occurs inside p2's joined form but the two paths are not identical.
    if (s1 != s2 and s1 in s2):
        if (p1 not in tt):
            tt.append(p1)
# Interactive driver: for each test id read from stdin, load a control-flow
# graph from ./case/case<ID>.txt (one comma-separated successor list per
# line) and write the surviving maximal paths to ./answer/answer<ID>.txt.
while True:
    ten = None
    try:
        ten = input("输入:")
        data = {}
        # Parse "a,b,c" adjacency rows into {line_index: [int successors]}.
        with open('./case/case' + ten + '.txt', 'r') as df:
            ie = 0
            for kv in [de.strip().replace(" ", "").split(',') for de in df]:
                data[ie] = [int(kve) for kve in kv]
                ie = ie + 1
        print(data)
        l = []
        s = ''
        def fin(i, a, l, g):
            # Depth-first walk collecting candidate paths: a path is closed
            # when a node repeats (cycle detected) or when the current node
            # has no successors / a sentinel -1 successor.
            c = []
            for y in a:
                c.append(y)
            if (i in c and i != c[0]):
                l.append(c)
            elif (i in c and i == c[0]):
                # s=s+i
                c.append(i)
                l.append(c)
            else:
                d = c
                d.append(i)
                if (len(g[i]) == 0 or g[i][0] == -1):
                    l.append(d)
                else:
                    for t in g[i]:
                        fin(t, d, l, g)
        for key in data:
            #s=s+key
            a = []
            fin(key, a, l, data)
        temp = []
        tt = []
        # De-duplicate the candidate paths.
        for i in l:
            if (i not in temp):
                temp.append(i)
        l2 = []
        for i in temp:
            l2.append(i)
        # Drop every path that is a strict sub-path of another (compare above
        # collects those into tt).
        for p1 in temp:
            for p2 in l2:
                compare(p1, p2, tt)
        #print(tt)
        for i in tt:
            temp.remove(i)
        print(len(temp))
        print(temp)
        files = open('./answer/answer' + ten + '.txt', 'w+')
        files.write(str(len(temp)) + "\n")
        temp = sorted(temp, key=lambda a: (len(a), a))
        for ti in temp:
            # ki=' '.join([str(ji) for ji in ti])
            # files.write(ki+"\n")
            files.write(str(ti) + "\n")
        files.close()
    # NOTE(review): the bare except silently swallows every error (missing
    # case file, bad input, ...); narrow it and report the failure.
    except:
        pass
    # NOTE(review): if input() itself raised, ten is still None here and
    # len(ten) raises an uncaught TypeError.
    if (len(ten) > 2):
        break
"[email protected]"
] | |
a861a51696a1ce07f9eff6c8bb1d0344e618b511 | 3cadf60273e5e7ecede807d631d2c9b9e45499ad | /src/18_stuff/task02.py | d28f964b1cb2678dc24838eebe40832c175a7700 | [] | no_license | shamanengine/HackerRank | 78a4316713518601f4f0499626fbce8766e004df | 8f6c4afa0b6d1e1e934af6ba173c00eae249f42e | refs/heads/master | 2021-08-27T17:57:34.391358 | 2021-08-13T15:17:17 | 2021-08-13T15:17:17 | 143,048,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | '''
Given 2 numbers, provide number of all perfect squares between them
'''
import math
a, b = map(int, input().strip().split())
i = 0
for x in range(int(math.ceil(a ** (1 / 2))), b):
if x ** 2 <= b:
i += 1
# print(x)
else:
break
print(i)
'''
Input
1 50
25590 26590
9 49
Output
7
4
5
'''
| [
"[email protected]"
] | |
3b95704664ddfd0e9322f66c1c9e5b22ff8ecd47 | b886d594fee149585d0ea2d8193a77e4a0c596b4 | /portfolio/migrations/0004_auto_20200529_0254.py | 9b5644a39b9af51a4be381592e52b44e03899d33 | [] | no_license | SathvikTumoju/SoftwareEngineeringProject | ab8cfae493267ac5c8c609b6b9a8ee55e4af193d | f3720331b479b87009bd82c3e32fffc4230c9505 | refs/heads/master | 2022-08-29T00:27:37.536325 | 2020-05-30T18:03:08 | 2020-05-30T18:03:08 | 268,075,458 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 537 | py | # Generated by Django 3.0.5 on 2020-05-28 21:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('portfolio', '0003_auto_20200529_0151'),
]
operations = [
migrations.RenameField(
model_name='contact',
old_name='description',
new_name='address',
),
migrations.AddField(
model_name='contact',
name='contactdetails',
field=models.TextField(blank=True),
),
]
| [
"[email protected]"
] | |
217b98f6b0ce25cdbad2d4b9d5143161e41191a2 | 3d170d63538c244008fb168642d7dfa9066f4478 | /django_tutorial/mysite/polls/models.py | d5d1e5adfe5ddf035946b0982d5fcf33b18e5c9f | [] | no_license | asidhu0/Hack-the-Hood | 51160de0f6b56bb5d1d62dbf87cdd22c65063fc0 | cd4acbf3aff1354830d10ca26d1b167f10698960 | refs/heads/main | 2023-06-20T09:28:35.711583 | 2021-07-08T17:48:47 | 2021-07-08T17:48:47 | 377,609,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | from django.db import models
from django.utils import timezone
import datetime
# Create your models here.
class Question(models.Model):
    """A poll question and the moment it was published."""

    question_text = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')

    def __str__(self):
        return self.question_text

    def was_published_recently(self):
        """Return True if the question was published within the last day.

        The original returned True for questions whose pub_date lies in the
        future; bounding pub_date by *now* on the upper side fixes that.
        """
        now = timezone.now()
        return now - datetime.timedelta(days=1) <= self.pub_date <= now
class Choice(models.Model):
    """One selectable answer for a Question, with a running vote tally."""

    # Deleting the parent Question cascades to its choices.
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    choice_text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)

    def __str__(self):
        return self.choice_text
| [
"[email protected]"
] | |
8b09f2081747012fd399d46c268b02475956c0db | cde0ca62604340cdf907d257379c8c7771dd1655 | /Pandas tutorial_Iterations & Sorting.py | 7f9debbad0b489814f6a2d3b20c53d07bb154591 | [] | no_license | RishiNandhan/Pandas-Basics | 44def9e6856670622ce0b98f961cb80a149b892c | 9353b389712ebec1c312eb6bd77650764bba577a | refs/heads/master | 2023-07-05T09:25:48.355888 | 2021-08-12T01:43:44 | 2021-08-12T01:43:44 | 395,030,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,347 | py | import numpy as np
import pandas as pd

# Demo frame: 10 rows of uniform random numbers in three columns.
df1 = pd.DataFrame(data=np.random.rand(10, 3), columns=['a', 'b', 'c'])
print(df1)
"""
Iteration on different object type:
1) Series- return the values
2) Dataframe- return the column name
3) Panel- return the item name
"""
# Iterating a DataFrame directly yields its column labels.
for i in df1:
    print("column name: " + i)
print("#####")
"""
1) iteritems()- return key,value for each row
2) iterrows()- returns index,series in the dataframe
3) itertuples()- returns the tuple of each row
"""
# NOTE(review): DataFrame.iteritems() was deprecated and removed in
# pandas 2.0; current code should use DataFrame.items().
for key, value in df1.iteritems():
    print(key, value)
print("########")
for index, series in df1.iterrows():
    print(index, series)
print("#########")
for row in df1.itertuples():
    print(row)
print("#########")
"""
while iterating over the data....any changes made will not be reflected as
it is a view of the original data. So any changes made will not reflect in the data.
"""
# Second frame with a deliberately shuffled integer index for the sort demos.
df2 = pd.DataFrame(data=np.random.rand(10, 2), columns=['a', 'b'], index=[3, 4, 6, 8, 2, 1, 0, 5, 7, 9])
print(df2)
print("######")
#sort by index
sort_index = df2.sort_index()
print(sort_index)
print("#######")
#sort by values
sort_values1 = df2.sort_values(by=['a', 'b'])
print(sort_values1)
print("##########")
#sort by values using algorithm
sort_values2 = df2.sort_values(by=['a', 'b'], kind="mergesort")
print(sort_values2)
"""
There are 3 kinds of sorting algorithm
1) mergesort
2) quicksort
3) heapsort
"""
| [
"[email protected]"
] | |
33e0aee58930ad35e13facfe0a5c06af88da237d | c416ba6f84c52cffff34bde5e37ec09efca5f0c9 | /jicasurvey/settings.py | d60f1d0506b403f408f1be5e0cfb63f86d882e48 | [] | no_license | AashishGrg/Survey-jyca- | aa462320e6b40c93b9e6b8a2bb89d52cb1d4cdb7 | bdc657edee51a5ee71f1f26251fffedf676f7d1b | refs/heads/master | 2022-05-11T16:30:00.684891 | 2019-07-03T18:31:09 | 2019-07-03T18:31:09 | 195,107,528 | 0 | 0 | null | 2022-04-22T21:38:15 | 2019-07-03T18:27:20 | Python | UTF-8 | Python | false | false | 3,432 | py | """
Django settings for jicasurvey project.
Generated by 'django-admin startproject' using Django 2.2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): a live secret key is committed in source control; rotate it
# and load it from the environment before any real deployment.
SECRET_KEY = 'mgszuruda3io6d!chjm$hp$qsfc8)_eu+(stx2muxk+1s$vyhm'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# NOTE(review): '*' accepts any Host header; restrict this in production.
ALLOWED_HOSTS = ['*']

# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'authentication',
    'rest_framework.authtoken',
    'rest_framework',
    'municipality',
    'collector',
    'farmer',
    'crop',
]

# Custom user model for survey data collectors.
AUTH_USER_MODEL = "authentication.DataCollector"

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'jicasurvey.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'jicasurvey.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Every API endpoint authenticates with DRF token authentication by default.
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework.authentication.TokenAuthentication',  # <-- And here
    ],
}

# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
9d384db166816110a41e565ceeb8c4a3806b1678 | 64660496cb42060c84b4d89a125d443b91fbbbf2 | /kalman.py | a6f1baf5dba6bed27271330d6264b83daa80801e | [
"MIT"
] | permissive | mohamedsayed18/AI_for_Robotics | ae6ad6b8213e0493e3bef2b2e44a4ae45510b451 | a5a6f2360d32cfc32b2d6e8f02d16959e84fa216 | refs/heads/master | 2020-12-09T07:38:10.708555 | 2020-04-24T07:10:24 | 2020-04-24T07:10:24 | 233,239,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,281 | py | """
kalman filter assignment from AI_for_Robotics course
this solution is not based on the given code in the course but it applies the
same equations to solve the multidimension kalman filter challange
"""
import numpy as np

#let's define some variables we will need
# We are taking discite measurement so this variable will be constant
delta_t = 0.1 # just an assumption
"""inital_state vector, we assumed that we start from co-ordinates(4,12) and
velocity is 0 in the x and y directions"""
current_state = np.array([4, 12, 0, 0]) # initial values
# Initial variances used to build the process covariance matrix pk below.
x_pos_error = 0
y_pos_error = 0
x_vel_error = 1000
y_vel_error = 1000
x_pos_measure_error = 0
y_pos_measure_error = 0
U = 0 #external force
# let's start by defining the matricies
"""
State vector. we have four variables or states we want to estimate the position
and velocity at X-axis and at Y-axis. so the variables are
x: position at X-axis
y: position at Y-axis
x_vel: velocity in X-axis
y_vel: velocity in Y-axis
The position in the X-axis is known using value of variables x and x_vel using
this formula new_position = old_position + x_vel * delta_time
instead of doing this we can reprsent this relation in matrix form
"""
# State transmission matrix in the course is noted as F matrix
A = np.array([
    [1, 0, delta_t, 0], # position in X-axis
    [0, 1, 0, delta_t], # position in Y-axis
    [0, 0, 1, 0], #velocity in X-axis
    [0, 0, 0, 1] #velocity in Y-axis
])
"""
Input transmission matrix B. trasnform the input to the system(if we have
external force affecting system) for our case U=0
so the values of B is not important, so can ignore it. this video explain how to
design it https://www.youtube.com/watch?v=NbRrLv_vX_U
"""
#B = np.zeros((4,1))
"""
Process covariance matix.
Defines the variance in the process and how they are related to each other
will be a 4x4 matrix.
initial uncertainty: 0 for positions x and y, 1000 for the two velocities
"""
pk = np.array([
    [x_pos_error, 0, 0, 0],
    [0, y_pos_error, 0, 0],
    [0, 0, x_vel_error, 0],
    [0, 0, 0, y_vel_error]
])
"""
Measurement covariance matrix.
Defines covariance(same as variance but in multidimension we call it covariance)
of the variables you measure and how they affect each other.
in our case: use 2x2 matrix with 0.1 as main diagonal
"""
R = np.diag([0.1, 0.1, 0.1, 0.1])
"""transmission matrix"""
H = np.identity(4)
#H = np.array([1,0,0,0],[0,1,0,0])
"""
measurement transmission matrix C
"""
C = np.identity(4)
# Measurements are X and Y
measurements = [[5., 10.], [6., 8.], [7., 6.], [8., 4.], [9., 2.], [10., 0.]]
np.seterr(divide='ignore', invalid='ignore') # to ignore division by zero
# Let's apply kalmanfilter it can be applied by repeating those six steps
for i in range(len(measurements)):
    """
    First step detrmine the current state
    X = A*X + B*U + W, where W is noise in the process
    we have U=0, and will neglect the W
    """
    current_state = A.dot(current_state)
    """
    Second step predict the process covariance matrix
    P(current) = A * P(previous) * A(transpose) + Q.
    where Q is the process noise covariance matrix: which keeps the state
    covariance matrix from becomming to small or going to zero
    """
    #fi = np.dot(A, pk)
    #print(fi)
    pk = np.dot(A.dot(pk), A.transpose())
    #print("pk ",pk)
    # third step
    # calculate kalman gain
    # there is error because of dividing by zero
    # suggested solution
    #https://stackoverflow.com/questions/17514377/divide-by-arrays-containing-zeros-python
    # NOTE(review): np.divide is ELEMENT-WISE; the Kalman gain formula
    # K = P H^T (H P H^T + R)^-1 calls for a matrix inverse
    # (np.linalg.inv), so this is not a true Kalman gain.
    k = np.divide(pk.dot(H.transpose()), (H.dot(pk).dot(H.transpose())+R))
    #print(k)
    # fourth step
    # new measurement value
    # Y = C*Y + Z
    # NOTE(review): measurements[i][0] appears twice; the second component
    # was presumably meant to be measurements[i][1] (the Y coordinate).
    Y = np.array([measurements[i][0], measurements[i][0], 0, 0])
    # fifth step
    #calculate the new state
    # assign it to initial state to be able to use it in the new iteration
    current_state = current_state + k.dot(Y - H.dot(current_state))
    # 6step
    # update the process covariance matrix
    pk = (np.identity(4) - k.dot(H)).dot(pk)
    # Running estimate after incorporating this measurement.
    print(current_state)
# tutorials
# https://www.youtube.com/playlist?list=PLX2gX-ftPVXU3oUFNATxGXY90AULiqnWT
"[email protected]"
] | |
5778472994ed8b4be4ac116fa0b67a3d2d5488ec | 95b6cf15c6194c0acf9496b13a9c00020e00d4bd | /lecture4/flights/migrations/0002_auto_20200831_1320.py | cd50c5d8e4180b00da268d3fc5d50913596b830a | [
"MIT"
] | permissive | fedejimenez/CS50-django | 8eccacbbf51783cd86f1897bbd9396cf80ccb61d | 09c474fc2cf3df451a326b0ce5146d35d6e3844d | refs/heads/master | 2022-12-05T08:12:21.364915 | 2020-08-31T17:13:51 | 2020-08-31T17:13:51 | 286,286,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,061 | py | # Generated by Django 3.1 on 2020-08-31 13:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: introduces the Airport model and repoints Flight's
    # origin/destination foreign keys at it (reverse accessors
    # "departures" and "arrivals").

    dependencies = [
        ('flights', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Airport',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=3)),
                ('city', models.CharField(max_length=64)),
            ],
        ),
        migrations.AlterField(
            model_name='flight',
            name='destination',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='arrivals', to='flights.airport'),
        ),
        migrations.AlterField(
            model_name='flight',
            name='origin',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='departures', to='flights.airport'),
        ),
    ]
| [
"[email protected]"
] | |
d84e02f5f2815a7f82b35a8daa0cb83b201fc09c | 6e1508ebdaf63b3afee10926bdf74ce9478f3508 | /kadanesalgorithm.py | 4ebc51ef1200e349f04a36b701abf97ccdb58046 | [] | no_license | dopeprogr4mmer/DSA | 5f2741a924bec9b6add7b4d92d207ec553576439 | 18f4bd93b264acfd4cfd91b9aa318bdf502d0339 | refs/heads/main | 2023-07-17T22:33:40.347653 | 2021-08-25T05:28:04 | 2021-08-25T05:28:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,454 | py |
"""Maximum contiguous-subarray sum (Kadane's algorithm).

This file previously contained unresolved git merge-conflict markers
(<<<<<<< / ======= / >>>>>>>), which made it a syntax error.  The two
conflicting variants are kept below as separate functions with identical
results.
"""


def maxSubArraySum(a, size):
    """Return the largest sum over all non-empty contiguous slices of a[:size].

    Unlike the conflicted versions (which initialised the best sum to 0 and
    therefore returned 0 for all-negative input), this returns the maximum
    element when every value is negative.  Empty input yields 0.
    """
    if size <= 0 or not a:
        return 0
    best = current = a[0]
    for i in range(1, size):
        # Either extend the running subarray or restart it at a[i].
        current = max(a[i], current + a[i])
        best = max(best, current)
    return best


def max_SubArray_Sum(a, size):
    """Tabulated variant of Kadane's algorithm; same result as maxSubArraySum.

    output_arr[i] holds the best sum of a subarray ending exactly at index i.
    """
    if size <= 0 or not a:
        return 0
    output_arr = [0] * size
    output_arr[0] = a[0]
    max_sum = a[0]
    for i in range(1, size):
        output_arr[i] = max(a[i], output_arr[i - 1] + a[i])
        max_sum = max(max_sum, output_arr[i])
    return max_sum


if __name__ == '__main__':
    # The conflicted file called this with size 5 despite passing 8 elements.
    print(maxSubArraySum([2, 3, -6, 3, 3, -6, 1, -5], 8))
| [
"[email protected]"
] | |
d82bb2cd4bc68a63180a9488a7a2602cda047598 | 193ac8a9b2bd33dd73c179e96cc87494fe6d385f | /one/app1/urls.py | d259194bacba90b7d2df50ec6985c336b0954cdd | [] | no_license | sasan-gsm/django | c6f9ddb59ee70fcd40452a077b504090149f2e57 | 4f6c05a3fd116a5c77463440bd54175ac6708474 | refs/heads/master | 2020-06-25T01:15:03.364934 | 2019-07-27T10:31:54 | 2019-07-27T10:31:54 | 199,151,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | from django.conf.urls import url
from .views import PostList, post_detail
urlpatterns = [
url(r'ˆ$', PostList.as_view(), {'parent_template': 'base.html'}, name='app1_post_list'),
url(r'ˆ(?P<year>\d{4})/'
r'(?P<month>\d{1,2})/'
r'(?P<slug>[\w\-]+)/$',
post_detail, name='app1_post_detail'),
] | [
"[email protected]"
] | |
6c7b3931a3c0403baae08b707f913103da3c4399 | 4771e66c3e64cec4c7d3dcf3486843b5bfb22b76 | /Assignment1/mfcc.py | 42f9a8b046558de33c5daffa74274d9379867cf6 | [] | no_license | ameyagodbole/LS621-Spoken-Language-Systems | c108334eebb9e8692a395ac2d0ec477e20c57428 | 0f08735087f56b2da4629d9b354b44b2453b9e0e | refs/heads/master | 2021-05-05T00:08:07.546366 | 2018-10-28T05:21:28 | 2018-10-28T05:21:28 | 119,502,706 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,312 | py | # Copyright (c) 2006 Carnegie Mellon University
#
# You may copy and modify this freely under the same terms as
# Sphinx-III
"""Compute MFCC coefficients.
This module provides functions for computing MFCC (mel-frequency
cepstral coefficients) as used in the Sphinx speech recognition
system.
"""
__author__ = "David Huggins-Daines <[email protected]>"
__version__ = "$Revision$"
import numpy, numpy.fft
def mel(f):
    """Map a frequency in Hz onto the mel scale."""
    return 2595. * numpy.log10(f / 700. + 1.)
def melinv(m):
    """Map a mel-scale value back to a frequency in Hz (inverse of mel)."""
    return (numpy.power(10., m / 2595.) - 1.) * 700.
class MFCC(object):
    """Sphinx-style MFCC front end.

    Pipeline per frame: pre-emphasis -> Hamming window -> |FFT|^2 ->
    triangular mel filterbank -> log -> 'legacy' Sphinx DCT.
    """

    def __init__(self, nfilt=40, ncep=13,
                 lowerf=133.3333, upperf=6855.4976, alpha=0.97,
                 samprate=16000, shiftlen=0.01, wlen=0.0256,
                 nfft=512):
        """Build the filterbank and DCT matrices.

        :param nfilt: number of mel filters
        :param ncep: number of cepstra to output
        :param lowerf, upperf: filterbank frequency range in Hz
        :param alpha: pre-emphasis coefficient
        :param samprate: sampling rate in Hz
        :param shiftlen: frame shift in seconds
        :param wlen: window length in seconds
        :param nfft: FFT size
        :raises Exception: if upperf exceeds the Nyquist frequency
        """
        # Store parameters
        self.lowerf = lowerf
        self.upperf = upperf
        self.nfft = nfft
        self.ncep = ncep
        self.nfilt = nfilt
        self.fshift = float(samprate) * shiftlen  # frame shift in samples

        # Build Hamming window
        self.wlen = int(wlen * samprate)          # window length in samples
        self.win = numpy.hamming(self.wlen)

        # Prior sample carried across frames for pre-emphasis
        self.prior = 0
        self.alpha = alpha

        # Build mel filter matrix.
        # BUG FIX: use floor division; on Python 3 `nfft/2+1` is a float and
        # numpy.zeros rejects float dimensions.
        self.filters = numpy.zeros((nfft // 2 + 1, nfilt), 'd')
        dfreq = float(samprate) / nfft
        if upperf > samprate / 2:
            # BUG FIX: the old `raise(Exception, msg)` raised a tuple, which
            # is itself a TypeError on Python 3; construct the exception.
            raise Exception("Upper frequency %f exceeds Nyquist %f"
                            % (upperf, samprate / 2))
        melmax = mel(upperf)
        melmin = mel(lowerf)
        dmelbw = (melmax - melmin) / (nfilt + 1)
        # Filter edges, in Hz
        filt_edge = melinv(melmin + dmelbw * numpy.arange(nfilt + 2, dtype='d'))

        for whichfilt in range(0, nfilt):
            # Filter triangles, in DFT points; int() keeps the indices
            # integral on both Python 2 and 3.
            leftfr = int(round(filt_edge[whichfilt] / dfreq))
            centerfr = int(round(filt_edge[whichfilt + 1] / dfreq))
            rightfr = int(round(filt_edge[whichfilt + 2] / dfreq))
            # For some reason this is calculated in Hz, though I think
            # it doesn't really matter
            fwidth = (rightfr - leftfr) * dfreq
            height = 2. / fwidth

            if centerfr != leftfr:
                leftslope = height / (centerfr - leftfr)
            else:
                leftslope = 0
            freq = leftfr + 1
            while freq < centerfr:
                self.filters[freq, whichfilt] = (freq - leftfr) * leftslope
                freq = freq + 1
            if freq == centerfr:  # This is always true
                self.filters[freq, whichfilt] = height
                freq = freq + 1
            if centerfr != rightfr:
                rightslope = height / (centerfr - rightfr)
            while freq < rightfr:
                self.filters[freq, whichfilt] = (freq - rightfr) * rightslope
                freq = freq + 1

        # Build DCT matrices (legacy Sphinx variant plus the orthogonal one).
        self.s2dct = s2dctmat(nfilt, ncep, 1. / nfilt)
        self.dct = dctmat(nfilt, ncep, numpy.pi / nfilt)

    def sig2s2mfc(self, sig):
        """Return an (nframes, ncep) array of MFCCs for the whole signal."""
        nfr = int(len(sig) / self.fshift + 1)
        mfcc = numpy.zeros((nfr, self.ncep), 'd')
        fr = 0
        while fr < nfr:
            start = int(round(fr * self.fshift))
            end = min(len(sig), start + self.wlen)
            frame = sig[start:end]
            if len(frame) < self.wlen:
                # NOTE(review): numpy.resize fills the tail by repeating the
                # signal, and the following assignment is a no-op slice;
                # zero-padding may have been intended -- behavior kept as-is.
                frame = numpy.resize(frame, self.wlen)
                frame[self.wlen:] = 0
            mfcc[fr] = self.frame2s2mfc(frame)
            fr = fr + 1
        return mfcc

    def sig2logspec(self, sig):
        """Return an (nframes, nfilt) array of log filterbank energies."""
        nfr = int(len(sig) / self.fshift + 1)
        mfcc = numpy.zeros((nfr, self.nfilt), 'd')
        fr = 0
        while fr < nfr:
            start = int(round(fr * self.fshift))
            end = min(len(sig), start + self.wlen)
            frame = sig[start:end]
            if len(frame) < self.wlen:
                # Same trailing-frame handling (and caveat) as sig2s2mfc.
                frame = numpy.resize(frame, self.wlen)
                frame[self.wlen:] = 0
            mfcc[fr] = self.frame2logspec(frame)
            fr = fr + 1
        return mfcc

    def pre_emphasis(self, frame):
        """Return y[t] = x[t] - alpha*x[t-1]; self.prior carries state across frames."""
        # FIXME: Do this with matrix multiplication
        outfr = numpy.empty(len(frame), 'd')
        outfr[0] = frame[0] - self.alpha * self.prior
        for i in range(1, len(frame)):
            outfr[i] = frame[i] - self.alpha * frame[i - 1]
        self.prior = frame[-1]
        return outfr

    def frame2logspec(self, frame):
        """Return log mel filterbank energies for a single frame."""
        frame = self.pre_emphasis(frame) * self.win
        fft = numpy.fft.rfft(frame, self.nfft)
        # Square of absolute value (power spectrum)
        power = fft.real * fft.real + fft.imag * fft.imag
        # Clip below to avoid log(0) on silent frames.
        return numpy.log(numpy.dot(power, self.filters).clip(1e-5, numpy.inf))

    def frame2s2mfc(self, frame):
        """Return Sphinx-style MFCCs (length ncep) for a single frame."""
        logspec = self.frame2logspec(frame)
        return numpy.dot(logspec, self.s2dct.T) / self.nfilt
def s2dctmat(nfilt, ncep, freqstep):
    """Return the 'legacy' not-quite-DCT matrix used by Sphinx.

    Note: the freqstep argument is accepted for interface compatibility but
    the row frequencies are derived from nfilt alone, as in the original.
    """
    bins = numpy.arange(0.5, float(nfilt) + 0.5, 1.0, 'double')
    melcos = numpy.empty((ncep, nfilt), 'double')
    for row in range(ncep):
        melcos[row] = numpy.cos((numpy.pi * float(row) / nfilt) * bins)
    # Halve the first column, matching the Sphinx convention.
    melcos[:, 0] = melcos[:, 0] * 0.5
    return melcos
def logspec2s2mfc(logspec, ncep=13):
    """Convert log-power-spectrum bins to MFCC using the 'legacy'
    Sphinx transform."""
    nframes, nfilt = logspec.shape
    transform = s2dctmat(nfilt, ncep, 1. / nfilt)
    return numpy.dot(logspec, transform.T) / nfilt
def dctmat(N, K, freqstep, orthogonalize=True):
    """Return the orthogonal DCT-II/DCT-III matrix of size NxK.

    For computing or inverting MFCCs, N is the number of
    log-power-spectrum bins while K is the number of cepstra."""
    # cosmat[n, k] = cos(freqstep * (n + 0.5) * k), built via broadcasting.
    rows = freqstep * (numpy.arange(N, dtype='double') + 0.5)
    cols = numpy.arange(K, dtype='double')
    cosmat = numpy.cos(rows[:, numpy.newaxis] * cols[numpy.newaxis, :])
    if orthogonalize:
        cosmat[:, 0] = cosmat[:, 0] / numpy.sqrt(2)
    return cosmat
def dct(input, K=13):
    """Convert log-power-spectrum to MFCC using the orthogonal DCT-II."""
    nframes, N = input.shape
    cosmat = dctmat(N, K, numpy.pi / N)
    return numpy.dot(input, cosmat) * numpy.sqrt(2.0 / N)
def dct2(input, K=13):
    """Convert log-power-spectrum to MFCC using the normalized DCT-II."""
    nframes, N = input.shape
    cosmat = dctmat(N, K, numpy.pi / N, False)
    return numpy.dot(input, cosmat) * (2.0 / N)
def idct(input, K=40):
    """Convert MFCC to log-power-spectrum using the orthogonal DCT-III."""
    nframes, N = input.shape
    cosmat = dctmat(K, N, numpy.pi / K).T
    return numpy.dot(input, cosmat) * numpy.sqrt(2.0 / K)
def dct3(input, K=40):
    """Convert MFCC to log-power-spectrum using the unnormalized DCT-III."""
    nframes, N = input.shape
    cosmat = dctmat(K, N, numpy.pi / K, False)
    # DCT-III convention: halve the zeroth-order column before the product.
    cosmat[:, 0] = cosmat[:, 0] * 0.5
    return numpy.dot(input, cosmat.T)
"[email protected]"
] | |
a0383062dd7b619a304a104fb57cf39cce1fcab4 | f0ab1869709fba67b57160f56b61a62a1ac149fa | /nn.py | 1e614c7683267990ee36b53a0416c4a96b4dd63a | [
"MIT"
] | permissive | tapionx/GeneticSnake | 8dd53bf94fc501e9f24333450e79d6671c6aeade | c373be31c1df8eb31d1349f32b0725592785bc4f | refs/heads/master | 2020-04-26T06:26:12.852625 | 2019-03-01T11:57:37 | 2019-03-01T11:57:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,377 | py | import numpy as np
from matplotlib import pyplot as plt
# Default network topology.
INPUTS = 5
HIDDEN = [4]
OUTPUTS = 3
# Drawing layout (matplotlib data units).
LAYERS_DISTANCE = 250
NEURONS_DISTANCE = 100
RADIUS = 20
# RGB colors for the network drawing.
INPUT_COLOR = (0, 1, 0)
OUTPUT_COLOR = (1, 0, 1)
NEURON_COLOR = (1, 1, 1)
DROPOUT_COLOR = (1, 1, 0)
POSITIVE_WEIGHT = (1, 0, 0)
NEGATIVE_WEIGHT = (0, 0, 1)
class NeuralNetwork:
    """Feed-forward network with per-neuron dropout flags.

    All layer weights are stored in one flat vector (self.weights), which
    get_weights() slices and reshapes per layer; presumably this flat layout
    exists so a genetic algorithm can mutate it directly -- TODO confirm.
    """

    def __init__(self, inputs=INPUTS, outputs=OUTPUTS, h_layers=HIDDEN):
        # Neuron count per layer, e.g. [5, 4, 3].
        self.layers = [inputs] + h_layers + [outputs]
        self.h_layers = len(h_layers)
        self.size = 0
        # One flag per neuron; a False entry zeroes all of its connections.
        self.dropout = [True] * sum(self.layers)
        # Total weight count: sum over consecutive layer-size products.
        for i in range(1, len(self.layers)):
            self.size += self.layers[i - 1] * self.layers[i]
        self.weights = np.random.randn(self.size)

    def get_dropout(self, layer):
        """Return (column-vector, row-vector) dropout masks for the weights
        entering `layer` (1-based from the input layer)."""
        start = 0
        end = 0
        # Mask over the source (previous) layer's neurons.
        start = sum(self.layers[0:layer-1])
        end = start + self.layers[layer-1]
        input_dropout = np.array(self.dropout[start:end]).reshape(end-start, 1)
        # Mask over this layer's neurons.
        start = sum(self.layers[0:layer])
        end = start + self.layers[layer]
        output_dropout = np.array(self.dropout[start:end]).reshape(1, end-start)
        return input_dropout, output_dropout

    def get_weights(self, layer):
        """Return the (n_in, n_out) weight matrix between `layer` and
        `layer`+1, with dropped-out neurons' rows/columns zeroed."""
        start = 0
        for i in range(layer):
            start += self.layers[i] * self.layers[i+1]
        end = start + self.layers[layer] * self.layers[layer+1]
        weights = self.weights[start:end].reshape(self.layers[layer], self.layers[layer+1])
        input_dropout, output_dropout = self.get_dropout(layer+1)
        # Boolean masks broadcast over rows then columns.
        weights = np.multiply(weights, input_dropout)
        weights = np.multiply(weights, output_dropout)
        return weights

    def softmax(self, z):
        """Numerically-stable softmax over axis 0."""
        e_z = np.exp(z - np.max(z))
        return e_z / e_z.sum(axis=0)

    def relu(self, z):
        """Rectified linear unit: max(z, 0), elementwise."""
        return z * (z > 0)

    def sigmoid(self, z):
        # NOTE(review): defined but unused by the forward pass below.
        return 1 / (1 + np.exp(-z))

    def forward_propagation(self, inputs, weights, is_output=False):
        """One layer forward: affine product then ReLU (softmax on output)."""
        z = np.matmul(inputs, weights)
        if is_output:
            a = self.softmax(z)
        else:
            a = self.relu(z)
        return a

    def compute_outputs(self, inputs):
        """Run the full forward pass; returns the softmax output vector."""
        x = inputs
        for i in range(self.h_layers):
            a = self.forward_propagation(x, self.get_weights(i))
            x = a
        return self.forward_propagation(x, self.get_weights(self.h_layers), is_output=True)

    def draw(self):
        """Render the network (neurons + weighted edges) with matplotlib."""
        plt.figure('Best Neural Network')
        plt.style.use('dark_background')
        plt.clf()
        # Edges first so the neuron discs are drawn on top of them.
        for layer in range(1, len(self.layers)):
            weights = self.get_weights(layer - 1).T
            for neuron in range(self.layers[layer]):
                for w, weight in enumerate(weights[neuron]):
                    self.__draw_weight(layer, neuron, self.layers[layer], w,
                                       self.layers[layer-1], weight)
        for layer in range(len(self.layers)):
            for neuron in range(self.layers[layer]):
                if layer == 0:
                    color = INPUT_COLOR
                elif layer == len(self.layers) - 1:
                    color = OUTPUT_COLOR
                else:
                    color = NEURON_COLOR
                # Dropped-out neurons are highlighted in a distinct color.
                if not self.dropout[sum(self.layers[0:layer]) + neuron]:
                    color = DROPOUT_COLOR
                self.__draw_neuron(layer, neuron, self.layers[layer], color)
        plt.axis('scaled')
        plt.axis('off')
        plt.pause(0.1)

    def __get_neuron_coord(self, layer, index, total):
        """Return (x, y) canvas position of a neuron, vertically centered."""
        x = layer * LAYERS_DISTANCE
        y = ((total/2) - 0.5) * NEURONS_DISTANCE - index * NEURONS_DISTANCE
        return x, y

    def __draw_neuron(self, layer, index, total, color):
        """Draw one neuron as a filled circle."""
        x, y = self.__get_neuron_coord(layer, index, total)
        circle = plt.Circle((x, y), radius=RADIUS, color=color)
        plt.gca().add_patch(circle)

    def __draw_weight(self, layer, src, total_src, dst, total_dst, weight):
        """Draw one connection; color encodes sign, line width its magnitude."""
        x_src, y_src = self.__get_neuron_coord(layer, src, total_src)
        x_src = x_src - RADIUS
        x_dst, y_dst = self.__get_neuron_coord(layer - 1, dst, total_dst)
        x_dst = x_dst + RADIUS
        if weight > 0:
            color = POSITIVE_WEIGHT
        else:
            color = NEGATIVE_WEIGHT
        line = plt.Line2D((x_src, x_dst), (y_src, y_dst), color=color)
        line.set_linewidth(abs(weight))
        plt.gca().add_line(line)
| [
"[email protected]"
] | |
5883d09ca1625ede579f97714447d90e76b7713d | c3ae3c81000d5a1f6d2bc754e4610c972b923176 | /dav_app/migrations/0004_auto_20200816_1302.py | d40a3793a2645fd207e79802eb70fef45d42c31a | [] | no_license | Aniket1317/School-Management-System | e2551f766fc3bb2c863566d1d6962f5290dfadd4 | 0e2eb88a20e2361e1327ee9b955dd74886461db0 | refs/heads/master | 2022-12-06T08:32:40.837700 | 2020-09-02T03:04:59 | 2020-09-02T03:04:59 | 292,164,625 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | # Generated by Django 3.0.5 on 2020-08-16 07:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: redefine OnlineClass.c_date with auto_now_add=True so
    the creation timestamp is stamped automatically on insert."""

    # Must run after the previous dav_app migration.
    dependencies = [
        ('dav_app', '0003_auto_20200816_1301'),
    ]

    operations = [
        migrations.AlterField(
            model_name='onlineclass',
            name='c_date',
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
| [
"[email protected]"
] | |
d20dd5ea1f4d51da90a5c1f1bf792309df12b590 | b09290b62160cda81267b7986ebdeba42ba04c13 | /posts/views.py | 3f6bd5c3b39299d327de2ac0a49ef994184f7606 | [] | no_license | tanzeel0092/Django-Project | 2cc9859549592509b37afe123d270b5d406c768d | 8d005e74e4b3e07aa7cf1521838a57cf6c50b5e5 | refs/heads/main | 2023-07-08T02:47:58.650801 | 2021-08-17T19:58:12 | 2021-08-17T19:58:12 | 397,351,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from .models import Post
from .forms import PostForm
# Create your views here.
def index(request):
    """List up to 20 posts; on POST, validate and save a new post.

    Redirects home after a successful save.
    """
    # If the method is POST
    if request.method == 'POST':
        form = PostForm(request.POST)
        # If the form is valid
        if form.is_valid():
            # Yes, save and redirect to home.
            form.save()
            return HttpResponseRedirect('/')
        else:
            # BUG FIX: `form.erros` was a typo for `form.errors`, raising
            # AttributeError on any invalid submission.
            # NOTE(review): redirecting to the serialized errors is odd --
            # re-rendering the template with the bound form would be the
            # conventional Django behavior; kept as-is apart from the typo.
            return HttpResponseRedirect(form.errors.as_json())
    # Get all posts, limit = 20
    posts = Post.objects.all()[:20]
    # Show
    return render(request, 'posts.html',
                  {'posts': posts})
def delete(request, post_id):
    """Delete the post identified by post_id and send the user back home."""
    target = Post.objects.get(id=post_id)
    target.delete()
    return HttpResponseRedirect('/')
"[email protected]"
] | |
3b12aa23f81a807198b89b5e8f7d0a2eec9c9ecd | 1419418226b6ba0f510649daaf62b71554cc2284 | /amatrice/project_GPS_M5.3_M5.8.py | 5aabd4ea3ee678dc37aff80268eb4ebefda90005 | [] | no_license | shineusn/mylife | 2ef48a777e39be2ef746c3dad16ea963d5b23e5e | 61dfa72d9047551746d26b7fe01fb5c2f1f0657a | refs/heads/master | 2020-03-22T13:44:42.422127 | 2018-02-13T18:09:43 | 2018-02-13T18:09:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,896 | py | from matplotlib import pyplot as plt
from numpy import genfromtxt,argmin,array,zeros,ones,where,linspace,r_
from matplotlib.ticker import MultipleLocator
# Load coseismic GPS offsets and the InSAR line-of-sight (LOS) scene.
g=genfromtxt('/Users/dmelgar/Amatrice2016/GPS/Cosismico_26Oct2016_GPS_GdL_V1.dat')
insar=genfromtxt(u'/Users/dmelgar/Amatrice2016/InSAR/M5.3-M5.8/Italy_T44/T44_Italy.lltnde')
# Parse GPS: lon/lat plus N/E/U offsets; /1000 presumably mm -> m (confirm).
lon_gps=g[:,1]
lat_gps=g[:,2]
north=g[:,6]/1000
east=g[:,4]/1000
up=g[:,8]/1000
# Parse InSAR: lon/lat, LOS displacement (/1000 as above) and look vector E/N/U.
lon_insar=insar[:,0]
lat_insar=insar[:,1]
los=insar[:,6]/1000
lookE=insar[:,3]
lookN=insar[:,4]
lookU=insar[:,5]
# Projection variables; 9999 flags GPS sites with no InSAR sample nearby.
projected_gps=9999*ones(len(lon_gps))
los_insar=9999*ones(len(lon_gps))
thresh=0.005
for k in range(len(lon_gps)):
    # Get distance (in degrees, flat-earth) from this GPS site to LOS points
    d=((lon_gps[k]-lon_insar)**2+(lat_gps[k]-lat_insar)**2)**0.5
    i=argmin(d)
    if d[i]<thresh:
        # Get LOS unit vector at the nearest InSAR sample
        unit_vector=array([lookE[i],lookN[i],lookU[i]])
        # Project the 3-component GPS offset onto the line of sight
        projected_gps[k]=unit_vector.dot(array([east[k],north[k],up[k]]))
        los_insar[k]=los[i]
# Top panel: horizontal GPS vectors, with a reference (1, 0) vector
# prepended at (11.65E, 43.72N).
plt.figure(figsize=(6,10))
plt.subplot(211)
plt.quiver(r_[11.65,lon_gps],r_[43.72,lat_gps],r_[1,east],r_[0,north],scale=0.11)
# Alternative vertical-offset plot, kept disabled:
#i=where(up<0)[0]
#j=where(up>=0)[0]
#plt.quiver(lon_gps[j],lat_gps[j],zeros(len(up[j])),up[j],scale=0.01,color='b')
#plt.quiver(lon_gps[i],lat_gps[i],zeros(len(up[i])),up[i],scale=0.01,color='r')
# Bottom panel: projected GPS vs InSAR LOS against a 1:1 reference line.
ax=plt.subplot(212)
i=where(projected_gps<9999)[0]
x=linspace(-0.02,0.02)
y=x
plt.plot(x,y,lw=2,c='k')
plt.scatter(projected_gps[i],los_insar[i],marker='s',s=30,lw=0.2,c='#0080FF')
plt.xlim([-0.02,0.02])
plt.ylim([-0.02,0.02])
xmajorLocator = MultipleLocator(0.01)
ymajorLocator = MultipleLocator(0.01)
ax.xaxis.set_major_locator(xmajorLocator)
ax.yaxis.set_major_locator(ymajorLocator)
plt.ylabel('InSAR LOS (m)')
plt.xlabel('Projected GPS (m)')
plt.subplots_adjust(left=0.2,right=0.97,top=0.99,bottom=0.1)
| [
"[email protected]"
] | |
29df8de6841dff50c70bc5aaa400bd5c1bfcd3d2 | cd4a46cfdd78524a3922fcdc8ec796c12516c32c | /2_passArgument/differentModes.py | 1f516381c428e6f0e8f7dd01e8713d82addfeb05 | [] | no_license | MuhammadMoustafa/Notes | 7b44cfa5b0b4602156d83955f0afbe04ffd74f92 | 717b751f4156c32f4c25152a4f8aab40f768758d | refs/heads/master | 2021-03-28T05:04:53.696767 | 2020-03-16T23:51:45 | 2020-03-16T23:51:45 | 247,839,419 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 876 | py | #https://stackoverflow.com/questions/919680/can-a-variable-number-of-arguments-be-passed-to-a-function
#https://www.saltycrane.com/blog/2008/01/how-to-use-args-and-kwargs-in-python/
def parameters(a, b):
    """Return the two arguments as a tuple (a, b)."""
    pair = (a, b)
    return pair
def defaultParameters(a, b, c=0):
    """Print the received arguments as a name->value dict (c defaults to 0)."""
    # Same output as print(locals()) at function entry: parameters only,
    # in declaration order.
    print({'a': a, 'b': b, 'c': c})
def manyArgs(*args):
    """Report how many positional arguments were received, and show them."""
    print(f"I was called with {len(args)} arguments: {args}")
def myfunc(**kwargs):
    """Print each keyword argument as 'name = value', one per line."""
    for name, value in kwargs.items():
        print(f"{name} = {value}")
def myfunc2(*args, **kwargs):
    """Print positional args prefixed with 'args', then keywords as 'k = v'."""
    for item in args:
        print("args", item)
    for name, value in kwargs.items():
        print(f"{name} = {value}")
# parameters(1, 2)
# defaultParameters(3, 4)
# defaultParameters(3, 4, 5)
# manyArgs(7)
# manyArgs(7, 8)
# manyArgs(7, 8, 9, 10, 11, 12)
# myfunc(abc=123, efh=456)
# myfunc2(1, 2, 3)
# parameters(b=1, a=2)
# Demo: tuple returned by parameters().
x = parameters(5, 8)
print(x)
"[email protected]"
] | |
8930cf37540fd3f8044646882fbdf1fbf4ed85da | e31077b1ba487140d7cb3730c8ea32c9d1c78ff6 | /post/migrations/0002_post.py | b032edb8b5d899d97e08e723688e6fcc4338fb62 | [] | no_license | devinAlex/django_learn | f714642e7c0b52a351c73551959fce8261d437f2 | fbe9cfe11f3ba0f4c8afb4eb0f0bdc8ddb2657ed | refs/heads/master | 2020-03-09T00:42:56.130419 | 2018-04-07T04:15:15 | 2018-04-07T04:15:15 | 128,495,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: create the Post model (title/content/created plus a
    foreign key to post.Category)."""

    # Requires the Category table from the initial migration.
    dependencies = [
        ('post', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
                ('title', models.CharField(max_length=100)),
                ('content', models.TextField()),
                ('created', models.DateField()),
                ('category', models.ForeignKey(to='post.Category')),
            ],
        ),
    ]
| [
"[email protected]"
] | |
b772e89cc0e4130a6b472f8b625cf5dabe9daced | c8f4d4d7d9b689d555acc3c902e96eb53b4a947c | /season-2/last-digit-of-a-huge-number.py | dc4beadb9be5d1350fa57e3cb32fbe7753d0f296 | [] | no_license | diesarrollador/Code-war--Code-logic | 3d7a2eb4611b16f7bc09d57fa3f2be4d79bfc050 | 55bb81b0667b4530fc75fa3d33fc3f36908b9f29 | refs/heads/master | 2023-06-08T01:42:02.260430 | 2021-07-04T17:15:34 | 2021-07-04T17:15:34 | 362,586,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | """ Para una lista dada [x1, x2, x3, ..., xn]
calcule el último dígito (decimal) de
x1 ^ (x2 ^ (x3 ^ (... ^ xn))). """
def last_digit(lst):
    """Last decimal digit of lst[0] ** (lst[1] ** (lst[2] ** ...)).

    An empty list yields 1 (the empty power tower).
    """
    if not lst:
        return 1
    acc = 1
    for base in reversed(lst):
        # An exponent >= 4 can be folded to (exp % 4) + 4 without changing
        # the final digit, which keeps the intermediate powers small.
        exponent = acc if acc < 4 else acc % 4 + 4
        acc = base ** exponent
    return acc % 10
# Demo: exercise last_digit on the documented examples.
print(last_digit([2, 2, 2, 0]))
print(last_digit([3, 4, 2]))
print(last_digit([0, 0]))
print(last_digit([0, 0, 0]))
print(last_digit([]))
print(last_digit([12, 30, 21]))
print(last_digit([499942, 898102, 846073]))
print(last_digit([9, 9, 9]))
"[email protected]"
] | |
c4de4f95686f6d39c4a347e4462b601fbc2bd6d2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03828/s176803120.py | 3c09dd5cfe45d562d5aee2961335ac10dec7d7b7 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 634 | py | from collections import Counter
MOD = 10 ** 9 + 7
def factorize(n):
    """ Simple factorize
    :param n: number to factorize
    :return: list of prime factors with multiplicity, in non-decreasing order
    time complexity : O(√n)  (trial division up to the square root)
    space complexity : O(log n)
    """
    factors = []
    i = 2
    # Trial division only needs candidates up to √n; whatever remains
    # afterwards (n > 1) is itself a single prime factor.  This returns
    # exactly the same list as the original O(n) scan, just faster.
    while i * i <= n:
        while n % i == 0:
            n = n // i
            factors.append(i)
        i += 1
    if n > 1:
        factors.append(n)
    return factors
def main():
    """Read N from stdin and print the number of divisors of N! modulo
    10**9 + 7 (product over prime multiplicities of (multiplicity + 1))."""
    n = int(input())
    exponents = Counter()
    for value in range(1, n + 1):
        exponents.update(factorize(value))
    result = 1
    for multiplicity in exponents.values():
        result = result * (multiplicity + 1) % MOD
    print(result)

main()
"[email protected]"
] | |
0acb3d200d67237cbb9d25a85d82e766725b679e | cb2c146075c348f429886ca405b5146b21bab62a | /Trabalho02/et.py | cc651e59349bc86ebf87c6665d5e1046e8b414b9 | [] | no_license | matheussn/PythonORI | 3b007ce53bca28a8ecb3ba7e343962c0e9271d10 | 8a688556b16199c4620f48bec62465d7ecb55f25 | refs/heads/master | 2020-03-30T06:35:44.234675 | 2018-12-18T19:13:45 | 2018-12-18T19:13:45 | 150,873,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | import pickle
import nltk
if __name__ == '__main__':
    # Train a unigram POS tagger on the MacMorpho tagged corpus and
    # persist it with pickle.
    sents = nltk.corpus.mac_morpho.tagged_sents()
    et = nltk.tag.UnigramTagger(sents)
    # BUG FIX: the file handle was opened and never closed; 'with'
    # guarantees the pickle is flushed and the handle released.
    with open('etiquetador', 'wb') as f:
        pickle.dump(et, f)
"[email protected]"
] | |
5334784f4ec0721da7a057223fdb7a784998fc5d | f73c5a93e4ef0a058bbe0774b41b228baa3cb0d5 | /ParseScores.py | 2056fde130d176bb7307a13773a0baead8ae91f2 | [] | no_license | lukeboi/FTCScouting | dcbb4e73d62e8d4d54ddc717dc3b8c2276a7b00c | 2bdd445a416e71e37742d310fe3c6a018be42973 | refs/heads/master | 2021-05-31T21:30:12.554016 | 2016-03-13T18:07:12 | 2016-03-13T18:07:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,181 | py | #Code by Luke Farritor
#Project started on Mar 1, 2016
#Imports
import openpyxl
from openpyxl.cell import get_column_letter, column_index_from_string
# Parameters
oldWb = "Scoring-System-Results(2).xlsx" #Downloaded Wb name (input scores)
oldSheetName = "Sheet1" #Sheet to get scores from
oldSheetRows = 2818 #Rows in old sheet to be worked with
oldSheetStartRow = 2 #Row that data starts on (row 1 is the header)
newWb = "Parsed.xlsx" #New Wb name (output)
# Load the scoring workbook and the sheet to read from.
print("Loading Workbook... (This will take a moment)")
oldscrFl = openpyxl.load_workbook(oldWb)
print("Loading Sheet...")
oldScrSht = oldscrFl.get_sheet_by_name(oldSheetName)
print("Loaded.")
# Create the output workbook; scrsSht receives the parsed rows.
print("Making new sheet titled " + newWb + "...")
scrsFl = openpyxl.Workbook()
scrsSht = scrsFl.active
print("Beginning to parse match data")
def scorePosition(pos):
    """Map an end-of-period robot position code (as a string) to points.

    Unknown codes score 0.
    """
    position_points = {
        '1': 5,   # beacon repair
        '2': 5,   # floor goal
        '3': 5,   # on mountain and floor
        '4': 10,  # mountain low
        '5': 20,  # mountain mid
        '6': 40,  # mountain high
    }
    return position_points.get(pos, 0)
def parse ( row ):
    """Score one match (spreadsheet row) and write the result to scrsSht.

    Reads team numbers and per-period scoring cells from oldScrSht,
    totals red/blue alliance points, decides the winner and copies the
    event info plus computed scores into the output sheet (same row).
    """
    #get match data
    #red: team numbers, then autonomous/teleop position codes
    r1 = int(oldScrSht['H' + str(row)].value)
    r2 = int(oldScrSht['I' + str(row)].value)
    r3 = int(oldScrSht['J' + str(row)].value)
    r1APos = int(oldScrSht['AA' + str(row)].value)
    r2APos = int(oldScrSht['AB' + str(row)].value)
    r1TPos = int(oldScrSht['AE' + str(row)].value)
    r2TPos = int(oldScrSht['AF' + str(row)].value)
    redScr = 0
    #blue: same layout, different columns
    b1 = int(oldScrSht['K' + str(row)].value)
    b2 = int(oldScrSht['L' + str(row)].value)
    b3 = int(oldScrSht['M' + str(row)].value)
    b1APos = int(oldScrSht['AS' + str(row)].value)
    b2APos = int(oldScrSht['AT' + str(row)].value)
    b1TPos = int(oldScrSht['AW' + str(row)].value)
    b2TPos = int(oldScrSht['AX' + str(row)].value)
    blueScr = 0
    winner = 't' #t = tie, r = red won, b = blue won
    #get match scores (counts in each cell times the point value)
    #Red - autonomous
    redScr += scorePosition(str(r1APos)) #position at end of auto
    redScr += scorePosition(str(r2APos)) #position at end of auto
    redScr += int(oldScrSht['AC' + str(row)].value) * 20 #beacon
    redScr += int(oldScrSht['AD' + str(row)].value) * 10 #climbers in shelter
    #Teleop
    redScr += scorePosition(str(r1TPos)) #position at end of teleop
    redScr += scorePosition(str(r2TPos)) #position at end of teleop
    redScr += int(oldScrSht['AG' + str(row)].value) * 1 #Floor Goal
    redScr += int(oldScrSht['AI' + str(row)].value) * 5 #Low Goal
    redScr += int(oldScrSht['AJ' + str(row)].value) * 10 #Mid Goal
    redScr += int(oldScrSht['AH' + str(row)].value) * 15 #High Goal
    redScr += int(oldScrSht['AK' + str(row)].value) * 10 #Climbers in Shelter
    redScr += int(oldScrSht['AL' + str(row)].value) * 20 #Zip Line
    redScr += int(oldScrSht['AM' + str(row)].value) * 20 #All Clear
    redScr += int(oldScrSht['AN' + str(row)].value) * 80 #Pull up
    #Blue - autonomous
    blueScr += scorePosition(str(b1APos)) #position at end of auto
    blueScr += scorePosition(str(b2APos)) #position at end of auto
    blueScr += int(oldScrSht['AU' + str(row)].value) * 20 #beacon
    blueScr += int(oldScrSht['AV' + str(row)].value) * 10 #climbers in shelter
    #Teleop
    blueScr += scorePosition(str(b1TPos)) #position at end of teleop
    blueScr += scorePosition(str(b2TPos)) #position at end of teleop
    blueScr += int(oldScrSht['AY' + str(row)].value) * 1 #Floor Goal
    blueScr += int(oldScrSht['BA' + str(row)].value) * 5 #Low Goal
    blueScr += int(oldScrSht['BB' + str(row)].value) * 10 #Mid Goal
    blueScr += int(oldScrSht['AZ' + str(row)].value) * 15 #High Goal
    blueScr += int(oldScrSht['BC' + str(row)].value) * 10 #Climbers in Shelter
    blueScr += int(oldScrSht['BD' + str(row)].value) * 20 #Zip Line
    blueScr += int(oldScrSht['BE' + str(row)].value) * 20 #All Clear
    blueScr += int(oldScrSht['BF' + str(row)].value) * 80 #Pull up
    # Decide the winner; winner stays 't' on a tie.
    if(blueScr > redScr):
        winner = 'b'
    elif(redScr > blueScr):
        winner = 'r'
    # A third-team slot of 0 means "no third team" -- blank it in the output.
    if(r3 == 0):
        r3 = ''
    if(b3 == 0):
        b3 = ''
    # Write the parsed row: event info, teams, scores, winner, summary.
    scrsSht['A' + str(row)] = oldScrSht['A' + str(row)].value #move date
    scrsSht['B' + str(row)] = oldScrSht['B' + str(row)].value #move event name
    scrsSht['C' + str(row)] = oldScrSht['C' + str(row)].value #move event region
    scrsSht['D' + str(row)] = oldScrSht['D' + str(row)].value #move event type
    scrsSht['E' + str(row)] = r1
    scrsSht['F' + str(row)] = r2
    scrsSht['G' + str(row)] = r3
    scrsSht['H' + str(row)] = redScr
    scrsSht['I' + str(row)] = b1
    scrsSht['J' + str(row)] = b2
    scrsSht['K' + str(row)] = b3
    scrsSht['L' + str(row)] = blueScr
    scrsSht['M' + str(row)] = winner
    scrsSht['N' + str(row)] = str(r1) + ',' + str(r2) + ',' + str(r3) + " vs. " + str(b1) + ', ' + str(b2) + ',' + str(b3) + 'WINNER: ' + winner
# Parse every data row, then save the output workbook.
for row in range(oldSheetStartRow, oldSheetRows):
    parse(row)
scrsFl.save('parsed.xlsx')
print('complete')
print('press enter to continue')
# Keep the console window open until the user presses enter.
t = input()
| [
"[email protected]"
] | |
72d826befd48d579b5ffe05cdbba64e9680fe058 | 21fe1ec641f5a0ca022d877894ad1fa773719e59 | /python剑指offer/使用__new__实现单例模式.py | fc47c113aecdef206e4aca51e96ce19711df16e0 | [] | no_license | GDUTwuchao/leetcode_jianzhioffer | a1ab92c7e08a5b4df68a2ed605f4ec5921bab989 | a729d302c148825df73bd0fea3b7c724c3fa4968 | refs/heads/master | 2020-07-26T06:51:12.005080 | 2019-09-15T01:15:31 | 2019-09-15T01:15:31 | 208,569,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 847 | py | class SingleTon(object):
_instance = {}
def __new__(cls, *args, **kwargs):
if cls not in cls._instance:
cls._instance[cls] = super(SingleTon, cls).__new__(cls, *args, **kwargs)
# print cls._instance
return cls._instance[cls]
class MyClass(SingleTon):
    """Demo singleton subclass exercising instance/static/class methods."""

    class_val = 22

    def __init__(self, val):
        self.val = val

    def obj_fun(self):
        # Same output as print(self.val, 'obj_fun').
        print(f"{self.val} obj_fun")

    @staticmethod
    def static_fun():
        print('staticmethod')

    @classmethod
    def class_fun(cls):
        # Same output as print(cls.class_val, 'classmethod').
        print(f"{cls.class_val} classmethod")
if __name__ == '__main__':
    a = MyClass(1)
    b = MyClass(2)
    # Both constructions return the very same object.
    print(a is b)  # True
    print(id(a), id(b) )  # identical ids, e.g. 4367665424 4367665424
    # Type check: both references are MyClass instances.
    print (type(a) )  # <class '__main__.MyClass'>
    print (type(b) )  # <class '__main__.MyClass'>
"[email protected]"
] | |
a2c079a98705ce6a129fe2a91296597395f2abee | afb2bdf8044e4c9ff09b1b8379efbc17867d8cc0 | /4parts/challenge/challenge2.py | e60f5117ceda493cf23d8d7097d1376bfa4b1068 | [] | no_license | ChenFu0420/leranpython | b2e364ff8d6730a3eb768b76f0369faa3367dfa2 | 52d0aa614d7fab19e17bbb696330a0330d3862b6 | refs/heads/master | 2020-05-29T19:46:24.020046 | 2019-09-25T09:17:10 | 2019-09-25T09:17:10 | 189,339,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | n = eval(input())
# Print an n-row star pyramid: row r is left-padded with (n - r) spaces and
# holds an odd number of stars (1, 3, 5, ...).  Emits exactly the same
# characters as the original nested print loops.
for row in range(n):
    print(" " * (n - row), end="")
    print("*" * (2 * row + 1))
"[email protected]"
] | |
974d749d361019cdd9d6bb1b34a159f82ee40042 | 5d6201c7da4f19bc92f003b98629a10bd62e2426 | /main/migrations/0002_auto_20151106_1447.py | 2124b4360f205d273ee5ba1b8c5961096578fe9e | [] | no_license | azul-cloud/travelblogwave | 35b24cf9550a544eeaeaa01d99b085930f5f410b | 8c5dba290723484c3832606e9da0deba642395de | refs/heads/master | 2021-01-21T05:01:11.100319 | 2016-05-30T12:25:25 | 2016-05-30T12:25:25 | 22,630,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 532 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated: tighten User.username to the standard 30-character
    validated username field (no default preserved)."""

    # Must run after the app's initial migration.
    dependencies = [
        ('main', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='username',
            field=models.CharField(help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=30, verbose_name='username'),
            preserve_default=False,
        ),
    ]
| [
"[email protected]"
] | |
a51e4e5475b94e6166da433780dd9701e745454a | 42ed26d16ebb087b417b0bbc4b11ccd20707a522 | /book_1/chp09.8.py | 6dc8bdddd9550a5e1529733236e1fad27b6b24d1 | [
"MIT"
] | permissive | D-Mbithi/Real-Python-Course-Solutions | 12af04f482f164624243e355e8c30515ed4a383d | 6e743af5f9f40260df8d42b667b3535caed9db3b | refs/heads/master | 2023-02-10T17:35:16.755864 | 2019-07-02T11:08:54 | 2019-07-02T11:08:54 | 164,014,211 | 1 | 0 | MIT | 2023-02-02T06:30:21 | 2019-01-03T19:27:09 | Python | UTF-8 | Python | false | false | 22 | py | import os
import glob
| [
"[email protected]"
] | |
111a8f8f01d47e8e86aaac97e070c4623db3f485 | ada61d2d0b227a0d428c237ebc6df87137a5f8b3 | /third_party/skia/common/third_party/externals/boto/tests/unit/ec2/test_connection.py | c4dfa7a22b6af09616ac6214ff0e3d365869e53d | [
"BSD-3-Clause",
"MIT"
] | permissive | lineCode/libui-1 | 240b22f8ed542e6dc3d623b465d1170b8cb03b31 | 53e01ad28601aa0fb7b050a39185b46de0bd99fa | refs/heads/master | 2021-01-24T16:48:40.299172 | 2015-11-19T01:46:28 | 2015-11-19T01:46:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,389 | py | #!/usr/bin/env python
import httplib
from datetime import datetime, timedelta
from mock import MagicMock, Mock
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
import boto.ec2
from boto.regioninfo import RegionInfo
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
from boto.ec2.connection import EC2Connection
from boto.ec2.snapshot import Snapshot
from boto.ec2.reservedinstance import ReservedInstancesConfiguration
class TestEC2ConnectionBase(AWSMockServiceTestCase):
    """Shared fixture: binds an EC2Connection to the mocked HTTP service."""
    connection_class = EC2Connection

    def setUp(self):
        """Expose the mocked service connection as self.ec2 for subclasses."""
        super(TestEC2ConnectionBase, self).setUp()
        self.ec2 = self.service_connection
class TestReservedInstanceOfferings(TestEC2ConnectionBase):
    """Tests for EC2Connection.get_all_reserved_instances_offerings:
    response parsing and request-parameter serialization."""

    def default_body(self):
        """Canned DescribeReservedInstancesOfferings XML with two offerings."""
        return """
            <DescribeReservedInstancesOfferingsResponse>
                <requestId>d3253568-edcf-4897-9a3d-fb28e0b3fa38</requestId>
                <reservedInstancesOfferingsSet>
                    <item>
                        <reservedInstancesOfferingId>2964d1bf71d8</reservedInstancesOfferingId>
                        <instanceType>c1.medium</instanceType>
                        <availabilityZone>us-east-1c</availabilityZone>
                        <duration>94608000</duration>
                        <fixedPrice>775.0</fixedPrice>
                        <usagePrice>0.0</usagePrice>
                        <productDescription>product description</productDescription>
                        <instanceTenancy>default</instanceTenancy>
                        <currencyCode>USD</currencyCode>
                        <offeringType>Heavy Utilization</offeringType>
                        <recurringCharges>
                            <item>
                                <frequency>Hourly</frequency>
                                <amount>0.095</amount>
                            </item>
                        </recurringCharges>
                        <marketplace>false</marketplace>
                        <pricingDetailsSet>
                            <item>
                                <price>0.045</price>
                                <count>1</count>
                            </item>
                        </pricingDetailsSet>
                    </item>
                    <item>
                        <reservedInstancesOfferingId>2dce26e46889</reservedInstancesOfferingId>
                        <instanceType>c1.medium</instanceType>
                        <availabilityZone>us-east-1c</availabilityZone>
                        <duration>94608000</duration>
                        <fixedPrice>775.0</fixedPrice>
                        <usagePrice>0.0</usagePrice>
                        <productDescription>Linux/UNIX</productDescription>
                        <instanceTenancy>default</instanceTenancy>
                        <currencyCode>USD</currencyCode>
                        <offeringType>Heavy Utilization</offeringType>
                        <recurringCharges>
                            <item>
                                <frequency>Hourly</frequency>
                                <amount>0.035</amount>
                            </item>
                        </recurringCharges>
                        <marketplace>false</marketplace>
                        <pricingDetailsSet/>
                    </item>
                </reservedInstancesOfferingsSet>
                <nextToken>next_token</nextToken>
            </DescribeReservedInstancesOfferingsResponse>
        """

    def test_get_reserved_instance_offerings(self):
        """Every field of the first offering is parsed from the XML above."""
        self.set_http_response(status_code=200)
        response = self.ec2.get_all_reserved_instances_offerings()
        self.assertEqual(len(response), 2)
        instance = response[0]
        self.assertEqual(instance.id, '2964d1bf71d8')
        self.assertEqual(instance.instance_type, 'c1.medium')
        self.assertEqual(instance.availability_zone, 'us-east-1c')
        self.assertEqual(instance.duration, 94608000)
        self.assertEqual(instance.fixed_price, '775.0')
        self.assertEqual(instance.usage_price, '0.0')
        self.assertEqual(instance.description, 'product description')
        self.assertEqual(instance.instance_tenancy, 'default')
        self.assertEqual(instance.currency_code, 'USD')
        self.assertEqual(instance.offering_type, 'Heavy Utilization')
        self.assertEqual(len(instance.recurring_charges), 1)
        self.assertEqual(instance.recurring_charges[0].frequency, 'Hourly')
        self.assertEqual(instance.recurring_charges[0].amount, '0.095')
        self.assertEqual(len(instance.pricing_details), 1)
        self.assertEqual(instance.pricing_details[0].price, '0.045')
        self.assertEqual(instance.pricing_details[0].count, '1')

    def test_get_reserved_instance_offerings_params(self):
        """Each keyword argument maps to the expected wire parameter."""
        self.set_http_response(status_code=200)
        self.ec2.get_all_reserved_instances_offerings(
            reserved_instances_offering_ids=['id1','id2'],
            instance_type='t1.micro',
            availability_zone='us-east-1',
            product_description='description',
            instance_tenancy='dedicated',
            offering_type='offering_type',
            include_marketplace=False,
            min_duration=100,
            max_duration=1000,
            max_instance_count=1,
            next_token='next_token',
            max_results=10
        )
        self.assert_request_parameters({
            'Action': 'DescribeReservedInstancesOfferings',
            'ReservedInstancesOfferingId.1': 'id1',
            'ReservedInstancesOfferingId.2': 'id2',
            'InstanceType': 't1.micro',
            'AvailabilityZone': 'us-east-1',
            'ProductDescription': 'description',
            'InstanceTenancy': 'dedicated',
            'OfferingType': 'offering_type',
            'IncludeMarketplace': 'false',
            'MinDuration': '100',
            'MaxDuration': '1000',
            'MaxInstanceCount': '1',
            'NextToken': 'next_token',
            'MaxResults': '10',},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                'SignatureVersion', 'Timestamp', 'Version'])
class TestPurchaseReservedInstanceOffering(TestEC2ConnectionBase):
    """Tests request serialization for PurchaseReservedInstancesOffering."""
    def default_body(self):
        # Minimal canned response; only request serialization is checked.
        return """<PurchaseReservedInstancesOffering />"""
    def test_serialized_api_args(self):
        """Offering id, count and the (amount, currency) limit-price tuple
        map onto the expected API parameters."""
        self.set_http_response(status_code=200)
        # Return value is not inspected here (unused binding removed);
        # only the serialized request matters.
        self.ec2.purchase_reserved_instance_offering(
            'offering_id', 1, (100.0, 'USD'))
        self.assert_request_parameters({
            'Action': 'PurchaseReservedInstancesOffering',
            'InstanceCount': 1,
            'ReservedInstancesOfferingId': 'offering_id',
            'LimitPrice.Amount': '100.0',
            'LimitPrice.CurrencyCode': 'USD',},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
class TestCreateImage(TestEC2ConnectionBase):
    """Tests request serialization for the CreateImage API call."""
    def default_body(self):
        return """<CreateImageResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
   <requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
   <imageId>ami-4fa54026</imageId>
</CreateImageResponse>"""
    def test_minimal(self):
        """Only the instance id and image name are required."""
        self.set_http_response(status_code=200)
        # Returned image id is not inspected (unused binding removed);
        # only the serialized request matters.
        self.ec2.create_image(
            'instance_id', 'name')
        self.assert_request_parameters({
            'Action': 'CreateImage',
            'InstanceId': 'instance_id',
            'Name': 'name'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
    def test_block_device_mapping(self):
        """A block device mapping is flattened into indexed parameters."""
        self.set_http_response(status_code=200)
        bdm = BlockDeviceMapping()
        bdm['test'] = BlockDeviceType()
        self.ec2.create_image(
            'instance_id', 'name', block_device_mapping=bdm)
        self.assert_request_parameters({
            'Action': 'CreateImage',
            'InstanceId': 'instance_id',
            'Name': 'name',
            'BlockDeviceMapping.1.DeviceName': 'test',
            'BlockDeviceMapping.1.Ebs.DeleteOnTermination': 'false'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
class TestCancelReservedInstancesListing(TestEC2ConnectionBase):
    """Tests response parsing for CancelReservedInstancesListing."""
    def default_body(self):
        # Canned response: one cancelled listing with four instance-count
        # states and a five-term price schedule (all terms inactive).
        return """
            <CancelReservedInstancesListingResponse>
                <requestId>request_id</requestId>
                <reservedInstancesListingsSet>
                    <item>
                        <reservedInstancesListingId>listing_id</reservedInstancesListingId>
                        <reservedInstancesId>instance_id</reservedInstancesId>
                        <createDate>2012-07-12T16:55:28.000Z</createDate>
                        <updateDate>2012-07-12T16:55:28.000Z</updateDate>
                        <status>cancelled</status>
                        <statusMessage>CANCELLED</statusMessage>
                        <instanceCounts>
                            <item>
                                <state>Available</state>
                                <instanceCount>0</instanceCount>
                            </item>
                            <item>
                                <state>Sold</state>
                                <instanceCount>0</instanceCount>
                            </item>
                            <item>
                                <state>Cancelled</state>
                                <instanceCount>1</instanceCount>
                            </item>
                            <item>
                                <state>Pending</state>
                                <instanceCount>0</instanceCount>
                            </item>
                        </instanceCounts>
                        <priceSchedules>
                            <item>
                                <term>5</term>
                                <price>166.64</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>4</term>
                                <price>133.32</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>3</term>
                                <price>99.99</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>2</term>
                                <price>66.66</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>1</term>
                                <price>33.33</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                        </priceSchedules>
                        <tagSet/>
                        <clientToken>XqJIt1342112125076</clientToken>
                    </item>
                </reservedInstancesListingsSet>
            </CancelReservedInstancesListingResponse>
        """
    def test_reserved_instances_listing(self):
        """The cancelled listing's status, counts and schedules are parsed."""
        self.set_http_response(status_code=200)
        response = self.ec2.cancel_reserved_instances_listing()
        self.assertEqual(len(response), 1)
        cancellation = response[0]
        self.assertEqual(cancellation.status, 'cancelled')
        self.assertEqual(cancellation.status_message, 'CANCELLED')
        self.assertEqual(len(cancellation.instance_counts), 4)
        first = cancellation.instance_counts[0]
        self.assertEqual(first.state, 'Available')
        self.assertEqual(first.instance_count, 0)
        self.assertEqual(len(cancellation.price_schedules), 5)
        schedule = cancellation.price_schedules[0]
        # term is parsed as an int, price stays a string.
        self.assertEqual(schedule.term, 5)
        self.assertEqual(schedule.price, '166.64')
        self.assertEqual(schedule.currency_code, 'USD')
        self.assertEqual(schedule.active, False)
class TestCreateReservedInstancesListing(TestEC2ConnectionBase):
    """Tests request serialization and response parsing for
    CreateReservedInstancesListing."""
    def default_body(self):
        # Canned response: one active listing with an eleven-term price
        # schedule where only the longest term is still active.
        return """
            <CreateReservedInstancesListingResponse>
                <requestId>request_id</requestId>
                <reservedInstancesListingsSet>
                    <item>
                        <reservedInstancesListingId>listing_id</reservedInstancesListingId>
                        <reservedInstancesId>instance_id</reservedInstancesId>
                        <createDate>2012-07-17T17:11:09.449Z</createDate>
                        <updateDate>2012-07-17T17:11:09.468Z</updateDate>
                        <status>active</status>
                        <statusMessage>ACTIVE</statusMessage>
                        <instanceCounts>
                            <item>
                                <state>Available</state>
                                <instanceCount>1</instanceCount>
                            </item>
                            <item>
                                <state>Sold</state>
                                <instanceCount>0</instanceCount>
                            </item>
                            <item>
                                <state>Cancelled</state>
                                <instanceCount>0</instanceCount>
                            </item>
                            <item>
                                <state>Pending</state>
                                <instanceCount>0</instanceCount>
                            </item>
                        </instanceCounts>
                        <priceSchedules>
                            <item>
                                <term>11</term>
                                <price>2.5</price>
                                <currencyCode>USD</currencyCode>
                                <active>true</active>
                            </item>
                            <item>
                                <term>10</term>
                                <price>2.5</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>9</term>
                                <price>2.5</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>8</term>
                                <price>2.0</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>7</term>
                                <price>2.0</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>6</term>
                                <price>2.0</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>5</term>
                                <price>1.5</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>4</term>
                                <price>1.5</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>3</term>
                                <price>0.7</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>2</term>
                                <price>0.7</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                            <item>
                                <term>1</term>
                                <price>0.1</price>
                                <currencyCode>USD</currencyCode>
                                <active>false</active>
                            </item>
                        </priceSchedules>
                        <tagSet/>
                        <clientToken>myIdempToken1</clientToken>
                    </item>
                </reservedInstancesListingsSet>
            </CreateReservedInstancesListingResponse>
        """
    def test_create_reserved_instances_listing(self):
        """Price-schedule tuples are serialized zero-indexed and the
        created listing's counts and schedules are parsed back."""
        self.set_http_response(status_code=200)
        response = self.ec2.create_reserved_instances_listing(
            'instance_id', 1, [('2.5', 11), ('2.0', 8)], 'client_token')
        self.assertEqual(len(response), 1)
        # Variable name mirrors the cancel test above; this object is the
        # newly created listing.
        cancellation = response[0]
        self.assertEqual(cancellation.status, 'active')
        self.assertEqual(cancellation.status_message, 'ACTIVE')
        self.assertEqual(len(cancellation.instance_counts), 4)
        first = cancellation.instance_counts[0]
        self.assertEqual(first.state, 'Available')
        self.assertEqual(first.instance_count, 1)
        self.assertEqual(len(cancellation.price_schedules), 11)
        schedule = cancellation.price_schedules[0]
        self.assertEqual(schedule.term, 11)
        self.assertEqual(schedule.price, '2.5')
        self.assertEqual(schedule.currency_code, 'USD')
        self.assertEqual(schedule.active, True)
        self.assert_request_parameters({
            'Action': 'CreateReservedInstancesListing',
            'ReservedInstancesId': 'instance_id',
            'InstanceCount': '1',
            'ClientToken': 'client_token',
            'PriceSchedules.0.Price': '2.5',
            'PriceSchedules.0.Term': '11',
            'PriceSchedules.1.Price': '2.0',
            'PriceSchedules.1.Term': '8',},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
class TestDescribeSpotInstanceRequests(TestEC2ConnectionBase):
    """Tests response parsing for DescribeSpotInstanceRequests."""
    def default_body(self):
        # Canned response: a single fulfilled one-time spot request with a
        # minimal launch specification.
        return """
        <DescribeSpotInstanceRequestsResponse>
          <requestId>requestid</requestId>
          <spotInstanceRequestSet>
            <item>
              <spotInstanceRequestId>sir-id</spotInstanceRequestId>
              <spotPrice>0.003000</spotPrice>
              <type>one-time</type>
              <state>active</state>
              <status>
                <code>fulfilled</code>
                <updateTime>2012-10-19T18:09:26.000Z</updateTime>
                <message>Your Spot request is fulfilled.</message>
              </status>
              <launchGroup>mylaunchgroup</launchGroup>
              <launchSpecification>
                <imageId>ami-id</imageId>
                <keyName>mykeypair</keyName>
                <groupSet>
                  <item>
                    <groupId>sg-id</groupId>
                    <groupName>groupname</groupName>
                  </item>
                </groupSet>
                <instanceType>t1.micro</instanceType>
                <monitoring>
                  <enabled>false</enabled>
                </monitoring>
              </launchSpecification>
              <instanceId>i-id</instanceId>
              <createTime>2012-10-19T18:07:05.000Z</createTime>
              <productDescription>Linux/UNIX</productDescription>
              <launchedAvailabilityZone>us-east-1d</launchedAvailabilityZone>
            </item>
          </spotInstanceRequestSet>
        </DescribeSpotInstanceRequestsResponse>
        """
    # Renamed from ``test_describe_spot_instance_requets`` (typo). unittest
    # discovers tests by the ``test`` prefix, so the rename is
    # discovery-compatible.
    def test_describe_spot_instance_requests(self):
        """Scalar fields, the launch specification and the status record of
        the spot request are all parsed from the XML."""
        self.set_http_response(status_code=200)
        response = self.ec2.get_all_spot_instance_requests()
        self.assertEqual(len(response), 1)
        spotrequest = response[0]
        self.assertEqual(spotrequest.id, 'sir-id')
        self.assertEqual(spotrequest.price, 0.003)
        self.assertEqual(spotrequest.type, 'one-time')
        self.assertEqual(spotrequest.state, 'active')
        # Fields absent from the canned XML parse as None.
        self.assertEqual(spotrequest.fault, None)
        self.assertEqual(spotrequest.valid_from, None)
        self.assertEqual(spotrequest.valid_until, None)
        self.assertEqual(spotrequest.launch_group, 'mylaunchgroup')
        self.assertEqual(spotrequest.launched_availability_zone, 'us-east-1d')
        self.assertEqual(spotrequest.product_description, 'Linux/UNIX')
        self.assertEqual(spotrequest.availability_zone_group, None)
        self.assertEqual(spotrequest.create_time,
                         '2012-10-19T18:07:05.000Z')
        self.assertEqual(spotrequest.instance_id, 'i-id')
        launch_spec = spotrequest.launch_specification
        self.assertEqual(launch_spec.key_name, 'mykeypair')
        self.assertEqual(launch_spec.instance_type, 't1.micro')
        self.assertEqual(launch_spec.image_id, 'ami-id')
        self.assertEqual(launch_spec.placement, None)
        self.assertEqual(launch_spec.kernel, None)
        self.assertEqual(launch_spec.ramdisk, None)
        self.assertEqual(launch_spec.monitored, False)
        self.assertEqual(launch_spec.subnet_id, None)
        self.assertEqual(launch_spec.block_device_mapping, None)
        self.assertEqual(launch_spec.instance_profile, None)
        self.assertEqual(launch_spec.ebs_optimized, False)
        status = spotrequest.status
        self.assertEqual(status.code, 'fulfilled')
        self.assertEqual(status.update_time, '2012-10-19T18:09:26.000Z')
        self.assertEqual(status.message, 'Your Spot request is fulfilled.')
class TestCopySnapshot(TestEC2ConnectionBase):
    """Tests request serialization and parsing for CopySnapshot."""
    def default_body(self):
        return """
        <CopySnapshotResponse xmlns="http://ec2.amazonaws.com/doc/2012-12-01/">
            <requestId>request_id</requestId>
            <snapshotId>snap-copied-id</snapshotId>
        </CopySnapshotResponse>
        """
    def test_copy_snapshot(self):
        """copy_snapshot returns the new snapshot id from the response."""
        self.set_http_response(status_code=200)
        copied_id = self.ec2.copy_snapshot(
            'us-west-2', 'snap-id', 'description')
        self.assertEqual(copied_id, 'snap-copied-id')
        expected = {
            'Action': 'CopySnapshot',
            'Description': 'description',
            'SourceRegion': 'us-west-2',
            'SourceSnapshotId': 'snap-id',
        }
        self.assert_request_parameters(
            expected,
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
class TestCopyImage(TestEC2ConnectionBase):
    """Tests request serialization and parsing for CopyImage."""
    def default_body(self):
        return """
        <CopyImageResponse xmlns="http://ec2.amazonaws.com/doc/2013-07-15/">
            <requestId>request_id</requestId>
            <imageId>ami-copied-id</imageId>
        </CopyImageResponse>
        """
    def test_copy_image(self):
        """All keyword arguments are serialized and the copied image id is
        parsed from the response."""
        self.set_http_response(status_code=200)
        copy = self.ec2.copy_image(
            'us-west-2', 'ami-id', 'name', 'description', 'client-token')
        self.assertEqual(copy.image_id, 'ami-copied-id')
        expected = {
            'Action': 'CopyImage',
            'Description': 'description',
            'Name': 'name',
            'SourceRegion': 'us-west-2',
            'SourceImageId': 'ami-id',
            'ClientToken': 'client-token',
        }
        self.assert_request_parameters(
            expected,
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
    def test_copy_image_without_name(self):
        """When no name is given, no Name parameter is serialized."""
        self.set_http_response(status_code=200)
        copy = self.ec2.copy_image(
            'us-west-2', 'ami-id',
            description='description',
            client_token='client-token')
        self.assertEqual(copy.image_id, 'ami-copied-id')
        expected = {
            'Action': 'CopyImage',
            'Description': 'description',
            'SourceRegion': 'us-west-2',
            'SourceImageId': 'ami-id',
            'ClientToken': 'client-token',
        }
        self.assert_request_parameters(
            expected,
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
class TestAccountAttributes(TestEC2ConnectionBase):
    """Tests response parsing for DescribeAccountAttributes."""
    def default_body(self):
        # Canned response: four account attributes, one of which
        # (supported-platforms) carries multiple values.
        return """
        <DescribeAccountAttributesResponse xmlns="http://ec2.amazonaws.com/doc/2012-12-01/">
          <requestId>6d042e8a-4bc3-43e8-8265-3cbc54753f14</requestId>
          <accountAttributeSet>
            <item>
              <attributeName>vpc-max-security-groups-per-interface</attributeName>
              <attributeValueSet>
                <item>
                  <attributeValue>5</attributeValue>
                </item>
              </attributeValueSet>
            </item>
            <item>
              <attributeName>max-instances</attributeName>
              <attributeValueSet>
                <item>
                  <attributeValue>50</attributeValue>
                </item>
              </attributeValueSet>
            </item>
            <item>
              <attributeName>supported-platforms</attributeName>
              <attributeValueSet>
                <item>
                  <attributeValue>EC2</attributeValue>
                </item>
                <item>
                  <attributeValue>VPC</attributeValue>
                </item>
              </attributeValueSet>
            </item>
            <item>
              <attributeName>default-vpc</attributeName>
              <attributeValueSet>
                <item>
                  <attributeValue>none</attributeValue>
                </item>
              </attributeValueSet>
            </item>
          </accountAttributeSet>
        </DescribeAccountAttributesResponse>
        """
    def test_describe_account_attributes(self):
        """Four attributes parse; the first and last are spot-checked."""
        self.set_http_response(status_code=200)
        parsed = self.ec2.describe_account_attributes()
        self.assertEqual(len(parsed), 4)
        self.assertEqual(parsed[0].attribute_name,
                         'vpc-max-security-groups-per-interface')
        self.assertEqual(parsed[0].attribute_values,
                         ['5'])
        self.assertEqual(parsed[-1].attribute_name,
                         'default-vpc')
        self.assertEqual(parsed[-1].attribute_values,
                         ['none'])
class TestDescribeVPCAttribute(TestEC2ConnectionBase):
    """Tests request serialization and parsing for DescribeVpcAttribute."""
    def default_body(self):
        return """
        <DescribeVpcAttributeResponse xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
            <requestId>request_id</requestId>
            <vpcId>vpc-id</vpcId>
            <enableDnsHostnames>
                <value>false</value>
            </enableDnsHostnames>
        </DescribeVpcAttributeResponse>
        """
    def test_describe_vpc_attribute(self):
        """The response exposes the VPC id and the boolean flag value."""
        self.set_http_response(status_code=200)
        result = self.ec2.describe_vpc_attribute('vpc-id',
                                                 'enableDnsHostnames')
        # The canned response reports the flag as disabled.
        self.assertEqual(result.vpc_id, 'vpc-id')
        self.assertFalse(result.enable_dns_hostnames)
        expected = {
            'Action': 'DescribeVpcAttribute',
            'VpcId': 'vpc-id',
            'Attribute': 'enableDnsHostnames',
        }
        self.assert_request_parameters(
            expected,
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
class TestGetAllNetworkInterfaces(TestEC2ConnectionBase):
    """Tests request serialization and parsing for
    DescribeNetworkInterfaces."""
    def default_body(self):
        # Note: the backslash continuation below is inside the string, so
        # the xmlns URL is a single unbroken attribute value at runtime.
        return """
<DescribeNetworkInterfacesResponse xmlns="http://ec2.amazonaws.com/\
doc/2013-06-15/">
            <requestId>fc45294c-006b-457b-bab9-012f5b3b0e40</requestId>
            <networkInterfaceSet>
                <item>
                    <networkInterfaceId>eni-0f62d866</networkInterfaceId>
                    <subnetId>subnet-c53c87ac</subnetId>
                    <vpcId>vpc-cc3c87a5</vpcId>
                    <availabilityZone>ap-southeast-1b</availabilityZone>
                    <description/>
                    <ownerId>053230519467</ownerId>
                    <requesterManaged>false</requesterManaged>
                    <status>in-use</status>
                    <macAddress>02:81:60:cb:27:37</macAddress>
                    <privateIpAddress>10.0.0.146</privateIpAddress>
                    <sourceDestCheck>true</sourceDestCheck>
                    <groupSet>
                         <item>
                            <groupId>sg-3f4b5653</groupId>
                            <groupName>default</groupName>
                         </item>
                    </groupSet>
                    <attachment>
                        <attachmentId>eni-attach-6537fc0c</attachmentId>
                        <instanceId>i-22197876</instanceId>
                        <instanceOwnerId>053230519467</instanceOwnerId>
                        <deviceIndex>5</deviceIndex>
                        <status>attached</status>
                        <attachTime>2012-07-01T21:45:27.000Z</attachTime>
                        <deleteOnTermination>true</deleteOnTermination>
                    </attachment>
                    <tagSet/>
                    <privateIpAddressesSet>
                        <item>
                          <privateIpAddress>10.0.0.146</privateIpAddress>
                          <primary>true</primary>
                        </item>
                        <item>
                          <privateIpAddress>10.0.0.148</privateIpAddress>
                          <primary>false</primary>
                        </item>
                        <item>
                          <privateIpAddress>10.0.0.150</privateIpAddress>
                          <primary>false</primary>
                        </item>
                    </privateIpAddressesSet>
                </item>
            </networkInterfaceSet>
        </DescribeNetworkInterfacesResponse>"""
    def test_get_all_network_interfaces(self):
        """Interface ids are serialized indexed and the result parses."""
        self.set_http_response(status_code=200)
        result = self.ec2.get_all_network_interfaces(network_interface_ids=['eni-0f62d866'])
        self.assert_request_parameters({
            'Action': 'DescribeNetworkInterfaces',
            'NetworkInterfaceId.1': 'eni-0f62d866'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0].id, 'eni-0f62d866')
    def test_attachment_has_device_index(self):
        """The nested attachment record carries its deviceIndex as an int."""
        self.set_http_response(status_code=200)
        parsed = self.ec2.get_all_network_interfaces()
        self.assertEqual(5, parsed[0].attachment.device_index)
class TestGetAllImages(TestEC2ConnectionBase):
    """Tests response parsing for DescribeImages."""
    def default_body(self):
        # Canned response: a single Windows HVM image with one billing
        # product and a five-entry block device mapping.
        return """
        <DescribeImagesResponse xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
            <requestId>e32375e8-4ac3-4099-a8bf-3ec902b9023e</requestId>
            <imagesSet>
                <item>
                    <imageId>ami-abcd1234</imageId>
                    <imageLocation>111111111111/windows2008r2-hvm-i386-20130702</imageLocation>
                    <imageState>available</imageState>
                    <imageOwnerId>111111111111</imageOwnerId>
                    <isPublic>false</isPublic>
                    <architecture>i386</architecture>
                    <imageType>machine</imageType>
                    <platform>windows</platform>
                    <viridianEnabled>true</viridianEnabled>
                    <name>Windows Test</name>
                    <description>Windows Test Description</description>
                    <billingProducts>
                        <item>
                            <billingProduct>bp-6ba54002</billingProduct>
                        </item>
                    </billingProducts>
                    <rootDeviceType>ebs</rootDeviceType>
                    <rootDeviceName>/dev/sda1</rootDeviceName>
                    <blockDeviceMapping>
                        <item>
                            <deviceName>/dev/sda1</deviceName>
                            <ebs>
                                <snapshotId>snap-abcd1234</snapshotId>
                                <volumeSize>30</volumeSize>
                                <deleteOnTermination>true</deleteOnTermination>
                                <volumeType>standard</volumeType>
                            </ebs>
                        </item>
                        <item>
                            <deviceName>xvdb</deviceName>
                            <virtualName>ephemeral0</virtualName>
                        </item>
                        <item>
                            <deviceName>xvdc</deviceName>
                            <virtualName>ephemeral1</virtualName>
                        </item>
                        <item>
                            <deviceName>xvdd</deviceName>
                            <virtualName>ephemeral2</virtualName>
                        </item>
                        <item>
                            <deviceName>xvde</deviceName>
                            <virtualName>ephemeral3</virtualName>
                        </item>
                    </blockDeviceMapping>
                    <virtualizationType>hvm</virtualizationType>
                    <hypervisor>xen</hypervisor>
                </item>
            </imagesSet>
        </DescribeImagesResponse>"""
    def test_get_all_images(self):
        """Every parsed attribute of the single image matches the XML.

        Uses assertEqual throughout: assertEquals is a deprecated alias
        (removed in Python 3.12) and the rest of this file already uses
        assertEqual.
        """
        self.set_http_response(status_code=200)
        parsed = self.ec2.get_all_images()
        self.assertEqual(1, len(parsed))
        self.assertEqual("ami-abcd1234", parsed[0].id)
        self.assertEqual("111111111111/windows2008r2-hvm-i386-20130702", parsed[0].location)
        self.assertEqual("available", parsed[0].state)
        self.assertEqual("111111111111", parsed[0].ownerId)
        self.assertEqual("111111111111", parsed[0].owner_id)
        self.assertEqual(False, parsed[0].is_public)
        self.assertEqual("i386", parsed[0].architecture)
        self.assertEqual("machine", parsed[0].type)
        # Fields absent from the canned XML parse as None.
        self.assertEqual(None, parsed[0].kernel_id)
        self.assertEqual(None, parsed[0].ramdisk_id)
        self.assertEqual(None, parsed[0].owner_alias)
        self.assertEqual("windows", parsed[0].platform)
        self.assertEqual("Windows Test", parsed[0].name)
        self.assertEqual("Windows Test Description", parsed[0].description)
        self.assertEqual("ebs", parsed[0].root_device_type)
        self.assertEqual("/dev/sda1", parsed[0].root_device_name)
        self.assertEqual("hvm", parsed[0].virtualization_type)
        self.assertEqual("xen", parsed[0].hypervisor)
        self.assertEqual(None, parsed[0].instance_lifecycle)
        # 1 billing product parsed into a list
        self.assertEqual(1, len(parsed[0].billing_products))
        self.assertEqual("bp-6ba54002", parsed[0].billing_products[0])
        # Just verify length, there is already a block_device_mapping test
        self.assertEqual(5, len(parsed[0].block_device_mapping))
        # TODO: No tests for product codes?
class TestModifyInterfaceAttribute(TestEC2ConnectionBase):
    """Tests request serialization and validation for
    ModifyNetworkInterfaceAttribute."""
    def default_body(self):
        return """
<ModifyNetworkInterfaceAttributeResponse \
xmlns="http://ec2.amazonaws.com/doc/2013-06-15/">
    <requestId>657a4623-5620-4232-b03b-427e852d71cf</requestId>
    <return>true</return>
</ModifyNetworkInterfaceAttributeResponse>
"""
    def test_modify_description(self):
        """The description attribute maps to Description.Value."""
        self.set_http_response(status_code=200)
        self.ec2.modify_network_interface_attribute('id', 'description', 'foo')
        self.assert_request_parameters({
            'Action': 'ModifyNetworkInterfaceAttribute',
            'NetworkInterfaceId': 'id',
            'Description.Value': 'foo'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
    def test_modify_source_dest_check_bool(self):
        """A boolean True serializes as the string 'true'."""
        self.set_http_response(status_code=200)
        self.ec2.modify_network_interface_attribute('id', 'sourceDestCheck',
                                                    True)
        self.assert_request_parameters({
            'Action': 'ModifyNetworkInterfaceAttribute',
            'NetworkInterfaceId': 'id',
            'SourceDestCheck.Value': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
    def test_modify_source_dest_check_str(self):
        """A string value passes through unchanged."""
        self.set_http_response(status_code=200)
        self.ec2.modify_network_interface_attribute('id', 'sourceDestCheck',
                                                    'true')
        self.assert_request_parameters({
            'Action': 'ModifyNetworkInterfaceAttribute',
            'NetworkInterfaceId': 'id',
            'SourceDestCheck.Value': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
    def test_modify_source_dest_check_invalid(self):
        """Non-bool/non-str values are rejected with ValueError."""
        self.set_http_response(status_code=200)
        with self.assertRaises(ValueError):
            self.ec2.modify_network_interface_attribute('id',
                                                        'sourceDestCheck',
                                                        123)
    # The next two tests had their ``_str``/``_bool`` suffixes swapped:
    # the test passing the bool True was named *_str and vice versa.
    # Names fixed; bodies unchanged, so the covered behavior is identical.
    def test_modify_delete_on_termination_bool(self):
        """A boolean deleteOnTermination serializes as 'true'."""
        self.set_http_response(status_code=200)
        self.ec2.modify_network_interface_attribute('id',
                                                    'deleteOnTermination',
                                                    True, attachment_id='bar')
        self.assert_request_parameters({
            'Action': 'ModifyNetworkInterfaceAttribute',
            'NetworkInterfaceId': 'id',
            'Attachment.AttachmentId': 'bar',
            'Attachment.DeleteOnTermination': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
    def test_modify_delete_on_termination_str(self):
        """A string deleteOnTermination passes through unchanged."""
        self.set_http_response(status_code=200)
        self.ec2.modify_network_interface_attribute('id',
                                                    'deleteOnTermination',
                                                    'false',
                                                    attachment_id='bar')
        self.assert_request_parameters({
            'Action': 'ModifyNetworkInterfaceAttribute',
            'NetworkInterfaceId': 'id',
            'Attachment.AttachmentId': 'bar',
            'Attachment.DeleteOnTermination': 'false'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
    def test_modify_delete_on_termination_invalid(self):
        """Non-bool/non-str values are rejected with ValueError."""
        self.set_http_response(status_code=200)
        with self.assertRaises(ValueError):
            self.ec2.modify_network_interface_attribute('id',
                                                        'deleteOnTermination',
                                                        123,
                                                        attachment_id='bar')
    def test_modify_group_set_list(self):
        """A list of security group ids is serialized indexed from 1."""
        self.set_http_response(status_code=200)
        self.ec2.modify_network_interface_attribute('id', 'groupSet',
                                                    ['sg-1', 'sg-2'])
        self.assert_request_parameters({
            'Action': 'ModifyNetworkInterfaceAttribute',
            'NetworkInterfaceId': 'id',
            'SecurityGroupId.1': 'sg-1',
            'SecurityGroupId.2': 'sg-2'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
    def test_modify_group_set_invalid(self):
        """A non-iterable groupSet value raises TypeError."""
        self.set_http_response(status_code=200)
        with self.assertRaisesRegexp(TypeError, 'iterable'):
            self.ec2.modify_network_interface_attribute('id', 'groupSet',
                                                        False)
    def test_modify_attr_invalid(self):
        """An unknown attribute name raises ValueError."""
        self.set_http_response(status_code=200)
        with self.assertRaisesRegexp(ValueError, 'Unknown attribute'):
            self.ec2.modify_network_interface_attribute('id', 'invalid', 0)
class TestConnectToRegion(unittest.TestCase):
    """Tests for the boto.ec2.connect_to_region factory function."""
    def setUp(self):
        # Fake HTTPS connection factory so no network traffic occurs.
        self.https_connection = Mock(spec=httplib.HTTPSConnection)
        self.https_connection_factory = (
            Mock(return_value=self.https_connection), ())
    def test_aws_region(self):
        """A known AWS region name resolves to its catalogued endpoint."""
        region = boto.ec2.RegionData.keys()[0]
        self.ec2 = boto.ec2.connect_to_region(
            region,
            https_connection_factory=self.https_connection_factory,
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key')
        self.assertEqual(boto.ec2.RegionData[region], self.ec2.host)
    def test_non_aws_region(self):
        """An explicit RegionInfo overrides the region-name lookup."""
        self.ec2 = boto.ec2.connect_to_region(
            'foo',
            https_connection_factory=self.https_connection_factory,
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key',
            region=RegionInfo(name='foo', endpoint='https://foo.com/bar'))
        self.assertEqual('https://foo.com/bar', self.ec2.host)
    def test_missing_region(self):
        """An unknown region name with no RegionInfo yields None."""
        self.ec2 = boto.ec2.connect_to_region(
            'foo',
            https_connection_factory=self.https_connection_factory,
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key')
        self.assertEqual(None, self.ec2)
class TestTrimSnapshots(TestEC2ConnectionBase):
    """
    Test snapshot trimming functionality by ensuring that expected calls
    are made when given a known set of volume snapshots.
    """
    def _get_snapshots(self):
        """
        Generate a list of fake snapshots with names and dates.
        """
        snaps = []
        # Generate some dates offset by days, weeks, months.
        # This is to validate the various types of snapshot logic handled by
        # ``trim_snapshots``.
        now = datetime.now()
        dates = [
            now,
            now - timedelta(days=1),
            now - timedelta(days=2),
            now - timedelta(days=7),
            now - timedelta(days=14),
            # We want to simulate 30/60/90-day snapshots, but February is
            # short (only 28 days), so we decrease the delta by 2 days apiece.
            # This prevents the ``delete_snapshot`` code below from being
            # called, since they don't fall outside the allowed timeframes
            # for the snapshots.
            datetime(now.year, now.month, 1) - timedelta(days=28),
            datetime(now.year, now.month, 1) - timedelta(days=58),
            datetime(now.year, now.month, 1) - timedelta(days=88)
        ]
        for date in dates:
            # Create a fake snapshot for each date
            snap = Snapshot(self.ec2)
            snap.tags['Name'] = 'foo'
            # Times are expected to be ISO8601 strings
            snap.start_time = date.strftime('%Y-%m-%dT%H:%M:%S.000Z')
            snaps.append(snap)
        return snaps
    def test_trim_defaults(self):
        """
        Test trimming snapshots with the default arguments, which should
        keep all monthly backups forever. The result of this test should
        be that nothing is deleted.
        """
        # Setup mocks
        orig = {
            'get_all_snapshots': self.ec2.get_all_snapshots,
            'delete_snapshot': self.ec2.delete_snapshot
        }
        snaps = self._get_snapshots()
        self.ec2.get_all_snapshots = MagicMock(return_value=snaps)
        self.ec2.delete_snapshot = MagicMock()
        # Call the tested method
        self.ec2.trim_snapshots()
        # Assertions
        self.assertEqual(True, self.ec2.get_all_snapshots.called)
        self.assertEqual(False, self.ec2.delete_snapshot.called)
        # Restore
        # NOTE(review): this restore only runs when the assertions pass;
        # presumably harmless because setUp builds a fresh connection per
        # test -- confirm, or move restore into addCleanup/try-finally.
        self.ec2.get_all_snapshots = orig['get_all_snapshots']
        self.ec2.delete_snapshot = orig['delete_snapshot']
    def test_trim_months(self):
        """
        Test trimming monthly snapshots and ensure that older months
        get deleted properly. The result of this test should be that
        the two oldest snapshots get deleted.
        """
        # Setup mocks
        orig = {
            'get_all_snapshots': self.ec2.get_all_snapshots,
            'delete_snapshot': self.ec2.delete_snapshot
        }
        snaps = self._get_snapshots()
        self.ec2.get_all_snapshots = MagicMock(return_value=snaps)
        self.ec2.delete_snapshot = MagicMock()
        # Call the tested method
        self.ec2.trim_snapshots(monthly_backups=1)
        # Assertions
        self.assertEqual(True, self.ec2.get_all_snapshots.called)
        self.assertEqual(2, self.ec2.delete_snapshot.call_count)
        # Restore
        # NOTE(review): same restore-after-assert caveat as above.
        self.ec2.get_all_snapshots = orig['get_all_snapshots']
        self.ec2.delete_snapshot = orig['delete_snapshot']
class TestModifyReservedInstances(TestEC2ConnectionBase):
    """Tests serialization and parsing for ModifyReservedInstances."""
    def default_body(self):
        return """<ModifyReservedInstancesResponse xmlns='http://ec2.amazonaws.com/doc/2013-08-15/'>
            <requestId>bef729b6-0731-4489-8881-2258746ae163</requestId>
            <reservedInstancesModificationId>rimod-3aae219d-3d63-47a9-a7e9-e764example</reservedInstancesModificationId>
        </ModifyReservedInstancesResponse>"""
    def test_serialized_api_args(self):
        """Token, instance ids and target configurations are serialized
        and the modification id is parsed from the response."""
        self.set_http_response(status_code=200)
        target = ReservedInstancesConfiguration(
            availability_zone='us-west-2c',
            platform='EC2-VPC',
            instance_count=3
        )
        mod_id = self.ec2.modify_reserved_instances(
            'a-token-goes-here',
            reserved_instance_ids=['2567o137-8a55-48d6-82fb-7258506bb497'],
            target_configurations=[target]
        )
        self.assert_request_parameters({
            'Action': 'ModifyReservedInstances',
            'ClientToken': 'a-token-goes-here',
            'ReservedInstancesConfigurationSetItemType.0.AvailabilityZone': 'us-west-2c',
            'ReservedInstancesConfigurationSetItemType.0.InstanceCount': 3,
            'ReservedInstancesConfigurationSetItemType.0.Platform': 'EC2-VPC',
            'ReservedInstancesId.1': '2567o137-8a55-48d6-82fb-7258506bb497'
        }, ignore_params_values=[
            'AWSAccessKeyId', 'SignatureMethod',
            'SignatureVersion', 'Timestamp',
            'Version'
        ])
        self.assertEqual(mod_id, 'rimod-3aae219d-3d63-47a9-a7e9-e764example')
class TestDescribeReservedInstancesModifications(TestEC2ConnectionBase):
    """Tests request serialization and parsing for
    DescribeReservedInstancesModifications."""
    def default_body(self):
        # Canned response: a single fulfilled modification with one source
        # reserved instance and one target configuration.
        return """<DescribeReservedInstancesModificationsResponse xmlns='http://ec2.amazonaws.com/doc/2013-08-15/'>
            <requestId>eb4a6e3c-3689-445c-b536-19e38df35898</requestId>
            <reservedInstancesModificationsSet>
                <item>
                    <reservedInstancesModificationId>rimod-49b9433e-fdc7-464a-a6e5-9dabcexample</reservedInstancesModificationId>
                    <reservedInstancesSet>
                        <item>
                            <reservedInstancesId>2567o137-8a55-48d6-82fb-7258506bb497</reservedInstancesId>
                        </item>
                    </reservedInstancesSet>
                    <modificationResultSet>
                        <item>
                            <reservedInstancesId>9d5cb137-5d65-4479-b4ac-8c337example</reservedInstancesId>
                            <targetConfiguration>
                                <availabilityZone>us-east-1b</availabilityZone>
                                <platform>EC2-VPC</platform>
                                <instanceCount>1</instanceCount>
                            </targetConfiguration>
                        </item>
                    </modificationResultSet>
                    <createDate>2013-09-02T21:20:19.637Z</createDate>
                    <updateDate>2013-09-02T21:38:24.143Z</updateDate>
                    <effectiveDate>2013-09-02T21:00:00.000Z</effectiveDate>
                    <status>fulfilled</status>
                    <clientToken>token-f5b56c05-09b0-4d17-8d8c-c75d8a67b806</clientToken>
                </item>
            </reservedInstancesModificationsSet>
        </DescribeReservedInstancesModificationsResponse>"""
    def test_serialized_api_args(self):
        """Ids and filters become request params; ISO8601 dates in the
        response parse into datetime objects."""
        self.set_http_response(status_code=200)
        response = self.ec2.describe_reserved_instances_modifications(
            reserved_instances_modification_ids=[
                '2567o137-8a55-48d6-82fb-7258506bb497'
            ],
            filters={
                'status': 'processing',
            }
        )
        self.assert_request_parameters({
            'Action': 'DescribeReservedInstancesModifications',
            'Filter.1.Name': 'status',
            'Filter.1.Value.1': 'processing',
            'ReservedInstancesModificationId.1': '2567o137-8a55-48d6-82fb-7258506bb497'
        }, ignore_params_values=[
            'AWSAccessKeyId', 'SignatureMethod',
            'SignatureVersion', 'Timestamp',
            'Version'
        ])
        # Make sure the response was parsed correctly.
        self.assertEqual(
            response[0].modification_id,
            'rimod-49b9433e-fdc7-464a-a6e5-9dabcexample'
        )
        self.assertEqual(
            response[0].create_date,
            datetime(2013, 9, 2, 21, 20, 19, 637000)
        )
        self.assertEqual(
            response[0].update_date,
            datetime(2013, 9, 2, 21, 38, 24, 143000)
        )
        self.assertEqual(
            response[0].effective_date,
            datetime(2013, 9, 2, 21, 0, 0, 0)
        )
        self.assertEqual(
            response[0].status,
            'fulfilled'
        )
        # statusMessage is absent from the canned XML, so it parses as None.
        self.assertEqual(
            response[0].status_message,
            None
        )
        self.assertEqual(
            response[0].client_token,
            'token-f5b56c05-09b0-4d17-8d8c-c75d8a67b806'
        )
        self.assertEqual(
            response[0].reserved_instances[0].id,
            '2567o137-8a55-48d6-82fb-7258506bb497'
        )
        self.assertEqual(
            response[0].modification_results[0].availability_zone,
            'us-east-1b'
        )
        self.assertEqual(
            response[0].modification_results[0].platform,
            'EC2-VPC'
        )
        self.assertEqual(
            response[0].modification_results[0].instance_count,
            1
        )
        self.assertEqual(len(response), 1)
class TestRegisterImage(TestEC2ConnectionBase):
    """RegisterImage: verify request serialization for the virtualization
    type, SR-IOV support, and block-device-mapping variants."""
    def default_body(self):
        # Canned XML body served by the mocked HTTP layer.
        return """
<RegisterImageResponse xmlns="http://ec2.amazonaws.com/doc/2013-08-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<imageId>ami-1a2b3c4d</imageId>
</RegisterImageResponse>
"""
    def test_vm_type_default(self):
        # No virtualization_type argument -> no VirtualizationType param.
        self.set_http_response(status_code=200)
        self.ec2.register_image('name', 'description',
                                image_location='s3://foo')
        self.assert_request_parameters({
            'Action': 'RegisterImage',
            'ImageLocation': 's3://foo',
            'Name': 'name',
            'Description': 'description',
        }, ignore_params_values=[
            'AWSAccessKeyId', 'SignatureMethod',
            'SignatureVersion', 'Timestamp',
            'Version'
        ])
    def test_vm_type_hvm(self):
        self.set_http_response(status_code=200)
        self.ec2.register_image('name', 'description',
                                image_location='s3://foo',
                                virtualization_type='hvm')
        self.assert_request_parameters({
            'Action': 'RegisterImage',
            'ImageLocation': 's3://foo',
            'Name': 'name',
            'Description': 'description',
            'VirtualizationType': 'hvm'
        }, ignore_params_values=[
            'AWSAccessKeyId', 'SignatureMethod',
            'SignatureVersion', 'Timestamp',
            'Version'
        ])
    def test_sriov_net_support_simple(self):
        self.set_http_response(status_code=200)
        self.ec2.register_image('name', 'description',
                                image_location='s3://foo',
                                sriov_net_support='simple')
        self.assert_request_parameters({
            'Action': 'RegisterImage',
            'ImageLocation': 's3://foo',
            'Name': 'name',
            'Description': 'description',
            'SriovNetSupport': 'simple'
        }, ignore_params_values=[
            'AWSAccessKeyId', 'SignatureMethod',
            'SignatureVersion', 'Timestamp',
            'Version'
        ])
    def test_volume_delete_on_termination_on(self):
        # Registering from a snapshot builds a root block device mapping;
        # the flag is serialized as the string 'true'.
        self.set_http_response(status_code=200)
        self.ec2.register_image('name', 'description',
                                snapshot_id='snap-12345678',
                                delete_root_volume_on_termination=True)
        self.assert_request_parameters({
            'Action': 'RegisterImage',
            'Name': 'name',
            'Description': 'description',
            'BlockDeviceMapping.1.DeviceName': None,
            'BlockDeviceMapping.1.Ebs.DeleteOnTermination' : 'true',
            'BlockDeviceMapping.1.Ebs.SnapshotId': 'snap-12345678',
        }, ignore_params_values=[
            'AWSAccessKeyId', 'SignatureMethod',
            'SignatureVersion', 'Timestamp',
            'Version'
        ])
    def test_volume_delete_on_termination_default(self):
        # Default is 'false' when the keyword is omitted.
        self.set_http_response(status_code=200)
        self.ec2.register_image('name', 'description',
                                snapshot_id='snap-12345678')
        self.assert_request_parameters({
            'Action': 'RegisterImage',
            'Name': 'name',
            'Description': 'description',
            'BlockDeviceMapping.1.DeviceName': None,
            'BlockDeviceMapping.1.Ebs.DeleteOnTermination' : 'false',
            'BlockDeviceMapping.1.Ebs.SnapshotId': 'snap-12345678',
        }, ignore_params_values=[
            'AWSAccessKeyId', 'SignatureMethod',
            'SignatureVersion', 'Timestamp',
            'Version'
        ])
class TestTerminateInstances(TestEC2ConnectionBase):
    """TerminateInstances: the parser must tolerate a response whose state
    elements differ from the usual schema (smoke test, no assertions)."""
    def default_body(self):
        # Fixture uses <shutdownState>/<previousState> instead of the
        # documented element names, exercising lenient parsing.
        return """<?xml version="1.0" ?>
<TerminateInstancesResponse xmlns="http://ec2.amazonaws.com/doc/2013-07-15/">
<requestId>req-59a9ad52-0434-470c-ad48-4f89ded3a03e</requestId>
<instancesSet>
<item>
<instanceId>i-000043a2</instanceId>
<shutdownState>
<code>16</code>
<name>running</name>
</shutdownState>
<previousState>
<code>16</code>
<name>running</name>
</previousState>
</item>
</instancesSet>
</TerminateInstancesResponse>
"""
    def test_terminate_bad_response(self):
        # Passes if no exception is raised while parsing the response.
        self.set_http_response(status_code=200)
        self.ec2.terminate_instances('foo')
class TestDescribeInstances(TestEC2ConnectionBase):
    """DescribeInstances: request serialization with and without the
    MaxResults pagination parameter."""
    def default_body(self):
        # Minimal empty response; only the request side is asserted.
        return """
<DescribeInstancesResponse>
</DescribeInstancesResponse>
"""
    def test_default_behavior(self):
        self.set_http_response(status_code=200)
        self.ec2.get_all_instances()
        self.assert_request_parameters({
            'Action': 'DescribeInstances'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
    def test_max_results(self):
        self.set_http_response(status_code=200)
        self.ec2.get_all_instances(
            max_results=10
        )
        self.assert_request_parameters({
            'Action': 'DescribeInstances',
            'MaxResults': 10},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
class TestDescribeTags(TestEC2ConnectionBase):
    """DescribeTags: request serialization with and without MaxResults."""
    def default_body(self):
        # Minimal empty response; only the request side is asserted.
        return """
<DescribeTagsResponse>
</DescribeTagsResponse>
"""
    def test_default_behavior(self):
        self.set_http_response(status_code=200)
        self.ec2.get_all_tags()
        self.assert_request_parameters({
            'Action': 'DescribeTags'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
    def test_max_results(self):
        self.set_http_response(status_code=200)
        self.ec2.get_all_tags(
            max_results=10
        )
        self.assert_request_parameters({
            'Action': 'DescribeTags',
            'MaxResults': 10},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp', 'Version'])
class TestSignatureAlteration(TestEC2ConnectionBase):
    """Auth capability selection: default regions sign with 'ec2', while
    the cn-north-1 region must switch to SigV4 ('hmac-v4')."""
    def test_unchanged(self):
        self.assertEqual(
            self.service_connection._required_auth_capability(),
            ['ec2']
        )
    def test_switched(self):
        # Connections to the China region require Signature Version 4.
        region = RegionInfo(
            name='cn-north-1',
            endpoint='ec2.cn-north-1.amazonaws.com.cn',
            connection_cls=EC2Connection
        )
        conn = self.connection_class(
            aws_access_key_id='less',
            aws_secret_access_key='more',
            region=region
        )
        self.assertEqual(
            conn._required_auth_capability(),
            ['hmac-v4']
        )
class TestAssociateAddress(TestEC2ConnectionBase):
    """AssociateAddress success path: boolean helper vs. object-returning
    helper that exposes the association id."""
    def default_body(self):
        return """
<AssociateAddressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
<associationId>eipassoc-fc5ca095</associationId>
</AssociateAddressResponse>
"""
    def test_associate_address(self):
        # Legacy API: returns True on success.
        self.set_http_response(status_code=200)
        result = self.ec2.associate_address(instance_id='i-1234',
                                            public_ip='192.0.2.1')
        self.assertEqual(True, result)
    def test_associate_address_object(self):
        # Object API: returns the parsed association with its id.
        self.set_http_response(status_code=200)
        result = self.ec2.associate_address_object(instance_id='i-1234',
                                                   public_ip='192.0.2.1')
        self.assertEqual('eipassoc-fc5ca095', result.association_id)
class TestAssociateAddressFail(TestEC2ConnectionBase):
    """AssociateAddress failure path: an error body with HTTP 200 must be
    reported as False rather than raising."""
    def default_body(self):
        return """
<Response>
<Errors>
<Error>
<Code>InvalidInstanceID.NotFound</Code>
<Message>The instance ID 'i-4cbc822a' does not exist</Message>
</Error>
</Errors>
<RequestID>ea966190-f9aa-478e-9ede-cb5432daacc0</RequestID>
<StatusCode>Failure</StatusCode>
</Response>
"""
    def test_associate_address(self):
        self.set_http_response(status_code=200)
        result = self.ec2.associate_address(instance_id='i-1234',
                                            public_ip='192.0.2.1')
        self.assertEqual(False, result)
class TestDescribeVolumes(TestEC2ConnectionBase):
    """DescribeVolumes: request serialization of volume ids and parsing of
    the per-volume ``encrypted`` flag."""
    def default_body(self):
        # Two volumes: the first encrypted, the second not.
        return """
<DescribeVolumesResponse xmlns="http://ec2.amazonaws.com/doc/2014-02-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<volumeSet>
<item>
<volumeId>vol-1a2b3c4d</volumeId>
<size>80</size>
<snapshotId/>
<availabilityZone>us-east-1a</availabilityZone>
<status>in-use</status>
<createTime>YYYY-MM-DDTHH:MM:SS.SSSZ</createTime>
<attachmentSet>
<item>
<volumeId>vol-1a2b3c4d</volumeId>
<instanceId>i-1a2b3c4d</instanceId>
<device>/dev/sdh</device>
<status>attached</status>
<attachTime>YYYY-MM-DDTHH:MM:SS.SSSZ</attachTime>
<deleteOnTermination>false</deleteOnTermination>
</item>
</attachmentSet>
<volumeType>standard</volumeType>
<encrypted>true</encrypted>
</item>
<item>
<volumeId>vol-5e6f7a8b</volumeId>
<size>80</size>
<snapshotId/>
<availabilityZone>us-east-1a</availabilityZone>
<status>in-use</status>
<createTime>YYYY-MM-DDTHH:MM:SS.SSSZ</createTime>
<attachmentSet>
<item>
<volumeId>vol-5e6f7a8b</volumeId>
<instanceId>i-5e6f7a8b</instanceId>
<device>/dev/sdz</device>
<status>attached</status>
<attachTime>YYYY-MM-DDTHH:MM:SS.SSSZ</attachTime>
<deleteOnTermination>false</deleteOnTermination>
</item>
</attachmentSet>
<volumeType>standard</volumeType>
<encrypted>false</encrypted>
</item>
</volumeSet>
</DescribeVolumesResponse>
"""
    def test_get_all_volumes(self):
        self.set_http_response(status_code=200)
        result = self.ec2.get_all_volumes(volume_ids=['vol-1a2b3c4d', 'vol-5e6f7a8b'])
        # Ids are serialized as VolumeId.1, VolumeId.2, ...
        self.assert_request_parameters({
            'Action': 'DescribeVolumes',
            'VolumeId.1': 'vol-1a2b3c4d',
            'VolumeId.2': 'vol-5e6f7a8b'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(len(result), 2)
        self.assertEqual(result[0].id, 'vol-1a2b3c4d')
        self.assertTrue(result[0].encrypted)
        self.assertEqual(result[1].id, 'vol-5e6f7a8b')
        self.assertFalse(result[1].encrypted)
class TestDescribeSnapshots(TestEC2ConnectionBase):
    """DescribeSnapshots: request serialization of snapshot ids and parsing
    of the per-snapshot ``encrypted`` flag (two separate snapshotSet
    elements in the fixture)."""
    def default_body(self):
        return """
<DescribeSnapshotsResponse xmlns="http://ec2.amazonaws.com/doc/2014-02-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<snapshotSet>
<item>
<snapshotId>snap-1a2b3c4d</snapshotId>
<volumeId>vol-1a2b3c4d</volumeId>
<status>pending</status>
<startTime>YYYY-MM-DDTHH:MM:SS.SSSZ</startTime>
<progress>80%</progress>
<ownerId>111122223333</ownerId>
<volumeSize>15</volumeSize>
<description>Daily Backup</description>
<tagSet/>
<encrypted>true</encrypted>
</item>
</snapshotSet>
<snapshotSet>
<item>
<snapshotId>snap-5e6f7a8b</snapshotId>
<volumeId>vol-5e6f7a8b</volumeId>
<status>completed</status>
<startTime>YYYY-MM-DDTHH:MM:SS.SSSZ</startTime>
<progress>100%</progress>
<ownerId>111122223333</ownerId>
<volumeSize>15</volumeSize>
<description>Daily Backup</description>
<tagSet/>
<encrypted>false</encrypted>
</item>
</snapshotSet>
</DescribeSnapshotsResponse>
"""
    def test_get_all_snapshots(self):
        self.set_http_response(status_code=200)
        result = self.ec2.get_all_snapshots(snapshot_ids=['snap-1a2b3c4d', 'snap-5e6f7a8b'])
        self.assert_request_parameters({
            'Action': 'DescribeSnapshots',
            'SnapshotId.1': 'snap-1a2b3c4d',
            'SnapshotId.2': 'snap-5e6f7a8b'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(len(result), 2)
        self.assertEqual(result[0].id, 'snap-1a2b3c4d')
        self.assertTrue(result[0].encrypted)
        self.assertEqual(result[1].id, 'snap-5e6f7a8b')
        self.assertFalse(result[1].encrypted)
class TestCreateVolume(TestEC2ConnectionBase):
    """CreateVolume: the encrypted=True keyword must serialize to the
    string 'true' and the parsed volume must expose the flag."""
    def default_body(self):
        return """
<CreateVolumeResponse xmlns="http://ec2.amazonaws.com/doc/2014-05-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<volumeId>vol-1a2b3c4d</volumeId>
<size>80</size>
<snapshotId/>
<availabilityZone>us-east-1a</availabilityZone>
<status>creating</status>
<createTime>YYYY-MM-DDTHH:MM:SS.000Z</createTime>
<volumeType>standard</volumeType>
<encrypted>true</encrypted>
</CreateVolumeResponse>
"""
    def test_create_volume(self):
        self.set_http_response(status_code=200)
        result = self.ec2.create_volume(80, 'us-east-1e', snapshot='snap-1a2b3c4d',
                                        encrypted=True)
        self.assert_request_parameters({
            'Action': 'CreateVolume',
            'AvailabilityZone': 'us-east-1e',
            'Size': 80,
            'SnapshotId': 'snap-1a2b3c4d',
            'Encrypted': 'true'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(result.id, 'vol-1a2b3c4d')
        self.assertTrue(result.encrypted)
if __name__ == '__main__':
    # Run this test module directly with the stdlib unittest runner.
    unittest.main()
| [
"[email protected]"
] | |
d42e96cade1e62edc396c7d0eacabec1d5079d75 | 987dca99da21542c6e2bd345d8d993bd2326a81f | /Prac07/guitarTest.py | 53eb6be7a4b92729e8677494b0c178f410d7c644 | [] | no_license | hamishorr/Practicals | 34debb5f48ebeeca6bf0f7a5248bcdaf5306a255 | ff01986bf9555850fe518adf97fbc9e55da827ac | refs/heads/master | 2020-05-21T20:30:12.565780 | 2016-10-24T01:14:10 | 2016-10-24T01:14:10 | 65,692,866 | 0 | 0 | null | 2016-09-22T02:33:55 | 2016-08-15T00:30:01 | Python | UTF-8 | Python | false | false | 423 | py | from Prac07.guitar import Guitar
# Interactive inventory: read guitars until a blank name is entered, then
# list them all, flagging the vintage ones.
print('My Guitars')
guitars = []
name = input("name:")
while name != "":
    year = int(input("year:"))
    cost = float(input("cost:"))
    new_guitar = Guitar(name, year, cost)
    guitars.append(new_guitar)
    print(new_guitar)
    name = input("name:")
for guitar in guitars:
    # Suffix "(vintage)" when the guitar qualifies, else nothing.
    vintage = "(vintage)" if guitar.is_vintage() else ""
    print("{} {}".format(guitar, vintage))
| [
"[email protected]"
] | |
22eca2df01e9eef8496c7245dbe6266872cb0811 | d2f4c175851b06587d46a09b5d0c892c226f0ca6 | /pa_nmf.py | 18dfb7c4ef74b27000954a02a380231bb0413d6c | [] | no_license | stermart/SPOI-AE | 9b7ebc618aef38091ff8fee3bfdfba79349c86f5 | 37a28e089df8a40a85a797726bfd989d5c45d9b5 | refs/heads/main | 2023-04-14T18:13:58.091961 | 2023-02-10T17:21:15 | 2023-02-10T17:21:15 | 598,136,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,294 | py | ## NMF
import numpy as np
import os
import pickle as pkl
from sklearn.decomposition import NMF
def obj(k_NMF=2, seeded=False, max_iters=200, verbose=False):
    """Build an sklearn NMF decomposer with ``k_NMF`` components.

    ``seeded=True`` selects the 'custom' initializer (the caller passes
    W/H seeds to ``train``); otherwise initialization is 'random'.
    """
    init_mode = "custom" if seeded else "random"
    return NMF(n_components=k_NMF, init=init_mode,
               max_iter=max_iters, verbose=verbose)
def train(nmf_obj, data, E0=None, C0=None):
    """Fit ``nmf_obj`` on ``data`` (features x samples), transposed for
    sklearn's samples-first convention.

    When seed spectra ``E0`` (features x k0) and concentrations ``C0``
    (k0 x samples) are supplied, they are transposed into W/H warm-start
    matrices and padded with ones up to ``n_components`` if needed.
    Both seeds must be given together.
    """
    k_NMF = nmf_obj.n_components
    if E0 is None and C0 is None:
        return nmf_obj.fit(data.T)
    assert E0 is not None and C0 is not None, "Both E0 and C0 must be specified"
    # C-contiguous copies: sklearn's custom init mutates W/H in place.
    W0 = np.copy(C0.T, order='C')
    H0 = np.copy(E0.T, order='C')
    if k_NMF > W0.shape[1]:
        # Pad extra components with ones (columns of W, rows of H).
        W0 = np.hstack((W0, np.ones((W0.shape[0], k_NMF - W0.shape[1]))))
        H0 = np.vstack((H0, np.ones((k_NMF - H0.shape[0], H0.shape[1]))))
    return nmf_obj.fit(data.T, W=W0, H=H0)
def get_spectra(nmf_obj):
    """Return the learned component spectra as columns: (n_features, k)."""
    return np.transpose(nmf_obj.components_)
def unmix(nmf_obj, data):
    """Project ``data`` (features x samples) onto the fitted components,
    returning mixing coefficients shaped (k, n_samples)."""
    coefficients = nmf_obj.transform(data.T)
    return coefficients.T
def save_model(nmf_obj, fname):
    """Pickle ``nmf_obj`` to the path ``fname``, creating parent
    directories as needed.

    Bug fixed: the original called ``pkl.dump(nmf_obj, fname)``, passing
    the *path string* where pickle expects the opened file object, which
    raised ``TypeError`` at runtime.  Also guards ``os.makedirs`` against
    the empty dirname produced by a bare filename.
    """
    parent = os.path.dirname(fname)
    if parent:  # dirname is '' for a bare filename; makedirs('') raises
        os.makedirs(parent, exist_ok=True)
    with open(fname, 'wb') as fout:
        pkl.dump(nmf_obj, fout)
| [
"[email protected]"
] | |
f61f6687a10e639df6a581469c85482272216d40 | 62b183cd98a22950e95e0968295970b1bf093034 | /mrp_cost_report/report/__init__.py | b6b3bea5cf052d69305aca0b935dd3a76bb26d13 | [] | no_license | dbertha/odoo-addons | c0bb926cbfafa003d1afd75d2190d51d8def187f | 3681cbad05d5748198318fc1774be77b5f6b420e | refs/heads/master | 2020-12-24T08:31:14.970208 | 2017-03-17T18:51:53 | 2017-03-17T18:51:53 | 31,593,425 | 1 | 4 | null | null | null | null | UTF-8 | Python | false | false | 115 | py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import bom_cost | [
"[email protected]"
] | |
f970d26407f174a743bcb989cdae5fd18f1cf862 | 969ae96c883fa8aee938a03af40be54dad60f0ca | /query_scripts/intersect_base.py | 13528a869bc3f58b6e98c732106540b26f4a338d | [] | no_license | fuxmanlab/altered_TFBS | 1cd695c734cbbfd23b72c683ff9a531306144337 | 2cc4a3c95836b3f980764619597b37cd967091dc | refs/heads/master | 2022-11-19T06:42:35.100582 | 2020-07-28T14:58:02 | 2020-07-28T14:58:02 | 264,718,739 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,149 | py | # Base class for loading, saving, and querying the .bed and .vcf
# files
import useful
import tempfile
import os
import glob
import shutil
class IntersectBase(object):
    ''' Base class for intersecting .bed and .vcf files
        with the alterome datafiles using Spark-SQL.

        Subclasses implement ``to_df`` (load the file into self.df) and
        ``intersection_query`` (populate self.intersect_df); this base
        class handles writing the result out as a single CSV.
    '''
    def __init__(self, filename, hadoop=False):
        """Store the input path.  Existence is checked only for local
        (non-Hadoop) files; HDFS paths are resolved lazily by Spark."""
        if not hadoop:
            if not os.path.exists(filename):
                raise OSError("File not found: %s" % filename)
        self.filename = filename
        # The loaded dataframe from the file
        self.df = None
        # The result of the intersection query
        self.intersect_df = None
        # Are we on hadoop?
        self.hadoop = hadoop

    def to_df(self, spark):
        ''' Convert the file to a Spark DataFrame, stored
            internally in self.df. The df is registered as
            a Spark SQL temp table.  Implemented by subclasses. '''
        pass

    def intersection_query(self, spark):
        ''' Intersection query for the .bed and .vcf files with the
            alterome in the tfbs_df and tf_info_df dataframes. Stores
            the result internally in self.intersect_df.
            Implemented by subclasses. '''
        pass

    def write_df(self, output_csv, npartitions=None):
        ''' Write the query result in parallel to a set of CSV part
            files and (in local mode) consolidate them into one file.
            On Hadoop the partitioned directory itself is the output. '''
        tmp_name = self.df_to_csv(output_csv, npartitions)
        if not self.hadoop:
            self.consolidate_csv(tmp_name, output_csv)

    @useful.timeit
    def df_to_csv(self, output_csv, npartitions=None):
        ''' Dump self.intersect_df to CSV with a header row.  Returns
            the temporary directory path in local mode, None on Hadoop. '''
        # Repartition if asked.  NOTE: DataFrame.repartition returns a
        # *new* DataFrame; the original discarded the result, so the
        # npartitions option was silently a no-op.
        if npartitions:
            self.intersect_df = self.intersect_df.repartition(npartitions)
        if not self.hadoop:
            # Unique temporary directory name based on the process id.
            tmp_name = str(os.getpid()) + '_tmp'
            # tempfile.gettempdir() honors $TMPDIR but also works when it
            # is unset (os.environ['TMPDIR'] raised KeyError in that case).
            tmp_path = os.path.join(tempfile.gettempdir(), tmp_name + '.csv')
            if os.path.exists(tmp_path):
                shutil.rmtree(tmp_path)
            self.intersect_df.write.option('header', 'true').csv(tmp_path)
            return tmp_path
        else:
            self.intersect_df.write.option('header', 'true').csv(output_csv)

    @useful.timeit
    def consolidate_csv(self, input_dir, output_csv, delete_input=True):
        ''' Merge the per-partition CSV files written by Spark into a
            single file, keeping only the first file's header line. '''
        print("Consolidating parallel CSV files.")
        if os.path.exists(output_csv):
            os.unlink(output_csv)
        csv_files = glob.glob(os.path.join(input_dir, '*.csv'))
        # Seed the output with the first part (keeps its header) ...
        shutil.copyfile(csv_files.pop(0), output_csv)
        # ... then append the remaining parts without their headers.
        with open(output_csv, 'ab') as outfile:
            for fname in csv_files:
                with open(fname, 'rb') as infile:
                    # Throw away the header line
                    infile.readline()
                    # Block copy rest of file from input to output without parsing
                    shutil.copyfileobj(infile, outfile)
        # Finally delete the whole temp directory if requested.
        if delete_input:
            shutil.rmtree(input_dir)
| [
"[email protected]"
] | |
9e5b1b073c0e724704be0a80caf06b160652600f | abc1a497c41ddd8669c8c41da18af65d08ca54e4 | /try/recon/analize_recon_event.py | 94841cb3f10478d5f14b3da82297e1331ee0b6fd | [] | no_license | gerakolt/direxeno_privet | fcef5e3b654720e277c48935acc168472dfd8ecc | 75e88fb1ed44fce32fce02677f64106121259f6d | refs/heads/master | 2022-12-20T22:01:30.825891 | 2020-10-04T06:01:07 | 2020-10-04T06:01:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,176 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import time
import os
import sys
# Event-selection and plotting pipeline for Co57 reconstruction data,
# with background (BG) events carried through the same cuts.
pmts=np.array([0,1,4,7,8,15])
BGpath='/home/gerak/Desktop/DireXeno/190803/BG/EventRecon/'
path='/home/gerak/Desktop/DireXeno/190803/Co57/EventRecon/'
# Cut thresholds: baseline width, start-of-waveform index, chi2 norm.
blw_cut=15
init_cut=20
chi2_cut=5000
left=0
right=400
# Preallocated record buffer used by the (commented-out) merge step below.
Rec=np.recarray(100000, dtype=[
    ('area', 'i8', len(pmts)),
    ('blw', 'f8', len(pmts)),
    ('id', 'i8'),
    ('chi2', 'f8', len(pmts)),
    ('h', 'i8', (200, len(pmts))),
    ('init_event', 'i8'),
    ('init_wf', 'i8', len(pmts))
    ])
j=0
# NOTE(review): `id` shadows the builtin; kept as-is (only used locally).
id=0
WFs=np.zeros((len(pmts), 1000))
recon_WFs=np.zeros((len(pmts), 1000))
# One-shot merge of partial recon files into a single recon1ns.npz
# (kept for reference; destructive — it removes the inputs):
# for filename in os.listdir(path):
#     if filename.endswith(".npz") and filename.startswith("recon1ns"):
#         print(filename)
#         data=np.load(path+filename)
#         rec=data['rec']
#         WFs+=data['WFs']
#         recon_WFs+=data['recon_WFs']
#         for r in rec:
#             Rec[j]['area']=r['area']
#             Rec[j]['blw']=r['blw']
#             Rec[j]['id']=r['id']
#             Rec[j]['chi2']=r['chi2']
#             Rec[j]['init_wf']=r['init_wf']
#             Rec[j]['h']=r['h']
#             Rec[j]['init_event']=r['init_event']
#             if r['id']>id:
#                 id=r['id']
#             j+=1
#         # sys.exit()
#         os.remove(path+filename)
# np.savez(path+'recon1ns'.format(id), rec=Rec[:j-1], WFs=WFs, recon_WFs=recon_WFs)
# Load background and signal recon files.
data=np.load(BGpath+'recon1ns.npz')
BG=data['rec']
data=np.load(path+'recon1ns.npz')
rec=data['rec']
WFs=data['WFs']
recon_WFs=data['recon_WFs']
# Summed waveform vs. reconstruction overlay, one panel per PMT.
fig, ax=plt.subplots(2,3)
fig.subplots_adjust(wspace=0, hspace=0)
fig.suptitle('Co57', fontsize=25)
x=np.arange(1000)/5
for i in range(len(pmts)):
    np.ravel(ax)[i].plot(x, WFs[i], 'r1', label='WF: PMT{}'.format(pmts[i]))
    np.ravel(ax)[i].plot(x, recon_WFs[i], 'b-.', label='Recon')
    np.ravel(ax)[i].legend(fontsize=12)
# init_wf distribution per PMT, then apply the init cut to rec and BG.
fig, ax=plt.subplots(2,3)
fig.suptitle('Co57', fontsize=25)
x=np.arange(1000)/5
for i in range(len(pmts)):
    np.ravel(ax)[i].hist(rec['init_wf'][:,i], bins=100, range=[0,400], label='PMT{} init_wf'.format(pmts[i]))
    np.ravel(ax)[i].legend(fontsize=15)
rec=rec[np.all(rec['init_wf']>init_cut, axis=1)]
BG=BG[np.all(BG['init_wf']>init_cut, axis=1)]
# Baseline-width distributions and the combined (quadrature) BLW cut.
fig, ax=plt.subplots(2,3)
fig.subplots_adjust(wspace=0, hspace=0)
fig.suptitle('Co57', fontsize=25)
x=np.arange(1000)/5
for i in range(len(pmts)):
    np.ravel(ax)[i].hist(rec['blw'][:,i], bins=100, range=[0,30], label='PMT{} BLW'.format(pmts[i]))
    np.ravel(ax)[i].legend(fontsize=15)
plt.figure()
plt.hist(np.sqrt(np.sum(rec['blw']**2, axis=1)), bins=100, label='BLW', range=[0,30])
plt.axvline(blw_cut, ymin=0, ymax=1, color='k')
plt.legend(fontsize=15)
rec=rec[np.sqrt(np.sum(rec['blw']**2, axis=1))<blw_cut]
BG=BG[np.sqrt(np.sum(BG['blw']**2, axis=1))<blw_cut]
# Chi2 distributions and the combined chi2 cut; also drop events with no
# photoelectrons in the first 100 time bins.
fig, ax=plt.subplots(3,2)
fig.subplots_adjust(wspace=0, hspace=0)
fig.suptitle('Co57', fontsize=25)
for i in range(len(pmts)):
    np.ravel(ax)[i].hist(rec['chi2'][:,i], bins=100, label='PMT{} chi2'.format(pmts[i]))
    np.ravel(ax)[i].set_yscale('log')
    np.ravel(ax)[i].legend(fontsize=15)
plt.figure()
plt.hist(np.sqrt(np.sum(rec['chi2']**2, axis=1)), bins=100, label='chi2')
plt.axvline(chi2_cut, ymin=0, ymax=1, color='k')
plt.legend(fontsize=15)
plt.yscale('log')
rec=rec[np.sqrt(np.sum(rec['chi2']**2, axis=1))<chi2_cut]
rec=rec[np.sum(np.sum(rec['h'][:,:100,:], axis=2), axis=1)>0]
BG=BG[np.sqrt(np.sum(BG['chi2']**2, axis=1))<chi2_cut]
BG=BG[np.sum(np.sum(BG['h'][:,:100,:], axis=2), axis=1)>0]
# Prompt-fraction cut: PEs in the first 10 bins relative to the first 100.
init=np.sum(np.sum(rec['h'][:,:10,:], axis=2), axis=1)
full=np.sum(np.sum(rec['h'][:,:100,:], axis=2), axis=1)
BGinit=np.sum(np.sum(BG['h'][:,:10,:], axis=2), axis=1)
BGfull=np.sum(np.sum(BG['h'][:,:100,:], axis=2), axis=1)
plt.figure()
plt.hist(init/full, bins=100, range=[0,1], label='Relative number of PEs in first 10 ns')
rec=rec[init/full<0.5]
BG=BG[BGinit/BGfull<0.5]
# Mean temporal PE profile per PMT after all cuts so far.
fig, ax=plt.subplots(3,2)
fig.subplots_adjust(wspace=0, hspace=0)
fig.suptitle('Co57', fontsize=25)
for i in range(len(pmts)):
    np.ravel(ax)[i].plot(np.mean(rec['h'][:,:,i], axis=0), 'k-.', label='PMT{}'.format(pmts[i]))
# Top-floor vs bottom-floor PE correlation, with a dn<3*up+18 cut line.
plt.figure()
up=np.sum(rec['h'][:,:100,0], axis=1)+np.sum(rec['h'][:,:100,1], axis=1)
dn=np.sum(rec['h'][:,:100,-1], axis=1)+np.sum(rec['h'][:,:100,-2], axis=1)+np.sum(rec['h'][:,:100,-3], axis=1)
plt.plot(np.arange(450), np.arange(450)*3+18, 'k--')
plt.hist2d(up, dn, bins=[100, 100], range=[[0,350], [0,700]], norm=mcolors.PowerNorm(0.3))
plt.xlabel('Sum of PEs in the top floor PMTs', fontsize=25)
plt.ylabel('Sum of PEs in the bottom floor PMTs', fontsize=25)
rec0=rec
rec=rec[dn<3*up+18]
plt.legend(fontsize=15)
# Live times (ms epochs) of the different runs, used to scale BG to the
# signal exposure (TA/TBG).
TB=1564926608911-1564916365644
TA=1564916315672-1564886605156
TBG=1564874707904-1564826183355
TCs=1564823506349-1564820274767
hist, bins=np.histogram(np.sum(np.sum(BG['h'][:,:100,:], axis=2), axis=1), bins=np.arange(250)*4)
plt.figure()
plt.hist(np.sum(np.sum(rec['h'][:,:100,:], axis=2), axis=1), bins=np.arange(250)*4, histtype='step', linewidth=5, label='All events')
plt.bar(0.5*(bins[1:]+bins[:-1]) ,TA/TBG*hist, label='BG', width=bins[1:]-bins[:-1], color='orange', alpha=0.5)
plt.axvline(left, 0 ,1, color='k')
plt.axvline(right, 0 ,1, color='k')
plt.legend(fontsize=15)
# Per-PMT spectra before vs after the up/down cut.
fig, ax=plt.subplots(2,3)
# fig.suptitle('Co57 - Spec - slow', fontsize=25)
for i in range(len(pmts)):
    np.ravel(ax)[i].hist(np.sum(rec['h'][:,:,i], axis=1), bins=np.arange(200), histtype='step', label='After\n up-dn cut\n PMT{}'.format(i), linewidth=3)
    np.ravel(ax)[i].hist(np.sum(rec0['h'][:,:,i], axis=1), bins=np.arange(200), histtype='step', label='Before\n up-dn cut', linewidth=3)
    np.ravel(ax)[i].legend(fontsize=15)
# Pairwise PMT correlation plots (kept for reference, disabled):
# fig, ax=plt.subplots(3,5)
# k=0
# for i in range(len(pmts)-1):
#     hi=rec['h'][:,:,i]
#     for j in range(i+1, len(pmts)):
#         hj=rec['h'][:,:,j]
#         np.ravel(ax)[k].hist((np.sum(hi, axis=1)-np.mean(np.sum(hi, axis=1)))*(np.sum(hj, axis=1)-np.mean(np.sum(hj, axis=1)))/(np.mean(np.sum(hj, axis=1))*np.mean(np.sum(hi, axis=1))),
#                              label='PMT{}-PMT{}'.format(pmts[i], pmts[j]), bins=100, range=[-1, 1])
#         np.ravel(ax)[k].legend()
#         k+=1
plt.show()
| [
"[email protected]"
] | |
403fac664e7532d39cf7a726cf9165c6b7e21555 | fa461310d67a51dc0f473e54bd02c90c12c7f7dc | /Query understanding/demo1.py | a9f1754dc771f5deedea583220bd1d8b0d3f305b | [] | no_license | yangeryang/Ads-ranking- | 624cf215eda0837e0df738a7ec96d2811d053916 | 216c10fa49c52e0fbb913ef2a7d53cd92700d576 | refs/heads/master | 2020-05-22T05:23:33.137029 | 2019-05-12T16:36:27 | 2019-05-12T16:36:27 | 186,234,700 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | import sys
from pyspark import SparkContext
if __name__ == "__main__":
    # First CLI argument: path of the raw train file.
    input_path = sys.argv[1]
    sc = SparkContext(appName="demo1")
    # Upper-case every line, then keep only lines starting with "T".
    lines = sc.textFile(input_path)
    upper_lines = lines.map(lambda line: line.upper())
    t_lines = upper_lines.filter(lambda line: line.startswith("T"))
    t_lines.saveAsTextFile("demo_T_output6")
    sc.stop()
| [
"[email protected]"
] | |
8cd7008808ed4e68c620662f0cb044447e6f4d45 | 5517c6ab30ba8f54595e658f4c971e2bdee67fd0 | /lesson_4/tests.py | a59d425d6931a7d6ab76beea365b553269755be2 | [] | no_license | martyni/pythonistas | ae7d71fdc22f4c4b54c3cefa579d3bdf3b12aba4 | fff3a822e3bed6a9f0d31e6b3117d52c597a9519 | refs/heads/master | 2020-04-03T21:24:33.446996 | 2018-10-31T16:30:47 | 2018-10-31T16:30:47 | 155,573,507 | 0 | 0 | null | 2018-10-31T16:36:15 | 2018-10-31T14:43:47 | Python | UTF-8 | Python | false | false | 852 | py | import unittest
from module import multiply_numbers, join_strings, age_in_years
from solution import multiply_numbers, join_strings, age_in_years #Uncomment this line for solution
class TestStringMethods(unittest.TestCase):
    """Exercise tests for multiply_numbers / join_strings / age_in_years.

    Written for Python 2 (bare ``print`` statements).
    NOTE(review): the file imports the same three names from both
    ``module`` and ``solution``; the second import shadows the first even
    though its trailing comment says "Uncomment this line for solution".
    """
    def test_multiply_numbers(self):
        # Echo the computed value before asserting (debugging aid).
        print
        print "Testing multiply_numbers"
        print multiply_numbers(3,4)
        self.assertEqual(multiply_numbers(3,4),12)
    def test_join_strings(self):
        print
        print "Testing join_strings"
        print join_strings("hello ","how are you?")
        self.assertEqual(join_strings("hello ","how are you?"),"hello how are you?")
    def test_age_in_years(self):
        # NOTE(review): expected age 29 is only valid relative to the date
        # the exercise was written (birthday 13/4/1989) — time-dependent.
        print
        print "Testing age_in_years"
        print age_in_years(13,4,1989)
        self.assertEqual(age_in_years(13,4,1989), 29)
if __name__ == '__main__':
    # Run this test module directly with the stdlib unittest runner.
    unittest.main()
| [
"[email protected]"
] | |
3ab6dc1a4cc8e4eaed22794a34912223c44474b9 | 76aa6379086fd042d284435e7403765fdfae8302 | /examples/Demos/UR10/controller_manuell_custom.py | b60ec6e159affb28b646fb08f651cde61844bcd7 | [] | no_license | faichele/SofaROSConnector | 12449bdf2be4ca26146497f2b2e35bbd3e37035e | 7e7dcb16492003fdbc881df7b24842dd2d868a27 | refs/heads/master | 2020-06-05T18:30:25.101634 | 2020-01-31T13:47:24 | 2020-01-31T13:47:24 | 192,511,720 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,281 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import Sofa
import copy
import sys
import os
import time
from os.path import isfile, join
class robotJointController(Sofa.PythonScriptController):
    """SOFA controller for the UR10 arm: keyboard jogging of the six
    joints, preset poses, a ROS-control toggle, and an optional scripted
    360-degree sweep of joint 1 ("circle trajectory")."""
    def initGraph(self, node):
        """Locate the ToolControl object in the scene graph and select the
        joint range [3, 9] for control."""
        self.firstSteps = 2
        self.executeMotion = False
        self.steps=0
        self.arbitraryControl = node.getRoot()\
            .getChild("DAE_blendfix_scene")\
            .getChild("KinematicsModels")\
            .getChild("ur10_kinematics_model.ur10_kinematics_model.")\
            .getObject("ToolControl")
        if (self.arbitraryControl is not None):
            print('(controller_manuell.py::robotJointController) ArbitraryController found: %s' % self.arbitraryControl.getName())
            self.arbitraryControl.findData('ControlIndex').value = [[3,9]]
            print('(controller_manuell.py::robotJointController) %s joints found.' % (len(self.arbitraryControl.findData('JointNames').value)))
            print('(controller_manuell.py::robotJointController) Joint names: %s' % (self.arbitraryControl.findData('JointNames').value))
            print('(controller_manuell.py::robotJointController) Control Index: %s' % (self.arbitraryControl.findData('ControlIndex').value))
            #print('(controller_manuell.py::robotJointController) Initial joint angles: %s' % (self.arbitraryControl.findData('KinematicValues').value))
    def cleanup(self):
        # Called on scene teardown; flush so the message is not lost.
        print('Python script finishes')
        sys.stdout.flush()
        return 0
    # called on each animation step
    def onBeginAnimationStep(self, dt):
        """When executeMotion is on: step 0 moves the arm to a start pose,
        subsequent steps decrement joint 1 by 120 deg/s until a full 360
        degree sweep is complete (maxStep steps)."""
        print('onBeginAnimationStep(' + str(dt) + ')')
        # NOTE(review): kinematicValues is mutated below but never written
        # back via a findData assignment (unlike onKeyPressed) — this only
        # has an effect if .value returns a live reference; confirm.
        kinematicValues = (self.arbitraryControl.findData('KinematicValues').value);
        speedFactor = 1 # 0..1; 1 means maximum speed the real robot can do
        robotIncrementValue=120 * dt * speedFactor;
        # Steps needed for a 360-degree sweep at 120 deg/s, plus one.
        maxStep = ((360 / 120) * (1/dt) * (1/speedFactor)) + 1;
        if self.executeMotion and self.steps==0:
            # circle trajectory: move to the starting pose first
            kinematicValues[4][0] = 0;
            kinematicValues[5][0] = 0;
            kinematicValues[6][0] = 0;
            kinematicValues[7][0] = -90;
            kinematicValues[8][0] = 0;
            kinematicValues[9][0] = 0;
            self.steps = self.steps + 1
        else:
            if self.executeMotion and self.steps < maxStep:
                # circle trajectory: sweep joint 1 (index 4)
                kinematicValues[4][0]-=robotIncrementValue;
                self.steps = self.steps + 1
        return 0
    def onEndAnimationStep(self, dt):
        print('onEndAnimationStep(' + str(dt) + ')')
        return 0
    def onKeyPressed(self,c):
        """Keyboard map: C toggles the scripted motion; Y toggles ROS
        control per joint; K/L jump to preset poses; 1-6 increment and
        A/W/D/F/G/H decrement joints 1-6 by 0.5 degrees."""
        print('onKeyPressed(' + str(c) + ')')
        kinematicValues = (self.arbitraryControl.findData('KinematicValues').value);
        jointControlledByROS = (self.arbitraryControl.findData('controlledByROS').value);
        print('(controller_manuell.py::robotJointController) Current joint angles: %s' % (self.arbitraryControl.findData('KinematicValues').value))
        robotIncrementValue=0.5
        if (c == "C"):
            self.executeMotion = not self.executeMotion
        # toggle robot
        if (c == "Y"):
            # NOTE(review): each flag is set from its *neighbor's* value
            # (index i from i+1) rather than toggled in place — looks like
            # a copy-paste off-by-one; confirm intended behavior.
            jointControlledByROS[4][0] = 1 - jointControlledByROS[5][0];
            jointControlledByROS[5][0] = 1 - jointControlledByROS[6][0];
            jointControlledByROS[6][0] = 1 - jointControlledByROS[7][0];
            jointControlledByROS[7][0] = 1 - jointControlledByROS[8][0];
            jointControlledByROS[8][0] = 1 - jointControlledByROS[9][0];
            jointControlledByROS[9][0] = 1 - jointControlledByROS[10][0];
        # Preset pose 1.
        if (c == "K"):
            kinematicValues[4][0] = 165;
            kinematicValues[5][0] = -85;
            kinematicValues[6][0] = 65;
            kinematicValues[7][0] = -5;
            kinematicValues[8][0] = 95;
            kinematicValues[9][0] = 0;
        # Preset pose 2.
        if (c == "L"):
            kinematicValues[4][0] = 212;
            kinematicValues[5][0] = -21;
            kinematicValues[6][0] = -77;
            kinematicValues[7][0] = 81;
            kinematicValues[8][0] = 43;
            kinematicValues[9][0] = 10;
        ### robot
        # first joint
        if (c == "1"):
            kinematicValues[4][0]+=robotIncrementValue;
        if (c == "A"):
            kinematicValues[4][0]-=robotIncrementValue;
        # second joint
        if (c == "2"):
            kinematicValues[5][0]+=robotIncrementValue;
        if (c == "W"):
            kinematicValues[5][0]-=robotIncrementValue;
        # third joint
        if (c == "3"):
            kinematicValues[6][0]+=robotIncrementValue;
        if (c == "D"):
            kinematicValues[6][0]-=robotIncrementValue;
        # fourth joint
        if (c == "4"):
            kinematicValues[7][0]+=robotIncrementValue;
        if (c == "F"):
            kinematicValues[7][0]-=robotIncrementValue;
        # fifth joint
        if (c == "5"):
            kinematicValues[8][0]+=robotIncrementValue;
        if (c == "G"):
            kinematicValues[8][0]-=robotIncrementValue;
        # sixth joint
        if (c == "6"):
            kinematicValues[9][0]+=robotIncrementValue;
        if (c == "H"):
            kinematicValues[9][0]-=robotIncrementValue;
        # useful position (kept for reference, disabled)
        #if (c == "M"):
        #    kinematicValues[0][0]=0;
        #    kinematicValues[0][1]=-20;
        #    kinematicValues[0][2]=-120;
        #    kinematicValues[0][3]=0;
        #    kinematicValues[0][4]=50;
        #    kinematicValues[0][5]=0;
        # if (c == "K"): # careful, 'K' is currently used above for the hand starting position
        #     kinematicValues[0][0]=0;
        #     kinematicValues[0][1]=-30;
        #     kinematicValues[0][2]=-70;
        #     kinematicValues[0][3]=0;
        #     kinematicValues[0][4]=-60;
        #     kinematicValues[0][5]=0;
        # if (c == "M"):
        #     kinematicValues[0][0]=0;
        #     kinematicValues[0][1]=-50;
        #     kinematicValues[0][2]=-70;
        #     kinematicValues[0][3]=0;
        #     kinematicValues[0][4]=-60;
        #     kinematicValues[0][5]=0;
        # Serialize the (possibly updated) tables back into the data
        # fields; transformTableInString/transformDoubleTableInSimpleTable
        # are helpers defined elsewhere in the scene scripts.
        (self.arbitraryControl.findData('KinematicValues').value) = transformTableInString( transformDoubleTableInSimpleTable(kinematicValues) )
        (self.arbitraryControl.findData('controlledByROS').value) = transformTableInString( transformDoubleTableInSimpleTable(jointControlledByROS) )
        return 0
class objectController(Sofa.PythonScriptController):
    """Keyboard mover for the 'falling_1_Object' rigid positions.

    NOTE(review): every numeric-keypad handler below is commented out, so
    onKeyPressed currently writes the positions back unchanged on every
    key press (an effective no-op); positionChange/numNode are unused.
    """
    def initGraph(self, node):
        # Cache the rigid object whose 'position' field we rewrite.
        self.rigidMap = node.getObject('falling_1_Object');
    def onKeyPressed(self,c):
        objPos = self.rigidMap.findData('position').value;
        numNode=len(objPos);
        positionChange = 2
        # if (c == "8"):
        #     for i in range(numNode):
        #         objPos[i][1]+=positionChange;
        #
        # if (c == "2"):
        #     for i in range(numNode):
        #         objPos[i][1]-=positionChange;
        #
        # if (c == "4"):
        #     for i in range(numNode):
        #         objPos[i][0]+=positionChange;
        #
        # if (c == "6"):
        #     for i in range(numNode):
        #         objPos[i][0]-=positionChange;
        #
        # if (c == "/"):
        #     for i in range(numNode):
        #         objPos[i][2]+=positionChange;
        #
        # if (c == "5"):
        #     for i in range(numNode):
        #         objPos[i][2]-=positionChange;
        # UP key############################## NO more necessary ??? ######################
        #if ord(c)==19:
        #    for i in range(numNode):
        #        restPos[i][2]+=0.005;
        # DOWN key
        #if ord(c)==21:
        #    for i in range(numNode):
        #        restPos[i][2]-=0.005;
        # LEFT key
        #if ord(c)==18:
        #    for i in range(numNode):
        #        restPos[i][0]-=0.005;
        # RIGHT key
        #if ord(c)==20:
        #    for i in range(numNode):
        #        restPos[i][0]+=0.005;
        #########################################################
        # Serialize positions back into the data field (helpers defined
        # elsewhere in the scene scripts).
        self.rigidMap.findData('position').value = transformTableInString( transformDoubleTableInSimpleTable(objPos) )
        return 0
class controllerGrasper1(Sofa.PythonScriptController):
    """Keyboard controller for the first grasper jaw.

    '+'/'-' nudge every initial point of the mapping by 0.1 along Y;
    '/' and '*' move them by a whole unit (opening/closing the jaw).
    """

    # key -> offset added to the Y component of every initial point
    _DELTAS = {"+": 0.1, "-": -0.1, "/": 1, "*": -1}

    def initGraph(self, node):
        # Handle on the mapping whose rest points define the jaw pose.
        self.rigidMap = node.getObject('map')

    def onKeyPressed(self, c):
        points = self.rigidMap.findData('initialPoints').value
        delta = self._DELTAS.get(c)
        if delta is not None:
            for point in points:
                point[1] += delta
        # Always write back, exactly as before, even when no key matched.
        self.rigidMap.findData('initialPoints').value = transformTableInString(transformDoubleTableInSimpleTable(points))
        return 0
class controllerGrasper2(Sofa.PythonScriptController):
    """Keyboard controller for the second grasper jaw.

    Mirrors controllerGrasper1 with inverted signs so the two jaws move
    toward/away from each other on the same key press.
    """

    # key -> offset added to the Y component of every initial point
    _DELTAS = {"+": -0.1, "-": 0.1, "/": -1, "*": 1}

    def initGraph(self, node):
        # Handle on the mapping whose rest points define the jaw pose.
        self.rigidMap = node.getObject('map')

    def onKeyPressed(self, c):
        points = self.rigidMap.findData('initialPoints').value
        delta = self._DELTAS.get(c)
        if delta is not None:
            for point in points:
                point[1] += delta
        # Always write back, exactly as before, even when no key matched.
        self.rigidMap.findData('initialPoints').value = transformTableInString(transformDoubleTableInSimpleTable(points))
        return 0
| [
"[email protected]"
] | |
3d7c5b4eb3d00606ff5abe8c11832193e7201eb5 | 4331b28f22a2efb12d462ae2a8270a9f666b0df1 | /.history/dvdstore/webapp/views_20190914163031.py | 980217ae2624c75216a4e349f8f403f3cc89970e | [] | no_license | ZiyaadLakay/csc312.group.project | ba772a905e0841b17478eae7e14e43d8b078a95d | 9cdd9068b5e24980c59a53595a5d513c2e738a5e | refs/heads/master | 2020-07-26T23:30:22.542450 | 2019-09-16T11:46:41 | 2019-09-16T11:46:41 | 200,703,160 | 0 | 0 | null | 2019-08-05T17:52:37 | 2019-08-05T17:52:37 | null | UTF-8 | Python | false | false | 10,234 | py | from django.shortcuts import render
from .models import DVD, Transaction, Customer
from django.core.paginator import EmptyPage,PageNotAnInteger, Paginator
from django.db.models import Q
from django.contrib.auth.models import User, auth
from django.shortcuts import render, redirect
from django.contrib import messages
from django.core.files.storage import FileSystemStorage
from django.contrib.auth.decorators import login_required, permission_required
from .form import DocumentForm
import datetime
#This is the homepage for the User
def home(request):
    """Public home page listing DVDs, with optional title/genre filtering.

    GET params:
        query -- substring matched case-insensitively against DVD titles
        gen   -- genre filter, applied only when no title query is given
        page  -- page number for the 6-per-page paginator
    """
    dvds = DVD.objects.all()
    query = request.GET.get("query")
    gen = request.GET.get("gen")
    if query:
        dvds = DVD.objects.filter(Q(Title__icontains=query))  # search by title
        # Reuse the filtered queryset instead of re-running the query.
        if not dvds.exists():
            messages.info(request, 'No search results for : ' + query)
    elif gen:
        dvds = DVD.objects.filter(Q(genre__icontains=gen))  # filter by genre
    paginator = Paginator(dvds, 6)  # show 6 DVDs per page
    page = request.GET.get('page')
    dvds = paginator.get_page(page)
    genre = {'Action', 'Comedy', 'Drama', 'Family', 'Romance'}
    # BUG FIX: the genre set used to be passed as render()'s 4th positional
    # argument, which is content_type, so the template never received it.
    # Both values now travel in a single context dict.
    return render(request, 'home.html', {'dvds': dvds, 'genre': genre})
#This is the page for clerks
@login_required
def clerk(request):
    """Clerk dashboard: searchable, paginated DVD catalogue plus admin data."""
    dvds = DVD.objects.all()
    search_term = request.GET.get("query")
    if search_term:
        # Narrow the catalogue to titles containing the search term.
        dvds = DVD.objects.filter(Q(Title__icontains=search_term))
    page_number = request.GET.get('page')
    dvds = Paginator(dvds, 6).get_page(page_number)  # 6 DVDs per page
    context = {
        'dvds': dvds,
        'form': DocumentForm(),
        'trans': Transaction.objects.all(),
        'users': User.objects.all(),
        'customer': Customer.objects.all(),
    }
    return render(request, 'clerk.html', context)
@login_required
def userstbl(request):
    """Clerk view of registered users; 'query' filters users by username."""
    dvds = DVD.objects.all()
    trans = Transaction.objects.all()
    users = User.objects.all()
    customer = Customer.objects.all()
    search_term = request.GET.get("query")
    if search_term:
        users = User.objects.filter(Q(username__icontains=search_term))
    # NOTE: as in the other clerk views, pagination applies to the DVD list.
    dvds = Paginator(dvds, 6).get_page(request.GET.get('page'))
    context = {'dvds': dvds, 'form': DocumentForm(), 'trans': trans,
               'users': users, 'customer': customer}
    return render(request, 'userstbl.html', context)
@login_required
def transactions(request):
    """Clerk view of rentals; 'query' filters by transaction number."""
    dvds = DVD.objects.all()
    trans = Transaction.objects.all()
    users = User.objects.all()
    customer = Customer.objects.all()
    search_term = request.GET.get("query")
    if search_term:
        trans = Transaction.objects.filter(Q(TransactionNumber__icontains=search_term))
    # NOTE: as in the other clerk views, pagination applies to the DVD list.
    dvds = Paginator(dvds, 6).get_page(request.GET.get('page'))
    context = {'dvds': dvds, 'form': DocumentForm(), 'trans': trans,
               'users': users, 'customer': customer}
    return render(request, 'transactions.html', context)
def register2(request):
    """Clerk-side account creation.

    The initial password is the first initial plus the surname.  Every path
    redirects back to the clerk dashboard after flashing an outcome message.
    """
    if request.method == 'POST':
        first_name = request.POST['first_name']
        last_name = request.POST['last_name']
        username = request.POST['username']
        email = request.POST['email']
        password1 = first_name[0] + last_name  # default password convention
        if User.objects.filter(username=username).exists():
            messages.info(request, 'Username Taken')
            return redirect('clerk')
        elif User.objects.filter(email=email).exists():
            messages.info(request, 'Email Taken')
            # BUG FIX: this branch previously fell through and created the
            # account anyway, registering a duplicate e-mail address.
            return redirect('clerk')
        user = User.objects.create_user(username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
        user.save()
        messages.info(request, 'User Created')
    return redirect('/clerk')
def model_form_upload(request):
    """Accept the DVD upload form; persist it when the POST validates."""
    if request.method == 'POST':
        upload_form = DocumentForm(request.POST, request.FILES)
        if upload_form.is_valid():
            upload_form.save()
    return redirect('/clerk')
def booking(request):
    """Record which user has booked the given DVD for pickup."""
    booked_by = request.POST['username']
    movie_id = request.POST['dvdID']
    DVD.objects.filter(id=movie_id).update(BookingPickup=booked_by)
    return redirect('home')
def checkout(request):
    """Rent a DVD out: mark it off-stock and record the transaction.

    The transaction number is the payment method plus a timestamp fragment
    (year + day-of-month + HHMMSS); the amount is days * daily price.
    """
    movie_id = request.POST['dvdID']
    rental_days = request.POST['numDaysBooked']
    daily_price = request.POST['dvdPrice']
    renter_id = request.POST['user_ID']
    title = request.POST['MovieTitle']
    pay_method = request.POST['payment']
    total = int(rental_days) * int(daily_price)
    DVD.objects.filter(id=movie_id).update(NumDaysBooked=rental_days, InStock=False)
    rent_date = datetime.date.today()
    due_date = rent_date + datetime.timedelta(days=int(rental_days))
    stamp = datetime.datetime.now().strftime("%H%M%S")
    tx_number = pay_method + str(rent_date)[0:4] + str(rent_date)[8:10] + stamp
    record = Transaction(users_ID=renter_id, TransactionNumber=tx_number,
                         RentDate=rent_date, DueDate=due_date, MovieTitle=title,
                         Payment_Method=pay_method, Amount="R" + str(total),
                         dvdID=movie_id)
    record.save()
    return redirect('/clerk')
def checkin(request):
    """Return a DVD to stock, clearing its booking and rental duration."""
    movie_id = request.POST['dvdID']
    DVD.objects.filter(id=movie_id).update(BookingPickup='None', InStock=True, NumDaysBooked=0)
    return redirect('/clerk')
def deleteMovie(request):
    """Remove the DVD identified by the POSTed dvdID from the catalogue."""
    DVD.objects.filter(id=request.POST['dvdID']).delete()
    return redirect('/clerk')
def deleteTransaction(request):
    """Delete the transaction identified by the POSTed transID."""
    Transaction.objects.filter(id=request.POST['transID']).delete()
    return redirect('/transactions')
def deleteUser(request):
    """Delete the auth user identified by the POSTed userID."""
    User.objects.filter(id=request.POST['userID']).delete()
    return redirect('/userstbl')
def user_detail(request):
    """Profile page: the logged-in user's record plus all customer rows.

    When the visitor is anonymous, ``id`` stays ``None`` and ``detail1``
    matches no users (filter(id=None)).
    """
    id = None  # NOTE: shadows the builtin 'id'; kept for compatibility
    if request.user.is_authenticated:
        id = request.user.id
    print(id)  # debug trace of the resolved user id
    detail2 = Customer.objects.all()
    detail1 = User.objects.filter( id = id )
    # Disabled experiment: scrape phone/address/identification out of the
    # customer row's string representation. Kept for reference only.
    #detail2 = Customer.objects.filter(Q(username__icontains=str(detail1[0]))).values()
    #answers_list = list(detail2)
    #myString=str(answers_list[0])
    #import re
    #myarray=re.split(':|,',myString)
    #if len(myarray)>39:
    #    for i in range(len(myarray)):
    #        print(str(i)+" "+str(myarray[i]))
    #    phone_number=str(myarray[39])
    #    address=str(myarray[41])
    #    identification=str(myarray[43])
    #    return render(request, 'user_detail.html',{'detail1':detail1 , 'detail2' : detail2,'phone_number':phone_number,'identification':identification ,'address':address})
    return render(request, 'user_detail.html',{'detail1':detail1 , 'detail2' : detail2})
def registerCustomer(request):
    """Self-service customer registration.

    On POST: validates that the two passwords match and that the username
    and e-mail are unused, then creates the Customer and redirects to the
    login page.  On GET: renders the registration form.
    """
    if request.method == 'POST':
        first_name= request.POST['first_name']
        last_name= request.POST['last_name']
        phone_number= request.POST['phone_number']
        address= request.POST['address']
        identification= request.POST['identification']
        email= request.POST['email']
        password1= request.POST['password1']
        password2= request.POST['password2']
        username= request.POST['username']
        if password1 == password2 :
            if Customer.objects.filter(username=username).exists():
                messages.info(request, 'Username Taken')
                return redirect('register.html')
            elif Customer.objects.filter(email=email).exists():
                messages.info(request, 'Email Taken')
                return redirect('register.html')
            user = Customer.objects.create_user(phone_number=phone_number, address=address,identification=identification,username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
            # customer = Customer.objects.create_user(phone_number=phone_number,identification=identification,address=address)
            user.save()
            # customer.save()
            messages.info(request, 'User Created')
            # messages.info(request, 'Customer Created')
            return redirect('login.html')
        else:
            print('password does not match')
            messages.info(request, 'Password does not match')
            return redirect('register.html')
        # NOTE(review): the line below is unreachable — both branches above
        # return before it.  Kept as-is; candidate for removal.
        return redirect('login.html')
    else:
        return render(request, 'register.html')
def updateCustomer(request):
    """Apply POSTed profile fields to an existing Customer, then go home."""
    if request.method == 'POST':
        # Collect the editable fields in the same order they were read before.
        fields = {
            'first_name': request.POST['first_name'],
            'last_name': request.POST['last_name'],
            'phone_number': request.POST['phone_number'],
            'address': request.POST['address'],
            'identification': request.POST['identification'],
            'email': request.POST['email'],
            'username': request.POST['username'],
        }
        Customer.objects.filter(id=request.POST['userID']).update(**fields)
    return redirect('home')
def updateUser(request):
    """Apply POSTed account fields to an existing auth User, then go home."""
    if request.method == 'POST':
        # Collect the editable fields in the same order they were read before.
        fields = {
            'first_name': request.POST['first_name'],
            'last_name': request.POST['last_name'],
            'email': request.POST['email'],
            'username': request.POST['username'],
        }
        User.objects.filter(id=request.POST['userID']).update(**fields)
    return redirect('home')
| [
"[email protected]"
] | |
76a29edd0e8bbc220e530784749c7239e7e13007 | 650772c1de39412ed293bdd9f28518d3e50b2ef0 | /transformations/color_demo.py | 4f726733b2310e4fb9232d6f0ae9d75b6b914973 | [] | no_license | tuftsceeo/Onshape-PLUS-Team | 2ecb62d40ba5349cad3ebd39368b771d95d88649 | 40bcd952ca7b84660615d8812c0e3ec3ce0211e6 | refs/heads/master | 2022-12-03T07:22:49.854357 | 2020-08-22T00:47:29 | 2020-08-22T00:47:29 | 285,607,231 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,350 | py | ###############################################################################
# Project name: Color Demo
# File name: color_demo.py
# Author: Therese (Teo) Patrosio @imnotartsy
# Date: 7/21/20
# Description: Connects spike bluetooth to onshape api for 7/23 demo
# History:
# Last modified by Teo 7/24/20
# (C) Tufts Center for Engineering Education and Outreach (CEEO)
###############################################################################
import serial #pip3 install pyserial
import utils.transform_utils as transform
import utils.onshape_utils as onshape
import argparse
from datetime import datetime
### Connect to Serial
# NOTE(review): the port is hard-coded to one specific LEGO hub — adjust for
# other machines (the commented alternative read it from CLI args).
ser = serial.Serial('/dev/tty.LEGOHubOwen-SerialPortP') # serial.Serial(port_args.port) #
### Gets Spike starter message
# The hub prints a two-line banner on connect; echo it to the console.
for i in range(0,2):
    line = ser.readline()
    print(line.decode(), end="")
### Catch case for if spike goes into data spewing mode (untested) (WIP)
# Cancels any Data Sending
# Ctrl-C (0x03) sent several times to interrupt any running program.
ser.write('\x03'.encode())
ser.write('\x03'.encode())
ser.write('\x03'.encode())
ser.write('\x03'.encode())
### Message to send to serial
## This program gets the gesture of the spike
# Micropython source pushed to the hub's REPL line by line (\r\n delimited).
message = """
import hub,utime\r\n
from spike.control import wait_for_seconds\r\n
def setMotor(large, small):\r\n\b\b
    hub.port.C.motor.run_to_position(large, 50)\r\n\b
    hub.port.D.motor.run_to_position(small, 50)\r\n\b
    \r\n\r\n\r\n\r\n
"""
print(message)
ser.write('\x03'.encode())
ser.write(message.encode())
last = 0  # last motor position that triggered a transform
assembly = onshape.getAssemblyInfo(False)
# print(assembly["MvFKyhclA9pW5axe3"]["fullPath"])
### Read Data and call API
# Poll the hub up to 1000 times; each line is expected to be an integer
# position (presumably degrees — TODO confirm against the hub program).
for i in range(0,1000):
    line = ser.readline()
    ## Prints serial line
    print(line.decode(), end="")
    try:
        curr = int(line.decode())
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt; a
        # ValueError/UnicodeDecodeError catch would be safer.
        print("position not updated")
        curr = last
    ## If state changes, call a transform
    # Only push an update when the position moved by more than 5 units.
    if(abs(curr - last) > 5):
        ## Sets transformation
        # args: x, y, z translation (0), axis (0,0,1), angle = curr
        args = [0, 0, 0, 0, 0, 1, curr]
        ## Transforms set up (get matrix and part id from assembly info)
        M = transform.getTranslationMatrix(args, False)
        partsToTransform = [assembly["MvFKyhclA9pW5axe3"]["fullPath"]] # selects motor axle
        state = onshape.postTransform(M, False, partsToTransform, False)
        print("\tTransformation status:", state, datetime.now())
        last = curr
ser.close()
"[email protected]"
] | |
736a6dd319cdb36e01d57e42fdf371c5db550c22 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/ghwatson_faststyle/faststyle-master/losses.py | 7a4cc6b60cea27257d8a4820a88ca8fb5d7f1574 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 3,526 | py | """
This file contains the different loss functions.
File author: Grant Watson
Date: Feb 2017
"""
import tensorflow as tf
import numpy as np
def content_loss(content_layers, target_content_layers,
content_weights):
"""Defines the content loss function.
:param content_layers
List of tensors for layers derived from training graph.
:param target_content_layers
List of placeholders to be filled with content layer data.
:param content_weights
List of floats to be used as weights for content layers.
"""
assert(len(target_content_layers) == len(content_layers))
num_content_layers = len(target_content_layers)
# Content loss
content_losses = []
for i in xrange(num_content_layers):
content_layer = content_layers[i]
target_content_layer = target_content_layers[i]
content_weight = content_weights[i]
loss = tf.reduce_sum(tf.squared_difference(content_layer,
target_content_layer))
loss = content_weight * loss
_, h, w, c = content_layer.get_shape().as_list()
num_elements = h * w * c
loss = loss / tf.cast(num_elements, tf.float32)
content_losses.append(loss)
content_loss = tf.add_n(content_losses, name='content_loss')
return content_loss
def style_loss(grams, target_grams, style_weights):
    """Defines the style loss function.

    Sums, over all style layers, the weighted squared difference between the
    training-graph Gram matrix and the precomputed target Gram matrix,
    normalised by the Gram matrix size (c1 * c2).

    :param grams
        List of tensors for Gram matrices derived from training graph.
    :param target_grams
        List of numpy arrays for Gram matrices precomputed from style image.
    :param style_weights
        List of floats to be used as weights for style layers.
    """
    assert(len(grams) == len(target_grams))
    # Style loss
    style_losses = []
    # FIX: iterate with zip() instead of xrange(); xrange does not exist on
    # Python 3 (NameError there) and the behaviour is identical on Python 2.
    for gram, target_gram, style_weight in zip(grams, target_grams,
                                               style_weights):
        _, c1, c2 = gram.get_shape().as_list()
        size = c1*c2
        loss = tf.reduce_sum(tf.square(gram - tf.constant(target_gram)))
        loss = style_weight * loss / size
        style_losses.append(loss)
    style_loss = tf.add_n(style_losses, name='style_loss')
    return style_loss
def tv_loss(X):
    """Creates 2d TV loss using X as the input tensor.

    Acts on each colour channel independently: two depthwise-style 2x1 and
    1x2 difference kernels (identity / -identity per channel) compute the
    vertical and horizontal neighbour differences via convolution, and the
    loss is the sum of their squares.

    :param X:
        4D Tensor
    """
    # Per-channel identity; stacking [I, -I] along the kernel's spatial axis
    # yields a finite-difference filter that never mixes RGB channels.
    identity = np.eye(3)
    vertical_kernel = tf.constant(np.array([[identity], [-identity]]),
                                  tf.float32)
    horizontal_kernel = tf.constant(np.array([[identity, -identity]]),
                                    tf.float32)
    vdiff = tf.nn.conv2d(X, vertical_kernel, strides=[1, 1, 1, 1],
                         padding='VALID')
    hdiff = tf.nn.conv2d(X, horizontal_kernel, strides=[1, 1, 1, 1],
                         padding='VALID')
    return tf.reduce_sum(tf.square(hdiff)) + tf.reduce_sum(tf.square(vdiff))
| [
"[email protected]"
] | |
f5e6065e2191f1f68e81fc65acc158143819626d | a884039e1a8b0ab516b80c2186e0e3bad28d5147 | /Livros/Livro-Introdução à Programação-Python/Capitulo 7/Exemplos 7/Listagem7_17.py | 69bd31b1f28fb805b79086213f580f796b1c8375 | [
"MIT"
] | permissive | ramonvaleriano/python- | 6e744e8bcd58d07f05cd31d42a5092e58091e9f0 | ada70918e945e8f2d3b59555e9ccc35cf0178dbd | refs/heads/main | 2023-04-10T14:04:24.497256 | 2021-04-22T18:49:11 | 2021-04-22T18:49:11 | 340,360,400 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | # Program: Listagem7_17.py
# Author: Ramon R. Valeriano
# Description:
# Developed: 18/05/2020 - 20:19
# Updated:
m = "Uma linha\nOutra Linhas\nE mais outra linha."
print(m)
print(m.splitlines())
| [
"[email protected]"
] | |
249071385eb8a37ac3664c4808a6a142e2effe1c | 9a212f79dfc3ea2c7441bcfc92a6f6eeaf551947 | /training/mix_dicts.py | 9d33e899681127cdffddbea0468c5d2b6581f39d | [] | no_license | ptavaresh/python_snippets | 87a6b8938e7821d4c183305d26ba73ef5040ba02 | 464b3ef13accb1db9b8b0876e0508a746b0f115c | refs/heads/master | 2020-05-09T19:26:09.350513 | 2019-07-10T16:47:46 | 2019-07-10T16:47:46 | 181,378,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | dict1 = {1:1,2:2,3:3,4:4}
dict2 = {5:5,6:6,7:7,8:8}
dict3 = {**dict1, **dict2}
#dict3
#{1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8} | [
"pedro [email protected]"
] | pedro [email protected] |
9f791105c2b2f0c2f16ac41f662ea6db976f3488 | ebdf7a55b3b935f2866e7693188117e9cf8f5417 | /createDataBase.py | c701f13b33a997b19c4c71304bd7ede7fca06148 | [] | no_license | superpiter19/Python-Scripts | 576f6f23d4a22e6ce72af9f3acb76f1d167e7a93 | 0c58f3c5b9c571d9125cf12ec45cc7a314f8eda9 | refs/heads/master | 2023-02-04T08:36:51.964900 | 2023-01-27T10:17:25 | 2023-01-27T10:17:25 | 53,940,315 | 0 | 0 | null | 2022-02-16T15:10:11 | 2016-03-15T11:50:32 | Python | UTF-8 | Python | false | false | 2,453 | py | import sqlite3
#MAIN
conn = sqlite3.connect('PiterBet.db')
c = conn.cursor()
conn.execute('pragma foreign_keys=ON')
# Create tables
c.execute('''CREATE TABLE TFederation
(id INTEGER PRIMARY KEY AUTOINCREMENT,name text NOT NULL UNIQUE)''')
c.execute('''CREATE TABLE TCompetition
(id INTEGER PRIMARY KEY AUTOINCREMENT,name text NOT NULL,countryID INTEGER NOT NULL,FOREIGN KEY(countryID) REFERENCES TFederation(id) )''')
c.execute('''CREATE TABLE TTeam
(id INTEGER PRIMARY KEY AUTOINCREMENT,name text NOT NULL UNIQUE)''')
c.execute('''CREATE TABLE TMatch
(id INTEGER PRIMARY KEY AUTOINCREMENT,localTeamID INTEGER NOT NULL,visitorTeamID INTEGER NOT NULL,competitionID INTEGER NOT NULL,
date INTEGER NOT NULL, goalsLocalTeam INTEGER NOT NULL,goalsVisitorTeam INTEGER NOT NULL,
FOREIGN KEY(localTeamID) REFERENCES TTeam(id),FOREIGN KEY(visitorTeamID) REFERENCES TTeam(id),FOREIGN KEY(competitionID) REFERENCES TCompetition(id) )''')
c.execute('''CREATE UNIQUE INDEX idx_Match ON TMatch(localTeamID,visitorTeamID,date)''')
c.execute('''CREATE TABLE TZuluBet
(id INTEGER PRIMARY KEY AUTOINCREMENT,matchID INTEGER NOT NULL,prob1 INTEGER NOT NULL,probX INTEGER NOT NULL,prob2 INTEGER NOT NULL,
odd1 REAL NOT NULL,oddX REAL NOT NULL,odd2 REAL NOT NULL,stake INTEGER,
FOREIGN KEY(matchID) REFERENCES TMatch(id))''')
conn.commit()
'''
# Insert a row of data
#c.execute("INSERT INTO stocks VALUES ('2006-01-05','BUY','RHAT',100,35.14)")
c.execute("INSERT INTO TFederation(name) VALUES ('Brasil')")
c.execute("INSERT INTO TFederation(name) VALUES ('España')")
c.execute("INSERT INTO TFederation(name) VALUES ('Francia')")
c.execute("INSERT INTO TFederation(name) VALUES ('Inglaterra')")
c.execute("INSERT INTO TFederation(name) VALUES ('UEFA')")
c.execute("INSERT INTO TFederation(name) VALUES ('FIFA')")
c.execute("INSERT INTO TCompetition(name,countryID) VALUES ('Premier League',4)")
c.execute("INSERT INTO TCompetition(name,countryID) VALUES ('Championship',4)")
c.execute("INSERT INTO TCompetition(name,countryID) VALUES ('1ª División',2)")
#c.execute("INSERT INTO TCompetition(name,countryID) VALUES ('Serie A',5)")
'''
# Save (commit) the changes
conn.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
conn.close() | [
"[email protected]"
] | |
ed7791ad961fa9dd1d63297906e9bc6fdf71ef7c | be84495751737bbf0a8b7d8db2fb737cbd9c297c | /tests/test_intersections/triangle2.py | 910e5c8217bcf254300859b37732a19f7136177f | [] | no_license | mario007/renmas | 5e38ff66cffb27b3edc59e95b7cf88906ccc03c9 | bfb4e1defc88eb514e58bdff7082d722fc885e64 | refs/heads/master | 2021-01-10T21:29:35.019792 | 2014-08-17T19:11:51 | 2014-08-17T19:11:51 | 1,688,798 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,283 | py |
from tdasm import Tdasm, Runtime
from renmas.maths import Vector3
from renmas.shapes import Triangle, intersect_ray_shape_array
from renmas.core import Ray
import random
import renmas.utils as util
import timeit
asm_structs = util.structs("ray", "triangle", "hitpoint")
SSE2_ASM = """
#DATA
"""
SSE2_ASM += asm_structs + """
ray r1
triangle tri1
hitpoint hp
float one = 1.0
float zero = 0.0
float epsilon = 0.00001
float beta
float coff
float min_dist = 999999.0
float xm0[4]
float xm1[4]
float xm2[4]
float xm3[4]
float xm4[4]
float xm5[4]
float xm6[4]
float xm7[4]
uint32 xm0i[4]
uint32 result
uint32 n = 1000000
#CODE
mov eax, r1
mov ebx, tri1
mov ecx, min_dist
mov edx, hp
call ray_triangle
movaps oword [xm0], xmm0
movaps oword [xm1], xmm1
movaps oword [xm2], xmm2
movaps oword [xm3], xmm3
movaps oword [xm4], xmm4
movaps oword [xm5], xmm5
movaps oword [xm6], xmm6
movaps oword [xm7], xmm7
movaps oword [xm0i], xmm0
mov dword [result], eax
#END
global ray_triangle:
movaps xmm0, oword [ebx + triangle.p0]
movaps xmm2, oword [eax + ray.dir]
movaps xmm1, xmm0
subps xmm1, oword [ebx + triangle.p2]
movaps xmm3, xmm0
subps xmm3, oword [eax + ray.origin]
subps xmm0, oword [ebx + triangle.p1]
; f f h f
movaps xmm4, xmm1
movlhps xmm4, xmm3
shufps xmm4, xmm4, 01110101B
; k k k l
movaps xmm5, xmm2
movhlps xmm5, xmm3
shufps xmm5, xmm5, 00101010B
; f f h f * k k k l
movaps xmm7, xmm4
mulps xmm7, xmm5
; g g g h
movaps xmm6, xmm2
movlhps xmm6, xmm3
shufps xmm6, xmm6, 11010101B
; j j l j
movaps xmm4, xmm1
movhlps xmm4, xmm3
shufps xmm4, xmm4, 10001010B
; g g g h * j j l j
mulps xmm4, xmm6
; f f h f * k k k l - g g g h * j j l j
subps xmm7, xmm4
; a d a a
movaps xmm5, xmm0
movlhps xmm5, xmm3
shufps xmm5, xmm5, 00001000B
; a d a a * (f f h f * k k k l - g g g h * j j l j)
mulps xmm7, xmm5
; i l i i
movaps xmm5, xmm0
movhlps xmm5, xmm3
shufps xmm5, xmm5, 10100010B
; g g g h * i l i i
mulps xmm6, xmm5
; e h e e
movaps xmm4, xmm0
movlhps xmm4, xmm3
shufps xmm4, xmm4, 01011101B
; k k k l
movaps xmm5, xmm2
movhlps xmm5, xmm3
shufps xmm5, xmm5, 00101010B
; e h e e * k k k l
mulps xmm5, xmm4
; g g g h * i l i i - e h e e * k k k l
subps xmm6, xmm5
; b b d b
movaps xmm5, xmm1
movlhps xmm5, xmm3
shufps xmm5, xmm5, 00100000B
; b b d b * (g g g h * i l i i - e h e e * k k k l)
mulps xmm6, xmm5
addps xmm7, xmm6
; j j l j
movaps xmm5, xmm1
movhlps xmm5, xmm3
shufps xmm5, xmm5, 10001010B
; e e h e * j j l j
mulps xmm4, xmm5
; f f h f
movaps xmm6, xmm1
movlhps xmm6, xmm3
shufps xmm6, xmm6, 01110101B
; i l i i
movaps xmm5, xmm0
movhlps xmm5, xmm3
shufps xmm5, xmm5, 10100010B
; f f h f * i l i i
mulps xmm6, xmm5
; e h e e * j j l j - f f h f * i l i i
subps xmm4, xmm6
; c c c d
movaps xmm5, xmm2
movlhps xmm5, xmm3
shufps xmm5, xmm5, 10000000B
; c c c d * (e h e e * j j l j - f f h f * i l i i)
mulps xmm4, xmm5
addps xmm7, xmm4
movhlps xmm5, xmm7
movaps xmm4, xmm7
shufps xmm4, xmm4, 0x55
movaps xmm6, xmm7
shufps xmm6, xmm6, 0xFF
; xmm7 = d
; xmm6 = td
; xmm5 = gamma
; xmm4 = beta
pxor xmm3, xmm3
; beta < 0.0
movaps xmm0, xmm7
xorps xmm0, xmm4
cmpss xmm0, xmm3, 5
; gamma < 0.0
movaps xmm1, xmm7
xorps xmm1, xmm5
cmpss xmm1, xmm3, 5
; accumulation of conditions
andps xmm0, xmm1
; beta + gamma < 1.0
movaps xmm2, xmm4
addps xmm2, xmm5
cmpss xmm2, xmm6, 2
andps xmm0, xmm2
movd esi, xmm0
cmp esi, 0
jne _accept
xor eax, eax
ret
_accept:
divss xmm6, xmm7
comiss xmm6, dword [epsilon]
jc _reject
comiss xmm6, dword [ecx] ;minimum distance
jnc _reject
;populate hitpoint structure
; t is in xmm6
movaps xmm2, oword [eax + ray.dir]
movaps xmm3, oword [ebx + triangle.normal]
movss xmm4, dword [ebx + triangle.mat_index]
movss dword [edx + hitpoint.t], xmm6
movaps oword [edx + hitpoint.normal], xmm3
movss dword [edx + hitpoint.mat_index], xmm4
macro broadcast xmm5 = xmm6[0]
mulps xmm5, xmm2
macro eq128 edx.hitpoint.hit = xmm5 + eax.ray.origin
mov eax, 1
ret
_reject:
xor eax, eax
ret
"""
def create_triangle():
p0 = Vector3(0.1, 0.0, -2.0)
p1 = Vector3(4.0, 0.5, 0.2)
p2 = Vector3(2.2, 4.3, -1.0)
tr = Triangle(p0, p1, p2, 3)
return tr
def create_ray():
origin = Vector3(0.0, 0.0, 0.0)
dirx = 0.985906665972
diry = 0.165777376892
dirz = 0.0224923832256
#direction = Vector3(8.8, 8.9, 8.7)
direction = Vector3(dirx, diry, dirz)
#direction.normalize()
ray = Ray(origin, direction)
return ray
def v4(v3):
return (v3.x, v3.y, v3.z, 0.0)
if __name__ == "__main__":
tr = create_triangle()
ray = create_ray()
hp = tr.isect(ray)
if hp is not False:
print(hp.t)
asm = util.get_asm()
mc = asm.assemble(SSE2_ASM)
#mc.print_machine_code()
runtime = Runtime()
ds = runtime.load("test", mc)
ds["tri1.p0"] = v4(tr.v0)
ds["tri1.p1"] = v4(tr.v1)
ds["tri1.p2"] = v4(tr.v2)
ds["tri1.normal"] = v4(tr.normal)
ds["tri1.mat_index"] = tr.material
ds["r1.origin"] = v4(ray.origin)
ds["r1.dir"] = v4(ray.dir)
runtime.run("test")
print("xmm0 = ", ds["xm0"])
print("xmm1 = ", ds["xm1"])
print("xmm2 = ", ds["xm2"])
print("xmm3 = ", ds["xm3"])
print("xmm4 = ", ds["xm4"])
print("xmm5 = ", ds["xm5"])
print("xmm6 = ", ds["xm6"])
print("xmm7 = ", ds["xm7"])
print("xmm7i = ", ds["xm0i"])
print("Rezultat je = ", ds["result"])
print(ds["hp.normal"])
print(hp.normal)
print(ds["hp.mat_index"])
print(hp.material)
print(ds["hp.hit"])
print(hp.hit_point)
print(ds["hp.t"])
print(hp.t)
| [
"[email protected]"
] | |
cc8579e637c44fae83edeccad4ee7128463ed18f | fa62a10dc55e5af03f24d0e0c1fe46502b814e8c | /python基础/文件/02写入文件.py | cf2f54eeb7347dd883d5d40f696ad13e3e5960ef | [] | no_license | jiangchuan617/PythonLearning | c5ede77f596c2282e819cc0270cef842e4147783 | a8b999e348a0f90b8febc6ded8f4ae14a5f0ebe5 | refs/heads/master | 2022-05-02T04:37:59.515143 | 2022-05-01T16:20:15 | 2022-05-01T16:20:15 | 129,497,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | file = open('readme','a') # 'w'只写;'a'追加
file.write('123hello\n')
file.close() | [
"[email protected]"
] | |
2f1822c341eee9ce51603373ad52b19f2fb18236 | 5dd18b8f6c1140bf91c4b1942125ea5f221e789c | /backend/app/routers/video.py | 6c472ae15dbc2a8212b67f0d044289ec08e94e7d | [
"MIT"
] | permissive | thanet-s/subme-selected-topics-project | 3509dbc2849d01ce8e8f10fb64c8c21afa46df78 | fac1630839c580bbd66b93f2dc9004c8637a7b15 | refs/heads/main | 2023-08-18T15:57:33.567669 | 2021-10-16T05:05:49 | 2021-10-16T05:05:49 | 410,764,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,105 | py | from fastapi import Depends, APIRouter, HTTPException, status, Response, File, Form, UploadFile
from ..pydatic_models import User_Pydantic, Video_Pydantic, VideoCard_Pydantic
from ..models import Users, Video
from ..jwt import get_current_user, get_current_active_user
from typing import List
router = APIRouter(
prefix="/video",
tags=["video"]
)
@router.get("/home", response_model=List[VideoCard_Pydantic])
async def home_video():
return await VideoCard_Pydantic.from_queryset(Video.all().order_by('-created_at'))
@router.get("/public", response_model=List[VideoCard_Pydantic])
async def public_video():
return await VideoCard_Pydantic.from_queryset(Video.filter(is_private=False).order_by('-created_at'))
@router.get("/get-{id}", response_model=Video_Pydantic)
async def get_video(id: int):
return await Video_Pydantic.from_queryset_single(Video.get(id=id))
@router.get("/search-{word}", response_model=List[VideoCard_Pydantic])
async def search_video(word: str):
return await VideoCard_Pydantic.from_queryset(Video.filter(title__icontains=word).order_by('-created_at')) | [
"[email protected]"
] | |
214c6c3886334797076857bd9cf4b62e4d931034 | ce5e472dd6b33a261b3651672ffa4bcf37db9687 | /q1.py | 12b50e1fc508429fd3ff3b28d4b41bfa9c58e4af | [] | no_license | vkobinski/tarefa4 | edbe140b8ba38689f703555c82b1bf186c9e9f16 | 6e5925c32a97d33a206dab709f55284c7ec35055 | refs/heads/master | 2023-07-15T22:04:51.622643 | 2021-08-26T14:30:52 | 2021-08-26T14:30:52 | 400,198,617 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 812 | py | from datetime import datetime
class Cadastro:
def __init__(self):
self.codigo_banco = input("Insira o código do banco: ")
self.valor_despesa = input("Insira o valor da despesa: ")
self.descricao_despesa = input("Insira a descrição da despesa: ")
self.data = str(datetime.date(datetime.now()))
self.formatarData()
def getString(self):
print(f"Banco: {self.codigo_banco}")
print(f"Valor {self.valor_despesa}")
print(f"Descrição da despesa: {self.descricao_despesa}")
print(f"Data do registro: {self.data}")
def formatarData(self):
data = self.data
separados = data.split("-")
self.data = f"Dia {separados[2]} do mês {separados[1]} do ano {separados[0]}"
cadastro = Cadastro()
cadastro.getString() | [
"[email protected]"
] |
Subsets and Splits