blob_id (string, 40) | directory_id (string, 40) | path (string, 5-283) | content_id (string, 40) | detected_licenses (sequence, 0-41 entries) | license_type (2 classes) | repo_name (string, 7-96) | snapshot_id (string, 40) | revision_id (string, 40) | branch_name (58 classes) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 12.7k-662M, nullable) | star_events_count (int64, 0-35.5k) | fork_events_count (int64, 0-20.6k) | gha_license_id (11 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (43 classes) | src_encoding (9 classes) | language (1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 7-5.88M) | extension (30 classes) | content (string, 7-5.88M) | authors (sequence, length 1) | author (string, 0-73) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
871e2a48a189968cac97002cfa27a51d1f9daccb | b2ed893d04f04eeaf7209187133de7431c476a96 | /user_net/gender_class.py | c62093ef4ce48cc8a023d0c95a5f5f11497668c8 | [] | no_license | liruikaiyao/workshop | 4b5221259f59ad504d87d73c31f5fa0e58d4a1f0 | 6dbde74e35ef02f5e92c76dcdd1909f1d0afb89e | refs/heads/master | 2021-01-17T16:09:13.248109 | 2015-08-05T09:43:21 | 2015-08-05T09:43:21 | 23,420,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,550 | py | # coding=utf-8
__author__ = 'Carry lee'
from collections import Counter
import datetime
import urllib2
import json
from config.db import ICCv1, sh, utc, mapreduce
# Fetch user gender data
class Gender(object):
def __init__(self, collection_name, begin_name, end_name):
self.collection_name = collection_name
self.begin = begin_name
self.end = end_name
self.detail = ICCv1[self.collection_name]
self.begin_utc = self.begin.astimezone(utc)
self.end_utc = self.end.astimezone(utc)
self.activity_info_db = mapreduce['gender_info_db']
def get_gender(self):
gender_info = dict()
male = 0
female = 0
for elem in self.detail.find({'__REMOVED__': False,
'__CREATE_TIME__': {'$gt': self.begin_utc,
'$lt': self.end_utc}}):
if 'sex' in elem:
if elem['sex'] == 1:
male += 1
elif elem['sex'] == 2:
female += 1
else:
pass
gender_info['begin'] = self.begin_utc
gender_info['end'] = self.end_utc
gender_info['male'] = male
gender_info['female'] = female
gender_info['__CREATE_TIME__'] = datetime.datetime.now(utc)
gender_info['__REMOVED__'] = False
gender_info['__MODIFY_TIME__'] = datetime.datetime.now(utc)
self.activity_info_db.insert(gender_info)
return 'work finished'
| [
"[email protected]"
] | |
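A minimal usage sketch for the Gender class above (not part of the original file). The collection name and date range are assumptions, and sh is assumed to be a pytz timezone imported from config.db alongside utc:

import datetime
from user_net.gender_class import Gender

begin = sh.localize(datetime.datetime(2015, 7, 1))    # hypothetical window start
end = sh.localize(datetime.datetime(2015, 8, 1))      # hypothetical window end
print(Gender('user_detail', begin, end).get_gender())  # writes counts to mapreduce['gender_info_db']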
87b11d42b884e37cf9618016f3c796bffb40d792 | 96cb70018587f32d8498a5b3bd98703b3bb10495 | /pymeasure/instruments/anapico/apsin12G.py | 1ac6df64e4565d4bcedb2e534ff58b0995868e03 | [
"MIT"
] | permissive | StevenSiegl/pymeasure | 724a8d48db7aaf97fd4d2b19e201d425f8918b28 | 1565c67d5507a61711a4f63e8235900081ee41d7 | refs/heads/master | 2021-05-22T20:07:15.255255 | 2021-01-17T16:15:07 | 2021-01-17T16:15:07 | 253,072,625 | 1 | 1 | MIT | 2020-12-21T17:53:34 | 2020-04-04T18:36:49 | Python | UTF-8 | Python | false | false | 3,078 | py | #
# This file is part of the PyMeasure package.
#
# Copyright (c) 2013-2020 PyMeasure Developers
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from pymeasure.instruments import Instrument
from pymeasure.instruments.validators import strict_range, strict_discrete_set
class APSIN12G(Instrument):
""" Represents the Anapico APSIN12G Signal Generator with option 9K,
HP and GPIB. """
FREQ_LIMIT = [9e3, 12e9]
POW_LIMIT = [-30, 27]
power = Instrument.control(
"SOUR:POW:LEV:IMM:AMPL?;", "SOUR:POW:LEV:IMM:AMPL %gdBm;",
""" A floating point property that represents the output power
in dBm. This property can be set. """,
validator=strict_range,
values=POW_LIMIT
)
frequency = Instrument.control(
"SOUR:FREQ:CW?;", "SOUR:FREQ:CW %eHz;",
""" A floating point property that represents the output frequency
in Hz. This property can be set. """,
validator=strict_range,
values=FREQ_LIMIT
)
blanking = Instrument.control(
":OUTP:BLAN:STAT?", ":OUTP:BLAN:STAT %s",
""" A string property that represents the blanking of output power
when frequency is changed. ON makes the output to be blanked (off) while
changing frequency. This property can be set. """,
validator=strict_discrete_set,
values=['ON','OFF']
)
reference_output = Instrument.control(
"SOUR:ROSC:OUTP:STAT?", "SOUR:ROSC:OUTP:STAT %s",
""" A string property that represents the 10MHz reference output from
the synth. This property can be set. """,
validator=strict_discrete_set,
values=['ON','OFF']
)
def __init__(self, resourceName, **kwargs):
super(APSIN12G, self).__init__(
resourceName,
"Anapico APSIN12G Signal Generator",
**kwargs
)
def enable_rf(self):
""" Enables the RF output. """
self.write("OUTP:STAT 1")
def disable_rf(self):
""" Disables the RF output. """
self.write("OUTP:STAT 0") | [
"[email protected]"
] | |
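A brief usage sketch for the driver above; the VISA resource address is an assumption:

from pymeasure.instruments.anapico.apsin12G import APSIN12G

synth = APSIN12G("TCPIP::192.168.0.100::INSTR")  # hypothetical instrument address
synth.frequency = 1e9   # Hz, validated against FREQ_LIMIT
synth.power = -10       # dBm, validated against POW_LIMIT
synth.enable_rf()       # sends OUTP:STAT 1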
b66d57a519311e8f0a4f226ea6a3f5a9d8595aa8 | 8fcae139173f216eba1eaa01fd055e647d13fd4e | /.history/scraper_20191220161655.py | f1703b47ad7e8f00adc89277b1d03b707bd4eb9d | [] | no_license | EnriqueGalindo/backend-web-scraper | 68fdea5430a0ffb69cc7fb0e0d9bcce525147e53 | 895d032f4528d88d68719838a45dae4078ebcc82 | refs/heads/master | 2020-11-27T14:02:59.989697 | 2019-12-21T19:47:34 | 2019-12-21T19:47:34 | 229,475,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,261 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module docstring: One line description of what your program does.
There should be a blank line in between description above, and this
more detailed description. In this section you should put any caveats,
environment variable expectations, gotchas, and other notes about running
the program. Author tag (below) helps instructors keep track of who
wrote what, when grading.
"""
__author__ = "Enrique Galindo"
# Imports go at the top of your file, after the module docstring.
# One module per import line. These are for example only.
import sys
import requests
import re
import pprint
from html.parser import HTMLParser
regex_email = r'''(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])'''
regex_phone = r'''(1?\W*([2-9][0-8][0-9])\W*([2-9][0-9]{2})\W*([0-9]{4})(\se?x?t?(\d*))?)'''
class MyHTMLParser(HTMLParser):
a_list = []
    def handle_starttag(self, tag, attrs):
        # Collect absolute links from anchor tags.
        if tag == 'a':
            for attr, value in attrs:
                if attr == 'href' and value.startswith("http"):
                    self.a_list.append(value)
def main(args):
"""Main function is declared as standalone, for testability"""
good_phone_list = []
url = args[0]
response = requests.get(url)
response.raise_for_status()
url_list = re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', response.text)
email_list = set(re.findall(regex_email, response.text))
bad_phone_list = set(re.findall(regex_phone, response.text))
for number in bad_phone_list:
good_phone_list.append(number[1] + number[2] + number[3])
print(email_list)
pprint.pprint(good_phone_list)
    parser = MyHTMLParser()
    parser.feed(response.text)  # populate a_list with the page's anchor links
    print(parser.a_list)
if __name__ == '__main__':
"""Docstring goes here"""
main(sys.argv[1:]) | [
"[email protected]"
] | |
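A hedged invocation sketch: the script expects the page to scrape as its single argument, and the URL below is an assumption:

main(["https://example.com/contact"])  # prints the emails, phone numbers, and anchor links found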
a08e6d319ca3c3bf996f96299b7a1988523708b4 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/KCB_MM/YW_KCB_HASPRICELIMIT_GPMM_SHSJ_WDZC_108.py | 3b6c2efda3954622e0d4679be0aa3586e9e884dc | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,305 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test//xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test//service")
from ServiceConfig import *
from mainService import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test//mysql")
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test//utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_KCB_HASPRICELIMIT_GPMM_SHSJ_WDZC_108(xtp_test_case):
def setUp(self):
pass
# YW_KCB_HASPRICELIMIT_GPMM_SHSJ_WDZC_108
def test_YW_KCB_HASPRICELIMIT_GPMM_SHSJ_WDZC_108(self):
        title = '上海A股股票交易日五档即成转撤销卖——输入的保护价格四舍五入后>保护价格上限(9999.995)'  # SH A-share best-five IOC sell: entered protection price rounds above the 9999.995 cap
        # Define the expected values for the current test case.
        # Expected states: initial, unfilled, partially filled, fully filled, partial-cancel reported,
        # partially cancelled, reported awaiting cancel, cancelled, rejected order, cancel rejected, internal cancel.
        # xtp_ID and cancel_xtpID default to 0 and do not need to change.
case_goal = {
'期望状态': '废单',
'errorID': 11010122,
'errorMSG': queryOrderErrorMsg(11010122),
'是否生成报单': '是',
'是否是撤废': '否',
# '是否是新股申购': '',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameters ------------------------------------------
        # Parameters: ticker, market, security type, security status, trading status,
        # side (B = buy, S = sell), expected state, Api
        stkparm = QueryStkPriceQty('688000', '1', '4', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the whole case fails.
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
print(stkparm['错误原因'])
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':2,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
'price': 9999.995,
'quantity': 300,
'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = serviceTest(Api, case_goal, wt_reqs)
            logger.warning('Execution result: ' + str(rs['用例测试结果']) + ','
                           + str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
self.assertEqual(rs['用例测试结果'], True) # 201
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
c27f0888bbcb61d88c852dc4101cc460b2e025fb | a50a4e874d3d203344a47bc7ad9c317b213eab90 | /optimise/submitJob.py | a51a218a60c530524478ad2e55045ee87b4b4c26 | [] | no_license | fjl121029xx/yarn-api-python | d5b61ca0695d5fdc4f8923d5814f6576c3c87509 | 4468609dea2d7630fd9fc3dabbe7c02ded7aa4a1 | refs/heads/master | 2020-12-04T02:02:40.913088 | 2020-02-27T08:08:18 | 2020-02-27T08:08:18 | 231,563,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'fjl'
import json
import requests
from urllib import request
import time
headers = {
'Content-Type': 'application/json',
'X-Requested-By': 'admin'
}
# Livy statement payload: submit a Spark SQL count to an existing session.
data = {
    'code': "SELECT count(*) FROM `db_yqs_b_777777777`.`livy_watcher`",
    'kind': "sql"
}
# 172.20.44.6
# bi-olap1.sm02
sid = 8894  # id of an already-running Livy session
response = requests.post("http://bi-olap1.sm02:8999/sessions/" + str(sid) + '/statements', data=json.dumps(data),
headers=headers)
print(response.text)
id = response.json()['id']
print(id)
time.sleep(10)  # crude fixed wait for the statement to run; see the polling sketch below
response = request.urlopen('http://bi-olap1.sm02:8999/sessions/%d/statements/%d' % (sid, id))
statements = json.loads(response.read())
print(statements)
stmt = statements['state']
print('getStatements %s' % (statements['state']))
if 'available' == stmt:
    print('statement finished') | [
"[email protected]"
] | |
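A hedged alternative to the fixed sleep above: poll the statement until Livy reports a terminal state. The endpoint shape follows the calls already in the file; the timeout and poll interval are assumptions:

def wait_for_statement(session_id, statement_id, timeout=120):
    # Poll GET /sessions/{sid}/statements/{stid} until the state settles.
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = request.urlopen('http://bi-olap1.sm02:8999/sessions/%d/statements/%d'
                               % (session_id, statement_id))
        body = json.loads(resp.read())
        if body['state'] in ('available', 'error', 'cancelled'):
            return body
        time.sleep(2)
    raise RuntimeError('Livy statement did not finish in time')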
f9d96bee14be78d6e39e6381b8587843e94a81be | dcbdea089e2b4df2a1933169b1d792dd3fa85835 | /tests/library/client_test.py | 6d5553516e2cc389cf0af09a918d57d842ba45eb | [
"Apache-2.0"
] | permissive | mahmud83/ibm-analytics-engine-python | 3cdaa1eb90d20798285a5e66e1fa29313e7ec996 | ed02832b63cbe2981af5b44917f78da539e1a950 | refs/heads/master | 2020-04-07T06:11:33.741937 | 2018-09-29T18:59:28 | 2018-09-29T18:59:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,136 | py | from unittest import TestCase
from mock import Mock, patch
import sys
import tempfile
import os
import json
import requests
from requests.exceptions import RequestException
from ibm_analytics_engine import AnalyticsEngine, AnalyticsEngineException
class TestAnalyticsEngine(TestCase):
    def test_invalid_api_key_file(self):
        # FileNotFoundError exists only on Python 3; fall back to IOError on Python 2.
        try:
            error_class = FileNotFoundError
        except NameError:
            error_class = IOError
        with self.assertRaises(error_class):
            cf = AnalyticsEngine(api_key_filename='does_not_exist')
def test_api_key_file(self):
# delete=True means the file will be deleted on close
tmp = tempfile.NamedTemporaryFile(delete=True)
try:
data = json.dumps({
"name": "iae-key",
"description": "",
"createdAt": "2017-11-14T12:30+0000",
"apiKey": ""
}).encode('utf-8')
tmp.write(data)
tmp.flush()
cf = AnalyticsEngine(api_key_filename=tmp.name)
finally:
tmp.close() # deletes the file
| [
"[email protected]"
] | |
425d525054de4ed30057775581035a612747caa0 | 57422dc8cc8e9fa7e6cf8602fb3799ed3869d685 | /helper.py | 8a3d9cd6e0fc951ad2cb76838a49af21f83406aa | [] | no_license | kamakshidasan/cricket-topology | 6a9996e06fdc95f8ae12a5b6204c14525b64ea37 | 9482bc5963cd6081801342e9daaa6907fd5fb67d | refs/heads/master | 2020-03-24T23:57:18.933323 | 2018-08-02T09:52:40 | 2018-08-02T09:52:40 | 143,162,475 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,956 | py | import os, re, shutil, pickle, inspect, csv, sys, math
import urllib, json
from urlparse import urljoin
############################
## Identifiers for scrapping cricket
############################
# List of constants
CSV_EXTENSION = '.csv'
PYTHON_COMMAND = 'python'
PAGE = 'page'
IDENTIFIER = 'id'
CONTENT = 'content'
NAME = 'name'
PAGE_INFORMATION = 'pageInfo'
PAGE_NUMBER = 'numPages'
SCHEDULE_ENTRY = 'scheduleEntry'
TOURNAMENT_IDENTIFIER = 'tournamentId'
MATCH_TYPE = 'matchType'
MATCH_IDENTIFIER = 'matchId'
DATABASE = 'uds'
STATISTICS = 'stats'
TRAJECTORY = 'traj'
METADATA = 'meta'
PLAYERS = 'players'
FIXTURES = 'fixtures'
COMMENTARY = 'commentary'
META = 'meta'
META_COMMENTARY = 'meta-commentary'
CUSTOMER = 'customer'
ICC = 'icc'
FEEDS = 'feeds'
CUSTOMER_INFORMATION = {CUSTOMER: ICC}
DATA_DIRECTORY = 'data/'
CLEANED_BOWLERS_DIRECTORY = 'cleaned-bowlers/'
CLEANED_BATSMAN_DIRECTORY = 'cleaned-batsman/'
TOURNAMENT_TYPES = 'tournamentTypes'
MATCH_TYPES = 'matchTypes'
STARTING_DATE = 'startDate'
ENDING_DATE = 'endDate'
TEST_MATCH = 'TEST'
ODI_MATCH = 'ODI'
TWENTY_INTERNATIONAL_MATCH = 'T20I'
INTERNATIONAL_TOURNAMENT = 'I'
JSON_EXTENSION = '.json'
ICC_API_URL = 'http://cricketapi-icc.pulselive.com'
bowlers_list = [
'Harbhajan Singh',
'Bhuvneshwar Kumar',
'Sunil Narine',
'Umesh Yadav',
'Amit Mishra',
'Lasith Malinga',
'Ravichandran Ashwin',
'Mohit Sharma',
'Ravindra Jadeja',
'Piyush Chawla',
'Praveen Kumar',
'Dwayne Bravo',
'Morne Morkel',
'Ashish Nehra',
'Axar Patel',
'James Faulkner',
'Sandeep Sharma',
'Vinay Kumar',
'Shane Watson',
'Yuzvendra Chahal',
'Zaheer Khan',
'Dale Steyn',
'Mitchell Johnson',
'Rajat Bhatia',
'Jasprit Bumrah',
'Karn Sharma',
'Chris Morris',
'Shahbaz Nadeem',
'Mitchell McClenaghan',
'Dhawal Kulkarni',
'Ashok Dinda'
]
batsman_list = [
'AB de Villiers',
'David Warner',
'Chris Gayle',
'Kieron Pollard',
'David Miller',
'Suresh Raina',
'Yusuf Pathan',
'Dwayne Smith',
'MS Dhoni',
'Shane Watson',
'Virat Kohli',
'Rohit Sharma',
'Robin Uthappa',
'Steve Smith',
'Aaron Finch',
'Brendon McCullum',
'Yuvraj Singh',
'Faf du Plessis',
'Ambati Rayudu',
'Shaun Marsh',
'Dinesh Karthik',
'Sanju Samson',
'Gautam Gambhir',
'Shikhar Dhawan',
'Parthiv Patel',
'Manish Pandey',
'Ajinkya Rahane',
'Michael Hussey',
'Murali Vijay'
]
def save_data(data_url, folder_name, file_arguments, file_extension):
    file_name = join_strings(file_arguments) + file_extension
    # Downloads are written under the flat DATA_DIRECTORY ('data/'), not folder_name.
    urllib.urlretrieve(data_url, DATA_DIRECTORY + file_name)
    return DATA_DIRECTORY + file_name
def get_data_url(arguments):
return '/'.join(str(argument) for argument in arguments)
# get working directory and add '/' at the end
def cwd():
return os.path.join(os.getcwd(), '')
# get path of current file
def current_path():
return os.path.abspath(inspect.getfile(inspect.currentframe()))
# join two strings
def join_strings(strings):
return '-'.join(strings)
| [
"[email protected]"
] | |
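A hedged sketch of how these helpers compose; the year and match id below are placeholders, not real identifiers:

url = get_data_url([ICC_API_URL, FIXTURES, 2016, 12345])          # hypothetical ids
path = save_data(url, DATA_DIRECTORY, ['fixtures', '2016'], JSON_EXTENSION)
print(path)  # data/fixtures-2016.json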
cf2eaf7e00ed5e442bcb2e5cec7034e2037df6a7 | d73cb659066925e17f35a084458a6418e851b6fd | /Bill Lubanovich - Introducing Python/04_exception_create.py | 3e79eaf14b8a08d1f50174a60a842e5ccd298f19 | [] | no_license | artag/Training | 11095dd67ae90f434666b156c4f778c54a754985 | 5ee29f5a77d33f682d8cf714a56cc4b0dfe40473 | refs/heads/master | 2023-08-31T08:10:21.535860 | 2023-08-30T21:32:16 | 2023-08-30T21:32:16 | 173,268,460 | 0 | 0 | null | 2023-03-06T20:03:34 | 2019-03-01T08:54:36 | C# | UTF-8 | Python | false | false | 171 | py | class UppercaseException(Exception):
pass
words = ('eeenie', 'meenie', 'miny', 'MO')
for word in words:
if word.isupper():
raise UppercaseException(word)
| [
"[email protected]"
] | |
5fc3b805f173861239b6139fb22c85e5c1597b16 | 66559f752758913c3b741f764cb3316b64f821f6 | /NewsPortal/settings.py | 9e7aec79643886a818e117cfc0599b3c9fe8441c | [] | no_license | Dapucla/NewsPortal | 7d58c0c6515ee0ebac03bb9764a94791578cdd45 | 83cd0093b33bb249db08b26f0a65ab200f87e0eb | refs/heads/main | 2023-07-02T13:20:27.131750 | 2021-08-12T23:09:56 | 2021-08-12T23:09:56 | 375,814,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,134 | py | """
Django settings for NewsPortal project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-9_cd#3d^^mce8x#0$%7w_9jnxjy_8hbvippi%cxi-1%otysa8)'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'news',
'django.contrib.sites',
'django.contrib.flatpages',
# 'django_filters'
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'NewsPortal.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.request',
],
},
},
]
AUTHENTICATION_BACKENDS = [
# Needed to login by username in Django admin, regardless of `allauth`
'django.contrib.auth.backends.ModelBackend',
# `allauth` specific authentication methods, such as login by e-mail
'allauth.account.auth_backends.AuthenticationBackend',
]
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_EMAIL_VERIFICATION = 'none'
LOGIN_URL = '/accounts/login/'
LOGIN_REDIRECT_URL = 'search'
WSGI_APPLICATION = 'NewsPortal.wsgi.application'
SITE_ID = 1
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
STATICFILES_DIRS = [
BASE_DIR / "static"
] | [
"[email protected]"
] | |
133e210f76872cc81160091bd693b155d285992a | fec543880620228a1e6e5f326d7b06d6a5c25a75 | /Tutorial_2_3.py | ce78a7d350877de6cd49aea6cc0d5ac438f090f3 | [] | no_license | andriyantohalim/PySide2_Tutorial | f6bf30b13d8bb150e41745f68e62e4ab525b5459 | 9317d252cfb1ec3067102b7e0dbf03e564ab8eda | refs/heads/master | 2020-04-22T05:49:35.658069 | 2019-02-14T15:05:41 | 2019-02-14T15:05:41 | 170,169,131 | 3 | 3 | null | null | null | null | UTF-8 | Python | false | false | 974 | py | ##################################################
# TUTORIAL 2_3. Toolbar
# @TODO: No Icon displayed, maybe on Mac OS only
##################################################
import sys
from PySide2.QtWidgets import QMainWindow, QApplication, QAction
from PySide2.QtGui import QIcon
class Example(QMainWindow):
def initUI(self):
# exitAction = QAction(QIcon('exit24.png'), 'Exit', self)
exitAction = QAction('Exit', self)
exitAction.setShortcut('Ctrl+Q') # CMD + Q in Mac OS
exitAction.triggered.connect(self.close)
self.toolbar = self.addToolBar('Exit')
self.toolbar.addAction(exitAction)
self.setGeometry(300, 300, 350, 250)
self.setWindowTitle('Toolbar')
self.show()
def __init__(self):
super(Example, self).__init__()
self.initUI()
def main():
app = QApplication(sys.argv)
ex = Example()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
ad139485701a6ef5cc4b1afc855f9bdbdfcb39fb | aa91bc5be73b742d37a726d2599ece7e45e6b82a | /tests/sentry/quotas/redis/tests.py | fd73a945c13ea3f506802b16cfe4d79fe2039e59 | [
"BSD-2-Clause"
] | permissive | vaginessa/BurntVeggies | 8f5f18d008bbe1bf0736b4adfb373624a83a9cf8 | 0bc4fe9bf0aa301e8314612cb8e56c50e1feab4a | refs/heads/master | 2020-07-12T09:55:30.669118 | 2014-02-04T01:34:05 | 2014-02-04T01:34:05 | 37,351,359 | 0 | 0 | BSD-3-Clause | 2019-01-18T06:17:33 | 2015-06-13T00:36:52 | Python | UTF-8 | Python | false | false | 2,623 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import mock
from exam import fixture, patcher
from sentry.quotas.redis import RedisQuota
from sentry.testutils import TestCase
class RedisQuotaTest(TestCase):
@fixture
def quota(self):
inst = RedisQuota(hosts={
0: {'db': 9}
})
inst.conn.flushdb()
return inst
@patcher.object(RedisQuota, 'get_system_quota')
def get_system_quota(self):
inst = mock.MagicMock()
inst.return_value = 0
return inst
@patcher.object(RedisQuota, 'get_team_quota')
def get_team_quota(self):
inst = mock.MagicMock()
inst.return_value = 0
return inst
@patcher.object(RedisQuota, 'get_project_quota')
def get_project_quota(self):
inst = mock.MagicMock()
inst.return_value = 0
return inst
@patcher.object(RedisQuota, '_incr_project')
def _incr_project(self):
inst = mock.MagicMock()
inst.return_value = (0, 0, 0)
return inst
def test_default_host_is_local(self):
quota = RedisQuota()
self.assertEquals(len(quota.conn.hosts), 1)
self.assertEquals(quota.conn.hosts[0].host, 'localhost')
def test_bails_immediately_without_any_quota(self):
self._incr_project.return_value = (0, 0, 0)
result = self.quota.is_rate_limited(self.project)
assert not self._incr_project.called
assert result is False
def test_enforces_project_quota(self):
self.get_project_quota.return_value = 100
self._incr_project.return_value = (0, 0, 101)
result = self.quota.is_rate_limited(self.project)
assert result is True
self._incr_project.return_value = (0, 0, 99)
result = self.quota.is_rate_limited(self.project)
assert result is False
def test_enforces_team_quota(self):
self.get_team_quota.return_value = 100
self._incr_project.return_value = (0, 101, 0)
result = self.quota.is_rate_limited(self.project)
assert result is True
self._incr_project.return_value = (0, 99, 0)
result = self.quota.is_rate_limited(self.project)
assert result is False
def test_enforces_system_quota(self):
self.get_system_quota.return_value = 100
self._incr_project.return_value = (101, 0, 0)
result = self.quota.is_rate_limited(self.project)
assert result is True
self._incr_project.return_value = (99, 0, 0)
result = self.quota.is_rate_limited(self.project)
assert result is False
| [
"[email protected]"
] | |
f601c9bb6fd53fe0bef6a60838676b07583a7dda | 64290261c1466f0bcac0c302f59783cccad740a9 | /googletest/googletest/scripts/gen_gtest_pred_impl.py | 7cee3c7f7815849dcb2831dd5ed530a4457ca8ec | [
"BSD-3-Clause"
] | permissive | qinlang3/LaplacianFilter | 49560ec7933658db99710699448fd08f8f3c990f | dd5c1b150b14ff218b8c5d6cbf1a4bb644f13e04 | refs/heads/main | 2023-03-27T00:44:56.166409 | 2021-03-27T06:55:00 | 2021-03-27T06:55:00 | 347,609,035 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 22,618 | py | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""gen_gtest_pred_impl.py v0.1
Generates the implementation of Google Test predicate assertions and
accompanying tests.
Usage:
gen_gtest_pred_impl.py MAX_ARITY
where MAX_ARITY is a positive integer.
The command generates the implementation of up-to MAX_ARITY-ary
predicate assertions, and writes it to file gtest_pred_impl.h in the
directory where the script is. It also generates the accompanying
unit test in file gtest_pred_impl_unittest.cc.
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import sys
import time
# Where this script is.
SCRIPT_DIR = os.path.dirname(sys.argv[0])
# Where to store the generated header.
HEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')
# Where to store the generated unit test.
UNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')
def HeaderPreamble(n):
"""Returns the preamble for the header file.
Args:
n: the maximum arity of the predicate macros to be generated.
"""
# A map that defines the values used in the preamble template.
DEFS = {
'today' : time.strftime('%m/%d/%Y'),
'year' : time.strftime('%Y'),
'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),
'n' : n
}
return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'. DO NOT EDIT BY HAND!
//
// Implements a family of generic predicate assertion macros.
// GOOGLETEST_CM0001 DO NOT DELETE
#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
#include "gtest/gtest.h"
namespace testing {
// This header implements a family of generic predicate assertion
// macros:
//
// ASSERT_PRED_FORMAT1(pred_format, v1)
// ASSERT_PRED_FORMAT2(pred_format, v1, v2)
// ...
//
// where pred_format is a function or functor that takes n (in the
// case of ASSERT_PRED_FORMATn) values and their source expression
// text, and returns a testing::AssertionResult. See the definition
// of ASSERT_EQ in gtest.h for an example.
//
// If you don't care about formatting, you can use the more
// restrictive version:
//
// ASSERT_PRED1(pred, v1)
// ASSERT_PRED2(pred, v1, v2)
// ...
//
// where pred is an n-ary function or functor that returns bool,
// and the values v1, v2, ..., must support the << operator for
// streaming to std::ostream.
//
// We also define the EXPECT_* variations.
//
// For now we only support predicates whose arity is at most %(n)s.
// Please email [email protected] if you need
// support for higher arities.
// GTEST_ASSERT_ is the basic statement to which all of the assertions
// in this file reduce. Don't use this in your code.
#define GTEST_ASSERT_(expression, on_failure) \\
GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\
if (const ::testing::AssertionResult gtest_ar = (expression)) \\
; \\
else \\
on_failure(gtest_ar.failure_message())
""" % DEFS)
def Arity(n):
"""Returns the English name of the given arity."""
if n < 0:
return None
elif n <= 3:
return ['nullary', 'unary', 'binary', 'ternary'][n]
else:
return '%s-ary' % n
def Title(word):
"""Returns the given word in title case. The difference between
this and string's title() method is that Title('4-ary') is '4-ary'
while '4-ary'.title() is '4-Ary'."""
return word[0].upper() + word[1:]
def OneTo(n):
"""Returns the list [1, 2, 3, ..., n]."""
return range(1, n + 1)
def Iter(n, format, sep=''):
"""Given a positive integer n, a format string that contains 0 or
more '%s' format specs, and optionally a separator string, returns
the join of n strings, each formatted with the format string on an
iterator ranged from 1 to n.
Example:
Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.
"""
# How many '%s' specs are in format?
spec_count = len(format.split('%s')) - 1
return sep.join([format % (spec_count * (i,)) for i in OneTo(n)])
def ImplementationForArity(n):
"""Returns the implementation of n-ary predicate assertions."""
# A map the defines the values used in the implementation template.
DEFS = {
'n' : str(n),
'vs' : Iter(n, 'v%s', sep=', '),
'vts' : Iter(n, '#v%s', sep=', '),
'arity' : Arity(n),
'Arity' : Title(Arity(n))
}
impl = """
// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
// this in your code.
template <typename Pred""" % DEFS
impl += Iter(n, """,
typename T%s""")
impl += """>
AssertionResult AssertPred%(n)sHelper(const char* pred_text""" % DEFS
impl += Iter(n, """,
const char* e%s""")
impl += """,
Pred pred"""
impl += Iter(n, """,
const T%s& v%s""")
impl += """) {
if (pred(%(vs)s)) return AssertionSuccess();
""" % DEFS
impl += ' return AssertionFailure() << pred_text << "("'
impl += Iter(n, """
<< e%s""", sep=' << ", "')
impl += ' << ") evaluates to false, where"'
impl += Iter(
n, """
<< "\\n" << e%s << " evaluates to " << ::testing::PrintToString(v%s)"""
)
impl += """;
}
// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
// Don't use this in your code.
#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\
GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s), \\
on_failure)
// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
// this in your code.
#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\
GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred""" % DEFS
impl += Iter(n, """, \\
#v%s""")
impl += """, \\
pred"""
impl += Iter(n, """, \\
v%s""")
impl += """), on_failure)
// %(Arity)s predicate assertion macros.
#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define EXPECT_PRED%(n)s(pred, %(vs)s) \\
GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)
#define ASSERT_PRED%(n)s(pred, %(vs)s) \\
GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)
""" % DEFS
return impl
def HeaderPostamble():
"""Returns the postamble for the header file."""
return """
} // namespace testing
#endif // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
"""
def GenerateFile(path, content):
"""Given a file path and a content string
overwrites it with the given content.
"""
print 'Updating file %s . . .' % path
f = file(path, 'w+')
print >>f, content,
f.close()
print 'File %s has been updated.' % path
def GenerateHeader(n):
"""Given the maximum arity n, updates the header file that implements
the predicate assertions.
"""
GenerateFile(HEADER,
HeaderPreamble(n)
+ ''.join([ImplementationForArity(i) for i in OneTo(n)])
+ HeaderPostamble())
def UnitTestPreamble():
"""Returns the preamble for the unit test file."""
# A map that defines the values used in the preamble template.
DEFS = {
'today' : time.strftime('%m/%d/%Y'),
'year' : time.strftime('%Y'),
'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),
}
return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'. DO NOT EDIT BY HAND!
// Regression test for gtest_pred_impl.h
//
// This file is generated by a script and quite long. If you intend to
// learn how Google Test works by reading its unit tests, read
// gtest_unittest.cc instead.
//
// This is intended as a regression test for the Google Test predicate
// assertions. We compile it as part of the gtest_unittest target
// only to keep the implementation tidy and compact, as it is quite
// involved to set up the stage for testing Google Test using Google
// Test itself.
//
// Currently, gtest_unittest takes ~11 seconds to run in the testing
// daemon. In the future, if it grows too large and needs much more
// time to finish, we should consider separating this file into a
// stand-alone regression test.
#include <iostream>
#include "gtest/gtest.h"
#include "gtest/gtest-spi.h"
// A user-defined data type.
struct Bool {
explicit Bool(int val) : value(val != 0) {}
bool operator>(int n) const { return value > Bool(n).value; }
Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
bool operator==(const Bool& rhs) const { return value == rhs.value; }
bool value;
};
// Enables Bool to be used in assertions.
std::ostream& operator<<(std::ostream& os, const Bool& x) {
return os << (x.value ? "true" : "false");
}
""" % DEFS)
def TestsForArity(n):
"""Returns the tests for n-ary predicate assertions."""
# A map that defines the values used in the template for the tests.
DEFS = {
'n' : n,
'es' : Iter(n, 'e%s', sep=', '),
'vs' : Iter(n, 'v%s', sep=', '),
'vts' : Iter(n, '#v%s', sep=', '),
'tvs' : Iter(n, 'T%s v%s', sep=', '),
'int_vs' : Iter(n, 'int v%s', sep=', '),
'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),
'types' : Iter(n, 'typename T%s', sep=', '),
'v_sum' : Iter(n, 'v%s', sep=' + '),
'arity' : Arity(n),
'Arity' : Title(Arity(n)),
}
tests = (
"""// Sample functions/functors for testing %(arity)s predicate assertions.
// A %(arity)s predicate function.
template <%(types)s>
bool PredFunction%(n)s(%(tvs)s) {
return %(v_sum)s > 0;
}
// The following two functions are needed to circumvent a bug in
// gcc 2.95.3, which sometimes has problem with the above template
// function.
bool PredFunction%(n)sInt(%(int_vs)s) {
return %(v_sum)s > 0;
}
bool PredFunction%(n)sBool(%(Bool_vs)s) {
return %(v_sum)s > 0;
}
""" % DEFS)
tests += """
// A %(arity)s predicate functor.
struct PredFunctor%(n)s {
template <%(types)s>
bool operator()(""" % DEFS
tests += Iter(n, 'const T%s& v%s', sep=""",
""")
tests += """) {
return %(v_sum)s > 0;
}
};
""" % DEFS
tests += """
// A %(arity)s predicate-formatter function.
template <%(types)s>
testing::AssertionResult PredFormatFunction%(n)s(""" % DEFS
tests += Iter(n, 'const char* e%s', sep=""",
""")
tests += Iter(n, """,
const T%s& v%s""")
tests += """) {
if (PredFunction%(n)s(%(vs)s))
return testing::AssertionSuccess();
return testing::AssertionFailure()
<< """ % DEFS
tests += Iter(n, 'e%s', sep=' << " + " << ')
tests += """
<< " is expected to be positive, but evaluates to "
<< %(v_sum)s << ".";
}
""" % DEFS
tests += """
// A %(arity)s predicate-formatter functor.
struct PredFormatFunctor%(n)s {
template <%(types)s>
testing::AssertionResult operator()(""" % DEFS
tests += Iter(n, 'const char* e%s', sep=""",
""")
tests += Iter(n, """,
const T%s& v%s""")
tests += """) const {
return PredFormatFunction%(n)s(%(es)s, %(vs)s);
}
};
""" % DEFS
tests += """
// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
class Predicate%(n)sTest : public testing::Test {
protected:
void SetUp() override {
expected_to_finish_ = true;
finished_ = false;""" % DEFS
tests += """
""" + Iter(n, 'n%s_ = ') + """0;
}
"""
tests += """
void TearDown() override {
// Verifies that each of the predicate's arguments was evaluated
// exactly once."""
tests += ''.join(["""
EXPECT_EQ(1, n%s_) <<
"The predicate assertion didn't evaluate argument %s "
"exactly once.";""" % (i, i + 1) for i in OneTo(n)])
tests += """
// Verifies that the control flow in the test function is expected.
if (expected_to_finish_ && !finished_) {
FAIL() << "The predicate assertion unexpactedly aborted the test.";
} else if (!expected_to_finish_ && finished_) {
FAIL() << "The failed predicate assertion didn't abort the test "
"as expected.";
}
}
// true iff the test function is expected to run to finish.
static bool expected_to_finish_;
// true iff the test function did run to finish.
static bool finished_;
""" % DEFS
tests += Iter(n, """
static int n%s_;""")
tests += """
};
bool Predicate%(n)sTest::expected_to_finish_;
bool Predicate%(n)sTest::finished_;
""" % DEFS
tests += Iter(n, """int Predicate%%(n)sTest::n%s_;
""") % DEFS
tests += """
typedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;
""" % DEFS
def GenTest(use_format, use_assert, expect_failure,
use_functor, use_user_type):
"""Returns the test for a predicate assertion macro.
Args:
use_format: true iff the assertion is a *_PRED_FORMAT*.
use_assert: true iff the assertion is a ASSERT_*.
expect_failure: true iff the assertion is expected to fail.
use_functor: true iff the first argument of the assertion is
a functor (as opposed to a function)
use_user_type: true iff the predicate functor/function takes
argument(s) of a user-defined type.
Example:
GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior
of a successful EXPECT_PRED_FORMATn() that takes a functor
whose arguments have built-in types."""
if use_assert:
assrt = 'ASSERT' # 'assert' is reserved, so we cannot use
# that identifier here.
else:
assrt = 'EXPECT'
assertion = assrt + '_PRED'
if use_format:
pred_format = 'PredFormat'
assertion += '_FORMAT'
else:
pred_format = 'Pred'
assertion += '%(n)s' % DEFS
if use_functor:
pred_format_type = 'functor'
pred_format += 'Functor%(n)s()'
else:
pred_format_type = 'function'
pred_format += 'Function%(n)s'
if not use_format:
if use_user_type:
pred_format += 'Bool'
else:
pred_format += 'Int'
test_name = pred_format_type.title()
if use_user_type:
arg_type = 'user-defined type (Bool)'
test_name += 'OnUserType'
if expect_failure:
arg = 'Bool(n%s_++)'
else:
arg = 'Bool(++n%s_)'
else:
arg_type = 'built-in type (int)'
test_name += 'OnBuiltInType'
if expect_failure:
arg = 'n%s_++'
else:
arg = '++n%s_'
if expect_failure:
successful_or_failed = 'failed'
expected_or_not = 'expected.'
test_name += 'Failure'
else:
successful_or_failed = 'successful'
expected_or_not = 'UNEXPECTED!'
test_name += 'Success'
# A map that defines the values used in the test template.
defs = DEFS.copy()
defs.update({
'assert' : assrt,
'assertion' : assertion,
'test_name' : test_name,
'pf_type' : pred_format_type,
'pf' : pred_format,
'arg_type' : arg_type,
'arg' : arg,
'successful' : successful_or_failed,
'expected' : expected_or_not,
})
test = """
// Tests a %(successful)s %(assertion)s where the
// predicate-formatter is a %(pf_type)s on a %(arg_type)s.
TEST_F(%(assertion)sTest, %(test_name)s) {""" % defs
indent = (len(assertion) + 3)*' '
extra_indent = ''
if expect_failure:
extra_indent = ' '
if use_assert:
test += """
expected_to_finish_ = false;
EXPECT_FATAL_FAILURE({ // NOLINT"""
else:
test += """
EXPECT_NONFATAL_FAILURE({ // NOLINT"""
test += '\n' + extra_indent + """ %(assertion)s(%(pf)s""" % defs
test = test % defs
test += Iter(n, ',\n' + indent + extra_indent + '%(arg)s' % defs)
test += ');\n' + extra_indent + ' finished_ = true;\n'
if expect_failure:
test += ' }, "");\n'
test += '}\n'
return test
# Generates tests for all 2**6 = 64 combinations.
tests += ''.join([GenTest(use_format, use_assert, expect_failure,
use_functor, use_user_type)
for use_format in [0, 1]
for use_assert in [0, 1]
for expect_failure in [0, 1]
for use_functor in [0, 1]
for use_user_type in [0, 1]
])
return tests
def UnitTestPostamble():
"""Returns the postamble for the tests."""
return ''
def GenerateUnitTest(n):
"""Returns the tests for up-to n-ary predicate assertions."""
GenerateFile(UNIT_TEST,
UnitTestPreamble()
+ ''.join([TestsForArity(i) for i in OneTo(n)])
+ UnitTestPostamble())
def _Main():
"""The entry point of the script. Generates the header file and its
unit test."""
if len(sys.argv) != 2:
print __doc__
print 'Author: ' + __author__
sys.exit(1)
n = int(sys.argv[1])
GenerateHeader(n)
GenerateUnitTest(n)
if __name__ == '__main__':
_Main()
| [
"[email protected]"
] | |
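A hedged usage sketch: the script targets Python 2, and UnitTestPreamble reads sys.argv[1], so set it before calling the generators directly:

import sys
import gen_gtest_pred_impl as gen

sys.argv = ['gen_gtest_pred_impl.py', '5']  # arity bound, read by UnitTestPreamble
gen.GenerateHeader(5)     # rewrites ../include/gtest/gtest_pred_impl.h
gen.GenerateUnitTest(5)   # rewrites ../test/gtest_pred_impl_unittest.cc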
0e1c61643756a0babee382e35dc2724271ea21a6 | 3785be3d8f6e852cfdd46e426747e7aaad1461ed | /for_format.py | 88c09c863a33eeb2c9a22caabaeb1b21b682625c | [] | no_license | ONOentrepreneur/python-code | d18a6f1beabcc8a4eedd1077a49458fc8593a16d | 7246f48b65d8d2e18291debd376796afaf88c6ec | refs/heads/master | 2021-10-19T18:58:48.506357 | 2019-02-23T09:14:34 | 2019-02-23T09:14:34 | 93,308,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | v=0
for i in range(1, 11):
    v += i
    print("adding {0} gives {1}".format(i, v))
print("1 through 10 sum to...{0}".format(v))
| [
"[email protected]"
] | |
287dd1a5e3fdc62d41b3076b4cad6580e6f9daca | e62b02d6c028612a7dce488344a0246abd5a7433 | /src/point_plane_net/conv_onet/scripts/dataset_scannet_depth/check_max_depth_value.py | 86ab749ac585ddc87dc7925f085fefe690c32b91 | [
"MIT"
] | permissive | daniil-777/dynamic_geo_convolutional_onet | 335688016870c14ea08af676fa768d5c1175c348 | c4db94a64c8b2f2c83ecb507b02b9aecd4c7bb62 | refs/heads/main | 2023-02-24T14:26:45.772083 | 2021-01-30T20:36:12 | 2021-01-30T20:36:12 | 322,916,712 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | import os
import numpy as np
import trimesh
import glob
import imageio
import sys
import torch
from tqdm import tqdm
from os.path import join
item_path = '/is/rg/avg/mniemeyer/project_data/2020/scalable_onet/data/ScanNet_depth14/scenes'
item_path = os.path.join(item_path, os.listdir(item_path)[0])
depth_path = join(item_path, 'depth')
depth_files = glob.glob(join(depth_path, '*.png'))
depth_files.sort()
fmin = np.inf
fmax = -np.inf
for f in tqdm(depth_files):
img = imageio.imread(f).astype(np.float32) / 1000.
mmin, mmax = img.min(), img.max()
if mmin < fmin:
fmin = mmin
if mmax > fmax:
fmax = mmax
print('Final numbers: %.6f, %.6f' % (fmin, fmax)) | [
"[email protected]"
] | |
9c1700b52b12a939eb75cda0d99d195d51d4f086 | a6df69adeda3c2b08c988e023ca5cac63a0d03cf | /tests/multiviewica/test_mvica.py | 0119b3e63c90fbf91db1d0ae90e39c1c05e56d90 | [
"MIT"
] | permissive | hugorichard/mvlearn | 89cf575070e847b7002cbecc185ddfa4431511b6 | 688548b55ca3b61ea4bcc3b339cee140d03c0adc | refs/heads/main | 2023-04-03T00:42:45.017104 | 2021-03-18T14:50:21 | 2021-03-18T14:50:21 | 294,060,397 | 0 | 0 | Apache-2.0 | 2020-09-09T09:06:43 | 2020-09-09T09:06:43 | null | UTF-8 | Python | false | false | 5,545 | py | # BSD 3-Clause License
# Copyright (c) 2020, Hugo RICHARD and Pierre ABLIN
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Modified from source package https://github.com/hugorichard/multiviewica
import pytest
import numpy as np
import scipy
from sklearn.decomposition import PCA
from mvlearn.decomposition import MultiviewICA
from mvlearn.compose import ViewTransformer
from mvlearn.utils import requires_multiviewica
def hungarian(M):
u, order = scipy.optimize.linear_sum_assignment(-abs(M))
vals = M[u, order]
return order, np.sign(vals)
def normalize(A):
A_ = A - np.mean(A, axis=1, keepdims=True)
A_ = A_ / np.linalg.norm(A_, axis=1, keepdims=True)
return A_
def amari_d(W, A):
P = np.dot(W, A)
def s(r):
return np.sum(np.sum(r ** 2, axis=1) / np.max(r ** 2, axis=1) - 1)
return (s(np.abs(P)) + s(np.abs(P.T))) / (2 * P.shape[0])
def error(M):
order, _ = hungarian(M)
return 1 - M[np.arange(M.shape[0]), order]
# Initialize data
@pytest.fixture(scope="module")
def Xs():
np.random.seed(0)
view1 = np.random.random((10, 9))
view2 = np.random.random((10, 9))
Xs = [view1, view2]
return np.asarray(Xs)
@requires_multiviewica
@pytest.mark.parametrize(
("algo", "init"),
[(MultiviewICA, "permica"), (MultiviewICA, "groupica"),],
)
def test_ica(algo, init):
# Test that all algo can recover the sources
sigma = 1e-4
n, v, p, t = 3, 10, 5, 1000
# Generate signals
rng = np.random.RandomState(0)
S_true = rng.laplace(size=(p, t))
S_true = normalize(S_true)
A_list = rng.randn(n, v, p)
noises = rng.randn(n, v, t)
Xs = [A.dot(S_true) for A in A_list]
Xs = [X + sigma * N for X, A, N in zip(Xs, A_list, noises)]
# Run ICA
if init is None:
algo = algo(n_components=5, tol=1e-5, multiview_output=False).fit(
np.swapaxes(Xs, 1, 2)
)
else:
algo = algo(
n_components=5, tol=1e-5, init=init, multiview_output=False
).fit(np.swapaxes(Xs, 1, 2))
K = algo.pca_components_
W = algo.components_
S = algo.transform(np.swapaxes(Xs, 1, 2)).T
dist = np.mean(
[
amari_d(W[i], np.linalg.pinv(K[i]).T.dot(A_list[i]))
for i in range(n)
]
)
S = normalize(S)
err = np.mean(error(np.abs(S.dot(S_true.T))))
assert dist < 0.01
assert err < 0.01
@requires_multiviewica
def test_transform(Xs):
ica = MultiviewICA(n_components=2)
with pytest.raises(ValueError):
ica.transform(Xs)
assert ica.fit_transform(Xs).shape == (Xs.shape[0], Xs.shape[1], 2)
ica = MultiviewICA()
assert ica.fit_transform(Xs).shape == Xs.shape
@requires_multiviewica
@pytest.mark.parametrize("multiview_output", [True, False])
def test_inverse_transform(Xs, multiview_output):
ica = MultiviewICA(n_components=2, multiview_output=multiview_output)
with pytest.raises(ValueError):
ica.inverse_transform(Xs)
S = ica.fit_transform(Xs)
Xs_mixed = ica.inverse_transform(S)
avg_mixed = np.mean(
[X @ np.linalg.pinv(C) for X, C in zip(Xs, ica.pca_components_)],
axis=0,
)
avg_mixed2 = np.mean(
[X @ np.linalg.pinv(C) for X, C in zip(Xs_mixed, ica.pca_components_)],
axis=0,
)
assert np.linalg.norm(avg_mixed2 - avg_mixed) < 0.2
@requires_multiviewica
def test_inverse_transform_no_preproc(Xs):
ica = MultiviewICA()
S = ica.fit_transform(Xs)
Xs_mixed = ica.inverse_transform(S)
assert np.mean((Xs_mixed - Xs) ** 2) / np.mean(Xs ** 2) < 0.05
@requires_multiviewica
def test_fit_errors(Xs):
with pytest.raises(ValueError):
ica = MultiviewICA()
ica.fit(Xs[:, :5, :])
with pytest.raises(ValueError):
ica = MultiviewICA(init="WRONG")
ica.fit(Xs)
with pytest.raises(TypeError):
ica = MultiviewICA(init=list())
ica.fit(Xs)
@requires_multiviewica
def test_fit(Xs, capfd):
ica = MultiviewICA(verbose=True)
ica.fit(Xs)
out, err = capfd.readouterr()
assert out[:2] == "it"
| [
"[email protected]"
] | |
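A toy usage sketch grounded in the shape checks above; the random data is purely illustrative:

import numpy as np
from mvlearn.decomposition import MultiviewICA

rng = np.random.RandomState(0)
Xs = rng.randn(3, 100, 8)            # 3 views, 100 samples, 8 features
ica = MultiviewICA(n_components=4)
S = ica.fit_transform(Xs)            # shape (3, 100, 4), matching test_transform
Xs_back = ica.inverse_transform(S)   # per-view reconstructions of the shared sources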
c02215ad3b73954a6dbd3a4c1899a46d2ae927eb | 45cd72d5f000f25c5efcd31bbe4547008d944c24 | /AlgoProjectLatest/venv/Scripts/easy_install-3.7-script.py | 5b7e22c5741d52a3e96b10adeae227b2d69659d4 | [] | no_license | fatinqnahar/Best-Flight | 2b09fa1b7a34bc218d9b0c1b64ce6a7775093409 | 419bbfc568a9279903c48882a4910772b79777f0 | refs/heads/master | 2023-04-24T11:45:51.239479 | 2021-05-04T14:11:03 | 2021-05-04T14:11:03 | 364,277,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | #!C:\Users\Amirah\PycharmProjects\AlgoProject\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
)
| [
"[email protected]"
] | |
52cbb065169570a9caa1dc72b04b06eb5949d804 | 8c59093c20719903cee2ddc348544a224f6ecf47 | /mongo_rest_django/bricks_rest_api/bricks_rest_api/wsgi.py | 6b91726d25fc59366b4ed1dab1ad0d8c90a4a33e | [] | no_license | anhuaxiang/django_rest_test | 6679e12470048deda546c5061edc677271046266 | bc946d50ef8f85d78de67d9ca84430e6a50d1ade | refs/heads/master | 2020-03-27T04:30:17.749535 | 2018-08-24T10:16:58 | 2018-08-24T10:16:58 | 145,946,140 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | """
WSGI config for bricks_rest_api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bricks_rest_api.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
6b067f8119195dc5b8ec6b3d534aebb0b9424bb5 | 3a9f2b3d79cf214704829427ee280f4b49dca70a | /saigon/rat/RuckusAutoTest/scripts/zd/ats_ZD_Combo_Saigon_Comparison_Smart_Redundancy.py | 0e22c37e3a9aa75eeee259cd6893243d98da5d95 | [] | no_license | jichunwei/MyGitHub-1 | ae0c1461fe0a337ef459da7c0d24d4cf8d4a4791 | f826fc89a030c6c4e08052d2d43af0b1b4b410e3 | refs/heads/master | 2021-01-21T10:19:22.900905 | 2016-08-20T03:34:52 | 2016-08-20T03:34:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,411 | py | '''
Udpate @2011/9/8, by [email protected]
Update Content:
1) Append TCID to test cases so that report to test link directly.
2) Update Level hierarchical for test cases.
'''
import sys
import random
import libZD_TestSuite as testsuite
from RuckusAutoTest.common import lib_KwList as kwlist
def defineTestConfiguration():
test_cfgs = []
input_cfg = defineInputConfig()
test_name = 'CB_ZD_SR_Init_Env'
common_name = 'Initial Test Environment'
test_cfgs.append(({'zd1_ip_addr':input_cfg['zd1_ip_addr'], 'zd2_ip_addr':\
input_cfg['zd2_ip_addr'],'share_secret':input_cfg['share_secret'],
'sw_ip':input_cfg['sw_ip']},test_name, common_name, 0, False))
test_name = 'CB_ZD_SR_Disable'
common_name = 'Disable Smart Redundancy before test'
test_cfgs.append(({}, test_name, common_name, 0, False))
test_name = 'CB_ZD_Disable_Given_Mac_Switch_Port'
    common_name = '[Testing choice Active ZD rule by MAC]Disable switch port connected to all ap'
test_cfgs.append(({'ap_tag':'all','device':'ap'},test_name, common_name, 1, False))
test_name = 'CB_ZD_SR_Enable'
common_name = '[Testing choice Active ZD rule by MAC]Enable Smart Redundancy'
test_cfgs.append(({},test_name,common_name, 2,False))
test_name = 'CB_ZD_SR_Check_Lower_Mac_State'
common_name = '[Testing choice Active ZD rule by MAC]ZD with lower MAC address is Active when the ZDs have no APs.'
test_cfgs.append(({'except_state':'active'},test_name,common_name,2,False))
test_name = 'CB_ZD_Enable_Sw_Port_Connect_To_Given_Device'
common_name = '[Testing choice Active ZD rule by MAC]Enable sw port connected to all ap'
test_cfgs.append(({},test_name, common_name, 2, True))
test_name = 'CB_ZD_SR_Disable'
common_name = '[Testing choice Active ZD rule by MAC]Disable Smart Redundancy for comparison with same AP test'
test_cfgs.append(({}, test_name, common_name, 2, False))
test_name = 'CB_ZD_SR_Adjust_To_Same_AP'
common_name = '[Testing choice Active ZD rule by MAC]Adjust the 2 ZDs to have the same APs'
test_cfgs.append(({}, test_name, common_name, 2, False))
test_name = 'CB_ZD_SR_Enable'
common_name = '[Testing choice Active ZD rule by MAC]Enable Smart Redundancy for comparison with same AP test'
test_cfgs.append(({},test_name,common_name, 2,False))
test_name = 'CB_ZD_SR_Check_Lower_Mac_State'
common_name = '[Testing choice Active ZD rule by MAC]ZD with lower MAC address is Active when the ZDs have the same APs.'
test_cfgs.append(({'except_state':'active'},test_name,common_name,1,False))
test_name = 'CB_ZD_SR_Disable'
common_name = '[Testing choice Active ZD rule by MAC]Disable Smart Redundancy after test'
test_cfgs.append(({}, test_name, common_name, 2, True))
test_name = 'CB_ZD_SR_Enable'
    common_name = '[Testing choice Active ZD rule by AP Number]Enable Smart Redundancy'
test_cfgs.append(({},test_name,common_name, 1,False))
test_name = 'CB_ZD_SR_Get_Active_ZD'
    common_name = '[Testing choice Active ZD rule by AP Number]Get the Active ZD'
test_cfgs.append(({},test_name,common_name,2,False))
test_name = 'CB_ZD_SR_Set_Active_ZD'
    common_name = '[Testing choice Active ZD rule by AP Number]set active zd as high mac zd'
test_cfgs.append(({'zd':'higher_mac_zd'},test_name,common_name,2,False))
test_name = 'CB_Scaling_Waiting'
    common_name = '[Testing choice Active ZD rule by AP Number]Waiting ap connect for %d mins ' % 1
test_cfgs.append(({'timeout':1*60}, test_name, common_name, 2, False))
test_name = 'CB_ZD_SR_Disable'
    common_name = '[Testing choice Active ZD rule by AP Number]Disable Smart Redundancy for comparison with different APs test'
test_cfgs.append(({}, test_name, common_name, 2, False))
test_name = 'CB_ZD_SR_Check_High_Mac_Has_More_AP'
    common_name = '[Testing choice Active ZD rule by AP Number]Make sure the higher MAC address has more AP.'
test_cfgs.append(({}, test_name, common_name, 2, False))
test_name = 'CB_ZD_SR_Enable'
    common_name = '[Testing choice Active ZD rule by AP Number]Enable Smart Redundancy for comparison with different APs test'
test_cfgs.append(({},test_name,common_name, 2,False))
test_name = 'CB_ZD_SR_Check_Lower_Mac_State'
    common_name = '[Testing choice Active ZD rule by AP Number]Make sure the ZD with lower MAC address is Standby when the ZD has fewer APs.'
test_cfgs.append(({'except_state':'standby'},test_name,common_name, 2,False))
test_name = 'CB_ZD_SR_Disable'
    common_name = '[Testing choice Active ZD rule by AP Number]Disable Smart Redundancy after test'
test_cfgs.append(({}, test_name, common_name, 2, True))
test_name = 'CB_ZD_SR_Clear_Up'
common_name = "Clear up the Smart Redundancy test environment"
test_cfgs.append(({},test_name, common_name,0,True))
return test_cfgs
def defineInputConfig():
test_conf = {'zd1_ip_addr':'192.168.0.2',
'zd2_ip_addr':'192.168.0.3',
'sw_ip':'192.168.0.253'
}
test_conf['share_secret'] = _generate_secret_key(random.randint(5,15))
return test_conf
def _generate_secret_key(n):
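    # build a random n-character shared secret from lowercase letters and digits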
al=list('abcdefghijklmnopqrstuvwxyz0123456789')
st=''
for i in range(n):
index = random.randint(0,35)
st = st + al[index]
return st
def createTestSuite(**kwargs):
tb = testsuite.getTestbed2(**kwargs)
tb_cfg = testsuite.getTestbedConfig(tb)
ts_name = 'Smart Redundancy Active ZD selection Rule'
    ts = testsuite.get_testsuite(ts_name, 'Verify ZDs Comparison (AP and MAC)', combotest=True)
test_cfgs = defineTestConfiguration()
test_order = 1
test_added = 0
for test_params, testname, common_name, exc_level, is_cleanup in test_cfgs:
if testsuite.addTestCase(ts, testname, common_name, test_params, test_order, exc_level, is_cleanup) > 0:
test_added += 1
test_order += 1
            print "Add test case with test name: %s\n\tcommon name: %s" % (testname, common_name)
print "\n-- Summary: added %d test cases into test suite '%s'" % (test_added, ts.name)
if __name__ == "__main__":
_dict = kwlist.as_dict(sys.argv[1:])
createTestSuite(**_dict)
| [
"[email protected]"
] | |
5de0c3ecbd041cd5a3bda07015a1c63b74c5eff3 | be1c1bb17f267b5fb2c440f70e97c6442ebde819 | /componentes_basicos/botao.py | 100b15105c196a30f6cfb2f8efa5c8f88e636afa | [] | no_license | leuribeiru/QtforPhyton | 2fb4edebcee00c6acda55d87bb8d0359e3c3c1b7 | ee3ca119d03ae09012e2a564b66b6f0703727978 | refs/heads/main | 2023-01-11T01:17:32.731045 | 2020-11-14T02:46:02 | 2020-11-14T02:46:02 | 311,734,424 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 624 | py | import sys
from PySide2.QtWidgets import QApplication, QMainWindow, QPushButton
aplicacao = QApplication(sys.argv)
janela = QMainWindow()
# setGeometry(left, top, width, height)
janela.setGeometry( 100, 50, 300, 200 )
janela.setWindowTitle("Primeira Janela")
# instantiate a button inside the window
botao = QPushButton("Meu Botão", janela)
# position inside the window (left, top)
botao.move(50,50)
# size (width, height)
botao.resize(200,100)
# button style
botao.setStyleSheet("QPushButton \
{background-color: blue; color: white; font-size: 32px}")
janela.show()
sys.exit(aplicacao.exec_())
| [
"[email protected]"
] | |
a6d3629bd67191a216b820d9cbbab017175d10e4 | 4f725d021518117851cb4607c8dda0346abbcaf2 | /bigdata_moretv/bigdata/bigdata/settings.py | ca016cc2af514b0908f8c9dcc471e47ddd1df6e3 | [] | no_license | milespeng/mydjango | 424515d38cd4aeeb2baaa87a955f514134b80d6e | d6b9f5a21bdfb08e077905cce81dc156ee71de83 | refs/heads/master | 2020-03-08T03:24:44.085175 | 2018-04-04T09:24:26 | 2018-04-04T09:24:26 | 127,890,147 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,124 | py | """
Django settings for bigdata project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@=!4oy_r*)oibzp2-v@ojc(ii%8j7mzh_un9-m=7#5h31*to5#'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'bigdata_service',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bigdata.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bigdata.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
e85a4f334cd6cde981e33cfde51223548351b8c6 | 2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8 | /pardus/tags/2008-EOL/applications/games/openarena-data/actions.py | a3a100c26fc8ba76e36b5d9c0ce8e1bd9ef8e90c | [] | no_license | aligulle1/kuller | bda0d59ce8400aa3c7ba9c7e19589f27313492f7 | 7f98de19be27d7a517fe19a37c814748f7e18ba6 | refs/heads/master | 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 869 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import shelltools
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
import os
NoStrip = "/"
WorkDir = "openarena-%s" % get.srcVERSION()
docs = ["CHANGES", "COPYING", "CREDITS", "LINUXNOTES", "README", "WENEED"]
datadir = "/usr/share/openarena"
def fixperms(d):
for root, dirs, files in os.walk(d):
for name in dirs:
shelltools.chmod(os.path.join(root, name), 0755)
for name in files:
shelltools.chmod(os.path.join(root, name), 0644)
def install():
for data in ("missionpack", "baseoa"):
fixperms(data)
pisitools.insinto(datadir, data)
for doc in docs:
pisitools.dodoc(doc)
| [
"[email protected]"
] | |
5aae8e966b5c54cd412a07b852f046b196f7080b | 77311ad9622a7d8b88707d7cee3f44de7c8860cb | /res/scripts/client/gui/shared/fortifications/controls.py | 5b33b5497ab7e2d68156385958d6c1d445f7bd76 | [] | no_license | webiumsk/WOT-0.9.14-CT | 9b193191505a4560df4e872e022eebf59308057e | cfe0b03e511d02c36ce185f308eb48f13ecc05ca | refs/heads/master | 2021-01-10T02:14:10.830715 | 2016-02-14T11:59:59 | 2016-02-14T11:59:59 | 51,606,676 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 40,137 | py | # 2016.02.14 12:41:27 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/shared/fortifications/controls.py
import BigWorld
from functools import partial
from operator import attrgetter
from debug_utils import LOG_ERROR, LOG_DEBUG
import fortified_regions
from gui.shared import g_eventBus, events, EVENT_BUS_SCOPE
from gui.shared.fortifications import getClientFort, getClientFortMgr
from gui.shared.fortifications.FortFinder import FortFinder
from gui.shared.fortifications.context import FortRequestCtx
from gui.shared.fortifications.fort_ext import FortCooldownManager
from gui.shared.fortifications.fort_ext import PlayerFortRequester
from gui.game_control.battle_availability import SortiesCurfewController
from gui.shared.fortifications.fort_seqs import SortiesCache, PublicInfoCache, FortBattlesCache
from gui.shared.fortifications.interfaces import IFortController
from gui.shared.fortifications.restrictions import FortPermissions, NoFortLimits, IntroFortLimits, NoFortValidators, FortValidators
from gui.shared.fortifications.restrictions import FortLimits
from gui.shared.fortifications.settings import FORT_REQUEST_TYPE, CLIENT_FORT_STATE
from helpers import time_utils
class _FortController(IFortController):
_TIME_OUT = 45
def __init__(self, handlers):
super(_FortController, self).__init__()
self._requester = None
self._limits = None
self._validators = None
self._sortiesCache = None
self._sortiesCurfewCtrl = None
self._fortBattlesCache = None
self._publicInfoCache = None
self._handlers = handlers
self._cooldown = FortCooldownManager()
self.clear()
return
def clear(self):
self._clan = None
self._listeners = None
self._waiters = None
return
def init(self, clan, listeners, prevController = None):
self._requester = PlayerFortRequester()
self._requester.init()
self._setLimits()
self._setValidators()
self._clan = clan
self._listeners = listeners
self._addFortListeners()
self._waiters = {}
def fini(self, clearCache = True):
self._removeFortListeners()
self.stopProcessing()
if self._requester:
self._requester.fini()
self._requester = None
if self._limits:
self._limits = None
if self._validators:
self._validators.fini()
self._validators = None
self.clear()
self._handlers.clear()
return
def stopProcessing(self):
self._clearWaiters()
if self._requester is not None:
self._requester.stopProcessing()
return
def getFort(self):
return getClientFort()
def getPermissions(self):
if self._clan:
roles = self._clan.clanRole
else:
roles = 0
return FortPermissions(roles)
def getLimits(self):
return self._limits
def getValidators(self):
return self._validators
def getSortiesCache(self):
return self._sortiesCache
def getFortBattlesCache(self):
return self._fortBattlesCache
def getSortiesCurfewCtrl(self):
return self._sortiesCurfewCtrl
def getPublicInfoCache(self):
return self._publicInfoCache
def removeSortiesCache(self):
SortiesCache._removeStoredData()
def removeFortBattlesCache(self):
FortBattlesCache._removeStoredData()
def request(self, ctx, callback = None):
if self._clan is None:
return self._failChecking('Clan is not defined', ctx, callback)
else:
requestType = ctx.getRequestType()
if requestType in self._handlers:
cooldown = ctx.getCooldown()
if self._cooldown.validate(requestType, cooldown):
if callback:
callback(False)
else:
LOG_DEBUG('Fort request', ctx)
if self._handlers[requestType](ctx, callback=partial(self._callbackWrapper, requestType, callback, cooldown)):
self._waiters[requestType] = BigWorld.callback(self._TIME_OUT, self._onTimeout)
self._cooldown.process(requestType, cooldown)
else:
self._failChecking('Handler not found', ctx, callback)
return
def subscribe(self, callback = None):
def _doRequest():
LOG_DEBUG('Fort request to subscribe')
result = self._requester.doRequestEx(FortRequestCtx(), callback, 'subscribe')
if result:
self._waiters[FORT_REQUEST_TYPE.SUBSCRIBE] = BigWorld.callback(self._TIME_OUT, self._onTimeout)
self._cooldown.process(FORT_REQUEST_TYPE.SUBSCRIBE)
if self._cooldown.validate(FORT_REQUEST_TYPE.SUBSCRIBE):
BigWorld.callback(self._cooldown.getTime(FORT_REQUEST_TYPE.SUBSCRIBE), _doRequest)
else:
_doRequest()
def unsubscribe(self, callback = None):
LOG_DEBUG('Fort request to unsubscribe')
self._requester.doRequestEx(FortRequestCtx(), callback, 'unsubscribe')
return False
    def _failChecking(self, msg, ctx, callback = None):
if callback:
callback(False)
LOG_ERROR(msg, ctx)
return False
def _addFortListeners(self):
pass
def _removeFortListeners(self):
pass
def _setLimits(self):
self._limits = NoFortLimits()
def _setValidators(self):
self._validators = NoFortValidators()
def _callbackWrapper(self, requestType, callback, cooldown, *args):
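        # the server answered in time: cancel the pending timeout waiter and
        # re-sync the request cooldown before forwarding the result to the caller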
callbackID = self._waiters.pop(requestType, None)
if callbackID is not None:
BigWorld.cancelCallback(callbackID)
self._cooldown.adjust(requestType, cooldown)
callback(*args)
return
def _clearWaiters(self):
if self._waiters is not None:
while len(self._waiters):
_, callbackID = self._waiters.popitem()
BigWorld.cancelCallback(callbackID)
return
def _onTimeout(self):
LOG_ERROR('Fort request time out!')
self.stopProcessing()
g_eventBus.handleEvent(events.FortEvent(events.FortEvent.REQUEST_TIMEOUT), scope=EVENT_BUS_SCOPE.FORT)
class NoFortController(_FortController):
def __init__(self):
super(NoFortController, self).__init__({})
@classmethod
def isNext(cls, stateID, isLeader):
if stateID in [CLIENT_FORT_STATE.NO_CLAN, CLIENT_FORT_STATE.UNSUBSCRIBED]:
return True
if not isLeader and stateID == CLIENT_FORT_STATE.NO_FORT:
return True
def request(self, ctx, callback = None):
self._failChecking('Has been invoked NoFortController.request', ctx, callback)
def init(self, clan, listeners, prevController = None):
super(NoFortController, self).init(clan, listeners, prevController)
self._sortiesCurfewCtrl = SortiesCurfewController()
self._sortiesCurfewCtrl.start()
def fini(self, clearCache = True):
if self._sortiesCurfewCtrl:
self._sortiesCurfewCtrl.stop()
self._sortiesCurfewCtrl = None
super(NoFortController, self).fini(clearCache)
return
class CenterUnavailableController(_FortController):
def __init__(self):
super(CenterUnavailableController, self).__init__({})
@classmethod
def isNext(cls, stateID, isLeader):
if stateID in [CLIENT_FORT_STATE.CENTER_UNAVAILABLE]:
return True
if not isLeader and stateID == CLIENT_FORT_STATE.NO_FORT:
return True
def init(self, clan, listeners, prevController = None):
super(CenterUnavailableController, self).init(clan, listeners, prevController)
if prevController is not None:
self._sortiesCache = prevController.getSortiesCache()
if self._sortiesCache is not None:
self._sortiesCache.setController(self)
return
def fini(self, clearCache = True):
if self._sortiesCache and clearCache:
self._sortiesCache.stop()
self._sortiesCache = None
super(CenterUnavailableController, self).fini()
return
def request(self, ctx, callback = None):
self._failChecking('Has been invoked CenterUnavailableController.request', ctx, callback)
class IntroController(_FortController):
def __init__(self):
super(IntroController, self).__init__({FORT_REQUEST_TYPE.CREATE_FORT: self.create})
@classmethod
def isNext(cls, stateID, isLeader):
return isLeader and stateID == CLIENT_FORT_STATE.NO_FORT
def create(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canCreate():
return self._failChecking('Player can not create fort', ctx, callback)
fort = self.getFort()
if not fort.isEmpty():
return self._failChecking('Fort is already created', ctx, callback)
limits = self.getLimits()
valid, reason = limits.isCreationValid()
if not valid:
return self._failChecking('Creation is not valid: {0}'.format(reason), ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'create')
def _setLimits(self):
self._limits = IntroFortLimits()
class FortController(_FortController):
def __init__(self):
super(FortController, self).__init__({FORT_REQUEST_TYPE.OPEN_DIRECTION: self.openDirection,
FORT_REQUEST_TYPE.CLOSE_DIRECTION: self.closeDirection,
FORT_REQUEST_TYPE.ADD_BUILDING: self.addBuilding,
FORT_REQUEST_TYPE.DELETE_BUILDING: self.deleteBuilding,
FORT_REQUEST_TYPE.TRANSPORTATION: self.transport,
FORT_REQUEST_TYPE.ADD_ORDER: self.addOrder,
FORT_REQUEST_TYPE.ACTIVATE_ORDER: self.activateOrder,
FORT_REQUEST_TYPE.ATTACH: self.attach,
FORT_REQUEST_TYPE.UPGRADE: self.upgrade,
FORT_REQUEST_TYPE.CREATE_SORTIE: self.createSortie,
FORT_REQUEST_TYPE.REQUEST_SORTIE_UNIT: self.requestSortieUnit,
FORT_REQUEST_TYPE.CHANGE_DEF_HOUR: self.changeDefHour,
FORT_REQUEST_TYPE.CHANGE_OFF_DAY: self.changeOffDay,
FORT_REQUEST_TYPE.CHANGE_PERIPHERY: self.changePeriphery,
FORT_REQUEST_TYPE.CHANGE_VACATION: self.changeVacation,
FORT_REQUEST_TYPE.CHANGE_SETTINGS: self.changeSettings,
FORT_REQUEST_TYPE.SHUTDOWN_DEF_HOUR: self.shutDownDefHour,
FORT_REQUEST_TYPE.CANCEL_SHUTDOWN_DEF_HOUR: self.cancelShutDownDefHour,
FORT_REQUEST_TYPE.REQUEST_PUBLIC_INFO: self.requestFortPublicInfo,
FORT_REQUEST_TYPE.REQUEST_CLAN_CARD: self.requestClanCard,
FORT_REQUEST_TYPE.ADD_FAVORITE: self.addFavorite,
FORT_REQUEST_TYPE.REMOVE_FAVORITE: self.removeFavorite,
FORT_REQUEST_TYPE.PLAN_ATTACK: self.planAttack,
FORT_REQUEST_TYPE.CREATE_OR_JOIN_FORT_BATTLE: self.createOrJoinFortBattle,
FORT_REQUEST_TYPE.ACTIVATE_CONSUMABLE: self.activateConsumable,
FORT_REQUEST_TYPE.RETURN_CONSUMABLE: self.returnConsumable})
self.__cooldownCallback = None
self.__cooldownBuildings = []
self.__cooldownPassed = False
self._upgradeVisitedBuildings = set()
self._finder = None
self.__defencePeriodCallback = None
return
@classmethod
def isNext(cls, stateID, _):
return stateID in [CLIENT_FORT_STATE.WIZARD, CLIENT_FORT_STATE.HAS_FORT]
def init(self, clan, listeners, prevController = None):
super(FortController, self).init(clan, listeners, prevController)
self._sortiesCache = SortiesCache(self)
self._sortiesCache.start()
self._sortiesCurfewCtrl = SortiesCurfewController()
self._sortiesCurfewCtrl.start()
self._fortBattlesCache = FortBattlesCache(self)
self._fortBattlesCache.start()
self._finder = FortFinder()
self._finder.init()
self._publicInfoCache = PublicInfoCache(self)
self._publicInfoCache.start()
def fini(self, clearCache = True):
if self._sortiesCache and clearCache:
self._sortiesCache.stop()
self._sortiesCache = None
if self._sortiesCurfewCtrl:
self._sortiesCurfewCtrl.stop()
self._sortiesCurfewCtrl = None
if self._fortBattlesCache:
self._fortBattlesCache.stop()
self._fortBattlesCache = None
if self._publicInfoCache:
self._publicInfoCache.stop()
self._publicInfoCache = None
super(FortController, self).fini()
return
def stopProcessing(self):
if self._finder is not None:
self._finder.stopProcessing()
super(FortController, self).stopProcessing()
return
def openDirection(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
direction = ctx.getDirection()
if fort.isDirectionOpened(direction):
return self._failChecking('Direction already is opened', ctx, callback)
if not perm.canOpenDirection():
return self._failChecking('Player can not open direction', ctx, callback)
limits = self.getLimits()
valid, reason = limits.isDirectionValid(direction)
if not valid:
return self._failChecking('Direction is invalid: {0}'.format(reason), ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'openDir', direction)
def closeDirection(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
direction = ctx.getDirection()
if not fort.isDirectionOpened(direction):
return self._failChecking('Direction is not opened', ctx, callback)
if not perm.canCloseDirection():
return self._failChecking('Player can not open direction', ctx, callback)
limits = self.getLimits()
valid, reason = limits.isDirectionValid(direction, open=False)
if not valid:
return self._failChecking('Direction is invalid: {0}'.format(reason), ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'closeDir', direction)
def addBuilding(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
buildingTypeID = ctx.getBuildingTypeID()
direction = ctx.getDirection()
position = ctx.getPosition()
if not fort.isPositionAvailable(direction, position):
return self._failChecking('Positions is not available', ctx, callback)
if fort.isBuildingBuilt(buildingTypeID):
return self._failChecking('Building is already built', ctx, callback)
if not perm.canAddBuilding():
return self._failChecking('Player can not build buildings', ctx, callback)
limits = self.getLimits()
valid, reason = limits.canBuild(buildingTypeID)
if not valid:
return self._failChecking('Building is invalid: {0}'.format(reason), ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'addBuilding', buildingTypeID, direction, position)
def deleteBuilding(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
buildingTypeID = ctx.getBuildingTypeID()
if not fort.isBuildingBuilt(buildingTypeID):
return self._failChecking('Building is not built', ctx, callback)
if not perm.canDeleteBuilding():
return self._failChecking('Player can not build buildings', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'delBuilding', buildingTypeID)
def transport(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
fromBuildingID = ctx.getFromBuildingTypeID()
toBuildingID = ctx.getToBuildingTypeID()
resCount = ctx.getResCount()
if not fort.isBuildingBuilt(fromBuildingID):
return self._failChecking('Exporting building is not built', ctx, callback)
if not fort.isBuildingBuilt(toBuildingID):
return self._failChecking('Importing building is not built', ctx, callback)
fromBuilding = fort.getBuilding(fromBuildingID)
if not fromBuilding.isReady() or not fromBuilding.isExportAvailable(resCount):
return self._failChecking('Exporting from building is not available', ctx, callback)
toBuilding = fort.getBuilding(toBuildingID)
if not toBuilding.isImportAvailable(resCount):
return self._failChecking('Importing into building is not available', ctx, callback)
if not perm.canTransport():
return self._failChecking('Player can not transport', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'transport', fromBuildingID, toBuildingID, resCount)
def attach(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
buildingTypeID = ctx.getBuildingTypeID()
if not fort.isBuildingBuilt(buildingTypeID):
return self._failChecking('Building is not built', ctx, callback)
if not perm.canAttach():
return self._failChecking('Player can not attach', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'attach', buildingTypeID)
def upgrade(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
buildingTypeID = ctx.getBuildingTypeID()
if not fort.isBuildingBuilt(buildingTypeID):
return self._failChecking('Building is not built', ctx, callback)
building = fort.getBuilding(buildingTypeID)
if not building.isReady():
return self._failChecking('Building is not available', ctx, callback)
if not perm.canUpgradeBuilding():
return self._failChecking('Player can not upgrade building', ctx, callback)
limits = self.getLimits()
valid, reason = limits.canUpgrade(buildingTypeID)
if not valid:
return self._failChecking('Building is invalid: {0}'.format(reason), ctx, callback)
self.removeUpgradeVisitedBuilding(buildingTypeID)
return self._requester.doRequestEx(ctx, callback, 'upgrade', buildingTypeID)
def addOrder(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
orderTypeID = ctx.getOrderTypeID()
count = ctx.getCount()
order = fort.getOrder(orderTypeID)
if not order.hasBuilding:
return self._failChecking('Order building is not built', ctx, callback)
orderBuilding = fort.getBuilding(order.buildingID)
if not orderBuilding.isReady() and not orderBuilding.orderInProduction:
return self._failChecking('Building is not ready to add order', ctx, callback)
if not perm.canAddOrder():
return self._failChecking('Player can not add order', ctx, callback)
limits = self.getLimits()
valid, reason = limits.isOrderValid(orderTypeID, add=False)
if not valid:
return self._failChecking('Orded is invalid: {0}'.format(reason), ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'addOrder', order.buildingID, count)
def activateOrder(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
orderTypeID = ctx.getOrderTypeID()
order = fort.getOrder(orderTypeID)
if not order.hasBuilding:
return self._failChecking('Order building is not built', ctx, callback)
orderBuilding = fort.getBuilding(order.buildingID)
if not orderBuilding.isReady():
return self._failChecking('Building is not ready to add order', ctx, callback)
if not perm.canActivateOrder():
return self._failChecking('Player can not add order', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'activateOrder', orderTypeID)
def createSortie(self, ctx, callback = None):
level = ctx.getDivisionLevel()
perm = self.getPermissions()
if not perm.canCreateSortie():
return self._failChecking('Player can not create sortie, no permission', ctx, callback)
limits = self.getLimits()
valid, reason = limits.isSortieCreationValid(level)
if not valid:
return self._failChecking('Player can not create sortie: {0}'.format(reason), ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'createSortie', level)
def requestSortieUnit(self, ctx, callback = None):
unitMgrID = ctx.getUnitMgrID()
peripheryID = ctx.getPeripheryID()
fort = self.getFort()
if not fort:
return self._failChecking('Client fort is not found', ctx, callback)
if (unitMgrID, peripheryID) not in fort.sorties:
return self._failChecking('Sortie does not exists on client', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'getSortieData', unitMgrID, peripheryID)
def getUpgradeVisitedBuildings(self):
return self._upgradeVisitedBuildings
def addUpgradeVisitedBuildings(self, buildingID):
if buildingID not in self._upgradeVisitedBuildings:
self._upgradeVisitedBuildings.add(buildingID)
self._listeners.notify('onUpgradeVisitedBuildingChanged', buildingID)
def removeUpgradeVisitedBuilding(self, buildingID):
if buildingID in self._upgradeVisitedBuildings:
self._upgradeVisitedBuildings.remove(buildingID)
self._listeners.notify('onUpgradeVisitedBuildingChanged', buildingID)
def changeDefHour(self, ctx, callback = None):
perm = self.getPermissions()
defHour = ctx.getDefenceHour()
if not perm.canChangeDefHour():
return self._failChecking('Player can not change defence hour', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'changeDefHour', defHour)
def changeOffDay(self, ctx, callback = None):
perm = self.getPermissions()
offDay = ctx.getOffDay()
if not perm.canChangeOffDay():
return self._failChecking('Player can not change off day', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'changeOffDay', offDay)
def changePeriphery(self, ctx, callback = None):
perm = self.getPermissions()
peripheryID = ctx.getPeripheryID()
if not perm.canChangePeriphery():
return self._failChecking('Player can not change periphery', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'changePeriphery', peripheryID)
def changeVacation(self, ctx, callback = None):
perm = self.getPermissions()
timeVacationStart = ctx.getTimeVacationStart()
timeVacationDuration = ctx.getTimeVacationDuration()
if not perm.canChangeVacation():
return self._failChecking('Player can not change vacation', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'changeVacation', timeVacationStart, timeVacationDuration)
def changeSettings(self, ctx, callback = None):
perm = self.getPermissions()
fort = self.getFort()
chain = []
defHour = ctx.getDefenceHour()
if defHour != fort.defenceHour:
if not perm.canChangeDefHour():
return self._failChecking('Player can not change defence hour', ctx, callback)
chain.append(('changeDefHour', (defHour,), {}))
offDay = ctx.getOffDay()
if offDay != fort.offDay:
if not perm.canChangeOffDay():
return self._failChecking('Player can not change off day', ctx, callback)
chain.append(('changeOffDay', (offDay,), {}))
peripheryID = ctx.getPeripheryID()
if peripheryID != fort.peripheryID:
if not perm.canChangePeriphery():
return self._failChecking('Player can not change periphery', ctx, callback)
chain.append(('changePeriphery', (peripheryID,), {}))
if not chain:
return self._failChecking('No requests to process', ctx, callback)
return self._requester.doRequestChainEx(ctx, callback, chain)
def shutDownDefHour(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canShutDownDefHour():
return self._failChecking('Player can not shut down def hour', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'shutdownDefHour')
def cancelShutDownDefHour(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canCancelShutDownDefHour():
return self._failChecking('Player can not cancel shut down def hour', ctx, callback)
return self._requester.doRequestEx(ctx, callback, 'cancelDefHourShutdown')
def requestFortPublicInfo(self, ctx, callback = None):
fort = self.getFort()
perm = self.getPermissions()
if not perm.canRequestPublicInfo():
return self._failChecking('Player can not request public info', ctx, callback)
filterType = ctx.getFilterType()
abbrevPattern = ctx.getAbbrevPattern()
homePeripheryID = fort.peripheryID
limit = ctx.getLimit()
lvlFrom = ctx.getLvlFrom()
lvlTo = ctx.getLvlTo()
ownStartDefHourFrom = fort.defenceHour
nextOwnStartDefHourFrom, defHourChangeDay = fort.getNextDefenceHourData()
extStartDefHourFrom = ctx.getStartDefHourFrom()
extStartDefHourTo = ctx.getStartDefHourTo()
attackDay = ctx.getAttackDay()
ownFortLvl = fort.level
battleStats = fort.getFortDossier().getBattlesStats()
ownProfitFactor10 = int(battleStats.getProfitFactor() * 10)
buildingLevels = map(attrgetter('level'), fort.getBuildings().itervalues())
minLevel = fortified_regions.g_cache.defenceConditions.minRegionLevel
validBuildingLevels = filter(lambda x: x >= minLevel, buildingLevels)
avgBuildingLevel10 = 0
if validBuildingLevels:
avgBuildingLevel10 = int(float(sum(validBuildingLevels)) / len(validBuildingLevels) * 10)
ownBattleCountForFort = battleStats.getBattlesCount()
firstDefaultQuery = ctx.isFirstDefaultQuery()
electedClanDBIDs = tuple(fort.favorites)
val = self.getValidators()
validationResult, validationReason = val.validate(ctx.getRequestType(), filterType, abbrevPattern)
if not validationResult:
self._listeners.notify('onFortPublicInfoValidationError', validationReason)
return self._failChecking('Player input is invalid', ctx, callback)
return self._finder.request(filterType, abbrevPattern, homePeripheryID, limit, lvlFrom, lvlTo, ownStartDefHourFrom, ownStartDefHourFrom + 1, nextOwnStartDefHourFrom, nextOwnStartDefHourFrom + 1, defHourChangeDay, extStartDefHourFrom, extStartDefHourTo, attackDay, ownFortLvl, ownProfitFactor10, avgBuildingLevel10, ownBattleCountForFort, firstDefaultQuery, electedClanDBIDs, callback)
def requestClanCard(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canRequestClanCard():
return self._failChecking('Player can not request clan card', ctx, callback)
clanDBID = ctx.getClanDBID()
return self._requester.doRequestEx(ctx, callback, 'getEnemyClanCard', clanDBID)
def addFavorite(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canAddToFavorite():
return self._failChecking('Player can not add favorite', ctx, callback)
clanDBID = ctx.getClanDBID()
return self._requester.doRequestEx(ctx, callback, 'addFavorite', clanDBID)
def removeFavorite(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canRemoveFavorite():
return self._failChecking('Player can not remove favorite', ctx, callback)
clanDBID = ctx.getClanDBID()
return self._requester.doRequestEx(ctx, callback, 'removeFavorite', clanDBID)
def planAttack(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canPlanAttack():
return self._failChecking('Player can not plan attack', ctx, callback)
clanDBID = ctx.getClanDBID()
timeAttack = ctx.getTimeAttack()
dirFrom = ctx.getDirFrom()
dirTo = ctx.getDirTo()
return self._requester.doRequestEx(ctx, callback, 'planAttack', clanDBID, timeAttack, dirFrom, dirTo)
def createOrJoinFortBattle(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canCreateFortBattle():
return self._failChecking('Player can not plan attack', ctx, callback)
battleID = ctx.getBattleID()
slotIdx = ctx.getSlotIdx()
return self._requester.doRequestEx(ctx, callback, 'createOrJoinFortBattle', battleID, slotIdx)
def activateConsumable(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canActivateConsumable():
return self._failChecking('Player can not activate consumable', ctx, callback)
orderTypeID = ctx.getConsumableOrderTypeID()
slotIdx = ctx.getSlotIdx()
return self._requester.doRequestEx(ctx, callback, 'activateConsumable', orderTypeID, slotIdx)
def returnConsumable(self, ctx, callback = None):
perm = self.getPermissions()
if not perm.canReturnConsumable():
return self._failChecking('Player can not return consumable', ctx, callback)
orderTypeID = ctx.getConsumableOrderTypeID()
return self._requester.doRequestEx(ctx, callback, 'returnConsumable', orderTypeID)
def _setLimits(self):
self._limits = FortLimits()
def _setValidators(self):
self._validators = FortValidators()
def _addFortListeners(self):
super(FortController, self)._addFortListeners()
fort = self.getFort()
if not fort:
LOG_ERROR('No fort to subscribe')
return
fort.onBuildingChanged += self.__fort_onBuildingChanged
fort.onTransport += self.__fort_onTransport
fort.onDirectionOpened += self.__fort_onDirectionOpened
fort.onDirectionClosed += self.__fort_onDirectionClosed
fort.onDirectionLockChanged += self.__fort_onDirectionLockChanged
fort.onStateChanged += self.__fort_onStateChanged
fort.onOrderChanged += self.__fort_onOrderChanged
fort.onDossierChanged += self.__fort_onDossierChanged
fort.onPlayerAttached += self.__fort_onPlayerAttached
fort.onSettingCooldown += self.__fort_onSettingCooldown
fort.onPeripheryChanged += self.__fort_onPeripheryChanged
fort.onDefenceHourChanged += self.__fort_onDefenceHourChanged
fort.onOffDayChanged += self.__fort_onOffDayChanged
fort.onVacationChanged += self.__fort_onVacationChanged
fort.onFavoritesChanged += self.__fort_onFavoritesChanged
fort.onEnemyClanCardReceived += self.__fort_onEnemyClanCardReceived
fort.onShutdownDowngrade += self.__fort_onShutdownDowngrade
fort.onDefenceHourShutdown += self.__fort_onDefenceHourShutdown
fort.onEmergencyRestore += self.__fort_onEmergencyRestore
fort.onConsumablesChanged += self.__fort_onConsumablesChanged
fort.onDefenceHourActivated += self.__fort_onDefenceHourActivated
fortMgr = getClientFortMgr()
if not fortMgr:
LOG_ERROR('No fort manager to subscribe')
return
fortMgr.onFortUpdateReceived += self.__fortMgr_onFortUpdateReceived
fortMgr.onFortPublicInfoReceived += self.__fortMgr_onFortPublicInfoReceived
self.__refreshCooldowns(False)
self.__processDefencePeriodCallback()
def _removeFortListeners(self):
self.__cancelCooldownCallback()
self.__cancelDefencePeriodCallback()
fort = self.getFort()
if fort:
fort.onBuildingChanged -= self.__fort_onBuildingChanged
fort.onTransport -= self.__fort_onTransport
fort.onDirectionOpened -= self.__fort_onDirectionOpened
fort.onDirectionClosed -= self.__fort_onDirectionClosed
fort.onDirectionLockChanged -= self.__fort_onDirectionLockChanged
fort.onStateChanged -= self.__fort_onStateChanged
fort.onOrderChanged -= self.__fort_onOrderChanged
fort.onDossierChanged -= self.__fort_onDossierChanged
fort.onPlayerAttached -= self.__fort_onPlayerAttached
fort.onSettingCooldown -= self.__fort_onSettingCooldown
fort.onPeripheryChanged -= self.__fort_onPeripheryChanged
fort.onDefenceHourChanged -= self.__fort_onDefenceHourChanged
fort.onOffDayChanged -= self.__fort_onOffDayChanged
fort.onVacationChanged -= self.__fort_onVacationChanged
fort.onFavoritesChanged -= self.__fort_onFavoritesChanged
fort.onEnemyClanCardReceived -= self.__fort_onEnemyClanCardReceived
fort.onShutdownDowngrade -= self.__fort_onShutdownDowngrade
fort.onDefenceHourShutdown -= self.__fort_onDefenceHourShutdown
fort.onEmergencyRestore -= self.__fort_onEmergencyRestore
fort.onConsumablesChanged -= self.__fort_onConsumablesChanged
fort.onDefenceHourActivated -= self.__fort_onDefenceHourActivated
fortMgr = getClientFortMgr()
if fortMgr:
fortMgr.onFortUpdateReceived -= self.__fortMgr_onFortUpdateReceived
fortMgr.onFortPublicInfoReceived -= self.__fortMgr_onFortPublicInfoReceived
super(FortController, self)._removeFortListeners()
def __refreshCooldowns(self, doNotify = True):
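        # report the previously tracked cooldown buildings, then recompute the
        # set and schedule the next refresh at the soonest cooldown boundary
        # (capped at 30 seconds)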
self.__cancelCooldownCallback()
if self.__cooldownBuildings and doNotify:
self._listeners.notify('onBuildingsUpdated', self.__cooldownBuildings, self.__cooldownPassed)
fort = self.getFort()
self.__cooldownBuildings = fort.getBuildingsOnCooldown()
if self.__cooldownBuildings:
time = 30
self.__cooldownPassed = False
for buildingID in self.__cooldownBuildings:
building = fort.getBuilding(buildingID)
estimatedCooldown = building.getEstimatedCooldown()
if 0 < estimatedCooldown <= time:
time = estimatedCooldown
self.__cooldownPassed = True
productionCooldown = building.getProductionCooldown()
if 0 < productionCooldown < time:
time = productionCooldown
self.__cooldownCallback = BigWorld.callback(time, self.__refreshCooldowns)
def __cancelCooldownCallback(self):
if self.__cooldownCallback is not None:
LOG_DEBUG('Cooldown callback cancelling: ', self.__cooldownCallback)
BigWorld.cancelCallback(self.__cooldownCallback)
self.__cooldownCallback = None
self.__cooldownBuildings = []
self.__cooldownPassed = False
return
def __processDefencePeriodCallback(self):
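        # re-arm a one-shot timer for the next defence period boundary: the
        # period's end while it is active, its start otherwise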
self.__cancelDefencePeriodCallback()
fort = self.getFort()
self._listeners.notify('onDefenceHourStateChanged')
start, finish = fort.getClosestDefencePeriod()
if fort.isOnDefenceHour():
timer = time_utils.getTimeDeltaFromNow(finish)
else:
timer = time_utils.getTimeDeltaFromNow(start)
if timer > 0:
self.__defencePeriodCallback = BigWorld.callback(timer, self.__processDefencePeriodCallback)
def __cancelDefencePeriodCallback(self):
if self.__defencePeriodCallback is not None:
BigWorld.cancelCallback(self.__defencePeriodCallback)
self.__defencePeriodCallback = None
return
def __fort_onBuildingChanged(self, buildingTypeID, reason, ctx = None):
self._listeners.notify('onBuildingChanged', buildingTypeID, reason, ctx)
def __fort_onBuildingRemoved(self, buildingTypeID):
self._listeners.notify('onBuildingRemoved', buildingTypeID)
def __fort_onTransport(self):
self._listeners.notify('onTransport')
def __fort_onDirectionOpened(self, dir):
self._listeners.notify('onDirectionOpened', dir)
def __fort_onDirectionClosed(self, dir):
self._listeners.notify('onDirectionClosed', dir)
def __fort_onDirectionLockChanged(self):
self._listeners.notify('onDirectionLockChanged')
def __fort_onStateChanged(self, state):
self._listeners.notify('onStateChanged', state)
def __fort_onOrderChanged(self, orderTypeID, reason):
self._listeners.notify('onOrderChanged', orderTypeID, reason)
def __fort_onDossierChanged(self, compDossierDescr):
self._listeners.notify('onDossierChanged', compDossierDescr)
def __fort_onPlayerAttached(self, buildingTypeID):
self._listeners.notify('onPlayerAttached', buildingTypeID)
def __fort_onSettingCooldown(self, eventTypeID):
self._listeners.notify('onSettingCooldown', eventTypeID)
def __fort_onPeripheryChanged(self, peripheryID):
self._listeners.notify('onPeripheryChanged', peripheryID)
def __fort_onDefenceHourChanged(self, hour):
self._listeners.notify('onDefenceHourChanged', hour)
self.__processDefencePeriodCallback()
def __fort_onDefenceHourActivated(self, hour, initiatorDBID):
self._listeners.notify('onDefenceHourActivated', hour, initiatorDBID)
self.__processDefencePeriodCallback()
def __fort_onOffDayChanged(self, offDay):
self._listeners.notify('onOffDayChanged', offDay)
self.__processDefencePeriodCallback()
def __fort_onVacationChanged(self, vacationStart, vacationEnd):
self._listeners.notify('onVacationChanged', vacationStart, vacationEnd)
self.__processDefencePeriodCallback()
def __fort_onFavoritesChanged(self, clanDBID):
self._listeners.notify('onFavoritesChanged', clanDBID)
def __fort_onEnemyClanCardReceived(self, card):
if self._publicInfoCache is not None:
self._publicInfoCache.storeSelectedClanCard(card)
self._listeners.notify('onEnemyClanCardReceived', card)
return
def __fort_onShutdownDowngrade(self):
self._listeners.notify('onShutdownDowngrade')
def __fort_onDefenceHourShutdown(self):
self._listeners.notify('onDefenceHourShutdown')
def __fortMgr_onFortUpdateReceived(self, isFullUpdate = False):
self.__refreshCooldowns(isFullUpdate)
def __fortMgr_onFortPublicInfoReceived(self, requestID, errorID, resultSet):
self._finder.response(requestID, errorID, resultSet)
self._listeners.notify('onFortPublicInfoReceived', bool(resultSet))
def __fort_onEmergencyRestore(self):
self.stopProcessing()
def __fort_onConsumablesChanged(self, unitMgrID):
self._listeners.notify('onConsumablesChanged', unitMgrID)
def createInitial():
return NoFortController()
def createByState(state, isLeader = False, exclude = None):
    controllers = [NoFortController,
     IntroController,
     FortController,
     CenterUnavailableController]
    if exclude:
        if exclude in controllers:
            controllers.remove(exclude)
        else:
            LOG_ERROR('Fort controller is not found', exclude)
            return None
    stateID = state.getStateID()
    for clazz in controllers:
if clazz.isNext(stateID, isLeader):
return clazz()
return None
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\shared\fortifications\controls.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.02.14 12:41:28 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
ca21b5945e98da6ee90e624bc7ccd1fd01b3f5bd | a43e6385ae9128ff5eb9850161f2015dcf1c95ac | /count_letter.py | 77f3b66e4f920fe4ed4313b8d6afcd1699d5eac8 | [] | no_license | Alymbekov/tasks_of_python_crash_course | 18289ac99e3751cdc691c15492630f67388737dd | e0a58b23b2b89ee01c40a96661e50a63cf1513ca | refs/heads/master | 2020-04-21T03:00:16.846204 | 2019-02-05T16:27:55 | 2019-02-05T16:27:55 | 169,271,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | for x in range(1,21):
    print('Counting %s' % x)
list_of_numbers = []
for x in range(1,1000001):
list_of_numbers.append(x)
print(max(list_of_numbers))
print(min(list_of_numbers))
print(sum(list_of_numbers))
for i in range(1,21,2):
print(i)
for x in range(1,21,3):
print(x)
| [
"[email protected]"
] | |
79fae613f8a13fe0d22c0c35588c39344289dbd1 | 0561900bf01598e6c453131952fe4f62b2f9c6e9 | /week1/1-IO-Simple-Problems/n_dice.py | 61db22c92341f0586f24b2693d212ecc08e2f9da | [] | no_license | Rositsazz/HackBulgaria-Programming0 | 497f40eefa373b024389c58e7d83aff3ffc547ac | 0590d7430ff0aadfb737593a04d3ab1eb894f8d3 | refs/heads/master | 2016-09-06T09:09:19.416648 | 2015-08-11T10:02:49 | 2015-08-11T10:02:49 | 30,868,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | from random import randint
number = input('Enter sides:')
number = int(number)
print('The dice rolled:')
rolled = randint(1, number)
print(rolled)
| [
"[email protected]"
] | |
2453c6123ee35125969e723e32bbc3ab9182653d | b00f7fe6fef51ac814baae7327c5e5fd13419d98 | /PyGameGameRefactor/data/Widgets/Button.py | 25a6bf1ebceec5821b2292cc45dfe48749e44026 | [] | no_license | JamesCollison/current_projects | 3cdfb304f6f3b65298a2245692e95b41e14bfbd6 | b2462eeaf13eb5a50c7c6c822a3819727c1f7118 | refs/heads/main | 2023-03-29T01:41:55.527547 | 2021-03-24T22:53:07 | 2021-03-24T22:53:07 | 351,235,704 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,586 | py | from .Widget import *
class Button(Widget):
def __init__(self, rect, command, **kwargs):
Widget.__init__(self) # TODO for later...
self.text = None # to make warning go away...
self.rect = pg.Rect(rect)
self.command = command
self.clicked = False
self.hovered = False
self.hover_text = None
self.clicked_text = None
self.process_kwargs(kwargs)
self.render_text()
def get_event(self, event):
if event.type == pg.MOUSEBUTTONDOWN and event.button == 1:
self.on_click(event)
elif event.type == pg.MOUSEBUTTONUP and event.button == 1:
self.on_release()
def update(self):
self.check_hover()
def draw(self, surface):
color = self.color
text = self.text
border_color = self.border_color
if not self.disabled:
if self.clicked and self.clicked_color:
color, text = self.get_clicked_color(text)
elif self.hovered:
border_color, color, text = self.get_hover_color(color, text)
else:
color = self.disabled_color
self.draw_button(surface, self.rect, border_color, color)
self.draw_text(surface, text)
def process_kwargs(self, kwargs):
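        # merge caller-supplied kwargs into the defaults below; unknown keys
        # raise AttributeError in update_dict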
settings = {
"font" : pg.font.Font(None, 16),
"text" : None,
"call_on_release" : True,
'disabled' : False,
"click_sound" : None,
"hover_sound" : None,
'border_width' : 2,
"color" : pg.Color('white'),
"font_color" : pg.Color("black"),
"hover_color" : None,
"hover_font_color" : None,
"clicked_color" : None,
"clicked_font_color" : None,
"border_color" : pg.Color("white"),
"border_hover_color" : pg.Color("white"),
"disabled_color" : pg.Color("grey"),
}
self.update_dict(kwargs, settings)
def update_dict(self, kwargs, settings):
for arg in kwargs:
if arg in settings:
settings[arg] = kwargs[arg]
else:
raise AttributeError(f"{self.__class__.__name__} has no keyword: {arg}")
self.__dict__.update(settings)
def render_text(self):
if self.text:
if self.hover_font_color:
color = self.hover_font_color
self.hover_text = self.font.render(self.text, True, color)
if self.clicked_font_color:
color = self.clicked_font_color
self.clicked_text = self.font.render(self.text, True, color)
self.text = self.font.render(self.text, True, self.font_color)
def on_click(self, event):
if self.rect.collidepoint(event.pos):
self.clicked = True
if not self.call_on_release:
self.command()
def on_release(self):
if self.clicked and self.call_on_release:
if self.rect.collidepoint(pg.mouse.get_pos()):
self.command()
self.clicked = False
def check_hover(self):
if self.rect.collidepoint(pg.mouse.get_pos()):
if not self.hovered:
self.hovered = True
if self.hover_sound:
self.hover_sound.play()
else:
self.hovered = False
def get_hover_color(self, color, text):
if self.hover_color:
color = self.hover_color
if self.hover_font_color:
text = self.hover_text
border_color = self.border_hover_color
return border_color, color, text
def get_clicked_color(self, text):
color = self.clicked_color
if self.clicked_font_color:
text = self.clicked_text
return color, text
def draw_text(self, surface, text):
if self.text:
text_rect = text.get_rect(center=self.rect.center)
surface.blit(text, text_rect)
def draw_button(self, surface, rect, border_color, inside):
button_surface = pg.Surface(rect.size).convert_alpha()
button_rect = pg.Rect(0, 0, *rect.size)
        button_surface.fill(border_color)
        button_rect.inflate_ip(-self.border_width, -self.border_width)
        button_surface.fill(inside, button_rect)
surface.blit(button_surface, rect)
| [
"[email protected]"
] | |
2fbd773bb02f8799eaaf4f2ac52581d52e45f694 | df92d6ad9c776320a597a574bd75f3aab2982de0 | /output/html5/deleted/users/fushman/cd978a337d71/Run_test_6c/out_ani.py | 571a72e2dfc680039afc833dc54cee04420f5237 | [] | no_license | TracyYXChen/SES_web | fa6eb41c5c18562508f9d1eb2d14c07f58f93b3c | 016dae4260c07398a9824e552b97c07ff6060f7d | refs/heads/master | 2020-04-28T18:53:07.640170 | 2019-03-13T20:21:36 | 2019-03-13T20:21:36 | 175,492,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | # axes plot script
from pymol.cgo import *
from pymol import cmd
from pymol.vfont import plain
#create the axes object, draw axes with cylinders colored red (Dx), green (Dy), blue (Dz)
obj = [
CYLINDER, 16.09, 0.40, 1.66, -14.52, -1.12, -1.52, 0.2, 1.0, 1.0, 1.0, 1.0, 0.0, 0.,
CYLINDER, 1.79, 4.15, -18.15, -1.32, -4.37, 15.87, 0.2, 1.0, 1.0, 1.0, 0., 1.0, 0.,
CYLINDER, 1.68, -20.91, -5.00, -1.61, 22.85, 5.66, 0.2, 1.0, 1.0, 1.0, 0., 0.0, 1.0,
]
# then we load it into PyMOL
cmd.load_cgo(obj,'out_ani')
| [
"[email protected]"
] | |
4c9daf4e8525bebebfa29428d85d4ccaa95340bc | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02911/s147801842.py | 38b45ab7d54781bdfa0d8f3c43ca05b08b88ac90 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | import numpy as np
N,K,Q = map(int, input().split())
n = [0]*N
for _ in range(Q):
n[int(input())-1] +=1
nn = np.array(n)
ans = K - Q + nn
for an in ans:
if an > 0:
print('Yes')
else:
print("No") | [
"[email protected]"
] | |
941655e62c7eee31262492505c51c5a69db54a1c | 61336264d1b3662d59f59a7a5b8ba69ea2aa6c19 | /src/util/drishti.py | 0b04ca1eb58c5eadb8bfa91ad0da06de8d8991e0 | [
"MIT"
] | permissive | dermatologist/drishti-plan | 6d8cbb34ea0c19cb5c1e35a54665feb3641ad338 | 35e8a493ffcb754efdef374a610257c2aee75e11 | refs/heads/develop | 2023-05-10T16:39:38.625918 | 2018-12-30T02:55:06 | 2018-12-30T02:55:06 | 155,443,971 | 1 | 0 | MIT | 2023-05-01T20:13:48 | 2018-10-30T19:25:48 | Python | UTF-8 | Python | false | false | 3,228 | py | import os
import fhirclient.models.bundle as bundle
import fhirclient.models.careplan as careplan
import fhirclient.models.codeableconcept as codeconcept
import fhirclient.models.fhirreference as reference
import fhirclient.models.identifier as identifier
import fhirclient.models.patient as patient
from fhirclient import client
class Drishti():
api_base = os.getenv('VUE_APP_omhOnFhirAPIBase', 'http://tomcat.nuchange.ca') + os.getenv('VUE_APP_omhOnFhirPath',
'/fhir')
settings = {
'app_id': 'drishti_plan',
'api_base': api_base
}
search = None
def __init__(self):
try:
self.smart = client.FHIRClient(settings=self.settings)
self.smart.prepare()
if not self.smart.ready:
                print("Smart Client Error")
        except Exception:
            print("Smart Client Error")
def get_bundle(self, uuid="d4970147-dff5-43e7-a7c8-a326f98874a6"):
self.search = bundle.Bundle.where(struct={"identifier": uuid})
return self.search.perform(self.smart.server)
def create_bundle(self, uuid="d4970147-dff5-43e7-a7c8-a326f98874a6"):
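        # assemble a document bundle holding a Patient and an active CarePlan;
        # a scheduled activity (code 6397004) is attached only while the
        # patient's summed step count stays below 8000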
b = bundle.Bundle()
p = patient.Patient()
c = careplan.CarePlan()
ca = careplan.CarePlanActivity()
cad = careplan.CarePlanActivityDetail()
concept = codeconcept.CodeableConcept()
concept.text = "6397004"
cad.code = concept
cad.status = 'scheduled'
ca.detail = cad
i = identifier.Identifier()
i.system = os.getenv('IDENTIFIER_SYSTEM', 'urn:system')
i.value = uuid
c.identifier = [i]
if self.get_steps(uuid) < 8000:
c.activity = [ca]
p.identifier = [i]
b.identifier = i
patient_dict = p.create(self.smart.server)
this_patient = patient.Patient.read(patient_dict['id'], self.smart.server)
patient_ref = reference.FHIRReference()
patient_ref.reference = u'Patient/' + patient_dict['id']
c.description = 'Drishti Plan'
c.status = 'active'
c.intent = 'plan'
c.subject = patient_ref
p_entry = bundle.BundleEntry()
p_entry.resource = p
c_entry = bundle.BundleEntry()
c_entry.resource = c
b.entry = [p_entry, c_entry]
b.type = "document"
b.create(self.smart.server)
def get_steps(self, uuid="d4970147-dff5-43e7-a7c8-a326f98874a6"):
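        # the bundle may wrap an inner bundle; unwrap one level, then sum the
        # step counts carried by each observation's first component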
b = self.get_bundle(uuid)
# Take the first bundle
resources = []
if b.entry is not None:
for entry in b.entry:
resources.append(entry.resource)
# Second iteration
steps = 0
if len(resources) > 1:
b = resources[0]
resources = []
if b.entry is not None:
for entry in b.entry:
resources.append(entry.resource)
for resource in resources:
try:
o = resource.component
steps += o[0].valueQuantity.value
            except Exception:
                pass  # this resource carries no step-count component
return steps
| [
"[email protected]"
] | |
507fbf29090596be9011733ac8e20583dd595808 | 6b60ddc66bf37de64a8171b505cb18660a3a54e0 | /manage.py | 27fcc9c0c3553fe440d0f7a44c04915081745316 | [] | no_license | RyoJ/DBcalc | 5a864420ad1e09d20c740dc30de755bf13b6c92c | be256ebadd9fbd2d714b6b86c3b6df3062b0b4bd | refs/heads/master | 2020-06-12T02:59:27.193323 | 2019-06-27T23:38:24 | 2019-06-27T23:38:24 | 194,175,923 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 539 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dentaku.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
fbad7f40d93d3c5b698af8539c64109640c6c720 | 6b82695358c309bd09da9153dbedf26720fa7dc6 | /2021/17.py | 8c9b69721eb351fd8c9f057d381518f75890eeac | [] | no_license | viliampucik/adventofcode | 0e7b4cca7d7aaed86bdc2b8c57d1056b4620e625 | e7e0ab44ace3cf762b796730e582ab222a45f7d0 | refs/heads/master | 2023-01-04T18:19:07.064653 | 2022-12-26T19:42:59 | 2022-12-26T19:42:59 | 226,700,744 | 33 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,616 | py | #!/usr/bin/env python
from collections import defaultdict
import re
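
# Reads one puzzle line from stdin, e.g. "target area: x=20..30, y=-10..-5".
# Part 1: a probe fired upward at dy = -ymin - 1 returns to y=0 with the same
# speed, so the peak height is the triangular number (ymin + 1) * ymin // 2.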
xmin, xmax, ymin, ymax = map(int, re.findall(r"-?\d+", input()))
print((ymin + 1) * ymin // 2)
# n = smallest initial dx worth trying: the probe's total horizontal reach is
# the triangular number dx*(dx+1)/2, so dx must satisfy dx*(dx+1)/2 >= xmin.
v, n = 0, int((xmin * 2) ** 0.5 - 1)

# Fast, precomputed steps version
dxs = defaultdict(set)
for dx_init in range(n, xmax + 1):
    x, dx, step = 0, dx_init, 0
    while x <= xmax and (dx == 0 and xmin <= x or dx != 0):
        x += dx
        # fmt:off
        if dx > 0: dx -= 1
        # fmt:on
        step += 1
        if xmin <= x <= xmax:
            dxs[dx_init].add(step)
            if dx == 0:
                dxs[dx_init] = min(dxs[dx_init])
                break
dys = defaultdict(set)
for dy_init in range(ymin, -ymin):
    y, dy, step = 0, dy_init, 0
    while ymin <= y:
        y += dy
        dy -= 1
        step += 1
        if ymin <= y <= ymax:
            dys[dy_init].add(step)
for xsteps in dxs.values():
    for ysteps in dys.values():
        if type(xsteps) is int:
            if xsteps <= max(ysteps):
                v += 1
        elif xsteps & ysteps:
            v += 1

# Slower, brute force version
#
# for dy_init in range(ymin, -ymin):
#     for dx_init in range(n, xmax + 1):
#         x, y, dx, dy = 0, 0, dx_init, dy_init
#         while x <= xmax and y >= ymin and (dx == 0 and xmin <= x or dx != 0):
#             x += dx
#             y += dy
#             # fmt:off
#             if dx > 0: dx -= 1
#             # fmt:on
#             dy -= 1
#             if xmin <= x <= xmax and ymin <= y <= ymax:
#                 v += 1
#                 break
print(v)
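# On the example target "x=20..30, y=-10..-5" this prints 45 and then 112.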
| [
"[email protected]"
] | |
9a9a23998e7101a9cf5bc48aa9e53961e5bb0c4b | f9821e7200c1be14a8d15fd148c35fe36c2c9f15 | /task3/map.py | f0949fbeabbe5da51d79458ed6be86a465f9beb3 | [] | no_license | fducau/BD_A_01 | b3ae7d137ebd28693fb9f633645d56bb80f32f81 | 7860a792aaecb96d2b389eac0558303e21dbcd21 | refs/heads/master | 2021-01-21T05:14:31.257829 | 2017-02-26T22:06:30 | 2017-02-26T22:06:30 | 83,157,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 915 | py | #!/usr/bin/python
# Call:
# hjs -D mapreduce.job.reduces=2 -files \
# /home/fnd212/BD/BD_A_01/task3 -mapper task3/map.py \
# -reducer task3/reduce.py -input \
# /user/ecc290/HW1data/open-violations.csv \
# -output /user/fnd212/task3/task3.out
import sys
import csv
o_header = ['summons_number', 'plate',
            'license_type',
            'county', 'state', 'prescint',
            'issuing_agency', 'violation',
            'violation_status', 'issue_date',
            'violation_time', 'judgment_entry_date',
            'amount_due', 'payment_amount',
            'penalty_amount', 'fine_amount',
            'interest_amount', 'reduction_amount']

# input comes from STDIN (the CSV stream Hadoop pipes to this mapper)
for entry in csv.reader(sys.stdin, delimiter=','):
    key = entry[o_header.index('license_type')]
    value = entry[o_header.index('amount_due')]
    print('{0}\t{1}'.format(key, value))
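
# The companion reducer (task3/reduce.py in the hjs call above) is expected to
# aggregate these tab-separated pairs, e.g. summing amount_due per license_type.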
| [
"[email protected]"
] | |
808c56f1953e3dff10b5545f5077b54d924b733f | b3ce05ef8d01f2a656639075cb7da987a19fa43b | /posts_app/migrations/0014_auto_20200621_2337.py | 2b9e51eadfd8d00d456a431eb28517c726fe4ccd | [] | no_license | Deltapimol/posts | 79113377fb13088256b0999dd2f6180f3aef214f | a4fed07af11a2916479c5372c41f702b338d772f | refs/heads/master | 2022-11-07T02:31:36.843042 | 2020-06-28T18:30:50 | 2020-06-28T18:30:50 | 268,334,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 544 | py | # Generated by Django 2.2.12 on 2020-06-21 18:07
from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ('posts_app', '0013_auto_20200621_1844'),
    ]

    operations = [
        migrations.RenameField(
            model_name='replytoreply',
            old_name='this_reply',
            new_name='reply_reply',
        ),
        migrations.RenameField(
            model_name='replytoreply',
            old_name='this_respondent',
            new_name='reply_respondent',
        ),
    ]
| [
"[email protected]"
] | |
37bbb126df8bb723e3a9447667d805ec0a4da682 | f434c6057ae15568f2c6c2971259a9eaee46d918 | /pulse_API_3.py | 99c74242fdb4e95c8e00d41b7ae7a5f2b0b9b146 | [] | no_license | jwatson-CO-edu/py_music_mgmt | 3542f5e93b223f7c314678c80230a86efeff61d9 | 1dd31fd5d07b2c15034b6f3cd27e512acecedd5a | refs/heads/master | 2021-07-14T09:02:20.381122 | 2021-01-29T01:11:49 | 2021-01-29T01:11:49 | 230,323,268 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,943 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ~~ Future First ~~
from __future__ import division # Future imports must be called before everything else, including triple-quote docs!
__progname__ = "pulse_API.py"
__version__ = "2019.05"
__desc__ = "Query the API to keep it from dying"
"""
James Watson , Template Version: 2019-05-12
Built on Wing 101 IDE for Python 2.7
Dependencies: numpy
"""
"""
~~~ Developmnent Plan ~~~
[ ] ITEM1
[ ] ITEM2
"""
# === Init Environment =====================================================================================================================
# ~~~ Prepare Paths ~~~
import sys, os.path
SOURCEDIR = os.path.dirname( os.path.abspath( __file__ ) ) # URL, dir containing source file: http://stackoverflow.com/a/7783326
PARENTDIR = os.path.dirname( SOURCEDIR )
# ~~ Path Utilities ~~
def prepend_dir_to_path( pathName ): sys.path.insert( 0 , pathName ) # Might need this to fetch a lib in a parent directory
from math import sqrt
# ~~ Local ~~
from API_session import Session , open_all_APIs , close_session  # close_session is assumed to live in API_session (it is called below)
from retrieve_yt import fetch_metadata_by_yt_video_ID
# ~~ Constants , Shortcuts , Aliases ~~
EPSILON = 1e-7
infty = 1e309 # URL: http://stackoverflow.com/questions/1628026/python-infinity-any-caveats#comment31860436_1628026
endl = os.linesep
sqt2 = sqrt(2)
# ~~ Script Signature ~~
def __prog_signature__(): return __progname__ + " , Version " + __version__ # Return a string representing program name and verions
# ___ End Init _____________________________________________________________________________________________________________________________
# === Main Program =========================================================================================================================
# === Program Classes ===
# ___ End Class ___
# === Program Functions ===
# __ End Func __
# === Program Vars ===
# ___ End Vars ___
# === Main Func ===
if __name__ == "__main__":
    print( __prog_signature__() )
    termArgs = sys.argv[1:] # Terminal arguments , if they exist
    # 1. Open the API
    sssn = Session()
    open_all_APIs( sssn )
    # 2. Query a video
    discard = fetch_metadata_by_yt_video_ID( sssn.youtube , sssn.METADATA_SPEC , "PG1R9OPofEg" )
    # 3. Close session
    if 0: # WARNING: Calling this MAY overwrite the current session file
        close_session( sssn )
# ___ End Main ___
# ___ End Program __________________________________________________________________________________________________________________________
# === Spare Parts ==========================================================================================================================
# ___ End Spare ____________________________________________________________________________________________________________________________
| [
"[email protected]"
] | |
6305cc21beb1ececb54b6951ddd244b010d33513 | 677002b757c0a1a00b450d9710a8ec6aeb9b9e9a | /tiago_public_ws/build/demo_motions/catkin_generated/generate_cached_setup.py | bf8cec67fb5fb8b4e4d0c897fb4496c08f278078 | [] | no_license | mrrocketraccoon/tiago_development | ce686c86459dbfe8623aa54cf4279021342887fb | a0539bdcf21b67ab902a4649b516dcb929c54042 | refs/heads/main | 2023-06-16T19:39:33.391293 | 2021-07-08T21:20:03 | 2021-07-08T21:20:03 | 384,249,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,306 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
import os
import stat
import sys
# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/melodic/share/catkin/cmake', 'catkinConfig.cmake.in')):
    sys.path.insert(0, os.path.join('/opt/ros/melodic/share/catkin/cmake', '..', 'python'))
try:
    from catkin.environment_cache import generate_environment_script
except ImportError:
    # search for catkin package in all workspaces and prepend to path
    for workspace in '/tiago_public_ws/devel;/opt/ros/melodic'.split(';'):
        python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
        if os.path.isdir(os.path.join(python_path, 'catkin')):
            sys.path.insert(0, python_path)
            break
    from catkin.environment_cache import generate_environment_script

code = generate_environment_script('/tiago_public_ws/devel/.private/demo_motions/env.sh')

output_filename = '/tiago_public_ws/build/demo_motions/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
    # print('Generate script for cached setup "%s"' % output_filename)
    f.write('\n'.join(code))

mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
| [
"[email protected]"
] | |
87f4145d63f887989e347080608661feabbb110a | 80d2edd9a88f43b8cac6db3dcff765eec70ae742 | /src/registration/contrib/autologin/tests.py | e0d4c246b0f7f97e935af61ac7af5d3ebfeff1ca | [] | no_license | lambdalisue/django-inspectional-registration | e66004e66cd3255af25706556ebc0b748122cf32 | 9a4adc6587d343f553f41e570508bc6f4ee0a5c5 | refs/heads/master | 2021-06-26T10:55:29.980298 | 2016-11-15T04:02:52 | 2016-11-15T04:02:52 | 3,525,101 | 28 | 22 | null | 2020-04-09T07:40:36 | 2012-02-23T12:23:56 | Python | UTF-8 | Python | false | false | 2,937 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
"""
__author__ = 'Alisue <[email protected]>'
from django.test import TestCase
from django.contrib.auth.models import AnonymousUser
from registration.backends.default import DefaultRegistrationBackend
from registration.tests.mock import mock_request
from registration.tests.compat import override_settings
@override_settings(
    ACCOUNT_ACTIVATION_DAYS=7,
    REGISTRATION_OPEN=True,
    REGISTRATION_SUPPLEMENT_CLASS=None,
    REGISTRATION_BACKEND_CLASS=(
        'registration.backends.default.DefaultRegistrationBackend'),
    REGISTRATION_AUTO_LOGIN=True,
    _REGISTRATION_AUTO_LOGIN_IN_TESTS=True,
)
class RegistrationAutoLoginTestCase(TestCase):
    backend = DefaultRegistrationBackend()
    mock_request = mock_request()

    def test_no_auto_login_with_setting(self):
        """Auto login should be able to be turned off with ``REGISTRATION_AUTO_LOGIN = False``"""
        self.mock_request.user = AnonymousUser()
        with override_settings(REGISTRATION_AUTO_LOGIN=False):
            new_user = self.backend.register(
                'bob', '[email protected]', request=self.mock_request,
            )
            self.backend.accept(
                new_user.registration_profile, request=self.mock_request,
            )
            self.backend.activate(
                new_user.registration_profile.activation_key,
                password='password', request=self.mock_request,
            )
            self.failIf(self.mock_request.user.is_authenticated())

    def test_no_auto_login_with_no_password(self):
        """Auto login should not occur with no password
        (programmatically activated by a Django Admin action)
        """
        self.mock_request.user = AnonymousUser()
        new_user = self.backend.register(
            'bob', '[email protected]', request=self.mock_request,
        )
        self.backend.accept(
            new_user.registration_profile, request=self.mock_request,
        )
        self.backend.activate(
            new_user.registration_profile.activation_key,
            request=self.mock_request,
        )
        self.failIf(self.mock_request.user.is_authenticated())

    def test_auto_login(self):
        """Whether the auto login feature works correctly"""
        self.mock_request.user = AnonymousUser()
        new_user = self.backend.register(
            'bob', '[email protected]', request=self.mock_request,
        )
        self.backend.accept(
            new_user.registration_profile, request=self.mock_request,
        )
        self.backend.activate(
            new_user.registration_profile.activation_key,
            password='password', request=self.mock_request,
        )
        self.failUnless(self.mock_request.user.is_authenticated())
"[email protected]"
] | |
41c72df5a5313f6f791caee351821355ddf7853f | 59f16317714656f40f4ef2b5dbaf84e8a42316e9 | /get_clear_sky_days.py | 5982604115bb80e38d1d63b5328e2addb423a152 | [] | no_license | mssamhan31/CANVAS | b6820464dc3ce9ac60af1df63a782c50cd84e152 | 71e6e04e39cbb2ae222f84e8e088375589ab97ed | refs/heads/main | 2023-05-17T10:33:20.689432 | 2021-06-07T07:26:28 | 2021-06-07T07:26:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,445 | py | # Useful functions
# Import required things
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
# Get clear sky days
# def get_clear_sky_days()
# Get first month of data
# df = pd.read_csv('F:/irradiance_data/1_min_solar/adelaide/sl_023034_2020_01.txt')
# 2019 data
df = pd.read_csv('F:/irradiance_data/1_min_solar/adelaide/sl_023034_2019_01.txt')
# List the remaining months (avoid shadowing the built-in `list`)
# months = ['02','03','04','05','06','07']
# 2019
months = ['02','03','04','05','06','07', '08', '09', '10', '11', '12']
# Concat on rest of months
for i in months:
    # df_temp = pd.read_csv('F:/irradiance_data/1_min_solar/adelaide/sl_023034_2020_'+i+'.txt')
    # 2019
    df_temp = pd.read_csv('F:/irradiance_data/1_min_solar/adelaide/sl_023034_2019_'+i+'.txt')
    df = pd.concat([df, df_temp])
# Make sure numeric
df['min_1s_global_irr'] = pd.to_numeric(df["Minimum 1 second global irradiance (over 1 minute) in W/sq m"], errors='coerce')
df['min_1s_global_irr'] = df['min_1s_global_irr'].fillna(0)
# Also get mean for plotting
df['mean_1s_global_irr'] = pd.to_numeric(df["Mean global irradiance (over 1 minute) in W/sq m"], errors='coerce')
# df['mean_1s_global_irr'] = df['mean_1s_global_irr'].fillna(0)
# Get change in irradiance for "Minimum 1 second global irradiance (over 1 minute) in W/sq m"
df["min_1s_global_irr_SHIFTED"] = df["min_1s_global_irr"].shift(periods=-1)
# Set fill value to 0 (from nan) since fill_value arg is giving an error.
df["min_1s_global_irr_SHIFTED"] = df["min_1s_global_irr_SHIFTED"].fillna(0)
# Calculate change in irradiance
df['change_in_irr'] = (df['min_1s_global_irr'] - df["min_1s_global_irr_SHIFTED"])/df['min_1s_global_irr']
df['abs_change_in_irr'] = df['change_in_irr'].abs()
# Get times
df['year'] = pd.to_numeric(df["Year Month Day Hours Minutes in YYYY"], errors='coerce')
df['month'] = pd.to_numeric(df["MM"], errors='coerce')
df['day'] = pd.to_numeric(df["DD"], errors='coerce')
df['hour'] = pd.to_numeric(df["HH24"], errors='coerce')
df['minute'] = pd.to_numeric(df["MI format in Local standard time"], errors='coerce')
df['date'] = pd.to_datetime(df[["year","month","day"]])
df['date_time'] = pd.to_datetime(df[["year","month","day","hour","minute"]])
# Set index to date_time and filter VERY ROUGHLY for solar hours
# TODO would be great to instead find start and end pts for ecah day and filter for about an hour 'inside' this range,
# see prevoius attempts on 23 March 2021.
# OR try using Astral package to get sunrise/set times: https://astral.readthedocs.io/en/latest/index.html
df = df.set_index('date_time')
df_approx_solar_hours = df.between_time('07:00', '17:00')
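
# Alternative to the fixed 07:00-17:00 window (per the TODO above): a sketch
# using the Astral package to derive true sunrise/sunset times (untested here;
# the coordinates are approximate for Adelaide):
# from astral import LocationInfo
# from astral.sun import sun
# adelaide = LocationInfo("Adelaide", "Australia", "Australia/Adelaide", -34.93, 138.60)
# s = sun(adelaide.observer, date=df['date'].iloc[0])  # s['sunrise'], s['sunset']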
# ------------------------------------- Factor 1: max absolute variation in irradiance per day
# Get max absolute variation on each date.
df_max_abs_change_in_irr = pd.DataFrame(df_approx_solar_hours.groupby('date')['abs_change_in_irr'].max())
# Identify possible clear sky days
df_clear_sky_days = df_max_abs_change_in_irr[df_max_abs_change_in_irr['abs_change_in_irr']<=0.15]
print(len(df_clear_sky_days))
clear_sky_day_list = df_clear_sky_days.index.tolist()
df_clear_sky_days = df[df['date'].isin(clear_sky_day_list)]
# ------------------------------------- Factor 2: 95th and 99th percentile of change per day
# NOTE does NOT need to use the approx solar hours
df_temp_2 = df[['date','abs_change_in_irr']]
df_95th_percentile_abs_change_in_irr = pd.DataFrame(df_temp_2.groupby('date').quantile(0.95))
df_95th_percentile_abs_change_in_irr = df_95th_percentile_abs_change_in_irr.rename(columns={'abs_change_in_irr' : '95th percentile'})
df_99th_percentile_abs_change_in_irr = pd.DataFrame(df_temp_2.groupby('date').quantile(0.99))
df_99th_percentile_abs_change_in_irr = df_99th_percentile_abs_change_in_irr.rename(columns={'abs_change_in_irr' : '99th percentile'})
# Identify possible clear sky days
df_clear_sky_days_method_2 = df_95th_percentile_abs_change_in_irr[df_95th_percentile_abs_change_in_irr['95th percentile']<=0.1]
print(len(df_clear_sky_days_method_2))
# ------------------------------------- Factor 3: Total energy
# TODO access online at:
# ------------------------------------- Export by date
df_export = pd.concat([df_max_abs_change_in_irr, df_95th_percentile_abs_change_in_irr, df_99th_percentile_abs_change_in_irr], axis=1)
# df_export.to_csv('F:/irradiance_data/1_min_solar/adelaide/2020_adelaide_irradiance_summary_statistics_for_clear_sky_day_analysis.csv')
# 2019
df_export.to_csv('F:/irradiance_data/1_min_solar/adelaide/2019_adelaide_irradiance_summary_statistics_for_clear_sky_day_analysis.csv')
# ------------------------------------- Plotting
# Plot the min irradiance
fig, ax = plt.subplots()
ax.plot(df['min_1s_global_irr'], c='purple', label='Minimum 1s global irradiance over 1min (W/sq m)')
ax.plot(df['mean_1s_global_irr'], c='blue', label='Mean global irradiance over 1min (W/sq m)')
ax1 = ax.twinx()
ax1.plot(df['change_in_irr'])
ax.legend(loc='upper right')
plt.show()
# Plot the min irradiance for approx solar hours
fig, ax = plt.subplots()
ax.plot(df_clear_sky_days['min_1s_global_irr'])
ax1 = ax.twinx()
ax1.plot(df_clear_sky_days['change_in_irr'])
plt.show()
# Plot the min irradiance for clear sky days
fig, ax = plt.subplots()
ax.plot(df_approx_solar_hours['min_1s_global_irr'])
ax1 = ax.twinx()
ax1.plot(df_approx_solar_hours['change_in_irr'])
plt.show()
# Plot the manually identified clear sky days
# NOTE: df['date'] holds datetime64 values, so the day strings must be parsed
# (day-first) before .isin() can match anything; also note these are 2020
# dates, so load the 2020 files above when using this list.
clear_sky_manual_check = pd.to_datetime(pd.Series(
    ['1/01/2020','2/01/2020','6/01/2020','7/01/2020','8/01/2020','12/01/2020','13/01/2020',
     '14/01/2020','29/01/2020','30/01/2020','4/02/2020','5/02/2020','6/02/2020','10/02/2020',
     '11/02/2020','22/02/2020','9/03/2020','15/03/2020','25/03/2020','26/03/2020','9/04/2020',
     '13/04/2020','24/04/2020','15/05/2020','16/05/2020','17/05/2020','29/05/2020','5/06/2020',
     '10/06/2020','28/06/2020','29/06/2020','15/07/2020','16/07/2020','17/07/2020','23/07/2020',
     '25/07/2020']), dayfirst=True)
# Filter
df_clear_sky_days_manual = df[df['date'].isin(clear_sky_manual_check)]
# Plot
fig, ax = plt.subplots()
ax.plot(df_clear_sky_days_manual['min_1s_global_irr'], c='purple')
ax.plot(df_clear_sky_days_manual['mean_1s_global_irr'], c='blue')
ax1 = ax.twinx()
ax1.plot(df_clear_sky_days_manual['change_in_irr'])
plt.show()

| [
"[email protected]"
] | |
0fce58ebe41868e1449864353d86af15ba8d3c30 | a4d0be7303a3cf40ef83e912ec5d8ed06ac55eb2 | /2020_10_01_tictactoe_AI.py | acf4e79bb1956e505dd927448eddc1f1eeea5959 | [] | no_license | wezmiolem/TicTacToe-AI | 2c9e00826436f527abc0a270f5bc955afc9963c9 | f450a04edde4b3a6acde7fac8e1bf3dce03879e5 | refs/heads/main | 2023-08-04T23:35:55.983231 | 2021-09-14T08:27:46 | 2021-09-14T08:27:46 | 406,285,431 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,689 | py | import os
init_board = [['.','.','.'],['.','.','.'],['.','.','.']]
ABC = ["A","B","C"]
A123 = ["1","2","3"]
a_len_init_board = len(init_board)
def game_board():
    os.system('cls' if os.name == 'nt' else 'clear')
    print(" ", " ".join(A123))
    for i in range(a_len_init_board):
        print(ABC[i], ' | '.join(init_board[i]))
        if i < 2:
            print(" ---+---+---")
    print()

def assign(row, column):
    row = row.upper()
    if row in ABC:
        if column in A123:
            return (ABC.index(row), A123.index(column))
        else:
            return False
    else:
        return False

def mark_the_spot(spot, mark):
    a = int(spot[0])
    b = int(spot[1])
    init_board[a][b] = mark

def win_set():
    # Collect every winning line (rows, columns, diagonals) as a set of marks.
    lines = []
    for i in init_board:
        lines.append(set(i))
    for i in range(0, 3):
        col = [init_board[0][i], init_board[1][i], init_board[2][i]]
        lines.append(set(col))
    diag1 = []
    for i in range(0, 3):
        diag1.append(init_board[i][i])
    lines.append(set(diag1))
    diag2 = []
    for i in range(0, 3):
        diag2.append(init_board[i][2-i])
    lines.append(set(diag2))
    return lines

def check_for_winners(win_set):
    x = set("X")
    o = set("O")
    if x in win_set:
        return True
    elif o in win_set:
        return True
    else:
        return False
def check_if_full():
    symbols = []
    for i in range(0, 3):
        for j in range(0, 3):
            symbols.append(init_board[i][j])
    if "." in symbols:
        return False
    else:
        return True

def moves_available(init_board):
    avail_moves = []
    for i in range(0, 3):
        for j in range(0, 3):
            if init_board[i][j] == ".":
                avail_moves.append([i, j])
    return avail_moves

def best_move(mark):
    avail_moves = moves_available(init_board)
    if mark == "X":
        best_score = -100
        for move in avail_moves:
            mark_the_spot(move, mark)
            score = minimax(init_board, 0, False)
            mark_the_spot(move, ".")
            if score > best_score:
                best_score = score
                chosen_move = move
        return chosen_move
    elif mark == "O":
        best_score = 100
        for move in avail_moves:
            mark_the_spot(move, mark)
            score = minimax(init_board, 0, True)
            mark_the_spot(move, ".")
            if score < best_score:
                best_score = score
                chosen_move = move
        return chosen_move
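
# Scoring convention for minimax below: a board where the previous move just
# completed a line scores -10 when it is the maximiser's (X's) turn, +10 when
# it is the minimiser's (O's) turn, and a full board scores 0. The depth term
# (-depth / +depth) makes the AI prefer faster wins and slower losses.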
def minimax(init_board, depth, is_maxin):
    a = win_set()
    avail_moves = moves_available(init_board)
    if is_maxin == True:
        if check_for_winners(a) == True:
            return -10
        elif check_if_full() == True:
            return 0
        else:
            best_score = -100
            mark = "X"
            for move in avail_moves:
                mark_the_spot(move, mark)
                score = minimax(init_board, depth+1, False)
                mark_the_spot(move, ".")
                best_score = max(score, best_score)
            return best_score - depth
    if is_maxin == False:
        if check_for_winners(a) == True:
            return 10
        elif check_if_full() == True:
            return 0
        else:
            best_score = 100
            mark = "O"
            for move in avail_moves:
                mark_the_spot(move, mark)
                score = minimax(init_board, depth+1, True)
                mark_the_spot(move, ".")
                best_score = min(score, best_score)
            return best_score + depth
def game_play():
    counter = 1
    in_list = []
    while True:
        if counter % 2 == 1:
            mark = "X"
        else:
            mark = "O"
        game_board()
        print(f'{mark} player turn!!\n')
        coords = input("your move: e.g A1 or B2 \n")
        if len(coords) < 2:
            continue
        elif assign(coords[0], coords[1]) == False:
            continue
        place = assign(coords[0], coords[1])
        if place in in_list:
            print("Already chosen")
            continue
        else:
            in_list.append(place)
            mark_the_spot(place, mark)
            a = win_set()
            counter += 1
            if check_for_winners(a) == True:
                game_board()
                print(f'Congratulations, {mark} won!\n')
                break
            elif check_if_full() == True:
                game_board()
                print('Unfortunately it is a draw')
                break
            else:
                continue

def single_player():
    counter = 0
    in_list = []
    while True:
        if counter % 2 == 0:
            mark = "X"
            game_board()
            print(f'{mark} player turn!!\n')
            coords = input("your move: e.g A1 or B2 \n")
            if len(coords) < 2:
                continue
            elif assign(coords[0], coords[1]) == False:
                continue
            place = assign(coords[0], coords[1])
            if place in in_list:
                print("Already chosen")
                continue
            else:
                in_list.append(place)
                mark_the_spot(place, mark)
                a = win_set()
                counter += 1
                if check_for_winners(a) == True:
                    game_board()
                    print(f'Congratulations, {mark} won!\n')
                    break
                elif check_if_full() == True:
                    game_board()
                    print('Unfortunately it is a draw')
                    break
                else:
                    continue
        else:
            mark = "O"
            choice = best_move(mark)
            mark_the_spot(choice, mark)
            in_list.append(tuple(choice))
            a = win_set()
            counter += 1
            if check_for_winners(a) == True:
                game_board()
                print('Unfortunately, you lost')
                break
            elif check_if_full() == True:
                game_board()
                print("It's a tie")
                break
            else:
                continue

def main():
    a = input('''choose:
1 for singleplayer
2 for multiplayer\n''')
    if a == "1":
        single_player()
    elif a == "2":
        game_play()

if __name__ == "__main__":
    main()

| [
"[email protected]"
] | |
bb3e9bf5343bd336e788fc1b9d8d633f5f747b2b | dd3229c9a5ccf936e72d4e624e07ffdaf821cf5b | /tests/unit/test_table_creation.py | c17adcba9b8d32aa5cc31d590e6c06ef25109bd1 | [] | no_license | AndriiShchur/Github-Repository-Analysis | ba9aee94d6031c2875d4aa20501fb4a9e9b2724e | 270f4257832ed50a62fdb19d58f94ca31caedc22 | refs/heads/main | 2023-02-08T10:34:39.318288 | 2020-12-23T09:01:30 | 2020-12-23T09:01:30 | 323,592,569 | 0 | 0 | null | 2020-12-23T09:01:31 | 2020-12-22T10:23:24 | Python | UTF-8 | Python | false | false | 2,211 | py | import pytest
import getpass
import pyodbc
import pandas as pd
from dotenv import load_dotenv
import os
import math
print("Load env variables")
project_folder = os.getcwd() # adjust as appropriate
load_dotenv(os.path.join(project_folder, 'env.dist'))
server = os.getenv("SERVER")
database = os.getenv("DB_NAME_TEST")
username = os.getenv("USER")
password = os.getenv("PASSWORD")
print ("Connecting via ODBC")
cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};SERVER='+server+';DATABASE='+database+';UID='+username+';PWD='+ password)
cursor = cnxn.cursor()
print ("Connected!\n")
query = ("SELECT DISTINCT TABLE_NAME FROM INFORMATION_SCHEMA.COLUMNS")
tables_df = pd.read_sql(query,con=cnxn)
tables = ["RepoMain", "PRMain", "PRFiles", "RepoAnalytics"]
@pytest.mark.unittesttableexc
def test_tables_exc():
    for table in tables:
        assert table in tables_df.TABLE_NAME.values
query = ("SELECT TABLE_NAME, COLUMN_NAME, IS_NULLABLE, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH \
FROM INFORMATION_SCHEMA.COLUMNS \
WHERE TABLE_NAME NOT IN ('database_firewall_rules','sysdiagrams')")
tables_df = pd.read_sql(query,con=cnxn)
tables_str = pd.read_csv(os.path.join(os.getcwd(), "scr\\tables_str\\tables_str.csv"))
@pytest.mark.unittesttablestr
def test_tables_str():
    for table in tables:
        tmp_df1 = tables_df[tables_df.TABLE_NAME == table]
        tmp_str1 = tables_str[tables_str.TABLE_NAME == table]
        for column in tmp_str1.COLUMN_NAME:
            assert column in tmp_df1.COLUMN_NAME.values
            tmp_df2 = tmp_df1[tmp_df1.COLUMN_NAME == column]
            tmp_str2 = tmp_str1[tmp_str1.COLUMN_NAME == column]
            assert tmp_str2.IS_NULLABLE.values == tmp_df2.IS_NULLABLE.values
            assert tmp_str2.DATA_TYPE.values == tmp_df2.DATA_TYPE.values
            # NaN max-lengths on both sides mean unbounded types, which match
            # by definition; otherwise the lengths must agree exactly.
            if not (math.isnan(tmp_str2.CHARACTER_MAXIMUM_LENGTH.values[0]) and
                    math.isnan(tmp_df2.CHARACTER_MAXIMUM_LENGTH.values[0])):
                assert tmp_str2.CHARACTER_MAXIMUM_LENGTH.values == tmp_df2.CHARACTER_MAXIMUM_LENGTH.values
| [
"[email protected]"
] | |
b3acc2c2f86299eaca7b23f8d1a00c35a3b61d33 | 252c2fee3bc8b4dd092fc3203bb9f76c51c8e4f1 | /tests/testData/visualTest.py | 1111b3544bede89f6d38fecc277a86838454ec56 | [
"MIT"
] | permissive | typemytype/booleanOperations | d9711e212c99921c23900c6fd0f18d8520100b61 | 25f709175b471508a169be790778b80ec328da7d | refs/heads/master | 2022-11-14T22:20:31.913043 | 2022-06-05T16:40:59 | 2022-06-05T16:40:59 | 14,314,043 | 34 | 23 | MIT | 2022-10-24T08:03:32 | 2013-11-11T21:35:22 | Python | UTF-8 | Python | false | false | 2,005 | py | # run in DrawBot RoboFont extension
border = 20
dotSize = 10
offDotSize = dotSize * .5
try:
CurrentFont
except NameError:
class CurrentFont(dict):
glyphOrder = []
def save(self, path=None):
pass
try:
saveImage
except NameError:
def saveImage(*args, **kwargs):
pass
f = CurrentFont()
def drawOffCurve(anchor, off):
x, y = anchor
offx, offy = off
if offx or offy:
offx += x
offy += y
with savedState():
stroke(1, 0, 0)
fill(1, 0, 0)
line((x, y), (offx, offy))
oval(offx - offDotSize, offy - offDotSize, offDotSize * 2, offDotSize * 2)
def drawGlyphWithPoints(glyph):
fill(0, .1)
stroke(0)
drawGlyph(glyph)
stroke(None)
for contour in glyph:
fill(0, 1, 0)
for point in contour.bPoints:
x, y = point.anchor
drawOffCurve((x, y), point.bcpIn)
drawOffCurve((x, y), point.bcpOut)
oval(x - dotSize, y - dotSize, dotSize * 2, dotSize * 2)
fill(1, 0, 0)
for glyphName in f.glyphOrder:
if glyphName not in f:
continue
g = f[glyphName]
bounds = g.bounds
if not bounds:
continue
minx, miny, maxx, maxy = bounds
w = maxx - minx
h = maxy - miny
layerCount = len(f.layers)
newPage((w + border) * layerCount + border, h + border * 2 + 100)
translate(border, border + 100)
translate(-minx, -miny)
fontSize(20)
stroke()
text("%s" % g.name, (w * .5, -100 + miny), align="center")
drawGlyphWithPoints(g)
translate(w + border, 0)
for layer in f.layers:
if layer.name == "foreground":
continue
fill(0)
text(layer.name, (w * .5, -100 + miny), align="center")
if g.name not in layer:
translate(w + border)
continue
lg = layer[g.name]
drawGlyphWithPoints(lg)
translate(w + border)
saveImage("visualTest.pdf") | [
"[email protected]"
] | |
ee87e6aa5b95880e017fd14693c6070a0ad7a835 | bca5c9f713629f478512735eb7696982fe75b7f8 | /jacobian.py | eb6c944e52faea78da5ba28e18488246fbdfa27b | [] | no_license | karabutov/robot-3RRR | e994e7e47ee34d05546ece50b077edbd1c445366 | 6cbef2fd9b30b4c1ee554bd2ee54a99bbfda8799 | refs/heads/master | 2021-04-06T20:36:19.428472 | 2019-05-12T20:45:53 | 2019-05-12T20:45:53 | 125,269,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,737 | py | import math
import numpy as np
import scipy.optimize as so
size_side = 1.
k = (2./3.) * (size_side * math.sqrt(3.) / 2.)
l = 1.
a_phi = 0.
gamma1 = math.pi * (0.5 + 1. * 2./3.)
gamma2 = math.pi * (0.5 + 2. * 2./3.)
gamma3 = math.pi * (0.5 + 3. * 2./3.)
x1 = 0.0
y1 = 0.0
x2 = 3.4
y2 = 0.33
x3 = 2.0
y3 = 3.0
def jacobian_matrix(x, y, phi, theta1, theta2, theta3):
    def dx1(x, y, phi, theta1, theta2, theta3):
        xB1 = x1 + l * math.cos(theta1)
        return 2*(x + k*math.cos(gamma1+phi) - xB1)
    def dy1(x, y, phi, theta1, theta2, theta3):
        yB1 = y1 + l * math.sin(theta1)
        return 2*(y + k*math.sin(gamma1+phi) - yB1)
    def dphi1(x, y, phi, theta1, theta2, theta3):
        xB1 = x1 + l * math.cos(theta1)
        yB1 = y1 + l * math.sin(theta1)
        return 2*k*math.cos(gamma1+phi) * (y + k*math.sin(gamma1+phi) - yB1) - 2*k*math.sin(gamma1+phi) * (x + k*math.cos(gamma1+phi) - xB1)
    def dx2(x, y, phi, theta1, theta2, theta3):
        xB2 = x2 + l * math.cos(theta2)
        return 2*(x + k*math.cos(gamma2+phi) - xB2)
    def dy2(x, y, phi, theta1, theta2, theta3):
        yB2 = y2 + l * math.sin(theta2)
        return 2*(y + k*math.sin(gamma2+phi) - yB2)
    def dphi2(x, y, phi, theta1, theta2, theta3):
        xB2 = x2 + l * math.cos(theta2)
        yB2 = y2 + l * math.sin(theta2)
        return 2*k*math.cos(gamma2+phi) * (y + k*math.sin(gamma2+phi) - yB2) - 2*k*math.sin(gamma2+phi) * (x + k*math.cos(gamma2+phi) - xB2)
    def dx3(x, y, phi, theta1, theta2, theta3):
        xB3 = x3 + l * math.cos(theta3)
        return 2*(x + k*math.cos(gamma3+phi) - xB3)
    def dy3(x, y, phi, theta1, theta2, theta3):
        yB3 = y3 + l * math.sin(theta3)
        return 2*(y + k*math.sin(gamma3+phi) - yB3)
    def dphi3(x, y, phi, theta1, theta2, theta3):
        xB3 = x3 + l * math.cos(theta3)
        yB3 = y3 + l * math.sin(theta3)
        return 2*k*math.cos(gamma3+phi) * (y + k*math.sin(gamma3+phi) - yB3) - 2*k*math.sin(gamma3+phi) * (x + k*math.cos(gamma3+phi) - xB3)
    return np.array([[dx1(x, y, phi, theta1, theta2, theta3), dy1(x, y, phi, theta1, theta2, theta3), dphi1(x, y, phi, theta1, theta2, theta3)],
                     [dx2(x, y, phi, theta1, theta2, theta3), dy2(x, y, phi, theta1, theta2, theta3), dphi2(x, y, phi, theta1, theta2, theta3)],
                     [dx3(x, y, phi, theta1, theta2, theta3), dy3(x, y, phi, theta1, theta2, theta3), dphi3(x, y, phi, theta1, theta2, theta3)]])
def constr1(t):
    x = t[0]
    y = t[1]
    phi = t[2]
    theta1 = t[3]
    xB1 = x1 + l * math.cos(theta1)
    yB1 = y1 + l * math.sin(theta1)
    return (x + k * math.cos(gamma1 + phi) - xB1)**2 + (y + k * math.sin(gamma1 + phi) - yB1)**2 - l**2

def constr2(t):
    x = t[0]
    y = t[1]
    phi = t[2]
    theta2 = t[4]  # theta2 sits at index 4 of the optimisation vector (cf. det_jacobian_func)
    xB2 = x2 + l * math.cos(theta2)
    yB2 = y2 + l * math.sin(theta2)
    return (x + k * math.cos(gamma2 + phi) - xB2)**2 + (y + k * math.sin(gamma2 + phi) - yB2)**2 - l**2

def constr3(t):
    x = t[0]
    y = t[1]
    phi = t[2]
    theta3 = t[5]  # theta3 sits at index 5 of the optimisation vector (cf. det_jacobian_func)
    xB3 = x3 + l * math.cos(theta3)
    yB3 = y3 + l * math.sin(theta3)
    return (x + k * math.cos(gamma3 + phi) - xB3)**2 + (y + k * math.sin(gamma3 + phi) - yB3)**2 - l**2

def det_jacobian_func(args):
    x = args[0]
    y = args[1]
    phi = args[2]
    theta1 = args[3]
    theta2 = args[4]
    theta3 = args[5]
    j = np.array(jacobian_matrix(x, y, phi, theta1, theta2, theta3))
    return j[0][0] * j[1][1] * j[2][2] + j[0][2] * j[1][0] * j[2][1] + j[2][0] * j[0][1] * j[1][2] - j[0][2] * j[1][1] * j[2][0] - j[0][0] * j[2][1] * j[1][2] - j[2][2] * j[1][0] * j[0][1]

def minus_det_jacobian_func(args):
    return -1 * det_jacobian_func(args)
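
# is_jacobian_deg scans a rectangular workspace region aria = ((x_min, x_max),
# (y_min, y_max)): if det(J) attains both a non-negative maximum and a
# non-positive minimum over the constrained set, the determinant crosses zero
# there, i.e. the region contains a singular configuration of the 3-RRR robot.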
def is_jacobian_deg(aria):
    args_b = np.array([aria[0][0], aria[1][0], 0., 0., 0., 0.])
    args_e = np.array([aria[0][1], aria[1][1], 2 * math.pi, 2 * math.pi, 2 * math.pi, 2 * math.pi])
    bnd = ((args_b[0], args_e[0]), (args_b[1], args_e[1]), (args_b[2], args_e[2]), (args_b[3], args_e[3]), (args_b[4], args_e[4]), (args_b[5], args_e[5]))
    cons = ({'type': 'eq', 'fun': constr1},
            {'type': 'eq', 'fun': constr2},
            {'type': 'eq', 'fun': constr3})
    min_solut = so.minimize(det_jacobian_func, (args_e + args_b) / 2., method="SLSQP", bounds=bnd, constraints=cons)
    max_solut = so.minimize(minus_det_jacobian_func, (args_e + args_b) / 2., method="SLSQP", bounds=bnd, constraints=cons)
    max_of_f = -max_solut.fun
    min_of_f = min_solut.fun
    if max_of_f >= 0. and min_of_f <= 0.:
        return True
    return False

| [
"[email protected]"
] | |
c7236d2392d3e52ec8473bc9b970f3634cb1872a | a408222d976dde673e941e14a53e7ecd1ec1b974 | /test/test_client_async.py | 4047ce2154fb05419f35415f728afa63fff8e772 | [
"MIT"
] | permissive | yangliping-ql/python-snap7 | 12686c02e46b932aec8230ac10f72076a067bd7b | 0731267a025c6ac7a7b02ba574ddcb132a52e925 | refs/heads/master | 2023-01-04T04:01:17.107334 | 2020-10-28T08:56:12 | 2020-10-28T08:56:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,214 | py | import logging
import time
import unittest
from multiprocessing import Process
from os import kill
import snap7
from snap7.server import mainloop
logging.basicConfig(level=logging.WARNING)
ip = '127.0.0.1'
tcpport = 1102
db_number = 1
rack = 1
slot = 1
class TestClient(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.process = Process(target=mainloop)
cls.process.start()
time.sleep(2) # wait for server to start
    @classmethod
    def tearDownClass(cls):
        # setUpClass stores the server in cls.process; signal it via its pid.
        kill(cls.process.pid, 1)
def setUp(self):
self.client = snap7.client.Client()
self.client.connect(ip, rack, slot, tcpport)
def tearDown(self):
self.client.disconnect()
self.client.destroy()
@unittest.skip("TODO: RuntimeWarning: coroutine 'TestClient.test_as_db_read' was never awaited")
async def test_as_db_read(self):
size = 40
start = 0
db = 1
data = bytearray(40)
self.client.db_write(db_number=db, start=start, data=data)
result = await self.client.as_db_read(db_number=db, start=start, size=size)
self.assertEqual(data, result)
@unittest.skip("TODO: RuntimeWarning: coroutine 'TestClient.test_as_db_write' was never awaited")
async def test_as_db_write(self):
size = 40
data = bytearray(size)
check = await self.client.as_db_write(db_number=1, start=0, data=data)
self.assertEqual(check, 0)
@unittest.skip("TODO: crash client: FATAL: exception not rethrown")
async def test_as_ab_read(self):
start = 1
size = 1
await self.client.as_ab_read(start=start, size=size)
@unittest.skip("TODO: not yet fully implemented")
async def test_as_ab_write(self):
start = 1
size = 10
data = bytearray(size)
await self.client.as_ab_write(start=start, data=data)
async def test_as_db_get(self):
await self.client.db_get(db_number=db_number)
@unittest.skip("TODO: RuntimeWarning: coroutine 'TestClient.test_as_download' was never awaited")
async def test_as_download(self):
data = bytearray(128)
await self.client.as_download(block_num=-1, data=data)
| [
"[email protected]"
] | |
1668a7aae7090c8534830f2e645e2471e2e7873f | b06c6847089ff73dcf1e6094b048bc312022a156 | /utility.py | 0261478127e3fe477868be94cb907e28add1b6c4 | [] | no_license | yqli-ds/Recurrent-DirBN | 743fb93b8a30dc713a76ad1b6f181eb8e80889ab | 68f89d200230f4ab443a4c41249487c0337cd18d | refs/heads/main | 2023-04-08T16:57:07.360221 | 2021-04-11T06:30:11 | 2021-04-11T06:30:11 | 356,785,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,360 | py | import numpy as np
import scipy
import copy
import scipy.io as scio
from sklearn.metrics import mean_squared_error
from math import sqrt
from scipy.stats import poisson, norm, gamma, dirichlet, uniform, beta
import math
def load_data_fan(data, dataNum_1, dataNum_2, Times):
# Input:
# data: Original data T*N*N
# dataNum, Times: N,T
# Output:
# relation_matrix: T*N*N, training relation matrix, Rii = 0, test relation = -1
# test_matrix: T*N*N, test relation matrix, Rii = 0, training relation = -1
# Initialize the the relation Rij to 0 and 1
relation_matrix = data.astype(int)
relation_matrix[relation_matrix>1] = 1
relation_matrix[relation_matrix<0] = 0
# Initialize the relation Rii to 0
for tt in range(Times):
relation_matrix[tt][(np.arange(dataNum_1), np.arange(dataNum_2))] = 0
# Initialization size of training = test, with the relation =-1 in each other's matrix
test_matrix = (np.ones((Times, dataNum_1, dataNum_2))*(-1)).astype(int)
test_ratio = 0.1
# test relation chose from the relation from each column with the set test_ratio
# only choose the N*N' N'*0.1
for tt in range(Times):
for ii in range(dataNum_1):
test_index_i = np.random.choice(dataNum_2, int(dataNum_2*test_ratio), replace=False)
test_matrix[tt, ii, test_index_i] = copy.copy(relation_matrix[tt, ii, test_index_i])
relation_matrix[tt, ii, test_index_i] = -1
return relation_matrix, test_matrix
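
# Usage sketch (shapes only): for any T x N x N' interaction tensor `data`,
# this yields a training mask and a 10% held-out test mask per row per snapshot:
# train_R, test_R = load_data_fan(data, data.shape[1], data.shape[2], data.shape[0])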
def initialize_model(whole_dataR, dataNum_1, dataNum_2, Times, KK, LL):
# Input:
# dataR_T1/T2: the list locations of Rij=1 for training data: T arrays with L(positive edges) x 2
# KK: number of communities
# LL: number of features
# dataNum_1, dataNum_2, Times: N, N', T
# Output:
# M: Poisson distribution parameter in generating X_{ik}
# X_i: T X N X K, latent counts for node i
# Z_ik: T X N X K, latent integers summary, calculating as \sum_{j,k_2} Z_{ij,kk_2}/Z_{ji,kk_2}
# Z_k1k2: K X K latent integers summary, calculating as \sum_{i,j k_1,k_2} Z_{ij,kk_2}
# pis: LL X T X N X KK: layer-wise mixed-membership distributions
# betas: LL-1 X T X N' X N: layer-wise information propagation coefficient
# gammas: LL X T-1 X N' X N: layer-wise information propagation coefficient
# Lambdas: K*K community compatibility matrix
#### Pis
pis = np.zeros((LL, Times, dataNum_1, KK)) ####### LL*T*N*KK
### Pis layer 0
## Pis layer 0, Times 0
for ii in range(dataNum_1):
pis[0, 0, ii] = dirichlet.rvs(0.1 * np.ones(KK))
## Pis layer 0, Times 1 to t
gammas = gamma.rvs(1, 1, size=(LL, Times-1, dataNum_2, dataNum_1)) ####### LL*(T-1)*N'*N
psi_0 = np.zeros((Times-1, dataNum_1, KK)) ####### (T-1)*N*KK
for tt in range(1, Times):
psi_0[tt-1] = np.dot(gammas[0, tt-1].T, pis[0, tt-1]) # (N'*N).T * N'*K
for ii in range(dataNum_1):
pis[0, tt, ii] = dirichlet.rvs(0.1 + psi_0[tt-1, ii])
### Pis layer 1 to L
betas = gamma.rvs(1, 1, size=(LL - 1, Times, dataNum_2, dataNum_1))
for ll in range(1, LL):
## Pis layer ll, Times 0
psi_ll_t0 = np.dot(betas[ll-1, 0].T, pis[ll-1, 0])
psi_ll_t0 += 1e-16
for ii in range(dataNum_1):
pis[ll, 0, ii] = dirichlet.rvs(psi_ll_t0[ii])
## Pis layer ll, Times 1 to t
psi_ll_tt1 = np.zeros((Times, dataNum_1, KK))
psi_ll_tt2 = np.zeros((Times-1, dataNum_1, KK))
for tt in range(1, Times):
psi_ll_tt1[tt] = np.dot(betas[ll-1, tt].T, pis[ll-1, tt])
psi_ll_tt2[tt-1] = np.dot(gammas[ll, tt-1].T, pis[ll, tt-1])
psi_ll_tt = psi_ll_tt1[tt] + psi_ll_tt2[tt-1] + 1e-16
for ii in range(dataNum_1):
pis[ll, tt, ii] = dirichlet.rvs(psi_ll_tt[ii])
M = dataNum_1
X_i = poisson.rvs(M*pis[-1]).astype(int)
k_Lambda = 1
theta_Lambda = 1/(M*dataNum_1)
QQ = theta_Lambda
Lambdas = gamma.rvs(a = k_Lambda, scale = theta_Lambda, size = (KK, KK))
Z_ik = np.zeros((Times, dataNum_1, KK), dtype=int)
Z_k1k2 = np.zeros((KK, KK), dtype=int)
scale_val = 1
for tt in range(Times):
for ii in range(len(whole_dataR[tt])):
pois_lambda = scale_val*(X_i[tt, whole_dataR[tt][ii][0]][:, np.newaxis] * X_i[tt, whole_dataR[tt][ii][1]][np.newaxis, :]) * Lambdas
total_val = positive_poisson_sample(np.sum(pois_lambda))
new_counts = np.random.multinomial(total_val, pois_lambda.reshape((-1)) / np.sum(pois_lambda)).reshape((KK, KK))
Z_k1k2 += new_counts
Z_ik[tt, whole_dataR[tt][ii][0]] += np.sum(new_counts, axis=1)
Z_ik[tt, whole_dataR[tt][ii][1]] += np.sum(new_counts, axis=0)
return M, X_i, Z_ik, Z_k1k2, pis, betas, Lambdas, gammas, QQ, scale_val
def positive_poisson_sample(z_lambda):
# return positive truncated poisson random variables Z = 1, 2, 3, 4, ...
# z_lambda: parameter for Poisson distribution
sum_1 = np.exp(z_lambda)-1
candidate = 1000
can_val = np.arange(1, candidate)
vals = np.exp(can_val*np.log(z_lambda)-np.cumsum(np.log(can_val)))
select_val = can_val[np.sum((sum_1*uniform.rvs())>np.cumsum(vals))]
# candidate = 1000
# can_val = np.arange(1, candidate)
# log_vals = can_val * np.log(z_lambda) - np.cumsum(np.log(can_val))
# vals = np.exp(log_vals - np.max(log_vals))
# select_val = np.random.choice(can_val, p=(vals / np.sum(vals)))
return select_val
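
# Note: this is inverse-CDF sampling of a zero-truncated Poisson restricted to
# the first 999 support points, so every returned value satisfies select_val >= 1.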
class sDGRM_class:
def __init__(self, dataNum_1, dataNum_2, Times, LL, KK, Lambdas, QQ, M, X_i, Z_ik, Z_k1k2, pis, betas, gammas, scale_val):
self.dataNum_1 = dataNum_1
self.dataNum_2 = dataNum_2
self.Times = Times
self.LL = LL
self.KK = KK
self.Lambdas = Lambdas # K * K
self.QQ = QQ
self.M = M
self.X_i = X_i
self.Z_ik = Z_ik # T * N * K
self.Z_k1k2 = Z_k1k2 # K * K
self.pis = pis # LL * T * N * K
self.betas = betas # LL-1 * T * N * K
self.gammas = gammas # LL * T-1 * N * K
self.alphas = 0.1
self.scale = scale_val
def back_propagate_fan(self, dataR_H):
# Back propagate the latent counts from X_i to the feature layer
# Input:
# dataR_H: the non-zeros locations of \beta (the information propagation matrix)
# Output:
# X_ik: LL X T X N X KK: layer-wise latent counting statistics matrix
# y_ik: LL X T X N X KK: auxiliary counts for each pi introduced in back propagation
# q_il: LL X T X N auxiliary variables used
# psi_ll_tt: LL X T X N X KK: Prior of Pi's parameter
# z_llj1_tt_ji_k: y_ik pass to pi of next layer, the layer L is 0
# A_ll_tj1_ji_k: y_ik pass to pi of next time, the time T is 0
# Z_ik_sum_k, A_ik_sum_k: LL X T X N X N auxiliary variables used
## ll=L,tt=T X(ll*tt)_ik = self.Xi[T]
## ll=L,tt=t, X(ll*tt)_ik = self.Xi[t]+
#### y_ik, LL X T X N X KK
y_ik = np.zeros((self.LL, self.Times, self.dataNum_1, self.KK)) # LL * T * N * K
#### Z&A (LL X T X N X KK, LL layer of Z and T time of A should be all zero)
Z_ik_sum_k = np.zeros((self.LL-1, self.Times, self.dataNum_1, self.dataNum_2)) # LL * T * N' * N
A_ik_sum_k = np.zeros((self.LL, self.Times-1, self.dataNum_1, self.dataNum_2)) # LL * T * N' * N
z_llj1_tt_ji_k = np.zeros((self.LL-1, self.Times, self.dataNum_1, self.dataNum_2,self.KK)) # LL * T * N' * N *K
A_ll_tj1_ji_k = np.zeros((self.LL, self.Times-1, self.dataNum_1, self.dataNum_2,self.KK)) # LL * T * N' * N *K
##### Phi
psi_ll_tt = np.zeros((self.LL, self.Times, self.dataNum_1, self.KK))
#### q_il, LL X T X N
q_il = np.zeros((self.LL, self.Times, self.dataNum_1))
### Layer L, time T, X_ik[L, T]=self.X_i[T],
### Layer L, time t, X_i[L,t] = self.X_ik[L, t] + A((L,t)_i'ik_sum i
### Layer L, time 0, X_i[L,0] = self.X_ik[L, 0] + A((L,0)_i'ik_sum i, psi_ll_tt only has beta*pi, y only generate z
### Layer ll!=0, time T, X_i[ll,T]= Z(ll,t)_i'ik_sum i
### Layer ll!=0, time t, X_i[ll,t]= Z(ll,t)_i'ik_sum i + A(ll,t)_i'ik_sum i,
#### X_ik, LL X T X N X KK
X_ik = np.zeros((self.LL, self.Times, self.dataNum_1, self.KK)) # LL * T * N * K
X_ik[-1] = self.X_i # T * N * K
## Layer L to 1, time T to 1
for ll in np.arange(self.LL-1, -1, -1):
for tt in np.arange(self.Times-1, -1, -1):
if ll > 0:
if tt > 0:
## beta(ll-1,tt)_i',i * Pi(ll-1, tt)_i',k N'*1*K * N'*N*1 = N'*N*K
psi_ll_tt_kk_1 = self.pis[ll-1, tt][:, np.newaxis, :] * ((self.betas[ll-1, tt] * (dataR_H[tt]))[:, :, np.newaxis])
## gamma(ll,tt-1)_i',i * Pi(ll, tt-1)_i',k N'*1*K * N'*N*1 = N'*N*K
psi_ll_tt_kk_2 = self.pis[ll, tt - 1][:, np.newaxis, :] * ((self.gammas[ll, tt - 1] * (dataR_H[tt - 1]))[:, :, np.newaxis])
## phi N*K
psi_ll_tt[ll, tt] = np.sum(psi_ll_tt_kk_1, axis=0) + np.sum(psi_ll_tt_kk_2, axis=0)
for nn in range(self.dataNum_1):
for kk in range(self.KK):
if X_ik[ll, tt, nn, kk] > 0:
if np.sum(psi_ll_tt[ll, tt]) > 0:
y_ik[ll, tt, nn, kk] = np.sum(uniform.rvs(size=int(X_ik[ll, tt, nn, kk])) < (
psi_ll_tt[ll, tt, nn, kk] / (psi_ll_tt[ll, tt, nn, kk] + np.arange(
int(X_ik[ll, tt, nn, kk])))))
pp = np.zeros(self.dataNum_2 + self.dataNum_2)
pp[:self.dataNum_2] = psi_ll_tt_kk_1[:, nn, kk] / psi_ll_tt[ll, tt, nn, kk]
pp[self.dataNum_2:] = psi_ll_tt_kk_2[:, nn, kk] / psi_ll_tt[ll, tt, nn, kk]
counts = np.random.multinomial(y_ik[ll, tt, nn, kk], pp)
z_llj1_tt_ji_k[ll - 1, tt, :, nn, kk] = counts[:self.dataNum_2]
A_ll_tj1_ji_k[ll, tt - 1, :, nn, kk] = counts[self.dataNum_2:]
Z_ik_sum_k[ll - 1, tt, :, nn] += z_llj1_tt_ji_k[ll - 1, tt, :, nn, kk] # N' = (L-1*T)*N*:*K sum K
A_ik_sum_k[ll, tt - 1, :, nn] += A_ll_tj1_ji_k[ll, tt - 1, :, nn, kk] # N' = ((L*T-1)*N*:*K sum K
# back to ll-1, tt, kk
X_ik[ll - 1, tt, :, kk] += z_llj1_tt_ji_k[ll - 1, tt, :, nn, kk]
# back to ll, tt , kk
X_ik[ll, tt - 1, :, kk] += A_ll_tj1_ji_k[ll, tt - 1, :, nn, kk]
else:
psi_ll_tt_kk_1 = self.pis[ll-1, tt][:, np.newaxis, :] * ((self.betas[ll-1, tt] * (dataR_H[tt]))[:, :, np.newaxis])
psi_ll_tt[ll, tt] = np.sum(psi_ll_tt_kk_1, axis=0)
for nn in range(self.dataNum_1):
for kk in range(self.KK):
if X_ik[ll, tt, nn, kk] > 0:
if np.sum(psi_ll_tt[ll, tt]) > 0:
y_ik[ll, tt, nn, kk] = np.sum(uniform.rvs(size=int(X_ik[ll, tt, nn, kk])) < (
psi_ll_tt[ll, tt, nn, kk] / (psi_ll_tt[ll, tt, nn, kk] + np.arange(
int(X_ik[ll, tt, nn, kk])))))
pp = psi_ll_tt_kk_1[:, nn, kk] / psi_ll_tt[ll, tt, nn, kk]
counts = np.random.multinomial(y_ik[ll, tt, nn, kk], pp)
z_llj1_tt_ji_k[ll - 1, tt, :, nn, kk] = counts
Z_ik_sum_k[ll - 1, tt, :, nn] += z_llj1_tt_ji_k[ll - 1, tt, :, nn,
kk] # N' = (L-1*T)*N*:*K sum K
# back to ll-1, tt, kk
X_ik[ll - 1, tt, :, kk] += z_llj1_tt_ji_k[ll - 1, tt, :, nn, kk]
else:
if tt>0:
psi_ll_tt_kk_2 = self.pis[ll, tt - 1][:, np.newaxis, :] * ((self.gammas[ll, tt - 1] * (dataR_H[tt - 1]))[:, :, np.newaxis])
psi_ll_tt[ll, tt] = np.sum(psi_ll_tt_kk_2, axis=0)
for nn in range(self.dataNum_1):
for kk in range(self.KK):
if X_ik[ll, tt, nn, kk] > 0:
if np.sum(psi_ll_tt[ll, tt]) > 0:
y_ik[ll, tt, nn, kk] = np.sum(uniform.rvs(size=int(X_ik[ll, tt, nn, kk])) < (
psi_ll_tt[ll, tt, nn, kk] / (psi_ll_tt[ll, tt, nn, kk] + np.arange(
int(X_ik[ll, tt, nn, kk])))))
pp = psi_ll_tt_kk_2[:, nn, kk] / psi_ll_tt[ll, tt, nn, kk]
counts = np.random.multinomial(y_ik[ll, tt, nn, kk], pp)
A_ll_tj1_ji_k[ll, tt-1, :, nn, kk] = counts
A_ik_sum_k[ll, tt-1, :, nn] += A_ll_tj1_ji_k[ll, tt-1, :, nn,
kk] # N' = (L-1*T)*N*:*K sum K
# back to ll, tt-1, kk
X_ik[ll, tt-1, :, kk] += z_llj1_tt_ji_k[ll, tt-1, :, nn, kk]
latent_count_i = np.sum(X_ik[ll, tt], axis=1).astype(float) # N X(ll,tt)_ik sum K
beta_para_1 = np.sum(psi_ll_tt[ll, tt], axis=1) # Phi(L,tt)_ik sum k
inte1 = gamma.rvs(a=beta_para_1 + 1e-16, scale=1) + 1e-16
inte2 = gamma.rvs(a=latent_count_i + 1e-16, scale=1) + 1e-16
qil_val = inte1 / (inte1 + inte2)
q_il[ll, tt] = qil_val
return X_ik, y_ik, q_il, z_llj1_tt_ji_k, A_ll_tj1_ji_k, Z_ik_sum_k, A_ik_sum_k, psi_ll_tt
def sample_pis(self, X_ik, psi_ll_tt):
# layer-wise sample mixed-membership distribution
# Input:
# X_ik: LL X T X N X KK: layer-wise latent counting statistics matrix
# psi_ll_tt: LL X T X N X KK: Prior of Pi's parameter
for ll in np.arange(self.LL):
for tt in np.arange(self.Times):
psi_ll = psi_ll_tt[ll, tt]
if ll == 0:
psi_ll += self.alphas
para_nn = psi_ll+X_ik[ll, tt]
nn_pis = gamma.rvs(a=para_nn, scale=1) + 1e-16
self.pis[ll, tt] = nn_pis / (np.sum(nn_pis, axis=1)[:, np.newaxis])
def sample_X_i(self, dataR_matrix):
# sample the latent counts X_i
idx = (dataR_matrix != (-1))
for tt in range(dataR_matrix.shape[0]):
np.fill_diagonal(idx[tt], False)
for tt in range(self.Times):
for nn in range(self.dataNum_1):
Xik_Lambda = np.sum(np.dot(self.Lambdas, ((idx[tt, nn][:, np.newaxis] * self.X_i[tt]).T)), axis=1) + \
np.sum(np.dot(self.Lambdas.T, (idx[tt, :, nn][:, np.newaxis] * self.X_i[tt]).T), axis=1)
log_alpha_X = np.log(self.M) + np.log(self.pis[-1, tt][nn]) - Xik_Lambda
for kk in range(self.KK):
n_X = self.Z_ik[tt, nn, kk]
if n_X == 0:
select_val = poisson.rvs(np.exp(log_alpha_X[kk]))
else:
candidates = np.arange(1, 10*self.M+1)
pseudos = candidates*log_alpha_X[kk]+n_X*np.log(candidates)-np.cumsum(np.log(candidates))
pseudos_max = np.max(pseudos)
proportions = np.exp(pseudos-pseudos_max)
select_val = np.random.choice(candidates, p=proportions / np.sum(proportions))
self.X_i[tt, nn, kk] = select_val
def sample_Lambda_k1k2(self, dataR_matrix):
# sample Lambda according to the gamma distribution
idx = (dataR_matrix != (-1))
Phi_KK = np.zeros((self.KK, self.KK))
for tt in range(dataR_matrix.shape[0]):
np.fill_diagonal(idx[tt], False)
Phi_KK += np.dot(np.dot(self.X_i[tt].T, idx[tt]), self.X_i[tt])
R_KK = np.ones((self.KK, self.KK))/(self.KK**2)
np.fill_diagonal(R_KK, 1/self.KK)
self.Lambdas = gamma.rvs(a = self.Z_k1k2 + R_KK, scale = 1)/(1+Phi_KK)
def sample_Z_ik_k1k2(self, whole_dataR):
# sampling the latent integers
# Input:
# whole_dataR: tt list of array, the L list of Rij=1 for training data
Z_ik = np.zeros((self.Times, self.dataNum_1, self.KK), dtype=int)
Z_k1k2 = np.zeros((self.KK, self.KK), dtype=int)
for tt in range(self.Times):
for ii in range(len(whole_dataR[tt])):
pois_lambda = self.scale*(self.X_i[tt, whole_dataR[tt][ii][0]][:, np.newaxis]*self.X_i[tt, whole_dataR[tt][ii][1]][np.newaxis, :])*self.Lambdas
total_val = positive_poisson_sample(np.sum(pois_lambda))
new_counts = np.random.multinomial(total_val, pois_lambda.reshape((-1))/np.sum(pois_lambda)).reshape((self.KK, self.KK))
Z_k1k2 += new_counts
Z_ik[tt, whole_dataR[tt][ii][0]] += np.sum(new_counts, axis=1)
Z_ik[tt, whole_dataR[tt][ii][1]] += np.sum(new_counts, axis=0)
self.Z_k1k2 = Z_k1k2
self.Z_ik = Z_ik
def sample_beta(self, Z_ik_sum_k, q_il):
# Sampling the information propagation coefficients
# Input:
# Z_ik_sum_k: LL X T X N X N auxiliary variables used
# q_il: LL X T X N auxiliary variables used
hyper_alpha = 1
hyper_beta = 1
for ll in range(self.betas.shape[0]):
for tt in range(self.betas.shape[1]):
self.betas[ll, tt] = gamma.rvs(a=hyper_alpha+Z_ik_sum_k[ll, tt], scale=1)/(hyper_beta -np.log(q_il[ll, tt][:, np.newaxis]))
def sample_gammas(self, A_ik_sum_k, q_il):
# Sampling the information propagation coefficients
# Input:
# A_ik_sum_k: LL X T X N X N auxiliary variables used
# q_il: LL X T X N auxiliary variables used
hyper_alpha = 1
hyper_beta = 1
for ll in range(self.gammas.shape[0]):
for tt in range(self.gammas.shape[1]):
self.gammas[ll, tt] = gamma.rvs(a=hyper_alpha+A_ik_sum_k[ll, tt], scale=1)/(hyper_beta -np.log(q_il[ll, tt][np.newaxis, :]))
def sample_M(self, M_val):
# updating the hyper-parameter M
# Input:
# M_val: Poisson distribution parameter in generating X_{ik}
k_M = M_val
theta_M_inverse = 1
self.M = gamma.rvs(a=k_M+np.sum(self.X_i), scale=1)/(theta_M_inverse+self.Times*self.dataNum_1)
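
# One Gibbs sweep over the sampler above (sketch; N1, N2, T, LL, KK etc. are
# placeholders with the shapes documented in initialize_model):
# model = sDGRM_class(N1, N2, T, LL, KK, Lambdas, QQ, M, X_i, Z_ik, Z_k1k2, pis, betas, gammas, scale_val)
# X_ik, y_ik, q_il, z_b, A_b, Z_sum, A_sum, psi = model.back_propagate_fan(dataR_H)
# model.sample_pis(X_ik, psi)
# model.sample_beta(Z_sum, q_il)
# model.sample_gammas(A_sum, q_il)
# model.sample_Z_ik_k1k2(whole_dataR)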
| [
"[email protected]"
] | |
8c0a6958d0fc28783f5c750e3591c16f0966e082 | 6f401aed6b736b07224c5da774fef5124536e4c3 | /automate_the_bouring_stuff/if_else_exaple.py | 1a38bf510adbb9ccad2150d54bbc03ff508275f3 | [] | no_license | Fumitus/First_Python_lessons | 871a417cdfe828c68da1003e24d5e93de5a466dd | 07a161c886a00ddb86eca3ede32e82df00b938a8 | refs/heads/master | 2020-04-10T06:49:20.666396 | 2018-12-07T19:23:41 | 2018-12-07T19:23:41 | 160,865,229 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 112 | py | password = 'swordfish'
if password == 'swordfish':
    print('Access granted')
else:
    print('Wrong password')
| [
"[email protected]"
] | |
cdd0fb9b79616b82673e39202061c831e37586b3 | 7636292e3e32fa6fea4be6f2491b3bd303e68867 | /MethodsPractice/003无重复字符的最长子串.py | 16bfa38e26e722ad9c94d29122f7c5f013b82c1b | [] | no_license | MariOdyssey/Weeping-Hall | 0d595eea6d22c9c928b42daaef0712a3dbb44fda | d064068d92a65a8e94fa77583c9b4d4ccc529111 | refs/heads/master | 2020-04-17T16:27:17.210611 | 2019-09-07T18:52:26 | 2019-09-07T18:52:26 | 166,740,783 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | class Solution:
    def lengthOfLongestSubstring(self, s):
        """
        :type s: str
        :rtype: int
        Example: "abcabcbb" -> 3 (the substring "abc").
        """
        # Sliding window: last_seen maps each character to its latest index.
        last_seen = {}
        start = 0        # left edge of the current repeat-free window
        longest = 0
        for i, ch in enumerate(s):
            if ch in last_seen and last_seen[ch] >= start:
                start = last_seen[ch] + 1   # skip past the earlier occurrence
            last_seen[ch] = i
            longest = max(longest, i - start + 1)
        return longest

| [
"[email protected]"
] | |
dd24036430468b0bc02e658fe3954b26de047e3b | 69c470989c058fac9cd1a99e5bf2454e129d44dc | /assignment1/cs231n/classifiers/k_nearest_neighbor.py | c22d095d14640cda31b2131a381ecdaa1951a8b3 | [] | no_license | PedroFerreiradaCosta/CS231n-Solutions | 34efa55916936cf43c20f0ce78acb08913c146f5 | a77f72294618307f87922bc16817f7e218891f29 | refs/heads/master | 2022-12-14T03:58:06.042204 | 2019-11-12T14:42:49 | 2019-11-12T14:42:49 | 196,404,053 | 3 | 0 | null | 2022-12-08T05:18:44 | 2019-07-11T13:50:33 | Jupyter Notebook | UTF-8 | Python | false | false | 8,924 | py | from builtins import range
from builtins import object
import numpy as np
from past.builtins import xrange
class KNearestNeighbor(object):
""" a kNN classifier with L2 distance """
def __init__(self):
pass
def train(self, X, y):
"""
Train the classifier. For k-nearest neighbors this is just
memorizing the training data.
Inputs:
- X: A numpy array of shape (num_train, D) containing the training data
consisting of num_train samples each of dimension D.
- y: A numpy array of shape (N,) containing the training labels, where
y[i] is the label for X[i].
"""
self.X_train = X
self.y_train = y
def predict(self, X, k=1, num_loops=0):
"""
Predict labels for test data using this classifier.
Inputs:
- X: A numpy array of shape (num_test, D) containing test data consisting
of num_test samples each of dimension D.
- k: The number of nearest neighbors that vote for the predicted labels.
- num_loops: Determines which implementation to use to compute distances
between training points and testing points.
Returns:
- y: A numpy array of shape (num_test,) containing predicted labels for the
test data, where y[i] is the predicted label for the test point X[i].
"""
if num_loops == 0:
dists = self.compute_distances_no_loops(X)
elif num_loops == 1:
dists = self.compute_distances_one_loop(X)
elif num_loops == 2:
dists = self.compute_distances_two_loops(X)
else:
raise ValueError('Invalid value %d for num_loops' % num_loops)
return self.predict_labels(dists, k=k)
def compute_distances_two_loops(self, X):
"""
Compute the distance between each test point in X and each training point
in self.X_train using a nested loop over both the training data and the
test data.
Inputs:
- X: A numpy array of shape (num_test, D) containing test data.
Returns:
- dists: A numpy array of shape (num_test, num_train) where dists[i, j]
is the Euclidean distance between the ith test point and the jth training
point.
"""
num_test = X.shape[0]
num_train = self.X_train.shape[0]
dists = np.zeros((num_test, num_train))
for i in range(num_test):
for j in range(num_train):
#####################################################################
# TODO: #
# Compute the l2 distance between the ith test point and the jth #
# training point, and store the result in dists[i, j]. You should #
# not use a loop over dimension, nor use np.linalg.norm(). #
#####################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
dists[i,j] = np.sqrt(np.sum((self.X_train[j]-X[i])**2))
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
return dists
def compute_distances_one_loop(self, X):
"""
Compute the distance between each test point in X and each training point
in self.X_train using a single loop over the test data.
Input / Output: Same as compute_distances_two_loops
"""
num_test = X.shape[0]
num_train = self.X_train.shape[0]
dists = np.zeros((num_test, num_train))
for i in range(num_test):
#######################################################################
# TODO: #
# Compute the l2 distance between the ith test point and all training #
# points, and store the result in dists[i, :]. #
# Do not use np.linalg.norm(). #
#######################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
powered_dist = np.power((self.X_train-X[i]), 2)
dists[i,:] = np.sqrt(np.sum(powered_dist, axis = 1))
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
return dists
def compute_distances_no_loops(self, X):
"""
Compute the distance between each test point in X and each training point
in self.X_train using no explicit loops.
Input / Output: Same as compute_distances_two_loops
"""
num_test = X.shape[0]
num_train = self.X_train.shape[0]
dists = np.zeros((num_test, num_train))
#########################################################################
# TODO: #
# Compute the l2 distance between all test points and all training #
# points without using any explicit loops, and store the result in #
# dists. #
# #
# You should implement this function using only basic array operations; #
# in particular you should not use functions from scipy, #
# nor use np.linalg.norm(). #
# #
# HINT: Try to formulate the l2 distance using matrix multiplication #
# and two broadcast sums. #
#########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
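        # Uses the expansion ||a - b||^2 = ||a||^2 - 2 a.b + ||b||^2: one
        # matrix multiply gives every test/train cross term, and the two
        # squared-norm vectors broadcast across rows and columns.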
dists = np.sqrt(-2 * np.dot(X, self.X_train.T) +
np.sum(self.X_train**2, axis=1)+ np.sum(X**2, axis=1)[:,np.newaxis])
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
return dists
def predict_labels(self, dists, k=1):
"""
Given a matrix of distances between test points and training points,
predict a label for each test point.
Inputs:
- dists: A numpy array of shape (num_test, num_train) where dists[i, j]
        gives the distance between the ith test point and the jth training point.
Returns:
- y: A numpy array of shape (num_test,) containing predicted labels for the
test data, where y[i] is the predicted label for the test point X[i].
"""
num_test = dists.shape[0]
y_pred = np.zeros(num_test)
for i in range(num_test):
# A list of length k storing the labels of the k nearest neighbors to
# the ith test point.
closest_y = []
#########################################################################
# TODO: #
# Use the distance matrix to find the k nearest neighbors of the ith #
# testing point, and use self.y_train to find the labels of these #
# neighbors. Store these labels in closest_y. #
# Hint: Look up the function numpy.argsort. #
#########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
sorted_idx = np.argsort(dists[i,:])
for j in range(k):
closest_y.append(self.y_train[sorted_idx[j]])
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
#########################################################################
# TODO: #
# Now that you have found the labels of the k nearest neighbors, you #
# need to find the most common label in the list closest_y of labels. #
# Store this label in y_pred[i]. Break ties by choosing the smaller #
# label. #
#########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
            # np.bincount counts the votes per label and np.argmax returns the
            # first (i.e. smallest) label with the maximum count, which breaks
            # ties by choosing the smaller label, as the comment above requires.
            y_pred[i] = np.argmax(np.bincount(closest_y))
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
return y_pred
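# (Added) A minimal usage sketch, not part of the original assignment. It
# assumes this is the standard CS231n `KNearestNeighbor` skeleton, i.e. the
# surrounding class defines train(X, y) storing X_train/y_train, and that
# numpy is imported as np at the top of this file:
#     knn = KNearestNeighbor()
#     knn.train(X_train, y_train)                  # just memorises the data
#     dists = knn.compute_distances_no_loops(X_test)
#     y_test_pred = knn.predict_labels(dists, k=5)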
| [
"[email protected]"
] | |
957b402ca0ef50b2e6fe249c5db02dfa2e581343 | af683cae09e2ab3b49d50995f681b5616eb1b16c | /FC.py | 532353e2abdded1b03b8a1013b0cdad86e81eb76 | [
"MIT"
] | permissive | cutierobot/Folder-Compare | aefb967c8186a9eac2ab4bcdfaedaa097ca05139 | 206bdba879690ca0816d6b25f93f9aa2bb9100dc | refs/heads/master | 2020-04-13T03:28:06.564768 | 2019-01-08T11:15:40 | 2019-01-08T11:15:40 | 162,932,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,463 | py | # \033[1;32;40m green
# \033[1;35;40m purple
# \033[1;34;40m Bright Blue
# \033[0;37;40m Normal text
# \033[2;37;40m Normal text underline
import filecmp
import sys
# folder1="/Volumes/Mikaela\'s Hardrive/TV SHOWS"
# folder2 = "/Volumes/Grayson/Tv shows/"
# dc = filecmp.dircmp(folder1, folder2)
# dc.report_full_closure()
class Compare(object):
"""The compare class that is the brains behind this whole program"""
def __init__(self, folder1, folder2):
self.dc = ""
self.folder1 = folder1
self.folder2 = folder2
"""
Sets the class "dc" to the given dc value
@param dc The value to become self.dc value
"""
def set_dc(self, dc):
self.dc = dc
# TODO: [write] function definition (99)
def get_init_details(self, dc):
# folder1=sys.argv[1]
# folder2 = sys.argv[2]
if self.dc == "":
Compare.set_dc(self,dc)
# self.table_print(self.dc)
# left = '\n'.join(dc.left_only)
# right = '\n'.join(dc.right_only)
# com = '\n'.join(dc.common_dirs)
# sub = '\n'.join(dc.subdirs)
# print statments
print("Legends of colours:\n"+"\033[1;35;40m"+"Left Only"+
"\033[1;34;40m"+" Right Only"+
"\033[1;32;40m"+" Common Directories/files\033[0;37;40m\n")
self.init_prompt()
# self.recursive_dir(dc,self.folder1,self.folder2)
"""
Prints the left directory only. Where only file/folders that appear in the
left compared directory appear.
"""
def left_directory_only(self):
left = '\n'.join(self.dc.left_only)
# Left Only
print("\033[1;35;40m" +"left_only: \033[0;37;40m\n" + left + "\n")
"""
Prints the right directory only. where only file/folders that appear in the
right compared directory appear.
"""
def right_directory_only(self):
right = '\n'.join(self.dc.right_only)
# right Only
print("\033[1;34;40m"+"Right Only: \033[0;37;40m\n" + right + "\n")
"""
Prints the comparison between directory 1 and 2 together.
"""
def compared_only(self):
com = '\n'.join(self.dc.common_dirs)
# common directories print
print("\033[1;32;40m"+"Common Directories/files\033[0;37;40m\n" + com + "\n")
# //TODO: [write] function definition (99)
# @staticmethod
def recus_details(self,dc,folder1,folder2):
# left = '\n'.join(dc.left_only)
# right = '\n'.join(dc.right_only)
# com = '\n'.join(dc.common)
# leftOnly = "\033[1;35;40m" +format("left_only: ","^25s")+"\033[0;37;40m"
# rightOnly = "\033[1;34;40m"+format("Right Only: ","^25s")+"\033[0;37;40m"
# Common = "\033[1;32;40m"+format("Common Directories/files","^25s")+"\033[0;37;40m"
# yellow_pole = "\033[1;33;40m"+"|"+"\033[0;37;40m"
# # Left Only
# print(yellow_pole+leftOnly+yellow_pole)
# print(left)
# # right Only
# print(yellow_pole+rightOnly+yellow_pole)
# print(right)
# # common directories print
# print(yellow_pole+Common+yellow_pole)
# print(com)
self.table_print(dc)
"""
Recursive of sorts, finds subfolders and go through them and
prints out subfolders info.
@see recus_details
"""
# @staticmethod
def recursive_dir(self,dc,folder1,folder2):
if len(dc.common_dirs)>0:
for folder in dc.common_dirs:
# print folder name in yellow
pole = "|"+ format(folder,"^25s")+"|"
print("\033[1;33;40m"+"+"+format("--","-^25s")+"+"+"\033[0;37;40m")
print("\033[1;33;40m"+pole+"\033[0;37;40m")
print("\033[1;33;40m"+"+"+format("--","-^25s")+"+"+"\033[0;37;40m")
left = folder1+"/"+folder
right = folder2+"/"+folder
newdc = filecmp.dircmp(left, right)
# Compare.recus_details(newdc,left,right)
self.recus_details(newdc,left,right)
# # deals with the subfolders in the current folder. i.e Season 1
# if len(newdc.common_dirs)>0:
# for subfolder in newdc.common_dirs:
# # print("\nsubfolder: "+subfolder)
# # print("======================\n")
# print("\n\033[1;33;40m"+subfolder+"\033[0;37;40m")
# subleft = left+"/"+subfolder
# subright = right+"/"+subfolder
# subdc = filecmp.dircmp(subleft, subright)
# # Compare.recus_details(subdc,subleft,subright)
# self.recus_details(subdc,subleft,subright)
# print("#######################\n")
# # Compare.give_best_score(len(dc.common),newdc)
# self.give_best_score(len(dc.common),newdc)
# print("======================\n")
# self.subfolder_rec(newdc, left, right, dc)
"""
Takes in the dc and newdc, which is for the subfolders of the dc, and
    prints out the subfolder information for that dc folder.
@param newdc The newdc for the subfolder
    @param dc The dc for the parent directory
@param left The left directory subfolder creation
@param right The right directory subfolder creation
"""
def subfolder_rec(self, newdc, left, right, dc):
# deals with the subfolders in the current folder. i.e Season 1
if len(newdc.common_dirs)>0:
for subfolder in newdc.common_dirs:
# print("\nsubfolder: "+subfolder)
# print("======================\n")
print("\n\033[1;33;40m"+subfolder+"\033[0;37;40m")
subleft = left+"/"+subfolder
subright = right+"/"+subfolder
subdc = filecmp.dircmp(subleft, subright)
# Compare.recus_details(subdc,subleft,subright)
self.recus_details(subdc,subleft,subright)
print("#######################\n")
# Compare.give_best_score(len(dc.common),newdc)
self.give_best_score(len(dc.common),newdc)
print("======================\n")
"""
    The initial prompt that appears after successful initialisation of the
    program. It launches all the sub-programs of this program. If anything
    other than 0-4 or h is entered, the user is prompted again.
"""
def init_prompt(self):
# name of python program
# options
# - see only things on first directory(quick look)
# - see only things on second directory(quick look)
# - see things on both first and second directory(quick look)
# - go into sub directories
# input prompt
print("[0] Left Only")
print("[1] Right Only")
print("[2] Comparison Only")
print("[3] Table Comparison")
print("[4] Recursive Comparison")
print("[h] Reprint these help messages")
print("[exit] Exit this program")
        usrInput = input("Type number from 0 - 4: ")
if usrInput == "0":
self.left_directory_only()
elif usrInput == "1":
self.right_directory_only()
elif usrInput == "2":
self.compared_only()
elif usrInput == "3":
self.table_print(self.dc)
elif usrInput == "4":
self.recursive_dir(self.dc,self.folder1,self.folder2)
elif usrInput == "h":
self.init_prompt()
elif usrInput == "exit":
exInput = input("Are you sure you want to exit [Y|N]: ")
if exInput == "Y":
sys.exit(2)
else:
            self.print_red("Error: please input a number between 0 - 4")
self.init_prompt()
"""
    In a nice table format, prints all comparison types (left_only, right_only,
    and common) side by side.
@param dc the self.dc value
"""
@staticmethod
def table_print(dc):
# left = '\n'.join(dc.left_only)
# right = '\n'.join(dc.right_only)
# com = '\n'.join(dc.common_dirs)
# coloured words
leftOnly = "\033[1;35;40m" +"left_only\033[0;37;40m"
rightOnly = "\033[1;34;40m"+"Right Only\033[0;37;40m"
commonDirs = "\033[1;32;40m"+"Common Directories/files\033[0;37;40m"
# leftTitle = format(leftOnly,":^10")
# ------ borders
leftPole = "-"*23
middlePole = "-"*24
rightPole = "-"*28
print("+"+"-"*23+"+"+"-"*24+"+"+"-"*28+"+")
print("| "+leftOnly+" | "+rightOnly+" | "+commonDirs+" |")
print("+"+"-"*23+"+"+"-"*24+"+"+"-"*28+"+")
        # get the heights of the three columns and take the largest, to use as
        # the bound of the for loop below
sizes = []
sizes.append(len(dc.left_only))
sizes.append(len(dc.right_only))
sizes.append(len(dc.common))
for x in range(0,max(sizes)):
# 1st column
temp = "|"
if x < sizes[0]:
dcl = str(dc.left_only[x])
# lft = 23 - len(dc.left_only[x])
temp += dcl[0:23].ljust(23, ' ')
temp = temp.splitlines()
temp = "?".join(temp)
# print('{0:.23}'.format(dcl) + " "*lft, end="")
else:
temp += " "*23
temp += "|"
# 2nd column
if x < sizes[1]:
temp_right = dc.right_only[x]
# rght = 24 - len(dc.right_only[x])
# if temp.isprintable():
# print("is printable")
# else:
# print("is not printable")
temp += temp_right[0:24].ljust(24, ' ')
temp = temp.splitlines()
temp = "?".join(temp)
# if temp.isprintable():
# print("is printable")
# else:
# print("is not printable")
# print('{0:.24}'.format(dc.right_only[x]) + " "*rght, end="")
else:
temp += " "*24
# print(" "*24, end="")
# print("[temp = ]"+temp)
temp += "|"
# print("|", end="",flush=True)
# 3rd column
if x < sizes[2]:
temp_comm = dc.common[x]
# comp = 28 - len(dc.common[x])
temp += temp_comm[0:28].ljust(28, ' ')
temp = temp.splitlines()
temp = "?".join(temp)
# print('{0:.28}'.format(dc.common[x]) + " "*comp, end="")
else:
temp += " "*28
# print(" "*28, end="",flush=True)
temp += "|"
# print("|")
print(temp)
temp = ""
print("+"+"-"*23+"+"+"-"*24+"+"+"-"*28+"+")
Compare.print_red("left_only = "+str(len(dc.left_only)))
Compare.print_red("right_only: "+str(len(dc.right_only)))
Compare.print_red("common: "+str(len(dc.common)))
        # get the size of all columns and use the largest number for the "for loop"
# bigest = max(sizes)
"""
    Gives each side a score to help decide which version to delete at
    which location. Takes the two directories that share a folder name and
    determines which side contains more items, to help establish which
    folder version to keep and which to delete.
@param commonNumber The number of files/folders that both directories have
in common.
@param subdc The dc value from filecmp
@see filecmp#dircmp
{https://docs.python.org/3/library/filecmp.html#filecmp.dircmp}
"""
@staticmethod
def give_best_score(commonNumber,subdc):
# tv show folder common directs/folders number
# sub
leftScore = commonNumber
rightScore = commonNumber
# for subfolders in subdc.common_dirs:
leftScore += len(subdc.left_only)
rightScore += len(subdc.right_only)
leftScore += len(subdc.common)
rightScore += len(subdc.common)
# rightScore = len(subdc.right_only)+len(subdc.common)
# leftScore = len(subdc.left_only)+len(subdc.common)
Compare.print_red("left:"+str(leftScore)+" right: "+str(rightScore))
if leftScore > rightScore:
print("keep left, delete right")
elif rightScore > leftScore:
print("keep right, delete left")
else:
print("both equal")
# ============helper function======================
"""
    Uses the built-in "print" function but changes all the text to red
@param string - The string to print out red
@see print
"""
@staticmethod
def print_red(string):
print("\033[1;31;40m"+string+"\033[0;37;40m")
"""
Checks argv. If the program is not started with exactly 2 directories,
print an error message and exit the program.
"""
def check_argv():
if len(sys.argv) != 3:
print("\033[1;31;40m"+"name folder1 folder2\033[0;37;40m")
sys.exit(2)
# //TODO: create testing functions and stuff
"""
Main function
@param argv Arguments from commandline
"""
def main(argv):
# print(sys.argv[1])
# folder1="/Volumes/Mikaela\'s Hardrive/TV SHOWS"
# folder2 = "/Volumes/Grayson/Tv shows/"
check_argv()
comp = Compare(sys.argv[1],sys.argv[2])
folder1 = sys.argv[1]
folder2 = sys.argv[2]
dc = filecmp.dircmp(folder1, folder2)
comp.get_init_details(dc)
if __name__ == "__main__":
main(sys.argv)
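# (Added) Example invocation; the paths are hypothetical:
#     python FC.py "/Volumes/DriveA/TV SHOWS" "/Volumes/DriveB/Tv shows"
# The script prints colour-coded tables comparing the two directory trees, and
# option [4] recurses into the sub-directories the two trees have in common.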
| [
"[email protected]"
] | |
a85ca2c33438c8d0eb7d6e5178f79143aa5e13ea | 24d6790dedd7473002513b2d58aa7e03e1b9b7c2 | /Week4/lesson_files/using_hint.py | 67fbed258d62b75f184596f05e61855a747d0c34 | [
"MIT"
] | permissive | italoag/M101P | 3b50ddcc9935f6977d95f23dfc5aa7ee56a716e7 | 708bdd793735228f820f3f50f57c44ce8fc637ef | refs/heads/master | 2021-01-01T20:01:30.181394 | 2013-09-24T02:29:58 | 2013-09-24T02:29:58 | 12,883,560 | 0 | 0 | MIT | 2020-08-13T20:06:29 | 2013-09-17T01:43:13 | Python | UTF-8 | Python | false | false | 449 | py |
import pymongo
import sys
# establish a connection to the database
connection = pymongo.Connection("mongodb://localhost", safe=True)
# get a handle to the test database
db=connection.test
foo = db.foo
query = {'a':40000,'b':40000, 'c':40000}
try:
doc = foo.find(query).hint([('c', pymongo.ASCENDING)]).explain()
except:
    print "Unexpected error:", sys.exc_info()[0]
    sys.exit(1)  # without this, the loop below would hit a NameError: doc is unset
for key in doc:
print str(key).rjust(20),": ", str(doc[key])
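# (Added note) hint() only succeeds if a matching index exists; an assumed
# one-off setup in the mongo shell for this example would be:
#     use test
#     db.foo.ensureIndex({c: 1})
# The explain() document printed above then reports the plan that was forced
# onto the 'c' index instead of the one the optimizer would have picked.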
| [
"[email protected]"
] | |
d9711c688ed5e63261e8c5597b0c4d6284345508 | bcc6ba85119503185759f17ccc89c32699b9d82f | /lib/stochastic/tauleap.py | 041531859c706eea9ac1d70b768e23da13d0676d | [
"MIT"
] | permissive | JordiVillaFreixa/ByoDyn | 01ca756c0cd50b42ccb35585c174ec02c6349877 | be5554323299d806ed2def5e0e68207fc22367fa | refs/heads/main | 2023-08-07T08:53:25.448622 | 2021-10-07T09:25:41 | 2021-10-07T09:25:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,545 | py | #
# Project: ByoDyn
#
# Copyright (C) 2008 Alex Gomez-Garrido, Adrian L. Garcia-Lomana and Jordi Villa-Freixa
#
# Author: Pau Rue-Queralt
#
# Created: 2008-06-05 by Pau Rue-Queralt
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this library; if not, write to the Free
# Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111 USA.
#
# $Id: tauleap.py,v 1.4 2008/12/11 15:00:39 paurue Exp $
## \file
# This module contains the stochastic simulation algorithms for the tau-leap method.
import numpy as np
def simulate (evalPropensities, stoichiometry, x_0, time, tau, seed = None):
'''
Gillespie's Poisson tau-leap stochastic simulation algorithm
    evalPropensities function returning the vector of reaction propensities for a state x
stoichiometry stoichiometrty matrix, each column corresponds to a reaction
x_0 initial conditions
time simulation time
    tau the fixed leap length: the time step after which the propensities
    are re-evaluated and the system state is recorded
seed for the pseudo-random number generator
'''
def poisson(x):
'''
This function takes a list of (nonnegative) real numbers and samples a
Poisson random number for each of them with mean and variance given by
this. If any number in the list is negative, it is treated as zero.
'''
        xt = np.maximum(x, 0)
        # np.random.poisson is vectorised, so mapping it element by element is
        # unnecessary (and np.array(map(...)) breaks under Python 3, where map
        # returns an iterator rather than a list).
        return np.random.poisson(xt)
if seed != None:
np.random.seed(seed)
    nsteps = int(np.ceil(time/tau)) + 1  # int: np.zeros rejects float sizes
x = np.array(x_0, dtype = np.int64)
tHist = tau * np.arange(nsteps)
xHist = np.zeros([x.size, nsteps], dtype = np.int64)
xHist[:,0] = x
i = 1
while(i < nsteps and sum(abs(x)) > 0):
k = poisson(tau * evalPropensities(x))
update = np.dot(stoichiometry, k)
x += update
xHist[:,i] = x
i = i + 1
return xHist.T, tHist
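# (Added) A minimal usage sketch. The rate constants and the simple
# birth-death model (0 -> X at rate k1, X -> 0 at rate k2*x) are made up
# purely for illustration.
if __name__ == '__main__':
    k1, k2 = 10.0, 0.1
    def propensities(x):
        return np.array([k1, k2 * x[0]])
    stoichiometry = np.array([[1, -1]])  # one species, two reactions
    xHist, tHist = simulate(propensities, stoichiometry, [1],
                            time=10.0, tau=0.01, seed=42)
    print(xHist[-1], tHist[-1])  # state and time at the end of the run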
| [
"[email protected]"
] | |
4f949d20d79e29074f35b084d9b4298331cd0dc0 | 6d3b383497283fd479162c719fabf033cdcf076c | /groupAnagrams.py | 6e2a1d0c282d2f00e96e2b7d10d791a630ffcfd6 | [] | no_license | zunzunwang/python-leetcode | ea15b62929174397f39553280466c29a5883f235 | 2091a45cf825e3d1f8c318ed4e7d00f7fe97d017 | refs/heads/master | 2021-05-22T21:21:24.411217 | 2020-04-28T11:58:28 | 2020-04-28T11:58:28 | 253,101,880 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,474 | py | import collections
from typing import List
'''
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
All inputs will be in lowercase.
The order of your output does not matter.
review
Python: two ways to create a dict
d = {}
1 0 BUILD_MAP 0
2 RETURN_VALUE
d = dict()
1 0 LOAD_NAME 0 (dict)
2 CALL_FUNCTION 0
4 RETURN_VALUE
defaultdict takes a factory function as an argument and is constructed like:
d = defaultdict(factory_function)
The factory_function can be list, set, str, int and so on; when a key is
missing, the factory's default value is returned instead of raising KeyError:
list gives [], str gives the empty string, set gives set(), int gives 0.
For example:
from collections import defaultdict
dict1 = defaultdict(int)
dict2 = defaultdict(set)
dict3 = defaultdict(str)
dict4 = defaultdict(list)
dict1[2] ='two'
print(dict1[1])
print(dict2[1])
print(dict3[1])
print(dict4[1])
Output (note dict3[1] prints an empty string, i.e. a blank line):
0
set()

[]
'''
class Solution:
def groupAnagrams1(self, strs: List[str]) -> List[List[str]]:
"""
        Time Complexity: O(NK log K), where N is the length of strs, and K is the maximum length of a string in strs. The outer loop has complexity O(N) as we iterate through each string. Then, we sort each string in O(K log K) time.
        Space Complexity: O(NK), the total information content stored in ans.
:param strs:
:return:
"""
result = collections.defaultdict(list)
for e in strs:
result[tuple(sorted(e))].append(e)
        return list(result.values())  # list(): match the List[List[str]] annotation
def groupAnagrams(self, strs: List[str]) -> List[List[str]]:
'''
Complexity Analysis
        Time Complexity: O(NK), where N is the length of strs, and K is the maximum length of a string in strs. Counting each string is linear in the size of the string, and we count every string.
        Space Complexity: O(NK), the total information content stored in ans.
:param strs:
:return:
'''
result = collections.defaultdict(list)
for s in strs:
count = [0] * 26
for c in s:
count[ord(c) - ord('a')] += 1
result[tuple(count)].append(s)
        return list(result.values())  # list(): match the List[List[str]] annotation
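# (Added) Quick check using the example from the module docstring above:
if __name__ == '__main__':
    demo = Solution().groupAnagrams(["eat", "tea", "tan", "ate", "nat", "bat"])
    print(demo)  # -> [['eat', 'tea', 'ate'], ['tan', 'nat'], ['bat']]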
| [
"[email protected]"
] | |
ee3fa06556b05ef72b5e2a9ecbaaafb5b7c8d4d4 | 4f51b503c321f31a1088e1c884f2853d9ba127c2 | /flour_problem.py | 2b3b36f5941a103d38a8588e52496cf54e56202e | [] | no_license | pi-q/gluten_free_flour_optimization | 751e698b178e75bb3247e715fc10842540ae9286 | af93a8e6f2f30ea05f4ba3ea263661ee694a3b6f | refs/heads/main | 2023-01-12T23:50:48.834648 | 2020-11-16T14:29:52 | 2020-11-16T14:29:52 | 312,671,552 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,937 | py | # https://scipy-lectures.org/advanced/mathematical_optimization/#optimization-with-constraints
import numpy as np
import scipy.optimize
#https://www.fqmc.org/vivre-sans-gluten/infolettres-anterieures/valentin-2018/farines-sans-gluten
#https://tools.myfooddata.com/nutrition-facts/172023/100g/1
#Assume water ~ 100% - sum of all the rest
#Minerals = magnesium, potassium, phosphore
#Ash not included in this model
# [Starch, Sugars, Fiber, Proteins, Fat, Minerals ]
wheat = [ 0.7, 0.02, 0.03, 0.115, 0.013, 0.0055]
brown_rice = [ 0.76, 0.0066, 0.046, 0.0723, 0.0278, 0.0074]
white_rice = [ 0.80, 0.0012, 0.024, 0.0595, 0.0142, 0.0021]
tapioca = [ 0.89, 0.0002, 0., 0.0006, 0., 0.]
potato = [ 0.80, 0.035, 0.06, 0.007, 0.003, 0.0106]
arrowroot = [0.875, 0., 0.031, 0., 0., 0.0002]
almond = [ 0.16, 0.04, 0.12, 0.2, 0.47, 0.0134]
banana = [ 0.82, 0., 0.07, 0.036, 0., 0.0035]
chickpea = [ 0.47, 0.11, 0.11, 0.22, 0.07, 0.0139]
psyllium = [ 0.88, 0., 0.85, 0.02, 0., 0.0060] #mineral content unsure
quinoa = [0.648, 0.029, 0.065, 0.118, 0.059, 0.0015] #mineral content unsure
oat = [0.733, 0., 0.133, 0.133, 0.05, 0.0048] #mineral content unsure
teff = [0.707, 0., 0.122, 0.122, 0.037, 0.0025] #mineral content unsure
sorghum = [0.074, 0.01, 0.1, 0.11, 0.035, 0.0074]
millet = [0.734, 0.017, 0.035, 0.108, 0.043, 0.0065]
cassava = [0.886, 0., 0.057, 0., 0., 0.0046] #mineral content unsure
pea_protein = [ 0., 0., 0., 0.84, 0.15, 0.0050] #mineral content unsure
pumpkin_protein = [ 0.11, 0., 0.1, 0.65, 0.08, 0.0020] #mineral content unsure
A = np.array([ brown_rice,
white_rice,
tapioca,
potato,
arrowroot,
almond,
banana,
chickpea,
psyllium,
quinoa,
oat,
teff,
sorghum,
millet,
cassava,
pea_protein,
pumpkin_protein ])
#Function to optimize
def f(x):
#C = A x
# norm(Ax - Cd)**2
# Cd = np.array(wheat)
diff = (np.dot(np.transpose(A), np.transpose(x))) - np.array(wheat)
return np.dot(np.transpose(diff), diff)
#Equality constraint: the blend fractions must sum to 1 (used with type "eq" below)
def constraint(x):
return np.sum(x) - 1
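# (Added note) Overall this is a constrained linear least-squares problem:
#     minimize ||A^T x - wheat||^2   subject to   sum(x) = 1, 0 <= x_i <= 1,
# i.e. find blend fractions x whose combined nutrient profile A^T x best
# matches wheat flour; f and constraint above encode exactly that.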
def print_results(x, desired_flour_grams):
for idx in range(0, len(x)):
if x[idx] < 1.e-10:
x[idx] = 0
else:
x[idx] = round(x[idx] * desired_flour_grams, 1)
print "Desired flour amounts for a total of", str(desired_flour_grams), "g:"
print "brown rice: ", x[ 0], " g"
print "white rice: ", x[ 1], " g"
print "tapioca : ", x[ 2], " g"
print "potato : ", x[ 3], " g"
print "arrowroot : ", x[ 4], " g"
print "almond : ", x[ 5], " g"
print "banana : ", x[ 6], " g"
print "chickpea : ", x[ 7], " g"
print "psyllium : ", x[ 8], " g"
print "quinoa : ", x[ 9], " g"
print "oat : ", x[10], " g"
print "teff : ", x[11], " g"
print "sorghum : ", x[12], " g"
print "millet : ", x[13], " g"
print "cassava : ", x[14], " g"
print "pea : ", x[15], " g"
print "pumpkin : ", x[16], " g"
x0 = np.zeros(np.shape(A)[0])
x = scipy.optimize.minimize(f,
x0,
bounds = ((0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.), (0., 1.)),
constraints={"fun": constraint, "type": "eq"})
print " composition: [ Starch, Sugars, Fiber, Proteins, Fat, Minerals ]"
print "Target composition: ", str(wheat)
print "Obtained composition: ", str(np.dot(np.transpose(A), np.transpose(x.x)))
print_results(x.x, 115) | [
"[email protected]"
] | |
22d318dd89abba5341491b3b6eb850002e4371b4 | 51c5f1bec4c64f2f4596404dc54ac19de7cbc0f4 | /Python/LT/listas.py | b40a034008e1a46938f70f5a6b67cf00703a0db7 | [] | no_license | jos3s/Linguagens | 53c49f6cb9849562d3f95a9125158d030ba2a096 | a72c240e87b81bc9676de32da53fc01fb3130661 | refs/heads/master | 2020-11-24T03:30:33.173559 | 2020-04-17T00:19:45 | 2020-04-17T00:19:45 | 227,946,835 | 1 | 1 | null | 2020-02-11T15:10:48 | 2019-12-14T00:59:03 | Python | UTF-8 | Python | false | false | 437 | py | valores=[]
pares=[]
impares=[]
while True:
    valores.append(int(input('Enter a value: ')))
    opc = input('Keep going? [Y/N] ').strip().upper()
    # note: the original test `opc in 'Nn'` is also true for the empty
    # string, so a bare Enter ended the loop; compare explicitly instead
    if opc == 'N':
        break
for v in valores:  # the enumerate() index was never used
    if v % 2 == 0:
        pares.append(v)
    else:
        impares.append(v)
print(f'The values entered were {valores}')
print(f'The even values entered were {pares}')
print(f'The odd values entered were {impares}')
"[email protected]"
] | |
d5e6c3f2b59984b852d3eb925817cf4a395ad31e | bec2ccc5f19575518649932fb3f2853adf54c11e | /blog/myapp/migrations/0001_initial.py | d0dd55015f40c321d0a6d361bcfc47b094a5139d | [] | no_license | liuxinqiqi/djangosite | 08831c63c5fa5a4c8a14dd4bf8beed62138eb58a | 9a1b425cbdb73feb34d7fb1f60c3f2923e262d64 | refs/heads/master | 2022-12-13T11:00:07.039593 | 2017-08-12T08:40:03 | 2017-08-12T08:40:03 | 100,082,409 | 0 | 0 | null | 2022-12-08T00:43:01 | 2017-08-12T01:55:14 | JavaScript | UTF-8 | Python | false | false | 9,447 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-04 20:35
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('avatar', models.ImageField(blank=True, default='avatar/1.png', max_length=200, null=True, upload_to='avatar/%Y/%m', verbose_name='\u7528\u6237\u5934\u50cf')),
('qq', models.CharField(blank=True, max_length=20, null=True, verbose_name='QQ\u53f7\u7801')),
('mobile', models.CharField(blank=True, max_length=11, null=True, verbose_name='\u4e2a\u4eba\u7f51\u9875\u5730\u5740')),
('email', models.EmailField(blank=True, max_length=30, null=True, verbose_name='\u7535\u5b50\u90ae\u7bb1')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'ordering': ['-id'],
'verbose_name': '\u7528\u6237',
'verbose_name_plural': '\u7528\u6237',
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='Ad',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=50, verbose_name='\u5e7f\u544a\u6807\u9898')),
('description', models.CharField(max_length=100, verbose_name='\u5e7f\u544a\u63cf\u8ff0')),
('image', models.ImageField(upload_to='ad/%Y/%m', verbose_name='\u56fe\u7247\u8def\u5f84')),
('date_publish', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u8868\u65f6\u95f4')),
('back_content', models.URLField(blank=True, null=True, verbose_name='\u53cd\u9988\u5185\u5bb9')),
],
options={
'ordering': ['id'],
'verbose_name': '\u5e7f\u544a',
'verbose_name_plural': '\u5e7f\u544a',
},
),
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=150, verbose_name='\u6587\u7ae0\u6807\u9898')),
('desc', models.CharField(max_length=50, verbose_name='\u6587\u7ae0\u63cf\u8ff0')),
('content', models.TextField(verbose_name='\u6587\u7ae0\u5185\u5bb9')),
('click_count', models.IntegerField(default=0, verbose_name='\u70b9\u51fb\u6b21\u6570')),
('is_recommend', models.BooleanField(default=False, verbose_name='\u662f\u5426\u63a8\u8350')),
('date_publish', models.DateTimeField(verbose_name='\u53d1\u8868\u65f6\u95f4')),
],
options={
'ordering': ['-date_publish'],
'verbose_name': '\u6587\u7ae0',
'verbose_name_plural': '\u6587\u7ae0',
},
),
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30, verbose_name='\u5206\u7c7b\u540d\u79f0')),
('index', models.IntegerField(default=999, verbose_name='\u5206\u7c7b\u7684\u6392\u5e8f')),
],
options={
'ordering': ['index', 'id'],
'verbose_name': '\u7c7b\u578b',
'verbose_name_plural': '\u7c7b\u578b',
},
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField(verbose_name='\u8bc4\u8bba\u5185\u5bb9')),
('username', models.CharField(blank=True, max_length=30, null=True, verbose_name='\u7528\u6237\u540d')),
('email', models.EmailField(blank=True, max_length=50, null=True, verbose_name='\u90ae\u7bb1\u5730\u5740')),
('url', models.URLField(blank=True, max_length=50, null=True, verbose_name='\u4e2a\u4eba\u7f51\u9875\u5730\u5740')),
('date_publish', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u8868\u65f6\u95f4')),
('article', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='myapp.Article', verbose_name='\u6587\u7ae0')),
('parent_id', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='myapp.Comment', verbose_name='\u7236\u7ea7\u8bc4\u8bba')),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='\u7528\u6237')),
],
options={
'verbose_name': '\u8bc4\u8bba',
'verbose_name_plural': '\u8bc4\u8bba',
},
),
migrations.CreateModel(
name='Links',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=30, verbose_name='\u6807\u9898')),
('back_content', models.URLField(verbose_name='\u8fd4\u56de\u7f51\u9875')),
('desc', models.CharField(max_length=100, verbose_name='\u94fe\u63a5\u63cf\u8ff0')),
('date_publish', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
],
options={
'ordering': ['id'],
'verbose_name': '\u53cb\u60c5\u94fe\u63a5',
'verbose_name_plural': '\u53cb\u60c5\u94fe\u63a5',
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30, verbose_name='\u6807\u7b7e\u540d\u79f0')),
],
options={
'verbose_name': '\u6807\u7b7e',
'verbose_name_plural': '\u6807\u7b7e',
},
),
migrations.AddField(
model_name='article',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='myapp.Category', verbose_name='\u5206\u7c7b'),
),
migrations.AddField(
model_name='article',
name='tag',
field=models.ManyToManyField(to='myapp.Tag', verbose_name='\u6807\u7b7e'),
),
migrations.AddField(
model_name='article',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='\u7528\u6237'),
),
]
| [
"[email protected]"
] | |
24435742a100eb44eebbaaf0e1f1c8741128254a | 79e1a5ad019b261034bc6338e894679d3f5d54d9 | /Min Cost Climbing Stairs.py | 9a9950a033bd4e855d1802436a93afb1b12d2cc0 | [
"MIT"
] | permissive | ngdeva99/Fulcrum | c615f457ec34c563199cc1dab243ecc62e23ad0b | 3a5c69005bbaf2a5aebe13d1907f13790210fb32 | refs/heads/master | 2022-12-15T19:35:46.508701 | 2020-09-09T06:47:48 | 2020-09-09T06:48:08 | 294,027,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | from typing import List  # needed for the List[int] annotation below
class Solution:
def minCostClimbingStairs(self, cost: List[int]) -> int:
f1 = f2 = 0
for x in reversed(cost):
            f1, f2 = x + min(f1, f2), f1
return min(f1, f2)
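# (Added) Rolling DP: f1 and f2 hold the minimum cost to reach the top from
# steps i and i+1 while scanning right to left, i.e.
#     f(i) = cost[i] + min(f(i+1), f(i+2)),   answer = min(f(0), f(1)).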
| [
"[email protected]"
] | |
84c5dce05f108711b9f0c7145f5a308f8dc04b85 | b001b44c95f4a7c5574385baa4fe72c5f3d02236 | /home/migrations/0023_auto_20181105_1127.py | de37b9f338419b1882e0f38acef6049ef0a5a320 | [
"MIT"
] | permissive | gjergjk71/Attendence | 3ae9b9f3cb3c4e0bfe2addf4124b7612a78f1533 | 9e9370125bfc4958de02171a3ae4c8e16bf10913 | refs/heads/master | 2020-04-14T08:46:58.649674 | 2019-01-01T14:27:29 | 2019-01-01T14:27:29 | 163,743,067 | 0 | 0 | MIT | 2019-01-01T14:34:28 | 2019-01-01T14:34:28 | null | UTF-8 | Python | false | false | 491 | py | # Generated by Django 2.1.3 on 2018-11-05 05:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('home', '0022_auto_20181104_1620'),
]
operations = [
migrations.AlterField(
model_name='semester_1',
name='professerr_name',
field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='home.Sections'),
),
]
| [
"[email protected]"
] | |
b1fca8e72085fe1fa234e38b71acb35fdb96361e | ee054fbb74b9d015b5b2e61f5801a7117ee6f872 | /postgresql/import_csv.py | fd4947158711fcea2fab090957aaa58d02521209 | [] | no_license | Sperryfreak01/other | 992571ca9dee8a6b0c07a408cb3b9d0bf7a7097b | f38be8d089316a0c56c3e94ef1683ec64c218f92 | refs/heads/master | 2021-05-30T08:15:06.427689 | 2016-02-07T00:27:03 | 2016-02-07T00:27:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,309 | py | """
Script to load csv files into remote PostgreSQL database
Overview:
1. Get list of files to import
2. Get list of tables
3. Clear each table of existing data
4. Import new csv data into each table
"""
import os
import psycopg2
# Get list of files to import
file_list = os.listdir('C:/Users/pfriedhoff/Desktop/Arena Extracts')
file_list.sort()
# Setup connection: http://initd.org/psycopg/docs/module.html
try:
connection = psycopg2.connect(database = 'NTarenadump', host = 'nt-arena-dump.co2objn467gs.us-west-2.rds.amazonaws.com', user = 'pfriedhoff', password = '')
except psycopg2.Error as e:
print ('Cannot connect to db: %s, %s' % (e.pgcode, e.pgerror))
# Get list of tables from database
cursor = connection.cursor()
cursor.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'")
table_list = []
for i in cursor.fetchall():
table_list.append(i)
table_list.sort()
cursor.close()
# Build {table: file} dictionary
tables_to_files = dict(zip(table_list, file_list))
# Create tables (look into how to cursor.execute(my_sql_file.sql))
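# (Added) A minimal sketch of the TODO above; the file name is hypothetical.
# psycopg2 executes a multi-statement script passed as a single string:
#     with open('create_tables.sql') as f:
#         cursor.execute(f.read())
#     connection.commit()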
# Drop (remove) existing tables [not really needed]
# cursor = connection.cursor()
# for i in table_list:
# try:
# cursor.execute("DROP TABLE IF EXISTS %s" % i)
# except psycopg2.Error as e:
# print ('Something went wrong dropping table: %s' % i)
# print ('%s, %s' % (e.pgcode, e.pgerror))
# break
# else:
# print ('Dropped: %s' % i)
# connection.commit()
# cursor.close()
# Delete existing data from tables
cursor = connection.cursor()
for i in table_list:
try:
cursor.execute("DELETE FROM %s" % i)
except psycopg2.Error as e:
print ('Something went wrong deleting data from %s' % i)
print ('%s, %s' % (e.pgcode, e.pgerror))
break
else:
print ('Deleted from: %s' % i)
connection.commit()
cursor.close()
# Import csv data using copy_expert()
cursor = connection.cursor()
for k, v in tables_to_files.items():
try:
cursor.copy_expert(sql = "COPY %s FROM STDIN WITH DELIMITER ',' CSV HEADER" % k, file = open('C:/Users/pfriedhoff/Desktop/Arena Extracts/%s' % v, encoding = 'utf8'))
connection.commit()
except psycopg2.Error as e:
print ('Import error: %s, %s' % (k, v))
print ('%s, %s' % (e.pgcode, e.pgerror))
break
else:
print ('File copied: %s' % v)
cursor.close()
connection.close() | [
"[email protected]"
] | |
ed491ef29d52c9db882bb740da80eef161bcd96c | aeb1bf8e189d7aabe55d86697f0b5712e35ed6ab | /KontoInhaber/main.py | 4bcb79d95c46578c487cca865e7892ea3fdbc5f6 | [
"MIT"
] | permissive | sandrodegiorgi/pythonits | bdb1b870fd3844aadf81082e7e0e6c6ecede3a1f | 1677e8fcf7c117c7657273ef0f6cc98718c5895b | refs/heads/master | 2023-05-30T17:37:39.108912 | 2021-06-17T17:08:32 | 2021-06-17T17:08:32 | 377,806,160 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,160 | py | # This is a sample Python script.
# Press Umschalt+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
from Girokonto import Girokonto
from Inhaber import Inhaber
from Sparkonto import Sparkonto
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
erstesKonto = Girokonto(499, 500, 1000)
ersterInhaber = Inhaber("Helga",erstesKonto)
erstesKonto.setInhaber(ersterInhaber)
print(erstesKonto.getInhaber().getName())
naechstesKonto = Girokonto(500, 100, 319)
print(naechstesKonto.getKontoNr())
print(naechstesKonto.getKontostand())
naechstesKonto.einzahlen(100)
print(naechstesKonto.getKontostand())
naechstesKonto.abheben(150)
print(naechstesKonto.getKontostand())
anderesKonto = Sparkonto(501)
print(anderesKonto.getKontostand())
anderesKonto.einzahlen(100)
einKunde = Inhaber("Bruno", anderesKonto)
einKunde.fuegeKontoHinzu(naechstesKonto)
print(einKunde.berechneGesamtStand())
print(einKunde.berechneGesamtGebuehr())
| [
"[email protected]"
] | |
31ff3e39a7a68715fe32a0839c2e2d057417d29b | 931db874c996cfedea38607922e3ceef7497a727 | /api.py | 5e635899c35e455759048e741505c14bf497a3e7 | [] | no_license | Cy8erEgo/vk-mailing-bot | f809fd501504909cc7f2f5359b0eb7ecbb07a654 | 4cb3a834c742578fb6c03fffa8170829a774f830 | refs/heads/master | 2023-01-02T06:10:59.950392 | 2020-10-25T13:26:15 | 2020-10-25T13:26:15 | 306,930,016 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,917 | py | import random
from time import sleep
import requests
from typing import List
_BASE_URL = "https://api.vk.com/method/"
_HTTP_HEADERS = {"Content-Type": "application/json;charset=utf-8"}
_API_VERSION = 5.124
class IncorrectTokenException(Exception):
pass
class Api:
def __init__(self, token: str):
self._token = token
def _query(self, endpoint: str, params: dict = None, data: dict = None, method: str = "GET") -> dict:
url = "{}{}".format(_BASE_URL, endpoint)
if not params:
params = {}
params["access_token"] = self._token
params["v"] = _API_VERSION
if method == "GET":
response = requests.get(url, params=params)
elif method == "POST":
response = requests.post(url, data=data, params=params)
else:
raise ValueError("Incorrect method passed")
json_ = response.json()
if "error" in json_:
if json_["error"]["error_code"] == 5:
raise IncorrectTokenException
else:
raise RuntimeError(json_["error"]["error_msg"])
return json_
def get_chats(self, max_count: int = 0) -> list:
chats = []
offset = 0
while 1:
            # compute this batch's count from max_count
if max_count and max_count - len(chats) < 200:
count = max_count - len(chats)
if count == 0:
break
else:
count = 200
            # fetch a batch of conversations
response = self._query(
"messages.getConversations", {"count": count, "offset": offset}
)
chats_portion = response["response"]["items"]
chats.extend(chats_portion)
            # leave the loop if this was the last batch of chats
if len(chats_portion) < 200:
break
offset += 200
return chats
def mailing(self, user_ids: List[int], text: str, delay: int = 0) -> None:
offset = 0
while True:
            # build a comma-separated list of up to 100 recipients
peer_ids = ",".join(
[str(user_id) for user_id in user_ids[offset : offset + 100]]
)
            # generate a unique message ID
random_id = random.randint(1_000_000_000, 9_999_999_999)
            # send the message to the recipients above
self._query(
"messages.send",
data={"peer_ids": peer_ids, "message": text, "random_id": random_id},
method="POST"
)
offset += 100
if offset >= len(user_ids):
break
sleep(delay)
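# (Added) Usage sketch; the token is a placeholder and the item layout is
# assumed from VK's messages.getConversations response:
#     api = Api("vk_access_token")
#     chats = api.get_chats(max_count=50)
#     user_ids = [c["conversation"]["peer"]["id"] for c in chats]
#     api.mailing(user_ids, "hello", delay=1)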
| [
"[email protected]"
] | |
3afe47de2ced6f21ac6206f27a8fca59cac32d6b | ff1324c007edb02dc4fa84c428f7b1b3b45bd318 | /yql3/__init__.py | 7ac9f82c1e616113f2d6f711b952b16acb756f2a | [] | no_license | softsign/yahoo_ff_bot | d4888d74cf7d1c507b86e6daa1ec655951fc42c3 | d5bbc7a8198153cff805fac82b08463a79dc47e6 | refs/heads/master | 2020-03-28T13:25:52.525901 | 2018-09-12T03:30:13 | 2018-09-12T03:30:13 | 148,394,868 | 1 | 0 | null | 2018-09-12T03:32:40 | 2018-09-11T23:56:38 | Python | UTF-8 | Python | false | false | 21,350 | py | """
Python YQL
==========
YQL client for Python
Author: Stuart Colville http://muffinresearch.co.uk/
Docs at: http://python-yql.org/
Updates by: Joe M
TODO: More granular error handling
"""
import json
import re
import time
import pprint
from urllib.parse import urlparse, urlencode
from httplib2 import Http
from six import string_types
from yql3.utils import get_http_method, clean_url, clean_query
from yql3.logger import get_logger
import oauth2 as oauth
try:
from urllib.parse import parse_qs, parse_qsl
except ImportError: # pragma: no cover
from cgi import parse_qs, parse_qsl
__author__ = 'Stuart Colville'
__version__ = '0.7.7'
__all__ = ['Public', 'TwoLegged', 'ThreeLegged']
QUERY_PLACEHOLDER = re.compile(r"[ =]@(?P<param>[a-z].*?\b)", re.IGNORECASE)
REQUEST_TOKEN_URL = 'https://api.login.yahoo.com/oauth/v2/get_request_token'
ACCESS_TOKEN_URL = 'https://api.login.yahoo.com/oauth/v2/get_token'
AUTHORIZATION_URL = 'https://api.login.yahoo.com/oauth/v2/request_auth'
PUBLIC_ENDPOINT = 'https://query.yahooapis.com/v1/public/yql'
PRIVATE_ENDPOINT = 'https://query.yahooapis.com/v1/yql'
yql_logger = get_logger()
class YQLObj(object):
"""A YQLObject is the object created as the result of a YQL query"""
def __init__(self, result_dict):
"""Init query object"""
self._raw = result_dict and result_dict.get('query') or {}
@property
def raw(self):
"""The raw data response"""
return self._raw
@property
def uri(self):
"""The uri used to query the YQL API"""
return self._raw.get('uri')
@property
def query_params(self):
"""The query parameters of the uri used to call the YQL API"""
if self.uri:
q_string = urlparse(self.uri)[4]
return dict(parse_qsl(q_string))
else:
return {}
@property
def results(self):
"""The query results dict."""
return self._raw.get('results')
def one(self):
"""Return just one result directly."""
rows = self.rows
if len(rows) > 1:
raise NotOneError("More than one result")
else:
return rows[0]
@property
def rows(self):
"""Get a list of rows returned by the query.
        Results is a dict with one key, but that key changes depending on the
        query. This provides a way of getting at the rows list generically.
Added in version: 0.6 fixes results with 1 item so that they are still
returned within a list.
"""
result = []
if self.results:
vals = self.results.values()
if len(vals) == 1:
result = list(self.results.values())[0]
if self.count == 1 and result:
result = [result]
return result
@property
def query(self):
"""The YQL query"""
return self.query_params.get('q')
@property
def lang(self):
"""The language"""
return self._raw.get('lang')
@property
def count(self):
"""The results count"""
count = self._raw.get('count')
if count:
return int(count)
@property
def diagnostics(self):
"""The query diagnostics"""
return self._raw.get('diagnostics')
def pprint_raw(self, indent=4): # pragma: no cover
"""Pretty print the raw data"""
pprint.pprint(self._raw, indent=indent)
def pformat_raw(self, indent=4): # pragma: no cover
"""Pretty format the raw data"""
return pprint.pformat(self._raw, indent=indent)
class YQLError(Exception):
"""Default Error"""
def __init__(self, resp, content, url=None, query=None):
yql_logger.error("%s", content)
yql_logger.error("Error Response: %s", resp)
yql_logger.error("Error url: %s", url)
self.response = resp
self.content = content
self.url = url
self.query = query
def __str__(self):
"""Return the error message.
Attempt to parse the json if it fails
simply return the content attribute instead.
"""
try:
content = json.loads(self.content)
except:
content = {}
if content and content.get("error") and content["error"].get(
"description"):
return content['error']['description']
else:
if isinstance(self.content, string_types):
return self.content
else:
return repr(self.content)
class NotOneError(Exception):
"""Not One Error."""
def __init__(self, message):
self.message = message
def __str__(self):
"""Return the error message"""
return self.message
class YQLQuery(object):
"""A YQL Query class used to inspect and validate a query"""
def __init__(self, query):
self.query = clean_query(query)
def __str__(self):
"""Return the query"""
return self.query
def get_http_method(self):
"""Return the HTTP method associated with the type of this query"""
return get_http_method(self.query)
def get_placeholder_keys(self):
"""Gets the @var placeholders
http://developer.yahoo.com/yql/guide/var_substitution.html
"""
result = []
for match in QUERY_PLACEHOLDER.finditer(self.query):
result.append(match.group('param'))
if result:
yql_logger.debug("placeholder_keys: %s", result)
return result
def validate(self, substitutions=None):
"""Validate the query placeholders"""
placeholders = set(self.get_placeholder_keys())
if substitutions is not None:
self._validate_substitutions(substitutions, placeholders)
elif placeholders:
raise ValueError("Query uses placeholders so a dictionary "
"of substitutions is required")
return True
def _validate_substitutions(self, substitutions, placeholders):
if hasattr(substitutions, 'keys'):
self._validate_substitutions_dictionary(placeholders, substitutions)
else:
raise ValueError("Substitutions must be a dictionary.")
def _validate_substitutions_dictionary(self, placeholders, substitutions):
if not placeholders:
raise ValueError("Got a dictionary of substitutions but "
"the query doesn't have any placeholders")
if set(placeholders) != set(substitutions.keys()):
raise ValueError("Substitution keys don't match "
"the placeholders")
class Public(object):
"""Class for making public YQL queries"""
def __init__(self, api_key=None, shared_secret=None, httplib2_inst=None):
"""Init the base class.
Optionally you can pass in an httplib2 instance which allows you
to set-up the instance in a different way for your own uses.
Also it's very helpful in a testing scenario.
"""
self.api_key = api_key
self.secret = shared_secret
self.http = httplib2_inst or Http()
self.endpoint = PUBLIC_ENDPOINT
def get_query_params(self, query, params, **kwargs):
"""Get the query params and validate placeholders"""
query_params = {}
if query.validate(params) and params:
query_params.update(params)
query_params['q'] = query.query
query_params['format'] = 'json'
env = kwargs.get('env')
if env:
query_params['env'] = env
return query_params
def get_uri(self, query, params=None, **kwargs):
"""Get the the request url"""
if isinstance(query, string_types):
query = YQLQuery(query)
params = self.get_query_params(query, params, **kwargs)
query_string = urlencode(params)
uri = '%s?%s' % (self.endpoint, query_string)
uri = clean_url(uri)
return uri
def execute(self, query, params=None, **kwargs):
"""Execute YQL query"""
yqlquery = YQLQuery(query)
url = self.get_uri(yqlquery, params, **kwargs)
yql_logger.debug("executed url: %s", url)
http_method = yqlquery.get_http_method()
if http_method in ["DELETE", "PUT", "POST"]:
data = {"q": query}
# Encode as json and set Content-Type header
# to reflect we are sending JSON
# Fixes LP: 629064
data = json.dumps(data)
headers = {"Content-Type": "application/json"}
resp, content = self.http.request(
url, http_method, headers=headers, body=data)
yql_logger.debug("body: %s", data)
else:
resp, content = self.http.request(url, http_method)
yql_logger.debug("http_method: %s", http_method)
if resp.get('status') == '200':
return YQLObj(json.loads(content.decode('utf-8')))
else:
raise YQLError(resp, content)
class TwoLegged(Public):
"""Two legged Auth is simple request which is signed prior to sending"""
def __init__(self, api_key, shared_secret, httplib2_inst=None):
"""Override init to ensure required args"""
super(TwoLegged, self).__init__(api_key, shared_secret, httplib2_inst)
self.endpoint = PRIVATE_ENDPOINT
self.hmac_sha1_signature = oauth.SignatureMethod_HMAC_SHA1()
self.plaintext_signature = oauth.SignatureMethod_PLAINTEXT()
def get_signature(self, url):
url_parts = urlparse(url)
scheme = url_parts.scheme
if scheme == "http":
sig = self.hmac_sha1_signature
elif scheme == "https":
sig = self.plaintext_signature
else:
raise ValueError("Invalid scheme: %s " % scheme)
return sig
@staticmethod
def get_base_params():
"""Set-up the basic parameters needed for a request"""
params = {}
params['oauth_version'] = "1.0"
params['oauth_nonce'] = oauth.generate_nonce()
params['oauth_timestamp'] = int(time.time())
return params
def __two_legged_request(self, parameters=None, method=None):
"""Sign a request for two-legged authentication"""
params = self.get_base_params()
if parameters:
params.update(parameters)
url = self.endpoint
yql_logger.debug("params: %s", params)
yql_logger.debug("endpoint_url: %s", url)
if not method:
method = "GET"
consumer = oauth.Consumer(self.api_key, self.secret)
request = oauth.Request(method=method, url=url, parameters=params)
sig = self.get_signature(url)
yql_logger.debug("signature: %s", sig)
request.sign_request(sig, consumer, None)
return request
def get_uri(self, query, params=None, **kwargs):
"""Get the the request url"""
if isinstance(query, string_types):
query = YQLQuery(query)
query_params = self.get_query_params(query, params, **kwargs)
http_method = query.get_http_method()
request = self.__two_legged_request(parameters=query_params,
method=http_method)
url = request.to_url()
return clean_url(url)
class ThreeLegged(TwoLegged):
"""
Three-legged Auth is used when it involves private data such as a
user's contacts.
Three-legged auth is most likely to be used in a web-site or
web-accessible application. Three-legged auth requires the user
to authenticate the request through the Yahoo login.
Three-legged auth requires the implementation to:
* Request a token
* Get a authentication url
* User uses the auth url to login which will redirect to a callback
or shows a verfier string on screen
* Verifier is read at the callback url or manually provided to get
the access token
* resources is access
For an implementation this will require calling the following methods
in order the first time the user needs to authenticate
* :meth:`get_token_and_auth_url` (returns a token and the auth url)
* get verifier through callback or from screen
* :meth:`get_access_token` (returns the access token)
* :meth:`execute` - makes the request to the protected resource.
Once the access token has been provided subsequent requests can re-use it.
Access tokens expire after 1 hour, however they can be refreshed with
the :meth:`refresh_token` method
"""
def __init__(self, api_key, shared_secret, httplib2_inst=None):
"""Override init to add consumer"""
super(ThreeLegged, self).__init__(
api_key, shared_secret, httplib2_inst)
self.consumer = oauth.Consumer(self.api_key, self.secret)
def get_token_and_auth_url(self, callback_url=None):
"""First step is to get the token and then send the request that
provides the auth URL
Returns a tuple of token and the authorisation URL.
"""
client = oauth.Client(self.consumer)
params = {}
params['oauth_callback'] = callback_url or 'oob'
request = oauth.Request(parameters=params)
url = REQUEST_TOKEN_URL
resp, content = client.request(url, "POST", request.to_postdata())
if resp.get('status') == '200':
token = oauth.Token.from_string(content)
yql_logger.debug("token: %s", token)
data = dict(parse_qsl(content))
yql_logger.debug("data: %s", data)
return token, data[b'xoauth_request_auth_url']
else:
raise YQLError(resp, content, url)
def get_access_token(self, token, verifier):
"""Get the access token
        The verifier (required) should have been provided to the
        user following login at the url returned
        by the :meth:`get_token_and_auth_url` method.
        If not you will need to extract the auth_verifier
parameter from your callback url on the site where you
are implementing 3-legged auth in order to pass it to this
function.
The access token can be stored and re-used for subsequent
calls.
The stored token will also need to be refreshed periodically
with :meth:`refresh_token`
"""
params = {}
params['oauth_verifier'] = verifier
oauth_request = oauth.Request.from_consumer_and_token(
self.consumer, token=token,
http_url=ACCESS_TOKEN_URL,
http_method="POST",
parameters=params)
yql_logger.debug("oauth_request: %s", oauth_request)
oauth_request.sign_request(
self.hmac_sha1_signature, self.consumer, token)
url = oauth_request.to_url()
yql_logger.debug("oauth_url: %s", url)
postdata = oauth_request.to_postdata()
yql_logger.debug("oauth_postdata: %s", postdata)
resp, content = self.http.request(url, "POST", postdata)
if resp.get('status') == '200':
access_token = YahooToken.from_string(content)
access_token.timestamp = oauth_request['oauth_timestamp']
return access_token
else:
raise YQLError(resp, content, url)
def check_token(self, token):
"""Check to see if a token has expired"""
if not hasattr(token, 'timestamp'):
raise AttributeError('token doesn\'t have a timestamp attribute')
if (int(token.timestamp) + 3600) < time.time():
token = self.refresh_token(token)
return token
def refresh_token(self, token):
"""Access Tokens only last for one hour from the point of being issued.
        When a token has expired it needs to be refreshed; this method takes an
        expired token and refreshes it.
token parameter can be either a token object or a token string.
"""
if not hasattr(token, "key"):
token = YahooToken.from_string(token)
params = self.get_base_params()
params['oauth_token'] = token.key
params['oauth_token_secret'] = token.secret
if hasattr(token, "session_handle"):
params['oauth_session_handle'] = token.session_handle
oauth_request = oauth.Request.from_consumer_and_token(
self.consumer, token=token,
http_url=ACCESS_TOKEN_URL,
http_method="POST",
parameters=params)
yql_logger.debug("oauth_request: %s", oauth_request)
oauth_request.sign_request(
self.hmac_sha1_signature, self.consumer, token)
url = oauth_request.to_url()
yql_logger.debug("oauth_url: %s", url)
postdata = oauth_request.to_postdata()
yql_logger.debug("oauth_postdata: %s", postdata)
resp, content = self.http.request(url, "POST", postdata)
if resp.get('status') == '200':
access_token = YahooToken.from_string(content)
yql_logger.debug("oauth_access_token: %s", access_token)
access_token.timestamp = oauth_request['oauth_timestamp']
return access_token
else:
raise YQLError(resp, content, url)
def get_uri(self, query, params=None, **kwargs):
"""Get the the request url"""
if isinstance(query, string_types):
query = YQLQuery(query)
query_params = self.get_query_params(query, params, **kwargs)
token = kwargs.get("token")
if hasattr(token, "yahoo_guid"):
query_params["oauth_yahoo_guid"] = getattr(token, "yahoo_guid")
if not token:
raise ValueError("Without a token three-legged-auth cannot be"
" carried out")
yql_logger.debug("query_params: %s", query_params)
http_method = query.get_http_method()
url = self.endpoint
oauth_request = oauth.Request.from_consumer_and_token(
self.consumer, http_url=url,
token=token, parameters=query_params,
http_method=http_method)
yql_logger.debug("oauth_request: %s", oauth_request)
# Sign request
sig = self.get_signature(url)
oauth_request.sign_request(sig, self.consumer, token)
yql_logger.debug("oauth_signed_request: %s", oauth_request)
url = oauth_request.to_url()
url = clean_url(url)
return url.replace('+', '%20').replace('%7E', '~')
def convert(data):
if isinstance(data, bytes): return data.decode('ascii')
if isinstance(data, dict): return dict(map(convert, data.items()))
if isinstance(data, tuple): return map(convert, data)
return data
class YahooToken(oauth.Token):
"""A subclass of oauth.Token with the addition of a place to
stash the session_handler which is required for token refreshing
"""
@staticmethod
def from_string(data_string):
"""Deserializes a token from a string like one returned by
`to_string()`."""
if not len(data_string):
raise ValueError("Invalid parameter string.")
orig_params = parse_qs(data_string, keep_blank_values=False)
params = convert(orig_params)
if not len(params):
raise ValueError("Invalid parameter string.")
try:
key = params['oauth_token'][0]
except Exception:
raise ValueError("'oauth_token' not found in OAuth request.")
try:
secret = params['oauth_token_secret'][0]
except Exception:
raise ValueError("'oauth_token_secret' not found in "
"OAuth request.")
token = YahooToken(key, secret)
session_handle = params.get('oauth_session_handle')
if session_handle:
setattr(token, 'session_handle', session_handle[0])
timestamp = params.get('token_creation_timestamp')
if timestamp:
setattr(token, 'timestamp', timestamp[0])
try:
token.callback_confirmed = params['oauth_callback_confirmed'][0]
except KeyError:
pass # 1.0, no callback confirmed.
return token
def to_string(self):
"""Returns this token as a plain string, suitable for storage.
The resulting string includes the token's secret, so you should never
send or store this string where a third party can read it.
"""
data = {
'oauth_token': self.key,
'oauth_token_secret': self.secret,
}
if hasattr(self, 'session_handle'):
data['oauth_session_handle'] = self.session_handle
if hasattr(self, 'timestamp'):
data['token_creation_timestamp'] = self.timestamp
if self.callback_confirmed is not None:
data['oauth_callback_confirmed'] = self.callback_confirmed
return urlencode(data)
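# Round-trip sketch (illustrative, not part of the original module): to_string()
# preserves the session handle and creation timestamp that refresh_token() needs:
#   t = YahooToken('key', 'secret'); t.session_handle = 'handle'
#   t2 = YahooToken.from_string(t.to_string())   # t2.session_handle == 'handle'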
| [
"[email protected]"
] | |
081eec9129daf0365812630378010bba524f2a46 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_246/ch31_2020_03_19_21_18_43_133674.py | 96a74701687347579e4909926913a5fbe4407c46 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | def eh_primo(x):
if x==25:
return False
elif x==3:
return True
elif x==2:
return True
elif x==1:
return False
elif x%2==0:
return False
else:
y=3
while not x%y==0:
y=y+2
return True
return False
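# Illustrative checks (not part of the original exercise): the sqrt-bounded
# trial division above now rejects odd composites such as 9, 25 and 49.
# eh_primo(25) -> False, eh_primo(29) -> True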
| [
"[email protected]"
] | |
00e7a05b4daac5c0a2b0e1da8441ad4c7712e449 | 4718417c66065904e53c141c476c8b232f96ed55 | /June16/WordAndCharCount.py | faac691ae448bf1f74fd93e9eeb9ffd36333c96a | [] | no_license | alvas-education-foundation/Srilatha-K-Kamath-Daily-Report | c28dec426f1e57b65fd21d7370b7d742f823c22f | 6d56662633ae5b1df87dd6d5c23232c1a09dcd34 | refs/heads/master | 2022-11-30T04:59:26.548878 | 2020-08-19T12:20:01 | 2020-08-19T12:20:01 | 265,306,429 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | string=input('Enter the String: ')
wordcount=0
charcount=0
for word in string.split():
    wordcount += 1
    charcount += len(word)
print('Word count is ',wordcount)
print('Character count is ',charcount)
| [
"[email protected]"
] | |
0b4074c7855ab1f8347ee4e06bf3d439e4e0ac55 | c01fdb66ee949ce14fa7b2fc64c94b15a9ba645f | /3.无重复字符的最长子串.py | cc705193f7011053287ff3b0614d4b83c0a47fb0 | [] | no_license | yxudong/LeetCode | fab58e73cf25fed435f9bd8365503ca0a1a16ac0 | 5fa8d84f91f6d2583a963313fde8fda48301c495 | refs/heads/master | 2021-08-04T03:50:52.409993 | 2021-07-15T08:33:52 | 2021-07-15T08:33:52 | 167,906,353 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 833 | py | #
# @lc app=leetcode.cn id=3 lang=python3
#
# [3] 无重复字符的最长子串
#
# @lc code=start
# Tips: string, sliding-window, two-pointers, hash-table
class Solution:
def lengthOfLongestSubstring(self, s: str) -> int:
len_s = len(s)
if len_s <= 1:
return len_s
max_sub_len, right_point = 1, 1
set_s = set()
set_s.add(s[0])
for left_point in range(0, len_s):
while right_point < len_s:
if s[right_point] not in set_s:
set_s.add(s[right_point])
right_point = right_point + 1
max_sub_len = max(max_sub_len, right_point - left_point)
else:
set_s.remove(s[left_point])
break
return max_sub_len
# @lc code=end
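# Quick local checks (illustrative, not part of the LeetCode harness):
# Solution().lengthOfLongestSubstring("abcabcbb") == 3  # "abc"
# Solution().lengthOfLongestSubstring("bbbbb") == 1     # "b"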
| [
"[email protected]"
] | |
cfee516f6a9befc78f63ec344ce9bc4d23c5925f | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/classification/convmixer/timm/models/layers/drop.py | df2526309ae305202741bf5836e408b6b8e94bc5 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 8,618 | py | # BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
""" DropBlock, DropPath
PyTorch implementations of DropBlock and DropPath (Stochastic Depth) regularization layers.
Papers:
DropBlock: A regularization method for convolutional networks (https://arxiv.org/abs/1810.12890)
Deep Networks with Stochastic Depth (https://arxiv.org/abs/1603.09382)
Code:
DropBlock impl inspired by two Tensorflow impl that I liked:
- https://github.com/tensorflow/tpu/blob/master/models/official/resnet/resnet_model.py#L74
- https://github.com/clovaai/assembled-cnn/blob/master/nets/blocks.py
Hacked together by / Copyright 2020 Ross Wightman
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
def drop_block_2d(
x, drop_prob: float = 0.1, block_size: int = 7, gamma_scale: float = 1.0,
with_noise: bool = False, inplace: bool = False, batchwise: bool = False):
""" DropBlock. See https://arxiv.org/pdf/1810.12890.pdf
DropBlock with an experimental gaussian noise option. This layer has been tested on a few training
runs with success, but needs further validation and possibly optimization for lower runtime impact.
"""
B, C, H, W = x.shape
total_size = W * H
clipped_block_size = min(block_size, min(W, H))
# seed_drop_rate, the gamma parameter
gamma = gamma_scale * drop_prob * total_size / clipped_block_size ** 2 / (
(W - block_size + 1) * (H - block_size + 1))
# Forces the block to be inside the feature map.
w_i, h_i = torch.meshgrid(torch.arange(W).to(x.device), torch.arange(H).to(x.device))
valid_block = ((w_i >= clipped_block_size // 2) & (w_i < W - (clipped_block_size - 1) // 2)) & \
((h_i >= clipped_block_size // 2) & (h_i < H - (clipped_block_size - 1) // 2))
valid_block = torch.reshape(valid_block, (1, 1, H, W)).to(dtype=x.dtype)
if batchwise:
# one mask for whole batch, quite a bit faster
uniform_noise = torch.rand((1, C, H, W), dtype=x.dtype, device=x.device)
else:
uniform_noise = torch.rand_like(x)
block_mask = ((2 - gamma - valid_block + uniform_noise) >= 1).to(dtype=x.dtype)
block_mask = -F.max_pool2d(
-block_mask,
kernel_size=clipped_block_size, # block_size,
stride=1,
padding=clipped_block_size // 2)
if with_noise:
normal_noise = torch.randn((1, C, H, W), dtype=x.dtype, device=x.device) if batchwise else torch.randn_like(x)
if inplace:
x.mul_(block_mask).add_(normal_noise * (1 - block_mask))
else:
x = x * block_mask + normal_noise * (1 - block_mask)
else:
normalize_scale = (block_mask.numel() / block_mask.to(dtype=torch.float32).sum().add(1e-7)).to(x.dtype)
if inplace:
x.mul_(block_mask * normalize_scale)
else:
x = x * block_mask * normalize_scale
return x
def drop_block_fast_2d(
x: torch.Tensor, drop_prob: float = 0.1, block_size: int = 7,
gamma_scale: float = 1.0, with_noise: bool = False, inplace: bool = False, batchwise: bool = False):
""" DropBlock. See https://arxiv.org/pdf/1810.12890.pdf
DropBlock with an experimental gaussian noise option. Simplied from above without concern for valid
block mask at edges.
"""
B, C, H, W = x.shape
total_size = W * H
clipped_block_size = min(block_size, min(W, H))
gamma = gamma_scale * drop_prob * total_size / clipped_block_size ** 2 / (
(W - block_size + 1) * (H - block_size + 1))
if batchwise:
# one mask for whole batch, quite a bit faster
block_mask = torch.rand((1, C, H, W), dtype=x.dtype, device=x.device) < gamma
else:
# mask per batch element
block_mask = torch.rand_like(x) < gamma
block_mask = F.max_pool2d(
block_mask.to(x.dtype), kernel_size=clipped_block_size, stride=1, padding=clipped_block_size // 2)
if with_noise:
normal_noise = torch.randn((1, C, H, W), dtype=x.dtype, device=x.device) if batchwise else torch.randn_like(x)
if inplace:
x.mul_(1. - block_mask).add_(normal_noise * block_mask)
else:
x = x * (1. - block_mask) + normal_noise * block_mask
else:
block_mask = 1 - block_mask
normalize_scale = (block_mask.numel() / block_mask.to(dtype=torch.float32).sum().add(1e-7)).to(dtype=x.dtype)
if inplace:
x.mul_(block_mask * normalize_scale)
else:
x = x * block_mask * normalize_scale
return x
class DropBlock2d(nn.Module):
""" DropBlock. See https://arxiv.org/pdf/1810.12890.pdf
"""
def __init__(self,
drop_prob=0.1,
block_size=7,
gamma_scale=1.0,
with_noise=False,
inplace=False,
batchwise=False,
fast=True):
super(DropBlock2d, self).__init__()
self.drop_prob = drop_prob
self.gamma_scale = gamma_scale
self.block_size = block_size
self.with_noise = with_noise
self.inplace = inplace
self.batchwise = batchwise
self.fast = fast # FIXME finish comparisons of fast vs not
def forward(self, x):
if not self.training or not self.drop_prob:
return x
if self.fast:
return drop_block_fast_2d(
x, self.drop_prob, self.block_size, self.gamma_scale, self.with_noise, self.inplace, self.batchwise)
else:
return drop_block_2d(
x, self.drop_prob, self.block_size, self.gamma_scale, self.with_noise, self.inplace, self.batchwise)
def drop_path(x, drop_prob: float = 0., training: bool = False):
"""Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
This is the same as the DropConnect impl I created for EfficientNet, etc networks, however,
the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for
changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use
'survival rate' as the argument.
"""
if drop_prob == 0. or not training:
return x
keep_prob = 1 - drop_prob
shape = (x.shape[0],) + (1,) * (x.ndim - 1) # work with diff dim tensors, not just 2D ConvNets
random_tensor = keep_prob + torch.rand(shape, dtype=x.dtype, device=x.device)
random_tensor.floor_() # binarize
output = x.div(keep_prob) * random_tensor
return output
class DropPath(nn.Module):
"""Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
"""
def __init__(self, drop_prob=None):
super(DropPath, self).__init__()
self.drop_prob = drop_prob
def forward(self, x):
return drop_path(x, self.drop_prob, self.training)
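# Minimal usage sketch (illustrative, not part of the original timm module):
#   dp = DropPath(drop_prob=0.2); dp.train()
#   y = dp(torch.randn(8, 196, 384))  # zeroes whole samples, scales the rest by 1/keep_prob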
| [
"[email protected]"
] | |
0007f103813cae3d297e93408d84f95ee4198ba1 | b63142e8540cb30bb0c663332e29a4112721073e | /1396_set_union.py | 0d857ee6689a0f91d0670cc733bef0a70168eb09 | [] | no_license | HaydenInEdinburgh/LintCode | 025bb2f0d75686097061de324c0fd292536dbb14 | dbeae2bf631e57667d1415164d452d5ca2df7447 | refs/heads/master | 2023-08-18T19:52:54.561623 | 2021-10-06T21:46:50 | 2021-10-06T21:46:50 | 370,733,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,100 | py | class UnionFind:
def __init__(self, length):
self.father = {i: i for i in range(length)}
self.count = length
def union(self, a, b):
a_root = self.find(a)
b_root = self.find(b)
if a_root == b_root:
return
self.father[b_root] = a_root
self.count -= 1
def find(self, point):
path = []
while point != self.father[point]:
path.append(point)
point = self.father[point]
for p in path:
self.father[p] = point
return point
class Solution:
"""
@param sets: Initial set list
@return: The final number of sets
"""
def setUnion(self, sets):
# Write your code here
uf = UnionFind(len(sets))
idx = {}
for i in range(len(sets)):
cur_set = sets[i]
for j in range(len(sets[i])):
cur_ele = cur_set[j]
if cur_ele in idx:
uf.union(i, idx[cur_ele])
else:
idx[cur_ele] = i
return uf.count | [
"[email protected]"
] | |
b98b728c632282bacfbc50e8f93d8247d8fe05da | 5348732cf26a42a83a714adb5841cec9b5f64f81 | /Task 1/lecture02.py | 61cd167e48718a91ac938e91ccd3738ef3b54c6f | [
"MIT"
] | permissive | SchemingWeasels/EEEE1027_face-recognition_template-matching | 7b4468f76164a784a8f2c299b5d8744730537e5c | 22a9ebf174d21bbbfbe637d9c7dadba34d468614 | refs/heads/master | 2022-07-23T21:35:32.024385 | 2020-05-22T09:32:29 | 2020-05-22T09:32:29 | 266,066,269 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,287 | py | import cv2
import numpy as np
face_cascade = cv2.CascadeClassifier('Pictures/haarcascade_frontalface_alt.xml')
cap = cv2.VideoCapture(0)
scaling_factor = 0.5
while True:
ret, frame = cap.read()
frame = cv2.resize(frame, None, fx=scaling_factor,
fy=scaling_factor, interpolation=cv2.INTER_AREA)
face_rects = face_cascade.detectMultiScale(frame, scaleFactor=1.3,minNeighbors=3)
for (x,y,w,h) in face_rects:
cv2.rectangle(frame, (x,y), (x+w,y+h), (0,255,0), 3)
cv2.imshow('Face Detector', frame)
c = cv2.waitKey(1)
if c == 27:
break
cap.release()
cv2.destroyAllWindows()
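# Note (illustrative): scaleFactor=1.3 shrinks the search image by ~23% per
# pyramid level; smaller values (e.g. 1.05) detect more faces but run slower.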
___________________________________________________________________
import cv2
import numpy as np
face_cascade =cv2.CascadeClassifier('Pictures/haarcascade_frontalface_alt.xml')
eye_cascade = cv2.CascadeClassifier('Pictures/haarcascade_eye.xml')
if face_cascade.empty():
raise IOError('Unable to load the face cascade classifier xml file')
if eye_cascade.empty():
raise IOError('Unable to load the eye cascade classifier xml file')
cap = cv2.VideoCapture(0)
scaling_factor = 0.5
while True:
ret, frame = cap.read()
frame = cv2.resize(frame, None, fx=scaling_factor, fy=scaling_factor,
interpolation=cv2.INTER_AREA)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, scaleFactor=1.3,minNeighbors=1)
for (x,y,w,h) in faces:
roi_gray = gray[y:y+h, x:x+w]
roi_color = frame[y:y+h, x:x+w]
eyes = eye_cascade.detectMultiScale(roi_gray)
for (x_eye,y_eye,w_eye,h_eye) in eyes:
center = (int(x_eye + 0.5*w_eye), int(y_eye + 0.5*h_eye))
radius = int(0.3 * (w_eye + h_eye))
color = (0, 255, 0)
thickness = 3
cv2.circle(roi_color, center, radius, color, thickness)
cv2.imshow('Eye Detector', frame)
c = cv2.waitKey(1)
if c == 27:
break
cap.release()
cv2.destroyAllWindows()
_________________________________________________________________________
import cv2
import numpy as np
left_ear_cascade = cv2.CascadeClassifier('Pictures/haarcascade_mcs_leftear.xml')
right_ear_cascade = cv2.CascadeClassifier('Pictures/haarcascade_mcs_rightear.xml')
if left_ear_cascade.empty():
raise IOError('Unable to load the left ear cascade classifier xml file')
if right_ear_cascade.empty():
raise IOError('Unable to load the right ear cascade classifier xml file')
cap = cv2.VideoCapture(0)
scaling_factor = 0.5
while True:
ret, frame = cap.read()
frame = cv2.resize(frame, None, fx=scaling_factor, fy=scaling_factor,
interpolation=cv2.INTER_AREA)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
left_ear = left_ear_cascade.detectMultiScale(gray, scaleFactor=1.3,minNeighbors=3)
right_ear = right_ear_cascade.detectMultiScale(gray, scaleFactor=1.3,minNeighbors=3)
for (x,y,w,h) in left_ear:
cv2.rectangle(frame, (x,y), (x+w,y+h), (0,255,0), 3)
for (x,y,w,h) in right_ear:
cv2.rectangle(frame, (x,y), (x+w,y+h), (255,0,0), 3)
cv2.imshow('Ear Detector', frame)
c = cv2.waitKey(1)
if c == 27:
break
cap.release()
cv2.destroyAllWindows()
______________________________________________________________-
| [
"[email protected]"
] | |
c4582d858eedc192af567422de49a7e58b240a1c | 53dd5d2cfb79edc87f6c606bbfb7d0bedcf6da61 | /.history/EMR/zhzd_2_20190605154039.py | c6472ca4dd8a3ad43ec2696d5a071ed5ec48ca2b | [] | no_license | cyc19950621/python | 4add54894dc81187211aa8d45e5115903b69a182 | d184b83e73334a37d413306d3694e14a19580cb0 | refs/heads/master | 2020-04-11T20:39:34.641303 | 2019-07-02T12:54:49 | 2019-07-02T12:54:49 | 162,078,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,178 | py | import time
import math
import os
import sys
import os, os.path,shutil
import codecs
import EMRdef
import re
import pandas as pd
emrtxts = EMRdef.txttq(u'D:\DeepLearning ER\EHRzhzd')  # collect the .txt files in the directory
g = open(r'C:\Users\Administrator\Desktop\JBML.txt',errors='ignore')
dics=g.readlines()
ryzd=[]
output=[]
line_re = []
for emrtxt in emrtxts:
    f = open(emrtxt, 'r', errors="ignore")  # errors="ignore" is needed for the Chinese text
    emrpath = os.path.basename(emrtxt)
    emrpath = os.path.splitext(emrpath)[0]  # strip the extension to get the record name
pattern =r'\s*\d+、+\s?(.*)'
c=re.compile(pattern)
for line in f.readlines():
line1=line.strip('\n')
line2 = ''.join(line1)
line2 = line2.strip( )
line3=c.findall(line2)
line3=''.join(line3)
line4 = str(line3)
line = line4
line=re.sub('\n','',line)
line=re.sub(' ','',line)
line = re.sub(r'\?|?', '',line)
line = re.sub(r'\,|\.|;','',line)
out = line
out= re.sub(r'右侧|两侧|双侧|左侧|右|左|双','',out)
out = re.sub(r'肺肺','肺',out)
out = re.sub('(.*?)', '', out)
out = re.sub(r'很高危|极高危', '', out)
line = out
line_re.append(line)
while '' in line_re:
line_re.remove('')
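    # NOTE (assumption): SBS below is a string-similarity helper returning a
    # 0..1 score; it is expected to be defined elsewhere in this project
    # (e.g. in EMRdef) and is not imported in this file.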
for line in line_re:
for dic in dics:
dic=re.sub('\n','',dic)
            if set(line) == set(dic):
                output.append(dic)
            elif SBS(line, dic) > 0.8 and SBS(line, dic) < 1:
                output.append(dic)
output=EMRdef.delre(output)
#output1='\n'.join(output)
#EMRdef.text_create(r'D:\DeepLearning ER\EHRzhzd2','.txt',emrpath,output1)
ryzd.append(output)
# import the association-rule (FP-growth) mining library
import orangecontrib.associate.fpgrowth as oaf
def dealRules(rules):
returnRules = []
for i in rules:
temStr = '';
        for j in i[0]:  # handle the first frozenset
temStr = temStr+j+'&'
temStr = temStr[:-1]
temStr = temStr + ' ==> '
for j in i[1]:
temStr = temStr+j+'&'
temStr = temStr[:-1]
temStr = temStr + ';' +'\t'+str(i[2])+ ';' +'\t'+str(i[3])
# print(temStr)
returnRules.append(temStr)
return returnRules
def dealResult(rules):  # post-process the rule statistics
returnRules = []
for i in rules:
temStr = '';
        for j in i[0]:  # handle the first frozenset
temStr = temStr+j+'&'
temStr = temStr[:-1]
temStr = temStr + ' ==> '
for j in i[1]:
temStr = temStr+j+'&'
temStr = temStr[:-1]
temStr = temStr + ';' +'\t'+str(i[2])+ ';' +'\t'+str(i[3])+ ';' +'\t'+str(i[4])+ ';' +'\t'+str(i[5])+ ';' +'\t'+str(i[6])+ ';' +'\t'+str(i[7])
# print(temStr)
returnRules.append(temStr)
return returnRules
def ResultDFToSave(rules):  # build and return a DataFrame from the rules produced by the Orange3 association analysis
returnRules = []
for i in rules:
temList = []
temStr = '';
        for j in i[0]:  # handle the first frozenset
temStr = temStr + str(j) + '&'
temStr = temStr[:-1]
temStr = temStr + ' ==> '
for j in i[1]:
temStr = temStr + str(j) + '&'
temStr = temStr[:-1]
temList.append(temStr); temList.append(i[2]); temList.append(i[3]); temList.append(i[4])
temList.append(i[5]); temList.append(i[6]); temList.append(i[7])
returnRules.append(temList)
    return pd.DataFrame(returnRules, columns=('rule', 'support count', 'confidence', 'coverage', 'strength', 'lift', 'leverage'))
if __name__ == '__main__':
supportRate = 0.002
confidenceRate = 0.2
itemsets = dict(oaf.frequent_itemsets(ryzd, supportRate))
rules = oaf.association_rules(itemsets, confidenceRate)
rules = list(rules)
regularNum = len(rules)
printRules = dealRules(rules)
result = list(oaf.rules_stats(rules, itemsets, len(ryzd))) #下面这个函数改变了rules,把rules用完了!
printResult = dealResult(result)
#################################################
    # save the results below as an Excel file
dfToSave = ResultDFToSave(result)
    # saveRegularName = str(supportRate) + '_support_' + str(confidenceRate) + '_confidence_' + str(regularNum) + '_rules' + '.xlsx'
dfToSave.to_excel(r'C:\Users\Administrator\Desktop\2.xlsx')
#######################################################
    # below: rule counts obtained for different support and confidence thresholds
listTable = []
supportRate = 0.001
confidenceRate = 0.1
for i in range(9):
support = supportRate*(i+1)
listS = []
for j in range(9):
confidence = confidenceRate*(j+1)
itemsets = dict(oaf.frequent_itemsets(ryzd, support))
rules = list(oaf.association_rules(itemsets, confidence))
listS.append(len(rules))
listTable.append(listS)
dfList = pd.DataFrame(listTable,index = [supportRate*(i+1) for i in range(9)],columns=[confidenceRate*(i+1) for i in range(9)])
dfList.to_excel(r'C:\Users\Administrator\Desktop\outlunwen.xlsx')
| [
"[email protected]"
] | |
bfc3f2d2974b18bddcac2a327a085bced230123a | bfad81de459052ea439a06d4346a4fdc60b3cad2 | /Project/Project/urls.py | b938a774ed61e21c007646c1637cf0a69b8236a0 | [] | no_license | MarCastellnou/Deriverable_Proj-Web | ecc9841b48c479cc05071a85f94bc92d5b8323a5 | 0a3b39e3afe1cc233c0e0e0186635037da26e0cd | refs/heads/master | 2021-07-19T02:49:02.962373 | 2019-05-26T09:54:39 | 2019-05-26T09:54:39 | 175,597,908 | 0 | 0 | null | 2020-06-05T20:59:43 | 2019-03-14T10:17:58 | Python | UTF-8 | Python | false | false | 989 | py | """Project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('accounts/', include('django.contrib.auth.urls')),
path('accounts/', include('accounts.urls')),
path('', include('criptomonedasapp.urls')),
]
| [
"[email protected]"
] | |
97ef71708fa9e7f58b078ba005c13ae776855b84 | 6a3f2da99bd10f6a701964bc46c3bc6dc2ba1043 | /lab02/lab02.py | 768b05a4c58e67cc9e02627b0a981b8752742bcc | [] | no_license | ziliang865/CS61A | 678a1a5d1963529b94b7664075333c6729242126 | 5d98e0ceff10607800f81f75d35363a218fabf59 | refs/heads/master | 2021-01-24T08:21:01.479598 | 2016-09-25T06:09:00 | 2016-09-25T06:09:00 | 69,146,948 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,371 | py | """Lab 2: Higher Order Functions & Lambdas & Recursions"""
def lambda_curry2(func):
"""
Returns a Curried version of a two argument function func.
>>> from operator import add
>>> from operator import mul
>>> x = lambda_curry2(add)
>>> y = x(3)
>>> y(5)
8
>>> x = lambda_curry2(mul)(3)(5)
>>> x
15
"""
"*** YOUR CODE HERE ***"
return lambda x: lambda y:func(x,y)
from doctest import run_docstring_examples
run_docstring_examples(lambda_curry2,globals(),True)
def adder(f1, f2):
"""
Return a function that takes in a single variable x, and returns
f1(x) + f2(x). You can assume the result of f1(x) and f2(x) can be
added together, and they both take in one argument.
>>> identity = lambda x: x # returns input
>>> square = lambda x: x**2
>>> a1 = adder(identity, square) # x + x^2
>>> a1(4)
20
>>> a2 = adder(a1, identity) # (x + x^2) + x
>>> a2(4)
24
>>> a2(5)
35
>>> a3 = adder(a1, a2) # (x + x^2) + (x + x^2 + x)
>>> a3(4)
44
"""
from operator import add
def addFunc(x):
return add(f1(x),f2(x))
return addFunc
run_docstring_examples(adder, globals(), True)
def skip_mul(n):
"""Return the product of n * (n - 2) * (n - 4) * ...
>>> skip_mul(5) # 5 * 3 * 1
15
>>> skip_mul(8) # 8 * 6 * 4 * 2 * 0
0
"""
if n == 0:
return 0
if n==1:
return 1
else:
return n * skip_mul(n - 2)
def count_up(n):
"""Print out all numbers up to and including n in ascending order.
>>> count_up(5)
1
2
3
4
5
"""
def counter(i):
if(i>n):
return
else:
print(i)
return counter(i+1)
    counter(1)
run_docstring_examples(count_up, globals(), True)
def gcd(a, b):
"""Returns the greatest common divisor of a and b.
Should be implemented using recursion.
>>> gcd(34, 19)
1
>>> gcd(39, 91)
13
>>> gcd(20, 30)
10
>>> gcd(40, 40)
40
"""
# exchange a and b when a is less than b
if(a<b):
return gcd(b,a)
else:
#return if b can be divided by b
if(a%b==0):
return b
else:
return gcd(b,a%b)
"*** YOUR CODE HERE ***"
run_docstring_examples(gcd, globals(), True) | [
"[email protected]"
] | |
d4edaed4db9a9e82e6c748f1fa17f782f84aa519 | 62a3db23d44532ef538b6d57f0ea0fd6a43050a0 | /finalproject/app/migrations/0005_tournament_fighters.py | f858a094804b0f258e29b31be31e3c952d63395d | [] | no_license | beatrizrp/final | 377f009a9a828b9809ab73e66a32d10065bb6347 | ce0deb52a8b17e4ae999ca63bd644a2a15f91a9d | refs/heads/master | 2020-03-17T11:31:21.147101 | 2018-10-02T10:55:40 | 2018-10-02T10:55:40 | 133,550,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-23 21:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0004_auto_20180523_2304'),
]
operations = [
migrations.AddField(
model_name='tournament',
name='fighters',
field=models.ManyToManyField(to='app.Fighter', verbose_name='Luchadores'),
),
]
| [
"[email protected]"
] | |
e6cffb75f19e4afab6d2810287b9595e26dc13e8 | 0301717c32f8a919fd00b6e14f944192fe8ba332 | /f0005c.py | 70dd8100e048bf15df73111b7049ee7a26c7d552 | [] | no_license | vmarci80/f0005 | e727b2117fe36e463a20b7abc1c355758b477ffd | 5fb2c50852e9d20203e401239eb696de2d1b11cc | refs/heads/master | 2023-01-12T11:08:15.843474 | 2020-11-09T13:17:19 | 2020-11-09T13:17:19 | 311,344,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | #!/usr/bin/env python3
sz1 = 23
sz2 = 9
tipp = int(input('How much is ' + str(sz1) + '+' + str(sz2) + ' ? '))
if tipp == sz1 + sz2:
    print('Not bad, not bad...')
else:
    print('Hey, what are you doing?!')
| [
"[email protected]"
] | |
679a32a7d56a4b696e1f7578d001282b1b18aa18 | 0529dbee4b9defe6097f226656cc86079f81df8a | /NN_main.py | 2e033b32eb27f7d81083ffca15748faf8ed1eb2c | [] | no_license | xiaoke0515/Bioinformatic_class_project | 8f2665ac50f5c3763cbb4e507f5faa92a6865fd1 | dac0b5a369a0a702ba3e6627624d2ac01998a595 | refs/heads/master | 2021-07-13T06:48:09.738880 | 2020-08-01T14:54:14 | 2020-08-01T14:54:14 | 190,893,840 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 509 | py | from ReadClass import GetClass
from NN import NN
import os
os.environ["CUDA_VISIBLE_DEVICES"]="-1"
class_reader = GetClass()
filename = "Gene_Chip_Data/Gene_Chip_Data/microarray.original.txt"
classifier = NN(class_reader, filename, train_proportion=0.7)
(sess, accuracy, loss, input_data, label) = classifier.BuildGraph(30, 20, 15)
classifier.Train(sess, accuracy, loss, input_data, label, batch_size=100, train_time=500, lr=1e-3)
classifier.Test(sess, accuracy, loss, input_data, label, batch_size=100)
| [
"[email protected]"
] | |
5b7b69ff6abc4a5daba3036daa61e8568b31a038 | 89336f4937f7ef5416f6cf66669fbf128bb35f02 | /app.py | da67d3def794148b3ddf9a3bd512c001d83afddc | [] | no_license | romanstr-code/milestone-project-threee | 770605a005bb8102bb9b65b0f47ce747b51dde61 | b7d0650123c8beb07c68835a40cd02a941e7c0c0 | refs/heads/master | 2023-04-20T09:52:20.989531 | 2021-05-12T09:59:01 | 2021-05-12T09:59:01 | 363,171,631 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,254 | py | # Imported Filesw
import os
from flask import (
Flask, flash, render_template,
redirect, request, session, url_for)
from flask_pymongo import PyMongo
from bson.objectid import ObjectId
from werkzeug.security import generate_password_hash, check_password_hash
if os.path.exists("env.py"):
import env
app = Flask(__name__)
app.config["MONGO_DBNAME"] = os.environ.get("MONGO_DBNAME")
app.config["MONGO_URI"] = os.environ.get("MONGO_URI")
app.secret_key = os.environ.get("SECRET_KEY")
mongo = PyMongo(app)
@app.route("/")
# Route to Home Page
@app.route("/home")
def home():
return render_template("home.html")
# All Recipes Page
@app.route("/recipes")
def recipes():
recipes = list(mongo.db.recipes.find())
return render_template("recipes.html", recipes=recipes)
# Search Box
@app.route("/search", methods=["GET", "POST"])
def search():
query = request.form.get("query")
recipes = list(mongo.db.recipes.find({"$text": {"$search": query}}))
return render_template("recipes.html", recipes=recipes)
# Individual Recipe Page
@app.route("/french_press")
def french_press():
return render_template(
"french_press.html",
recipes=mongo.db.recipes.find({"category_name": "French Press"})
.sort("recipe_name"))
# Individual Recipe Page
@app.route("/siphon")
def siphon():
return render_template(
"siphon.html",
recipes=mongo.db.recipes.find({"category_name": "Siphon Method"})
.sort("recipe_name"))
# Individual Recipe Page
@app.route("/turkish")
def turkish():
return render_template(
"turkish.html",
recipes=mongo.db.recipes.find({"category_name": "Turkish Method"})
.sort("recipe_name"))
# Individual Recipe Page
@app.route("/pour_over")
def pour_over():
return render_template(
"pour_over.html",
recipes=mongo.db.recipes.find({"category_name": "Pour Over Method"})
.sort("recipe_name"))
# Individual Recipe Page
@app.route("/aeropress")
def aeropress():
return render_template(
"aeropress.html",
recipes=mongo.db.recipes.find({"category_name": "Aeropress Method"})
.sort("recipe_name"))
# Individual Recipe Page
@app.route("/iced_coffee")
def iced_coffee():
return render_template(
"iced_coffee.html",
recipes=mongo.db.recipes.find({"category_name": "Iced Method"})
.sort("recipe_name"))
# Register Page
@app.route("/register", methods=["GET", "POST"])
def register():
if request.method == "POST":
# check if username already exists in db
existing_user = mongo.db.users.find_one(
{"username": request.form.get("username").lower()})
if existing_user:
flash("Username already exists!")
return redirect(url_for("register"))
register = {
"username": request.form.get("username").lower(),
"password": generate_password_hash(request.form.get("password"))
}
mongo.db.users.insert_one(register)
# put new user into 'session' cookie
session["user"] = request.form.get("username").lower()
flash("Registration Successful!")
return redirect(url_for("profile", username=session["user"]))
return render_template("register.html")
# Log In Page
@app.route("/login", methods=["GET", "POST"])
def login():
if request.method == "POST":
# check if username exists in db
existing_user = mongo.db.users.find_one(
{"username": request.form.get("username").lower()})
if existing_user:
# ensure hashed password matches user input
if check_password_hash(
existing_user["password"], request.form.get("password")):
session["user"] = request.form.get("username").lower()
flash("Welcome,{}".format(
request.form.get("username")))
return redirect(url_for(
"profile", username=session["user"]))
else:
# invalid password match
flash("Incorrect Username and/or Password")
return redirect(url_for("login"))
else:
# username doesn't exist
flash("Incorrect Username and/or Password")
return redirect(url_for("login"))
return render_template("login.html")
# User Profile Page
@app.route("/profile/<username>", methods=["GET", "POST"])
def profile(username):
# grab the session user's username from db
username = mongo.db.users.find_one(
{"username": session["user"]})["username"]
if session["user"]:
return render_template("profile.html", username=username)
return redirect(url_for("login"))
# Log Session User Out Page
@app.route("/logout")
def logout():
# remove user from session cookies
flash("You have been logged out!")
session.pop("user")
return redirect("login")
# Add Recipe Page
@app.route("/add_recipe", methods=["GET", "POST"])
def add_recipe():
# Send User Input to DB
if request.method == "POST":
recipes = {
"url": request.form.get("url"),
"category_name": request.form.get("category_name"),
"recipe_name": request.form.get("recipe_name"),
"ingridients": request.form.get("ingridients"),
"method": request.form.get("method"),
"created_by": session["user"]
}
# Post the User Input to MDB
mongo.db.recipes.insert_one(recipes)
flash("Thank You for your Recipe!")
return redirect(url_for("recipes"))
return render_template("add_recipe.html")
# Edit Recipe Page
@app.route("/edit_recipes/<recipes_id>", methods=["GET", "POST"])
def edit_recipe(recipes_id):
if request.method == "POST":
submit = {
"url": request.form.get("url"),
"category_name": request.form.get("category_name"),
"recipe_name": request.form.get("recipe_name"),
"ingridients": request.form.get("ingridients"),
"method": request.form.get("method"),
"created_by": session["user"]
}
mongo.db.recipes.update({"_id": ObjectId(recipes_id)}, submit)
flash("Yeeey! Edited with Success!")
return redirect(url_for('recipes'))
recipes = mongo.db.recipes.find_one({"_id": ObjectId(recipes_id)})
return render_template("edit_recipe.html", recipes=recipes)
# Delete Recipe
@app.route("/delete_recipe/<recipes_id>")
def delete_recipe(recipes_id):
mongo.db.recipes.remove({"_id": ObjectId(recipes_id)})
flash("Deleted Permanently!")
return redirect(url_for('recipes'))
# Contact Page
@app.route("/contact_us")
def contact_us():
return render_template("contact_us.html")
# About Us Page
@app.route("/about_us")
def about_us():
return render_template("about_us.html")
# 404 Error Page
@app.errorhandler(404)
def page_not_found(e):
return render_template("404.html"), 404
# 500 Server Error
@app.errorhandler(500)
def internal_server_error(e):
return render_template("500.html"), 500
if __name__ == "__main__":
app.run(host=os.environ.get("IP"),
port=int(os.environ.get("PORT")),
debug=False)
| [
"[email protected]"
] | |
f082dfbd85f23540fe495dddc50f961bfcb35818 | 86e6dd3df6f5a2dc0a5e08839eadfdab0a856759 | /cmdb/cmdb/cmdb/wsgi.py | dded9f98cb9a83ad913c45a2a66f3489769190e6 | [] | no_license | 99649952/practice | 37b0f4349cfd4957d8d2bcc9e0e6d042f9b157d9 | a486b6949a08594612428e88e8a5b73ad73b0b16 | refs/heads/master | 2021-01-17T15:41:09.769013 | 2016-06-12T06:16:34 | 2016-06-12T06:16:34 | 55,946,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | """
WSGI config for cmdb project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cmdb.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
f85b15a4823c6810244d576b6c8ac4f14a27b370 | 7ef8833dbf5f71dff7093f8daf72f5046cfbc914 | /datastructure/python-data-structure-linkedlist/deleteduplicates.py | c33ab7b90b8c0e7f513d61bcd3ca5f3a7a6ccbd1 | [] | no_license | zhangwhitemouse/pythonDataStructure | 84259177a98e6a4fc7cd816fa3939c2ae3665ff0 | eb9d8020207263074b528b6a221bf6fa75c04533 | refs/heads/master | 2021-06-29T04:32:01.650360 | 2021-01-16T07:52:55 | 2021-01-16T07:52:55 | 214,996,795 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,550 | py | #删除链表的重复元素
"""
问题分析:保证链表中没有重复的元素(保证没有相邻的元素存在相同)
算法分析:一边遍历一边判断,看当前节点值与前一个节点值是否相同,
若相同,则删除当前节点,若不相同,则继续遍历
"""
# class Solution(object):
# def deleteDuplicates(self, head):
# cur = head
# while cur and cur.next:
# if cur.val == cur.next.val:
# cur.next = cur.next.next
# else:
# cur = cur.next
# return head
"""
问题延伸:上面删除链表的重复节点是删除多出来的一个节点,而现在要求只要这个元素出现了重复元素,删除这个元素的所有值
例如:1,2,3,3,4 第一个问题结果:1,2,3,4 第二个结果:1,2,4
保证整个链表中,出现过相同元素的直接全部删除
"""
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def deleteDuplicates(self, head):
newlink = ListNode(-1)
newlink.next = head
cur = newlink
while cur:
pre = cur
cur = cur.next
while cur and cur.next and cur.val == cur.next.val:
tmp = cur.val
while cur and tmp == cur.val:
cur = cur.next
pre.next = cur
return newlink.next
| [
"[email protected]"
] | |
665ca7c8480cf43f3293241817377b6208c1060c | 35c774ee67feae65cf3379781db389513cf8416e | /old_mac.py | ab76e287d25bc8e0af55d41cac282b302ed12d97 | [] | no_license | divyamtalreja/basic-python-program | c95e4bb64eca9b1f378a74ca7c610d36f5150e76 | 2bcb0f65334f42d7690a17c7355253544b3cbb4a | refs/heads/master | 2021-02-28T01:47:24.867857 | 2020-03-07T14:59:27 | 2020-03-07T14:59:27 | 245,651,923 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | def old_macdonald(s):
first=s[0]
inbetween=s[1:3]
fourth=s[3]
rem=s[4:]
return first.upper()+inbetween+fourth.upper()+rem
print(old_macdonald("macdonald")) | [
"[email protected]"
] | |
a384997b92fd4d648e8df0aa05d3606b08def541 | 2ce7346a38ff819b7cbd69df73a7948b0abab12f | /10.자료구조변경.py | 67fc354d7787da7f70a12afc19fdc4fd64cd6dde | [] | no_license | poebus0102/JINWOO | c615f3540b18730bfce0926bdc20d35399367f60 | 761cd7ca5d9f8a1efe66ad89c2909850b0512e0d | refs/heads/main | 2023-04-24T13:03:14.266366 | 2021-04-26T04:08:11 | 2021-04-26T04:08:11 | 361,612,907 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | # 자료구조의 변경
# 커피숍
menu = {"커피","우유","주스"}
print(menu,type(menu))
| [
"[email protected]"
] | |
a6c1a20dfdb3da33fd7e96ed02470a8318d32b13 | d65128e38be0243f279e0d72ef85e7d3c5e116ca | /base/site-packages/django/contrib/gis/geoip/tests.py | 3fa64bf6bef6ebe66e0523ad8e22ee07ec3b4e8b | [
"Apache-2.0"
] | permissive | ZxwZero/fastor | 19bfc568f9a68f1447c2e049428330ade02d451d | dd9e299e250362802032d1984801bed249e36d8d | refs/heads/master | 2021-06-26T06:40:38.555211 | 2021-06-09T02:05:38 | 2021-06-09T02:05:38 | 229,753,500 | 1 | 1 | Apache-2.0 | 2019-12-23T12:59:25 | 2019-12-23T12:59:24 | null | UTF-8 | Python | false | false | 4,672 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.conf import settings
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.geoip import HAS_GEOIP
from django.utils import unittest
from django.utils.unittest import skipUnless
from django.utils import six
if HAS_GEOIP:
from . import GeoIP, GeoIPException
if HAS_GEOS:
from ..geos import GEOSGeometry
# Note: Requires use of both the GeoIP country and city datasets.
# The GEOIP_DATA path should be the only setting set (the directory
# should contain links or the actual database files 'GeoIP.dat' and
# 'GeoLiteCity.dat'.
@skipUnless(HAS_GEOIP and getattr(settings, "GEOIP_PATH", None),
"GeoIP is required along with the GEOIP_DATA setting.")
class GeoIPTest(unittest.TestCase):
def test01_init(self):
"Testing GeoIP initialization."
g1 = GeoIP() # Everything inferred from GeoIP path
path = settings.GEOIP_PATH
g2 = GeoIP(path, 0) # Passing in data path explicitly.
g3 = GeoIP.open(path, 0) # MaxMind Python API syntax.
for g in (g1, g2, g3):
self.assertEqual(True, bool(g._country))
self.assertEqual(True, bool(g._city))
# Only passing in the location of one database.
city = os.path.join(path, 'GeoLiteCity.dat')
cntry = os.path.join(path, 'GeoIP.dat')
g4 = GeoIP(city, country='')
self.assertEqual(None, g4._country)
g5 = GeoIP(cntry, city='')
self.assertEqual(None, g5._city)
# Improper parameters.
bad_params = (23, 'foo', 15.23)
for bad in bad_params:
self.assertRaises(GeoIPException, GeoIP, cache=bad)
if isinstance(bad, six.string_types):
e = GeoIPException
else:
e = TypeError
self.assertRaises(e, GeoIP, bad, 0)
def test02_bad_query(self):
"Testing GeoIP query parameter checking."
cntry_g = GeoIP(city='<foo>')
# No city database available, these calls should fail.
self.assertRaises(GeoIPException, cntry_g.city, 'google.com')
self.assertRaises(GeoIPException, cntry_g.coords, 'yahoo.com')
# Non-string query should raise TypeError
self.assertRaises(TypeError, cntry_g.country_code, 17)
self.assertRaises(TypeError, cntry_g.country_name, GeoIP)
def test03_country(self):
"Testing GeoIP country querying methods."
g = GeoIP(city='<foo>')
fqdn = 'www.google.com'
addr = '12.215.42.19'
for query in (fqdn, addr):
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query))
self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
g.country(query))
@skipUnless(HAS_GEOS, "Geos is required")
def test04_city(self):
"Testing GeoIP city querying methods."
g = GeoIP(country='<foo>')
addr = '128.249.1.1'
fqdn = 'tmc.edu'
for query in (fqdn, addr):
# Country queries should still work.
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query))
self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
g.country(query))
# City information dictionary.
d = g.city(query)
self.assertEqual('USA', d['country_code3'])
self.assertEqual('Houston', d['city'])
self.assertEqual('TX', d['region'])
self.assertEqual(713, d['area_code'])
geom = g.geos(query)
self.assertTrue(isinstance(geom, GEOSGeometry))
lon, lat = (-95.4010, 29.7079)
lat_lon = g.lat_lon(query)
lat_lon = (lat_lon[1], lat_lon[0])
for tup in (geom.tuple, g.coords(query), g.lon_lat(query), lat_lon):
self.assertAlmostEqual(lon, tup[0], 4)
self.assertAlmostEqual(lat, tup[1], 4)
def test05_unicode_response(self):
"Testing that GeoIP strings are properly encoded, see #16553."
g = GeoIP()
d = g.city("www.osnabrueck.de")
self.assertEqual('Osnabrück', d['city'])
| [
"[email protected]"
] | |
a24e94b51e575f5924c1a09583866578e9030e35 | f0858aae73097c49e995ff3526a91879354d1424 | /nova/virt/hardware.py | 830a74c70c635ef3b2a1621d054fa53bbab810ad | [
"Apache-2.0"
] | permissive | bopopescu/nested_quota_final | 7a13f7c95e9580909d91db83c46092148ba1403b | 7c3454883de9f5368fa943924540eebe157a319d | refs/heads/master | 2022-11-20T16:14:28.508150 | 2015-02-16T17:47:59 | 2015-02-16T17:47:59 | 282,100,691 | 0 | 0 | Apache-2.0 | 2020-07-24T02:14:02 | 2020-07-24T02:14:02 | null | UTF-8 | Python | false | false | 51,509 | py | # Copyright 2014 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import itertools
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import units
import six
from nova import context
from nova import exception
from nova.i18n import _
from nova import objects
from nova.openstack.common import log as logging
virt_cpu_opts = [
cfg.StrOpt('vcpu_pin_set',
help='Defines which pcpus that instance vcpus can use. '
'For example, "4-12,^8,15"'),
]
CONF = cfg.CONF
CONF.register_opts(virt_cpu_opts)
LOG = logging.getLogger(__name__)
MEMPAGES_SMALL = -1
MEMPAGES_LARGE = -2
MEMPAGES_ANY = -3
def get_vcpu_pin_set():
"""Parsing vcpu_pin_set config.
Returns a set of pcpu ids can be used by instances.
"""
if not CONF.vcpu_pin_set:
return None
cpuset_ids = parse_cpu_spec(CONF.vcpu_pin_set)
if not cpuset_ids:
raise exception.Invalid(_("No CPUs available after parsing %r") %
CONF.vcpu_pin_set)
return cpuset_ids
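# Example (illustrative): with vcpu_pin_set = "4-12,^8,15" this returns
# {4, 5, 6, 7, 9, 10, 11, 12, 15}.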
def parse_cpu_spec(spec):
"""Parse a CPU set specification.
:param spec: cpu set string eg "1-4,^3,6"
Each element in the list is either a single
CPU number, a range of CPU numbers, or a
caret followed by a CPU number to be excluded
from a previous range.
:returns: a set of CPU indexes
"""
cpuset_ids = set()
cpuset_reject_ids = set()
for rule in spec.split(','):
rule = rule.strip()
# Handle multi ','
if len(rule) < 1:
continue
# Note the count limit in the .split() call
range_parts = rule.split('-', 1)
if len(range_parts) > 1:
# So, this was a range; start by converting the parts to ints
try:
start, end = [int(p.strip()) for p in range_parts]
except ValueError:
raise exception.Invalid(_("Invalid range expression %r")
% rule)
# Make sure it's a valid range
if start > end:
raise exception.Invalid(_("Invalid range expression %r")
% rule)
# Add available CPU ids to set
cpuset_ids |= set(range(start, end + 1))
elif rule[0] == '^':
# Not a range, the rule is an exclusion rule; convert to int
try:
cpuset_reject_ids.add(int(rule[1:].strip()))
except ValueError:
raise exception.Invalid(_("Invalid exclusion "
"expression %r") % rule)
else:
# OK, a single CPU to include; convert to int
try:
cpuset_ids.add(int(rule))
except ValueError:
raise exception.Invalid(_("Invalid inclusion "
"expression %r") % rule)
# Use sets to handle the exclusion rules for us
cpuset_ids -= cpuset_reject_ids
return cpuset_ids
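# Example (illustrative): parse_cpu_spec("1-4,^3,6") -> set([1, 2, 4, 6])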
def format_cpu_spec(cpuset, allow_ranges=True):
"""Format a libvirt CPU range specification.
:param cpuset: set (or list) of CPU indexes
Format a set/list of CPU indexes as a libvirt CPU
range specification. It allow_ranges is true, it
will try to detect continuous ranges of CPUs,
otherwise it will just list each CPU index explicitly.
:returns: a formatted CPU range string
"""
# We attempt to detect ranges, but don't bother with
# trying to do range negations to minimize the overall
# spec string length
if allow_ranges:
ranges = []
previndex = None
for cpuindex in sorted(cpuset):
if previndex is None or previndex != (cpuindex - 1):
ranges.append([])
ranges[-1].append(cpuindex)
previndex = cpuindex
parts = []
for entry in ranges:
if len(entry) == 1:
parts.append(str(entry[0]))
else:
parts.append("%d-%d" % (entry[0], entry[len(entry) - 1]))
return ",".join(parts)
else:
return ",".join(str(id) for id in sorted(cpuset))
def get_number_of_serial_ports(flavor, image_meta):
"""Get the number of serial consoles from the flavor or image
:param flavor: Flavor object to read extra specs from
:param image_meta: Image object to read image metadata from
If flavor extra specs is not set, then any image meta value is permitted.
If flavour extra specs *is* set, then this provides the default serial
port count. The image meta is permitted to override the extra specs, but
*only* with a lower value. ie
- flavor hw:serial_port_count=4
VM gets 4 serial ports
- flavor hw:serial_port_count=4 and image hw_serial_port_count=2
VM gets 2 serial ports
- image hw_serial_port_count=6
VM gets 6 serial ports
- flavor hw:serial_port_count=4 and image hw_serial_port_count=6
Abort guest boot - forbidden to exceed flavor value
:returns: number of serial ports
"""
def get_number(obj, property):
num_ports = obj.get(property)
if num_ports is not None:
try:
num_ports = int(num_ports)
except ValueError:
raise exception.ImageSerialPortNumberInvalid(
num_ports=num_ports, property=property)
return num_ports
image_meta_prop = (image_meta or {}).get('properties', {})
flavor_num_ports = get_number(flavor.extra_specs, "hw:serial_port_count")
image_num_ports = get_number(image_meta_prop, "hw_serial_port_count")
if (flavor_num_ports and image_num_ports) is not None:
if image_num_ports > flavor_num_ports:
raise exception.ImageSerialPortNumberExceedFlavorValue()
return image_num_ports
return flavor_num_ports or image_num_ports or 1
class InstanceInfo(object):
def __init__(self, state=None, max_mem_kb=0, mem_kb=0, num_cpu=0,
cpu_time_ns=0, id=None):
"""Create a new Instance Info object
:param state: the running state, one of the power_state codes
:param max_mem_kb: (int) the maximum memory in KBytes allowed
:param mem_kb: (int) the memory in KBytes used by the instance
:param num_cpu: (int) the number of virtual CPUs for the instance
:param cpu_time_ns: (int) the CPU time used in nanoseconds
:param id: a unique ID for the instance
"""
self.state = state
self.max_mem_kb = max_mem_kb
self.mem_kb = mem_kb
self.num_cpu = num_cpu
self.cpu_time_ns = cpu_time_ns
self.id = id
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def _score_cpu_topology(topology, wanttopology):
"""Calculate score for the topology against a desired configuration
:param wanttopology: nova.objects.VirtCPUTopology instance for
preferred topology
Calculate a score indicating how well this topology
matches against a preferred topology. A score of 3
indicates an exact match for sockets, cores and threads.
A score of 2 indicates a match of sockets & cores or
sockets & threads or cores and threads. A score of 1
indicates a match of sockets or cores or threads. A
score of 0 indicates no match
:returns: score in range 0 (worst) to 3 (best)
"""
score = 0
if (wanttopology.sockets != -1 and
topology.sockets == wanttopology.sockets):
score = score + 1
if (wanttopology.cores != -1 and
topology.cores == wanttopology.cores):
score = score + 1
if (wanttopology.threads != -1 and
topology.threads == wanttopology.threads):
score = score + 1
return score
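# Example (illustrative): a topology of sockets=2, cores=4, threads=1 scores 2
# against a preferred topology of sockets=2, cores=4, threads=-1 (an unset
# field never contributes to the score).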
def _get_cpu_topology_constraints(flavor, image_meta):
"""Get the topology constraints declared in flavor or image
:param flavor: Flavor object to read extra specs from
:param image_meta: Image object to read image metadata from
Gets the topology constraints from the configuration defined
in the flavor extra specs or the image metadata. In the flavor
this will look for
hw:cpu_sockets - preferred socket count
hw:cpu_cores - preferred core count
hw:cpu_threads - preferred thread count
hw:cpu_maxsockets - maximum socket count
hw:cpu_maxcores - maximum core count
hw:cpu_maxthreads - maximum thread count
In the image metadata this will look at
hw_cpu_sockets - preferred socket count
hw_cpu_cores - preferred core count
hw_cpu_threads - preferred thread count
hw_cpu_maxsockets - maximum socket count
hw_cpu_maxcores - maximum core count
hw_cpu_maxthreads - maximum thread count
The image metadata must be strictly lower than any values
set in the flavor. All values are, however, optional.
This will return a pair of nova.objects.VirtCPUTopology instances,
the first giving the preferred socket/core/thread counts,
and the second giving the upper limits on socket/core/
thread counts.
exception.ImageVCPULimitsRangeExceeded will be raised
if the maximum counts set against the image exceed
the maximum counts set against the flavor
exception.ImageVCPUTopologyRangeExceeded will be raised
if the preferred counts set against the image exceed
the maximum counts set against the image or flavor
:returns: (preferred topology, maximum topology)
"""
# Obtain the absolute limits from the flavor
flvmaxsockets = int(flavor.extra_specs.get(
"hw:cpu_max_sockets", 65536))
flvmaxcores = int(flavor.extra_specs.get(
"hw:cpu_max_cores", 65536))
flvmaxthreads = int(flavor.extra_specs.get(
"hw:cpu_max_threads", 65536))
LOG.debug("Flavor limits %(sockets)d:%(cores)d:%(threads)d",
{"sockets": flvmaxsockets,
"cores": flvmaxcores,
"threads": flvmaxthreads})
# Get any customized limits from the image
maxsockets = int(image_meta.get("properties", {})
.get("hw_cpu_max_sockets", flvmaxsockets))
maxcores = int(image_meta.get("properties", {})
.get("hw_cpu_max_cores", flvmaxcores))
maxthreads = int(image_meta.get("properties", {})
.get("hw_cpu_max_threads", flvmaxthreads))
LOG.debug("Image limits %(sockets)d:%(cores)d:%(threads)d",
{"sockets": maxsockets,
"cores": maxcores,
"threads": maxthreads})
# Image limits are not permitted to exceed the flavor
# limits. ie they can only lower what the flavor defines
if ((maxsockets > flvmaxsockets) or
(maxcores > flvmaxcores) or
(maxthreads > flvmaxthreads)):
raise exception.ImageVCPULimitsRangeExceeded(
sockets=maxsockets,
cores=maxcores,
threads=maxthreads,
maxsockets=flvmaxsockets,
maxcores=flvmaxcores,
maxthreads=flvmaxthreads)
# Get any default preferred topology from the flavor
flvsockets = int(flavor.extra_specs.get("hw:cpu_sockets", -1))
flvcores = int(flavor.extra_specs.get("hw:cpu_cores", -1))
flvthreads = int(flavor.extra_specs.get("hw:cpu_threads", -1))
LOG.debug("Flavor pref %(sockets)d:%(cores)d:%(threads)d",
{"sockets": flvsockets,
"cores": flvcores,
"threads": flvthreads})
# If the image limits have reduced the flavor limits
# we might need to discard the preferred topology
# from the flavor
if ((flvsockets > maxsockets) or
(flvcores > maxcores) or
(flvthreads > maxthreads)):
flvsockets = flvcores = flvthreads = -1
# Finally see if the image has provided a preferred
# topology to use
sockets = int(image_meta.get("properties", {})
.get("hw_cpu_sockets", -1))
cores = int(image_meta.get("properties", {})
.get("hw_cpu_cores", -1))
threads = int(image_meta.get("properties", {})
.get("hw_cpu_threads", -1))
LOG.debug("Image pref %(sockets)d:%(cores)d:%(threads)d",
{"sockets": sockets,
"cores": cores,
"threads": threads})
# Image topology is not permitted to exceed image/flavor
# limits
if ((sockets > maxsockets) or
(cores > maxcores) or
(threads > maxthreads)):
raise exception.ImageVCPUTopologyRangeExceeded(
sockets=sockets,
cores=cores,
threads=threads,
maxsockets=maxsockets,
maxcores=maxcores,
maxthreads=maxthreads)
# If no preferred topology was set against the image
# then use the preferred topology from the flavor
# We use 'and' not 'or', since if any value is set
# against the image this invalidates the entire set
# of values from the flavor
if sockets == -1 and cores == -1 and threads == -1:
sockets = flvsockets
cores = flvcores
threads = flvthreads
LOG.debug("Chosen %(sockets)d:%(cores)d:%(threads)d limits "
"%(maxsockets)d:%(maxcores)d:%(maxthreads)d",
{"sockets": sockets, "cores": cores,
"threads": threads, "maxsockets": maxsockets,
"maxcores": maxcores, "maxthreads": maxthreads})
return (objects.VirtCPUTopology(sockets=sockets, cores=cores,
threads=threads),
objects.VirtCPUTopology(sockets=maxsockets, cores=maxcores,
threads=maxthreads))
def _get_possible_cpu_topologies(vcpus, maxtopology,
allow_threads, specified_threads):
"""Get a list of possible topologies for a vCPU count
:param vcpus: total number of CPUs for guest instance
:param maxtopology: nova.objects.VirtCPUTopology for upper limits
:param allow_threads: if the hypervisor supports CPU threads
:param specified_threads: if there is a specific request for threads we
should attempt to honour
Given a total desired vCPU count and constraints on the
maximum number of sockets, cores and threads, return a
list of nova.objects.VirtCPUTopology instances that represent every
possible topology that satisfies the constraints.
exception.ImageVCPULimitsRangeImpossible is raised if
it is impossible to achieve the total vcpu count given
the maximum limits on sockets, cores & threads.
:returns: list of nova.objects.VirtCPUTopology instances
"""
# Clamp limits to number of vcpus to prevent
# iterating over insanely large list
maxsockets = min(vcpus, maxtopology.sockets)
maxcores = min(vcpus, maxtopology.cores)
maxthreads = min(vcpus, maxtopology.threads)
if not allow_threads:
# NOTE (ndipanov): If we don't support threads - it doesn't matter that
# they are specified by the NUMA logic.
specified_threads = None
maxthreads = 1
LOG.debug("Build topologies for %(vcpus)d vcpu(s) "
"%(maxsockets)d:%(maxcores)d:%(maxthreads)d",
{"vcpus": vcpus, "maxsockets": maxsockets,
"maxcores": maxcores, "maxthreads": maxthreads})
def _get_topology_for_vcpus(vcpus, sockets, cores, threads):
if threads * cores * sockets == vcpus:
return objects.VirtCPUTopology(sockets=sockets,
cores=cores,
threads=threads)
# Figure out all possible topologies that match
# the required vcpus count and satisfy the declared
# limits. If the total vCPU count were very high
# it might be more efficient to factorize the vcpu
# count and then only iterate over its factors, but
# that's overkill right now
possible = []
for s in range(1, maxsockets + 1):
for c in range(1, maxcores + 1):
if specified_threads:
o = _get_topology_for_vcpus(vcpus, s, c, specified_threads)
if o is not None:
possible.append(o)
else:
for t in range(1, maxthreads + 1):
o = _get_topology_for_vcpus(vcpus, s, c, t)
if o is not None:
possible.append(o)
# We want to
# - Minimize threads (ie larger sockets * cores is best)
# - Prefer sockets over cores
possible = sorted(possible, reverse=True,
key=lambda x: (x.sockets * x.cores,
x.sockets,
x.threads))
LOG.debug("Got %d possible topologies", len(possible))
if len(possible) == 0:
raise exception.ImageVCPULimitsRangeImpossible(vcpus=vcpus,
sockets=maxsockets,
cores=maxcores,
threads=maxthreads)
return possible
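# Illustrative sketch (added for exposition; not part of the original
# module). A self-contained analogue of the enumeration above, using plain
# tuples instead of nova objects. For vcpus=8 under an 8:8:1 limit it
# returns [(8, 1, 1), (4, 2, 1), (2, 4, 1), (1, 8, 1)]: fewest threads
# first, then more sockets, mirroring the sort above.
def _example_possible_topologies(vcpus=8, maxsockets=8, maxcores=8,
                                 maxthreads=1):
    candidates = []
    for s in range(1, maxsockets + 1):
        for c in range(1, maxcores + 1):
            for t in range(1, maxthreads + 1):
                if s * c * t == vcpus:
                    candidates.append((s, c, t))
    return sorted(candidates, reverse=True,
                  key=lambda topo: (topo[0] * topo[1], topo[0], topo[2]))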
def _sort_possible_cpu_topologies(possible, wanttopology):
"""Sort the topologies in order of preference
:param possible: list of nova.objects.VirtCPUTopology instances
:param wanttopology: nova.objects.VirtCPUTopology for preferred
topology
This takes the list of possible topologies and resorts
it such that those configurations which most closely
match the preferred topology are first.
:returns: sorted list of nova.objects.VirtCPUTopology instances
"""
# Look at possible topologies and score them according
# to how well they match the preferred topologies
# We don't use python's sort(), since we want to
# preserve the sorting done when populating the
# 'possible' list originally
scores = collections.defaultdict(list)
for topology in possible:
score = _score_cpu_topology(topology, wanttopology)
scores[score].append(topology)
# Build list of all possible topologies sorted
# by the match score, best match first
desired = []
desired.extend(scores[3])
desired.extend(scores[2])
desired.extend(scores[1])
desired.extend(scores[0])
return desired
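# Illustrative sketch (exposition only; reuses the module's collections
# import): the scores dict above is a stable bucket sort - items keep
# their relative order inside each score bucket. The same idea in a
# generic form:
def _example_bucket_sort(items, score_fn, best_score=3):
    buckets = collections.defaultdict(list)
    for item in items:
        buckets[score_fn(item)].append(item)
    ordered = []
    for score in range(best_score, -1, -1):
        ordered.extend(buckets[score])
    return ordered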
def _threads_requested_by_user(flavor, image_meta):
keys = ("cpu_threads", "cpu_maxthreads")
if any(flavor.extra_specs.get("hw:%s" % key) for key in keys):
return True
if any(image_meta.get("properties", {}).get("hw_%s" % key)
for key in keys):
return True
return False
def _get_desirable_cpu_topologies(flavor, image_meta, allow_threads=True,
numa_topology=None):
"""Get desired CPU topologies according to settings
:param flavor: Flavor object to query extra specs from
:param image_meta: ImageMeta object to query properties from
:param allow_threads: if the hypervisor supports CPU threads
:param numa_topology: InstanceNUMATopology object that may contain
additional topology constraints (such as threading
information) that we should consider
Look at the properties set in the flavor extra specs and
the image metadata and build up a list of all possible
valid CPU topologies that can be used in the guest. Then
return this list sorted in order of preference.
:returns: sorted list of nova.objects.VirtCPUTopology instances
"""
LOG.debug("Getting desirable topologies for flavor %(flavor)s "
"and image_meta %(image_meta)s",
{"flavor": flavor, "image_meta": image_meta})
preferred, maximum = _get_cpu_topology_constraints(flavor, image_meta)
specified_threads = None
if numa_topology:
min_requested_threads = None
cell_topologies = [cell.cpu_topology for cell in numa_topology.cells
if cell.cpu_topology]
if cell_topologies:
min_requested_threads = min(
topo.threads for topo in cell_topologies)
if min_requested_threads:
if _threads_requested_by_user(flavor, image_meta):
min_requested_threads = min(preferred.threads,
min_requested_threads)
specified_threads = max(1, min_requested_threads)
possible = _get_possible_cpu_topologies(flavor.vcpus,
maximum,
allow_threads,
specified_threads)
desired = _sort_possible_cpu_topologies(possible, preferred)
return desired
def get_best_cpu_topology(flavor, image_meta, allow_threads=True,
numa_topology=None):
"""Get best CPU topology according to settings
:param flavor: Flavor object to query extra specs from
:param image_meta: ImageMeta object to query properties from
:param allow_threads: if the hypervisor supports CPU threads
:param numa_topology: InstanceNUMATopology object that may contain
additional topology constraints (such as threading
information) that we should consider
Look at the properties set in the flavor extra specs and
the image metadata and build up a list of all possible
valid CPU topologies that can be used in the guest. Then
return the best topology to use
:returns: a nova.objects.VirtCPUTopology instance for best topology
"""
return _get_desirable_cpu_topologies(flavor, image_meta,
allow_threads, numa_topology)[0]
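# Example usage (exposition only; flavor and image_meta are hypothetical
# and the default topology limits are assumed permissive): for a 4-vCPU
# flavor carrying hw:cpu_max_sockets=2 in its extra_specs,
#   get_best_cpu_topology(flavor, image_meta)
# would typically return VirtCPUTopology(sockets=2, cores=2, threads=1):
# (2, 2, 1) and (1, 4, 1) both avoid threads, and the tie breaks towards
# more sockets.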
class VirtNUMATopologyCell(object):
"""Class for reporting NUMA resources in a cell
The VirtNUMATopologyCell class represents the
hardware resources present in a NUMA cell.
"""
def __init__(self, id, cpuset, memory):
"""Create a new NUMA Cell
:param id: integer identifier of cell
:param cpuset: set containing list of CPU indexes
:param memory: RAM measured in MiB
Creates a new NUMA cell object to record the hardware
resources.
:returns: a new NUMA cell object
"""
super(VirtNUMATopologyCell, self).__init__()
self.id = id
self.cpuset = cpuset
self.memory = memory
def _to_dict(self):
return {'cpus': format_cpu_spec(self.cpuset, allow_ranges=False),
'mem': {'total': self.memory},
'id': self.id}
@classmethod
def _from_dict(cls, data_dict):
cpuset = parse_cpu_spec(data_dict.get('cpus', ''))
memory = data_dict.get('mem', {}).get('total', 0)
cell_id = data_dict.get('id')
return cls(cell_id, cpuset, memory)
class VirtNUMATopologyCellLimit(VirtNUMATopologyCell):
def __init__(self, id, cpuset, memory, cpu_limit, memory_limit):
"""Create a new NUMA Cell with usage
:param id: integer identifier of cell
:param cpuset: set containing list of CPU indexes
:param memory: RAM measured in MiB
:param cpu_limit: maximum number of CPUs allocated
        :param memory_limit: maximum RAM allocated in MiB
Creates a new NUMA cell object to represent the max hardware
resources and utilization. The number of CPUs specified
by the @cpu_usage parameter may be larger than the number
of bits set in @cpuset if CPU overcommit is used. Likewise
the amount of RAM specified by the @memory_limit parameter
may be larger than the available RAM in @memory if RAM
overcommit is used.
:returns: a new NUMA cell object
"""
super(VirtNUMATopologyCellLimit, self).__init__(
id, cpuset, memory)
self.cpu_limit = cpu_limit
self.memory_limit = memory_limit
def _to_dict(self):
data_dict = super(VirtNUMATopologyCellLimit, self)._to_dict()
data_dict['mem']['limit'] = self.memory_limit
data_dict['cpu_limit'] = self.cpu_limit
return data_dict
@classmethod
def _from_dict(cls, data_dict):
cpuset = parse_cpu_spec(data_dict.get('cpus', ''))
memory = data_dict.get('mem', {}).get('total', 0)
cpu_limit = data_dict.get('cpu_limit', len(cpuset))
memory_limit = data_dict.get('mem', {}).get('limit', memory)
cell_id = data_dict.get('id')
return cls(cell_id, cpuset, memory, cpu_limit, memory_limit)
def _numa_cell_supports_pagesize_request(host_cell, inst_cell):
"""Determines whether the cell can accept the request.
:param host_cell: host cell to fit the instance cell onto
:param inst_cell: instance cell we want to fit
:returns: The page size able to be handled by host_cell
"""
avail_pagesize = [page.size_kb for page in host_cell.mempages]
avail_pagesize.sort(reverse=True)
def verify_pagesizes(host_cell, inst_cell, avail_pagesize):
inst_cell_mem = inst_cell.memory * units.Ki
for pagesize in avail_pagesize:
if host_cell.can_fit_hugepages(pagesize, inst_cell_mem):
return pagesize
if inst_cell.pagesize == MEMPAGES_SMALL:
return verify_pagesizes(host_cell, inst_cell, avail_pagesize[-1:])
elif inst_cell.pagesize == MEMPAGES_LARGE:
return verify_pagesizes(host_cell, inst_cell, avail_pagesize[:-1])
elif inst_cell.pagesize == MEMPAGES_ANY:
return verify_pagesizes(host_cell, inst_cell, avail_pagesize)
else:
return verify_pagesizes(host_cell, inst_cell, [inst_cell.pagesize])
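# Worked example (exposition only): with host mempages of 4KiB, 2MiB and
# 1GiB, avail_pagesize sorts to [1048576, 2048, 4] (sizes in KiB).
# MEMPAGES_SMALL then probes only the final entry (4KiB), MEMPAGES_LARGE
# everything but the final entry, and MEMPAGES_ANY the whole list, so the
# first (largest) size whose pool still fits the cell's memory wins.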
def _pack_instance_onto_cores(available_siblings, instance_cell, host_cell_id):
"""Pack an instance onto a set of siblings
:param available_siblings: list of sets of CPU id's - available
siblings per core
:param instance_cell: An instance of objects.InstanceNUMACell describing
the pinning requirements of the instance
:returns: An instance of objects.InstanceNUMACell containing the pinning
information, and potentially a new topology to be exposed to the
instance. None if there is no valid way to satisfy the sibling
requirements for the instance.
    This method will calculate the pinning for the given instance and its
topology, making sure that hyperthreads of the instance match up with
those of the host when the pinning takes effect.
"""
# We build up a data structure 'can_pack' that answers the question:
# 'Given the number of threads I want to pack, give me a list of all
# the available sibling sets that can accommodate it'
can_pack = collections.defaultdict(list)
for sib in available_siblings:
for threads_no in range(1, len(sib) + 1):
can_pack[threads_no].append(sib)
def _can_pack_instance_cell(instance_cell, threads_per_core, cores_list):
"""Determines if instance cell can fit an avail set of cores."""
if threads_per_core * len(cores_list) < len(instance_cell):
return False
if instance_cell.siblings:
return instance_cell.cpu_topology.threads <= threads_per_core
else:
return len(instance_cell) % threads_per_core == 0
# We iterate over the can_pack dict in descending order of cores that
# can be packed - an attempt to get even distribution over time
    for cores_per_sib, sib_list in sorted(
            can_pack.items(), reverse=True):
if _can_pack_instance_cell(instance_cell,
cores_per_sib, sib_list):
sliced_sibs = map(lambda s: list(s)[:cores_per_sib], sib_list)
if instance_cell.siblings:
pinning = zip(itertools.chain(*instance_cell.siblings),
itertools.chain(*sliced_sibs))
else:
pinning = zip(sorted(instance_cell.cpuset),
itertools.chain(*sliced_sibs))
topology = (instance_cell.cpu_topology or
objects.VirtCPUTopology(sockets=1,
cores=len(sliced_sibs),
threads=cores_per_sib))
instance_cell.pin_vcpus(*pinning)
instance_cell.cpu_topology = topology
instance_cell.id = host_cell_id
return instance_cell
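# Worked example (exposition only): with available_siblings
# [set([0, 4]), set([1, 5])] - two host cores with two hardware threads
# each - can_pack maps both 1 and 2 guest threads to both sibling sets. A
# four-vCPU instance cell requesting a 2-thread topology then pins its
# guest thread pairs onto, e.g., (0, 4) and (1, 5).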
def _numa_fit_instance_cell_with_pinning(host_cell, instance_cell):
"""Figure out if cells can be pinned to a host cell and return details
:param host_cell: objects.NUMACell instance - the host cell that
                      the instance should be pinned to
:param instance_cell: objects.InstanceNUMACell instance without any
pinning information
:returns: objects.InstanceNUMACell instance with pinning information,
or None if instance cannot be pinned to the given host
"""
if (host_cell.avail_cpus < len(instance_cell.cpuset) or
host_cell.avail_memory < instance_cell.memory):
# If we do not have enough CPUs available or not enough memory
# on the host cell, we quit early (no oversubscription).
return
if host_cell.siblings:
        # Instance requires hyperthreading in its topology
if instance_cell.cpu_topology and instance_cell.siblings:
return _pack_instance_onto_cores(host_cell.free_siblings,
instance_cell, host_cell.id)
else:
# Try to pack the instance cell in one core
largest_free_sibling_set = sorted(
host_cell.free_siblings, key=len)[-1]
if (len(instance_cell.cpuset) <=
len(largest_free_sibling_set)):
return _pack_instance_onto_cores(
[largest_free_sibling_set], instance_cell, host_cell.id)
            # We can't pack it onto one core, so try with the available siblings
else:
return _pack_instance_onto_cores(
host_cell.free_siblings, instance_cell, host_cell.id)
else:
# Straightforward to pin to available cpus when there is no
# hyperthreading on the host
return _pack_instance_onto_cores(
[host_cell.free_cpus], instance_cell, host_cell.id)
def _numa_fit_instance_cell(host_cell, instance_cell, limit_cell=None):
"""Check if a instance cell can fit and set it's cell id
:param host_cell: host cell to fit the instance cell onto
:param instance_cell: instance cell we want to fit
:param limit_cell: cell with limits of the host_cell if any
Make sure we can fit the instance cell onto a host cell and if so,
return a new objects.InstanceNUMACell with the id set to that of
the host, or None if the cell exceeds the limits of the host
:returns: a new instance cell or None
"""
# NOTE (ndipanov): do not allow an instance to overcommit against
# itself on any NUMA cell
if (instance_cell.memory > host_cell.memory or
len(instance_cell.cpuset) > len(host_cell.cpuset)):
return None
if instance_cell.cpu_pinning_requested:
new_instance_cell = _numa_fit_instance_cell_with_pinning(
host_cell, instance_cell)
if not new_instance_cell:
return
new_instance_cell.pagesize = instance_cell.pagesize
instance_cell = new_instance_cell
elif limit_cell:
memory_usage = host_cell.memory_usage + instance_cell.memory
cpu_usage = host_cell.cpu_usage + len(instance_cell.cpuset)
if (memory_usage > limit_cell.memory_limit or
cpu_usage > limit_cell.cpu_limit):
return None
pagesize = None
if instance_cell.pagesize:
pagesize = _numa_cell_supports_pagesize_request(
host_cell, instance_cell)
if not pagesize:
return
instance_cell.id = host_cell.id
instance_cell.pagesize = pagesize
return instance_cell
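# Worked example (exposition only, no pinning or pagesize request): a host
# cell with 4 CPUs / 1024 MiB and current usage of 2 CPUs / 512 MiB, under
# limits cpu_limit=8 and memory_limit=1536, accepts an instance cell of
# 2 CPUs / 512 MiB: the self-overcommit check passes (2 <= 4, 512 <= 1024)
# and so does the limit check (2 + 2 <= 8, 512 + 512 <= 1536), so the cell
# is returned with its id set to the host cell's id.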
class VirtNUMATopology(object):
"""Base class for tracking NUMA topology information
The VirtNUMATopology class represents the NUMA hardware
topology for memory and CPUs in any machine. It is
later specialized for handling either guest instance
or compute host NUMA topology.
"""
def __init__(self, cells=None):
"""Create a new NUMA topology object
:param cells: list of VirtNUMATopologyCell instances
"""
super(VirtNUMATopology, self).__init__()
self.cells = cells or []
def __len__(self):
"""Defined so that boolean testing works the same as for lists."""
return len(self.cells)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, str(self._to_dict()))
def _to_dict(self):
return {'cells': [cell._to_dict() for cell in self.cells]}
@classmethod
def _from_dict(cls, data_dict):
return cls(cells=[cls.cell_class._from_dict(cell_dict)
for cell_dict in data_dict.get('cells', [])])
def to_json(self):
return jsonutils.dumps(self._to_dict())
@classmethod
def from_json(cls, json_string):
return cls._from_dict(jsonutils.loads(json_string))
def _numa_get_flavor_or_image_prop(flavor, image_meta, propname):
"""Return the value of propname from flavor or image
:param flavor: a Flavor object or dict of instance type information
:param image_meta: a dict of image information
:returns: a value or None
"""
flavor_val = flavor.get('extra_specs', {}).get("hw:" + propname)
image_val = (image_meta or {}).get("properties", {}).get("hw_" + propname)
if flavor_val is not None:
if image_val is not None:
raise exception.ImageNUMATopologyForbidden(
name='hw_' + propname)
return flavor_val
else:
return image_val
def _numa_get_pagesize_constraints(flavor, image_meta):
"""Return the requested memory page size
:param flavor: a Flavor object to read extra specs from
:param image_meta: an Image object to read meta data from
:raises: MemoryPagesSizeInvalid or MemoryPageSizeForbidden
:returns: a page size requested or MEMPAGES_*
"""
def check_and_return_pages_size(request):
if request == "any":
return MEMPAGES_ANY
elif request == "large":
return MEMPAGES_LARGE
elif request == "small":
return MEMPAGES_SMALL
else:
try:
request = int(request)
except ValueError:
request = 0
if request <= 0:
raise exception.MemoryPageSizeInvalid(pagesize=request)
return request
image_meta_prop = (image_meta or {}).get("properties", {})
flavor_request = flavor.get('extra_specs', {}).get("hw:mem_page_size", "")
image_request = image_meta_prop.get("hw_mem_page_size", "")
if not flavor_request and image_request:
raise exception.MemoryPageSizeForbidden(
pagesize=image_request,
against="<empty>")
if not flavor_request:
# Nothing was specified for hugepages,
        # let the default behaviour apply.
return None
pagesize = check_and_return_pages_size(flavor_request)
if image_request and (pagesize in (MEMPAGES_ANY, MEMPAGES_LARGE)):
return check_and_return_pages_size(image_request)
elif image_request:
raise exception.MemoryPageSizeForbidden(
pagesize=image_request,
against=flavor_request)
return pagesize
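# Decision table (exposition only) for the precedence implemented above:
#   flavor hw:mem_page_size | image hw_mem_page_size | result
#   (unset)                 | (unset)                | None (host default)
#   (unset)                 | 2048                   | MemoryPageSizeForbidden
#   small                   | 2048                   | MemoryPageSizeForbidden
#   large                   | 2048                   | 2048
#   any                     | large                  | MEMPAGES_LARGE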
def _numa_get_constraints_manual(nodes, flavor, image_meta):
cells = []
totalmem = 0
availcpus = set(range(flavor['vcpus']))
for node in range(nodes):
cpus = _numa_get_flavor_or_image_prop(
flavor, image_meta, "numa_cpus.%d" % node)
mem = _numa_get_flavor_or_image_prop(
flavor, image_meta, "numa_mem.%d" % node)
# We're expecting both properties set, so
# raise an error if either is missing
if cpus is None or mem is None:
raise exception.ImageNUMATopologyIncomplete()
mem = int(mem)
cpuset = parse_cpu_spec(cpus)
for cpu in cpuset:
if cpu > (flavor['vcpus'] - 1):
raise exception.ImageNUMATopologyCPUOutOfRange(
cpunum=cpu, cpumax=(flavor['vcpus'] - 1))
if cpu not in availcpus:
raise exception.ImageNUMATopologyCPUDuplicates(
cpunum=cpu)
availcpus.remove(cpu)
cells.append(objects.InstanceNUMACell(
id=node, cpuset=cpuset, memory=mem))
totalmem = totalmem + mem
if availcpus:
raise exception.ImageNUMATopologyCPUsUnassigned(
cpuset=str(availcpus))
if totalmem != flavor['memory_mb']:
raise exception.ImageNUMATopologyMemoryOutOfRange(
memsize=totalmem,
memtotal=flavor['memory_mb'])
return objects.InstanceNUMATopology(cells=cells)
def _numa_get_constraints_auto(nodes, flavor, image_meta):
if ((flavor['vcpus'] % nodes) > 0 or
(flavor['memory_mb'] % nodes) > 0):
raise exception.ImageNUMATopologyAsymmetric()
cells = []
for node in range(nodes):
cpus = _numa_get_flavor_or_image_prop(
flavor, image_meta, "numa_cpus.%d" % node)
mem = _numa_get_flavor_or_image_prop(
flavor, image_meta, "numa_mem.%d" % node)
# We're not expecting any properties set, so
# raise an error if there are any
if cpus is not None or mem is not None:
raise exception.ImageNUMATopologyIncomplete()
ncpus = int(flavor['vcpus'] / nodes)
mem = int(flavor['memory_mb'] / nodes)
start = node * ncpus
cpuset = set(range(start, start + ncpus))
cells.append(objects.InstanceNUMACell(
id=node, cpuset=cpuset, memory=mem))
return objects.InstanceNUMATopology(cells=cells)
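# Worked example (exposition only): a flavor with vcpus=4, memory_mb=2048
# and hw:numa_nodes=2 (and no per-node numa_cpus/numa_mem properties)
# splits evenly into cell 0 = cpuset {0, 1} / 1024 MiB and cell 1 =
# cpuset {2, 3} / 1024 MiB, while vcpus=5 with nodes=2 raises
# ImageNUMATopologyAsymmetric because 5 % 2 != 0.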
def _add_cpu_pinning_constraint(flavor, image_meta, numa_topology):
flavor_pinning = flavor.get('extra_specs', {}).get("hw:cpu_policy")
image_pinning = image_meta.get('properties', {}).get("hw_cpu_policy")
if flavor_pinning == "dedicated":
requested = True
elif flavor_pinning == "shared":
if image_pinning == "dedicated":
raise exception.ImageCPUPinningForbidden()
requested = False
else:
requested = image_pinning == "dedicated"
if not requested:
return numa_topology
if numa_topology:
# NOTE(ndipanov) Setting the cpu_pinning attribute to a non-None value
# means CPU pinning was requested
for cell in numa_topology.cells:
cell.cpu_pinning = {}
return numa_topology
else:
single_cell = objects.InstanceNUMACell(
id=0,
cpuset=set(range(flavor['vcpus'])),
memory=flavor['memory_mb'],
cpu_pinning={})
numa_topology = objects.InstanceNUMATopology(cells=[single_cell])
return numa_topology
# TODO(sahid): Move numa related code to hardware/numa.py
def numa_get_constraints(flavor, image_meta):
"""Return topology related to input request
:param flavor: Flavor object to read extra specs from
:param image_meta: Image object to read image metadata from
:returns: InstanceNUMATopology or None
"""
nodes = _numa_get_flavor_or_image_prop(
flavor, image_meta, "numa_nodes")
pagesize = _numa_get_pagesize_constraints(
flavor, image_meta)
numa_topology = None
if nodes or pagesize:
nodes = nodes and int(nodes) or 1
# We'll pick what path to go down based on whether
# anything is set for the first node. Both paths
# have logic to cope with inconsistent property usage
auto = _numa_get_flavor_or_image_prop(
flavor, image_meta, "numa_cpus.0") is None
if auto:
numa_topology = _numa_get_constraints_auto(
nodes, flavor, image_meta)
else:
numa_topology = _numa_get_constraints_manual(
nodes, flavor, image_meta)
        # We currently support the same pagesize for all cells.
        for c in numa_topology.cells:
            c.pagesize = pagesize
return _add_cpu_pinning_constraint(flavor, image_meta, numa_topology)
class VirtNUMALimitTopology(VirtNUMATopology):
"""Class to represent the max resources of a compute node used
for checking oversubscription limits.
"""
cell_class = VirtNUMATopologyCellLimit
def numa_fit_instance_to_host(
host_topology, instance_topology, limits_topology=None,
pci_requests=None, pci_stats=None):
"""Fit the instance topology onto the host topology given the limits
:param host_topology: objects.NUMATopology object to fit an instance on
:param instance_topology: objects.InstanceNUMATopology to be fitted
:param limits_topology: VirtNUMALimitTopology that defines limits
:param pci_requests: instance pci_requests
:param pci_stats: pci_stats for the host
Given a host and instance topology and optionally limits - this method
will attempt to fit instance cells onto all permutations of host cells
by calling the _numa_fit_instance_cell method, and return a new
    InstanceNUMATopology with its cell ids set to the host cell ids of
the first successful permutation, or None.
"""
if (not (host_topology and instance_topology) or
len(host_topology) < len(instance_topology)):
return
else:
if limits_topology is None:
limits_topology_cells = itertools.repeat(
None, len(host_topology))
else:
limits_topology_cells = limits_topology.cells
# TODO(ndipanov): We may want to sort permutations differently
# depending on whether we want packing/spreading over NUMA nodes
for host_cell_perm in itertools.permutations(
zip(host_topology.cells, limits_topology_cells),
len(instance_topology)
):
cells = []
for (host_cell, limit_cell), instance_cell in zip(
host_cell_perm, instance_topology.cells):
got_cell = _numa_fit_instance_cell(
host_cell, instance_cell, limit_cell)
if got_cell is None:
break
cells.append(got_cell)
if len(cells) == len(host_cell_perm):
if not pci_requests:
return objects.InstanceNUMATopology(cells=cells)
elif ((pci_stats is not None) and
pci_stats.support_requests(pci_requests,
cells)):
return objects.InstanceNUMATopology(cells=cells)
def _numa_pagesize_usage_from_cell(hostcell, instancecell, sign):
topo = []
for pages in hostcell.mempages:
if pages.size_kb == instancecell.pagesize:
topo.append(objects.NUMAPagesTopology(
size_kb=pages.size_kb,
total=pages.total,
used=max(0, pages.used +
instancecell.memory * units.Ki /
pages.size_kb * sign)))
else:
topo.append(pages)
return topo
def numa_usage_from_instances(host, instances, free=False):
"""Get host topology usage
:param host: objects.NUMATopology with usage information
:param instances: list of objects.InstanceNUMATopology
:param free: If True usage of the host will be decreased
Sum the usage from all @instances to report the overall
host topology usage
:returns: objects.NUMATopology including usage information
"""
if host is None:
return
instances = instances or []
cells = []
sign = -1 if free else 1
for hostcell in host.cells:
memory_usage = hostcell.memory_usage
cpu_usage = hostcell.cpu_usage
newcell = objects.NUMACell(
id=hostcell.id, cpuset=hostcell.cpuset, memory=hostcell.memory,
cpu_usage=0, memory_usage=0, mempages=hostcell.mempages,
pinned_cpus=hostcell.pinned_cpus, siblings=hostcell.siblings)
for instance in instances:
for instancecell in instance.cells:
if instancecell.id == hostcell.id:
memory_usage = (
memory_usage + sign * instancecell.memory)
cpu_usage = cpu_usage + sign * len(instancecell.cpuset)
if instancecell.pagesize and instancecell.pagesize > 0:
newcell.mempages = _numa_pagesize_usage_from_cell(
hostcell, instancecell, sign)
if instance.cpu_pinning_requested:
pinned_cpus = set(instancecell.cpu_pinning.values())
if free:
newcell.unpin_cpus(pinned_cpus)
else:
newcell.pin_cpus(pinned_cpus)
newcell.cpu_usage = max(0, cpu_usage)
newcell.memory_usage = max(0, memory_usage)
cells.append(newcell)
return objects.NUMATopology(cells=cells)
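# Worked example (exposition only): a host cell reporting memory_usage=512
# and cpu_usage=2, combined with one instance cell of 2 vCPUs / 512 MiB,
# yields usage of 4 CPUs / 1024 MiB; calling again with free=True flips
# sign to -1 and returns it to 2 CPUs / 512 MiB, never dropping below zero
# thanks to the max(0, ...) guards above.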
# TODO(ndipanov): Remove when all code paths are using objects
def instance_topology_from_instance(instance):
"""Convenience method for getting the numa_topology out of instances
Since we may get an Instance as either a dict, a db object, or an actual
    Instance object, this makes sure we get back either None, or an instance
of objects.InstanceNUMATopology class.
"""
if isinstance(instance, objects.Instance):
# NOTE (ndipanov): This may cause a lazy-load of the attribute
instance_numa_topology = instance.numa_topology
else:
if 'numa_topology' in instance:
instance_numa_topology = instance['numa_topology']
elif 'uuid' in instance:
try:
instance_numa_topology = (
objects.InstanceNUMATopology.get_by_instance_uuid(
context.get_admin_context(), instance['uuid'])
)
except exception.NumaTopologyNotFound:
instance_numa_topology = None
else:
instance_numa_topology = None
if instance_numa_topology:
if isinstance(instance_numa_topology, six.string_types):
instance_numa_topology = (
objects.InstanceNUMATopology.obj_from_primitive(
jsonutils.loads(instance_numa_topology)))
elif isinstance(instance_numa_topology, dict):
# NOTE (ndipanov): A horrible hack so that we can use
# this in the scheduler, since the
# InstanceNUMATopology object is serialized raw using
# the obj_base.obj_to_primitive, (which is buggy and
# will give us a dict with a list of InstanceNUMACell
# objects), and then passed to jsonutils.to_primitive,
# which will make a dict out of those objects. All of
# this is done by scheduler.utils.build_request_spec
# called in the conductor.
#
# Remove when request_spec is a proper object itself!
dict_cells = instance_numa_topology.get('cells')
if dict_cells:
cells = [objects.InstanceNUMACell(
id=cell['id'],
cpuset=set(cell['cpuset']),
memory=cell['memory'],
pagesize=cell.get('pagesize'))
for cell in dict_cells]
instance_numa_topology = objects.InstanceNUMATopology(
cells=cells)
return instance_numa_topology
# TODO(ndipanov): Remove when all code paths are using objects
def host_topology_and_format_from_host(host):
"""Convenience method for getting the numa_topology out of hosts
Since we may get a host as either a dict, a db object, or an actual
ComputeNode object, or an instance of HostState class, this makes sure we
    get back either None, or an instance of objects.NUMATopology class.
:returns: A two-tuple, first element is the topology itself or None, second
is a boolean set to True if topology was in json format.
"""
was_json = False
try:
host_numa_topology = host.get('numa_topology')
except AttributeError:
host_numa_topology = host.numa_topology
if host_numa_topology is not None and isinstance(
host_numa_topology, six.string_types):
was_json = True
host_numa_topology = (objects.NUMATopology.obj_from_db_obj(
host_numa_topology))
return host_numa_topology, was_json
# TODO(ndipanov): Remove when all code paths are using objects
def get_host_numa_usage_from_instance(host, instance, free=False,
never_serialize_result=False):
"""Calculate new 'numa_usage' of 'host' from 'instance' NUMA usage
This is a convenience method to help us handle the fact that we use several
different types throughout the code (ComputeNode and Instance objects,
dicts, scheduler HostState) which may have both json and deserialized
versions of VirtNUMATopology classes.
Handles all the complexity without polluting the class method with it.
:param host: nova.objects.ComputeNode instance, or a db object or dict
:param instance: nova.objects.Instance instance, or a db object or dict
    :param free: if True the returned topology will have its usage
decreased instead.
:param never_serialize_result: if True result will always be an instance of
objects.NUMATopology class.
:returns: numa_usage in the format it was on the host or
objects.NUMATopology instance if never_serialize_result was True
"""
instance_numa_topology = instance_topology_from_instance(instance)
if instance_numa_topology:
instance_numa_topology = [instance_numa_topology]
host_numa_topology, jsonify_result = host_topology_and_format_from_host(
host)
updated_numa_topology = (
numa_usage_from_instances(
host_numa_topology, instance_numa_topology, free=free))
if updated_numa_topology is not None:
if jsonify_result and not never_serialize_result:
updated_numa_topology = updated_numa_topology._to_json()
return updated_numa_topology
| [
"[email protected]"
] | |
2c1698703cd35a9b3ddfb3fb9f2d79719b99005f | bf49a1f5a7e25777f5af626e8b9b2130c73d6450 | /SignalSmooth.py | 9beabef68d27f3f3f6c7ad2eb422f28d25a323c8 | [] | no_license | maryammokhberi/BCI_AudioTactile | 070b6bd70e139392c30b0331b0e7902602cc39ba | df9489f7224ed42ba3c87e059cdde38f733ac587 | refs/heads/master | 2021-09-05T03:04:05.644240 | 2018-01-23T20:54:22 | 2018-01-23T20:54:22 | 104,111,767 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,722 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 17 12:37:38 2017
@author: Downloaded from http://scipy-cookbook.readthedocs.io/items/SignalSmooth.html
"""
import numpy
def smooth(x,window_len=11,window='hanning'):
"""smooth the data using a window with requested size.
This method is based on the convolution of a scaled window with the signal.
The signal is prepared by introducing reflected copies of the signal
(with the window size) in both ends so that transient parts are minimized
in the begining and end part of the output signal.
input:
x: the input signal
window_len: the dimension of the smoothing window; should be an odd integer
window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
flat window will produce a moving average smoothing.
output:
the smoothed signal
example:
    t=linspace(-2,2,50)
x=sin(t)+randn(len(t))*0.1
y=smooth(x)
see also:
numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
scipy.signal.lfilter
TODO: the window parameter could be the window itself if an array instead of a string
NOTE: length(output) != length(input), to correct this: return y[(window_len/2-1):-(window_len/2)] instead of just y.
"""
if x.ndim != 1:
raise ValueError, "smooth only accepts 1 dimension arrays."
if x.size < window_len:
raise ValueError, "Input vector needs to be bigger than window size."
if window_len<3:
return x
    if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError, "Window must be one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'"
s=numpy.r_[x[window_len-1:0:-1],x,x[-2:-window_len-1:-1]]
#print(len(s))
if window == 'flat': #moving average
w=numpy.ones(window_len,'d')
else:
        w = getattr(numpy, window)(window_len)
y=numpy.convolve(w/w.sum(),s,mode='valid')
return y
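# Worked example (exposition only): for x = [1, 2, 3, 4, 5] and
# window_len = 3 the reflected signal built above is
#   s = [3, 2, 1, 2, 3, 4, 5, 4, 3]
# i.e. window_len - 1 mirrored samples on each side, so the 'valid'
# convolution still covers every original sample.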
from numpy import *
from pylab import *
def smooth_demo():
t=linspace(-4,4,100)
x=sin(t)
xn=x+randn(len(t))*0.1
y=smooth(x)
ws=31
subplot(211)
plot(ones(ws))
windows=['flat', 'hanning', 'hamming', 'bartlett', 'blackman']
hold(True)
for w in windows[1:]:
eval('plot('+w+'(ws) )')
axis([0,30,0,1.1])
legend(windows)
title("The smoothing windows")
subplot(212)
plot(x)
plot(xn)
for w in windows:
plot(smooth(xn,10,w))
l=['original signal', 'signal with noise']
l.extend(windows)
legend(l)
title("Smoothing a noisy signal")
show()
if __name__=='__main__':
smooth_demo() | [
"[email protected]"
] | |
869c610d451114a41b0ede9a9dd0219b59828c68 | ff9303e0a6230fb508f33e55a0d4ff156270f350 | /recreation-foptimise.py | afe9438a4fe27ee01b4b802402cff564cd127acd | [] | no_license | Vani4ka/recreation | 026883bf7ae4207e245158b27e9184200b1c4aff | f6cc2b1c957c7096469c9db7d5734a672a876d5a | refs/heads/master | 2021-01-25T12:20:44.776787 | 2018-03-14T15:14:52 | 2018-03-14T15:14:52 | 123,466,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,866 | py | from sklearn import svm
from numpy import genfromtxt
import features as f
import functions as func
import sys
import datetime
import math
def findBestErrorRate(d):
"""Determines the best error rates by first selecting the lowest false
alarm rate and then selecting the lowest miss rate if there are several
pairs with an equal FAR."""
lowest = ((1, 1), (1, 1))
def fPos(p):
return p[0][0]
def fNeg(p):
return p[0][1]
for pair in d.iteritems():
# new lowest FAR found
if fPos(pair) < fPos(lowest):
lowest = pair
# equal FAR - take a look at MR
elif fPos(pair) == fPos(lowest):
if fNeg(pair) < fNeg(lowest):
lowest = pair
return lowest
def findBestErrorRateM(d):
"""Determines the best error rates by first selecting the lowest false
alarm rate and then selecting the lowest miss rate if there are several
pairs with an equal FAR."""
lowest = ((1, 1), (1, 1))
def fPos(p):
return p[0][0]
def fNeg(p):
return p[0][1]
for pair in d.iteritems():
# new lowest MR found
if fNeg(pair) < fNeg(lowest):
lowest = pair
# equal MR - take a look at FAR
elif fNeg(pair) == fNeg(lowest):
if fPos(pair) < fPos(lowest):
lowest = pair
return lowest
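# Worked example (exposition only, hypothetical values): for
#   d = {(0.10, 0.20): ('g1', 'n1'), (0.10, 0.05): ('g2', 'n2')}
# findBestErrorRate returns ((0.10, 0.05), ('g2', 'n2')): the false alarm
# rates tie, so the lower miss rate wins. findBestErrorRateM applies the
# same tie-breaking with the two rates swapped.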
def compute(tpl):
"""Takes a combination of gamma and nu value and trains and
    evaluates an OC-SVM using the current feature set"""
g = tpl[0]
n = tpl[1]
ocsvm = svm.OneClassSVM(nu=n, kernel="rbf", gamma=g, cache_size=1000)
errorRate = func.calcErrorRate(func.run(ocsvm, config["folds"], malicious, benign))
tmpResults[errorRate] = (g, n)
print str(format(errorRate[0], '.20f')) + " " + str(format(errorRate[1], '.20f')) + " : gamma= " + str(
g) + " nu= " + str(n)
def createCombos():
"""Creates all combinations of gamma and nu values"""
combos = []
for gamma in gammaVal:
for nu in nuVal:
combos.append((gamma, nu))
return combos
def printResults(resultList):
"""Determines the best results of the given lists and prints them well
readable."""
# for FAR
best = findBestErrorRate(resultList)
# for MR
# best = findBestErrorRateM(resultList)
print "Best found feature subset / model parameters for " + str(config["folds"]) + "-folded CV with " + str(
len(gammaVal)) + " gamma values and " + str(len(nuVal)) + " nu values:"
print "gamma : %s" % str(best[1][0])
print "nu : %s" % str(best[1][1])
print "grid search results : %s%% false alarm rate, %s%% miss rate" % (
str(best[0][0] * 100), str(best[0][1] * 100))
print "------------------------------------------------------------"
results = {}
config = {
"folds": 8,
"trainingSet": "data/new-datasets/original/fopt/malicious-subset.txt",
"testingSet": "data/new-datasets/original/fopt/benign-subset.txt",
}
if __name__ == '__main__':
started = datetime.datetime.now()
# arrays with gamma and nu values
gammaVal = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1,
0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2,
0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29]
nuVal = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1,
0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2,
0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.3,
0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39]
# training data
malicious = genfromtxt(config["trainingSet"], delimiter='\t')
# benign data
benign = genfromtxt(config["testingSet"], delimiter='\t')
numFeatures = f.getFeatureAmount(config["trainingSet"])
combinations = createCombos()
# writing temporary results to a file
orig_stdout = sys.stdout
temp = open('data/results/optimisations/fopt-temp-' + str(config["folds"]) + '.txt', 'a+')
temp.write("Started on " + started.strftime("%Y-%m-%d %H:%M") + "\n")
sys.stdout = temp
for c in combinations:
tmpResults = {}
compute(c)
results.update(tmpResults)
temp.write("Finished on " + datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + "\n\n")
sys.stdout = orig_stdout
temp.close()
# writing the final result to a file
f = open('data/results/optimisations/fopt-results-' + str(config["folds"]) + '.txt', 'a+')
sys.stdout = f
print "Started on " + started.strftime("%Y-%m-%d %H:%M")
print "Finished on " + datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
print "Execution time " + str(datetime.datetime.now() - started) + "\n"
printResults(results)
sys.stdout = orig_stdout
f.close()
| [
"[email protected]"
] | |
b20a49531606d2c5ae1e5574c0ba06d080ab4e78 | 0a6959eb1802f132e2fd4f9d1321420e293cb451 | /app/src/utils/predictor.py | 06d6e9af727b97ee700d204b87b01ab4b8ba2872 | [
"MIT"
] | permissive | muskanmahajan37/Heart-Disease-Web-Application | 398555940a5d1f363562c03896abc14a6f78ba70 | 02b65ece5606a18c3a293b53ec0f56271882cbb1 | refs/heads/master | 2022-12-24T04:04:34.701813 | 2020-09-25T06:48:51 | 2020-09-25T06:48:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,214 | py | import streamlit as st
import pandas as pd
import joblib
def create_inference_input(df):
input_list = []
age = st.sidebar.slider(label="Age",
min_value=min(df["age"]),
max_value=max(df["age"]))
input_list.append(age)
st.sidebar.write("\n")
sex = st.sidebar.radio(label="Sex",
options=df["sex"].unique().tolist())
st.sidebar.write("\n")
input_list.append(sex)
chest_pain_type = st.sidebar.selectbox(label="Chest pain type",
options=df["chest_pain_type"].unique().tolist())
st.sidebar.write("\n")
input_list.append(chest_pain_type)
resting_blood_pressure = st.sidebar.slider(label="Resting blood pressure mm Hg",
min_value=min(df["resting_blood_pressure"]),
max_value=max(df["resting_blood_pressure"]))
st.sidebar.write("\n")
input_list.append(resting_blood_pressure)
cholesterol = st.sidebar.slider(label="Cholesterol measurement in mg/dl",
min_value=min(df["cholesterol"]),
max_value=max(df["cholesterol"]))
st.sidebar.write("\n")
input_list.append(cholesterol)
fasting_blood_sugar = st.sidebar.radio(label="Enter the range for the fasting blood sugar",
options=df["fasting_blood_sugar"].unique().tolist())
st.sidebar.write("\n")
input_list.append(fasting_blood_sugar)
rest_ecg = st.sidebar.selectbox(label="Resting electromagnetic measurement.",
options=df["rest_ecg"].unique().tolist())
st.sidebar.write("\n")
input_list.append(rest_ecg)
max_heart_rate_achieved = st.sidebar.slider(label="Maximum heart rate achieved",
min_value=min(df["max_heart_rate_achieved"]),
max_value=max(df["max_heart_rate_achieved"]))
st.sidebar.write("\n")
input_list.append(max_heart_rate_achieved)
exercise_induced_angina = st.sidebar.radio(label="Exercise induced Angina?",
options=df["exercise_induced_angina"].unique().tolist())
st.sidebar.write("\n")
input_list.append(exercise_induced_angina)
st_depression = st.sidebar.slider("Enter the ST depression during exercise",
min_value=min(df["st_depression"]),
max_value=max(df["st_depression"]))
st.sidebar.write("\n")
input_list.append(st_depression)
st_slope = st.sidebar.selectbox(label="Slope of peak exercise ST segment",
options=df["st_slope"].unique().tolist())
st.sidebar.write("\n")
input_list.append(st_slope)
num_major_vessels = st.sidebar.slider(label="Number of major vessels",
min_value=min(df["num_major_vessels"]),
max_value=max(df["num_major_vessels"]))
st.sidebar.write("\n")
input_list.append(num_major_vessels)
thalassemia = st.sidebar.selectbox(label="History of Thalassemia?",
options=df["thalassemia"].unique().tolist())
st.sidebar.write("\n")
input_list.append(thalassemia)
response_dict = {column:value for column, value in zip(df.columns, input_list)}
return response_dict
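# Illustrative shape of the returned payload (exposition only; the actual
# keys come from df.columns of the loaded heart-disease dataframe):
#   {'age': 54, 'sex': 'male', 'chest_pain_type': 'typical angina', ...,
#    'thalassemia': 'normal'}
# i.e. one widget value per dataframe column, paired in column order with
# input_list.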
def predictor(df):
st.header("Machine Learning model predictor")
st.write("""
A **machine learning model** trained on the heart disease dataset will be used
to predict whether a patient has heart disease or not. We will be providing dropdowns
for the user to select inputs for different attributes. These will then be fed into
the machine learning model which will help predict the possibility of heart disease or not.
""")
st.sidebar.header("Input form for ML model")
response_dict = create_inference_input(df)
name = st.text_input(label="Enter your name")
if st.sidebar.button(label="Submit input to model"):
joblib.dump(response_dict, "app/src/utils/payload_dump/response_dict.bin")
if st.button(label="Predict"):
response_dict = joblib.load("app/src/utils/payload_dump/response_dict.bin")
df = df.append(response_dict, ignore_index=True)
model = joblib.load("app/src/models/rf_model.bin")
df.drop(["target"], axis=1, inplace=True)
df = pd.get_dummies(df, drop_first=True)
pred = model.predict(df.iloc[-1, :].values.reshape(1, -1))
pred_prob = model.predict_proba(df.iloc[-1, :].values.reshape(1, -1))
pred = "No you do not have heart disease" if pred == 0 else "You have heart disease"
result = pd.DataFrame({"Values": [name, round(pred_prob[0][1], 2), pred]},
index=["Name",
"Probability of Heart Disease",
"Verdict"])
st.write(result)
| [
"[email protected]"
] | |
9545603abda16bc46d541742d01db6610d959d1b | f2b833774224a626741752c5ab3c692b18692b37 | /vfoa/methods/geometrical_model.py | 51da6cf9db40b724c84ee6cd08d31008beeef456 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | idiap/vfoa | 6a3a691ec5ebb048147e597db234f6e28c85618c | c42697ebd848dc4871a5aaf480e2bbc40effc2c6 | refs/heads/main | 2022-12-27T22:23:56.364523 | 2020-10-12T07:03:02 | 2020-10-12T07:03:02 | 303,306,127 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,832 | py | """
Copyright (c) 2018 Idiap Research Institute, http://www.idiap.ch/
Written by Remy Siegfried <[email protected]>
This file contains a vfoa estimation method based on geometrical features.
Estimate vfoa based on a geometric model: if the angular distance between the gaze vector and the target
direction (i.e. the line from the eye to the target) is below a given threshold, the gaze is allocated to this target.
If several targets fulfil this condition, the nearest one to the gaze vector wins.
"""
from copy import deepcopy
import numpy as np
from vfoa.utils.geometry import angleBetweenVectors_deg, yawElevationToVector
class GeometricModel:
def __init__(self, thresh=10):
self.name = 'geometricalModel'
self.thresh = thresh # Threshold applied on the angular distance between gaze and target direction
self.vfoa_prob = None # Unnormalized vfoa
self.vfoa = None # Normalized distribution
def set_parameters(self, parameterList):
if parameterList is None or len(parameterList) != 1:
raise Exception('given parameters ({}) do not fit the model. Need 1 parameter'.format(parameterList))
threshold = parameterList[0]
if threshold is not None:
self.thresh = threshold
def compute_vfoa(self, person, targetDict, timestamp):
""" <gaze>: [x, y, z, yaw, pitch, roll]
<targetDict>: {'targetname': Target}
<gazeFormat>: should be 'vector' or 'angles' """
# Check coordinate systems and units
person.convert_to(positionCS='CCS', poseCS='CCS', poseUnit='deg')
for key in targetDict.keys():
targetDict[key].convert_to(positionCS='CCS')
# Get nearest Target
nearestTarget, minDist = 'aversion', np.inf
for targetName, target in targetDict.items():
if targetName != person.name:
if target.position is not None:
                    # Compute 3D angular error
gazeVec = yawElevationToVector(person.gaze[3:5])
angDist = angleBetweenVectors_deg(gazeVec, target.position - person.gaze[0:3]) # Error in 3D
# Compute error in 2D angles (yaw, elevation)
# targetAngles = np.array(vectorToYawElevation(target.position - person.gaze[0:3]))
# yawEleDist = person.gaze[3:5] - targetAngles
if angDist < self.thresh and angDist < minDist:
minDist = angDist
nearestTarget = targetName
# Build vfoa
        self.vfoa = {'aversion': int(nearestTarget == 'aversion')}
for targetName, target in targetDict.items():
if targetName != person.name:
                self.vfoa[targetName] = int(nearestTarget == targetName)
self.vfoa_prob = self.vfoa
def get_vfoa_prob(self, targetName=None):
""" Return probability that the person looks to the target (dict of {'targetName': probability} or this
probability for all targets if <targetName> is None.)
        Note that those probabilities do not sum to 1 (unlike <self.vfoa>, which is a distribution)."""
if targetName:
return deepcopy(self.vfoa_prob[targetName])
else:
return deepcopy(self.vfoa_prob)
def get_vfoa_distribution(self, targetName=None):
""" Return distribution that the person looks to the target (dict of {'targetName': probability} or this
probability for all targets if <targetName> is None.) """
if targetName:
return deepcopy(self.vfoa[targetName])
else:
return deepcopy(self.vfoa)
def train(self, personList, targetDictList, groundTruth):
distCollection_targets = []
distCollection_aversion = []
# Gather decision value (i.e. angular distance between target direction and gaze)
nbSamples = 0
for person, targetDict, gt in zip(personList, targetDictList, groundTruth):
nearestTarget, minDist = 'aversion', np.inf
for targetName, target in targetDict.items():
if targetName != person.name:
if target.position is not None:
# Compute 3D angular error
gazeVec = yawElevationToVector(person.gaze[3:5])
angDist = angleBetweenVectors_deg(gazeVec, target.position - person.gaze[0:3])
if angDist < minDist:
minDist = angDist
nearestTarget = targetName
if gt == nearestTarget:
distCollection_targets.append(minDist)
nbSamples += 1
elif gt == 'aversion':
distCollection_aversion.append(minDist)
nbSamples += 1
# Build GMM model of classes
mean1 = np.mean(np.array(distCollection_targets))
var1 = np.var(np.array(distCollection_targets))
mean2 = np.mean(np.array(distCollection_aversion))
var2 = np.var(np.array(distCollection_aversion))
# Compute gaussian intersections
a = 1/var1 - 1/var2
b = 2*(mean2/var2 - mean1/var1)
c = mean1**2/var1 - mean2**2/var2 - np.log(var2/var1)
intersections = np.roots([a, b, c])
# Select the intersection representing a highest probability (intuition)
def _gaussian(x, mu, var):
return 1 / (2 * np.pi * var) ** .5 * np.exp(-.5 * (x - mu) ** 2 / var)
if _gaussian(intersections[0], mean1, var1) > _gaussian(intersections[1], mean1, var1):
self.thresh = intersections[0]
else:
self.thresh = intersections[1]
print('Training {} ({} samples), threshold was updated to {}'.format(self.name, nbSamples, self.thresh))
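        # Derivation note (exposition only): the quadratic above comes from
        # equating the two Gaussian class likelihoods N(x; mean1, var1) and
        # N(x; mean2, var2) and taking logs, which gives
        #   x^2 * (1/var1 - 1/var2)
        #     + 2x * (mean2/var2 - mean1/var1)
        #     + mean1^2/var1 - mean2^2/var2 - log(var2/var1) = 0
        # whose two roots are the decision boundaries; the code keeps the
        # root lying on the high-probability side of the target class.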
| [
"[email protected]"
] | |
2420836ad929495008eab115570e2265b2d7c035 | 7001344d76da760d24852ceadf0d2da18aa45a4d | /Algorithm/SVD/SVD_Anonymous_Microsoft_Web_Data/SVD_Anonymous_Microsoft_Web_Data_FromScratch.py | cb9b37d119d37ef8850bd2c28c1d8f6bd5e1d958 | [] | no_license | JimXiongGM/MachineLearningPractice | e1038df38c740219562bca366472d67e898bdae0 | 9e76066303e317085a22d91785948d6ce12bac6d | refs/heads/master | 2020-04-30T15:37:42.054069 | 2019-03-20T13:03:31 | 2019-03-20T13:03:58 | 176,925,255 | 0 | 1 | null | 2019-03-21T10:47:57 | 2019-03-21T10:47:55 | null | UTF-8 | Python | false | false | 8,787 | py | ## Automobile From Scratch Version
#
# Author: David Lee
# Create Date: 2018/11/10
#
# PS. Recommendation System
import numpy as np
import pandas as pd # Read csv
from scipy.sparse.linalg import svds # using ARPACK as an eigensolver
from scipy.sparse import csc_matrix
### Recommendation System
# Item-based recommendation engine
## Similarity Measures
# These metrics assume the data is in column vectors
def euclidianDistanceSimilarity(A, B):
return 1.0/(1.0 + np.linalg.norm(A - B))
def pearsonCorrelationSimilarity(A, B):
if len(A) < 3: return 1
else: return 0.5 + 0.5 * np.corrcoef(A, B, rowvar=0)[0][1]
def cosineSimilarity(A, B):
num = float(A.T*B)
denom = np.linalg.norm(A) * np.linalg.norm(B)
return 0.5 + 0.5 * (num/denom)
######
def standEstimation(dataMat, user, similarityMeasurement, item):
n_items = np.shape(dataMat)[1]
simTotal = 0; ratSimTotal = 0
for j in range(n_items):
userRating = dataMat[user, j]
if userRating == 0 or j == item: continue
overLap = np.nonzero(np.logical_and(dataMat[:, item].A > 0, dataMat[:, j].A > 0))[0]
if len(overLap) == 0: similarity = 0
else: similarity = similarityMeasurement(dataMat[overLap, item], dataMat[overLap, j])
# print("The %d and %d similarity is: %f" % (item, j, similarity))
simTotal += similarity
ratSimTotal += similarity * userRating
if simTotal == 0: return 0
else: return ratSimTotal / simTotal
def svdEstimation(dataMat, user, similarityMeasurement, item, xformedItems):
n_items = np.shape(dataMat)[1]
simTotal = 0; ratSimTotal = 0
for j in range(n_items):
userRating = dataMat[user, j]
if userRating == 0 or j == item: continue
similarity = similarityMeasurement(xformedItems[item, :].T, xformedItems[j, :].T)
# print("The %d and %d similarity is: %f" % (item, j, similarity))
simTotal += similarity
ratSimTotal += similarity * userRating
if simTotal == 0: return 0
else: return ratSimTotal / simTotal
def recommend(dataMat, user, N=None, simMeas=cosineSimilarity, estMethod='svdEstimation'):
unratedItems = np.nonzero(dataMat[user, :].A == 0)[1]
if len(unratedItems) == 0: return "You rated everything"
itemScores = []
if estMethod == 'svdEstimation':
# SVD Calculations
U, Sigma, VT = np.linalg.svd(dataMat, full_matrices=False)
U_new, Sigma_new, VT_new = keepSingularValue(U, Sigma, VT)
diagonal_mat = np.mat(np.eye(len(Sigma_new)) * Sigma_new) # Create diagonal matrix
xformedItems = dataMat.T * U_new * diagonal_mat.I # Create transformed items
for item in unratedItems:
if estMethod == 'svdEstimation':
estimatedScore = svdEstimation(dataMat, user, simMeas, item, xformedItems)
elif estMethod == 'standEstimation':
estimatedScore = standEstimation(dataMat, user, simMeas, item)
itemScores.append((item, estimatedScore))
recommendations = sorted(itemScores, key=lambda jj: jj[1], reverse=True)
if not N:
# Return all recommendation
return recommendations
else:
return recommendations[:N]
########
def loadExData1_1():
matrix = [[1, 1, 1, 0, 0],
[2, 2, 2, 0, 0],
[1, 1, 1, 0, 0],
[5, 5, 5, 0, 0],
[1, 1, 0, 2, 2],
[0, 0, 0, 3, 3],
[0, 0, 0, 1, 1]]
return np.mat(matrix)
def loadExData1_2():
matrix = [[4, 4, 0, 2, 2],
[4, 0, 0, 3, 3],
[4, 0, 0, 1, 1],
[1, 1, 1, 2, 0],
[2, 2, 2, 0, 0],
[1, 1, 1, 0, 0],
[5, 5, 5, 0, 0]]
return np.mat(matrix)
def textbook_example():
print("\n=== Standard Estimation ===\n")
## Machine Learning in Action Example
ex_data = loadExData1_1()
U, Sigma, VT = np.linalg.svd(ex_data) # This will consume all your RAM
print('Sigma:', Sigma)
# Drop the last two value since they're so small to ignore
Sig3 = np.mat([[Sigma[0], 0, 0],
[0, Sigma[1], 0],
[0, 0, Sigma[2]]])
# Reconstruct an apprximation of the original matrix
approx_mat = U[:, :3] * Sig3 * VT[:3, :]
print('Original matrix\n', ex_data)
print('Apprximate matrix by SVD\n (round to 2 decimals)', np.round(approx_mat, decimals=2))
# Similarity test
# print(euclidianDistanceSimilarity(approx_mat[:, 0], approx_mat[:, 4]))
# print(euclidianDistanceSimilarity(approx_mat[:, 0], approx_mat[:, 0]))
# print(pearsonCorrelationSimilarity(approx_mat[:, 0], approx_mat[:, 4]))
# print(pearsonCorrelationSimilarity(approx_mat[:, 0], approx_mat[:, 0]))
# print(cosineSimilarity(approx_mat[:, 0], approx_mat[:, 4]))
# print(cosineSimilarity(approx_mat[:, 0], approx_mat[:, 0]))
ex_data2 = loadExData1_2()
# Recommend user 2
print("Eucilidian similarity:", recommend(ex_data2, 2, N=3, simMeas=euclidianDistanceSimilarity, estMethod='standEstimation'))
print("Pearson similarity:", recommend(ex_data2, 2, N=3, simMeas=pearsonCorrelationSimilarity, estMethod='standEstimation'))
print("Cos similarity:", recommend(ex_data2, 2, N=3, simMeas=cosineSimilarity, estMethod='standEstimation'))
def loadExData2():
matrix = [[2, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 4, 0],
[3, 3, 4, 0, 3, 0, 0, 2, 2, 0, 0],
[5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 5, 0, 0, 5, 0],
[4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 5],
[0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 4],
[0, 0, 0, 0, 0, 0, 5, 0, 0, 5, 0],
[0, 0, 0, 3, 0, 0, 0, 0, 4, 5, 0],
[1, 1, 2, 1, 1, 2, 1, 0, 4, 5, 0]]
return np.mat(matrix)
def textbook_example2():
print("\n=== SVD Estimation ===\n")
data = loadExData2()
U, Sigma, VT = np.linalg.svd(data)
U_new, Sigma_new, VT_new = keepSingularValue(U, Sigma, VT, energy=0.9)
print('90%% of the energy expressed in the matrix is the first %d sigma' % len(Sigma_new))
# Recommend user 1
print("Eucilidian similarity:", recommend(data, 1, N=3, simMeas=euclidianDistanceSimilarity, estMethod='svdEstimation'))
print("Pearson similarity:", recommend(data, 1, N=3, simMeas=pearsonCorrelationSimilarity, estMethod='svdEstimation'))
print("Cos similarity:", recommend(data, 1, N=3, simMeas=cosineSimilarity, estMethod='svdEstimation'))
def loadWebData():
pageId = []
description = []
url = []
with open('Datasets/anonymous-msweb.csv', 'r') as webdata_file:
lines = webdata_file.read().splitlines()
for line in lines:
items = line.split(',')
if items[0] == 'A':
pageId.append('X' + str(items[1]))
description.append(items[3])
url.append(items[4])
webdata_table = pd.DataFrame({'description': description, 'url': url}, index=pageId)
return webdata_table
def loadRatingsMatrix():
# This is preprocessed data by using R
ratings_matrix = pd.read_csv('Datasets/MS_ratings_matrix.csv', index_col=0) # the first column is index
return ratings_matrix
# Keep default 90% of the energy expressed in the matrix
def keepSingularValue(U, Sigma, VT, energy=0.9):
target = sum(Sigma**2) * energy
temp_sum = 0
    for i, sigma_squared in enumerate(Sigma**2):
        temp_sum += sigma_squared
        if temp_sum >= target:
            break
    # Keep i+1 values so that the singular value which pushes the
    # cumulative energy past the target is included.
    return U[:, :i + 1], Sigma[:i + 1], VT[:i + 1, :]
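# Worked example (exposition only): Sigma = [10, 5, 3, 1] gives squared
# energies [100, 25, 9, 1], total 135. With energy=0.9 the target is
# 121.5; the cumulative sum reaches 125 at the second value, so the first
# two singular values are kept (125/135, about 93% of the energy).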
def main():
textbook_example()
textbook_example2()
print("\n=== Anonymous Microsoft Web Data ===\n")
## Anonymous Microsoft Web Data
# Load Data
matrix = loadRatingsMatrix()
data = np.mat(matrix)
# Truncated SVD
U, Sigma, VT = np.linalg.svd(data, full_matrices=False) # Compute the entire matrix will consume all your RAM
#U, Sigma, VT = svds(csc_matrix(data, dtype=float), k = 165)
U_new, Sigma_new, VT_new = keepSingularValue(U, Sigma, VT, energy=0.9)
print('90%% of the energy expressed in the matrix is the first %d sigma' % len(Sigma_new))
table = loadWebData()
user = 100
N = 5
result = recommend(data, user, N, simMeas=pearsonCorrelationSimilarity, estMethod='svdEstimation')
print(result)
print('For user %s with the top %d recommendation' % (matrix.index[user], N))
ids = []
for item, score in result:
pageId = matrix.columns[item]
ids.append(pageId)
print('We recommend %s (recommendation score: %s)' % (pageId, score))
print(table.loc[pageId])
print("Recommendation Table\n", table.loc[ids,])
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
bba6df8d6d59232a1b97cbb8eea71d9b6a69427e | fb18183bb84f20b47dd75b158a48cbee1e68169b | /python/menu.py | 54ab403b6e2d009a8af647ef1962eae4859910ac | [] | no_license | jasolson/development | f5cef579c2397d99636d1d117c39f9cb9adc1615 | 83a9a1accdea84ac3a6c5f0c457a11fa795c9706 | refs/heads/master | 2021-01-19T08:50:28.862441 | 2013-10-26T22:16:37 | 2013-10-26T22:16:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,502 | py | import sys
import aws_ec2_functions as ec2
def main():
print "Welcome to AWS!"
while(True):
function = mainMenu()
if(function == 'listec2'):
ec2.listEC2Instances()
elif(function == 'listebs'):
ec2.listEBSVolumes()
elif(function == 'listzones'):
ec2.listEC2Zones()
elif(function == 'deleteebs'):
deleteVol()
elif(function == 'exit'):
break
def mainMenu():
while(True):
print "Please select an option\n"
print "1) List instances"
print "2) List EBS instances"
print "3) List EC2 Zones"
print "4) Delete EBS instance by id"
print "0) Exit"
try:
selection = input("Your selection:")
except SyntaxError:
continue
# print "You selected:{0}".format(selection)
if(str(selection) == '1'):
return 'listec2'
elif(str(selection) == '2'):
return 'listebs'
elif(str(selection) == '3'):
return 'listzones'
elif(str(selection) == '4'):
return 'deleteebs'
elif(str(selection) == '0'):
return 'exit'
else:
print "Select one of the options, i.e. \"1\""
def deleteVol():
while(True):
print "Select from this list of volumes..."
vols = ec2.getEBSVolumes()
count = 0
for vol in vols:
count = count +1
print count,") Volume ID:",vol.id," Volume Size:",vol.size
print "0) Exit(Do nothing)"
try:
selection = raw_input("What would you like to select:")
if(str(selection) == '0'):
return
else:
ec2.deleteEBSVolume(vols[int(selection)-1])
return
        except (SyntaxError, ValueError, IndexError):
continue
main()
| [
"[email protected]"
] | |
59e85beb15aa658989b3e8f94c10d8171a846512 | 5fbfdc83f2577f42b50656e60e503d08c0278f2d | /4palindromes.py | f2828ae3053b31bbf291cea2128f3777aec09c0f | [] | no_license | jchen1352/project-euler | 25089a4a751bb47417975bf23dc562d2aa977b49 | 6f5a15aaa1a4879198f5ba9946c78291f61b0084 | refs/heads/master | 2021-01-19T09:41:56.495030 | 2015-09-06T17:43:13 | 2015-09-06T17:43:13 | 42,011,751 | 0 | 0 | null | 2015-09-06T17:42:10 | 2015-09-06T17:38:28 | Python | UTF-8 | Python | false | false | 800 | py | def factor(number):
factors = []
factor = 2
while number > 1:
if number % factor == 0:
factors.append(factor)
number = number / factor
factor = 2
else:
factor += 1
return factors
palin_list = []
for x in range(10):
for y in range(10):
for z in range(10):
palin_list.append(100001*x+10010*y+1100*z)
palin_list.append(10001*x+1010*y+100*z)
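# Exposition note: 100001*x + 10010*y + 1100*z is the six-digit palindrome
# with digits x y z z y x, and 10001*x + 1010*y + 100*z the five-digit
# palindrome x y z y x. Products of two numbers in range(900, 1000) are
# always six digits, so only the first family can ever match below.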
'''
for palin in palin_list:
palin_factors = factor(palin)
if len(palin_factors) == 2:
if len(str(palin_factors[0])) == 3 and len(str(palin_factors[1])) == 3:
print(palin, palin_factors)
'''
for x in range(900,1000):
for y in range(900,1000):
xy = x*y
if xy in palin_list:
print(xy)
| [
"[email protected]"
] | |
7a48106962ab4daa026afd3b6f4ebe6826ba7d85 | c73c158654a2e76dbc63f34a47b0c77a4e98b5a8 | /rexparse/__init__.py | 2a7568b6c2e4463fcd091e598a2cb47e0fdc177d | [] | no_license | gregbanks/rexparse | 2b61ebfe2638a446658965934325b7558b9bc72e | cd8b85df3219336c10bfb623dfc78b06785be593 | refs/heads/master | 2021-01-01T18:17:55.098929 | 2013-11-12T18:12:37 | 2013-11-12T18:12:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,021 | py | import os
import re
from functools import wraps
from distutils.errors import DistutilsSetupError
from rex import Requirements, Requirement
from _version import __version__
__all__ = ['Requirements', 'Requirement', 'get_version']
def get_version(path):
version = None
try:
version = re.search(r'__version__\s*=\s*[\'"]([\d.]+)[\'"]',
open(path).read()).group(1)
except (IOError, AttributeError):
pass
return version
def set_dist_attr(dist, attr, val):
if not isinstance(val, Requirements):
if not isinstance(val, basestring):
raise DistutilsSetupError('%r must be a string, got %r' %
(attr, type(val)))
if not os.path.isfile(val):
raise DistutilsSetupError('%s does not appear to be a file' %
(val))
if attr in ['install_requires', 'tests_require', 'dependency_links']:
if not isinstance(val, Requirements):
val = Requirements(val, parse=True)
setattr(dist, attr, getattr(val, attr))
elif attr == 'version':
setattr(dist, attr, get_version(val))
else:
raise DistutilsSetupError('unknown attr %s' % (attr))
def rexparse(dist, attr, val):
if not isinstance(val, dict):
raise DistutilsSetupError('%s must be a dict with keys '
'"requirements_path" and (optionally) '
'"version_path"' % (attr))
valid_args = set(['requirements_path', 'version_path'])
if len(set(val.keys()) - valid_args) > 0:
raise DistutilsSetupError('got unknown arguments %r' %
(set(val.keys()) - valid_args))
if 'version_path' in val:
set_dist_attr(dist, 'version', val['version_path'])
reqs = Requirements(val['requirements_path'], parse=True)
for key in ['install_requires', 'tests_require', 'dependency_links']:
set_dist_attr(dist, key, reqs)
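# --- Added usage sketch (not part of the original file) ------------------
# rexparse() above follows the distutils setup-keyword convention
# (dist, attr, value).  Assuming the package registers it under a
# 'rexparse' keyword via a distutils.setup_keywords entry point, a
# consuming setup.py might look like this (paths are placeholders):
#
# from setuptools import setup
#
# setup(
#     name='myproject',
#     setup_requires=['rexparse'],
#     rexparse={
#         'requirements_path': 'requirements.txt',
#         'version_path': 'myproject/_version.py',
#     },
# )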
| [
"[email protected]"
] | |
6a56579dde04c041a43a789d84507459c53c3aac | 432ed7648ad8b634e2e03d3c721ce9b96b79c25c | /simpledot.py | 63ab3de3db226e12b447c109648ffc6f12dfa507 | [] | no_license | leeseunghow/cps-IOT-project | ded7026b0b4585165b328cca5e089e275eec7856 | eb35fe26081ab528000b8301cfdcdd3979c1dfe9 | refs/heads/master | 2022-11-05T22:01:57.669797 | 2020-06-21T10:33:07 | 2020-06-21T10:33:07 | 269,158,111 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,229 | py | latlon = [[37.550997 , 126.922986],
[37.548769, 126.918073],
[37.551596, 126.924945],
[37.554642, 126.921294],
[37.548534, 126.913838],
[37.55101598, 126.98866975],
[37.56504849, 127.00790431],
[37.56078470, 127.00615551],
[37.56260513, 126.98268300],
[37.56894269, 127.00058382],
[37.502764, 126.928561],
[37.510359, 126.926577],
[37.508868, 126.962832],
[37.482276, 126.965329],
[37.522440, 126.924428],
[37.497784, 127.027488],
[37.498431, 127.036973],
[37.510731, 127.098378],
[37.481164, 127.121328],
[37.462367, 127.036932],
[37.579293, 126.951680],
[37.536174, 126.998101],
[37.504387, 126.861700],
[37.550512, 127.142187],
[37.471272, 127.029326],
[37.601556, 126.929571],
[37.595284, 126.915647],
[37.530422, 126.882938],
[37.525064, 126.903054],
[37.468586, 126.953032]
]
import folium
m = folium.Map(
location = [37.52206126, 126.99322917],
zoom_start = 11.6,
tiles = 'Cartodb Positron'
)
for i in range(len(latlon)):
folium.Circle(
location = latlon[i],
radius = 350,
color = 'blue',
        fill = True,            # folium expects a boolean here
        fill_color = 'crimson',
).add_to(m)
m.save('simpledots.html')
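# --- Added note ---------------------------------------------------------
# folium.Circle interprets radius in metres, so the 350 above is a fixed
# ground distance at every zoom level.  For a constant on-screen size,
# folium.CircleMarker takes its radius in pixels instead:
#
# folium.CircleMarker(
#     location = latlon[i],
#     radius = 5,
#     color = 'blue',
#     fill = True,
#     fill_color = 'crimson',
# ).add_to(m)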
| [
"[email protected]"
] | |
814593c51e85d2d29852853aa98aa56328942c71 | fc7d5eb1cc94e7b7ed0e4e0bdb68268cc2016dcb | /blog/migrations/0004_auto_20190910_1928.py | 8818d917e764965f5e4682457cd2bc2c769ed235 | [
"MIT"
] | permissive | danceandfight/sensitive-blog-dvmn | 65847a6796f393547976c051f3e7231f409295ed | 1e93bace2c548644723de0ec17feab064e8f1643 | refs/heads/master | 2022-11-29T02:22:16.430997 | 2020-01-08T17:34:24 | 2020-01-08T17:34:24 | 224,164,480 | 0 | 1 | MIT | 2022-11-22T04:18:51 | 2019-11-26T10:29:03 | HTML | UTF-8 | Python | false | false | 1,505 | py | # Generated by Django 2.2.5 on 2019-09-10 16:28
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('blog', '0003_post_published_at'),
]
operations = [
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=20, verbose_name='Тег')),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.TextField(verbose_name='Текст комментария')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Автор')),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Post', verbose_name='Пост, к которому написан')),
],
),
migrations.AddField(
model_name='post',
name='tags',
field=models.ManyToManyField(related_name='posts', to='blog.Tag', verbose_name='Теги поста'),
),
]
| [
"[email protected]"
] | |
4a7e666225218172bb22d81d113bb13f1659271d | c22db8895c04cf8d34f52a6ba687153ad01886a4 | /Chapter_5.2_Algorithms/Exercises/CountDown.py | 1fdbac84ba0484cc9420d7aa9c3073183987fb45 | [] | no_license | chyjuls/Computing-in-Python-IV-Objects-Algorithms-GTx-CS1301xIV-Exercises | a8b20e92a6d83a53f99fecc44a82ac02fb3dbec1 | aac897f86ea5537e4038f05f83f62f314fd8f837 | refs/heads/master | 2022-12-30T17:27:07.093956 | 2020-10-15T20:07:36 | 2020-10-15T20:07:36 | 420,781,183 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,521 | py | #We've written a function below called count_down(). This
#function takes an int parameter, start, and prints every
#number from that start to 0. The way we've written it uses
#recursion. Below that function, write a function that does
#the exact same thing, but do not use recursion.
#
#The purpose of this exercise is for you to recognize some
#example instances in which you can use recursion, and what
#differences can be seen in the actual code.
#
#Make sure to actually print 0 as the last number!
def count_down(start):
#If we've reached 0 already, print 0 but do not call
#another copy
if start <= 0:
print(start)
#If we haven't reached 0 yet, print the current number,
#then call count_down with the current number minus 1.
else:
# count_down(start - 1) # Head recursion
print(start)
count_down(start - 1) # Tail recursion
#Do not modify the code above.
#Fill in the function below to do the same as the function
#above, but without recursion. You could use for loops,
#while loops, or some other approach.
def count_down2(start):
#Add your code here!
for count in range(start, -1, -1):
print(count)
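#An equivalent while-loop version (added illustration; the exercise
#accepts either approach):
#
#def count_down3(start):
#    while start >= 0:
#        print(start)
#        start = start - 1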
#Below are some lines of code that will test your function.
#You can change the value of the variable(s) to test your
#function with different inputs.
#
#If your function works correctly, this will originally
#print: 5, 4, 3, 2, 1, 0, each on their own line.
count_down2(5)
print('--------------------------')
count_down(5)
| [
"[email protected]"
] | |
0e0e2fab975591c749f87685e0fb40a01e7cd269 | 67a0b708ee86c23102dcb03b90bcfed3e4e4181b | /yzrpc/utils/__init__.py | 12b77131ae05834a4fbb68df549bc5e49f8e9ecc | [
"Apache-2.0"
] | permissive | limi444/yz-rpc-1 | ebfdd841abb4a08d1b4e179e4d7d9f6bd2bb2f3c | f3b6cb76dab72e1763d759080854c11aa6ade872 | refs/heads/master | 2023-04-11T08:01:49.074491 | 2021-04-27T07:52:58 | 2021-04-27T07:52:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | #!/usr/bin/python3.7+
# -*- coding:utf-8 -*-
"""
@auth: cml
@date: 2021/2/23
@desc: ...
"""
from .type_transform import *
from ._path import *
from ._import import *
from ._autoreload import *
# from ._logger import *
# from ._inspect import *
# from ._singleton import *
| [
"[email protected]"
] | |
712cb2fc42c9ad1f000da08ca648de2bab63137a | e8e77fbd3b128d5691db0dd3fbafaebddcca2f20 | /mySpider_HSN/mySpider_HSN/items.py | 8a0a5324cac66e6f7cd32c0912b37b36d2d78dea | [] | no_license | hashmeta/mySpider_HSN | f166f814209a48fc0aa636da57c91aecd29fe80e | 2d9a5803469666b5c17472ae3ab334abcf9b2970 | refs/heads/master | 2022-02-03T23:55:21.717763 | 2019-08-08T22:04:27 | 2019-08-08T22:04:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
from scrapy.item import Item, Field
class MyspiderHsnItem(Item):
# define the fields for your item here like:
    # name = Field()
    HSN_Code = Field()
    Desc = Field()
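# --- Added usage sketch (hypothetical values) ----------------------------
# Inside a Spider callback this item would be filled and yielded, e.g.:
#
# item = MyspiderHsnItem()
# item['HSN_Code'] = '0101'
# item['Desc'] = 'Live horses, asses, mules and hinnies'
# yield item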
| [
"[email protected]"
] | |
87f3d9a51925ed0fd72b5aba493e1d775bb21891 | ed23c841f17a5c2eee2f3707f35d8641aa1ffc89 | /366. Fibonacci.py | 4105ce4063a5fa26a4f5e834580a652369d4754b | [
"MIT"
] | permissive | bicongwang/lintcode-python3 | 077f5fe2c4c354fb3e0a294abea1bc94b7e05275 | 23ba29677b4d4b0a90014dc3a92ab7fae5a4d980 | refs/heads/master | 2020-03-10T18:26:29.680084 | 2018-05-05T17:31:37 | 2018-05-05T17:31:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | # Solution 1: Simple
#
# Comment: There is no need to store the whole sequence in an array;
# that would only add O(n) space for no benefit
# Space complexity: O(1)
# Time complexity: O(n)
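# Example (added): with this file's convention f(1) = 0 and f(2) = 1,
# fibonacci(6) returns 5 (sequence 0, 1, 1, 2, 3, 5).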
class Solution:
"""
@param n: an integer
    @return: an integer f(n)
"""
def fibonacci(self, n):
i_1, i_2 = 0, 1
        for _ in range(n - 1):
i_1, i_2 = i_2, i_1 + i_2
        return i_1
| [
"[email protected]"
] | |
e4c1bc4025cbb82d36058bfe6430850c83afe832 | f94463add4743b33ca3cb405309eb2cb939165cb | /accounts/migrations/0001_initial.py | bef9e4898162a3087d999998704acf7ff583c07f | [] | no_license | JaaMata/Book-Ecommers | e813d490bcd7d6aed3c0aa7dbb38cd21eef21722 | 839fe6231d551f09aa2785b4bbc79809d8bc5d70 | refs/heads/main | 2023-07-16T03:12:30.991192 | 2021-08-30T14:09:17 | 2021-08-30T14:09:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 956 | py | # Generated by Django 3.2.5 on 2021-07-21 18:51
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('email', models.EmailField(max_length=255, unique=True)),
('active', models.BooleanField(default=True)),
('staff', models.BooleanField(default=False)),
('admin', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
),
]
| [
"[email protected]"
] | |
8d2e68641e3022d1d0d4e3a364c9138e8574d8c2 | 746e766a5bffb98a1f353f107f370cfc37fa95f4 | /dc.py | e9b8cf399420643cf6732c684d46a93158c1fcbf | [] | no_license | girishjakkam/python-repo | eb955a0a9e4b8375469f4a04ee9558a8020e295d | 7d8cacb1df5d4f4751ec2a46f9d68744b604d69b | refs/heads/master | 2021-01-21T15:00:08.331719 | 2016-06-09T16:40:53 | 2016-06-09T16:40:53 | 58,341,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py |
name = raw_input("Enter file:")
handle = open(name)
lst=list()
z=list()
counts=dict()
for line in handle:
line.split()
if line.startswith('From')==True:
pos=line.find('From')
pos1=line.find('@')
lst.append(line[pos+5:pos1+9])
for name in lst:
counts[name]=counts.get(name,0)+1
bigword=None
bigcount=None
for word,count in counts.items():
if bigcount is None or count>bigcount:
bigword=word
bigcount=count
print bigword,bigcount
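# Sample run (added; assumes py4e's mbox-short.txt as input):
#   Enter file:mbox-short.txt
#   [email protected] 5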
| [
"[email protected]"
] | |
ff73c351f221334739ef870b16ae97803f134220 | 23ea05ebeb26c4d5672bd1ddbfd7508196bddf99 | /meditation_tracker/meditation_tracker/meditation_tracker/urls.py | a3149c886dbaa40423863f7f1f67b88fe91b767a | [] | no_license | nucly/python | 7547c7a7bc5f90fe777b607cdac799db4e7e2d89 | dae2f1b649b0933e26a6c1826e0cdf54284eec20 | refs/heads/main | 2023-02-26T09:26:09.692651 | 2021-01-29T00:59:40 | 2021-01-29T00:59:40 | 325,858,015 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 819 | py | """meditation_tracker URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('log/', include('meditation_log.urls')),
path('admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
9ee3976091d42ea805caecd71de5304d1f0f6bef | e9c82c51f8f3a3eaa828c35d6254f82daa96bf88 | /UTKFace.py | f3d81c0341376a1e74b8c05ce519a86cf8f9d810 | [] | no_license | rampant-lancer/FaceDL | 8bb6f4a1a61801ffe74da89f204cbfecece5b4f6 | a2f7e1bb026ac4c4624e0672a7ff769c6a2514a9 | refs/heads/master | 2020-03-29T11:23:43.042788 | 2018-10-06T16:06:13 | 2018-10-06T16:06:13 | 149,850,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,602 | py | import os
import tarfile
import numpy as np
import cv2
import requests
class UTKFace(object):
def __init__(self):
'''
To initialize the self.req_file variable with the zipped file and also to initialize
self.data_dir where extracted data is stored
'''
self.req_file = 'downloaded/UTKFace.tar.gz'
self.data_dir = 'downloaded/UTKFace'
self.base_dir = 'downloaded'
self.file_id = '0BxYys69jI14kYVM3aVhKS1VhRUk'
curr_dir = os.listdir()
if not 'downloaded' in curr_dir:
os.mkdir('downloaded')
def download_file_from_google_drive(self, id, destination):
'''
        The implementation of this function and its helpers is taken from:
        https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url
        It takes the file id of a shared Google Drive file and downloads it using the requests package.
'''
URL = "https://docs.google.com/uc?export=download"
session = requests.Session()
response = session.get(URL, params = { 'id' : id }, stream = True)
token = self.get_confirm_token(response)
if token:
params = { 'id' : id, 'confirm' : token }
response = session.get(URL, params = params, stream = True)
self.save_response_content(response, destination)
def get_confirm_token(self, response):
'''This is the helper function of the above download_file_from_google_drive function'''
for key, value in response.cookies.items():
if key.startswith('download_warning'):
return value
return None
def save_response_content(self, response, destination):
'''This is the helper function of the above download_file_from_google_drive function'''
CHUNK_SIZE = 32768
print('Processing...')
with open(destination, 'wb') as f:
for chunk in response.iter_content(CHUNK_SIZE):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
def get_data(self):
'''
This function reads the self.data_dir i.e "UTKFace" directory
and convert those images into numpy array and returns them as X and Y
@return1: X: Image data in numpy array format with dtype as np.uint8
X.shape = [None, 200, 200, 3]
@return2: Y: Class label in numpy array format with dtype as np.int32
Y.shape = [None, ]
        Reads two attributes that load_data() sets beforehand:
        self.countOfImages: number of images to convert; a positive
                 integer (handy for quick debugging runs) or -1 for all images
        self.mode: index of the label to take from the filename:
                 0 ('age'), 1 ('gender') or 2 ('race')
'''
X = []
Y = []
all_files = os.listdir(self.data_dir)
_range = len(all_files)
if self.countOfImages == -1 :
sample_size = _range
else:
sample_size = self.countOfImages
# Making sure that the selection is random.
random_indices = np.random.choice(range(_range), sample_size, replace=False)
for indx in random_indices:
fpath = self.data_dir + '/' + all_files[indx]
img = cv2.imread(fpath)
label = int(all_files[indx].split('_')[self.mode])
X.append(img)
Y.append(label)
return np.array(X), np.array(Y)
def load_data(self, countOfImages, mode):
'''
This is the main function of the class this function will extract images and labels
from the directory specified.
'''
all_files = os.listdir(self.base_dir)
self.countOfImages = countOfImages
        # Accept either a column index (0/1/2) or its name for the label.
        mode_map = {'age': 0, 'gender': 1, 'race': 2}
        self.mode = mode_map.get(mode, mode)
if not self.req_file.split('/')[1] in all_files:
            # The archive is not yet in the downloaded directory, so
            # fetch it from Google Drive first.
self.download_file_from_google_drive(self.file_id, self.req_file)
if not self.data_dir.split('/')[1] in all_files:
tar = tarfile.open(self.req_file, "r:gz")
tar.extractall(self.base_dir)
tar.close()
return self.get_data()
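# --- Added usage sketch (not part of the original file) ------------------
# loader = UTKFace()
# X, Y = loader.load_data(countOfImages=100, mode='gender')
# print(X.shape, Y.shape)   # expected: (100, 200, 200, 3) (100,)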
| [
"[email protected]"
] | |
0a013dbeac63b6fe9a9a83342f2a4d953a07c59f | 8f848713ace51c3d7cb0491429f4abe808379431 | /KRR/WME.py | 363c602453c970c3714ed041bc8a24345e1c4c66 | [] | no_license | AshwiniTagadghar1/ReteNet | 20e1d7dcf74ffae71f292bda4b6374cf17d07c7c | 48d6c098dc1702e2ff1f5f38e1d17766f41b735a | refs/heads/master | 2022-11-17T17:43:42.075362 | 2020-07-11T07:50:28 | 2020-07-11T07:50:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | class WME:
def __init__(self, className, attribute_value_pairs):
        self.className = className
        self.attribute_value_pairs = attribute_value_pairs
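# Example (added): in a Rete network, a working-memory element built with
# this class might look like WME('Person', {'name': 'alice', 'age': 30})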
| [
"[email protected]"
] | |
69fa7989cab255908dc5566d8284bd79b02113c2 | 2d96739882edc57788b7876255947cad184f34e4 | /SMS_Packages/Tech/work.py | 76901374ed7d0cbf472d8f9179cac3a40d54a9a7 | [] | no_license | akash639743/Python_Assignment | afdd8ac126473fe9d2ef1a0f983376b17b5c4b92 | c04242af0d31037fa536655b1319acb1fb879e7b | refs/heads/master | 2023-07-18T04:55:48.333488 | 2021-08-27T07:41:39 | 2021-08-27T07:41:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | py | # import sys
# sys.path.append('C:/Users/mande/Desktop/New folder/SMS/User')
# import request
from User import request
def tech_work():
print("Tech Package --> work Module")
print("tech_work Function")
print()
request.user_request()
| [
"[email protected]"
] | |
a3fa89807dd7197e628cef4a3dc812403a098dec | 47da5e297ca31b69387817dfaa920b47cc8ff8ca | /Basics/Colt_Data_structure/Sorting_Algorithms/Selection_Sort/Selection_Sort_Test.py | 81aa410e6c94609843affb81c0110b8381cb3ce2 | [] | no_license | shafirpl/InterView_Prep | 546623b165903c2b3a1ffde55c65196f5e56f298 | f0c1f7b581210da120b8325a4846695afc1b9da7 | refs/heads/master | 2020-12-18T21:35:41.680029 | 2020-05-24T06:26:17 | 2020-05-24T06:26:17 | 235,527,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 646 | py | import unittest
from Selection_Sort import selection_sort
class SelectionSortTest(unittest.TestCase):
    def test_selection_sort(self):
self.assertEqual(selection_sort([8, 1, 2, 3, 4, 5, 6, 7]), [1, 2, 3, 4, 5, 6, 7, 8])
self.assertEqual(selection_sort([10, 5, 90, 0, 10]), [0, 5, 10, 10, 90])
self.assertEqual(selection_sort([64, 34, 25, 22, 11, 90, 12]), [
11, 12, 22, 25, 34, 64, 90])
self.assertEqual(selection_sort([8, 90, 4, 1, 5, 5]), [1, 4, 5, 5, 8, 90])
self.assertEqual(selection_sort(
[0,1,5,2]), [0,1,2,5])
if __name__ == "__main__":
unittest.main()
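# --- Added sketch (not part of this file) --------------------------------
# The Selection_Sort module under test is not shown here; a minimal
# in-place implementation consistent with these assertions would be:
#
# def selection_sort(arr):
#     for i in range(len(arr)):
#         min_idx = i
#         for j in range(i + 1, len(arr)):
#             if arr[j] < arr[min_idx]:
#                 min_idx = j
#         arr[i], arr[min_idx] = arr[min_idx], arr[i]
#     return arr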
| [
"[email protected]"
] | |
6ce93375ceff30229cb3953ab10690c269168af9 | c51f02dc9bf560ce419e847c0a4c81cc507e6425 | /users/migrations/0005_alter_profile_user.py | 1ea9dc5aebaede819586885bd0e50f61ba745eda | [] | no_license | Emmanuelbreezy/votingapp | 0bbd214fb67d3073ee5b0460fb008ceb9266d754 | 3880b8df0fe5702da4a7ffebb80ca4b5f9dfcc87 | refs/heads/main | 2023-06-30T06:03:02.442261 | 2021-08-04T20:37:34 | 2021-08-04T20:37:34 | 392,818,312 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | # Generated by Django 3.2.5 on 2021-07-21 00:32
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('users', '0004_alter_profile_user'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='user',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='userprofile', to=settings.AUTH_USER_MODEL),
),
]
| [
"[email protected]"
] | |
7160dd58ad1a75f527a42d7ae7cd601eb8a90b90 | c28df1b0bf09c1ce1cb6d9de219676827f899e8d | /person/migrations/0014_auto_20201003_1524.py | 3fd510d20ea5ea36dec65400e05c8f6880e99e1c | [] | no_license | frozyum/split-expenses-app-backend | 36fc501c432b6508a7c34bc08b6ab27a6bff245d | 55e23ea222396814a80512df3fab0d5710c88fdf | refs/heads/master | 2023-01-01T08:56:16.145140 | 2020-10-27T09:17:14 | 2020-10-27T09:17:14 | 298,987,935 | 0 | 0 | null | 2020-09-28T16:49:28 | 2020-09-27T08:32:47 | Python | UTF-8 | Python | false | false | 501 | py | # Generated by Django 3.0.8 on 2020-10-03 15:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('group', '0001_initial'),
('person', '0013_auto_20201003_1356'),
]
operations = [
migrations.AlterField(
model_name='person',
name='group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='group.Group'),
),
]
| [
"[email protected]"
] |